| Column        | Type          | Lengths / values |
|---------------|---------------|------------------|
| lang          | stringclasses | 3 values         |
| file_path     | stringlengths | 5 to 150         |
| repo_name     | stringlengths | 6 to 110         |
| commit        | stringlengths | 40 to 40         |
| file_code     | stringlengths | 1.52k to 18.9k   |
| prefix        | stringlengths | 82 to 16.5k      |
| suffix        | stringlengths | 0 to 15.1k       |
| middle        | stringlengths | 121 to 8.18k     |
| strategy      | stringclasses | 8 values         |
| context_items | listlengths   | 0 to 100         |
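Judging from the columns above and the example rows below, each record appears to be a fill-in-the-middle (FIM) code-completion sample: `prefix` + `middle` + `suffix` reassembles `file_code`, `strategy` names how the split was made, and each entry of `context_items` carries a related snippet from the same repository together with `file_path`, `rank`, and `score` metadata. The sketch below shows one way such rows could be loaded and sanity-checked with the Hugging Face `datasets` library; the dataset id and split name are placeholders, not values taken from this card.

```python
from datasets import load_dataset

# Placeholder dataset id and split -- substitute the real ones for this dataset.
ds = load_dataset("user/fim-code-completion", split="train")

row = ds[0]
print(row["lang"], row["repo_name"], row["file_path"], row["strategy"])

# The example rows suggest that prefix + middle + suffix reassembles the file.
assert row["prefix"] + row["middle"] + row["suffix"] == row["file_code"]

# Each context item holds a related snippet plus retrieval metadata.
for item in row["context_items"][:3]:
    print(item["rank"], item["score"], item["file_path"])
    print(item["content"][:120])
```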
Example row 1
lang: Rust
file_path: examples/adc.rs
repo_name: stm32-rs/stm32f072b-disco
commit: 8f9af0195f889c2b326a22f9144cef543d9fce86
#![no_main] #![no_std] #[allow(unused)] use panic_halt; use stm32f072b_disco as board; use board::hal::{adc, prelude::*, serial, stm32}; use cortex_m::{interrupt::Mutex, peripheral::syst::SystClkSource::Core, peripheral::Peripherals}; use cortex_m_rt::{entry, exception}; use core::{cell::RefCell, fmt::Write, ptr}; struct Shared { adc: adc::Adc, temp: adc::VTemp, reference: adc::VRef, tx: serial::Tx<stm32::USART2>, } static SHARED: Mutex<RefCell<Option<Shared>>> = Mutex::new(RefCell::new(None)); fn calculate_temperature(reading: u16) -> i16 { const VDD_CALIB: i32 = 330; const VDD_APPLI: i32 = 300; let cal30 = i32::from(unsafe { ptr::read(0x1FFF_F7B8 as *const u16) }); let cal110 = i32::from(unsafe { ptr::read(0x1FFF_F7C2 as *const u16) }); let mut temperature: i32 = ((i32::from(reading) * VDD_APPLI) / VDD_CALIB) - cal30; temperature *= 110 - 30; temperature /= cal110 - cal30; temperature += 30; temperature as i16 } fn calculate_vdda(reading: u16) -> u16 { let vrefint = u32::from(unsafe { ptr::read(0x1FFF_F7BA as *const u16) }); (3250 * vrefint / u32::from(reading)) as u16 } #[entry] fn main() -> ! { if let (Some(mut p), Some(cp)) = (stm32::Peripherals::take(), Peripherals::take()) { cortex_m::interrupt::free(|cs| { let mut rcc = p.RCC.configure().freeze(&mut p.FLASH); let gpioa = p.GPIOA.split(&mut rcc); let mut syst = cp.SYST; syst.set_clock_source(Core); syst.set_reload(8_000_000 - 1); syst.enable_counter(); syst.enable_interrupt(); let tx = gpioa.pa2.into_alternate_af1(cs); let rx = gpioa.pa15.into_alternate_af1(cs); let (mut tx, _) = serial::Serial::usart2(p.USART2, (tx, rx), 115_200.bps(), &mut rcc).split(); let mut adc = adc::Adc::new(p.ADC, &mut rcc); let mut temp = adc::VTemp::new(); let mut reference = adc::VRef::new(); temp.enable(&mut adc); reference.enable(&mut adc); tx.write_str("\n\rThis ADC example will read various values using the ADC and print them out to the serial terminal\r\n").ok(); *SHARED.borrow(cs).borrow_mut() = Some(Shared { adc, temp, reference, tx, }); }); } loop { continue; } } #[exception] fn SysTick() -> ! { use core::ops::DerefMut; cortex_m::interrupt::free(|cs| { if let Some(ref mut shared) = SHARED.borrow(cs).borrow_mut().deref_mut() { let t: Result<u16, _> = shared.adc.read(&mut shared.temp); if let Ok(t) = t { writeln!(shared.tx, "Temperature {}\r", calculate_temperature(t)).ok(); } else { shared.tx.write_str("Error reading temperature").ok(); } let t: Result<u16, _> = shared.adc.read(&mut shared.reference); if let Ok(t) = t { writeln!(shared.tx, "Vdda {}mV\r", calculate_vdda(t)).ok(); } else { shared.tx.write_str("Error reading Vdda").ok(); } } }); }
#![no_main] #![no_std] #[allow(unused)] use panic_halt; use stm32f072b_disco as board; use board::hal::{adc, prelude::*, serial, stm32}; use cortex_m::{interrupt::Mutex, peripheral::syst::SystClkSource::Core, peripheral::Peripherals}; use cortex_m_rt::{entry, exception}; use core::{cell::RefCell, fmt::Write, ptr}; struct Shared { adc: adc::Adc, temp: adc::VTemp, reference: adc::VRef, tx: serial::Tx<stm32::USART2>, } static SHARED: Mutex<RefCell<Option<Shared>>> = Mutex::new(RefCell::new(None)); fn calculate_temperature(reading: u16) -> i16 { const VDD_CALIB: i32 = 330; const VDD_APPLI: i32 = 300; let cal30 = i32::from(unsafe { ptr::read(0x1FFF_F7B8 as *const u16) }); let cal110 = i32::from(unsafe { ptr::read(0x1FFF_F7C2 as *const u16) }); let mut temperature: i32 = ((i32::from(reading) * VDD_APPLI) / VDD_CALIB) - cal30; temperature *= 110 - 30; temperature /= cal110 - cal30; temperature += 30; temperature as i16 } fn calculate_vdda(reading: u16) -> u16 { let vrefint = u32::from(unsafe { ptr::read(0x1FFF_F7BA as *const u16) }); (3250 * vrefint / u32::from(reading)) as u16 } #[entry] fn main() -> ! { if let (Some(mut p), Some(cp)) = (stm32::Peripherals::take(), Peripherals::take()) { cortex_m::interrupt::free(|cs| { let mut rcc = p.RCC.configure().freeze(&mut p.FLASH); let gpioa = p.GPIOA.split(&mut rcc); let mut syst = cp.SYST; syst.set_clock_source(Core); syst.set_reload(8_000_000 - 1); syst.enable_counter(); syst.enable_interrupt(); let tx = gpioa.pa2.into_alternate_af1(cs); let rx = gpioa.pa15.into_alternate_af1(cs); let (mut tx, _) = serial::Serial::usart2(p.USART2, (tx, rx), 115_200.bps(), &mut rcc).split(); let mut adc = adc::Adc::new(p.ADC, &mut rcc); let mut temp = a
ut shared) = SHARED.borrow(cs).borrow_mut().deref_mut() { let t: Result<u16, _> = shared.adc.read(&mut shared.temp); if let Ok(t) = t { writeln!(shared.tx, "Temperature {}\r", calculate_temperature(t)).ok(); } else { shared.tx.write_str("Error reading temperature").ok(); } let t: Result<u16, _> = shared.adc.read(&mut shared.reference); if let Ok(t) = t { writeln!(shared.tx, "Vdda {}mV\r", calculate_vdda(t)).ok(); } else { shared.tx.write_str("Error reading Vdda").ok(); } } }); }
dc::VTemp::new(); let mut reference = adc::VRef::new(); temp.enable(&mut adc); reference.enable(&mut adc); tx.write_str("\n\rThis ADC example will read various values using the ADC and print them out to the serial terminal\r\n").ok(); *SHARED.borrow(cs).borrow_mut() = Some(Shared { adc, temp, reference, tx, }); }); } loop { continue; } } #[exception] fn SysTick() -> ! { use core::ops::DerefMut; cortex_m::interrupt::free(|cs| { if let Some(ref m
strategy: random
[ { "content": "#[entry]\n\nfn main() -> ! {\n\n if let Some(mut p) = stm32::Peripherals::take() {\n\n cortex_m::interrupt::free(|cs| {\n\n let mut rcc = p.RCC.configure().sysclk(48.mhz()).freeze(&mut p.FLASH);\n\n let gpioa = p.GPIOA.split(&mut rcc);\n\n\n\n // USART1 at PA9 (TX) and PA10 (RX) is connectet to ST-Link (well, not quite)\n\n let tx = gpioa.pa9.into_alternate_af1(cs);\n\n let rx = gpioa.pa10.into_alternate_af1(cs);\n\n\n\n // Set up serial port\n\n let mut serial = Serial::usart1(p.USART1, (tx, rx), 115200.bps(), &mut rcc);\n\n\n\n serial.write_str(\"Type for echo!\\n\\r\").ok();\n\n\n\n loop {\n\n let received = block!(serial.read()).unwrap();\n\n block!(serial.write(received)).ok();\n\n }\n\n });\n\n }\n\n\n\n loop {\n\n continue;\n\n }\n\n}\n", "file_path": "examples/serial_echo.rs", "rank": 3, "score": 70029.57987245858 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n if let (Some(mut p), Some(cp)) = (stm32::Peripherals::take(), cortex_m::Peripherals::take()) {\n\n cortex_m::interrupt::free(|cs| {\n\n let mut rcc = p.RCC.configure().sysclk(48.mhz()).freeze(&mut p.FLASH);\n\n let gpioa = p.GPIOA.split(&mut rcc);\n\n let mut nvic = cp.NVIC;\n\n\n\n // USART1 at PA9 (TX) and PA10 (RX) is connectet to ST-Link (well, not quite)\n\n let tx = gpioa.pa9.into_alternate_af1(cs);\n\n let rx = gpioa.pa10.into_alternate_af1(cs);\n\n\n\n // Set up serial port\n\n let mut serial = Serial::usart1(p.USART1, (tx, rx), 115200.bps(), &mut rcc);\n\n\n\n // Enable interrupt generation for received data\n\n serial.listen(Event::Rxne);\n\n\n\n // Output a nice message\n\n serial\n\n .write_str(\"\\r\\nTry typing some characters and watch them being echoed.\\r\\n\")\n", "file_path": "examples/serial_echo_irq.rs", "rank": 5, "score": 67052.34278386114 }, { "content": "// Make some peripherals globally available\n\nstruct Shared {\n\n serial: Serial<stm32::USART1, gpioa::PA9<Alternate<AF1>>, gpioa::PA10<Alternate<AF1>>>,\n\n}\n\n\n\nstatic SHARED: Mutex<RefCell<Option<Shared>>> = Mutex::new(RefCell::new(None));\n\n\n", "file_path": "examples/serial_echo_irq.rs", "rank": 6, "score": 64537.10110419158 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n if let (Some(mut p), Some(cp)) = (stm32::Peripherals::take(), Peripherals::take()) {\n\n cortex_m::interrupt::free(|cs| {\n\n let mut rcc = p.RCC.configure().sysclk(48.mhz()).freeze(&mut p.FLASH);\n\n let gpioa = p.GPIOA.split(&mut rcc);\n\n let gpiob = p.GPIOB.split(&mut rcc);\n\n\n\n // USART1 at PA9 (TX) and PA10 (RX) is connectet to ST-Link (well, not quite)\n\n let tx = gpioa.pa9.into_alternate_af1(cs);\n\n let rx = gpioa.pa10.into_alternate_af1(cs);\n\n\n\n // Obtain resources from GPIO port C\n\n let gpioc = p.GPIOC.split(&mut rcc);\n\n\n\n // Initialize on-board LEDs\n\n let mut orange = orange!(gpioc, cs);\n\n let mut green = green!(gpioc, cs);\n\n let mut red = red!(gpioc, cs);\n\n let mut blue = blue!(gpioc, cs);\n\n\n", "file_path": "examples/touch.rs", "rank": 7, "score": 54968.47291141091 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n if let (Some(mut p), Some(cp)) = (\n\n stm32::Peripherals::take(),\n\n cortex_m::peripheral::Peripherals::take(),\n\n ) {\n\n cortex_m::interrupt::free(|cs| {\n\n let mut rcc = p.RCC.configure().sysclk(8.mhz()).freeze(&mut p.FLASH);\n\n\n\n // (Re-)configure PB3 as output\n\n let mut led = p.GPIOB.split(&mut rcc).pb3.into_push_pull_output(cs);\n\n\n\n // Disable the watchdog when the cpu is stopped under debug\n\n p.DBGMCU.apb1_fz.modify(|_, w| w.dbg_iwdg_stop().set_bit());\n\n\n\n // Initialise watchdoch\n\n let mut watchdog = watchdog::Watchdog::new(p.IWDG);\n\n\n\n // Get delay provider\n\n let mut delay = delay::Delay::new(cp.SYST, &rcc);\n\n\n", "file_path": "examples/watchdog.rs", "rank": 8, "score": 54968.47291141091 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n if let (Some(mut p), Some(cp)) = (stm32::Peripherals::take(), Peripherals::take()) {\n\n cortex_m::interrupt::free(|cs| {\n\n // Configure clock to 8 MHz (i.e. the default) and freeze it\n\n let mut rcc = p.RCC.configure().sysclk(8.mhz()).freeze(&mut p.FLASH);\n\n\n\n // Obtain resources from GPIO port C\n\n let gpioc = p.GPIOC.split(&mut rcc);\n\n\n\n // Initialize on-board LEDs\n\n let mut orange = orange!(gpioc, cs);\n\n let mut green = green!(gpioc, cs);\n\n let mut red = red!(gpioc, cs);\n\n let mut blue = blue!(gpioc, cs);\n\n\n\n // Get delay provider\n\n let mut delay = Delay::new(cp.SYST, &rcc);\n\n\n\n loop {\n\n orange.toggle();\n", "file_path": "examples/gpio_circly.rs", "rank": 9, "score": 52742.87360655317 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n panic!(\"Hello world\");\n\n}\n", "file_path": "examples/panic-ramdump.rs", "rank": 10, "score": 52742.87360655317 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n if let Some(mut p) = stm32::Peripherals::take() {\n\n let mut disp = cortex_m::interrupt::free(|cs| {\n\n let mut rcc = p.RCC.configure().sysclk(48.mhz()).freeze(&mut p.FLASH);\n\n let gpioa = p.GPIOA.split(&mut rcc);\n\n let gpiob = p.GPIOB.split(&mut rcc);\n\n\n\n let scl = gpiob\n\n .pb6\n\n .into_alternate_af1(cs)\n\n .internal_pull_up(cs, true)\n\n .set_open_drain(cs);\n\n let sda = gpiob\n\n .pb7\n\n .into_alternate_af1(cs)\n\n .internal_pull_up(cs, true)\n\n .set_open_drain(cs);\n\n\n\n // Setup I2C1\n\n let i2c = I2c::i2c1(p.I2C1, (scl, sda), 400.khz(), &mut rcc);\n", "file_path": "examples/i2c_hal_ssd1306alphabeter.rs", "rank": 11, "score": 50777.933182326335 }, { "content": "#[interrupt]\n\nfn USART1() {\n\n cortex_m::interrupt::free(|cs| {\n\n // Obtain all Mutex protected resources\n\n if let Some(ref mut shared) = SHARED.borrow(cs).borrow_mut().deref_mut() {\n\n let serial = &mut shared.serial;\n\n\n\n // Read received character\n\n let received = serial.read().unwrap();\n\n\n\n // Write character back\n\n serial.write(received).ok();\n\n\n\n // Clear interrupt\n\n cortex_m::peripheral::NVIC::unpend(USART1);\n\n }\n\n });\n\n}\n", "file_path": "examples/serial_echo_irq.rs", "rank": 13, "score": 39157.20080717333 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! 
{\n\n cortex_m::interrupt::free(|cs| {\n\n // Obtain mutex protected write part of serial port\n\n if let Some(ref mut tx) = *PANIC_SERIAL.borrow(cs).borrow_mut().deref_mut() {\n\n writeln!(tx, \"\\r\\n{}\", info).ok();\n\n }\n\n\n\n loop {\n\n continue;\n\n }\n\n })\n\n}\n\n\n", "file_path": "examples/i2c_hal_ssd1306alphabeter.rs", "rank": 17, "score": 18827.59800274796 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_halt;\n\n\n\nuse stm32f072b_disco as board;\n\n\n\nuse board::hal::{prelude::*, serial::Serial, stm32};\n\n\n\nuse cortex_m_rt::entry;\n\nuse nb::block;\n\nuse core::fmt::Write;\n\n\n\n#[entry]\n", "file_path": "examples/serial_echo.rs", "rank": 18, "score": 18445.50854427385 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_halt;\n\n\n\nuse stm32f072b_disco as board;\n\n\n\nuse board::hal::{\n\n gpio::*,\n\n prelude::*,\n\n serial::{Event, Serial},\n\n stm32::{self, interrupt, Interrupt::USART1},\n\n};\n\n\n\nuse cortex_m::interrupt::Mutex;\n\nuse cortex_m_rt::entry;\n\n\n\nuse core::{cell::RefCell, fmt::Write, ops::DerefMut};\n\n\n\n// Make some peripherals globally available\n", "file_path": "examples/serial_echo_irq.rs", "rank": 19, "score": 17297.62347569486 }, { "content": " .ok();\n\n\n\n // Move all components under Mutex supervision\n\n *SHARED.borrow(cs).borrow_mut() = Some(Shared { serial });\n\n\n\n // Enable USART IRQ and clear any pending IRQs\n\n nvic.enable(USART1);\n\n cortex_m::peripheral::NVIC::unpend(USART1);\n\n });\n\n }\n\n\n\n loop {\n\n // Power down a bit while waiting for interrupts\n\n cortex_m::asm::wfi();\n\n }\n\n}\n\n\n\n// The IRQ handler triggered by a received character in USART buffer\n", "file_path": "examples/serial_echo_irq.rs", "rank": 20, "score": 17290.628715457933 }, { "content": "stm32f072b-disco\n\n================\n\n\n\n_stm32f072b-disco_ contains a basic board support package for the fabulous\n\n[STM32F072 Discovery kit][] microcontroller board to write firmwares using\n\nRust. This standard format board provides 4 user programmable LEDs, a\n\ncapacitive touch area and a gyroscope. 
There're also headers which can be used\n\nto connect peripherals and also contains a ST-Link V2 debugging interface, so\n\nall that one needs to get going with progamming this device is:\n\n\n\n* STM32F072 Discovery kit\n\n* A computer (macOS and Linux work perfectly, Windows should work but was not tested)\n\n* A bit of open source software\n\n\n\n[STM32F072 Discovery kit]: https://www.st.com/en/evaluation-tools/32f072bdiscovery.html\n\n\n\nLicense\n\n-------\n\n\n\n[0-clause BSD license](LICENSE-0BSD.txt).\n", "file_path": "README.md", "rank": 21, "score": 11136.344705085121 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_halt;\n\n\n\nuse stm32f072b_disco as board;\n\n\n\nuse board::hal::{delay::Delay, prelude::*, serial::Serial, stm32, tsc::Tsc};\n\nuse board::{blue, green, orange, red};\n\n\n\nuse core::fmt::Write;\n\nuse cortex_m::peripheral::Peripherals;\n\nuse cortex_m_rt::entry;\n\n\n\n#[entry]\n", "file_path": "examples/touch.rs", "rank": 22, "score": 11.240905221905168 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_halt;\n\n\n\nuse stm32f072b_disco as board;\n\n\n\nuse board::hal::{delay, prelude::*, stm32, time, watchdog};\n\n\n\nuse cortex_m_rt::entry;\n\n\n\n#[entry]\n", "file_path": "examples/watchdog.rs", "rank": 23, "score": 10.95432183549557 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nuse stm32f0xx_hal as hal;\n\n\n\nuse cortex_m_rt::entry;\n\nuse ssd1306::mode::TerminalMode;\n\nuse ssd1306::Builder;\n\n\n\nuse crate::hal::{gpio::*, i2c::*, prelude::*, serial::*, stm32};\n\nuse cortex_m::interrupt::Mutex;\n\n\n\nuse core::{cell::RefCell, fmt::Write, ops::DerefMut};\n\n\n\n// Make the write part of our serial port globally available\n\nstatic PANIC_SERIAL: Mutex<\n\n RefCell<\n\n Option<\n\n hal::serial::Serial<\n\n stm32::USART2,\n", "file_path": "examples/i2c_hal_ssd1306alphabeter.rs", "rank": 24, "score": 10.43649773097744 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_halt;\n\n\n\nuse stm32f072b_disco as board;\n\n\n\nuse board::hal::{delay::Delay, prelude::*, stm32};\n\nuse board::{blue, green, orange, red};\n\n\n\nuse cortex_m::peripheral::Peripherals;\n\nuse cortex_m_rt::entry;\n\n\n\n#[entry]\n", "file_path": "examples/gpio_circly.rs", "rank": 25, "score": 10.334064962544952 }, { "content": "\n\n // USART2 at PA2 (TX) and PA15(RX) is connectet to ST-Link\n\n let tx = gpioa.pa2.into_alternate_af1(cs);\n\n let rx = gpioa.pa15.into_alternate_af1(cs);\n\n\n\n let serial = Serial::usart2(p.USART2, (tx, rx), 115_200.bps(), &mut rcc);\n\n\n\n // Transfer write part of serial port into Mutex\n\n *PANIC_SERIAL.borrow(cs).borrow_mut() = Some(serial);\n\n\n\n use ssd1306::displayrotation::DisplayRotation;\n\n let mut disp: TerminalMode<_> =\n\n Builder::new().with_i2c_addr(0x3c).connect_i2c(i2c).into();\n\n\n\n let _ = disp.set_rotation(DisplayRotation::Rotate180);\n\n disp.init().unwrap();\n\n let _ = disp.clear();\n\n\n\n disp\n\n });\n", "file_path": "examples/i2c_hal_ssd1306alphabeter.rs", "rank": 26, "score": 9.981049575416788 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_ramdump;\n\n\n\n#[allow(unused)]\n\nuse stm32f072b_disco as board;\n\n\n\nuse cortex_m_rt::entry;\n\n\n\n#[entry]\n", "file_path": "examples/panic-ramdump.rs", "rank": 27, "score": 7.858615131362629 }, { "content": " // Set up serial port\n\n let mut serial = Serial::usart1(p.USART1, (tx, rx), 115200.bps(), &mut rcc);\n\n\n\n // Initialise touch controller\n\n let mut 
tsc = Tsc::tsc(p.TSC, &mut rcc, None);\n\n\n\n let mut sample1 = gpioa.pa3.into_alternate_af3(cs).set_open_drain(cs);\n\n tsc.setup_sample_group(&mut sample1);\n\n let mut sense1 = gpioa.pa2.into_alternate_af3(cs);\n\n tsc.enable_channel(&mut sense1);\n\n\n\n let mut sample2 = gpioa.pa7.into_alternate_af3(cs).set_open_drain(cs);\n\n tsc.setup_sample_group(&mut sample2);\n\n let mut sense2 = gpioa.pa6.into_alternate_af3(cs);\n\n tsc.enable_channel(&mut sense2);\n\n\n\n let mut sample3 = gpiob.pb1.into_alternate_af3(cs).set_open_drain(cs);\n\n tsc.setup_sample_group(&mut sample3);\n\n let mut sense3 = gpiob.pb0.into_alternate_af3(cs);\n\n tsc.enable_channel(&mut sense3);\n", "file_path": "examples/touch.rs", "rank": 28, "score": 7.659537380514012 }, { "content": "\n\n // Get delay provider\n\n let mut delay = Delay::new(cp.SYST, &rcc);\n\n\n\n // Aquire a baseline reading\n\n tsc.acquire().unwrap();\n\n\n\n // Store the baseline values\n\n let mut base1 = tsc.read(&mut sense1).unwrap();\n\n writeln!(serial, \"base: {}!\\r\", base1).ok();\n\n let mut base2 = tsc.read(&mut sense2).unwrap();\n\n writeln!(serial, \"base: {}!\\r\", base2).ok();\n\n let mut base3 = tsc.read(&mut sense3).unwrap();\n\n writeln!(serial, \"base: {}!\\r\", base3).ok();\n\n\n\n loop {\n\n // Aquire a regular reading\n\n tsc.acquire().unwrap();\n\n\n\n // Compare the new values with the baseline\n", "file_path": "examples/touch.rs", "rank": 29, "score": 5.770118576083254 }, { "content": "#![no_std]\n\n#![allow(non_camel_case_types)]\n\n\n\npub use stm32f0xx_hal as hal;\n\n\n\npub use crate::hal::prelude::*;\n\npub use crate::hal::stm32::interrupt::*;\n\npub use crate::hal::stm32::*;\n\npub use crate::hal::*;\n\npub use cortex_m::*;\n\npub use cortex_m_rt::*;\n\n\n\npub mod led;\n", "file_path": "src/lib.rs", "rank": 30, "score": 5.401122773743927 }, { "content": " gpioa::PA2<Alternate<AF1>>,\n\n gpioa::PA15<Alternate<AF1>>,\n\n >,\n\n >,\n\n >,\n\n> = Mutex::new(RefCell::new(None));\n\n\n\nuse core::panic::PanicInfo;\n\n\n\n#[panic_handler]\n", "file_path": "examples/i2c_hal_ssd1306alphabeter.rs", "rank": 31, "score": 3.746410370791616 }, { "content": " let touched = tsc.read(&mut sense1).unwrap();\n\n let reading1 = base1 - touched;\n\n base1 = core::cmp::max(base1, touched);\n\n writeln!(serial, \"sense1: {}!\\r\", reading1).ok();\n\n\n\n let touched = tsc.read(&mut sense2).unwrap();\n\n let reading2 = base2 - touched;\n\n writeln!(serial, \"sense2: {}!\\r\", reading2).ok();\n\n base2 = core::cmp::max(base2, touched);\n\n\n\n let touched = tsc.read(&mut sense3).unwrap();\n\n let reading3 = base3 - touched;\n\n writeln!(serial, \"sense3: {}!\\r\", reading3).ok();\n\n base3 = core::cmp::max(base3, touched);\n\n\n\n // Light up the LEDs depending on the position\n\n if reading1 > 100 {\n\n green.set_high();\n\n } else {\n\n green.set_low();\n", "file_path": "examples/touch.rs", "rank": 32, "score": 3.187701982707252 }, { "content": " delay.delay_ms(200_u16);\n\n red.toggle();\n\n delay.delay_ms(200_u16);\n\n green.toggle();\n\n delay.delay_ms(200_u16);\n\n blue.toggle();\n\n delay.delay_ms(200_u16);\n\n }\n\n });\n\n }\n\n\n\n loop {\n\n continue;\n\n }\n\n}\n", "file_path": "examples/gpio_circly.rs", "rank": 33, "score": 2.6948826128406322 }, { "content": " delay.delay_ms(100_u16);\n\n }\n\n });\n\n }\n\n\n\n loop {\n\n continue;\n\n }\n\n}\n", "file_path": "examples/watchdog.rs", "rank": 34, "score": 2.305171650066119 }, { "content": " delay.delay_ms(100_u16);\n\n }\n\n });\n\n }\n\n\n\n loop {\n\n continue;\n\n 
}\n\n}\n", "file_path": "examples/touch.rs", "rank": 35, "score": 2.305171650066119 }, { "content": " // LED on for 2s to indicate start of example\n\n led.set_high();\n\n delay.delay_ms(2000_u16);\n\n led.set_low();\n\n\n\n // Arm watchdog with 1s timeout\n\n watchdog.start(time::Hertz(1));\n\n\n\n // Toggle LED a few times a tad slower within the timeout\n\n for _ in 0..=3 {\n\n led.toggle();\n\n delay.delay_ms(200_u16);\n\n }\n\n\n\n // Feed the watchdog once to reset the timer\n\n watchdog.feed();\n\n\n\n // Now keep on toggling the LED quickly until the watchdog triggers a reset\n\n loop {\n\n led.toggle();\n", "file_path": "examples/watchdog.rs", "rank": 36, "score": 1.6029173488512982 } ]
Example row 2
lang: Rust
file_path: src/lib/input.rs
repo_name: CoBrooks/hephaestus-rs
commit: 56ffb62ecd00113de2501f28938fb7ca388d4714
use std::collections::{ HashSet, HashMap }; use winit::event::{ VirtualKeyCode, ElementState, DeviceEvent, ButtonId, MouseScrollDelta }; pub struct Input { keyboard: HashSet<u32>, keyboard_prev: HashSet<u32>, buttons: HashSet<u32>, buttons_prev: HashSet<u32>, axes: HashMap<String, f32>, mouse_pos: (f32, f32), mouse_delta: (f32, f32), scroll_wheel: f32, window_size: (u32, u32) } impl Input { pub fn new(window_size: (u32, u32)) -> Self { let mut axes = HashMap::new(); axes.insert("horizontal".into(), 0.0); axes.insert("vertical".into(), 0.0); Self { keyboard: HashSet::new(), keyboard_prev: HashSet::new(), buttons: HashSet::new(), buttons_prev: HashSet::new(), axes, mouse_pos: (window_size.0 as f32 / 2.0, window_size.1 as f32 / 2.0), mouse_delta: (0.0, 0.0), scroll_wheel: 0.0, window_size } } pub fn update(&mut self) { self.keyboard_prev = self.keyboard.clone(); self.buttons_prev = self.buttons.clone(); self.mouse_delta = (0.0, 0.0); self.scroll_wheel = 0.0; if self.get_key(VirtualKeyCode::W) || self.get_key(VirtualKeyCode::Up) { *self.axes.get_mut("vertical").unwrap() = 1.0; } else if self.get_key(VirtualKeyCode::S) || self.get_key(VirtualKeyCode::Down) { *self.axes.get_mut("vertical").unwrap() = -1.0; } else { *self.axes.get_mut("vertical").unwrap() = 0.0; } if self.get_key(VirtualKeyCode::D) || self.get_key(VirtualKeyCode::Right) { *self.axes.get_mut("horizontal").unwrap() = 1.0; } else if self.get_key(VirtualKeyCode::A) || self.get_key(VirtualKeyCode::Left) { *self.axes.get_mut("horizontal").unwrap() = -1.0; } else { *self.axes.get_mut("horizontal").unwrap() = 0.0; } } pub fn parse(&mut self, event: &DeviceEvent) { match event { DeviceEvent::Key(input) => { if input.state == ElementState::Pressed { if let Some(vkey) = input.virtual_keycode { self.keyboard.insert(vkey as u32); } else { self.keyboard.insert(input.scancode); } } else { if let Some(vkey) = input.virtual_keycode { self.keyboard.remove(&(vkey as u32)); } else { self.keyboard.remove(&input.scancode); } } }, DeviceEvent::MouseMotion { delta, .. 
} => { self.mouse_pos.0 += delta.0 as f32; self.mouse_pos.1 += delta.1 as f32; self.mouse_delta = (delta.0 as f32, delta.1 as f32); }, DeviceEvent::Button { button, state } => { if state == &ElementState::Pressed { self.buttons.insert(*button); } else { self.buttons.remove(button); } }, DeviceEvent::MouseWheel { delta } => { if let MouseScrollDelta::LineDelta(_, y) = delta { self.scroll_wheel = -y.signum(); } }, _ => { } } } pub fn get_key(&self, key: VirtualKeyCode) -> bool { self.keyboard.contains(&(key as u32)) } pub fn get_key_down(&self, key: VirtualKeyCode) -> bool { self.keyboard.contains(&(key as u32)) && !self.keyboard_prev.contains(&(key as u32)) } pub fn get_key_up(&self, key: VirtualKeyCode) -> bool { !self.keyboard.contains(&(key as u32)) && self.keyboard_prev.contains(&(key as u32)) } pub fn get_button(&self, button: ButtonId) -> bool { self.buttons.contains(&button) } pub fn get_button_down(&self, button: ButtonId) -> bool { self.buttons.contains(&button) && !self.buttons_prev.contains(&button) } pub fn get_button_up(&self, button: ButtonId) -> bool { !self.buttons.contains(&button) && self.buttons_prev.contains(&button) } pub fn mouse_pos(&self) -> (f32, f32) { self.mouse_pos } pub fn mouse_pos_rel(&self) -> (f32, f32) { let (m_x, m_y) = self.mouse_pos; let (w_x, w_y) = self.window_size; (m_x / w_x as f32, m_y / w_y as f32) } pub fn mouse_delta(&self) -> (f32, f32) { self.mouse_delta } pub fn scroll_wheel(&self) -> f32 { self.scroll_wheel } pub fn get_axis(&self, axis: &str) -> Option<f32> { self.axes.get(&axis.to_lowercase()).cloned() } }
use std::collections::{ HashSet, HashMap }; use winit::event::{ VirtualKeyCode, ElementState, DeviceEvent, ButtonId, MouseScrollDelta }; pub struct Input { keyboard: HashSet<u32>, keyboard_prev: HashSet<u32>, buttons: HashSet<u32>, buttons_prev: HashSet<u32>, axes: HashMap<String, f32>, mouse_pos: (f32, f32), mouse_delta: (f32, f32), scroll_wheel: f32, window_size: (u32, u32) } impl Input { pub fn new(window_size: (u32, u32)) -> Self { let mut axes = HashMap::ne
pub fn update(&mut self) { self.keyboard_prev = self.keyboard.clone(); self.buttons_prev = self.buttons.clone(); self.mouse_delta = (0.0, 0.0); self.scroll_wheel = 0.0; if self.get_key(VirtualKeyCode::W) || self.get_key(VirtualKeyCode::Up) { *self.axes.get_mut("vertical").unwrap() = 1.0; } else if self.get_key(VirtualKeyCode::S) || self.get_key(VirtualKeyCode::Down) { *self.axes.get_mut("vertical").unwrap() = -1.0; } else { *self.axes.get_mut("vertical").unwrap() = 0.0; } if self.get_key(VirtualKeyCode::D) || self.get_key(VirtualKeyCode::Right) { *self.axes.get_mut("horizontal").unwrap() = 1.0; } else if self.get_key(VirtualKeyCode::A) || self.get_key(VirtualKeyCode::Left) { *self.axes.get_mut("horizontal").unwrap() = -1.0; } else { *self.axes.get_mut("horizontal").unwrap() = 0.0; } } pub fn parse(&mut self, event: &DeviceEvent) { match event { DeviceEvent::Key(input) => { if input.state == ElementState::Pressed { if let Some(vkey) = input.virtual_keycode { self.keyboard.insert(vkey as u32); } else { self.keyboard.insert(input.scancode); } } else { if let Some(vkey) = input.virtual_keycode { self.keyboard.remove(&(vkey as u32)); } else { self.keyboard.remove(&input.scancode); } } }, DeviceEvent::MouseMotion { delta, .. } => { self.mouse_pos.0 += delta.0 as f32; self.mouse_pos.1 += delta.1 as f32; self.mouse_delta = (delta.0 as f32, delta.1 as f32); }, DeviceEvent::Button { button, state } => { if state == &ElementState::Pressed { self.buttons.insert(*button); } else { self.buttons.remove(button); } }, DeviceEvent::MouseWheel { delta } => { if let MouseScrollDelta::LineDelta(_, y) = delta { self.scroll_wheel = -y.signum(); } }, _ => { } } } pub fn get_key(&self, key: VirtualKeyCode) -> bool { self.keyboard.contains(&(key as u32)) } pub fn get_key_down(&self, key: VirtualKeyCode) -> bool { self.keyboard.contains(&(key as u32)) && !self.keyboard_prev.contains(&(key as u32)) } pub fn get_key_up(&self, key: VirtualKeyCode) -> bool { !self.keyboard.contains(&(key as u32)) && self.keyboard_prev.contains(&(key as u32)) } pub fn get_button(&self, button: ButtonId) -> bool { self.buttons.contains(&button) } pub fn get_button_down(&self, button: ButtonId) -> bool { self.buttons.contains(&button) && !self.buttons_prev.contains(&button) } pub fn get_button_up(&self, button: ButtonId) -> bool { !self.buttons.contains(&button) && self.buttons_prev.contains(&button) } pub fn mouse_pos(&self) -> (f32, f32) { self.mouse_pos } pub fn mouse_pos_rel(&self) -> (f32, f32) { let (m_x, m_y) = self.mouse_pos; let (w_x, w_y) = self.window_size; (m_x / w_x as f32, m_y / w_y as f32) } pub fn mouse_delta(&self) -> (f32, f32) { self.mouse_delta } pub fn scroll_wheel(&self) -> f32 { self.scroll_wheel } pub fn get_axis(&self, axis: &str) -> Option<f32> { self.axes.get(&axis.to_lowercase()).cloned() } }
w(); axes.insert("horizontal".into(), 0.0); axes.insert("vertical".into(), 0.0); Self { keyboard: HashSet::new(), keyboard_prev: HashSet::new(), buttons: HashSet::new(), buttons_prev: HashSet::new(), axes, mouse_pos: (window_size.0 as f32 / 2.0, window_size.1 as f32 / 2.0), mouse_delta: (0.0, 0.0), scroll_wheel: 0.0, window_size } }
strategy: function_block-function_prefixed
[ { "content": "#[proc_macro_derive(Component)]\n\npub fn component_derive(input: TokenStream) -> TokenStream {\n\n let ast: syn::DeriveInput = syn::parse(input).unwrap();\n\n\n\n let name = &ast.ident;\n\n let gen = quote! {\n\n impl Component for #name {\n\n fn get_id(&self) -> usize {\n\n self.id\n\n }\n\n\n\n fn set_id(&mut self, id: usize) {\n\n self.id = id;\n\n }\n\n }\n\n };\n\n\n\n gen.into()\n\n}\n\n\n", "file_path": "hephaestus-macros/src/lib.rs", "rank": 0, "score": 73983.55267378873 }, { "content": "pub fn get_messages() -> Vec<Message> {\n\n APP_LOGGER.with(|logger| logger.get_all_messages().unwrap_or(Vec::new()))\n\n}\n", "file_path": "src/lib/logger.rs", "rank": 1, "score": 57468.095399401835 }, { "content": "fn init(id: usize, _: &mut World) {\n\n logger::log_debug(&format!(\"{}: INIT!\", id), MessageEmitter::Object(id.to_string()))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 2, "score": 56448.880573041286 }, { "content": "pub fn log_error(content: &str, emitter: MessageEmitter) {\n\n APP_LOGGER.with(|logger| logger.log_error(content, emitter));\n\n}\n\n\n", "file_path": "src/lib/logger.rs", "rank": 3, "score": 48907.62374670076 }, { "content": "pub fn log_info(content: &str, emitter: MessageEmitter) {\n\n APP_LOGGER.with(|logger| logger.log_info(content, emitter));\n\n}\n\n\n", "file_path": "src/lib/logger.rs", "rank": 4, "score": 48907.62374670076 }, { "content": "pub fn log_warning(content: &str, emitter: MessageEmitter) {\n\n APP_LOGGER.with(|logger| logger.log_warning(content, emitter));\n\n}\n\n\n", "file_path": "src/lib/logger.rs", "rank": 5, "score": 48907.62374670076 }, { "content": "pub fn log_debug(content: &str, emitter: MessageEmitter) {\n\n APP_LOGGER.with(|logger| logger.log_debug(content, emitter));\n\n}\n\n\n", "file_path": "src/lib/logger.rs", "rank": 6, "score": 48907.62374670076 }, { "content": "#[allow(unused)]\n\nfn main() {\n\n let mut world = World::new();\n\n world.void_color = [0.01, 0.01, 0.01, 1.0];\n\n\n\n let camera = world.new_entity()\n\n .transform([0.0, 1.0, 0.0], [1.0; 3], [Deg(0.0); 3])\n\n .camera()\n\n .logic(Box::new(init), camera::logic::first_person::<1u8, 1u8>());\n\n world.add_entity(camera);\n\n\n\n let ground_plane = world.new_entity()\n\n .transform([0.0, -1.0, 0.0], [10.0; 3], [Deg(0.0), Deg(0.0), Deg(0.0)])\n\n .mesh(MeshType::Primitive(PrimitiveType::Plane))\n\n .material([0.8; 3]);\n\n world.add_entity(ground_plane);\n\n\n\n let monkey = world.new_entity()\n\n .transform([0.0, 0.0, 1.0], [0.2; 3], [Deg(0.0); 3])\n\n .mesh(MeshType::Model(\"models/suzanne.obj\".into()))\n\n .texture(\"models/textures/monkey_texture.png\")\n", "file_path": "src/main.rs", "rank": 7, "score": 41076.70145000363 }, { "content": "pub trait ComponentClone {\n\n fn boxed_clone(&self) -> Box<dyn Component>;\n\n}\n\n\n\nimpl<C: 'static> ComponentClone for C where C: Component + Clone {\n\n fn boxed_clone(&self) -> Box<dyn Component> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Clone for Box<dyn Component> {\n\n fn clone(&self) -> Self {\n\n self.boxed_clone()\n\n }\n\n}\n\n\n\n#[derive(Clone, Component)]\n\npub struct Entity {\n\n id: usize,\n\n}\n", "file_path": "src/lib/entity.rs", "rank": 8, "score": 33960.36218239051 }, { "content": "pub trait MaterialClone {\n\n fn boxed_clone(&self) -> Box<dyn Material>;\n\n}\n\n\n\nimpl<M: 'static> MaterialClone for M where M: Material + Clone {\n\n fn boxed_clone(&self) -> Box<dyn Material> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Clone for Box<dyn Material> {\n\n fn clone(&self) -> Self 
{\n\n self.boxed_clone()\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Diffuse {\n\n color: [f32; 3],\n\n texture_data: Option<(Vec<u8>, ImageDimensions)>,\n", "file_path": "src/lib/material.rs", "rank": 9, "score": 33960.36218239051 }, { "content": "pub trait Material: MaterialClone {\n\n fn get_color(&self) -> [f32; 3];\n\n fn set_color(&mut self, color: [f32; 3]);\n\n fn add_texture(&mut self, tex_path: &str);\n\n fn get_texture_buffer(&self, queue: &Arc<Queue>) -> (Arc<ImageView<Arc<ImmutableImage>>>, CommandBufferExecFuture<NowFuture, PrimaryAutoCommandBuffer>);\n\n fn get_texture_sampler(&self, device: &Arc<Device>) -> Arc<Sampler>;\n\n}\n\n\n", "file_path": "src/lib/material.rs", "rank": 10, "score": 30983.898652620424 }, { "content": "pub trait Component: Downcast + ComponentClone { \n\n fn get_id(&self) -> usize;\n\n fn set_id(&mut self, id: usize);\n\n}\n\nimpl_downcast!(Component);\n\n\n", "file_path": "src/lib/entity.rs", "rank": 11, "score": 28653.227831676886 }, { "content": "use cgmath::{ Matrix4, Vector3 };\n\n\n\n#[derive(Default, Debug, Clone, Copy)]\n\npub struct Vertex {\n\n pub position: [f32; 3],\n\n pub color: [f32; 3],\n\n pub normal: [f32; 3],\n\n pub uv: [f32; 2]\n\n}\n\nvulkano::impl_vertex!(Vertex, position, color, normal, uv);\n\n\n\n#[derive(Default, Debug, Clone, Copy)]\n\npub struct DummyVertex {\n\n pub position: [f32; 2]\n\n}\n\nvulkano::impl_vertex!(DummyVertex, position);\n\n\n\nimpl DummyVertex {\n\n // DummyVertices are only intended to be used by shaders that don't require geometry input\n\n pub fn list() -> [DummyVertex; 6] {\n", "file_path": "src/lib/buffer_objects.rs", "rank": 19, "score": 14.43612259892863 }, { "content": "\n\n#[derive(Clone)]\n\npub struct EntityBuilder {\n\n pub components: Vec<Box<dyn Component>>\n\n}\n\n\n\nimpl EntityBuilder {\n\n pub fn new() -> Self {\n\n Self {\n\n components: vec![Box::new(Entity { id: 0 })]\n\n }\n\n }\n\n\n\n pub fn set_id(&mut self, id: usize) {\n\n self.components.iter_mut()\n\n .for_each(|c| c.set_id(id));\n\n }\n\n\n\n pub fn transform(mut self, translation: [f32; 3], scale: [f32; 3], rotation: [Deg<f32>; 3]) -> Self {\n\n let rotation = Quaternion::from(Euler::new(rotation[0], rotation[1], rotation[2]));\n", "file_path": "src/lib/entity.rs", "rank": 20, "score": 14.353402028319188 }, { "content": "}\n\n\n\nimpl Diffuse {\n\n pub fn new(color: [f32; 3]) -> Self {\n\n let mut d = Diffuse {\n\n color,\n\n texture_data: None,\n\n };\n\n d.add_texture(\"models/textures/null_texture.png\");\n\n\n\n d\n\n }\n\n}\n\n\n\nimpl Material for Diffuse { \n\n fn get_color(&self) -> [f32; 3] {\n\n self.color\n\n }\n\n\n\n fn set_color(&mut self, color: [f32; 3]) {\n", "file_path": "src/lib/material.rs", "rank": 21, "score": 14.004264615982304 }, { "content": "#[derive(Clone)]\n\npub struct DirectionalLight {\n\n pub position: [f32; 4],\n\n pub color: [f32; 3]\n\n}\n\n\n\nimpl DirectionalLight {\n\n pub fn new(position: [f32; 4], color: [f32; 3]) -> Self {\n\n Self {\n\n position,\n\n color\n\n }\n\n }\n\n}\n", "file_path": "src/lib/light.rs", "rank": 22, "score": 13.325480469984868 }, { "content": "use hephaestus_macros::*;\n\n\n\nuse cgmath::{ Point3, Euler, Matrix4, Vector3, Rad, Deg, SquareMatrix, InnerSpace, EuclideanSpace };\n\nuse crate::{\n\n buffer_objects::{ VPBufferObject, UniformBufferObject },\n\n entity::{ Component, Transform },\n\n};\n\n\n\n#[derive(Clone, Component)]\n\npub struct Camera {\n\n id: usize,\n\n view: Matrix4<f32>,\n\n proj: Matrix4<f32>\n\n}\n\n\n\nimpl Camera {\n\n pub fn 
default() -> Self {\n\n Self {\n\n id: 0,\n\n view: Matrix4::from([[0.0; 4]; 4]),\n", "file_path": "src/lib/camera.rs", "rank": 23, "score": 13.128573334812284 }, { "content": " }\n\n\n\n pub fn get_vp_buffer(&self, dimensions: [u32; 2]) -> VPBufferObject {\n\n let mut proj = cgmath::perspective(Rad::from(Deg(60.0)), dimensions[0] as f32 / dimensions[1] as f32, 0.1, 1000.0);\n\n proj.y.y *= -1.0;\n\n\n\n VPBufferObject {\n\n view: self.view,\n\n proj\n\n }\n\n }\n\n}\n\n\n\npub mod logic {\n\n use crate::entity::{ Transform, UpdateData };\n\n use crate::logger::{ self, MessageEmitter };\n\n use cgmath::{ Rad, Euler };\n\n use winit::event::VirtualKeyCode;\n\n\n\n pub fn first_person<const SENS: u8, const SPEED: u8>() -> Box<fn(usize, &mut UpdateData)> {\n", "file_path": "src/lib/camera.rs", "rank": 24, "score": 12.853812341415093 }, { "content": "use std::time::{ Duration, Instant };\n\nuse winit::event_loop::{ ControlFlow, EventLoop };\n\nuse winit::event::{ Event, WindowEvent };\n\nuse egui_winit_vulkano::Gui;\n\n\n\nuse crate::{\n\n world::*,\n\n renderer::Renderer,\n\n gui::DebugGui,\n\n entity::*,\n\n input::Input,\n\n camera::Camera,\n\n // logger::{ self, MessageEmitter }\n\n};\n\n\n\npub struct EngineTime {\n\n pub delta_time: f32,\n\n pub fps: f32,\n\n pub total_time_ms: f32,\n\n pub total_time_s: f32,\n", "file_path": "src/lib/engine.rs", "rank": 25, "score": 12.829428716422747 }, { "content": "\n\n#[derive(Clone, Component)]\n\npub struct Transform {\n\n id: usize,\n\n pub translation: Vector3<f32>,\n\n pub scale: Vector3<f32>,\n\n pub rotation: Quaternion<f32>,\n\n pub local_rotation: Quaternion<f32>,\n\n}\n\n\n\nimpl Transform {\n\n pub fn default() -> Self {\n\n Self {\n\n id: 0,\n\n translation: [0.0; 3].into(),\n\n scale: [1.0; 3].into(),\n\n rotation: Quaternion::from(Euler::new(Deg(0.0), Deg(0.0), Deg(0.0))),\n\n local_rotation: Quaternion::from(Euler::new(Deg(0.0), Deg(0.0), Deg(0.0))),\n\n }\n\n }\n", "file_path": "src/lib/entity.rs", "rank": 26, "score": 11.803885670973349 }, { "content": "\n\n self\n\n }\n\n\n\n pub fn mesh(mut self, mesh: MeshType) -> Self {\n\n let mut m = Mesh {\n\n id: 0,\n\n data: MeshData::empty(),\n\n mesh_type: mesh\n\n };\n\n m.init();\n\n\n\n self.components.push(Box::new(m));\n\n\n\n self\n\n }\n\n\n\n pub fn material(mut self, color: [f32; 3]) -> Self {\n\n let m = Material {\n\n id: 0,\n", "file_path": "src/lib/entity.rs", "rank": 27, "score": 11.700208835965562 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::{\n\n light::DirectionalLight,\n\n logger::{ self, MessageEmitter },\n\n entity::{ Component, EntityBuilder }\n\n};\n\n\n\n#[derive(Clone)]\n\npub struct World {\n\n pub entities: HashMap<usize, Vec<Box<dyn Component>>>,\n\n pub lights: Vec<DirectionalLight>,\n\n pub void_color: [f32; 4],\n\n next_id: usize\n\n}\n\n\n\nimpl World {\n\n pub fn new() -> Self {\n\n logger::log_debug(\"Instantiating world.\", MessageEmitter::World);\n\n \n", "file_path": "src/lib/world.rs", "rank": 28, "score": 11.673452840607144 }, { "content": " Box::new(|_: usize, _: &mut UpdateData| { })\n\n }\n\n}\n\n\n\npub struct UpdateData<'a> {\n\n pub world: &'a mut World,\n\n pub time: &'a EngineTime,\n\n pub input: &'a Input\n\n}\n\n\n\n#[derive(Clone, Component)]\n\npub struct Mesh {\n\n id: usize,\n\n pub data: MeshData,\n\n pub mesh_type: MeshType\n\n}\n\n\n\nimpl Mesh {\n\n pub fn init(&mut self) {\n\n match self.mesh_type.clone() {\n", "file_path": "src/lib/entity.rs", "rank": 29, "score": 11.665386714687944 }, { "content": " 
Box::new(|id: usize, data: &mut UpdateData| {\n\n let transform = data.world.get_component_by_id_mut::<Transform>(id).unwrap();\n\n \n\n // Mouse controls rotation\n\n let (d_x, d_y) = data.input.mouse_delta();\n\n\n\n // Right click \n\n if data.input.get_button(3) {\n\n transform.rotate([Rad(0.0), Rad(SENS as f32 * d_x / 500.0), Rad(0.0)]);\n\n transform.rotate_local([-Rad(SENS as f32 * d_y / 500.0), Rad(0.0), Rad(0.0)]);\n\n }\n\n\n\n // Keyboard controls movement\n\n let (x, y) = (data.input.get_axis(\"horizontal\").unwrap(), data.input.get_axis(\"vertical\").unwrap());\n\n transform.translate_local([-x * data.time.delta_time, 0.0, y * data.time.delta_time]);\n\n\n\n // scroll wheel moves camera along forward axis\n\n let scroll = data.input.scroll_wheel();\n\n transform.translate_local([0.0, 0.0, scroll / 2.0]);\n\n\n", "file_path": "src/lib/camera.rs", "rank": 30, "score": 10.561712334681008 }, { "content": " }\n\n \n\n pub fn up_vector(&self) -> Vector3<f32> {\n\n (Matrix4::from(self.rotation) * Matrix4::from(self.local_rotation) * Vector3::unit_y().extend(1.0)).truncate()\n\n }\n\n}\n\n\n\n#[derive(Clone, Component)]\n\npub struct Logic {\n\n id: usize,\n\n pub init: Box<fn(usize, &mut World)>,\n\n pub update: Box<fn(usize, &mut UpdateData)>\n\n}\n\n\n\nimpl Logic {\n\n pub fn empty_init() -> Box<fn(usize, &mut World)> {\n\n Box::new(|_: usize, _: &mut World| { })\n\n }\n\n \n\n pub fn empty_update() -> Box<fn(usize, &mut UpdateData)> {\n", "file_path": "src/lib/entity.rs", "rank": 31, "score": 10.468135951662457 }, { "content": " Primitive(PrimitiveType)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct MeshData {\n\n pub vertices: Vec<Vertex>,\n\n pub indices: Vec<u16>\n\n}\n\n\n\nimpl MeshData {\n\n pub fn empty() -> Self {\n\n Self {\n\n vertices: Vec::new(),\n\n indices: Vec::new()\n\n }\n\n }\n\n\n\n pub fn load(path: &str) -> MeshData {\n\n if let Some(file) = File::open(path).ok() {\n\n let input = BufReader::new(file);\n", "file_path": "src/lib/mesh_data.rs", "rank": 32, "score": 10.459611385286077 }, { "content": " pub last_60_frame_durations: Vec<f32>,\n\n start_time: Instant,\n\n start_of_last_frame: Instant,\n\n}\n\n\n\nimpl EngineTime {\n\n pub fn new() -> Self {\n\n let now = Instant::now();\n\n\n\n Self {\n\n delta_time: 0.0,\n\n fps: 0.0,\n\n total_time_ms: 0.0,\n\n total_time_s: 0.0,\n\n start_time: now,\n\n start_of_last_frame: now,\n\n last_60_frame_durations: Vec::new()\n\n }\n\n }\n\n\n", "file_path": "src/lib/engine.rs", "rank": 33, "score": 9.763338350726903 }, { "content": "\n\n pub fn translate(&mut self, translation: [f32; 3]) {\n\n self.translation += Vector3::from(translation);\n\n }\n\n\n\n pub fn translate_local(&mut self, translation: [f32; 3]) {\n\n let (x, y, z) = (translation[0], translation[1], translation[2]);\n\n\n\n self.translation += \n\n self.right_vector() * x +\n\n self.up_vector() * y +\n\n self.forward_vector() * z;\n\n }\n\n\n\n pub fn rotate(&mut self, rotation: [Rad<f32>; 3]) {\n\n let (x, y, z) = (rotation[0], rotation[1], rotation[2]);\n\n \n\n self.rotation = Quaternion::from(Euler::new(x, y, z)) * self.rotation;\n\n }\n\n\n", "file_path": "src/lib/entity.rs", "rank": 34, "score": 9.62036858745414 }, { "content": " pub fn rotate_local(&mut self, rotation: [Rad<f32>; 3]) {\n\n let (x, y, z) = (rotation[0], rotation[1], rotation[2]);\n\n \n\n self.local_rotation = Quaternion::from(Euler::new(x, y, z)) * self.local_rotation;\n\n }\n\n\n\n pub fn scale(&mut self, scale: [f32; 3]) {\n\n self.scale = Vector3::new(self.scale.x * scale[0], 
self.scale.y * scale[1], self.scale.z * scale[2]);\n\n }\n\n\n\n pub fn model_matrix(&self) -> Matrix4<f32> {\n\n // https://solarianprogrammer.com/2013/05/22/opengl-101-matrices-projection-view-model/\n\n let (x, y, z) = self.translation.into();\n\n let t = Matrix4::from_cols(\n\n [1.0, 0.0, 0.0, 0.0].into(),\n\n [0.0, 1.0, 0.0, 0.0].into(),\n\n [0.0, 0.0, 1.0, 0.0].into(),\n\n [x, y, z, 1.0].into(),\n\n );\n\n\n", "file_path": "src/lib/entity.rs", "rank": 35, "score": 8.747708397895138 }, { "content": " self\n\n }\n\n\n\n pub fn camera(mut self) -> Self {\n\n let c = Camera::default();\n\n\n\n self.components.push(Box::new(c));\n\n\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/lib/entity.rs", "rank": 36, "score": 8.73255423394576 }, { "content": "}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Message {\n\n pub content: String,\n\n pub time: SystemTime,\n\n pub level: LogLevel,\n\n pub emitter: MessageEmitter\n\n}\n\n\n\nimpl Message {\n\n pub fn new(content: String, level: LogLevel, emitter: MessageEmitter) -> Self {\n\n Self {\n\n content, \n\n time: SystemTime::now(), \n\n level,\n\n emitter\n\n }\n\n }\n\n\n", "file_path": "src/lib/logger.rs", "rank": 37, "score": 8.691116163925464 }, { "content": "}\n\n\n\npub struct FrameTimeBreakdown {\n\n pub start: Instant,\n\n pub setup: Duration,\n\n pub object_loop: Duration,\n\n pub ambient: Duration,\n\n pub directional: Duration,\n\n pub draw_call: Duration,\n\n temp_time: Instant\n\n}\n\n\n\nimpl FrameTimeBreakdown {\n\n pub fn new() -> Self {\n\n Self {\n\n start: Instant::now(),\n\n setup: Duration::default(),\n\n object_loop: Duration::default(),\n\n ambient: Duration::default(),\n\n directional: Duration::default(),\n", "file_path": "src/lib/engine.rs", "rank": 38, "score": 8.464590374068166 }, { "content": "use std::collections::HashMap;\n\nuse std::cell::RefCell;\n\nuse std::time::SystemTime;\n\nuse chrono::{ DateTime, Local };\n\nuse egui::Color32;\n\nuse colored::Colorize;\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub enum LogLevel {\n\n Debug,\n\n Info,\n\n Warning,\n\n Error,\n\n}\n\n\n\nimpl LogLevel {\n\n pub fn color(&self) -> Color32 {\n\n match self {\n\n LogLevel::Debug => {\n\n Color32::from_gray(128)\n", "file_path": "src/lib/logger.rs", "rank": 39, "score": 8.33974085056617 }, { "content": " draw_call: Duration::default(),\n\n temp_time: Instant::now(),\n\n }\n\n }\n\n\n\n pub fn restart(&mut self) {\n\n *self = Self::new();\n\n }\n\n\n\n pub fn update_setup(&mut self) {\n\n let now = Instant::now();\n\n self.setup = now - self.start;\n\n self.temp_time = now;\n\n }\n\n\n\n pub fn update_object_loop(&mut self) {\n\n let now = Instant::now();\n\n self.object_loop = now - self.temp_time;\n\n self.temp_time = now;\n\n }\n", "file_path": "src/lib/engine.rs", "rank": 40, "score": 8.269278837751514 }, { "content": " pub fn new() -> Self {\n\n Self {\n\n show_debug_log: true,\n\n }\n\n }\n\n\n\n pub fn show(&mut self, gui: &mut Gui, time: &EngineTime, frame_breakdown: &FrameTimeBreakdown) {\n\n gui.immediate_ui(|gui| {\n\n let ctx = gui.context();\n\n\n\n egui::TopBottomPanel::bottom(\"Debug\")\n\n .default_height(350.0)\n\n .resizable(true)\n\n .max_height(500.0)\n\n .show(&ctx, |mut ui| {\n\n self.debug_log_menu(&mut ui, time);\n\n \n\n if self.show_debug_log {\n\n ui.separator();\n\n\n", "file_path": "src/lib/gui.rs", "rank": 41, "score": 8.21215082963868 }, { "content": "use std::borrow::Cow;\n\nuse egui::{\n\n Color32,\n\n CtxRef,\n\n FontDefinitions,\n\n FontFamily,\n\n Ui\n\n};\n\nuse 
egui_winit_vulkano::Gui;\n\n\n\nuse crate::{ \n\n logger,\n\n engine::{ EngineTime, FrameTimeBreakdown }\n\n};\n\n\n\npub struct DebugGui { \n\n show_debug_log: bool,\n\n}\n\n\n\nimpl DebugGui {\n", "file_path": "src/lib/gui.rs", "rank": 42, "score": 7.964629173958649 }, { "content": "pub mod engine;\n\npub mod world;\n\npub mod buffer_objects;\n\npub mod shaders;\n\npub mod material;\n\npub mod camera;\n\npub mod light;\n\npub mod renderer;\n\npub mod logger;\n\npub mod gui;\n\npub mod entity;\n\npub mod mesh_data;\n\npub mod input;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn transform_relative_vectors() {\n", "file_path": "src/lib/lib.rs", "rank": 43, "score": 7.8068318628134925 }, { "content": "use hephaestus_macros::*;\n\nuse std::sync::Arc;\n\nuse std::io::Cursor;\n\nuse std::fs;\n\nuse downcast_rs::{ Downcast, impl_downcast };\n\nuse cgmath::{ Vector3, Point3, Matrix4, Quaternion, Euler, Deg, Rad, Rotation3, Rotation, SquareMatrix, InnerSpace };\n\nuse vulkano::image::{ ImageDimensions, ImmutableImage, view::ImageView };\n\nuse vulkano::sampler::Sampler;\n\nuse vulkano::sync::GpuFuture;\n\nuse vulkano::device::{ Device, Queue };\n\nuse vulkano::format::Format;\n\nuse vulkano::buffer::{ BufferUsage, CpuAccessibleBuffer };\n\n\n\nuse crate::{\n\n mesh_data::{ MeshData, MeshType },\n\n world::World,\n\n engine::EngineTime,\n\n input::Input,\n\n camera::Camera,\n\n // logger::{ self, MessageEmitter }\n\n};\n\n\n", "file_path": "src/lib/entity.rs", "rank": 44, "score": 7.7303878938463235 }, { "content": "pub struct VPBufferObject {\n\n pub view: Matrix4<f32>,\n\n pub proj: Matrix4<f32>\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct ModelBufferObject {\n\n pub model: Matrix4<f32>,\n\n pub normals: Matrix4<f32>\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct AmbientBufferObject {\n\n pub color: Vector3<f32>,\n\n pub intensity: f32\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct DirectionalBufferObject {\n\n pub position: [f32; 4],\n\n pub color: Vector3<f32>\n\n}\n", "file_path": "src/lib/buffer_objects.rs", "rank": 45, "score": 7.685390324476416 }, { "content": "\n\n let t = Transform {\n\n id: 0,\n\n translation: translation.into(),\n\n scale: scale.into(),\n\n rotation,\n\n local_rotation: rotation,\n\n };\n\n self.components.push(Box::new(t));\n\n\n\n self\n\n }\n\n\n\n pub fn logic(mut self, init: Box<fn(usize, &mut World)>, update: Box<fn(usize, &mut UpdateData)>) -> Self {\n\n let l = Logic {\n\n id: 0,\n\n init,\n\n update\n\n };\n\n self.components.push(Box::new(l));\n", "file_path": "src/lib/entity.rs", "rank": 46, "score": 7.397837432078001 }, { "content": " use entity::Transform;\n\n use cgmath::{ Vector3, Rad };\n\n use std::f32::consts::FRAC_PI_2;\n\n\n\n let mut t = Transform::default();\n\n\n\n let x_axis = Vector3::unit_x();\n\n let y_axis = Vector3::unit_y();\n\n let z_axis = Vector3::unit_z();\n\n\n\n let r = t.right_vector();\n\n let u = t.up_vector();\n\n let f = t.forward_vector();\n\n\n\n assert_eq!(r, x_axis);\n\n assert_eq!(u, y_axis);\n\n assert_eq!(f, z_axis);\n\n\n\n assert_eq!(r.cross(u), f);\n\n assert_eq!(f.cross(r), u);\n", "file_path": "src/lib/lib.rs", "rank": 47, "score": 7.30459459521361 }, { "content": " color\n\n };\n\n\n\n self.components.push(Box::new(m));\n\n\n\n self\n\n }\n\n\n\n pub fn texture(mut self, path: &str) -> Self {\n\n let mut t = Texture {\n\n id: 0,\n\n path: path.into(),\n\n bytes: Vec::new(),\n\n dimensions: ImageDimensions::Dim2d { width: 0, height: 0, array_layers: 0 },\n\n buffer: None\n\n 
};\n\n t.init();\n\n\n\n self.components.push(Box::new(t));\n\n\n", "file_path": "src/lib/entity.rs", "rank": 48, "score": 7.223964152206019 }, { "content": " proj: Matrix4::from([[0.0; 4]; 4]),\n\n }\n\n }\n\n\n\n pub fn get_ubo(&self, model: Matrix4<f32>) -> UniformBufferObject {\n\n UniformBufferObject {\n\n model,\n\n view: self.view,\n\n proj: self.proj\n\n }\n\n }\n\n\n\n pub fn calculate_view(&mut self, position: &Transform) {\n\n let (x, y, z) = position.translation.into();\n\n\n\n self.view = Matrix4::look_at_rh(\n\n Point3::new(x, y, z), \n\n Point3::new(x, y, z) + position.forward_vector(),\n\n position.up_vector()\n\n );\n", "file_path": "src/lib/camera.rs", "rank": 49, "score": 6.801274589051848 }, { "content": " .add_buffer(self.vp_buffer.clone()).unwrap()\n\n .build().unwrap()\n\n );\n\n\n\n self.vp_set = vp_set;\n\n }\n\n\n\n pub fn recreate_swapchain(&mut self) {\n\n self.render_stage = RenderStage::NeedsRedraw;\n\n self.commands = None;\n\n\n\n let dimensions: [u32; 2] = self.surface.window().inner_size().into();\n\n let (new_swapchain, new_images) = match self.swapchain.recreate().dimensions(dimensions).build() {\n\n Ok(r) => r,\n\n Err(SwapchainCreationError::UnsupportedDimensions) => return,\n\n Err(e) => panic!(\"{:?}\", e)\n\n };\n\n self.swapchain = new_swapchain;\n\n let (new_framebuffers, new_color_buffer, new_normal_buffer) = Self::window_size_dependent_setup(self.device.clone(), &new_images, self.render_pass.clone(), &mut self.dynamic_state);\n\n self.framebuffers = new_framebuffers;\n", "file_path": "src/lib/renderer.rs", "rank": 50, "score": 6.777431721063864 }, { "content": "pub struct Engine {\n\n pub world: World,\n\n pub renderer: Renderer,\n\n pub debug_gui: DebugGui,\n\n initial_world: World,\n\n time: EngineTime,\n\n}\n\n\n\nimpl Engine {\n\n pub fn initialize(world: World, event_loop: &EventLoop<()>) -> Self {\n\n let initial_world = world.clone();\n\n\n\n let renderer = if let Some(cameras) = initial_world.get_components_of_type::<Camera>() {\n\n let camera_id = cameras[0].get_id();\n\n let camera = world.get_component_by_id::<Camera>(camera_id).unwrap();\n\n\n\n Renderer::new(event_loop, camera.clone())\n\n } else {\n\n panic!(\"No Cameras!\");\n\n };\n", "file_path": "src/lib/engine.rs", "rank": 51, "score": 6.718623301220511 }, { "content": " ];\n\n\n\n data\n\n }\n\n \n\n fn generate_sphere(resolution: u8) -> MeshData { \n\n let mut data = MeshData::empty();\n\n\n\n let t = (1.0 + (5.0f32).sqrt()) / 2.0;\n\n let mut v: Vec<Vector3<f32>> = Vec::new();\n\n let mut i: Vec<u16> = Vec::new();\n\n\n\n // Initial vertices\n\n v.append(&mut vec![\n\n [-1.0, t, 0.0].into(),\n\n [1.0, t, 0.0].into(),\n\n [-1.0, -t, 0.0].into(),\n\n [1.0, -t, 0.0].into(),\n\n [0.0, -1.0, t].into(),\n\n [0.0, 1.0, t].into(),\n", "file_path": "src/lib/mesh_data.rs", "rank": 52, "score": 6.579155885788284 }, { "content": "\n\n let time = EngineTime::new();\n\n let debug_gui = DebugGui::new();\n\n\n\n Self {\n\n initial_world,\n\n world,\n\n renderer,\n\n time,\n\n debug_gui\n\n }\n\n }\n\n\n\n pub fn start(mut self, event_loop: EventLoop<()>) {\n\n let mut gui = Gui::new(self.renderer.surface.clone(), self.renderer.queue.clone(), true);\n\n let mut ctx = gui.context();\n\n self.debug_gui.configure_fonts(&mut ctx);\n\n\n\n let mut previous_frame_end: Option<Box<dyn vulkano::sync::GpuFuture>> = Some(Box::new(vulkano::sync::now(self.renderer.device.clone())));\n\n\n", "file_path": "src/lib/engine.rs", "rank": 53, "score": 6.538640759859099 }, { "content": " [\n\n DummyVertex { 
position: [-1.0, -1.0] },\n\n DummyVertex { position: [-1.0, 1.0] },\n\n DummyVertex { position: [1.0, 1.0] },\n\n DummyVertex { position: [-1.0, -1.0] },\n\n DummyVertex { position: [1.0, 1.0] },\n\n DummyVertex { position: [1.0, -1.0] },\n\n ]\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\n#[derive(Copy, Clone)]\n\npub struct UniformBufferObject {\n\n pub model: Matrix4<f32>,\n\n pub view: Matrix4<f32>,\n\n pub proj: Matrix4<f32>\n\n}\n\n\n\n#[derive(Clone, Copy)]\n", "file_path": "src/lib/buffer_objects.rs", "rank": 54, "score": 6.537418102464498 }, { "content": "use std::fs::File;\n\nuse std::io::BufReader;\n\nuse cgmath::{ InnerSpace, Vector3 };\n\nuse obj::{ Obj, TexturedVertex, load_obj };\n\n\n\nuse crate::{ \n\n buffer_objects::Vertex,\n\n logger::{ self, MessageEmitter }\n\n};\n\n\n\n#[derive(Clone)]\n\npub enum PrimitiveType {\n\n Plane,\n\n Cube,\n\n Sphere(u8)\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum MeshType {\n\n Model(String),\n", "file_path": "src/lib/mesh_data.rs", "rank": 55, "score": 6.39767981941538 }, { "content": " pub fn update(&mut self) {\n\n self.total_time_ms = self.start_time.elapsed().as_millis() as f32;\n\n self.total_time_s = self.start_time.elapsed().as_secs_f32();\n\n\n\n self.delta_time = self.start_of_last_frame.elapsed().as_secs_f32();\n\n\n\n if self.last_60_frame_durations.len() < 100 {\n\n self.last_60_frame_durations.push(self.delta_time);\n\n } else {\n\n self.last_60_frame_durations.reverse();\n\n self.last_60_frame_durations.pop();\n\n self.last_60_frame_durations.reverse();\n\n self.last_60_frame_durations.push(self.delta_time);\n\n }\n\n \n\n let avg_duration_of_last_60_s: f32 = self.last_60_frame_durations.iter().sum::<f32>() / self.last_60_frame_durations.len() as f32;\n\n self.fps = avg_duration_of_last_60_s.recip();\n\n\n\n self.start_of_last_frame = Instant::now();\n\n }\n", "file_path": "src/lib/engine.rs", "rank": 56, "score": 6.298549855984035 }, { "content": " dynamic_state,\n\n framebuffers,\n\n color_buffer,\n\n normal_buffer,\n\n vp_set,\n\n render_stage,\n\n commands,\n\n img_index,\n\n acquire_future,\n\n final_images,\n\n }\n\n }\n\n //}}}\n\n\n\n pub fn start(&mut self, clear_color: [f32; 4]) {\n\n match self.render_stage {\n\n RenderStage::Stopped => {\n\n self.render_stage = RenderStage::Deferred;\n\n },\n\n RenderStage::NeedsRedraw => {\n", "file_path": "src/lib/renderer.rs", "rank": 57, "score": 6.13787276897801 }, { "content": " World {\n\n entities: HashMap::new(),\n\n lights: Vec::new(),\n\n void_color: [0.01, 0.01, 0.01, 1.0],\n\n next_id: 0\n\n }\n\n }\n\n\n\n pub fn new_entity(&mut self) -> EntityBuilder {\n\n EntityBuilder::new()\n\n }\n\n\n\n pub fn add_entity(&mut self, mut entity: EntityBuilder) {\n\n let id = self.get_next_entity_id();\n\n entity.set_id(id);\n\n\n\n self.entities.insert(id, entity.components);\n\n }\n\n\n\n pub fn get_next_entity_id(&mut self) -> usize {\n", "file_path": "src/lib/world.rs", "rank": 58, "score": 6.091423251360988 }, { "content": " self.next_id += 1;\n\n self.next_id\n\n }\n\n\n\n pub fn get_all_ids(&self) -> Option<Vec<usize>> {\n\n let keys: Vec<usize> = self.entities.keys().map(|&k| k.clone()).collect();\n\n\n\n if keys.is_empty() {\n\n None\n\n } else {\n\n Some(keys)\n\n }\n\n }\n\n\n\n pub fn get_entity(&self, id: usize) -> Option<&Vec<Box<dyn Component>>> {\n\n self.entities.get(&id)\n\n }\n\n\n\n pub fn get_entity_mut(&mut self, id: usize) -> Option<&mut Vec<Box<dyn Component>>> {\n\n self.entities.get_mut(&id)\n", "file_path": "src/lib/world.rs", "rank": 59, "score": 
6.037711371678007 }, { "content": "\n\n pub fn update_ambient(&mut self) {\n\n let now = Instant::now();\n\n self.ambient = now - self.temp_time;\n\n self.temp_time = now;\n\n }\n\n \n\n pub fn update_directional(&mut self) {\n\n let now = Instant::now();\n\n self.directional = now - self.temp_time;\n\n self.temp_time = now;\n\n }\n\n \n\n pub fn update_draw_call(&mut self) {\n\n let now = Instant::now();\n\n self.draw_call = now - self.temp_time;\n\n self.temp_time = now;\n\n }\n\n}\n\n\n", "file_path": "src/lib/engine.rs", "rank": 60, "score": 6.019815221406945 }, { "content": " let mut frame_breakdown = FrameTimeBreakdown::new();\n\n\n\n // Initialize Entities\n\n let logics = self.initial_world.get_components_of_type::<Logic>().unwrap_or_default();\n\n \n\n for l in &logics {\n\n (l.init)(l.get_id(), &mut self.world)\n\n }\n\n\n\n let window_size = self.renderer.surface.window().inner_size();\n\n let mut input = Input::new((window_size.width, window_size.height));\n\n \n\n event_loop.run(move |event, _, control_flow| {\n\n gui.update(&event);\n\n\n\n if let Event::DeviceEvent { event, .. } = &event {\n\n input.parse(event)\n\n }\n\n\n\n match event {\n", "file_path": "src/lib/engine.rs", "rank": 61, "score": 5.855235127841844 }, { "content": " 10, 7, 6,\n\n 7, 1, 8,\n\n 3, 9, 4, \n\n 3, 4, 2, \n\n 3, 2, 6, \n\n 3, 6, 8,\n\n 3, 8, 9,\n\n 4, 9, 5,\n\n 2, 4, 11, \n\n 6, 2, 10, \n\n 8, 6, 7,\n\n 9, 8, 1\n\n ]);\n\n\n\n let mut last_index = 11;\n\n for _ in 0..resolution {\n\n let mut new_indices: Vec<u16> = Vec::new();\n\n for face in i.clone().chunks(3) {\n\n let mut new_points: Vec<Vector3<f32>> = vec![\n\n (v[face[0] as usize] + v[face[1] as usize]) / 2.0,\n", "file_path": "src/lib/mesh_data.rs", "rank": 62, "score": 5.840682161651293 }, { "content": " ui.columns(2, |columns| {\n\n self.debug_log(&mut columns[0]);\n\n self.frame_breakdown(&mut columns[1], time, frame_breakdown);\n\n });\n\n }\n\n });\n\n });\n\n }\n\n\n\n pub fn configure_fonts(&mut self, ctx: &mut CtxRef) {\n\n let mut font_style = FontDefinitions::default();\n\n font_style.font_data.insert(\"JetBrains Mono\".into(), Cow::Borrowed(include_bytes!(\"../../fonts/JetBrainsMono-Regular.ttf\")));\n\n font_style.fonts_for_family.insert(FontFamily::Monospace, vec![\"JetBrains Mono\".into(), ]);\n\n font_style.family_and_size.insert(egui::TextStyle::Body, (FontFamily::Monospace, 20.0));\n\n font_style.family_and_size.insert(egui::TextStyle::Button, (FontFamily::Proportional, 16.0));\n\n ctx.set_fonts(font_style);\n\n }\n\n\n\n fn debug_log_menu(&mut self, ui: &mut Ui, time: &EngineTime) {\n\n ui.horizontal_top(|ui| {\n", "file_path": "src/lib/gui.rs", "rank": 63, "score": 5.7670634992203915 }, { "content": " let ids = self.world.get_all_ids().unwrap_or_default();\n\n for id in ids {\n\n if let Some(transform) = self.world.get_component_by_id::<Transform>(id) {\n\n if let Some(mesh) = self.world.get_component_by_id::<Mesh>(id) {\n\n self.renderer.geometry(\n\n mesh, \n\n transform, \n\n self.world.get_component_by_id::<Material>(id),\n\n self.initial_world.get_component_by_id_mut::<Texture>(id)\n\n );\n\n }\n\n }\n\n }\n\n\n\n let mut update_data = UpdateData {\n\n world: &mut self.world,\n\n time: &self.time,\n\n input: &input\n\n };\n\n\n", "file_path": "src/lib/engine.rs", "rank": 64, "score": 5.666389224220959 }, { "content": " }\n\n\n\n pub fn get_component_by_id<T: Component>(&self, id: usize) -> Option<&T> {\n\n let entity = self.entities.get(&id)?;\n\n entity.iter()\n\n .find(|c| c.downcast_ref::<T>().is_some())\n\n 
.map(|c| c.downcast_ref::<T>().unwrap())\n\n }\n\n \n\n pub fn get_component_by_id_mut<T: Component>(&mut self, id: usize) -> Option<&mut T> {\n\n let entity = self.entities.get_mut(&id)?;\n\n entity.iter_mut()\n\n .find(|c| c.downcast_ref::<T>().is_some())\n\n .map(|c| c.downcast_mut::<T>().unwrap())\n\n }\n\n\n\n pub fn get_first_component_of_type<T: Component>(&self) -> Option<&T> {\n\n let components: Vec<&Box<dyn Component>> = self.entities.values().flatten().collect();\n\n let c = components.iter().find(|c| c.downcast_ref::<T>().is_some()).map(|c| c.downcast_ref::<T>().unwrap());\n\n\n", "file_path": "src/lib/world.rs", "rank": 65, "score": 5.655233498628547 }, { "content": " MeshType::Model(path) => {\n\n self.data = MeshData::load(&path);\n\n },\n\n MeshType::Primitive(primitive_type) => {\n\n self.data = MeshData::generate(primitive_type);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Component)]\n\npub struct Material {\n\n id: usize,\n\n pub color: [f32; 3]\n\n}\n\n\n\n#[derive(Clone, Component)]\n\npub struct Texture {\n\n id: usize,\n\n pub path: String,\n", "file_path": "src/lib/entity.rs", "rank": 66, "score": 5.437102622101846 }, { "content": " let (sx, sy, sz) = self.scale.into();\n\n let s = Matrix4::from_cols(\n\n [sx, 0.0, 0.0, 0.0].into(),\n\n [0.0, sy, 0.0, 0.0].into(),\n\n [0.0, 0.0, sz, 0.0].into(),\n\n [0.0, 0.0, 0.0, 1.0].into(),\n\n );\n\n\n\n let global_r = Matrix4::from(self.rotation);\n\n let local_r = Matrix4::from(self.local_rotation);\n\n\n\n t * global_r * local_r * s\n\n }\n\n\n\n pub fn forward_vector(&self) -> Vector3<f32> {\n\n (Matrix4::from(self.rotation) * Matrix4::from(self.local_rotation) * Vector3::unit_z().extend(1.0)).truncate()\n\n }\n\n\n\n pub fn right_vector(&self) -> Vector3<f32> {\n\n (Matrix4::from(self.rotation) * Matrix4::from(self.local_rotation) * Vector3::unit_x().extend(1.0)).truncate()\n", "file_path": "src/lib/entity.rs", "rank": 67, "score": 5.3939060611498375 }, { "content": " logger::log_error(&format!(\"Unable to read '{}'\", path), MessageEmitter::World);\n\n MeshData::empty()\n\n }\n\n }\n\n \n\n pub fn generate(mesh_type: PrimitiveType) -> MeshData {\n\n match mesh_type {\n\n PrimitiveType::Plane => {\n\n Self::generate_plane()\n\n },\n\n PrimitiveType::Cube => {\n\n Self::generate_cube()\n\n },\n\n PrimitiveType::Sphere(resolution) => {\n\n Self::generate_sphere(resolution)\n\n }\n\n }\n\n }\n\n\n\n fn generate_plane() -> MeshData {\n", "file_path": "src/lib/mesh_data.rs", "rank": 68, "score": 5.084910926418346 }, { "content": "\n\n let mut commands = self.commands.take().unwrap();\n\n commands\n\n .draw(\n\n self.directional_pipeline.clone(),\n\n &self.dynamic_state,\n\n vec![self.dummy_verts.clone()],\n\n directional_set.clone(),\n\n ()\n\n )\n\n .unwrap();\n\n self.commands = Some(commands);\n\n }\n\n\n\n pub fn finish(&mut self, previous_frame_end: &mut Option<Box<dyn GpuFuture>>, gui: &mut Gui) {\n\n match self.render_stage {\n\n RenderStage::Directional => { },\n\n RenderStage::NeedsRedraw => {\n\n self.recreate_swapchain();\n\n self.commands = None;\n", "file_path": "src/lib/renderer.rs", "rank": 69, "score": 4.934186343510454 }, { "content": " reader.next_frame(&mut self.bytes).unwrap();\n\n }\n\n\n\n pub fn get_sampler(device: &Arc<Device>) -> Arc<Sampler> {\n\n Sampler::new(\n\n device.clone(),\n\n vulkano::sampler::Filter::Linear,\n\n vulkano::sampler::Filter::Linear,\n\n vulkano::sampler::MipmapMode::Nearest,\n\n vulkano::sampler::SamplerAddressMode::Repeat,\n\n 
vulkano::sampler::SamplerAddressMode::Repeat,\n\n vulkano::sampler::SamplerAddressMode::Repeat,\n\n 0.0, 1.0, 0.0, 0.0\n\n ).unwrap()\n\n }\n\n\n\n pub unsafe fn get_buffer(&mut self, queue: &Arc<Queue>) -> (Arc<ImageView<Arc<ImmutableImage>>>, Box<dyn GpuFuture>) {\n\n let buffer = if let Some(b) = &self.buffer {\n\n b.clone()\n\n } else {\n", "file_path": "src/lib/entity.rs", "rank": 70, "score": 4.450895699277675 }, { "content": " let logics = self.initial_world.get_components_of_type::<Logic>().unwrap_or_default();\n\n for logic in &logics {\n\n (logic.update)(logic.get_id(), &mut update_data)\n\n }\n\n frame_breakdown.update_object_loop();\n\n\n\n self.renderer.ambient();\n\n frame_breakdown.update_ambient();\n\n\n\n for i in 0..self.world.lights.len() {\n\n self.renderer.directional(&self.world.lights[i]);\n\n }\n\n frame_breakdown.update_directional();\n\n\n\n self.renderer.finish(&mut previous_frame_end, &mut gui);\n\n frame_breakdown.update_draw_call();\n\n \n\n self.time.update();\n\n input.update();\n\n },\n\n Event::RedrawRequested(_) => {\n\n self.renderer.surface.window().request_redraw();\n\n }\n\n _ => ()\n\n }\n\n })\n\n }\n\n}\n", "file_path": "src/lib/engine.rs", "rank": 71, "score": 4.450130070506187 }, { "content": " None\n\n }\n\n }\n\n\n\n fn filter_messages(&self, filter: &dyn Fn(&&Message) -> bool) -> Option<Vec<Message>> {\n\n if self.borrow().messages.is_some() {\n\n let messages = self.borrow().messages.as_ref().unwrap().clone();\n\n Some(messages.values()\n\n .filter(filter)\n\n .map(|m| m.to_owned())\n\n .collect())\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn log(&self, mut message: Message) {\n\n let time = SystemTime::now();\n\n message.time = time;\n\n\n", "file_path": "src/lib/logger.rs", "rank": 72, "score": 4.323484889577966 }, { "content": " pub fn geometry(&mut self, mesh: &Mesh, transform: &Transform, material: Option<&Material>, texture: Option<&mut Texture>) {\n\n match self.render_stage {\n\n RenderStage::Deferred => { },\n\n RenderStage::NeedsRedraw => {\n\n self.recreate_swapchain();\n\n self.render_stage = RenderStage::Stopped;\n\n self.commands = None;\n\n return;\n\n },\n\n _ => {\n\n self.render_stage = RenderStage::Stopped;\n\n self.commands = None;\n\n return;\n\n }\n\n }\n\n\n\n let model_buffer = {\n\n let model_matrix = transform.model_matrix();\n\n\n\n let uniform_data = ModelBufferObject {\n", "file_path": "src/lib/renderer.rs", "rank": 73, "score": 4.252509301791204 }, { "content": " c\n\n }\n\n\n\n pub fn get_components_of_type<T: Component>(&self) -> Option<Vec<&T>> {\n\n let components: Vec<&Box<dyn Component>> = self.entities.values().flatten().collect();\n\n let components: Vec<&T> = components.iter()\n\n .filter_map(|c| c.downcast_ref::<T>())\n\n .collect();\n\n\n\n if components.is_empty() {\n\n None\n\n } else {\n\n Some(components)\n\n }\n\n }\n\n\n\n pub fn add_light(&mut self, light: DirectionalLight) {\n\n logger::log_debug(\"Adding directional light to world.\", MessageEmitter::World);\n\n \n\n self.lights.push(light);\n\n }\n\n}\n", "file_path": "src/lib/world.rs", "rank": 74, "score": 4.221361624761791 }, { "content": " };\n\n\n\n pool.next(uniform_data).unwrap()\n\n }\n\n \n\n fn window_size_dependent_setup(\n\n device: Arc<Device>,\n\n images: &[Arc<SwapchainImage<Window>>],\n\n render_pass: Arc<RenderPass>,\n\n dynamic_state: &mut DynamicState,\n\n ) -> (Vec<Arc<dyn FramebufferAbstract + Send + Sync>>, Arc<AttachmentImage>, Arc<AttachmentImage>) {\n\n let dimensions = images[0].dimensions();\n\n \n\n let viewport = 
Viewport {\n\n origin: [0.0, 0.0],\n\n dimensions: [dimensions.width() as f32, dimensions.height() as f32],\n\n depth_range: 0.0..1.0,\n\n };\n\n dynamic_state.viewports = Some(vec![viewport]);\n\n\n", "file_path": "src/lib/renderer.rs", "rank": 75, "score": 4.211966123028532 }, { "content": " bytes: Vec<u8>,\n\n dimensions: ImageDimensions,\n\n buffer: Option<Arc<CpuAccessibleBuffer<[u8]>>>\n\n}\n\n\n\nimpl Texture {\n\n fn init(&mut self) {\n\n let png_bytes = fs::read(&self.path).unwrap(); \n\n let cursor = Cursor::new(png_bytes);\n\n let decoder = png::Decoder::new(cursor);\n\n let mut reader = decoder.read_info().unwrap();\n\n let info = reader.info();\n\n\n\n self.dimensions = ImageDimensions::Dim2d {\n\n width: info.width,\n\n height: info.height,\n\n array_layers: 1\n\n };\n\n\n\n self.bytes.resize((info.width * info.height * 4) as usize, 0);\n", "file_path": "src/lib/entity.rs", "rank": 76, "score": 4.172541768493106 }, { "content": "use std::sync::Arc;\n\nuse std::io::Cursor;\n\nuse std::fs;\n\nuse vulkano::device::{ Device, Queue };\n\nuse vulkano::format::Format;\n\nuse vulkano::image::{ ImageDimensions, ImmutableImage, view::ImageView };\n\nuse vulkano::sampler::Sampler;\n\nuse vulkano::command_buffer::{ PrimaryAutoCommandBuffer, CommandBufferExecFuture };\n\nuse vulkano::sync::NowFuture;\n\n\n", "file_path": "src/lib/material.rs", "rank": 77, "score": 4.113995321590814 }, { "content": "use std::sync::Arc;\n\nuse vulkano::buffer::{ BufferUsage, CpuAccessibleBuffer, CpuBufferPool, cpu_pool::CpuBufferPoolSubbuffer };\n\nuse vulkano::command_buffer::{ AutoCommandBufferBuilder, CommandBufferUsage, DynamicState, PrimaryAutoCommandBuffer, SubpassContents };\n\nuse vulkano::descriptor_set::{ DescriptorSet, PersistentDescriptorSet };\n\nuse vulkano::device::{ Device, Queue, DeviceExtensions };\n\nuse vulkano::device::physical::{ PhysicalDevice, PhysicalDeviceType };\n\nuse vulkano::format::Format;\n\nuse vulkano::image::{ ImageAccess, ImageUsage };\n\nuse vulkano::image::attachment::AttachmentImage;\n\nuse vulkano::image::swapchain::SwapchainImage;\n\nuse vulkano::image::view::ImageView;\n\nuse vulkano::instance::Instance;\n\nuse vulkano::memory::pool::StdMemoryPool;\n\nuse vulkano::pipeline::blend::{ AttachmentBlend, BlendFactor, BlendOp };\n\nuse vulkano::pipeline::{ GraphicsPipeline, GraphicsPipelineAbstract };\n\nuse vulkano::pipeline::viewport::Viewport;\n\nuse vulkano::render_pass::{ Framebuffer, FramebufferAbstract, RenderPass, Subpass };\n\nuse vulkano::swapchain::{ FullscreenExclusive, PresentMode, Surface, SurfaceTransform, Swapchain, SwapchainAcquireFuture, SwapchainCreationError };\n\nuse vulkano::sync::{ FlushError, GpuFuture };\n\nuse vulkano::Version;\n", "file_path": "src/lib/renderer.rs", "rank": 78, "score": 3.985114476963193 }, { "content": "\n\n if let Some(object) = load_obj(input).ok() as Option<Obj<TexturedVertex, u16>> {\n\n let mut data = MeshData::empty();\n\n data.indices = object.indices;\n\n \n\n data.vertices = object.vertices.iter()\n\n .map(|v| Vertex {\n\n position: v.position,\n\n normal: v.normal,\n\n color: [1.0; 3],\n\n uv: [v.texture[0], v.texture[1]]\n\n })\n\n .collect();\n\n\n\n data\n\n } else {\n\n logger::log_error(&format!(\"Unable to load object data from '{}'\", path), MessageEmitter::World);\n\n MeshData::empty()\n\n }\n\n } else {\n", "file_path": "src/lib/mesh_data.rs", "rank": 79, "score": 3.893730681230932 }, { "content": "extern crate proc_macro;\n\n\n\nuse proc_macro::TokenStream;\n\nuse quote::quote;\n\nuse 
syn;\n\n\n\n#[proc_macro_derive(Component)]\n", "file_path": "hephaestus-macros/src/lib.rs", "rank": 80, "score": 3.8668488630658526 }, { "content": "use vulkano_win::VkSurfaceBuild;\n\nuse winit::event_loop::EventLoop;\n\nuse winit::window::{ Window, WindowBuilder };\n\nuse egui_winit_vulkano::Gui;\n\n\n\nuse crate::{\n\n buffer_objects::*,\n\n camera::Camera,\n\n shaders::{ deferred, directional, ambient },\n\n light::DirectionalLight,\n\n logger::{ self, MessageEmitter },\n\n entity::*\n\n};\n\n\n", "file_path": "src/lib/renderer.rs", "rank": 81, "score": 3.857043438422733 }, { "content": " .unwrap()\n\n .draw(\n\n self.ambient_pipeline.clone(),\n\n &self.dynamic_state,\n\n vec![self.dummy_verts.clone()],\n\n ambient_set.clone(),\n\n ()\n\n )\n\n .unwrap();\n\n self.commands = Some(commands);\n\n }\n\n\n\n pub fn directional(&mut self, directional_light: &DirectionalLight) {\n\n match self.render_stage {\n\n RenderStage::Ambient => {\n\n self.render_stage = RenderStage::Directional;\n\n },\n\n RenderStage::Directional => { },\n\n RenderStage::NeedsRedraw => {\n\n self.recreate_swapchain();\n", "file_path": "src/lib/renderer.rs", "rank": 82, "score": 3.7912396420763237 }, { "content": " if ui.button(if self.show_debug_log { \"Hide Debug Log\" } else { \"Show Debug Log\"}).clicked() {\n\n if !self.show_debug_log { ui.shrink_height_to_current(); }\n\n\n\n self.show_debug_log = !self.show_debug_log;\n\n }\n\n \n\n ui.add_space(ui.available_size_before_wrap().x - ui.fonts().glyph_width(egui::TextStyle::Monospace, '0') * 10.0);\n\n\n\n ui.label(format!(\"{} FPS\", time.fps.round()));\n\n });\n\n }\n\n\n\n fn debug_log(&mut self, ui: &mut Ui) {\n\n // Snap to bottom once egui 0.14.3 is released\n\n egui::ScrollArea::auto_sized()\n\n .show(ui, |ui| {\n\n let messages = logger::get_messages();\n\n for message in messages {\n\n ui.colored_label(message.level.color(), message.formatted());\n\n ui.separator();\n", "file_path": "src/lib/gui.rs", "rank": 83, "score": 3.697057219305203 }, { "content": " }\n\n };\n\n\n\n if suboptimal {\n\n self.recreate_swapchain();\n\n return;\n\n }\n\n\n\n let clear_values = vec![[0.0; 4].into(), clear_color.into(), clear_color.into(), 1f32.into()];\n\n\n\n let mut commands = AutoCommandBufferBuilder::primary(self.device.clone(), self.queue.family(), CommandBufferUsage::OneTimeSubmit).unwrap();\n\n commands\n\n .begin_render_pass(self.framebuffers[img_index].clone(), SubpassContents::Inline, clear_values)\n\n .unwrap();\n\n\n\n self.commands = Some(commands);\n\n self.img_index = img_index;\n\n self.acquire_future = Some(acquire_future);\n\n }\n\n\n", "file_path": "src/lib/renderer.rs", "rank": 84, "score": 3.6343439234141206 }, { "content": " directional_buffer: CpuBufferPool<DirectionalBufferObject>,\n\n render_pass: Arc<RenderPass>,\n\n deferred_pipeline: Arc<dyn GraphicsPipelineAbstract + Send + Sync>,\n\n directional_pipeline: Arc<dyn GraphicsPipelineAbstract + Send + Sync>,\n\n ambient_pipeline: Arc<dyn GraphicsPipelineAbstract + Send + Sync>,\n\n dummy_verts: Arc<CpuAccessibleBuffer<[DummyVertex]>>,\n\n dynamic_state: DynamicState,\n\n framebuffers: Vec<Arc<dyn FramebufferAbstract + Send + Sync>>,\n\n color_buffer: Arc<AttachmentImage>,\n\n normal_buffer: Arc<AttachmentImage>,\n\n vp_set: Arc<dyn DescriptorSet + Send + Sync>,\n\n render_stage: RenderStage,\n\n commands: Option<AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>>,\n\n img_index: usize,\n\n acquire_future: Option<SwapchainAcquireFuture<Window>>,\n\n}\n\n\n\nimpl Renderer {\n\n //{{{ 
Renderer::new(...)\n\n pub fn new(event_loop: &EventLoop<()>, camera: Camera) -> Self {\n", "file_path": "src/lib/renderer.rs", "rank": 85, "score": 3.62928875221564 }, { "content": " vec![vertex_buffer.clone()],\n\n index_buffer.clone(),\n\n vec![self.vp_set.clone(), model_set.clone(), tex_set.clone()],\n\n (),\n\n ).unwrap();\n\n self.commands = Some(commands);\n\n }\n\n\n\n pub fn ambient(&mut self) {\n\n match self.render_stage {\n\n RenderStage::Deferred => {\n\n self.render_stage = RenderStage::Ambient;\n\n },\n\n RenderStage::Ambient => {\n\n return;\n\n },\n\n RenderStage::NeedsRedraw => {\n\n self.recreate_swapchain();\n\n self.commands = None;\n\n self.render_stage = RenderStage::Stopped;\n", "file_path": "src/lib/renderer.rs", "rank": 86, "score": 3.584987358019874 }, { "content": "use winit::event_loop::EventLoop;\n\nuse cgmath::Deg;\n\n\n\nuse hephaestus_lib::{\n\n engine::Engine,\n\n world::World,\n\n light::DirectionalLight,\n\n logger::{ self, MessageEmitter },\n\n mesh_data::{ MeshType, PrimitiveType },\n\n entity::Logic,\n\n camera,\n\n};\n\n\n\n#[allow(unused)]\n", "file_path": "src/main.rs", "rank": 87, "score": 3.5550770965722895 }, { "content": " message.print();\n\n\n\n if self.borrow().messages.is_some() {\n\n let mut messages = self.borrow_mut().messages.take().unwrap();\n\n messages.insert(time, message);\n\n\n\n self.borrow_mut().messages = Some(messages);\n\n } else {\n\n let mut messages = HashMap::new();\n\n messages.insert(time, message);\n\n \n\n self.borrow_mut().messages = Some(messages);\n\n }\n\n }\n\n\n\n fn get_all_messages(&self) -> Option<Vec<Message>> {\n\n if self.borrow().messages.is_some() {\n\n let mut messages: Vec<_> = self.borrow().messages.as_ref().unwrap().clone().into_iter().collect();\n\n messages.sort_by(|x, y| x.0.cmp(&y.0));\n\n \n", "file_path": "src/lib/logger.rs", "rank": 88, "score": 3.4464145905365506 }, { "content": "pub mod deferred {\n\n pub mod vs {\n\n vulkano_shaders::shader! {\n\n ty: \"vertex\",\n\n path: \"src/shaders/deferred_vert.glsl\"\n\n }\n\n }\n\n \n\n pub mod fs {\n\n vulkano_shaders::shader! {\n\n ty: \"fragment\",\n\n path: \"src/shaders/deferred_frag.glsl\"\n\n }\n\n }\n\n}\n\npub mod directional {\n\n pub mod vs {\n\n vulkano_shaders::shader! 
{\n\n ty: \"vertex\",\n\n path: \"src/shaders/directional_vert.glsl\"\n", "file_path": "src/lib/shaders.rs", "rank": 89, "score": 3.3644701178807788 }, { "content": " ).unwrap();\n\n\n\n let queue = queues.next().unwrap();\n\n\n\n let (swapchain, images) = {\n\n let caps = surface.capabilities(physical).unwrap();\n\n let mut usage = caps.supported_usage_flags;\n\n usage.depth_stencil_attachment = false;\n\n usage.storage = false;\n\n\n\n let (format, color_space) = caps.supported_formats[0];\n\n let alpha = caps.supported_composite_alpha.iter().next().unwrap();\n\n let dimensions: [u32; 2] = surface.window().inner_size().into();\n\n\n\n Swapchain::start(device.clone(), surface.clone())\n\n .num_images(caps.min_image_count)\n\n .dimensions(dimensions)\n\n .format(format)\n\n .layers(1)\n\n .usage(ImageUsage::color_attachment())\n", "file_path": "src/lib/renderer.rs", "rank": 90, "score": 3.2951838972926795 }, { "content": " color: [1.0; 3],\n\n uv: [\n\n v.z.atan2(v.x) / std::f32::consts::TAU,\n\n (v.y.asin() / std::f32::consts::PI) + 0.5,\n\n ], // https://www.alexisgiard.com/icosahedron-sphere/\n\n normal: v.normalize().into() // smooth shading\n\n }\n\n }).collect();\n\n data.indices = i;\n\n \n\n data\n\n }\n\n}\n\n\n", "file_path": "src/lib/mesh_data.rs", "rank": 91, "score": 3.276136581647365 }, { "content": " }\n\n }\n\n \n\n pub mod fs {\n\n vulkano_shaders::shader! {\n\n ty: \"fragment\",\n\n path: \"src/shaders/directional_frag.glsl\"\n\n }\n\n }\n\n}\n\npub mod ambient {\n\n pub mod vs {\n\n vulkano_shaders::shader! {\n\n ty: \"vertex\",\n\n path: \"src/shaders/ambient_vert.glsl\"\n\n }\n\n }\n\n \n\n pub mod fs {\n\n vulkano_shaders::shader! {\n\n ty: \"fragment\",\n\n path: \"src/shaders/ambient_frag.glsl\"\n\n }\n\n }\n\n}\n", "file_path": "src/lib/shaders.rs", "rank": 92, "score": 3.235317034714407 }, { "content": " match p.properties().device_type {\n\n PhysicalDeviceType::DiscreteGpu => 0,\n\n PhysicalDeviceType::IntegratedGpu => 1,\n\n PhysicalDeviceType::VirtualGpu => 2,\n\n PhysicalDeviceType::Cpu => 3,\n\n PhysicalDeviceType::Other => 4,\n\n }\n\n }).unwrap();\n\n\n\n logger::log_debug(&format!(\"Using device: {} (type: {:?})\", physical.properties().device_name, physical.properties().device_type), MessageEmitter::Renderer);\n\n\n\n let queue_family = physical.queue_families().find(|&q| {\n\n q.supports_graphics() && surface.is_supported(q).unwrap_or(false)\n\n }).unwrap();\n\n\n\n let (device, mut queues) = Device::new(\n\n physical,\n\n physical.supported_features(),\n\n &device_ext,\n\n [(queue_family, 0.5)].iter().cloned()\n", "file_path": "src/lib/renderer.rs", "rank": 93, "score": 3.1609332568241246 }, { "content": " println!(\"Failed to flush future: {:?}\", e);\n\n *previous_frame_end = Some(Box::new(vulkano::sync::now(self.device.clone())) as Box<_>);\n\n }\n\n }\n\n\n\n\n\n self.commands = None;\n\n self.render_stage = RenderStage::Stopped;\n\n }\n\n\n\n pub fn update_camera(&mut self, camera: &Camera) {\n\n self.vp_buffer = CpuAccessibleBuffer::from_data(\n\n self.device.clone(), \n\n BufferUsage::uniform_buffer(), \n\n false, \n\n camera.get_vp_buffer(self.surface.window().inner_size().into())\n\n ).unwrap();\n\n \n\n let vp_layout = self.deferred_pipeline.layout().descriptor_set_layouts().get(0).unwrap();\n\n let vp_set = Arc::new(PersistentDescriptorSet::start(vp_layout.clone())\n", "file_path": "src/lib/renderer.rs", "rank": 94, "score": 3.0115614583185373 }, { "content": " self.color = color;\n\n }\n\n \n\n fn add_texture(&mut self, tex_path: &str) {\n\n 
let png_bytes = fs::read(&tex_path).unwrap(); \n\n let cursor = Cursor::new(png_bytes);\n\n let decoder = png::Decoder::new(cursor);\n\n let mut reader = decoder.read_info().unwrap();\n\n let info = reader.info();\n\n let dimensions = ImageDimensions::Dim2d {\n\n width: info.width,\n\n height: info.height,\n\n array_layers: 1\n\n };\n\n let mut image_data = Vec::new();\n\n image_data.resize((info.width * info.height * 4) as usize, 0);\n\n reader.next_frame(&mut image_data).unwrap();\n\n\n\n self.texture_data = Some((image_data, dimensions));\n\n }\n", "file_path": "src/lib/material.rs", "rank": 95, "score": 2.92425041626901 }, { "content": " passes: [\n\n {\n\n color: [color, normals],\n\n depth_stencil: {depth},\n\n input: []\n\n },\n\n {\n\n color: [final_color],\n\n depth_stencil: {},\n\n input: [color, normals]\n\n }\n\n ]\n\n ).unwrap());\n\n\n\n let deferred_pass = Subpass::from(render_pass.clone(), 0).unwrap();\n\n let lighting_pass = Subpass::from(render_pass.clone(), 1).unwrap();\n\n\n\n let deferred_vs = deferred::vs::Shader::load(device.clone()).unwrap();\n\n let deferred_fs = deferred::fs::Shader::load(device.clone()).unwrap();\n\n \n", "file_path": "src/lib/renderer.rs", "rank": 96, "score": 2.8273762718724864 }, { "content": " pub fn print(&self) {\n\n match self.level {\n\n LogLevel::Debug => { \n\n println!(\"{}\", self.formatted().dimmed())\n\n },\n\n LogLevel::Info => { \n\n println!(\"{}\", self.formatted().normal())\n\n },\n\n LogLevel::Warning => { \n\n println!(\"{}\", self.formatted().yellow())\n\n },\n\n LogLevel::Error => { \n\n println!(\"{}\", self.formatted().red())\n\n },\n\n }\n\n }\n\n\n\n pub fn formatted(&self) -> String {\n\n let emitter = match &self.emitter {\n\n MessageEmitter::Object(e) => e,\n", "file_path": "src/lib/logger.rs", "rank": 97, "score": 2.7521230023078798 }, { "content": " [0.0, -1.0, -t].into(),\n\n [0.0, 1.0, -t].into(),\n\n [t, 0.0, -1.0].into(),\n\n [t, 0.0, 1.0].into(),\n\n [-t, 0.0, -1.0].into(),\n\n [-t, 0.0, 1.0].into(),\n\n ]);\n\n // Put all vertices on unit sphere\n\n v = v.iter().map(|vertex| vertex.normalize()).collect();\n\n\n\n // Initial faces\n\n i.append(&mut vec![\n\n 0, 11, 5,\n\n 0, 5, 1,\n\n 0, 1, 7,\n\n 0, 7, 10,\n\n 0, 10, 11,\n\n 1, 5, 9,\n\n 5, 11, 4,\n\n 11, 10, 2, \n", "file_path": "src/lib/mesh_data.rs", "rank": 98, "score": 2.625775523424627 }, { "content": " };\n\n\n\n let index_buffer = unsafe {\n\n let buffer = CpuAccessibleBuffer::uninitialized_array(\n\n self.device.clone(),\n\n mesh.data.indices.len() as u64,\n\n BufferUsage::index_buffer(),\n\n false,\n\n ).unwrap();\n\n\n\n {\n\n let mut mapping = buffer.write().unwrap();\n\n mapping.clone_from_slice(mesh.data.indices.as_slice());\n\n }\n\n\n\n buffer\n\n };\n\n \n\n let layout = self.deferred_pipeline.layout().descriptor_set_layouts().get(2).unwrap();\n\n let (image, mut texture_future) = if let Some(texture) = texture {\n", "file_path": "src/lib/renderer.rs", "rank": 99, "score": 2.5237541402403183 } ]
Rust
src/shard_ctrler/server.rs
gloriallluo/MadRaft
8c2b480b431445f5182791bdbd69b9528e69a021
use std::collections::HashMap;
use crate::{
    shard_ctrler::{msg::*, N_SHARDS},
    kvraft::{server::Server, state::State},
};
use serde::{Deserialize, Serialize};

pub type ShardCtrler = Server<ShardInfo>;

#[derive(Debug, Serialize, Deserialize)]
pub struct ShardInfo {
    configs: Vec<Config>,
}

impl Default for ShardInfo {
    fn default() -> Self {
        Self {
            configs: vec![Config::default()],
        }
    }
}

impl ShardInfo {
    fn new_config(&self) -> Config {
        self.configs
            .last()
            .map(|config| {
                let mut config = config.clone();
                config.num += 1;
                config
            })
            .unwrap()
    }
}

impl State for ShardInfo {
    type Command = Op;
    type Output = Option<Config>;

    fn apply(&mut self, cmd: Self::Command) -> Self::Output {
        match cmd {
            Op::Query { num } => {
                if num < self.configs.len() as ConfigId {
                    Some(self.configs[num as usize].clone())
                } else {
                    self.configs.last().map(|v| v.clone())
                }
            },
            Op::Move { shard, gid } => {
                let mut new_config = self.new_config();
                new_config.shards[shard] = gid;
                self.configs.push(new_config.clone());
                Some(new_config)
            },
            Op::Join { groups } => {
                let mut new_config = self.new_config();
                let mut ng = Vec::new();
                groups
                    .into_iter()
                    .for_each(|g| {
                        ng.push(g.0);
                        new_config.groups.insert(g.0, g.1);
                    });
                new_config.balance_join(ng);
                self.configs.push(new_config.clone());
                Some(new_config)
            },
            Op::Leave { gids } => {
                let mut new_config = self.new_config();
                gids
                    .iter()
                    .for_each(|g| {
                        new_config.groups.remove(g);
                    });
                new_config.balance_leave(gids);
                self.configs.push(new_config.clone());
                Some(new_config)
            },
        }
    }
}

impl Config {
    fn balance_join(&mut self, mut new_groups: Vec<Gid>) {
        let n_groups = self.groups.len();
        let opt = N_SHARDS / n_groups;
        let r = N_SHARDS - n_groups * opt;
        let mut re_alloc_shards: Vec<usize> = Vec::new();
        let mut count = HashMap::new();
        for (shard, gid) in self.shards.iter().enumerate() {
            if *gid == 0 {
                re_alloc_shards.push(shard);
                continue;
            }
            let &cnt = count.get(gid).unwrap_or(&0usize);
            if cnt + 1 > opt + 1 {
                re_alloc_shards.push(shard);
            } else if cnt + 1 == opt + 1 {
                re_alloc_shards.insert(0, shard);
            }
            count.insert(gid, cnt + 1);
        }
        new_groups.sort();
        new_groups
            .iter()
            .enumerate()
            .for_each(|(i, gid)| {
                let c = if i < r { opt + 1 } else { opt };
                for _ in 0..c {
                    self.shards[re_alloc_shards.pop().unwrap()] = *gid;
                }
            });
    }

    fn balance_leave(&mut self, old_groups: Vec<Gid>) {
        let n_groups = self.groups.len();
        if n_groups == 0 {
            self.shards.iter_mut().for_each(|g| *g = 0);
            return;
        }
        let opt = N_SHARDS / n_groups;
        let mut re_alloc_shards: Vec<usize> = Vec::new();
        let mut count = HashMap::new();
        for (shard, gid) in self.shards.iter().enumerate() {
            if old_groups.contains(gid) {
                re_alloc_shards.push(shard);
                continue;
            }
            let &cnt = count.get(gid).unwrap_or(&0usize);
            count.insert(gid, cnt + 1);
        }
        let mut re_alloc_groups: Vec<Gid> = Vec::new();
        let mut all_groups: Vec<Gid> = self.groups.iter().map(|v| *v.0).collect();
        all_groups.sort();
        for gid in all_groups {
            let cnt = count.get(&gid).map_or(0, |v| *v);
            for _ in cnt..opt {
                re_alloc_groups.push(gid);
            }
            if cnt < opt + 1 {
                re_alloc_groups.insert(0, gid);
            }
        }
        re_alloc_shards
            .iter()
            .for_each(|&shard| {
                self.shards[shard] = re_alloc_groups.pop().unwrap();
            });
    }
}
use std::collections::HashMap;
use crate::{
    shard_ctrler::{msg::*, N_SHARDS},
    kvraft::{server::Server, state::State},
};
use serde::{Deserialize, Serialize};

pub type ShardCtrler = Server<ShardInfo>;

#[derive(Debug, Serialize, Deserialize)]
pub struct ShardInfo {
    configs: Vec<Config>,
}

impl Default for ShardInfo {
    fn default() -> Self {
        Self {
            configs: vec![Config::default()],
        }
    }
}

impl ShardInfo {
    fn new_config(&self) -> Config {
        self.configs
            .last()
            .map(|config| {
                let mut config = config.clone();
                config.num += 1;
                config
            })
            .unwrap()
    }
}

impl State for ShardInfo {
    type Command = Op;
    type Output = Option<Config>;

    fn apply(&mut self, cmd: Self::Command) -> Self::Output {
        match cmd {
            Op::Query { num } => {
                if num < self.configs.len() as ConfigId {
                    Some(self.configs[num as usize].clone())
                } else {
                    self.configs.last().map(|v| v.clone())
                }
            },
            Op::Move { shard, gid } => {
                let mut new_config = self.new_config();
                new_config.shards[shard] = gid;
                self.configs.push(new_config.clone());
                Some(new_config)
            },
            Op::Join { groups } => {
                let mut new_config = self.new_config();
                let mut ng = Vec::new();
                groups
                    .into_iter()
                    .for_each(|g| {
                        ng.push(g.0);
                        new_config.grou
            .iter()
            .enumerate()
            .for_each(|(i, gid)| {
                let c = if i < r { opt + 1 } else { opt };
                for _ in 0..c {
                    self.shards[re_alloc_shards.pop().unwrap()] = *gid;
                }
            });
    }

    fn balance_leave(&mut self, old_groups: Vec<Gid>) {
        let n_groups = self.groups.len();
        if n_groups == 0 {
            self.shards.iter_mut().for_each(|g| *g = 0);
            return;
        }
        let opt = N_SHARDS / n_groups;
        let mut re_alloc_shards: Vec<usize> = Vec::new();
        let mut count = HashMap::new();
        for (shard, gid) in self.shards.iter().enumerate() {
            if old_groups.contains(gid) {
                re_alloc_shards.push(shard);
                continue;
            }
            let &cnt = count.get(gid).unwrap_or(&0usize);
            count.insert(gid, cnt + 1);
        }
        let mut re_alloc_groups: Vec<Gid> = Vec::new();
        let mut all_groups: Vec<Gid> = self.groups.iter().map(|v| *v.0).collect();
        all_groups.sort();
        for gid in all_groups {
            let cnt = count.get(&gid).map_or(0, |v| *v);
            for _ in cnt..opt {
                re_alloc_groups.push(gid);
            }
            if cnt < opt + 1 {
                re_alloc_groups.insert(0, gid);
            }
        }
        re_alloc_shards
            .iter()
            .for_each(|&shard| {
                self.shards[shard] = re_alloc_groups.pop().unwrap();
            });
    }
}
ps.insert(g.0, g.1);
                    });
                new_config.balance_join(ng);
                self.configs.push(new_config.clone());
                Some(new_config)
            },
            Op::Leave { gids } => {
                let mut new_config = self.new_config();
                gids
                    .iter()
                    .for_each(|g| {
                        new_config.groups.remove(g);
                    });
                new_config.balance_leave(gids);
                self.configs.push(new_config.clone());
                Some(new_config)
            },
        }
    }
}

impl Config {
    fn balance_join(&mut self, mut new_groups: Vec<Gid>) {
        let n_groups = self.groups.len();
        let opt = N_SHARDS / n_groups;
        let r = N_SHARDS - n_groups * opt;
        let mut re_alloc_shards: Vec<usize> = Vec::new();
        let mut count = HashMap::new();
        for (shard, gid) in self.shards.iter().enumerate() {
            if *gid == 0 {
                re_alloc_shards.push(shard);
                continue;
            }
            let &cnt = count.get(gid).unwrap_or(&0usize);
            if cnt + 1 > opt + 1 {
                re_alloc_shards.push(shard);
            } else if cnt + 1 == opt + 1 {
                re_alloc_shards.insert(0, shard);
            }
            count.insert(gid, cnt + 1);
        }
        new_groups.sort();
        new_groups
random
[ { "content": "pub trait State: net::Message + Default {\n\n type Command: net::Message + Clone;\n\n type Output: net::Message + Clone;\n\n fn apply(&mut self, cmd: Self::Command) -> Self::Output;\n\n}\n\n\n\n\n\n#[derive(Debug, Default, Serialize, Deserialize)]\n\npub struct Kv {\n\n data: HashMap<String, String>,\n\n}\n\n\n\nimpl State for Kv {\n\n type Command = Op;\n\n type Output = String;\n\n\n\n fn apply(&mut self, cmd: Self::Command) -> Self::Output {\n\n match cmd {\n\n Op::Get { key } => self.get(key),\n\n Op::Put { key, value } => self.put(key, value),\n", "file_path": "src/kvraft/state.rs", "rank": 0, "score": 112674.35534124624 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\nstruct ServerCommand<S: State> {\n\n client: usize,\n\n seq: usize,\n\n command: S::Command,\n\n}\n\n\n", "file_path": "src/kvraft/server.rs", "rank": 1, "score": 98034.342515267 }, { "content": "struct Group {\n\n gid: u64,\n\n addrs: Vec<SocketAddr>,\n\n servers: Mutex<Vec<Option<Arc<ShardKvServer>>>>,\n\n}\n\n\n\nimpl Tester {\n\n pub async fn new(n: usize, unreliable: bool, max_raft_state: Option<usize>) -> Tester {\n\n let handle = Handle::current();\n\n if unreliable {\n\n handle.net.update_config(|cfg| {\n\n cfg.packet_loss_rate = 0.1;\n\n cfg.send_latency = Duration::from_millis(1)..Duration::from_millis(27);\n\n });\n\n }\n\n\n\n let n_ctrler = 3;\n\n let ctrler_addrs = (0..n_ctrler)\n\n .map(|i| SocketAddr::from(([0, 0, 1, i as _], 0)))\n\n .collect::<Vec<_>>();\n", "file_path": "src/shardkv/tester.rs", "rank": 2, "score": 87794.45589471054 }, { "content": "/// (state, last applied seq, last output)\n\ntype Snapshot<S> = (S, HashMap<usize, usize>, HashMap<usize, <S as State>::Output>);\n\n\n\n\n\npub struct Server<S: State> {\n\n raft: raft::RaftHandle,\n\n me: usize,\n\n /// Shared via snapshot\n\n pub(crate) state: Arc<Mutex<S>>,\n\n res: Arc<Mutex<Output<S>>>,\n\n /// Last applied seq number for each client.\n\n /// Shared via snapshot\n\n last_applied: Arc<Mutex<HashMap<usize, usize>>>,\n\n last_output: Arc<Mutex<HashMap<usize, S::Output>>>,\n\n}\n\n\n\nimpl<S: State> fmt::Debug for Server<S> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Server({})\", self.me)\n\n }\n\n}\n", "file_path": "src/kvraft/server.rs", "rank": 3, "score": 72603.82186121764 }, { "content": "pub fn rand_string(len: usize) -> String {\n\n rand::rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(len)\n\n .map(char::from)\n\n .collect()\n\n}\n", "file_path": "src/shardkv/tester.rs", "rank": 4, "score": 65818.2549586576 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\nstruct Persist {\n\n /// Raft state:\n\n term: u64,\n\n voted_for: Option<usize>,\n\n logs: Logs,\n\n}\n\n\n", "file_path": "src/raft/raft_handle.rs", "rank": 5, "score": 41285.64789878405 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\nstruct Snapshot {\n\n snapshot: Vec<u8>,\n\n last_included_log: Option<LogEntry>,\n\n}\n\n\n\n/// # Results\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\npub type RPCResult<T> = std::io::Result<T>;\n\n\n\n\n\n/// # RaftHandle\n\n\n\n#[derive(Clone)]\n\npub struct RaftHandle {\n\n me: usize,\n\n peers: Vec<SocketAddr>,\n\n inner: Arc<Mutex<Raft>>,\n\n}\n\n\n", "file_path": "src/raft/raft_handle.rs", "rank": 6, "score": 41285.64789878405 }, { "content": "#[derive(Clone)]\n\nstruct StorageHandle {\n\n /// copy of each server's committed entries\n\n logs: Arc<Mutex<Vec<Vec<Option<Entry>>>>>,\n\n}\n\n\n\nimpl StorageHandle 
{\n\n fn new(n: usize) -> Self {\n\n StorageHandle {\n\n logs: Arc::new(Mutex::new(vec![vec![None]; n])),\n\n }\n\n }\n\n\n\n fn push_and_check(&self, i: usize, index: u64, entry: Entry) {\n\n // debug!(\"server {} push_and_check index {}: {:?}\", i, index, entry);\n\n let mut logs = self.logs.lock().unwrap();\n\n for (j, log) in logs.iter().enumerate() {\n\n if let Some(Some(old)) = log.get(index as usize) {\n\n // some server has already committed a different value for this entry!\n\n assert_eq!(\n\n *old, entry,\n", "file_path": "src/raft/tester.rs", "rank": 7, "score": 41277.70435437998 }, { "content": "// which shard is a key in?\n\n// please use this function,\n\n// and please do not change it.\n\nfn key2shard(key: &str) -> usize {\n\n use crate::shard_ctrler::N_SHARDS;\n\n key.bytes().next().unwrap_or(b'\\0') as usize % N_SHARDS\n\n}\n", "file_path": "src/shardkv/mod.rs", "rank": 8, "score": 35570.79306719787 }, { "content": "type MsgSender = mpsc::UnboundedSender<ApplyMsg>;\n\npub type MsgRecver = mpsc::UnboundedReceiver<ApplyMsg>;\n\n\n\n/// As each Raft peer becomes aware that successive log entries are committed,\n\n/// the peer should send an `ApplyMsg` to the service (or tester) on the same\n\n/// server, via the `apply_ch` passed to `Raft::new`.\n\n#[derive(Debug)]\n\npub enum ApplyMsg {\n\n Command {\n\n data: Vec<u8>,\n\n index: usize,\n\n },\n\n // For 2D:\n\n Snapshot {\n\n data: Vec<u8>,\n\n term: u64,\n\n index: usize,\n\n },\n\n}\n\n\n", "file_path": "src/raft/raft.rs", "rank": 9, "score": 33212.30664377479 }, { "content": "// check that all known appends are present in a value,\n\n// and are in order for each concurrent client.\n\nfn check_concurrent_appends(v: &str, counts: &[usize]) {\n\n for (i, &count) in counts.iter().enumerate() {\n\n check_clnt_appends(i, v, count);\n\n }\n\n}\n\n\n\n/// Basic test is as follows:\n\n///\n\n/// One or more clients submitting Append/Get operations to set of servers for some period of time.\n\n/// After the period is over, test checks that all appended values are present and in order for a\n\n/// particular key. 
\n\n///\n\n/// - If unreliable is set, RPCs may fail.\n\n/// - If crash is set, the servers crash after the period is over and restart.\n\n/// - If partitions is set, the test repartitions the network concurrently with\n\n/// the clients and servers.\n\n/// - If maxraftstate is a positive number, the size of the state for Raft\n\n/// (i.e., log size) shouldn't exceed `2 * maxraftstate`.\n\nasync fn generic_test(\n\n part: &str,\n", "file_path": "src/kvraft/tests.rs", "rank": 10, "score": 32545.576282102724 }, { "content": "use std::collections::HashMap;\n\nuse madsim::net;\n\nuse serde::{Deserialize, Serialize};\n\nuse crate::kvraft::msg::*;\n\n\n\n\n", "file_path": "src/kvraft/state.rs", "rank": 11, "score": 30847.918303599818 }, { "content": " Op::Append { key, value } => self.append(key, value),\n\n }\n\n }\n\n}\n\n\n\nimpl Kv {\n\n fn get(&self, key: String) -> String {\n\n self.data\n\n .get(&key)\n\n .map_or(\"\", |v| v.as_str())\n\n .to_string()\n\n }\n\n\n\n fn put(&mut self, key: String, value: String) -> String {\n\n self.data\n\n .insert(key, value)\n\n .unwrap_or(\"\".to_string())\n\n }\n\n\n\n fn append(&mut self, key: String, value: String) -> String {\n", "file_path": "src/kvraft/state.rs", "rank": 12, "score": 30843.888254697496 }, { "content": " self.data\n\n .get_mut(&key)\n\n .map_or(\"\", |v| {\n\n v.push_str(&value);\n\n v.as_str()\n\n })\n\n .to_string()\n\n }\n\n}\n", "file_path": "src/kvraft/state.rs", "rank": 13, "score": 30836.38917597856 }, { "content": "// check that for a specific client all known appends are present in a value,\n\n// and in order\n\nfn check_clnt_appends(clnt: usize, v: &str, count: usize) {\n\n let mut lastoff = None;\n\n for j in 0..count {\n\n let wanted = format!(\"x {} {} y\", clnt, j);\n\n let off = v.find(&wanted).unwrap_or_else(|| {\n\n panic!(\n\n \"{:?} missing element {:?} in Append result {:?}\",\n\n clnt, wanted, v\n\n )\n\n });\n\n let off1 = v.rfind(&wanted).unwrap();\n\n assert_eq!(off1, off, \"duplicate element {:?} in Append result\", wanted);\n\n\n\n if let Some(lastoff) = lastoff {\n\n assert!(\n\n off > lastoff,\n\n \"wrong order for element {:?} in Append result\",\n\n wanted\n\n );\n\n }\n\n lastoff = Some(off);\n\n }\n\n}\n\n\n", "file_path": "src/kvraft/tests.rs", "rank": 14, "score": 29384.648063101187 }, { "content": "use super::N_SHARDS;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{collections::HashMap, net::SocketAddr};\n\n\n\npub type Gid = u64;\n\npub type ConfigId = u64;\n\n\n\n// A configuration -- an assignment of shards to groups.\n\n// Please don't change this.\n\n#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]\n\npub struct Config {\n\n /// config number\n\n pub num: ConfigId,\n\n /// shard -> gid\n\n pub shards: [Gid; N_SHARDS],\n\n /// gid -> servers[]\n\n pub groups: HashMap<Gid, Vec<SocketAddr>>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n", "file_path": "src/shard_ctrler/msg.rs", "rank": 18, "score": 28597.407295620396 }, { "content": "use super::msg::*;\n\nuse crate::kvraft::client::ClerkCore;\n\nuse std::{collections::HashMap, net::SocketAddr};\n\n\n\npub struct Clerk {\n\n core: ClerkCore<Op, Option<Config>>,\n\n}\n\n\n\nimpl Clerk {\n\n pub fn new(servers: Vec<SocketAddr>) -> Clerk {\n\n Clerk {\n\n core: ClerkCore::new(servers),\n\n }\n\n }\n\n\n\n pub async fn query(&self) -> Config {\n\n self.core.call(Op::Query { num: u64::MAX }).await.unwrap()\n\n }\n\n\n\n pub async fn query_at(&self, num: u64) -> Config {\n", "file_path": 
"src/shard_ctrler/client.rs", "rank": 19, "score": 28587.60035239834 }, { "content": " self.core.call(Op::Query { num }).await.unwrap()\n\n }\n\n\n\n pub async fn join(&self, groups: HashMap<Gid, Vec<SocketAddr>>) {\n\n self.core.call(Op::Join { groups }).await;\n\n }\n\n\n\n pub async fn leave(&self, gids: &[u64]) {\n\n self.core.call(Op::Leave { gids: gids.into() }).await;\n\n }\n\n\n\n pub async fn move_(&self, shard: usize, gid: u64) {\n\n self.core.call(Op::Move { shard, gid }).await;\n\n }\n\n}\n", "file_path": "src/shard_ctrler/client.rs", "rank": 20, "score": 28587.405290819497 }, { "content": "pub enum Op {\n\n Query {\n\n /// desired config number\n\n num: ConfigId,\n\n },\n\n Join {\n\n /// new GID -> servers mappings\n\n groups: HashMap<Gid, Vec<SocketAddr>>,\n\n },\n\n Leave {\n\n gids: Vec<Gid>,\n\n },\n\n Move {\n\n /// assign the shard to gid\n\n shard: usize,\n\n gid: Gid,\n\n },\n\n}\n", "file_path": "src/shard_ctrler/msg.rs", "rank": 22, "score": 28587.00329124717 }, { "content": " }\n\n // any un-allocated shards?\n\n if groups.is_empty() {\n\n for (shard, gid) in c.shards.iter().enumerate() {\n\n assert!(\n\n *gid == 0 || c.groups.contains_key(gid),\n\n \"shard {} -> invalid group {}\",\n\n shard,\n\n gid\n\n );\n\n }\n\n }\n\n // more or less balanced sharding?\n\n let mut counts = HashMap::<u64, usize>::new();\n\n for &gid in c.shards.iter() {\n\n *counts.entry(gid).or_default() += 1;\n\n }\n\n if !c.groups.is_empty() {\n\n let counts = c.groups.keys().map(|gid| *counts.get(gid).unwrap_or(&0));\n\n let min = counts.clone().min().unwrap();\n", "file_path": "src/shard_ctrler/tester.rs", "rank": 23, "score": 28584.867263381395 }, { "content": "use super::{tester::*, N_SHARDS};\n\nuse futures::future;\n\nuse log::*;\n\nuse madsim::{task, time};\n\nuse std::{collections::HashMap, net::SocketAddr};\n\n\n\nmacro_rules! addrs {\n\n ($($addr:expr),* $(,)?) => {\n\n vec![$(SocketAddr::from(([0, 0, 0, $addr as u8], 0))),*]\n\n }\n\n}\n\n\n\n// helper macro to construct groups\n\nmacro_rules! groups {\n\n ($( $gid:expr => $addrs:expr ),* $(,)?) => {{\n\n let mut map = std::collections::HashMap::new();\n\n $(\n\n map.insert($gid, $addrs);\n\n )*\n\n map\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 28, "score": 28578.778115188205 }, { "content": " // real time\n\n let t = self.t0.elapsed();\n\n // number of Raft peers\n\n let npeers = self.n;\n\n // number of RPC sends\n\n let nrpc = self.rpc_total();\n\n\n\n info!(\" ... 
Passed --\");\n\n info!(\" {:?} {} {}\", t, npeers, nrpc);\n\n }\n\n}\n\n\n\nimpl Clerk {\n\n pub async fn check(&self, groups: &[u64]) {\n\n debug!(\"check: {:?}\", groups);\n\n let c = self.query().await;\n\n assert_eq!(c.groups.len(), groups.len());\n\n // are the groups as expected?\n\n for gid in groups {\n\n assert!(c.groups.contains_key(gid), \"missing group {}\", gid);\n", "file_path": "src/shard_ctrler/tester.rs", "rank": 29, "score": 28577.049027412835 }, { "content": "use madsim::{time::*, Handle};\n\nuse std::{\n\n collections::HashMap,\n\n net::SocketAddr,\n\n sync::{Arc, Mutex},\n\n};\n\n\n\nuse super::{client::Clerk, server::ShardCtrler};\n\n\n\npub struct Tester {\n\n handle: Handle,\n\n n: usize,\n\n addrs: Vec<SocketAddr>,\n\n servers: Mutex<Vec<Option<Arc<ShardCtrler>>>>,\n\n\n\n // begin()/end() statistics\n\n t0: Instant,\n\n}\n\n\n\nimpl Tester {\n", "file_path": "src/shard_ctrler/tester.rs", "rank": 30, "score": 28576.01162549436 }, { "content": " ck.join(groups!(gid3 => addr3)).await;\n\n let gid4 = 504;\n\n let addr4 = addrs![41, 42, 43];\n\n ck.join(groups!(gid4 => addr4)).await;\n\n for i in 0..N_SHARDS {\n\n let cf = ck.query().await;\n\n let shard = if i < N_SHARDS / 2 { gid3 } else { gid4 };\n\n ck.move_(i, shard).await;\n\n if cf.shards[i] != shard {\n\n let cf1 = ck.query().await;\n\n assert!(cf1.num > cf.num, \"Move should increase Tester.Num\");\n\n }\n\n }\n\n let cf2 = ck.query().await;\n\n for i in 0..N_SHARDS {\n\n let shard = if i < N_SHARDS / 2 { gid3 } else { gid4 };\n\n assert_eq!(cf2.shards[i], shard, \"shard {} wrong group\", i);\n\n }\n\n ck.leave(&[gid3]).await;\n\n ck.leave(&[gid4]).await;\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 31, "score": 28575.513560886382 }, { "content": "\n\n info!(\"Test: Minimal transfers after joins ...\");\n\n\n\n let c1 = ck.query().await;\n\n debug!(\"{:#?}\", c1);\n\n for i in 0..5 {\n\n let gid = npara + 1 + i;\n\n ck.join(groups!(gid => addrs![gid + 1, gid + 2, gid + 2]))\n\n .await;\n\n }\n\n let c2 = ck.query().await;\n\n debug!(\"{:#?}\", c2);\n\n for i in 1..=npara {\n\n for j in 0..N_SHARDS {\n\n assert!(\n\n !(c2.shards[j] == i && c1.shards[j] != i),\n\n \"non-minimal transfer after Join()s: {:?} -> {:?}\",\n\n c1.shards,\n\n c2.shards\n\n );\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 32, "score": 28575.21909333919 }, { "content": " let c1 = ck.query().await;\n\n let mut m = HashMap::new();\n\n for i in 0..5 {\n\n let gid = npara + 1 + i;\n\n m.insert(gid, addrs![gid + 1, gid + 2]);\n\n }\n\n ck.join(m).await;\n\n let c2 = ck.query().await;\n\n for i in 1..=npara {\n\n for j in 0..N_SHARDS {\n\n assert!(\n\n !(c2.shards[j] == i && c1.shards[j] != i),\n\n \"non-minimal transfer after Join()s: {:?} -> {:?}\",\n\n c1.shards,\n\n c2.shards\n\n );\n\n }\n\n }\n\n\n\n info!(\" ... Passed\");\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 33, "score": 28575.086069145935 }, { "content": "\n\n info!(\" ... Passed\");\n\n\n\n info!(\"Test: Concurrent leave/join ...\");\n\n\n\n let npara = 10;\n\n let gids: Vec<u64> = (0..npara).map(|i| i as u64 * 10 + 100).collect();\n\n let mut handles = vec![];\n\n for &gid in gids.iter() {\n\n let cka = t.make_client();\n\n handles.push(task::spawn_local(async move {\n\n cka.join(groups!(gid + 1000 => addrs![gid + 1])).await;\n\n cka.join(groups!(gid => addrs![gid + 2])).await;\n\n cka.leave(&[gid + 1000]).await;\n\n }));\n\n }\n\n future::join_all(handles).await;\n\n ck.check(&gids).await;\n\n\n\n info!(\" ... 
Passed\");\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 34, "score": 28573.804686846433 }, { "content": " pub fn make_client(&self) -> Clerk {\n\n Clerk::new(self.addrs.clone())\n\n }\n\n\n\n /// Shutdown a server.\n\n pub fn shutdown_server(&self, i: usize) {\n\n debug!(\"shutdown_server({})\", i);\n\n self.handle.kill(self.addrs[i]);\n\n self.servers.lock().unwrap()[i] = None;\n\n }\n\n\n\n /// Start a server.\n\n /// If restart servers, first call shutdown_server\n\n pub async fn start_server(&self, i: usize) {\n\n debug!(\"start_server({})\", i);\n\n let addrs = self.addrs.clone();\n\n let handle = self.handle.local_handle(self.addrs[i]);\n\n let kv = handle.spawn(ShardCtrler::new(addrs, i, None)).await;\n\n self.servers.lock().unwrap()[i] = Some(kv);\n\n }\n", "file_path": "src/shard_ctrler/tester.rs", "rank": 35, "score": 28572.488600464712 }, { "content": " let sa3 = cfx.groups[&gid3].as_slice();\n\n assert_eq!(sa3, addr3, \"wrong servers for gid {}\", gid3);\n\n\n\n ck.leave(&[gid1, gid3]).await;\n\n ck.check(&[gid2]).await;\n\n cfa.push(ck.query().await);\n\n\n\n let cfx = ck.query().await;\n\n let sa2 = cfx.groups[&gid2].as_slice();\n\n assert_eq!(sa2, addr2, \"wrong servers for gid {}\", gid2);\n\n\n\n ck.leave(&[gid2]).await;\n\n\n\n info!(\" ... Passed\");\n\n\n\n info!(\"Test: Concurrent multi leave/join ...\");\n\n\n\n let npara = 10;\n\n let gids: Vec<u64> = (0..npara).map(|i| i as u64 + 1000).collect();\n\n let mut handles = vec![];\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 36, "score": 28572.245568085298 }, { "content": "pub mod client;\n\npub mod msg;\n\npub mod server;\n\n#[cfg(test)]\n\nmod tester;\n\n#[cfg(test)]\n\nmod tests;\n\n\n\npub const N_SHARDS: usize = 10;\n", "file_path": "src/shard_ctrler/mod.rs", "rank": 37, "score": 28571.553493546337 }, { "content": " for &gid in gids.iter() {\n\n let cka = t.make_client();\n\n handles.push(task::spawn_local(async move {\n\n cka.join(groups!(\n\n gid => addrs![gid + 1, gid + 2, gid + 3],\n\n gid + 1000 => addrs![gid + 1000 + 1],\n\n gid + 2000 => addrs![gid + 2000 + 1],\n\n ))\n\n .await;\n\n cka.leave(&[gid + 1000, gid + 2000]).await;\n\n }));\n\n }\n\n future::join_all(handles).await;\n\n\n\n ck.check(&gids).await;\n\n\n\n info!(\" ... Passed\");\n\n\n\n info!(\"Test: Minimal transfers after multijoins ...\");\n\n\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 38, "score": 28571.40607120827 }, { "content": " let max = counts.clone().max().unwrap();\n\n assert!(\n\n max <= min + 1,\n\n \"imbalanced sharding, max {} too much larger than min {}: {:?}\",\n\n max,\n\n min,\n\n c.shards,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "src/shard_ctrler/tester.rs", "rank": 39, "score": 28571.31759776681 }, { "content": "\n\n info!(\"Test: Minimal transfers after multileaves ...\");\n\n\n\n let l: Vec<u64> = (0..5).map(|i| npara + 1 + i).collect();\n\n ck.leave(&l).await;\n\n let c3 = ck.query().await;\n\n for i in 1..=npara {\n\n for j in 0..N_SHARDS {\n\n assert!(\n\n !(c2.shards[j] == i && c3.shards[j] != i),\n\n \"non-minimal transfer after Leave()s: {:?} -> {:?}\",\n\n c2.shards,\n\n c3.shards\n\n );\n\n }\n\n }\n\n\n\n info!(\" ... 
Passed\");\n\n\n\n info!(\"Test: Check Same config on servers ...\");\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 40, "score": 28570.97940807157 }, { "content": " let gid1 = 1;\n\n let addr1 = addrs![11, 12, 13];\n\n let gid2 = 2;\n\n let addr2 = addrs![21, 22, 23];\n\n ck.join(groups!(gid1 => addr1.clone(), gid2 => addr2.clone()))\n\n .await;\n\n ck.check(&[gid1, gid2]).await;\n\n cfa.push(ck.query().await);\n\n\n\n let gid3 = 3;\n\n let addr3 = addrs![31, 32, 33];\n\n ck.join(groups!(gid3 => addr3.clone())).await;\n\n ck.check(&[gid1, gid2, gid3]).await;\n\n cfa.push(ck.query().await);\n\n\n\n let cfx = ck.query().await;\n\n let sa1 = cfx.groups[&gid1].as_slice();\n\n assert_eq!(sa1, addr1, \"wrong servers for gid {}\", gid1);\n\n let sa2 = cfx.groups[&gid2].as_slice();\n\n assert_eq!(sa2, addr2, \"wrong servers for gid {}\", gid2);\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 41, "score": 28570.438253595323 }, { "content": " pub async fn new(n: usize, unreliable: bool) -> Tester {\n\n let handle = Handle::current();\n\n if unreliable {\n\n handle.net.update_config(|cfg| {\n\n cfg.packet_loss_rate = 0.1;\n\n cfg.send_latency = Duration::from_millis(1)..Duration::from_millis(27);\n\n });\n\n }\n\n let mut servers = vec![];\n\n servers.resize_with(n, || None);\n\n let tester = Tester {\n\n handle,\n\n n,\n\n addrs: (0..n)\n\n .map(|i| SocketAddr::from(([0, 0, 1, i as _], 0)))\n\n .collect::<Vec<_>>(),\n\n servers: Mutex::new(servers),\n\n t0: Instant::now(),\n\n };\n\n // create a full set of KV servers.\n", "file_path": "src/shard_ctrler/tester.rs", "rank": 42, "score": 28570.422342364618 }, { "content": "\n\n pub fn leader(&self) -> Option<usize> {\n\n let servers = self.servers.lock().unwrap();\n\n for (i, kv) in servers.iter().enumerate() {\n\n if let Some(kv) = kv {\n\n if kv.is_leader() {\n\n return Some(i);\n\n }\n\n }\n\n }\n\n None\n\n }\n\n\n\n /// End a Test -- the fact that we got here means there\n\n /// was no failure.\n\n /// print the Passed message,\n\n /// and some performance numbers.\n\n pub fn end(&self) {\n\n self.check_timeout();\n\n\n", "file_path": "src/shard_ctrler/tester.rs", "rank": 43, "score": 28570.273352858887 }, { "content": " cfa.push(ck.query().await);\n\n\n\n let gid2 = 2;\n\n let addr2 = addrs![21, 22, 23];\n\n ck.join(groups!(gid2 => addr2.clone())).await;\n\n ck.check(&[gid1, gid2]).await;\n\n cfa.push(ck.query().await);\n\n\n\n let cfx = ck.query().await;\n\n let sa1 = cfx.groups[&gid1].as_slice();\n\n assert_eq!(sa1, addr1, \"wrong servers for gid {}\", gid1);\n\n let sa2 = cfx.groups[&gid2].as_slice();\n\n assert_eq!(sa2, addr2, \"wrong servers for gid {}\", gid2);\n\n\n\n ck.leave(&[gid1]).await;\n\n ck.check(&[gid2]).await;\n\n cfa.push(ck.query().await);\n\n\n\n ck.leave(&[gid2]).await;\n\n cfa.push(ck.query().await);\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 44, "score": 28570.069572796543 }, { "content": " }\n\n\n\n info!(\" ... 
Passed\");\n\n\n\n t.end();\n\n}\n\n\n\n#[madsim::test]\n\nasync fn multi_4a() {\n\n let nservers = 3;\n\n let t = Tester::new(nservers, false).await;\n\n let ck = t.make_client();\n\n\n\n info!(\"Test: Multi-group leave/join ...\");\n\n\n\n let mut cfa = vec![];\n\n cfa.push(ck.query().await);\n\n\n\n ck.check(&[]).await;\n\n\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 45, "score": 28569.36172811083 }, { "content": " }}\n\n}\n\n\n\n#[madsim::test]\n\nasync fn basic_4a() {\n\n let nservers = 3;\n\n let t = Tester::new(nservers, false).await;\n\n let ck = t.make_client();\n\n\n\n info!(\"Test: Basic leave/join ...\");\n\n\n\n let mut cfa = vec![];\n\n cfa.push(ck.query().await);\n\n\n\n ck.check(&[]).await;\n\n\n\n let gid1 = 1;\n\n let addr1 = addrs![11, 12, 13];\n\n ck.join(groups!(gid1 => addr1.clone())).await;\n\n ck.check(&[gid1]).await;\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 46, "score": 28568.772181615113 }, { "content": " }\n\n }\n\n\n\n info!(\" ... Passed\");\n\n\n\n info!(\"Test: Minimal transfers after leaves ...\");\n\n\n\n for i in 0..5 {\n\n ck.leave(&[npara + 1 + i]).await;\n\n }\n\n let c3 = ck.query().await;\n\n for i in 1..=npara {\n\n for j in 0..N_SHARDS {\n\n assert!(\n\n !(c2.shards[j] == i && c3.shards[j] != i),\n\n \"non-minimal transfer after Leave()s: {:?} -> {:?}\",\n\n c2.shards,\n\n c3.shards\n\n );\n\n }\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 47, "score": 28568.56925451025 }, { "content": "\n\n info!(\" ... Passed\");\n\n\n\n info!(\"Test: Historical queries ...\");\n\n\n\n for s in 0..nservers {\n\n t.shutdown_server(s);\n\n for cf in cfa.iter() {\n\n let c = ck.query_at(cf.num).await;\n\n assert_eq!(&c, cf);\n\n }\n\n t.start_server(s).await;\n\n }\n\n\n\n info!(\" ... Passed\");\n\n\n\n info!(\"Test: Move ...\");\n\n\n\n let gid3 = 503;\n\n let addr3 = addrs![31, 32, 33];\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 48, "score": 28566.837717552517 }, { "content": "\n\n let leader = t.leader().expect(\"Leader not found\");\n\n let c = ck.query().await; // Tester leader claims\n\n t.shutdown_server(leader);\n\n\n\n let mut attempts = 0;\n\n while t.leader().is_some() {\n\n attempts += 1;\n\n assert!(attempts < 3, \"Leader not found\");\n\n time::sleep(Duration::from_secs(1)).await;\n\n }\n\n\n\n let c1 = ck.query().await;\n\n assert_eq!(c, c1);\n\n\n\n info!(\" ... Passed\");\n\n\n\n t.end();\n\n}\n", "file_path": "src/shard_ctrler/tests.rs", "rank": 49, "score": 28566.45819490063 }, { "content": " for i in 0..n {\n\n tester.start_server(i).await;\n\n }\n\n tester\n\n }\n\n\n\n fn rpc_total(&self) -> u64 {\n\n self.handle.net.stat().msg_count / 2\n\n }\n\n\n\n fn check_timeout(&self) {\n\n // enforce a two minute real-time limit on each test\n\n if self.t0.elapsed() > Duration::from_secs(120) {\n\n panic!(\"test took longer than 120 seconds\");\n\n }\n\n }\n\n\n\n // Create a clerk with clerk specific server names.\n\n // Give it connections to all of the servers, but for\n\n // now enable only connections to servers in to[].\n", "file_path": "src/shard_ctrler/tester.rs", "rank": 50, "score": 28563.997238678643 }, { "content": " match cmd {\n\n raft::ApplyMsg::Command { data, index, .. 
} => {\n\n let cmd: ServerCommand<S> = bincode::deserialize(&data).unwrap();\n\n let ServerCommand { client, seq, command } = cmd;\n\n let mut snapshot = None;\n\n {\n\n let mut kv_output = this.res.lock().unwrap();\n\n let mut last_applied = this.last_applied.lock().unwrap();\n\n let mut last_output = this.last_output.lock().unwrap();\n\n\n\n // not applied in state machine\n\n if Some(&seq) != last_applied.get(&client) {\n\n let mut state = this.state.lock().unwrap();\n\n let output = state.apply(command);\n\n last_applied.insert(client, seq);\n\n last_output.insert(client, output.clone());\n\n kv_output.output.insert(seq, output);\n\n snapshot = if this.raft.log_size() > max_log_size / 2 {\n\n Some(bincode::serialize(\n\n &(&*state, &*last_applied, &*last_output)\n", "file_path": "src/kvraft/server.rs", "rank": 51, "score": 31.82702456150903 }, { "content": "\n\n\n\n#[derive(Default, Serialize, Deserialize)]\n\npub struct ShardKv {\n\n /// shard -> kv\n\n shard2kv: HashMap<usize, HashMap<String, String>>,\n\n /// shard -> last ConfigId applied\n\n shard2cfg: [ConfigId; N_SHARDS],\n\n /// shard -> contained\n\n contains: [bool; N_SHARDS],\n\n}\n\n\n\nimpl fmt::Debug for ShardKv {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?}\", self.shard2kv.keys())\n\n }\n\n}\n\n\n\nimpl State for ShardKv {\n\n type Command = Op;\n", "file_path": "src/shardkv/server.rs", "rank": 52, "score": 31.271244358490552 }, { "content": "use std::ops::{Range, Index, IndexMut, RangeFrom};\n\nuse std::fmt::{self, Debug, Formatter};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// # LogEntry\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct LogEntry {\n\n pub(crate) term: u64,\n\n pub(crate) index: usize,\n\n pub(crate) data: Vec<u8>,\n\n}\n\n\n\nimpl Default for LogEntry {\n\n fn default() -> Self {\n\n Self { term: 0, index: 0, data: vec![] }\n\n }\n\n}\n\n\n\nimpl Debug for LogEntry {\n", "file_path": "src/raft/log.rs", "rank": 53, "score": 30.62468207397434 }, { "content": " type Output = Reply;\n\n\n\n fn apply(&mut self, cmd: Self::Command) -> Self::Output {\n\n match cmd {\n\n Op::Get { key } => {\n\n let shard = key2shard(&key);\n\n if !self.contains[shard] {\n\n return Reply::WrongGroup;\n\n }\n\n self.shard2kv\n\n .get(&shard)\n\n .map(|kv| {\n\n let value = kv\n\n .get(&key)\n\n .map(|v| v.clone());\n\n Reply::Get { value }\n\n })\n\n .unwrap()\n\n },\n\n Op::Put { key, value } => {\n", "file_path": "src/shardkv/server.rs", "rank": 54, "score": 30.211129748994587 }, { "content": "use std::{\n\n task::Poll,\n\n sync::{Arc, Mutex},\n\n collections::HashMap,\n\n pin::Pin,\n\n};\n\nuse futures::{Future, task::{Context, Waker}};\n\nuse crate::kvraft::state::State;\n\n\n\npub(crate) struct Output<S: State> {\n\n /// seq -> Output\n\n pub(crate) output: HashMap<usize, S::Output>,\n\n /// seq -> Waker\n\n pub(crate) waker: HashMap<usize, Waker>,\n\n}\n\n\n\nimpl<S: State> Default for Output<S> {\n\n fn default() -> Self {\n\n Self {\n\n output: HashMap::new(),\n", "file_path": "src/kvraft/server_fut.rs", "rank": 55, "score": 26.682462581136942 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse crate::shard_ctrler::msg::ConfigId;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum Op {\n\n Get {\n\n key: String,\n\n },\n\n Put {\n\n key: String,\n\n value: String,\n\n },\n\n Append {\n\n key: String,\n\n value: String,\n\n },\n\n InstallShard {\n\n cfg: ConfigId,\n\n shard: usize,\n\n data: Option<Vec<u8>>,\n", "file_path": 
"src/shardkv/msg.rs", "rank": 56, "score": 25.07929381701217 }, { "content": "use crate::{\n\n kvraft::{\n\n client::ClerkCore,\n\n server::Server,\n\n state::State,\n\n },\n\n shard_ctrler::{\n\n client::Clerk as CtrlerClerk,\n\n msg::{Config, Gid, ConfigId},\n\n N_SHARDS,\n\n },\n\n shardkv::{msg::*, key2shard},\n\n};\n\nuse serde::{Deserialize, Serialize};\n\nuse madsim::{task, time::{self, Duration}};\n\nuse std::{\n\n net::SocketAddr,\n\n fmt::{self, Formatter},\n\n sync::{Arc, Mutex},\n\n collections::HashMap,\n", "file_path": "src/shardkv/server.rs", "rank": 57, "score": 23.435483993190726 }, { "content": " let mut new_cfg: ConfigId = 0;\n\n loop {\n\n let config = this.ctrl_ck.query_at(new_cfg).await;\n\n let cfg = config.num;\n\n config.groups\n\n .iter()\n\n .for_each(|(gid, servers)| {\n\n if !this.sevr_ck.lock().unwrap().contains_key(gid) {\n\n let servers = servers.clone();\n\n this.sevr_ck.lock().unwrap().insert(\n\n *gid, \n\n Arc::new(ClerkCore::new(servers)),\n\n );\n\n }\n\n });\n\n let prev_shards = this.config.lock().unwrap().shards;\n\n *this.config.lock().unwrap() = config.clone();\n\n\n\n let mut ask_for_shards = FuturesUnordered::new();\n\n for (shard, gid) in config.shards.iter().enumerate() {\n", "file_path": "src/shardkv/server.rs", "rank": 58, "score": 23.038916283469188 }, { "content": " &self,\n\n client: usize,\n\n seq: usize,\n\n cmd: S::Command,\n\n ) -> Result<S::Output, Error> {\n\n // Repeat request\n\n if let Some(res) = self.res.lock().unwrap().output.get(&seq) {\n\n return Ok(res.clone());\n\n }\n\n\n\n if Some(&seq) == self.last_applied.lock().unwrap().get(&client) {\n\n let res = self.last_output\n\n .lock().unwrap().get(&client).unwrap().clone();\n\n return Ok(res);\n\n }\n\n\n\n if self.raft.is_leader() {\n\n let cmd: ServerCommand<S> = ServerCommand { client, seq, command: cmd };\n\n match self.raft.start(&bincode::serialize(&cmd).unwrap()).await {\n\n Ok(_) => {\n", "file_path": "src/kvraft/server.rs", "rank": 59, "score": 22.794567821471002 }, { "content": "use crate::raft::log::LogEntry;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct RequestVoteArgs {\n\n pub(crate) term: u64,\n\n pub(crate) candidate: usize,\n\n pub(crate) last_log_term: u64,\n\n pub(crate) last_log_index: usize,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct RequestVoteReply {\n\n pub(crate) term: u64,\n\n pub(crate) vote_granted: bool,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct AppendEntryArgs {\n\n pub(crate) term: u64,\n", "file_path": "src/raft/args.rs", "rank": 60, "score": 22.43638864209627 }, { "content": " let config = self.ctrl_ck.query().await;\n\n let gid2core = config.groups\n\n .into_iter()\n\n .fold(HashMap::new(), |mut map, (gid, servers)| {\n\n map.insert(\n\n gid,\n\n Arc::new(ClerkCore::<Op, Reply>::new(servers)),\n\n );\n\n map\n\n });\n\n config.shards\n\n .iter()\n\n .enumerate()\n\n .for_each(|(shard, gid)| {\n\n let core = gid2core.get(gid).unwrap().clone();\n\n self.cores.borrow_mut().insert(shard, core);\n\n })\n\n }\n\n}\n", "file_path": "src/shardkv/client.rs", "rank": 61, "score": 21.950016513855832 }, { "content": " waker: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\npub(crate) struct ServerFuture<S: State> {\n\n seq: usize,\n\n pub(crate) res: Arc<Mutex<Output<S>>>,\n\n}\n\n\n\nimpl<S: State> ServerFuture<S> {\n\n pub(crate) fn new(\n\n seq: usize,\n\n res: Arc<Mutex<Output<S>>>,\n\n ) -> Self {\n\n Self { seq, res }\n\n 
}\n\n}\n\n\n\nimpl<S: State> Future for ServerFuture<S> {\n", "file_path": "src/kvraft/server_fut.rs", "rank": 62, "score": 21.4874090684086 }, { "content": " Candidate,\n\n Leader,\n\n}\n\n\n\nimpl Default for Role {\n\n fn default() -> Self {\n\n Role::Follower\n\n }\n\n}\n\n\n\n\n\n/// # Raft\n\n\n\npub struct Raft {\n\n pub(crate) me: usize,\n\n pub(crate) role: Role,\n\n pub(crate) apply_ch: MsgSender,\n\n pub(crate) state: State,\n\n pub(crate) timer: Instant,\n\n\n", "file_path": "src/raft/raft.rs", "rank": 63, "score": 21.059832539375144 }, { "content": "};\n\nuse futures::{\n\n select_biased,\n\n StreamExt,\n\n stream::FuturesUnordered,\n\n};\n\n\n\npub struct ShardKvServer {\n\n gid: u64,\n\n ctrl_ck: CtrlerClerk,\n\n sevr_ck: Mutex<HashMap<Gid, Arc<ClerkCore<Op, Reply>>>>,\n\n _inner: Arc<Server<ShardKv>>,\n\n config: Mutex<Config>,\n\n}\n\n\n\nimpl ShardKvServer {\n\n pub async fn new(\n\n ctrl_ck: CtrlerClerk,\n\n servers: Vec<SocketAddr>,\n\n gid: u64,\n", "file_path": "src/shardkv/server.rs", "rank": 64, "score": 20.14079560528277 }, { "content": "impl State {\n\n pub fn new(size: usize) -> Self {\n\n Self {\n\n term: 0,\n\n voted_for: None,\n\n logs: Logs::default(),\n\n commit_index: 1,\n\n applied_index: 1,\n\n next_index: vec![1; size],\n\n match_index: vec![0; size],\n\n }\n\n }\n\n}\n\n\n\n\n\n/// # Role\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Role {\n\n Follower,\n", "file_path": "src/raft/raft.rs", "rank": 65, "score": 19.732031796324936 }, { "content": " // log is empty, but snapshot exists.\n\n let log = self.last_included_log.as_ref().unwrap();\n\n (log.term, log.index)\n\n } else {\n\n (0, 0)\n\n }\n\n }\n\n\n\n pub(crate) fn reset_raft(&mut self) {\n\n self.timer = Instant::now();\n\n match self.role {\n\n Role::Leader => {\n\n self.snapshot_done = true;\n\n self.state.next_index.fill(self.state.logs.end());\n\n self.state.match_index.fill(self.state.logs.begin());\n\n }\n\n Role::Candidate => {\n\n self.state.term += 1;\n\n self.state.voted_for = Some(self.me);\n\n }\n", "file_path": "src/raft/raft.rs", "rank": 66, "score": 19.21039285447469 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Msg<T> {\n\n pub(crate) client: usize,\n\n pub(crate) seq: usize,\n\n pub(crate) data: T,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum Op {\n\n Get { key: String },\n\n Put { key: String, value: String },\n\n Append { key: String, value: String },\n\n}\n\n\n\n#[derive(thiserror::Error, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Error {\n\n #[error(\"not leader, hint: {hint}\")]\n\n NotLeader { hint: usize },\n\n #[error(\"server timeout\")]\n\n Timeout,\n\n #[error(\"failed to reach consensus\")]\n\n Failed,\n\n}\n", "file_path": "src/kvraft/msg.rs", "rank": 67, "score": 18.871783078923755 }, { "content": " },\n\n RemoveShard {\n\n cfg: ConfigId,\n\n shard: usize,\n\n },\n\n ShardInstalled {\n\n cfg: ConfigId,\n\n shard: usize,\n\n },\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum Reply {\n\n Get {\n\n value: Option<String>,\n\n },\n\n Ok,\n\n Retry,\n\n WrongGroup,\n\n Shard {\n\n shard: usize,\n\n data: Vec<u8>,\n\n },\n\n}\n", "file_path": "src/shardkv/msg.rs", "rank": 68, "score": 18.86910497057106 }, { "content": " /// Else stays candidate.\n\n pub(crate) fn handle_request_vote(&mut self, reply: RequestVoteReply) -> bool {\n\n if reply.term > self.state.term {\n\n self.role 
= Role::Follower;\n\n self.state.term = reply.term;\n\n self.state.voted_for = None;\n\n return true;\n\n }\n\n false\n\n }\n\n\n\n /// Update commit_index.\n\n /// `commit_this_term`: commit if the last log is from this term.\n\n pub(crate) fn update_commit_index(&mut self, commit_this_term: bool) {\n\n let mut sorted_match = self.state.match_index.clone();\n\n sorted_match.sort();\n\n let mid = (self.common_sz + 1) >> 1;\n\n let commit_index = sorted_match[mid];\n\n if commit_index > self.state.commit_index {\n\n if self.state.logs.contains_index(commit_index - 1)\n", "file_path": "src/raft/raft.rs", "rank": 69, "score": 18.193814419190225 }, { "content": " pub(crate) leader: usize,\n\n pub(crate) prev_log_index: usize,\n\n pub(crate) prev_log_term: u64,\n\n pub(crate) log_entries: Vec<LogEntry>,\n\n pub(crate) leader_commit_index: usize,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct AppendEntryReply {\n\n pub(crate) term: u64,\n\n pub(crate) success: bool,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct InstallSnapshotArgs {\n\n pub(crate) term: u64,\n\n pub(crate) leader: usize,\n\n pub(crate) last_included_term: u64,\n\n pub(crate) last_included_index: usize,\n\n pub(crate) offset: usize,\n\n pub(crate) done: bool,\n\n pub(crate) data: Vec<u8>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct InstallSnapshotReply {\n\n pub(crate) term: u64,\n\n}\n", "file_path": "src/raft/args.rs", "rank": 70, "score": 17.927194341254072 }, { "content": " }\n\n }\n\n\n\n // tell the shardctrler that a group is joining.\n\n pub async fn join(&self, group: usize) {\n\n self.joins(&[group]).await;\n\n }\n\n\n\n pub async fn joins(&self, groups: &[usize]) {\n\n debug!(\"join({:?})\", groups);\n\n let mut m = HashMap::new();\n\n for &g in groups {\n\n let gid = self.groups[g].gid;\n\n let names = self.groups[g].addrs.clone();\n\n m.insert(gid, names);\n\n }\n\n self.ctrler_ck.join(m).await;\n\n }\n\n\n\n // tell the shardctrler that a group is leaving.\n", "file_path": "src/shardkv/tester.rs", "rank": 71, "score": 17.814974631077217 }, { "content": "use std::{fmt, io, net::SocketAddr, sync::{Arc, Mutex}};\n\nuse futures::{\n\n FutureExt,\n\n StreamExt,\n\n channel::mpsc,\n\n pin_mut,\n\n select_biased,\n\n join,\n\n stream::FuturesUnordered,\n\n};\n\nuse crate::raft::{raft::*, args::*, log::*};\n\nuse madsim::{time::*, fs, net, task, rand::{self, Rng}};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// State data needs to be persisted.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n", "file_path": "src/raft/raft_handle.rs", "rank": 72, "score": 17.289782076920922 }, { "content": " self.state.next_index[id] = new_next;\n\n self.state.match_index[id] = new_next;\n\n 0\n\n } else {\n\n self.role = Role::Follower;\n\n self.state.term = reply.term;\n\n self.state.voted_for = None;\n\n -1\n\n }\n\n }\n\n\n\n /// For candidates to send RequestVoteArgs.\n\n pub(crate) fn send_request_vote(&self, peers: &Vec<SocketAddr>)\n\n -> FuturesUnordered<impl Future<Output = RPCResult<RequestVoteReply>>> {\n\n let net = net::NetLocalHandle::current();\n\n let rpcs = FuturesUnordered::new();\n\n let (last_log_term, last_log_index) = self.last_log_info();\n\n let args = RequestVoteArgs {\n\n term: self.state.term,\n\n candidate: self.me,\n", "file_path": "src/raft/raft.rs", "rank": 73, "score": 16.81307351642414 }, { "content": " _ => unreachable!(),\n\n }\n\n }\n\n d\n\n } else {\n\n None\n\n };\n\n\n\n if !skip {\n\n let my_core = 
this.sevr_ck\n\n .lock().unwrap().get(&this.gid).unwrap().clone();\n\n my_core.call(Op::InstallShard { cfg, shard, data }).await;\n\n if let Some(core) = prev_core {\n\n match core.call(Op::ShardInstalled { cfg, shard }).await {\n\n Reply::Ok => {},\n\n _ => unreachable!(),\n\n }\n\n }\n\n }\n\n });\n", "file_path": "src/shardkv/server.rs", "rank": 74, "score": 16.695619492471426 }, { "content": " let prev_gid = prev_shards[shard];\n\n\n\n // a new shard which didn't belong to me\n\n if *gid == this.gid && prev_gid != this.gid {\n\n let this = this.clone();\n\n ask_for_shards.push(async move {\n\n let prev_core = this.sevr_ck\n\n .lock()\n\n .unwrap()\n\n .get(&prev_gid)\n\n .map(|c| c.clone());\n\n\n\n let mut skip = false;\n\n let data = if let Some(core) = prev_core.clone() {\n\n let d: Option<Vec<u8>>;\n\n loop {\n\n match core.call(Op::RemoveShard { cfg, shard }).await {\n\n Reply::Shard { data, .. } => { d = Some(data); break; },\n\n Reply::Retry => continue,\n\n Reply::Ok => { d = None; skip = true; break; },\n", "file_path": "src/shardkv/server.rs", "rank": 75, "score": 16.528515381190424 }, { "content": " let shard = key2shard(&key);\n\n if !self.contains[shard] {\n\n return Reply::WrongGroup;\n\n }\n\n self.shard2kv\n\n .get_mut(&shard)\n\n .map(|kv| {\n\n kv.insert(key, value);\n\n Reply::Ok\n\n })\n\n .unwrap()\n\n },\n\n Op::Append { key, value } => {\n\n let shard = key2shard(&key);\n\n if !self.contains[shard] {\n\n return Reply::WrongGroup;\n\n }\n\n self.shard2kv\n\n .get_mut(&shard)\n\n .map(|kv| {\n", "file_path": "src/shardkv/server.rs", "rank": 76, "score": 16.396971132372933 }, { "content": " /// rpc redirect\n\n pub(crate) leader: Option<usize>,\n\n pub(crate) common_sz: usize,\n\n\n\n /// Snapshot related data.\n\n /// Only leader maintains them to send snapshot to followers.\n\n pub(crate) last_included_log: Option<LogEntry>,\n\n pub(crate) snapshot: Vec<u8>,\n\n /// Follower maintains: receiving snapshot or not\n\n pub(crate) snapshot_done: bool,\n\n\n\n pub(crate) log_size: usize,\n\n}\n\n\n\nimpl Raft {\n\n pub(crate) fn start(&mut self, data: &[u8]) -> Result<Start> {\n\n if self.role != Role::Leader {\n\n let leader = self.leader.unwrap_or((self.me + 1) % self.common_sz);\n\n return Err(Error::NotLeader(leader));\n\n }\n", "file_path": "src/raft/raft.rs", "rank": 77, "score": 16.281984532062094 }, { "content": " // is leader\n\n let term = self.state.term;\n\n let index = self.state.logs.end();\n\n self.state.logs.push(LogEntry { term, index, data: data.into() });\n\n Ok(Start { term, index })\n\n }\n\n\n\n pub(crate) fn apply(&mut self) {\n\n if self.state.commit_index < self.state.logs.begin() {\n\n return;\n\n }\n\n \n\n if self.state.applied_index <= self.state.logs.begin() {\n\n let last_included_log = self.last_included_log.as_ref().unwrap();\n\n let msg = ApplyMsg::Snapshot {\n\n data: self.snapshot.clone(),\n\n term: last_included_log.term,\n\n index: last_included_log.index,\n\n };\n\n self.state.applied_index = self.state.logs.begin();\n", "file_path": "src/raft/raft.rs", "rank": 78, "score": 16.133634609382675 }, { "content": " pub async fn leave(&self, group: usize) {\n\n self.leaves(&[group]).await;\n\n }\n\n\n\n pub async fn leaves(&self, groups: &[usize]) {\n\n debug!(\"leave({:?})\", groups);\n\n let gids: Vec<u64> = groups.iter().map(|&g| self.groups[g].gid).collect();\n\n self.ctrler_ck.leave(&gids).await;\n\n }\n\n\n\n /// QUERY to find shards now owned by group\n\n pub async fn query_shards_of(&self, group: usize) -> HashSet<usize> {\n\n 
let c = self.ctrler_ck.query().await;\n\n let gid = self.groups[group].gid;\n\n (0..N_SHARDS).filter(|&i| c.shards[i] == gid).collect()\n\n }\n\n\n\n /// End a Test -- the fact that we got here means there\n\n /// was no failure.\n\n /// print the Passed message,\n", "file_path": "src/shardkv/tester.rs", "rank": 79, "score": 16.02729013763999 }, { "content": "impl Index<usize> for Logs {\n\n type Output = LogEntry;\n\n fn index(&self, index: usize) -> &Self::Output {\n\n &self.logs[index - self.offset]\n\n }\n\n}\n\n\n\nimpl IndexMut<usize> for Logs {\n\n fn index_mut(&mut self, index: usize) -> &mut Self::Output {\n\n &mut self.logs[index - self.offset]\n\n }\n\n}\n\n\n\nimpl Index<Range<usize>> for Logs {\n\n type Output = [LogEntry];\n\n fn index(&self, index: Range<usize>) -> &Self::Output {\n\n let range = Range {\n\n start: index.start - self.offset,\n\n end: index.end - self.offset,\n\n };\n", "file_path": "src/raft/log.rs", "rank": 80, "score": 15.76266166824361 }, { "content": "\n\nimpl<S: State> Server<S> {\n\n pub async fn new(\n\n servers: Vec<SocketAddr>,\n\n me: usize,\n\n max_raft_state: Option<usize>,\n\n ) -> Arc<Self> {\n\n let (raft, apply_ch) = raft::RaftHandle::new(servers, me).await;\n\n\n\n let this = Arc::new(Server {\n\n raft,\n\n me,\n\n state: Arc::new(Mutex::new(S::default())),\n\n res: Arc::new(Mutex::new(Output::default())),\n\n last_applied: Arc::new(Mutex::new(HashMap::new())),\n\n last_output: Arc::new(Mutex::new(HashMap::new())),\n\n });\n\n let max_log_size = max_raft_state.unwrap_or(usize::MAX);\n\n this.start_listen_channel(apply_ch, max_log_size);\n\n this.start_rpc_server();\n", "file_path": "src/kvraft/server.rs", "rank": 81, "score": 15.751573303062822 }, { "content": " kv\n\n .get_mut(&key)\n\n .map(|v| v.push_str(&value));\n\n Reply::Ok\n\n })\n\n .unwrap()\n\n },\n\n Op::InstallShard { cfg, shard, data } => {\n\n if cfg > self.shard2cfg[shard] {\n\n let kv: HashMap<String, String> = data.map_or(\n\n HashMap::new(),\n\n |d| bincode::deserialize(&d).unwrap(),\n\n );\n\n self.shard2kv.insert(shard, kv);\n\n self.shard2cfg[shard] = cfg;\n\n self.contains[shard] = true;\n\n }\n\n Reply::Ok\n\n },\n\n Op::RemoveShard { cfg, shard } => {\n", "file_path": "src/shardkv/server.rs", "rank": 82, "score": 15.55937114058109 }, { "content": "use crate::{kvraft::{msg::*, server_fut::*, state::*}, raft};\n\nuse madsim::{net, task, time};\n\nuse serde::{Deserialize, Serialize};\n\nuse futures::{StreamExt, channel::mpsc::UnboundedReceiver};\n\nuse std::{\n\n fmt::{self, Debug},\n\n net::SocketAddr,\n\n sync::{Arc, Mutex},\n\n time::Duration,\n\n collections::HashMap,\n\n};\n\n\n\n\n\nconst SERVER_TIMEOUT: Duration = Duration::from_millis(400);\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n", "file_path": "src/kvraft/server.rs", "rank": 83, "score": 15.500696803373552 }, { "content": " me: usize,\n\n max_raft_state: Option<usize>,\n\n ) -> Arc<Self> {\n\n let config = ctrl_ck.query().await;\n\n let _inner = Server::new(servers, me, max_raft_state).await;\n\n let this = Arc::new(ShardKvServer { \n\n gid,\n\n ctrl_ck,\n\n sevr_ck: Mutex::new(HashMap::new()),\n\n _inner,\n\n config: Mutex::new(config),\n\n });\n\n\n\n this.start_check_config();\n\n this\n\n }\n\n\n\n fn start_check_config(self: &Arc<Self>) {\n\n let this = self.clone();\n\n task::spawn(async move {\n", "file_path": "src/shardkv/server.rs", "rank": 84, "score": 15.080028463682003 }, { "content": " fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"(term: {}, index: {})\", 
self.term, self.index)\n\n }\n\n}\n\n\n\n\n\n/// # Logs\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct Logs {\n\n offset: usize,\n\n logs: Vec<LogEntry>,\n\n}\n\n\n\nimpl Logs {\n\n pub fn push(&mut self, log: LogEntry) {\n\n self.logs.push(log);\n\n }\n\n\n\n pub fn len(&self) -> usize {\n", "file_path": "src/raft/log.rs", "rank": 85, "score": 15.065319199960038 }, { "content": "\n\n/// # State\n\n\n\n#[derive(Clone, Debug)]\n\npub struct State {\n\n /// persistent:\n\n pub(crate) term: u64,\n\n /// vote in this term\n\n pub(crate) voted_for: Option<usize>,\n\n pub(crate) logs: Logs,\n\n\n\n /// volatile:\n\n pub(crate) commit_index: usize,\n\n pub(crate) applied_index: usize,\n\n\n\n /// leader only:\n\n pub(crate) next_index: Vec<usize>,\n\n pub(crate) match_index: Vec<usize>,\n\n}\n\n\n", "file_path": "src/raft/raft.rs", "rank": 86, "score": 14.961752631440163 }, { "content": " };\n\n }\n\n }\n\n\n\n async fn call(&self, args: Op) -> Reply {\n\n let key = match &args {\n\n Op::Get { key } => key,\n\n Op::Put { key, .. } => key,\n\n Op::Append { key, .. } => key,\n\n _ => unreachable!(),\n\n };\n\n let shard = key2shard(key);\n\n if !self.cores.borrow().contains_key(&shard) {\n\n self.renew_cores().await;\n\n }\n\n self.cores.borrow().get(&shard).unwrap().call(args).await\n\n }\n\n\n\n async fn renew_cores(&self) {\n\n self.cores.borrow_mut().clear();\n", "file_path": "src/shardkv/client.rs", "rank": 87, "score": 14.840495793274501 }, { "content": " type Output = S::Output;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let mut res = self.res.lock().unwrap();\n\n if let Some(output) = res.output.get(&self.seq) {\n\n Poll::Ready(output.clone())\n\n } else {\n\n res.waker.insert(self.seq, cx.waker().clone());\n\n Poll::Pending\n\n }\n\n }\n\n}\n", "file_path": "src/kvraft/server_fut.rs", "rank": 88, "score": 14.750270498745074 }, { "content": " }\n\n\n\n pub fn total_size(&self) -> u64 {\n\n let mut size = 0;\n\n for group in self.groups.iter() {\n\n for &addr in group.addrs.iter() {\n\n let state_size = self.handle.fs.get_file_size(addr, \"state\").unwrap();\n\n let snap_size = self.handle.fs.get_file_size(addr, \"snapshot\").unwrap();\n\n size += state_size + snap_size;\n\n }\n\n }\n\n size\n\n }\n\n\n\n fn rpc_total(&self) -> u64 {\n\n self.handle.net.stat().msg_count / 2\n\n }\n\n\n\n // Create a clerk with clerk specific server names.\n\n // Give it connections to all of the servers\n", "file_path": "src/shardkv/tester.rs", "rank": 89, "score": 14.434050201596506 }, { "content": "\n\nimpl Clerk {\n\n pub fn new(servers: Vec<SocketAddr>) -> Clerk {\n\n Clerk {\n\n ctrl_ck: CtrlerClerk::new(servers),\n\n cores: RefCell::new(HashMap::new()),\n\n }\n\n }\n\n\n\n pub async fn get(&self, key: String) -> String {\n\n loop {\n\n let key = key.clone();\n\n match self.call(Op::Get { key }).await {\n\n Reply::Get { value } => return value.unwrap_or(\"\".to_string()),\n\n Reply::WrongGroup => self.renew_cores().await,\n\n _ => unreachable!(),\n\n };\n\n }\n\n }\n\n\n", "file_path": "src/shardkv/client.rs", "rank": 90, "score": 14.307022149180487 }, { "content": "#[macro_use]\n\nextern crate log;\n\n\n\npub mod kvraft;\n\npub mod raft;\n\npub mod shard_ctrler;\n\npub mod shardkv;\n", "file_path": "src/lib.rs", "rank": 91, "score": 14.3022758823933 }, { "content": " *this.last_applied.lock().unwrap() = snapshot.1;\n\n *this.last_output.lock().unwrap() = snapshot.2;\n\n }\n\n },\n\n }\n\n }\n\n }).detach();\n\n }\n\n\n\n /// The current term 
of this peer.\n\n pub fn term(&self) -> u64 {\n\n self.raft.term()\n\n }\n\n\n\n /// Whether this peer believes it is the leader.\n\n pub fn is_leader(&self) -> bool {\n\n self.raft.is_leader()\n\n }\n\n\n\n pub async fn apply(\n", "file_path": "src/kvraft/server.rs", "rank": 92, "score": 14.182862494532747 }, { "content": "use crate::{\n\n shardkv::msg::*,\n\n kvraft::client::ClerkCore,\n\n shard_ctrler::client::Clerk as CtrlerClerk,\n\n};\n\nuse std::{\n\n net::SocketAddr,\n\n sync::Arc,\n\n cell::RefCell,\n\n collections::HashMap,\n\n};\n\n\n\nuse super::key2shard;\n\n\n\npub struct Clerk {\n\n /// Communicate with ShardCtrler\n\n ctrl_ck: CtrlerClerk,\n\n /// shard -> ClerkCore\n\n cores: RefCell<HashMap<usize, Arc<ClerkCore<Op, Reply>>>>,\n\n}\n", "file_path": "src/shardkv/client.rs", "rank": 93, "score": 14.034285532610301 }, { "content": " pub fn make_client(&self) -> Clerk {\n\n Clerk::new(self.ctrler_addrs.clone())\n\n }\n\n\n\n /// Start i'th server of group.\n\n pub async fn start_server(&self, group: usize, i: usize) {\n\n debug!(\"start_server({}, {})\", group, i);\n\n let group = &self.groups[group];\n\n let addrs = group.addrs.clone();\n\n let handle = self.handle.local_handle(group.addrs[i]);\n\n let ctrl_ck = CtrlerClerk::new(self.ctrler_addrs.clone());\n\n let kv = handle\n\n .spawn(ShardKvServer::new(\n\n ctrl_ck,\n\n addrs,\n\n group.gid,\n\n i,\n\n self.max_raft_state,\n\n ))\n\n .await;\n", "file_path": "src/shardkv/tester.rs", "rank": 94, "score": 13.85828468391602 }, { "content": " /// - 2: retry InstallSnapshot.\n\n pub(crate) fn handle_append_entry(\n\n &mut self,\n\n id: usize,\n\n reply: AppendEntryReply,\n\n new_next: usize,\n\n step: usize,\n\n ) -> i32 {\n\n if reply.success {\n\n // log entries match, update `next_index` and `match_index`.\n\n self.state.next_index[id] = new_next;\n\n self.state.match_index[id] = new_next;\n\n 0\n\n } else if reply.term <= self.state.term {\n\n // log entries don't match, `next_index` retrieves.\n\n let begin = self.state.logs.begin();\n\n if self.state.next_index[id] == begin {\n\n 2\n\n } else {\n\n self.state.next_index[id] = max(\n", "file_path": "src/raft/raft.rs", "rank": 95, "score": 13.838166870013023 }, { "content": " }\n\n\n\n /// Restore previously persisted state.\n\n async fn restore(&self) -> io::Result<()> {\n\n let f1 = async {\n\n match fs::read(\"state\").await {\n\n Ok(persist) => {\n\n let persist: Persist = bincode::deserialize(&persist).unwrap();\n\n let mut inner = self.inner.lock().unwrap();\n\n inner.state.term = persist.term;\n\n inner.state.voted_for = persist.voted_for;\n\n inner.state.logs = persist.logs;\n\n Ok(())\n\n },\n\n Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(()),\n\n Err(e) => Err(e),\n\n }\n\n };\n\n\n\n let f2 = async {\n", "file_path": "src/raft/raft_handle.rs", "rank": 96, "score": 13.765406935515786 }, { "content": "use std::marker::PhantomData;\n\nuse crate::kvraft::msg::*;\n\nuse madsim::{net, time::*, rand::{self, Rng}};\n\nuse std::net::SocketAddr;\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\n\n\nconst CLIENT_TIMEOUT: Duration = Duration::from_millis(500);\n\n\n\npub struct Clerk {\n\n core: ClerkCore<Op, String>,\n\n}\n\n\n\nimpl Clerk {\n\n pub fn new(servers: Vec<SocketAddr>) -> Clerk {\n\n Clerk {\n\n core: ClerkCore::new(servers),\n\n }\n\n }\n\n\n\n /// fetch the current value for a key.\n", "file_path": "src/kvraft/client.rs", "rank": 97, "score": 13.6336954297466 }, { "content": " pub fn n_committed(&self, index: u64) -> (usize, Option<Entry>) {\n\n 
self.storage.n_committed(index)\n\n }\n\n\n\n pub async fn start(&self, i: usize, cmd: Entry) -> Result<Start> {\n\n let raft = self.rafts.lock().unwrap()[i].as_ref().unwrap().clone();\n\n self.handle\n\n .local_handle(self.addrs[i])\n\n .spawn(async move { raft.start(&bincode::serialize(&cmd).unwrap()).await })\n\n .await\n\n }\n\n\n\n /// wait for at least n servers to commit.\n\n /// but don't wait forever.\n\n pub async fn wait(&self, index: u64, n: usize, start_term: Option<u64>) -> Option<Entry> {\n\n let mut to = Duration::from_millis(10);\n\n for _ in 0..30 {\n\n let (nd, _) = self.n_committed(index);\n\n if nd >= n {\n\n break;\n", "file_path": "src/raft/tester.rs", "rank": 98, "score": 13.619496544205393 }, { "content": " self.apply_ch.unbounded_send(msg).unwrap();\n\n }\n\n\n\n while self.state.applied_index < self.state.commit_index {\n\n let index = self.state.applied_index;\n\n let log = self.state.logs[index].clone();\n\n let msg = ApplyMsg::Command {\n\n data: log.data,\n\n index: log.index,\n\n };\n\n self.state.applied_index += 1;\n\n self.apply_ch.unbounded_send(msg).unwrap();\n\n }\n\n }\n\n\n\n /// returns `last_log_term` and `last_log_index`\n\n fn last_log_info(&self) -> (u64, usize) {\n\n if let Some(log) = self.state.logs.last() {\n\n (log.term, log.index)\n\n } else if self.last_included_log.is_some() {\n", "file_path": "src/raft/raft.rs", "rank": 99, "score": 13.524899759041261 } ]
Rust
core/src/ops/cnn/conv/depth_wise.rs
mithril-security/tract-sgx-xargo
ea0d7cc5a81f413250dbfca89d5d876e76746b89
use crate::internal::*; use crate::ops::cnn::patches::{Zone, ZoneScanner}; use crate::ops::cnn::Patch; use crate::ops::nn::DataShape; #[derive(Debug, Clone, new, Hash)] pub struct DepthWise { patch: Patch, input_shape: DataShape, output_shape: DataShape, kernel_chw: Arc<Tensor>, bias: Arc<Tensor>, } impl_dyn_hash!(DepthWise); impl Op for DepthWise { fn name(&self) -> Cow<str> { "DepthWiseConv".into() } fn info(&self) -> TractResult<Vec<String>> { Ok(vec![format!("{:?}", self.patch)]) } fn validation(&self) -> Validation { Validation::Rounding } op_core_lir!(); op_as_typed_op!(); } impl EvalOp for DepthWise { fn is_stateless(&self) -> bool { true } fn eval(&self, inputs: TVec<Arc<Tensor>>) -> TractResult<TVec<Arc<Tensor>>> { dispatch_floatlike!(Self::eval_t(inputs[0].datum_type())(self, inputs)) } } impl DepthWise { fn eval_t<T: Datum + Copy + num_traits::Zero + ndarray::LinalgScalar>( &self, mut inputs: TVec<Arc<Tensor>>, ) -> TractResult<TVec<Arc<Tensor>>> { let img = args_1!(inputs); let mut output = unsafe { Tensor::uninitialized::<T>(&*self.output_shape.shape)? }; let iptr = img.as_ptr::<T>()?; let optr = output.as_ptr_mut::<T>()?; let k_stride_i = self.kernel_chw.strides()[1]; let n = *self.input_shape.n().unwrap_or(&1); let n_stride_i = *self.input_shape.n_stride().unwrap_or(&0) as isize; let n_stride_o = *self.output_shape.n_stride().unwrap_or(&0) as isize; let c_stride_i = *self.input_shape.c_stride() as isize; let c_stride_o = *self.output_shape.c_stride() as isize; let bias = self.bias.as_ptr::<T>()?; let kptr = self.kernel_chw.as_ptr::<T>()?; unsafe { for n in 0..n as isize { let iptr = iptr.offset(n_stride_i * n); let optr = optr.offset(n_stride_o * n); for zone in &self.patch.zones { self.process_zone( zone, c_stride_i, c_stride_o, k_stride_i, iptr, kptr, bias, optr, ) } } } Ok(tvec!(output.into_arc_tensor())) } #[inline(never)] unsafe fn process_zone<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { if zone.values_offsets.len() == 4 { self.process_zone_4(zone, c_stride_i, c_stride_o, k_stride_i, iptr, kptr, bias, optr) } else { zone.visit_output(&self.patch, |visitor| { for c in 0..*self.input_shape.c() as isize { let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let kptr = kptr.offset(k_stride_i * c); Self::inner_loop::<T>(iptr, kptr, bias, optr, c, visitor) } }) } } #[inline(never)] unsafe fn process_zone_4<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); let mut i = 0isize; while i + 4 < visitor.inner_loop_len 
as isize { let iptr_a = iptr.offset(visitor.inner_loop_input_full_stride * i); let iptr_b = iptr.offset(visitor.inner_loop_input_full_stride * (i + 1)); let iptr_c = iptr.offset(visitor.inner_loop_input_full_stride * (i + 2)); let iptr_d = iptr.offset(visitor.inner_loop_input_full_stride * (i + 3)); let optr_a = optr.offset(visitor.inner_loop_output_stride * i); let optr_b = optr.offset(visitor.inner_loop_output_stride * (i + 1)); let optr_c = optr.offset(visitor.inner_loop_output_stride * (i + 2)); let optr_d = optr.offset(visitor.inner_loop_output_stride * (i + 3)); let i0_a = *iptr_a.offset(ioffset0); let i0_b = *iptr_b.offset(ioffset0); let i0_c = *iptr_c.offset(ioffset0); let i0_d = *iptr_d.offset(ioffset0); let i1_a = *iptr_a.offset(ioffset1); let i1_b = *iptr_b.offset(ioffset1); let i1_c = *iptr_c.offset(ioffset1); let i1_d = *iptr_d.offset(ioffset1); let i2_a = *iptr_a.offset(ioffset2); let i2_b = *iptr_b.offset(ioffset2); let i2_c = *iptr_c.offset(ioffset2); let i2_d = *iptr_d.offset(ioffset2); let i3_a = *iptr_a.offset(ioffset3); let i3_b = *iptr_b.offset(ioffset3); let i3_c = *iptr_c.offset(ioffset3); let i3_d = *iptr_d.offset(ioffset3); let p0_a = i0_a * k0; let p1_a = i1_a * k1; let p2_a = i2_a * k2; let p3_a = i3_a * k3; let p0_b = i0_b * k0; let p1_b = i1_b * k1; let p2_b = i2_b * k2; let p3_b = i3_b * k3; let p0_c = i0_c * k0; let p1_c = i1_c * k1; let p2_c = i2_c * k2; let p3_c = i3_c * k3; let p0_d = i0_d * k0; let p1_d = i1_d * k1; let p2_d = i2_d * k2; let p3_d = i3_d * k3; *optr_a = bias + p0_a + p1_a + p2_a + p3_a; *optr_b = bias + p0_b + p1_b + p2_b + p3_b; *optr_c = bias + p0_c + p1_c + p2_c + p3_c; *optr_d = bias + p0_d + p1_d + p2_d + p3_d; i += 4; } while i < visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let p0 = i0 * k0; let p1 = i1 * k1; let p2 = i2 * k2; let p3 = i3 * k3; let sum = bias + p0 + p1 + p2 + p3; *optr = sum; i += 1; } visitor.next_non_inner_axis() } } } #[inline(never)] unsafe fn inner_loop<T: Datum + Copy + ndarray::LinalgScalar>( iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, c: isize, visitor: &ZoneScanner, ) { let mut sum = *bias.offset(c); let mut iter = visitor.valid_offsets_ker_in(); if iter.size_hint() == (4, Some(4)) { let (ix, v) = iter.next().unwrap(); let k0 = *kptr.offset(ix as isize); let i0 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k1 = *kptr.offset(ix as isize); let i1 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k2 = *kptr.offset(ix as isize); let i2 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k3 = *kptr.offset(ix as isize); let i3 = *iptr.offset(v as isize); sum = sum + k0 * i0 + k1 * i1 + k2 * i2 + k3 * i3; } else if iter.size_hint() == (3, Some(3)) { let (ix, v) = iter.next().unwrap(); let k0 = *kptr.offset(ix as isize); let i0 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k1 = *kptr.offset(ix as isize); let i1 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k2 = *kptr.offset(ix as isize); let i2 = *iptr.offset(v as isize); sum = sum + k0 * i0 + k1 * i1 + k2 * i2; } else { for (ix, v) in iter { let k = *kptr.offset(ix as isize); let i = *iptr.offset(v as isize); sum = sum + k * i; } } let optr = optr.offset(visitor.output_offset); *optr = sum; } } impl 
TypedOp for DepthWise { fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> { anyhow::ensure!( self.input_shape.c() == self.output_shape.c(), "DepthWiseConv must have same input and output channels" ); anyhow::ensure!( *self.input_shape.c() == self.bias.len(), "DepthWiseConv data has {} channels, bias has {}", self.input_shape.c(), self.bias.len() ); Ok(tvec!(TypedFact::dt_shape(inputs[0].datum_type, &self.output_shape.shape))) } fn cost(&self, inputs: &[&TypedFact]) -> TractResult<TVec<(Cost, TDim)>> { let n_output_points = self.patch.output_shape.iter().cloned().product::<usize>(); Ok(tvec!(( Cost::FMA(inputs[0].datum_type), (self.input_shape.n().unwrap_or(&1) * n_output_points * self.kernel_chw.len()).to_dim() ))) } as_op!(); } /* partial alternative impl that may be relevant when simd gets better */ /* #[inline(never)] unsafe fn process_zone_4_f32( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const f32, kptr: *const f32, bias: *const f32, optr: *mut f32, ) { use std::simd::*; let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let k0 = f32x4::splat(k0); let k1 = f32x4::splat(k1); let k2 = f32x4::splat(k2); let k3 = f32x4::splat(k3); let bias = f32x4::splat(*bias.offset(c)); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); let mut i = 0; while i + 4 < for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let i = f32x4::from_array([i0, i1, i2, i3]); let p = (i * k).reduce_sum(); let sum = bias + p; *optr = sum } visitor.next_non_inner_axis() } } } */ /* #[inline(never)] unsafe fn process_zone_4_f32( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const f32, kptr: *const f32, bias: *const f32, optr: *mut f32, ) { use std::simd::*; let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let k = f32x4::from_array([k0, k1, k2, k3]); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); for i in 0..visitor.inner_loop_len as isize { let iptr = 
iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let i = f32x4::from_array([i0, i1, i2, i3]); let p = (i * k).reduce_sum(); let sum = bias + p; *optr = sum } visitor.next_non_inner_axis() } } } */ /* #[inline(never)] unsafe fn process_zone_4<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let p0 = i0 * k0; let p1 = i1 * k1; let p2 = i2 * k2; let p3 = i3 * k3; let sum = bias + p0 + p1 + p2 + p3; *optr = sum } visitor.next_non_inner_axis() } } } */
use crate::internal::*; use crate::ops::cnn::patches::{Zone, ZoneScanner}; use crate::ops::cnn::Patch; use crate::ops::nn::DataShape; #[derive(Debug, Clone, new, Hash)] pub struct DepthWise { patch: Patch, input_shape: DataShape, output_shape: DataShape, kernel_chw: Arc<Tensor>, bias: Arc<Tensor>, } impl_dyn_hash!(DepthWise); impl Op for DepthWise { fn name(&self) -> Cow<str> { "DepthWiseConv".into() } fn info(&self) -> TractResult<Vec<String>> { Ok(vec![format!("{:?}", self.patch)]) } fn validation(&self) -> Validation { Validation::Rounding } op_core_lir!(); op_as_typed_op!(); } impl EvalOp for DepthWise { fn is_stateless(&self) -> bool { true } fn eval(&self, inputs: TVec<Arc<Tensor>>) -> TractResult<TVec<Arc<Tensor>>> { dispatch_floatlike!(Self::eval_t(inputs[0].datum_type())(self, inputs)) } } impl DepthWise { fn eval_t<T: Datum + Copy + num_traits::Zero + ndarray::LinalgScalar>( &self, mut inputs: TVec<Arc<Tensor>>, ) -> TractResult<TVec<Arc<Tensor>>> { let img = args_1!(inputs); let mut output = unsafe { Tensor::uninitialized::<T>(&*self.output_shape.shape)? }; let iptr = img.as_ptr::<T>()?; let optr = output.as_ptr_mut::<T>()?; let k_stride_i = self.kernel_chw.strides()[1]; let n = *self.input_shape.n().unwrap_or(&1); let n_stride_i = *self.input_shape.n_stride().unwrap_or(&0) as isize; let n_stride_o = *self.output_shape.n_stride().unwrap_or(&0) as isize; let c_stride_i = *self.input_shape.c_stride() as isize; let c_stride_o = *self.output_shape.c_stride() as isize; let bias = self.bias.as_ptr::<T>()?; let kptr = self.kernel_chw.as_ptr::<T>()?; unsafe { for n in 0..n as isize { let iptr = iptr.offset(n_stride_i * n); let optr = optr.offset(n_stride_o * n); for zone in &self.patch.zones { self.process_zone( zone, c_stride_i, c_stride_o, k_stride_i, iptr, kptr, bias, optr, ) } } } Ok(tvec!(output.into_arc_tensor())) } #[inline(never)] unsafe fn process_zone<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { if zone.values_offsets.len() == 4 { self.
#[inline(never)] unsafe fn process_zone_4<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); let mut i = 0isize; while i + 4 < visitor.inner_loop_len as isize { let iptr_a = iptr.offset(visitor.inner_loop_input_full_stride * i); let iptr_b = iptr.offset(visitor.inner_loop_input_full_stride * (i + 1)); let iptr_c = iptr.offset(visitor.inner_loop_input_full_stride * (i + 2)); let iptr_d = iptr.offset(visitor.inner_loop_input_full_stride * (i + 3)); let optr_a = optr.offset(visitor.inner_loop_output_stride * i); let optr_b = optr.offset(visitor.inner_loop_output_stride * (i + 1)); let optr_c = optr.offset(visitor.inner_loop_output_stride * (i + 2)); let optr_d = optr.offset(visitor.inner_loop_output_stride * (i + 3)); let i0_a = *iptr_a.offset(ioffset0); let i0_b = *iptr_b.offset(ioffset0); let i0_c = *iptr_c.offset(ioffset0); let i0_d = *iptr_d.offset(ioffset0); let i1_a = *iptr_a.offset(ioffset1); let i1_b = *iptr_b.offset(ioffset1); let i1_c = *iptr_c.offset(ioffset1); let i1_d = *iptr_d.offset(ioffset1); let i2_a = *iptr_a.offset(ioffset2); let i2_b = *iptr_b.offset(ioffset2); let i2_c = *iptr_c.offset(ioffset2); let i2_d = *iptr_d.offset(ioffset2); let i3_a = *iptr_a.offset(ioffset3); let i3_b = *iptr_b.offset(ioffset3); let i3_c = *iptr_c.offset(ioffset3); let i3_d = *iptr_d.offset(ioffset3); let p0_a = i0_a * k0; let p1_a = i1_a * k1; let p2_a = i2_a * k2; let p3_a = i3_a * k3; let p0_b = i0_b * k0; let p1_b = i1_b * k1; let p2_b = i2_b * k2; let p3_b = i3_b * k3; let p0_c = i0_c * k0; let p1_c = i1_c * k1; let p2_c = i2_c * k2; let p3_c = i3_c * k3; let p0_d = i0_d * k0; let p1_d = i1_d * k1; let p2_d = i2_d * k2; let p3_d = i3_d * k3; *optr_a = bias + p0_a + p1_a + p2_a + p3_a; *optr_b = bias + p0_b + p1_b + p2_b + p3_b; *optr_c = bias + p0_c + p1_c + p2_c + p3_c; *optr_d = bias + p0_d + p1_d + p2_d + p3_d; i += 4; } while i < visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let p0 = i0 * k0; let p1 = i1 * k1; let p2 = i2 * k2; let p3 = i3 * k3; let sum = bias + p0 + p1 + p2 + p3; *optr = sum; i += 1; } visitor.next_non_inner_axis() } } } #[inline(never)] unsafe fn inner_loop<T: Datum + Copy + ndarray::LinalgScalar>( iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, c: isize, visitor: &ZoneScanner, ) { let mut sum = *bias.offset(c); let mut iter = visitor.valid_offsets_ker_in(); if iter.size_hint() == (4, Some(4)) { let (ix, v) = 
iter.next().unwrap(); let k0 = *kptr.offset(ix as isize); let i0 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k1 = *kptr.offset(ix as isize); let i1 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k2 = *kptr.offset(ix as isize); let i2 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k3 = *kptr.offset(ix as isize); let i3 = *iptr.offset(v as isize); sum = sum + k0 * i0 + k1 * i1 + k2 * i2 + k3 * i3; } else if iter.size_hint() == (3, Some(3)) { let (ix, v) = iter.next().unwrap(); let k0 = *kptr.offset(ix as isize); let i0 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k1 = *kptr.offset(ix as isize); let i1 = *iptr.offset(v as isize); let (ix, v) = iter.next().unwrap(); let k2 = *kptr.offset(ix as isize); let i2 = *iptr.offset(v as isize); sum = sum + k0 * i0 + k1 * i1 + k2 * i2; } else { for (ix, v) in iter { let k = *kptr.offset(ix as isize); let i = *iptr.offset(v as isize); sum = sum + k * i; } } let optr = optr.offset(visitor.output_offset); *optr = sum; } } impl TypedOp for DepthWise { fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> { anyhow::ensure!( self.input_shape.c() == self.output_shape.c(), "DepthWiseConv must have same input and output channels" ); anyhow::ensure!( *self.input_shape.c() == self.bias.len(), "DepthWiseConv data has {} channels, bias has {}", self.input_shape.c(), self.bias.len() ); Ok(tvec!(TypedFact::dt_shape(inputs[0].datum_type, &self.output_shape.shape))) } fn cost(&self, inputs: &[&TypedFact]) -> TractResult<TVec<(Cost, TDim)>> { let n_output_points = self.patch.output_shape.iter().cloned().product::<usize>(); Ok(tvec!(( Cost::FMA(inputs[0].datum_type), (self.input_shape.n().unwrap_or(&1) * n_output_points * self.kernel_chw.len()).to_dim() ))) } as_op!(); } /* partial alternative impl that may be relevant when simd gets better */ /* #[inline(never)] unsafe fn process_zone_4_f32( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const f32, kptr: *const f32, bias: *const f32, optr: *mut f32, ) { use std::simd::*; let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let k0 = f32x4::splat(k0); let k1 = f32x4::splat(k1); let k2 = f32x4::splat(k2); let k3 = f32x4::splat(k3); let bias = f32x4::splat(*bias.offset(c)); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); let mut i = 0; while i + 4 < for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let i = f32x4::from_array([i0, i1, i2, i3]); let p = (i * k).reduce_sum(); let sum = bias + p; *optr = sum } visitor.next_non_inner_axis() } } } */ /* #[inline(never)] unsafe fn 
process_zone_4_f32( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const f32, kptr: *const f32, bias: *const f32, optr: *mut f32, ) { use std::simd::*; let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let k = f32x4::from_array([k0, k1, k2, k3]); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let i = f32x4::from_array([i0, i1, i2, i3]); let p = (i * k).reduce_sum(); let sum = bias + p; *optr = sum } visitor.next_non_inner_axis() } } } */ /* #[inline(never)] unsafe fn process_zone_4<T: Datum + Copy + ndarray::LinalgScalar>( &self, zone: &Zone, c_stride_i: isize, c_stride_o: isize, k_stride_i: isize, iptr: *const T, kptr: *const T, bias: *const T, optr: *mut T, ) { let mut visitor = ZoneScanner::new(zone, &self.patch); let ioffset0 = zone.values_offsets[0].1; let ioffset1 = zone.values_offsets[1].1; let ioffset2 = zone.values_offsets[2].1; let ioffset3 = zone.values_offsets[3].1; for c in 0..*self.input_shape.c() as isize { visitor.reset(); let kptr = kptr.offset(k_stride_i * c); let iptr = iptr.offset(c_stride_i * c); let optr = optr.offset(c_stride_o * c); let k0 = *kptr.offset(zone.values_offsets[0].0 as isize); let k1 = *kptr.offset(zone.values_offsets[1].0 as isize); let k2 = *kptr.offset(zone.values_offsets[2].0 as isize); let k3 = *kptr.offset(zone.values_offsets[3].0 as isize); let bias = *bias.offset(c); while !visitor.done { let iptr = iptr.offset(visitor.input_center_offset); let optr = optr.offset(visitor.output_offset); for i in 0..visitor.inner_loop_len as isize { let iptr = iptr.offset(visitor.inner_loop_input_full_stride * i); let optr = optr.offset(visitor.inner_loop_output_stride * i); let i0 = *iptr.offset(ioffset0); let i1 = *iptr.offset(ioffset1); let i2 = *iptr.offset(ioffset2); let i3 = *iptr.offset(ioffset3); let p0 = i0 * k0; let p1 = i1 * k1; let p2 = i2 * k2; let p3 = i3 * k3; let sum = bias + p0 + p1 + p2 + p3; *optr = sum } visitor.next_non_inner_axis() } } } */
process_zone_4(zone, c_stride_i, c_stride_o, k_stride_i, iptr, kptr, bias, optr)
        } else {
            zone.visit_output(&self.patch, |visitor| {
                for c in 0..*self.input_shape.c() as isize {
                    let iptr = iptr.offset(c_stride_i * c);
                    let optr = optr.offset(c_stride_o * c);
                    let kptr = kptr.offset(k_stride_i * c);
                    Self::inner_loop::<T>(iptr, kptr, bias, optr, c, visitor)
                }
            })
        }
    }
function_block-function_prefixed
[ { "content": "pub fn output_type(input: DatumType) -> DatumType {\n\n if input.is_float() {\n\n input\n\n } else {\n\n i32::datum_type()\n\n }\n\n}\n\n\n\npub(super) fn eval(\n\n a: &Tensor,\n\n b: &Tensor,\n\n a_trans: bool,\n\n b_trans: bool,\n\n c_trans: bool,\n\n) -> TractResult<Tensor> {\n\n unsafe {\n\n let rank = a.rank();\n\n let (m, k, n, c_shape) = compute_shape(a.shape(), b.shape(), a_trans, b_trans, c_trans)?;\n\n let dt = output_type(a.datum_type());\n\n let mm = tract_linalg::ops()\n", "file_path": "core/src/ops/matmul.rs", "rank": 0, "score": 378203.4123340857 }, { "content": "#[allow(dead_code)]\n\npub fn dyn_hash<H: Hash>(h: H, s: &mut dyn Hasher) {\n\n h.hash(&mut WrappedHasher(s))\n\n}\n", "file_path": "data/src/hash.rs", "rank": 1, "score": 309263.98311873915 }, { "content": "pub fn plug(ops: &mut Ops) {\n\n let impls = vec![\n\n armv7neon_mmm_f32_8x4_cortexa7::mmm(),\n\n armv7neon_mmm_f32_8x6_cortexa7::mmm(),\n\n armv7neon_mmm_f32_8x4_cortexa9::mmm(),\n\n armv7neon_mmm_f32_8x6_cortexa9::mmm(),\n\n armv7neon_mmm_f32_8x4_generic::mmm(),\n\n armv7neon_mmm_f32_8x6_generic::mmm(),\n\n crate::generic::mmm::generic_f32_4x4::mmm(),\n\n ];\n\n ops.mmm_f32_impls = impls.clone();\n\n if has_neon() {\n\n log::info!(\"armv7neon activated (smmm, ssigmoid), stanh)\");\n\n let cpu = cpu_part().unwrap_or(0);\n\n\n\n fn prefer_8x4(_m: Option<usize>, _k: Option<usize>, n: Option<usize>) -> bool {\n\n n.map(|n| n % 4 == 0 && n % 6 != 0 && n <= 12).unwrap_or(false)\n\n }\n\n\n\n ops.mmv_f32 = match cpu {\n", "file_path": "linalg/src/arm32.rs", "rank": 2, "score": 301008.74880640267 }, { "content": "pub fn plug(ops: &mut Ops) {\n\n let impls = vec![\n\n arm64simd_mmm_f32_12x8_gen::mmm(),\n\n arm64simd_mmm_f32_12x8_a53::mmm(),\n\n arm64simd_mmm_f32_12x8_a55::mmm(),\n\n arm64simd_mmm_f32_8x8_gen::mmm(),\n\n arm64simd_mmm_f32_8x8_a53::mmm(),\n\n arm64simd_mmm_f32_8x8_a55::mmm(),\n\n arm64simd_mmm_f32_16x4_gen::mmm(),\n\n arm64simd_mmm_f32_16x4_a53::mmm(),\n\n arm64simd_mmm_f32_16x4_a55::mmm(),\n\n arm64simd_mmm_f32_24x4_gen::mmm(),\n\n arm64simd_mmm_f32_24x4_a53::mmm(),\n\n arm64simd_mmm_f32_24x4_a55::mmm(),\n\n crate::generic::mmm::generic_f32_4x4::mmm(),\n\n ];\n\n ops.mmm_f32_impls = impls.clone();\n\n ops.qmmm_i32 = Box::new(|_, _, _| arm64simd_mmm_i32_8x8::mmm());\n\n ops.qmmv_i32 = Box::new(|_, _| arm64simd_mmm_i32_64x1::mmm());\n\n ops.mmv_f32 = match *KIND {\n", "file_path": "linalg/src/arm64.rs", "rank": 3, "score": 301008.7488064028 }, { "content": "pub fn plug(ops: &mut Ops) {\n\n if is_x86_feature_detected!(\"fma\") {\n\n ops.mmv_f32 = Box::new(|_, _| mmm::fma_mmm_f32_64x1::mmm());\n\n ops.mmm_f32_impls.push(mmm::fma_mmm_f32_16x6::mmm());\n\n ops.mmm_f32 = Box::new(|_,_,_| mmm::fma_mmm_f32_16x6::mmm());\n\n ops.mmm_f32_impls.push(mmm::fma_mmm_f32_8x8::mmm());\n\n ops.sigmoid_f32 = Box::new(|| Box::new(ElementWiseImpl::<sigmoid::SigmoidF32, f32>::new()));\n\n ops.tanh_f32 = Box::new(|| Box::new(ElementWiseImpl::<tanh::TanhF32, f32>::new()));\n\n log::info!(\"mmm_f32, sigmoid_f32, tanh_f32: x86_64/fma activated\");\n\n }\n\n if is_x86_feature_detected!(\"avx2\") {\n\n ops.qmmm_i32 = Box::new(|_, _, _| mmm::avx2_mmm_i32_8x8::mmm());\n\n log::info!(\"mmm_i8_i8 and mmm_i8_i32: x86_64/avx2 activated\");\n\n }\n\n}\n", "file_path": "linalg/src/x86_64_fma.rs", "rank": 4, "score": 297111.09628179367 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"CumSum\", cumsum);\n\n}\n\n\n", "file_path": "onnx/src/ops/cumsum.rs", "rank": 5, "score": 
291967.6912923807 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"GRU\", gru::gru);\n\n reg.insert(\"LSTM\", lstm::lstm);\n\n reg.insert(\"RNN\", rnn::rnn);\n\n reg.insert(\"Scan\", scan::scan);\n\n}\n", "file_path": "onnx/src/ops/rec.rs", "rank": 6, "score": 291967.6912923807 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"DepthToSpace\", depth_to_space);\n\n}\n\n\n", "file_path": "onnx/src/ops/d2s.rs", "rank": 7, "score": 291967.6912923807 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"Add\", |_, _| Ok((ops::math::Add.into_hir(), vec![])));\n\n reg.insert(\"Sub\", |_, _| Ok((ops::math::Sub.into_hir(), vec![])));\n\n reg.insert(\"Mul\", |_, _| Ok((ops::math::Mul.into_hir(), vec![])));\n\n reg.insert(\"Div\", |_, _| Ok((ops::math::Div.into_hir(), vec![])));\n\n reg.insert(\"Mod\", rem::rem);\n\n\n\n reg.insert(\"BitShift\", bitshift);\n\n\n\n reg.insert(\"Sum\", |_, _| Ok((Box::new(Nary(Box::new(ops::math::Add), false)), vec![])));\n\n reg.insert(\"Max\", |_, _| Ok((Box::new(Nary(Box::new(ops::math::Max), false)), vec![])));\n\n reg.insert(\"Min\", |_, _| Ok((Box::new(Nary(Box::new(ops::math::Min), false)), vec![])));\n\n reg.insert(\"Mean\", |_, _| Ok((Box::new(Nary(Box::new(ops::math::Add), true)), vec![])));\n\n\n\n reg.insert(\"Abs\", |_, _| Ok((Box::new(ops::math::abs()), vec![])));\n\n reg.insert(\"Ceil\", |_, _| Ok((Box::new(ops::math::ceil()), vec![])));\n\n reg.insert(\"Floor\", |_, _| Ok((Box::new(ops::math::floor()), vec![])));\n\n reg.insert(\"Round\", |_, _| Ok((Box::new(ops::math::round_half_to_even()), vec![])));\n\n reg.insert(\"Clip\", clip::clip);\n\n\n", "file_path": "onnx/src/ops/math.rs", "rank": 8, "score": 291967.6912923807 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"QuantizeLinear\", quantize_linear);\n\n reg.insert(\"DequantizeLinear\", dequantize_linear);\n\n reg.insert(\"DynamicQuantizeLinear\", dynamic_quantize_linear);\n\n}\n\n\n", "file_path": "onnx/src/ops/quant.rs", "rank": 9, "score": 291967.6912923807 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"SpaceToDepth\", space_to_depth);\n\n}\n\n\n", "file_path": "onnx/src/ops/s2d.rs", "rank": 10, "score": 291967.6912923807 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"Not\", |_, _| Ok((Box::new(ops::logic::not()), vec![])));\n\n reg.insert(\"And\", |_, _| Ok((ops::logic::And.into_hir(), vec![])));\n\n reg.insert(\"Or\", |_, _| Ok((ops::logic::Or.into_hir(), vec![])));\n\n reg.insert(\"Xor\", |_, _| Ok((ops::logic::Xor.into_hir(), vec![])));\n\n\n\n reg.insert(\"Equal\", |_, _| Ok((ops::logic::Equals.into_hir(), vec![])));\n\n reg.insert(\"Greater\", |_, _| Ok((ops::logic::Greater.into_hir(), vec![])));\n\n reg.insert(\"Less\", |_, _| Ok((ops::logic::Less.into_hir(), vec![])));\n\n reg.insert(\"LessOrEqual\", |_, _| Ok((ops::logic::LessEqual.into_hir(), vec![])));\n\n reg.insert(\"GreaterOrEqual\", |_, _| Ok((ops::logic::GreaterEqual.into_hir(), vec![])));\n\n\n\n reg.insert(\"Where\", |_, _| Ok((expand(tract_hir::ops::logic::Iff), vec![])));\n\n\n\n reg.insert(\"If\", _if)\n\n}\n\n\n", "file_path": "onnx/src/ops/logic.rs", "rank": 11, "score": 291967.6912923807 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"Cast\", cast::cast);\n\n reg.insert(\"Constant\", konst);\n\n reg.insert(\"Identity\", |_, _| 
Ok((Box::new(ops::identity::Identity::default()), vec![])));\n\n reg.insert(\"Resize\", resize::resize);\n\n array::register_all_ops(reg);\n\n cumsum::register_all_ops(reg);\n\n d2s::register_all_ops(reg);\n\n logic::register_all_ops(reg);\n\n math::register_all_ops(reg);\n\n ml::register_all_ops(reg);\n\n nn::register_all_ops(reg);\n\n quant::register_all_ops(reg);\n\n rec::register_all_ops(reg);\n\n s2d::register_all_ops(reg);\n\n}\n\n\n", "file_path": "onnx/src/ops/mod.rs", "rank": 12, "score": 291967.6912923807 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"ArrayFeatureExtractor\", array_feature_extractor);\n\n reg.insert(\"Compress\", compress::compress);\n\n reg.insert(\"Concat\", concat);\n\n reg.insert(\"ConstantLike\", constant_like);\n\n reg.insert(\"ConstantOfShape\", constant_of_shape);\n\n reg.insert(\"Expand\", |_, _| Ok((expand(array::MultiBroadcastTo::default()), vec![])));\n\n reg.insert(\"EyeLike\", eye_like);\n\n reg.insert(\"Flatten\", flatten);\n\n reg.insert(\"Gather\", gather);\n\n reg.insert(\"GatherElements\", gather_elements);\n\n reg.insert(\"GatherND\", gather_nd);\n\n reg.insert(\"NonZero\", |_, _| Ok((Box::new(nonzero::NonZero::non_zero()), vec![])));\n\n reg.insert(\"OneHot\", one_hot::one_hot);\n\n reg.insert(\"Range\", |_, _| Ok((expand(array::Range::default()), vec![])));\n\n reg.insert(\"Pad\", pad::pad);\n\n reg.insert(\"Reshape\", |_, _| Ok((expand(array::Reshape::default()), vec![])));\n\n reg.insert(\"Scatter\", scatter_elements);\n\n reg.insert(\"ScatterElements\", scatter_elements);\n\n reg.insert(\"ScatterND\", |_, _| Ok((Box::new(array::ScatterNd), vec![])));\n\n reg.insert(\"Shape\", |_, _| Ok((expand(array::Shape::new(DatumType::I64)), vec![])));\n\n reg.insert(\"Size\", |_, _| Ok((expand(array::Size::new(DatumType::I64)), vec![])));\n\n reg.insert(\"Slice\", slice::slice);\n\n reg.insert(\"Split\", split::split);\n\n reg.insert(\"Squeeze\", squeeze::squeeze);\n\n reg.insert(\"Tile\", |_, _| Ok((expand(array::Tile::default()), vec![])));\n\n reg.insert(\"Transpose\", transpose);\n\n reg.insert(\"Unsqueeze\", unsqueeze::unsqueeze);\n\n}\n\n\n", "file_path": "onnx/src/ops/array/mod.rs", "rank": 13, "score": 288756.42020153854 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n category_mapper::register_all_ops(reg);\n\n tree_ensemble_classifier::register_all_ops(reg);\n\n}\n", "file_path": "onnx/src/ops/ml/mod.rs", "rank": 14, "score": 288756.42020153854 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"ArgMax\", arg_max_min);\n\n reg.insert(\"ArgMin\", arg_max_min);\n\n reg.insert(\"AveragePool\", average_pool);\n\n reg.insert(\"BatchNormalization\", batch_normalization);\n\n reg.insert(\"Conv\", conv);\n\n reg.insert(\"ConvInteger\", conv_integer);\n\n reg.insert(\"ConvTranspose\", conv_transpose::conv_transpose);\n\n reg.insert(\"Dropout\", dropout::dropout);\n\n reg.insert(\"Elu\", elu);\n\n reg.insert(\"GlobalAveragePool\", |_, _| Ok((expand(ops::nn::GlobalAvgPool), vec![])));\n\n reg.insert(\"GlobalLpPool\", global_lp_pool);\n\n reg.insert(\"GlobalMaxPool\", |_, _| Ok((expand(ops::nn::GlobalMaxPool), vec![])));\n\n reg.insert(\"Hardmax\", layer_hard_max);\n\n reg.insert(\"HardSigmoid\", hard_sigmoid);\n\n reg.insert(\"InstanceNormalization\", instance_norm::instance_normalization);\n\n reg.insert(\"LeakyRelu\", leaky_relu);\n\n reg.insert(\"LogSoftmax\", layer_log_soft_max);\n\n reg.insert(\"LRN\", lrn::lrn);\n\n reg.insert(\"MaxPool\", 
max_pool);\n", "file_path": "onnx/src/ops/nn/mod.rs", "rank": 15, "score": 288756.4202015386 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_unit_element_wise(\"tract_core_round_even\", &ops::math::RoundHalfToEven {});\n\n\n\n registry.register_binary(\"tract_core_xor\", &ops::logic::Xor {});\n\n\n\n registry.register_binary_with_flipped(\n\n \"tract_shl\",\n\n &ops::math::ShiftLeft,\n\n &ops::math::FlippedShiftLeft,\n\n );\n\n registry.register_binary_with_flipped(\n\n \"tract_shr\",\n\n &ops::math::ShiftRight,\n\n &ops::math::FlippedShiftRight,\n\n );\n\n broadcast::register(registry);\n\n cast::register(registry);\n\n downsample::register(registry);\n\n gather::register(registry);\n\n one_hot::register(registry);\n\n qconv::register(registry);\n\n qmatmul::register(registry);\n\n reduce::register(registry);\n\n scatter::register(registry);\n\n scan::register(registry);\n\n source::register(registry);\n\n}\n", "file_path": "nnef/src/ops/core.rs", "rank": 16, "score": 285717.1877204668 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"CategoryMapper\", category_mapper);\n\n}\n\n\n", "file_path": "onnx/src/ops/ml/category_mapper.rs", "rank": 17, "score": 285659.5707208144 }, { "content": "pub fn register_all_ops(reg: &mut OnnxOpRegister) {\n\n reg.insert(\"TreeEnsembleClassifier\", tree_classifier);\n\n}\n\n\n", "file_path": "onnx/src/ops/ml/tree_ensemble_classifier.rs", "rank": 18, "score": 282670.865800347 }, { "content": "pub fn perm_to_ops(input: &[usize]) -> TVec<AxisOp> {\n\n perm_to_atoms(input).into_iter().map(|pair| AxisOp::Move(pair.0, pair.1)).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_perm_to_cycles() {\n\n assert_eq!(perm_to_cycles(&[1, 2, 0]), tvec!(tvec!(0, 1, 2)));\n\n assert_eq!(perm_to_cycles(&[2, 0, 1]), tvec!(tvec!(0, 2, 1)));\n\n assert_eq!(perm_to_cycles(&[1, 2, 3, 0]), tvec!(tvec!(0, 1, 2, 3)));\n\n assert_eq!(perm_to_cycles(&[3, 0, 1, 2]), tvec!(tvec!(0, 3, 2, 1)));\n\n assert_eq!(perm_to_cycles(&[3, 1, 2, 0, 4]), tvec!(tvec!(0, 3)));\n\n }\n\n\n\n #[test]\n\n fn is_rotation() {\n\n assert_eq!(is_rotation_cycle(&[0, 1, 2]), Some((0, 2)));\n", "file_path": "core/src/ops/change_axes.rs", "rank": 19, "score": 282175.12150859955 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(TypeId::of::<TypedSource>(), external_dump);\n\n registry.register_primitive(\"tract_core_external\", &external_parameters(), external_load);\n\n}\n\n\n", "file_path": "nnef/src/ops/core/source.rs", "rank": 20, "score": 281789.7403035469 }, { "content": "pub fn register(registry: &mut Registry) {\n\n use crate::internal::*;\n\n\n\n registry.register_dumper(TypeId::of::<ScatterElements>(), ser_scatter_elements);\n\n registry.register_primitive(\n\n \"tract_core_scatter_elements\",\n\n &[\n\n TypeName::Scalar.tensor().named(\"input\"),\n\n TypeName::Scalar.tensor().named(\"indices\"),\n\n TypeName::Scalar.tensor().named(\"updates\"),\n\n TypeName::Integer.named(\"axis\"),\n\n ],\n\n de_scatter_elements,\n\n );\n\n\n\n registry.register_dumper(TypeId::of::<ScatterNd>(), ser_scatter_nd);\n\n registry.register_primitive(\n\n \"tract_core_scatter_nd\",\n\n &[\n\n TypeName::Scalar.tensor().named(\"input\"),\n\n TypeName::Scalar.tensor().named(\"indices\"),\n\n TypeName::Scalar.tensor().named(\"updates\"),\n\n ],\n\n de_scatter_nd,\n\n );\n\n}\n\n\n", "file_path": "nnef/src/ops/core/scatter.rs", "rank": 21, "score": 281789.7403035469 }, { 
"content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(\n\n TypeId::of::<tract_core::ops::matmul::mir_quant_unary::QMatMulUnary>(),\n\n qmatmul_unary_dump,\n\n );\n\n registry\n\n .register_dumper(TypeId::of::<tract_core::ops::matmul::mir_quant::QMatMul>(), qmatmul_dump);\n\n registry.register_primitive(\"tract_core_qmatmul\", &qmatmul_parameters(), qmatmul_load);\n\n}\n\n\n", "file_path": "nnef/src/ops/core/qmatmul.rs", "rank": 22, "score": 281789.7403035469 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(TypeId::of::<tract_core::ops::cnn::ConvUnary>(), qconv_unary_dump);\n\n registry.register_primitive(\"tract_core_qconv\", &qconv_parameters(), qconv_load);\n\n}\n\n\n", "file_path": "nnef/src/ops/core/qconv.rs", "rank": 23, "score": 281789.74030354683 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(TypeId::of::<ops::array::MultiBroadcastTo>(), ser_broadcast);\n\n registry.register_primitive(\n\n \"tract_core_broadcast\",\n\n &[TypeName::Scalar.tensor().named(\"input\"), TypeName::Integer.array().named(\"shape\")],\n\n de_broadcast,\n\n );\n\n}\n\n\n", "file_path": "nnef/src/ops/core/broadcast.rs", "rank": 24, "score": 281789.7403035469 }, { "content": "pub fn register(registry: &mut Registry) {\n\n macro_rules! gather_op_nnef {\n\n ($GatherOp:ty, $name:ident, $field_name:ident) => {\n\n mod $name {\n\n use crate::internal::*;\n\n\n\n pub fn ser_gather(\n\n ast: &mut IntoAst,\n\n node: &TypedNode,\n\n ) -> TractResult<Option<Arc<RValue>>> {\n\n let op = node.op().downcast_ref::<$GatherOp>().unwrap();\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n let indices = ast.mapping[&node.inputs[1]].clone();\n\n Ok(Some(invocation(\n\n concat!(\"tract_core_\", stringify!($name)),\n\n &[wire, indices],\n\n &[(stringify!($field_name), numeric(op.$field_name))],\n\n )))\n\n }\n\n\n", "file_path": "nnef/src/ops/core/gather.rs", "rank": 25, "score": 281789.74030354683 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(TypeId::of::<ops::nn::Reduce>(), ser_reduce);\n\n for red in &[\n\n \"tract_core_argmax_reduce_last\",\n\n \"tract_core_argmin_reduce_last\",\n\n \"tract_core_product_reduce\",\n\n ] {\n\n registry.register_primitive(\n\n red,\n\n &[TypeName::Scalar.tensor().named(\"input\"), TypeName::Integer.array().named(\"axes\")],\n\n de_reduce,\n\n );\n\n }\n\n}\n\n\n", "file_path": "nnef/src/ops/core/reduce.rs", "rank": 26, "score": 281789.7403035469 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(TypeId::of::<tract_core::ops::cast::Cast>(), cast_dump);\n\n registry.register_primitive(\"tract_core_cast\", &cast_parameters(), cast_load);\n\n}\n\n\n", "file_path": "nnef/src/ops/core/cast.rs", "rank": 27, "score": 281789.74030354683 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(TypeId::of::<ops::scan::Scan>(), ser_scan);\n\n registry.register_primitive(\n\n \"tract_core_scan\",\n\n &[\n\n TypeName::String.named(\"body\"),\n\n ast::TypeSpec::Tuple(vec![\n\n TypeName::String.spec(), // body param name\n\n TypeName::Scalar.tensor(), // input\n\n TypeName::Integer.spec(), // axis\n\n TypeName::Integer.spec(), // step\n\n ])\n\n .array()\n\n .named(\"scan\"),\n\n ast::TypeSpec::Tuple(vec![\n\n TypeName::String.spec(), // body param name\n\n TypeName::Scalar.tensor(), // input\n\n ])\n\n .array()\n\n .named(\"full\"),\n", "file_path": "nnef/src/ops/core/scan.rs", "rank": 
28, "score": 281789.7403035469 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(TypeId::of::<ops::Downsample>(), ser_downsample);\n\n registry.register_primitive(\n\n \"tract_core_downsample\",\n\n &[\n\n TypeName::Scalar.tensor().named(\"input\"),\n\n TypeName::Integer.named(\"axis\"),\n\n TypeName::Integer.named(\"stride\"),\n\n TypeName::Integer.named(\"modulo\").default(0),\n\n ],\n\n de_downsample,\n\n );\n\n}\n\n\n", "file_path": "nnef/src/ops/core/downsample.rs", "rank": 29, "score": 281789.74030354683 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_dumper(TypeId::of::<OneHot>(), one_hot_dump);\n\n registry.register_primitive(\"tract_core_one_hot\", &one_hot_parameters(), one_hot_load);\n\n}\n\n\n", "file_path": "nnef/src/ops/core/one_hot.rs", "rank": 30, "score": 278025.6534797398 }, { "content": "pub fn hash_f32<H: Hasher>(s: &f32, state: &mut H) {\n\n Hash::hash(&s.to_bits(), state)\n\n}\n\n\n", "file_path": "core/src/hash.rs", "rank": 31, "score": 275621.4639184499 }, { "content": "fn mat_vec(be: &mut Bencher, &(dt, m, k, n, cold): &(DatumType, usize, usize, usize, bool)) {\n\n assert_eq!(n, 1);\n\n let mm = tract_linalg::ops().mmm(dt, dt, dt, Some(m), Some(k), Some(n)).unwrap();\n\n let pa =\n\n Tensor::zero_aligned_dt(dt, &[mm.a_pack().len(k, m)], mm.a_pack().alignment()).unwrap();\n\n let pb = Tensor::zero_dt(dt, &[k, 1]).unwrap();\n\n unsafe {\n\n run(\n\n m,\n\n k,\n\n n,\n\n be,\n\n &*mm,\n\n mm.a_packed(dt.size_of(), k).wrap(&pa.view()),\n\n mm.b_packed(dt.size_of(), k).wrap(&pb.view()).unwrap(),\n\n cold,\n\n );\n\n }\n\n}\n", "file_path": "linalg/benches/utils.rs", "rank": 32, "score": 267301.76109783 }, { "content": "pub fn hash_opt_f32<H: Hasher>(s: &Option<f32>, state: &mut H) {\n\n Hash::hash(&s.is_some(), state);\n\n if let Some(s) = s {\n\n Hash::hash(&s.to_bits(), state)\n\n }\n\n}\n\n\n\nimpl Hash for Box<dyn TypedOp> {\n\n fn hash<H: std::hash::Hasher>(&self, state: &mut H) {\n\n std::hash::Hash::hash(&self.type_id(), state);\n\n self.dyn_hash(state)\n\n }\n\n}\n", "file_path": "core/src/hash.rs", "rank": 33, "score": 265739.8111989773 }, { "content": "fn mat_mat(be: &mut Bencher, params: &(DatumType, usize, usize, usize, bool)) {\n\n let (dt, m, k, n, _) = *params;\n\n let mm = tract_linalg::ops().mmm(dt, dt, dt, Some(m), Some(k), Some(n)).unwrap();\n\n mat_mat_with_mm(be, &*mm, params)\n\n}\n\n\n", "file_path": "linalg/benches/utils.rs", "rank": 34, "score": 255046.34734755813 }, { "content": "pub fn to_axis_ops(input_orig: &[TDim], output_spec: &[TDim]) -> TractResult<TVec<AxisOp>> {\n\n let final_output = compute_shape(input_orig, output_spec)?;\n\n let mut stack: TVec<AxisOp> = tvec!();\n\n 'top: loop {\n\n let current_input =\n\n stack.iter().try_fold(TVec::from(input_orig), |shape, op| -> TractResult<_> {\n\n let mut shape = shape.into();\n\n op.change_shape_array(&mut shape, false)?;\n\n Ok(shape)\n\n })?;\n\n if &current_input == &final_output {\n\n return Ok(stack);\n\n }\n\n if let Some(common) =\n\n current_input.iter().zip(final_output.iter()).position(|(a, b)| a != b)\n\n {\n\n if current_input[common].is_one() {\n\n stack.push(AxisOp::Rm(common));\n\n } else if final_output[common].is_one() {\n\n stack.push(AxisOp::Add(common));\n", "file_path": "hir/src/ops/array/reshape.rs", "rank": 35, "score": 254880.27178847784 }, { "content": "#[derive(Clone, Debug)]\n\nstruct LazyIm2col<T: Datum + Copy> {\n\n ptr: *const T,\n\n n: usize,\n\n n_byte_offsets: *const isize,\n\n 
k_byte_offsets: *const isize,\n\n}\n\n\n\nunsafe impl<T: Datum + Copy> Send for LazyIm2col<T> {}\n\nunsafe impl<T: Datum + Copy> Sync for LazyIm2col<T> {}\n\n\n\nimpl<T: Datum + Copy> LazyIm2col<T> {\n\n fn input_8n(&self, writer: &mut impl PackingWriter<T>, k_range: Range<isize>, n: isize) {\n\n unsafe {\n\n let o1 = *self.n_byte_offsets.offset(n);\n\n let o2 = *self.n_byte_offsets.offset(n + 1);\n\n let o3 = *self.n_byte_offsets.offset(n + 2);\n\n let o4 = *self.n_byte_offsets.offset(n + 3);\n\n let o5 = *self.n_byte_offsets.offset(n + 4);\n\n let o6 = *self.n_byte_offsets.offset(n + 5);\n\n let o7 = *self.n_byte_offsets.offset(n + 6);\n", "file_path": "core/src/ops/cnn/conv/lazy_im2col.rs", "rank": 36, "score": 254262.70126083412 }, { "content": "fn should_use_lazy(_input_shape: &DataShape, pool_spec: &PoolSpec, group: usize) -> bool {\n\n group == 1 && pool_spec.kernel_shape.iter().product::<usize>() > 5\n\n}\n\n\n\n#[allow(non_snake_case)]\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::ops::cnn::PaddingSpec;\n\n use DataFormat::*;\n\n\n\n #[test]\n\n fn onnx_basic_convinteger() {\n\n let op = ConvUnary {\n\n pool_spec: PoolSpec {\n\n data_format: NCHW,\n\n kernel_shape: tvec!(2, 2),\n\n padding: PaddingSpec::Valid,\n\n dilations: None,\n\n strides: None,\n", "file_path": "core/src/ops/cnn/conv/unary.rs", "rank": 37, "score": 250292.04560796262 }, { "content": "fn hash_lookup_table<H: std::hash::Hasher>(lut: &Box<dyn Lut>, h: &mut H) {\n\n Hash::hash_slice(lut.table(), h)\n\n}\n\n\n\n#[derive(Debug, Clone, Hash)]\n\npub struct Scale;\n\nimpl_dyn_hash!(Scale);\n\n\n\nimpl crate::ops::binary::BinMiniOp for Scale {\n\n fn name(&self) -> &'static str {\n\n \"Scale\"\n\n }\n\n\n\n fn result_datum_type(&self, a: DatumType, b: DatumType) -> TractResult<DatumType> {\n\n if a != f32::datum_type() {\n\n bail!(\"Scale left operand must be f32, got {:?}\", a);\n\n }\n\n Ok(b)\n\n }\n\n\n", "file_path": "core/src/ops/quant.rs", "rank": 38, "score": 249796.07272405736 }, { "content": "pub fn cast(to: DatumType) -> Cast {\n\n Cast { to }\n\n}\n\n\n\n#[derive(Debug, Clone, new, Hash)]\n\npub struct Cast {\n\n pub to: DatumType,\n\n}\n\n\n\nimpl_dyn_hash!(Cast);\n\n\n\nimpl Op for Cast {\n\n fn name(&self) -> Cow<str> {\n\n \"Cast\".into()\n\n }\n\n\n\n op_core!();\n\n op_as_typed_op!();\n\n}\n\n\n", "file_path": "core/src/ops/cast.rs", "rank": 39, "score": 248959.6879927433 }, { "content": "pub fn rules<'r, 'p: 'r, 's: 'r, DT: Fn(DatumType, DatumType) -> TractResult<DatumType> + 'p>(\n\n s: &mut Solver<'r>,\n\n inputs: &'p [TensorProxy],\n\n outputs: &'p [TensorProxy],\n\n dt: DT,\n\n) -> InferenceResult {\n\n check_input_arity(&inputs, 2)?;\n\n check_output_arity(&outputs, 1)?;\n\n\n\n s.with(&inputs[0].shape, move |s, a_shape| {\n\n s.with(&inputs[1].shape, move |s, b_shape| {\n\n if let Ok(Some(c_shape)) =\n\n crate::infer::helpers::infer_shape_broadcasting(&[&a_shape, &b_shape])\n\n {\n\n s.equals(&outputs[0].shape, c_shape)?;\n\n }\n\n Ok(())\n\n })\n\n })?;\n\n s.given_2(&inputs[0].datum_type, &inputs[1].datum_type, move |s, typa, typb| {\n\n s.equals(&outputs[0].datum_type, dt(typa, typb)?)\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "hir/src/ops/binary.rs", "rank": 40, "score": 247989.92033532093 }, { "content": "pub fn optional_outputs(pb: &pb::NodeProto) -> impl Iterator<Item = Option<usize>> + '_ {\n\n let mut real_input = 0;\n\n (0..).map(move |i| {\n\n if pb.output.get(i).filter(|s| !s.is_empty()).is_some() {\n\n real_input += 1;\n\n Some(real_input - 1)\n\n } else {\n\n 
None\n\n }\n\n })\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ParsingContext<'a> {\n\n pub onnx_operator_set_version: i64,\n\n pub framework: &'a Onnx,\n\n pub model: &'a pb::ModelProto,\n\n pub parent_graphs: Vec<&'a pb::GraphProto>,\n\n}\n\n\n", "file_path": "onnx/src/model.rs", "rank": 41, "score": 246996.02241008228 }, { "content": "pub fn optional_inputs(pb: &pb::NodeProto) -> impl Iterator<Item = Option<usize>> + '_ {\n\n let mut real_input = 0;\n\n (0..).map(move |i| {\n\n if pb.input.get(i).filter(|s| !s.is_empty()).is_some() {\n\n real_input += 1;\n\n Some(real_input - 1)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "onnx/src/model.rs", "rank": 42, "score": 246976.79842258123 }, { "content": "pub fn logical(b: bool) -> RValue {\n\n RValue::Literal(Literal::Logical(b))\n\n}\n\n\n", "file_path": "nnef/src/ser.rs", "rank": 43, "score": 246241.59066837427 }, { "content": "pub fn inference_wrap<O, R>(op: O, outputs: usize, rules: R) -> Box<dyn InferenceOp>\n\nwhere\n\n O: TypedOp,\n\n R: for<'r, 'p, 's> Fn(\n\n &'s dyn Op,\n\n &mut Solver<'r>,\n\n &'p [TensorProxy],\n\n &'p [TensorProxy],\n\n ) -> InferenceResult\n\n + Send\n\n + Sync\n\n + 'static,\n\n{\n\n expand(InferenceWrapper { typed_op: Box::new(op), rules: Arc::new(rules), outputs })\n\n}\n\n\n\n#[derive(Clone, new, Educe)]\n\n#[educe(Hash)]\n\npub struct InferenceWrapper {\n\n typed_op: Box<dyn TypedOp>,\n", "file_path": "hir/src/ops/expandable.rs", "rank": 44, "score": 245191.81874514994 }, { "content": "/// Sets the value at the given path (starting from a context).\n\npub fn set_path(context: &mut Context, path: &[isize], value: Wrapped) -> TractResult<()> {\n\n match path[0] {\n\n 0 => set_tensorfacts_path(&mut context.inputs, &path[1..], value),\n\n 1 => set_tensorfacts_path(&mut context.outputs, &path[1..], value),\n\n _ => bail!(\n\n \"The first component of path {:?} should be 0 (for the `inputs` \\\n\n set of facts) or 1 (for the `outputs` set of facts).\",\n\n path\n\n ),\n\n }\n\n}\n\n\n", "file_path": "hir/src/infer/rules/path.rs", "rank": 45, "score": 242969.51980594327 }, { "content": "pub fn output_shape<D: DimLike>(\n\n pool_spec: &PoolSpec,\n\n x_shape: &[D],\n\n adjustments: &[usize],\n\n) -> TractResult<TVec<D>> {\n\n let x_shape = pool_spec.data_format.shape(x_shape)?;\n\n let spatial_input_shape = x_shape.hw_dims();\n\n let spatial_output_details = pool_spec.padding.compute_for_deconv(\n\n &spatial_input_shape,\n\n &pool_spec.kernel_shape,\n\n &pool_spec.dilations(),\n\n &pool_spec.strides(),\n\n &adjustments,\n\n )?;\n\n let deconv_shape: TVec<D> =\n\n spatial_output_details.iter().map(|comp| comp.deconvoluted.clone()).collect();\n\n let co = pool_spec.output_channel_override.unwrap();\n\n let output_shape = pool_spec.data_format.from_n_c_hw(\n\n x_shape.n().cloned().unwrap_or(1.into()),\n\n co.into(),\n\n deconv_shape,\n\n )?;\n\n Ok(output_shape.shape.into())\n\n}\n\n\n", "file_path": "core/src/ops/cnn/deconv/mod.rs", "rank": 46, "score": 240825.43416335568 }, { "content": "pub fn multicast(builder: &mut ModelBuilder, inputs: &[OutletId]) -> TractResult<TVec<OutletId>> {\n\n let ranks = inputs\n\n .iter()\n\n .map(|&i| Ok(builder.model.outlet_fact(i)?.rank()))\n\n .collect::<TractResult<Vec<usize>>>()?;\n\n let max_rank = ranks.iter().copied().max().unwrap();\n\n (inputs.iter())\n\n .zip(ranks.iter())\n\n .map(|(&i, &r)| {\n\n (r..max_rank).try_fold(i, |w, n| Ok(builder.wire(AxisOp::Add(n), &[w])?[0]))\n\n })\n\n .collect()\n\n}\n", "file_path": "nnef/src/registry.rs", "rank": 47, 
"score": 240031.22262699105 }, { "content": "/// A trait for values produced by expressions.\n\npub trait Output: fmt::Debug + Clone + PartialEq {\n\n /// Wraps self in the Wrapped type.\n\n fn wrap(self) -> Wrapped {\n\n Self::into_wrapped(self)\n\n }\n\n\n\n /// Wraps the fact in the Wrapped type.\n\n fn into_wrapped(source: Self) -> Wrapped;\n\n\n\n /// Retrieves the fact from the Wrapped type.\n\n /// Panics if wrapped doesn't have the right constructor.\n\n fn from_wrapped(wrapped: Wrapped) -> TractResult<Self>;\n\n}\n\n\n\nmacro_rules! impl_output {\n\n ($type:ty, $constr:ident, $name:expr) => {\n\n impl Output for $type {\n\n fn into_wrapped(source: Self) -> Wrapped {\n\n Wrapped::$constr(source)\n\n }\n", "file_path": "hir/src/infer/rules/expr.rs", "rank": 48, "score": 239795.9555677958 }, { "content": "fn hash_mmm<H: std::hash::Hasher>(mmm: &Box<dyn MatMatMul>, state: &mut H) {\n\n mmm.type_id().hash(state)\n\n}\n\n\n\nimpl DynHash for LirMatMulUnary {\n\n fn dyn_hash(&self, hasher: &mut dyn std::hash::Hasher) {\n\n dyn_hash(&self, hasher)\n\n }\n\n}\n\n\n\nimpl Op for LirMatMulUnary {\n\n fn name(&self) -> Cow<str> {\n\n \"LirMatMulUnary\".into()\n\n }\n\n\n\n fn info(&self) -> TractResult<Vec<String>> {\n\n let mut infos = vec![format!(\n\n \"c_shape:{:?}, c_m_axis:{} c_n_axis:{} b_storage:{:?}\",\n\n self.c_fact, self.c_m_axis, self.c_n_axis, self.geometry,\n\n )];\n", "file_path": "core/src/ops/matmul/lir_unary.rs", "rank": 49, "score": 239160.38063868758 }, { "content": "pub fn register(registry: &mut Registry) {\n\n category_mapper::register(registry);\n\n tree_ensemble_classifier::register(registry);\n\n}\n", "file_path": "onnx-opl/src/ml/mod.rs", "rank": 50, "score": 239117.56725864456 }, { "content": "pub fn tensor0<A: Datum>(x: A) -> Tensor {\n\n Tensor::from(arr0(x))\n\n}\n\n\n", "file_path": "data/src/tensor/litteral.rs", "rank": 51, "score": 237541.91529374482 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_primitive(\n\n \"tract_onnx_ml_direct_lookup\",\n\n &parameters_direct_lookup(),\n\n load_direct_lookup,\n\n );\n\n registry.register_primitive(\n\n \"tract_onnx_ml_reverse_lookup\",\n\n &parameters_reverse_lookup(),\n\n load_reverse_lookup,\n\n );\n\n registry.register_dumper(TypeId::of::<DirectLookup>(), dump_direct_lookup);\n\n registry.register_dumper(TypeId::of::<ReverseLookup>(), dump_reverse_lookup);\n\n}\n\n\n\n#[derive(Clone, Debug, Hash)]\n\npub struct DirectLookup {\n\n values: Arc<Tensor>,\n\n fallback_value: Arc<Tensor>,\n\n}\n", "file_path": "onnx-opl/src/ml/category_mapper.rs", "rank": 52, "score": 236240.94635837845 }, { "content": "pub fn run_bench<T, F: FnMut() -> T>(mut f: F) -> f64 {\n\n let start = Instant::now();\n\n black_box(f());\n\n let once = start.elapsed();\n\n let evaled = if once < Duration::from_millis(1) {\n\n let start = Instant::now();\n\n for _ in 0..1000 {\n\n black_box(f());\n\n }\n\n start.elapsed().as_secs_f64() / 1000.\n\n } else {\n\n once.as_secs_f64()\n\n };\n\n let warmup = (0.3 / evaled) as usize;\n\n let iters = (0.3 / evaled) as usize;\n\n let chunks = 1000;\n\n let chunk = (iters / chunks).max(50);\n\n let chunks = (iters / chunk).max(50);\n\n let mut measures = vec![0.0; chunks];\n\n for _ in 0..warmup {\n", "file_path": "linalg/benches/nano.rs", "rank": 53, "score": 235498.40839279283 }, { "content": "pub fn lident(s: impl Into<String>) -> LValue {\n\n LValue::Identifier(s.into())\n\n}\n\n\n", "file_path": "nnef/src/ser.rs", "rank": 54, "score": 234487.35784398398 }, { "content": 
"pub fn ident(s: impl Into<String>) -> RValue {\n\n RValue::Identifier(s.into())\n\n}\n\n\n", "file_path": "nnef/src/ser.rs", "rank": 55, "score": 234487.35784398398 }, { "content": "pub fn string(s: impl Into<String>) -> RValue {\n\n RValue::Literal(Literal::String(s.into()))\n\n}\n\n\n", "file_path": "nnef/src/ser.rs", "rank": 56, "score": 234487.35784398398 }, { "content": "pub fn tensor1<A: Datum>(xs: &[A]) -> Tensor {\n\n Tensor::from(arr1(xs))\n\n}\n\n\n", "file_path": "data/src/tensor/litteral.rs", "rank": 57, "score": 234404.12127563945 }, { "content": "pub fn register(registry: &mut Registry) {\n\n registry.register_primitive(\"tract_onnx_ml_tree_ensemble_classifier\", &parameters(), load);\n\n registry.register_dumper(TypeId::of::<TreeEnsembleClassifier>(), dump);\n\n}\n\n\n", "file_path": "onnx-opl/src/ml/tree_ensemble_classifier.rs", "rank": 58, "score": 233481.53960792575 }, { "content": "fn use_masm() -> bool {\n\n env::var(\"CARGO_CFG_TARGET_ENV\") == Ok(\"msvc\".to_string()) && var(\"HOST\").contains(\"-windows-\")\n\n}\n\n\n", "file_path": "linalg/build.rs", "rank": 59, "score": 232940.41202975577 }, { "content": "pub fn packed_packed(c: &mut Criterion, name: &str, m: usize, k: usize, n: usize) {\n\n let mut group = c.benchmark_group(format!(\"{}/packed_packed\", name));\n\n group.throughput(Throughput::Elements((m * k * n) as u64));\n\n let id = format!(\"{}x{}x{}\", m, k, n);\n\n group.bench_with_input(BenchmarkId::new(\"f32/cold\", &id), &(F32, m, k, n, true), mat_mat);\n\n group.bench_with_input(BenchmarkId::new(\"f32/hot\", &id), &(F32, m, k, n, false), mat_mat);\n\n group.bench_with_input(BenchmarkId::new(\"i8/cold\", &id), &(I8, m, k, n, true), mat_mat);\n\n group.bench_with_input(BenchmarkId::new(\"i8/hot\", &id), &(I8, m, k, n, false), mat_mat);\n\n}\n\n\n", "file_path": "linalg/benches/utils.rs", "rank": 60, "score": 230968.95961546467 }, { "content": "pub fn packed_vec(c: &mut Criterion, name: &str, m: usize, k: usize, n: usize) {\n\n assert_eq!(n, 1);\n\n let mut group = c.benchmark_group(format!(\"{}/packed_vec\", name));\n\n group.throughput(Throughput::Elements((m * k * n) as u64));\n\n let id = format!(\"{}x{}x{}\", m, k, n);\n\n group.bench_with_input(BenchmarkId::new(\"f32/cold\", &id), &(F32, m, k, n, true), mat_vec);\n\n group.bench_with_input(BenchmarkId::new(\"f32/hot\", &id), &(F32, m, k, n, false), mat_vec);\n\n group.bench_with_input(BenchmarkId::new(\"i8/cold\", &id), &(I8, m, k, n, true), mat_vec);\n\n group.bench_with_input(BenchmarkId::new(\"i8/hot\", &id), &(I8, m, k, n, false), mat_vec);\n\n}\n\n\n", "file_path": "linalg/benches/utils.rs", "rank": 61, "score": 230968.95961546467 }, { "content": "pub fn rctensor0<A: Datum>(x: A) -> Arc<Tensor> {\n\n Arc::new(Tensor::from(arr0(x)))\n\n}\n\n\n", "file_path": "data/src/tensor/litteral.rs", "rank": 62, "score": 230617.06940273388 }, { "content": "#[inline(always)]\n\npub fn prefetch(start: *const u8, len: usize) {\n\n unsafe { armv7neon_prefetch(start, start.offset(len as isize)) }\n\n}\n\n\n\nMMMKernel!(i32, armv7neon_mmm_i32_8x4; 8, 4; 32, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(i32, armv7neon_mmm_i32_32x1; 32,1 ; 32, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_8x4_cortexa7; 8, 4; 4, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_8x4_cortexa9; 8, 4; 4, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_8x4_generic; 8, 4; 4, 4; 0, 0; prefetch, 
crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_8x6_cortexa7; 8, 6; 4, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_8x6_cortexa9; 8, 6; 4, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_8x6_generic; 8, 6; 4, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_32x1_cortexa7; 32, 1; 4, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_32x1_cortexa9; 32, 1; 4, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\nMMMKernel!(f32, armv7neon_mmm_f32_32x1_generic; 32, 1; 4, 4; 0, 0; prefetch, crate::arm32::has_neon());\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct SigmoidF32x4n;\n\n\n\nimpl ElementWiseKer<f32> for SigmoidF32x4n {\n", "file_path": "linalg/src/arm32/armv7neon.rs", "rank": 63, "score": 228673.33877906005 }, { "content": "pub fn no_prefetch(_ptr: *const u8, _len: usize) {}\n\n\n\nmacro_rules! MMMKernel {\n\n ($ti:ident, $func:ident; $mr: expr, $nr: expr; $alignment_bytes_packed_a: expr, $alignment_bytes_packed_b: expr; $end_padding_packed_a: expr, $end_padding_packed_b: expr ; $prefetch: ident, $cond: expr) => {\n\n paste! {\n\n mod [<sys_ $func>] {\n\n use crate::frame::mmm::*;\n\n extern_kernel!(fn $func(op: *const FusedKerSpec<$ti>) -> isize);\n\n }\n\n\n\n #[allow(non_camel_case_types)]\n\n #[derive(Copy, Clone, Debug, new)]\n\n pub struct $func;\n\n\n\n impl MatMatMulKer<$ti> for $func {\n\n #[inline(always)]\n\n fn name() -> &'static str {\n\n stringify!($func)\n\n }\n\n #[inline(always)]\n", "file_path": "linalg/src/frame/mmm.rs", "rank": 64, "score": 228673.33877906005 }, { "content": "/// Partial information about any value.\n\npub trait Factoid: fmt::Debug + Clone + PartialEq + Default + Hash {\n\n type Concrete: fmt::Debug;\n\n\n\n /// Tries to transform the fact into a concrete value.\n\n fn concretize(&self) -> Option<Self::Concrete>;\n\n\n\n /// Returns whether the value is fully determined.\n\n fn is_concrete(&self) -> bool {\n\n self.concretize().is_some()\n\n }\n\n\n\n /// Tries to unify the fact with another fact of the same type.\n\n fn unify(&self, other: &Self) -> TractResult<Self>;\n\n\n\n /// Tries to unify the fact with another fact of the same type and update\n\n /// self.\n\n ///\n\n /// Returns true if it actually changed something.\n\n fn unify_with(&mut self, other: &Self) -> TractResult<bool> {\n\n let new = self.unify(&other)?;\n", "file_path": "hir/src/infer/factoid.rs", "rank": 65, "score": 228472.09922200543 }, { "content": "pub fn rctensor1<A: Datum>(xs: &[A]) -> Arc<Tensor> {\n\n Arc::new(Tensor::from(arr1(xs)))\n\n}\n\n\n", "file_path": "data/src/tensor/litteral.rs", "rank": 66, "score": 227612.5626783779 }, { "content": "pub fn array(items: impl AsRef<[RValue]>) -> RValue {\n\n RValue::Array(items.as_ref().iter().cloned().collect())\n\n}\n\n\n", "file_path": "nnef/src/ser.rs", "rank": 67, "score": 225837.4247397657 }, { "content": "pub fn axis_op(\n\n ast: &mut IntoAst,\n\n node: &TypedNode,\n\n op: &ops::change_axes::AxisOp,\n\n) -> TractResult<Option<Arc<RValue>>> {\n\n let wire = ast.mapping[&node.inputs[0]].clone();\n\n let invoke = match op {\n\n AxisOp::Rm(axis) => invocation(\"squeeze\", &[wire], &[(\"axes\", ints(&[*axis]))]),\n\n AxisOp::Add(axis) => invocation(\"unsqueeze\", &[wire], &[(\"axes\", ints(&[*axis]))]),\n\n AxisOp::Move(from, to) => {\n\n let rank = node.outputs[0].fact.rank();\n\n let mut perm: TVec<usize> = (0..rank).collect();\n\n if from < to {\n\n perm[*from..(to + 
1)].rotate_left(1);\n\n } else {\n\n perm[*to..(from + 1)].rotate_right(1);\n\n }\n\n invocation(\"transpose\", &[wire], &[(\"axes\", ints(&*perm))])\n\n }\n\n AxisOp::Reshape(start, from, to) => invocation(\n", "file_path": "nnef/src/ops/nnef/ser.rs", "rank": 68, "score": 225781.3823541252 }, { "content": "#[inline]\n\npub fn scale_by<T: Datum + AsPrimitive<f32>>(b: T, a: f32) -> T\n\nwhere\n\n f32: AsPrimitive<T>,\n\n{\n\n let b = b.as_();\n\n (round_ties_to_even(b.abs() * a) * b.signum()).as_()\n\n}\n\n\n", "file_path": "data/src/datum.rs", "rank": 69, "score": 225282.52948493336 }, { "content": "pub fn tract_blaslike(crit: &mut BenchmarkGroup<WallTime>, m: usize, k: usize, n: usize) {\n\n use tract_linalg::frame::mmm::FusedSpec;\n\n let a = Tensor::zero_dt(DatumType::F32, &[m, k]).unwrap();\n\n let b = Tensor::zero_dt(DatumType::F32, &[k, n]).unwrap();\n\n let mut c = Tensor::zero_dt(DatumType::F32, &[m, n]).unwrap();\n\n\n\n unsafe {\n\n let mmm = tract_linalg::ops()\n\n .mmm(DatumType::F32, DatumType::F32, DatumType::F32, Some(m), Some(k), Some(n))\n\n .unwrap();\n\n let a_storage = mmm.a_packed(f32::datum_type().size_of(), k);\n\n let b_storage = mmm.b_packed(f32::datum_type().size_of(), k);\n\n let c_storage = mmm.c_view(1, 0);\n\n\n\n let mut pa = Tensor::zero_aligned_dt(\n\n DatumType::F32,\n\n &[mmm.a_pack().len(k, m)],\n\n mmm.a_pack().alignment(),\n\n )\n\n .unwrap();\n", "file_path": "linalg/matmul-bench/benches/matmul.rs", "rank": 70, "score": 224888.20931668027 }, { "content": "pub fn tensor2<A: Datum, T>(xs: &[T]) -> Tensor\n\nwhere\n\n T: FixedInitializer<Elem = A> + Clone,\n\n{\n\n Tensor::from(arr2(xs))\n\n}\n\n\n", "file_path": "data/src/tensor/litteral.rs", "rank": 71, "score": 224733.02720083942 }, { "content": "pub fn _if(\n\n ctx: &ParsingContext,\n\n node: &NodeProto,\n\n) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {\n\n let graph_then = node.get_attr(\"then_branch\")?;\n\n let graph_else = node.get_attr(\"else_branch\")?;\n\n let ParseResult { model: then_body, unresolved_inputs: unresolved_inputs_then, .. } =\n\n ctx.parse_graph(graph_then)?;\n\n let ParseResult { model: else_body, unresolved_inputs: unresolved_inputs_else, .. 
} =\n\n ctx.parse_graph(graph_else)?;\n\n let unresolved_inputs: Vec<String> = unresolved_inputs_then\n\n .iter()\n\n .chain(unresolved_inputs_else.iter())\n\n .sorted()\n\n .unique()\n\n .cloned()\n\n .collect();\n\n let then_input_mapping = unresolved_inputs_then\n\n .iter()\n\n .map(|i| unresolved_inputs.iter().position(|s| s == i).unwrap() + 1)\n", "file_path": "onnx/src/ops/logic.rs", "rank": 72, "score": 224596.9076693912 }, { "content": "#[inline]\n\npub fn commute(op: &dyn BinMiniOp, t: &Arc<Tensor>) -> Option<UnaryOp> {\n\n Some(UnaryOp::new(dyn_clone::clone_box(op), t.clone()))\n\n}\n", "file_path": "core/src/ops/binary.rs", "rank": 73, "score": 223814.81764694437 }, { "content": "pub fn one_hot_dump(ast: &mut IntoAst, node: &TypedNode) -> TractResult<Option<Arc<RValue>>> {\n\n let one_hot = node.op_as::<OneHot>().unwrap();\n\n let input = ast.mapping[&node.inputs[0]].clone();\n\n Ok(Some(invocation(\n\n \"tract_core_one_hot\",\n\n &[input],\n\n &[\n\n (\"axis\", numeric(one_hot.axis)),\n\n (\"dim\", numeric(one_hot.dim)),\n\n (\"value_off\", numeric(one_hot.off.cast_to_scalar::<f32>()?)),\n\n (\"value_on\", numeric(one_hot.on.cast_to_scalar::<f32>()?)),\n\n ],\n\n )))\n\n}\n\n\n", "file_path": "nnef/src/ops/core/one_hot.rs", "rank": 74, "score": 223071.01400000893 }, { "content": "#[allow(unused_variables, unused_mut)]\n\npub fn cblas(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n #[cfg(feature = \"blas\")]\n\n unsafe {\n\n cblas::sgemm(\n\n cblas::Layout::RowMajor,\n\n cblas::Transpose::None,\n\n cblas::Transpose::None,\n\n m as _,\n\n n as _,\n\n k as _,\n\n 1.0,\n\n &a,\n\n k as _,\n\n &b,\n\n n as _,\n\n 0.0,\n\n c,\n\n n as _,\n\n )\n\n }\n\n}\n\n\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 75, "score": 222028.84007963171 }, { "content": "pub fn matrixmultiply(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n unsafe {\n\n matrixmultiply::sgemm(\n\n m,\n\n k,\n\n n,\n\n 1.0,\n\n a.as_ptr(),\n\n k as _,\n\n 1,\n\n b.as_ptr(),\n\n n as _,\n\n 1,\n\n 0.0,\n\n c.as_mut_ptr(),\n\n n as _,\n\n 1,\n\n )\n\n }\n\n}\n\n\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 76, "score": 222023.32486022403 }, { "content": "pub fn naive(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n for row in 0..m {\n\n for col in 0..n {\n\n let mut sum = 0.0;\n\n for i in 0..k {\n\n sum += a[row * k + i] * b[i * n + col];\n\n }\n\n c[row * n + col] = sum;\n\n }\n\n }\n\n}\n\n\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 77, "score": 222023.32486022403 }, { "content": "pub fn tract(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n use tract_data::internal::*;\n\n use tract_linalg::frame::mmm::FusedSpec;\n\n unsafe {\n\n let mmm = tract_linalg::ops()\n\n .mmm(DatumType::F32, DatumType::F32, DatumType::F32, Some(m), Some(k), Some(n))\n\n .unwrap();\n\n let a_storage = mmm.a_packed(f32::datum_type().size_of(), k);\n\n let b_storage = mmm.b_packed(f32::datum_type().size_of(), k);\n\n let c_storage = mmm.c_view(0, 1);\n\n\n\n let a = Tensor::from_shape(&[m, k], a).unwrap();\n\n let b = Tensor::from_shape(&[k, n], b).unwrap();\n\n let mut tc = Tensor::uninitialized_dt(f32::datum_type(), &[m, n]).unwrap();\n\n\n\n let mut pa = Tensor::uninitialized_aligned_dt(\n\n DatumType::F32,\n\n &[mmm.a_pack().len(k, m)],\n\n mmm.a_pack().alignment(),\n\n )\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 78, "score": 222023.32486022403 }, { "content": "pub fn ops() -> &'static Ops 
{\n\n &*OPS\n\n}\n\n\n\nuse num_traits::*;\n\nuse std::fmt::Debug;\n\nuse std::ops::*;\n\n\n", "file_path": "linalg/src/lib.rs", "rank": 79, "score": 221510.87147316517 }, { "content": "pub fn param(s: impl Into<String>, spec: TypeSpec) -> Parameter {\n\n Parameter { id: s.into(), spec, lit: None }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Result_ {\n\n pub id: String,\n\n pub spec: TypeSpec,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Assignment {\n\n pub left: LValue,\n\n pub right: RValue,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum LValue {\n\n Identifier(String),\n\n Array(Vec<LValue>),\n", "file_path": "nnef/src/ast.rs", "rank": 80, "score": 221321.4428462188 }, { "content": "pub fn resize(\n\n _ctx: &ParsingContext,\n\n node: &NodeProto,\n\n) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {\n\n let coord_transformer =\n\n match node.get_attr_opt(\"coordinate_transformation_mode\")?.unwrap_or(\"half_pixel\") {\n\n \"align_corners\" => CoordTransformer::AlignCorners,\n\n \"half_pixel\" => CoordTransformer::HalfPixel,\n\n \"asymmetric\" => CoordTransformer::Asymmetric,\n\n s => todo!(\"coordinate_transformation_mode: {}\", s),\n\n };\n\n let interpolator = match node.get_attr(\"mode\")? {\n\n \"linear\" => Interpolator::Linear,\n\n \"nearest\" => Interpolator::Nearest,\n\n s => todo!(\"mode: {}\", s),\n\n };\n\n let nearest = match node.get_attr_opt(\"nearest_mode\")?.unwrap_or(\"round_prefer_floor\") {\n\n \"floor\" => Nearest::Floor,\n\n \"round_prefer_floor\" => Nearest::RoundPreferFloor,\n\n s => todo!(\"nearest_mode: {}\", s),\n", "file_path": "onnx/src/ops/resize.rs", "rank": 81, "score": 221106.94859232916 }, { "content": "pub fn cast(\n\n _ctx: &ParsingContext,\n\n node: &NodeProto,\n\n) -> TractResult<(Box<dyn InferenceOp>, Vec<String>)> {\n\n let mut to = node.get_attr::<DatumType>(\"to\")?;\n\n if to == i64::datum_type() {\n\n to = TDim::datum_type();\n\n }\n\n Ok((Box::new(ElementWiseOp(Box::new(Cast::new(to)))), vec![]))\n\n}\n\n\n\n#[derive(Debug, Clone, new, Hash)]\n\npub struct Cast {\n\n to: DatumType,\n\n}\n\n\n\nimpl_dyn_hash!(Cast);\n\n\n\nimpl ElementWiseMiniOp for Cast {\n\n fn name(&self) -> String {\n", "file_path": "onnx/src/ops/cast.rs", "rank": 82, "score": 221106.94859232916 }, { "content": "pub fn pull_downsample_over_axis_op(\n\n model: &TypedModel,\n\n axis_node: &TypedNode,\n\n axis_op: &AxisOp,\n\n down_node: &TypedNode,\n\n down_op: &Downsample,\n\n) -> TractResult<Option<TypedModelPatch>> {\n\n let mut patch = TypedModelPatch::default();\n\n let tap = patch.tap_model(model, axis_node.inputs[0])?;\n\n let mut new_down = down_op.clone();\n\n new_down.axis =\n\n axis_op.recip().transform_axis(down_op.axis).ok_or_else(|| format_err!(\"Invalid axis\"))?;\n\n let wire = patch.wire_node(&*down_node.name, new_down, [tap].as_ref())?;\n\n let wire = patch.wire_node(&*axis_node.name, axis_op.clone(), &*wire)?[0];\n\n patch.shunt_outside(model, OutletId::new(down_node.id, 0), wire)?;\n\n return Ok(Some(patch));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "core/src/ops/downsample/array.rs", "rank": 83, "score": 220207.0368255885 }, { "content": "pub fn tile_2x2(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n for row in 0..m / 2 {\n\n for col in 0..n / 2 {\n\n let mut sum00 = 0.0;\n\n let mut sum01 = 0.0;\n\n let mut sum10 = 0.0;\n\n let mut sum11 = 0.0;\n\n for i in 0..k {\n\n let a0 = a[2 * row * k + i];\n\n let a1 = a[(2 * row + 1) * k + i];\n\n let b0 = b[i * n + 2 * 
col];\n\n let b1 = b[i * n + 2 * col + 1];\n\n sum00 += a0 * b0;\n\n sum01 += a0 * b1;\n\n sum10 += a1 * b0;\n\n sum11 += a1 * b1;\n\n }\n\n c[2 * row * n + 2 * col] = sum00;\n\n c[2 * row * n + 2 * col + 1] = sum01;\n\n c[(2 * row + 1) * n + 2 * col] = sum10;\n\n c[(2 * row + 1) * n + 2 * col + 1] = sum11;\n\n }\n\n }\n\n}\n\n\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 84, "score": 219575.57989211095 }, { "content": "pub fn tile_4x4(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n for row in 0..m / 4 {\n\n for col in 0..n / 4 {\n\n let mut sum00 = 0.0;\n\n let mut sum01 = 0.0;\n\n let mut sum02 = 0.0;\n\n let mut sum03 = 0.0;\n\n let mut sum10 = 0.0;\n\n let mut sum11 = 0.0;\n\n let mut sum12 = 0.0;\n\n let mut sum13 = 0.0;\n\n let mut sum20 = 0.0;\n\n let mut sum21 = 0.0;\n\n let mut sum22 = 0.0;\n\n let mut sum23 = 0.0;\n\n let mut sum30 = 0.0;\n\n let mut sum31 = 0.0;\n\n let mut sum32 = 0.0;\n\n let mut sum33 = 0.0;\n\n for i in 0..k {\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 85, "score": 219575.57989211095 }, { "content": "pub fn ctile_8x8(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n unsafe { c_tile_8x8(m, k, n, a.as_ptr(), b.as_ptr(), c.as_mut_ptr()) }\n\n}\n\n\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 86, "score": 219575.57989211095 }, { "content": "pub fn ctile_2x2(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n unsafe { c_tile_2x2(m, k, n, a.as_ptr(), b.as_ptr(), c.as_mut_ptr()) }\n\n}\n\n\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 87, "score": 219575.57989211095 }, { "content": "pub fn tile_8x8(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n for row in 0..m / 8 {\n\n for col in 0..n / 8 {\n\n let mut sum00 = 0.0;\n\n let mut sum01 = 0.0;\n\n let mut sum02 = 0.0;\n\n let mut sum03 = 0.0;\n\n let mut sum04 = 0.0;\n\n let mut sum05 = 0.0;\n\n let mut sum06 = 0.0;\n\n let mut sum07 = 0.0;\n\n let mut sum10 = 0.0;\n\n let mut sum11 = 0.0;\n\n let mut sum12 = 0.0;\n\n let mut sum13 = 0.0;\n\n let mut sum14 = 0.0;\n\n let mut sum15 = 0.0;\n\n let mut sum16 = 0.0;\n\n let mut sum17 = 0.0;\n\n let mut sum20 = 0.0;\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 88, "score": 219575.57989211095 }, { "content": "pub fn ctile_1x1(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n unsafe { c_tile_1x1(m, k, n, a.as_ptr(), b.as_ptr(), c.as_mut_ptr()) }\n\n}\n\n\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 89, "score": 219575.57989211095 }, { "content": "pub fn ctile_4x4(m: usize, k: usize, n: usize, a: &[f32], b: &[f32], c: &mut [f32]) {\n\n unsafe { c_tile_4x4(m, k, n, a.as_ptr(), b.as_ptr(), c.as_mut_ptr()) }\n\n}\n\n\n", "file_path": "linalg/matmul-bench/src/lib.rs", "rank": 90, "score": 219575.57989211095 }, { "content": "pub fn rctensor2<A: Datum, T>(xs: &[T]) -> Arc<Tensor>\n\nwhere\n\n T: FixedInitializer<Elem = A> + Clone,\n\n{\n\n Arc::new(Tensor::from(arr2(xs)))\n\n}\n\n\n", "file_path": "data/src/tensor/litteral.rs", "rank": 91, "score": 218480.30618834664 }, { "content": "pub fn tensor3<A: Datum, T, U>(xs: &[U]) -> Tensor\n\nwhere\n\n U: FixedInitializer<Elem = T> + Clone,\n\n T: FixedInitializer<Elem = A> + Clone,\n\n{\n\n Tensor::from(arr3(xs))\n\n}\n\n\n", "file_path": "data/src/tensor/litteral.rs", "rank": 92, "score": 218480.30618834664 }, { "content": "// fragment tile<?>( input: tensor<?>, repeats: integer[] ) -> ( output: tensor<?> );\n\npub fn tile(\n\n builder: 
&mut ModelBuilder,\n\n invocation: &ResolvedInvocation,\n\n) -> TractResult<TVec<OutletId>> {\n\n let multipliers: TVec<usize> = invocation.named_arg_as(builder, \"repeats\")?;\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n Ok(builder.wire(ops::array::Tile { multipliers }, &wire)?)\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 93, "score": 217786.26495426433 }, { "content": "// fragment squeeze<?>( input: tensor<?>, axes: integer[] ) -> ( output: tensor<?> );\n\npub fn squeeze(\n\n builder: &mut ModelBuilder,\n\n invocation: &ResolvedInvocation,\n\n) -> TractResult<TVec<OutletId>> {\n\n let axes: TVec<usize> = invocation.named_arg_as(builder, \"axes\")?;\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n axes.iter().sorted().rev().try_fold(wire, |wire, &axis| {\n\n Ok(builder.wire(ops::change_axes::AxisOp::Rm(axis as usize), &wire)?)\n\n })\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 94, "score": 217786.26495426433 }, { "content": "// fragment unsqueeze<?>( input: tensor<?>, axes: integer[] ) -> ( output: tensor<?> );\n\npub fn unsqueeze(\n\n builder: &mut ModelBuilder,\n\n invocation: &ResolvedInvocation,\n\n) -> TractResult<TVec<OutletId>> {\n\n let axes: TVec<usize> = invocation.named_arg_as(builder, \"axes\")?;\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n axes.iter().sorted().try_fold(wire, |wire, &axis| {\n\n Ok(builder.wire(ops::change_axes::AxisOp::Add(axis as usize), &wire)?)\n\n })\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 95, "score": 217786.26495426433 }, { "content": "// fragment transpose<?>( input: tensor<?>, axes: integer[] ) -> ( output: tensor<?> );\n\npub fn transpose(\n\n builder: &mut ModelBuilder,\n\n invocation: &ResolvedInvocation,\n\n) -> TractResult<TVec<OutletId>> {\n\n let axes: TVec<usize> = invocation.named_arg_as(builder, \"axes\")?;\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n ops::change_axes::perm_to_ops(&axes)\n\n .into_iter()\n\n .try_fold(wire, |wire, mov| Ok(builder.wire(mov, &wire)?))\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 96, "score": 217786.26495426433 }, { "content": "// fragment slice<?>( input: tensor<?>, axes: integer[], begin: integer[], end: integer[] ) -> ( output: tensor<?> );\n\npub fn slice(\n\n builder: &mut ModelBuilder,\n\n invocation: &ResolvedInvocation,\n\n) -> TractResult<TVec<OutletId>> {\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n let input_fact = builder.model.outlet_fact(wire[0])?.clone();\n\n let axes: TVec<usize> = invocation.named_arg_as(builder, \"axes\")?;\n\n let begins: TVec<i64> = invocation.named_arg_as(builder, \"begin\")?;\n\n let begins = begins.into_iter().enumerate().map(|(ix, b)| -> TDim {\n\n if b < 0 {\n\n input_fact.shape[ix].clone() + b\n\n } else {\n\n b.into()\n\n }\n\n });\n\n let ends: TVec<i64> = invocation.named_arg_as(builder, \"end\")?;\n\n let ends = ends.into_iter().enumerate().map(|(ix, b)| -> TDim {\n\n if b < 0 {\n\n input_fact.shape[ix].clone() + b\n\n } else {\n\n b.into()\n\n }\n\n });\n\n izip!(axes, begins, ends).try_fold(wire, |wire, (axis, start, end)| {\n\n builder.wire(tract_core::ops::array::Slice { axis, start, end }, &wire)\n\n })\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 97, "score": 217785.92714308674 }, { "content": "// fragment reshape<?>( input: tensor<?>, shape: integer[], axis_start: integer = 0, axis_count: integer = -1 )\n\n// -> ( output: 
tensor<?> );\n\npub fn reshape(\n\n builder: &mut ModelBuilder,\n\n invocation: &ResolvedInvocation,\n\n) -> TractResult<TVec<OutletId>> {\n\n let input = invocation.named_arg_as(builder, \"input\")?;\n\n let input_shape = builder.model.outlet_fact(input)?.shape.to_tvec();\n\n let start: usize = invocation.named_arg_as(builder, \"axis_start\")?;\n\n let count: i64 = invocation.named_arg_as(builder, \"axis_count\")?;\n\n let count = if count == -1 { input_shape.len() - start } else { count as usize };\n\n let shape: TVec<TDim> = invocation.named_arg_as(builder, \"shape\")?;\n\n\n\n let mut replacement = shape.clone();\n\n for i in 0..replacement.len() {\n\n if replacement[i] == 0.to_dim() {\n\n replacement[i] = input_shape[i + start].clone();\n\n }\n\n }\n\n if let Some(pos) = replacement.iter().position(|d| *d == (-1).to_dim()) {\n\n let product: TDim = replacement.iter().filter(|d| **d != (-1).to_dim()).product();\n\n let product_input: TDim = input_shape[start..][..count].iter().product();\n\n replacement[pos] = product_input.maybe_div(&product)?.0;\n\n }\n\n\n\n let op = AxisOp::Reshape(start, input_shape[start..][..count].into(), replacement);\n\n builder.wire(op, &[input])\n\n}\n\n\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 98, "score": 217785.76594173268 }, { "content": "// fragment pad( input: tensor<scalar>, padding: (integer, integer)[], border: string = 'constant', value: scalar = 0.0 ) -> ( output: tensor<scalar> );\n\npub fn pad(\n\n builder: &mut ModelBuilder,\n\n invocation: &ResolvedInvocation,\n\n) -> TractResult<TVec<OutletId>> {\n\n use tract_core::ops::array::{Pad, PadMode};\n\n let wire = tvec!(invocation.named_arg_as(builder, \"input\")?);\n\n let padding: TVec<TVec<usize>> = invocation.named_arg_as(builder, \"padding\")?;\n\n let padding: Vec<(usize, usize)> = padding.iter().map(|a| (a[0], a[1])).collect();\n\n let value: Tensor = tensor0(invocation.named_arg_as::<f32>(builder, \"value\")?);\n\n let border: String = invocation.named_arg_as(builder, \"border\")?;\n\n let mode = match &*border {\n\n \"constant\" => PadMode::Constant(value.into_arc_tensor()),\n\n \"replicated\" => PadMode::Edge,\n\n \"reflect\" => PadMode::Reflect,\n\n _ => bail!(\"unsupported padding mode {}\", border),\n\n };\n\n builder.wire(Pad { pads: padding, mode }, &wire)\n\n}\n\n\n\n/*\n", "file_path": "nnef/src/ops/nnef/deser.rs", "rank": 99, "score": 217785.60956884827 } ]
Rust
cglue-gen/src/util.rs
ko1N/cglue
040074eed961d4783a4c9c8a1c084441c0dcb301
use proc_macro2::TokenStream;
use proc_macro_crate::{crate_name, FoundCrate};
use quote::{format_ident, quote};
use syn::parse::{Parse, ParseStream};
use syn::punctuated::Punctuated;
use syn::token::Colon2;
use syn::token::Comma;
use syn::*;

pub fn crate_path() -> TokenStream {
    let (col, ident) = crate_path_ident();
    quote!(#col #ident)
}

pub fn crate_path_ident() -> (Option<Colon2>, Ident) {
    match crate_path_fixed() {
        Some(FoundCrate::Itself) => (None, format_ident!("crate")),
        Some(FoundCrate::Name(name)) => (Some(Default::default()), format_ident!("{}", name)),
        None => (None, format_ident!("cglue")),
    }
}

pub fn crate_path_fixed() -> Option<FoundCrate> {
    let found_crate = crate_name("cglue").ok()?;

    let ret = match found_crate {
        FoundCrate::Itself => {
            let has_doc_env = std::env::vars().any(|(k, _)| {
                k == "UNSTABLE_RUSTDOC_TEST_LINE" || k == "UNSTABLE_RUSTDOC_TEST_PATH"
            });

            if has_doc_env {
                FoundCrate::Name("cglue".to_string())
            } else {
                FoundCrate::Itself
            }
        }
        x => x,
    };

    Some(ret)
}

pub fn parse_maybe_braced<T: Parse>(input: ParseStream) -> Result<Vec<T>> {
    let mut ret = vec![];

    if let Ok(braces) = syn::group::parse_braces(&input) {
        let content = braces.content;

        while !content.is_empty() {
            let val = content.parse()?;
            ret.push(val);

            if !content.is_empty() {
                content.parse::<Token![,]>()?;
            }
        }
    } else {
        ret.push(input.parse()?)
    }

    Ok(ret)
}

pub type GenericsOut = Option<Punctuated<GenericArgument, Comma>>;

pub fn split_path_ident(in_path: &Path) -> Result<(Path, Ident, GenericsOut)> {
    let mut path = Path {
        leading_colon: in_path.leading_colon,
        segments: Default::default(),
    };

    let mut ident = None;
    let mut generics = None;

    for part in in_path.segments.pairs() {
        match part {
            punctuated::Pair::Punctuated(p, _) => {
                path.segments.push_value(p.clone());
                path.segments.push_punct(Default::default());
            }
            punctuated::Pair::End(p) => {
                if let PathArguments::AngleBracketed(arg) = &p.arguments {
                    generics = Some(arg.args.clone());
                }
                ident = Some(p.ident.clone());
            }
        }
    }

    let ident =
        ident.ok_or_else(|| Error::new(proc_macro2::Span::call_site(), "Ident not found!"))?;

    Ok((path, ident, generics))
}

pub fn is_null_pointer_optimizable(ty: &Type, custom_types: &[&'static str]) -> bool {
    match ty {
        Type::Reference(_) => true,
        Type::BareFn(_) => true,
        Type::Path(path) => {
            let last = path.path.segments.last();

            last.map(|l| {
                let s = &l.ident.to_string();
                ["NonNull", "Box"].contains(&s.as_str())
                    || custom_types.contains(&s.as_str())
                    || (s.starts_with("NonZero")
                        && [
                            "I8", "U8", "I16", "U16", "I32", "U32", "I64", "U64", "I128", "U128",
                        ]
                        .contains(&s.split_at("NonZero".len()).1))
            }) == Some(true)
        }
        _ => false,
    }
}
use proc_macro2::TokenStream; use proc_macro_crate::{crate_name, FoundCrate}; use quote::{format_ident, quote}; use syn::parse::{Parse, ParseStream}; use syn::punctuated::Punctuated; use syn::token::Colon2; use syn::token::Comma; use syn::*; pub fn crate_path() -> TokenStream { let (col, ident) = crate_path_ident(); quote!(#col #ident) } pub fn crate_path_ident() -> (Option<Colon2>, Ident) { match crate_path_fixed() { Some(FoundCrate::Itself) => (None, format_ident!("crate")), Some(FoundCrate::Name(name)) => (Some(Default::default()), format_ident!("{}", name)), None => (None, format_ident!("cglue")), } } pub fn crate_path_fixed() -> Option<FoundCrate> { let found_crate = crate_name("cglue").ok()?; let ret = match found_crate { FoundCrate::Itself => { let has_doc_env = std::env::vars().any(|(k, _)| { k == "UNSTABLE_RUSTDOC_TEST_LINE" || k == "UNSTABLE_RUSTDOC_TEST_PATH" });
} x => x, }; Some(ret) } pub fn parse_maybe_braced<T: Parse>(input: ParseStream) -> Result<Vec<T>> { let mut ret = vec![]; if let Ok(braces) = syn::group::parse_braces(&input) { let content = braces.content; while !content.is_empty() { let val = content.parse()?; ret.push(val); if !content.is_empty() { content.parse::<Token![,]>()?; } } } else { ret.push(input.parse()?) } Ok(ret) } pub type GenericsOut = Option<Punctuated<GenericArgument, Comma>>; pub fn split_path_ident(in_path: &Path) -> Result<(Path, Ident, GenericsOut)> { let mut path = Path { leading_colon: in_path.leading_colon, segments: Default::default(), }; let mut ident = None; let mut generics = None; for part in in_path.segments.pairs() { match part { punctuated::Pair::Punctuated(p, _) => { path.segments.push_value(p.clone()); path.segments.push_punct(Default::default()); } punctuated::Pair::End(p) => { if let PathArguments::AngleBracketed(arg) = &p.arguments { generics = Some(arg.args.clone()); } ident = Some(p.ident.clone()); } } } let ident = ident.ok_or_else(|| Error::new(proc_macro2::Span::call_site(), "Ident not found!"))?; Ok((path, ident, generics)) } pub fn is_null_pointer_optimizable(ty: &Type, custom_types: &[&'static str]) -> bool { match ty { Type::Reference(_) => true, Type::BareFn(_) => true, Type::Path(path) => { let last = path.path.segments.last(); last.map(|l| { let s = &l.ident.to_string(); ["NonNull", "Box"].contains(&s.as_str()) || custom_types.contains(&s.as_str()) || (s.starts_with("NonZero") && [ "I8", "U8", "I16", "U16", "I32", "U32", "I64", "U64", "I128", "U128", ] .contains(&s.split_at("NonZero".len()).1)) }) == Some(true) } _ => false, } }
if has_doc_env { FoundCrate::Name("cglue".to_string()) } else { FoundCrate::Itself }
if_condition
[ { "content": "pub fn gen_trait(mut tr: ItemTrait, ext_name: Option<&Ident>) -> TokenStream {\n\n // Path to trait group import.\n\n let crate_path = crate::util::crate_path();\n\n let trg_path: TokenStream = quote!(#crate_path::trait_group);\n\n\n\n // Need to preserve the same visibility as the trait itself.\n\n let vis = tr.vis.to_token_stream();\n\n\n\n let trait_name = tr.ident.clone();\n\n let trait_name = &trait_name;\n\n\n\n let trait_impl_name = ext_name.unwrap_or(trait_name);\n\n\n\n let c_void = quote!(::core::ffi::c_void);\n\n\n\n // Additional identifiers\n\n let vtbl_ident = format_ident!(\"{}Vtbl\", trait_name);\n\n let ret_tmp_ident = format_ident!(\"{}RetTmp\", trait_name);\n\n let ret_tmp_ident_phantom = format_ident!(\"{}RetTmpPhantom\", trait_name);\n\n let opaque_vtbl_ident = format_ident!(\"{}OpaqueVtbl\", trait_name);\n", "file_path": "cglue-gen/src/traits.rs", "rank": 0, "score": 189893.82914537337 }, { "content": "/// Returns the absolute export path if ident is in exports, and path is empty.\n\npub fn prelude_remap_with_ident(path: Path, ident: &Ident) -> Path {\n\n if !path.segments.is_empty() {\n\n path\n\n } else if let Some(path) = get_exports().get(&ident) {\n\n path.clone()\n\n } else {\n\n path\n\n }\n\n}\n\n\n", "file_path": "cglue-gen/src/ext/mod.rs", "rank": 1, "score": 177988.15267881227 }, { "content": "#[proc_macro]\n\npub fn cglue_builtin_ext_forward(_: TokenStream) -> TokenStream {\n\n cglue_gen::ext::impl_ext_forward().into()\n\n}\n\n\n\n// Marker macros for wrapping\n\n\n\n/// Mark the trait or function to use `IntResult`.\n\n///\n\n/// This flag has an effect for functions that return `Result<T, E>`, and\n\n/// is valid when `E` implements `IntResult`. Using this attribute results\n\n/// in more efficient code generation.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 4, "score": 164732.8698506354 }, { "content": "#[proc_macro]\n\npub fn cglue_builtin_ext_traits(_: TokenStream) -> TokenStream {\n\n cglue_gen::ext::impl_store().into()\n\n}\n\n\n\n/// Generate forward trait implementation for Fwd.\n\n///\n\n/// This is useful for using references of trait objects as generic parameters.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 5, "score": 164732.8698506354 }, { "content": "pub fn get_exports() -> HashMap<Ident, Path> {\n\n let mut exports = HashMap::new();\n\n\n\n let mut ext_path: Path = parse2(quote!(::ext)).unwrap();\n\n ext_path.segments.push_punct(Default::default());\n\n\n\n core::get_exports(&ext_path, &mut exports);\n\n\n\n exports\n\n}\n\n\n", "file_path": "cglue-gen/src/ext/mod.rs", "rank": 6, "score": 164432.8382575774 }, { "content": "#[proc_macro_attribute]\n\npub fn wrap_with(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Specify return type conversion with a closure.\n\n///\n\n/// # Arguments\n\n///\n\n/// A closure that accepts original return value and outputs the defined type.\n\n///\n\n/// If the return type is a reference to the associated type, `ret_tmp` value is available for use\n\n/// to write the intermediate value into.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 8, "score": 159513.00291289744 }, { "content": "#[proc_macro_attribute]\n\npub fn wrap_with_group(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Wrap the associated type with a CGlue trait group reference.\n\n///\n\n/// # SAFETY WARNING\n\n///\n\n///\n", "file_path": "cglue-macro/src/lib.rs", "rank": 9, "score": 158067.18294071069 }, { "content": "#[proc_macro_attribute]\n\npub fn 
no_int_result(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Skip reimplementing this function.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 10, "score": 158067.18294071069 }, { "content": "#[proc_macro_attribute]\n\npub fn skip_func(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Wrap the associated type with a custom type.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 11, "score": 158067.18294071069 }, { "content": "#[proc_macro_attribute]\n\npub fn int_result(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Exclude a single function from using `IntResult`.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 12, "score": 158067.18294071069 }, { "content": "#[proc_macro_attribute]\n\npub fn wrap_with_obj(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Wrap the associated type with a CGlue trait object reference.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 13, "score": 158067.18294071069 }, { "content": "#[proc_macro_attribute]\n\npub fn cglue_forward(_: TokenStream, input: TokenStream) -> TokenStream {\n\n let tr = parse_macro_input!(input as ItemTrait);\n\n gen_forward(tr, None).into()\n\n}\n\n\n\n/// Generate forward trait implementation for Fwd.\n\n///\n\n/// This is useful for using references of trait objects as generic parameters.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 14, "score": 158067.18294071069 }, { "content": "#[proc_macro_attribute]\n\npub fn return_wrap(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Wrap the associated type with a CGlue trait object.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 15, "score": 158067.18294071069 }, { "content": "#[proc_macro_attribute]\n\npub fn wrap_with_group_mut(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n", "file_path": "cglue-macro/src/lib.rs", "rank": 16, "score": 156688.52817058898 }, { "content": "#[proc_macro_attribute]\n\npub fn wrap_with_group_ref(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Wrap the associated type with a CGlue trait group mutable reference.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 17, "score": 156688.52817058895 }, { "content": "#[proc_macro_attribute]\n\npub fn wrap_with_obj_mut(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Wrap the associated type with a CGlue trait group.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 18, "score": 156688.52817058895 }, { "content": "#[proc_macro_attribute]\n\npub fn wrap_with_obj_ref(_: TokenStream, input: TokenStream) -> TokenStream {\n\n input\n\n}\n\n\n\n/// Wrap the associated type with a CGlue trait object mutable reference.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 19, "score": 156688.52817058898 }, { "content": "pub fn get_store() -> HashMap<(Path, Ident), ItemTrait> {\n\n let mut token_list = vec![];\n\n\n\n let mut ext_path: Path = parse2(quote!(::ext)).unwrap();\n\n ext_path.segments.push_punct(Default::default());\n\n\n\n core::get_impl(&ext_path, &mut token_list);\n\n\n\n let mut parsed_traits = HashMap::new();\n\n\n\n for (path, body) in token_list {\n\n let traits = Parser::parse2(parse_traits, body).expect(\"Failed to parse traits\");\n\n\n\n for tr in traits {\n\n parsed_traits.insert((path.clone(), tr.ident.clone()), tr);\n\n }\n\n }\n\n\n\n parsed_traits\n\n}\n\n\n", "file_path": "cglue-gen/src/ext/mod.rs", "rank": 20, "score": 156051.94206256408 }, { 
"content": "// TODO: Add dynamic setting of Send / Sync\n\npub fn cd_bounds() -> TokenStream {\n\n quote!('static + Clone + Send + Sync)\n\n}\n\n\n", "file_path": "cglue-gen/src/traits.rs", "rank": 21, "score": 145803.64209286874 }, { "content": "pub fn ctx_bound() -> TokenStream {\n\n let crate_path = crate::util::crate_path();\n\n quote!(#crate_path::trait_group::ContextRef<Context = CGlueC> + )\n\n}\n\n\n", "file_path": "cglue-gen/src/traits.rs", "rank": 22, "score": 145803.64209286874 }, { "content": "pub fn cglue_c_opaque_bound() -> TokenStream {\n\n let crate_path = crate::util::crate_path();\n\n quote!(CGlueC::OpaqueTarget: #crate_path::trait_group::Opaquable,)\n\n}\n\n\n", "file_path": "cglue-gen/src/traits.rs", "rank": 23, "score": 144033.3513272027 }, { "content": "pub fn cd_opaque_bound() -> TokenStream {\n\n let crate_path = crate::util::crate_path();\n\n quote!(#crate_path::trait_group::Opaquable<OpaqueTarget = CGlueD>)\n\n}\n\n\n", "file_path": "cglue-gen/src/traits.rs", "rank": 24, "score": 144033.3513272027 }, { "content": "/// Implement the external trait store.\n\npub fn impl_store() -> TokenStream {\n\n impl_inner(\n\n |subpath, name| quote!(pub use #subpath #name;),\n\n |_, _| quote!(#[cglue_trait_ext]),\n\n |exports, out| {\n\n // Re-export everything\n\n for (k, v) in exports.into_iter() {\n\n let subpath = subpath_to_tokens(&v, 1);\n\n\n\n for ident in [\n\n \"\",\n\n \"Ext\",\n\n \"Vtbl\",\n\n \"RetTmp\",\n\n \"OpaqueVtbl\",\n\n \"Any\",\n\n \"Box\",\n\n \"CtxBox\",\n\n \"NoCtxBox\",\n\n \"ArcBox\",\n", "file_path": "cglue-gen/src/ext/mod.rs", "rank": 25, "score": 144033.3513272027 }, { "content": "#[proc_macro]\n\npub fn into(args: TokenStream) -> TokenStream {\n\n let cast = parse_macro_input!(args as TraitCastGroup);\n\n cast.cast_group(CastType::Into).into()\n\n}\n\n\n\n/// Check if the group can be cast to the specified traits.\n\n///\n\n/// The syntax is similar to a cast expression, but uses `impl` keyword:\n\n///\n\n/// ```ignore\n\n/// check!(obj impl Trait1 + Trait2 + Trait3);\n\n/// ```\n\n///\n\n/// The result of `check!` will be a boolean value.\n\n///\n\n/// This macro accepts either:\n\n///\n\n/// 1. A list of optional traits, without any mandatory traits.\n\n///\n\n/// or\n\n///\n\n/// 2. 
A list of optional traits, with every mandatory trait.\n\n///\n\n/// In either case a successfully cast object will still implement the mandatory traits.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 26, "score": 143335.53282978825 }, { "content": "fn ident_path(ident: Ident) -> Type {\n\n let mut path = Path {\n\n leading_colon: None,\n\n segments: Punctuated::new(),\n\n };\n\n\n\n path.segments.push_value(PathSegment {\n\n ident,\n\n arguments: Default::default(),\n\n });\n\n\n\n Type::Path(TypePath { qself: None, path })\n\n}\n\n\n", "file_path": "cglue-gen/src/generics.rs", "rank": 27, "score": 142939.91134932678 }, { "content": "pub fn impl_ext_forward() -> TokenStream {\n\n impl_inner(\n\n |_, _| quote!(),\n\n |p, _| quote!(#[cglue_forward_ext(::#p)]),\n\n |_, _| {},\n\n )\n\n}\n\n\n", "file_path": "cglue-gen/src/ext/mod.rs", "rank": 28, "score": 142353.72230973956 }, { "content": "#[proc_macro]\n\npub fn cast(args: TokenStream) -> TokenStream {\n\n let cast = parse_macro_input!(args as TraitCastGroup);\n\n cast.cast_group(CastType::Cast).into()\n\n}\n\n\n\n/// Checked cast to a list of optional traits.\n\n///\n\n/// The syntax is similar to a cast expression, but uses `impl` keyword:\n\n///\n\n/// ```ignore\n\n/// as_ref!(obj impl Trait1 + Trait2 + Trait3);\n\n/// ```\n\n///\n\n/// `as_ref!` is non-final, meaning once the reference is dropped, the original group object can be\n\n/// used mutably.\n\n///\n\n/// This macro accepts either:\n\n///\n\n/// 1. A list of optional traits, without any mandatory traits.\n\n///\n\n/// or\n\n///\n\n/// 2. A list of optional traits, with every mandatory trait.\n\n///\n\n/// In either case a successfully cast object will still implement the mandatory traits.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 29, "score": 141655.90381232512 }, { "content": "#[proc_macro]\n\npub fn as_mut(args: TokenStream) -> TokenStream {\n\n let cast = parse_macro_input!(args as TraitCastGroup);\n\n cast.cast_group(CastType::AsMut).into()\n\n}\n\n\n\n/// Checked cast to a list of optional traits.\n\n///\n\n/// The syntax is similar to a cast expression, but uses `impl` keyword:\n\n///\n\n/// ```ignore\n\n/// into!(obj impl Trait1 + Trait2 + Trait3);\n\n/// ```\n\n///\n\n/// `into!` is final. After invoking this conversion it is not possible to retrieve the original\n\n/// object.\n\n///\n\n/// This macro accepts either:\n\n///\n\n/// 1. A list of optional traits, without any mandatory traits.\n\n///\n\n/// or\n\n///\n\n/// 2. A list of optional traits, with every mandatory trait.\n\n///\n\n/// In either case a successfully cast object will still implement the mandatory traits.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 30, "score": 141655.90381232512 }, { "content": "#[proc_macro]\n\npub fn as_ref(args: TokenStream) -> TokenStream {\n\n let cast = parse_macro_input!(args as TraitCastGroup);\n\n cast.cast_group(CastType::AsRef).into()\n\n}\n\n\n\n/// Checked cast to a list of optional traits.\n\n///\n\n/// The syntax is similar to a cast expression, but uses `impl` keyword:\n\n///\n\n/// ```ignore\n\n/// as_mut!(obj impl Trait1 + Trait2 + Trait3);\n\n/// ```\n\n///\n\n/// `as_mut!` is non-final, meaning once the reference is dropped, the original group object can be\n\n/// used.\n\n///\n\n/// This macro accepts either:\n\n///\n\n/// 1. A list of optional traits, without any mandatory traits.\n\n///\n\n/// or\n\n///\n\n/// 2. 
A list of optional traits, with every mandatory trait.\n\n///\n\n/// In either case a successfully cast object will still implement the mandatory traits.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 31, "score": 141655.90381232512 }, { "content": "#[proc_macro]\n\npub fn check(args: TokenStream) -> TokenStream {\n\n let cast = parse_macro_input!(args as TraitCastGroup);\n\n cast.cast_group(CastType::OnlyCheck).into()\n\n}\n\n\n\n/// Implement builtin external traits.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 32, "score": 141655.90381232512 }, { "content": "#[proc_macro]\n\npub fn group_obj(args: TokenStream) -> TokenStream {\n\n let crate_path = cglue_gen::util::crate_path();\n\n\n\n let GenericCastType {\n\n ident,\n\n target:\n\n GenericType {\n\n path,\n\n target,\n\n generics,\n\n ..\n\n },\n\n } = parse_macro_input!(args as GenericCastType);\n\n\n\n let path = if let Ok(ident) = parse2::<Ident>(target.clone()) {\n\n ext_abs_remap(prelude_remap_with_ident(path, &ident))\n\n } else {\n\n path\n\n };\n\n\n", "file_path": "cglue-macro/src/lib.rs", "rank": 33, "score": 140060.1458502185 }, { "content": "#[proc_macro]\n\npub fn trait_obj(args: TokenStream) -> TokenStream {\n\n let crate_path = cglue_gen::util::crate_path();\n\n\n\n let GenericCastType {\n\n ident,\n\n target:\n\n GenericType {\n\n path,\n\n target,\n\n generics,\n\n ..\n\n },\n\n } = parse_macro_input!(args as GenericCastType);\n\n\n\n let path = if let Ok(ident) = parse2::<Ident>(target.clone()) {\n\n ext_abs_remap(prelude_remap_with_ident(path, &ident))\n\n } else {\n\n path\n\n };\n\n\n", "file_path": "cglue-macro/src/lib.rs", "rank": 34, "score": 140060.1458502185 }, { "content": "pub fn get_exports(parent_path: &Path, exports: &mut HashMap<Ident, Path>) {\n\n let cur_path = super::super::join_paths(parent_path, format_ident!(\"convert\"));\n\n exports.insert(format_ident!(\"AsRef\"), cur_path.clone());\n\n exports.insert(format_ident!(\"AsMut\"), cur_path);\n\n}\n", "file_path": "cglue-gen/src/ext/core/convert.rs", "rank": 35, "score": 138795.31675292735 }, { "content": "pub fn get_exports(_parent_path: &Path, _exports: &mut HashMap<Ident, Path>) {\n\n //let cur_path = super::super::join_paths(parent_path, format_ident!(\"fmt\"));\n\n //exports.insert(format_ident!(\"Debug\"), cur_path);\n\n}\n", "file_path": "cglue-gen/src/ext/core/fmt.rs", "rank": 36, "score": 138795.31675292735 }, { "content": "pub fn get_exports(parent_path: &Path, exports: &mut HashMap<Ident, Path>) {\n\n let cur_path = super::super::join_paths(parent_path, format_ident!(\"clone\"));\n\n exports.insert(format_ident!(\"Clone\"), cur_path);\n\n}\n", "file_path": "cglue-gen/src/ext/core/clone.rs", "rank": 37, "score": 138795.31675292735 }, { "content": "pub fn get_exports(parent_path: &Path, exports: &mut HashMap<Ident, Path>) {\n\n let cur_path = super::join_paths(parent_path, format_ident!(\"core\"));\n\n clone::get_exports(&cur_path, exports);\n\n fmt::get_exports(&cur_path, exports);\n\n convert::get_exports(&cur_path, exports);\n\n}\n", "file_path": "cglue-gen/src/ext/core/mod.rs", "rank": 38, "score": 138795.31675292735 }, { "content": "#[proc_macro]\n\npub fn cglue_trait_group(args: TokenStream) -> TokenStream {\n\n let args = parse_macro_input!(args as TraitGroup);\n\n args.create_group().into()\n\n}\n\n\n\n/// Implement a CGlue group for a specific type.\n\n///\n\n/// # Arguments\n\n///\n\n/// 1. The name of the type to implement the group for.\n\n///\n\n/// 2. The name of the group to implement.\n\n///\n\n/// 3. 
Optional traits that this object contains. Either a single trait, or a braced list of\n\n/// traits.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 39, "score": 138542.1299075751 }, { "content": "#[proc_macro]\n\npub fn cglue_impl_group(args: TokenStream) -> TokenStream {\n\n let args = parse_macro_input!(args as TraitGroupImpl);\n\n args.implement_group().into()\n\n}\n\n\n\n/// Convert into a CGlue trait group.\n\n///\n\n/// The syntax is the same as a cast expression:\n\n///\n\n/// ```ignore\n\n/// group_obj!(variable as GroupName)\n\n/// ```\n\n///\n\n/// It is possible to pass both owned objects and references.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 40, "score": 138542.1299075751 }, { "content": "#[proc_macro_attribute]\n\npub fn cglue_trait(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n let tr = parse_macro_input!(input as ItemTrait);\n\n\n\n let trait_def = cglue_gen::traits::gen_trait(tr, None);\n\n\n\n trait_def.into()\n\n}\n\n\n\n/// Make an external trait CGlue compatible.\n\n///\n\n/// Invoking this macro will change the name of the trait to be prefixed with `Ext`,\n\n/// and it will act as a wrapper trait for the underlying trait.\n\n///\n\n/// This is very useful when third-party crates are needed to be CGlue compatible.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 41, "score": 133255.49108159693 }, { "content": "#[proc_macro_attribute]\n\npub fn cglue_trait_ext(_args: TokenStream, input: TokenStream) -> TokenStream {\n\n let tr = parse_macro_input!(input as ItemTrait);\n\n\n\n let ext_ident = format_ident!(\"{}Ext\", tr.ident);\n\n\n\n let trait_def = cglue_gen::traits::gen_trait(tr, Some(&ext_ident));\n\n\n\n trait_def.into()\n\n}\n\n\n\n/// Convert into a CGlue compatible object.\n\n///\n\n/// The syntax is the same as a cast expression:\n\n///\n\n/// ```ignore\n\n/// trait_obj!(variable as TraitName)\n\n/// ```\n\n///\n\n/// It is possible to pass both owned objects and references.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 42, "score": 131939.42754113066 }, { "content": "#[proc_macro_attribute]\n\npub fn cglue_forward_ext(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let path = parse_macro_input!(args as proc_macro2::TokenStream);\n\n let tr = parse_macro_input!(input as ItemTrait);\n\n gen_forward(tr, Some(path)).into()\n\n}\n\n\n\n/// Implement [macro@cglue_forward_ext] for all builtin external traits.\n", "file_path": "cglue-macro/src/lib.rs", "rank": 43, "score": 131939.42754113066 }, { "content": "fn ty_ident(ty: &Type) -> Ident {\n\n format_ident!(\"{}\", ty.to_token_stream().to_string())\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ParsedGenerics {\n\n /// Lifetime declarations on the left side of the type/trait.\n\n ///\n\n /// This may include any bounds it contains, for instance: `'a: 'b,`.\n\n pub life_declare: Punctuated<LifetimeDef, Comma>,\n\n /// Declarations \"using\" the lifetimes i.e. has bounds stripped.\n\n ///\n\n /// For instance: `'a: 'b,` becomes just `'a,`.\n\n pub life_use: Punctuated<Lifetime, Comma>,\n\n /// Type declarations on the left side of the type/trait.\n\n ///\n\n /// This may include any trait bounds it contains, for instance: `T: Clone,`.\n\n pub gen_declare: Punctuated<TypeParam, Comma>,\n\n /// Declarations that \"use\" the traits i.e. 
has bounds stripped.\n\n ///\n", "file_path": "cglue-gen/src/generics.rs", "rank": 44, "score": 129386.7125763055 }, { "content": "fn join_paths(path: &Path, ident: Ident) -> Path {\n\n let mut ret = path.clone();\n\n\n\n if !ret.segments.empty_or_trailing() {\n\n ret.segments.push_punct(Default::default());\n\n }\n\n\n\n ret.segments.push_value(PathSegment {\n\n ident,\n\n arguments: Default::default(),\n\n });\n\n\n\n ret.segments.push_punct(Default::default());\n\n\n\n ret\n\n}\n", "file_path": "cglue-gen/src/ext/mod.rs", "rank": 45, "score": 127904.67713545854 }, { "content": "pub fn parse_trait(\n\n tr: &ItemTrait,\n\n crate_path: &TokenStream,\n\n mut process_item: impl FnMut(\n\n &TraitItemType,\n\n &Ident,\n\n &ParsedGenerics,\n\n &mut TokenStream,\n\n &mut BTreeMap<Ident, WrappedType>,\n\n &TokenStream,\n\n ),\n\n) -> (Vec<ParsedFunc>, ParsedGenerics, TokenStream) {\n\n let mut funcs = vec![];\n\n let generics = ParsedGenerics::from(&tr.generics);\n\n let mut trait_type_defs = TokenStream::new();\n\n let mut types = BTreeMap::new();\n\n\n\n let trait_name = &tr.ident;\n\n\n\n types.insert(\n", "file_path": "cglue-gen/src/traits.rs", "rank": 46, "score": 124358.39853205543 }, { "content": "pub fn process_item(\n\n ty: &TraitItemType,\n\n trait_name: &Ident,\n\n generics: &ParsedGenerics,\n\n trait_type_defs: &mut TokenStream,\n\n types: &mut BTreeMap<Ident, WrappedType>,\n\n crate_path: &TokenStream,\n\n) {\n\n let c_void = quote!(::core::ffi::c_void);\n\n\n\n let static_lifetime = Lifetime {\n\n apostrophe: proc_macro2::Span::call_site(),\n\n ident: format_ident!(\"static\"),\n\n };\n\n\n\n let cglue_a_lifetime = Lifetime {\n\n apostrophe: proc_macro2::Span::call_site(),\n\n ident: format_ident!(\"cglue_a\"),\n\n };\n\n\n", "file_path": "cglue-gen/src/traits.rs", "rank": 47, "score": 124358.39853205543 }, { "content": "pub fn gen_forward(tr: ItemTrait, ext_path: Option<TokenStream>) -> TokenStream {\n\n let crate_path = crate::util::crate_path();\n\n\n\n let mut types = BTreeMap::new();\n\n\n\n types.insert(\n\n format_ident!(\"Self\"),\n\n WrappedType {\n\n ty: parse2(quote!(Self)).unwrap(),\n\n ty_static: None,\n\n return_conv: None,\n\n lifetime_bound: None,\n\n lifetime_type_bound: None,\n\n other_bounds: None,\n\n other_bounds_simple: None,\n\n impl_return_conv: None,\n\n inject_ret_tmp: false,\n\n unbounded_hrtb: false,\n\n needs_ctx: false,\n\n },\n", "file_path": "cglue-gen/src/forward.rs", "rank": 48, "score": 122994.77911493622 }, { "content": "pub fn get_impl(parent_path: &Path, out: &mut Vec<(Path, TokenStream)>) {\n\n let cur_path = super::super::join_paths(parent_path, format_ident!(\"convert\"));\n\n\n\n out.push((\n\n cur_path,\n\n quote! {\n", "file_path": "cglue-gen/src/ext/core/convert.rs", "rank": 50, "score": 111424.75881921258 }, { "content": "pub fn get_impl(_parent_path: &Path, _out: &mut Vec<(Path, TokenStream)>) {\n\n //let cur_path = super::super::join_paths(parent_path, format_ident!(\"fmt\"));\n\n\n\n /*out.push((\n\n cur_path,\n\n quote! 
{\n", "file_path": "cglue-gen/src/ext/core/fmt.rs", "rank": 51, "score": 111424.75881921258 }, { "content": "pub fn get_impl(parent_path: &Path, out: &mut Vec<(Path, TokenStream)>) {\n\n let cur_path = super::join_paths(parent_path, format_ident!(\"core\"));\n\n clone::get_impl(&cur_path, out);\n\n fmt::get_impl(&cur_path, out);\n\n convert::get_impl(&cur_path, out);\n\n}\n\n\n", "file_path": "cglue-gen/src/ext/core/mod.rs", "rank": 52, "score": 111424.75881921258 }, { "content": "pub fn get_impl(parent_path: &Path, out: &mut Vec<(Path, TokenStream)>) {\n\n let cur_path = super::super::join_paths(parent_path, format_ident!(\"clone\"));\n\n\n\n out.push((\n\n cur_path,\n\n quote! {\n", "file_path": "cglue-gen/src/ext/core/clone.rs", "rank": 53, "score": 111424.75881921258 }, { "content": "/// Remaps all Ident paths that are in the export list to become ::ext::Ident\n\npub fn prelude_remap(path: Path) -> Path {\n\n if let Some(ident) = path.get_ident().cloned() {\n\n if let Some(path) = get_exports().get(&ident) {\n\n let mut new_path = path.clone();\n\n\n\n new_path.segments.push(PathSegment {\n\n ident,\n\n arguments: Default::default(),\n\n });\n\n\n\n new_path\n\n } else {\n\n path\n\n }\n\n } else {\n\n path\n\n }\n\n}\n\n\n", "file_path": "cglue-gen/src/ext/mod.rs", "rank": 55, "score": 107687.35885119761 }, { "content": "/// Remaps all ::ext:: paths to become ::cglue::ext:: paths.\n\npub fn ext_abs_remap(path: Path) -> Path {\n\n let mut iter = path.segments.iter();\n\n if let (Some(_), Some(seg)) = (path.leading_colon, iter.next()) {\n\n if seg.ident == \"ext\" {\n\n let (leading_colon, ident) = crate::util::crate_path_ident();\n\n\n\n let mut new_path = Path {\n\n leading_colon,\n\n segments: Default::default(),\n\n };\n\n\n\n new_path.segments.push_value(PathSegment {\n\n ident,\n\n arguments: Default::default(),\n\n });\n\n\n\n new_path.segments.push_punct(Default::default());\n\n\n\n std::mem::drop(iter);\n\n\n", "file_path": "cglue-gen/src/ext/mod.rs", "rank": 56, "score": 106086.63286967529 }, { "content": "#[test]\n\nfn use_debug() {\n\n let sa = SA {};\n\n let obj = trait_obj!(sa as Debug);\n\n impl_debug(&obj);\n\n\n\n println!(\"{:?}\", obj);\n\n\n\n assert_eq!(\"SA\", &format!(\"{:?}\", obj));\n\n}\n\n\n", "file_path": "cglue/src/tests/ext/debug.rs", "rank": 57, "score": 100107.66632372825 }, { "content": "#[test]\n\nfn use_subthing() {\n\n let mut sa = SA {};\n\n let val = FwdMut(&mut sa);\n\n\n\n let mut plug = Plug { val };\n\n\n\n let mut obj = trait_obj!(&mut plug as AsSubThing);\n\n obj.get_ta();\n\n}\n\n\n", "file_path": "cglue/src/tests/simple/hrtb.rs", "rank": 58, "score": 100107.66632372825 }, { "content": "#[test]\n\nfn use_clone() {\n\n let sa = SA {};\n\n let obj = trait_obj!(sa as Clone);\n\n impl_clone(&obj)\n\n}\n\n\n", "file_path": "cglue/src/tests/ext/clone.rs", "rank": 59, "score": 100107.66632372825 }, { "content": "#[test]\n\nfn use_plugin() {\n\n let sa = SA {};\n\n\n\n let mut obj = trait_obj!(sa as PluginInner);\n\n\n\n let printer = obj.get_plug();\n\n\n\n printer.do_thing();\n\n}\n\n\n\n/*#[test]\n", "file_path": "cglue/src/tests/simple/hrtb.rs", "rank": 60, "score": 100107.66632372825 }, { "content": "#[test]\n\nfn use_as_ref() {\n\n let sa = SA {};\n\n let obj = trait_obj!(sa as ::ext::core::convert::AsRef);\n\n impl_as_ref(&obj)\n\n}\n\n\n", "file_path": "cglue/src/tests/ext/as_ref.rs", "rank": 61, "score": 100107.66632372825 }, { "content": "#[test]\n\nfn use_dothings() {\n\n let sa = SA {};\n\n let wrapped = CtxBox::from((sa, 
CArc::from(()).into_opt()));\n\n assert_eq!(wrapped.dt_1(), 55);\n\n}\n\n\n", "file_path": "cglue/src/tests/arc/mod.rs", "rank": 62, "score": 100107.66632372825 }, { "content": "#[test]\n\nfn use_consuming() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as TOnlyConsuming);\n\n\n\n assert_eq!(obj.toc_1(), 57);\n\n}\n\n\n", "file_path": "cglue/src/tests/simple/consuming.rs", "rank": 63, "score": 100107.66632372825 }, { "content": "#[test]\n\nfn use_lifetime() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenWithLifetime);\n\n\n\n println!(\"{}\", obj.gwl_1());\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/param.rs", "rank": 64, "score": 100107.66632372825 }, { "content": "#[test]\n\nfn use_getter_obj() {\n\n let sa = SA {};\n\n\n\n let arc = std::sync::Arc::from(());\n\n\n\n assert_eq!(Arc::strong_count(&arc), 1);\n\n\n\n let opt_arc = COptArc::from(Some(CArc::<()>::from(arc.clone())));\n\n\n\n assert_eq!(Arc::strong_count(&arc), 2);\n\n\n\n let wrapped = CtxBox::from((sa, opt_arc));\n\n\n\n let getter: DoerGetterArcBox = trait_obj!(wrapped as DoerGetter);\n\n\n\n assert_eq!(Arc::strong_count(&arc), 2);\n\n\n\n let doer = getter.dget_1();\n\n\n\n assert_eq!(Arc::strong_count(&arc), 3);\n", "file_path": "cglue/src/tests/arc/mod.rs", "rank": 65, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_group_explicit() {\n\n let sa = SA {};\n\n\n\n let obj = group_obj!(sa as GenericGroup<usize>);\n\n\n\n println!(\"Val: {}\", obj.gt_1());\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/groups.rs", "rank": 66, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_gen_infer() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenericTrait);\n\n\n\n println!(\"{}\", obj.gt_1());\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/param.rs", "rank": 67, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_group() {\n\n let ga = GA::<usize>::default();\n\n let group = group_obj!(ga as GenGroup);\n\n assert!(cast!(group impl TA).is_some());\n\n\n\n let ga = GA::<u64>::default();\n\n let group = group_obj!(ga as GenGroup);\n\n assert!(cast!(group impl TA).is_none());\n\n}\n", "file_path": "cglue/src/tests/generics/generic_structs.rs", "rank": 68, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_group_consuming() {\n\n let sa = SA {};\n\n\n\n let obj = group_obj!(sa as ConsumerGroup);\n\n\n\n let obj = cast!(obj impl TMixedConsuming).unwrap();\n\n\n\n assert_eq!(obj.tmc_2(), 42);\n\n assert_eq!(obj.tmc_1(), 43);\n\n}\n", "file_path": "cglue/src/tests/simple/consuming.rs", "rank": 69, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_lifetime_explicit_t() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenWithLifetime<usize>);\n\n\n\n println!(\"{}\", obj.gwl_1());\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/param.rs", "rank": 70, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_clone_obj() {\n\n let sa = SA {};\n\n\n\n let arc = std::sync::Arc::from(());\n\n\n\n assert_eq!(Arc::strong_count(&arc), 1);\n\n\n\n let opt_arc = CArc::<()>::from(arc.clone()).into_opt();\n\n\n\n assert_eq!(Arc::strong_count(&arc), 2);\n\n\n\n let wrapped = CtxBox::from((sa, opt_arc));\n\n\n\n let obj = trait_obj!(wrapped as Clone);\n\n\n\n assert_eq!(Arc::strong_count(&arc), 2);\n\n\n\n let cloned = obj.clone();\n\n\n\n assert_eq!(Arc::strong_count(&arc), 3);\n\n\n\n std::mem::drop(cloned);\n\n\n\n assert_eq!(Arc::strong_count(&arc), 2);\n\n\n\n std::mem::drop(obj);\n\n\n\n assert_eq!(Arc::strong_count(&arc), 
1);\n\n}\n", "file_path": "cglue/src/tests/arc/mod.rs", "rank": 71, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_as_ref_group() {\n\n let sa = SA {};\n\n let obj = group_obj!(sa as MaybeAsRef);\n\n let obj = as_ref!(obj impl AsRef).unwrap();\n\n impl_as_ref(obj)\n\n}\n\n\n", "file_path": "cglue/src/tests/ext/as_ref.rs", "rank": 72, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_ta() {\n\n let ga = GA::default();\n\n\n\n let obj = trait_obj!(ga as TA);\n\n\n\n assert_eq!(obj.ta_1(), 0);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/generic_structs.rs", "rank": 73, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_lifetime_explicit() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenWithLifetime<'static, usize>);\n\n\n\n println!(\"{}\", obj.gwl_1());\n\n}\n", "file_path": "cglue/src/tests/generics/param.rs", "rank": 74, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_group_return() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenericGroupReturn);\n\n\n\n let group = obj.ggr_1();\n\n\n\n let cast = cast!(group impl GenWithInlineClause).unwrap();\n\n\n\n assert!(cast.gwi_1(&cast.gt_1()));\n\n assert!(!cast.gwi_1(&(cast.gt_1() + 1)));\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated.rs", "rank": 75, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_assoc_return() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as AssociatedReturn);\n\n\n\n let ret = obj.ar_1();\n\n\n\n println!(\"{:?}\", ret);\n\n\n\n assert_eq!(unsafe { *(ret as *const usize) }, 42);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated.rs", "rank": 76, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_gen_explicit() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenericTrait<usize>);\n\n\n\n println!(\"{}\", obj.gt_1());\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/param.rs", "rank": 77, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_plugin_group() {\n\n let sa = SA {};\n\n\n\n let mut obj = group_obj!(sa as PluginInstance);\n\n\n\n let printer = obj.get_plug();\n\n\n\n printer.do_thing();\n\n}\n\n\n", "file_path": "cglue/src/tests/simple/hrtb.rs", "rank": 78, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_obj_return() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as ObjReturn);\n\n\n\n let ta = obj.or_1();\n\n\n\n assert_eq!(ta.ta_1(), 5);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated.rs", "rank": 79, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_group_infer() {\n\n let sa = SA {};\n\n\n\n let obj = group_obj!(sa as GenericGroup);\n\n\n\n println!(\"Val: {}\", obj.gt_1());\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/groups.rs", "rank": 80, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_getter() {\n\n let ga = GA { val: 50usize };\n\n\n\n let obj = trait_obj!(ga as Getter);\n\n\n\n assert_eq!(*obj.get_val(), 50);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/generic_structs.rs", "rank": 81, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_clone_group() {\n\n let sa = SA {};\n\n let obj = group_obj!(sa as MaybeClone);\n\n let obj = as_ref!(obj impl Clone).unwrap();\n\n impl_clone(obj)\n\n}\n\n\n", "file_path": "cglue/src/tests/ext/clone.rs", "rank": 82, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_self_constraint() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenWithSelfConstraint);\n\n\n\n let ret = std::thread::spawn(move || 
obj.gwsc_1(&55)).join().unwrap();\n\n\n\n assert!(ret);\n\n}\n", "file_path": "cglue/src/tests/simple/bounded.rs", "rank": 83, "score": 98110.37997145687 }, { "content": "fn use_plugin_mut() {\n\n let mut sa = SA {};\n\n\n\n let mut obj = trait_obj!(&mut sa as PluginInner);\n\n\n\n let printer = obj.get_plug();\n\n\n\n printer.do_thing();\n\n}\n\n\n", "file_path": "cglue/src/tests/simple/hrtb.rs", "rank": 84, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_mixed_consuming() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as TMixedConsuming);\n\n\n\n assert_eq!(obj.tmc_2(), 42);\n\n assert_eq!(obj.tmc_1(), 43);\n\n}\n\n\n", "file_path": "cglue/src/tests/simple/consuming.rs", "rank": 85, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_debug_group() {\n\n let sa = SA {};\n\n let obj = group_obj!(sa as MaybeDebug);\n\n let obj = as_ref!(obj impl Debug).unwrap();\n\n impl_debug(obj)\n\n}\n\n\n", "file_path": "cglue/src/tests/ext/debug.rs", "rank": 86, "score": 98110.37997145687 }, { "content": "#[test]\n\nfn use_gen_return() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenericReturn);\n\n\n\n let ta = obj.gr_1();\n\n\n\n assert_eq!(ta.gt_1(), 27);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated.rs", "rank": 87, "score": 98110.37997145687 }, { "content": "/// Convert from error code to Ok or Err.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `res` - result int value. Value of `0` will return `Ok`.\n\npub fn from_int_result_empty<E: IntError>(res: i32) -> Result<(), E> {\n\n match NonZeroI32::new(res) {\n\n None => Ok(()),\n\n Some(e) => Err(E::from_int_err(e)),\n\n }\n\n}\n", "file_path": "cglue/src/result.rs", "rank": 88, "score": 96555.81759858853 }, { "content": "#[test]\n\nfn use_plugin_group_mut() {\n\n let mut sa = SA {};\n\n\n\n let base = PluginInstance::from(&mut sa);\n\n\n\n let mut obj = crate::trait_group::Opaquable::into_opaque(base);\n\n\n\n let printer = obj.get_plug();\n\n\n\n printer.do_thing();\n\n}*/\n", "file_path": "cglue/src/tests/simple/hrtb.rs", "rank": 89, "score": 96220.90163540577 }, { "content": "#[test]\n\nfn use_group_ref() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GroupRefReturn);\n\n\n\n let obj2 = obj.grr_1();\n\n\n\n assert_eq!(obj2.ta_1(), 5);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated_ref.rs", "rank": 90, "score": 96220.90163540577 }, { "content": "#[test]\n\nfn use_group_mut() {\n\n let sa = SA {};\n\n\n\n let mut obj = trait_obj!(sa as GroupMutReturn);\n\n\n\n let obj2 = obj.gmr_1();\n\n\n\n assert_eq!(obj2.ta_1(), 5);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated_ref.rs", "rank": 91, "score": 96220.90163540577 }, { "content": "#[test]\n\nfn use_consumed_group_return() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as GenericConsumedGroupReturn);\n\n\n\n let group = obj.gcgr_1();\n\n\n\n let cast = cast!(group impl GenWithInlineClause).unwrap();\n\n\n\n assert!(cast.gwi_1(&cast.gt_1()));\n\n assert!(!cast.gwi_1(&(cast.gt_1() + 1)));\n\n}\n", "file_path": "cglue/src/tests/generics/associated.rs", "rank": 92, "score": 96220.90163540577 }, { "content": "#[test]\n\nfn use_assoc_mut() {\n\n let sa = SA {};\n\n\n\n let mut obj = trait_obj!(sa as ObjMutReturn);\n\n\n\n let obj2 = obj.omr_1();\n\n\n\n assert_eq!(obj2.ta_1(), 5);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated_ref.rs", "rank": 93, "score": 96220.90163540577 }, { "content": "#[test]\n\nfn use_mixed_group_explicit() {\n\n let sa = SA {};\n\n\n\n let obj = group_obj!(sa as 
MixedGenericGroup<usize, usize>);\n\n\n\n println!(\"Val: {}\", obj.gt_1());\n\n}\n", "file_path": "cglue/src/tests/generics/groups.rs", "rank": 94, "score": 96220.90163540577 }, { "content": "#[test]\n\nfn use_assoc_ref() {\n\n let sa = SA {};\n\n\n\n let obj = trait_obj!(sa as ObjRefReturn);\n\n\n\n let obj2 = obj.orr_1();\n\n\n\n assert_eq!(obj2.ta_1(), 5);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated_ref.rs", "rank": 95, "score": 96220.90163540577 }, { "content": "#[test]\n\nfn use_group_mut_unbounded() {\n\n let mut sa = SA {};\n\n\n\n let mut obj = trait_obj!(&mut sa as GroupMutReturnUnbounded);\n\n\n\n let obj2 = obj.gmru_1();\n\n\n\n assert_eq!(obj2.ta_1(), 5);\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/associated_ref.rs", "rank": 97, "score": 94430.73193254406 }, { "content": "#[test]\n\nfn use_group_lt_mut() {\n\n let sa = SA {};\n\n\n\n let mut obj = trait_obj!(sa as GroupLtMutReturn);\n\n\n\n let obj2 = obj.glmr_1();\n\n\n\n assert_eq!(obj2.ta_1(), 5);\n\n}\n", "file_path": "cglue/src/tests/generics/associated_ref.rs", "rank": 98, "score": 94430.73193254406 }, { "content": "#[test]\n\nfn use_mixed_group_partial_infer() {\n\n let sa = SA {};\n\n\n\n let obj = group_obj!(sa as MixedGenericGroup<usize, _>);\n\n\n\n println!(\"Val: {}\", obj.gt_1());\n\n}\n\n\n", "file_path": "cglue/src/tests/generics/groups.rs", "rank": 99, "score": 94430.73193254406 } ]
Rust
hfo2/src/init.rs
Dongjoo-Kim/hafnium-verification
6071eff162148e4d25a0fedaea003addac242ace
/* * Copyright 2019 Sanguk Park * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use core::mem::MaybeUninit; use core::ptr; use crate::addr::*; use crate::arch::*; use crate::boot_flow::*; use crate::boot_params::*; use crate::cpu::*; use crate::hypervisor::*; use crate::load::*; use crate::manifest::*; use crate::memiter::*; use crate::mm::*; use crate::mpool::*; use crate::page::*; use crate::types::*; use crate::vm::*; extern "C" { fn plat_console_init(); fn arch_one_time_init(); fn dlog_enable_lock(); static callstacks: [[u8; STACK_SIZE]; MAX_CPUS]; static boot_cpu: Cpu; } static mut PTABLE_BUF: MaybeUninit<[RawPage; HEAP_PAGES]> = MaybeUninit::uninit(); static mut INITED: bool = false; static mut HYPERVISOR: MaybeUninit<Hypervisor> = MaybeUninit::uninit(); #[no_mangle] unsafe extern "C" fn one_time_init(c: *const Cpu) -> *const Cpu { if &boot_cpu as *const _ != c || INITED { return c; } plat_console_init(); dlog!("Initialising hafnium\n"); arch_one_time_init(); arch_cpu_module_init(); let ppool = MPool::new(); ppool.free_pages(Pages::from_raw( PTABLE_BUF.get_mut().as_mut_ptr(), HEAP_PAGES, )); let mm = MemoryManager::new(&ppool).expect("mm_init failed"); mm.cpu_init(); dlog_enable_lock(); mpool_enable_locks(); static mut MANIFEST: MaybeUninit<Manifest> = MaybeUninit::uninit(); let mut manifest = MANIFEST.get_mut(); let mut params: BootParams = MaybeUninit::uninit().assume_init(); boot_flow_init( &mut mm.hypervisor_ptable.lock(), &mut manifest, &mut params, &ppool, ) .expect("Could not parse data from FDT."); let cpum = CpuManager::new( &params.cpu_ids[..params.cpu_count], boot_cpu.id, &callstacks, ); ptr::write( HYPERVISOR.get_mut(), Hypervisor::new(ppool, mm, cpum, VmManager::new()), ); for i in 0..params.mem_ranges_count { dlog!( "Memory range: {:#x} - {:#x}\n", pa_addr(params.mem_ranges[i].begin), pa_addr(params.mem_ranges[i].end) - 1 ); } dlog!( "Ramdisk range: {:#x} - {:#x}\n", pa_addr(params.initrd_begin), pa_addr(params.initrd_end) - 1 ); let mut hypervisor_ptable = hypervisor().memory_manager.hypervisor_ptable.lock(); hypervisor_ptable .identity_map( params.initrd_begin, params.initrd_end, Mode::R, &hypervisor().mpool, ) .expect("unable to map initrd in"); let initrd = pa_addr(params.initrd_begin) as *mut _; let cpio = MemIter::from_raw( initrd, pa_difference(params.initrd_begin, params.initrd_end), ); let primary_initrd = load_primary( &mut HYPERVISOR.get_mut().vm_manager, &mut hypervisor_ptable, &cpio, params.kernel_arg, &hypervisor().mpool, ) .expect("unable to load primary VM"); let mut update: BootParamsUpdate = BootParamsUpdate::new( pa_from_va(va_from_ptr(primary_initrd.get_next() as usize as *const _)), pa_from_va(va_from_ptr(primary_initrd.get_limit() as usize as *const _)), ); load_secondary( &mut HYPERVISOR.get_mut().vm_manager, &mut hypervisor_ptable, &mut manifest, &cpio, &params, &mut update, &hypervisor().mpool, ) .expect("unable to load secondary VMs"); boot_params_patch_fdt(&mut hypervisor_ptable, &mut update, &hypervisor().mpool) .expect("plat_update_boot_params failed"); 
hypervisor_ptable.defrag(&hypervisor().mpool); mm_vm_enable_invalidation(); dlog!("Hafnium initialisation completed\n"); INITED = true; hypervisor().cpu_manager.get_boot_cpu() } pub fn hypervisor() -> &'static Hypervisor { unsafe { HYPERVISOR.get_ref() } } #[no_mangle] pub unsafe extern "C" fn cpu_main(c: *const Cpu) -> *const VCpu { if hypervisor().cpu_manager.index_of(c) != 0 { hypervisor().memory_manager.cpu_init(); } let primary = hypervisor().vm_manager.get_primary(); let vcpu = &primary.vcpus[hypervisor().cpu_manager.index_of(c)]; let vcpu_inner = vcpu.inner.get_mut_unchecked(); vcpu_inner.cpu = c; vcpu_inner.regs.reset(true, vcpu.vm(), (*c).id); vcpu }
/* * Copyright 2019 Sanguk Park * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use core::mem::MaybeUninit; use core::ptr; use crate::addr::*; use crate::arch::*; use crate::boot_flow::*; use crate::boot_params::*; use crate::cpu::*; use crate::hypervisor::*; use crate::load::*; use crate::manifest::*; use crate::memiter::*; use crate::mm::*; use crate::mpool::*; use crate::page::*; use crate::types::*; use crate::vm::*; extern "C" { fn plat_console_init(); fn arch_one_time_init(); fn dlog_enable_lock(); static callstacks: [[u8; STACK_SIZE]; MAX_CPUS]; static boot_cpu: Cpu; } static mut PTABLE_BUF: MaybeUninit<[RawPage; HEAP_PAGES]> = MaybeUninit::uninit(); static mut INITED: bool = false; static mut HYPERVISOR: MaybeUninit<Hypervisor> = MaybeUninit::uninit(); #[no_mangle] unsafe extern "C" fn one_time_init(c: *const Cpu) -> *const Cpu { if &boot_cpu as *const _ != c || INITED { return c; } plat_console_init(); dlog!("Initialising hafnium\n"); arch_one_time_init(); arch_cpu_module_init(); let ppool = MPool::new(); ppool.free_pages(Pages::from_raw( PTABLE_BUF.get_mut().as_mut_ptr(), HEAP_PAGES, )); let mm = MemoryManager::new(&ppool).expect("mm_init failed"); mm.cpu_init(); dlog_enable_lock(); mpool_enable_locks(); static mut MANIFEST: MaybeUninit<Manifest> = MaybeUninit::uninit(); let mut manifest = MANIFEST.get_mut(); let mut params: BootParams = MaybeUninit::uninit().assume_init(); boot_flow_init( &mut mm.hypervisor_ptable.lock(), &mut manifest, &mut params, &ppool, ) .expect("Could not parse data from FDT."); let cpum = CpuManager::new( &params.cpu_ids[..params.cpu_count], boot_cpu.id, &callstacks, ); ptr::write( HYPERVISOR.get_mut(), Hypervisor::new(ppool, mm, cpum, VmManager::new()), );
pub fn hypervisor() -> &'static Hypervisor { unsafe { HYPERVISOR.get_ref() } } #[no_mangle] pub unsafe extern "C" fn cpu_main(c: *const Cpu) -> *const VCpu { if hypervisor().cpu_manager.index_of(c) != 0 { hypervisor().memory_manager.cpu_init(); } let primary = hypervisor().vm_manager.get_primary(); let vcpu = &primary.vcpus[hypervisor().cpu_manager.index_of(c)]; let vcpu_inner = vcpu.inner.get_mut_unchecked(); vcpu_inner.cpu = c; vcpu_inner.regs.reset(true, vcpu.vm(), (*c).id); vcpu }
for i in 0..params.mem_ranges_count { dlog!( "Memory range: {:#x} - {:#x}\n", pa_addr(params.mem_ranges[i].begin), pa_addr(params.mem_ranges[i].end) - 1 ); } dlog!( "Ramdisk range: {:#x} - {:#x}\n", pa_addr(params.initrd_begin), pa_addr(params.initrd_end) - 1 ); let mut hypervisor_ptable = hypervisor().memory_manager.hypervisor_ptable.lock(); hypervisor_ptable .identity_map( params.initrd_begin, params.initrd_end, Mode::R, &hypervisor().mpool, ) .expect("unable to map initrd in"); let initrd = pa_addr(params.initrd_begin) as *mut _; let cpio = MemIter::from_raw( initrd, pa_difference(params.initrd_begin, params.initrd_end), ); let primary_initrd = load_primary( &mut HYPERVISOR.get_mut().vm_manager, &mut hypervisor_ptable, &cpio, params.kernel_arg, &hypervisor().mpool, ) .expect("unable to load primary VM"); let mut update: BootParamsUpdate = BootParamsUpdate::new( pa_from_va(va_from_ptr(primary_initrd.get_next() as usize as *const _)), pa_from_va(va_from_ptr(primary_initrd.get_limit() as usize as *const _)), ); load_secondary( &mut HYPERVISOR.get_mut().vm_manager, &mut hypervisor_ptable, &mut manifest, &cpio, &params, &mut update, &hypervisor().mpool, ) .expect("unable to load secondary VMs"); boot_params_patch_fdt(&mut hypervisor_ptable, &mut update, &hypervisor().mpool) .expect("plat_update_boot_params failed"); hypervisor_ptable.defrag(&hypervisor().mpool); mm_vm_enable_invalidation(); dlog!("Hafnium initialisation completed\n"); INITED = true; hypervisor().cpu_manager.get_boot_cpu() }
function_block-function_prefix_line
[ { "content": "/// Helper method for parsing 32/64-bit units from FDT data.\n\npub fn fdt_parse_number(data: &[u8]) -> Option<u64> {\n\n #[repr(C, align(8))]\n\n struct T {\n\n a: [u8; 8],\n\n }\n\n\n\n // FDT values should be aligned to 32-bit boundary.\n\n assert!(is_aligned(data.as_ptr() as _, FDT_TOKEN_ALIGNMENT));\n\n\n\n let ret = match data.len() {\n\n 4 => {\n\n // Assert that `data` is already sufficiently aligned to dereference as u32.\n\n const_assert!(mem::align_of::<u32>() <= FDT_TOKEN_ALIGNMENT);\n\n unsafe { u32::from_be(*(data.as_ptr() as *const u32)) as u64 }\n\n }\n\n 8 => {\n\n // ARMv8 requires `data` to be realigned to 64-bit boundary to dereferences as u64.\n\n // May not be needed on other architectures.\n\n let mut t = T {\n\n a: Default::default(),\n", "file_path": "hfo2/src/fdt.rs", "rank": 1, "score": 327453.75949096587 }, { "content": "/// Determines if a character is a whitespace.\n\nfn is_space(c: u8) -> bool {\n\n match c as char {\n\n ' ' | '\\t' | '\\n' | '\\r' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "hfo2/src/memiter.rs", "rank": 2, "score": 318871.86935580545 }, { "content": "/// Checks above constants are correct.\n\n/// HfO2: This checking was originally done in compile time in C. But it was\n\n/// impossible because Rust compiler rejects construction of variables with\n\n/// interior mutability (`VCpu` has `SpinLock`) in constant expressions. Hence\n\n/// we check those constants in runtime.\n\npub fn arch_cpu_module_init() {\n\n assert_eq!(offset_of!(Cpu, id), CPU_ID);\n\n assert_eq!(offset_of!(Cpu, stack_bottom), CPU_STACK_BOTTOM);\n\n // assert_eq!(\n\n // offset_of!(VCpu, inner)\n\n // + 8 // expected value of offset_of!(SpinLock<VCpuState>, data), but it\n\n // // is not working. see Gilnaa/memoffset#21.\n\n // + offset_of!(VCpuInner, regs),\n\n // VCPU_REGS\n\n // );\n\n assert_eq!(offset_of!(ArchRegs, lazy), REGS_LAZY);\n\n assert_eq!(offset_of!(ArchRegs, fp), REGS_FREGS);\n\n assert_eq!(offset_of!(ArchRegs, gic_ich_hcr_el2), REGS_GIC);\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Default)]\n\npub struct ArchSysRegs {\n\n vmpidr_el2: uintreg_t,\n\n csselr_el1: uintreg_t,\n", "file_path": "hfo2/src/arch/aarch64.rs", "rank": 3, "score": 264055.4941936127 }, { "content": "pub fn arch_cpu_module_init() {\n\n // Do nothing.\n\n}\n\n\n\n// TODO(HfO2): Following functions are empty, since linker complains if the\n\n// implementations of functions are missing even they're never called in the\n\n// unit tests. Make a custom target and remove those (#46.)\n\n#[no_mangle]\n\npub extern \"C\" fn arch_one_time_init() {\n\n unreachable!();\n\n}\n", "file_path": "hfo2/src/arch/fake.rs", "rank": 4, "score": 264039.1476631976 }, { "content": "fn as_digit(c: u8) -> Option<u8> {\n\n if b'0' <= c && c <= b'9' {\n\n Some(c - b'0')\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl MemIter {\n\n /// Initialises the given memory iterator.\n\n pub unsafe fn from_raw(data: *const u8, size: usize) -> Self {\n\n Self {\n\n next: data,\n\n limit: data.add(size),\n\n }\n\n }\n\n\n\n /// Moves iterator to the next non-whitespace character.\n\n unsafe fn skip_space(&mut self) {\n\n while let Some(c) = self.peek() {\n", "file_path": "hfo2/src/memiter.rs", "rank": 5, "score": 240039.24015614993 }, { "content": "/// Updates the FDT before being passed to the primary VM's kernel.\n\n///\n\n/// TODO: in future, each VM will declare whether it expects an argument passed and that will be\n\n/// static data e.g. it will provide its own FDT so there will be no FDT modification. 
This is\n\n/// done because each VM has a very different view of the system and we don't want to force VMs\n\n/// to require loader code when another loader can load the data for it.\n\npub fn boot_params_patch_fdt(\n\n ptable: &mut PageTable<Stage1>,\n\n p: &BootParamsUpdate,\n\n mpool: &MPool,\n\n) -> Result<(), ()> {\n\n unsafe { patch(ptable, plat::get_fdt_addr(), p, mpool) }\n\n}\n", "file_path": "hfo2/src/boot_params.rs", "rank": 6, "score": 237902.5541077905 }, { "content": "pub fn as_asciz(bytes: &[u8]) -> &[u8] {\n\n bytes\n\n .split_at(\n\n bytes\n\n .iter()\n\n .position(|&c| c == b'\\0')\n\n .unwrap_or(bytes.len()),\n\n )\n\n .0\n\n}\n\n\n", "file_path": "hfo2/src/utils.rs", "rank": 7, "score": 209853.24807643043 }, { "content": "/// Parse information from FDT needed to initialize Hafnium.\n\n/// FDT is mapped at the beginning and unmapped before exiting the function.\n\npub fn boot_flow_init(\n\n ptable: &mut PageTable<Stage1>,\n\n manifest: &mut Manifest,\n\n boot_params: &mut BootParams,\n\n ppool: &MPool,\n\n) -> Result<(), ()> {\n\n // Get the memory map from the FDT.\n\n let mut fdt_root = unsafe { map(ptable, plat::get_fdt_addr(), ppool) }.ok_or_else(|| {\n\n dlog!(\"Unable to map FDT.\\n\");\n\n })?;\n\n\n\n let ret = try {\n\n fdt_root.find_child(\"\\0\".as_ptr()).ok_or_else(|| {\n\n dlog!(\"Unable to find FDT root node.\\n\");\n\n })?;\n\n\n\n manifest.init(&fdt_root).map_err(|e| {\n\n dlog!(\n\n \"Could not parse manifest: {}.\\n\",\n\n <Error as Into<&'static str>>::into(e)\n", "file_path": "hfo2/src/boot_flow.rs", "rank": 8, "score": 205220.44157143874 }, { "content": "static struct mpool ppool;\n", "file_path": "test/hftest/mm.c", "rank": 9, "score": 181318.82721697385 }, { "content": " class C {\n\n public static final int CCI = 4;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/bufferoverrun/CompressedData.java", "rank": 10, "score": 176881.7465230768 }, { "content": " class C {\n\n B b;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/infer/NullPointerExceptions.java", "rank": 11, "score": 174008.21151409802 }, { "content": " private void init() throws IOException {\n\n InferUndefined.can_throw_ioexception_void();\n\n InferBuiltins.__set_file_attribute(this);\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/java/util/zip/ZipFile.java", "rank": 12, "score": 174007.89645441962 }, { "content": " public void write(int b) throws IOException {\n\n InferUndefined.can_throw_ioexception_void();\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/java/io/DataOutputStream.java", "rank": 13, "score": 172655.408484323 }, { "content": " private void init() {\n\n InferBuiltins.__set_file_attribute(this);\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/java/io/FileOutputStream.java", "rank": 14, "score": 172650.22951493715 }, { "content": " private void init() {\n\n InferBuiltins.__set_file_attribute(this);\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/java/io/FileInputStream.java", "rank": 15, "score": 172650.22951493715 }, { "content": " public void write(byte b[], int off, int len) throws IOException {\n\n InferUndefined.can_throw_ioexception_void();\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/java/io/RandomAccessFile.java", "rank": 16, "score": 172591.3603692887 }, { "content": " public void write(byte b[], int off, int len) throws IOException {\n\n 
InferUndefined.can_throw_ioexception_void();\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/java/io/FileOutputStream.java", "rank": 17, "score": 172591.3603692887 }, { "content": " public long write(ByteBuffer[] buffers, int offset, int length) throws IOException {\n\n return InferUndefined.can_throw_ioexception_long();\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/java/nio/FileChannelImpl.java", "rank": 18, "score": 172591.3603692887 }, { "content": "type param_type = {name: Mangled.t; typ: Typ.t; is_pointer_to_const: bool; annot: Annot.Item.t}\n\n\n\nlet mk_param_type ?(is_pointer_to_const = false) ?(annot = Annot.Item.empty) name typ =\n\n {name; typ; is_pointer_to_const; annot}\n\n\n\n\n", "file_path": "experiments/ownership-inference/infer/infer/src/clang/cMethodSignature.ml", "rank": 19, "score": 171190.72729069603 }, { "content": "/// Check whether the value `v` is aligned to the boundary `a`,\n\n/// with `a` power of 2.\n\npub fn is_aligned(v: usize, a: usize) -> bool {\n\n (v & (a - 1)) == 0\n\n}\n\n\n\n/// As per the C11 specification, mem*_s() operations fill the destination buffer if runtime\n\n/// constraint validation fails, assuming that `dest` and `destsz` are both valid.\n\n#[track_caller]\n\nunsafe fn check_or_fill(cond: bool, dest: *const c_void, destsz: size_t, ch: i32, condmsg: &str) {\n\n if !cond {\n\n if !dest.is_null() && destsz <= RSIZE_MAX {\n\n memset_s(dest, destsz, ch, destsz);\n\n }\n\n panic!(\"failed: {}\", condmsg);\n\n }\n\n}\n\n\n\n#[track_caller]\n\nunsafe fn check_or_fill_zero(cond: bool, dest: *const c_void, destsz: size_t, condmsg: &str) {\n\n check_or_fill(cond, dest, destsz, 0, condmsg)\n\n}\n", "file_path": "hfo2/src/std.rs", "rank": 20, "score": 171158.84525645484 }, { "content": "void arch_one_time_init(void)\n\n{\n\n\tsmc_res_t smc_res =\n\n\t\tsmc32(PSCI_VERSION, 0, 0, 0, 0, 0, 0, SMCCC_CALLER_HYPERVISOR);\n\n\n\n\tel3_psci_version = smc_res.res0;\n\n\n\n\t/* Check there's nothing unexpected about PSCI. */\n\n\tswitch (el3_psci_version) {\n\n\tcase PSCI_VERSION_0_2:\n\n\tcase PSCI_VERSION_1_0:\n\n\tcase PSCI_VERSION_1_1:\n\n\t\t/* Supported EL3 PSCI version. */\n\n\t\tdlog(\"Found PSCI version: %#x\\n\", el3_psci_version);\n\n\t\tbreak;\n\n\n\n\tdefault:\n\n\t\t/* Unsupported EL3 PSCI version. Log a warning but continue. 
*/\n\n\t\tdlog(\"Warning: unknown PSCI version: %#x\\n\", el3_psci_version);\n\n\t\tel3_psci_version = 0;\n\n\t\tbreak;\n\n\t}\n", "file_path": "src/arch/aarch64/hypervisor/psci_handler.c", "rank": 21, "score": 169241.78180696102 }, { "content": "type kind_spec = {kind: t; (* for non-diff analysis *) top_and_bottom: bool}\n\n\n\nlet enabled_cost_kinds =\n\n [{kind= OperationCost; top_and_bottom= true}; {kind= AllocationCost; top_and_bottom= false}]\n", "file_path": "experiments/ownership-inference/infer/infer/src/base/costKind.ml", "rank": 22, "score": 169159.02483799428 }, { "content": "/// Generates a string with the two letters \"vm\" followed by an integer.\n\nfn generate_vm_node_name<'a>(\n\n buf: &'a mut [u8; VM_NAME_BUF_SIZE],\n\n vm_id: spci_vm_id_t,\n\n) -> &'a mut [u8] {\n\n struct BufWrite<'a> {\n\n buf: &'a mut [u8; VM_NAME_BUF_SIZE],\n\n size: usize,\n\n }\n\n\n\n impl<'a> Write for BufWrite<'a> {\n\n fn write_str(&mut self, s: &str) -> Result<(), fmt::Error> {\n\n let dest = self\n\n .buf\n\n .get_mut(self.size..(self.size + s.len()))\n\n .ok_or(fmt::Error)?;\n\n dest.copy_from_slice(s.as_bytes());\n\n self.size += s.len();\n\n\n\n Ok(())\n\n }\n", "file_path": "hfo2/src/manifest.rs", "rank": 23, "score": 167020.10346144403 }, { "content": " public void nullPointerExceptionUnlessFrameFails() {\n\n String s = null;\n\n Object a = frame(new A());\n\n if (a instanceof A) {\n\n s.length();\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/infer/NullPointerExceptions.java", "rank": 24, "score": 165950.14027391584 }, { "content": "struct C {\n\n C(int v) : f(v){};\n\n ~C();\n\n int f;\n\n};\n\n\n\nint use_after_scope4_bad() {\n\n C* pc;\n\n {\n\n C c(3);\n\n pc = &c;\n\n }\n\n return pc->f;\n\n}\n\n\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/cpp/pulse/use_after_destructor.cpp", "rank": 25, "score": 163281.00158282407 }, { "content": "/// Retrieves the next file stored in the cpio archive stored in the cpio, and advances the iterator\n\n/// such that another call to this function would return the following file.\n\npub fn parse_cpio(it: &mut MemIter) -> Option<CpioResult> {\n\n let header = unsafe { &*(it.read(mem::size_of::<CpioHeader>())? as *const CpioHeader) };\n\n\n\n // TODO: Check magic.\n\n\n\n let name_len = (header.namesize + 1) & !1;\n\n let name = it.read(name_len as usize)?;\n\n\n\n let contents_len = ((header.filesize[0] as usize) << 16) | header.filesize[1] as usize;\n\n let contents = it.read((contents_len + 1) & !1)?;\n\n\n\n // TODO: Check that string is null-terminated.\n\n\n\n /* Stop enumerating files when we hit the end marker. 
*/\n\n if unsafe { strcmp(name, &(\"TRAILER!!!\\n\".as_bytes()[0])) } == 0 {\n\n return None;\n\n }\n\n\n\n Some(CpioResult {\n\n name,\n\n contents,\n\n size: contents_len,\n\n })\n\n}\n\n\n", "file_path": "hfo2/src/cpio.rs", "rank": 26, "score": 162864.03647228004 }, { "content": "type source_files_filter = SourceFile.t -> bool\n\n\n", "file_path": "experiments/ownership-inference/infer/infer/src/IR/Filtering.ml", "rank": 27, "score": 162833.67609813096 }, { "content": " public static void nullPointerExceptionFromFailingFileOutputStreamConstructor()\n\n throws IOException {\n\n FileOutputStream fos = null;\n\n try {\n\n fos = new FileOutputStream(new File(\"whatever.txt\"));\n\n } catch (IOException e) {\n\n } finally {\n\n fos.close();\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/infer/NullPointerExceptions.java", "rank": 28, "score": 162407.0465664965 }, { "content": "class C {\n\n private int x = 0;\n\n\n\n public int get() {\n\n return x;\n\n }\n\n\n\n public void set(int v) {\n\n x = v;\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/racerd/ReadWriteRaces.java", "rank": 29, "score": 162347.602508721 }, { "content": "type var_data = {name: Mangled.t; typ: Typ.t; modify_in_block: bool; is_constexpr: bool}\n\n[@@deriving compare]\n\n\n\nlet pp_var_data fmt {name; typ; modify_in_block} =\n\n F.fprintf fmt \"@[<h>{ name=@ %a;@ typ=@ %a;@ modify_in_block=@ %b@ }@]\" Mangled.pp name\n\n (Typ.pp_full Pp.text) typ modify_in_block\n\n\n\n\n", "file_path": "experiments/ownership-inference/infer/infer/src/IR/ProcAttributes.ml", "rank": 30, "score": 151451.32150387482 }, { "content": "type path_filter = SourceFile.t -> bool\n\n\n", "file_path": "experiments/ownership-inference/infer/infer/src/backend/inferconfig.ml", "rank": 31, "score": 149323.73271305996 }, { "content": " void FN_writeToFieldWrittenInLockUsingMethodBad() {\n\n mField3 = 3;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/racerd/Inference.java", "rank": 32, "score": 144697.12045059394 }, { "content": "type procedures_filter = SourceFile.t -> Procname.t -> bool\n\n\n\nlet filter_of_regexp_opt ~to_string r =\n\n match r with\n\n | None ->\n\n fun _ -> true\n\n | Some regexp ->\n\n fun x -> Str.string_match regexp (to_string x) 0\n\n\n\n\n\nlet ( &&& ) filter1 filter2 x1 x2 = filter1 x1 && filter2 x2\n\n\n\nlet mk_source_file_filter ~filter =\n\n let regexp_opt = Option.map ~f:Str.regexp filter in\n\n filter_of_regexp_opt ~to_string:SourceFile.to_string regexp_opt\n\n\n\n\n\nlet source_files_filter = lazy (mk_source_file_filter ~filter:Config.source_files_filter)\n\n\n\nlet mk_procedure_name_filter ~filter =\n", "file_path": "experiments/ownership-inference/infer/infer/src/IR/Filtering.ml", "rank": 33, "score": 143494.38792722978 }, { "content": " private SomeEnum FN_usingNonStrictifiedStaticFieldsInEnumsShouldBeBad() {\n\n String str = SomeEnum.FAKE_VALUE.toString(); // should not be able to dereference\n\n SomeEnum.FAKE_VALUE.someMethod(); // should not be able to dereference\n\n return SomeEnum.FAKE_VALUE; // should not be able to convert to nonnull\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/nullsafe-default/StrictMode.java", "rank": 34, "score": 140818.47972865248 }, { "content": "struct C { // non-POD struct\n\n int z;\n\n X x{};\n\n virtual void f() {} // this make C non-POD\n\n C() = default;\n\n C(int a, int b, const X& x) : z(a + b), x(x) 
{}\n\n};\n\n\n\nvoid zero_init_primitive() {\n\n int i{};\n\n int* p{};\n\n float f{};\n\n}\n\n\n\nvoid zero_init_record() {\n\n Y y{};\n\n C c{}; // this will call default constructor\n\n}\n\n\n\nvoid record_init() {\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/cpp/frontend/initialization/init_list.cpp", "rank": 35, "score": 139976.19544161943 }, { "content": "bool mm_init(struct mpool *ppool);\n", "file_path": "inc/hf/mm.h", "rank": 36, "score": 136261.52578759557 }, { "content": "bool mm_ptable_init(struct mm_ptable *t, int flags, struct mpool *ppool);\n", "file_path": "inc/hf/mm.h", "rank": 37, "score": 134392.81827712542 }, { "content": "bool mm_vm_init(struct mm_ptable *t, struct mpool *ppool);\n", "file_path": "inc/hf/mm.h", "rank": 38, "score": 134392.81827712542 }, { "content": "bool hftest_mm_init(void)\n\n{\n\n\tstruct mm_stage1_locked stage1_locked;\n\n\n\n\tmpool_init(&ppool, sizeof(struct mm_page_table));\n\n\tif (!mpool_add_chunk(&ppool, ptable_buf, sizeof(ptable_buf))) {\n\n\t\tHFTEST_FAIL(true, \"Failed to add buffer to page-table pool.\");\n\n\t}\n\n\n\n\tif (!mm_ptable_init(&ptable, MM_FLAG_STAGE1, &ppool)) {\n\n\t\tHFTEST_FAIL(true, \"Unable to allocate memory for page table.\");\n\n\t}\n\n\n\n\tstage1_locked = get_stage1_locked();\n\n\tmm_identity_map_nolock(stage1_locked, pa_init(0),\n\n\t\t\tpa_init(mm_ptable_addr_space_end(MM_FLAG_STAGE1)),\n\n\t\t\tMM_MODE_R | MM_MODE_W | MM_MODE_X, &ppool);\n\n\n\n\tif (!arch_vm_mm_init()) {\n\n\t\treturn false;\n\n\t}\n\n\n\n\tarch_vm_mm_enable(ptable.root);\n\n\n\n\treturn true;\n", "file_path": "test/hftest/mm.c", "rank": 39, "score": 134392.81827712542 }, { "content": "struct cpu boot_cpu = {\n\n\t.stack_bottom = &callstacks[0][STACK_SIZE],\n", "file_path": "src/cpu.c", "rank": 40, "score": 133281.29582652336 }, { "content": "struct cpu {\n\n\t/** CPU identifier. Doesn't have to be contiguous. */\n\n\tcpu_id_t id;\n\n\n\n\t/** Pointer to bottom of the stack. */\n\n\tvoid *stack_bottom;\n\n\n\n\t/** See api.c for the partial ordering on locks. */\n\n\tstruct spinlock lock;\n\n\n\n\t/** Determines whether or not the cpu is currently on. */\n\n\tbool is_on;\n", "file_path": "inc/hf/cpu.h", "rank": 41, "score": 133274.88260095174 }, { "content": "bool boot_params_init(struct boot_params *p, const struct fdt_node *fdt_root);\n", "file_path": "inc/hf/boot_params.h", "rank": 42, "score": 132654.60156711494 }, { "content": "bool mm_vm_unmap_hypervisor(struct mm_ptable *t, struct mpool *ppool);\n", "file_path": "inc/hf/mm.h", "rank": 43, "score": 132594.0242548503 }, { "content": "bool arch_mm_init(void);\n", "file_path": "inc/hf/arch/mm.h", "rank": 44, "score": 132584.180679947 }, { "content": "bool arch_mm_init(void)\n\n{\n\n\tstatic const int pa_bits_table[16] = {32, 36, 40, 42, 44, 48};\n\n\tuint64_t features = read_msr(id_aa64mmfr0_el1);\n\n\tint pa_bits = pa_bits_table[features & 0xf];\n\n\tint extend_bits;\n\n\tint sl0;\n\n\n\n\t/* Check that 4KB granules are supported. */\n\n\tif ((features >> 28) & 0xf) {\n\n\t\tdlog(\"4KB granules are not supported\\n\");\n\n\t\treturn false;\n\n\t}\n\n\n\n\t/* Check the physical address range. */\n\n\tif (!pa_bits) {\n\n\t\tdlog(\"Unsupported value of id_aa64mmfr0_el1.PARange: %x\\n\",\n\n\t\t features & 0xf);\n\n\t\treturn false;\n\n\t}\n\n\n\n\tdlog(\"Supported bits in physical address: %d\\n\", pa_bits);\n\n\n\n\t/*\n\n\t * Determine sl0, starting level of the page table, based on the number\n\n\t * of bits. 
The value is chosen to give the shallowest tree by making\n\n\t * use of concatenated translation tables.\n\n\t *\n\n\t * - 0 => start at level 1\n\n\t * - 1 => start at level 2\n\n\t * - 2 => start at level 3\n\n\t */\n\n\tif (pa_bits >= 44) {\n\n\t\tsl0 = 2;\n\n\t\tmm_s2_max_level = 3;\n\n\t} else if (pa_bits >= 35) {\n\n\t\tsl0 = 1;\n\n\t\tmm_s2_max_level = 2;\n\n\t} else {\n\n\t\tsl0 = 0;\n\n\t\tmm_s2_max_level = 1;\n\n\t}\n\n\n\n\t/*\n\n\t * Since the shallowest possible tree is used, the maximum number of\n\n\t * concatenated tables must be used. This means if no more than 4 bits\n\n\t * are used from the next level, they are instead used to index into the\n\n\t * concatenated tables.\n\n\t */\n\n\textend_bits = ((pa_bits - PAGE_BITS) % PAGE_LEVEL_BITS);\n\n\tif (extend_bits > 4) {\n\n\t\textend_bits = 0;\n\n\t}\n\n\tmm_s2_root_table_count = 1 << extend_bits;\n\n\n\n\tdlog(\"Stage 2 has %d page table levels with %d pages at the root.\\n\",\n\n\t mm_s2_max_level + 1, mm_s2_root_table_count);\n\n\n\n\tmm_vtcr_el2 = (1u << 31) |\t\t /* RES1. */\n\n\t\t ((features & 0xf) << 16) | /* PS, matching features. */\n\n\t\t (0 << 14) |\t\t /* TG0: 4 KB granule. */\n\n\t\t (3 << 12) |\t\t /* SH0: inner shareable. */\n\n\t\t (1 << 10) |\t /* ORGN0: normal, cacheable ... */\n\n\t\t (1 << 8) |\t /* IRGN0: normal, cacheable ... */\n\n\t\t (sl0 << 6) |\t /* SL0. */\n\n\t\t ((64 - pa_bits) << 0) | /* T0SZ: dependent on PS. */\n\n\t\t 0;\n\n\n\n\t/*\n\n\t * 0 -> Device-nGnRnE memory\n\n\t * 0xff -> Normal memory, Inner/Outer Write-Back Non-transient,\n\n\t * Write-Alloc, Read-Alloc.\n\n\t */\n\n\tmm_mair_el2 = (0 << (8 * STAGE1_DEVICEINDX)) |\n\n\t\t (0xff << (8 * STAGE1_NORMALINDX));\n\n\n\n\t/*\n\n\t * Configure tcr_el2.\n\n\t */\n\n\tmm_tcr_el2 = (1 << 20) |\t\t/* TBI, top byte ignored. */\n\n\t\t ((features & 0xf) << 16) | /* PS. */\n\n\t\t (0 << 14) |\t\t/* TG0, granule size, 4KB. */\n\n\t\t (3 << 12) |\t\t/* SH0, inner shareable. */\n\n\t\t (1 << 10) | /* ORGN0, normal mem, WB RA WA Cacheable. */\n\n\t\t (1 << 8) | /* IRGN0, normal mem, WB RA WA Cacheable. */\n\n\t\t (25 << 0) | /* T0SZ, input address is 2^39 bytes. */\n\n\t\t 0;\n\n\n\n\tmm_sctlr_el2 = (1 << 0) | /* M, enable stage 1 EL2 MMU. */\n\n\t\t (1 << 1) | /* A, enable alignment check faults. */\n\n\t\t (1 << 2) | /* C, data cache enable. */\n\n\t\t (1 << 3) | /* SA, enable stack alignment check. */\n\n\t\t (3 << 4) | /* RES1 bits. */\n\n\t\t (1 << 11) | /* RES1 bit. */\n\n\t\t (1 << 12) | /* I, instruction cache enable. */\n\n\t\t (1 << 16) | /* RES1 bit. */\n\n\t\t (1 << 18) | /* RES1 bit. */\n\n\t\t (1 << 19) | /* WXN bit, writable execute never. */\n\n\t\t (3 << 22) | /* RES1 bits. */\n\n\t\t (3 << 28) | /* RES1 bits. */\n\n\t\t 0;\n\n\n\n\treturn true;\n", "file_path": "src/arch/aarch64/mm.c", "rank": 45, "score": 132577.45809152542 }, { "content": "void hftest_mm_vcpu_init(void)\n\n{\n\n\tarch_vm_mm_enable(ptable.root);\n", "file_path": "test/hftest/mm.c", "rank": 46, "score": 132577.45809152542 }, { "content": "bool arch_mm_init(void)\n\n{\n\n\t/* No initialization required. 
*/\n\n\treturn true;\n", "file_path": "src/arch/fake/mm.c", "rank": 47, "score": 132577.45809152542 }, { "content": "class StaticInitAttributes {\n\n static Binder binder;\n\n\n\n private static void doTransact() {\n\n try {\n\n binder.transact(0, null, null, 0);\n\n } catch (RemoteException e) {\n\n }\n\n }\n\n\n\n @ForUiThread static Executor mUiThreadExecutor = null;\n\n\n\n static Executor mUiExecutor;\n\n static Executor mNonUiExecutor;\n\n static Handler mUiHandler;\n\n static Runnable mBadRunnable;\n\n static Runnable mOkRunnable;\n\n\n\n static {\n\n mUiExecutor = mUiThreadExecutor;\n\n mNonUiExecutor = Executors.getBackgroundExecutor();\n\n mUiHandler = new Handler(Looper.getMainLooper());\n\n mBadRunnable =\n\n new Runnable() {\n\n @Override\n\n public void run() {\n\n doTransact();\n\n }\n\n };\n\n\n\n mOkRunnable =\n\n new Runnable() {\n\n @Override\n\n public void run() {}\n\n };\n\n }\n\n\n\n public void postBlockingCallToUIExecutorBad() {\n\n mUiExecutor.execute(mBadRunnable);\n\n }\n\n\n\n public void postNoopCallToUIExecutorOk() {\n\n mUiExecutor.execute(mOkRunnable);\n\n }\n\n\n\n public void postBlockingCallToNonUIExecutorOk() {\n\n mNonUiExecutor.execute(mBadRunnable);\n\n }\n\n\n\n public void postBlockingCallToUIHandlerBad() {\n\n mUiHandler.post(mBadRunnable);\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 48, "score": 131975.34695159618 }, { "content": "bool boot_params_patch_fdt(struct mm_stage1_locked stage1_locked,\n", "file_path": "inc/hf/boot_params.h", "rank": 49, "score": 130919.13937073418 }, { "content": " @Override\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 50, "score": 130727.97555349473 }, { "content": " private static void doTransact() {\n\n try {\n\n binder.transact(0, null, null, 0);\n\n } catch (RemoteException e) {\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 51, "score": 130727.97555349473 }, { "content": " static Binder binder;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 52, "score": 130727.97555349473 }, { "content": "type field_info = {typ: Typ.t; annotations: Annot.Item.t; is_static: bool}\n\n\n\nlet find_field field_list field_name_to_lookup =\n\n List.find_map\n\n ~f:(fun (field_name, typ, annotations) ->\n\n if Fieldname.equal field_name field_name_to_lookup then Some (typ, annotations) else None )\n\n field_list\n\n\n\n\n\nlet get_field_info ~lookup field_name_to_lookup (typ : Typ.t) =\n\n let find_field_info field_list ~is_static =\n\n find_field field_list field_name_to_lookup\n\n |> Option.map ~f:(fun (typ, annotations) -> {typ; annotations; is_static})\n\n in\n\n match typ.desc with\n\n | Tstruct name | Tptr ({desc= Tstruct name}, _) -> (\n\n match lookup name with\n\n | Some {fields= non_statics; statics} ->\n\n (* Search in both lists and return the first found *)\n\n find_field_info statics ~is_static:true\n", "file_path": "experiments/ownership-inference/infer/infer/src/IR/Struct.ml", "rank": 53, "score": 130433.06972594759 }, { "content": "/// Looks for a file in the given cpio archive. The filename is not null-terminated, so we use a\n\n/// memory iterator to represent it. 
The file, if found, is returned in the `it` argument.\n\npub fn find_file_memiter(cpio: &MemIter, filename: &MemIter) -> Option<MemIter> {\n\n let mut iter = cpio.clone();\n\n\n\n while let Some(result) = parse_cpio(&mut iter) {\n\n if unsafe { filename.iseq(result.name) } {\n\n return Some(unsafe { MemIter::from_raw(result.contents, result.size) });\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n\n/// Looks for a file in the given cpio archive. The file, if found, is returned in the `it`\n\n/// argument.\n\npub unsafe fn find_file(cpio: &MemIter, filename: *const u8) -> Option<MemIter> {\n\n let mut iter = cpio.clone();\n\n\n\n while let Some(result) = parse_cpio(&mut iter) {\n\n if strcmp(filename, result.name) == 0 {\n\n return Some(MemIter::from_raw(result.contents, result.size));\n\n }\n\n }\n\n\n\n None\n\n}\n", "file_path": "hfo2/src/cpio.rs", "rank": 54, "score": 130213.43772278284 }, { "content": "void arch_one_time_init(void);\n", "file_path": "inc/hf/arch/init.h", "rank": 55, "score": 130045.57950196616 }, { "content": " static Runnable mOkRunnable;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 56, "score": 129503.96256489094 }, { "content": " static Handler mUiHandler;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 57, "score": 129503.96256489094 }, { "content": " static Executor mUiExecutor;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 58, "score": 129503.96256489094 }, { "content": " static Runnable mBadRunnable;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 59, "score": 129503.96256489094 }, { "content": "type checks = {eradicate: bool; check_ret_type: check_return_type list}\n\n\n\n(** Typecheck an expression. *)\n\nlet rec typecheck_expr ~is_strict_mode find_canonical_duplicate visited checks tenv node instr_ref\n\n (curr_pdesc : Procdesc.t) typestate e tr_default loc : TypeState.range =\n\n match e with\n\n (* null literal or 0 *)\n\n | _ when Exp.is_null_literal e ->\n\n let typ, _ = tr_default in\n\n (* 0 is not the same thing as null. They are encoded as the same thing in SIL.\n\n We distinct them by type.\n\n *)\n\n if PatternMatch.type_is_class typ then\n\n (typ, InferredNullability.create (TypeOrigin.NullConst loc))\n\n else\n\n (* 0 const (this is not the same as null) *)\n\n (typ, InferredNullability.create (TypeOrigin.NonnullConst loc))\n\n | Exp.Const _ ->\n\n let typ, _ = tr_default in\n\n (* We already considered case of null literal above, so this is a non-null const. *)\n", "file_path": "experiments/ownership-inference/infer/infer/src/nullsafe/typeCheck.ml", "rank": 60, "score": 129336.60982732495 }, { "content": "bool arch_vm_mm_init(void)\n\n{\n\n\tstatic const int pa_bits_table[16] = {32, 36, 40, 42, 44, 48};\n\n\tuint64_t features = read_msr(id_aa64mmfr0_el1);\n\n\tint pa_bits = pa_bits_table[features & 0xf];\n\n\n\n\t/* Check that 4KB granules are supported. */\n\n\tif ((features >> 28) & 0xf) {\n\n\t\tdlog(\"4KB granules are not supported\\n\");\n\n\t\treturn false;\n\n\t}\n\n\n\n\t/* Check the physical address range. 
*/\n\n\tif (!pa_bits) {\n\n\t\tdlog(\"Unsupported value of id_aa64mmfr0_el1.PARange: %x\\n\",\n\n\t\t features & 0xf);\n\n\t\treturn false;\n\n\t}\n\n\n\n\t/*\n\n\t * 0 -> Device-nGnRnE memory\n\n\t * 0xff -> Normal memory, Inner/Outer Write-Back Non-transient,\n\n\t * Write-Alloc, Read-Alloc.\n\n\t */\n\n\tmm_mair_el1 = (0 << (8 * STAGE1_DEVICEINDX)) |\n\n\t\t (0xff << (8 * STAGE1_NORMALINDX));\n\n\n\n\tmm_tcr_el1 = (1 << 20) |\t\t/* TBI, top byte ignored. */\n\n\t\t ((features & 0xf) << 16) | /* PS. */\n\n\t\t (0 << 14) |\t\t/* TG0, granule size, 4KB. */\n\n\t\t (3 << 12) |\t\t/* SH0, inner shareable. */\n\n\t\t (1 << 10) | /* ORGN0, normal mem, WB RA WA Cacheable. */\n\n\t\t (1 << 8) | /* IRGN0, normal mem, WB RA WA Cacheable. */\n\n\t\t (25 << 0) | /* T0SZ, input address is 2^39 bytes. */\n\n\t\t 0;\n\n\n\n\tmm_sctlr_el1 = (1 << 0) | /* M, enable stage 1 EL2 MMU. */\n\n\t\t (1 << 1) | /* A, enable alignment check faults. */\n\n\t\t (1 << 2) | /* C, data cache enable. */\n\n\t\t (1 << 3) | /* SA, enable stack alignment check. */\n\n\t\t (3 << 4) | /* RES1 bits. */\n\n\t\t (1 << 11) | /* RES1 bit. */\n\n\t\t (1 << 12) | /* I, instruction cache enable. */\n\n\t\t (1 << 16) | /* RES1 bit. */\n\n\t\t (1 << 18) | /* RES1 bit. */\n\n\t\t (0 << 19) | /* WXN bit, writable execute never. */\n\n\t\t (3 << 22) | /* RES1 bits. */\n\n\t\t (3 << 28) | /* RES1 bits. */\n\n\t\t 0;\n\n\n\n\treturn true;\n", "file_path": "src/arch/aarch64/hftest/mm.c", "rank": 61, "score": 129097.48238707156 }, { "content": " static native void mayExcept() throws Exception;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/quandary/Exceptions.java", "rank": 62, "score": 128927.86340958661 }, { "content": "type checked_condition = {report_issue_type: report_issue_type; propagate: bool}\n\n\n", "file_path": "experiments/ownership-inference/infer/infer/src/bufferoverrun/bufferOverrunProofObligations.ml", "rank": 63, "score": 128923.64709209866 }, { "content": " static Executor mNonUiExecutor;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 64, "score": 128302.65795397547 }, { "content": " @ForUiThread static Executor mUiThreadExecutor = null;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 65, "score": 128302.65795397547 }, { "content": " static int static_data = 5;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/classloads/Static.java", "rank": 66, "score": 127641.82803786547 }, { "content": "module FileDiffTest = FileDiff.VISIBLE_FOR_TESTING_DO_NOT_USE_DIRECTLY\n\n\n\nlet u length = List.init ~f:(fun _ -> UnixDiffTest.Unchanged) length\n\n\n\nlet n length = List.init ~f:(fun _ -> UnixDiffTest.New) length\n\n\n\nlet o length = List.init ~f:(fun _ -> UnixDiffTest.Old) length\n\n\n\nlet test_parse_directives_with_valid_input =\n\n let create_test input expected _ =\n\n let found = FileDiffTest.parse_directives input in\n\n let pp_diff fmt (expected, actual) =\n\n let expected_str = Format.asprintf \"%a\" (Pp.seq ~sep:\", \" Format.pp_print_int) expected in\n\n let actual_str = Format.asprintf \"%a\" (Pp.seq ~sep:\", \" Format.pp_print_int) actual in\n\n Format.fprintf fmt \"Expected: '%s', found: '%s'\" expected_str actual_str\n\n in\n\n assert_equal ~cmp:(List.equal Int.equal) ~pp_diff expected found\n\n in\n\n [ (*\n\n === test1 ===\n", "file_path": 
"experiments/ownership-inference/infer/infer/src/unit/FileDiffTests.ml", "rank": 67, "score": 127556.69619858215 }, { "content": "class Const {\n\n public static void main(String args[]) {\n\n synchronized (ConstA.class) {\n\n }\n\n\n\n java.lang.Class<ConstB> b = ConstB.class;\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/classloads/Const.java", "rank": 68, "score": 127147.95048541715 }, { "content": "class Static {\n\n // this loads StaticA\n\n static StaticA s = new StaticA();\n\n\n\n public static void main(String args[]) {\n\n // this loads StaticD\n\n System.out.println(StaticD.static_data);\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/classloads/Static.java", "rank": 69, "score": 127134.76715726417 }, { "content": "alignas(PAGE_SIZE) char callstacks[MAX_CPUS][STACK_SIZE];\n", "file_path": "src/cpu.c", "rank": 70, "score": 126565.66361724382 }, { "content": " static StaticCNoLoad c = null;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/classloads/Static.java", "rank": 71, "score": 126395.21780120593 }, { "content": "public class File {\n\n\n\n public @Nullable File[] listFiles() {\n\n if (InferUndefined.boolean_undefined()) {\n\n return null;\n\n } else {\n\n return (File[]) InferUndefined.object_undefined();\n\n }\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/java/io/File.java", "rank": 72, "score": 126337.57392486732 }, { "content": " public void postBlockingCallToUIExecutorBad() {\n\n mUiExecutor.execute(mBadRunnable);\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 73, "score": 124828.84606325651 }, { "content": " public void postNoopCallToUIExecutorOk() {\n\n mUiExecutor.execute(mOkRunnable);\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 74, "score": 124828.84606325651 }, { "content": " public void postBlockingCallToUIHandlerBad() {\n\n mUiHandler.post(mBadRunnable);\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 75, "score": 124828.84606325651 }, { "content": " private final C c = new C();\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/racerd/ReadWriteRaces.java", "rank": 76, "score": 124301.55004611355 }, { "content": "bool arch_vm_mm_init(void);\n", "file_path": "src/arch/aarch64/inc/hf/arch/vm/mm.h", "rank": 77, "score": 124223.03113821904 }, { "content": " public Component callBuildSuffixWithoutRequiredBad() {\n\n return buildSuffix(mMyComponent.create());\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/litho-required-props/RequiredProps.java", "rank": 78, "score": 124183.06181261505 }, { "content": " public void postBlockingCallToNonUIExecutorOk() {\n\n mNonUiExecutor.execute(mBadRunnable);\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/starvation-whole-program/StaticInitAttributes.java", "rank": 79, "score": 123712.3366241773 }, { "content": " public MyTreeComponent buildWithoutOk() {\n\n return mMyTreeComponent.create().build();\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/litho-required-props/RequiredProps.java", "rank": 80, "score": 
123125.27646551948 }, { "content": " public MyComponent.Builder returnNullWithProp3(boolean b, MyComponent.Builder builder) {\n\n if (b) {\n\n builder.prop3(new Object());\n\n return null;\n\n }\n\n return builder;\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/litho-required-props/RequiredProps.java", "rank": 81, "score": 122483.83988188673 }, { "content": " public void buildPropInConditionalOk(boolean b) {\n\n MyComponent.Builder builder = mMyComponent.create();\n\n if (b) {\n\n builder.prop1(new Object()).prop3(new Object());\n\n } else {\n\n builder.prop1(new Object()).prop3(new Object());\n\n }\n\n builder.build();\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/litho-required-props/RequiredProps.java", "rank": 82, "score": 121298.28904066166 }, { "content": "module PerfProfilerDataMap = Caml.Map.Make (struct\n\n type t = Procname.t\n\n\n\n let compare = Procname.compare\n\nend)\n\n\n\nlet global_perf_profiler_data : Perf_profiler_t.perf_profiler_item PerfProfilerDataMap.t ref =\n\n ref PerfProfilerDataMap.empty\n\n\n\n\n\nlet split_class_method_name =\n\n let class_sep = String.Search_pattern.create \"::\" in\n\n fun qualified_method_name ->\n\n match String.Search_pattern.index class_sep ~in_:qualified_method_name with\n\n | Some class_sep_pos ->\n\n let class_name =\n\n String.sub qualified_method_name ~pos:0 ~len:class_sep_pos\n\n |> String.tr ~target:'/' ~replacement:'.'\n\n in\n\n let method_name =\n", "file_path": "experiments/ownership-inference/infer/infer/src/backend/ExternalPerfData.ml", "rank": 83, "score": 121280.28897205497 }, { "content": "class StaticCNoLoad {}\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/classloads/Static.java", "rank": 84, "score": 121275.6056017084 }, { "content": " public void buildPropMissingInConditionalBad(boolean b) {\n\n MyComponent.Builder builder = mMyComponent.create();\n\n if (b) {\n\n builder.prop1(new Object()).prop3(new Object());\n\n } else {\n\n builder.prop2(new Object()).prop1(new Object());\n\n }\n\n builder.build();\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/litho-required-props/RequiredProps.java", "rank": 85, "score": 120123.38989411914 }, { "content": " public void callReturnNullWithProp3_FP(boolean b) {\n\n MyComponent.Builder builder = mMyComponent.create();\n\n if (returnNullWithProp3(b, builder) == null) {\n\n builder.prop1(new Object()).build();\n\n }\n", "file_path": "experiments/ownership-inference/infer/infer/tests/codetoanalyze/java/litho-required-props/RequiredProps.java", "rank": 86, "score": 120108.42752438955 }, { "content": " \\ exclusive p90 = %f @\\n\\\n\n \\ inclusive p50 = %f @\\n\\\n\n \\ exclusive p50 = %f @\\n\\\n\n \\ inclusive p25 = %f @\\n\\\n\n \\ exclusive p25 = %f @\\n\"\n\n itm.function_name itm.count_trace_id itm.sum_inclusive_cpu_time itm.avg_inclusive_cpu_time_ms\n\n itm.sum_exclusive_cpu_time itm.avg_exclusive_cpu_time_ms itm.p90_inclusive_cpu_time_ms\n\n itm.p90_exclusive_cpu_time_ms itm.p50_inclusive_cpu_time_ms itm.p50_exclusive_cpu_time_ms\n\n itm.p25_inclusive_cpu_time_ms itm.p25_exclusive_cpu_time_ms\n\n\n\n\n\nlet _read_file_perf_data fname =\n\n let perf_profiler_data_str =\n\n match Utils.read_file fname with\n\n | Ok l ->\n\n List.map ~f:Perf_profiler_j.perf_profiler_of_string l\n\n | Error error ->\n\n L.user_error \"Failed to read file '%s': %s@.\" fname error ;\n\n []\n\n in\n", "file_path": 
"experiments/ownership-inference/infer/infer/src/backend/ExternalPerfData.ml", "rank": 87, "score": 119959.1378327903 }, { "content": " let do_item itm =\n\n pp_perf_profiler_item itm ;\n\n match split_class_method_name itm.Perf_profiler_t.function_name with\n\n | Some (classname, methodname) ->\n\n let procname = JProcname.make_void_signature_procname ~classname ~methodname in\n\n global_perf_profiler_data := PerfProfilerDataMap.add procname itm !global_perf_profiler_data\n\n | _ ->\n\n ()\n\n in\n\n List.iter ~f:(fun items -> List.iter ~f:do_item items) perf_profiler_data_str\n\n\n\n\n\nlet read_file_flag = ref false\n\n\n\nlet in_profiler_data_map key =\n\n match Config.perf_profiler_data_file with\n\n | Some fname ->\n\n if not !read_file_flag then (\n\n _read_file_perf_data fname ;\n\n read_file_flag := true ) ;\n\n if PerfProfilerDataMap.is_empty !global_perf_profiler_data then\n\n L.(debug Analysis Medium) \"@\\n\\n[Perf Profiler Log] WARNING: EMPTY PERF PROFILER DATA@\\n\" ;\n\n PerfProfilerDataMap.mem key !global_perf_profiler_data\n\n | _ ->\n\n false\n", "file_path": "experiments/ownership-inference/infer/infer/src/backend/ExternalPerfData.ml", "rank": 88, "score": 119952.68001439326 }, { "content": " let prefix_len = class_sep_pos + 2 in\n\n String.sub qualified_method_name ~pos:prefix_len\n\n ~len:(String.length qualified_method_name - prefix_len)\n\n in\n\n Some (class_name, method_name)\n\n | _ ->\n\n None\n\n\n\n\n\nlet pp_perf_profiler_item itm =\n\n let open Perf_profiler_t in\n\n L.(debug Analysis Medium)\n\n \"@\\n\\n\\\n\n \\ [Perf Profiler Log] Function: '%s' @\\n\\\n\n \\ count trace id = %i @\\n\\\n\n \\ sum inclusive cpu time = %f@\\n\\\n\n \\ avg inclusive time = %f @\\n\\\n\n \\ sum exclusive cpu time = %f @\\n\\\n\n \\ avg exclusive_time = %f @\\n\\\n\n \\ inclusive p90 = %f @\\n\\\n", "file_path": "experiments/ownership-inference/infer/infer/src/backend/ExternalPerfData.ml", "rank": 89, "score": 119943.99888062218 }, { "content": "(*\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * This source code is licensed under the MIT license found in the\n\n * LICENSE file in the root directory of this source tree.\n\n *)\n\nopen! 
IStd\n", "file_path": "experiments/ownership-inference/infer/infer/src/backend/ExternalPerfData.ml", "rank": 90, "score": 119943.90692137326 }, { "content": "/*\n\n * Copyright 2019 Sanguk Park\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::addr::*;\n\n\n\nextern \"C\" {\n\n pub fn layout_text_begin() -> paddr_t;\n", "file_path": "hfo2/src/layout.rs", "rank": 91, "score": 156.0661312863458 }, { "content": "/*\n\n * Copyright 2019 Sanguk Park\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::addr::*;\n\nuse crate::arch::*;\n\nuse crate::boot_params::*;\n\nuse crate::fdt::*;\n", "file_path": "hfo2/src/boot_flow.rs", "rank": 92, "score": 154.97390161270832 }, { "content": "package android.text;\n", "file_path": "experiments/ownership-inference/infer/infer/models/java/src/android/text/TextUtils.java", "rank": 93, "score": 154.02874252047314 }, { "content": "/*\n\n * Copyright 2019 Sanguk Park\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse core::convert::TryInto;\n\nuse core::fmt::{self, Write};\n\n\n\nuse crate::fdt::*;\n", "file_path": "hfo2/src/manifest.rs", "rank": 94, "score": 153.61738920616799 }, { "content": "/*\n\n * Copyright 2019 Sanguk Park.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse core::convert::TryFrom;\n\nuse core::mem;\n\n\n\nuse 
crate::mm::*;\n", "file_path": "hfo2/src/spci.rs", "rank": 95, "score": 152.9573613994957 }, { "content": "/*\n\n * Copyright 2019 Sanguk Park\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse core::convert::TryFrom;\n\n\n\nuse crate::types::*;\n\n\n", "file_path": "hfo2/src/abi.rs", "rank": 96, "score": 151.62368565746164 }, { "content": "/*\n\n * Copyright 2018 The Hafnium Authors.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n#include <gmock/gmock.h>\n\n\n\nextern \"C\" {\n\n#include \"hf/arch/mm.h\"\n", "file_path": "src/mm_test.cc", "rank": 97, "score": 151.01374717734342 }, { "content": "/*\n\n * Copyright 2019 Sanguk Park\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse crate::addr::*;\n\nuse crate::arch::*;\n\nuse crate::boot_flow::*;\n\nuse crate::fdt::*;\n", "file_path": "hfo2/src/boot_params.rs", "rank": 98, "score": 150.6547438967038 }, { "content": "/*\n\n * Copyright 2019 Sanguk Park\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * https://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse core::fmt;\n\n\n\nuse crate::arch::*;\n\nuse crate::types::*;\n", "file_path": "hfo2/src/addr.rs", "rank": 99, "score": 150.2927780534074 } ]
Rust
src/diff.rs
cysp/git2-rs
78759d028e815954bf750279472d890d14104c93
use std::kinds::marker;
use std::str;

use {raw, StatusEntry, Delta, Oid};

pub struct DiffDelta<'a> {
    raw: *mut raw::git_diff_delta,
    marker1: marker::ContravariantLifetime<'a>,
    marker2: marker::NoSend,
    marker3: marker::NoSync,
}

pub struct DiffFile<'a> {
    raw: *const raw::git_diff_file,
    marker1: marker::ContravariantLifetime<'a>,
    marker2: marker::NoSend,
    marker3: marker::NoSync,
}

impl<'a> DiffDelta<'a> {
    pub unsafe fn from_raw(_entry: &'a StatusEntry,
                           raw: *mut raw::git_diff_delta) -> DiffDelta<'a> {
        DiffDelta {
            raw: raw,
            marker1: marker::ContravariantLifetime,
            marker2: marker::NoSend,
            marker3: marker::NoSync,
        }
    }

    pub fn nfiles(&self) -> u16 {
        unsafe { (*self.raw).nfiles }
    }

    pub fn status(&self) -> Delta {
        match unsafe { (*self.raw).status } {
            raw::GIT_DELTA_UNMODIFIED => Delta::Unmodified,
            raw::GIT_DELTA_ADDED => Delta::Added,
            raw::GIT_DELTA_DELETED => Delta::Deleted,
            raw::GIT_DELTA_MODIFIED => Delta::Modified,
            raw::GIT_DELTA_RENAMED => Delta::Renamed,
            raw::GIT_DELTA_COPIED => Delta::Copied,
            raw::GIT_DELTA_IGNORED => Delta::Ignored,
            raw::GIT_DELTA_UNTRACKED => Delta::Untracked,
            raw::GIT_DELTA_TYPECHANGE => Delta::Typechange,
            raw::GIT_DELTA_UNREADABLE => Delta::Unreadable,
        }
    }

    pub fn old_file(&self) -> DiffFile {
        unsafe { DiffFile::from_raw(self, &(*self.raw).old_file) }
    }

    pub fn new_file(&self) -> DiffFile {
        unsafe { DiffFile::from_raw(self, &(*self.raw).new_file) }
    }
}

impl<'a> DiffFile<'a> {
    pub unsafe fn from_raw(_entry: &'a DiffDelta,
                           raw: *const raw::git_diff_file) -> DiffFile<'a> {
        DiffFile {
            raw: raw,
            marker1: marker::ContravariantLifetime,
            marker2: marker::NoSend,
            marker3: marker::NoSync,
        }
    }

    pub fn id(&self) -> Oid {
        unsafe { Oid::from_raw(&(*self.raw).id) }
    }

    pub fn path_bytes(&self) -> &[u8] {
        unsafe { ::opt_bytes(self, (*self.raw).path).unwrap() }
    }

    pub fn path(&self) -> Option<&str> {
        str::from_utf8(self.path_bytes())
    }

    pub fn size(&self) -> u64 {
        unsafe { (*self.raw).size as u64 }
    }
}
use std::kinds::marker;
use std::str;

use {raw, StatusEntry, Delta, Oid};

pub struct DiffDelta<'a> {
    raw: *mut raw::git_diff_delta,
    marker1: marker::ContravariantLifetime<'a>,
    marker2: marker::NoSend,
    marker3: marker::NoSync,
}

pub struct DiffFile<'a> {
    raw: *const raw::git_diff_file,
    marker1: marker::ContravariantLifetime<'a>,
    marker2: marker::NoSend,
    marker3: marker::NoSync,
}

impl<'a> DiffDelta<'a> {
    pub unsafe fn from_raw(_entry: &'a StatusEntry,
                           raw: *mut raw::git_diff_delta) -> DiffDelta<'a> {
        DiffDelta {
            raw: raw,
            marker1: marker::ContravariantLifetime,
            marker2: marker::NoSend,
            marker3: marker::NoSync,
        }
    }

    pub fn nfiles(&self) -> u16 {
        unsafe { (*self.raw).nfiles }
    }

    pub fn status(&self) -> Delta {
    }

    pub fn old_file(&self) -> DiffFile {
        unsafe { DiffFile::from_raw(self, &(*self.raw).old_file) }
    }

    pub fn new_file(&self) -> DiffFile {
        unsafe { DiffFile::from_raw(self, &(*self.raw).new_file) }
    }
}

impl<'a> DiffFile<'a> {
    pub unsafe fn from_raw(_entry: &'a DiffDelta,
                           raw: *const raw::git_diff_file) -> DiffFile<'a> {
        DiffFile {
            raw: raw,
            marker1: marker::ContravariantLifetime,
            marker2: marker::NoSend,
            marker3: marker::NoSync,
        }
    }

    pub fn id(&self) -> Oid {
        unsafe { Oid::from_raw(&(*self.raw).id) }
    }

    pub fn path_bytes(&self) -> &[u8] {
        unsafe { ::opt_bytes(self, (*self.raw).path).unwrap() }
    }

    pub fn path(&self) -> Option<&str> {
        str::from_utf8(self.path_bytes())
    }

    pub fn size(&self) -> u64 {
        unsafe { (*self.raw).size as u64 }
    }
}
        match unsafe { (*self.raw).status } {
            raw::GIT_DELTA_UNMODIFIED => Delta::Unmodified,
            raw::GIT_DELTA_ADDED => Delta::Added,
            raw::GIT_DELTA_DELETED => Delta::Deleted,
            raw::GIT_DELTA_MODIFIED => Delta::Modified,
            raw::GIT_DELTA_RENAMED => Delta::Renamed,
            raw::GIT_DELTA_COPIED => Delta::Copied,
            raw::GIT_DELTA_IGNORED => Delta::Ignored,
            raw::GIT_DELTA_UNTRACKED => Delta::Untracked,
            raw::GIT_DELTA_TYPECHANGE => Delta::Typechange,
            raw::GIT_DELTA_UNREADABLE => Delta::Unreadable,
        }
if_condition
[ { "content": "#[cfg(windows)]\n\npub fn openssl_init() {}\n\n\n\nextern {\n\n // threads\n\n pub fn git_libgit2_init() -> c_int;\n\n pub fn git_libgit2_shutdown();\n\n\n\n // repository\n\n pub fn git_repository_free(repo: *mut git_repository);\n\n pub fn git_repository_open(repo: *mut *mut git_repository,\n\n path: *const c_char) -> c_int;\n\n pub fn git_repository_init(repo: *mut *mut git_repository,\n\n path: *const c_char,\n\n is_bare: c_uint) -> c_int;\n\n pub fn git_repository_init_ext(out: *mut *mut git_repository,\n\n repo_path: *const c_char,\n\n opts: *mut git_repository_init_options)\n\n -> c_int;\n\n pub fn git_repository_init_init_options(opts: *mut git_repository_init_options,\n\n version: c_uint) -> c_int;\n", "file_path": "libgit2-sys/lib.rs", "rank": 0, "score": 81425.62017036541 }, { "content": "pub fn issue_14344_workaround() {\n\n libssh2::issue_14344_workaround();\n\n}\n", "file_path": "libgit2-sys/lib.rs", "rank": 1, "score": 81425.62017036541 }, { "content": "#[cfg(unix)]\n\npub fn openssl_init() {\n\n if !cfg!(target_os = \"linux\") && !cfg!(target_os = \"freebsd\") { return }\n\n\n\n // Currently, libgit2 leverages OpenSSL for SSL support when cloning\n\n // repositories over HTTPS. This means that we're picking up an OpenSSL\n\n // dependency on non-Windows platforms (where it has its own HTTPS\n\n // subsystem). As a result, we need to link to OpenSSL.\n\n //\n\n // Now actually *linking* to OpenSSL isn't so hard. We just need to make\n\n // sure to use pkg-config to discover any relevant system dependencies for\n\n // differences between distributions like CentOS and Ubuntu. The actual\n\n // trickiness comes about when we start *distributing* the resulting\n\n // binaries. Currently Cargo is distributed in binary form as nightlies,\n\n // which means we're distributing a binary with OpenSSL linked in.\n\n //\n\n // For historical reasons, the Linux nightly builder is running a CentOS\n\n // distribution in order to have as much ABI compatibility with other\n\n // distributions as possible. Sadly, however, this compatibility does not\n\n // extend to OpenSSL. Currently OpenSSL has two major versions, 0.9 and 1.0,\n\n // which are incompatible (many ABI differences). 
The CentOS builder we\n", "file_path": "libgit2-sys/lib.rs", "rank": 2, "score": 81425.62017036541 }, { "content": "fn run(cmd: &mut Command) {\n\n println!(\"running: {}\", cmd);\n\n assert!(cmd.stdout(InheritFd(1))\n\n .stderr(InheritFd(2))\n\n .status()\n\n .unwrap()\n\n .success());\n\n\n\n}\n\n\n", "file_path": "libgit2-sys/build.rs", "rank": 3, "score": 72960.43515904767 }, { "content": "pub fn repo_init() -> (TempDir, Repository) {\n\n let td = TempDir::new(\"test\").unwrap();\n\n let repo = Repository::init(td.path()).unwrap();\n\n {\n\n let mut config = repo.config().unwrap();\n\n config.set_str(\"user.name\", \"name\").unwrap();\n\n config.set_str(\"user.email\", \"email\").unwrap();\n\n let mut index = repo.index().unwrap();\n\n let id = index.write_tree().unwrap();\n\n\n\n let tree = repo.find_tree(id).unwrap();\n\n let sig = repo.signature().unwrap();\n\n repo.commit(Some(\"HEAD\"), &sig, &sig, \"initial\",\n\n &tree, &[]).unwrap();\n\n }\n\n (td, repo)\n\n}\n", "file_path": "src/test.rs", "rank": 4, "score": 69993.37975431065 }, { "content": "pub fn try(ret: libc::c_int) -> Result<libc::c_int, Error> {\n\n match ret {\n\n n if n < 0 => Err(last_error()),\n\n n => Ok(n),\n\n }\n\n}\n\n\n", "file_path": "src/call.rs", "rank": 5, "score": 56818.61784703123 }, { "content": "pub fn convert<T, U: Convert<T>>(u: &U) -> T { u.convert() }\n\n\n", "file_path": "src/call.rs", "rank": 6, "score": 56036.22163241113 }, { "content": "fn init() {\n\n static INIT: Once = ONCE_INIT;\n\n INIT.doit(|| unsafe {\n\n raw::openssl_init();\n\n let r = raw::git_libgit2_init();\n\n assert!(r >= 0,\n\n \"couldn't initialize the libgit2 library: {}\", r);\n\n rt::at_exit(|| {\n\n raw::git_libgit2_shutdown();\n\n });\n\n });\n\n}\n\n\n\nunsafe fn opt_bytes<'a, T>(_: &'a T,\n\n c: *const libc::c_char) -> Option<&'a [u8]> {\n\n if c.is_null() {\n\n None\n\n } else {\n\n let s = CString::new(c, false);\n\n Some(mem::transmute(s.as_bytes_no_nul()))\n", "file_path": "src/lib.rs", "rank": 7, "score": 47773.44043154829 }, { "content": "fn main() {\n\n register_dep(\"SSH2\");\n\n register_dep(\"OPENSSL\");\n\n\n\n let mut opts = pkg_config::default_options(\"libgit2\");\n\n opts.atleast_version = Some(\"0.21.0\".to_string());\n\n match pkg_config::find_library_opts(\"libgit2\", &opts) {\n\n Ok(()) => return,\n\n Err(..) => {}\n\n }\n\n\n\n let mut cflags = os::getenv(\"CFLAGS\").unwrap_or(String::new());\n\n let target = os::getenv(\"TARGET\").unwrap();\n\n let mingw = target.contains(\"windows-gnu\");\n\n cflags.push_str(\" -ffunction-sections -fdata-sections\");\n\n\n\n if target.contains(\"i686\") {\n\n cflags.push_str(\" -m32\");\n\n } else if target.as_slice().contains(\"x86_64\") {\n\n cflags.push_str(\" -m64\");\n", "file_path": "libgit2-sys/build.rs", "rank": 8, "score": 45951.76078571768 }, { "content": "#[test]\n\nfn smoke() {\n\n unsafe { git_threads_init(); }\n\n}\n\n\n", "file_path": "libgit2-sys/lib.rs", "rank": 9, "score": 45951.76078571768 }, { "content": "fn last_error() -> Error {\n\n // Apparently libgit2 isn't necessarily guaranteed to set the last error\n\n // whenever a function returns a negative value!\n\n Error::last_error().unwrap_or_else(|| {\n\n Error::from_str(\"an unknown error occurred\")\n\n })\n\n}\n\n\n\nmod impls {\n\n use std::c_str::CString;\n\n use libc;\n\n\n\n use {raw, ConfigLevel, ResetType, ObjectType, BranchType, Direction};\n\n use call::Convert;\n\n\n\n macro_rules! 
primitive( ($($p:ident)*) => (\n\n $(impl Convert<$p> for $p { fn convert(&self) -> $p { *self } })*\n\n ) )\n\n\n\n primitive!(i8 i16 i32 i64 int u8 u16 u32 u64 uint)\n", "file_path": "src/call.rs", "rank": 10, "score": 41786.15397850663 }, { "content": "#[doc(hidden)]\n\npub trait Convert<T> {\n\n fn convert(&self) -> T;\n\n}\n\n\n", "file_path": "src/call.rs", "rank": 11, "score": 40717.20968975941 }, { "content": "fn register_dep(dep: &str) {\n\n match os::getenv(format!(\"DEP_{}_ROOT\", dep).as_slice()) {\n\n Some(s) => {\n\n append(\"CMAKE_PREFIX_PATH\", Path::new(s.as_slice()));\n\n append(\"PKG_CONFIG_PATH\", Path::new(s.as_slice()).join(\"lib/pkgconfig\"));\n\n }\n\n None => {}\n\n }\n\n}\n\n\n", "file_path": "libgit2-sys/build.rs", "rank": 12, "score": 37211.69098912519 }, { "content": "use std::fmt;\n\nuse std::hash::{sip, Hash};\n\nuse libc;\n\n\n\nuse {raw, Error};\n\n\n\n/// Unique identity of any object (commit, tree, blob, tag).\n\n#[deriving(Copy)]\n\npub struct Oid {\n\n raw: raw::git_oid,\n\n}\n\n\n\nimpl Oid {\n\n /// Create a new Oid from a raw libgit2 oid structure.\n\n ///\n\n /// This function is unsafe as it does not know if the memory pointed to by\n\n /// `oid` is valid or not.\n\n pub unsafe fn from_raw(oid: *const raw::git_oid) -> Oid {\n\n Oid { raw: *oid }\n\n }\n", "file_path": "src/oid.rs", "rank": 13, "score": 35790.54011151351 }, { "content": " pub fn from_bytes(bytes: &[u8]) -> Result<Oid, Error> {\n\n ::init();\n\n let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n if bytes.len() != raw::GIT_OID_RAWSZ {\n\n Err(Error::from_str(\"raw byte array must be 20 bytes\"))\n\n } else {\n\n unsafe { raw::git_oid_fromraw(&mut raw, bytes.as_ptr()) }\n\n Ok(Oid { raw: raw })\n\n }\n\n }\n\n\n\n /// Gain access to the underlying raw oid pointer\n\n pub fn raw(&self) -> *const raw::git_oid { &self.raw as *const _ }\n\n\n\n /// View this OID as a byte-slice 20 bytes in length.\n\n pub fn as_bytes(&self) -> &[u8] { self.raw.id.as_slice() }\n\n}\n\n\n\nimpl fmt::Show for Oid {\n\n /// Hex-encode this Oid into a formatter.\n", "file_path": "src/oid.rs", "rank": 14, "score": 35787.8370620082 }, { "content": "\n\n /// Parse a hex-formatted object id into an Oid structure.\n\n ///\n\n /// If the string is not a valid 40-character hex string, an error is\n\n /// returned.\n\n pub fn from_str(s: &str) -> Result<Oid, Error> {\n\n ::init();\n\n let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n unsafe {\n\n try_call!(raw::git_oid_fromstrn(&mut raw,\n\n s.as_bytes().as_ptr()\n\n as *const libc::c_char,\n\n s.len() as libc::size_t));\n\n }\n\n Ok(Oid { raw: raw })\n\n }\n\n\n\n /// Parse a raw object id into an Oid structure.\n\n ///\n\n /// If the array given is not 20 bytes in length, an error is returned.\n", "file_path": "src/oid.rs", "rank": 15, "score": 35786.91398104217 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let mut dst = [0u8, ..raw::GIT_OID_HEXSZ + 1];\n\n unsafe {\n\n raw::git_oid_tostr(dst.as_mut_ptr() as *mut libc::c_char,\n\n dst.len() as libc::size_t, &self.raw);\n\n }\n\n f.write(dst.slice_to(dst.iter().position(|&a| a == 0).unwrap()))\n\n }\n\n}\n\n\n\nimpl PartialEq for Oid {\n\n fn eq(&self, other: &Oid) -> bool {\n\n unsafe { raw::git_oid_equal(&self.raw, &other.raw) != 0 }\n\n }\n\n}\n\nimpl Eq for Oid {}\n\n\n\nimpl PartialOrd for Oid {\n\n fn partial_cmp(&self, other: &Oid) -> Option<Ordering> {\n\n Some(self.cmp(other))\n", "file_path": "src/oid.rs", "rank": 16, "score": 35786.15352865042 }, 
{ "content": " }\n\n}\n\n\n\nimpl Ord for Oid {\n\n fn cmp(&self, other: &Oid) -> Ordering {\n\n match unsafe { raw::git_oid_cmp(&self.raw, &other.raw) } {\n\n 0 => Equal,\n\n n if n < 0 => Less,\n\n _ => Greater,\n\n }\n\n }\n\n}\n\n\n\nimpl Clone for Oid {\n\n fn clone(&self) -> Oid { *self }\n\n}\n\n\n\nimpl Hash for Oid {\n\n fn hash(&self, into: &mut sip::SipState) {\n\n self.raw.id.as_slice().hash(into)\n", "file_path": "src/oid.rs", "rank": 17, "score": 35784.85400295474 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Oid;\n\n\n\n #[test]\n\n fn conversions() {\n\n assert!(Oid::from_str(\"foo\").is_err());\n\n assert!(Oid::from_str(\"decbf2be529ab6557d5429922251e5ee36519817\").is_ok());\n\n assert!(Oid::from_bytes(b\"foo\").is_err());\n\n assert!(Oid::from_bytes(b\"00000000000000000000\").is_ok());\n\n }\n\n}\n", "file_path": "src/oid.rs", "rank": 18, "score": 35778.166832767114 }, { "content": "fn append(var: &str, val: Path) {\n\n let prefix = os::getenv(var).unwrap_or(String::new());\n\n let mut v = os::split_paths(prefix.as_slice());\n\n v.push(val);\n\n os::setenv(var, os::join_paths(v.as_slice()).unwrap());\n\n}\n", "file_path": "libgit2-sys/build.rs", "rank": 19, "score": 33700.441350098125 }, { "content": " pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_object) -> Object {\n\n Object::from_raw_ptr(raw)\n\n }\n\n\n\n /// Even more unsafe than `from_raw`, the output lifetime is not attached to\n\n /// any input.\n\n pub unsafe fn from_raw_ptr<'a>(raw: *mut raw::git_object) -> Object<'a> {\n\n Object {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Get the id (SHA1) of a repository object\n\n pub fn id(&self) -> Oid {\n\n unsafe {\n\n Oid::from_raw(raw::git_object_id(&*self.raw))\n", "file_path": "src/object.rs", "rank": 22, "score": 27.508016241601737 }, { "content": "use std::kinds::marker;\n\nuse std::str;\n\n\n\nuse {raw, Repository, Signature, Oid};\n\n\n\n/// A structure representing a [note][note] in git.\n\n///\n\n/// [note]: http://git-scm.com/blog/2010/08/25/notes.html\n\npub struct Note<'a> {\n\n raw: *mut raw::git_note,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\n/// An iterator over all of the notes within a repository.\n\npub struct Notes<'a> {\n\n raw: *mut raw::git_note_iterator,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n", "file_path": "src/note.rs", "rank": 23, "score": 27.430337156358235 }, { "content": "use std::str;\n\nuse std::kinds::marker;\n\n\n\nuse {raw, Oid, Repository, Error};\n\n\n\n/// A structure to represent a git [submodule][1]\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Tools-Submodules\n\npub struct Submodule<'a> {\n\n raw: *mut raw::git_submodule,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\nimpl<'a> Submodule<'a> {\n\n /// Create a new object from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n", "file_path": "src/submodule.rs", "rank": 24, "score": 27.220362452056467 }, { "content": "use std::kinds::marker;\n\nuse std::mem;\n\nuse std::raw as stdraw;\n\n\n\nuse {raw, Oid, Repository};\n\n\n\n/// A structure to represent a git [blob][1]\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects\n\npub struct Blob<'a> {\n\n raw: *mut 
raw::git_blob,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\nimpl<'a> Blob<'a> {\n\n /// Create a new object from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n", "file_path": "src/blob.rs", "rank": 25, "score": 27.205311030624955 }, { "content": " /// pointer.\n\n pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_blob) -> Blob {\n\n Blob {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Get the id (SHA1) of a repository blob\n\n pub fn id(&self) -> Oid {\n\n unsafe { Oid::from_raw(raw::git_blob_id(&*self.raw)) }\n\n }\n\n\n\n /// Get access to the underlying raw pointer.\n\n pub fn raw(&self) -> *mut raw::git_blob { self.raw }\n\n\n\n /// Determine if the blob content is most certainly binary or not.\n", "file_path": "src/blob.rs", "rank": 26, "score": 26.907096854871362 }, { "content": "use std::kinds::marker;\n\nuse std::mem;\n\n\n\nuse {raw, Oid, Repository, ObjectType, Error, Buf};\n\n\n\n/// A structure to represent a git [object][1]\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects\n\npub struct Object<'a> {\n\n raw: *mut raw::git_object,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\nimpl<'a> Object<'a> {\n\n /// Create a new object from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n", "file_path": "src/object.rs", "rank": 27, "score": 26.754400891606526 }, { "content": " pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_note_iterator) -> Notes {\n\n Notes {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator<(Oid, Oid)> for Notes<'a> {\n\n fn next(&mut self) -> Option<(Oid, Oid)> {\n\n let mut note_id = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n let mut annotated_id = note_id;\n\n unsafe {\n\n match raw::git_note_next(&mut note_id, &mut annotated_id, self.raw) {\n\n 0 => Some((Oid::from_raw(&note_id), Oid::from_raw(&annotated_id))),\n\n _ => None,\n\n }\n", "file_path": "src/note.rs", "rank": 28, "score": 26.536184442247094 }, { "content": "use std::kinds::marker;\n\nuse std::str;\n\n\n\nuse {raw, Error, Oid, Repository, Object, Signature, ObjectType};\n\n\n\n/// A structure to represent a git [tag][1]\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Basics-Tagging\n\npub struct Tag<'a> {\n\n raw: *mut raw::git_tag,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\nimpl<'a> Tag<'a> {\n\n /// Create a new tag from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n", "file_path": "src/tag.rs", "rank": 29, "score": 26.45342062180728 }, { "content": " pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_tag) -> Tag {\n\n Tag {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Get the id (SHA1) of a repository tag\n\n pub fn id(&self) -> Oid {\n\n unsafe { Oid::from_raw(raw::git_tag_id(&*self.raw)) }\n\n }\n\n\n\n /// Get the message of a tag\n\n ///\n\n /// Returns None if there is no message or if it is not valid 
utf8\n\n pub fn message(&self) -> Option<&str> {\n\n self.message_bytes().and_then(str::from_utf8)\n", "file_path": "src/tag.rs", "rank": 30, "score": 26.109477465696372 }, { "content": " commit: &'a Commit<'b>,\n\n}\n\n\n\nimpl<'a> Commit<'a> {\n\n /// Create a new object from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n\n pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_commit) -> Commit {\n\n Commit {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Get the id (SHA1) of a repository commit\n\n pub fn id(&self) -> Oid {\n", "file_path": "src/commit.rs", "rank": 31, "score": 26.037166346160234 }, { "content": "use std::kinds::marker;\n\nuse std::str;\n\nuse libc;\n\n\n\nuse {raw, Repository, Error, Oid, Signature};\n\n\n\n/// A structure to represent a git [reference][1].\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Internals-Git-References\n\npub struct Reference<'a> {\n\n raw: *mut raw::git_reference,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\n/// An iterator over the references in a repository.\n\npub struct References<'a> {\n\n repo: &'a Repository,\n\n raw: *mut raw::git_reference_iterator,\n", "file_path": "src/reference.rs", "rank": 32, "score": 25.94581462445569 }, { "content": " raw: *mut raw::git_tree_entry,\n\n owned: bool,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\nimpl<'a> Tree<'a> {\n\n /// Create a new object from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n\n pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_tree) -> Tree {\n\n Tree {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n", "file_path": "src/tree.rs", "rank": 33, "score": 25.30129486398331 }, { "content": "use std::kinds::marker;\n\nuse std::str;\n\nuse libc;\n\n\n\nuse {raw, Oid, Repository, Error, Signature, Tree};\n\n\n\n/// A structure to represent a git [commit][1]\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects\n\npub struct Commit<'a> {\n\n raw: *mut raw::git_commit,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\n/// An iterator over the parent commits of a commit.\n\npub struct Parents<'a, 'b:'a> {\n\n cur: uint,\n\n max: uint,\n", "file_path": "src/commit.rs", "rank": 34, "score": 24.886169977098355 }, { "content": "use std::kinds::marker;\n\nuse std::str;\n\nuse std::io;\n\nuse libc;\n\n\n\nuse {raw, Oid, Repository, Error, Object, ObjectType};\n\n\n\n/// A structure to represent a git [tree][1]\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects\n\npub struct Tree<'a> {\n\n raw: *mut raw::git_tree,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\n/// A structure representing an entry inside of a tree. 
An entry is borrowed\n\n/// from a tree.\n\npub struct TreeEntry<'a> {\n", "file_path": "src/tree.rs", "rank": 35, "score": 24.453165924639617 }, { "content": " marker3: marker::NoSync,\n\n}\n\n\n\nimpl<'a> Note<'a> {\n\n /// Create a new note from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n\n pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_note) -> Note {\n\n Note {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Get the note author\n\n pub fn author(&self) -> Signature {\n", "file_path": "src/note.rs", "rank": 37, "score": 24.054460130130085 }, { "content": " unsafe {\n\n try_call!(raw::git_object_peel(&mut raw, &*self.raw(), kind));\n\n }\n\n Ok(Object {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n })\n\n }\n\n\n\n /// Get a short abbreviated OID string for the object\n\n ///\n\n /// This starts at the \"core.abbrev\" length (default 7 characters) and\n\n /// iteratively extends to a longer string if that length is ambiguous. The\n\n /// result will be unambiguous (at least until new objects are added to the\n\n /// repository).\n\n pub fn short_id(&self) -> Result<Buf, Error> {\n\n unsafe {\n\n let mut raw: raw::git_buf = mem::zeroed();\n", "file_path": "src/object.rs", "rank": 38, "score": 23.86639358920224 }, { "content": "}\n\n\n\n/// An iterator over the refspecs that a remote contains.\n\npub struct Refspecs<'a, 'b: 'a> {\n\n cur: uint,\n\n cnt: uint,\n\n remote: &'a Remote<'a, 'b>,\n\n}\n\n\n\nimpl<'a, 'b> Remote<'a, 'b> {\n\n /// Creates a new remote from its raw pointer.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is valid or\n\n /// that no other remote is using it.\n\n pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_remote) -> Remote {\n\n Remote {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n", "file_path": "src/remote.rs", "rank": 39, "score": 22.84635152465472 }, { "content": " pub reference: String,\n\n /// If `None`, the reference was updated successfully, otherwise a message\n\n /// explaining why it could not be updated is provided.\n\n pub message: Option<String>,\n\n}\n\n\n\nimpl<'a> Push<'a> {\n\n /// Create a new push from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n\n pub unsafe fn from_raw<'a>(_remote: &'a Remote,\n\n raw: *mut raw::git_push) -> Push<'a> {\n\n Push {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n", "file_path": "src/push.rs", "rank": 40, "score": 22.592225139190134 }, { "content": " -> TreeEntry<'a> {\n\n TreeEntry {\n\n raw: raw as *mut raw::git_tree_entry,\n\n owned: false,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Create a new tree entry from the raw pointer provided.\n\n ///\n\n /// This will consume ownership of the pointer and free it when the entry is\n\n /// dropped.\n\n pub unsafe fn from_raw(raw: *mut raw::git_tree_entry) -> TreeEntry<'static> {\n\n TreeEntry {\n\n raw: raw,\n\n owned: true,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n", "file_path": "src/tree.rs", "rank": 41, "score": 22.480230534368186 }, { 
"content": "use std::kinds::marker;\n\nuse std::str;\n\nuse libc;\n\n\n\nuse {raw, Error, ConfigLevel, Buf};\n\n\n\n/// A structure representing a git configuration key/value store\n\npub struct Config {\n\n raw: *mut raw::git_config,\n\n marker: marker::NoSync,\n\n}\n\n\n\n/// A struct representing a certain entry owned by a `Config` instance.\n\n///\n\n/// An entry has a name, a value, and a level it applies to.\n\npub struct ConfigEntry<'a> {\n\n raw: *const raw::git_config_entry,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n", "file_path": "src/config.rs", "rank": 42, "score": 22.14814792307915 }, { "content": " pub unsafe fn from_raw(_repo: &Repository,\n\n raw: *mut raw::git_submodule) -> Submodule {\n\n Submodule {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Get the submodule's branch.\n\n ///\n\n /// Returns `None` if the branch is not valid utf-8 or if the branch is not\n\n /// yet available.\n\n pub fn branch(&self) -> Option<&str> {\n\n self.branch_bytes().and_then(str::from_utf8)\n\n }\n\n\n\n /// Get the branch for the submodule.\n\n ///\n", "file_path": "src/submodule.rs", "rank": 43, "score": 21.931130159878073 }, { "content": "use std::kinds::marker;\n\nuse std::c_str::CString;\n\nuse libc;\n\n\n\nuse {raw, Remote, Error, Signature};\n\n\n\n/// A structure to represent a pending push operation to a remote.\n\n///\n\n/// Remotes can create a `Push` which is then used to push data to the upstream\n\n/// repository.\n\npub struct Push<'a> {\n\n raw: *mut raw::git_push,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\n/// A status representing the result of updating a remote reference.\n\npub struct PushStatus {\n\n /// The reference that was updated as part of a push.\n", "file_path": "src/push.rs", "rank": 44, "score": 21.894458761306367 }, { "content": " Reference {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Ensure the reference name is well-formed.\n\n pub fn is_valid_name(refname: &str) -> bool {\n\n ::init();\n\n let refname = refname.to_c_str();\n\n unsafe { raw::git_reference_is_valid_name(refname.as_ptr()) == 1 }\n\n }\n\n\n\n /// Get access to the underlying raw pointer.\n\n pub fn raw(&self) -> *mut raw::git_reference { self.raw }\n\n\n\n /// Delete an existing reference.\n\n ///\n", "file_path": "src/reference.rs", "rank": 45, "score": 21.50083405023378 }, { "content": " fn next(&mut self) -> Option<Commit<'a>> {\n\n if self.cur == self.max { return None }\n\n self.cur += 1;\n\n let mut raw = 0 as *mut raw::git_commit;\n\n assert_eq!(unsafe {\n\n raw::git_commit_parent(&mut raw, &*self.commit.raw,\n\n (self.cur - 1) as libc::c_uint)\n\n }, 0);\n\n Some(Commit {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n })\n\n }\n\n}\n\n\n\n#[unsafe_destructor]\n\nimpl<'a> Drop for Commit<'a> {\n\n fn drop(&mut self) {\n", "file_path": "src/commit.rs", "rank": 46, "score": 21.28956647058599 }, { "content": "use std::c_str::CString;\n\nuse std::kinds::marker;\n\nuse std::str;\n\nuse libc;\n\n\n\nuse {raw, Repository, Direction, Error, Refspec};\n\nuse {Signature, Push, RemoteCallbacks};\n\n\n\n/// A structure representing a [remote][1] of a git repository.\n\n///\n\n/// [1]: 
http://git-scm.com/book/en/Git-Basics-Working-with-Remotes\n\n///\n\n/// The lifetime is the lifetime of the repository that it is attached to. The\n\n/// remote is used to manage fetches and pushes as well as refspecs.\n\npub struct Remote<'a, 'b> {\n\n raw: *mut raw::git_remote,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n callbacks: Option<&'b mut RemoteCallbacks<'b>>,\n", "file_path": "src/remote.rs", "rank": 47, "score": 21.013890004475147 }, { "content": " Ok(Reference {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n })\n\n }\n\n\n\n /// Rename an existing reference.\n\n ///\n\n /// This method works for both direct and symbolic references.\n\n ///\n\n /// If the force flag is not enabled, and there's already a reference with\n\n /// the given name, the renaming will fail.\n\n pub fn rename(&mut self, new_name: &str, force: bool,\n\n sig: Option<&Signature>,\n\n msg: &str) -> Result<Reference<'a>, Error> {\n\n let mut raw = 0 as *mut raw::git_reference;\n\n unsafe {\n\n try_call!(raw::git_reference_rename(&mut raw, self.raw,\n", "file_path": "src/reference.rs", "rank": 49, "score": 20.896362603812772 }, { "content": " marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n callbacks: None,\n\n }\n\n }\n\n}\n\n\n\n#[unsafe_destructor]\n\nimpl<'a, 'b> Drop for Remote<'a, 'b> {\n\n fn drop(&mut self) {\n\n unsafe { raw::git_remote_free(self.raw) }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io::TempDir;\n\n use std::cell::Cell;\n\n use url::Url;\n\n use {Repository, Remote, RemoteCallbacks, Direction};\n", "file_path": "src/remote.rs", "rank": 50, "score": 20.80049650917345 }, { "content": " try_call!(raw::git_object_short_id(&mut raw, &*self.raw()));\n\n Ok(Buf::from_raw(raw))\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Clone for Object<'a> {\n\n fn clone(&self) -> Object<'a> {\n\n let mut raw = 0 as *mut raw::git_object;\n\n let rc = unsafe { raw::git_object_dup(&mut raw, self.raw) };\n\n assert_eq!(rc, 0);\n\n Object {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n}\n\n\n\n#[unsafe_destructor]\n\nimpl<'a> Drop for Object<'a> {\n\n fn drop(&mut self) {\n\n unsafe { raw::git_object_free(self.raw) }\n\n }\n\n}\n", "file_path": "src/object.rs", "rank": 51, "score": 20.79857207915386 }, { "content": " /// file lock.\n\n pub fn write(&mut self) -> Result<(), Error> {\n\n unsafe { try_call!(raw::git_index_write(self.raw)); }\n\n Ok(())\n\n }\n\n\n\n /// Write the index as a tree.\n\n ///\n\n /// This method will scan the index and write a representation of its\n\n /// current state back to disk; it recursively creates tree objects for each\n\n /// of the subtrees stored in the index, but only returns the OID of the\n\n /// root tree. This is the OID that can be used e.g. 
to create a commit.\n\n ///\n\n /// The index instance cannot be bare, and needs to be associated to an\n\n /// existing repository.\n\n ///\n\n /// The index must not contain any file in conflict.\n\n pub fn write_tree(&mut self) -> Result<Oid, Error> {\n\n let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n unsafe {\n", "file_path": "src/index.rs", "rank": 52, "score": 20.710919264864124 }, { "content": " ///\n\n /// This is unsafe as the `raw` pointer is not guaranteed to be valid.\n\n pub unsafe fn from_raw<'a>(_repo: &'a Remote,\n\n raw: *const raw::git_refspec) -> Refspec<'a> {\n\n Refspec {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n marker4: marker::NoCopy,\n\n }\n\n }\n\n\n\n /// Get the refspec's direction.\n\n pub fn direction(&self) -> Direction {\n\n match unsafe { raw::git_refspec_direction(self.raw) } {\n\n raw::GIT_DIRECTION_FETCH => Direction::Fetch,\n\n raw::GIT_DIRECTION_PUSH => Direction::Push,\n\n }\n\n }\n", "file_path": "src/refspec.rs", "rank": 53, "score": 20.13274439380168 }, { "content": "\n\nimpl<'a> StatusEntry<'a> {\n\n /// Create a new status entry from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n\n pub unsafe fn from_raw(_statuses: &'a Statuses,\n\n raw: *const raw::git_status_entry) -> StatusEntry<'a> {\n\n StatusEntry {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Access the bytes for this entry's corresponding pathname\n\n pub fn path_bytes(&self) -> &[u8] {\n\n unsafe {\n\n if (*self.raw).head_to_index.is_null() {\n", "file_path": "src/status.rs", "rank": 54, "score": 20.114510107026124 }, { "content": " pub dev: uint,\n\n pub ino: uint,\n\n pub mode: uint,\n\n pub uid: uint,\n\n pub gid: uint,\n\n pub file_size: u64,\n\n pub id: Oid,\n\n pub flags: u16,\n\n pub flags_extended: u16,\n\n pub path: CString,\n\n}\n\n\n\nimpl Index {\n\n /// Creates a new in-memory index.\n\n ///\n\n /// This index object cannot be read/written to the filesystem, but may be\n\n /// used to perform in-memory index operations.\n\n pub fn new() -> Result<Index, Error> {\n\n ::init();\n\n let mut raw = 0 as *mut raw::git_index;\n", "file_path": "src/index.rs", "rank": 55, "score": 19.844936031699973 }, { "content": " ///\n\n /// The id specified is the Oid of the git object to read the note from.\n\n pub fn find_note(&self, notes_ref: Option<&str>, id: Oid)\n\n -> Result<Note, Error> {\n\n let mut ret = 0 as *mut raw::git_note;\n\n unsafe {\n\n try_call!(raw::git_note_read(&mut ret, self.raw,\n\n notes_ref.map(|s| s.to_c_str()),\n\n &*id.raw()));\n\n Ok(Note::from_raw(self, ret))\n\n }\n\n }\n\n\n\n /// Remove the note for an object.\n\n ///\n\n /// The `notes_ref` argument is the canonical name of the reference to use,\n\n /// defaulting to \"refs/notes/commits\".\n\n ///\n\n /// The id specified is the Oid of the git object to remove the note from.\n\n pub fn note_delete(&self,\n", "file_path": "src/repo.rs", "rank": 56, "score": 19.752102689783246 }, { "content": " marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Gain access to the underlying raw pointer for this tree entry.\n\n pub fn raw(&self) -> *mut raw::git_tree_entry { self.raw }\n\n\n\n /// Get the id of the object pointed by the entry\n\n pub fn id(&self) -> Oid {\n\n unsafe { Oid::from_raw(raw::git_tree_entry_id(&*self.raw)) }\n\n }\n\n\n\n /// Get the 
filename of a tree entry\n\n ///\n\n /// Returns `None` if the name is not valid utf-8\n\n pub fn name(&self) -> Option<&str> {\n\n str::from_utf8(self.name_bytes())\n\n }\n\n\n\n /// Get the filename of a tree entry\n", "file_path": "src/tree.rs", "rank": 57, "score": 19.533106093317247 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n /// Add a note for an object\n\n ///\n\n /// The `notes_ref` argument is the canonical name of the reference to use,\n\n /// defaulting to \"refs/notes/commits\". If `force` is specified then\n\n /// previous notes are overwritten.\n\n pub fn note(&self,\n\n author: &Signature,\n\n committer: &Signature,\n\n notes_ref: Option<&str>,\n\n oid: Oid,\n\n note: &str,\n\n force: bool) -> Result<Oid, Error> {\n\n let mut ret = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n unsafe {\n\n try_call!(raw::git_note_create(&mut ret,\n\n self.raw,\n", "file_path": "src/repo.rs", "rank": 58, "score": 19.529312049849434 }, { "content": " }\n\n }\n\n }\n\n\n\n /// Get the tagged object of a tag\n\n ///\n\n /// This method performs a repository lookup for the given object and\n\n /// returns it\n\n pub fn target(&self) -> Result<Object<'a>, Error> {\n\n let mut ret = 0 as *mut raw::git_object;\n\n unsafe {\n\n try_call!(raw::git_tag_target(&mut ret, &*self.raw));\n\n Ok(Object::from_raw_ptr(ret))\n\n }\n\n }\n\n\n\n /// Get the OID of the tagged object of a tag\n\n pub fn target_id(&self) -> Oid {\n\n unsafe { Oid::from_raw(raw::git_tag_target_id(&*self.raw)) }\n\n }\n", "file_path": "src/tag.rs", "rank": 59, "score": 19.345433471959094 }, { "content": "\n\n /// Read a file from the filesystem and write its content to the Object\n\n /// Database as a loose blob\n\n ///\n\n /// The Oid returned can in turn be passed to `find_blob` to get a handle to\n\n /// the blob.\n\n pub fn blob_path(&self, path: &Path) -> Result<Oid, Error> {\n\n let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n unsafe {\n\n try_call!(raw::git_blob_create_fromdisk(&mut raw, self.raw(),\n\n path.to_c_str()));\n\n Ok(Oid::from_raw(&raw))\n\n }\n\n }\n\n\n\n /// Lookup a reference to one of the objects in a repository.\n\n pub fn find_blob(&self, oid: Oid) -> Result<Blob, Error> {\n\n let mut raw = 0 as *mut raw::git_blob;\n\n unsafe {\n\n try_call!(raw::git_blob_lookup(&mut raw, self.raw(), oid.raw()));\n", "file_path": "src/repo.rs", "rank": 60, "score": 19.30564708883813 }, { "content": "use std::kinds::marker;\n\nuse std::str;\n\n\n\nuse {raw, Remote, Direction} ;\n\n\n\n/// A structure to represent a git [refspec][1].\n\n///\n\n/// Refspecs are currently mainly accessed/created through a `Remote`.\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Internals-The-Refspec\n\npub struct Refspec<'a> {\n\n raw: *const raw::git_refspec,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n marker4: marker::NoCopy,\n\n}\n\n\n\nimpl<'a> Refspec<'a> {\n\n /// Creates a new refspec from the raw components.\n", "file_path": "src/refspec.rs", "rank": 61, "score": 19.022486726500198 }, { "content": " ///\n\n /// Given either the submodule name or path (they are usually the same),\n\n /// this returns a structure describing the submodule.\n\n pub fn find_submodule(&self, name: &str) -> Result<Submodule, Error> {\n\n let mut raw = 0 as *mut raw::git_submodule;\n\n unsafe {\n\n try_call!(raw::git_submodule_lookup(&mut raw, self.raw(),\n\n name.to_c_str()));\n\n Ok(Submodule::from_raw(self, raw))\n\n }\n\n }\n\n\n\n /// Lookup a reference to one of 
the objects in a repository.\n\n pub fn find_tree(&self, oid: Oid) -> Result<Tree, Error> {\n\n let mut raw = 0 as *mut raw::git_tree;\n\n unsafe {\n\n try_call!(raw::git_tree_lookup(&mut raw, self.raw(), oid.raw()));\n\n Ok(Tree::from_raw(self, raw))\n\n }\n\n }\n", "file_path": "src/repo.rs", "rank": 62, "score": 19.01354782163619 }, { "content": " Statuses {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Gets a status entry from this list at the specified index.\n\n ///\n\n /// Returns `None` if the index is out of bounds.\n\n pub fn get(&self, index: uint) -> Option<StatusEntry> {\n\n unsafe {\n\n let p = raw::git_status_byindex(self.raw, index as size_t);\n\n if p.is_null() {\n\n None\n\n } else {\n\n Some(StatusEntry::from_raw(self, p))\n\n }\n\n }\n", "file_path": "src/status.rs", "rank": 63, "score": 18.95202227887291 }, { "content": " ///\n\n /// The `notes_ref` argument is the canonical name of the reference to use,\n\n /// defaulting to \"refs/notes/commits\".\n\n ///\n\n /// The iterator returned yields pairs of (Oid, Oid) where the first element\n\n /// is the id of the note and the second id is the id the note is\n\n /// annotating.\n\n pub fn notes(&self, notes_ref: Option<&str>) -> Result<Notes, Error> {\n\n let mut ret = 0 as *mut raw::git_note_iterator;\n\n unsafe {\n\n try_call!(raw::git_note_iterator_new(&mut ret, self.raw,\n\n notes_ref.map(|s| s.to_c_str())));\n\n Ok(Notes::from_raw(self, ret))\n\n }\n\n }\n\n\n\n /// Read the note for an object.\n\n ///\n\n /// The `notes_ref` argument is the canonical name of the reference to use,\n\n /// defaulting to \"refs/notes/commits\".\n", "file_path": "src/repo.rs", "rank": 64, "score": 18.538580482260915 }, { "content": " -> ConfigEntry {\n\n ConfigEntry {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n }\n\n }\n\n\n\n /// Gets the name of this entry.\n\n ///\n\n /// May return `None` if the name is not valid utf-8\n\n pub fn name(&self) -> Option<&str> { str::from_utf8(self.name_bytes()) }\n\n\n\n /// Gets the name of this entry as a byte slice.\n\n pub fn name_bytes(&self) -> &[u8] {\n\n unsafe { ::opt_bytes(self, (*self.raw).name).unwrap() }\n\n }\n\n\n\n /// Gets the value of this entry.\n", "file_path": "src/config.rs", "rank": 65, "score": 18.500962117309374 }, { "content": " }\n\n\n\n /// Get the id (SHA1) of a repository object\n\n pub fn id(&self) -> Oid {\n\n unsafe { Oid::from_raw(raw::git_tree_id(&*self.raw)) }\n\n }\n\n\n\n /// Get access to the underlying raw pointer.\n\n pub fn raw(&self) -> *mut raw::git_tree { self.raw }\n\n\n\n /// Get the number of entries listed in this tree.\n\n pub fn len(&self) -> uint {\n\n unsafe { raw::git_tree_entrycount(&*self.raw) as uint }\n\n }\n\n\n\n /// Lookup a tree entry by SHA value.\n\n pub fn get_id(&self, id: Oid) -> Option<TreeEntry> {\n\n unsafe {\n\n let ptr = raw::git_tree_entry_byid(&*self.raw(), &*id.raw());\n\n if ptr.is_null() {\n", "file_path": "src/tree.rs", "rank": 66, "score": 18.40256364813664 }, { "content": " marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n\n\n\n/// An iterator over the statuses in a `Statuses` instance.\n\npub struct StatusIter<'a> {\n\n statuses: &'a Statuses<'a>,\n\n range: Range<uint>,\n\n}\n\n\n\n/// A structure representing an entry in the `Statuses` structure.\n\n///\n\n/// Instances are created through the 
`.iter()` method or the `.get()` method.\n\npub struct StatusEntry<'a> {\n\n raw: *const raw::git_status_entry,\n\n marker1: marker::ContravariantLifetime<'a>,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n}\n", "file_path": "src/status.rs", "rank": 67, "score": 18.349035804792642 }, { "content": " -> ConfigEntries {\n\n ConfigEntries { raw: raw, _config: config, marker: marker::NoSync }\n\n }\n\n}\n\n\n\n// entries are only valid until the iterator is freed, so this impl is for\n\n// `&'b T` instead of `T` to have a lifetime to tie them to.\n\n//\n\n// It's also not implemented for `&'b mut T` so we can have multiple entries\n\n// (ok).\n\nimpl<'a, 'b> Iterator<ConfigEntry<'b>> for &'b ConfigEntries<'a> {\n\n fn next(&mut self) -> Option<ConfigEntry<'b>> {\n\n let mut raw = 0 as *mut raw::git_config_entry;\n\n unsafe {\n\n if raw::git_config_next(&mut raw, self.raw) == 0 {\n\n Some(ConfigEntry {\n\n raw: raw as *const raw::git_config_entry,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n", "file_path": "src/config.rs", "rank": 68, "score": 18.33149525130811 }, { "content": " unsafe {\n\n try_call!(raw::git_reference_lookup(&mut raw, self.raw(),\n\n name.to_c_str()));\n\n Ok(Reference::from_raw(self, raw))\n\n }\n\n }\n\n\n\n /// Lookup a reference by name and resolve immediately to OID.\n\n ///\n\n /// This function provides a quick way to resolve a reference name straight\n\n /// through to the object id that it refers to. This avoids having to\n\n /// allocate or free any `Reference` objects for simple situations.\n\n pub fn refname_to_id(&self, name: &str) -> Result<Oid, Error> {\n\n let mut ret: raw::git_oid = unsafe { mem::zeroed() };\n\n unsafe {\n\n try_call!(raw::git_reference_name_to_id(&mut ret, self.raw(),\n\n name.to_c_str()));\n\n Ok(Oid::from_raw(&ret))\n\n }\n\n }\n", "file_path": "src/repo.rs", "rank": 69, "score": 18.266155615687452 }, { "content": " }\n\n\n\n /// Get the note object's id\n\n pub fn id(&self) -> Oid {\n\n unsafe { Oid::from_raw(raw::git_note_id(&*self.raw)) }\n\n }\n\n}\n\n\n\n#[unsafe_destructor]\n\nimpl<'a> Drop for Note<'a> {\n\n fn drop(&mut self) {\n\n unsafe { raw::git_note_free(self.raw); }\n\n }\n\n}\n\n\n\nimpl<'a> Notes<'a> {\n\n /// Create a new note iterator from its raw component.\n\n ///\n\n /// This method is unsafe as there is no guarantee that `raw` is a valid\n\n /// pointer.\n", "file_path": "src/note.rs", "rank": 70, "score": 18.21460555322833 }, { "content": " unsafe {\n\n try_call!(raw::git_repository_config(&mut raw, self.raw()));\n\n Ok(Config::from_raw(raw))\n\n }\n\n }\n\n\n\n /// Write an in-memory buffer to the ODB as a blob.\n\n ///\n\n /// The Oid returned can in turn be passed to `find_blob` to get a handle to\n\n /// the blob.\n\n pub fn blob(&self, data: &[u8]) -> Result<Oid, Error> {\n\n let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n unsafe {\n\n let ptr = data.as_ptr() as *const c_void;\n\n let len = data.len() as size_t;\n\n try_call!(raw::git_blob_create_frombuffer(&mut raw, self.raw(),\n\n ptr, len));\n\n Ok(Oid::from_raw(&raw))\n\n }\n\n }\n", "file_path": "src/repo.rs", "rank": 71, "score": 18.180228985489673 }, { "content": "\n\n /// Get the OID of the tagged object of a tag\n\n pub fn target_type(&self) -> Option<ObjectType> {\n\n unsafe { ObjectType::from_raw(raw::git_tag_target_type(&*self.raw)) }\n\n }\n\n\n\n /// Get access to the underlying raw pointer.\n\n pub fn raw(&self) -> *mut raw::git_tag { self.raw 
}\n\n}\n\n\n\n#[unsafe_destructor]\n\nimpl<'a> Drop for Tag<'a> {\n\n fn drop(&mut self) {\n\n unsafe { raw::git_tag_free(self.raw) }\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/tag.rs", "rank": 72, "score": 18.090072601993363 }, { "content": " Ok(Oid::from_raw(&raw))\n\n }\n\n }\n\n\n\n /// Lookup a tag object from the repository.\n\n pub fn find_tag(&self, id: Oid) -> Result<Tag, Error> {\n\n let mut raw = 0 as *mut raw::git_tag;\n\n unsafe {\n\n try_call!(raw::git_tag_lookup(&mut raw, self.raw, id.raw()));\n\n Ok(Tag::from_raw(self, raw))\n\n }\n\n }\n\n\n\n /// Delete an existing tag reference.\n\n ///\n\n /// The tag name will be checked for validity, see `tag` for some rules\n\n /// about valid names.\n\n pub fn tag_delete(&self, name: &str) -> Result<(), Error> {\n\n unsafe {\n\n try_call!(raw::git_tag_delete(self.raw, name.to_c_str()));\n", "file_path": "src/repo.rs", "rank": 73, "score": 18.03237307239801 }, { "content": " unsafe { (*self.raw).credtype }\n\n }\n\n\n\n /// Unwrap access to the underlying raw pointer, canceling the destructor\n\n pub unsafe fn unwrap(mut self) -> *mut raw::git_cred {\n\n mem::replace(&mut self.raw, 0 as *mut raw::git_cred)\n\n }\n\n}\n\n\n\nimpl Drop for Cred {\n\n fn drop(&mut self) {\n\n if !self.raw.is_null() {\n\n unsafe { ((*self.raw).free)(self.raw) }\n\n }\n\n }\n\n}\n\n\n\nimpl CredentialHelper {\n\n /// Create a new credential helper object which will be used to probe git's\n\n /// local credential configuration.\n", "file_path": "src/cred.rs", "rank": 74, "score": 17.919925176708933 }, { "content": " try_call!(raw::git_commit_create(&mut raw,\n\n self.raw(),\n\n update_ref.map(|s| s.to_c_str()),\n\n &*author.raw(),\n\n &*committer.raw(),\n\n 0 as *const c_char,\n\n message.to_c_str(),\n\n &*tree.raw(),\n\n parents.len() as size_t,\n\n parent_ptrs.as_ptr()));\n\n Ok(Oid::from_raw(&raw))\n\n }\n\n }\n\n\n\n\n\n /// Lookup a reference to one of the commits in a repository.\n\n pub fn find_commit(&self, oid: Oid) -> Result<Commit, Error> {\n\n let mut raw = 0 as *mut raw::git_commit;\n\n unsafe {\n\n try_call!(raw::git_commit_lookup(&mut raw, self.raw(), oid.raw()));\n", "file_path": "src/repo.rs", "rank": 75, "score": 17.7954610310218 }, { "content": " unsafe { Oid::from_raw(raw::git_commit_id(&*self.raw)) }\n\n }\n\n\n\n /// Get the id of the tree pointed to by this commit.\n\n ///\n\n /// No attempts are made to fetch an object from the\n\n pub fn tree_id(&self) -> Oid {\n\n unsafe { Oid::from_raw(raw::git_commit_tree_id(&*self.raw)) }\n\n }\n\n\n\n /// Get access to the underlying raw pointer.\n\n pub fn raw(&self) -> *mut raw::git_commit { self.raw }\n\n\n\n /// Get the full message of a commit.\n\n ///\n\n /// The returned message will be slightly prettified by removing any\n\n /// potential leading newlines.\n\n ///\n\n /// `None` will be returned if the message is not valid utf-8\n\n pub fn message(&self) -> Option<&str> {\n", "file_path": "src/commit.rs", "rank": 77, "score": 17.72561804185927 }, { "content": " pub total_deltas: c_uint,\n\n pub indexed_deltas: c_uint,\n\n pub received_bytes: size_t,\n\n}\n\n\n\n#[repr(C)]\n\npub struct git_diff_file {\n\n pub id: git_oid,\n\n pub path: *const c_char,\n\n pub size: git_off_t,\n\n pub flags: u32,\n\n pub mode: u16,\n\n}\n\n\n\npub type git_repository_create_cb = extern fn(*mut *mut git_repository,\n\n *const c_char,\n\n c_int, *mut c_void) -> c_int;\n\npub type git_remote_create_cb = extern fn(*mut *mut git_remote,\n\n *mut git_repository,\n\n *const 
c_char,\n", "file_path": "libgit2-sys/lib.rs", "rank": 78, "score": 17.651911568631146 }, { "content": " try_call!(raw::git_index_write_tree(&mut raw, self.raw));\n\n Ok(Oid::from_raw(&raw))\n\n }\n\n }\n\n\n\n /// Write the index as a tree to the given repository\n\n ///\n\n /// This is the same as `write_tree` except that the destination repository\n\n /// can be chosen.\n\n pub fn write_tree_to(&mut self, repo: &Repository) -> Result<Oid, Error> {\n\n let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n unsafe {\n\n try_call!(raw::git_index_write_tree_to(&mut raw, self.raw,\n\n repo.raw()));\n\n Ok(Oid::from_raw(&raw))\n\n }\n\n }\n\n}\n\n\n\nextern fn index_matched_path_cb(path: *const libc::c_char,\n", "file_path": "src/index.rs", "rank": 79, "score": 17.620430888693832 }, { "content": " self\n\n }\n\n\n\n /// Set the callbacks which will be used to monitor the download progress.\n\n pub fn remote_callbacks(&mut self, callbacks: RemoteCallbacks<'a>)\n\n -> &mut RepoBuilder<'a> {\n\n self.callbacks = Some(callbacks);\n\n self\n\n }\n\n\n\n /// Clone a remote repository.\n\n ///\n\n /// This will use the options configured so far to clone the specified url\n\n /// into the specified local path.\n\n pub fn clone(&mut self, url: &str, into: &Path) -> Result<Repository, Error> {\n\n let mut opts: raw::git_clone_options = unsafe { mem::zeroed() };\n\n unsafe {\n\n try_call!(raw::git_clone_init_options(&mut opts,\n\n raw::GIT_CLONE_OPTIONS_VERSION));\n\n }\n", "file_path": "src/build.rs", "rank": 80, "score": 17.518850007681735 }, { "content": "\n\n /// Recursively peel a tag until a non tag git_object is found\n\n pub fn peel(&self) -> Result<Object<'a>, Error> {\n\n let mut ret = 0 as *mut raw::git_object;\n\n unsafe {\n\n try_call!(raw::git_tag_peel(&mut ret, &*self.raw));\n\n Ok(Object::from_raw_ptr(ret))\n\n }\n\n }\n\n\n\n /// Get the tagger (author) of a tag\n\n ///\n\n /// If the author is unspecified, then `None` is returned.\n\n pub fn tagger(&self) -> Option<Signature> {\n\n unsafe {\n\n let ptr = raw::git_tag_tagger(&*self.raw);\n\n if ptr.is_null() {\n\n None\n\n } else {\n\n Some(Signature::from_raw_const(self, ptr))\n", "file_path": "src/tag.rs", "rank": 82, "score": 17.277024183668917 }, { "content": " Ok(Commit::from_raw(self, raw))\n\n }\n\n }\n\n\n\n /// Lookup a reference to one of the objects in a repository.\n\n pub fn find_object(&self, oid: Oid,\n\n kind: Option<ObjectType>) -> Result<Object, Error> {\n\n let mut raw = 0 as *mut raw::git_object;\n\n unsafe {\n\n try_call!(raw::git_object_lookup(&mut raw, self.raw(), oid.raw(),\n\n kind));\n\n Ok(Object::from_raw(self, raw))\n\n }\n\n }\n\n\n\n /// Create a new direct reference.\n\n ///\n\n /// This function will return an error if a reference already exists with\n\n /// the given name unless force is true, in which case it will be\n\n /// overwritten.\n", "file_path": "src/repo.rs", "rank": 83, "score": 17.11067269548882 }, { "content": " new_name.to_c_str(),\n\n force,\n\n &*sig.map(|s| s.raw())\n\n .unwrap_or(0 as *mut _),\n\n msg.to_c_str()));\n\n }\n\n Ok(Reference {\n\n raw: raw,\n\n marker1: marker::ContravariantLifetime,\n\n marker2: marker::NoSend,\n\n marker3: marker::NoSync,\n\n })\n\n }\n\n\n\n}\n\n\n\nimpl<'a> PartialOrd for Reference<'a> {\n\n fn partial_cmp(&self, other: &Reference<'a>) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n", "file_path": "src/reference.rs", "rank": 84, "score": 16.74868396826739 }, { "content": " /// Lookup a tree entry by its filename\n\n pub fn 
get_name(&self, filename: &str) -> Option<TreeEntry> {\n\n unsafe {\n\n let ptr = call!(raw::git_tree_entry_byname(&*self.raw(),\n\n filename.to_c_str()));\n\n if ptr.is_null() {\n\n None\n\n } else {\n\n Some(TreeEntry::from_raw_const(self, ptr))\n\n }\n\n }\n\n }\n\n\n\n /// Retrieve a tree entry contained in a tree or in any of its subtrees,\n\n /// given its relative path.\n\n pub fn get_path(&self, path: &Path) -> Result<TreeEntry<'static>, Error> {\n\n let mut ret = 0 as *mut raw::git_tree_entry;\n\n unsafe {\n\n try_call!(raw::git_tree_entry_bypath(&mut ret,\n\n &*self.raw(),\n", "file_path": "src/tree.rs", "rank": 85, "score": 16.62485260029208 }, { "content": " path.to_c_str()));\n\n Ok(TreeEntry::from_raw(ret))\n\n }\n\n }\n\n}\n\n\n\n#[unsafe_destructor]\n\nimpl<'a> Drop for Tree<'a> {\n\n fn drop(&mut self) {\n\n unsafe { raw::git_tree_free(self.raw) }\n\n }\n\n}\n\n\n\nimpl<'a> TreeEntry<'a> {\n\n /// Create a new tree entry from the raw pointer provided.\n\n ///\n\n /// The lifetime of the entry is tied to the tree provided and the function\n\n /// is unsafe because the validity of the pointer cannot be guaranteed.\n\n pub unsafe fn from_raw_const<'a>(_tree: &'a Tree,\n\n raw: *const raw::git_tree_entry)\n", "file_path": "src/tree.rs", "rank": 86, "score": 16.522103001942458 }, { "content": " pub fn head_id(&self) -> Option<Oid> {\n\n unsafe {\n\n let ptr = raw::git_submodule_head_id(self.raw);\n\n if ptr.is_null() {\n\n None\n\n } else {\n\n Some(Oid::from_raw(ptr))\n\n }\n\n }\n\n }\n\n\n\n /// Get the OID for the submodule in the index.\n\n pub fn index_id(&self) -> Option<Oid> {\n\n unsafe {\n\n let ptr = raw::git_submodule_index_id(self.raw);\n\n if ptr.is_null() {\n\n None\n\n } else {\n\n Some(Oid::from_raw(ptr))\n\n }\n", "file_path": "src/submodule.rs", "rank": 87, "score": 16.465234709187804 }, { "content": " self.symbolic_target_bytes().and_then(str::from_utf8)\n\n }\n\n\n\n /// Get full name to the reference pointed to by a symbolic reference.\n\n ///\n\n /// Only available if the reference is symbolic.\n\n pub fn symbolic_target_bytes(&self) -> Option<&[u8]> {\n\n unsafe { ::opt_bytes(self, raw::git_reference_symbolic_target(&*self.raw)) }\n\n }\n\n\n\n /// Resolve a symbolic reference to a direct reference.\n\n ///\n\n /// This method iteratively peels a symbolic reference until it resolves to\n\n /// a direct reference to an OID.\n\n ///\n\n /// If a direct reference is passed as an argument, a copy of that\n\n /// reference is returned.\n\n pub fn resolve(&self) -> Result<Reference<'a>, Error> {\n\n let mut raw = 0 as *mut raw::git_reference;\n\n unsafe { try_call!(raw::git_reference_resolve(&mut raw, &*self.raw)); }\n", "file_path": "src/reference.rs", "rank": 88, "score": 16.398859370455792 }, { "content": " /// Create new commit in the repository\n\n ///\n\n /// If the `update_ref` is not `None`, name of the reference that will be\n\n /// updated to point to this commit. If the reference is not direct, it will\n\n /// be resolved to a direct reference. Use \"HEAD\" to update the HEAD of the\n\n /// current branch and make it point to this commit. If the reference\n\n /// doesn't exist yet, it will be created. 
If it does exist, the first\n\n /// parent must be the tip of this branch.\n\n pub fn commit<'a>(&'a self,\n\n update_ref: Option<&str>,\n\n author: &Signature,\n\n committer: &Signature,\n\n message: &str,\n\n tree: &Tree<'a>,\n\n parents: &[&Commit<'a>]) -> Result<Oid, Error> {\n\n let mut raw = raw::git_oid { id: [0, ..raw::GIT_OID_RAWSZ] };\n\n let parent_ptrs: Vec<*const raw::git_commit> = parents.iter().map(|p| {\n\n p.raw() as *const raw::git_commit\n\n }).collect();\n\n unsafe {\n", "file_path": "src/repo.rs", "rank": 89, "score": 16.295231091320428 }, { "content": " }\n\n }\n\n\n\n /// Build a single-level focused config object from a multi-level one.\n\n ///\n\n /// The returned config object can be used to perform get/set/delete\n\n /// operations on a single specific level.\n\n pub fn open_level(&self, level: ConfigLevel) -> Result<Config, Error> {\n\n let mut raw = 0 as *mut raw::git_config;\n\n unsafe {\n\n try_call!(raw::git_config_open_level(&mut raw, &*self.raw, level));\n\n Ok(Config::from_raw(raw))\n\n }\n\n }\n\n\n\n /// Set the value of a boolean config variable in the config file with the\n\n /// highest level (usually the local one).\n\n pub fn set_bool(&mut self, name: &str, value: bool) -> Result<(), Error> {\n\n unsafe {\n\n try_call!(raw::git_config_set_bool(self.raw, name.to_c_str(),\n", "file_path": "src/config.rs", "rank": 90, "score": 16.209695280550957 }, { "content": " notes_ref.map(|s| s.to_c_str()),\n\n &*author.raw(),\n\n &*committer.raw(),\n\n &*oid.raw(),\n\n note.to_c_str(),\n\n force));\n\n Ok(Oid::from_raw(&ret))\n\n }\n\n }\n\n\n\n /// Get the default notes reference for this repository\n\n pub fn note_default_ref(&self) -> Result<&str, Error> {\n\n let mut ret = 0 as *const c_char;\n\n unsafe {\n\n try_call!(raw::git_note_default_ref(&mut ret, self.raw));\n\n Ok(str::from_utf8(::opt_bytes(self, ret).unwrap()).unwrap())\n\n }\n\n }\n\n\n\n /// Creates a new iterator for notes in this repository.\n", "file_path": "src/repo.rs", "rank": 91, "score": 16.20225878076426 }, { "content": " /// not a symbolic one).\n\n pub fn target(&self) -> Option<Oid> {\n\n let ptr = unsafe { raw::git_reference_target(&*self.raw) };\n\n if ptr.is_null() {None} else {Some(unsafe { Oid::from_raw(ptr) })}\n\n }\n\n\n\n /// Return the peeled OID target of this reference.\n\n ///\n\n /// This peeled OID only applies to direct references that point to a hard\n\n /// Tag object: it is the result of peeling such Tag.\n\n pub fn target_peel(&self) -> Option<Oid> {\n\n let ptr = unsafe { raw::git_reference_target_peel(&*self.raw) };\n\n if ptr.is_null() {None} else {Some(unsafe { Oid::from_raw(ptr) })}\n\n }\n\n\n\n /// Get full name to the reference pointed to by a symbolic reference.\n\n ///\n\n /// May return `None` if the reference is either not symbolic or not a\n\n /// valid utf-8 string.\n\n pub fn symbolic_target(&self) -> Option<&str> {\n", "file_path": "src/reference.rs", "rank": 92, "score": 16.202202216598387 }, { "content": " ///\n\n /// Defaults to false.\n\n pub fn skip_unmerged(&mut self, skip: bool) -> &mut CheckoutBuilder {\n\n self.flag(raw::GIT_CHECKOUT_SKIP_UNMERGED, skip)\n\n }\n\n\n\n /// Indicate whether the checkout should proceed on conflicts by using the\n\n /// stage 2 version of the file (\"ours\").\n\n ///\n\n /// Defaults to false.\n\n pub fn use_ours(&mut self, ours: bool) -> &mut CheckoutBuilder {\n\n self.flag(raw::GIT_CHECKOUT_USE_OURS, ours)\n\n }\n\n\n\n /// Indicate whether the checkout should proceed on conflicts by using the\n\n /// stage 3 
version of the file (\"theirs\").\n\n ///\n\n /// Defaults to false.\n\n pub fn use_theirs(&mut self, theirs: bool) -> &mut CheckoutBuilder {\n\n self.flag(raw::GIT_CHECKOUT_USE_THEIRS, theirs)\n", "file_path": "src/build.rs", "rank": 93, "score": 16.057019436659935 }, { "content": " let mut ret = 0 as *mut raw::git_remote;\n\n unsafe {\n\n try_call!(raw::git_remote_create(&mut ret, self.raw,\n\n name.to_c_str(), url.to_c_str()));\n\n Ok(Remote::from_raw(self, ret))\n\n }\n\n }\n\n\n\n /// Create an anonymous remote\n\n ///\n\n /// Create a remote with the given url and refspec in memory. You can use\n\n /// this when you have a URL instead of a remote's name. Note that anonymous\n\n /// remotes cannot be converted to persisted remotes.\n\n pub fn remote_anonymous(&self, url: &str,\n\n fetch: &str) -> Result<Remote, Error> {\n\n let mut ret = 0 as *mut raw::git_remote;\n\n unsafe {\n\n try_call!(raw::git_remote_create_anonymous(&mut ret, self.raw,\n\n url.to_c_str(),\n\n fetch.to_c_str()));\n", "file_path": "src/repo.rs", "rank": 94, "score": 16.024392427154073 }, { "content": " }\n\n\n\n /// Creates a new signature from the give raw pointer, tied to the lifetime\n\n /// of the given object.\n\n ///\n\n /// This function is unsafe as there is no guarantee that `raw` is valid for\n\n /// `'a` nor if it's a valid pointer.\n\n pub unsafe fn from_raw_const<'a, T>(_lt: &'a T,\n\n raw: *const raw::git_signature)\n\n -> Signature<'a> {\n\n Signature {\n\n raw: raw as *mut raw::git_signature,\n\n marker: marker::ContravariantLifetime,\n\n owned: false,\n\n }\n\n }\n\n\n\n /// Gets the name on the signature.\n\n ///\n\n /// Returns `None` if the name is not valid utf-8\n", "file_path": "src/signature.rs", "rank": 95, "score": 16.01290960221091 }, { "content": " }\n\n None => {\n\n try_call!(raw::git_config_iterator_new(&mut ret, &*self.raw));\n\n }\n\n }\n\n Ok(ConfigEntries::from_raw(self, ret))\n\n }\n\n }\n\n\n\n /// Open the global/XDG configuration file according to git's rules\n\n ///\n\n /// Git allows you to store your global configuration at `$HOME/.config` or\n\n /// `$XDG_CONFIG_HOME/git/config`. For backwards compatability, the XDG file\n\n /// shouldn't be used unless the use has created it explicitly. 
With this\n\n /// function you'll open the correct one to write to.\n\n pub fn open_global(&mut self) -> Result<Config, Error> {\n\n let mut raw = 0 as *mut raw::git_config;\n\n unsafe {\n\n try_call!(raw::git_config_open_global(&mut raw, self.raw));\n\n Ok(Config::from_raw(raw))\n", "file_path": "src/config.rs", "rank": 96, "score": 15.951962548486474 }, { "content": "use std::c_str::CString;\n\nuse std::iter::Range;\n\nuse std::kinds::marker;\n\nuse std::mem;\n\nuse std::path::PosixPath;\n\n\n\nuse libc;\n\nuse time;\n\n\n\nuse {raw, Repository, Error, Tree, Oid, IndexAddOption};\n\n\n\n/// A structure to represent a git [index][1]\n\n///\n\n/// [1]: http://git-scm.com/book/en/Git-Internals-Git-Objects\n\npub struct Index {\n\n raw: *mut raw::git_index,\n\n marker: marker::NoSync,\n\n}\n\n\n\n/// An iterator over the entries in an index\n", "file_path": "src/index.rs", "rank": 97, "score": 15.921620430688762 }, { "content": " pub fn snapshot(&mut self) -> Result<Config, Error> {\n\n let mut ret = 0 as *mut raw::git_config;\n\n unsafe {\n\n try_call!(raw::git_config_snapshot(&mut ret, self.raw));\n\n Ok(Config::from_raw(ret))\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Config {\n\n fn drop(&mut self) {\n\n unsafe { raw::git_config_free(self.raw) }\n\n }\n\n}\n\n\n\nimpl<'a> ConfigEntry<'a> {\n\n /// Creates a new config entry from the raw components.\n\n ///\n\n /// This method is unsafe as the validity of `raw` is not guaranteed.\n\n pub unsafe fn from_raw(_config: &Config, raw: *const raw::git_config_entry)\n", "file_path": "src/config.rs", "rank": 98, "score": 15.91279285964038 }, { "content": "\n\n /// Get the `when` of this signature in seconds since the epoch.\n\n pub fn when(&self) -> u64 {\n\n unsafe { (*self.raw).when.time as u64 }\n\n }\n\n\n\n /// Get the offset of `when`, in minutes, of the signature's time zone from\n\n /// UTC.\n\n pub fn when_offset(&self) -> int {\n\n unsafe { (*self.raw).when.offset as int }\n\n }\n\n\n\n /// Get access to the underlying raw signature\n\n pub fn raw(&self) -> *mut raw::git_signature { self.raw }\n\n}\n\n\n\nimpl Clone for Signature<'static> {\n\n fn clone(&self) -> Signature<'static> {\n\n let mut raw = 0 as *mut raw::git_signature;\n\n let rc = unsafe { raw::git_signature_dup(&mut raw, &*self.raw) };\n", "file_path": "src/signature.rs", "rank": 99, "score": 15.898348751408607 } ]
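The git2-rs excerpts above open with src/oid.rs and its conversion tests (Oid::from_str, Oid::from_bytes). As a minimal sketch of just those two constructors — assuming the published git2 crate exposes them with the shapes shown in the excerpt — the same checks could be driven like this:

// Sketch only: repeats the Oid conversions from the src/oid.rs excerpt above.
// Assumes the published `git2` crate; the hex value is the one used in that excerpt's test.
use git2::Oid;

fn main() {
    // A 40-character hex string parses into an Oid; shorter input is rejected.
    assert!(Oid::from_str("foo").is_err());
    let oid = Oid::from_str("decbf2be529ab6557d5429922251e5ee36519817").expect("valid hex oid");

    // Raw input must be exactly 20 bytes (GIT_OID_RAWSZ in the excerpts).
    assert!(Oid::from_bytes(b"foo").is_err());
    assert!(Oid::from_bytes(b"00000000000000000000").is_ok());

    println!("parsed oid: {}", oid);
}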
Rust
src/lib.rs
bugagashenkj/rust-salsa20
07d50a6997af0fe455d0ee505d6f4cb7ed870e01
#![no_std] mod utils; use core::fmt; use crate::utils::{u8_to_u32, xor_from_slice}; fn quarterround(y0: u32, y1: u32, y2: u32, y3: u32) -> [u32; 4] { let y1 = y1 ^ y0.wrapping_add(y3).rotate_left(7); let y2 = y2 ^ y1.wrapping_add(y0).rotate_left(9); let y3 = y3 ^ y2.wrapping_add(y1).rotate_left(13); let y0 = y0 ^ y3.wrapping_add(y2).rotate_left(18); [y0, y1, y2, y3] } fn columnround(y: [u32; 16]) -> [u32; 16] { let [ [z0, z4, z8, z12], [z5, z9, z13, z1], [z10, z14, z2, z6], [z15, z3, z7, z11] ] = [ quarterround(y[0], y[4], y[8], y[12]), quarterround(y[5], y[9], y[13], y[1]), quarterround(y[10], y[14], y[2], y[6]), quarterround(y[15], y[3], y[7], y[11]), ]; [z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15] } fn rowround(y: [u32; 16]) -> [u32; 16] { let [ [z0, z1, z2, z3], [z5, z6, z7, z4], [z10, z11, z8, z9], [z15, z12, z13, z14] ] = [ quarterround(y[0], y[1], y[2], y[3]), quarterround(y[5], y[6], y[7], y[4]), quarterround(y[10], y[11], y[8], y[9]), quarterround(y[15], y[12], y[13], y[14]) ]; [z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15] } fn doubleround(y: [u32; 16]) -> [u32; 16] { rowround(columnround(y)) } #[derive(Clone, Copy)] struct Overflow { buffer: [u8; 64], offset: usize } impl Overflow { fn new(buffer: [u8; 64], offset: usize) -> Overflow { Overflow { buffer, offset } } fn modify<F>(&mut self, buffer: &mut [u8], modifier: F) where F: Fn(&mut [u8], &[u8]) { let offset = self.offset; self.offset += buffer.len(); modifier(buffer, &self.buffer[offset..self.offset]); } } impl fmt::Debug for Overflow { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter .debug_struct("Overflow") .field("buffer", &&self.buffer[..]) .field("offset", &self.offset) .finish() } } #[derive(Clone, Copy, Debug)] pub enum Key { Key16([u8; 16]), Key32([u8; 32]) } #[derive(Clone, Copy, Debug)] struct Generator { init_matrix: [u32; 16], cround_matrix: [u32; 16], dround_values: [u32; 4], counter: u64 } impl Generator { fn new(key: Key, nonce: [u8; 8], counter: u64) -> Generator { let mut init_matrix = [0; 16]; init_matrix[0] = 1634760805; init_matrix[15] = 1797285236; init_matrix[8] = counter as u32; init_matrix[9] = (counter >> 32) as u32; u8_to_u32(&nonce[..], &mut init_matrix[6..8]); match key { Key::Key16(key) => { u8_to_u32(&key[..], &mut init_matrix[1..5]); u8_to_u32(&key[..], &mut init_matrix[11..15]); init_matrix[5] = 824206446; init_matrix[10] = 2036477238; } Key::Key32(key) => { u8_to_u32(&key[..16], &mut init_matrix[1..5]); u8_to_u32(&key[16..], &mut init_matrix[11..15]); init_matrix[5] = 857760878; init_matrix[10] = 2036477234; } } let cround_matrix = columnround(init_matrix); let dround_values = quarterround( cround_matrix[5], cround_matrix[6], cround_matrix[7], cround_matrix[4] ); Generator { init_matrix, cround_matrix, dround_values, counter } } fn first_doubleround(&self) -> [u32; 16] { let [r5, r6, r7, r4] = self.dround_values; let [ [r0, r1, r2, r3], [r10, r11, r8, r9], [r15, r12, r13, r14] ] = [ quarterround( self.cround_matrix[0], self.cround_matrix[1], self.cround_matrix[2], self.cround_matrix[3] ), quarterround( self.cround_matrix[10], self.cround_matrix[11], self.cround_matrix[8], self.cround_matrix[9] ), quarterround( self.cround_matrix[15], self.cround_matrix[12], self.cround_matrix[13], self.cround_matrix[14] ) ]; [r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, r12, r13, r14, r15] } fn set_counter(&mut self, counter: u64) { self.counter = counter; self.init_matrix[8] = counter as u32; let [z0, z4, z8, z12] = 
quarterround( self.init_matrix[0], self.init_matrix[4], self.init_matrix[8], self.init_matrix[12] ); self.cround_matrix[0] = z0; self.cround_matrix[8] = z8; self.cround_matrix[12] = z12; if counter > 0xffffffff_u64 { self.init_matrix[9] = (counter >> 32) as u32; let [z5, z9, z13, z1] = quarterround( self.init_matrix[5], self.init_matrix[9], self.init_matrix[13], self.init_matrix[1] ); self.cround_matrix[1] = z1; self.cround_matrix[9] = z9; self.cround_matrix[13] = z13; self.dround_values = quarterround( z5, self.cround_matrix[6], self.cround_matrix[7], z4 ); } } fn next(&mut self) -> [u8; 64] { let mut buffer = [0; 64]; (0..9) .fold(self.first_doubleround(), |block, _| doubleround(block)) .iter() .zip(self.init_matrix.iter()) .enumerate() .for_each(|(index, (drounds_value, &init_value))| { let offset = index * 4; let sum = drounds_value.wrapping_add(init_value); buffer[offset..offset + 4].copy_from_slice(&sum.to_le_bytes()); }); self.set_counter(self.counter.wrapping_add(1)); buffer } } #[derive(Clone, Copy, Debug)] pub struct Salsa20 { generator: Generator, overflow: Overflow } impl Salsa20 { pub fn new(key: Key, nonce: [u8; 8], counter: u64) -> Salsa20 { let overflow = Overflow::new([0; 64], 64); let generator = Generator::new(key, nonce, counter); Salsa20 { generator, overflow } } fn modify<F>(&mut self, buffer: &mut [u8], modifier: &F) where F: Fn(&mut [u8], &[u8]) { let buffer_len = buffer.len(); let overflow_len = 64 - self.overflow.offset; if overflow_len != 0 { if buffer_len >= overflow_len { self.overflow.modify(&mut buffer[..overflow_len], modifier); } else { self.overflow.modify(&mut buffer[..], modifier); return; } } let last_block_offset = buffer_len - (buffer_len - overflow_len) % 64; for offset in (overflow_len..last_block_offset).step_by(64) { modifier(&mut buffer[offset..offset + 64], &self.generator.next()); } if last_block_offset != buffer_len { self.overflow = Overflow::new(self.generator.next(), 0); self.overflow.modify(&mut buffer[last_block_offset..], modifier); } } pub fn set_counter(&mut self, counter: u64) { if counter != self.generator.counter { self.generator.set_counter(counter); } self.overflow = Overflow::new([0; 64], 64); } pub fn generate(&mut self, buffer: &mut [u8]) { self.modify(buffer, &<[u8]>::copy_from_slice); } pub fn encrypt(&mut self, buffer: &mut [u8]) { self.modify(buffer, &xor_from_slice); } } #[cfg(test)] mod tests { use super::*; #[test] fn quarterround_test() { assert_eq!( quarterround(0x00000000, 0x00000000, 0x00000000, 0x00000000), [0x00000000, 0x00000000, 0x00000000, 0x00000000] ); assert_eq!( quarterround(0xe7e8c006, 0xc4f9417d, 0x6479b4b2, 0x68c67137), [0xe876d72b, 0x9361dfd5, 0xf1460244, 0x948541a3] ); } #[test] fn rowround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000 ], [ 0x08008145, 0x00000080, 0x00010200, 0x20500000, 0x20100001, 0x00048044, 0x00000080, 0x00010000, 0x00000001, 0x00002000, 0x80040000, 0x00000000, 0x00000001, 0x00000200, 0x00402000, 0x88000100 ]); test([ 0x08521bd6, 0x1fe88837, 0xbb2aa576, 0x3aa26365, 0xc54c6a5b, 0x2fc74c2f, 0x6dd39cc3, 0xda0a64f6, 0x90a2f23d, 0x067f95a6, 0x06b35f61, 0x41e4732e, 0xe859c100, 0xea4d84b7, 0x0f619bff, 0xbc6e965a ], [ 0xa890d39d, 0x65d71596, 0xe9487daa, 0xc8ca6a86, 0x949d2192, 0x764b7754, 0xe408d9b9, 0x7a41b4d1, 0x3402e183, 0x3c3af432, 0x50669f96, 0xd89ef0a8, 0x0040ede5, 0xb545fbce, 0xd257ed4f, 0x1818882d ]); fn test(input_data: 
[u32; 16], expected_data: [u32; 16]) { assert_eq!(rowround(input_data), expected_data); } } #[test] fn columnround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000 ], [ 0x10090288, 0x00000000, 0x00000000, 0x00000000, 0x00000101, 0x00000000, 0x00000000, 0x00000000, 0x00020401, 0x00000000, 0x00000000, 0x00000000, 0x40a04001, 0x00000000, 0x00000000, 0x00000000 ]); test([ 0x08521bd6, 0x1fe88837, 0xbb2aa576, 0x3aa26365, 0xc54c6a5b, 0x2fc74c2f, 0x6dd39cc3, 0xda0a64f6, 0x90a2f23d, 0x067f95a6, 0x06b35f61, 0x41e4732e, 0xe859c100, 0xea4d84b7, 0x0f619bff, 0xbc6e965a ], [ 0x8c9d190a, 0xce8e4c90, 0x1ef8e9d3, 0x1326a71a, 0x90a20123, 0xead3c4f3, 0x63a091a0, 0xf0708d69, 0x789b010c, 0xd195a681, 0xeb7d5504, 0xa774135c, 0x481c2027, 0x53a8e4b5, 0x4c1f89c5, 0x3f78c9c8 ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(columnround(input_data), expected_data); } } #[test] fn doubleround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 ], [ 0x8186a22d, 0x0040a284, 0x82479210, 0x06929051, 0x08000090, 0x02402200, 0x00004000, 0x00800000, 0x00010200, 0x20400000, 0x08008104, 0x00000000, 0x20500000, 0xa0000040, 0x0008180a, 0x612a8020 ]); test([ 0xde501066, 0x6f9eb8f7, 0xe4fbbd9b, 0x454e3f57, 0xb75540d3, 0x43e93a4c, 0x3a6f2aa0, 0x726d6b36, 0x9243f484, 0x9145d1e8, 0x4fa9d247, 0xdc8dee11, 0x054bf545, 0x254dd653, 0xd9421b6d, 0x67b276c1 ], [ 0xccaaf672, 0x23d960f7, 0x9153e63a, 0xcd9a60d0, 0x50440492, 0xf07cad19, 0xae344aa0, 0xdf4cfdfc, 0xca531c29, 0x8e7943db, 0xac1680cd, 0xd503ca00, 0xa74b2ad6, 0xbc331c5c, 0x1dda24c7, 0xee928277 ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(doubleround(input_data), expected_data); } } #[test] fn create_init_matrix_test() { test(Key::Key16([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]), [ 101, 120, 112, 97, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 110, 100, 32, 49, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 54, 45, 98, 121, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 116, 101, 32, 107 ]); test(Key::Key32([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216 ]), [ 101, 120, 112, 97, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 110, 100, 32, 51, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 50, 45, 98, 121, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 116, 101, 32, 107 ]); fn test(key: Key, expected_data: [u8; 64]) { let nonce = [101, 102, 103, 104, 105, 106, 107, 108]; let counter = u64::from_le_bytes( [109, 110, 111, 112, 113, 114, 115, 116] ); let generator = Generator::new(key, nonce, counter); let mut expected_data_u32 = [0; 16]; u8_to_u32(&expected_data, &mut expected_data_u32); assert_eq!(generator.init_matrix, expected_data_u32); } } #[test] fn first_doubleround_test() { test(0x00000000, [0x00000000, 0x00000000]); test(0x00000001, [0x00000001, 0x00000000]); test(0x1234567f, [0x1234567f, 0x00000000]); test(0xffffffff, [0xffffffff, 0x00000000]); test(0x100000000, [0x00000000, 0x00000001]); test(0x012345678abcdef, [0x78abcdef, 0x123456]); fn test(counter: u64, counter_as_u32: [u32; 2]) { let key = 
Key::Key16([0; 16]); let mut generator = Generator::new(key, [0; 8], 0); generator.set_counter(counter); assert_eq!(generator.init_matrix[8..10], counter_as_u32); assert_eq!( generator.first_doubleround(), doubleround(generator.init_matrix) ); }; } #[test] fn generate_test() { test(Key::Key16([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]), [ 39, 173, 46, 248, 30, 200, 82, 17, 48, 67, 254, 239, 37, 18, 13, 247, 241, 200, 61, 144, 10, 55, 50, 185, 6, 47, 246, 253, 143, 86, 187, 225, 134, 85, 110, 246, 161, 163, 43, 235, 231, 94, 171, 51, 145, 214, 112, 29, 14, 232, 5, 16, 151, 140, 183, 141, 171, 9, 122, 181, 104, 182, 177, 193 ]); test(Key::Key32([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216 ]), [ 69, 37, 68, 39, 41, 15, 107, 193, 255, 139, 122, 6, 170, 233, 217, 98, 89, 144, 182, 106, 21, 51, 200, 65, 239, 49, 222, 34, 215, 114, 40, 126, 104, 197, 7, 225, 197, 153, 31, 2, 102, 78, 76, 176, 84, 245, 246, 184, 177, 160, 133, 130, 6, 72, 149, 119, 192, 195, 132, 236, 234, 103, 246, 74 ]); fn test(key: Key, expected_data: [u8; 64]) { let nonce = [101, 102, 103, 104, 105, 106, 107, 108]; let counter = u64::from_le_bytes( [109, 110, 111, 112, 113, 114, 115, 116] ); let mut generator = Generator::new(key, nonce, counter); let buffer = generator.next(); assert_eq!(buffer.to_vec(), expected_data.to_vec()); } } }
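The lib.rs above exposes a small public API: Key (Key16/Key32), Salsa20::new(key, nonce, counter), generate, encrypt and set_counter. A brief usage sketch against that API follows; the zero key/nonce, the message and the `use` path are illustrative placeholders (the crate's published name is not shown in this dump), not values taken from the repository.

// Usage sketch for the Salsa20 API defined in lib.rs above.
// The `use` path is hypothetical; Key and Salsa20 are the items defined above.
use rust_salsa20::{Key, Salsa20};

fn main() {
    // Placeholder key/nonce; the keystream starts at block 0.
    let key = Key::Key32([0u8; 32]);
    let mut cipher = Salsa20::new(key, [0u8; 8], 0);

    // encrypt() XORs the buffer with keystream in place.
    let mut data = *b"attack at dawn";
    cipher.encrypt(&mut data);

    // Rewinding the counter to block 0 replays the same keystream,
    // so a second encrypt() restores the plaintext.
    cipher.set_counter(0);
    cipher.encrypt(&mut data);
    assert_eq!(&data, b"attack at dawn");

    // generate() writes raw keystream bytes instead of XOR-ing.
    let mut keystream = [0u8; 32];
    let mut ks = Salsa20::new(Key::Key16([0u8; 16]), [0u8; 8], 0);
    ks.generate(&mut keystream);
}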
#![no_std] mod utils; use core::fmt; use crate::utils::{u8_to_u32, xor_from_slice}; fn quarterround(y0: u32, y1: u32, y2: u32, y3: u32) -> [u32; 4] { let y1 = y1 ^ y0.wrapping_add(y3).rotate_left(7); let y2 = y2 ^ y1.wrapping_add(y0).rotate_left(9); let y3 = y3 ^ y2.wrapping_add(y1).rotate_left(13); let y0 = y0 ^ y3.wrapping_add(y2).rotate_left(18); [y0, y1, y2, y3] } fn columnround(y: [u32; 16]) -> [u32; 16] { let [ [z0, z4, z8, z12], [z5, z9, z13, z1], [z10, z14, z2, z6], [z15, z3, z7, z11] ] = [ quarterround(y[0], y[4], y[8], y[12]), quarterround(y[5], y[9], y[13], y[1]), quarterround(y[10], y[14], y[2], y[6]), quarterround(y[15], y[3], y[7], y[11]), ]; [z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15] } fn rowround(y: [u32; 16]) -> [u32; 16] { let [ [z0, z1, z2, z3], [z5, z6, z7, z4], [z10, z11, z8, z9], [z15, z12, z13, z14] ] = [ quarterround(y[0], y[1], y[2], y[3]), quarterround(y[5], y[6], y[7], y[4]), quarterround(y[10], y[11], y[8], y[9]), quarterround(y[15], y[12], y[13], y[14]) ]; [z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15] } fn doubleround(y: [u32; 16]) -> [u32; 16] { rowround(columnround(y)) } #[derive(Clone, Copy)] struct Overflow { buffer: [u8; 64], offset: usize } impl Overflow { fn new(buffer: [u8; 64], offset: usize) -> Overflow { Overflow { buffer, offset } } fn modify<F>(&mut self, buffer: &mut [u8], modifier: F) where F: Fn(&mut [u8], &[u8]) { let offset = self.offset; self.offset += buffer.len(); modifier(buffer, &self.buffer[offset..self.offset]); } } impl fmt::Debug for Overflow { fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter .debug_struct("Overflow") .field("buffer", &&self.buffer[..]) .field("offset", &self.offset) .finish() } } #[derive(Clone, Copy, Debug)] pub enum Key { Key16([u8; 16]), Key32([u8; 32]) } #[derive(Clone, Copy, Debug)] struct Generator { init_matrix: [u32; 16], cround_matrix: [u32; 16], dround_values: [u32; 4], counter: u64 } impl Generator { fn new(key: Key, nonce: [u8; 8], counter: u64) -> Generator { let mut init_matrix = [0; 16]; init_matrix[0] = 1634760805; init_matrix[15] = 1797285236; init_matrix[8] = counter as u32; init_matrix[9] = (counter >> 32) as u32; u8_to_u32(&nonce[..], &mut init_matrix[6..8]); match key { Key::Key16(key) => { u8_to_u32(&key[..], &mut init_matrix[1..5]); u8_to_u32(&key[..], &mut init_matrix[11..15]); init_matrix[5] = 824206446; init_matrix[10] = 2036477238; } Key::Key32(key) => { u8_to_u32(&key[..16], &mut init_matrix[1..5]); u8_to_u32(&key[16..], &mut init_matrix[11..15]); init_matrix[5] = 857760878; init_matrix[10] = 2036477234; } } let cround_matrix = columnround(init_matrix); let dround_values = quarterround( cround_matrix[5], cround_matrix[6], cround_matrix[7], cround_matrix[4] ); Generator { init_matrix, cround_matrix, dround_values, counter } } fn first_doubleround(&self) -> [u32; 16] { let [r5, r6, r7, r4] = self.dround_values; let [ [r0, r1, r2, r3], [r10, r11, r8, r9], [r15, r12, r13, r14] ] = [ quarterround( self.cround_matrix[0], self.cround_matrix[1], self.cround_matrix[2], self.cround_matrix[3] ), quarterround( self.cround_matrix[10], self.cround_matrix[11], self.cround_matrix[8], self.cround_matrix[9] ), quarterround( self.cround_matrix[15], self.cround_matrix[12], self.cround_matrix[13], self.cround_matrix[14] ) ]; [r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11, r12, r13, r14, r15] } fn set_counter(&mut self, counter: u64) { self.counter = counter; self.init_matrix[8] = counter as u32; let [z0, z4, z8, z12] = 
quarterround( self.init_matrix[0], self.init_matrix[4], self.init_matrix[8], self.init_matrix[12] ); self.cround_matrix[0] = z0; self.cround_matrix[8] = z8; self.cround_matrix[12] = z12; if counter > 0xffffffff_u64 { self.init_matrix[9] = (counter >> 32) as u32; let [z5, z9, z13, z1] = quarterround( self.init_matrix[5], self.init_matrix[9], self.init_matrix[13], self.init_matrix[1] ); self.cround_matrix[1] = z1; self.cround_matrix[9] = z9; self.cround_matrix[13] = z13; self.dround_values = quarterround( z5, self.cround_matrix[6], self.cround_matrix[7], z4 ); } } fn next(&mut self) -> [u8; 64] { let mut buffer = [0; 64]; (0..9) .fold(self.first_doubleround(), |block, _| doubleround(block)) .iter() .zip(self.init_matrix.iter()) .enumerate() .for_each(|(index, (drounds_value, &init_value))| { let offset = index * 4; let sum = drounds_value.wrapping_add(init_value); buffer[offset..offset + 4].copy_from_slice(&sum.to_le_bytes()); }); self.set_counter(self.counter.wrapping_add(1)); buffer } } #[derive(Clone, Copy, Debug)] pub struct Salsa20 { generator: Generator, overflow: Overflow } impl Salsa20 { pub fn new(key: Key, nonce: [u8; 8], counter: u64) -> Salsa20 { let overflow = Overflow::new([0; 64], 64); let generator = Generator::new(key, nonce, counter); Salsa20 { generator, overflow } } fn modify<F>(&mut self, buffer: &mut [u8], modifier: &F) where F: Fn(&mut [u8], &[u8]) { let buffer_len = buffer.len(); let overflow_len = 64 - self.overflow.offset; if overflow_len != 0 { if buffer_len >= overflow_len { self.overflow.modify(&mut buffer[..overflow_len], modifier); } else { self.overflow.modify(&mut buffer[..], modifier); return; } } let last_block_offset = buffer_len - (buffer_len - overflow_len) % 64; for offset in (overflow_len..last_block_offset).step_by(64) { modifier(&mut buffer[offset..offset + 64], &self.generator.next()); } if last_block_offset != buffer_len { self.overflow = Overflow::new(self.generator.next(), 0); self.overflow.modify(&mut buffer[last_block_offset..], modifier); } } pub fn set_counter(&mut self, counter: u64) { if counter != self.generator.counter { self.generator.set_counter(counter); } self.overflow = Overflow::new([0; 64], 64); } pub fn generate(&mut self, buffer: &mut [u8]) { self.modify(buffer, &<[u8]>::copy_from_slice); } pub fn encrypt(&mut self, buffer: &mut [u8]) { self.modify(buffer, &xor_from_slice); } } #[cfg(test)] mod tests { use super::*; #[test] fn quarterround_test() { assert_eq!( quarterround(0x00000000, 0x00000000, 0x00000000, 0x00000000), [0x00000000, 0x00000000, 0x00000000, 0x00000000] ); assert_eq!( quarterround(0xe7e8c006, 0xc4f9417d, 0x6479b4b2, 0x68c67137), [0xe876d72b, 0x9361dfd5, 0xf1460244, 0x948541a3] ); } #[test] fn rowround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000 ], [ 0x08008145, 0x00000080, 0x00010200, 0x20500000, 0x20100001, 0x00048044, 0x00000080, 0x00010000, 0x00000001, 0x00002000, 0x80040000, 0x00000000, 0x00000001, 0x00000200, 0x00402000, 0x88000100 ]); test([ 0x08521bd6, 0x1fe88837, 0xbb2aa576, 0x3aa26365, 0xc54c6a5b, 0x2fc74c2f, 0x6dd39cc3, 0xda0a64f6, 0x90a2f23d, 0x067f95a6, 0x06b35f61, 0x41e4732e, 0xe859c100, 0xea4d84b7, 0x0f619bff, 0xbc6e965a ], [ 0xa890d39d, 0x65d71596, 0xe9487daa, 0xc8ca6a86, 0x949d2192, 0x764b7754, 0xe408d9b9, 0x7a41b4d1, 0x3402e183, 0x3c3af432, 0x50669f96, 0xd89ef0a8, 0x0040ede5, 0xb545fbce, 0xd257ed4f, 0x1818882d ]); fn test(input_data: 
[u32; 16], expected_data: [u32; 16]) { assert_eq!(rowround(input_data), expected_data); } } #[test] fn columnround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000001, 0x00000000, 0x00000000, 0x00000000 ], [ 0x10090288, 0x00000000, 0x00000000, 0x00000000, 0x00000101, 0x00000000, 0x00000000, 0x00000000, 0x00020401, 0x00000000, 0x00000000, 0x00000000, 0x40a04001, 0x00000000, 0x0000000
#[test] fn doubleround_test() { test([ 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000, 0x00000000 ], [ 0x8186a22d, 0x0040a284, 0x82479210, 0x06929051, 0x08000090, 0x02402200, 0x00004000, 0x00800000, 0x00010200, 0x20400000, 0x08008104, 0x00000000, 0x20500000, 0xa0000040, 0x0008180a, 0x612a8020 ]); test([ 0xde501066, 0x6f9eb8f7, 0xe4fbbd9b, 0x454e3f57, 0xb75540d3, 0x43e93a4c, 0x3a6f2aa0, 0x726d6b36, 0x9243f484, 0x9145d1e8, 0x4fa9d247, 0xdc8dee11, 0x054bf545, 0x254dd653, 0xd9421b6d, 0x67b276c1 ], [ 0xccaaf672, 0x23d960f7, 0x9153e63a, 0xcd9a60d0, 0x50440492, 0xf07cad19, 0xae344aa0, 0xdf4cfdfc, 0xca531c29, 0x8e7943db, 0xac1680cd, 0xd503ca00, 0xa74b2ad6, 0xbc331c5c, 0x1dda24c7, 0xee928277 ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(doubleround(input_data), expected_data); } } #[test] fn create_init_matrix_test() { test(Key::Key16([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]), [ 101, 120, 112, 97, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 110, 100, 32, 49, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 54, 45, 98, 121, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 116, 101, 32, 107 ]); test(Key::Key32([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216 ]), [ 101, 120, 112, 97, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 110, 100, 32, 51, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 50, 45, 98, 121, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 116, 101, 32, 107 ]); fn test(key: Key, expected_data: [u8; 64]) { let nonce = [101, 102, 103, 104, 105, 106, 107, 108]; let counter = u64::from_le_bytes( [109, 110, 111, 112, 113, 114, 115, 116] ); let generator = Generator::new(key, nonce, counter); let mut expected_data_u32 = [0; 16]; u8_to_u32(&expected_data, &mut expected_data_u32); assert_eq!(generator.init_matrix, expected_data_u32); } } #[test] fn first_doubleround_test() { test(0x00000000, [0x00000000, 0x00000000]); test(0x00000001, [0x00000001, 0x00000000]); test(0x1234567f, [0x1234567f, 0x00000000]); test(0xffffffff, [0xffffffff, 0x00000000]); test(0x100000000, [0x00000000, 0x00000001]); test(0x012345678abcdef, [0x78abcdef, 0x123456]); fn test(counter: u64, counter_as_u32: [u32; 2]) { let key = Key::Key16([0; 16]); let mut generator = Generator::new(key, [0; 8], 0); generator.set_counter(counter); assert_eq!(generator.init_matrix[8..10], counter_as_u32); assert_eq!( generator.first_doubleround(), doubleround(generator.init_matrix) ); }; } #[test] fn generate_test() { test(Key::Key16([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]), [ 39, 173, 46, 248, 30, 200, 82, 17, 48, 67, 254, 239, 37, 18, 13, 247, 241, 200, 61, 144, 10, 55, 50, 185, 6, 47, 246, 253, 143, 86, 187, 225, 134, 85, 110, 246, 161, 163, 43, 235, 231, 94, 171, 51, 145, 214, 112, 29, 14, 232, 5, 16, 151, 140, 183, 141, 171, 9, 122, 181, 104, 182, 177, 193 ]); test(Key::Key32([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216 ]), [ 69, 37, 68, 39, 41, 15, 107, 193, 255, 139, 122, 6, 170, 233, 217, 98, 89, 144, 182, 106, 21, 51, 200, 65, 239, 49, 222, 34, 215, 114, 40, 126, 104, 197, 7, 225, 197, 153, 31, 2, 102, 78, 76, 176, 84, 245, 246, 184, 177, 160, 
133, 130, 6, 72, 149, 119, 192, 195, 132, 236, 234, 103, 246, 74 ]); fn test(key: Key, expected_data: [u8; 64]) { let nonce = [101, 102, 103, 104, 105, 106, 107, 108]; let counter = u64::from_le_bytes( [109, 110, 111, 112, 113, 114, 115, 116] ); let mut generator = Generator::new(key, nonce, counter); let buffer = generator.next(); assert_eq!(buffer.to_vec(), expected_data.to_vec()); } } }
0, 0x00000000 ]); test([ 0x08521bd6, 0x1fe88837, 0xbb2aa576, 0x3aa26365, 0xc54c6a5b, 0x2fc74c2f, 0x6dd39cc3, 0xda0a64f6, 0x90a2f23d, 0x067f95a6, 0x06b35f61, 0x41e4732e, 0xe859c100, 0xea4d84b7, 0x0f619bff, 0xbc6e965a ], [ 0x8c9d190a, 0xce8e4c90, 0x1ef8e9d3, 0x1326a71a, 0x90a20123, 0xead3c4f3, 0x63a091a0, 0xf0708d69, 0x789b010c, 0xd195a681, 0xeb7d5504, 0xa774135c, 0x481c2027, 0x53a8e4b5, 0x4c1f89c5, 0x3f78c9c8 ]); fn test(input_data: [u32; 16], expected_data: [u32; 16]) { assert_eq!(columnround(input_data), expected_data); } }
function_block-function_prefixed
[ { "content": "#[test]\n\nfn generate_test() {\n\n test(\n\n 0x00000fff,\n\n vec![\n\n 134, 70, 88, 94, 194, 104, 117, 219, 33, 198, 116, 85, 32, 52, 54,\n\n 214, 98, 231, 58, 191, 69, 243, 75, 142, 233, 245, 119, 223, 113,\n\n 31, 50, 172, 218, 9, 93, 192, 217, 5, 89, 3, 23, 219, 138, 102,\n\n 123, 126, 49, 43, 100, 214, 63, 72, 58, 63, 235, 216, 134, 201,\n\n 110, 26, 32, 108, 173, 131, 0, 115, 70, 177, 106, 80, 30, 31, 64,\n\n 133, 206, 66, 189, 33, 230, 73, 25, 7, 28, 232, 186, 237, 142, 10,\n\n 249, 100, 2, 137, 9, 38, 38, 76, 198, 28, 204, 149\n\n ]\n\n );\n\n\n\n test(\n\n 0xabcdffffffff,\n\n vec![\n\n 97, 110, 63, 93, 34, 205, 56, 57, 45, 32, 254, 71, 231, 158, 15,\n\n 119, 20, 42, 196, 244, 211, 196, 35, 76, 99, 7, 146, 45, 78, 217,\n\n 181, 127, 19, 15, 133, 99, 183, 18, 33, 117, 233, 111, 214, 39, 88,\n", "file_path": "tests/set-counter.rs", "rank": 5, "score": 61025.80992676163 }, { "content": "fn generate_1_kb_with_overflow(c: &mut Criterion) {\n\n let mut salsa20 = Salsa20::new(Key16([3; 16]), [0; 8], 0);\n\n let mut buffer = [0; 1024];\n\n\n\n c.bench(\n\n \"generate with overflow\",\n\n Benchmark::new(\n\n \"1Kb\",\n\n move |b| b.iter(|| {\n\n salsa20.generate(black_box(&mut buffer[0..7]));\n\n salsa20.generate(black_box(&mut buffer[7..259]));\n\n salsa20.generate(black_box(&mut buffer[259..938]));\n\n salsa20.generate(black_box(&mut buffer[938..1024]));\n\n })\n\n ).throughput(Throughput::Bytes(1024))\n\n );\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n encrypt_1_kb,\n\n generate_1_kb,\n\n generate_1_kb_with_overflow\n\n);\n\ncriterion_main!(benches);\n", "file_path": "benches/benchmarks.rs", "rank": 6, "score": 57819.61084696943 }, { "content": "#[test]\n\nfn encrypt_test() {\n\n test(\n\n 0x00000000,\n\n vec![\n\n 42, 129, 33, 161, 137, 35, 27, 149, 115, 154, 192, 232, 160, 76,\n\n 105, 227, 191, 104, 48, 89, 162, 249, 242, 123, 10, 123, 90, 68,\n\n 27, 16, 219, 59, 219, 128, 111, 75, 245, 26, 231, 52, 107, 67, 13,\n\n 79, 81, 158, 1, 86, 11, 50, 35, 238, 166, 23, 112, 201, 114, 197,\n\n 52, 38, 201, 77, 251, 219, 14, 173, 179, 196, 215, 54, 65, 125,\n\n 181, 65, 162, 116, 44, 201, 92, 43, 79, 101, 68, 64, 95, 30, 97,\n\n 108, 254, 161, 159, 254, 10, 59, 177, 76, 76, 79, 41, 89, 234, 74,\n\n 4, 36, 224, 209, 83, 138, 51, 66, 234, 157, 74, 181, 53, 104, 19,\n\n 218, 204, 171, 233, 151, 205, 236, 61, 141, 76, 209, 103, 246, 230,\n\n 85, 129, 125, 34, 84, 68, 39, 240, 59, 9, 244, 168, 11, 134, 89,\n\n 74, 182, 63, 77, 49, 247, 62, 19, 175, 123, 83, 170, 160, 78, 218,\n\n 196, 145, 7, 47, 80, 196, 212, 154, 165, 193, 230, 247, 247, 56,\n\n 132, 231, 146, 154, 132, 177, 83, 67, 251, 90, 71, 4, 52, 246, 25,\n\n 1, 212, 80, 138, 143, 91, 91, 93, 86, 169\n\n ]\n\n );\n", "file_path": "tests/encrypt-test.rs", "rank": 7, "score": 48461.45034935652 }, { "content": "#[test]\n\nfn generate_test() {\n\n test(\n\n 0x00000000,\n\n vec![\n\n 45, 134, 38, 166, 142, 36, 28, 146, 116, 157, 199, 239, 167, 75,\n\n 110, 228, 184, 111, 55, 94, 165, 254, 245, 124, 13, 124, 93, 67,\n\n 28, 23, 220, 60, 220, 135, 104, 76, 242, 29, 224, 51, 108, 68, 10,\n\n 72, 86, 153, 6, 81, 12, 53, 36, 233, 161, 16, 119, 206, 117, 194,\n\n 51, 33, 206, 74, 252, 220, 9, 170, 180, 195, 208, 49, 70, 122, 178,\n\n 70, 165, 115, 43, 206, 91, 44, 72, 98, 67, 71, 88, 25, 102, 107,\n\n 249, 166, 152, 249, 13, 60, 182, 75, 75, 72, 46, 94, 237, 77, 3,\n\n 35, 231, 214, 84, 141, 52, 69, 237, 154, 77, 178, 50, 111, 20, 221,\n\n 203, 172, 238, 144, 202, 235, 58, 138, 75, 214, 96, 241, 225, 82,\n\n 134, 122, 37, 83, 67, 32, 
247, 60, 14, 243, 175, 12, 129, 94, 77,\n\n 177, 56, 74, 54, 240, 57, 20, 168, 124, 84, 173, 167, 73, 221, 195,\n\n 150, 0, 40, 87, 195, 211, 157, 162, 198, 225, 240, 240, 63, 131,\n\n 224, 149, 157, 131, 182, 84, 68, 252, 93, 64, 3, 51, 241, 30, 6,\n\n 211, 87, 141, 136, 92, 92, 90, 81, 174\n\n ]\n\n );\n", "file_path": "tests/generate-test.rs", "rank": 8, "score": 48461.45034935652 }, { "content": "fn generate_1_kb(c: &mut Criterion) {\n\n let mut salsa20 = Salsa20::new(Key16([2; 16]), [0; 8], 0);\n\n let mut buffer = [0; 1024];\n\n\n\n c.bench(\n\n \"generate\",\n\n Benchmark::new(\n\n \"1Kb\", move |b| b.iter(|| salsa20.generate(black_box(&mut buffer)))\n\n ).throughput(Throughput::Bytes(1024))\n\n );\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 9, "score": 45298.84249629694 }, { "content": "fn encrypt_1_kb(c: &mut Criterion) {\n\n let mut salsa20 = Salsa20::new(Key16([1; 16]), [0; 8], 0);\n\n let mut buffer = [0; 1024];\n\n\n\n c.bench(\n\n \"encrypt\",\n\n Benchmark::new(\n\n \"1Kb\", move |b| b.iter(|| salsa20.encrypt(black_box(&mut buffer)))\n\n ).throughput(Throughput::Bytes(1024))\n\n );\n\n}\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 10, "score": 45298.84249629694 }, { "content": " 176, 34, 246, 166, 177, 26, 251, 99, 106, 175, 169, 8, 171, 109,\n\n 76, 29, 35, 87, 249, 33, 230, 92, 90, 54, 84, 188, 78, 48, 238, 50,\n\n 41, 84, 100, 217, 62, 216, 72, 17, 199, 8, 80, 228, 76, 44, 71, 87,\n\n 16, 31, 20, 226, 164, 160, 186, 140, 113\n\n ]\n\n );\n\n\n\n fn test(counter: u64, expected_data: Vec<u8>) {\n\n let key = Key32([\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n\n 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31\n\n ]);\n\n let nonce = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let mut salsa = Salsa20::new(key, nonce, 0);\n\n let mut buffer = vec![0; 100];\n\n salsa.generate(&mut buffer);\n\n\n\n salsa.set_counter(counter);\n\n salsa.generate(&mut buffer);\n\n\n\n assert_eq!(buffer, expected_data);\n\n }\n\n}\n", "file_path": "tests/set-counter.rs", "rank": 11, "score": 36070.93762451472 }, { "content": "extern crate rust_salsa20;\n\nuse rust_salsa20::{Salsa20, Key::Key32};\n\n\n\n#[test]\n", "file_path": "tests/set-counter.rs", "rank": 12, "score": 36064.3667951371 }, { "content": "\n\n fn test(counter: u64, expected_data: Vec<u8>) {\n\n let key = Key32([\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n\n 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31\n\n ]);\n\n let nonce = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let mut salsa = Salsa20::new(key, nonce, counter);\n\n let mut buffer = vec![7; 200];\n\n\n\n salsa.encrypt(&mut buffer[..7]);\n\n salsa.encrypt(&mut buffer[7..13]);\n\n salsa.encrypt(&mut buffer[13..197]);\n\n salsa.encrypt(&mut buffer[197..200]);\n\n\n\n assert_eq!(buffer, expected_data);\n\n }\n\n}\n", "file_path": "tests/encrypt-test.rs", "rank": 14, "score": 23836.73246722656 }, { "content": "\n\n fn test(counter: u64, expected_data: Vec<u8>) {\n\n let key = Key32([\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n\n 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31\n\n ]);\n\n let nonce = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let mut salsa = Salsa20::new(key, nonce, counter);\n\n let mut buffer = vec![0; 200];\n\n\n\n salsa.generate(&mut buffer[..7]);\n\n salsa.generate(&mut buffer[7..13]);\n\n salsa.generate(&mut buffer[13..197]);\n\n salsa.generate(&mut buffer[197..200]);\n\n\n\n assert_eq!(buffer, expected_data);\n\n }\n\n}\n", "file_path": "tests/generate-test.rs", "rank": 15, 
"score": 23836.73246722656 }, { "content": "extern crate rust_salsa20;\n\nuse rust_salsa20::{Salsa20, Key::Key32};\n\n\n\n#[test]\n", "file_path": "tests/generate-test.rs", "rank": 16, "score": 23830.709557047696 }, { "content": "extern crate rust_salsa20;\n\nuse rust_salsa20::{Salsa20, Key::Key32};\n\n\n\n#[test]\n", "file_path": "tests/encrypt-test.rs", "rank": 17, "score": 23830.709557047696 }, { "content": "\n\n test(\n\n 0xffffffff,\n\n vec![\n\n 204, 74, 84, 181, 96, 108, 197, 131, 29, 86, 219, 130, 198, 167,\n\n 107, 85, 235, 63, 95, 201, 174, 187, 48, 32, 179, 5, 108, 40, 241,\n\n 161, 240, 41, 134, 154, 124, 228, 226, 196, 21, 154, 185, 169, 116,\n\n 3, 220, 52, 112, 176, 65, 200, 164, 21, 84, 191, 86, 142, 250, 32,\n\n 208, 87, 8, 20, 109, 151, 187, 108, 153, 235, 58, 119, 253, 188,\n\n 172, 214, 188, 58, 27, 201, 234, 7, 202, 56, 213, 116, 61, 106, 6,\n\n 14, 188, 20, 119, 99, 62, 111, 15, 192, 248, 109, 189, 206, 96, 80,\n\n 29, 27, 11, 80, 239, 32, 96, 198, 208, 61, 148, 19, 24, 81, 213,\n\n 147, 78, 126, 57, 25, 182, 75, 70, 101, 86, 208, 142, 156, 94, 103,\n\n 61, 15, 229, 102, 230, 98, 13, 108, 96, 134, 167, 127, 66, 33, 48,\n\n 175, 16, 104, 136, 214, 51, 61, 175, 178, 98, 149, 97, 178, 142,\n\n 52, 133, 189, 150, 180, 8, 245, 229, 183, 199, 34, 38, 245, 97, 41,\n\n 240, 163, 136, 75, 128, 102, 235, 214, 135, 118, 15, 149, 13, 160,\n\n 140, 180, 43, 124, 59, 63, 130, 95, 130, 115\n\n ]\n\n );\n", "file_path": "tests/generate-test.rs", "rank": 18, "score": 23824.68029897614 }, { "content": "\n\n test(\n\n 0xffffffff,\n\n vec![\n\n 203, 77, 83, 178, 103, 107, 194, 132, 26, 81, 220, 133, 193, 160,\n\n 108, 82, 236, 56, 88, 206, 169, 188, 55, 39, 180, 2, 107, 47, 246,\n\n 166, 247, 46, 129, 157, 123, 227, 229, 195, 18, 157, 190, 174, 115,\n\n 4, 219, 51, 119, 183, 70, 207, 163, 18, 83, 184, 81, 137, 253, 39,\n\n 215, 80, 15, 19, 106, 144, 188, 107, 158, 236, 61, 112, 250, 187,\n\n 171, 209, 187, 61, 28, 206, 237, 0, 205, 63, 210, 115, 58, 109, 1,\n\n 9, 187, 19, 112, 100, 57, 104, 8, 199, 255, 106, 186, 201, 103, 87,\n\n 26, 28, 12, 87, 232, 39, 103, 193, 215, 58, 147, 20, 31, 86, 210,\n\n 148, 73, 121, 62, 30, 177, 76, 65, 98, 81, 215, 137, 155, 89, 96,\n\n 58, 8, 226, 97, 225, 101, 10, 107, 103, 129, 160, 120, 69, 38, 55,\n\n 168, 23, 111, 143, 209, 52, 58, 168, 181, 101, 146, 102, 181, 137,\n\n 51, 130, 186, 145, 179, 15, 242, 226, 176, 192, 37, 33, 242, 102,\n\n 46, 247, 164, 143, 76, 135, 97, 236, 209, 128, 113, 8, 146, 10,\n\n 167, 139, 179, 44, 123, 60, 56, 133, 88, 133, 116\n\n ]\n\n );\n", "file_path": "tests/encrypt-test.rs", "rank": 19, "score": 23824.68029897614 }, { "content": "pub(super) fn u8_to_u32(bytes: &[u8], u32_slice: &mut [u32]) {\n\n for (index, value) in u32_slice.iter_mut().enumerate() {\n\n let offset = index * 4;\n\n *value = u32::from_le_bytes([\n\n bytes[offset],\n\n bytes[offset + 1],\n\n bytes[offset + 2],\n\n bytes[offset + 3]\n\n ]);\n\n }\n\n}\n\n\n\npub(super) fn xor_from_slice(to: &mut [u8], from: &[u8]) {\n\n for (to_byte, from_byte) in to.iter_mut().zip(from.iter()) {\n\n *to_byte ^= from_byte;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/utils.rs", "rank": 20, "score": 20005.896046436254 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn u8_to_u32_test() {\n\n test(&[0, 0, 0, 0], &[0]);\n\n test(&[1, 0, 0, 0], &[1]);\n\n test(&[1, 2, 3, 4], &[67305985]);\n\n test(&[1, 2, 3, 4, 5], &[67305985]);\n\n\n\n fn test(bytes: &[u8], expected_values: &[u32]) {\n\n let mut values = [0];\n\n u8_to_u32(&bytes, &mut 
values);\n\n assert_eq!(values, expected_values);\n\n }\n\n }\n\n\n\n #[test]\n\n fn xor_from_slice_test() {\n\n test(&mut [0, 0, 0, 0], &[0, 0, 0, 0], &[0, 0, 0, 0]);\n\n test(&mut [0, 0, 0, 1], &[0, 0, 0, 0], &[0, 0, 0, 1]);\n", "file_path": "src/utils.rs", "rank": 21, "score": 19999.034776909786 }, { "content": " test(&mut [1, 0, 1, 0], &[1, 1, 0, 0], &[0, 1, 1, 0]);\n\n test(&mut [1, 2, 3, 4], &[5, 6, 7, 8], &[4, 4, 4, 12]);\n\n test(&mut [1, 2, 3, 4, 0, 0], &[5, 6, 7, 8], &[4, 4, 4, 12, 0, 0]);\n\n\n\n fn test(to: &mut [u8], from: &[u8], expected: &[u8]) {\n\n xor_from_slice(to, from);\n\n assert_eq!(to, expected);\n\n }\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 22, "score": 19994.92692745491 }, { "content": "### Encrypt\n\n```rust\n\nextern crate rust_salsa20;\n\nuse rust_salsa20::{Salsa20, Key::Key32};\n\n\n\nfn main() {\n\n let key = Key32([\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n\n 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31\n\n ]);\n\n let nonce = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let mut salsa = Salsa20::new(key, nonce, 0);\n\n let mut buffer = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0];\n\n salsa.encrypt(&mut buffer);\n\n\n\n assert_eq!(buffer, [44, 132, 37, 162, 139, 34, 27, 154, 125, 157]);\n\n}\n\n```\n\n## Contributors\n\n\n\nSee github for full [contributors list](https://github.com/bugagashenkj/rust-salsa20/graphs/contributors)\n", "file_path": "README.md", "rank": 23, "score": 10561.218423099564 }, { "content": "[![TravisCI](https://api.travis-ci.org/bugagashenkj/rust-salsa20.svg?branch=master)](https://travis-ci.org/bugagashenkj/rust-salsa20)\n\n[![Crates.io](https://img.shields.io/crates/v/rust-salsa20.svg?)](https://crates.io/crates/rust-salsa20)\n\n[![Docs](https://docs.rs/rust-salsa20/badge.svg)](https://docs.rs/rust-salsa20)\n\n\n\n# Salsa20 stream cipher\n\n\n\n[Salsa20](https://cr.yp.to/snuffle/spec.pdf) is a stream cipher built on a pseudo-random function based on add-rotate-xor operations — 32-bit addition, bitwise addition and rotation operations.\n\n\n\n## Optimization of doubleround calculations\n\n\n\nThe doubleround function modifies the initial matrix 10 times to encrypt every 64-byte block. For each subsequent 64-byte block in the matrix, only 8 bytes associated with the counter are changed. Immutable bytes can be used to pre-compute the result of the first doubleround call. 
A preliminary calculation of the original matrix optimizes the first call to the doubleround function, reducing the number of calls to the quarterround function from 8 to 4 with a counter of less than 2 ^ 32 and from 8 to 6 with a counter of more than 2 ^ 32, which reduces the algorithmic complexity of the Salsa20 function by 5 and 2.5 percent respectively.\n\n\n\n## Usage\n\n\n\nTo install rust-salsa20, add the following to your Cargo.toml:\n\n\n\n```toml\n\n[dependencies]\n\nrust-salsa20 = \"^0.3\"\n\n```\n\n\n\n## Examples\n\n\n\n### Generate\n\n```rust\n\nextern crate rust_salsa20;\n\nuse rust_salsa20::{Salsa20, Key::Key32};\n\n\n\nfn main() {\n\n let key = Key32([\n\n 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n\n 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31\n\n ]);\n\n let nonce = [1, 2, 3, 4, 5, 6, 7, 8];\n\n let mut salsa = Salsa20::new(key, nonce, 0);\n\n let mut buffer = [0; 10];\n\n salsa.generate(&mut buffer);\n\n\n\n assert_eq!(buffer, [45, 134, 38, 166, 142, 36, 28, 146, 116, 157]);\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 24, "score": 10558.032953025253 }, { "content": "extern crate criterion;\n\nextern crate rust_salsa20;\n\n\n\nuse rust_salsa20::{Salsa20, Key::Key16};\n\nuse criterion::*;\n\n\n", "file_path": "benches/benchmarks.rs", "rank": 42, "score": 6.565495398042 } ]
Rust
querier/src/namespace/mod.rs
r4ntix/influxdb_iox
5ff874925101e2afbafa6853385260a2ba044394
use crate::{ cache::CatalogCache, chunk::ChunkAdapter, ingester::IngesterConnection, query_log::QueryLog, table::QuerierTable, }; use data_types::{NamespaceId, NamespaceSchema}; use iox_query::exec::Executor; use parquet_file::storage::ParquetStorage; use schema::Schema; use std::{collections::HashMap, sync::Arc}; mod query_access; #[cfg(test)] mod test_util; #[derive(Debug)] pub struct QuerierNamespace { id: NamespaceId, name: Arc<str>, tables: Arc<HashMap<Arc<str>, Arc<QuerierTable>>>, exec: Arc<Executor>, catalog_cache: Arc<CatalogCache>, query_log: Arc<QueryLog>, } impl QuerierNamespace { pub fn new( chunk_adapter: Arc<ChunkAdapter>, schema: Arc<NamespaceSchema>, name: Arc<str>, exec: Arc<Executor>, ingester_connection: Arc<dyn IngesterConnection>, query_log: Arc<QueryLog>, ) -> Self { let tables: HashMap<_, _> = schema .tables .iter() .map(|(table_name, table_schema)| { let table_name = Arc::from(table_name.clone()); let id = table_schema.id; let schema = Schema::try_from(table_schema.clone()).expect("cannot build schema"); let table = Arc::new(QuerierTable::new( Arc::clone(&name), id, Arc::clone(&table_name), Arc::new(schema), Arc::clone(&ingester_connection), Arc::clone(&chunk_adapter), )); (table_name, table) }) .collect(); let id = schema.id; Self { id, name, tables: Arc::new(tables), exec, catalog_cache: Arc::clone(chunk_adapter.catalog_cache()), query_log, } } #[allow(clippy::too_many_arguments)] pub fn new_testing( catalog_cache: Arc<CatalogCache>, store: ParquetStorage, metric_registry: Arc<metric::Registry>, name: Arc<str>, schema: Arc<NamespaceSchema>, exec: Arc<Executor>, ingester_connection: Arc<dyn IngesterConnection>, ) -> Self { let time_provider = catalog_cache.time_provider(); let chunk_adapter = Arc::new(ChunkAdapter::new( catalog_cache, store, metric_registry, Arc::clone(&time_provider), )); let query_log = Arc::new(QueryLog::new(10, time_provider)); Self::new( chunk_adapter, schema, name, exec, ingester_connection, query_log, ) } pub fn name(&self) -> Arc<str> { Arc::clone(&self.name) } #[must_use] pub fn catalog_cache(&self) -> &Arc<CatalogCache> { &self.catalog_cache } } #[cfg(test)] mod tests { use super::*; use crate::namespace::test_util::querier_namespace; use data_types::ColumnType; use iox_tests::util::TestCatalog; use schema::{builder::SchemaBuilder, InfluxColumnType, InfluxFieldType}; #[tokio::test] async fn test_sync_tables() { let catalog = TestCatalog::new(); let ns = catalog.create_namespace("ns").await; let qns = querier_namespace(&ns).await; assert_eq!(tables(&qns), Vec::<String>::new()); ns.create_table("table1").await; ns.create_table("table2").await; let qns = querier_namespace(&ns).await; assert_eq!( tables(&qns), vec![String::from("table1"), String::from("table2")] ); ns.create_table("table3").await; let qns = querier_namespace(&ns).await; assert_eq!( tables(&qns), vec![ String::from("table1"), String::from("table2"), String::from("table3") ] ); } #[tokio::test] async fn test_sync_schemas() { let catalog = TestCatalog::new(); let ns = catalog.create_namespace("ns").await; let table = ns.create_table("table").await; let qns = querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new().build().unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); table.create_column("col1", ColumnType::I64).await; table.create_column("col2", ColumnType::Bool).await; table.create_column("col3", ColumnType::Tag).await; let qns = querier_namespace(&ns).await; let expected_schema = 
SchemaBuilder::new() .influx_column("col1", InfluxColumnType::Field(InfluxFieldType::Integer)) .influx_column("col2", InfluxColumnType::Field(InfluxFieldType::Boolean)) .influx_column("col3", InfluxColumnType::Tag) .build() .unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); table.create_column("col4", ColumnType::Tag).await; table.create_column("col5", ColumnType::Time).await; let qns = querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new() .influx_column("col1", InfluxColumnType::Field(InfluxFieldType::Integer)) .influx_column("col2", InfluxColumnType::Field(InfluxFieldType::Boolean)) .influx_column("col3", InfluxColumnType::Tag) .influx_column("col4", InfluxColumnType::Tag) .influx_column("col5", InfluxColumnType::Timestamp) .build() .unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); } fn sorted<T>(mut v: Vec<T>) -> Vec<T> where T: Ord, { v.sort(); v } fn tables(querier_namespace: &QuerierNamespace) -> Vec<String> { sorted( querier_namespace .tables .keys() .map(|s| s.to_string()) .collect(), ) } fn schema(querier_namespace: &QuerierNamespace, table: &str) -> Arc<Schema> { Arc::clone(querier_namespace.tables.get(table).unwrap().schema()) } }
use crate::{ cache::CatalogCache, chunk::ChunkAdapter, ingester::IngesterConnection, query_log::QueryLog, table::QuerierTable, }; use data_types::{Nam
await; let expected_schema = SchemaBuilder::new().build().unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); table.create_column("col1", ColumnType::I64).await; table.create_column("col2", ColumnType::Bool).await; table.create_column("col3", ColumnType::Tag).await; let qns = querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new() .influx_column("col1", InfluxColumnType::Field(InfluxFieldType::Integer)) .influx_column("col2", InfluxColumnType::Field(InfluxFieldType::Boolean)) .influx_column("col3", InfluxColumnType::Tag) .build() .unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); table.create_column("col4", ColumnType::Tag).await; table.create_column("col5", ColumnType::Time).await; let qns = querier_namespace(&ns).await; let expected_schema = SchemaBuilder::new() .influx_column("col1", InfluxColumnType::Field(InfluxFieldType::Integer)) .influx_column("col2", InfluxColumnType::Field(InfluxFieldType::Boolean)) .influx_column("col3", InfluxColumnType::Tag) .influx_column("col4", InfluxColumnType::Tag) .influx_column("col5", InfluxColumnType::Timestamp) .build() .unwrap(); let actual_schema = schema(&qns, "table"); assert_eq!(actual_schema.as_ref(), &expected_schema,); } fn sorted<T>(mut v: Vec<T>) -> Vec<T> where T: Ord, { v.sort(); v } fn tables(querier_namespace: &QuerierNamespace) -> Vec<String> { sorted( querier_namespace .tables .keys() .map(|s| s.to_string()) .collect(), ) } fn schema(querier_namespace: &QuerierNamespace, table: &str) -> Arc<Schema> { Arc::clone(querier_namespace.tables.get(table).unwrap().schema()) } }
espaceId, NamespaceSchema}; use iox_query::exec::Executor; use parquet_file::storage::ParquetStorage; use schema::Schema; use std::{collections::HashMap, sync::Arc}; mod query_access; #[cfg(test)] mod test_util; #[derive(Debug)] pub struct QuerierNamespace { id: NamespaceId, name: Arc<str>, tables: Arc<HashMap<Arc<str>, Arc<QuerierTable>>>, exec: Arc<Executor>, catalog_cache: Arc<CatalogCache>, query_log: Arc<QueryLog>, } impl QuerierNamespace { pub fn new( chunk_adapter: Arc<ChunkAdapter>, schema: Arc<NamespaceSchema>, name: Arc<str>, exec: Arc<Executor>, ingester_connection: Arc<dyn IngesterConnection>, query_log: Arc<QueryLog>, ) -> Self { let tables: HashMap<_, _> = schema .tables .iter() .map(|(table_name, table_schema)| { let table_name = Arc::from(table_name.clone()); let id = table_schema.id; let schema = Schema::try_from(table_schema.clone()).expect("cannot build schema"); let table = Arc::new(QuerierTable::new( Arc::clone(&name), id, Arc::clone(&table_name), Arc::new(schema), Arc::clone(&ingester_connection), Arc::clone(&chunk_adapter), )); (table_name, table) }) .collect(); let id = schema.id; Self { id, name, tables: Arc::new(tables), exec, catalog_cache: Arc::clone(chunk_adapter.catalog_cache()), query_log, } } #[allow(clippy::too_many_arguments)] pub fn new_testing( catalog_cache: Arc<CatalogCache>, store: ParquetStorage, metric_registry: Arc<metric::Registry>, name: Arc<str>, schema: Arc<NamespaceSchema>, exec: Arc<Executor>, ingester_connection: Arc<dyn IngesterConnection>, ) -> Self { let time_provider = catalog_cache.time_provider(); let chunk_adapter = Arc::new(ChunkAdapter::new( catalog_cache, store, metric_registry, Arc::clone(&time_provider), )); let query_log = Arc::new(QueryLog::new(10, time_provider)); Self::new( chunk_adapter, schema, name, exec, ingester_connection, query_log, ) } pub fn name(&self) -> Arc<str> { Arc::clone(&self.name) } #[must_use] pub fn catalog_cache(&self) -> &Arc<CatalogCache> { &self.catalog_cache } } #[cfg(test)] mod tests { use super::*; use crate::namespace::test_util::querier_namespace; use data_types::ColumnType; use iox_tests::util::TestCatalog; use schema::{builder::SchemaBuilder, InfluxColumnType, InfluxFieldType}; #[tokio::test] async fn test_sync_tables() { let catalog = TestCatalog::new(); let ns = catalog.create_namespace("ns").await; let qns = querier_namespace(&ns).await; assert_eq!(tables(&qns), Vec::<String>::new()); ns.create_table("table1").await; ns.create_table("table2").await; let qns = querier_namespace(&ns).await; assert_eq!( tables(&qns), vec![String::from("table1"), String::from("table2")] ); ns.create_table("table3").await; let qns = querier_namespace(&ns).await; assert_eq!( tables(&qns), vec![ String::from("table1"), String::from("table2"), String::from("table3") ] ); } #[tokio::test] async fn test_sync_schemas() { let catalog = TestCatalog::new(); let ns = catalog.create_namespace("ns").await; let table = ns.create_table("table").await; let qns = querier_namespace(&ns).
random
[ { "content": "//! Code for defining values and tag sets with tags that are dependent on other tags.\n\n\n\nuse crate::now_ns;\n\nuse crate::specification::{DataSpec, ValuesSpec};\n\nuse crate::substitution::new_handlebars_registry;\n\nuse crate::tag_pair::StaticTagPair;\n\nuse handlebars::Handlebars;\n\nuse itertools::Itertools;\n\nuse serde_json::json;\n\nuse snafu::{OptionExt, ResultExt, Snafu};\n\n/// Module for pre-generated values and tag sets that can be used when generating samples from\n\n/// agents.\n\nuse std::collections::BTreeMap;\n\nuse std::fmt::Formatter;\n\nuse std::sync::Arc;\n\n\n\n/// Errors that may happen while reading a TOML specification.\n\n#[derive(Snafu, Debug)]\n\n#[allow(missing_docs)]\n\npub enum Error {\n", "file_path": "iox_data_generator/src/tag_set.rs", "rank": 0, "score": 14.585999288147985 }, { "content": "use std::{\n\n borrow::Cow,\n\n cmp::Ordering,\n\n collections::{BTreeMap, BTreeSet},\n\n convert::{TryFrom, TryInto},\n\n fmt::Display,\n\n sync::Arc,\n\n};\n\n\n\nuse hashbrown::{hash_map, HashMap};\n\nuse itertools::Itertools;\n\nuse observability_deps::tracing::{debug, trace};\n\nuse snafu::{ResultExt, Snafu};\n\n\n\nuse crate::column::{self, cmp::Operator, Column, RowIDs, RowIDsOption};\n\nuse crate::schema;\n\nuse crate::schema::{AggregateType, LogicalDataType, ResultSchema};\n\nuse crate::value::{\n\n AggregateVec, EncodedValues, OwnedValue, Scalar, Value, Values, ValuesIterator,\n\n};\n", "file_path": "read_buffer/src/row_group.rs", "rank": 1, "score": 14.570596573305924 }, { "content": "use std::{\n\n net::{SocketAddr, ToSocketAddrs, UdpSocket},\n\n str::FromStr,\n\n};\n\n\n\nuse async_trait::async_trait;\n\n\n\nuse observability_deps::tracing::*;\n\nuse trace::span::Span;\n\n\n\nuse crate::export::AsyncExport;\n\nuse crate::thrift::agent::{AgentSyncClient, TAgentSyncClient};\n\nuse crate::thrift::jaeger;\n\nuse thrift::protocol::{TCompactInputProtocol, TCompactOutputProtocol};\n\n\n\nmod span;\n\n\n\n/// A key=value pair for span annotations.\n\n#[derive(Debug, Clone)]\n\npub struct JaegerTag {\n", "file_path": "trace_exporters/src/jaeger.rs", "rank": 2, "score": 14.262702827241311 }, { "content": "pub use row_group::{BinaryExpr, Predicate};\n\npub use table::ReadFilterResults;\n\n\n\n/// THIS MODULE SHOULD ONLY BE IMPORTED FOR BENCHMARKS.\n\n///\n\n/// This module lets us expose internal parts of the crate so that we can use\n\n/// libraries like criterion for benchmarking.\n\n///\n\n/// It should not be imported into any non-testing or benchmarking crates.\n\npub mod benchmarks {\n\n pub use crate::column::{\n\n cmp::Operator,\n\n encoding::scalar::transcoders::*,\n\n encoding::scalar::{Fixed, FixedNull, ScalarEncoding},\n\n encoding::string,\n\n Column, RowIDs,\n\n };\n\n pub use crate::row_group::{ColumnType, RowGroup};\n\n use crate::{ChunkMetrics, RBChunk};\n\n\n\n // Allow external benchmarks to use this crate-only test method\n\n pub fn new_from_row_group(table_name: impl Into<String>, row_group: RowGroup) -> RBChunk {\n\n RBChunk::new_from_row_group(table_name, row_group, ChunkMetrics::new_unregistered())\n\n }\n\n}\n", "file_path": "read_buffer/src/lib.rs", "rank": 3, "score": 14.250814340285212 }, { "content": "use http::{HeaderValue, Request, Response};\n\nuse http_body::SizeHint;\n\nuse pin_project::pin_project;\n\nuse tower::{Layer, Service};\n\n\n\nuse observability_deps::tracing::error;\n\nuse trace::{span::SpanRecorder, TraceCollector};\n\n\n\nuse crate::classify::{classify_headers, classify_response, Classification};\n\nuse 
crate::ctx::TraceHeaderParser;\n\nuse crate::metrics::{MetricsCollection, MetricsRecorder};\n\n\n\n/// `TraceLayer` implements `tower::Layer` and can be used to decorate a\n\n/// `tower::Service` to collect information about requests flowing through it\n\n///\n\n/// Including:\n\n///\n\n/// - Extracting distributed trace context and attaching span context\n\n/// - Collecting count and duration metrics - [RED metrics][1]\n\n///\n", "file_path": "trace_http/src/tower.rs", "rank": 4, "score": 14.060397430010841 }, { "content": "pub mod http;\n\npub mod rpc;\n\npub mod server_type;\n\nmod service;\n\n\n\n// These crates are used by the macros we export; provide a stable\n\n// path to use them from in downstream crates.\n\npub mod reexport {\n\n pub use generated_types;\n\n pub use service_grpc_testing;\n\n pub use tokio_stream;\n\n pub use tonic;\n\n pub use tonic_health;\n\n pub use tonic_reflection;\n\n pub use tower_http;\n\n pub use trace_http;\n\n}\n\n\n\npub use service::Service;\n\n\n", "file_path": "ioxd_common/src/lib.rs", "rank": 5, "score": 13.974001649224043 }, { "content": "/// Re-export generated_types\n\npub mod generated_types {\n\n pub use generated_types::influxdata::pbdata::v1::*;\n\n}\n\n\n\nuse self::generated_types::write_service_client::WriteServiceClient;\n\n\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\n/// An IOx Write API client.\n\n///\n\n/// ```no_run\n\n/// #[tokio::main]\n\n/// # async fn main() {\n\n/// use influxdb_iox_client::{\n\n/// write::Client,\n\n/// connection::Builder,\n\n/// };\n\n///\n", "file_path": "influxdb_iox_client/src/client/write.rs", "rank": 6, "score": 13.688509043570875 }, { "content": "//! Agents responsible for generating points\n\n\n\nuse crate::{\n\n measurement::{MeasurementGenerator, MeasurementLineIterator},\n\n now_ns, specification,\n\n tag_pair::TagPair,\n\n write::PointsWriter,\n\n};\n\n\n\nuse crate::tag_set::GeneratedTagSets;\n\nuse serde_json::json;\n\nuse snafu::{ResultExt, Snafu};\n\nuse std::sync::{\n\n atomic::{AtomicU64, Ordering},\n\n Arc,\n\n};\n\nuse std::time::{Duration, Instant};\n\nuse tracing::{debug, info};\n\n\n\n/// Agent-specific Results\n", "file_path": "iox_data_generator/src/agent.rs", "rank": 7, "score": 13.646035514376166 }, { "content": "pub mod boolean;\n\npub mod cmp;\n\npub mod encoding;\n\npub mod float;\n\npub mod integer;\n\npub mod string;\n\n\n\nuse std::{borrow::Cow, collections::BTreeSet, mem::size_of};\n\n\n\nuse croaring::Bitmap;\n\nuse either::Either;\n\n\n\nuse arrow::array::Array;\n\n\n\nuse crate::schema::LogicalDataType;\n\nuse crate::value::{EncodedValues, OwnedValue, Scalar, Value, Values};\n\nuse boolean::BooleanEncoding;\n\nuse encoding::bool;\n\nuse float::FloatEncoding;\n\nuse integer::IntegerEncoding;\n", "file_path": "read_buffer/src/column.rs", "rank": 8, "score": 13.593165073451942 }, { "content": "#![deny(rustdoc::broken_intra_doc_links, rustdoc::bare_urls, rust_2018_idioms)]\n\n#![warn(\n\n missing_debug_implementations,\n\n clippy::explicit_iter_loop,\n\n clippy::use_self,\n\n clippy::clone_on_ref_ptr,\n\n clippy::future_not_send\n\n)]\n\n\n\nuse crate::export::AsyncExporter;\n\nuse crate::jaeger::JaegerAgentExporter;\n\nuse jaeger::JaegerTag;\n\nuse snafu::Snafu;\n\nuse std::num::NonZeroU16;\n\nuse std::sync::Arc;\n\n\n\npub mod export;\n\n\n\nmod jaeger;\n\n\n", "file_path": "trace_exporters/src/lib.rs", "rank": 9, "score": 13.56903133086593 }, { "content": "//! Functions for filtering rows from a [`MutableBatch`]\n\n//!\n\n//! 
The returned ranges can then be used with `MutableBatch::extend_from_range`\n\n\n\nuse crate::column::ColumnData;\n\nuse crate::MutableBatch;\n\nuse schema::TIME_COLUMN_NAME;\n\nuse std::ops::Range;\n\n\n\n/// Given a [`MutableBatch`] a time predicate and a set of row ranges, returns the row\n\n/// indexes that pass the predicate\n\n///\n\n/// # Panic\n\n///\n\n/// Panics if `batch` does not contain a time column of the correct type\n", "file_path": "mutable_batch/src/payload/filter.rs", "rank": 10, "score": 13.554033873363569 }, { "content": " use crate as arrow_util;\n\n use crate::assert_batches_eq;\n\n use arrow::array::{\n\n ArrayDataBuilder, DictionaryArray, Float64Array, Int32Array, StringArray, UInt32Array,\n\n };\n\n use arrow::compute::concat;\n\n use arrow_flight::utils::flight_data_to_arrow_batch;\n\n use datafusion::physical_plan::limit::truncate_batch;\n\n use std::iter::FromIterator;\n\n\n\n #[test]\n\n fn test_optimize() {\n\n let values = StringArray::from(vec![\n\n \"duplicate\",\n\n \"duplicate\",\n\n \"foo\",\n\n \"boo\",\n\n \"unused\",\n\n \"duplicate\",\n\n ]);\n", "file_path": "arrow_util/src/optimize.rs", "rank": 11, "score": 13.4930799702998 }, { "content": "/// Re-export generated_types\n\nuse generated_types::{i_ox_testing_client::IOxTestingClient, TestErrorRequest};\n\n\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\n/// A client for testing purposes\n\n///\n\n/// ```no_run\n\n/// #[tokio::main]\n\n/// # async fn main() {\n\n/// use influxdb_iox_client::{\n\n/// test::Client,\n\n/// connection::Builder,\n\n/// };\n\n///\n\n/// let mut connection = Builder::default()\n\n/// .build(\"http://127.0.0.1:8082\")\n\n/// .await\n\n/// .unwrap();\n", "file_path": "influxdb_iox_client/src/client/test.rs", "rank": 12, "score": 13.448160556283412 }, { "content": "use self::generated_types::{delete_service_client::DeleteServiceClient, *};\n\n\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\n/// Re-export generated_types\n\npub mod generated_types {\n\n pub use generated_types::influxdata::iox::delete::v1::*;\n\n pub use generated_types::influxdata::iox::predicate::v1::*;\n\n}\n\n\n\n/// An IOx Delete API client.\n\n///\n\n/// This client wraps the underlying `tonic` generated client with a\n\n/// more ergonomic interface.\n\n///\n\n/// ```no_run\n\n/// #[tokio::main]\n\n/// # async fn main() {\n\n/// use influxdb_iox_client::{\n", "file_path": "influxdb_iox_client/src/client/delete.rs", "rank": 13, "score": 13.396596596362613 }, { "content": "use self::generated_types::{object_store_service_client::ObjectStoreServiceClient, *};\n\n\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\nuse futures_util::stream::BoxStream;\n\nuse tonic::Status;\n\n\n\n/// Re-export generated_types\n\npub mod generated_types {\n\n pub use generated_types::influxdata::iox::object_store::v1::*;\n\n}\n\n\n\n/// A basic client for interacting the a remote catalog.\n\n#[derive(Debug, Clone)]\n\npub struct Client {\n\n inner: ObjectStoreServiceClient<Connection>,\n\n}\n\n\n\nimpl Client {\n", "file_path": "influxdb_iox_client/src/client/store.rs", "rank": 14, "score": 13.396596596362613 }, { "content": "// Export these crates publicly so we can have a single reference\n\npub use tracing;\n\npub use tracing::instrument;\n", "file_path": "observability_deps/src/lib.rs", "rank": 15, "score": 13.390007667067207 }, { "content": "pub use upstream::*;\n\n\n\n// Publically re-export datafusion-proto crate as well\n\npub use 
datafusion_proto;\n", "file_path": "datafusion/src/lib.rs", "rank": 16, "score": 13.390007667067207 }, { "content": "pub(crate) mod request;\n\npub(crate) mod response;\n\n\n\nuse std::num::NonZeroU64;\n\nuse std::time::Duration;\n\n\n\nuse snafu::{ResultExt, Snafu};\n\nuse tonic::Status;\n\n\n\nuse generated_types::{aggregate::AggregateType, Predicate};\n\nuse influxdb_storage_client::{connection::Connection, Client, OrgAndBucket};\n\nuse influxrpc_parser::predicate;\n\nuse iox_time;\n\n\n\n#[derive(Debug, Snafu)]\n\npub enum ParseError {\n\n #[snafu(display(\"unable to parse timestamp '{:?}'\", t))]\n\n Timestamp { t: String },\n\n\n\n #[snafu(display(\"unable to parse database name '{:?}'\", db_name))]\n", "file_path": "influxdb_iox/src/commands/storage.rs", "rank": 17, "score": 13.345637940253388 }, { "content": "// Autogenerated by Thrift Compiler (0.13.0)\n\n// DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n\n\n\n#![allow(unused_imports)]\n\n#![allow(unused_extern_crates)]\n\n#![cfg_attr(rustfmt, rustfmt_skip)]\n\n\n\nextern crate thrift;\n\n\n\nuse thrift::OrderedFloat;\n\nuse std::cell::RefCell;\n\nuse std::collections::{BTreeMap, BTreeSet};\n\nuse std::convert::{From, TryFrom};\n\nuse std::default::Default;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::rc::Rc;\n\n\n\nuse thrift::{ApplicationError, ApplicationErrorKind, ProtocolError, ProtocolErrorKind, TThriftClient};\n", "file_path": "trace_exporters/src/thrift/zipkincore.rs", "rank": 18, "score": 13.272926064972676 }, { "content": "// Autogenerated by Thrift Compiler (0.13.0)\n\n// DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n\n\n\n#![allow(unused_imports)]\n\n#![allow(unused_extern_crates)]\n\n#![cfg_attr(rustfmt, rustfmt_skip)]\n\n\n\nextern crate thrift;\n\n\n\nuse thrift::OrderedFloat;\n\nuse std::cell::RefCell;\n\nuse std::collections::{BTreeMap, BTreeSet};\n\nuse std::convert::{From, TryFrom};\n\nuse std::default::Default;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::rc::Rc;\n\n\n\nuse thrift::{ApplicationError, ApplicationErrorKind, ProtocolError, ProtocolErrorKind, TThriftClient};\n", "file_path": "trace_exporters/src/thrift/jaeger.rs", "rank": 19, "score": 13.272926064972676 }, { "content": "// Autogenerated by Thrift Compiler (0.13.0)\n\n// DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n\n\n\n#![allow(unused_imports)]\n\n#![allow(unused_extern_crates)]\n\n#![cfg_attr(rustfmt, rustfmt_skip)]\n\n\n\nextern crate thrift;\n\n\n\nuse thrift::OrderedFloat;\n\nuse std::cell::RefCell;\n\nuse std::collections::{BTreeMap, BTreeSet};\n\nuse std::convert::{From, TryFrom};\n\nuse std::default::Default;\n\nuse std::error::Error;\n\nuse std::fmt;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::rc::Rc;\n\n\n\nuse thrift::{ApplicationError, ApplicationErrorKind, ProtocolError, ProtocolErrorKind, TThriftClient};\n", "file_path": "trace_exporters/src/thrift/agent.rs", "rank": 20, "score": 13.272926064972676 }, { "content": " .filter_map(move |v| {\n\n let mut ns = NamespaceSchema::new(v.id, v.kafka_topic_id, v.query_pool_id);\n\n ns.tables = joined.remove(&v.id)?;\n\n Some((v, ns))\n\n });\n\n\n\n Ok(iter)\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod test_helpers {\n\n use crate::validate_or_insert_schema;\n\n\n\n use super::*;\n\n use ::test_helpers::{assert_contains, tracing::TracingCapture};\n\n use data_types::ColumnId;\n\n use metric::{Attributes, Metric, U64Histogram};\n\n use 
std::{\n\n ops::{Add, DerefMut},\n\n sync::Arc,\n", "file_path": "iox_catalog/src/interface.rs", "rank": 21, "score": 13.23988373264773 }, { "content": "use super::DmlSink;\n\nuse crate::lifecycle::{LifecycleHandle, LifecycleHandleImpl};\n\nuse data_types::{KafkaPartition, SequenceNumber};\n\nuse dml::DmlOperation;\n\nuse futures::{pin_mut, FutureExt, StreamExt};\n\nuse iox_time::{SystemProvider, TimeProvider};\n\nuse metric::{Attributes, U64Counter, U64Gauge};\n\nuse observability_deps::tracing::*;\n\nuse std::{fmt::Debug, time::Duration};\n\nuse tokio_util::sync::CancellationToken;\n\nuse write_buffer::core::{WriteBufferErrorKind, WriteBufferStreamHandler};\n\n\n\n/// When the [`LifecycleManager`] indicates that ingest should be paused because\n\n/// of memory pressure, the sequencer will loop, sleeping this long between\n\n/// calls to [`LifecycleHandle::can_resume_ingest()`] with the manager if it\n\n/// can resume ingest.\n\n///\n\n/// [`LifecycleManager`]: crate::lifecycle::LifecycleManager\n\n/// [`LifecycleHandle::can_resume_ingest()`]: crate::lifecycle::LifecycleHandle::can_resume_ingest()\n\nconst INGEST_POLL_INTERVAL: Duration = Duration::from_millis(100);\n", "file_path": "ingester/src/stream_handler/handler.rs", "rank": 22, "score": 13.184069748174801 }, { "content": " fn must_build(self) -> Chunk {\n\n self.build().unwrap()\n\n }\n\n}\n\n\n\n/// Represents metadata about the physical storage of a column in a chunk\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub(crate) struct ChunkColumnSummary {\n\n /// Column name\n\n pub(crate) name: Arc<str>,\n\n\n\n /// Estimated size, in bytes, consumed by this column.\n\n pub(crate) memory_bytes: usize,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n row_group::{ColumnType, RowGroup},\n", "file_path": "read_buffer/src/chunk.rs", "rank": 23, "score": 13.181819104717407 }, { "content": "\n\n let mut sequencers = BTreeMap::new();\n\n for partition in 1..=kafka_partition_count {\n\n let sequencer = txn\n\n .sequencers()\n\n .create_or_get(&kafka_topic, KafkaPartition::new(partition))\n\n .await?;\n\n sequencers.insert(sequencer.id, sequencer);\n\n }\n\n\n\n Ok((kafka_topic, query_pool, sequencers))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n\n\n use super::*;\n\n use crate::interface::get_schema_by_name;\n\n use crate::mem::MemCatalog;\n", "file_path": "iox_catalog/src/lib.rs", "rank": 24, "score": 13.164527049160007 }, { "content": "//! Write API\n\n\n\nuse crate::models::WriteDataPoint;\n\nuse crate::{Client, HttpSnafu, RequestError, ReqwestProcessingSnafu};\n\nuse bytes::BufMut;\n\nuse futures::{Stream, StreamExt};\n\nuse reqwest::{Body, Method};\n\nuse snafu::ResultExt;\n\nuse std::io::{self, Write};\n\n\n\nimpl Client {\n\n /// Write line protocol data to the specified organization and bucket.\n\n pub async fn write_line_protocol(\n\n &self,\n\n org: &str,\n\n bucket: &str,\n\n body: impl Into<Body> + Send,\n\n ) -> Result<(), RequestError> {\n\n let body = body.into();\n\n let write_url = format!(\"{}/api/v2/write\", self.url);\n", "file_path": "influxdb2_client/src/api/write.rs", "rank": 25, "score": 13.093120346302754 }, { "content": "//! 
Module for generating tag key/value pairs to be used in the data generator\n\n\n\nuse crate::specification::TagPairSpec;\n\nuse crate::substitution::new_handlebars_registry;\n\nuse handlebars::Handlebars;\n\nuse serde_json::{json, Value};\n\nuse snafu::{ResultExt, Snafu};\n\nuse std::fmt::Formatter;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n/// Results specific to the tag_pair module\n\npub type Result<T, E = Error> = std::result::Result<T, E>;\n\n\n\n/// Errors that may happen while creating or regenerating tag pairs\n\n#[derive(Snafu, Debug)]\n\npub enum Error {\n\n #[snafu(display(\n\n \"Could not compile template for tag pair {} caused by: {}\",\n\n tag_key,\n\n source\n", "file_path": "iox_data_generator/src/tag_pair.rs", "rank": 26, "score": 13.042970745472951 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use crate::builder::SchemaBuilder;\n\n use crate::InfluxFieldType::Integer;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_merge_same_schema() {\n\n let schema1 = SchemaBuilder::new()\n\n .influx_field(\"int_field\", Integer)\n\n .tag(\"the_tag\")\n\n .build()\n\n .unwrap();\n\n\n\n let schema2 = SchemaBuilder::new()\n\n .influx_field(\"int_field\", Integer)\n\n .tag(\"the_tag\")\n\n .build()\n\n .unwrap();\n", "file_path": "schema/src/merge.rs", "rank": 27, "score": 13.017034954955706 }, { "content": "/// The resulting builder can be used w/ [`add_service`]. After adding all services it should\n\n/// be used w/ [`serve_builder`].\n\n#[macro_export]\n\nmacro_rules! setup_builder {\n\n ($input:ident, $server_type:ident) => {{\n\n #[allow(unused_imports)]\n\n use $crate::{add_service, rpc::RpcBuilder, server_type::ServerType};\n\n\n\n let RpcBuilderInput {\n\n socket,\n\n trace_header_parser,\n\n shutdown,\n\n } = $input;\n\n\n\n let (health_reporter, health_service) =\n\n $crate::reexport::tonic_health::server::health_reporter();\n\n let reflection_service = $crate::reexport::tonic_reflection::server::Builder::configure()\n\n .register_encoded_file_descriptor_set(\n\n $crate::reexport::generated_types::FILE_DESCRIPTOR_SET,\n\n )\n", "file_path": "ioxd_common/src/rpc.rs", "rank": 28, "score": 12.993488219684973 }, { "content": "//! 
Test setups and data for ingester crate\n\n\n\n#![allow(missing_docs)]\n\n\n\nuse crate::{\n\n data::{\n\n IngesterData, NamespaceData, PartitionData, PersistingBatch, QueryableBatch, SequencerData,\n\n SnapshotBatch, TableData,\n\n },\n\n partioning::DefaultPartitioner,\n\n};\n\nuse arrow::record_batch::RecordBatch;\n\nuse arrow_util::assert_batches_eq;\n\nuse bitflags::bitflags;\n\nuse data_types::{\n\n KafkaPartition, NamespaceId, PartitionId, SequenceNumber, SequencerId, TableId, Timestamp,\n\n Tombstone, TombstoneId,\n\n};\n\nuse iox_catalog::{\n\n interface::{Catalog, INITIAL_COMPACTION_LEVEL},\n", "file_path": "ingester/src/test_util.rs", "rank": 29, "score": 12.975634616828941 }, { "content": "use self::generated_types::{schema_service_client::SchemaServiceClient, *};\n\nuse ::generated_types::google::OptionalField;\n\n\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\n/// Re-export generated_types\n\npub mod generated_types {\n\n pub use generated_types::influxdata::iox::schema::v1::*;\n\n}\n\n\n\n/// A basic client for fetching the Schema for a Namespace.\n\n#[derive(Debug, Clone)]\n\npub struct Client {\n\n inner: SchemaServiceClient<Connection>,\n\n}\n\n\n\nimpl Client {\n\n /// Creates a new client with the provided connection\n\n pub fn new(channel: Connection) -> Self {\n", "file_path": "influxdb_iox_client/src/client/schema.rs", "rank": 30, "score": 12.963489662980432 }, { "content": "pub mod tests {\n\n // intentionally doesn't use super::* in order to use the public interface only\n\n use super::PinnedStream; // just a utility\n\n use crate::connection_manager::{CachingConnectionManager, ConnectionManager};\n\n use crate::{grpc_router, router, Router, RoutingDestination};\n\n use futures::{FutureExt, StreamExt};\n\n use grpc_router_test_gen::test_proto::{test_client::TestClient, test_server::TestServer, *};\n\n use std::net::SocketAddr;\n\n use tokio_stream::wrappers::TcpListenerStream;\n\n use tonic::transport::{Channel, Server};\n\n use tonic::{Request, Response, Status};\n\n\n\n #[derive(Clone)]\n\n struct TestService {\n\n base: u64,\n\n }\n\n\n\n #[tonic::async_trait]\n\n impl test_server::Test for TestService {\n\n async fn test_unary(\n", "file_path": "grpc-router/src/router.rs", "rank": 31, "score": 12.95313479911442 }, { "content": "//! Code to serialize and deserialize certain expressions.\n\n//!\n\n//! Note that [Ballista] also provides a serialization using [Protocol Buffers 3]. However the\n\n//! protocol is meant as a communication channel between workers and clients of Ballista, not for\n\n//! long term preservation. For IOx we need a more stable solution. Luckily we only need to support\n\n//! a very small subset of expression.\n\n//!\n\n//! [Ballista]: https://github.com/apache/arrow-datafusion/blob/22fcb3d7a68a56afbe12eab9e7d98f7b8de33703/ballista/rust/core/proto/ballista.proto\n\n//! 
[Protocol Buffers 3]: https://developers.google.com/protocol-buffers/docs/proto3\n\n\n\nuse crate::google::{FieldViolation, FromOptionalField, FromRepeatedField, OptionalField};\n\nuse crate::influxdata::iox::predicate::v1 as proto;\n\nuse crate::influxdata::iox::predicate::v1::scalar::Value;\n\nuse crate::influxdata::iox::predicate::v1::{Expr, Predicate};\n\nuse data_types::{DeleteExpr, DeletePredicate, Op, Scalar, TimestampRange};\n\n\n\nimpl From<DeletePredicate> for proto::Predicate {\n\n fn from(predicate: DeletePredicate) -> Self {\n\n proto::Predicate {\n\n range: Some(proto::TimestampRange {\n", "file_path": "generated_types/src/delete_predicate.rs", "rank": 32, "score": 12.944955717728455 }, { "content": " range.end += 1;\n\n range.start = range.end;\n\n return Some(t);\n\n }\n\n // Predicate failed and start == end\n\n Some(_) => {\n\n range.start += 1;\n\n range.end += 1;\n\n }\n\n None => return None,\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::writer::Writer;\n\n use rand::prelude::*;\n\n\n", "file_path": "mutable_batch/src/payload/filter.rs", "rank": 33, "score": 12.931259148626708 }, { "content": "use either::Either;\n\nuse observability_deps::tracing::debug;\n\n\n\nuse crate::column::cmp;\n\nuse crate::column::RowIDs;\n\nuse std::{cmp::Ordering, fmt::Debug, iter, marker::PhantomData, mem::size_of};\n\n\n\nuse super::transcoders::Transcoder;\n\nuse super::ScalarEncoding;\n\n\n\npub const ENCODING_NAME: &str = \"RLE\";\n\n\n\n#[allow(clippy::upper_case_acronyms)] // this looks weird as `Rle`\n\n/// An RLE encoding is one where identical \"runs\" of values in the column are\n\n/// stored as a tuple: `(run_length, value)`, where `run_length` indicates the\n\n/// number of times the value is to be repeated.\n\n///\n\n/// Types are: Physical, Logcial, Transcoder\n\n#[derive(Debug)]\n\npub struct RLE<P, L, T>\n", "file_path": "read_buffer/src/column/encoding/scalar/rle.rs", "rank": 34, "score": 12.853134751524848 }, { "content": "use generated_types::google::FieldViolation;\n\n\n\nuse generated_types::grpc::health::v1::*;\n\n\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\n/// A client for the gRPC health checking API\n\n///\n\n/// Allows checking the status of a given service\n\n#[derive(Debug)]\n\npub struct Client {\n\n inner: health_client::HealthClient<Connection>,\n\n}\n\n\n\nimpl Client {\n\n /// Creates a new client with the provided connection\n\n pub fn new(channel: Connection) -> Self {\n\n Self {\n\n inner: health_client::HealthClient::new(channel),\n", "file_path": "influxdb_iox_client/src/client/health.rs", "rank": 35, "score": 12.849280138876452 }, { "content": "use self::generated_types::{catalog_service_client::CatalogServiceClient, *};\n\n\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\n/// Re-export generated_types\n\npub mod generated_types {\n\n pub use generated_types::influxdata::iox::catalog::v1::*;\n\n}\n\n\n\n/// A basic client for interacting the a remote catalog.\n\n#[derive(Debug, Clone)]\n\npub struct Client {\n\n inner: CatalogServiceClient<Connection>,\n\n}\n\n\n\nimpl Client {\n\n /// Creates a new client with the provided connection\n\n pub fn new(channel: Connection) -> Self {\n\n Self {\n", "file_path": "influxdb_iox_client/src/client/catalog.rs", "rank": 36, "score": 12.849280138876452 }, { "content": "//! This module handles the manipulation / execution of storage\n\n//! plans. This is currently implemented using DataFusion, and this\n\n//! 
interface abstracts away many of the details\n\npub(crate) mod context;\n\npub mod field;\n\npub mod fieldlist;\n\nmod non_null_checker;\n\nmod query_tracing;\n\nmod schema_pivot;\n\npub mod seriesset;\n\npub(crate) mod split;\n\npub mod stringset;\n\npub use context::{DEFAULT_CATALOG, DEFAULT_SCHEMA};\n\nuse executor::DedicatedExecutor;\n\n\n\nuse std::sync::Arc;\n\n\n\nuse datafusion::{\n\n self,\n\n execution::runtime_env::{RuntimeConfig, RuntimeEnv},\n", "file_path": "iox_query/src/exec.rs", "rank": 37, "score": 12.844285472819806 }, { "content": "# workspace-hack\n\n\n\nThis crate is a \"workspace hack\" crate managed by [`cargo hakari`][hakari].\n\n\n\nIts purpose is to unify the features used by all crates in the workspace so that the crates share\n\nmore dependencies and rebuild crates less. There are more details in [hakari's\n\ndocumentation][hakari-docs].\n\n\n\n[hakari]: https://crates.io/crates/cargo-hakari\n\n[hakari-docs]: https://docs.rs/cargo-hakari/0.9.6/cargo_hakari/about/index.html\n\n\n\n## CI failures\n\n\n\nIf the `workspace_hack_checks` CI job is failing, there are two possible reasons and solutions:\n\n\n\n- If `cargo hakari generate --diff` fails, that means a crate has started or stopped using a\n\n feature of some crate and that feature isn't up-to-date in the `workspace-hack` crate. To fix\n\n this, run `cargo hakari generate` and commit the changes.\n\n- If `cargo hakari manage-deps --dry-run` fails, that means a crate in the workspace isn't\n\n depending on the `workspace-hack` crate. To fix this, run `cargo hakari manage-deps` and commit\n\n the changes.\n", "file_path": "workspace-hack/README.md", "rank": 38, "score": 12.762014462568374 }, { "content": "use crate::{google::FieldViolation, influxdata::iox::ingester::v1 as proto};\n\nuse data_types::TimestampRange;\n\nuse datafusion::{\n\n common::DataFusionError, datafusion_proto::bytes::Serializeable, logical_plan::Expr,\n\n};\n\nuse predicate::{Predicate, ValueExpr};\n\nuse prost::Message;\n\nuse snafu::{ResultExt, Snafu};\n\n\n", "file_path": "generated_types/src/ingester.rs", "rank": 39, "score": 12.738853832169717 }, { "content": "//! Query\n\n\n\nuse crate::models::ast::Package;\n\nuse crate::models::File;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n/// Query influx using the Flux language\n\n#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)]\n\npub struct Query {\n\n /// Query Script\n\n #[serde(rename = \"extern\", skip_serializing_if = \"Option::is_none\")]\n\n pub r#extern: Option<File>,\n\n /// Query script to execute.\n\n pub query: String,\n\n /// The type of query. Must be \\\"flux\\\".\n\n #[serde(rename = \"type\", skip_serializing_if = \"Option::is_none\")]\n\n pub r#type: Option<Type>,\n\n /// Dialect\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "influxdb2_client/src/models/query.rs", "rank": 40, "score": 12.727482837001107 }, { "content": "//! Health\n\n//!\n\n//! 
Get health of an InfluxDB instance\n\n\n\nuse crate::models::HealthCheck;\n\nuse crate::{Client, HttpSnafu, RequestError, ReqwestProcessingSnafu};\n\nuse reqwest::{Method, StatusCode};\n\nuse snafu::ResultExt;\n\n\n\nimpl Client {\n\n /// Get health of an instance\n\n pub async fn health(&self) -> Result<HealthCheck, RequestError> {\n\n let health_url = format!(\"{}/health\", self.url);\n\n let response = self\n\n .request(Method::GET, &health_url)\n\n .send()\n\n .await\n\n .context(ReqwestProcessingSnafu)?;\n\n\n\n match response.status() {\n", "file_path": "influxdb2_client/src/api/health.rs", "rank": 41, "score": 12.719023768641184 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::measurement::LineToGenerate;\n\n use crate::{now_ns, specification::*};\n\n use influxdb2_client::models::WriteDataPoint;\n\n\n\n type Error = Box<dyn std::error::Error>;\n\n type Result<T = (), E = Error> = std::result::Result<T, E>;\n\n\n\n impl Agent {\n\n /// Instantiate an agent only with the parameters we're interested in\n\n /// testing, keeping everything else constant across different\n\n /// tests.\n\n fn test_instance(\n\n sampling_interval: Option<Duration>,\n\n continue_on: bool,\n\n current_datetime: i64,\n", "file_path": "iox_data_generator/src/agent.rs", "rank": 42, "score": 12.719023768641184 }, { "content": "use self::generated_types::{write_info_service_client::WriteInfoServiceClient, *};\n\n\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\n/// Re-export generated_types\n\npub mod generated_types {\n\n pub use generated_types::influxdata::iox::ingester::v1::{\n\n write_info_service_client, write_info_service_server, GetWriteInfoRequest,\n\n GetWriteInfoResponse, KafkaPartitionInfo, KafkaPartitionStatus,\n\n };\n\n pub use generated_types::write_info::merge_responses;\n\n}\n\n\n\n/// A basic client for fetching information about write tokens from a\n\n/// single ingester.\n\n///\n\n/// NOTE: This is an ALPHA / Internal API that is used as part of the\n\n/// end to end tests.\n\n///\n", "file_path": "influxdb_iox_client/src/client/write_info.rs", "rank": 43, "score": 12.688161480729464 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_generic() {\n\n use crate::backend::test_util::test_generic;\n\n\n\n test_generic(HashMap::new);\n\n }\n\n}\n", "file_path": "cache_system/src/backend/hash_map.rs", "rank": 44, "score": 12.667356799418595 }, { "content": "use http::header::HeaderName;\n\nuse http::{HeaderValue, Request, Response};\n\nuse std::sync::Arc;\n\nuse std::task::{Context, Poll};\n\nuse tower::{Layer, Service};\n\n\n\n/// `SetRequestHeadersLayer` sets the provided headers on all requests flowing through it\n\n/// unless they're already set\n\n#[derive(Debug, Clone)]\n\npub(crate) struct SetRequestHeadersLayer {\n\n headers: Arc<Vec<(HeaderName, HeaderValue)>>,\n\n}\n\n\n\nimpl SetRequestHeadersLayer {\n\n pub(crate) fn new(headers: Vec<(HeaderName, HeaderValue)>) -> Self {\n\n Self {\n\n headers: Arc::new(headers),\n\n }\n\n }\n\n}\n", "file_path": "client_util/src/tower.rs", "rank": 45, "score": 12.650910065702284 }, { "content": " warn!(\"woo\");\n\n info!(\"bar\");\n\n debug!(\"baz\");\n\n trace!(\"trax\");\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use crate::test_util::*;\n\n use observability_deps::tracing::{debug, error};\n\n use std::sync::atomic::{AtomicBool, Ordering};\n\n use std::sync::Arc;\n\n\n\n #[test]\n\n fn simple_logging() {\n\n 
assert_eq!(\n", "file_path": "trogging/src/lib.rs", "rank": 46, "score": 12.601939530281609 }, { "content": "use std::{collections::HashSet, sync::Arc};\n\n\n\nuse datafusion::{\n\n common::{DataFusionError, Result as DataFusionResult},\n\n logical_expr::{AggregateUDF, ScalarUDF},\n\n logical_plan::FunctionRegistry,\n\n};\n\n\n\nuse crate::{regex, window};\n\n\n\nlazy_static::lazy_static! {\n\n static ref REGISTRY: IOxFunctionRegistry = IOxFunctionRegistry::new();\n\n}\n\n\n\n/// Lookup for all DataFusion User Defined Functions used by IOx\n\n#[derive(Debug)]\n\npub(crate) struct IOxFunctionRegistry {}\n\n\n\nimpl IOxFunctionRegistry {\n\n fn new() -> Self {\n", "file_path": "query_functions/src/registry.rs", "rank": 47, "score": 12.591535854181597 }, { "content": "use self::generated_types::{namespace_service_client::NamespaceServiceClient, *};\n\nuse crate::connection::Connection;\n\nuse crate::error::Error;\n\n\n\n/// Re-export generated_types\n\npub mod generated_types {\n\n pub use generated_types::influxdata::iox::namespace::v1::*;\n\n}\n\n\n\n/// A basic client for fetching the Schema for a Namespace.\n\n#[derive(Debug, Clone)]\n\npub struct Client {\n\n inner: NamespaceServiceClient<Connection>,\n\n}\n\n\n\nimpl Client {\n\n /// Creates a new client with the provided connection\n\n pub fn new(channel: Connection) -> Self {\n\n Self {\n\n inner: NamespaceServiceClient::new(channel),\n", "file_path": "influxdb_iox_client/src/client/namespace.rs", "rank": 48, "score": 12.591535854181597 }, { "content": "//! Query\n\n//!\n\n//! Query InfluxDB using InfluxQL or Flux Query\n\n\n\nuse crate::{Client, HttpSnafu, RequestError, ReqwestProcessingSnafu, SerializingSnafu};\n\nuse reqwest::{Method, StatusCode};\n\nuse snafu::ResultExt;\n\n\n\nuse crate::models::{\n\n AnalyzeQueryResponse, AstResponse, FluxSuggestion, FluxSuggestions, LanguageRequest, Query,\n\n};\n\n\n\nimpl Client {\n\n /// Get Query Suggestions\n\n pub async fn query_suggestions(&self) -> Result<FluxSuggestions, RequestError> {\n\n let req_url = format!(\"{}/api/v2/query/suggestions\", self.url);\n\n let response = self\n\n .request(Method::GET, &req_url)\n\n .send()\n\n .await\n", "file_path": "influxdb2_client/src/api/query.rs", "rank": 49, "score": 12.575347856340969 }, { "content": "//! Generating a set of points for one measurement configuration\n\n\n\nuse crate::{field::FieldGeneratorImpl, specification, substitution, tag_pair::TagPair};\n\n\n\nuse crate::tag_set::{GeneratedTagSets, TagSet};\n\nuse influxdb2_client::models::WriteDataPoint;\n\nuse serde_json::json;\n\nuse snafu::{OptionExt, ResultExt, Snafu};\n\nuse std::fmt::Debug;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n/// Measurement-specific Results\n\npub type Result<T, E = Error> = std::result::Result<T, E>;\n\n\n\n/// Errors that may happen while creating measurements\n\n#[derive(Snafu, Debug)]\n\n#[allow(missing_docs)]\n\npub enum Error {\n\n #[snafu(display(\n\n \"Could not build data point for measurement `{}` with Influx Client, caused by:\\n{}\",\n", "file_path": "iox_data_generator/src/measurement.rs", "rank": 50, "score": 12.559640987343055 }, { "content": "///! Types for mapping and converting series data from TSM indexes produced by\n\n///! 
InfluxDB >= 2.x\n\nuse crate::reader::{BlockData, BlockDecoder, TsmIndexReader, ValuePair};\n\nuse crate::{Block, BlockType, TsmError};\n\n\n\nuse observability_deps::tracing::warn;\n\n\n\nuse std::collections::{BTreeMap, BTreeSet};\n\nuse std::fmt::{Display, Formatter};\n\nuse std::i64;\n\nuse std::io::{Read, Seek};\n\nuse std::iter::Peekable;\n\n\n\n/// `TSMMeasurementMapper` takes a TSM reader and produces an iterator that\n\n/// collects all series data for a given measurement.\n\n///\n\n/// The main purpose of the `TSMMeasurementMapper` is to provide a\n\n/// transformation step that allows one to convert per-series/per-field data\n\n/// into measurement-oriented table data.\n\n#[derive(Debug)]\n", "file_path": "influxdb_tsm/src/mapper.rs", "rank": 51, "score": 12.545058016280414 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use arrow::array::{Int64Array, StringArray};\n\n use arrow_util::assert_batches_sorted_eq;\n\n use datafusion::{\n\n logical_plan::{col, lit},\n\n physical_plan::memory::MemoryExec,\n\n };\n\n use datafusion_util::test_collect_partition;\n\n\n\n use crate::util::df_physical_expr;\n\n\n\n use super::*;\n\n\n\n #[tokio::test]\n", "file_path": "iox_query/src/exec/split.rs", "rank": 52, "score": 12.53876670991764 }, { "content": "use std::any::Any;\n\nuse std::sync::Arc;\n\n\n\nuse tokio::net::TcpListener;\n\nuse tokio_util::sync::CancellationToken;\n\nuse tonic::{body::BoxBody, transport::NamedService, Code};\n\nuse tonic_health::server::HealthReporter;\n\nuse trace_http::ctx::TraceHeaderParser;\n\n\n\nuse crate::server_type::{RpcError, ServerType};\n\n\n\n/// Returns the name of the gRPC service S.\n", "file_path": "ioxd_common/src/rpc.rs", "rank": 53, "score": 12.493705878947527 }, { "content": "//! How to load new cache entries.\n\nuse async_trait::async_trait;\n\nuse futures::{future::BoxFuture, FutureExt};\n\nuse std::future::Future;\n\n\n\npub mod metrics;\n\n\n\n/// Loader for missing [`Cache`](crate::driver::Cache) entries.\n\n#[async_trait]\n", "file_path": "cache_system/src/loader/mod.rs", "rank": 54, "score": 12.4413953579249 }, { "content": " shutdown,\n\n socket,\n\n };\n\n\n\n add_service!(builder, health_service);\n\n add_service!(builder, reflection_service);\n\n add_service!(\n\n builder,\n\n $crate::reexport::service_grpc_testing::make_server()\n\n );\n\n\n\n builder\n\n }};\n\n}\n\n\n\n/// Serve a server constructed using [`RpcBuilder`].\n\n#[macro_export]\n\nmacro_rules! 
serve_builder {\n\n ($builder:ident) => {{\n\n use $crate::reexport::tokio_stream::wrappers::TcpListenerStream;\n", "file_path": "ioxd_common/src/rpc.rs", "rank": 55, "score": 12.406323410646008 }, { "content": " self.state.pending_futures.load(Ordering::Acquire);\n\n\n\n self.state.notify.notify_waiters();\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::time::Duration;\n\n\n\n use crate::task::registry::AbstractTaskRegistry;\n\n\n\n use super::*;\n\n use futures::FutureExt;\n\n use std::convert::Infallible;\n\n use tokio::sync::oneshot;\n\n\n\n fn pending() -> futures::future::Pending<Result<(), Infallible>> {\n\n futures::future::pending()\n", "file_path": "tracker/src/task.rs", "rank": 56, "score": 12.394378253812159 }, { "content": "mod internal;\n\n\n\npub use internal::{Duration, Window};\n\nuse schema::TIME_DATA_TYPE;\n\n\n\nuse std::sync::Arc;\n\n\n\nuse arrow::{\n\n array::{Array, ArrayRef, TimestampNanosecondArray},\n\n datatypes::DataType,\n\n};\n\nuse datafusion::{\n\n logical_expr::{ScalarUDF, Volatility},\n\n physical_plan::ColumnarValue,\n\n prelude::*,\n\n scalar::ScalarValue,\n\n};\n\n\n\nuse crate::group_by::WindowDuration;\n\n\n", "file_path": "query_functions/src/window.rs", "rank": 57, "score": 12.394378253812159 }, { "content": "//! Write payload abstractions derived from [`MutableBatch`]\n\n\n\nuse crate::{column::ColumnData, MutableBatch, Result};\n\nuse data_types::PartitionTemplate;\n\nuse hashbrown::HashMap;\n\nuse schema::TIME_COLUMN_NAME;\n\nuse std::{num::NonZeroUsize, ops::Range};\n\n\n\nmod filter;\n\nmod partition;\n\n\n\n/// A payload that can be written to a mutable batch\n", "file_path": "mutable_batch/src/payload.rs", "rank": 58, "score": 12.339560008049277 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::time::Duration;\n\n\n\n use iox_catalog::mem::MemCatalog;\n\n use iox_query::exec::Executor;\n\n use iox_time::{MockProvider, Time};\n\n use object_store::memory::InMemory;\n\n use parquet_file::storage::ParquetStorage;\n\n\n\n use crate::{cache::CatalogCache, create_ingester_connection_for_testing};\n\n\n\n use super::*;\n\n\n\n #[tokio::test]\n\n async fn test_shutdown() {\n\n let querier = TestQuerier::new().querier;\n", "file_path": "querier/src/handler.rs", "rank": 59, "score": 12.32703991841747 }, { "content": "//! Tests for the Influx gRPC queries\n\nuse std::sync::Arc;\n\n\n\n#[cfg(test)]\n\nuse crate::scenarios::{\n\n DbScenario, DbSetup, TwoMeasurements, TwoMeasurementsManyFields, TwoMeasurementsWithDelete,\n\n TwoMeasurementsWithDeleteAll,\n\n};\n\nuse crate::{\n\n db::AbstractDb,\n\n influxrpc::util::run_series_set_plan_maybe_error,\n\n scenarios::{\n\n MeasurementStatusCode, MeasurementsForDefect2845, MeasurementsSortableTags,\n\n MeasurementsSortableTagsWithDelete, TwoMeasurementsMultiSeries,\n\n TwoMeasurementsMultiSeriesWithDelete, TwoMeasurementsMultiSeriesWithDeleteAll,\n\n },\n\n};\n\nuse datafusion::logical_plan::{col, lit, when};\n\nuse iox_query::frontend::influxrpc::InfluxRpcPlanner;\n\nuse predicate::rpc_predicate::InfluxRpcPredicate;\n", "file_path": "query_tests/src/influxrpc/read_filter.rs", "rank": 60, "score": 12.315942084361023 }, { "content": "//! Onboarding/Setup\n\n//!\n\n//! 
Initate and start onboarding process of InfluxDB server.\n\n\n\nuse crate::{Client, HttpSnafu, RequestError, ReqwestProcessingSnafu, SerializingSnafu};\n\nuse reqwest::{Method, StatusCode};\n\nuse snafu::ResultExt;\n\n\n\nuse crate::models::{IsOnboarding, OnboardingRequest, OnboardingResponse};\n\n\n\nimpl Client {\n\n /// Check if database has default user, org, bucket\n\n pub async fn is_onboarding_allowed(&self) -> Result<bool, RequestError> {\n\n let setup_url = format!(\"{}/api/v2/setup\", self.url);\n\n let response = self\n\n .request(Method::GET, &setup_url)\n\n .send()\n\n .await\n\n .context(ReqwestProcessingSnafu)?;\n\n\n", "file_path": "influxdb2_client/src/api/setup.rs", "rank": 61, "score": 12.304322611740272 }, { "content": "pub mod fixed;\n\npub mod fixed_null;\n\npub mod rle;\n\npub mod transcoders;\n\n\n\nuse either::Either;\n\nuse std::{fmt::Debug, fmt::Display};\n\n\n\nuse crate::column::{cmp, RowIDs};\n\n\n\npub use fixed::Fixed;\n\npub use fixed_null::FixedNull;\n\npub use rle::RLE;\n\n\n\n/// `ScalarEncoding` describes the behaviour of a columnar encoding for scalar\n\n/// values.\n", "file_path": "read_buffer/src/column/encoding/scalar.rs", "rank": 62, "score": 12.300669336904798 }, { "content": " if self.start_idx == self.capacity {\n\n self.start_idx = 0;\n\n }\n\n\n\n let old_value = self.values.remove(&old_key).unwrap();\n\n self.values.insert(key, value);\n\n\n\n Some((old_key, old_value))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n\n\n use crate::TaskRegistry;\n\n\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "tracker/src/task/history.rs", "rank": 63, "score": 12.244212064565154 }, { "content": " },\n\n other => {\n\n return Err(format!(\"Unknown write buffer type: {}\", other).into());\n\n }\n\n };\n\n\n\n Ok(reader)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{\n\n core::test_utils::random_topic_name, maybe_skip_kafka_integration,\n\n mock::MockBufferSharedState,\n\n };\n\n use data_types::DatabaseName;\n\n use std::{convert::TryFrom, num::NonZeroU32};\n\n use tempfile::TempDir;\n", "file_path": "write_buffer/src/config.rs", "rank": 64, "score": 12.23858252854344 }, { "content": " Some(a) => a.max(now),\n\n None => now,\n\n });\n\n\n\n span.export()\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n\n\n use crate::{RingBufferTraceCollector, TraceCollector};\n\n\n\n use super::*;\n\n\n\n fn make_span(collector: Arc<dyn TraceCollector>) -> Span {\n\n SpanContext::new(collector).child(\"foo\")\n\n }\n", "file_path": "trace/src/span.rs", "rank": 65, "score": 12.18006072444927 }, { "content": "//! This crate only exists for its tests and benchmarks\n\n\n\nuse flate2::read::GzDecoder;\n\nuse std::io::Read;\n\nuse std::path::Path;\n\n\n\n/// Parses the BENCHMARK_LP environment variable for a list of semicolon delimited paths\n\n/// to line protocol files. Returnss a list of (filename, line protocol) pairs for benchmarking\n", "file_path": "mutable_batch_tests/src/lib.rs", "rank": 66, "score": 12.18006072444927 }, { "content": "//! Functions for partitioning rows from a [`MutableBatch`]\n\n//!\n\n//! 
The returned ranges can then be used with [`MutableBatch::extend_from_range`]\n\n\n\nuse crate::{\n\n column::{Column, ColumnData},\n\n MutableBatch,\n\n};\n\nuse chrono::{format::StrftimeItems, TimeZone, Utc};\n\nuse data_types::{PartitionTemplate, TemplatePart};\n\nuse schema::TIME_COLUMN_NAME;\n\nuse std::ops::Range;\n\n\n\n/// Returns an iterator identifying consecutive ranges for a given partition key\n", "file_path": "mutable_batch/src/payload/partition.rs", "rank": 67, "score": 12.163742773146978 }, { "content": "mod duration;\n\nmod gauge;\n\nmod histogram;\n\nmod metric;\n\n\n\npub use crate::metric::*;\n\npub use counter::*;\n\npub use cumulative::*;\n\npub use duration::*;\n\npub use gauge::*;\n\npub use histogram::*;\n\n\n\n/// A `Registry` stores a map of metric names to `Instrument`\n\n///\n\n/// It allows retrieving them by name, registering new instruments and generating\n\n/// reports of all registered instruments\n\n#[derive(Debug, Default)]\n\npub struct Registry {\n\n /// A list of instruments indexed by metric name\n\n ///\n", "file_path": "metric/src/lib.rs", "rank": 68, "score": 12.163742773146978 }, { "content": "use self::query_access::QuerierTableChunkPruner;\n\nuse self::state_reconciler::Reconciler;\n\nuse crate::{\n\n chunk::ChunkAdapter,\n\n ingester::{self, IngesterPartition},\n\n IngesterConnection,\n\n};\n\nuse data_types::{PartitionId, TableId};\n\nuse futures::join;\n\nuse iox_query::{provider::ChunkPruner, QueryChunk};\n\nuse observability_deps::tracing::debug;\n\nuse predicate::Predicate;\n\nuse schema::Schema;\n\nuse snafu::{ResultExt, Snafu};\n\nuse std::{\n\n collections::{hash_map::Entry, HashMap},\n\n sync::Arc,\n\n};\n\n\n\nmod query_access;\n", "file_path": "querier/src/table/mod.rs", "rank": 69, "score": 12.155846548132187 }, { "content": "//! 
This module contains gRPC service implementation for \"InfluxRPC\" (aka the storage RPC API used for Flux and InfluxQL)\n\n\n\n/// `[0x00]` is the magic value that that the storage gRPC layer uses to\n\n/// encode a tag_key that means \"measurement name\"\n\npub(crate) const TAG_KEY_MEASUREMENT: &[u8] = &[0];\n\n\n\n/// `[0xff]` is is the magic value that that the storage gRPC layer uses\n\n/// to encode a tag_key that means \"field name\"\n\npub(crate) const TAG_KEY_FIELD: &[u8] = &[255];\n\n\n\npub mod data;\n\npub mod expr;\n\npub mod id;\n\npub mod input;\n\npub mod service;\n\n\n\nuse generated_types::storage_server::{Storage, StorageServer};\n\nuse service_common::QueryDatabaseProvider;\n\nuse std::sync::Arc;\n\n\n\n/// Concrete implementation of the gRPC InfluxDB Storage Service API\n\n#[derive(Debug)]\n", "file_path": "service_grpc_influxrpc/src/lib.rs", "rank": 70, "score": 12.154871421071519 }, { "content": " use tempfile::TempDir;\n\n use trace::RingBufferTraceCollector;\n\n\n\n use crate::core::test_utils::{perform_generic_tests, write, TestAdapter, TestContext};\n\n\n\n use super::test_utils::remove_entry;\n\n use super::*;\n\n\n\n struct FileTestAdapter {\n\n tempdir: TempDir,\n\n }\n\n\n\n impl FileTestAdapter {\n\n fn new() -> Self {\n\n Self {\n\n tempdir: TempDir::new().unwrap(),\n\n }\n\n }\n\n }\n\n\n", "file_path": "write_buffer/src/file.rs", "rank": 71, "score": 12.13974863172296 }, { "content": " }\n\n },\n\n (Some(cur), None) => {\n\n range.end += 1;\n\n last = Some(cur);\n\n }\n\n (None, Some(next)) => return Some((next, range.clone())),\n\n (None, None) => return None,\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::writer::Writer;\n\n use rand::prelude::*;\n\n\n\n fn make_rng() -> StdRng {\n\n let seed = rand::rngs::OsRng::default().next_u64();\n", "file_path": "mutable_batch/src/payload/partition.rs", "rank": 72, "score": 12.11667164908561 }, { "content": "//! Interface for reconciling Ingester and catalog state\n\n\n\nuse crate::ingester::IngesterPartition;\n\nuse data_types::{PartitionId, SequenceNumber, SequencerId, Tombstone, TombstoneId};\n\nuse parquet_file::chunk::DecodedParquetFile;\n\nuse std::{ops::Deref, sync::Arc};\n\n\n\n/// Information about an ingester partition.\n\n///\n\n/// This is mostly the same as [`IngesterPartition`] but allows easier mocking.\n", "file_path": "querier/src/table/state_reconciler/interface.rs", "rank": 73, "score": 12.113268281426937 }, { "content": " fn chunk_type(&self) -> &str {\n\n \"PersistingBatch\"\n\n }\n\n\n\n // This function should not be used in PersistingBatch context\n\n fn order(&self) -> ChunkOrder {\n\n unimplemented!()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::test_util::{\n\n create_batches_with_influxtype_different_columns_different_order,\n\n create_one_record_batch_with_influxtype_no_duplicates, create_tombstone,\n\n make_queryable_batch,\n\n };\n\n use arrow::{\n\n array::{\n", "file_path": "ingester/src/query.rs", "rank": 74, "score": 12.113268281426937 }, { "content": "//! 
Labels\n\n\n\nuse crate::models::{LabelCreateRequest, LabelResponse, LabelUpdate, LabelsResponse};\n\nuse crate::{Client, HttpSnafu, RequestError, ReqwestProcessingSnafu, SerializingSnafu};\n\nuse reqwest::{Method, StatusCode};\n\nuse snafu::ResultExt;\n\nuse std::collections::HashMap;\n\n\n\nimpl Client {\n\n /// List all Labels\n\n pub async fn labels(&self) -> Result<LabelsResponse, RequestError> {\n\n self.get_labels(None).await\n\n }\n\n\n\n /// List all Labels by organization ID\n\n pub async fn labels_by_org(&self, org_id: &str) -> Result<LabelsResponse, RequestError> {\n\n self.get_labels(Some(org_id)).await\n\n }\n\n\n\n async fn get_labels(&self, org_id: Option<&str>) -> Result<LabelsResponse, RequestError> {\n", "file_path": "influxdb2_client/src/api/label.rs", "rank": 75, "score": 12.107399722046605 }, { "content": "//! A [`Column`] stores the rows for a given column name\n\n\n\nuse arrow::{\n\n array::{\n\n ArrayDataBuilder, ArrayRef, BooleanArray, Float64Array, Int64Array,\n\n TimestampNanosecondArray, UInt64Array,\n\n },\n\n datatypes::DataType,\n\n error::ArrowError,\n\n};\n\nuse arrow_util::{bitset::BitSet, string::PackedStringArray};\n\nuse data_types::{StatValues, Statistics};\n\nuse schema::{InfluxColumnType, InfluxFieldType, TIME_DATA_TYPE};\n\nuse snafu::{ResultExt, Snafu};\n\nuse std::{fmt::Formatter, mem, sync::Arc};\n\n\n\n/// A \"dictionary ID\" (DID) is a compact numeric representation of an interned\n\n/// string in the dictionary. The same string always maps the same DID.\n\n///\n\n/// DIDs can be compared, hashed and cheaply copied around, just like small integers.\n\n///\n\n/// An i32 is used to match the default for Arrow dictionaries\n\n#[allow(clippy::upper_case_acronyms)]\n\npub(crate) type DID = i32;\n\n\n\n/// An invalid DID used for NULL rows\n\npub(crate) const INVALID_DID: DID = -1;\n\n\n\n/// The type of the dictionary used\n", "file_path": "mutable_batch/src/column.rs", "rank": 76, "score": 12.088579455813704 }, { "content": "//! 
Buckets API\n\n\n\nuse crate::models::PostBucketRequest;\n\nuse crate::{Client, HttpSnafu, RequestError, ReqwestProcessingSnafu, SerializingSnafu};\n\nuse reqwest::Method;\n\nuse snafu::ResultExt;\n\n\n\nimpl Client {\n\n /// Create a new bucket in the organization specified by the 16-digit\n\n /// hexadecimal `org_id` and with the bucket name `bucket`.\n\n pub async fn create_bucket(\n\n &self,\n\n post_bucket_request: Option<PostBucketRequest>,\n\n ) -> Result<(), RequestError> {\n\n let create_bucket_url = format!(\"{}/api/v2/buckets\", self.url);\n\n\n\n let response = self\n\n .request(Method::POST, &create_bucket_url)\n\n .body(\n\n serde_json::to_string(&post_bucket_request.unwrap_or_default())\n", "file_path": "influxdb2_client/src/api/buckets.rs", "rank": 77, "score": 12.068923315815635 }, { "content": " let field = ArrowField::new(column_name, arrow_type, nullable);\n\n\n\n self.fields.push((field, column_type));\n\n self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use InfluxColumnType::*;\n\n use InfluxFieldType::*;\n\n\n\n use crate::assert_column_eq;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_builder_basic() {\n\n let s = SchemaBuilder::new()\n\n .influx_field(\"str_field\", String)\n", "file_path": "schema/src/builder.rs", "rank": 78, "score": 12.059175101815802 }, { "content": "use trace_exporters::TracingConfig;\n\nuse trogging::cli::LoggingConfig;\n\n\n\nuse crate::{object_store::ObjectStoreConfig, socket_addr::SocketAddr};\n\n\n\n/// The default bind address for the HTTP API.\n\npub const DEFAULT_API_BIND_ADDR: &str = \"127.0.0.1:8080\";\n\n\n\n/// The default bind address for the gRPC.\n\npub const DEFAULT_GRPC_BIND_ADDR: &str = \"127.0.0.1:8082\";\n\n\n\n/// Common config for all `run` commands.\n\n#[derive(Debug, Clone, clap::Parser)]\n\npub struct RunConfig {\n\n /// logging options\n\n #[clap(flatten)]\n\n pub(crate) logging_config: LoggingConfig,\n\n\n\n /// tracing options\n\n #[clap(flatten)]\n", "file_path": "clap_blocks/src/run_config.rs", "rank": 79, "score": 12.05662068637941 }, { "content": "use datafusion::error::Result as DataFusionResult;\n\nuse datafusion::logical_plan::{lit, Expr, ExprRewritable, ExprRewriter};\n\n\n\nuse crate::ValueExpr;\n\n\n\n/// Rewrites an expression on `_value` as a boolean true literal, pushing any\n\n/// encountered expressions onto `value_exprs` so they can be moved onto column\n\n/// projections.\n\npub(crate) fn rewrite_field_value_references(\n\n value_exprs: &mut Vec<ValueExpr>,\n\n expr: Expr,\n\n) -> DataFusionResult<Expr> {\n\n let mut rewriter = FieldValueRewriter { value_exprs };\n\n expr.rewrite(&mut rewriter)\n\n}\n\n\n", "file_path": "predicate/src/rpc_predicate/value_rewrite.rs", "rank": 80, "score": 12.05662068637941 }, { "content": " }\n\n Self::Stdout => {\n\n for point in points {\n\n point\n\n .write_data_point_to(std::io::stdout())\n\n .expect(\"should be able to write to stdout\");\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{generate, now_ns, specification::*};\n\n use std::str::FromStr;\n\n\n\n type Error = Box<dyn std::error::Error>;\n", "file_path": "iox_data_generator/src/write.rs", "rank": 81, "score": 12.054029919920563 }, { "content": " true\n\n }\n\n })\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashSet;\n\n\n\n use super::*;\n\n use data_types::{ParquetFile, ParquetFileId};\n\n use iox_tests::util::{TestCatalog, TestNamespace, TestParquetFile, TestPartition, 
TestTable};\n\n use test_helpers::assert_close;\n\n\n\n use crate::cache::{ram::test_util::test_ram_pool, test_util::assert_histogram_metric_count};\n\n\n", "file_path": "querier/src/cache/parquet_file.rs", "rank": 82, "score": 12.030970369099037 }, { "content": "//! Generating a set of field keys and values given a specification\n\n\n\nuse crate::{\n\n now_ns, specification,\n\n substitution::{self, pick_from_replacements},\n\n};\n\n\n\nuse handlebars::Handlebars;\n\nuse rand::rngs::SmallRng;\n\nuse rand::Rng;\n\nuse rand::SeedableRng;\n\nuse serde_json::json;\n\nuse serde_json::Value;\n\nuse snafu::{ResultExt, Snafu};\n\nuse std::{ops::Range, time::Duration};\n\n\n\n/// Field-specific Results\n\npub type Result<T, E = Error> = std::result::Result<T, E>;\n\n\n\n/// Errors that may happen while creating fields\n", "file_path": "iox_data_generator/src/field.rs", "rank": 83, "score": 12.016575464146156 }, { "content": "use assert_cmd::prelude::*;\n\nuse futures::prelude::*;\n\nuse influxdb_iox_client::connection::Connection;\n\nuse observability_deps::tracing::info;\n\nuse std::{\n\n fmt::Debug,\n\n fs::OpenOptions,\n\n path::Path,\n\n process::{Child, Command},\n\n str,\n\n sync::{Arc, Weak},\n\n time::Duration,\n\n};\n\nuse tempfile::NamedTempFile;\n\nuse test_helpers::timeout::FutureTimeout;\n\nuse tokio::sync::Mutex;\n\n\n\nuse crate::{database::initialize_db, server_type::AddAddrEnv};\n\n\n\nuse super::{addrs::BindAddresses, ServerType, TestConfig};\n", "file_path": "test_helpers_end_to_end/src/server_fixture.rs", "rank": 84, "score": 12.007805540673594 }, { "content": "//! An encoding nullable bool, by an Arrow array.\n\nuse std::cmp::Ordering;\n\nuse std::fmt::Debug;\n\nuse std::mem::size_of;\n\n\n\nuse arrow::array::{Array, BooleanArray};\n\nuse cmp::Operator;\n\n\n\nuse crate::column::{cmp, RowIDs};\n\n\n\n#[derive(Debug)]\n\npub struct Bool {\n\n arr: BooleanArray,\n\n}\n\n\n\nimpl std::fmt::Display for Bool {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"[Bool] rows: {:?}, nulls: {:?}, size: {}\",\n", "file_path": "read_buffer/src/column/encoding/bool.rs", "rank": 85, "score": 11.987597545832426 }, { "content": "//! 
Writing generated points\n\n\n\nuse crate::measurement::LineToGenerate;\n\nuse futures::stream;\n\nuse influxdb2_client::models::WriteDataPoint;\n\nuse snafu::{ensure, ResultExt, Snafu};\n\n#[cfg(test)]\n\nuse std::{\n\n collections::BTreeMap,\n\n sync::{Arc, Mutex},\n\n};\n\nuse std::{\n\n fs,\n\n fs::{File, OpenOptions},\n\n io::BufWriter,\n\n path::{Path, PathBuf},\n\n};\n\n\n\n/// Errors that may happen while writing points.\n\n#[derive(Snafu, Debug)]\n", "file_path": "iox_data_generator/src/write.rs", "rank": 86, "score": 11.987597545832426 }, { "content": "#[cfg(all(\n\n any(target_arch = \"x86\", target_arch = \"x86_64\"),\n\n target_feature = \"avx2\"\n\n))]\n\n#[cfg(target_arch = \"x86\")]\n\nuse std::arch::x86::*;\n\n#[cfg(all(target_arch = \"x86_64\", target_feature = \"avx2\"))]\n\nuse std::arch::x86_64::*;\n\n\n\nuse std::collections::BTreeSet;\n\nuse std::convert::From;\n\nuse std::mem::size_of;\n\n\n\nuse arrow::array::{Array, StringArray};\n\n\n\nuse super::NULL_ID;\n\nuse crate::column::{cmp, RowIDs};\n\n\n\npub const ENCODING_NAME: &str = \"DICT\";\n\npub struct Dictionary {\n", "file_path": "read_buffer/src/column/encoding/string/dictionary.rs", "rank": 87, "score": 11.97570767410771 }, { "content": " exec::{stringset::StringSet, IOxSessionContext},\n\n util::compute_timenanosecond_min_max,\n\n QueryChunk, QueryChunkError, QueryChunkMeta,\n\n};\n\nuse observability_deps::tracing::{debug, trace, warn};\n\nuse predicate::{Predicate, PredicateMatch};\n\nuse schema::{selection::Selection, sort::SortKey, InfluxColumnType, InfluxFieldType, Schema};\n\nuse snafu::{ensure, OptionExt, ResultExt, Snafu};\n\nuse std::{any::Any, collections::HashMap, sync::Arc};\n\n\n\npub(crate) mod flight_client;\n\npub(crate) mod test_util;\n\n\n\n#[derive(Debug, Snafu)]\n\n#[allow(missing_copy_implementations, missing_docs)]\n\npub enum Error {\n\n #[snafu(display(\n\n \"Internal error: \\\n\n ingester record batch for column '{}' has type '{}' but should have type '{}'\",\n\n column_name,\n", "file_path": "querier/src/ingester/mod.rs", "rank": 88, "score": 11.970993508989128 }, { "content": "//! Crate that mimics the interface of the the various object stores\n\n//! 
but does nothing if they are not enabled.\n\n\n\nuse async_trait::async_trait;\n\nuse bytes::Bytes;\n\nuse snafu::Snafu;\n\n\n\nuse object_store::{path::Path, GetResult, ListResult, ObjectMeta, ObjectStore, Result};\n\n\n\n/// A specialized `Error` for Azure object store-related errors\n\n#[derive(Debug, Snafu, Clone)]\n\n#[allow(missing_copy_implementations, missing_docs)]\n", "file_path": "object_store_metrics/src/dummy.rs", "rank": 89, "score": 11.952741714862064 }, { "content": " fn kind() -> MetricKind {\n\n MetricKind::U64Histogram\n\n }\n\n\n\n fn recorder(&self) -> Self::Recorder {\n\n self.clone()\n\n }\n\n\n\n fn observe(&self) -> Observation {\n\n Observation::U64Histogram(self.fetch())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::HistogramObservation;\n\n\n\n #[test]\n\n fn test_histogram() {\n", "file_path": "metric/src/histogram.rs", "rank": 90, "score": 11.946912226612827 }, { "content": "use std::fmt::Display;\n\nuse std::mem::size_of;\n\n\n\nuse arrow::array::{Array, PrimitiveArray};\n\nuse arrow::{self, datatypes::*};\n\nuse either::Either;\n\nuse rle::RLE;\n\n\n\nuse super::encoding::scalar::{\n\n transcoders::{ByteTrimmer, NoOpTranscoder, Transcoder},\n\n ScalarEncoding,\n\n};\n\nuse super::encoding::{scalar::rle, scalar::Fixed, scalar::FixedNull};\n\nuse super::{cmp, Statistics};\n\nuse crate::column::{RowIDs, Scalar, Value, Values};\n\n\n\n/// A representation of a column encoding for integer data, providing an\n\n/// API for working against that data in an immutable way.\n\n#[derive(Debug)]\n\npub enum IntegerEncoding {\n", "file_path": "read_buffer/src/column/integer.rs", "rank": 92, "score": 11.907508293362802 }, { "content": "# sqlx-hotswap-pool\n\n\n\nThis crate implements a workaround for the lack of support for password rotation in the `sqlx` crate.\n\n\n\nThere is an upstream ticket for this [Support rotating passwords #445](https://github.com/launchbadge/sqlx/issues/445).\n\nThis crate offers a more quick&dirty solution to the problem.\n\n\n\n## Problem\n\n\n\nSome authentication methods for databases provide short lived passwords that must be regularly rotated.\n\n\n\nExamples are: [AWS IAM database authentication](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/UsingWithRDS.IAMDBAuth.html), HashiCorp Vault's dynamic role, ...\n\n\n\nHowever, in `sqlx` once you create a pool you need to pass the connection string (which includes the credentials) and you can't change it afterwards.\n\nThe pool will create one or more connections with those credentials.\n\n\n\n## Workaround\n\n\n\nThis crate implements a wrapper struct around a reference counted Pool smart pointer. This wrapper can then be updated using internal mutability (mutex protected) whenever the main binary detects a credentials refresh. 
Every subsequent use of the pool will use the new underlying pool.\n\n\n\nThis workaround has been designed to solve the problem of updating credentials, but it happens to work if you want to point your pool to an entirely different database as well.\n\n\n\nIf the credentials refresh happen before the existing credentials are invalidated, references to the previous pool can still be used for some time.\n\n\n\nIf the credentials refresh contextually invalidates the existing credentials, the process will experience connection errors if they used the pool before it has been updated (and if they cloned the `Arc` before the `update` method has been called).\n\n\n\nAlready open connections will keep working in both cases.\n\n\n\nUsage:\n\n\n\n```rust\n\nuse sqlx_hotswap_pool::HotSwapPool;\n\nuse sqlx::{pool::PoolOptions, Pool, Postgres};\n", "file_path": "sqlx-hotswap-pool/README.md", "rank": 93, "score": 11.902392004362666 }, { "content": "pub mod dictionary;\n\npub mod rle;\n\n\n\nuse std::collections::BTreeSet;\n\n\n\nuse either::Either;\n\n\n\n// This makes the encoding types available under the dictionary module.\n\npub use dictionary::Dictionary;\n\npub use rle::RLE;\n\n\n\nuse crate::column::{cmp, RowIDs};\n\n\n\n/// The encoded id for a NULL value.\n\npub const NULL_ID: u32 = 0;\n\n\n\n#[allow(clippy::upper_case_acronyms)] // this looks weird as `Rle`\n\npub enum Encoding {\n\n RLE(RLE),\n\n Plain(Dictionary),\n", "file_path": "read_buffer/src/column/encoding/string.rs", "rank": 94, "score": 11.901534620051628 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{\n\n lifecycle::{LifecycleConfig, LifecycleManager},\n\n stream_handler::mock_sink::MockDmlSink,\n\n };\n\n use assert_matches::assert_matches;\n\n use async_trait::async_trait;\n\n use data_types::{DeletePredicate, Sequence, TimestampRange};\n\n use dml::{DmlDelete, DmlMeta, DmlWrite};\n\n use futures::stream::{self, BoxStream};\n\n use iox_time::{SystemProvider, Time};\n\n use metric::Metric;\n\n use mutable_batch_lp::lines_to_batches;\n\n use std::sync::Arc;\n\n use test_helpers::timeout::FutureTimeout;\n\n use tokio::sync::{mpsc, oneshot};\n", "file_path": "ingester/src/stream_handler/handler.rs", "rank": 95, "score": 11.881272708321362 }, { "content": "//! The grpc-router crate helps creating gRPC routers/forwarders for existing gRPC services.\n\n//!\n\n//! The router implements a gRPC service trait and forwards requests to a local struct implementing\n\n//! that same service interface or to a remote service. The tonic gRPC client used to talk to the\n\n//! remote service is provided by the user by implementing the [`Router`] trait for the router service type.\n\n//! The [`Router`] trait allows the user to provide a different gRPC client per request, or to just\n\n//! fall-back to serving the request from a local service implementation (without any further gRPC overhead).\n\n//!\n\n//! This crate also offers an optional caching [`connection_manager`], which can be useful for\n\n//! implementing the [`Router`] trait.\n\n//!\n\n//! # Examples\n\n//!\n\n//! ## Simple introductory example:\n\n//!\n\n//! ```\n\n//! # use std::pin::Pin;\n\n//! # use futures::Stream;\n\n//! use grpc_router_test_gen::test_proto::{test_server::Test, test_client::TestClient, *};\n\n//! use grpc_router::{grpc_router, router, Router, RoutingDestination};\n", "file_path": "grpc-router/src/lib.rs", "rank": 96, "score": 11.876706649233775 }, { "content": "//! An encoding for fixed width, non-nullable values.\n\n//!\n\n//! 
This encoding stores a column of fixed-width numerical values with a\n\n//! physical type `P` in memory.\n\n//!\n\n//! Some of the methods for finding and materialising values within the encoding\n\n//! allow results to be emitted as some logical type `L` via a transformation\n\n//! `T`.\n\nuse either::Either;\n\nuse observability_deps::tracing::debug;\n\nuse std::cmp::Ordering;\n\nuse std::fmt::Debug;\n\nuse std::marker::PhantomData;\n\nuse std::mem::size_of;\n\nuse std::ops::AddAssign;\n\n\n\nuse super::transcoders::Transcoder;\n\nuse super::ScalarEncoding;\n\nuse crate::column::{cmp, RowIDs};\n\n\n", "file_path": "read_buffer/src/column/encoding/scalar/fixed.rs", "rank": 97, "score": 11.874683066649975 }, { "content": "use crate::{Schema, TIME_COLUMN_NAME};\n\nuse arrow::compute::SortOptions;\n\nuse arrow::{\n\n array::{Array, DictionaryArray, StringArray},\n\n datatypes::{DataType, Int32Type},\n\n record_batch::RecordBatch,\n\n};\n\nuse indexmap::{map::Iter, IndexMap};\n\nuse itertools::Itertools;\n\nuse snafu::Snafu;\n\nuse std::{\n\n collections::{HashMap, HashSet},\n\n fmt::Display,\n\n str::FromStr,\n\n sync::Arc,\n\n};\n\n\n\n#[derive(Debug, Snafu)]\n\npub enum Error {\n\n #[snafu(display(\"invalid column sort: {}\", value))]\n", "file_path": "schema/src/sort.rs", "rank": 98, "score": 11.873871163611515 }, { "content": "use arrow::array::Array;\n\nuse arrow::array::Float64Array;\n\nuse arrow::array::PrimitiveArray;\n\nuse arrow::datatypes::Float64Type;\n\nuse arrow::datatypes::Int16Type;\n\nuse arrow::datatypes::Int32Type;\n\nuse arrow::datatypes::Int8Type;\n\nuse arrow::datatypes::UInt16Type;\n\nuse arrow::datatypes::UInt32Type;\n\nuse arrow::datatypes::UInt8Type;\n\nuse std::mem::size_of;\n\n\n\nuse super::encoding::scalar::{rle, transcoders::*, ScalarEncoding};\n\nuse super::encoding::{\n\n scalar::Fixed,\n\n scalar::{rle::RLE, FixedNull},\n\n};\n\nuse super::{cmp, Statistics};\n\nuse crate::column::{RowIDs, Scalar, Value, Values};\n\n\n", "file_path": "read_buffer/src/column/float.rs", "rank": 99, "score": 11.84937135994764 } ]
Rust
ruzzt_engine/src/tests/world_tester.rs
yokljo/ruzzt
e76b28d5f78e8f51ba78e557c82f5f172523a1c5
pub use crate::engine::RuzztEngine;
pub use crate::event::Event;
pub use crate::board_simulator::*;
pub use zzt_file_format::*;
pub use zzt_file_format::dosstring::*;

use std::collections::HashMap;

#[derive(Clone)]
pub struct TestWorld {
	pub engine: RuzztEngine,
	pub event: Event,
}

impl TestWorld {
	pub fn new() -> TestWorld {
		let mut cursor = std::io::Cursor::new(include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/tests/data/DEFAULT.ZZT")).to_vec());
		let mut world = World::parse(&mut cursor).unwrap();
		world.boards[1].status_elements.clear();
		world.boards[1].tiles[29 + 11*BOARD_WIDTH] = BoardTile::new(ElementType::Empty, 0);
		let mut engine = RuzztEngine::new();
		engine.load_world(world, None);
		engine.set_in_title_screen(false);
		engine.is_paused = false;
		TestWorld {
			engine,
			event: Event::None,
		}
	}

	pub fn new_with_player(x: i16, y: i16) -> TestWorld {
		let mut test_world = TestWorld::new();
		test_world.add_player(x, y);
		test_world
	}

	pub fn add_player(&mut self, x: i16, y: i16) {
		let mut tile_set = TileSet::new();
		tile_set.add('&', BoardTile::new(ElementType::Player, 0x1f), Some(StatusElement {
			cycle: 1,
			.. StatusElement::default()
		}));
		let player_template = TileTemplate::from_text(&tile_set, "&");
		self.insert_template(&player_template, x, y);
	}

	pub fn insert_tile_and_status(&mut self, tile_and_status: &TileAndStatus, x: i16, y: i16) {
		self.engine.board_simulator.set_tile(x, y, tile_and_status.tile);
		if let Some(ref status) = tile_and_status.status {
			let mut new_status = status.clone();
			new_status.location_x = x as u8;
			new_status.location_y = y as u8;
			self.engine.board_simulator.status_elements.push(new_status);
		}
	}

	pub fn insert_template(&mut self, template: &TileTemplate, left_x: i16, top_y: i16) {
		let mut it = template.tiles.iter();
		for y in 0 .. template.height as i16 {
			for x in 0 .. template.width as i16 {
				if let Some(tile_and_status) = it.next().as_mut().unwrap() {
					self.engine.board_simulator.set_tile(left_x + x, top_y + y, tile_and_status.tile);
					if let Some(ref status) = tile_and_status.status {
						let mut new_status = status.clone();
						new_status.location_x = (left_x + x) as u8;
						new_status.location_y = (top_y + y) as u8;
						self.engine.board_simulator.status_elements.push(new_status);
					}
				}
			}
		}
	}

	pub fn simulate(&mut self, step_count: usize) {
		for _ in 0 .. step_count {
			self.engine.step(self.event, 0.);
			self.event = Event::None;
		}
	}

	pub fn current_board_equals(&self, expected_world: TestWorld) -> bool {
		let mut result = true;
		let selfsim = &self.engine.board_simulator;
		let othersim = &expected_world.engine.board_simulator;
		if selfsim.world_header != othersim.world_header {
			println!("World headers differ");
			println!("Actual: {:?}", selfsim.world_header);
			println!("Expected: {:?}", othersim.world_header);
			result = false;
		}
		if selfsim.board_meta_data != othersim.board_meta_data {
			println!("Board meta data differs");
			println!("Actual: {:?}", selfsim.board_meta_data);
			println!("Expected: {:?}", othersim.board_meta_data);
			result = false;
		}
		if selfsim.status_elements != othersim.status_elements {
			println!("Status elements differ");
			println!("Actual: {:?}", selfsim.status_elements);
			println!("Expected: {:?}", othersim.status_elements);
			result = false;
		}
		result = result && self.current_board_tiles_equals(expected_world);
		result
	}

	pub fn current_board_tiles_equals(&self, expected_world: TestWorld) -> bool {
		let selfsim = &self.engine.board_simulator;
		let othersim = &expected_world.engine.board_simulator;
		if selfsim.tiles != othersim.tiles {
			let mut min_diff_x = BOARD_WIDTH as i16;
			let mut min_diff_y = BOARD_HEIGHT as i16;
			let mut max_diff_x = 0;
			let mut max_diff_y = 0;
			for x in 0 .. BOARD_WIDTH as i16 {
				for y in 0 .. BOARD_HEIGHT as i16 {
					let selftile = selfsim.get_tile(x, y).unwrap();
					let othertile = othersim.get_tile(x, y).unwrap();
					if selftile != othertile {
						max_diff_x = max_diff_x.max(x);
						max_diff_y = max_diff_y.max(y);
						min_diff_x = min_diff_x.min(x);
						min_diff_y = min_diff_y.min(y);
					}
				}
			}
			println!("Board differ from ({}, {}) to ({}, {}). Top lines are self, bottom lines are expected", min_diff_x, min_diff_y, max_diff_x, max_diff_y);
			for y in min_diff_y ..= max_diff_y {
				let mut self_line = "".to_string();
				let mut other_line = "".to_string();
				for x in min_diff_x ..= max_diff_x {
					let selftile = selfsim.get_tile(x, y).unwrap();
					let othertile = othersim.get_tile(x, y).unwrap();
					if selftile != othertile {
						self_line += &format!("{:02x},{:02x} ", selftile.element_id, selftile.colour);
						other_line += &format!("{:02x},{:02x} ", othertile.element_id, othertile.colour);
					} else {
						self_line += "==,== ";
						other_line += "==,== ";
					}
				}
				println!("{}", self_line);
				println!("{}", other_line);
				println!("");
			}
			false
		} else {
			true
		}
	}

	pub fn status_at(&mut self, x: i16, y: i16) -> &mut StatusElement {
		self.engine.board_simulator.get_first_status_for_pos_mut(x, y).unwrap().1
	}

	pub fn world_header(&self) -> &WorldHeader {
		&self.engine.board_simulator.world_header
	}
}

#[derive(Debug, Clone)]
pub struct TileAndStatus {
	pub tile: BoardTile,
	pub status: Option<StatusElement>,
}

pub struct TileSet {
	tile_map: HashMap<char, TileAndStatus>,
}

impl TileSet {
	pub fn new() -> TileSet {
		TileSet {
			tile_map: HashMap::new(),
		}
	}

	pub fn add(&mut self, c: char, tile: BoardTile, status: Option<StatusElement>) {
		self.tile_map.insert(c, TileAndStatus { tile, status });
	}

	pub fn add_object(&mut self, c: char, code: &str) {
		self.add(c, BoardTile::new(ElementType::Object, 0xff), Some(StatusElement {
			cycle: 1,
			code_source: CodeSource::Owned(DosString::from_str(code)),
			.. StatusElement::default()
		}));
	}

	pub fn get(&self, c: char) -> &TileAndStatus {
		self.tile_map.get(&c).ok_or_else(|| format!("TileSet::get: Tile not found for: {:?}", c)).unwrap()
	}
}

#[derive(Debug, Clone)]
pub struct TileTemplate {
	width: usize,
	height: usize,
	tiles: Vec<Option<TileAndStatus>>,
}

impl TileTemplate {
	pub fn from_text(tile_set: &TileSet, text: &str) -> TileTemplate {
		let mut height = 0;
		let mut width = 0;
		let mut tiles = vec![];
		for line in text.lines() {
			let trimmed = line.trim().to_string();
			if !trimmed.is_empty() {
				let mut current_width = 0;
				for c in trimmed.chars() {
					if c == '.' {
						tiles.push(None);
					} else {
						tiles.push(Some(tile_set.get(c).clone()));
					}
					current_width += 1;
				}
				if width == 0 {
					width = current_width;
				} else if width != current_width {
					panic!("TileTemplate::from_text: Lines are inconsistent lengths");
				}
				height += 1;
			}
		}
		TileTemplate {
			width,
			height,
			tiles,
		}
	}
}
pub use crate::engine::RuzztEngine; pub use crate::event::Event; pub use crate::board_simulator::*; pub use zzt_file_format::*; pub use zzt_file_format::dosstring::*; use std::collections::HashMap; #[derive(Clone)] pub struct TestWorld { pub engine: RuzztEngine, pub event: Event, } impl TestWorld { pub fn new() -> TestWorld { let mut cursor = std::io::Cursor::new(include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/tests/data/DEFAULT.ZZT")).to_vec()); let mut w
al: {:?}", selfsim.status_elements); println!("Expected: {:?}", othersim.status_elements); result = false; } result = result && self.current_board_tiles_equals(expected_world); result } pub fn current_board_tiles_equals(&self, expected_world: TestWorld) -> bool { let selfsim = &self.engine.board_simulator; let othersim = &expected_world.engine.board_simulator; if selfsim.tiles != othersim.tiles { let mut min_diff_x = BOARD_WIDTH as i16; let mut min_diff_y = BOARD_HEIGHT as i16; let mut max_diff_x = 0; let mut max_diff_y = 0; for x in 0 .. BOARD_WIDTH as i16 { for y in 0 .. BOARD_HEIGHT as i16 { let selftile = selfsim.get_tile(x, y).unwrap(); let othertile = othersim.get_tile(x, y).unwrap(); if selftile != othertile { max_diff_x = max_diff_x.max(x); max_diff_y = max_diff_y.max(y); min_diff_x = min_diff_x.min(x); min_diff_y = min_diff_y.min(y); } } } println!("Board differ from ({}, {}) to ({}, {}). Top lines are self, bottom lines are expected", min_diff_x, min_diff_y, max_diff_x, max_diff_y); for y in min_diff_y ..= max_diff_y { let mut self_line = "".to_string(); let mut other_line = "".to_string(); for x in min_diff_x ..= max_diff_x { let selftile = selfsim.get_tile(x, y).unwrap(); let othertile = othersim.get_tile(x, y).unwrap(); if selftile != othertile { self_line += &format!("{:02x},{:02x} ", selftile.element_id, selftile.colour); other_line += &format!("{:02x},{:02x} ", othertile.element_id, othertile.colour); } else { self_line += "==,== "; other_line += "==,== "; } } println!("{}", self_line); println!("{}", other_line); println!(""); } false } else { true } } pub fn status_at(&mut self, x: i16, y: i16) -> &mut StatusElement { self.engine.board_simulator.get_first_status_for_pos_mut(x, y).unwrap().1 } pub fn world_header(&self) -> &WorldHeader { &self.engine.board_simulator.world_header } } #[derive(Debug, Clone)] pub struct TileAndStatus { pub tile: BoardTile, pub status: Option<StatusElement>, } pub struct TileSet { tile_map: HashMap<char, TileAndStatus>, } impl TileSet { pub fn new() -> TileSet { TileSet { tile_map: HashMap::new(), } } pub fn add(&mut self, c: char, tile: BoardTile, status: Option<StatusElement>) { self.tile_map.insert(c, TileAndStatus { tile, status }); } pub fn add_object(&mut self, c: char, code: &str) { self.add(c, BoardTile::new(ElementType::Object, 0xff), Some(StatusElement { cycle: 1, code_source: CodeSource::Owned(DosString::from_str(code)), .. StatusElement::default() })); } pub fn get(&self, c: char) -> &TileAndStatus { self.tile_map.get(&c).ok_or_else(|| format!("TileSet::get: Tile not found for: {:?}", c)).unwrap() } } #[derive(Debug, Clone)] pub struct TileTemplate { width: usize, height: usize, tiles: Vec<Option<TileAndStatus>>, } impl TileTemplate { pub fn from_text(tile_set: &TileSet, text: &str) -> TileTemplate { let mut height = 0; let mut width = 0; let mut tiles = vec![]; for line in text.lines() { let trimmed = line.trim().to_string(); if !trimmed.is_empty() { let mut current_width = 0; for c in trimmed.chars() { if c == '.' { tiles.push(None); } else { tiles.push(Some(tile_set.get(c).clone())); } current_width += 1; } if width == 0 { width = current_width; } else if width != current_width { panic!("TileTemplate::from_text: Lines are inconsistent lengths"); } height += 1; } } TileTemplate { width, height, tiles, } } }
orld = World::parse(&mut cursor).unwrap(); world.boards[1].status_elements.clear(); world.boards[1].tiles[29 + 11*BOARD_WIDTH] = BoardTile::new(ElementType::Empty, 0); let mut engine = RuzztEngine::new(); engine.load_world(world, None); engine.set_in_title_screen(false); engine.is_paused = false; TestWorld { engine, event: Event::None, } } pub fn new_with_player(x: i16, y: i16) -> TestWorld { let mut test_world = TestWorld::new(); test_world.add_player(x, y); test_world } pub fn add_player(&mut self, x: i16, y: i16) { let mut tile_set = TileSet::new(); tile_set.add('&', BoardTile::new(ElementType::Player, 0x1f), Some(StatusElement { cycle: 1, .. StatusElement::default() })); let player_template = TileTemplate::from_text(&tile_set, "&"); self.insert_template(&player_template, x, y); } pub fn insert_tile_and_status(&mut self, tile_and_status: &TileAndStatus, x: i16, y: i16) { self.engine.board_simulator.set_tile(x, y, tile_and_status.tile); if let Some(ref status) = tile_and_status.status { let mut new_status = status.clone(); new_status.location_x = x as u8; new_status.location_y = y as u8; self.engine.board_simulator.status_elements.push(new_status); } } pub fn insert_template(&mut self, template: &TileTemplate, left_x: i16, top_y: i16) { let mut it = template.tiles.iter(); for y in 0 .. template.height as i16 { for x in 0 .. template.width as i16 { if let Some(tile_and_status) = it.next().as_mut().unwrap() { self.engine.board_simulator.set_tile(left_x + x, top_y + y, tile_and_status.tile); if let Some(ref status) = tile_and_status.status { let mut new_status = status.clone(); new_status.location_x = (left_x + x) as u8; new_status.location_y = (top_y + y) as u8; self.engine.board_simulator.status_elements.push(new_status); } } } } } pub fn simulate(&mut self, step_count: usize) { for _ in 0 .. step_count { self.engine.step(self.event, 0.); self.event = Event::None; } } pub fn current_board_equals(&self, expected_world: TestWorld) -> bool { let mut result = true; let selfsim = &self.engine.board_simulator; let othersim = &expected_world.engine.board_simulator; if selfsim.world_header != othersim.world_header { println!("World headers differ"); println!("Actual: {:?}", selfsim.world_header); println!("Expected: {:?}", othersim.world_header); result = false; } if selfsim.board_meta_data != othersim.board_meta_data { println!("Board meta data differs"); println!("Actual: {:?}", selfsim.board_meta_data); println!("Expected: {:?}", othersim.board_meta_data); result = false; } if selfsim.status_elements != othersim.status_elements { println!("Status elements differ"); println!("Actu
random
[ { "content": "pub fn load_zzt_behaviours(sim: &mut BoardSimulator) {\n\n\tsim.set_behaviour(ElementType::Player, Box::new(items::PlayerBehaviour));\n\n\tsim.set_behaviour(ElementType::Ammo, Box::new(items::AmmoBehaviour));\n\n\tsim.set_behaviour(ElementType::Torch, Box::new(items::TorchBehaviour));\n\n\tsim.set_behaviour(ElementType::Gem, Box::new(items::GemBehaviour));\n\n\tsim.set_behaviour(ElementType::Key, Box::new(items::KeyBehaviour));\n\n\tsim.set_behaviour(ElementType::Door, Box::new(items::DoorBehaviour));\n\n\tsim.set_behaviour(ElementType::Scroll, Box::new(items::ScrollBehaviour));\n\n\tsim.set_behaviour(ElementType::Passage, Box::new(items::PassageBehaviour));\n\n\tsim.set_behaviour(ElementType::Duplicator, Box::new(items::DuplicatorBehaviour));\n\n\tsim.set_behaviour(ElementType::Bomb, Box::new(items::BombBehaviour));\n\n\tsim.set_behaviour(ElementType::Energizer, Box::new(items::EnergizerBehaviour));\n\n\tsim.set_behaviour(ElementType::Clockwise, Box::new(items::ConveyorBehaviour{clockwise: true}));\n\n\tsim.set_behaviour(ElementType::Counter, Box::new(items::ConveyorBehaviour{clockwise: false}));\n\n\t\n\n\tsim.set_behaviour(ElementType::Bear, Box::new(creatures::BearBehaviour));\n\n\tsim.set_behaviour(ElementType::Ruffian, Box::new(creatures::RuffianBehaviour));\n\n\tsim.set_behaviour(ElementType::Object, Box::new(creatures::ObjectBehaviour));\n\n\tsim.set_behaviour(ElementType::Slime, Box::new(creatures::SlimeBehaviour));\n\n\tsim.set_behaviour(ElementType::Shark, Box::new(creatures::SharkBehaviour));\n", "file_path": "ruzzt_engine/src/zzt_behaviours.rs", "rank": 0, "score": 159245.91290893764 }, { "content": "pub fn default_damage_impl(is_destructable: bool, x: i16, y: i16, damage_type: DamageType, sim: &BoardSimulator, actions: &mut Vec<Action>) -> DamageResult {\n\n\tlet should_die = match damage_type {\n\n\t\tDamageType::Shot{..} => {\n\n\t\t\tif is_destructable {\n\n\t\t\t\tactions.push(Action::SendBoardMessage(BoardMessage::PlaySoundArray(process_notes_string(b\"t-c\"), SoundPriority::Level(2))));\n\n\t\t\t}\n\n\t\t\tis_destructable\n\n\t\t}\n\n\t\tDamageType::Bombed => {\n\n\t\t\tlet has_status = sim.get_first_status_for_pos(x, y).is_some();\n\n\t\t\tif !has_status && is_destructable {\n\n\t\t\t\ttrue\n\n\t\t\t} else {\n\n\t\t\t\tfalse\n\n\t\t\t}\n\n\t\t}\n\n\t\tDamageType::Other => {\n\n\t\t\tfalse\n\n\t\t}\n\n\t};\n", "file_path": "ruzzt_engine/src/behaviour.rs", "rank": 1, "score": 131766.00999820503 }, { "content": "// When a monster touches a player it dies and takes 10 health.\n\npub fn add_monster_touch_player_actions(x: i16, y: i16, actions: &mut Vec<Action>, sim: &BoardSimulator) {\n\n\tif let Some((_status_index, status)) = sim.get_first_status_for_pos(x, y) {\n\n\t\tactions.push(Action::SetTile {\n\n\t\t\tx,\n\n\t\t\ty,\n\n\t\t\ttile: BoardTile { element_id: status.under_element_id, colour: status.under_colour },\n\n\t\t\tstatus_element: None,\n\n\t\t});\n\n\n\n\t\tlet (player_x, player_y) = sim.get_player_location();\n\n\t\tlet behaviour = sim.behaviour_for_pos(player_x, player_y);\n\n\t\tbehaviour.damage(player_x, player_y, DamageType::Other, sim, actions);\n\n\n\n\t\tif sim.world_header.energy_cycles <= 0 {\n\n\t\t\tactions.push(Action::ModifyPlayerItem {\n\n\t\t\t\titem_type: PlayerItemType::Health,\n\n\t\t\t\toffset: -10,\n\n\t\t\t\trequire_exact_amount: false,\n\n\t\t\t});\n\n\t\t\tactions.push(Action::CheckRestartOnZapped);\n\n\t\t}\n\n\t\t// TODO: Play sound\n\n\t} else {\n\n\t\t// TODO: Do monsters hurt when they don't have a 
status?\n\n\t}\n\n}\n\n\n", "file_path": "ruzzt_engine/src/zzt_behaviours/monster_interactions.rs", "rank": 2, "score": 121283.0263374238 }, { "content": "pub fn main() {\n\n\tcolor_backtrace::install();\n\n\n\n\tlet mut console = ZztConsole::new();\n\n\tconsole.run();\n\n}\n", "file_path": "ruzzt/src/main.rs", "rank": 3, "score": 114631.29318197927 }, { "content": "pub fn zzt_to_json_impl(zzt_data: &[u8]) -> Result<String, String> {\n\n\tlet mut cursor = std::io::Cursor::new(zzt_data);\n\n\tlet world = World::parse(&mut cursor)?;\n\n\tlet json_str = serde_json::to_string_pretty(&world).map_err(|e| format!(\"{:?}\", e))?;\n\n\tOk(json_str)\n\n}\n\n\n\n#[wasm_bindgen]\n\n#[derive(Clone, Copy)]\n\npub struct FgBgRgb {\n\n\tpub fg_r: u8,\n\n\tpub fg_g: u8,\n\n\tpub fg_b: u8,\n\n\tpub bg_r: u8,\n\n\tpub bg_g: u8,\n\n\tpub bg_b: u8,\n\n\tpub blinking: bool,\n\n}\n\n\n\nimpl FgBgRgb {\n", "file_path": "zzt_web_editor/src/lib.rs", "rank": 4, "score": 110725.05877917095 }, { "content": "/// Call `found_fn` for each tile on the board matching the given `tile_desc`.\n\n/// `found_fn` takes the x/y position of each matching tile.\n\nfn search_tile_desc(tile_desc: TileTypeDesc, sim: &BoardSimulator, found_fn: &mut dyn FnMut(i16, i16, BoardTile)) {\n\n\tsim.visit_all_tiles(&mut |x, y, tile| {\n\n\t\tif tile_desc.matches(tile) {\n\n\t\t\tfound_fn(x, y, tile);\n\n\t\t}\n\n\t});\n\n}\n\n\n", "file_path": "ruzzt_engine/src/oop_parser.rs", "rank": 5, "score": 109230.30816087754 }, { "content": "/// Get a notes string as written in ZZT OOP, and convert it to a list of `SoundEntry` (which is\n\n/// what the sound player actually accepts).\n\npub fn process_notes_string(notes_string: &[u8]) -> Vec<SoundEntry> {\n\n\tlet mut current_note_index = 0;\n\n\tlet mut octave_offset = 3;\n\n\tlet mut length_multiplier = 1;\n\n\tlet mut result = vec![];\n\n\n\n\twhile current_note_index < notes_string.len() {\n\n\t\tmatch notes_string[current_note_index].to_ascii_lowercase() {\n\n\t\t\tb't' => {\n\n\t\t\t\tlength_multiplier = 1;\n\n\t\t\t}\n\n\t\t\tb's' => {\n\n\t\t\t\tlength_multiplier = 2;\n\n\t\t\t}\n\n\t\t\tb'i' => {\n\n\t\t\t\tlength_multiplier = 4;\n\n\t\t\t}\n\n\t\t\tb'q' => {\n\n\t\t\t\tlength_multiplier = 8;\n\n\t\t\t}\n", "file_path": "ruzzt_engine/src/sounds.rs", "rank": 6, "score": 108421.11548884171 }, { "content": "pub fn monster_damage(behaviour: &dyn Behaviour, x: i16, y: i16, damage_type: DamageType, sim: &BoardSimulator, actions: &mut Vec<Action>) -> DamageResult {\n\n\tif let Some((_, ref status)) = sim.get_first_status_for_pos(x, y) {\n\n\t\tactions.push(Action::SetTile {\n\n\t\t\tx,\n\n\t\t\ty,\n\n\t\t\ttile: BoardTile { element_id: status.under_element_id, colour: status.under_colour },\n\n\t\t\tstatus_element: None,\n\n\t\t});\n\n\n\n\t\tactions.push(Action::SendBoardMessage(BoardMessage::PlaySoundArray(\n\n\t\t\tprocess_notes_string(b\"c--c++++c--c\"),\n\n\t\t\tSoundPriority::Level(3)\n\n\t\t)));\n\n\n\n\t\tDamageResult::Died\n\n\t} else {\n\n\t\tdefault_damage_impl(behaviour.destructable(), x, y, damage_type, sim, actions)\n\n\t}\n\n}\n", "file_path": "ruzzt_engine/src/zzt_behaviours/monster_interactions.rs", "rank": 7, "score": 103809.36385984998 }, { "content": "pub fn monster_push(x: i16, y: i16, is_player: bool, sim: &BoardSimulator) -> PushResult {\n\n\tlet mut actions = vec![];\n\n\tlet mut blocked = BlockedStatus::Blocked;\n\n\n\n\tif is_player {\n\n\t\tadd_monster_touch_player_actions(x, y, &mut actions, sim);\n\n\t\tblocked = BlockedStatus::NotBlocked;\n\n\t}\n\n\n\n\tPushResult 
{\n\n\t\tblocked,\n\n\t\taction_result: ActionResult::with_actions(actions),\n\n\t}\n\n}\n\n\n", "file_path": "ruzzt_engine/src/zzt_behaviours/monster_interactions.rs", "rank": 8, "score": 91843.62672188232 }, { "content": "pub fn char_to_dos_char(c: char) -> Option<u8> {\n\n\tfor dos_char in 0 .. CP437.len() {\n\n\t\tlet unicode = CP437[dos_char];\n\n\t\tif c == unicode {\n\n\t\t\treturn Some(dos_char as u8);\n\n\t\t}\n\n\t}\n\n\tNone\n\n}\n\n\n\n\n\nconst CP437: [char; 256] = [\n\n\t'\\u{2400}',\n\n\t'\\u{263A}',\n\n\t'\\u{263B}',\n\n\t'\\u{2665}',\n\n\t'\\u{2666}',\n\n\t'\\u{2663}',\n\n\t'\\u{2660}',\n\n\t'\\u{2022}',\n", "file_path": "zzt_file_format/src/dosstring.rs", "rank": 9, "score": 89339.33442701296 }, { "content": "#[wasm_bindgen]\n\npub fn zzt_colour_to_rgb(zzt_colour: u8) -> FgBgRgb {\n\n\tlet mut blinking = false;\n\n\tlet mut bg_col = (zzt_colour & 0xF) >> 4;\n\n\tlet fg_col = zzt_colour & 0xF;\n\n\tif bg_col >= 8 {\n\n\t\tbg_col -= 8;\n\n\t\tblinking = true;\n\n\t}\n\n\t\n\n\tlet (fg_r, fg_g, fg_b) = ConsoleColour::from_u8(fg_col).unwrap().to_rgb();\n\n\tlet (bg_r, bg_g, bg_b) = ConsoleColour::from_u8(bg_col).unwrap().to_rgb();\n\n\tFgBgRgb{fg_r, fg_g, fg_b, bg_r, bg_g, bg_b, blinking}\n\n}\n\n\n\n#[wasm_bindgen]\n\npub struct ScreenChar {\n\n\tpub char_code: u8,\n\n\tpub colour: FgBgRgb,\n\n}\n\n\n", "file_path": "zzt_web_editor/src/lib.rs", "rank": 10, "score": 86868.6987637061 }, { "content": "#[derive(Debug, Clone)]\n\nstruct DuplicatorContinuation;\n\n\n\nimpl ActionContinuation for DuplicatorContinuation {\n\n\tfn next_step(&mut self, apply_action_report: ApplyActionResultReport, _status_index: usize, status: &StatusElement, sim: &BoardSimulator) -> ActionContinuationResult {\n\n\t\tlet mut actions = vec![];\n\n\n\n\t\tif apply_action_report.move_was_blocked == BlockedStatus::NotBlocked {\n\n\t\t\t// Duplicate!\n\n\t\t\tlet source_x = status.location_x as i16 + status.step_x;\n\n\t\t\tlet source_y = status.location_y as i16 + status.step_y;\n\n\n\n\t\t\tif let Some(source_tile) = sim.get_tile(source_x, source_y) {\n\n\t\t\t\tlet dest_x = status.location_x as i16 - status.step_x;\n\n\t\t\t\tlet dest_y = status.location_y as i16 - status.step_y;\n\n\n\n\t\t\t\tlet mut duplicated_status_opt = sim.get_first_status_for_pos(source_x, source_y).map(|(_, status)| status.clone());\n\n\t\t\t\tif let Some(ref mut duplicated_status) = duplicated_status_opt {\n\n\t\t\t\t\tduplicated_status.location_x = dest_x as u8;\n\n\t\t\t\t\tduplicated_status.location_y = dest_y as u8;\n\n\t\t\t\t}\n", "file_path": "ruzzt_engine/src/zzt_behaviours/items.rs", "rank": 11, "score": 84038.25604234387 }, { "content": "#[derive(Clone)]\n\nstruct TextInputState {\n\n\tmode: TextInputMode,\n\n\ttext: DosString,\n\n}\n\n\n\n/// If there is a yes/no question open in the side bar, this represents the purpose of that yes/no\n\n/// question.\n\n#[derive(Clone)]\n\npub enum YesNoMode {\n\n\tEndGame,\n\n\tQuit,\n\n}\n\n\n\n/// If a text-based input is open in the side bar, this contains the state of that input.\n", "file_path": "ruzzt_engine/src/side_bar.rs", "rank": 12, "score": 84038.25604234387 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct TileTypeDesc {\n\n\t/// The element ID of the tile.\n\n\telement_id: u8,\n\n\t/// The (optional) colour of the tile.\n\n\tcolour: Option<u8>,\n\n}\n\n\n\nimpl TileTypeDesc {\n\n\t/// Returns true if the `tile` matches the description.\n\n\tfn matches(&self, tile: BoardTile) -> bool {\n\n\t\tlet colour_matches = if let Some(colour) = self.colour 
{\n\n\t\t\tcolour == tile.colour\n\n\t\t} else {\n\n\t\t\ttrue\n\n\t\t};\n\n\n\n\t\tlet element_id_matches = self.element_id == tile.element_id;\n\n\n\n\t\tcolour_matches && element_id_matches\n\n\t}\n", "file_path": "ruzzt_engine/src/oop_parser.rs", "rank": 13, "score": 84038.25604234387 }, { "content": "#[test]\n\nfn move_directions() {\n\n\tlet mut base_world = TestWorld::new_with_player(1, 1);\n\n\t\n\n\tlet mut tile_set = TileSet::new();\n\n\ttile_set.add_object('O', \"/n/n/e/s/w/i\\n\");\n\n\t\n\n\tlet mut world = base_world.clone();\n\n\tworld.insert_tile_and_status(tile_set.get('O'), 10, 10);\n\n\t\n\n\tlet mut world_1 = base_world.clone();\n\n\tworld_1.insert_tile_and_status(tile_set.get('O'), 10, 9);\n\n\tworld_1.status_at(10, 9).code_current_instruction = 2;\n\n\t\n\n\tlet mut world_2 = base_world.clone();\n\n\tworld_2.insert_tile_and_status(tile_set.get('O'), 10, 8);\n\n\tworld_2.status_at(10, 8).code_current_instruction = 4;\n\n\t\n\n\tlet mut world_3 = base_world.clone();\n\n\tworld_3.insert_tile_and_status(tile_set.get('O'), 11, 8);\n\n\tworld_3.status_at(11, 8).code_current_instruction = 6;\n", "file_path": "ruzzt_engine/src/tests/oop.rs", "rank": 14, "score": 82997.77679115908 }, { "content": "#[test]\n\nfn centipede_walk() {\n\n\t// Test 10 times to hopefully catch possibility of randomness.\n\n\tfor _ in 0 .. 10 {\n\n\t\tlet mut world = TestWorld::new_with_player(1, 1);\n\n\t\t\n\n\t\tlet mut tile_set = TileSet::new();\n\n\t\ttile_set.add('O', BoardTile::new(ElementType::Segment, 0xff), Some(StatusElement {\n\n\t\t\tcycle: 1,\n\n\t\t\t.. StatusElement::default()\n\n\t\t}));\n\n\t\ttile_set.add('@', BoardTile::new(ElementType::Head, 0xff), Some(StatusElement {\n\n\t\t\tcycle: 1,\n\n\t\t\t.. StatusElement::default()\n\n\t\t}));\n\n\t\ttile_set.add('#', BoardTile::new(ElementType::Normal, 0xff), None);\n\n\t\t\n\n\t\tlet room_tmpl = TileTemplate::from_text(&tile_set, \"\n\n\t\t\t######\n\n\t\t\t#.##.#\n\n\t\t\t#....#\n", "file_path": "ruzzt_engine/src/tests/basic.rs", "rank": 15, "score": 82997.77679115908 }, { "content": "#[test]\n\nfn set_flag() {\n\n\tlet mut world = TestWorld::new_with_player(1, 1);\n\n\t\n\n\tlet mut tile_set = TileSet::new();\n\n\ttile_set.add_object('O', \"#set a\\n\");\n\n\tworld.insert_tile_and_status(tile_set.get('O'), 10, 10);\n\n\t\n\n\tassert_eq!(world.world_header().last_matching_flag(DosString::from_str(\"a\")), None);\n\n\tworld.simulate(1);\n\n\tassert_eq!(world.world_header().last_matching_flag(DosString::from_str(\"a\")), Some(0));\n\n}\n\n\n", "file_path": "ruzzt_engine/src/tests/oop.rs", "rank": 16, "score": 82997.77679115908 }, { "content": "#[test]\n\nfn push_blocks() {\n\n\tlet mut world = TestWorld::new_with_player(1, 1);\n\n\t\n\n\tlet mut tile_set = TileSet::new();\n\n\ttile_set.add('>', BoardTile::new(ElementType::Pusher, 0xff), Some(StatusElement {\n\n\t\tcycle: 3,\n\n\t\tstep_x: 1,\n\n\t\t.. 
StatusElement::default()\n\n\t}));\n\n\ttile_set.add('#', BoardTile::new(ElementType::Boulder, 0xff), None);\n\n\tlet template = TileTemplate::from_text(&tile_set, \"\n\n\t\t>########\n\n\t\");\n\n\t\n\n\tlet mut expected = world.clone();\n\n\tlet mut expected2 = world.clone();\n\n\t\n\n\tworld.insert_template(&template, 10, 10);\n\n\texpected.insert_template(&template, 12, 10);\n\n\texpected2.insert_template(&template, 14, 10);\n\n\n\n\tworld.simulate(6);\n\n\tassert!(world.current_board_equals(expected));\n\n\tworld.simulate(6);\n\n\tassert!(world.current_board_equals(expected2));\n\n}\n\n\n", "file_path": "ruzzt_engine/src/tests/basic.rs", "rank": 17, "score": 82997.77679115908 }, { "content": "#[test]\n\nfn player_move() {\n\n\tlet mut world = TestWorld::new();\n\n\t\n\n\tlet mut expected = world.clone();\n\n\tworld.add_player(20, 20);\n\n\texpected.add_player(21, 20);\n\n\t\n\n\tworld.event = Event::Right;\n\n\t// Expected step is directly related to move event\n\n\texpected.status_at(21, 20).step_x = 1;\n\n\t\n\n\tworld.simulate(1);\n\n\tassert!(world.current_board_equals(expected));\n\n}\n\n\n", "file_path": "ruzzt_engine/src/tests/basic.rs", "rank": 18, "score": 82997.77679115908 }, { "content": "#[wasm_bindgen]\n\npub fn zzt_to_json(zzt_data: &[u8]) -> Result<String, JsValue> {\n\n\tzzt_to_json_impl(zzt_data).map_err(|err| err.into())\n\n}\n\n\n", "file_path": "zzt_web_editor/src/lib.rs", "rank": 19, "score": 82882.37154961552 }, { "content": "#[derive(Debug, Clone)]\n\nstruct CentipedeMovementContinuation;\n\n\n\nimpl ActionContinuation for CentipedeMovementContinuation {\n\n\tfn next_step(&mut self, _apply_action_report: ApplyActionResultReport, _status_index: usize, status: &StatusElement, sim: &BoardSimulator) -> ActionContinuationResult {\n\n\t\tlet mut actions = vec![];\n\n\n\n\t\tactions.push(Action::MoveTile {\n\n\t\t\tfrom_x: status.location_x as i16,\n\n\t\t\tfrom_y: status.location_y as i16,\n\n\t\t\tto_x: status.location_x as i16 + status.step_x,\n\n\t\t\tto_y: status.location_y as i16 + status.step_y,\n\n\t\t\toffset_x: status.step_x,\n\n\t\t\toffset_y: status.step_y,\n\n\t\t\tcheck_push: true,\n\n\t\t\tis_player: false,\n\n\t\t});\n\n\n\n\t\tlet mut current_tail_index = status.follower;\n\n\t\tlet mut prev_x = status.location_x;\n\n\t\tlet mut prev_y = status.location_y;\n", "file_path": "ruzzt_engine/src/zzt_behaviours/centipede.rs", "rank": 20, "score": 82221.87690450653 }, { "content": "/// A description of the Behaviour of a particular element type.\n\npub trait Behaviour: Debug {\n\n\t/// Called every time a status element cycles.\n\n\tfn step(&self, _event: Event, _status: &StatusElement, _status_index: usize, _sim: &BoardSimulator) -> ActionResult {\n\n\t\tActionResult {\n\n\t\t\tactions: vec![],\n\n\t\t\tcontinuation: None,\n\n\t\t}\n\n\t}\n\n\n\n\t/// This is called when applying certain movement actions to try and push something out of the\n\n\t/// way so that another thing can move there. See `PushResult`.\n\n\t/// `x` and `y` are the location of the tile being pushed.\n\n\t/// Using `push_off_x` and `push_off_y` instead of direction, because a pusher can push a\n\n\t/// boulder two away, by two, and that will propagate along a line of boulders.\n\n\t/// `is_player` is true if this is being pushed by a player.\n\n\tfn push(&self, _x: i16, _y: i16, _push_off_x: i16, _push_off_y: i16, _is_player: bool, _sim: &BoardSimulator) -> PushResult {\n\n\t\tPushResult::do_nothing_blocked()\n\n\t}\n\n\n\n\t/// The value for the BLOCKED #if flag. 
This is also used for other things like walking objects\n", "file_path": "ruzzt_engine/src/behaviour.rs", "rank": 21, "score": 81455.71738143159 }, { "content": "#[test]\n\nfn go_i_doesnt_progress() {\n\n\t\n\n\tlet mut tile_set = TileSet::new();\n\n\ttile_set.add_object('O', \"#go i\\nB\\n\");\n\n\t\n\n\tlet mut world = TestWorld::new_with_player(1, 1);\n\n\tworld.insert_tile_and_status(tile_set.get('O'), 10, 10);\n\n\t\n\n\tlet mut original_world = world.clone();\n\n\t\n\n\tworld.simulate(1);\n\n\tassert!(world.current_board_equals(original_world));\n\n}\n\n\n\n// \"A\\n/i\\nB\\n/s\\nC\\n?i\\nD\\n?s\\nE\\n#set a\\n/i\\nF\\n#send g\\n:g\\nG\\n/i\\nH\\n#go i\\nI\\n/i\\nJ\\n#go s\\nK\\n/i\\nL\\n#try i\\nM\\n/i\\nN\\n#try s\\nO\\n/i\\n\"\n", "file_path": "ruzzt_engine/src/tests/oop.rs", "rank": 22, "score": 81070.03805264579 }, { "content": "#[test]\n\nfn centipede_form_heads() {\n\n\tlet mut world = TestWorld::new_with_player(1, 1);\n\n\t\n\n\tlet mut tile_set = TileSet::new();\n\n\ttile_set.add('O', BoardTile::new(ElementType::Segment, 0xff), Some(StatusElement {\n\n\t\tcycle: 1,\n\n\t\t.. StatusElement::default()\n\n\t}));\n\n\ttile_set.add('@', BoardTile::new(ElementType::Head, 0xff), Some(StatusElement {\n\n\t\tcycle: 1,\n\n\t\t.. StatusElement::default()\n\n\t}));\n\n\ttile_set.add('#', BoardTile::new(ElementType::Normal, 0xff), None);\n\n\t\n\n\tlet room_tmpl = TileTemplate::from_text(&tile_set, \"\n\n\t\t######\n\n\t\t#.##.#\n\n\t\t#....#\n\n\t\t#.####\n\n\t\t###...\n", "file_path": "ruzzt_engine/src/tests/basic.rs", "rank": 23, "score": 81070.03805264579 }, { "content": "struct HeadStepContext<'l> {\n\n\tnew_step_x: i16,\n\n\tnew_step_y: i16,\n\n\tactions: Vec<Action>,\n\n\tcontinuation: Option<Box<dyn ActionContinuation>>,\n\n\tstatus_index_for_head: usize,\n\n\tstatus: &'l StatusElement,\n\n\tstatus_index: usize,\n\n\tsim: &'l BoardSimulator,\n\n}\n\n\n\nimpl<'l> HeadStepContext<'l> {\n\n\t// The player doesn't count as blocked, so a centipede will happily walk into it.\n\n\tfn is_blocked_and_not_player(&self, x: i16, y: i16) -> bool {\n\n\t\tlet (player_x, player_y) = self.sim.get_player_location();\n\n\t\tif x == player_x && y == player_y {\n\n\t\t\tfalse\n\n\t\t} else {\n\n\t\t\tlet dest_behaviour = self.sim.behaviour_for_pos(x, y);\n\n\t\t\tdest_behaviour.blocked(false) == BlockedStatus::Blocked\n", "file_path": "ruzzt_engine/src/zzt_behaviours/centipede.rs", "rank": 24, "score": 80012.04587052928 }, { "content": "/// If a behaviour has to mutate the world then keep processing, it needs to unborrow everything so\n\n/// the world state can be mutated, then come back. This trait represents the current running state\n\n/// of one of those functions that needs to mutate game state and come back. The `next_step` method\n\n/// will be called after applying all actions returned from the behavour method, then the actions\n\n/// returned by `next_step` will be applied, and next_step invoked again.\n\npub trait ActionContinuation: Debug {\n\n\t/// This is called after applying some mutating actions to BoardSimulator, and will continue to\n\n\t/// be called until it returns `finished` as true in the `ActionContinuationResult`.\n\n\tfn next_step(&mut self, apply_action_report: ApplyActionResultReport, status_index: usize, status: &StatusElement, sim: &BoardSimulator) -> ActionContinuationResult;\n\n\n\n\t/// This is guaranteed to be called as the very last operation on a continuation, so some final\n\n\t/// actions can be applied. 
The ApplyActionResultReport generated by these actions will be\n\n\t/// returned from BoardSimulator::apply_action_result.\n\n\tfn finalise(&mut self, _status_opt: Option<&StatusElement>, _sim: &BoardSimulator) -> Vec<Action> {\n\n\t\tvec![]\n\n\t}\n\n}\n\n\n", "file_path": "ruzzt_engine/src/behaviour.rs", "rank": 25, "score": 79523.8807140272 }, { "content": "/// Returns true if the given element type is always visible when the room is dark.\n\nfn type_visible_in_dark(ty: ElementType) -> bool {\n\n\tmatch ty {\n\n\t\tElementType::Player | ElementType::Passage | ElementType::Torch => true,\n\n\t\t_ => false,\n\n\t}\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct RuzztEngine {\n\n\t/// The `BoardSimulator` used to simulate the current board.\n\n\tpub board_simulator: BoardSimulator,\n\n\t/// Because a board simulation step can pause halfway through (e.g. to open a scroll), this\n\n\t/// stores the state of a partially executed step.\n\n\tpub board_simulator_step_state: Option<BoardSimulatorStepState>,\n\n\t/// The rendered state of the \"console\", which stores the characters and colours to display at\n\n\t/// each location on the screen, including the sidebar.\n\n\tpub console_state: ConsoleState,\n\n\t// TODO: Maybe this should just be replaced with things that aren't already stored in\n\n\t// BoardSimulator, because right now the board simulator's world_header has to be carefully used\n\n\t// all the time, and not the one in this World instance.\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 26, "score": 79383.22091163676 }, { "content": "/// Get the character code associated with the given element type.\n\n/// Note that not all types use this function. For those types it doesn't matter what this returns.\n\nfn element_type_to_char_code(ty: ElementType) -> u8 {\n\n\tuse self::ElementType::*;\n\n\tmatch ty {\n\n\t\tEmpty => 32,\n\n\t\tPlayer => 2,\n\n\t\tMonitor => 0,\n\n\t\tTorch => 157,\n\n\t\tSolid => 0xdb,\n\n\t\tBreakable => 177,\n\n\t\tNormal => 0xb2,\n\n\t\tBoulder => 254,\n\n\t\tScroll => 232,\n\n\t\tDoor => 0x0a,\n\n\t\tAmmo => 132,\n\n\t\tHead => 0xe9,\n\n\t\tSegment => 0x4f,\n\n\t\tBear => 0x99,\n\n\t\tRuffian => 0x05,\n\n\t\tSlime => 0x2a,\n\n\t\tShark => 0x5e,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 27, "score": 77924.2901846314 }, { "content": "/// Represents a game controller input event.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum Event {\n\n\t/// A no-op event.\n\n\tNone,\n\n\t/// Move west was pressed.\n\n\tLeft,\n\n\t/// Move east was pressed.\n\n\tRight,\n\n\t/// Move north was pressed.\n\n\tUp,\n\n\t/// Move south was pressed.\n\n\tDown,\n\n\t/// The page-up key was pressed (used when scrolls are open).\n\n\tPageUp,\n\n\t/// The page-down key was pressed (used when scrolls are open).\n\n\tPageDown,\n\n\t/// The enter key was pressed (used in scrolls to either click a selected link, or close the\n\n\t/// scroll).\n\n\tEnter,\n", "file_path": "ruzzt_engine/src/event.rs", "rank": 28, "score": 68835.77443545443 }, { "content": "\t/// The key to use the game speed selector was pressed (usually S, only applies in the title\n\n\t/// screen).\n\n\tChangeGameSpeed,\n\n}\n\n\n\n/// Represents a text input event.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum TypingEvent {\n\n\t/// A no-op event.\n\n\tNone,\n\n\t/// The key with the given ASCII code was pressed.\n\n\tChar(u8),\n\n\t/// The Backspace key was pressed.\n\n\tBackspace,\n\n\t/// The Enter key was pressed.\n\n\tEnter,\n\n\t/// The Escape key was pressed.\n\n\tEscape,\n\n}\n", 
"file_path": "ruzzt_engine/src/event.rs", "rank": 29, "score": 68834.63077651216 }, { "content": "\t/// The key to open the world selection scroll was pressed (usually W, only applies in the title\n\n\t/// screen).\n\n\tOpenWorldSelection,\n\n\t/// The key to start playing the game was pressed (usually P, only applies in the title screen).\n\n\tPlayGame,\n\n\t/// The key to open the saved-game selection scroll was pressed (usually R, only applies in the\n\n\t/// title screen).\n\n\tRestoreGame,\n\n\t/// The key to quit the game was pressed (usually Q).\n\n\t/// Note that this is different from Escape in very particular circumstances.\n\n\tQuit,\n\n\t/// The key to open the \"About\" scroll was pressed (usually A, only applies in the title\n\n\t/// screen).\n\n\tOpenAbout,\n\n\t/// The key to open the highscores scroll was pressed (usually H, only applies in the title\n\n\t/// screen).\n\n\tOpenHighScores,\n\n\t/// The key to open the world editor was pressed (usually E, only applies in the title\n\n\t/// screen).\n\n\tOpenEditor,\n", "file_path": "ruzzt_engine/src/event.rs", "rank": 30, "score": 68826.56969592576 }, { "content": "\t/// The escape key was pressed (quit the game, or close a scroll, etc.).\n\n\tEscape,\n\n\t/// Shoot in the direction the player is already moving (its step X/Y determine this).\n\n\tShootFlow,\n\n\t/// Shoot west was pressed.\n\n\tShootLeft,\n\n\t/// Shoot east was pressed.\n\n\tShootRight,\n\n\t/// Shoot north was pressed.\n\n\tShootUp,\n\n\t/// Shoot south was pressed.\n\n\tShootDown,\n\n\t/// The key to light a torch was pressed (usually T).\n\n\tLightTorch,\n\n\t/// The key to pause the game was pressed (usually P, only applies in-game).\n\n\tPauseGame,\n\n\t/// The key to open the save game input box was pressed (usually S).\n\n\tSaveGame,\n\n\t/// The key to open the debug command input box was pressed (usually ?).\n\n\tDebug,\n", "file_path": "ruzzt_engine/src/event.rs", "rank": 31, "score": 68824.20801484662 }, { "content": "fn get_key_name(index: u8) -> &'static [u8] {\n\n\tmatch index {\n\n\t\t0 => b\"Blue\",\n\n\t\t1 => b\"Green\",\n\n\t\t2 => b\"Cyan\",\n\n\t\t3 => b\"Red\",\n\n\t\t4 => b\"Purple\",\n\n\t\t5 => b\"Yellow\",\n\n\t\t6 => b\"White\",\n\n\t\t_ => b\"?\",\n\n\t}\n\n}\n\n\n\nimpl Behaviour for KeyBehaviour {\n\n\tfn push(&self, x: i16, y: i16, push_off_x: i16, push_off_y: i16, is_player: bool, sim: &BoardSimulator) -> PushResult {\n\n\t\tif is_player {\n\n\t\t\tif let Some(tile) = sim.get_tile(x, y) {\n\n\t\t\t\tlet key_index = tile.colour as isize - 9;\n\n\t\t\t\tprintln!(\"{:?}\", tile);\n\n\t\t\t\tif key_index >= 0 && key_index < 7 {\n", "file_path": "ruzzt_engine/src/zzt_behaviours/items.rs", "rank": 32, "score": 66887.17896850314 }, { "content": "struct ZztConsole {\n\n\tengine: RuzztEngine,\n\n\tcurrent_console_state: ConsoleState,\n\n\tcurrent_run_time_ms: usize,\n\n\tcustom_scroll_state: CustomScrollState,\n\n}\n\n\n\nimpl ZztConsole {\n\n\tfn new() -> ZztConsole {\n\n\t\tlet command_arguments = clap::App::new(\"ruzzt\")\n\n\t\t\t.about(\"A ZZT clone\")\n\n\t\t\t.arg(clap::Arg::with_name(\"WORLD_FILE\")\n\n\t\t\t\t.help(\"A ZZT world file to load on startup.\")\n\n\t\t\t\t.required(false)\n\n\t\t\t\t.index(1))\n\n\t\t\t.arg(clap::Arg::with_name(\"board\")\n\n\t\t\t\t.short(\"b\")\n\n\t\t\t\t.value_name(\"BOARD\")\n\n\t\t\t\t.help(\"Starts on the given board number\"))\n\n\t\t\t.get_matches();\n", "file_path": "ruzzt/src/main.rs", "rank": 33, "score": 61144.85457851155 }, { "content": "/// Create an `Action` that spawns a tile 
matching the given `tile_desc` at the given `x`/`y`\n\n/// position on the board.\n\nfn create_tile_action(tile_desc: &TileTypeDesc, x: u8, y: u8) -> Action {\n\n\tlet colour = tile_desc.colour.unwrap_or(0x0f);\n\n\n\n\tlet tile = BoardTile {\n\n\t\telement_id: tile_desc.element_id,\n\n\t\tcolour,\n\n\t};\n\n\n\n\tlet mut status_element = None;\n\n\n\n\tif let Some(ty) = ElementType::from_u8(tile_desc.element_id) {\n\n\t\tmatch ty {\n\n\t\t\t| ElementType::Bear\n\n\t\t\t| ElementType::BlinkWall\n\n\t\t\t| ElementType::Bomb\n\n\t\t\t| ElementType::Bullet\n\n\t\t\t| ElementType::Clockwise\n\n\t\t\t| ElementType::Counter\n\n\t\t\t| ElementType::Duplicator\n\n\t\t\t| ElementType::Head\n", "file_path": "ruzzt_engine/src/oop_parser.rs", "rank": 34, "score": 60337.318637737015 }, { "content": "struct WorldSelectionState {\n\n\tentries: Vec<std::path::PathBuf>,\n\n}\n\n\n", "file_path": "ruzzt/src/main.rs", "rank": 35, "score": 59907.01666714118 }, { "content": "#[wasm_bindgen]\n\nstruct WorldState {\n\n\tengine: RuzztEngine,\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl WorldState {\n\n\tpub fn from_file_data(zzt_file_data: &[u8]) -> Result<WorldState, JsValue> {\n\n\t\tSelf::from_file_data_impl(zzt_file_data).map_err(|err| err.into())\n\n\t}\n\n\t\n\n\tfn from_file_data_impl(zzt_file_data: &[u8]) -> Result<WorldState, String> {\n\n\t\tlet mut cursor = std::io::Cursor::new(zzt_file_data);\n\n\t\tlet world = World::parse(&mut cursor)?;\n\n\t\tlet mut engine = RuzztEngine::new();\n\n\t\tengine.load_world(world, None);\n\n\t\tengine.set_in_title_screen(false);\n\n\t\t\n\n\t\tOk(WorldState {\n\n\t\t\tengine,\n\n\t\t})\n", "file_path": "zzt_web_editor/src/lib.rs", "rank": 36, "score": 58748.64921027701 }, { "content": "struct DosStringVisitor;\n\n\n\nimpl<'de> de::Visitor<'de> for DosStringVisitor {\n\n\ttype Value = DosString;\n\n\n\n\tfn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n\t\tformatter.write_str(\"a DOS ASCII string\")\n\n\t}\n\n\n\n\tfn visit_str<E>(self, value: &str) -> Result<DosString, E> where\n\n\t\tE: de::Error,\n\n\t{\n\n\t\tOk(DosString::from_str(value))\n\n\t}\n\n}\n\n\n\nimpl<'de> de::Deserialize<'de> for DosString {\n\n fn deserialize<D>(deserializer: D) -> Result<DosString, D::Error>\n\n where\n\n D: de::Deserializer<'de>,\n\n {\n\n deserializer.deserialize_str(DosStringVisitor)\n\n }\n\n}\n\n\n", "file_path": "zzt_file_format/src/dosstring.rs", "rank": 37, "score": 57662.337110166336 }, { "content": "fn main() -> Result<(), String> {\n\n\tlet matches = clap::App::new(\"zzt_to_json\")\n\n\t\t.about(\"Converts between ZZT and JSON formats\")\n\n\t\t.arg(clap::Arg::with_name(\"INPUT_TYPE\")\n\n\t\t\t.help(\"The type of the input file: \\\"zzt\\\" or \\\"json\\\"\")\n\n\t\t\t.required(true)\n\n\t\t\t.index(1))\n\n\t\t.arg(clap::Arg::with_name(\"OUTPUT_TYPE\")\n\n\t\t\t.help(\"The type of the output file: \\\"zzt\\\" or \\\"json\\\"\")\n\n\t\t\t.required(true)\n\n\t\t\t.index(2))\n\n\t\t.arg(clap::Arg::with_name(\"INPUT\")\n\n\t\t\t.help(\"The input file\")\n\n\t\t\t.required(true)\n\n\t\t\t.index(3))\n\n\t\t.get_matches();\n\n\t\n\n\tlet input_type = FileType::parse(matches.value_of(\"INPUT_TYPE\").unwrap())?;\n\n\tlet output_type = FileType::parse(matches.value_of(\"OUTPUT_TYPE\").unwrap())?;\n\n\tlet input_file_path = Path::new(matches.value_of(\"INPUT\").unwrap());\n", "file_path": "zzt_to_json/src/main.rs", "rank": 38, "score": 52206.87331313787 }, { "content": "fn generate_sound_code_frequencies() -> Vec<u16> {\n\n\tlet mut result = vec![0; 
256];\n\n\tlet c_freq: f64 = 64.;\n\n\tfor octave in 1 ..= 15 {\n\n\t\tlet mut note_freq = c_freq * (2f64.powi(octave as i32 - 1));\n\n\t\tfor note in 0..12 {\n\n\t\t\tresult[(note + octave * 16) as usize] = note_freq.floor() as u16;\n\n\t\t\tnote_freq *= 2f64.powf(1. / 12.);\n\n\t\t}\n\n\t}\n\n\tresult\n\n}\n\n\n\npub struct SoundPlayer {\n\n\tspec: AudioSpec,\n\n\tcurrent_magnitude: f32,\n\n\tvolume: f32,\n\n\tlowpass_level: f32,\n\n\twhole_note_samples: usize,\n\n\t//last_wave_up: Option<bool>,\n", "file_path": "ruzzt/src/sound.rs", "rank": 39, "score": 50109.9746759131 }, { "content": "fn world_selection_info(world_name: &[u8]) -> &[u8] {\n\n\tmatch world_name {\n\n\t\tb\"CAVES\" => b\"The Caves of ZZT\",\n\n\t\tb\"CITY\" => b\"Underground City of ZZT\",\n\n\t\tb\"DUNGEONS\" => b\"The Dungeons of ZZT\",\n\n\t\tb\"TOUR\" => b\"Guided Tour ZZT's Other Worlds\",\n\n\t\tb\"TOWN\" => b\"The Town of ZZT\",\n\n\t\t_ => b\"\",\n\n\t}\n\n}\n\n\n", "file_path": "ruzzt/src/main.rs", "rank": 40, "score": 47788.29536404587 }, { "content": "fn get_ms_from_duration(duration: std::time::Duration) -> usize {\n\n\t(duration.as_secs() * 1000) as usize + duration.subsec_millis() as usize\n\n}\n\n\n", "file_path": "ruzzt/src/main.rs", "rank": 41, "score": 44709.9706908153 }, { "content": "\tpub clicked_link_label: Option<DosString>,\n\n\t/// True when the game is paused.\n\n\tpub is_paused: bool,\n\n\t/// True when the game ended and should start simulating really fast. This is not the same as it\n\n\t/// being the end of the game, because when the player dies they can use cheat codes to bring\n\n\t/// themselves back to life, but the game will continue to simulate fast.\n\n\tpub board_should_simulate_fast: bool,\n\n\t/// Various result data of actions that have been applied recently.\n\n\t/// If the game is paused, then this will just build up and up until the game is unpaused.\n\n\tpub accumulated_data: AccumulatedActionData,\n\n\t/// True when in the title screen.\n\n\tpub in_title_screen: bool,\n\n}\n\n\n\nimpl RuzztEngine {\n\n\t/// Make a new engine with the state of a newly started ZZT game with no world loaded.\n\n\tpub fn new() -> RuzztEngine {\n\n\t\tlet initial_world = zzt_file_format::World::zzt_default();\n\n\n\n\t\tlet mut board_simulator = BoardSimulator::new(initial_world.world_header.clone());\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 42, "score": 37548.61482845619 }, { "content": "\t\tzzt_behaviours::load_zzt_behaviours(&mut board_simulator);\n\n\n\n\t\tboard_simulator.load_board(&initial_world.boards[initial_world.world_header.player_board as usize]);\n\n\t\tlet mut accumulated_data = AccumulatedActionData::new();\n\n\t\tboard_simulator.on_player_entered_board(&mut accumulated_data.board_messages);\n\n\n\n\t\tlet mut engine = RuzztEngine {\n\n\t\t\tboard_simulator,\n\n\t\t\tboard_simulator_step_state: None,\n\n\t\t\tconsole_state: ConsoleState::new(),\n\n\t\t\tworld: initial_world,\n\n\t\t\tglobal_cycle: 1,\n\n\t\t\tpaused_cycle: 1,\n\n\t\t\tscroll_state: None,\n\n\t\t\tcaption_state: None,\n\n\t\t\tside_bar: SideBar::new(),\n\n\t\t\tshown_one_time_notifications: HashSet::new(),\n\n\t\t\tclicked_link_label: None,\n\n\t\t\tis_paused: true,\n\n\t\t\tboard_should_simulate_fast: false,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 43, "score": 37541.979286866386 }, { "content": "\t\t\tcaption_state.draw_caption(&mut self.console_state);\n\n\t\t}\n\n\n\n\t\tif let Some(ref scroll_state) = self.scroll_state {\n\n\t\t\tscroll_state.draw_scroll(&mut 
self.console_state);\n\n\t\t}\n\n\t}\n\n\n\n\t/// When `in_typing_mode()` returns true, this should be called instead of `step`.\n\n\t/// This will add characters to text inputs.\n\n\t/// Note that `event` is not the same as the `event` passed to `step`.\n\n\tpub fn process_typing(&mut self, event: TypingEvent) -> Vec<BoardMessage> {\n\n\t\tself.paused_cycle += 1;\n\n\t\tlet board_messages = self.side_bar.process_typing(event, &self.board_simulator.world_header);\n\n\t\tself.update_screen();\n\n\t\tboard_messages\n\n\t}\n\n\n\n\t/// Simulate a single game step. A RUZZT front-end will call this over and over, redrawing the\n\n\t/// screen between each call. The latest controller input should be passed as `event`.\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 44, "score": 37541.48259618064 }, { "content": "\t/// `global_time_passed_seconds` is the wall-clock time passed since the game started,\n\n\t/// regardless of how fast the game is stepping.\n\n\tpub fn step(&mut self, event: Event, global_time_passed_seconds: f64) -> Vec<BoardMessage> {\n\n\t\tlet was_end_of_game = self.is_end_of_game();\n\n\n\n\t\tlet mut board_messages = std::mem::replace(&mut self.accumulated_data.board_messages, vec![]);\n\n\n\n\t\tif self.is_paused {\n\n\t\t\tlet move_dir = match event {\n\n\t\t\t\tEvent::Left => Direction::West,\n\n\t\t\t\tEvent::Right => Direction::East,\n\n\t\t\t\tEvent::Up => Direction::North,\n\n\t\t\t\tEvent::Down => Direction::South,\n\n\t\t\t\t_ => Direction::Idle,\n\n\t\t\t};\n\n\n\n\t\t\tif move_dir != Direction::Idle {\n\n\t\t\t\tlet (off_x, off_y) = move_dir.to_offset();\n\n\t\t\t\tlet player_status = &self.board_simulator.status_elements[0];\n\n\t\t\t\tlet player_x = player_status.location_x as i16;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 45, "score": 37541.447996550785 }, { "content": "\tpub fn open_scroll(&mut self, title: DosString, content_lines: Vec<DosString>) {\n\n\t\tself.scroll_state = Some(ScrollState::new_title_content(title, content_lines));\n\n\t}\n\n\n\n\t/// Copy the data out of the `BoardSimulator` back into the `World` instance in `RuzztEngine`.\n\n\tpub fn sync_world(&mut self) {\n\n\t\tlet current_board_index = self.board_simulator.world_header.player_board;\n\n\t\tself.board_simulator.save_board(&mut self.world.boards[current_board_index as usize]);\n\n\t\tself.world.world_header = self.board_simulator.world_header.clone();\n\n\t}\n\n\n\n\t/// Returns true if the given `x`/`y` position on the board is currently not lit (so it's on a\n\n\t/// dark board, and is not lit by a torch).\n\n\tfn is_position_dark(&self, x: i16, y: i16) -> bool {\n\n\t\tif let Some(torch_cycles) = self.board_simulator.world_header.torch_cycles {\n\n\t\t\tif torch_cycles > 0 {\n\n\t\t\t\tlet (player_x, player_y) = self.board_simulator.get_player_location();\n\n\n\n\t\t\t\tlet circle_height = CIRCLE_MASK.len() as i16;\n\n\t\t\t\tlet top_left_x = player_x - 1 - ((CIRCLE_MASK_WIDTH as i16 - 1) / 2);\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 46, "score": 37540.816628669156 }, { "content": "\n\n\t/// Load the given `world` into the engine to start simulating it. The current `in_title_screen`\n\n\t/// value will not change. 
The board that is loaded initially can be overridden by setting\n\n\t/// `start_board` to the desired board's index within the world.\n\n\tpub fn load_world(&mut self, mut world: zzt_file_format::World, start_board: Option<i16>) {\n\n\t\tif let Some(start_board) = start_board {\n\n\t\t\tworld.world_header.player_board = start_board;\n\n\t\t}\n\n\n\n\t\tlet mut board_simulator = BoardSimulator::new(world.world_header.clone());\n\n\t\tzzt_behaviours::load_zzt_behaviours(&mut board_simulator);\n\n\n\n\t\tboard_simulator.load_board(&world.boards[world.world_header.player_board as usize]);\n\n\n\n\t\tlet (player_x, player_y) = self.board_simulator.get_player_location();\n\n\t\tself.board_simulator.board_meta_data.player_enter_x = player_x as u8;\n\n\t\tself.board_simulator.board_meta_data.player_enter_y = player_y as u8;\n\n\n\n\t\tself.board_simulator = board_simulator;\n\n\t\tself.world = world;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 47, "score": 37540.123579993364 }, { "content": "\t\t\taccumulated_data,\n\n\t\t\tin_title_screen: true,\n\n\t\t};\n\n\n\n\t\tengine.set_in_title_screen(true);\n\n\n\n\t\tengine\n\n\t}\n\n\n\n\t/// Switch between being in-game or in the title screen.\n\n\tpub fn set_in_title_screen(&mut self, in_title_screen: bool) {\n\n\t\tself.in_title_screen = in_title_screen;\n\n\t\tif in_title_screen {\n\n\t\t\tself.board_simulator.load_board(&self.world.boards[0]);\n\n\t\t\tself.is_paused = false;\n\n\t\t} else {\n\n\t\t\tself.board_simulator.load_board(&self.world.boards[self.board_simulator.world_header.player_board as usize]);\n\n\t\t\tself.is_paused = true;\n\n\t\t}\n\n\t}\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 48, "score": 37539.90426324622 }, { "content": "\t\tself.board_should_simulate_fast && self.scroll_state.is_none() && !self.side_bar.in_typing_mode()\n\n\t}\n\n\n\n\t/// Returns true if a board simulation step was paused half-way through, such as when a scroll\n\n\t/// was opened by an OOP script for example.\n\n\tpub fn is_part_way_though_step(&self) -> bool {\n\n\t\tself.board_simulator_step_state.is_some()\n\n\t}\n\n\n\n\t/// Applies the default action for the given `board_message`. For example, it will switch boards\n\n\t/// on a `SwitchBoard` or `TeleportToBoard` message. 
This doens't have any effect for anything\n\n\t/// to do with input/output (playing sound, opening worlds from the disk) because those are all\n\n\t/// left up to the front-end.\n\n\t/// Returns any BoardMessages that happen to be created when `board_message` is applied.\n\n\tpub fn process_board_message(&mut self, board_message: BoardMessage) -> Vec<BoardMessage> {\n\n\t\tlet mut extra_accumulated_data = AccumulatedActionData::new();\n\n\n\n\t\tmatch board_message {\n\n\t\t\tBoardMessage::SwitchBoard{new_board_index, direction} => {\n\n\t\t\t\tlet mut dest_player_pos = self.board_simulator.get_player_location();\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 49, "score": 37539.80703191059 }, { "content": "\t\t\t\t\t\telement_id: ElementType::Monitor as u8,\n\n\t\t\t\t\t\tcolour: 0,\n\n\t\t\t\t\t});\n\n\t\t\t\t} else {\n\n\t\t\t\t\tself.board_simulator.set_tile(player_x, player_y, BoardTile {\n\n\t\t\t\t\t\telement_id: ElementType::Player as u8,\n\n\t\t\t\t\t\tcolour: 31,\n\n\t\t\t\t\t});\n\n\t\t\t\t}\n\n\n\n\t\t\t\tlet current_global_cycle = self.global_cycle;\n\n\t\t\t\tlet board_simulator_step_state = self.board_simulator_step_state.get_or_insert_with(|| BoardSimulatorStepState::new(event, current_global_cycle));\n\n\n\n\t\t\t\tlet mut process_same_status = false;\n\n\n\n\t\t\t\tif let Some(ref clicked_link_label) = self.clicked_link_label {\n\n\t\t\t\t\tif let Some(processing_status_index) = board_simulator_step_state.processing_status_index_opt {\n\n\t\t\t\t\t\tlet current_status = &self.board_simulator.status_elements[processing_status_index];\n\n\t\t\t\t\t\tlet mut parser = OopParser::new(self.board_simulator.get_status_code(current_status), 0);\n\n\t\t\t\t\t\tparser.jump_to_label(&clicked_link_label);\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 50, "score": 37538.78599257612 }, { "content": "\t/// The current state of the `World`.\n\n\tpub world: zzt_file_format::World,\n\n\t/// `global_cycle` is the number of simulation steps since the start of the game.\n\n\tpub global_cycle: usize,\n\n\t/// Number of times the `RuzztEngine` `step` function has been called since the game was paused.\n\n\t/// This is required because the player blinks while the game is paused, but also doesn't\n\n\t/// increment the `global_cycle`.\n\n\tpub paused_cycle: usize,\n\n\t/// If there's a scroll open, this contains the state of the scroll.\n\n\tpub scroll_state: Option<ScrollState>,\n\n\t/// If there's a caption being displayed, this contains the state of the caption.\n\n\tpub caption_state: Option<CaptionState>,\n\n\t/// The state of the sidebar on the right of the screen.\n\n\tpub side_bar: SideBar,\n\n\t/// `OneTimeNotification`s are notifications that are only shown once. 
When one is shown it is\n\n\t/// added to the set so it doesn't get shown again.\n\n\tpub shown_one_time_notifications: HashSet<OneTimeNotification>,\n\n\t/// When a link in a scroll is pressed, this will be set to that link's target string.\n\n\t/// If `board_simulator_step_state` is set, then when the next partial step is executed this\n\n\t/// will be used to jump to the associated OOP label on the status currently being processed.\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 51, "score": 37538.7316746995 }, { "content": "use crate::board_simulator::*;\n\nuse crate::scroll::*;\n\nuse crate::event::*;\n\nuse crate::caption::*;\n\nuse crate::console::*;\n\nuse crate::behaviour::*;\n\nuse crate::board_message::*;\n\nuse crate::direction::*;\n\nuse crate::oop_parser::OopParser;\n\nuse crate::side_bar::{self, SideBar};\n\nuse crate::zzt_behaviours;\n\nuse crate::sounds::*;\n\n\n\nuse zzt_file_format::{self, ElementType, BoardTile};\n\nuse zzt_file_format::dosstring::DosString;\n\n\n\nuse num::FromPrimitive;\n\n\n\nuse std::fs::File;\n\nuse std::collections::HashSet;\n\n\n\n/// Get the character code associated with the given element type.\n\n/// Note that not all types use this function. For those types it doesn't matter what this returns.\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 52, "score": 37538.501918206784 }, { "content": "\t\t\tchar_code = 0;\n\n\t\t}\n\n\n\n\t\tConsoleChar {\n\n\t\t\tchar_code,\n\n\t\t\tbackground,\n\n\t\t\tforeground,\n\n\t\t}\n\n\t}\n\n\n\n\t/// Go through all the `StatusElements` and update their appearance in the console if they\n\n\t/// require special rendering. For example, Object elements use their `param1` to determine the\n\n\t/// console character to use.\n\n\tfn render_status_element_tiles(&mut self) {\n\n\t\t// Note that the game seems to draw the even status elements first, then the odd ones (or\n\n\t\t// maybe the other way around?). This likely doesn't affect the excecution order of objects.\n\n\n\n\t\t// The first status is always the player.\n\n\t\tlet mut is_first_status = true;\n\n\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 53, "score": 37537.853638588094 }, { "content": "\n\n\t\t\t\t\t\tlet new_code_current_instruction = parser.pos;\n\n\t\t\t\t\t\tlet current_status = &mut self.board_simulator.status_elements[processing_status_index];\n\n\t\t\t\t\t\tcurrent_status.code_current_instruction = new_code_current_instruction;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tprocess_same_status = true;\n\n\t\t\t\t}\n\n\n\n\t\t\t\tself.clicked_link_label = None;\n\n\n\n\t\t\t\tlet mut is_done = false;\n\n\t\t\t\t// The step pauses as soon as a board message is sent.\n\n\t\t\t\twhile !is_done && board_simulator_step_state.accumulated_data.board_messages.is_empty() {\n\n\t\t\t\t\tis_done = board_simulator_step_state.partial_step(process_same_status, &mut self.board_simulator);\n\n\t\t\t\t\tprocess_same_status = false;\n\n\n\n\t\t\t\t\tif board_simulator_step_state.accumulated_data.should_check_time_elapsed {\n\n\t\t\t\t\t\tboard_simulator_step_state.accumulated_data.should_check_time_elapsed = false;\n\n\n\n\t\t\t\t\t\tlet new_time_passed_ticks = (global_time_passed_seconds * 100.) 
as i16 % 6000;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 54, "score": 37537.77742403672 }, { "content": "\t\t\t\t\t\tself.board_simulator.world_header.player_ammo += 5;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tb\"torches\" => {\n\n\t\t\t\t\t\tif let Some(ref mut player_torches) = self.board_simulator.world_header.player_torches {\n\n\t\t\t\t\t\t\t*player_torches += 5;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\tb\"gems\" => {\n\n\t\t\t\t\t\tself.board_simulator.world_header.player_gems += 5;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tb\"health\" => {\n\n\t\t\t\t\t\tself.board_simulator.world_header.player_health += 50;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tb\"zap\" => {\n\n\t\t\t\t\t\tlet player_pos = self.board_simulator.get_player_location();\n\n\t\t\t\t\t\tlet mut report = ApplyActionResultReport::new();\n\n\t\t\t\t\t\tlet mut zap_at_offset = |off_x, off_y| {\n\n\t\t\t\t\t\t\tlet action = Action::SetTile{\n\n\t\t\t\t\t\t\t\tx: player_pos.0 + off_x,\n\n\t\t\t\t\t\t\t\ty: player_pos.1 + off_y,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 55, "score": 37537.16339763248 }, { "content": "\t\t} else {\n\n\t\t\tlet mut caption_is_finished = false;\n\n\t\t\tif let Some(ref mut caption_state) = self.caption_state {\n\n\t\t\t\tcaption_state.time_left -= 1;\n\n\t\t\t\tif caption_state.time_left == 0 {\n\n\t\t\t\t\tcaption_is_finished = true;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tif caption_is_finished {\n\n\t\t\t\tself.caption_state = None;\n\n\t\t\t}\n\n\n\n\t\t\tif let Some(ref mut scroll_state) = self.scroll_state {\n\n\t\t\t\tboard_messages.extend(scroll_state.step(event));\n\n\t\t\t} else {\n\n\t\t\t\t// Force the player status to point at a player tile.\n\n\t\t\t\tlet (player_x, player_y) = self.board_simulator.get_player_location();\n\n\t\t\t\tif self.in_title_screen {\n\n\t\t\t\t\tself.board_simulator.set_tile(player_x, player_y, BoardTile {\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 56, "score": 37537.12666560409 }, { "content": "\t\tself.set_in_title_screen(self.in_title_screen);\n\n\t\tself.board_should_simulate_fast = false;\n\n\t}\n\n\n\n\t/// This is true if the game is in \"typing\" mode, which usually means a text input is open, and\n\n\t/// the engine wants `process_typing` to be called instead of `step`.\n\n\tpub fn in_typing_mode(&self) -> bool {\n\n\t\tself.side_bar.in_typing_mode()\n\n\t}\n\n\n\n\t// TODO: Don't play sounds when the game is over.\n\n\t/// True when the game is over, and all the user can do is press escape to exit to the title\n\n\t/// screen.\n\n\tpub fn is_end_of_game(&self) -> bool {\n\n\t\tself.board_simulator.world_header.player_health <= 0\n\n\t}\n\n\n\n\t/// See the `board_should_simulate_fast` field in the struct. 
This doesn't return true if a\n\n\t/// scroll or text input is open.\n\n\tpub fn should_simulate_fast(&self) -> bool {\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 57, "score": 37537.008638025116 }, { "content": "\t\t\t\t\t\tlet mut diff = new_time_passed_ticks - self.board_simulator.world_header.time_passed_ticks;\n\n\t\t\t\t\t\tif diff < 0 {\n\n\t\t\t\t\t\t\tdiff += 6000;\n\n\t\t\t\t\t\t}\n\n\n\n\t\t\t\t\t\tif diff >= 100 {\n\n\t\t\t\t\t\t\t// At least one second has passed.\n\n\t\t\t\t\t\t\tself.board_simulator.world_header.time_passed += 1;\n\n\t\t\t\t\t\t\tself.board_simulator.world_header.time_passed_ticks = new_time_passed_ticks;\n\n\n\n\t\t\t\t\t\t\tif self.board_simulator.board_meta_data.time_limit > 0 {\n\n\t\t\t\t\t\t\t\tlet time_left = self.board_simulator.board_meta_data.time_limit - self.board_simulator.world_header.time_passed;\n\n\n\n\t\t\t\t\t\t\t\tif time_left == 10 {\n\n\t\t\t\t\t\t\t\t\tboard_messages.push(BoardMessage::OpenScroll{title: DosString::new(), content_lines: vec![DosString::from_slice(b\"Running out of time!\")]});\n\n\t\t\t\t\t\t\t\t}\n\n\n\n\t\t\t\t\t\t\t\tif time_left < 0 {\n\n\t\t\t\t\t\t\t\t\tself.board_simulator.world_header.player_health = (self.board_simulator.world_header.player_health - 10).max(0);\n\n\t\t\t\t\t\t\t\t\tself.board_simulator.restart_player_on_board(&mut board_messages);\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 58, "score": 37536.99157502296 }, { "content": "\t\t}\n\n\t}\n\n\n\n\t/// Update the entire console state by drawing the board, side bar, scroll, caption, etc.\n\n\tpub fn update_screen(&mut self) {\n\n\t\t// TODO: The game gives the appearance of health being the value when #endgame was invoked\n\n\t\t// because it doesn't redraw the side bar while the game is over.\n\n\t\tself.side_bar.draw_side_bar(&self.board_simulator.world_header, &self.board_simulator.board_meta_data, self.is_paused, self.in_title_screen, &mut self.console_state, self.paused_cycle);\n\n\n\n\t\tfor y in 0 .. BOARD_HEIGHT - 2 {\n\n\t\t\tfor x in 0 .. BOARD_WIDTH - 2 {\n\n\t\t\t\tlet ref tile = self.board_simulator.get_tile(x as i16 + 1, y as i16 + 1).unwrap();\n\n\n\n\t\t\t\t*self.console_state.get_char_mut(x, y) = self.render_tile(tile, x, y);\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\tself.render_status_element_tiles();\n\n\n\n\t\tif let Some(ref caption_state) = self.caption_state {\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 59, "score": 37536.48034835921 }, { "content": "\t/// Get the `ConsoleChar` representing how the given `tile` at the given `tile_x`/`tile_y`\n\n\t/// position should look on the screen. This does not account for special drawing that requires\n\n\t/// a `StatusElement` to work. 
In this case, it doesn't matter what it returns, because it\n\n\t/// should be overwritten by a later call to `render_status_element_tiles()`.\n\n\tfn render_tile(&self, tile: &zzt_file_format::BoardTile, tile_x: usize, tile_y: usize) -> ConsoleChar {\n\n\t\tlet char_code;\n\n\t\tlet mut background = ConsoleColour::Black;\n\n\t\tlet mut foreground = ConsoleColour::Black;\n\n\n\n\t\tif let Some(ty) = ElementType::from_u8(tile.element_id) {\n\n\t\t\tuse self::ElementType::*;\n\n\n\n\t\t\tlet mut override_colours = false;\n\n\n\n\t\t\tif self.board_simulator.board_meta_data.is_dark {\n\n\t\t\t\tif !type_visible_in_dark(ty) && self.is_position_dark(tile_x as i16, tile_y as i16) {\n\n\t\t\t\t\treturn ConsoleChar {\n\n\t\t\t\t\t\tchar_code: 0xb0,\n\n\t\t\t\t\t\tbackground: ConsoleColour::Black,\n\n\t\t\t\t\t\tforeground: ConsoleColour::White,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 60, "score": 37536.107339109825 }, { "content": "\t\t\t\tmatch direction {\n\n\t\t\t\t\tDirection::North => {\n\n\t\t\t\t\t\tdest_player_pos.1 = BOARD_HEIGHT as i16 - 2;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tDirection::South => {\n\n\t\t\t\t\t\tdest_player_pos.1 = 1;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tDirection::West => {\n\n\t\t\t\t\t\tdest_player_pos.0 = BOARD_WIDTH as i16 - 2;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tDirection::East => {\n\n\t\t\t\t\t\tdest_player_pos.0 = 1;\n\n\t\t\t\t\t}\n\n\t\t\t\t\t_ => {}\n\n\t\t\t\t}\n\n\n\n\t\t\t\tlet original_board_index = self.board_simulator.world_header.player_board;\n\n\t\t\t\tself.board_simulator.world_header.player_board = new_board_index as i16;\n\n\n\n\t\t\t\tself.board_simulator.save_board(&mut self.world.boards[original_board_index as usize]);\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 61, "score": 37535.70551264476 }, { "content": "\t\t\t\tself.board_simulator.load_board(&self.world.boards[self.board_simulator.world_header.player_board as usize]);\n\n\n\n\t\t\t\tlet passage_location_opt = self.board_simulator.get_passage_location(passage_colour);\n\n\t\t\t\tif let Some(passage_location) = passage_location_opt {\n\n\t\t\t\t\tlet player_location = self.board_simulator.get_player_location();\n\n\t\t\t\t\t//self.board_simulator.move_tile(player_location.0, player_location.1, passage_location.0, passage_location.1);\n\n\t\t\t\t\t// For some reason ZZT manually moves the player when they use a passage, so it\n\n\t\t\t\t\t// can do weird stuff like pick up the tile underneath a player and put it\n\n\t\t\t\t\t// somewhere else.\n\n\t\t\t\t\tself.board_simulator.status_elements[0].location_x = passage_location.0 as u8;\n\n\t\t\t\t\tself.board_simulator.status_elements[0].location_y = passage_location.1 as u8;\n\n\t\t\t\t\tif let Some(old_tile) = self.board_simulator.get_tile_mut(player_location.0, player_location.1) {\n\n\t\t\t\t\t\told_tile.element_id = ElementType::Empty as u8;\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tself.board_simulator.on_player_entered_board(&mut extra_accumulated_data.board_messages);\n\n\t\t\t\tself.is_paused = true;\n\n\t\t\t}\n\n\t\t\tBoardMessage::ShowOneTimeNotification(notification_type) => {\n\n\t\t\t\tif !self.shown_one_time_notifications.contains(&notification_type) {\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 62, "score": 37534.91992630366 }, { "content": "\t\t\t\t\tself.caption_state = Some(CaptionState::new(notification_type.message_string()));\n\n\t\t\t\t\tself.shown_one_time_notifications.insert(notification_type);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tBoardMessage::OpenScroll{title, content_lines} => {\n\n\t\t\t\tif 
content_lines.len() > 1 {\n\n\t\t\t\t\tself.scroll_state = Some(ScrollState::new_title_content(title, content_lines));\n\n\t\t\t\t} else if content_lines.len() == 1 {\n\n\t\t\t\t\tself.caption_state = Some(CaptionState::new(content_lines[0].clone()));\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tBoardMessage::CloseScroll => {\n\n\t\t\t\tself.scroll_state = None;\n\n\t\t\t}\n\n\t\t\tBoardMessage::PlaySoundArray(..) => {\n\n\t\t\t\t// Do nothing. The frontend should handle this itself.\n\n\t\t\t}\n\n\t\t\tBoardMessage::ClearPlayingSound => {\n\n\t\t\t\t// Do nothing. The frontend should handle this itself.\n\n\t\t\t}\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 63, "score": 37534.5444345099 }, { "content": "\n\n\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = match frame_index {\n\n\t\t\t\t\t\t\t\t0 => 0x5c,\n\n\t\t\t\t\t\t\t\t1 => 0xc4,\n\n\t\t\t\t\t\t\t\t2 => 0x2f,\n\n\t\t\t\t\t\t\t\t3 => 0xb3,\n\n\t\t\t\t\t\t\t\t_ => 0,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tElementType::Duplicator => {\n\n\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = match status_element.param1 {\n\n\t\t\t\t\t\t\t\t0 => 0xfa,\n\n\t\t\t\t\t\t\t\t1 => 0xf9,\n\n\t\t\t\t\t\t\t\t2 => 0xf8,\n\n\t\t\t\t\t\t\t\t3 => 0x6f,\n\n\t\t\t\t\t\t\t\t4 => 0x4f,\n\n\t\t\t\t\t\t\t\t// TODO: Check this with ZZT:\n\n\t\t\t\t\t\t\t\t_ => 0,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t}\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 64, "score": 37534.31062533401 }, { "content": "\t\t\t\t\t\tElementType::Object => {\n\n\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = status_element.param1;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tElementType::Player => {\n\n\t\t\t\t\t\t\tlet mut screen_char = self.console_state.get_char_mut(screen_x, screen_y);\n\n\t\t\t\t\t\t\tif self.is_paused {\n\n\t\t\t\t\t\t\t\tif is_first_status {\n\n\t\t\t\t\t\t\t\t\tscreen_char.char_code = 0;\n\n\t\t\t\t\t\t\t\t\tscreen_char.background = ConsoleColour::Black;\n\n\t\t\t\t\t\t\t\t\tscreen_char.foreground = ConsoleColour::Black;\n\n\t\t\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\t\t\tscreen_char.char_code = 0x02;\n\n\t\t\t\t\t\t\t\t\tscreen_char.background = ConsoleColour::Blue;\n\n\t\t\t\t\t\t\t\t\tscreen_char.foreground = ConsoleColour::White;\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tElementType::Pusher => {\n\n\t\t\t\t\t\t\tlet facing_dir = match (status_element.step_x, status_element.step_y) {\n\n\t\t\t\t\t\t\t\t(1, 0) => Direction::East,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 65, "score": 37534.19115154799 }, { "content": "\t\t\t\t\t\t\t\t(Direction::North, 2) => 0xc4,\n\n\t\t\t\t\t\t\t\t(Direction::South, 0) => 0x5f,\n\n\t\t\t\t\t\t\t\t(Direction::South, 1) => 0x76,\n\n\t\t\t\t\t\t\t\t(Direction::South, 2) => 0xc4,\n\n\t\t\t\t\t\t\t\t_ => 0,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t_ => {}\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tif is_first_status && self.is_paused && self.paused_cycle % 10 < 5 {\n\n\t\t\t\tlet mut screen_char = self.console_state.get_char_mut(screen_x, screen_y);\n\n\t\t\t\tscreen_char.char_code = 0x02;\n\n\t\t\t\tscreen_char.background = ConsoleColour::Blue;\n\n\t\t\t\tscreen_char.foreground = ConsoleColour::White;\n\n\t\t\t}\n\n\n\n\t\t\tis_first_status = false;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 66, "score": 37534.16725473936 }, { "content": "\t\t\t\tlet player_y = player_status.location_y as i16;\n\n\t\t\t\tlet blocked = self.board_simulator.push_tile(player_x + off_x, 
player_y + off_y, off_x, off_y, true, false, 0, None, &mut self.accumulated_data);\n\n\n\n\t\t\t\tif blocked == BlockedStatus::NotBlocked {\n\n\t\t\t\t\tlet player_status = &mut self.board_simulator.status_elements[0];\n\n\t\t\t\t\tlet under_element_id = player_status.under_element_id;\n\n\t\t\t\t\tlet under_colour = player_status.under_colour;\n\n\t\t\t\t\tplayer_status.location_x = (player_x + off_x) as u8;\n\n\t\t\t\t\tplayer_status.location_y = (player_y + off_y) as u8;\n\n\t\t\t\t\tif let Some(old_tile) = self.board_simulator.get_tile_mut(player_x, player_y) {\n\n\t\t\t\t\t\tif old_tile.element_id == ElementType::Player as u8 {\n\n\t\t\t\t\t\t\told_tile.element_id = under_element_id;\n\n\t\t\t\t\t\t\told_tile.colour = under_colour;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\tself.is_paused = false;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tself.paused_cycle += 1;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 67, "score": 37534.164700727866 }, { "content": "\t\t\t\t\t\t\t\ttile: BoardTile {\n\n\t\t\t\t\t\t\t\t\telement_id: ElementType::Empty as u8,\n\n\t\t\t\t\t\t\t\t\tcolour: 0,\n\n\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\tstatus_element: None,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.board_simulator.apply_action(player_pos.0 + off_x, player_pos.1 + off_y, action, 0, None, &mut self.accumulated_data, &mut report);\n\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tzap_at_offset(-1, 0);\n\n\t\t\t\t\t\tzap_at_offset(1, 0);\n\n\t\t\t\t\t\tzap_at_offset(0, -1);\n\n\t\t\t\t\t\tzap_at_offset(0, 1);\n\n\t\t\t\t\t}\n\n\t\t\t\t\tb\"dark\" => {\n\n\t\t\t\t\t\tself.board_simulator.board_meta_data.is_dark = true;\n\n\t\t\t\t\t}\n\n\t\t\t\t\tb\"-dark\" => {\n\n\t\t\t\t\t\tself.board_simulator.board_meta_data.is_dark = false;\n\n\t\t\t\t\t}\n\n\t\t\t\t\t_ => {\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 68, "score": 37534.071159318264 }, { "content": "\t\t\t\t\t\t\t\t(-1, 0) => Direction::West,\n\n\t\t\t\t\t\t\t\t(0, -1) => Direction::North,\n\n\t\t\t\t\t\t\t\t(0, 1) => Direction::South,\n\n\t\t\t\t\t\t\t\t_ => Direction::South,\n\n\t\t\t\t\t\t\t};\n\n\n\n\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = match facing_dir {\n\n\t\t\t\t\t\t\t\tDirection::East => 0x10,\n\n\t\t\t\t\t\t\t\tDirection::West => 0x11,\n\n\t\t\t\t\t\t\t\tDirection::North => 0x1e,\n\n\t\t\t\t\t\t\t\tDirection::South => 0x1f,\n\n\t\t\t\t\t\t\t\tDirection::Idle => 0x1f,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tElementType::SpinningGun => {\n\n\t\t\t\t\t\t\tlet frame_index = (self.global_cycle % (4 * status_element.cycle) as usize) / status_element.cycle as usize;\n\n\n\n\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = match frame_index {\n\n\t\t\t\t\t\t\t\t0 => 0x18,\n\n\t\t\t\t\t\t\t\t1 => 0x1a,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 69, "score": 37533.84296531228 }, { "content": "\t\t\t\t\t\t\t\t(1, 0) => Direction::East,\n\n\t\t\t\t\t\t\t\t(-1, 0) => Direction::West,\n\n\t\t\t\t\t\t\t\t(0, -1) => Direction::North,\n\n\t\t\t\t\t\t\t\t(0, 1) => Direction::South,\n\n\t\t\t\t\t\t\t\t_ => Direction::East,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tlet mut frame_index = (self.global_cycle % (4 * status_element.cycle) as usize) / status_element.cycle as usize;\n\n\t\t\t\t\t\t\tif frame_index == 3 {\n\n\t\t\t\t\t\t\t\tframe_index = 1;\n\n\t\t\t\t\t\t\t}\n\n\n\n\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = match (facing_dir, frame_index) {\n\n\t\t\t\t\t\t\t\t(Direction::East, 0) => 0x3e,\n\n\t\t\t\t\t\t\t\t(Direction::East, 1) => 
0x29,\n\n\t\t\t\t\t\t\t\t(Direction::East, 2) => 0xb3,\n\n\t\t\t\t\t\t\t\t(Direction::West, 0) => 0x3c,\n\n\t\t\t\t\t\t\t\t(Direction::West, 1) => 0x28,\n\n\t\t\t\t\t\t\t\t(Direction::West, 2) => 0xb3,\n\n\t\t\t\t\t\t\t\t(Direction::North, 0) => 0x7e,\n\n\t\t\t\t\t\t\t\t(Direction::North, 1) => 0x5e,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 70, "score": 37533.84296531228 }, { "content": "\n\n\t\t\t\t\tmatch ty {\n\n\t\t\t\t\t\tElementType::Bomb => {\n\n\t\t\t\t\t\t\tif status_element.param1 > 1 {\n\n\t\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = b'0' + status_element.param1;\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tElementType::Clockwise => {\n\n\t\t\t\t\t\t\tlet frame_index = (self.global_cycle % (4 * status_element.cycle) as usize) / status_element.cycle as usize;\n\n\n\n\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = match frame_index {\n\n\t\t\t\t\t\t\t\t0 => 0x2f,\n\n\t\t\t\t\t\t\t\t1 => 0xc4,\n\n\t\t\t\t\t\t\t\t2 => 0x5c,\n\n\t\t\t\t\t\t\t\t3 => 0xb3,\n\n\t\t\t\t\t\t\t\t_ => 0,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tElementType::Counter => {\n\n\t\t\t\t\t\t\tlet frame_index = (self.global_cycle % (4 * status_element.cycle) as usize) / status_element.cycle as usize;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 71, "score": 37533.82658951744 }, { "content": "\t\t\t\tself.board_simulator.load_board(&self.world.boards[self.board_simulator.world_header.player_board as usize]);\n\n\n\n\t\t\t\tlet (off_x, off_y) = direction.to_offset();\n\n\t\t\t\t// Check if where the player is trying to go on the destination board is blocked.\n\n\t\t\t\tlet push_blocked = self.board_simulator.push_tile(dest_player_pos.0, dest_player_pos.1, off_x, off_y, true, false, 0, None, &mut extra_accumulated_data);\n\n\n\n\t\t\t\tif push_blocked == BlockedStatus::NotBlocked {\n\n\t\t\t\t\tlet old_board_player_pos = self.board_simulator.get_player_location();\n\n\t\t\t\t\tself.board_simulator.move_tile(old_board_player_pos.0, old_board_player_pos.1, dest_player_pos.0, dest_player_pos.1);\n\n\t\t\t\t\tself.board_simulator.on_player_entered_board(&mut extra_accumulated_data.board_messages);\n\n\t\t\t\t} else {\n\n\t\t\t\t\tself.board_simulator.save_board(&mut self.world.boards[self.board_simulator.world_header.player_board as usize]);\n\n\t\t\t\t\tself.board_simulator.world_header.player_board = original_board_index;\n\n\t\t\t\t\tself.board_simulator.load_board(&self.world.boards[self.board_simulator.world_header.player_board as usize]);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tBoardMessage::TeleportToBoard{destination_board_index, passage_colour} => {\n\n\t\t\t\tself.board_simulator.save_board(&mut self.world.boards[self.board_simulator.world_header.player_board as usize]);\n\n\n\n\t\t\t\tself.board_simulator.world_header.player_board = destination_board_index as i16;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 72, "score": 37533.807235617554 }, { "content": "\t\t\tBoardMessage::OpenSaveGameInput => {\n\n\t\t\t\tself.side_bar.open_text_input(side_bar::TextInputMode::SaveFile, b\"SAVED\");\n\n\t\t\t}\n\n\t\t\tBoardMessage::SaveGameToFile(file_name) => {\n\n\t\t\t\tself.sync_world();\n\n\t\t\t\tprintln!(\"Save to {:?}\", file_name);\n\n\t\t\t\tif let Ok(mut file) = File::create(file_name.to_string(false)) {\n\n\t\t\t\t\tif let Err(err) = self.world.write(&mut file) {\n\n\t\t\t\t\t\tprintln!(\"Couldn't write to {:?}: {:?}\", file_name, err);\n\n\t\t\t\t\t}\n\n\t\t\t\t} else {\n\n\t\t\t\t\tprintln!(\"Couldn't 
open {:?}\", file_name);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tBoardMessage::OpenDebugInput => {\n\n\t\t\t\tself.side_bar.open_text_input(side_bar::TextInputMode::Debug, b\"\");\n\n\t\t\t}\n\n\t\t\tBoardMessage::DebugCommand(command) => {\n\n\t\t\t\tmatch command.to_lower().data.as_slice() {\n\n\t\t\t\t\tb\"ammo\" => {\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 73, "score": 37533.54277996555 }, { "content": "\n\n\t\tif self.is_end_of_game() {\n\n\t\t\tif !was_end_of_game {\n\n\t\t\t\tboard_messages.push(BoardMessage::PlaySoundArray(process_notes_string(b\"s.-cd#g+c-ga#+dgfg#+cf---hc\"), SoundPriority::Level(5)));\n\n\t\t\t}\n\n\n\n\t\t\tif self.global_cycle % 7 == 0 {\n\n\t\t\t\tself.caption_state = Some(CaptionState::new(DosString::from_slice(b\" Game over \\xc4 Press ESCAPE\")));\n\n\t\t\t}\n\n\n\n\t\t\tself.board_should_simulate_fast = true;\n\n\t\t}\n\n\n\n\t\tboard_messages\n\n\t}\n\n}\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 74, "score": 37533.47600867563 }, { "content": "\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\n\n\t\t\t\tboard_messages.extend(std::mem::replace(&mut board_simulator_step_state.accumulated_data.board_messages, vec![]));\n\n\n\n\t\t\t\tif is_done {\n\n\t\t\t\t\tself.board_simulator_step_state = None;\n\n\t\t\t\t\t// Only increment if the whole step is complete, not when it pauses half way through\n\n\t\t\t\t\t// to open a scroll for example.\n\n\t\t\t\t\tself.global_cycle += 1;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\t//self.update_screen();\n\n\n\n\t\t//println!(\"{} - {}\", self.board_simulator.world_header.player_board, self.world.boards[self.board_simulator.world_header.player_board as usize].meta_data.board_name.to_string(true));\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 75, "score": 37532.985078129874 }, { "content": "\t\t\t\t\t\t\t\t2 => 0x19,\n\n\t\t\t\t\t\t\t\t3 => 0x1b,\n\n\t\t\t\t\t\t\t\t_ => 0,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tElementType::Star => {\n\n\t\t\t\t\t\t\t//let frame_offset = (self.global_cycle + (status_element.param2 as usize)) % 2;\n\n\t\t\t\t\t\t\t//let frame_index = ((self.global_cycle & !1) + frame_offset) % 4;\n\n\t\t\t\t\t\t\tlet frame_index = (self.global_cycle % (4 * status_element.cycle) as usize) / status_element.cycle as usize;\n\n\t\t\t\t\t\t\tself.console_state.get_char_mut(screen_x, screen_y).char_code = match frame_index {\n\n\t\t\t\t\t\t\t\t0 => 0x2f,\n\n\t\t\t\t\t\t\t\t1 => 0xc4,\n\n\t\t\t\t\t\t\t\t2 => 0x5c,\n\n\t\t\t\t\t\t\t\t3 => 0xb3,\n\n\t\t\t\t\t\t\t\t_ => 0,\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tElementType::Transporter => {\n\n\t\t\t\t\t\t\t// ZZT shows weird animations for this when the step_x/y is > 1.\n\n\t\t\t\t\t\t\tlet facing_dir = match (status_element.step_x, status_element.step_y) {\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 76, "score": 37532.87890982611 }, { "content": "\t\t\t\t\t\tself.caption_state = Some(CaptionState::new(DosString::from_slice(b\"Unknown debug command\")));\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\t// TODO: Play a note.\n\n\t\t\t}\n\n\t\t\tBoardMessage::LinkClicked(link_label) => {\n\n\t\t\t\t// TODO: If link_label starts with \"-\", then treat it as a file name to load.\n\n\t\t\t\tself.clicked_link_label = Some(link_label);\n\n\t\t\t}\n\n\t\t\tBoardMessage::PauseGame => {\n\n\t\t\t\tif !self.is_end_of_game() {\n\n\t\t\t\t\tself.is_paused = true;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tBoardMessage::PlayGame => 
{\n\n\t\t\t\tself.set_in_title_screen(false);\n\n\t\t\t\textra_accumulated_data.board_messages.push(BoardMessage::ClearPlayingSound);\n\n\t\t\t}\n\n\t\t\tBoardMessage::OpenEndGameConfirmation => {\n\n\t\t\t\tself.side_bar.open_yes_no_input(side_bar::YesNoMode::EndGame);\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 77, "score": 37532.81812041357 }, { "content": "\t\tBullet => 0xf8,\n\n\t\tBlinkRayHorizontal => 0xcd,\n\n\t\tBlinkRayVertical => 0xba,\n\n\t\tStar => 0x0,\n\n\t\t_ => {\n\n\t\t\tprintln!(\"element_type_to_char_code: {:?}\", ty);\n\n\t\t\t0\n\n\t\t},\n\n\t}\n\n}\n\n\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 78, "score": 37530.42555514679 }, { "content": "\t\t\t\t\t} else {\n\n\t\t\t\t\t\tif self.board_simulator.world_header.energy_cycles > 0 {\n\n\t\t\t\t\t\t\tchar_code = 1;\n\n\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tchar_code = 2;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\t_ => {\n\n\t\t\t\t\tchar_code = element_type_to_char_code(ty);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tif !override_colours {\n\n\t\t\t\tbackground = ConsoleColour::from_u8(tile.colour >> 4).unwrap();\n\n\t\t\t\tforeground = ConsoleColour::from_u8(tile.colour & 0b1111).unwrap();\n\n\t\t\t}\n\n\t\t} else {\n\n\t\t\tbackground = ConsoleColour::Black;\n\n\t\t\tforeground = ConsoleColour::Black;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 79, "score": 37530.42555514679 }, { "content": "\t\t\t\t\t};\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tmatch ty {\n\n\t\t\t\tEmpty => {\n\n\t\t\t\t\tchar_code = 0;\n\n\t\t\t\t\tbackground = ConsoleColour::Black;\n\n\t\t\t\t\tforeground = ConsoleColour::Black;\n\n\t\t\t\t\toverride_colours = true;\n\n\t\t\t\t}\n\n\t\t\t\tTextBlue | TextGreen | TextCyan | TextRed | TextPurple | TextBrown | TextBlack => {\n\n\t\t\t\t\tchar_code = tile.colour;\n\n\t\t\t\t\tforeground = ConsoleColour::White;\n\n\n\n\t\t\t\t\tbackground = match ty {\n\n\t\t\t\t\t\tTextBlue => ConsoleColour::Blue,\n\n\t\t\t\t\t\tTextGreen => ConsoleColour::Green,\n\n\t\t\t\t\t\tTextCyan => ConsoleColour::Cyan,\n\n\t\t\t\t\t\tTextRed => ConsoleColour::Red,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 80, "score": 37530.42555514679 }, { "content": "\t\t\t\tlet top_left_y = player_y - 1 - ((circle_height - 1) / 2);\n\n\n\n\t\t\t\tif x >= top_left_x && x < top_left_x + CIRCLE_MASK_WIDTH as i16\n\n\t\t\t\t\t&& y >= top_left_y && y < top_left_y + circle_height\n\n\t\t\t\t{\n\n\t\t\t\t\tlet circle_x = x - top_left_x;\n\n\t\t\t\t\tlet circle_y = y - top_left_y;\n\n\t\t\t\t\tlet ref circle_row = CIRCLE_MASK[circle_y as usize];\n\n\t\t\t\t\t(circle_row >> circle_x & 1) == 0\n\n\t\t\t\t} else {\n\n\t\t\t\t\ttrue\n\n\t\t\t\t}\n\n\t\t\t} else {\n\n\t\t\t\ttrue\n\n\t\t\t}\n\n\t\t} else {\n\n\t\t\tfalse\n\n\t\t}\n\n\t}\n\n\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 81, "score": 37530.42555514679 }, { "content": "\t\t\t\t\t\tTextPurple => ConsoleColour::Magenta,\n\n\t\t\t\t\t\tTextBrown => ConsoleColour::Brown,\n\n\t\t\t\t\t\tTextBlack => ConsoleColour::Black,\n\n\t\t\t\t\t\t_ => ConsoleColour::Black,\n\n\t\t\t\t\t};\n\n\t\t\t\t\toverride_colours = true;\n\n\t\t\t\t}\n\n\t\t\t\tObject => {\n\n\t\t\t\t\tchar_code = 0;\n\n\t\t\t\t}\n\n\t\t\t\tTransporter => {\n\n\t\t\t\t\tchar_code = 0;\n\n\t\t\t\t}\n\n\t\t\t\tLine => {\n\n\t\t\t\t\tlet check_adjacent = |offset_x, offset_y| {\n\n\t\t\t\t\t\tlet off_tile_x = tile_x as i16 + offset_x;\n\n\t\t\t\t\t\tlet off_tile_y = tile_y as i16 + offset_y;\n\n\n\n\t\t\t\t\t\tif off_tile_x < 0 || off_tile_x >= BOARD_WIDTH as i16 || off_tile_y < 0 
|| off_tile_y >= BOARD_HEIGHT as i16 {\n\n\t\t\t\t\t\t\ttrue\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 82, "score": 37530.42555514679 }, { "content": "\t\tfor status_element in &self.board_simulator.status_elements {\n\n\t\t\tlet x = status_element.location_x as usize;\n\n\t\t\tlet y = status_element.location_y as usize;\n\n\n\n\t\t\tif x < 1 || y < 1 {\n\n\t\t\t\tcontinue;\n\n\t\t\t}\n\n\n\n\t\t\tlet screen_x = x - 1;\n\n\t\t\tlet screen_y = y - 1;\n\n\n\n\t\t\tlet tile_opt = self.board_simulator.get_tile(x as i16, y as i16);\n\n\t\t\tif let Some(tile) = tile_opt {\n\n\t\t\t\tif let Some(ty) = ElementType::from_u8(tile.element_id) {\n\n\t\t\t\t\tif self.board_simulator.board_meta_data.is_dark {\n\n\t\t\t\t\t\tif !type_visible_in_dark(ty) && self.is_position_dark(screen_x as i16, screen_y as i16) {\n\n\t\t\t\t\t\t\t// Don't draw any statuses in the darkness.\n\n\t\t\t\t\t\t\tcontinue;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 83, "score": 37530.42555514679 }, { "content": "\t\tLion => 0xea,\n\n\t\tTiger => 0xe3,\n\n\t\tBlinkWall => 0xce,\n\n\t\tSliderNS => 0x12,\n\n\t\tSliderEW => 0x1d,\n\n\t\tPassage => 0xf0,\n\n\t\tGem => 0x04,\n\n\t\tRicochet => 0x2a,\n\n\t\tClockwise => 0x2f,\n\n\t\tCounter => 0x5c,\n\n\t\tKey => 0x0c,\n\n\t\tInvisible => 0x00,\n\n\t\tSpinningGun => 0x0,\n\n\t\tWater => 0xb0,\n\n\t\tForest => 0xb0,\n\n\t\tEnergizer => 0x7f,\n\n\t\tFake => 0xb2,\n\n\t\tPusher => 0x0,\n\n\t\tBomb => 0x0b,\n\n\t\tDuplicator => 0,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 84, "score": 37530.42555514679 }, { "content": "\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tlet adjacent_tile = self.board_simulator.get_tile(off_tile_x + 1, off_tile_y + 1).unwrap();\n\n\t\t\t\t\t\t\tif let Some(ElementType::Line) | Some(ElementType::BoardEdge) = ElementType::from_u8(adjacent_tile.element_id) {\n\n\t\t\t\t\t\t\t\ttrue\n\n\t\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\t\tfalse\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t};\n\n\n\n\t\t\t\t\tlet join_n = check_adjacent(0, -1);\n\n\t\t\t\t\tlet join_s = check_adjacent(0, 1);\n\n\t\t\t\t\tlet join_e = check_adjacent(1, 0);\n\n\t\t\t\t\tlet join_w = check_adjacent(-1, 0);\n\n\n\n\t\t\t\t\tchar_code = match (join_n, join_s, join_e, join_w) {\n\n\t\t\t\t\t\t(false, false, false, false) => 0xfa,\n\n\t\t\t\t\t\t(false, false, false, true) => 0xb5,\n\n\t\t\t\t\t\t(false, false, true, false) => 0xc6,\n\n\t\t\t\t\t\t(false, false, true, true) => 0xcd,\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 85, "score": 37530.42555514679 }, { "content": "\t\t\t\t\t\t(false, true, false, false) => 0xd2,\n\n\t\t\t\t\t\t(false, true, false, true) => 0xbb,\n\n\t\t\t\t\t\t(false, true, true, false) => 0xc9,\n\n\t\t\t\t\t\t(false, true, true, true) => 0xcb,\n\n\t\t\t\t\t\t(true, false, false, false) => 0xd0,\n\n\t\t\t\t\t\t(true, false, false, true) => 0xbc,\n\n\t\t\t\t\t\t(true, false, true, false) => 0xc8,\n\n\t\t\t\t\t\t(true, false, true, true) => 0xca,\n\n\t\t\t\t\t\t(true, true, false, false) => 0xba,\n\n\t\t\t\t\t\t(true, true, false, true) => 0xb9,\n\n\t\t\t\t\t\t(true, true, true, false) => 0xcc,\n\n\t\t\t\t\t\t(true, true, true, true) => 0xce,\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tPlayer => {\n\n\t\t\t\t\tif self.is_paused {\n\n\t\t\t\t\t\tchar_code = 2;\n\n\t\t\t\t\t\toverride_colours = true;\n\n\t\t\t\t\t\tbackground = ConsoleColour::Blue;\n\n\t\t\t\t\t\tforeground = ConsoleColour::White;\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 86, "score": 37530.42555514679 }, { "content": 
"\t\t\t}\n\n\t\t\tBoardMessage::OpenQuitConfirmation => {\n\n\t\t\t\tself.side_bar.open_yes_no_input(side_bar::YesNoMode::Quit);\n\n\t\t\t}\n\n\t\t\tBoardMessage::ReturnToTitleScreen => {\n\n\t\t\t\tself.set_in_title_screen(true);\n\n\t\t\t}\n\n\t\t\t| BoardMessage::Quit\n\n\t\t\t| BoardMessage::OpenWorldSelection\n\n\t\t\t| BoardMessage::OpenSaveSelection\n\n\t\t\t| BoardMessage::OpenWorld{..}\n\n\t\t\t| BoardMessage::EnterPressedInScroll{..} => {\n\n\t\t\t\t// Do nothing. The frontend should handle these itself.\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\textra_accumulated_data.board_messages\n\n\t}\n\n\n\n\t/// Open a scroll with the given `title` and `content_lines`.\n", "file_path": "ruzzt_engine/src/engine.rs", "rank": 87, "score": 37530.42555514679 }, { "content": "use crate::console::*;\n\n\n\nuse num::FromPrimitive;\n\nuse zzt_file_format::dosstring::DosString;\n\n\n\n#[derive(Clone)]\n\npub struct CaptionState {\n\n\tpub text_with_padding: DosString,\n\n\tpub time_left: isize,\n\n}\n\n\n\nimpl CaptionState {\n\n\tpub fn new(text: DosString) -> CaptionState {\n\n\t\tlet mut text_with_padding = text;\n\n\t\ttext_with_padding.data.insert(0, b' ');\n\n\t\ttext_with_padding.data.push(b' ');\n\n\n\n\t\tCaptionState {\n\n\t\t\ttext_with_padding,\n\n\t\t\ttime_left: 24,\n", "file_path": "ruzzt_engine/src/caption.rs", "rank": 88, "score": 30140.863344575977 }, { "content": "impl ConsoleState {\n\n\t/// Create a new ConsoleState with a completely black buffer.\n\n\tpub fn new() -> ConsoleState {\n\n\t\tConsoleState {\n\n\t\t\tscreen_chars: [[ConsoleChar::black(); SCREEN_WIDTH]; SCREEN_HEIGHT],\n\n\t\t}\n\n\t}\n\n\t\n\n\t/// Get the character on the screen at the `x`x`y` position.\n\n\tpub fn get_char(&self, x: usize, y: usize) -> ConsoleChar {\n\n\t\tself.screen_chars[y][x]\n\n\t}\n\n\t\n\n\t/// Get the character on the screen at the `x`x`y` position as &mut so it can be modified\n\n\t/// directly in place.\n\n\tpub fn get_char_mut(&mut self, x: usize, y: usize) -> &mut ConsoleChar {\n\n\t\t&mut self.screen_chars[y][x]\n\n\t}\n\n\t\n\n\t/// Starting at `x`x`y` and moving to the right, place characters of `text` in the console, with\n", "file_path": "ruzzt_engine/src/console.rs", "rank": 89, "score": 30138.33217230586 }, { "content": "use num_derive::FromPrimitive;\n\n\n\npub const SCREEN_WIDTH: usize = 80;\n\npub const SCREEN_HEIGHT: usize = 25;\n\n\n\n/// A single character in the `ConsoleState`'s buffer.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct ConsoleChar {\n\n\tpub char_code: u8,\n\n\t/// Note that background colours 0x8-0xf are actually the same as 0x0-0x7, except they blink on\n\n\t/// and off regularly.\n\n\tpub background: ConsoleColour,\n\n\tpub foreground: ConsoleColour,\n\n}\n\n\n\nimpl ConsoleChar {\n\n\tpub fn new(char_code: u8, background: ConsoleColour, foreground: ConsoleColour) -> ConsoleChar {\n\n\t\tConsoleChar {\n\n\t\t\tchar_code,\n\n\t\t\tbackground,\n", "file_path": "ruzzt_engine/src/console.rs", "rank": 90, "score": 30136.143893810928 }, { "content": "pub mod behaviour;\n\npub mod board_message;\n\npub mod board_simulator;\n\npub mod caption;\n\npub mod console;\n\npub mod direction;\n\npub mod engine;\n\npub mod event;\n\npub mod oop_parser;\n\npub mod scroll;\n\npub mod side_bar;\n\npub mod sounds;\n\npub mod zzt_behaviours;\n\nmod tests;\n", "file_path": "ruzzt_engine/src/lib.rs", "rank": 91, "score": 30135.219792315587 }, { "content": "\t\t\t}\n\n\t\t} else {\n\n\t\t\tNone\n\n\t\t}\n\n\t}\n\n\n\n\t/// Execute a single simulation step on the scroll, with the 
given input `event`.\n\n\tpub fn step(&mut self, event: Event) -> Vec<BoardMessage> {\n\n\t\tlet mut board_messages = vec![];\n\n\t\tlet page_size = 14;\n\n\t\tmatch event {\n\n\t\t\tEvent::Escape => {\n\n\t\t\t\tboard_messages.push(BoardMessage::CloseScroll);\n\n\t\t\t}\n\n\t\t\tEvent::Enter => {\n\n\t\t\t\tif let Some(hovering_link_label) = self.hovering_link() {\n\n\t\t\t\t\tlet label = DosString::from_slice(hovering_link_label);\n\n\t\t\t\t\tboard_messages.push(BoardMessage::LinkClicked(label));\n\n\t\t\t\t\tboard_messages.push(BoardMessage::CloseScroll);\n\n\t\t\t\t} else {\n", "file_path": "ruzzt_engine/src/scroll.rs", "rank": 92, "score": 30134.692948879667 }, { "content": "\t/// The line index within `content_lines` that the user currently has selected.\n\n\tcurrent_line: isize,\n\n}\n\n\n\nimpl ScrollState {\n\n\t/// Make a new scroll state with the given `title` and `content_lines`.\n\n\tpub fn new_title_content(title: DosString, content_lines: Vec<DosString>) -> ScrollState {\n\n\t\tScrollState {\n\n\t\t\ttitle,\n\n\t\t\tcontent_lines,\n\n\t\t\tcurrent_line: 0,\n\n\t\t}\n\n\t}\n\n\n\n\t/// Make a new scroll state with no title and no content.\n\n\tpub fn new_empty() -> ScrollState {\n\n\t\tScrollState {\n\n\t\t\ttitle: DosString::new(),\n\n\t\t\tcontent_lines: vec![],\n\n\t\t\tcurrent_line: 0,\n", "file_path": "ruzzt_engine/src/scroll.rs", "rank": 93, "score": 30134.551533068865 }, { "content": "\tpub reprocess_same_status_index_on_removal: bool,\n\n}\n\n\n\nimpl ApplyActionResultReport {\n\n\t/// Make a new default `ApplyActionResultReport`.\n\n\tpub fn new() -> ApplyActionResultReport {\n\n\t\tApplyActionResultReport {\n\n\t\t\tmove_was_blocked: BlockedStatus::NotBlocked,\n\n\t\t\ttake_player_item_failed: false,\n\n\t\t\tremoved_status_indices: StatusIndicesWithMinimum::new(),\n\n\t\t\treprocess_same_status_index_on_removal: false,\n\n\t\t}\n\n\t}\n\n}\n\n\n\n/// The result of the `Behaviour::push()` function.\n\n#[derive(Debug)]\n\npub struct PushResult {\n\n\t/// Whether the push attempt was blocked, and so the thing pushing it shouldn't assume that the\n\n\t/// spot is empty.\n", "file_path": "ruzzt_engine/src/behaviour.rs", "rank": 94, "score": 30134.320690709344 }, { "content": "use crate::board_message::*;\n\nuse crate::board_simulator::*;\n\nuse crate::event::*;\n\nuse crate::sounds::*;\n\n\n\nuse zzt_file_format::*;\n\nuse zzt_file_format::dosstring::DosString;\n\nuse std::fmt::Debug;\n\n\n\n/// This is a description of one mutating operation to perform on the BoardSimulator.\n\n#[derive(Debug, Clone)]\n\npub enum Action {\n\n\t/// Set just the tile `colour` of the tile at `x` by `y`.\n\n\tSetColour {\n\n\t\tx: i16,\n\n\t\ty: i16,\n\n\t\tcolour: u8,\n\n\t},\n\n\t/// Set the leader value of the status with the given `status_index` (this is just for\n\n\t/// centepedes).\n", "file_path": "ruzzt_engine/src/behaviour.rs", "rank": 95, "score": 30134.166100421487 }, { "content": "}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum DamageResult {\n\n\tNone,\n\n\tDied,\n\n}\n\n\n\n/// Helper for keeping track of a list on indices, and the minimum value within that list.\n\n#[derive(Clone)]\n\npub struct StatusIndicesWithMinimum {\n\n\tindices: Vec<usize>,\n\n\tminimum: Option<usize>,\n\n}\n\n\n\nimpl StatusIndicesWithMinimum {\n\n\tpub fn new() -> StatusIndicesWithMinimum {\n\n\t\tStatusIndicesWithMinimum {\n\n\t\t\tindices: vec![],\n\n\t\t\tminimum: None,\n", "file_path": "ruzzt_engine/src/behaviour.rs", "rank": 96, "score": 30133.94039099644 }, { "content": 
"\t\t}\n\n\t}\n\n}\n\n\n\n/// Standard result for specifying what mutating actions should be applied after a Behaviour\n\n/// function is called.\n\n#[derive(Debug)]\n\npub struct ActionResult {\n\n\t/// List of mutating actions to apply to the BoardSimulator.\n\n\tpub actions: Vec<Action>,\n\n\t/// If this is not None, it will be the continuation object to use to keep executing logic after\n\n\t/// applying `actions`. See `ActionContinuation`.\n\n\tpub continuation: Option<Box<dyn ActionContinuation>>,\n\n}\n\n\n\nimpl ActionResult {\n\n\t/// Helper constructor for making an `ActionResult` with some `actions` and no `continuation`.\n\n\tpub fn with_actions(actions: Vec<Action>) -> ActionResult {\n\n\t\tActionResult {\n\n\t\t\tactions,\n", "file_path": "ruzzt_engine/src/behaviour.rs", "rank": 97, "score": 30133.421706166606 }, { "content": "\t/// the given `background`/`foreground` colours for all the characters.\n\n\tpub fn draw_text_at(&mut self, x: usize, y: usize, text: &[u8], background: ConsoleColour, foreground: ConsoleColour) {\n\n\t\tfor (i, char_code) in text.iter().enumerate() {\n\n\t\t\t*self.get_char_mut(x + i, y) = ConsoleChar::new(*char_code, background, foreground);\n\n\t\t}\n\n\t}\n\n}\n\n\n\n/// The possible colours that can be displayed in the console.\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\n#[derive(FromPrimitive)]\n\n#[repr(u8)]\n\npub enum ConsoleColour {\n\n\tBlack = 0x0,\n\n\tBlue = 0x1,\n\n\tGreen = 0x2,\n\n\tCyan = 0x3,\n\n\tRed = 0x4,\n\n\tMagenta = 0x5,\n\n\tBrown = 0x6,\n", "file_path": "ruzzt_engine/src/console.rs", "rank": 98, "score": 30133.080800060463 }, { "content": "\t\tlet bg = ConsoleColour::Black;\n\n\t\tlet fg = ConsoleColour::White;\n\n\n\n\t\t*console_state.get_char_mut(5, row) = ConsoleChar::new(chars.0, bg, fg);\n\n\t\t*console_state.get_char_mut(6, row) = ConsoleChar::new(chars.1, bg, fg);\n\n\t\tfor x in 7 ..= 51 {\n\n\t\t\t*console_state.get_char_mut(x, row) = ConsoleChar::new(chars.2, bg, fg);\n\n\t\t}\n\n\t\t*console_state.get_char_mut(52, row) = ConsoleChar::new(chars.3, bg, fg);\n\n\t\t*console_state.get_char_mut(53, row) = ConsoleChar::new(chars.4, bg, fg);\n\n\t}\n\n\n\n\t/// Draw the given `text` in the console, starting at the given `x`/`y` position, with a blue\n\n\t/// background, and the given `foreground` colour.\n\n\tfn draw_text_at(&self, x: usize, y: usize, text: &[u8], foreground: ConsoleColour, console_state: &mut ConsoleState) {\n\n\t\tfor (i, char_code) in text.iter().enumerate() {\n\n\t\t\t*console_state.get_char_mut(x + i, y) = ConsoleChar::new(*char_code, ConsoleColour::Blue, foreground);\n\n\t\t}\n\n\t}\n\n\n", "file_path": "ruzzt_engine/src/scroll.rs", "rank": 99, "score": 30132.42415933169 } ]
Rust
src/config/file.rs
lachesis/roughenough
a5e29a47646cc57bdd8e3603818cc9bd46f81bfc
use std::fs::File; use std::io::Read; use std::time::Duration; use data_encoding::{Encoding, HEXLOWER_PERMISSIVE}; use yaml_rust::YamlLoader; use crate::config::{DEFAULT_BATCH_SIZE, DEFAULT_STATUS_INTERVAL}; use crate::config::ServerConfig; use crate::Error; use crate::key::KmsProtection; const HEX: Encoding = HEXLOWER_PERMISSIVE; pub struct FileConfig { port: u16, interface: String, seed: Vec<u8>, batch_size: u8, status_interval: Duration, kms_protection: KmsProtection, health_check_port: Option<u16>, client_stats: bool, fault_percentage: u8, } impl FileConfig { pub fn new(config_file: &str) -> Result<Self, Error> { let mut infile = File::open(config_file) .unwrap_or_else(|_| panic!("failed to open config file '{}'", config_file)); let mut contents = String::new(); infile .read_to_string(&mut contents) .unwrap_or_else(|_| panic!("could not read config file '{}'", config_file)); let cfg = YamlLoader::load_from_str(&contents) .unwrap_or_else(|_| panic!("could not parse config file '{}'", config_file)); if cfg.len() != 1 { return Err(Error::InvalidConfiguration(format!( "Empty or malformed config file '{}'", config_file ))); } let mut config = FileConfig { port: 0, interface: "".to_string(), seed: Vec::new(), batch_size: DEFAULT_BATCH_SIZE, status_interval: DEFAULT_STATUS_INTERVAL, kms_protection: KmsProtection::Plaintext, health_check_port: None, client_stats: false, fault_percentage: 0, }; for (key, value) in cfg[0].as_hash().unwrap() { match key.as_str().unwrap() { "port" => config.port = value.as_i64().unwrap() as u16, "interface" => config.interface = value.as_str().unwrap().to_string(), "batch_size" => config.batch_size = value.as_i64().unwrap() as u8, "seed" => { let val = value.as_str().unwrap().to_string(); config.seed = HEX .decode(val.as_bytes()) .expect("seed value invalid; 'seed' must be a valid hex value"); } "status_interval" => { let val = value.as_i64().expect("status_interval value invalid"); config.status_interval = Duration::from_secs(val as u64) } "kms_protection" => { let val = value.as_str().unwrap().parse().unwrap_or_else(|_| { panic!("invalid kms_protection value: {:?}", value) }); config.kms_protection = val } "health_check_port" => { let val = value.as_i64().unwrap() as u16; config.health_check_port = Some(val); } "client_stats" => { let val = value.as_str().unwrap().to_ascii_lowercase(); config.client_stats = val == "yes" || val == "on"; } "fault_percentage" => { let val = value.as_i64().unwrap() as u8; config.fault_percentage = val; } unknown => { return Err(Error::InvalidConfiguration(format!( "unknown config key: {}", unknown ))); } } } Ok(config) } } impl ServerConfig for FileConfig { fn interface(&self) -> &str { self.interface.as_ref() } fn port(&self) -> u16 { self.port } fn seed(&self) -> Vec<u8> { self.seed.clone() } fn batch_size(&self) -> u8 { self.batch_size } fn status_interval(&self) -> Duration { self.status_interval } fn kms_protection(&self) -> &KmsProtection { &self.kms_protection } fn health_check_port(&self) -> Option<u16> { self.health_check_port } fn client_stats_enabled(&self) -> bool { self.client_stats } fn fault_percentage(&self) -> u8 { self.fault_percentage } }
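FileConfig::new() above parses a single-document YAML file whose top-level mapping uses the keys matched in its loop: port, interface, seed, batch_size, status_interval, kms_protection, health_check_port, client_stats and fault_percentage; per is_valid_config(), interface, port and seed must be present, while the others fall back to defaults. A minimal sketch of such a document and of the same yaml_rust entry point follows; the values are illustrative (the seed is the test seed from memory.rs, not a real secret).

use yaml_rust::YamlLoader;

fn main() {
    // Only interface, port and seed are required; the remaining keys default.
    let contents = "\
interface: 127.0.0.1
port: 8686
seed: a32049da0ffde0ded92ce10a0230d35fe615ec8461c14986baa63fe3b3bac3db
batch_size: 64
status_interval: 600
kms_protection: plaintext
";
    // Same parse-then-walk-the-hash steps FileConfig::new() performs; any key
    // outside the set above makes it return Error::InvalidConfiguration.
    let cfg = YamlLoader::load_from_str(contents).expect("malformed YAML");
    assert_eq!(cfg.len(), 1);
    for (key, value) in cfg[0].as_hash().unwrap() {
        println!("{:?} => {:?}", key, value);
    }
}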
use std::fs::File; use std::io::Read; use std::time::Duration; use data_encoding::{Encoding, HEXLOWER_PERMISSIVE}; use yaml_rust::YamlLoader; use crate::config::{DEFAULT_BATCH_SIZE, DEFAULT_STATUS_INTERVAL}; use crate::config::ServerConfig; use crate::Error; use crate::key::KmsProtection; const HEX: Encoding = HEXLOWER_PERMISSIVE; pub struct FileConfig { port: u16, interface: String, seed: Vec<u8>, batch_size: u8, status_interval: Duration, kms_protection: KmsProtection, health_check_port: Option<u16>, client_stats: bool, fault_percentage: u8, } impl FileConfig { pub fn new(config_file: &str) -> Result<Self, Error> { let mut infile = File::open(config_file) .unwrap_or_else(|_| panic!("failed to open config file '{}'", config_file)); let mut contents = String::new(); infile .read_to_string(&mut contents) .unwrap_or_else(|_| panic!("could not read config file '{}'", config_file)); let cfg = YamlLoader::load_from_str(&contents) .unwrap_or_else(|_| panic!("could not parse config file '{}'", config_file)); if cfg.len() != 1 { return Err(Error::InvalidConfiguration(format!( "Empty or malformed config file '{}'", config_file ))); } let mut config = FileConfig { port: 0, interface: "".to_string(), seed: Vec::new(), batch_size: DEFAULT_BATCH_SIZE, status_interval: DEFAULT_STATUS_INTERVAL, kms_protection: KmsProtection::Plaintext, health_check_port: None, client_stats: false, fault_percentage: 0, }; for (key, value) in cfg[0].as_hash().unwrap() { match key.as_str().unwrap() { "port" => config.port = value.as_i64().unwrap() as u16, "interface" => config.interface = value.as_str().unwrap().to_string(), "batch_size" => config.batch_size = value.as_i64().unwrap() as u8, "seed" => { let val = value.as_str().unwrap().to_string(); config.seed = HEX .decode(val.as_bytes()) .expect("seed value invalid; 'seed' must be a valid hex value"); } "status_interval" => { let val = value.as_i64().expect("status_interval value invalid"); config.status_interval = Duration::from_secs(val as u6
} impl ServerConfig for FileConfig { fn interface(&self) -> &str { self.interface.as_ref() } fn port(&self) -> u16 { self.port } fn seed(&self) -> Vec<u8> { self.seed.clone() } fn batch_size(&self) -> u8 { self.batch_size } fn status_interval(&self) -> Duration { self.status_interval } fn kms_protection(&self) -> &KmsProtection { &self.kms_protection } fn health_check_port(&self) -> Option<u16> { self.health_check_port } fn client_stats_enabled(&self) -> bool { self.client_stats } fn fault_percentage(&self) -> u8 { self.fault_percentage } }
4) } "kms_protection" => { let val = value.as_str().unwrap().parse().unwrap_or_else(|_| { panic!("invalid kms_protection value: {:?}", value) }); config.kms_protection = val } "health_check_port" => { let val = value.as_i64().unwrap() as u16; config.health_check_port = Some(val); } "client_stats" => { let val = value.as_str().unwrap().to_ascii_lowercase(); config.client_stats = val == "yes" || val == "on"; } "fault_percentage" => { let val = value.as_i64().unwrap() as u8; config.fault_percentage = val; } unknown => { return Err(Error::InvalidConfiguration(format!( "unknown config key: {}", unknown ))); } } } Ok(config) }
function_block-function_prefixed
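In this row the prefix, middle and suffix fields are a contiguous three-way split of file_code — the middle begins with the "4)" that completes the `val as u64` cast cut off at the end of the prefix — so the original file is recovered by plain concatenation. A small sketch of that reassembly, assuming the three fields are available as ordinary strings:

// Rebuilds file_code from the three fill-in-the-middle fields of a row; for this
// row, prefix + middle + suffix reproduces the src/config/file.rs source above.
fn reassemble(prefix: &str, middle: &str, suffix: &str) -> String {
    let mut code = String::with_capacity(prefix.len() + middle.len() + suffix.len());
    code.push_str(prefix);
    code.push_str(middle);
    code.push_str(suffix);
    code
}

fn main() {
    // Toy illustration of the concatenation order.
    assert_eq!(
        reassemble("fn add(a: u32, b: u32) -> u32 { a + ", "b ", "}"),
        "fn add(a: u32, b: u32) -> u32 { a + b }"
    );
}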
[ { "content": "#[allow(clippy::useless_let_if_seq)]\n\npub fn is_valid_config(cfg: &dyn ServerConfig) -> bool {\n\n let mut is_valid = true;\n\n\n\n if cfg.port() == 0 {\n\n error!(\"server port not set: {}\", cfg.port());\n\n is_valid = false;\n\n }\n\n\n\n if cfg.interface().is_empty() {\n\n error!(\"'interface' is missing\");\n\n is_valid = false;\n\n }\n\n\n\n if cfg.seed().is_empty() {\n\n error!(\"'seed' value is missing\");\n\n is_valid = false;\n\n } else if *cfg.kms_protection() == KmsProtection::Plaintext\n\n && cfg.seed().len() != SEED_LENGTH as usize\n\n {\n\n error!(\n", "file_path": "src/config/mod.rs", "rank": 0, "score": 194682.81308023568 }, { "content": "#[cfg(feature = \"gcpkms\")]\n\npub fn load_seed(config: &dyn ServerConfig) -> Result<Vec<u8>, error::Error> {\n\n match config.kms_protection() {\n\n KmsProtection::Plaintext => Ok(config.seed()),\n\n KmsProtection::GoogleKmsEnvelope(resource_id) => {\n\n info!(\"Unwrapping seed via Google KMS key '{}'\", resource_id);\n\n let kms = GcpKms::from_resource_id(resource_id)?;\n\n let seed = EnvelopeEncryption::decrypt_seed(&kms, &config.seed())?;\n\n Ok(seed)\n\n }\n\n _ => Err(error::Error::InvalidConfiguration(\n\n \"AWS KMS not supported\".to_string(),\n\n )),\n\n }\n\n}\n\n\n\n/// Load the seed value for the long-term key.\n\n///\n\n/// ## This build has KMS disabled\n\n///\n\n/// *The KMS feature is disabled in this build of Roughenough*.\n\n///\n\n/// The only supported `kms_protection` value in this build is `plaintext`. Any\n\n/// other value will cause a runtime error.\n\n///\n\n/// * `config.seed()` is used as-is and assumed to be a 32-byte hexadecimal value\n\n///\n", "file_path": "src/kms/mod.rs", "rank": 1, "score": 185469.8107495825 }, { "content": "/// Factory function to create a `ServerConfig` _trait object_ based on the value\n\n/// of the provided `arg`.\n\n///\n\n/// * `ENV` will return an [`EnvironmentConfig`](struct.EnvironmentConfig.html)\n\n/// * any other value returns a [`FileConfig`](struct.FileConfig.html)\n\n///\n\npub fn make_config(arg: &str) -> Result<Box<dyn ServerConfig>, Error> {\n\n if arg == \"ENV\" {\n\n match EnvironmentConfig::new() {\n\n Ok(cfg) => Ok(Box::new(cfg)),\n\n Err(e) => Err(e),\n\n }\n\n } else {\n\n match FileConfig::new(arg) {\n\n Ok(cfg) => Ok(Box::new(cfg)),\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Validate configuration settings. 
Returns `true` if the config is valid, `false` otherwise.\n\n///\n", "file_path": "src/config/mod.rs", "rank": 2, "score": 153096.56147268735 }, { "content": "/// Guess which protocol the request is using and extract the client's nonce from the request\n\npub fn nonce_from_request(buf: &[u8], num_bytes: usize) -> Result<(Vec<u8>, Version), Error> {\n\n if num_bytes < MIN_REQUEST_LENGTH as usize {\n\n return Err(Error::RequestTooShort);\n\n }\n\n\n\n match guess_protocol_version(buf) {\n\n Version::Classic => nonce_from_classic_request(&buf[..num_bytes]),\n\n Version::Rfc => nonce_from_rfc_request(&buf[..num_bytes]),\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 3, "score": 125477.7819926107 }, { "content": "/// Roughenough version string enriched with any compile-time optional features\n\npub fn roughenough_version() -> String {\n\n let kms_str = if cfg!(feature = \"awskms\") {\n\n \" (+AWS KMS)\"\n\n } else if cfg!(feature = \"gcpkms\") {\n\n \" (+GCP KMS)\"\n\n } else {\n\n \"\"\n\n };\n\n\n\n format!(\"{}{}\", VERSION, kms_str)\n\n}\n\n\n\n// Constants and magic numbers of the Roughtime protocol\n\n\n\n/// Minimum size (in bytes) of a client request\n\npub const MIN_REQUEST_LENGTH: u32 = 1024;\n\n\n\n/// Size (in bytes) of seeds used to derive private keys\n\npub const SEED_LENGTH: u32 = 32;\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 123610.06106890723 }, { "content": "fn nonce_from_rfc_request(buf: &[u8]) -> Result<(Vec<u8>, Version), Error> {\n\n // first 8 bytes were RFC_REQUEST_FRAME_BYTES, [0..8]\n\n let mut cur = Cursor::new(&buf[8..12]);\n\n let reported_len = cur.read_u32::<LittleEndian>()?;\n\n let actual_len = (buf.len() - 12) as u32;\n\n\n\n if reported_len != actual_len {\n\n return Err(Error::LengthMismatch(reported_len, actual_len));\n\n }\n\n\n\n let msg = RtMessage::from_bytes(&buf[12..])?;\n\n\n\n if !has_supported_version(&msg) {\n\n return Err(Error::NoCompatibleVersion);\n\n }\n\n\n\n match msg.get_field(Tag::NONC) {\n\n Some(nonce) => Ok((nonce.to_vec(), Version::Rfc)),\n\n None => Err(Error::InvalidRequest),\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 5, "score": 107767.94401031721 }, { "content": "fn nonce_from_classic_request(buf: &[u8]) -> Result<(Vec<u8>, Version), Error> {\n\n let msg = RtMessage::from_bytes(buf)?;\n\n match msg.get_field(Tag::NONC) {\n\n Some(nonce) => Ok((nonce.to_vec(), Version::Classic)),\n\n None => Err(Error::InvalidRequest),\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 6, "score": 107767.94401031721 }, { "content": "fn create_empty_message(c: &mut Criterion) {\n\n c.bench_function(\"create empty message\", |b| {\n\n b.iter(|| RtMessage::with_capacity(0))\n\n });\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 7, "score": 98541.13264673956 }, { "content": "///\n\n/// Specifies parameters needed to configure a Roughenough server.\n\n///\n\n/// Parameters labeled \"**Required**\" must always be provided and have no default value\n\n/// while those labeled \"**Optional**\" provide sane default values that can be overridden.\n\n///\n\n/// YAML Key | Environment Variable | Necessity | Description\n\n/// --- | --- | --- | ---\n\n/// `interface` | `ROUGHENOUGH_INTERFACE` | Required | IP address or interface name for listening to client requests\n\n/// `port` | `ROUGHENOUGH_PORT` | Required | UDP port to listen for requests\n\n/// `seed` | `ROUGHENOUGH_SEED` | Required | A 32-byte hexadecimal value used to generate the server's long-term key pair. 
**This is a secret value and must be un-guessable**, treat it with care. (If compiled with KMS support, length will vary)\n\n/// `batch_size` | `ROUGHENOUGH_BATCH_SIZE` | Optional | The maximum number of requests to process in one batch. All nonces in a batch are used to build a Merkle tree, the root of which is signed. Default is `64` requests per batch.\n\n/// `status_interval` | `ROUGHENOUGH_STATUS_INTERVAL` | Optional | Number of _seconds_ between each logged status update. Default is `600` seconds (10 minutes).\n\n/// `health_check_port` | `ROUGHENOUGH_HEALTH_CHECK_PORT` | Optional | If present, enable an HTTP health check responder on the provided port. **Use with caution**.\n\n/// `kms_protection` | `ROUGHENOUGH_KMS_PROTECTION` | Optional | If compiled with KMS support, the ID of the KMS key used to protect the long-term identity.\n\n/// `client_stats` | `ROUGHENOUGH_CLIENT_STATS` | Optional | A value of `on` or `yes` will enable tracking of per-client request statistics that will be output each time server status is logged. Default is `off` (disabled).\n\n/// `fault_percentage` | `ROUGHENOUGH_FAULT_PERCENTAGE` | Optional | Likelihood (as a percentage) that the server will intentionally return an invalid client response. An integer range from `0` (disabled, all responses valid) to `50` (50% of responses will be invalid). Default is `0` (disabled).\n\n///\n\n/// Implementations of this trait obtain a valid configuration from different back-end\n\n/// sources. See:\n\n/// * [FileConfig](struct.FileConfig.html) - configure via a YAML file\n\n/// * [EnvironmentConfig](struct.EnvironmentConfig.html) - configure via environment variables\n\n/// * [MemoryConfig](struct.MemoryConfig.html) - in-memory configuration for testing\n\n///\n\npub trait ServerConfig {\n\n /// [Required] IP address or interface name to listen for client requests\n\n fn interface(&self) -> &str;\n\n\n\n /// [Required] UDP port to listen for requests\n\n fn port(&self) -> u16;\n\n\n\n /// [Required] A 32-byte hexadecimal value used to generate the server's\n\n /// long-term key pair. **This is a secret value and must be un-guessable**,\n\n /// treat it with care.\n\n fn seed(&self) -> Vec<u8>;\n\n\n\n /// [Optional] The maximum number of requests to process in one batch. 
All\n\n /// nonces in a batch are used to build a Merkle tree, the root of which is signed.\n\n /// Defaults to [DEFAULT_BATCH_SIZE](constant.DEFAULT_BATCH_SIZE.html)\n\n fn batch_size(&self) -> u8;\n\n\n\n /// [Optional] Amount of time between each logged status update.\n\n /// Defaults to [DEFAULT_STATUS_INTERVAL](constant.DEFAULT_STATUS_INTERVAL.html)\n\n fn status_interval(&self) -> Duration;\n", "file_path": "src/config/mod.rs", "rank": 8, "score": 78620.99511724558 }, { "content": "fn has_supported_version(msg: &RtMessage) -> bool {\n\n let expected_ver_bytes = Version::Rfc.wire_bytes();\n\n\n\n if let Some(tag_bytes) = msg.get_field(Tag::VER) {\n\n // Iterate the list of supplied versions, looking for a match\n\n for found_ver_bytes in tag_bytes.chunks(4) {\n\n if found_ver_bytes == expected_ver_bytes {\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n false\n\n}\n", "file_path": "src/request.rs", "rank": 9, "score": 75019.56158676944 }, { "content": "fn reuse_merkle_trees(c: &mut Criterion) {\n\n let mut tree = MerkleTree::new();\n\n\n\n c.bench_function_over_inputs(\n\n \"reuse existing merkle tree\",\n\n move |b, &size| {\n\n b.iter(|| {\n\n tree.reset();\n\n for _ in 0..*size {\n\n tree.push_leaf(DATA);\n\n }\n\n black_box(tree.compute_root());\n\n })\n\n },\n\n SIZES,\n\n );\n\n}\n\n\n\ncriterion_group!(\n\n message_creation,\n", "file_path": "benches/roughenough-bench.rs", "rank": 10, "score": 73735.38110205991 }, { "content": "fn create_nested_message(c: &mut Criterion) {\n\n let pad = [0u8; 400];\n\n\n\n c.bench_function(\"create nested message\", move |b| {\n\n b.iter(|| {\n\n let mut msg1 = RtMessage::with_capacity(4);\n\n msg1.add_field(Tag::SIG, \"0987\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::NONC, \"wxyz\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::DELE, \"1234\".as_bytes()).unwrap();\n\n msg1.add_field(Tag::PATH, \"abcd\".as_bytes()).unwrap();\n\n\n\n let mut msg2 = RtMessage::with_capacity(2);\n\n msg2.add_field(Tag::PUBK, \"1234567890\".as_bytes()).unwrap();\n\n msg2.add_field(Tag::PAD_CLASSIC, pad.as_ref()).unwrap();\n\n })\n\n });\n\n}\n\n\n\nstatic SIZES: &[u8] = &[1, 3, 9, 17, 200];\n\nstatic DATA: &[u8] = &[1u8; 64];\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 11, "score": 73735.38110205991 }, { "content": "/// Inspect the message in `buf` and guess which Roughtime protocol it corresponds to.\n\nfn guess_protocol_version(buf: &[u8]) -> Version {\n\n if &buf[0..8] == RFC_REQUEST_FRAME_BYTES {\n\n Version::Rfc\n\n } else {\n\n Version::Classic\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 12, "score": 73140.30968053004 }, { "content": "fn create_two_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create two field message\", |b| {\n\n b.iter(|| {\n\n let mut msg = RtMessage::with_capacity(2);\n\n msg.add_field(Tag::NONC, \"1234\".as_bytes()).unwrap();\n\n msg.add_field(Tag::PAD_CLASSIC, \"abcd\".as_bytes()).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 13, "score": 71750.31423057211 }, { "content": "fn create_single_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create single field message\", |b| {\n\n b.iter(|| {\n\n let mut msg = RtMessage::with_capacity(1);\n\n msg.add_field(Tag::NONC, \"1234\".as_bytes()).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 14, "score": 71750.31423057211 }, { "content": "fn create_new_merkle_tree(c: &mut Criterion) {\n\n c.bench_function_over_inputs(\n\n \"create new merkle trees\",\n\n move 
|b, &size| {\n\n b.iter(|| {\n\n let mut tree = MerkleTree::new();\n\n for _ in 0..*size {\n\n tree.push_leaf(DATA);\n\n }\n\n black_box(tree.compute_root())\n\n })\n\n },\n\n SIZES,\n\n );\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 15, "score": 71750.31423057211 }, { "content": "fn create_four_field_message(c: &mut Criterion) {\n\n c.bench_function(\"create four field message\", |b| {\n\n b.iter(|| {\n\n let mut msg = RtMessage::with_capacity(4);\n\n msg.add_field(Tag::SIG, \"0987\".as_bytes()).unwrap();\n\n msg.add_field(Tag::NONC, \"wxyz\".as_bytes()).unwrap();\n\n msg.add_field(Tag::DELE, \"1234\".as_bytes()).unwrap();\n\n msg.add_field(Tag::PATH, \"abcd\".as_bytes()).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/roughenough-bench.rs", "rank": 16, "score": 71750.31423057211 }, { "content": "// Convenience function to create zero-filled Vec of given size\n\nfn vec_zero_filled(len: usize) -> Vec<u8> {\n\n (0..len).map(|_| 0).collect()\n\n}\n\n\n\n/// Envelope encryption of the long-term key seed value.\n\n///\n\n/// The seed is encrypted using AES-GCM-256 with:\n\n///\n\n/// * 32 byte (256 bit) random key\n\n/// * 12 byte (96 bit) random nonce\n\n/// * 16 byte (128 bit) authentication tag\n\n///\n\n/// Randomness obtained from\n\n/// [`ring::rand::SecureRandom`](https://briansmith.org/rustdoc/ring/rand/trait.SecureRandom.html).\n\n///\n\n/// The key used to encrypt the seed is wrapped (encrypted) using a\n\n/// [`KmsProvider`](trait.KmsProvider.html) implementation.\n\n///\n\npub struct EnvelopeEncryption;\n\n\n", "file_path": "src/kms/envelope.rs", "rank": 17, "score": 67764.708814087 }, { "content": "type Data = Vec<u8>;\n", "file_path": "src/merkle.rs", "rank": 26, "score": 43509.088653452985 }, { "content": "///\n\n/// A key management system that wraps/unwraps a data encryption key (DEK).\n\n///\n\npub trait KmsProvider {\n\n /// Make a blocking request to encrypt (wrap) the provided plaintext data encryption key.\n\n fn encrypt_dek(&self, plaintext_dek: &PlaintextDEK) -> Result<EncryptedDEK, KmsError>;\n\n\n\n /// Make a blocking request to decrypt (unwrap) a previously encrypted data encryption key.\n\n fn decrypt_dek(&self, encrypted_dek: &EncryptedDEK) -> Result<PlaintextDEK, KmsError>;\n\n}\n\n\n\n#[cfg(feature = \"awskms\")]\n\nmod awskms;\n\n\n\n/// Load the seed value for the long-term key.\n\n///\n\n/// Loading behavior depends on the value of `config.kms_protection()`:\n\n///\n\n/// * If `config.kms_protection() == Plaintext` then the value returned from `config.seed()`\n\n/// is used as-is and assumed to be a 32-byte hexadecimal value.\n\n///\n\n/// * Otherwise `config.seed()` is assumed to be an encrypted opaque blob generated from\n\n/// a prior `EnvelopeEncryption::encrypt_seed` call. 
The value of `config.kms_protection()`\n\n/// is parsed as a KMS key id and `EnvelopeEncryption::decrypt_seed` is called to obtain\n\n/// the plaintext seed value.\n\n///\n", "file_path": "src/kms/mod.rs", "rank": 27, "score": 43080.6357556672 }, { "content": "///\n\n/// Implementations of this trait record client activity\n\n///\n\npub trait ServerStats {\n\n fn add_rfc_request(&mut self, addr: &IpAddr);\n\n\n\n fn add_classic_request(&mut self, addr: &IpAddr);\n\n\n\n fn add_invalid_request(&mut self, addr: &IpAddr);\n\n\n\n fn add_health_check(&mut self, addr: &IpAddr);\n\n\n\n fn add_rfc_response(&mut self, addr: &IpAddr, bytes_sent: usize);\n\n\n\n fn add_classic_response(&mut self, addr: &IpAddr, bytes_sent: usize);\n\n\n\n fn total_valid_requests(&self) -> u64;\n\n\n\n fn num_rfc_requests(&self) -> u64;\n\n\n\n fn num_classic_requests(&self) -> u64;\n\n\n\n fn total_invalid_requests(&self) -> u64;\n", "file_path": "src/stats/mod.rs", "rank": 28, "score": 43076.40420006202 }, { "content": "#[derive(Debug)]\n\npub enum Error {\n\n /// The associated tag was added to an `RtMessage` in non-increasing order.\n\n TagNotStrictlyIncreasing(Tag),\n\n\n\n /// The associated byte sequence does not correspond to a valid Roughtime tag.\n\n InvalidTag(Box<[u8]>),\n\n\n\n /// Invalid number of tags specified\n\n InvalidNumTags(u32),\n\n\n\n /// Tag value length exceeds length of source bytes\n\n InvalidValueLength(Tag, u32),\n\n\n\n /// Encoding failed. The associated `std::io::Error` should provide more information.\n\n EncodingFailure(std::io::Error),\n\n\n\n /// Request was less than 1024 bytes\n\n RequestTooShort,\n\n\n", "file_path": "src/error.rs", "rank": 29, "score": 32936.19374808941 }, { "content": "\n\n /// Request did not provide versions compatible with this implementation\n\n NoCompatibleVersion,\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(err: std::io::Error) -> Self {\n\n Error::EncodingFailure(err)\n\n }\n\n}\n\n\n\nimpl From<KmsError> for Error {\n\n fn from(err: KmsError) -> Self {\n\n match err {\n\n KmsError::OperationFailed(m) => {\n\n Error::InvalidConfiguration(format!(\"KMS operation failed: {}\", m))\n\n }\n\n KmsError::InvalidConfiguration(m) => {\n\n Error::InvalidConfiguration(format!(\"invalid KMS config: {}\", m))\n\n }\n", "file_path": "src/error.rs", "rank": 30, "score": 32935.36245892092 }, { "content": " KmsError::InvalidData(m) => {\n\n Error::InvalidConfiguration(format!(\"invalid KMS data: {}\", m))\n\n }\n\n KmsError::InvalidKey(m) => {\n\n Error::InvalidConfiguration(format!(\"invalid KMS key: {}\", m))\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 31, "score": 32928.84961877462 }, { "content": " /// Offset was not 32-bit aligned\n\n InvalidAlignment(u32),\n\n\n\n /// Offset is outside of valid message range\n\n InvalidOffsetValue(u32),\n\n\n\n /// Could not convert bytes to message because bytes were too short\n\n MessageTooShort,\n\n\n\n /// Otherwise invalid request\n\n InvalidRequest,\n\n\n\n /// Otherwise invalid response\n\n InvalidResponse,\n\n\n\n /// Runtime configuration is invalid for the reason provided\n\n InvalidConfiguration(String),\n\n\n\n /// The message length reported by the frame length != the actual message payload length\n\n LengthMismatch(u32, u32),\n", "file_path": "src/error.rs", "rank": 32, "score": 32927.75822215042 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance 
with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std;\n\n\n\nuse crate::kms::KmsError;\n\nuse crate::tag::Tag;\n\n\n\n/// Error types generated by this implementation\n", "file_path": "src/error.rs", "rank": 33, "score": 32922.66907786443 }, { "content": "use std::str::FromStr;\n\n\n\npub use self::longterm::LongTermKey;\n\npub use self::online::OnlineKey;\n\n\n\nmod longterm;\n\nmod online;\n\n\n\n/// Methods for protecting the server's long-term identity\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Clone)]\n\npub enum KmsProtection {\n\n /// No protection, seed is in plaintext\n\n Plaintext,\n\n\n\n /// Envelope encryption of the seed using AWS Key Management Service\n\n AwsKmsEnvelope(String),\n\n\n\n /// Envelope encryption of the seed using Google Cloud Key Management Service\n\n GoogleKmsEnvelope(String),\n\n}\n", "file_path": "src/key/mod.rs", "rank": 34, "score": 31115.709130761355 }, { "content": "\n\nimpl Display for KmsProtection {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), std::fmt::Error> {\n\n match self {\n\n KmsProtection::Plaintext => write!(f, \"Plaintext\"),\n\n KmsProtection::AwsKmsEnvelope(key_id) => write!(f, \"AwsKms({})\", key_id),\n\n KmsProtection::GoogleKmsEnvelope(key_id) => write!(f, \"GoogleKms({})\", key_id),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for KmsProtection {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<KmsProtection, String> {\n\n match s {\n\n \"plaintext\" => Ok(KmsProtection::Plaintext),\n\n s if s.starts_with(\"arn:\") => Ok(KmsProtection::AwsKmsEnvelope(s.to_string())),\n\n s if s.starts_with(\"projects/\") => Ok(KmsProtection::GoogleKmsEnvelope(s.to_string())),\n\n s => Err(format!(\"unknown KmsProtection '{}'\", s)),\n", "file_path": "src/key/mod.rs", "rank": 35, "score": 31115.04862782024 }, { "content": "\n\nuse crate::CERTIFICATE_CONTEXT;\n\nuse crate::key::OnlineKey;\n\nuse crate::message::RtMessage;\n\nuse crate::sign::Signer;\n\nuse crate::tag::Tag;\n\n\n\n///\n\n/// Represents the server's long-term identity.\n\n///\n\npub struct LongTermKey {\n\n signer: Signer,\n\n}\n\n\n\nimpl LongTermKey {\n\n pub fn new(seed: &[u8]) -> Self {\n\n LongTermKey {\n\n signer: Signer::from_seed(seed),\n\n }\n\n }\n", "file_path": "src/key/longterm.rs", "rank": 36, "score": 31114.98986405518 }, { "content": "\n\n /// Create a CERT message with a DELE containing the provided online key\n\n /// and a SIG of the DELE value signed by the long-term key\n\n pub fn make_cert(&mut self, online_key: &OnlineKey) -> RtMessage {\n\n let dele_bytes = online_key.make_dele().encode().unwrap();\n\n\n\n self.signer.update(CERTIFICATE_CONTEXT.as_bytes());\n\n self.signer.update(&dele_bytes);\n\n\n\n let dele_signature = self.signer.sign();\n\n\n\n let mut cert_msg = RtMessage::with_capacity(2);\n\n cert_msg.add_field(Tag::SIG, &dele_signature).unwrap();\n\n cert_msg.add_field(Tag::DELE, &dele_bytes).unwrap();\n\n\n\n cert_msg\n\n }\n\n\n\n /// Return the public key for the provided seed\n\n pub fn public_key(&self) -> &[u8] {\n", "file_path": "src/key/longterm.rs", "rank": 37, "score": 31114.707423572385 }, { "content": " 
}\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::str::FromStr;\n\n\n\n use crate::key::KmsProtection;\n\n\n\n #[test]\n\n fn convert_from_string() {\n\n let arn =\n\n \"arn:aws:kms:some-aws-region:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\";\n\n let resource_id =\n\n \"projects/key-project/locations/global/keyRings/key-ring/cryptoKeys/my-key\";\n\n\n\n match KmsProtection::from_str(\"plaintext\") {\n\n Ok(KmsProtection::Plaintext) => (),\n\n e => panic!(\"unexpected result {:?}\", e),\n", "file_path": "src/key/mod.rs", "rank": 38, "score": 31113.199439890857 }, { "content": " /// signed by this online key.\n\n pub fn make_srep(&mut self, now: SystemTime, merkle_root: &[u8]) -> RtMessage {\n\n let mut radi = [0; 4];\n\n let mut midp = [0; 8];\n\n\n\n // one second (in microseconds)\n\n (&mut radi as &mut [u8])\n\n .write_u32::<LittleEndian>(1_000_000)\n\n .unwrap();\n\n\n\n // current epoch time in microseconds\n\n let midp_time = {\n\n let d = now\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"duration since epoch\");\n\n let secs = d.as_secs() * 1_000_000;\n\n let nsecs = (d.subsec_nanos() as u64) / 1_000;\n\n\n\n secs + nsecs\n\n };\n", "file_path": "src/key/online.rs", "rank": 39, "score": 31109.93983405909 }, { "content": "use crate::message::RtMessage;\n\nuse crate::sign::Signer;\n\nuse crate::SIGNED_RESPONSE_CONTEXT;\n\nuse crate::tag::Tag;\n\n\n\n///\n\n/// Represents the delegated Roughtime ephemeral online key.\n\n///\n\npub struct OnlineKey {\n\n signer: Signer,\n\n}\n\n\n\nimpl Default for OnlineKey {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl OnlineKey {\n\n pub fn new() -> Self {\n", "file_path": "src/key/online.rs", "rank": 40, "score": 31109.51875428371 }, { "content": " OnlineKey {\n\n signer: Signer::new(),\n\n }\n\n }\n\n\n\n /// Create a DELE message containing the public key of this online key\n\n pub fn make_dele(&self) -> RtMessage {\n\n let zeros = [0u8; 8];\n\n let max = [0xff; 8];\n\n let pub_key_bytes = self.signer.public_key_bytes();\n\n\n\n let mut dele_msg = RtMessage::with_capacity(3);\n\n dele_msg.add_field(Tag::PUBK, pub_key_bytes).unwrap();\n\n dele_msg.add_field(Tag::MINT, &zeros).unwrap();\n\n dele_msg.add_field(Tag::MAXT, &max).unwrap();\n\n\n\n dele_msg\n\n }\n\n\n\n /// Create an SREP response containing the provided time and Merkle root,\n", "file_path": "src/key/online.rs", "rank": 41, "score": 31106.289695223175 }, { "content": " self.signer.public_key_bytes()\n\n }\n\n}\n\n\n\nimpl fmt::Display for LongTermKey {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.signer)\n\n }\n\n}\n", "file_path": "src/key/longterm.rs", "rank": 42, "score": 31104.9903366342 }, { "content": " };\n\n\n\n let mut result = RtMessage::with_capacity(2);\n\n result.add_field(Tag::SIG, &srep_signature).unwrap();\n\n result.add_field(Tag::SREP, &srep_bytes).unwrap();\n\n\n\n result\n\n }\n\n}\n\n\n\nimpl fmt::Display for OnlineKey {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.signer)\n\n }\n\n}\n", "file_path": "src/key/online.rs", "rank": 43, "score": 31103.50270532569 }, { "content": "\n\n (&mut midp as &mut [u8])\n\n .write_u64::<LittleEndian>(midp_time)\n\n .unwrap();\n\n\n\n // Signed response SREP\n\n let srep_bytes = {\n\n let mut srep_msg = RtMessage::with_capacity(3);\n\n srep_msg.add_field(Tag::RADI, &radi).unwrap();\n\n srep_msg.add_field(Tag::MIDP, &midp).unwrap();\n\n srep_msg.add_field(Tag::ROOT, merkle_root).unwrap();\n\n\n\n 
srep_msg.encode().unwrap()\n\n };\n\n\n\n // signature on SREP\n\n let srep_signature = {\n\n self.signer.update(SIGNED_RESPONSE_CONTEXT.as_bytes());\n\n self.signer.update(&srep_bytes);\n\n self.signer.sign()\n", "file_path": "src/key/online.rs", "rank": 44, "score": 31102.170567578767 }, { "content": " };\n\n match KmsProtection::from_str(arn) {\n\n Ok(KmsProtection::AwsKmsEnvelope(msg)) => assert_eq!(msg, arn),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n }\n\n match KmsProtection::from_str(resource_id) {\n\n Ok(KmsProtection::GoogleKmsEnvelope(msg)) => assert_eq!(msg, resource_id),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n }\n\n match KmsProtection::from_str(\"frobble\") {\n\n Err(msg) => assert!(msg.contains(\"unknown KmsProtection\")),\n\n e => panic!(\"unexpected result {:?}\", e),\n\n }\n\n }\n\n}\n", "file_path": "src/key/mod.rs", "rank": 45, "score": 31101.96193528524 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//!\n\n//! Representations and management of Roughtime's online and long-term Ed25519 keys\n\n//!\n\n\n\nuse std::fmt::Display;\n\nuse std::fmt::Formatter;\n", "file_path": "src/key/mod.rs", "rank": 46, "score": 31099.938526968625 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::fmt;\n\nuse std::fmt::Formatter;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\n\n\nuse byteorder::{LittleEndian, WriteBytesExt};\n\n\n", "file_path": "src/key/online.rs", "rank": 47, "score": 31098.209995928122 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//!\n\n//! 
Represents the server's long-term identity.\n\n//!\n\n\n\nuse std::fmt;\n\nuse std::fmt::Formatter;\n", "file_path": "src/key/longterm.rs", "rank": 48, "score": 31097.81104828163 }, { "content": "use crate::key::KmsProtection;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\n/// A purely in-memory Roughenough config for testing purposes.\n\n///\n\n/// This is useful for testing or fuzzing a server without the need to create additional files.\n\npub struct MemoryConfig {\n\n pub port: u16,\n\n pub interface: String,\n\n pub seed: Vec<u8>,\n\n pub batch_size: u8,\n\n pub status_interval: Duration,\n\n pub kms_protection: KmsProtection,\n\n pub health_check_port: Option<u16>,\n\n pub client_stats: bool,\n\n pub fault_percentage: u8,\n\n}\n\n\n\nimpl MemoryConfig {\n", "file_path": "src/config/memory.rs", "rank": 49, "score": 30656.70264464725 }, { "content": "/// fault_percentage | `ROUGHENOUGH_FAULT_PERCENTAGE`\n\n///\n\npub struct EnvironmentConfig {\n\n port: u16,\n\n interface: String,\n\n seed: Vec<u8>,\n\n batch_size: u8,\n\n status_interval: Duration,\n\n kms_protection: KmsProtection,\n\n health_check_port: Option<u16>,\n\n client_stats: bool,\n\n fault_percentage: u8,\n\n}\n\n\n\nconst ROUGHENOUGH_PORT: &str = \"ROUGHENOUGH_PORT\";\n\nconst ROUGHENOUGH_INTERFACE: &str = \"ROUGHENOUGH_INTERFACE\";\n\nconst ROUGHENOUGH_SEED: &str = \"ROUGHENOUGH_SEED\";\n\nconst ROUGHENOUGH_BATCH_SIZE: &str = \"ROUGHENOUGH_BATCH_SIZE\";\n\nconst ROUGHENOUGH_STATUS_INTERVAL: &str = \"ROUGHENOUGH_STATUS_INTERVAL\";\n\nconst ROUGHENOUGH_KMS_PROTECTION: &str = \"ROUGHENOUGH_KMS_PROTECTION\";\n", "file_path": "src/config/environment.rs", "rank": 50, "score": 30649.99923109901 }, { "content": " pub fn new(port: u16) -> Self {\n\n let seed = b\"a32049da0ffde0ded92ce10a0230d35fe615ec8461c14986baa63fe3b3bac3db\";\n\n MemoryConfig {\n\n port,\n\n interface: \"127.0.0.1\".to_string(),\n\n seed: HEX.decode(seed).unwrap(),\n\n batch_size: DEFAULT_BATCH_SIZE,\n\n status_interval: DEFAULT_STATUS_INTERVAL,\n\n kms_protection: KmsProtection::Plaintext,\n\n health_check_port: None,\n\n client_stats: false,\n\n fault_percentage: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl ServerConfig for MemoryConfig {\n\n fn interface(&self) -> &str {\n\n self.interface.as_ref()\n\n }\n", "file_path": "src/config/memory.rs", "rank": 51, "score": 30647.450992748956 }, { "content": "const ROUGHENOUGH_HEALTH_CHECK_PORT: &str = \"ROUGHENOUGH_HEALTH_CHECK_PORT\";\n\nconst ROUGHENOUGH_CLIENT_STATS: &str = \"ROUGHENOUGH_CLIENT_STATS\";\n\nconst ROUGHENOUGH_FAULT_PERCENTAGE: &str = \"ROUGHENOUGH_FAULT_PERCENTAGE\";\n\n\n\nimpl EnvironmentConfig {\n\n pub fn new() -> Result<Self, Error> {\n\n let mut cfg = EnvironmentConfig {\n\n port: 0,\n\n interface: \"\".to_string(),\n\n seed: Vec::new(),\n\n batch_size: DEFAULT_BATCH_SIZE,\n\n status_interval: DEFAULT_STATUS_INTERVAL,\n\n kms_protection: KmsProtection::Plaintext,\n\n health_check_port: None,\n\n client_stats: false,\n\n fault_percentage: 0,\n\n };\n\n\n\n if let Ok(port) = env::var(ROUGHENOUGH_PORT) {\n\n cfg.port = port\n", "file_path": "src/config/environment.rs", "rank": 52, "score": 30646.672211807905 }, { "content": "pub const DEFAULT_BATCH_SIZE: u8 = 64;\n\n\n\n/// Amount of time between each logged status update.\n\npub const DEFAULT_STATUS_INTERVAL: Duration = Duration::from_secs(600);\n\n\n\n///\n\n/// Specifies parameters needed to configure a Roughenough server.\n\n///\n\n/// Parameters labeled \"**Required**\" must always be provided and have no default value\n\n/// while those 
labeled \"**Optional**\" provide sane default values that can be overridden.\n\n///\n\n/// YAML Key | Environment Variable | Necessity | Description\n\n/// --- | --- | --- | ---\n\n/// `interface` | `ROUGHENOUGH_INTERFACE` | Required | IP address or interface name for listening to client requests\n\n/// `port` | `ROUGHENOUGH_PORT` | Required | UDP port to listen for requests\n\n/// `seed` | `ROUGHENOUGH_SEED` | Required | A 32-byte hexadecimal value used to generate the server's long-term key pair. **This is a secret value and must be un-guessable**, treat it with care. (If compiled with KMS support, length will vary)\n\n/// `batch_size` | `ROUGHENOUGH_BATCH_SIZE` | Optional | The maximum number of requests to process in one batch. All nonces in a batch are used to build a Merkle tree, the root of which is signed. Default is `64` requests per batch.\n\n/// `status_interval` | `ROUGHENOUGH_STATUS_INTERVAL` | Optional | Number of _seconds_ between each logged status update. Default is `600` seconds (10 minutes).\n\n/// `health_check_port` | `ROUGHENOUGH_HEALTH_CHECK_PORT` | Optional | If present, enable an HTTP health check responder on the provided port. **Use with caution**.\n\n/// `kms_protection` | `ROUGHENOUGH_KMS_PROTECTION` | Optional | If compiled with KMS support, the ID of the KMS key used to protect the long-term identity.\n\n/// `client_stats` | `ROUGHENOUGH_CLIENT_STATS` | Optional | A value of `on` or `yes` will enable tracking of per-client request statistics that will be output each time server status is logged. Default is `off` (disabled).\n\n/// `fault_percentage` | `ROUGHENOUGH_FAULT_PERCENTAGE` | Optional | Likelihood (as a percentage) that the server will intentionally return an invalid client response. An integer range from `0` (disabled, all responses valid) to `50` (50% of responses will be invalid). Default is `0` (disabled).\n\n///\n\n/// Implementations of this trait obtain a valid configuration from different back-end\n\n/// sources. 
See:\n\n/// * [FileConfig](struct.FileConfig.html) - configure via a YAML file\n\n/// * [EnvironmentConfig](struct.EnvironmentConfig.html) - configure via environment variables\n\n/// * [MemoryConfig](struct.MemoryConfig.html) - in-memory configuration for testing\n\n///\n", "file_path": "src/config/mod.rs", "rank": 53, "score": 30645.103292215797 }, { "content": " .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid port: {}\", port));\n\n };\n\n\n\n if let Ok(interface) = env::var(ROUGHENOUGH_INTERFACE) {\n\n cfg.interface = interface.to_string();\n\n };\n\n\n\n if let Ok(seed) = env::var(ROUGHENOUGH_SEED) {\n\n cfg.seed = HEX\n\n .decode(seed.as_bytes())\n\n .expect(\"invalid seed value; 'seed' should be a hex value\");\n\n };\n\n\n\n if let Ok(batch_size) = env::var(ROUGHENOUGH_BATCH_SIZE) {\n\n cfg.batch_size = batch_size\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid batch_size: {}\", batch_size));\n\n };\n\n\n", "file_path": "src/config/environment.rs", "rank": 54, "score": 30644.767240467132 }, { "content": " \"plaintext seed value must be 32 characters long, found {}\",\n\n cfg.seed().len()\n\n );\n\n is_valid = false;\n\n } else if *cfg.kms_protection() != KmsProtection::Plaintext\n\n && cfg.seed().len() <= SEED_LENGTH as usize\n\n {\n\n error!(\"KMS use enabled but seed value is too short to be an encrypted blob\");\n\n is_valid = false;\n\n }\n\n\n\n if cfg.batch_size() < 1 || cfg.batch_size() > 64 {\n\n error!(\n\n \"batch_size {} is invalid; valid range 1-64\",\n\n cfg.batch_size()\n\n );\n\n is_valid = false;\n\n }\n\n\n\n if cfg.fault_percentage() > 50 {\n", "file_path": "src/config/mod.rs", "rank": 55, "score": 30644.1365401432 }, { "content": "use crate::config::ServerConfig;\n\nuse crate::Error;\n\nuse crate::key::KmsProtection;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\n///\n\n/// Obtain a Roughenough server configuration ([ServerConfig](trait.ServerConfig.html))\n\n/// from environment variables.\n\n///\n\n/// Config parameter | Environment Variable\n\n/// ---------------- | --------------------\n\n/// port | `ROUGHENOUGH_PORT`\n\n/// interface | `ROUGHENOUGH_INTERFACE`\n\n/// seed | `ROUGHENOUGH_SEED`\n\n/// batch_size | `ROUGHENOUGH_BATCH_SIZE`\n\n/// status_interval | `ROUGHENOUGH_STATUS_INTERVAL`\n\n/// kms_protection | `ROUGHENOUGH_KMS_PROTECTION`\n\n/// health_check_port | `ROUGHENOUGH_HEALTH_CHECK_PORT`\n\n/// client_stats | `ROUGHENOUGH_CLIENT_STATS`\n", "file_path": "src/config/environment.rs", "rank": 56, "score": 30643.16117766114 }, { "content": "//! Implementations of `ServerConfig` obtain configurations from different back-end sources\n\n//! 
such as files or environment variables.\n\n//!\n\n\n\nuse std::net::SocketAddr;\n\nuse std::time::Duration;\n\n\n\nuse crate::Error;\n\nuse crate::key::KmsProtection;\n\nuse crate::SEED_LENGTH;\n\n\n\npub use self::environment::EnvironmentConfig;\n\npub use self::file::FileConfig;\n\npub use self::memory::MemoryConfig;\n\n\n\nmod environment;\n\nmod file;\n\nmod memory;\n\n\n\n/// Maximum number of requests to process in one batch and include the the Merkle tree.\n", "file_path": "src/config/mod.rs", "rank": 57, "score": 30640.882460810335 }, { "content": " if let Ok(status_interval) = env::var(ROUGHENOUGH_STATUS_INTERVAL) {\n\n let val: u16 = status_interval\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid status_interval: {}\", status_interval));\n\n\n\n cfg.status_interval = Duration::from_secs(u64::from(val));\n\n };\n\n\n\n if let Ok(kms_protection) = env::var(ROUGHENOUGH_KMS_PROTECTION) {\n\n cfg.kms_protection = kms_protection\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid kms_protection value: {}\", kms_protection));\n\n }\n\n\n\n if let Ok(health_check_port) = env::var(ROUGHENOUGH_HEALTH_CHECK_PORT) {\n\n let val: u16 = health_check_port\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid health_check_port: {}\", health_check_port));\n\n\n\n cfg.health_check_port = Some(val);\n", "file_path": "src/config/environment.rs", "rank": 58, "score": 30639.673300847033 }, { "content": " error!(\n\n \"fault_percentage {} is invalid; valid range 0-50\",\n\n cfg.fault_percentage()\n\n );\n\n is_valid = false;\n\n }\n\n\n\n if is_valid {\n\n if let Err(e) = cfg.udp_socket_addr() {\n\n error!(\n\n \"failed to create UDP socket {}:{} {:?}\",\n\n cfg.interface(),\n\n cfg.port(),\n\n e\n\n );\n\n is_valid = false;\n\n }\n\n }\n\n\n\n is_valid\n\n}\n", "file_path": "src/config/mod.rs", "rank": 59, "score": 30636.43370743395 }, { "content": " self.interface.as_ref()\n\n }\n\n\n\n fn port(&self) -> u16 {\n\n self.port\n\n }\n\n\n\n fn seed(&self) -> Vec<u8> {\n\n self.seed.clone()\n\n }\n\n\n\n fn batch_size(&self) -> u8 {\n\n self.batch_size\n\n }\n\n\n\n fn status_interval(&self) -> Duration {\n\n self.status_interval\n\n }\n\n\n\n fn kms_protection(&self) -> &KmsProtection {\n", "file_path": "src/config/environment.rs", "rank": 60, "score": 30635.76113162429 }, { "content": "\n\n fn port(&self) -> u16 {\n\n self.port\n\n }\n\n\n\n fn seed(&self) -> Vec<u8> {\n\n self.seed.clone()\n\n }\n\n\n\n fn batch_size(&self) -> u8 {\n\n self.batch_size\n\n }\n\n\n\n fn status_interval(&self) -> Duration {\n\n self.status_interval\n\n }\n\n\n\n fn kms_protection(&self) -> &KmsProtection {\n\n &self.kms_protection\n\n }\n", "file_path": "src/config/memory.rs", "rank": 61, "score": 30635.62637087567 }, { "content": "\n\n /// [Optional] Method used to protect the seed for the server's long-term key pair.\n\n /// Defaults to \"`plaintext`\" (no encryption, seed is in the clear).\n\n fn kms_protection(&self) -> &KmsProtection;\n\n\n\n /// [Optional] If present, the TCP port to respond to Google-style HTTP \"legacy health check\".\n\n /// This is a *very* simplistic check, it emits a fixed HTTP response to all TCP connections.\n\n /// https://cloud.google.com/load-balancing/docs/health-checks#legacy-health-checks\n\n fn health_check_port(&self) -> Option<u16>;\n\n\n\n /// [Optional] A value of `on` or `yes` will enable tracking of per-client request statistics\n\n /// that will be output each time server status is logged. 
Default is `off` (disabled).\n\n fn client_stats_enabled(&self) -> bool;\n\n\n\n /// [Optional] Likelihood (as a percentage) that the server will intentionally return an\n\n /// invalid client response. An integer range from `0` (disabled, all responses valid) to `50`\n\n /// (~50% of responses will be invalid). Default is `0` (disabled).\n\n ///\n\n /// See the [Roughtime spec](https://roughtime.googlesource.com/roughtime/+/HEAD/ECOSYSTEM.md#maintaining-a-healthy-software-ecosystem)\n\n /// for background and rationale.\n", "file_path": "src/config/mod.rs", "rank": 62, "score": 30635.191663980448 }, { "content": " };\n\n\n\n if let Ok(mut client_stats) = env::var(ROUGHENOUGH_CLIENT_STATS) {\n\n client_stats.make_ascii_lowercase();\n\n\n\n cfg.client_stats = client_stats == \"yes\" || client_stats == \"on\";\n\n }\n\n\n\n if let Ok(fault_percentage) = env::var(ROUGHENOUGH_FAULT_PERCENTAGE) {\n\n cfg.fault_percentage = fault_percentage\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid fault_percentage: {}\", fault_percentage));\n\n };\n\n\n\n Ok(cfg)\n\n }\n\n}\n\n\n\nimpl ServerConfig for EnvironmentConfig {\n\n fn interface(&self) -> &str {\n", "file_path": "src/config/environment.rs", "rank": 63, "score": 30634.821757672413 }, { "content": " fn fault_percentage(&self) -> u8;\n\n\n\n /// Convenience function to create a `SocketAddr` from the provided `interface` and `port`\n\n fn udp_socket_addr(&self) -> Result<SocketAddr, Error> {\n\n let addr = format!(\"{}:{}\", self.interface(), self.port());\n\n match addr.parse() {\n\n Ok(v) => Ok(v),\n\n Err(_) => Err(Error::InvalidConfiguration(addr)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/config/mod.rs", "rank": 64, "score": 30633.682000231485 }, { "content": "\n\n fn health_check_port(&self) -> Option<u16> {\n\n self.health_check_port\n\n }\n\n\n\n fn client_stats_enabled(&self) -> bool {\n\n self.client_stats\n\n }\n\n\n\n fn fault_percentage(&self) -> u8 {\n\n self.fault_percentage\n\n }\n\n}\n", "file_path": "src/config/memory.rs", "rank": 65, "score": 30631.503869672586 }, { "content": " &self.kms_protection\n\n }\n\n\n\n fn health_check_port(&self) -> Option<u16> {\n\n self.health_check_port\n\n }\n\n\n\n fn client_stats_enabled(&self) -> bool {\n\n self.client_stats\n\n }\n\n\n\n fn fault_percentage(&self) -> u8 {\n\n self.fault_percentage\n\n }\n\n}\n", "file_path": "src/config/environment.rs", "rank": 66, "score": 30631.253844756226 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::time::Duration;\n\n\n\nuse data_encoding::{Encoding, HEXLOWER_PERMISSIVE};\n\n\n\nuse crate::config::{DEFAULT_BATCH_SIZE, DEFAULT_STATUS_INTERVAL};\n\nuse crate::config::ServerConfig;\n", "file_path": "src/config/memory.rs", "rank": 67, "score": 30630.051348474597 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the 
License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::env;\n\nuse std::time::Duration;\n\n\n\nuse data_encoding::{Encoding, HEXLOWER_PERMISSIVE};\n\n\n\nuse crate::config::{DEFAULT_BATCH_SIZE, DEFAULT_STATUS_INTERVAL};\n", "file_path": "src/config/environment.rs", "rank": 68, "score": 30628.725325479776 }, { "content": "// Copyright 2017-2021 int08h LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//!\n\n//! Ways to configure the Roughenough server.\n\n//!\n\n//! The [ServerConfig](trait.ServerConfig.html) trait specifies the required and optional\n\n//! parameters available for configuring a Roughenoguh server instance.\n\n//!\n", "file_path": "src/config/mod.rs", "rank": 69, "score": 30622.539119875353 }, { "content": "use crate::config::ServerConfig;\n\nuse crate::error;\n\nuse crate::key::KmsProtection;\n\n#[cfg(feature = \"awskms\")]\n\npub use crate::kms::awskms::inner::AwsKms;\n\n#[cfg(feature = \"gcpkms\")]\n\npub use crate::kms::gcpkms::inner::GcpKms;\n\n\n\npub use self::envelope::EnvelopeEncryption;\n\n\n\nmod envelope;\n\n\n\n/// Errors generated by KMS operations\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Hash, Clone)]\n\npub enum KmsError {\n\n OperationFailed(String),\n\n InvalidConfiguration(String),\n\n InvalidData(String),\n\n InvalidKey(String),\n\n}\n", "file_path": "src/kms/mod.rs", "rank": 70, "score": 28.211599714336366 }, { "content": "\n\nuse byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};\n\nuse data_encoding::{Encoding, HEXLOWER_PERMISSIVE};\n\n\n\nuse crate::error::Error;\n\nuse crate::RFC_REQUEST_FRAME_BYTES;\n\nuse crate::tag::Tag;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\n///\n\n/// A Roughtime protocol message; a map of u32 tags to arbitrary byte-strings.\n\n///\n\n#[derive(Debug, Clone)]\n\npub struct RtMessage {\n\n tags: Vec<Tag>,\n\n values: Vec<Vec<u8>>,\n\n}\n\n\n\nimpl RtMessage {\n", "file_path": "src/message.rs", "rank": 71, "score": 27.503766304771144 }, { "content": " // Invoke KMS to decrypt the DEK\n\n let dek = kms.decrypt_dek(&encrypted_dek)?;\n\n\n\n // Decrypt the seed value using the DEK\n\n let unbound_dek = UnboundKey::new(&AES_256_GCM, &dek)?;\n\n let dek_opening_key = LessSafeKey::new(unbound_dek);\n\n match dek_opening_key.open_in_place(nonce, Aad::from(AD), &mut encrypted_seed) {\n\n Ok(plaintext) => Ok(plaintext.to_vec()),\n\n Err(_) => Err(KmsError::OperationFailed(\n\n \"failed to decrypt plaintext seed\".to_string(),\n\n )),\n\n }\n\n }\n\n\n\n ///\n\n /// Encrypt the seed value and protect the seed's encryption key using a\n\n /// 
[`KmsProvider`](trait.KmsProvider.html).\n\n ///\n\n /// The returned encrypted byte blob is safe to store on unsecured media.\n\n ///\n", "file_path": "src/kms/envelope.rs", "rank": 72, "score": 25.106943095413015 }, { "content": " result.push_str(&HEX.encode(value));\n\n result.push_str(\"\\n\");\n\n }\n\n }\n\n\n\n result.push_str(&indent1);\n\n result.push_str(\"}\\n\");\n\n\n\n result\n\n }\n\n}\n\n\n\nimpl Display for RtMessage {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.to_string(1))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/message.rs", "rank": 73, "score": 24.932360000920053 }, { "content": " // DELE tag\n\n let mut dele = [0u8; 4];\n\n encoded.read_exact(&mut dele).unwrap();\n\n assert_eq!(dele, Tag::DELE.wire_value());\n\n\n\n // MAXT tag\n\n let mut maxt = [0u8; 4];\n\n encoded.read_exact(&mut maxt).unwrap();\n\n assert_eq!(maxt, Tag::MAXT.wire_value());\n\n\n\n // DELE value\n\n let mut read_dele_val = vec![0u8; 24];\n\n encoded.read_exact(&mut read_dele_val).unwrap();\n\n assert_eq!(dele_value, read_dele_val);\n\n\n\n // MAXT value\n\n let mut read_maxt_val = vec![0u8; 32];\n\n encoded.read_exact(&mut read_maxt_val).unwrap();\n\n assert_eq!(maxt_value, read_maxt_val);\n\n\n", "file_path": "src/message.rs", "rank": 74, "score": 24.82761417691239 }, { "content": "impl EnvelopeEncryption {\n\n /// Decrypt a seed previously encrypted with `encrypt_seed()`\n\n pub fn decrypt_seed(\n\n kms: &dyn KmsProvider,\n\n ciphertext_blob: &[u8],\n\n ) -> Result<Vec<u8>, KmsError> {\n\n if ciphertext_blob.len() < MIN_PAYLOAD_SIZE {\n\n return Err(KmsError::InvalidData(format!(\n\n \"ciphertext too short: min {}, found {}\",\n\n MIN_PAYLOAD_SIZE,\n\n ciphertext_blob.len()\n\n )));\n\n }\n\n\n\n let mut tmp = Cursor::new(ciphertext_blob);\n\n\n\n // Read the lengths of the wrapped DEK and of the nonce\n\n let dek_len = tmp.read_u16::<LittleEndian>()? as usize;\n\n let nonce_len = tmp.read_u16::<LittleEndian>()? 
as usize;\n\n\n", "file_path": "src/kms/envelope.rs", "rank": 75, "score": 24.69066291658141 }, { "content": " pub fn public_key_bytes(&self) -> &[u8] {\n\n self.key_pair.public_key().as_ref()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Signer {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", HEX.encode(self.public_key_bytes()))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Signer {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"Signer({}, {:?})\",\n\n HEX.encode(self.public_key_bytes()),\n\n self.buf\n\n )\n\n }\n", "file_path": "src/sign.rs", "rank": 76, "score": 24.37619819123485 }, { "content": "\n\nuse byteorder::{LittleEndian, WriteBytesExt};\n\nuse data_encoding::{Encoding, HEXLOWER_PERMISSIVE};\n\nuse mio::net::UdpSocket;\n\n\n\nuse crate::{RtMessage, Tag};\n\nuse crate::config::ServerConfig;\n\nuse crate::grease::Grease;\n\nuse crate::key::{LongTermKey, OnlineKey};\n\nuse crate::merkle::MerkleTree;\n\nuse crate::stats::ServerStats;\n\nuse crate::version::Version;\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\npub struct Responder {\n\n version: Version,\n\n online_key: OnlineKey,\n\n long_term_public_key: String,\n\n cert_bytes: Vec<u8>,\n", "file_path": "src/responder.rs", "rank": 77, "score": 23.58964786447707 }, { "content": "\n\n /// Returns a slice of the tags in the message\n\n pub fn tags(&self) -> &[Tag] {\n\n &self.tags\n\n }\n\n\n\n /// Returns a slice of the values in the message\n\n pub fn values(&self) -> &[Vec<u8>] {\n\n &self.values\n\n }\n\n\n\n /// Converts the message into a `HashMap` mapping each tag to its value\n\n pub fn into_hash_map(self) -> HashMap<Tag, Vec<u8>> {\n\n self.tags.into_iter().zip(self.values.into_iter()).collect()\n\n }\n\n\n\n /// Encode this message into an on-the-wire representation prefixed with RFC framing.\n\n pub fn encode_framed(&self) -> Result<Vec<u8>, Error> {\n\n let encoded = self.encode()?;\n\n let mut frame = Vec::with_capacity(RFC_REQUEST_FRAME_BYTES.len() + 4 + encoded.len());\n", "file_path": "src/message.rs", "rank": 78, "score": 23.422038165077133 }, { "content": "\n\nuse data_encoding::{Encoding, HEXLOWER_PERMISSIVE};\n\nuse ring::rand;\n\nuse ring::rand::SecureRandom;\n\nuse ring::signature::{self, Ed25519KeyPair, KeyPair};\n\n\n\nconst HEX: Encoding = HEXLOWER_PERMISSIVE;\n\n\n\nconst INITIAL_BUF_SIZE: usize = 1024;\n\n\n\n/// A multi-step (init-update-finish) interface for verifying an Ed25519 signature\n\n#[derive(Debug)]\n\npub struct Verifier {\n\n pubkey: Vec<u8>,\n\n buf: Vec<u8>,\n\n}\n\n\n\nimpl Verifier {\n\n pub fn new(pubkey: &[u8]) -> Self {\n\n Verifier {\n", "file_path": "src/sign.rs", "rank": 79, "score": 22.81663673098891 }, { "content": " assert_eq!(cert, Tag::CERT.wire_value());\n\n\n\n // CERT value\n\n let mut read_val = vec![0u8; 64];\n\n encoded.read_exact(&mut read_val).unwrap();\n\n assert_eq!(value, read_val);\n\n\n\n // Entire message was read\n\n assert_eq!(encoded.position(), 72);\n\n\n\n // Round-trip single-tag message\n\n RtMessage::from_bytes(&msg.encode().unwrap()).unwrap();\n\n }\n\n\n\n #[test]\n\n fn two_field_message_encoding() {\n\n let dele_value = vec![b'a'; 24];\n\n let maxt_value = vec![b'z'; 32];\n\n\n\n let mut msg = RtMessage::with_capacity(2);\n", "file_path": "src/message.rs", "rank": 80, "score": 22.183960353815632 }, { "content": " requests: Vec<(Vec<u8>, SocketAddr)>,\n\n merkle: MerkleTree,\n\n grease: Grease,\n\n}\n\n\n\nimpl Responder {\n\n pub fn new(version: Version, config: &dyn ServerConfig, ltk: &mut 
LongTermKey) -> Responder {\n\n let online_key = OnlineKey::new();\n\n let cert_bytes = ltk.make_cert(&online_key).encode().expect(\"make_cert\");\n\n let long_term_public_key = HEX.encode(ltk.public_key());\n\n let requests = Vec::with_capacity(config.batch_size() as usize);\n\n let grease = Grease::new(config.fault_percentage());\n\n\n\n let merkle = if version == Version::Classic {\n\n MerkleTree::new_sha512()\n\n } else {\n\n MerkleTree::new_sha512_256()\n\n };\n\n\n\n Responder {\n", "file_path": "src/responder.rs", "rank": 81, "score": 21.509087175142064 }, { "content": "# The public key of 'roughtime.int08h.com' is stored in a DNS TXT record \n\n$ host -t TXT roughtime.int08h.com\n\nroughtime.int08h.com descriptive text \"016e6e0284d24c37c6e4d7d8d5b4e1d3c1949ceaa545bf875616c9dce0c9bec1\"\n\n\n\n# Validate the server response using its public key\n\n$ target/release/roughenough-client -v roughtime.int08h.com 2002 -p 016e6e0284d24c37c6e4d7d8d5b4e1d3c1949ceaa545bf875616c9dce0c9bec1\n\nRequesting time from: \"roughtime.int08h.com\":2002\n\nReceived time from server: midpoint=\"Oct 26 2018 23:22:20\", radius=1000000, verified=Yes (merkle_index=0)\n\nOct 26 2018 23:22:20\n\n```\n\n\n\nThe **`verified=Yes`** in the output confirms that the server's response had a valid signature.\n\n\n\n### Server Configuration\n\n\n\nThere are two (mutually exclusive) ways to configure the Roughenough server: \n\n\n\n1. A YAML file, or\n\n2. Environment variables\n\n\n\nThe server accepts the following configuration parameters:\n\n\n\nYAML Key | Environment Variable | Necessity | Description\n\n--- | --- | --- | ---\n\n`interface` | `ROUGHENOUGH_INTERFACE` | Required | IP address or interface name for listening to client requests\n\n`port` | `ROUGHENOUGH_PORT` | Required | UDP port to listen for requests\n\n`seed` | `ROUGHENOUGH_SEED` | Required | A 32-byte hexadecimal value used to generate the server's long-term key pair. **This is a secret value and must be un-guessable**, treat it with care. (If compiled with KMS support, length will vary; see [Optional Features](#optional-features))\n\n`batch_size` | `ROUGHENOUGH_BATCH_SIZE` | Optional | The maximum number of requests to process in one batch. All nonces in a batch are used to build a Merkle tree, the root of which is signed. Default is `64` requests per batch.\n\n`status_interval` | `ROUGHENOUGH_STATUS_INTERVAL` | Optional | Number of _seconds_ between each logged status update. Default is `600` seconds (10 minutes).\n\n`health_check_port` | `ROUGHENOUGH_HEALTH_CHECK_PORT` | Optional | If present, enable an HTTP health check responder on the provided port. **Use with caution**, see [Optional Features](#optional-features).\n\n`kms_protection` | `ROUGHENOUGH_KMS_PROTECTION` | Optional | If compiled with KMS support, the ID of the KMS key used to protect the long-term identity. See [Optional Features](#optional-features).\n\n`fault_percentage` | `ROUGHENOUGH_FAULT_PERCENTAGE` | Optional | Likelihood (as a percentage) that the server will intentionally return an invalid client response. An integer range from `0` (disabled, all responses valid) to `50` (50% of responses will be invalid). 
Default is `0` (disabled).\n\n\n", "file_path": "README.md", "rank": 82, "score": 21.478006298799954 }, { "content": " pub fn encrypt_seed(kms: &dyn KmsProvider, plaintext_seed: &[u8]) -> Result<Vec<u8>, KmsError> {\n\n // Generate random DEK and nonce\n\n let rng = SystemRandom::new();\n\n let mut raw_dek = [0u8; DEK_LEN_BYTES];\n\n let mut raw_nonce = [0u8; NONCE_LEN_BYTES];\n\n rng.fill(&mut raw_dek)?;\n\n rng.fill(&mut raw_nonce)?;\n\n\n\n // Ring will overwrite plaintext with ciphertext+tag in this buffer\n\n let mut buf = plaintext_seed.to_vec();\n\n\n\n // Encrypt the plaintext seed (in buf) using the DEK\n\n let nonce = Nonce::assume_unique_for_key(raw_nonce);\n\n let unbound_dek = UnboundKey::new(&AES_256_GCM, &raw_dek)?;\n\n let dek_seal_key = LessSafeKey::new(unbound_dek);\n\n\n\n // Output overwrites context of 'buf' and appends auth tag to 'buf'\n\n if let Err(_) = dek_seal_key.seal_in_place_append_tag(nonce, Aad::from(AD), &mut buf) {\n\n return Err(KmsError::OperationFailed(\n\n \"failed to encrypt plaintext seed\".to_string(),\n", "file_path": "src/kms/envelope.rs", "rank": 83, "score": 21.459825022909886 }, { "content": " /// Dangerous: construct a new RtMessage **without validation or error checking**.\n\n ///\n\n /// Intended _only_ for construction of deliberately bogus responses as part of [Roughtime's\n\n /// ecosystem](https://roughtime.googlesource.com/roughtime/+/HEAD/ECOSYSTEM.md#maintaining-a-healthy-software-ecosystem).\n\n ///\n\n pub fn new_deliberately_invalid(tags: Vec<Tag>, values: Vec<Vec<u8>>) -> Self {\n\n RtMessage { tags, values }\n\n }\n\n\n\n /// Internal function to create a single tag message\n\n fn single_tag_message(bytes: &[u8], msg: &mut Cursor<&[u8]>) -> Result<Self, Error> {\n\n if bytes.len() < 8 {\n\n return Err(Error::MessageTooShort);\n\n }\n\n\n\n let pos = msg.position() as usize;\n\n msg.set_position((pos + 4) as u64);\n\n\n\n let mut value = Vec::new();\n\n msg.read_to_end(&mut value)?;\n", "file_path": "src/message.rs", "rank": 84, "score": 21.37158937011646 }, { "content": " Classic,\n\n\n\n /// IETF standardized version\n\n Rfc,\n\n}\n\n\n\n// RFC version 1\n\nconst VERSION_1: &'static [u8] = &[0x01, 0x00, 0x00, 0x00];\n\n\n\nimpl Version {\n\n /// On-the-wire representation of the version value\n\n pub fn wire_bytes(self) -> &'static [u8] {\n\n match self {\n\n Version::Classic => unreachable!(\"invalid, no version bytes for Classic\"),\n\n Version::Rfc => VERSION_1,\n\n }\n\n }\n\n\n\n /// A short (non-canonical) string representation of the `Version`\n\n pub fn to_string(&self) -> String {\n", "file_path": "src/version.rs", "rank": 85, "score": 21.32037581242257 }, { "content": "/// A multi-step (init-update-finish) interface for creating an Ed25519 signature\n\npub struct Signer {\n\n key_pair: Ed25519KeyPair,\n\n buf: Vec<u8>,\n\n}\n\n\n\nimpl Default for Signer {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Signer {\n\n pub fn new() -> Self {\n\n let rng = rand::SystemRandom::new();\n\n let mut seed = [0u8; 32];\n\n rng.fill(&mut seed).unwrap();\n\n\n\n Signer::from_seed(&seed)\n\n }\n", "file_path": "src/sign.rs", "rank": 86, "score": 21.30222860727361 }, { "content": "#### Configuration\n\n\n\nCopy and paste the output `kms_protection` and `seed` values into a config or\n\nset the corresponding environment variables. `roughenough-server` will detect that\n\nGoogle KMS is being used and decrypt the seed automatically. 
For example:\n\n\n\n```yaml\n\ninterface: 127.0.0.1\n\nport: 8686\n\nkms_protection: \"projects/PROJECT_NAME/locations/GCP_LOCATION/keyRings/KEYRING_NAME/cryptoKeys/KEY_NAME\"\n\nseed: 71000c000a2400c7f2553954873ef29aeb37384c25d7a937d389221207c3368657870129d601d084c8da1249008d6fd4640f815596788e97bb3ce02fd007bc25a1019ca51945c3b99283d3945baacd77b1b991f5f6f8848c549a5767f57c9c999e97fe6d28fdb17db1d63c2ea966d8236d20c71e8e9c757c5bab62472c65b48376bc8951700aceb22545fce58d77e7cc147f7134da7a2cca790b54f29e4798442cee6e0d34e57f80ce983f7e5928cceff2\n\n```\n\n\n\nor using environment based configuration:\n\n\n\n```bash\n\n$ export ROUGHENOUGH_INTERFACE=127.0.0.1\n\n$ export ROUGHENOUGH_PORT=8686\n\n$ export ROUGHENOUGH_KMS_PROTECTION=\"projects/PROJECT_NAME/locations/GCP_LOCATION/keyRings/KEYRING_NAME/cryptoKeys/KEY_NAME\"\n\n$ export ROUGHENOUGH_SEED=71000c000a2400c7f2553954873ef29aeb37384c25d7a937d389221207c3368657870129d601d084c8da1249008d6fd4640f815596788e97bb3ce02fd007bc25a1019ca51945c3b99283d3945baacd77b1b991f5f6f8848c549a5767f57c9c999e97fe6d28fdb17db1d63c2ea966d8236d20c71e8e9c757c5bab62472c65b48376bc8951700aceb22545fce58d77e7cc147f7134da7a2cca790b54f29e4798442cee6e0d34e57f80ce983f7e5928cceff2\n\n```\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 87, "score": 20.976921890342325 }, { "content": " /// to be of the form `arn:aws:kms:some-aws-region:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab`\n\n pub fn from_arn(arn: &str) -> Result<Self, KmsError> {\n\n let parts: Vec<&str> = arn.split(':').collect();\n\n\n\n if parts.len() != 6 {\n\n return Err(KmsError::InvalidConfiguration(format!(\n\n \"invalid KMS arn: too few parts {}\",\n\n parts.len()\n\n )));\n\n }\n\n\n\n let region_part = parts.get(3).expect(\"region is missing\");\n\n let region = match Region::from_str(region_part) {\n\n Ok(r) => r,\n\n Err(e) => return Err(KmsError::InvalidConfiguration(e.to_string())),\n\n };\n\n\n\n Ok(AwsKms {\n\n kms_client: KmsClient::new(region),\n\n key_id: arn.to_string(),\n", "file_path": "src/kms/awskms.rs", "rank": 88, "score": 20.903238320491017 }, { "content": "pub use crate::error::Error;\n\npub use crate::message::RtMessage;\n\npub use crate::tag::Tag;\n\n\n\nmod error;\n\nmod message;\n\nmod tag;\n\n\n\npub mod config;\n\npub mod grease;\n\npub mod key;\n\npub mod kms;\n\npub mod merkle;\n\npub mod request;\n\npub mod responder;\n\npub mod server;\n\npub mod sign;\n\npub mod stats;\n\npub mod version;\n\n\n\n/// Version of Roughenough\n\npub const VERSION: &str = \"1.2.0-draft-5\";\n\n\n\n/// Roughenough version string enriched with any compile-time optional features\n", "file_path": "src/lib.rs", "rank": 89, "score": 20.518140047103223 }, { "content": " poll_duration,\n\n status_interval,\n\n timer,\n\n poll,\n\n responder_rfc,\n\n responder_classic,\n\n buf: [0u8; 65_536],\n\n\n\n stats,\n\n\n\n #[cfg(fuzzing)]\n\n fake_client_socket: UdpSocket::bind(&\"127.0.0.1:0\".parse().unwrap()).unwrap(),\n\n }\n\n }\n\n\n\n /// Returns a reference to the server's long-term public key\n\n pub fn get_public_key(&self) -> &str {\n\n &self.responder_rfc.get_public_key()\n\n }\n\n\n", "file_path": "src/server.rs", "rank": 90, "score": 20.42506850397946 }, { "content": " frame.write_all(RFC_REQUEST_FRAME_BYTES)?;\n\n frame.write_u32::<LittleEndian>(encoded.len() as u32)?;\n\n frame.write_all(&encoded)?;\n\n\n\n Ok(frame)\n\n }\n\n\n\n /// Encode this message into its on-the-wire representation.\n\n pub fn encode(&self) -> Result<Vec<u8>, Error> {\n\n let num_tags = self.tags.len();\n\n let mut out = 
Vec::with_capacity(self.encoded_size());\n\n\n\n // number of tags\n\n out.write_u32::<LittleEndian>(num_tags as u32)?;\n\n\n\n // offset(s) to values, IFF there are two or more tags\n\n if num_tags > 1 {\n\n let mut offset_sum = self.values[0].len();\n\n\n\n for val in &self.values[1..] {\n", "file_path": "src/message.rs", "rank": 91, "score": 20.201515638774378 }, { "content": "#### Configuration\n\n\n\nCopy and paste the output `kms_protection` and `seed` values into a config or\n\nset the corresponding environment variables. The `roughenough-server` will detect that\n\nAWS KMS is being used and decrypt the seed automatically. For example:\n\n\n\n```yaml\n\ninterface: 127.0.0.1\n\nport: 8686\n\nkms_protection: \"arn:aws:kms:SOME_AWS_REGION:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n\nseed: b8000c000102020078d39e85c7386e9e2bed1f30fac6dd322db96b8aaac8974fc6c0e0f566f8f6c971012fca1e69fffffd947fe82a9e505baf580000007e307c06092a864886f70d010706a06f306d020100306806092a864886f70d010701301e060960864801650304012e3011040c55d16d891b3b2a1ae2587a9c020110803bcc74dd96336009087772b28ec908c40e4113b1ab9b98934bd3b4f3dd3c1e8cdc6da82a4321fd8378ad0e2e0507bf0c5ea0e28d447e5f8482533baa423b7af8459ae87736f381d87fe38c21a805fae1c25c43d59200f42cae0d07f741e787a04c0ad72774942dddf818be0767e4963fe5a810f734a0125c\n\n```\n\n\n\nor using environment based configuration:\n\n\n\n```bash\n\n$ export ROUGHENOUGH_INTERFACE=127.0.0.1\n\n$ export ROUGHENOUGH_PORT=8686\n\n$ export ROUGHENOUGH_KMS_PROTECTION=\"arn:aws:kms:SOME_AWS_REGION:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n\n$ export ROUGHENOUGH_SEED=b8000c000102020078d39e85c7386e9e2bed1f30fac6dd322db96b8aaac8974fc6c0e0f566f8f6c971012fca1e69fffffd947fe82a9e505baf580000007e307c06092a864886f70d010706a06f306d020100306806092a864886f70d010701301e060960864801650304012e3011040c55d16d891b3b2a1ae2587a9c020110803bcc74dd96336009087772b28ec908c40e4113b1ab9b98934bd3b4f3dd3c1e8cdc6da82a4321fd8378ad0e2e0507bf0c5ea0e28d447e5f8482533baa423b7af8459ae87736f381d87fe38c21a805fae1c25c43d59200f42cae0d07f741e787a04c0ad72774942dddf818be0767e4963fe5a810f734a0125c\n\n```\n\n\n\n### GCP Example\n\n\n", "file_path": "doc/OPTIONAL-FEATURES.md", "rank": 92, "score": 20.146374003486898 }, { "content": " if nonce_len != NONCE_LEN_BYTES || dek_len > ciphertext_blob.len() {\n\n return Err(KmsError::InvalidData(format!(\n\n \"invalid DEK ({}) or nonce ({}) length\",\n\n dek_len, nonce_len\n\n )));\n\n }\n\n\n\n // Consume the wrapped DEK\n\n let mut encrypted_dek = vec_zero_filled(dek_len);\n\n tmp.read_exact(&mut encrypted_dek)?;\n\n\n\n // Consume the nonce\n\n let mut raw_nonce = [0u8; NONCE_LEN_BYTES];\n\n tmp.read_exact(&mut raw_nonce)?;\n\n let nonce = Nonce::assume_unique_for_key(raw_nonce);\n\n\n\n // Consume the encrypted seed + tag\n\n let mut encrypted_seed = Vec::new();\n\n tmp.read_to_end(&mut encrypted_seed)?;\n\n\n", "file_path": "src/kms/envelope.rs", "rank": 93, "score": 20.10392290992084 }, { "content": "\n\n /// Tags for which values are themselves an `RtMessage`\n\n pub fn is_nested(&self) -> bool {\n\n *self == Tag::CERT || *self == Tag::DELE || *self == Tag::SREP\n\n }\n\n\n\n /// A short (non canonical) string representation of the tag\n\n fn to_string(&self) -> String {\n\n match self {\n\n Tag::PAD_RFC => String::from(\"PAD00\"),\n\n Tag::PAD_CLASSIC => String::from(\"PADff\"),\n\n Tag::SIG => String::from(\"SIG\"),\n\n Tag::VER => String::from(\"VER\"),\n\n _ => String::from_utf8(self.wire_value().to_vec()).unwrap(),\n\n }\n\n }\n\n}\n\n\n\nimpl Display for 
Tag {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.to_string())\n\n }\n\n}\n", "file_path": "src/tag.rs", "rank": 94, "score": 19.719946541788133 }, { "content": "#### YAML Configuration \n\n\n\nThe table above lists the YAML keys available in the config file. An example:\n\n\n\n```yaml\n\ninterface: 127.0.0.1\n\nport: 8686\n\nseed: f61075c988feb9cb700a4a6a3291bfbc9cab11b9c9eca8c802468eb38a43d7d3\n\n```\n\n\n\nProvide the config file as the single command-line argument to the Roughenough server binary:\n\n\n\n```bash\n\n$ /path/to/roughenough-server /path/to/config.yaml\n\n```\n\n\n\n#### Environment Configuration\n\n\n\nRoughenough can be configured via the `ROUGHENOUGH_*` [environment variables](https://12factor.net/config) \n\nlisted in the table above. Start the server with a single `ENV` argument to have Roughenough configure itself\n\nfrom the environment. Example:\n\n\n\n```bash\n\n$ export ROUGHENOUGH_INTERFACE=127.0.0.1\n\n$ export ROUGHENOUGH_PORT=8686\n\n$ export ROUGHENOUGH_SEED=f61075c988feb9cb700a4a6a3291bfbc9cab11b9c9eca8c802468eb38a43d7d3\n\n$ /path/to/roughenough-server ENV\n\n```\n\n\n\n### Starting the Server\n\n\n\n```bash\n\n# Build roughenough\n\n$ cargo build --release\n\n\n\n# Via a config file\n\n$ target/release/roughenough-server example.cfg\n\n2018-07-25 00:05:09 INFO [server] Roughenough server v1.0.5 starting\n\n2018-07-25 00:05:09 INFO [server] Long-term public key: d0756ee69ff5fe96cbcf9273208fec53124b1dd3a24d3910e07c7c54e2473012\n\n2018-07-25 00:05:09 INFO [server] Ephemeral public key: 25fd5dc31ceee241aed3e643534e95ed0609e9a20982a45ac0312a5f55e2cc66\n\n2018-07-25 00:05:09 INFO [server] Server listening on 127.0.0.1:8686\n\n\n", "file_path": "README.md", "rank": 95, "score": 19.644645563435823 }, { "content": " }\n\n\n\n ///\n\n /// Returns a *new* `RtMessage` that has been altered to be deliberately invalid.\n\n ///\n\n /// The type of alteration made to `src_msg` is randomly chosen from from\n\n /// [Pathologies](enum.Pathologies.html)\n\n ///\n\n pub fn add_errors(&mut self, src_msg: &RtMessage) -> RtMessage {\n\n match ALL_PATHOLOGIES.choose(&mut self.prng) {\n\n Some(CorruptResponseSignature) => self.corrupt_response_signature(src_msg),\n\n Some(RandomlyOrderTags) => self.randomly_order_tags(src_msg),\n\n None => unreachable!(),\n\n }\n\n }\n\n\n\n ///\n\n /// Randomly shuffle ordering of the (tag, value) pairs in the source message.\n\n ///\n\n fn randomly_order_tags(&mut self, src_msg: &RtMessage) -> RtMessage {\n", "file_path": "src/grease.rs", "rank": 96, "score": 19.037047299081664 }, { "content": "\n\n pub fn from_seed(seed: &[u8]) -> Self {\n\n Signer {\n\n key_pair: Ed25519KeyPair::from_seed_unchecked(seed).unwrap(),\n\n buf: Vec::with_capacity(INITIAL_BUF_SIZE),\n\n }\n\n }\n\n\n\n pub fn update(&mut self, data: &[u8]) {\n\n self.buf.reserve(data.len());\n\n self.buf.extend_from_slice(data);\n\n }\n\n\n\n pub fn sign(&mut self) -> Vec<u8> {\n\n let signature = self.key_pair.sign(&self.buf).as_ref().to_vec();\n\n self.buf.clear();\n\n\n\n signature\n\n }\n\n\n", "file_path": "src/sign.rs", "rank": 97, "score": 18.888092521906533 }, { "content": " Err(e) => Err(KmsError::OperationFailed(e.to_string())),\n\n }\n\n }\n\n }\n\n\n\n #[cfg(feature = \"awskms\")]\n\n impl fmt::Display for AwsKms {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.key_id)\n\n }\n\n }\n\n}\n", "file_path": "src/kms/awskms.rs", "rank": 98, "score": 18.840142006719823 }, { "content": " use 
std::fmt;\n\n use std::fmt::Formatter;\n\n use std::str::FromStr;\n\n\n\n use bytes::Bytes;\n\n use futures::executor::block_on;\n\n use rusoto_core::Region;\n\n use rusoto_kms::{DecryptRequest, EncryptRequest, Kms, KmsClient};\n\n\n\n use crate::kms::{AD, DEK_LEN_BYTES, EncryptedDEK, KmsError, KmsProvider, PlaintextDEK};\n\n\n\n /// Amazon Web Services Key Management Service\n\n /// https://aws.amazon.com/kms/\n\n pub struct AwsKms {\n\n kms_client: KmsClient,\n\n key_id: String,\n\n }\n\n\n\n impl AwsKms {\n\n /// Create a new instance from the full ARN of a AWS KMS key. The ARN is expected\n", "file_path": "src/kms/awskms.rs", "rank": 99, "score": 18.647978288224746 } ]
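The `src/kms/envelope.rs` excerpts above read an envelope-encrypted seed back as two little-endian `u16` length prefixes followed by the KMS-wrapped DEK, the AEAD nonce, and the AES-256-GCM ciphertext with its tag appended. A minimal standalone sketch of splitting that layout with `byteorder` (the `split_envelope` helper is invented here for illustration and is not part of roughenough):

```rust
use std::io::{Cursor, Read};

use byteorder::{LittleEndian, ReadBytesExt};

/// Hypothetical helper: split an envelope blob into its three parts without decrypting anything.
fn split_envelope(blob: &[u8]) -> std::io::Result<(Vec<u8>, Vec<u8>, Vec<u8>)> {
    let mut cursor = Cursor::new(blob);

    // Two little-endian u16 length prefixes, as read back in decrypt_seed()
    let dek_len = cursor.read_u16::<LittleEndian>()? as usize;
    let nonce_len = cursor.read_u16::<LittleEndian>()? as usize;

    // KMS-wrapped data encryption key
    let mut wrapped_dek = vec![0u8; dek_len];
    cursor.read_exact(&mut wrapped_dek)?;

    // AEAD nonce
    let mut nonce = vec![0u8; nonce_len];
    cursor.read_exact(&mut nonce)?;

    // Remainder: the seed sealed under the DEK, with the GCM tag appended
    let mut sealed_seed = Vec::new();
    cursor.read_to_end(&mut sealed_seed)?;

    Ok((wrapped_dek, nonce, sealed_seed))
}
```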
Rust
source-code/parser.rs
adrianbielsa1/nabe
3355ecb7b5ba8e21a1db256144935af46f506eca
use crate::token::Token; use crate::statement::*; struct Parser<'a> { tokens: &'a Vec<Token>, tokens_position: usize, } impl<'a> Parser<'a> { pub fn new(tokens: &'a Vec<Token>) -> Parser { return Parser { tokens: tokens, tokens_position: 0, }; } pub fn parse(&mut self) -> Vec<Statement> { let parsers = [ Parser::parse_type, Parser::parse_variable, Parser::parse_constant, Parser::parse_subroutine, Parser::parse_function, Parser::parse_enum, Parser::parse_attribute, Parser::parse_option, ]; let mut statements = vec!(); 'parse_next_statement: while self.tokens_position < self.tokens.len() { for parser in &parsers { let position_before_parsing = self.tokens_position; if let Some(statement) = parser(self) { statements.push(statement); continue 'parse_next_statement; } else { self.tokens_position = position_before_parsing; } } break; } return statements; } fn compare(first: &Token, second: &Token) -> bool { let left_discriminant = std::mem::discriminant(first); let right_discriminant = std::mem::discriminant(second); return left_discriminant == right_discriminant; } fn consume(&mut self, expected_token: Token) -> Option<Token> { if self.tokens_position >= self.tokens.len() { return None; } if Self::compare(&self.tokens[self.tokens_position], &expected_token) { self.tokens_position += 1; return Some(self.tokens[self.tokens_position - 1].clone()); } else { return None; } } fn parse_type(&mut self) -> Option<Statement> { let _ = self.consume(Token::Type)?; let name = self.consume(Token::Identifier(vec!()))?; let mut attributes = vec!(); while let Some(attribute) = self.parse_type_attribute() { attributes.push(attribute); } let _ = self.consume(Token::End)?; let _ = self.consume(Token::Type)?; return Some(Statement::Type(TypeStatement { name: name, attributes: attributes, })); } fn parse_type_attribute(&mut self) -> Option<Statement> { let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; return Some(Statement::TypeAttribute(TypeAttributeStatement { name: name, kind: kind, })); } fn parse_enum(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t)); let _ = self.consume(Token::Enum)?; let name = self.consume(Token::Identifier(vec!()))?; let mut attributes = vec!(); while let Some(attribute) = self.parse_enum_attribute() { attributes.push(attribute); } let _ = self.consume(Token::End)?; let _ = self.consume(Token::Enum)?; return Some(Statement::Enum(EnumStatement { scope: scope, name: name, attributes: attributes, })); } fn parse_enum_attribute(&mut self) -> Option<Statement> { let name = self.consume(Token::Identifier(vec!()))?; let value = match self.consume(Token::Assignment) { Some(_) => Some(self.consume(Token::Number(vec!()))?), None => None, }; return Some(Statement::EnumAttribute(EnumAttributeStatement { name: name, value: value, })); } fn parse_variable(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static, Token::Dim]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; return Some(Statement::Variable(VariableStatement { scope: scope.clone(), name: name, kind: kind, })); } fn parse_constant(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, 
Token::Private, Token::Static, Token::Dim]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Const)?; let name = self.consume(Token::Identifier(vec!()))?; let kind = match self.consume(Token::As) { Some(_) => Some(self.consume(Token::Identifier(vec!()))?), None => None, }; let length = match self.consume(Token::Times) { Some(_) => Some(self.consume(Token::Number(vec!()))?), None => None, }; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Constant(ConstantStatement { scope: scope.clone(), name: name, kind: kind, length: length, value: value, })); } fn parse_subroutine(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Sub)?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::LeftParentheses)?; let mut arguments = vec!(); while let Some(argument) = self.parse_callable_argument() { arguments.push(argument); } let _ = self.consume(Token::RightParentheses)?; let body = self.parse_callable_body(); let _ = self.consume(Token::End)?; let _ = self.consume(Token::Sub)?; return Some(Statement::Subroutine(SubroutineStatement { scope: scope, name: name, arguments: arguments, body: body, })); } fn parse_function(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private, Token::Static]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?; let _ = self.consume(Token::Function)?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::LeftParentheses)?; let mut arguments = vec!(); while let Some(argument) = self.parse_callable_argument() { arguments.push(argument); } let _ = self.consume(Token::RightParentheses)?; let kind = match self.consume(Token::As) { Some(_) => self.consume(Token::Identifier(vec!())), None => None, }; let body = self.parse_callable_body(); let _ = self.consume(Token::End)?; let _ = self.consume(Token::Function)?; return Some(Statement::Function(FunctionStatement { scope: scope, name: name, arguments: arguments, kind: kind, body: body, })); } fn parse_callable_argument(&mut self) -> Option<Statement> { let possible_modifiers = [Token::ByVal, Token::ByRef,]; let modifier = std::array::IntoIter::new(possible_modifiers).find_map(|t| self.consume(t)); let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; return Some(Statement::Argument(ArgumentStatement { modifier: modifier, name: name, kind: kind, })); } fn parse_callable_body(&mut self) -> Vec<Statement> { let parsers = [ Parser::parse_variable, Parser::parse_constant, Parser::parse_assignment, Parser::parse_exit, Parser::parse_return, Parser::parse_attribute, ]; let mut statements = vec!(); 'parse_next_statement: while self.tokens_position < self.tokens.len() { for parser in &parsers { let position_before_parsing = self.tokens_position; if let Some(statement) = parser(self) { statements.push(statement); continue 'parse_next_statement; } else { self.tokens_position = position_before_parsing; } } break; } return statements; } fn parse_assignment(&mut self) -> Option<Statement> { let left = 
self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let right = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Assignment(AssignmentStatement { left: left, right: Box::new(right), })); } fn parse_exit(&mut self) -> Option<Statement> { let _ = self.consume(Token::Exit)?; let possible_blocks = [ Token::Sub, Token::Function, ]; let block = std::array::IntoIter::new(possible_blocks).find_map(|t| self.consume(t))?; return Some(Statement::Exit(ExitStatement { block: block, })); } fn parse_return(&mut self) -> Option<Statement> { let _ = self.consume(Token::Return)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t)); return Some(Statement::Return(ReturnStatement { value: value, })); } fn parse_attribute(&mut self) -> Option<Statement> { let _ = self.consume(Token::Attribute)?; let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::Assignment)?; let possible_values = [ Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!()) ]; let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?; return Some(Statement::Attribute(AttributeStatement { name: name, value: value, })); } fn parse_option(&mut self) -> Option<Statement> { let _ = self.consume(Token::Option)?; let possible_configurations = [ Token::Explicit, Token::Base, Token::Compare, Token::Private, ]; let configuration = std::array::IntoIter::new(possible_configurations).find_map(|t| self.consume(t))?; let value = match configuration { Token::Explicit => None, Token::Base => Some(self.consume(Token::Number(vec!()))?), Token::Compare => Some(self.consume(Token::Identifier(vec!()))?), Token::Private => Some(self.consume(Token::Module)?), _ => unreachable!(), }; return Some(Statement::Option(OptionStatement { configuration: configuration, value: value, })); } } pub fn parse(tokens: &Vec<Token>) -> Vec<Statement> { let mut parser = Parser::new(tokens); return parser.parse(); }
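The `parser.rs` listing above exposes a single free function, `parse(tokens)`, which repeatedly tries each statement parser and rewinds the token position when one fails. A hypothetical driver is sketched below; the lexed token values, the `parser` module path, and the assumption that `Token::Identifier` carries its lexeme as a byte vector are all illustrative, not taken from the nabe sources.

```rust
use crate::statement::Statement;
use crate::token::Token;

fn parse_single_declaration() {
    // Tokens a lexer might emit for the VB declaration: Dim counter As Long
    let tokens = vec![
        Token::Dim,
        Token::Identifier(b"counter".to_vec()),
        Token::As,
        Token::Identifier(b"Long".to_vec()),
    ];

    let statements = crate::parser::parse(&tokens);

    // parse_variable() consumes the whole sequence as one Variable statement.
    assert_eq!(statements.len(), 1);
    assert!(matches!(statements[0], Statement::Variable(_)));
}
```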
use crate::token::Token; use crate::statement::*; struct Parser<'a> { tokens: &'a Vec<Token>, tokens_position: usize, } impl<'a> Parser<'a> { pub fn new(tokens: &'a Vec<Token>) -> Parser { return Parser { tokens: tokens, tokens_position: 0, }; } pub fn parse(&mut self) -> Vec<Statement> { let parsers = [ Parser::parse_type, Parser::parse_variable, Parser::parse_constant, Parser::parse_subroutine, Parser::parse_function, Parser::parse_enum, Parser::parse_attribute, Parser::parse_option, ]; let mut statements = vec!(); 'parse_next_statement: while self.tokens_position < self.tokens.len() { for parser in &parsers { let position_before_parsing = self.tokens_position; if let Some(statement) = parser(self) { statements.push(statement); continue 'parse_next_statement; } else { self.tokens_position = position_before_parsing; } } break; } return statements; } fn compare(first: &Token, second: &Token) -> bool { let left_discriminant = std::mem::discriminant(first); let right_discriminant = std::mem::discriminant(second); return left_discriminant == right_discriminant; } fn consume(&mut self, expected_token: Token) -> Option<Token> { if self.tokens_position >= self.tokens.len() { return None; } if Self::compare(&self.tokens[self.tokens_position], &expected_token) { self.tokens_position += 1; return Some(self.tokens[self.tokens_position - 1].clone()); } else { return None; } } fn parse_type(&mut self) -> Option<Statement> { let _ = self.consume(Token::Type)?; let name = self.consume(Token::Identifier(vec!()))?; let mut attributes = vec!(); while let Some(attribute) = self.parse_type_attribute() { attributes.push(attribute); } let _ = self.consume(Token::End)?; let _ = self.consume(Token::Type)?; return Some(Statement::Type(TypeStatement { name: name, attributes: attributes, })); } fn parse_type_attribute(&mut self) -> Option<Statement> { let name = self.consume(Token::Identifier(vec!()))?; let _ = self.consume(Token::As)?; let kind = self.consume(Token::Identifier(vec!()))?; return Some(Statement::TypeAttribute(TypeAttributeStatement { name: name, kind: kind, })); } fn parse_enum(&mut self) -> Option<Statement> { let possible_scopes = [Token::Public, Token::Private]; let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t)); let _ = self.consume(Token::Enum)?; let name = self.consume(Token::Identifier(vec!()))?; let mut attributes = vec!(); while let Some(attribute) = self.parse_enum_attribute() { attributes.push(attribut
    fn parse_enum_attribute(&mut self) -> Option<Statement> {
        let name = self.consume(Token::Identifier(vec!()))?;
        let value = match self.consume(Token::Assignment) {
            Some(_) => Some(self.consume(Token::Number(vec!()))?),
            None => None,
        };
        return Some(Statement::EnumAttribute(EnumAttributeStatement { name: name, value: value, }));
    }

    fn parse_variable(&mut self) -> Option<Statement> {
        let possible_scopes = [Token::Public, Token::Private, Token::Static, Token::Dim];
        let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?;
        let name = self.consume(Token::Identifier(vec!()))?;
        let _ = self.consume(Token::As)?;
        let kind = self.consume(Token::Identifier(vec!()))?;
        return Some(Statement::Variable(VariableStatement { scope: scope.clone(), name: name, kind: kind, }));
    }

    fn parse_constant(&mut self) -> Option<Statement> {
        let possible_scopes = [Token::Public, Token::Private, Token::Static, Token::Dim];
        let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?;
        let _ = self.consume(Token::Const)?;
        let name = self.consume(Token::Identifier(vec!()))?;
        let kind = match self.consume(Token::As) {
            Some(_) => Some(self.consume(Token::Identifier(vec!()))?),
            None => None,
        };
        let length = match self.consume(Token::Times) {
            Some(_) => Some(self.consume(Token::Number(vec!()))?),
            None => None,
        };
        let _ = self.consume(Token::Assignment)?;
        let possible_values = [Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!())];
        let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?;
        return Some(Statement::Constant(ConstantStatement { scope: scope.clone(), name: name, kind: kind, length: length, value: value, }));
    }

    fn parse_subroutine(&mut self) -> Option<Statement> {
        let possible_scopes = [Token::Public, Token::Private, Token::Static];
        let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?;
        let _ = self.consume(Token::Sub)?;
        let name = self.consume(Token::Identifier(vec!()))?;
        let _ = self.consume(Token::LeftParentheses)?;
        let mut arguments = vec!();
        while let Some(argument) = self.parse_callable_argument() {
            arguments.push(argument);
        }
        let _ = self.consume(Token::RightParentheses)?;
        let body = self.parse_callable_body();
        let _ = self.consume(Token::End)?;
        let _ = self.consume(Token::Sub)?;
        return Some(Statement::Subroutine(SubroutineStatement { scope: scope, name: name, arguments: arguments, body: body, }));
    }

    fn parse_function(&mut self) -> Option<Statement> {
        let possible_scopes = [Token::Public, Token::Private, Token::Static];
        let scope = std::array::IntoIter::new(possible_scopes).find_map(|t| self.consume(t))?;
        let _ = self.consume(Token::Function)?;
        let name = self.consume(Token::Identifier(vec!()))?;
        let _ = self.consume(Token::LeftParentheses)?;
        let mut arguments = vec!();
        while let Some(argument) = self.parse_callable_argument() {
            arguments.push(argument);
        }
        let _ = self.consume(Token::RightParentheses)?;
        let kind = match self.consume(Token::As) {
            Some(_) => self.consume(Token::Identifier(vec!())),
            None => None,
        };
        let body = self.parse_callable_body();
        let _ = self.consume(Token::End)?;
        let _ = self.consume(Token::Function)?;
        return Some(Statement::Function(FunctionStatement { scope: scope, name: name, arguments: arguments, kind: kind, body: body, }));
    }

    fn parse_callable_argument(&mut self) -> Option<Statement> {
        let possible_modifiers = [Token::ByVal, Token::ByRef,];
        let modifier = std::array::IntoIter::new(possible_modifiers).find_map(|t| self.consume(t));
        let name = self.consume(Token::Identifier(vec!()))?;
        let _ = self.consume(Token::As)?;
        let kind = self.consume(Token::Identifier(vec!()))?;
        return Some(Statement::Argument(ArgumentStatement { modifier: modifier, name: name, kind: kind, }));
    }

    fn parse_callable_body(&mut self) -> Vec<Statement> {
        let parsers = [
            Parser::parse_variable,
            Parser::parse_constant,
            Parser::parse_assignment,
            Parser::parse_exit,
            Parser::parse_return,
            Parser::parse_attribute,
        ];
        let mut statements = vec!();
        'parse_next_statement: while self.tokens_position < self.tokens.len() {
            for parser in &parsers {
                let position_before_parsing = self.tokens_position;
                if let Some(statement) = parser(self) {
                    statements.push(statement);
                    continue 'parse_next_statement;
                } else {
                    self.tokens_position = position_before_parsing;
                }
            }
            break;
        }
        return statements;
    }

    fn parse_assignment(&mut self) -> Option<Statement> {
        let left = self.consume(Token::Identifier(vec!()))?;
        let _ = self.consume(Token::Assignment)?;
        let possible_values = [Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!())];
        let right = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?;
        return Some(Statement::Assignment(AssignmentStatement { left: left, right: Box::new(right), }));
    }

    fn parse_exit(&mut self) -> Option<Statement> {
        let _ = self.consume(Token::Exit)?;
        let possible_blocks = [Token::Sub, Token::Function,];
        let block = std::array::IntoIter::new(possible_blocks).find_map(|t| self.consume(t))?;
        return Some(Statement::Exit(ExitStatement { block: block, }));
    }

    fn parse_return(&mut self) -> Option<Statement> {
        let _ = self.consume(Token::Return)?;
        let possible_values = [Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!())];
        let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t));
        return Some(Statement::Return(ReturnStatement { value: value, }));
    }

    fn parse_attribute(&mut self) -> Option<Statement> {
        let _ = self.consume(Token::Attribute)?;
        let name = self.consume(Token::Identifier(vec!()))?;
        let _ = self.consume(Token::Assignment)?;
        let possible_values = [Token::Identifier(vec!()), Token::Number(vec!()), Token::String(vec!())];
        let value = std::array::IntoIter::new(possible_values).find_map(|t| self.consume(t))?;
        return Some(Statement::Attribute(AttributeStatement { name: name, value: value, }));
    }

    fn parse_option(&mut self) -> Option<Statement> {
        let _ = self.consume(Token::Option)?;
        let possible_configurations = [Token::Explicit, Token::Base, Token::Compare, Token::Private,];
        let configuration = std::array::IntoIter::new(possible_configurations).find_map(|t| self.consume(t))?;
        let value = match configuration {
            Token::Explicit => None,
            Token::Base => Some(self.consume(Token::Number(vec!()))?),
            Token::Compare => Some(self.consume(Token::Identifier(vec!()))?),
            Token::Private => Some(self.consume(Token::Module)?),
            _ => unreachable!(),
        };
        return Some(Statement::Option(OptionStatement { configuration: configuration, value: value, }));
    }
}

pub fn parse(tokens: &Vec<Token>) -> Vec<Statement> {
    let mut parser = Parser::new(tokens);
    return parser.parse();
}
e);
        }
        let _ = self.consume(Token::End)?;
        let _ = self.consume(Token::Enum)?;
        return Some(Statement::Enum(EnumStatement { scope: scope, name: name, attributes: attributes, }));
    }
function_block-function_prefixed
[ { "content": "// TODO: Handle multiline comments (through the underscore character).\n\n// Or is it a parser matter?\n\nfn lex_comment(characters: &Vec<u8>, position: &mut usize, _tokens: &mut Vec<Token>) -> bool {\n\n let mut character = characters[*position] as char;\n\n\n\n if character != '\\'' { return false; }\n\n\n\n // Count the first character.\n\n let mut length = 1usize;\n\n\n\n while (*position + length) < characters.len() {\n\n // Peek the next character.\n\n character = characters[*position + length] as char;\n\n\n\n // Analyze the next character.\n\n if character == '\\n' { break; }\n\n\n\n // Count the previous character.\n\n length += 1;\n\n }\n\n\n\n // NOTE: Comment tokens are not saved because handling them would increase\n\n // the complexity of the parser which isn't my goal as of now.\n\n *position += length;\n\n\n\n return true;\n\n}\n\n\n", "file_path": "source-code/lexer.rs", "rank": 1, "score": 159914.89016808243 }, { "content": "fn lex_number(characters: &Vec<u8>, position: &mut usize, tokens: &mut Vec<Token>) -> bool {\n\n let mut character = characters[*position] as char;\n\n\n\n // The first character must be a number.\n\n if !(character.is_numeric()) { return false; }\n\n\n\n // Count the first character.\n\n let mut length = 1usize;\n\n\n\n while (*position + length) < characters.len() {\n\n // Peek the next character.\n\n character = characters[*position + length] as char;\n\n\n\n // Analyze the next character.\n\n if !(character.is_numeric()) { break; }\n\n\n\n // Count the previous character.\n\n length += 1;\n\n }\n\n\n", "file_path": "source-code/lexer.rs", "rank": 2, "score": 159912.18545734166 }, { "content": "fn lex_symbol(characters: &Vec<u8>, position: &mut usize, tokens: &mut Vec<Token>) -> bool {\n\n let character = characters[*position] as char;\n\n let next_character = *characters.get(*position + 1).unwrap_or(&b'\\0') as char;\n\n\n\n let token = match (character, next_character) {\n\n ('(', _) => Some(Token::LeftParentheses),\n\n (')', _) => Some(Token::RightParentheses),\n\n\n\n ('[', _) => Some(Token::LeftBracket),\n\n (']', _) => Some(Token::RightBracket),\n\n\n\n ('+', _) => Some(Token::Plus),\n\n ('-', _) => Some(Token::Minus),\n\n ('*', _) => Some(Token::Times),\n\n ('/', _) => Some(Token::Divide),\n\n\n\n ('<', '=') => Some(Token::LessOrEqual),\n\n ('<', _) => Some(Token::Less),\n\n\n\n ('>', '=') => Some(Token::GreaterOrEqual),\n", "file_path": "source-code/lexer.rs", "rank": 3, "score": 159912.18545734166 }, { "content": "fn lex_string(characters: &Vec<u8>, position: &mut usize, tokens: &mut Vec<Token>) -> bool {\n\n let mut character = characters[*position] as char;\n\n\n\n // The first character must be a \" (quote).\n\n if !(character == '\"') { return false; }\n\n\n\n // Count the first character.\n\n let mut length = 1usize;\n\n\n\n while (*position + length) < characters.len() {\n\n // Peek the next character.\n\n character = characters[*position + length] as char;\n\n\n\n // Analyze the next character.\n\n if character == '\"' {\n\n // Count the closing quote.\n\n length += 1;\n\n break;\n\n }\n\n\n", "file_path": "source-code/lexer.rs", "rank": 4, "score": 159912.18545734166 }, { "content": "fn lex_identifier(characters: &Vec<u8>, position: &mut usize, tokens: &mut Vec<Token>) -> bool {\n\n let mut character = characters[*position] as char;\n\n\n\n // The first character must be either a letter or a underscore.\n\n if !(character.is_alphabetic() || character == '_') { return false; }\n\n\n\n // Count the first 
character.\n\n let mut length = 1usize;\n\n\n\n while (*position + length) < characters.len() {\n\n // Peek the next character.\n\n character = characters[*position + length] as char;\n\n\n\n // Analyze the next character.\n\n if !(character.is_alphanumeric() || character == '_') { break; }\n\n\n\n // Count the previous character.\n\n length += 1;\n\n }\n\n\n", "file_path": "source-code/lexer.rs", "rank": 5, "score": 159912.18545734166 }, { "content": "fn lex_whitespace(characters: &Vec<u8>, position: &mut usize, tokens: &mut Vec<Token>) -> bool {\n\n let mut character = characters[*position] as char;\n\n\n\n if !(character.is_whitespace()) { return false; }\n\n\n\n // Count the first character.\n\n let mut length = 1usize;\n\n\n\n while (*position + length) < characters.len() {\n\n // Peek the next character.\n\n character = characters[*position + length] as char;\n\n\n\n // Analyze the next character.\n\n if !(character.is_whitespace()) { break; }\n\n\n\n // Count the previous character.\n\n length += 1;\n\n }\n\n\n\n // NOTE: Whitespace tokens are not saved.\n\n *position += length;\n\n\n\n return true;\n\n}\n\n\n", "file_path": "source-code/lexer.rs", "rank": 6, "score": 159912.18545734166 }, { "content": "pub fn transform(statements: Vec<Statement>) -> Vec<Statement> {\n\n let mut transformer = Transformer::new(statements);\n\n\n\n return transformer.transform();\n\n}\n", "file_path": "source-code/transformer.rs", "rank": 7, "score": 143151.55465107044 }, { "content": "pub fn generate(statements: Vec<Statement>) -> String {\n\n let mut generator = Generator::new(statements);\n\n\n\n return generator.generate();\n\n}\n", "file_path": "source-code/generator.rs", "rank": 8, "score": 128895.51735236926 }, { "content": "// TODO: Replace return type with a `Result`.\n\npub fn lex(characters: &Vec<u8>) -> Vec<Token> {\n\n let mut tokens = vec!();\n\n let mut position = 0;\n\n\n\n while position < characters.len() {\n\n // NOTE: Order here is useful to prioritize, we want the lexeme to be as big as possible.\n\n if lex_comment(&characters, &mut position, &mut tokens) { continue; }\n\n if lex_whitespace(&characters, &mut position, &mut tokens) { continue; }\n\n if lex_identifier(&characters, &mut position, &mut tokens) { continue; }\n\n if lex_number(&characters, &mut position, &mut tokens) { continue; }\n\n if lex_string(&characters, &mut position, &mut tokens) { continue; }\n\n if lex_symbol(&characters, &mut position, &mut tokens) { continue; }\n\n\n\n // TODO: Return an error containing the lexeme.\n\n break;\n\n }\n\n\n\n return tokens;\n\n}\n\n\n", "file_path": "source-code/lexer.rs", "rank": 9, "score": 124641.25062157452 }, { "content": "struct Block {\n\n name: Token,\n\n}\n\n\n", "file_path": "source-code/transformer.rs", "rank": 11, "score": 41027.56083713818 }, { "content": "struct Generator {\n\n statements: Viewer<Statement>,\n\n}\n\n\n\nimpl Generator {\n\n pub fn new(statements: Vec<Statement>) -> Self {\n\n return Self {\n\n statements: Viewer::new(statements),\n\n };\n\n }\n\n\n\n pub fn generate(&mut self) -> String {\n\n let mut generated_code = String::new();\n\n\n\n while let Some(statement) = self.statements.next() {\n\n let statement_code = match statement {\n\n Statement::Constant(data) => self.generate_constant(&data),\n\n Statement::Subroutine(data) => self.generate_subroutine(&data),\n\n Statement::Function(data) => self.generate_function(&data),\n\n Statement::Type(data) => self.generate_type(&data),\n", "file_path": "source-code/generator.rs", "rank": 12, "score": 
41027.56083713818 }, { "content": "struct Transformer {\n\n statements: Viewer<Statement>,\n\n blocks: Vec<Block>,\n\n}\n\n\n\nimpl Transformer {\n\n pub fn new(statements: Vec<Statement>) -> Self {\n\n return Self {\n\n statements: Viewer::new(statements),\n\n blocks: vec!(),\n\n };\n\n }\n\n\n\n pub fn transform(&mut self) -> Vec<Statement> {\n\n let mut transformed_statements = vec!();\n\n\n\n while let Some(statement) = self.statements.next() {\n\n let transformed_statement = match statement {\n\n Statement::Function(data) => self.transform_function(data),\n\n\n", "file_path": "source-code/transformer.rs", "rank": 13, "score": 41027.56083713818 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct EnumStatement {\n\n pub scope: Option<Token>,\n\n pub name: Token,\n\n pub attributes: Vec<Statement>,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct EnumAttributeStatement {\n\n pub name: Token,\n\n pub value: Option<Token>,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct VariableStatement {\n\n pub scope: Token,\n\n pub name: Token,\n\n pub kind: Token,\n", "file_path": "source-code/statement.rs", "rank": 14, "score": 24695.03612757446 }, { "content": " Subroutine(SubroutineStatement),\n\n Function(FunctionStatement),\n\n Exit(ExitStatement),\n\n Argument(ArgumentStatement),\n\n Assignment(AssignmentStatement),\n\n Return(ReturnStatement),\n\n Option(OptionStatement),\n\n Attribute(AttributeStatement),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct TypeStatement {\n\n pub name: Token,\n\n pub attributes: Vec<Statement>,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct TypeAttributeStatement {\n\n pub name: Token,\n\n pub kind: Token, // TODO: Should this be Option<Token>?\n", "file_path": "source-code/statement.rs", "rank": 15, "score": 24694.62364807104 }, { "content": "pub struct FunctionStatement {\n\n pub scope: Token,\n\n pub name: Token,\n\n pub arguments: Vec<Statement>,\n\n pub kind: Option<Token>,\n\n pub body: Vec<Statement>,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ExitStatement {\n\n pub block: Token,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ArgumentStatement {\n\n pub modifier: Option<Token>,\n\n pub name: Token,\n\n pub kind: Token,\n\n}\n\n\n", "file_path": "source-code/statement.rs", "rank": 16, "score": 24693.682358592963 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ConstantStatement {\n\n pub scope: Token,\n\n pub name: Token,\n\n pub kind: Option<Token>,\n\n pub length: Option<Token>,\n\n pub value: Token,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct SubroutineStatement {\n\n pub scope: Token,\n\n pub name: Token,\n\n pub arguments: Vec<Statement>,\n\n pub body: Vec<Statement>,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n", "file_path": "source-code/statement.rs", "rank": 17, "score": 24693.308865559655 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub struct AssignmentStatement {\n\n pub left: Token,\n\n pub right: Box<Token>, // TODO: This should be `Box<Statement>`.\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct ReturnStatement {\n\n pub value: Option<Token>, // TODO: This should be `Option<Box<Statement>>`.\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct OptionStatement {\n\n pub configuration: Token,\n\n pub value: Option<Token>,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct AttributeStatement {\n\n // TODO: This should be a `Box<Statement>` since `name` 
could be a field\n\n // (i.e. `something.like.this`).\n\n pub name: Token,\n\n pub value: Token,\n\n}\n", "file_path": "source-code/statement.rs", "rank": 18, "score": 24689.923013538308 }, { "content": "use crate::token::Token;\n\n\n\n// TODO: `Variable` and `Argument` variant's kind should be `Option<Token>`\n\n// because VB6 treats untyped variables as if they were declared with the\n\n// `Variant` data type. So should be `scope`s too.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Statement {\n\n // TODO: Needed?\n\n Program,\n\n\n\n // NOTE: The statement's information is often encapsulated\n\n // into structs because destructuring the variant while\n\n // pattern matching becomes really cumbersome when there are\n\n // multiple attributes to destructure.\n\n Type(TypeStatement),\n\n TypeAttribute(TypeAttributeStatement),\n\n Enum(EnumStatement),\n\n EnumAttribute(EnumAttributeStatement),\n\n Variable(VariableStatement),\n\n Constant(ConstantStatement),\n", "file_path": "source-code/statement.rs", "rank": 19, "score": 24689.53587979809 }, { "content": " Token::Compare => b\"compare\".to_vec(),\n\n Token::Module => b\"module\".to_vec(),\n\n\n\n Token::Attribute => b\"attribute\".to_vec(),\n\n\n\n // TODO: Temporary polyfill.\n\n _ => b\"__POLYFILL__\".to_vec(),\n\n };\n\n }\n\n}\n", "file_path": "source-code/token.rs", "rank": 20, "score": 24231.911515472704 }, { "content": "// TODO: Review.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Token {\n\n // TODO: Remove or use another enumeration, such as:\n\n //\n\n // Keyword(KeywordToken)\n\n // KeywordToken { Public, Private, Static, Dim, ... }\n\n Keyword(Vec<u8>),\n\n Identifier(Vec<u8>),\n\n\n\n Number(Vec<u8>),\n\n String(Vec<u8>),\n\n\n\n Public,\n\n Private,\n\n Static,\n\n Dim,\n\n\n\n ByVal,\n\n ByRef,\n", "file_path": "source-code/token.rs", "rank": 21, "score": 24231.25403077777 }, { "content": " Token::Exit => b\"exit\".to_vec(),\n\n Token::Return => b\"return\".to_vec(),\n\n\n\n Token::LeftParentheses => b\"(\".to_vec(),\n\n Token::RightParentheses => b\")\".to_vec(),\n\n\n\n Token::Plus => b\"+\".to_vec(),\n\n Token::Minus => b\"-\".to_vec(),\n\n\n\n Token::Less => b\"<\".to_vec(),\n\n Token::LessOrEqual => b\"<=\".to_vec(),\n\n\n\n Token::Greater => b\">\".to_vec(),\n\n Token::GreaterOrEqual => b\">=\".to_vec(),\n\n\n\n Token::Dot => b\".\".to_vec(),\n\n\n\n Token::Option => b\"option\".to_vec(),\n\n Token::Explicit => b\"explicit\".to_vec(),\n\n Token::Base => b\"base\".to_vec(),\n", "file_path": "source-code/token.rs", "rank": 22, "score": 24231.012406200633 }, { "content": " pub fn get_lexeme(&self) -> Vec<u8> {\n\n return match &self {\n\n Token::Keyword(lexeme) | Token::Identifier(lexeme) => lexeme.clone(),\n\n Token::Number(lexeme) | Token::String(lexeme) => lexeme.clone(),\n\n\n\n Token::Public => b\"public\".to_vec(),\n\n Token::Private => b\"private\".to_vec(),\n\n Token::Static => b\"static\".to_vec(),\n\n Token::Dim => b\"dim\".to_vec(),\n\n\n\n Token::ByVal => b\"byval\".to_vec(),\n\n Token::ByRef => b\"byref\".to_vec(),\n\n\n\n Token::As => b\"as\".to_vec(),\n\n Token::If => b\"if\".to_vec(),\n\n Token::Sub => b\"sub\".to_vec(),\n\n Token::Function => b\"function\".to_vec(),\n\n Token::Type => b\"type\".to_vec(),\n\n Token::End => b\"end\".to_vec(),\n\n\n", "file_path": "source-code/token.rs", "rank": 23, "score": 24230.72109397148 }, { "content": " Greater,\n\n GreaterOrEqual,\n\n\n\n Assignment,\n\n Dot,\n\n\n\n Option,\n\n Explicit,\n\n Base,\n\n Compare,\n\n Module,\n\n\n\n // Metadata.\n\n 
Attribute,\n\n}\n\n\n\n\n\n// TODO: Maybe it would be better to store the lexeme no matter what, and just return\n\n// a reference to it (for the sake of efficiency).\n\nimpl Token {\n", "file_path": "source-code/token.rs", "rank": 24, "score": 24226.824691369475 }, { "content": "\n\n As,\n\n If,\n\n Sub,\n\n Function,\n\n Type,\n\n Enum,\n\n Const,\n\n End,\n\n\n\n Exit,\n\n Return,\n\n\n\n Do,\n\n Loop,\n\n\n\n While,\n\n Wend,\n\n\n\n For,\n", "file_path": "source-code/token.rs", "rank": 25, "score": 24225.808039043193 }, { "content": " Next,\n\n\n\n And,\n\n Or,\n\n Xor,\n\n\n\n LeftParentheses,\n\n RightParentheses,\n\n\n\n LeftBracket,\n\n RightBracket,\n\n\n\n Plus,\n\n Minus,\n\n Times,\n\n Divide,\n\n\n\n Less,\n\n LessOrEqual,\n\n\n", "file_path": "source-code/token.rs", "rank": 26, "score": 24223.279311333845 }, { "content": " }\n\n\n\n return transformed_statements;\n\n }\n\n\n\n fn transform_function_return(&mut self, data: ReturnStatement) -> Vec<Statement> {\n\n let mut transformed_statements = vec!();\n\n\n\n transformed_statements.push(Statement::Assignment(AssignmentStatement {\n\n // TODO: Make sure `self.blocks` only contains methods/functions/subroutines\n\n // names, and not loops or other kind of blocks.\n\n left: self.blocks.last().unwrap().name.clone(),\n\n\n\n // NOTE: I unwrap thee because the parser should have already\n\n // checked if a function's return is complete (empty returns\n\n // are allowed only in subroutines).\n\n //\n\n // Failing to do so means the parser is broken, and panicking\n\n // is a good way (TODO: or not?) to signal it.\n\n right: Box::new(data.value.unwrap()),\n", "file_path": "source-code/transformer.rs", "rank": 49, "score": 12.259838431074911 }, { "content": "mod lexer;\n\nmod parser;\n\nmod transformer;\n\nmod generator;\n\nmod viewer;\n\nmod token;\n\nmod statement;\n\n\n\npub use lexer::lex;\n\npub use parser::parse;\n\npub use transformer::transform;\n\npub use generator::generate;\n", "file_path": "source-code/library.rs", "rank": 50, "score": 11.687257218691446 }, { "content": "\n\n self.blocks.pop();\n\n\n\n return Statement::Function(transformed_data);\n\n }\n\n\n\n fn transform_function_body(&mut self, body: Vec<Statement>) -> Vec<Statement> {\n\n let mut transformed_statements = vec!();\n\n let mut viewer = Viewer::new(body);\n\n\n\n while let Some(statement) = viewer.next() {\n\n let mut transformed_statement = match statement {\n\n Statement::Return(data) => self.transform_function_return(data),\n\n\n\n // No transformation aplicable.\n\n _ => vec!(statement),\n\n };\n\n\n\n // TODO: Use `Vec::push`?\n\n transformed_statements.append(&mut transformed_statement);\n", "file_path": "source-code/transformer.rs", "rank": 51, "score": 11.363599654370798 }, { "content": " // because it makes easier to use `while let` statements which\n\n // lock the reference and thus prevent access to other elements\n\n // of the same object.\n\n Some(value) => Some((*value).clone()),\n\n None => None,\n\n };\n\n }\n\n\n\n pub fn mark(&mut self) {\n\n self.mark = self.position.clone();\n\n }\n\n\n\n pub fn rewind(&mut self) {\n\n self.position = self.mark.clone();\n\n }\n\n}\n", "file_path": "source-code/viewer.rs", "rank": 52, "score": 10.960195497368918 }, { "content": " // No transformation aplicable.\n\n _ => statement,\n\n };\n\n\n\n transformed_statements.push(transformed_statement);\n\n }\n\n\n\n return transformed_statements;\n\n }\n\n\n\n fn transform_function(&mut self, data: FunctionStatement) -> Statement {\n\n 
self.blocks.push(Block { name: data.name.clone() });\n\n\n\n let transformed_data = FunctionStatement {\n\n scope: data.scope,\n\n name: data.name,\n\n arguments: data.arguments,\n\n kind: data.kind,\n\n body: self.transform_function_body(data.body),\n\n };\n", "file_path": "source-code/transformer.rs", "rank": 53, "score": 10.683179701561304 }, { "content": "pub struct Viewer<T: Clone> {\n\n data: Vec<T>,\n\n position: usize,\n\n mark: usize,\n\n}\n\n\n\nimpl<T: Clone> Viewer<T> {\n\n pub fn new(data: Vec<T>) -> Self {\n\n return Self {\n\n data: data,\n\n position: 0,\n\n mark: 0,\n\n };\n\n }\n\n\n\n pub fn next(&mut self) -> Option<T> {\n\n self.position += 1;\n\n\n\n return match self.data.get(self.position - 1) {\n\n // NOTE: I clone the data instead of returning a reference to it\n", "file_path": "source-code/viewer.rs", "rank": 54, "score": 10.630449778612073 }, { "content": " }\n\n\n\n generated_code.push_str(\"end type\\n\");\n\n\n\n return generated_code;\n\n }\n\n\n\n fn generate_enum(&mut self, data: &EnumStatement) -> String {\n\n let mut generated_code = String::new();\n\n\n\n if let Some(scope) = &data.scope {\n\n generated_code.push_str(&String::from_utf8_lossy(&scope.get_lexeme()));\n\n generated_code.push(' ');\n\n }\n\n\n\n generated_code.push_str(\"enum \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n generated_code.push('\\n');\n\n\n\n for statement in &data.attributes {\n", "file_path": "source-code/generator.rs", "rank": 55, "score": 8.221661170861989 }, { "content": "use crate::token::Token;\n\nuse crate::statement::*;\n\nuse crate::viewer::Viewer;\n\n\n", "file_path": "source-code/transformer.rs", "rank": 56, "score": 7.857056770515699 }, { "content": " generated_signature.push_str(\")\");\n\n\n\n // TODO: This seems too imperative.\n\n match &data.kind {\n\n Some(kind) => {\n\n generated_signature.push_str(\" as \");\n\n generated_signature.push_str(&String::from_utf8_lossy(&kind.get_lexeme()));\n\n },\n\n\n\n None => (),\n\n }\n\n\n\n generated_signature.push('\\n');\n\n\n\n return generated_signature;\n\n }\n\n\n\n fn generate_function_body(&mut self, data: &FunctionStatement) -> String {\n\n let mut generated_body = String::new();\n\n\n", "file_path": "source-code/generator.rs", "rank": 57, "score": 7.1530040284201455 }, { "content": "\n\n generated_code.push('\\n');\n\n\n\n return generated_code;\n\n }\n\n\n\n fn generate_attribute(&mut self, data: &AttributeStatement) -> String {\n\n let mut generated_code = String::new();\n\n\n\n generated_code.push_str(\"Attribute \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n generated_code.push_str(\" = \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.value.get_lexeme()));\n\n generated_code.push('\\n');\n\n\n\n return generated_code;\n\n }\n\n}\n\n\n", "file_path": "source-code/generator.rs", "rank": 58, "score": 6.477578480493587 }, { "content": "\n\n generated_code.push_str(\"type \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n generated_code.push('\\n');\n\n\n\n for statement in &data.attributes {\n\n match statement {\n\n // TODO: This seems too imperative.\n\n Statement::TypeAttribute(data) => {\n\n generated_code.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n generated_code.push_str(\" as \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.kind.get_lexeme()));\n\n generated_code.push('\\n');\n\n },\n\n\n\n // TODO: Is it correct to `panic`?\n\n 
//\n\n // TODO: Add a message?\n\n _ => unreachable!(),\n\n }\n", "file_path": "source-code/generator.rs", "rank": 59, "score": 6.361893406531724 }, { "content": " for statement in &data.body {\n\n let generated_statement = match statement {\n\n Statement::Assignment(data) => self.generate_assignment(data),\n\n Statement::Constant(data) => self.generate_constant(data),\n\n Statement::Variable(data) => self.generate_variable(data),\n\n Statement::Exit(data) => self.generate_exit(data),\n\n Statement::Attribute(data) => self.generate_attribute(&data),\n\n\n\n // TODO: Handle all cases.\n\n _ => String::from(\"__POLYFILL__\\n\"),\n\n };\n\n\n\n generated_body.push_str(&generated_statement);\n\n }\n\n\n\n return generated_body;\n\n }\n\n\n\n fn generate_type(&mut self, data: &TypeStatement) -> String {\n\n let mut generated_code = String::new();\n", "file_path": "source-code/generator.rs", "rank": 60, "score": 6.12916327733311 }, { "content": " fn generate_subroutine_body(&mut self, data: &SubroutineStatement) -> String {\n\n let mut generated_body = String::new();\n\n\n\n for statement in &data.body {\n\n let generated_statement = match statement {\n\n Statement::Assignment(data) => self.generate_assignment(data),\n\n Statement::Constant(data) => self.generate_constant(data),\n\n Statement::Variable(data) => self.generate_variable(data),\n\n Statement::Exit(data) => self.generate_exit(data),\n\n Statement::Attribute(data) => self.generate_attribute(&data),\n\n\n\n // TODO: Handle all cases.\n\n _ => String::from(\"__POLYFILL__\\n\"),\n\n };\n\n\n\n generated_body.push_str(&generated_statement);\n\n }\n\n\n\n return generated_body;\n\n }\n", "file_path": "source-code/generator.rs", "rank": 61, "score": 6.095519150260046 }, { "content": "\n\n \"attribute\" => Token::Attribute,\n\n\n\n _ => Token::Identifier(lexeme),\n\n };\n\n\n\n tokens.push(token);\n\n *position += length;\n\n\n\n return true;\n\n}\n\n\n", "file_path": "source-code/lexer.rs", "rank": 62, "score": 5.8931926505910415 }, { "content": "\n\n fn generate_function(&mut self, data: &FunctionStatement) -> String {\n\n let generated_signature = self.generate_function_signature(&data);\n\n let generated_body = self.generate_function_body(&data);\n\n let generated_end = \"end function\\n\";\n\n\n\n return generated_signature + &generated_body + generated_end;\n\n }\n\n\n\n fn generate_function_signature(&mut self, data: &FunctionStatement) -> String {\n\n let mut generated_signature = String::new();\n\n\n\n generated_signature.push_str(&String::from_utf8_lossy(&data.scope.get_lexeme()));\n\n generated_signature.push_str(\" function \");\n\n generated_signature.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n\n\n generated_signature.push_str(\"(\");\n\n\n\n for statement in &data.arguments {\n\n let argument_code = match statement {\n", "file_path": "source-code/generator.rs", "rank": 63, "score": 5.744059682740483 }, { "content": " return generated_signature + &generated_body + generated_end;\n\n }\n\n\n\n fn generate_subroutine_signature(&mut self, data: &SubroutineStatement) -> String {\n\n let mut generated_signature = String::new();\n\n\n\n generated_signature.push_str(&String::from_utf8_lossy(&data.scope.get_lexeme()));\n\n generated_signature.push_str(\" sub \");\n\n generated_signature.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n\n\n generated_signature.push_str(\"(\");\n\n\n\n for statement in &data.arguments {\n\n let argument_code = match statement {\n\n Statement::Argument(argument) => 
self.generate_argument(argument),\n\n\n\n // TODO: Is it correct to `panic`?\n\n //\n\n // TODO: Add a message?\n\n _ => unreachable!(),\n", "file_path": "source-code/generator.rs", "rank": 64, "score": 5.7185361164412605 }, { "content": " ('>', _) => Some(Token::Greater),\n\n\n\n ('=', _) => Some(Token::Assignment),\n\n ('.', _) => Some(Token::Dot),\n\n\n\n (_, _) => None,\n\n };\n\n\n\n let token = match token {\n\n Some(value) => value,\n\n None => return false,\n\n };\n\n\n\n let length = match token {\n\n Token::LessOrEqual | Token::GreaterOrEqual => 2,\n\n _ => 1,\n\n };\n\n\n\n tokens.push(token);\n\n *position += length;\n\n\n\n return true;\n\n}\n", "file_path": "source-code/lexer.rs", "rank": 65, "score": 5.6164576266834185 }, { "content": " generated_code.push_str(&String::from_utf8_lossy(&data.value.get_lexeme()));\n\n\n\n generated_code.push('\\n');\n\n\n\n return generated_code;\n\n }\n\n\n\n fn generate_variable(&mut self, data: &VariableStatement) -> String {\n\n let mut generated_code = String::new();\n\n\n\n generated_code.push_str(&String::from_utf8_lossy(&data.scope.get_lexeme()));\n\n generated_code.push_str(\" \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n generated_code.push_str(\" as \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.kind.get_lexeme()));\n\n generated_code.push('\\n');\n\n\n\n return generated_code;\n\n }\n\n\n", "file_path": "source-code/generator.rs", "rank": 66, "score": 5.571221908259507 }, { "content": "use crate::token::Token;\n\n\n\n// TODO: Replace return type with a `Result`.\n", "file_path": "source-code/lexer.rs", "rank": 67, "score": 5.386789024138416 }, { "content": " Statement::Enum(data) => self.generate_enum(&data),\n\n Statement::Variable(data) => self.generate_variable(&data),\n\n Statement::Option(data) => self.generate_option(&data),\n\n Statement::Attribute(data) => self.generate_attribute(&data),\n\n\n\n // TODO: Handle all cases.\n\n _ => String::from(\"__POLYFILL__\\n\"),\n\n };\n\n\n\n generated_code.push_str(&statement_code);\n\n }\n\n\n\n return generated_code;\n\n }\n\n\n\n fn generate_subroutine(&mut self, data: &SubroutineStatement) -> String {\n\n let generated_signature = self.generate_subroutine_signature(&data);\n\n let generated_body = self.generate_subroutine_body(&data);\n\n let generated_end = \"end sub\\n\";\n\n\n", "file_path": "source-code/generator.rs", "rank": 68, "score": 5.341045732620687 }, { "content": "use crate::statement::*;\n\nuse crate::viewer::Viewer;\n\n\n", "file_path": "source-code/generator.rs", "rank": 69, "score": 5.307368134760829 }, { "content": "\n\n fn generate_assignment(&mut self, data: &AssignmentStatement) -> String {\n\n let mut generated_code = String::new();\n\n\n\n generated_code.push_str(&String::from_utf8_lossy(&data.left.get_lexeme()));\n\n generated_code.push_str(\" = \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.right.get_lexeme()));\n\n generated_code.push('\\n');\n\n\n\n return generated_code;\n\n }\n\n\n\n fn generate_constant(&mut self, data: &ConstantStatement) -> String {\n\n let mut generated_code = String::new();\n\n\n\n generated_code.push_str(&String::from_utf8_lossy(&data.scope.get_lexeme()));\n\n generated_code.push_str(\" const \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n\n\n // TODO: This seems too imperative.\n", "file_path": "source-code/generator.rs", "rank": 70, "score": 5.282310969329709 }, { "content": " }\n\n\n\n let lexeme = 
characters[*position..*position + length].to_vec();\n\n let token = Token::Number(lexeme);\n\n\n\n tokens.push(token);\n\n *position += length;\n\n\n\n return true;\n\n}\n\n\n", "file_path": "source-code/lexer.rs", "rank": 71, "score": 4.9229878573179935 }, { "content": " \"return\" => Token::Return,\n\n\n\n \"do\" => Token::Do,\n\n \"loop\" => Token::Loop,\n\n\n\n \"while\" => Token::While,\n\n \"wend\" => Token::Wend,\n\n\n\n \"for\" => Token::For,\n\n \"next\" => Token::Next,\n\n\n\n \"and\" => Token::And,\n\n \"or\" => Token::Or,\n\n \"xor\" => Token::Xor,\n\n\n\n \"option\" => Token::Option,\n\n \"explicit\" => Token::Explicit,\n\n \"base\" => Token::Base,\n\n \"compare\" => Token::Compare,\n\n \"module\" => Token::Module,\n", "file_path": "source-code/lexer.rs", "rank": 72, "score": 4.893097161496904 }, { "content": " generated_code.push_str(\"end enum\\n\");\n\n\n\n return generated_code;\n\n }\n\n\n\n fn generate_argument(&mut self, data: &ArgumentStatement) -> String {\n\n let mut generated_code = String::new();\n\n\n\n if let Some(modifier) = &data.modifier {\n\n generated_code.push_str(&String::from_utf8_lossy(&modifier.get_lexeme()));\n\n generated_code.push(' ');\n\n }\n\n\n\n generated_code.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n generated_code.push_str(\" as \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.kind.get_lexeme()));\n\n generated_code.push_str(\", \");\n\n\n\n return generated_code;\n\n }\n", "file_path": "source-code/generator.rs", "rank": 73, "score": 4.8063341135631426 }, { "content": " }));\n\n\n\n transformed_statements.push(Statement::Exit(ExitStatement {\n\n block: Token::Function,\n\n }));\n\n\n\n return transformed_statements;\n\n }\n\n}\n\n\n", "file_path": "source-code/transformer.rs", "rank": 74, "score": 4.7808208624389446 }, { "content": " match &data.kind {\n\n Some(kind) => {\n\n generated_code.push_str(\" as \");\n\n generated_code.push_str(&String::from_utf8_lossy(&kind.get_lexeme()));\n\n },\n\n\n\n None => (),\n\n }\n\n\n\n // TODO: This seems too imperative.\n\n match &data.length {\n\n Some(length) => {\n\n generated_code.push_str(\" * \");\n\n generated_code.push_str(&String::from_utf8_lossy(&length.get_lexeme()));\n\n },\n\n\n\n None => (),\n\n }\n\n\n\n generated_code.push_str(\" = \");\n", "file_path": "source-code/generator.rs", "rank": 75, "score": 4.679231066003503 }, { "content": " match statement {\n\n // TODO: This seems too imperative.\n\n Statement::EnumAttribute(data) => {\n\n generated_code.push_str(&String::from_utf8_lossy(&data.name.get_lexeme()));\n\n\n\n if let Some(value) = &data.value {\n\n generated_code.push_str(\" = \");\n\n generated_code.push_str(&String::from_utf8_lossy(&value.get_lexeme()));\n\n }\n\n\n\n generated_code.push('\\n');\n\n },\n\n\n\n // TODO: Is it correct to `panic`?\n\n //\n\n // TODO: Add a message?\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n", "file_path": "source-code/generator.rs", "rank": 76, "score": 4.384774085317655 }, { "content": " let lexeme = characters[*position..*position + length].to_vec();\n\n let token = match &std::str::from_utf8(&lexeme).unwrap().to_lowercase() as &str {\n\n \"public\" => Token::Public,\n\n \"private\" => Token::Private,\n\n \"static\" => Token::Static,\n\n \"dim\" => Token::Dim,\n\n\n\n \"byval\" => Token::ByVal,\n\n \"byref\" => Token::ByRef,\n\n\n\n \"as\" => Token::As,\n\n \"if\" => Token::If,\n\n \"sub\" => Token::Sub,\n\n \"function\" => Token::Function,\n\n \"type\" => Token::Type,\n\n \"enum\" => Token::Enum,\n\n 
\"const\" => Token::Const,\n\n \"end\" => Token::End,\n\n\n\n \"exit\" => Token::Exit,\n", "file_path": "source-code/lexer.rs", "rank": 77, "score": 4.3576241325431555 }, { "content": " // Count the previous character.\n\n length += 1;\n\n }\n\n\n\n // If the last peeked character isn't a \" (quote), then we've reached the end of the\n\n // characters and the string wasn't closed properly.\n\n if !(character == '\"') { return false; }\n\n\n\n let lexeme = characters[*position..*position + length].to_vec();\n\n let token = Token::Identifier(lexeme);\n\n\n\n tokens.push(token);\n\n *position += length;\n\n\n\n return true;\n\n}\n\n\n", "file_path": "source-code/lexer.rs", "rank": 78, "score": 4.101325332332351 }, { "content": " fn generate_exit(&mut self, data: &ExitStatement) -> String {\n\n let mut generated_code = String::new();\n\n\n\n generated_code.push_str(\"exit \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.block.get_lexeme()));\n\n generated_code.push('\\n');\n\n\n\n return generated_code;\n\n }\n\n\n\n fn generate_option(&mut self, data: &OptionStatement) -> String {\n\n let mut generated_code = String::new();\n\n\n\n generated_code.push_str(\"option \");\n\n generated_code.push_str(&String::from_utf8_lossy(&data.configuration.get_lexeme()));\n\n\n\n if let Some(value) = &data.value {\n\n generated_code.push(' ');\n\n generated_code.push_str(&String::from_utf8_lossy(&value.get_lexeme()));\n\n }\n", "file_path": "source-code/generator.rs", "rank": 79, "score": 3.490956368700888 }, { "content": "MIT License\n\n\n\nCopyright (c) 2021 Adrián Bielsa\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "license.md", "rank": 80, "score": 2.49889921594782 }, { "content": " // TODO: This is a workaround to also lex numbers that have a decimal part.\n\n // We should add bounds checks here, and analyze if the construction of\n\n // decimal numbers should happen at the lexing stage or at the parsing\n\n // stage of the compiler.\n\n if characters[*position + length] as char == '.' 
{\n\n // TODO: Add bounds checks here!\n\n if (characters[*position + length + 1] as char).is_numeric() {\n\n length += 2;\n\n\n\n while (*position + length) < characters.len() {\n\n // Peek the next character.\n\n character = characters[*position + length] as char;\n\n\n\n // Analyze the next character.\n\n if !(character.is_numeric()) { break; }\n\n\n\n // Count the previous character.\n\n length += 1;\n\n }\n\n }\n", "file_path": "source-code/lexer.rs", "rank": 81, "score": 1.3535409410811665 }, { "content": " Statement::Argument(argument) => self.generate_argument(argument),\n\n\n\n // TODO: Is it correct to `panic`?\n\n //\n\n // TODO: Add a message?\n\n _ => unreachable!(),\n\n };\n\n\n\n generated_signature.push_str(&argument_code);\n\n }\n\n\n\n // Remove the last comma (and space) because it isn't followed by anything\n\n // (and is also a Visual Basic 6 syntax error).\n\n //\n\n // TODO: This feels hardcoded.\n\n if data.arguments.len() > 0 {\n\n let _ = generated_signature.pop(); // Space.\n\n let _ = generated_signature.pop(); // Comma.\n\n }\n\n\n", "file_path": "source-code/generator.rs", "rank": 82, "score": 1.1515869942670074 } ]
Rust
evolvim-lib/src/lib/neat/mod.rs
splintah/evolvim
13e95441e617f5931441995a3fd041447c40a77e
mod genome;
mod input;
mod output;
mod phenotype;

pub use genome::Genome;
pub use phenotype::NeuralNet;

#[derive(Debug)]
pub struct NeatBrain {
    genome: Genome,
    net: NeuralNet,
}

impl From<Genome> for NeatBrain {
    fn from(genome: Genome) -> Self {
        let net = (&genome).into();
        NeatBrain { genome, net }
    }
}

impl crate::brain::NeuralNet for NeatBrain {
    fn load_input(&mut self, env: &crate::brain::Environment) {
        self.net.load_input(env);
    }

    fn run(&mut self) {
        self.net.run_calculations();
    }

    fn use_output(&self, env: &mut crate::brain::EnvironmentMut<Self>, time_step: f64) {
        self.net.use_output(env, time_step);
    }
}

impl crate::brain::Intentions for NeatBrain {
    fn wants_birth(&self) -> f64 {
        unimplemented!()
    }

    fn wants_help_birth(&self) -> f64 {
        unimplemented!()
    }
}

impl crate::brain::GenerateRandom for NeatBrain {
    fn new_random() -> Self {
        Genome::new_fully_linked().into()
    }
}

impl crate::brain::RecombinationTwoParents for NeatBrain {
    fn recombination_two_parents(parent_a: &Self, parent_b: &Self) -> Self {
        let genome = Genome::new_from_2(&parent_a.genome, &parent_b.genome);
        genome.into()
    }
}

impl crate::brain::RecombinationInfinite for NeatBrain {
    fn recombination_infinite_parents(parents: &Vec<crate::softbody::HLSoftBody<Self>>) -> Self {
        use crate::brain::RecombinationTwoParents;

        if parents.len() == 1 {
            let parent = parents[0].borrow();
            let mut genome = parent.brain.genome.clone();
            genome.mutate();
            genome.into()
        } else {
            NeatBrain::recombination_two_parents(
                &parents[0].borrow().brain,
                &parents[1].borrow().brain,
            )
        }
    }
}

impl crate::brain::ProvideInformation for NeatBrain {
    fn get_keys(&self) -> Vec<String> {
        vec!["nodes".to_string(), "connections".to_string()]
    }

    fn get_raw_values(&self) -> Vec<String> {
        vec![
            format!("{}", self.genome.get_node_genome().len()),
            format!("{}", self.genome.get_connection_genome().len()),
        ]
    }
}

impl serde::Serialize for NeatBrain {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        use serde::ser::SerializeStruct;

        let mut state = serializer.serialize_struct("NeatBrain", 1)?;
        state.serialize_field("genome", &self.genome)?;
        state.end()
    }
}

impl<'de> serde::Deserialize<'de> for NeatBrain {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<NeatBrain, D::Error> {
        use serde::de::*;

        struct BrainVisitor;

        impl<'de> Visitor<'de> for BrainVisitor {
            type Value = NeatBrain;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("struct NeatBrain")
            }

            fn visit_seq<V: SeqAccess<'de>>(self, mut seq: V) -> Result<NeatBrain, V::Error> {
                let genome: Genome = seq
                    .next_element()?
                    .ok_or_else(|| Error::invalid_length(0, &self))?;

                Ok(genome.into())
            }
        }

        const FIELDS: &[&str] = &["genome"];
        deserializer.deserialize_struct::<BrainVisitor>("NeatBrain", FIELDS, BrainVisitor)
    }
}
mod genome;
mod input;
mod output;
mod phenotype;

pub use genome::Genome;
pub use phenotype::NeuralNet;

#[derive(Debug)]
pub struct NeatBrain {
    genome: Genome,
    net: NeuralNet,
}

impl From<Genome> for NeatBrain {
    fn from(genome: Genome) -> Self {
        let net = (&genome).into();
        NeatBrain { genome, net }
    }
}

impl crate::brain::NeuralNet for NeatBrain {
    fn load_input(&mut self, env: &crate::brain::Environment) {
        self.net.load_input(env);
    }

    fn run(&mut self) {
        self.net.run_calculations();
    }

    fn use_output(&self, env: &mut crate::brain::EnvironmentMut<Self>, time_step: f64) {
        self.net.use_output(env, time_step);
    }
}

impl crate::brain::Intentions for NeatBrain {
    fn wants_birth(&self) -> f64 {
        unimplemented!()
    }

    fn wants_help_birth(&self) -> f64 {
        unimplemented!()
    }
}

impl crate::brain::GenerateRandom for NeatBrain {
    fn new_random() -> Self {
        Genome::new_fully_linked().into()
    }
}

impl crate::brain::RecombinationTwoParents for NeatBrain {
    fn recombination_two_parents(parent_a: &Self, parent_b: &Self) -> Self {
        let genome = Genome::new_from_2(&parent_a.genome, &parent_b.genome);
        genome.into()
    }
}

impl crate::brain::RecombinationInfinite for NeatBrain {
    fn recombination_infinite_parents(parents: &Vec<crate::softbody::HLSoftBody<Self>>) -> Self {
        use crate::brain::RecombinationTwoParents;

        if parents.len() == 1 {
            let parent = parents[0].borrow();
            let mut genome = parent.brain.genome.clone();
            genome.mutate();
            genome.into()
        } else {
            NeatBrain::recombination_two_parents(
                &parents[0].borrow().brain,
                &parents[1].borrow().brain,
            )
        }
    }
}

impl crate::brain::ProvideInformation for NeatBrain {
    fn get_keys(&self) -> Vec<String> {
        vec!["nodes".to_string(), "connections".to_string()]
    }

    fn get_raw_values(&self) -> Vec<String> {
        vec![
            format!("{}", self.genome.get_node_genome().len()),
            format!("{}", self.genome.get_connection_genome().len()),
        ]
    }
}

impl serde::Serialize for NeatBrain {
}

impl<'de> serde::Deserialize<'de> for NeatBrain {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<NeatBrain, D::Error> {
        use serde::de::*;

        struct BrainVisitor;

        impl<'de> Visitor<'de> for BrainVisitor {
            type Value = NeatBrain;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("struct NeatBrain")
            }

            fn visit_seq<V: SeqAccess<'de>>(self, mut seq: V) -> Result<NeatBrain, V::Error> {
                let genome: Genome = seq
                    .next_element()?
                    .ok_or_else(|| Error::invalid_length(0, &self))?;

                Ok(genome.into())
            }
        }

        const FIELDS: &[&str] = &["genome"];
        deserializer.deserialize_struct::<BrainVisitor>("NeatBrain", FIELDS, BrainVisitor)
    }
}
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        use serde::ser::SerializeStruct;

        let mut state = serializer.serialize_struct("NeatBrain", 1)?;
        state.serialize_field("genome", &self.genome)?;
        state.end()
    }
function_block-full_function
[ { "content": "#[derive(Debug)]\n\nstruct Output {\n\n node_index: usize,\n\n value: f64,\n\n output_type: OutputType,\n\n}\n\n\n\nimpl Output {\n\n fn use_output(&self, env: &mut crate::brain::EnvironmentMut<super::NeatBrain>, time_step: f64) {\n\n self.output_type.use_output(self.value, env, time_step);\n\n }\n\n\n\n pub fn new(node_index: usize, output_type: OutputType) -> Self {\n\n Output {\n\n node_index,\n\n value: 0.0,\n\n output_type,\n\n }\n\n }\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/mod.rs", "rank": 0, "score": 163884.01796679184 }, { "content": "#[derive(Debug)]\n\nstruct Input {\n\n node_index: usize,\n\n input_type: InputType,\n\n}\n\n\n\nimpl Input {\n\n pub fn load_into(&self, nodes: &mut [Node], env: &crate::brain::Environment) {\n\n let data = self.input_type.get_data(env);\n\n nodes[self.node_index].add_to_value(data);\n\n }\n\n\n\n pub fn new(node_index: usize, input_type: InputType) -> Self {\n\n Input {\n\n node_index,\n\n input_type,\n\n }\n\n }\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/mod.rs", "rank": 1, "score": 163826.0067967116 }, { "content": "fn sigmoid(n: f64) -> f64 {\n\n 1.0 / (1.0 + n.exp())\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/mod.rs", "rank": 2, "score": 157603.7188975304 }, { "content": "/// Returns the distance between two points.\n\n///\n\n/// Uses the Pythagorean theorem: A^2 + B^2 = C^2.\n\npub fn distance(x1: f64, y1: f64, x2: f64, y2: f64) -> f64 {\n\n ((x1 - x2).powi(2) + (y1 - y2).powi(2)).sqrt()\n\n}\n", "file_path": "evolvim-lib/src/lib/softbody/rock.rs", "rank": 3, "score": 134575.625013549 }, { "content": "pub fn get_next_node_id() -> Id {\n\n unsafe {\n\n NODE_NUMBER += 1;\n\n return NODE_NUMBER;\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct Genome {\n\n node_genome: Vec<NodeGene>,\n\n connection_genome: Vec<ConnectionGene>,\n\n}\n\n\n\nimpl Genome {\n\n /// Accessor function to gain readonly access to the `node_genome`; used for generating a phenotype.\n\n pub fn get_node_genome(&self) -> &Vec<NodeGene> {\n\n &self.node_genome\n\n }\n\n /// Accessor function to gain readonly access to the `connection_genome`; used for generating a phenotype.\n\n pub fn get_connection_genome(&self) -> &Vec<ConnectionGene> {\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 4, "score": 134001.50767634792 }, { "content": "pub fn draw_creature<B: lib_evolvim::brain::NeuralNet, G: Graphics>(\n\n creature: &Creature<B>,\n\n context: Context,\n\n graphics: &mut G,\n\n view: &View,\n\n) {\n\n let size = view.get_tile_size();\n\n let transform = context\n\n .transform\n\n .trans(-view.get_precise_x() * size, -view.get_precise_y() * size);\n\n\n\n let radius = creature.get_radius();\n\n let color = from_hsba([creature.get_mouth_hue() as f32, 1.0, 1.0, 1.0]);\n\n\n\n let rect = [\n\n // This gives the upper-left corner of the circle so subtract the radius.\n\n (creature.get_px() - radius) * size,\n\n (creature.get_py() - radius) * size,\n\n radius * 2.0 * size,\n\n radius * 2.0 * size,\n\n ];\n\n\n\n let ellipse = ellipse::Ellipse::new(color);\n\n\n\n ellipse.draw(rect, &context.draw_state, transform, graphics);\n\n}\n\n\n", "file_path": "evolvim-tools/src/main/graphics/mod.rs", "rank": 5, "score": 126165.69842427326 }, { "content": "fn get_noise<N: NoiseFn<Point2<f64>>>(ng: N, x: f64, y: f64) -> f64 {\n\n (ng.get([x, y]) + 1.0) / 2.0\n\n}\n", "file_path": "evolvim-lib/src/lib/terrain/mod.rs", "rank": 6, "score": 125434.65575838214 }, { "content": "pub 
trait NeuralNet: Intentions {\n\n fn load_input(&mut self, env: &Environment);\n\n\n\n fn run(&mut self);\n\n\n\n fn run_with(&mut self, env: &Environment) {\n\n self.load_input(env);\n\n self.run();\n\n }\n\n\n\n fn use_output(&self, env: &mut EnvironmentMut<Self>, time_step: f64)\n\n where\n\n Self: std::marker::Sized;\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/brain/mod.rs", "rank": 7, "score": 118706.80026797397 }, { "content": "fn get_axon_angles(max: usize, x: usize) -> Vec<f64> {\n\n let mut vec = Vec::with_capacity(max);\n\n const BRAIN_WIDTH: f64 = 3.0;\n\n const BRAIN_HEIGHT: f64 = 11.0;\n\n\n\n for i in 0..max {\n\n vec.push(\n\n PI + ((i as f64 - BRAIN_HEIGHT) / 2.0).atan2(x as f64 - BRAIN_WIDTH / 2.0) / (2.0 * PI),\n\n );\n\n }\n\n\n\n vec\n\n}\n", "file_path": "evolvim-lib/src/lib/brain/feed_forward.rs", "rank": 8, "score": 111832.13980602403 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Connection {\n\n to: *mut f64,\n\n weight: f64,\n\n}\n\n\n\nimpl Connection {\n\n /// Weighted connection to another point in memory\n\n ///\n\n /// This is unsafe, to use this you must manually guarantee that the pointer stays valid\n\n /// at least until we destroy this Neural Network struct.\n\n pub unsafe fn new(to: *mut f64, weight: f64) -> Self {\n\n Connection { to, weight }\n\n }\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/mod.rs", "rank": 9, "score": 110186.8591115416 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Node {\n\n pub value: f64,\n\n pub connections: Vec<Connection>,\n\n}\n\n\n\nimpl Node {\n\n pub fn add_to_value(&mut self, n: f64) {\n\n self.value += n;\n\n }\n\n\n\n pub fn perform_sigmoid(&mut self) -> f64 {\n\n return sigmoid(self.value);\n\n }\n\n\n\n pub fn calc(&mut self) {\n\n let sig_value = self.perform_sigmoid();\n\n\n\n self.value = 0.0;\n\n\n\n for c in &self.connections {\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/mod.rs", "rank": 10, "score": 110186.8591115416 }, { "content": "pub fn draw_lines<G, C>(\n\n text_to_draw: Vec<String>,\n\n line_heigth: f64,\n\n line_width: f64,\n\n context: Context,\n\n text: Text,\n\n glyphs: &mut C,\n\n graphics: &mut G,\n\n) where\n\n G: Graphics<Texture = C::Texture>,\n\n C: CharacterCache,\n\n C::Error: Debug,\n\n{\n\n let buffer = 10.0;\n\n let mut transform = context.transform.trans(buffer, buffer);\n\n\n\n // Draw white background\n\n let rect = [\n\n 0.0,\n\n 0.0,\n", "file_path": "evolvim-tools/src/main/graphics/mod.rs", "rank": 11, "score": 110078.26127525799 }, { "content": "pub fn draw_terrain<C, G>(\n\n terrain: &Terrain,\n\n context: Context,\n\n graphics: &mut G,\n\n glyphs: &mut C,\n\n view: &View,\n\n) where\n\n C: CharacterCache,\n\n C::Error: Debug,\n\n G: Graphics<Texture = C::Texture>,\n\n{\n\n let size = view.get_tile_size();\n\n let transform = context\n\n .transform\n\n .trans(-view.get_precise_x() * size, -view.get_precise_y() * size);\n\n\n\n let mut shape = rectangle::Rectangle::new([1., 1., 1., 1.]);\n\n\n\n for x in view.get_x_range() {\n\n for y in view.get_y_range() {\n", "file_path": "evolvim-tools/src/main/graphics/mod.rs", "rank": 12, "score": 110078.261275258 }, { "content": "/// Converts hsba (Hue, Saturation, Brightness, Alpha) into rgba (Red, Green, Blue, Alpha)\n\n///\n\n/// All input values should range from 0 to 1. 
All output values will range from 0 to 1.\n\n///\n\n/// Formulae from [here](https://en.wikipedia.org/wiki/HSL_and_HSV#From_HSV)\n\npub fn from_hsba(hsba: [f32; 4]) -> Color {\n\n let [hue, sat, bri, alpha] = hsba;\n\n\n\n assert!(hue <= 1.0, \"Hue can't be larger than 1.\");\n\n assert!(sat <= 1.0);\n\n assert!(bri <= 1.0);\n\n assert!(alpha <= 1.0);\n\n\n\n // Chroma\n\n let c = bri * sat;\n\n // H' = hue * 360 / 60 = hue * 6\n\n let mut h = hue * 6.0;\n\n let x = c * (1.0 - (h % 2.0 - 1.0).abs());\n\n\n\n if h == 0.0 {\n\n h = 1.0;\n\n }\n\n\n\n let (r, g, b): (f32, f32, f32) = match h.ceil() as usize {\n\n 1 => (c, x, 0.0),\n", "file_path": "evolvim-tools/src/main/graphics/mod.rs", "rank": 13, "score": 105305.65464545025 }, { "content": "pub fn draw_details_creature<B, C, G>(\n\n creature: &Creature<B>,\n\n context: Context,\n\n graphics: &mut G,\n\n glyphs: &mut C,\n\n view: &View,\n\n) where\n\n B: lib_evolvim::brain::NeuralNet + DrawableBrain,\n\n C: CharacterCache,\n\n C::Error: Debug,\n\n G: Graphics<Texture = C::Texture>,\n\n{\n\n let text = Text::new(18);\n\n let mut text_to_draw = Vec::new();\n\n\n\n text_to_draw.push(format!(\"Energy: {:.3}\", creature.get_energy()));\n\n let time_step = 0.001;\n\n text_to_draw.push(format!(\n\n \"Energy D: {:.3}\",\n\n creature.get_energy_change(time_step)\n", "file_path": "evolvim-tools/src/main/graphics/mod.rs", "rank": 14, "score": 103196.43785908446 }, { "content": "pub trait RecombinationTwoParents {\n\n fn recombination_two_parents(parent_a: &Self, parent_b: &Self) -> Self\n\n where\n\n Self: NeuralNet + std::marker::Sized;\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/brain/mod.rs", "rank": 15, "score": 101324.04722564392 }, { "content": "fn get_innovation_number() -> usize {\n\n unsafe {\n\n INNOVATION_NUMBER += 1;\n\n return INNOVATION_NUMBER;\n\n }\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 16, "score": 96777.61452077718 }, { "content": "fn main() {\n\n let matches = App::new(\"Evolvim - GUI launched via CLI\")\n\n .version(clap::crate_version!())\n\n .author(\"Sybrand Aarnoutse\")\n\n .arg(\n\n Arg::with_name(\"output\")\n\n .short(\"o\")\n\n .long(\"output\")\n\n .value_name(\"FILE\")\n\n .takes_value(true)\n\n .help(\"The output file, save to this when done\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"input\")\n\n .short(\"i\")\n\n .long(\"input\")\n\n .value_name(\"FILE\")\n\n .takes_value(true)\n\n .help(\"The input file, start with this as board\"),\n\n )\n", "file_path": "evolvim-tools/src/main/mod.rs", "rank": 17, "score": 81194.5291731907 }, { "content": "#[test]\n\nfn test_run_phenotype() {\n\n let gen = neat::Genome::new_fully_linked();\n\n let mut phen: neat::NeuralNet = (&gen).into();\n\n\n\n phen.run_calculations();\n\n}\n", "file_path": "evolvim-lib/tests/neat.rs", "rank": 18, "score": 81001.12833591443 }, { "content": "#[test]\n\nfn test_generate_phenotype() {\n\n let gen = neat::Genome::new_fully_linked();\n\n let _phen: neat::NeuralNet = (&gen).into();\n\n}\n\n\n", "file_path": "evolvim-lib/tests/neat.rs", "rank": 19, "score": 81001.12833591443 }, { "content": "pub trait Intentions {\n\n fn wants_birth(&self) -> f64;\n\n fn wants_help_birth(&self) -> f64;\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/brain/mod.rs", "rank": 20, "score": 76889.70993784443 }, { "content": "pub trait RecombinationInfinite {\n\n fn recombination_infinite_parents(parents: &Vec<crate::softbody::HLSoftBody<Self>>) -> Self\n\n where\n\n Self: NeuralNet + std::marker::Sized;\n\n}\n\n\n", "file_path": 
"evolvim-lib/src/lib/brain/mod.rs", "rank": 21, "score": 75139.90148013999 }, { "content": "pub trait ProvideInformation {\n\n fn get_raw_values(&self) -> Vec<String> {\n\n vec![String::from(\n\n \"This struct has not yet implemented it's own information system.\",\n\n )]\n\n }\n\n\n\n fn get_keys(&self) -> Vec<String> {\n\n vec![String::from(\"warning\")]\n\n }\n\n\n\n fn get_ordered_key_value_pairs(&self) -> Vec<(String, String)> {\n\n let values = self.get_raw_values();\n\n let keys = self.get_keys();\n\n assert!(values.len() == keys.len(), \"The amount of values ({}) and keys ({}) in the implementation of ProvideInformation does not match.\", values.len(), keys.len());\n\n\n\n // Zip the two iterators\n\n keys.into_iter().zip(values.into_iter()).collect()\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/brain/mod.rs", "rank": 22, "score": 75139.90148013999 }, { "content": "pub trait DrawableBrain {\n\n fn draw_brain<C, G>(&self, context: Context, graphics: &mut G, glyphs: &mut C)\n\n where\n\n C: CharacterCache,\n\n C::Error: Debug,\n\n G: Graphics<Texture = C::Texture>;\n\n}\n\n\n\nimpl DrawableBrain for lib_evolvim::neat::NeatBrain {\n\n fn draw_brain<C, G>(&self, context: Context, graphics: &mut G, glyphs: &mut C)\n\n where\n\n C: CharacterCache,\n\n C::Error: Debug,\n\n G: Graphics<Texture = C::Texture>,\n\n {\n\n let text = Text::new(18);\n\n let info = self\n\n .get_ordered_key_value_pairs()\n\n .into_iter()\n\n .map(|(key, value)| format!(\"{}: {}\", key, value))\n", "file_path": "evolvim-tools/src/main/graphics/mod.rs", "rank": 23, "score": 75139.90148013999 }, { "content": "pub trait GenerateRandom {\n\n fn new_random() -> Self;\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/brain/mod.rs", "rank": 24, "score": 75139.90148013999 }, { "content": "/// Checks if the center is inside of the world, possibly corrects it and returns it.\n\npub fn check_center_y(y: usize, board_height: usize) -> usize {\n\n return y.max(0).min(board_height - 1);\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/softbody/rock.rs", "rank": 25, "score": 73947.68427208278 }, { "content": "/// Checks if the center is inside of the world, possibly corrects it and returns it.\n\npub fn check_center_x(x: usize, board_width: usize) -> usize {\n\n return x.max(0).min(board_width - 1);\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/softbody/rock.rs", "rank": 26, "score": 73947.68427208278 }, { "content": "pub mod board;\n\npub mod version;\n\n\n\npub use board::BoardSerde;\n\npub use version::Version;", "file_path": "evolvim-lib/src/lib/serde_structs/mod.rs", "rank": 27, "score": 58868.10591382187 }, { "content": "mod generate;\n\n\n\nuse super::input::InputType;\n\nuse super::output::OutputType;\n\n\n\n// TODO: use unsafe pointers or something to speed things up\n\n#[derive(Debug)]\n\npub struct NeuralNet {\n\n nodes: Box<[Node]>,\n\n\n\n outputs: Box<[Output]>,\n\n inputs: Vec<Input>,\n\n}\n\n\n\nimpl NeuralNet {\n\n pub fn load_input(&mut self, env: &crate::brain::Environment) {\n\n for input in &self.inputs {\n\n input.load_into(&mut self.nodes, env);\n\n }\n\n }\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/mod.rs", "rank": 28, "score": 58844.48770785127 }, { "content": "\n\n pub fn use_output(\n\n &self,\n\n env: &mut crate::brain::EnvironmentMut<super::NeatBrain>,\n\n time_step: f64,\n\n ) {\n\n for output in self.outputs.iter() {\n\n output.use_output(env, time_step);\n\n }\n\n }\n\n\n\n pub fn run_calculations(&mut self) {\n\n for n in self.outputs.iter_mut() {\n\n // Reset the value to 0\n\n n.value 
= 0.0;\n\n }\n\n\n\n for n in self.nodes.iter_mut() {\n\n n.calc();\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/mod.rs", "rank": 29, "score": 58835.12583885249 }, { "content": " unsafe {\n\n *c.to += c.weight * sig_value;\n\n }\n\n }\n\n }\n\n\n\n pub fn empty() -> Self {\n\n Node {\n\n value: 0.0,\n\n connections: Vec::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/mod.rs", "rank": 30, "score": 58825.18791242439 }, { "content": "extern crate rand;\n\n\n\nmod gene;\n\nmod mutation;\n\nmod recombination;\n\n// mod innovations;\n\nmod speciation;\n\nmod utils;\n\n\n\nuse self::gene::{ConnectionGene, NodeGene};\n\npub use self::gene::{Id, NodeType};\n\nuse rand::Rng;\n\n\n\nconst AMOUNT_INPUT: usize = 6;\n\nconst AMOUNT_OUTPUT: usize = 4;\n\nstatic mut INNOVATION_NUMBER: usize = AMOUNT_INPUT * AMOUNT_OUTPUT;\n\nstatic mut NODE_NUMBER: Id = AMOUNT_INPUT + AMOUNT_OUTPUT;\n\n\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 31, "score": 58605.79822994832 }, { "content": " node_genome: Vec::new(),\n\n connection_genome: Vec::new(),\n\n };\n\n let mut node_counter = 1;\n\n\n\n use crate::neat::input::Eye;\n\n use crate::neat::input::InputType;\n\n const EYE: [Eye; 3] = Eye::get_all_three(0.0, 0.0);\n\n let input_nodes: [InputType; 6] = [\n\n InputType::Bias(1.0),\n\n InputType::MouthHue,\n\n InputType::Energy,\n\n InputType::Eye(EYE[0]).clone(),\n\n InputType::Eye(EYE[1]).clone(),\n\n InputType::Eye(EYE[2]).clone(),\n\n ];\n\n for i in 0..AMOUNT_INPUT {\n\n genome.add_node(NodeType::Sensor(input_nodes[i].clone()), node_counter);\n\n node_counter += 1;\n\n }\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 32, "score": 58601.39537736411 }, { "content": "\n\n let mut con_counter = 1;\n\n use crate::neat::output::OutputType;\n\n const OUTPUT_NODES: [NodeType; 4] = [\n\n NodeType::Output(OutputType::Turning),\n\n NodeType::Output(OutputType::Accelerating),\n\n NodeType::Output(OutputType::MouthHue),\n\n NodeType::Output(OutputType::Eating),\n\n ];\n\n for i in 0..AMOUNT_OUTPUT {\n\n genome.add_node(OUTPUT_NODES[i].clone(), node_counter);\n\n node_counter += 1;\n\n\n\n let to = genome.node_genome.last().unwrap().id;\n\n for i in 0..AMOUNT_INPUT {\n\n let from = genome.node_genome[i].id;\n\n\n\n // Because all creatures start with this basic genome give all the connections the same innovation number\n\n // `counter` is used for this purpose\n\n genome.connection_genome.push(ConnectionGene {\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 33, "score": 58601.153769986784 }, { "content": " rand::random::<f64>() * 0.4 + 0.8\n\n }\n\n\n\n fn add_node(&mut self, node_type: NodeType, id: Id) {\n\n self.node_genome.push(NodeGene { node_type, id });\n\n }\n\n\n\n fn add_connection(&mut self, from: Id, to: Id, weight: f64) {\n\n self.connection_genome.push(ConnectionGene {\n\n from,\n\n to,\n\n weight,\n\n\n\n enabled: true,\n\n innovation_number: get_innovation_number(),\n\n });\n\n }\n\n\n\n pub fn new_fully_linked() -> Self {\n\n let mut genome = Genome {\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 34, "score": 58600.97082185111 }, { "content": " from,\n\n to,\n\n weight: Self::get_random_weight(),\n\n\n\n enabled: true,\n\n innovation_number: con_counter,\n\n });\n\n con_counter += 1;\n\n }\n\n }\n\n\n\n return genome;\n\n }\n\n}\n\n\n\nimpl Genome {\n\n pub fn log_nodes(&self) {\n\n for n in &self.node_genome {\n\n println!(\"\\tnode {} is {:?}\", 
n.id, n.node_type);\n\n }\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 35, "score": 58598.91400954613 }, { "content": " &self.connection_genome\n\n }\n\n\n\n fn get_random_node_id(&self) -> Id {\n\n self.node_genome[self.get_random_node_place()].id\n\n }\n\n\n\n fn get_random_node_place(&self) -> usize {\n\n rand::thread_rng().gen_range(0, self.node_genome.len())\n\n }\n\n\n\n fn get_random_connection_place(&self) -> usize {\n\n rand::thread_rng().gen_range(0, self.connection_genome.len())\n\n }\n\n\n\n fn get_random_weight() -> f64 {\n\n rand::random::<f64>() * 2.0 - 1.0\n\n }\n\n\n\n fn get_random_weight_multiplier() -> f64 {\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 36, "score": 58594.10969149394 }, { "content": " }\n\n\n\n pub fn log_connections(&self) {\n\n for n in &self.connection_genome {\n\n print!(\"\\t\");\n\n if n.enabled == false {\n\n print!(\"DISABLED! \");\n\n }\n\n println!(\n\n \"innovation {}: from {} to {} with weight {}\",\n\n n.innovation_number, n.from, n.to, n.weight\n\n );\n\n }\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/genome/mod.rs", "rank": 37, "score": 58593.02073409035 }, { "content": "fn get_usize_from_id(m: &HashMap<Id, usize>, id: Id) -> usize {\n\n *m.get(&id).unwrap()\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/generate.rs", "rank": 38, "score": 56603.91341496757 }, { "content": "fn main() {\n\n let abort_reader = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));\n\n let abort_writer = abort_reader.clone();\n\n\n\n ctrlc::set_handler(move || abort_writer.store(true, Ordering::SeqCst))\n\n .expect(\"Error setting SIGINT handler! Blame the other crate!\");\n\n\n\n let matches = App::new(\"Evolvim - cli\")\n\n .version(clap::crate_version!())\n\n .author(\"Sybrand Aarnoutse\")\n\n .arg(\n\n Arg::with_name(\"output\")\n\n .short(\"o\")\n\n .long(\"output\")\n\n .value_name(\"FILE\")\n\n .takes_value(true)\n\n .help(\"The output file, save to this when done\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"input\")\n", "file_path": "evolvim-tools/src/cli.rs", "rank": 39, "score": 52802.48638781459 }, { "content": "#[test]\n\nfn test_recombination() {\n\n let mut gen1 = neat::Genome::new_fully_linked();\n\n let mut gen2 = neat::Genome::new_fully_linked();\n\n\n\n for _i in 0..5 {\n\n gen1.mutate();\n\n gen2.mutate();\n\n }\n\n\n\n println!(\"Parent A:\");\n\n gen1.log_nodes();\n\n gen1.log_connections();\n\n\n\n println!(\"Parent B:\");\n\n gen2.log_nodes();\n\n gen2.log_connections();\n\n\n\n let baby = neat::Genome::new_from_2(&gen1, &gen2);\n\n println!(\"Genome after recombination:\");\n\n baby.log_nodes();\n\n baby.log_connections();\n\n}\n\n\n", "file_path": "evolvim-lib/tests/neat.rs", "rank": 40, "score": 51715.06563495008 }, { "content": "#[test]\n\nfn test_mutation() {\n\n let mut gen = neat::Genome::new_fully_linked();\n\n\n\n println!(\"Before mutation:\");\n\n gen.log_nodes();\n\n gen.log_connections();\n\n\n\n for _i in 0..10 {\n\n gen.mutate();\n\n }\n\n\n\n println!(\"\\nAfter mutation:\");\n\n gen.log_nodes();\n\n gen.log_connections();\n\n}\n\n\n", "file_path": "evolvim-lib/tests/neat.rs", "rank": 41, "score": 51715.06563495008 }, { "content": "#[cfg(multithreading)]\n\ntype MutPoint = std::sync::RwLock;\n", "file_path": "evolvim-lib/src/lib/softbody/mod.rs", "rank": 42, "score": 50742.0028382986 }, { "content": "#[cfg(not(multithreading))]\n\ntype MutPoint<A> = std::cell::RefCell<A>;\n\n\n\nconst COLLISION_FORCE: f64 = 0.01;\n\nconst PIECES: usize = 20;\n\nconst 
AGE_FACTOR: f64 = 1.0;\n\nconst MATURE_AGE: f64 = 0.01;\n\n\n\n/// Higher-Level SoftBody\n\n///\n\n/// This is a wrapper struct providing some useful functions.\n\n///\n\n/// TODO: come up with a better name.\n\npub struct HLSoftBody<B = Brain>(ReferenceCounter<MutPoint<SoftBody<B>>>);\n\n\n\nimpl<B> From<SoftBody<B>> for HLSoftBody<B> {\n\n fn from(sb: SoftBody<B>) -> HLSoftBody<B> {\n\n HLSoftBody(ReferenceCounter::new(MutPoint::new(sb)))\n\n }\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/softbody/mod.rs", "rank": 43, "score": 50742.0028382986 }, { "content": "#[test]\n\nfn test_board_update() {\n\n let mut board = Board::<Brain>::default();\n\n\n\n board.update(0.001);\n\n}\n\n\n", "file_path": "evolvim-lib/tests/board.rs", "rank": 44, "score": 50698.92333954996 }, { "content": "#[test]\n\nfn test_construct_random() {\n\n let gen1 = neat::Genome::new_fully_linked();\n\n\n\n println!(\"Initial random network:\");\n\n gen1.log_nodes();\n\n gen1.log_connections();\n\n}\n\n\n", "file_path": "evolvim-lib/tests/neat.rs", "rank": 45, "score": 50698.92333954996 }, { "content": "fn main() {\n\n let mut board = Board::<Brain>::default();\n\n\n\n let mut window: PistonWindow = WindowSettings::new(\"Hello Piston!\", [1000, 800])\n\n .exit_on_esc(true)\n\n .build()\n\n .unwrap();\n\n\n\n while let Some(event) = window.next() {\n\n // Draw\n\n window.draw_2d(&event, |context, graphics| {\n\n clear([1.0; 4], graphics);\n\n\n\n for x in 0..100 {\n\n for y in 0..100 {\n\n let size = 10.0;\n\n let tile = board.terrain.get_tile_at((x, y));\n\n\n\n let rect = [x as f64 * size, y as f64 * size, size, size];\n\n\n", "file_path": "evolvim-tools/src/checkTerrainGen.rs", "rank": 46, "score": 50698.92333954996 }, { "content": "#[test]\n\nfn test_brain_evolve() {\n\n let c_1 = HLSoftBody::from(Creature::new_random((100, 100), 0.0));\n\n let c_2 = HLSoftBody::from(Creature::new_random((100, 100), 0.0));\n\n\n\n let _new_brain = Brain::recombination_infinite_parents(&vec![c_1, c_2]);\n\n}\n", "file_path": "evolvim-lib/tests/brain.rs", "rank": 47, "score": 50698.92333954996 }, { "content": "#[test]\n\nfn test_genetical_distance() {\n\n let mut gen1 = neat::Genome::new_fully_linked();\n\n let mut gen2 = neat::Genome::new_fully_linked();\n\n\n\n for _i in 0..5 {\n\n gen1.mutate();\n\n gen2.mutate();\n\n }\n\n\n\n let distance = gen1.genetical_distance(&gen2);\n\n println!(\n\n \"The distance between two randomly mutated genomes is {}\",\n\n distance\n\n );\n\n}\n\n\n", "file_path": "evolvim-lib/tests/neat.rs", "rank": 48, "score": 50698.92333954996 }, { "content": "type FPN = f64;\n\n\n", "file_path": "evolvim-lib/src/lib/brain/feed_forward.rs", "rank": 49, "score": 49793.85540832739 }, { "content": "#[test]\n\nfn test_board_default_intialise() {\n\n let _board = Board::<Brain>::default();\n\n}\n", "file_path": "evolvim-lib/tests/board.rs", "rank": 50, "score": 49747.27364166673 }, { "content": "pub trait SoftBodyBucket<B> {\n\n fn remove_softbody(&mut self, body: HLSoftBody<B>);\n\n\n\n fn add_softbody(&mut self, body: HLSoftBody<B>);\n\n}\n\n\n\npub type SoftBodiesAt<B> = Vec<HLSoftBody<B>>;\n\n\n\nimpl<B> SoftBodyBucket<B> for SoftBodiesAt<B> {\n\n fn remove_softbody(&mut self, body: HLSoftBody<B>) {\n\n // WARNING: Only removes one instance\n\n for i in 0..self.len() {\n\n if self[i] == body {\n\n self.remove(i);\n\n break;\n\n }\n\n }\n\n }\n\n\n\n /// Adds the given `HLSoftBody`, prevents duplicates.\n", "file_path": "evolvim-lib/src/lib/sbip.rs", "rank": 51, "score": 45579.908964683935 }, { "content": "fn 
linear_interpolation(a: f32, b: f32, x: f32) -> f32 {\n\n return a + (b - a) * x;\n\n}\n", "file_path": "evolvim-lib/src/lib/terrain/tile.rs", "rank": 52, "score": 36961.589458527254 }, { "content": "fn inter_color(a: [f32; 3], b: [f32; 3], x: f32) -> [f32; 3] {\n\n let hue = linear_interpolation(a[0], b[0], x);\n\n let sat = linear_interpolation(a[1], b[1], x);\n\n let bri = linear_interpolation(a[2], b[2], x);\n\n\n\n [hue, sat, bri]\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/terrain/tile.rs", "rank": 53, "score": 35119.84036837292 }, { "content": "fn inter_color_fixed_hue(a: [f32; 3], b: [f32; 3], x: f32, hue: f32) -> [f32; 4] {\n\n let b_saturation = if b[2] == 0.0 {\n\n // if brightness = 0 then saturation = 1\n\n 1.0\n\n } else {\n\n b[1]\n\n };\n\n\n\n let sat = linear_interpolation(a[1], b_saturation, x);\n\n let bri = linear_interpolation(a[2], b[2], x);\n\n\n\n [hue, sat, bri, 1.0]\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/terrain/tile.rs", "rank": 54, "score": 31586.95122469091 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub enum OutputType {\n\n MouthHue,\n\n Eating,\n\n Turning,\n\n Accelerating,\n\n Fight,\n\n}\n\n\n\nimpl OutputType {\n\n pub fn use_output<B>(\n\n &self,\n\n value: f64,\n\n env: &mut crate::brain::EnvironmentMut<B>,\n\n time_step: f64,\n\n ) {\n\n use OutputType::*;\n\n\n\n match self {\n\n MouthHue => env.this_body.set_mouth_hue(value),\n", "file_path": "evolvim-lib/src/lib/neat/output.rs", "rank": 55, "score": 31557.256192531924 }, { "content": " Eating => {\n\n let tile_pos = env.this_body.get_random_covered_tile(env.board_size);\n\n let tile = env.terrain.get_tile_at_mut(tile_pos);\n\n env.this_body\n\n .eat(value, time_step, env.time, env.climate, tile);\n\n }\n\n Turning => env.this_body.turn(value, time_step),\n\n Accelerating => env.this_body.accelerate(value, time_step),\n\n Fight => env.this_body.fight(\n\n value,\n\n env.time,\n\n time_step,\n\n env.sbip,\n\n env.self_pointer.clone(),\n\n ),\n\n };\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/output.rs", "rank": 56, "score": 31543.135009951893 }, { "content": "use crate::brain::Environment;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum InputType {\n\n Eye(Eye),\n\n Bias(f64),\n\n MouthHue,\n\n Energy,\n\n}\n\n\n\nimpl InputType {\n\n pub fn get_data(&self, env: &Environment) -> f64 {\n\n use InputType::*;\n\n\n\n match &self {\n\n Bias(v) => *v,\n\n Eye(s) => s.get_data(env),\n\n MouthHue => env.this_body.get_mouth_hue(),\n\n Energy => env.this_body.get_energy(),\n\n }\n", "file_path": "evolvim-lib/src/lib/neat/input.rs", "rank": 57, "score": 31492.52406202404 }, { "content": " TileFertility => tile.get_fertility(),\n\n }\n\n }\n\n\n\n pub const fn get_all_three(relative_distance: f64, angle: f64) -> [Self; 3] {\n\n [\n\n Eye {\n\n relative_distance,\n\n angle,\n\n what_to_look_for: EyeType::FoodLevel,\n\n },\n\n Eye {\n\n relative_distance,\n\n angle,\n\n what_to_look_for: EyeType::FoodColor,\n\n },\n\n Eye {\n\n relative_distance,\n\n angle,\n\n what_to_look_for: EyeType::TileFertility,\n\n },\n\n ]\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/input.rs", "rank": 58, "score": 31481.98545770902 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Eye {\n\n relative_distance: f64,\n\n angle: f64,\n\n what_to_look_for: EyeType,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n", "file_path": "evolvim-lib/src/lib/neat/input.rs", "rank": 59, "score": 31480.958036479053 }, { 
"content": "pub mod terrain;\n\npub mod serde_structs;\n\n\n\npub use self::board::*;\n\npub use self::brain::*;\n\npub use self::climate::Climate;\n\npub use self::sbip::*;\n\npub use self::softbody::*;\n\npub use self::terrain::*;\n", "file_path": "evolvim-lib/src/lib/mod.rs", "rank": 60, "score": 30528.576142865943 }, { "content": "//! Evolvim\n\n\n\n// TODO: ensure documentation for everything, use this to generate warnings\n\n// #![warn(missing_docs)]\n\n// Use this to generate errors\n\n// #![deny(missing_docs)]\n\n\n\n// #![deny(unsafe_code)]\n\n\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate serde;\n\n\n\npub mod board;\n\npub mod brain;\n\npub mod climate;\n\npub mod constants;\n\npub mod neat;\n\npub mod sbip;\n\npub mod softbody;\n", "file_path": "evolvim-lib/src/lib/mod.rs", "rank": 61, "score": 30525.117380807038 }, { "content": " .arg(\n\n Arg::with_name(\"save\")\n\n .short(\"s\")\n\n .long(\"save\")\n\n .takes_value(false)\n\n .conflicts_with(\"output\")\n\n .requires(\"input\")\n\n .help(\"Saves to the input file when done\"),\n\n )\n\n .get_matches();\n\n\n\n let mut view = View::default();\n\n if let Some(filename) = matches.value_of(\"input\") {\n\n view.board = Board::<BrainType>::load_from(filename).unwrap();\n\n }\n\n\n\n let output_file = if matches.is_present(\"save\") {\n\n matches.value_of(\"input\")\n\n } else {\n\n matches.value_of(\"output\")\n", "file_path": "evolvim-tools/src/main/mod.rs", "rank": 62, "score": 30522.724864112264 }, { "content": " use self::Button::Mouse;\n\n use self::ButtonState::*;\n\n use self::Input::*;\n\n\n\n match input {\n\n Button(b_args) => match b_args.button {\n\n Mouse(m_args) => match m_args {\n\n Left => match b_args.state {\n\n Press => view.on_mouse_press(),\n\n Release => view.on_mouse_release(),\n\n },\n\n _ => {}\n\n },\n\n _ => {}\n\n },\n\n _ => {}\n\n }\n\n }\n\n\n\n window.set_title(format!(\n", "file_path": "evolvim-tools/src/main/mod.rs", "rank": 63, "score": 30520.39490271356 }, { "content": "extern crate clap;\n\nextern crate lib_evolvim;\n\nextern crate piston_window;\n\n\n\nmod graphics;\n\n\n\nuse self::graphics::View;\n\nuse clap::{App, Arg};\n\nuse lib_evolvim::Board;\n\nuse piston_window::*;\n\n\n\n// type BrainType = lib_evolvim::neat::NeatBrain;\n", "file_path": "evolvim-tools/src/main/mod.rs", "rank": 64, "score": 30519.829525931156 }, { "content": " view.switch_display_mode();\n\n }\n\n Keyboard(Key::O) => {\n\n view.board.select_oldest();\n\n }\n\n Keyboard(Key::B) => {\n\n view.board.select_biggest();\n\n }\n\n Keyboard(Key::Q) => {\n\n view.board.selected_creature.deselect();\n\n }\n\n // Keyboard(Key::S) => {\n\n // view.board.save_to(\"test.bin\").unwrap();\n\n // }\n\n _ => (),\n\n }\n\n }\n\n\n\n if let Event::Input(input) = event {\n\n use self::mouse::MouseButton::*;\n", "file_path": "evolvim-tools/src/main/mod.rs", "rank": 65, "score": 30518.63519535561 }, { "content": " };\n\n\n\n let time = view.board.get_time();\n\n view.board.update(0.001);\n\n view.board.terrain.update_all(time, &view.board.climate);\n\n\n\n let mut playspeed = 1;\n\n\n\n let mut window: PistonWindow = WindowSettings::new(\"Hello Piston!\", [1000, 900])\n\n .exit_on_esc(true)\n\n .build()\n\n .unwrap();\n\n\n\n window.set_max_fps(20);\n\n\n\n let byte_font = include_bytes!(\"../../assets/default-font.ttf\");\n\n let factory = window.factory.clone();\n\n let text_settings = TextureSettings::new();\n\n let mut glyphs = Glyphs::from_bytes(byte_font, factory, text_settings).unwrap();\n\n\n", "file_path": 
"evolvim-tools/src/main/mod.rs", "rank": 66, "score": 30516.373095037015 }, { "content": " \"Population size: {}, year: {:.2}, season: {}.\",\n\n view.board.get_population_size(),\n\n view.board.get_time(),\n\n view.board.get_season()\n\n ));\n\n }\n\n\n\n if let Some(filename) = output_file {\n\n view.board.save_to(filename).unwrap();\n\n }\n\n}\n", "file_path": "evolvim-tools/src/main/mod.rs", "rank": 67, "score": 30516.142438061746 }, { "content": " // Match some button presses\n\n if let Some(button) = event.press_args() {\n\n use Button::Keyboard;\n\n\n\n match button {\n\n Keyboard(Key::Up) => {\n\n if playspeed > 0 {\n\n playspeed *= 2;\n\n } else {\n\n playspeed = 1;\n\n }\n\n }\n\n Keyboard(Key::Down) => {\n\n if playspeed > 1 {\n\n playspeed /= 2;\n\n } else {\n\n playspeed = 0;\n\n }\n\n }\n\n Keyboard(Key::D) => {\n", "file_path": "evolvim-tools/src/main/mod.rs", "rank": 68, "score": 30516.01511375208 }, { "content": " while let Some(event) = window.next() {\n\n // Render\n\n event.update(|_args| {\n\n for _i in 0..playspeed {\n\n view.board.update(0.001);\n\n }\n\n });\n\n\n\n // Draw\n\n window.draw_2d(&event, |context, graphics| {\n\n clear([1.0; 4], graphics);\n\n\n\n view.prepare_for_drawing();\n\n view.draw(context, graphics, &mut glyphs);\n\n });\n\n\n\n // Match some events\n\n event.mouse_relative(|x, y| view.on_mouse_move(x, y));\n\n event.mouse_cursor(|x, y| view.update_mouse(x, y));\n\n\n", "file_path": "evolvim-tools/src/main/mod.rs", "rank": 69, "score": 30515.436185802315 }, { "content": "\n\n // Fields relevant for the creatures.\n\n creature_minimum: usize,\n\n // pub soft_bodies_in_positions: SoftBodiesInPositions<B>,\n\n pub creatures: Vec<SoftBody<B>>,\n\n creature_id_up_to: usize,\n\n // _creature_rank_metric: usize,\n\n\n\n // Fields relevant for time or history\n\n year: f64,\n\n\n\n // Fields relevant for temperature\n\n pub climate: Climate,\n\n\n\n // Miscelanious\n\n // pub selected_creature: SelectedCreature<B>,\n\n}\n\n\n\nimpl<B: NeuralNet> From<Board<B>> for BoardSerde<B> {\n\n fn from(bd: Board<B>) -> BoardSerde<B> {\n", "file_path": "evolvim-lib/src/lib/serde_structs/board.rs", "rank": 70, "score": 30353.280586907178 }, { "content": "const VERSION_MAJOR: &str = env!(\"CARGO_PKG_VERSION_MAJOR\");\n\nconst VERSION_MINOR: &str = env!(\"CARGO_PKG_VERSION_MINOR\");\n\nconst VERSION_PATCH: &str = env!(\"CARGO_PKG_VERSION_PATCH\");\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Version {\n\n major: String,\n\n minor: String,\n\n patch: String,\n\n}\n\n\n\nimpl Version {\n\n pub fn current_version() -> Self {\n\n Version {\n\n major: String::from(VERSION_MAJOR),\n\n minor: String::from(VERSION_MINOR),\n\n patch: String::from(VERSION_PATCH),\n\n }\n\n }\n\n\n", "file_path": "evolvim-lib/src/lib/serde_structs/version.rs", "rank": 71, "score": 30352.72247867634 }, { "content": "extern crate serde_derive;\n\n\n\nuse crate::board::Board;\n\nuse crate::terrain::Terrain;\n\nuse crate::softbody::SoftBody;\n\nuse crate::climate::Climate;\n\nuse super::version::Version;\n\n\n\nuse serde_derive::{Deserialize, Serialize};\n\nuse crate::brain::NeuralNet;\n\n\n\n#[derive(Deserialize, Serialize)]\n\npub struct BoardSerde<B: NeuralNet> {\n\n // Fields not in the board\n\n version: Version,\n\n\n\n // Fields relevant for the board itself.\n\n board_width: usize,\n\n board_height: usize,\n\n pub terrain: Terrain,\n", "file_path": "evolvim-lib/src/lib/serde_structs/board.rs", "rank": 72, "score": 30352.117709182745 }, { "content": " climate: bd.climate,\n\n }\n\n 
}\n\n}\n\n\n\nimpl<B: NeuralNet> From<BoardSerde<B>> for Board<B> {\n\n fn from(bs: BoardSerde<B>) -> Board<B> {\n\n use crate::board::SelectedCreature;\n\n use crate::sbip::SoftBodiesInPositions;\n\n use crate::softbody::HLSoftBody;\n\n\n\n if !bs.version.is_compatible_with_current() {\n\n panic!(\"File from version {} can not be used with current version ({}).\",\n\n bs.version,\n\n Version::current_version()\n\n );\n\n }\n\n\n\n let board_size = (bs.board_width, bs.board_height);\n\n let mut soft_bodies_in_positions = SoftBodiesInPositions::new_allocated(board_size);\n", "file_path": "evolvim-lib/src/lib/serde_structs/board.rs", "rank": 73, "score": 30351.04477105947 }, { "content": " pub fn is_compatible_with_current(&self) -> bool {\n\n if self.major != VERSION_MAJOR {\n\n // False if there is a difference in the major version\n\n false\n\n } else if self.minor != VERSION_MINOR {\n\n // This is currently an unstable crate so breaking changes will bump the minor version.\n\n //\n\n // If this crate ever becomes stable this will need to change.\n\n false\n\n } else {\n\n true\n\n }\n\n }\n\n}\n\n\n\nuse std::fmt;\n\nimpl fmt::Display for Version {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}.{}.{}\", self.major, self.minor, self.patch)\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/serde_structs/version.rs", "rank": 74, "score": 30350.440676864735 }, { "content": " let creatures: Vec<HLSoftBody<B>> = bs.creatures.into_iter()\n\n .map(|c| HLSoftBody::from(c)).collect();\n\n\n\n for c in &creatures {\n\n c.set_sbip(&mut soft_bodies_in_positions, board_size);\n\n c.set_sbip(&mut soft_bodies_in_positions, board_size);\n\n }\n\n\n\n Board::new(\n\n bs.board_width,\n\n bs.board_height,\n\n bs.terrain,\n\n\n\n bs.creature_minimum,\n\n soft_bodies_in_positions,\n\n creatures,\n\n bs.creature_id_up_to,\n\n\n\n bs.year,\n\n\n\n bs.climate,\n\n\n\n SelectedCreature::default(),\n\n )\n\n }\n\n}", "file_path": "evolvim-lib/src/lib/serde_structs/board.rs", "rank": 75, "score": 30347.246425461562 }, { "content": " let (board_width, board_height) = bd.get_board_size();\n\n let creature_minimum = bd.get_creature_minimum();\n\n let creature_id_up_to = bd.get_creature_id_up_to();\n\n let year = bd.get_time();\n\n\n\n let creatures: Vec<SoftBody<B>> = bd.creatures.into_iter().map(|c| c.into_inner()).collect();\n\n\n\n BoardSerde {\n\n version: Version::current_version(),\n\n\n\n board_width,\n\n board_height,\n\n terrain: bd.terrain,\n\n\n\n creature_minimum,\n\n creatures,\n\n creature_id_up_to,\n\n \n\n year,\n\n\n", "file_path": "evolvim-lib/src/lib/serde_structs/board.rs", "rank": 76, "score": 30344.75544093543 }, { "content": "use super::super::genome::{Genome, Id, NodeType};\n\nuse super::{Connection, NeuralNet, Node};\n\nuse std::collections::HashMap;\n\n\n\n// TODO: clean this HORRIFIC code up...\n\nimpl From<&Genome> for NeuralNet {\n\n fn from(genome: &Genome) -> Self {\n\n let node_gen = genome.get_node_genome();\n\n let mut nodes: Box<[Node]> = std::iter::repeat(Node::empty())\n\n .take(node_gen.len())\n\n .collect::<Vec<Node>>()\n\n .into_boxed_slice();\n\n let mut inputs = Vec::new();\n\n // Preallocate the memory so we don't have to reallocate and make the *mut-pointers invalid.\n\n let mut outputs = Vec::with_capacity(\n\n node_gen\n\n .iter()\n\n .filter(|node| {\n\n if let NodeType::Output(_) = node.node_type {\n\n true\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/generate.rs", "rank": 77, "score": 30324.67094968922 }, { "content": " 
.connections\n\n .push(unsafe { Connection::new(to, 1.0) });\n\n }\n\n _ => {}\n\n }\n\n\n\n counter += 1;\n\n }\n\n\n\n for con in genome.get_connection_genome().iter().filter(|c| c.enabled) {\n\n let from = get_usize_from_id(&lookup, con.from);\n\n let to = &mut nodes[get_usize_from_id(&lookup, con.to)].value as *mut f64;\n\n\n\n nodes[from]\n\n .connections\n\n .push(unsafe { Connection::new(to, con.weight) });\n\n }\n\n\n\n NeuralNet {\n\n nodes,\n\n inputs,\n\n outputs: outputs.into_boxed_slice(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/generate.rs", "rank": 78, "score": 30317.557595089886 }, { "content": " } else {\n\n false\n\n }\n\n })\n\n .count(),\n\n );\n\n let mut lookup: HashMap<Id, usize> = HashMap::new();\n\n\n\n let mut counter = 0;\n\n for i in node_gen {\n\n lookup.insert(i.id, counter);\n\n\n\n match &i.node_type {\n\n NodeType::Sensor(in_type) => {\n\n inputs.push(super::Input::new(counter, in_type.clone()));\n\n }\n\n NodeType::Output(out_type) => {\n\n outputs.push(super::Output::new(counter, out_type.clone()));\n\n let to: *mut f64 = &mut outputs.last_mut().unwrap().value;\n\n nodes[counter]\n", "file_path": "evolvim-lib/src/lib/neat/phenotype/generate.rs", "rank": 79, "score": 30314.224355724873 }, { "content": "impl<'a> RecombinationGenomesIterator<'a> {\n\n pub fn new(a: &'a Genome, b: &'a Genome) -> Self {\n\n RecombinationGenomesIterator {\n\n parent_a: a.connection_genome.iter().peekable(),\n\n parent_b: b.connection_genome.iter().peekable(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for RecombinationGenomesIterator<'a> {\n\n type Item = RecombinationGeneTypes<'a>;\n\n\n\n // TODO: make some use of if let to remove unneccessary .unwrap()'s\n\n fn next(&mut self) -> Option<RecombinationGeneTypes<'a>> {\n\n use Parent::*;\n\n use RecombinationGeneTypes::*;\n\n\n\n let gene_a = self.parent_a.peek();\n\n let gene_b = self.parent_b.peek();\n\n\n", "file_path": "evolvim-lib/src/lib/neat/genome/utils.rs", "rank": 80, "score": 30093.74482599411 }, { "content": "use super::gene::NodeGene;\n\nuse super::utils::{RecombinationGeneTypes, RecombinationGenomesIterator};\n\nuse super::Genome;\n\n\n\nimpl Genome {\n\n /// Multipoint crossover:\n\n /// - matching genes: average the weight\n\n /// - disjoint genes: always include\n\n /// - excess genes: always include\n\n pub fn new_from_2(parent_a: &Genome, parent_b: &Genome) -> Genome {\n\n let mut genome = Genome {\n\n node_genome: Vec::new(),\n\n connection_genome: Vec::new(),\n\n };\n\n\n\n use RecombinationGeneTypes::*;\n\n for g in RecombinationGenomesIterator::new(parent_a, parent_b) {\n\n match g {\n\n Matching(a, b) => {\n\n genome.connection_genome.push(a.clone());\n", "file_path": "evolvim-lib/src/lib/neat/genome/recombination.rs", "rank": 81, "score": 30092.813418300782 }, { "content": "use super::utils::{RecombinationGeneTypes, RecombinationGenomesIterator};\n\nuse super::Genome;\n\n\n\n// URGENT TODO: change these\n\nconst COEFFICIENT_MATCHING: f64 = 1.0;\n\nconst COEFFICIENT_DISJOINT: f64 = 1.0;\n\nconst COEFFICIENT_EXCESS: f64 = 1.0;\n\n\n\nimpl Genome {\n\n pub fn genetical_distance(&self, other: &Genome) -> f64 {\n\n use RecombinationGeneTypes::*;\n\n\n\n let iter = RecombinationGenomesIterator::new(&self, other);\n\n\n\n let mut weight_differences = 0.0;\n\n let mut counter_matching = 0;\n\n let mut counter_disjoint = 0;\n\n let mut counter_excess = 0;\n\n for g in iter {\n\n match g {\n", "file_path": "evolvim-lib/src/lib/neat/genome/speciation.rs", "rank": 82, "score": 
30091.694281469954 }, { "content": "use super::gene::{NodeGene, NodeType};\n\nuse super::{get_next_node_id, Genome};\n\n\n\nconst CHANCE_MUTATE_NEW_LINK: f64 = 0.1;\n\nconst CHANCE_MUTATE_LINK_TO_NODE: f64 = 0.05;\n\nconst CHANCE_MUTATE_TWEAK_WEIGHT: f64 = 0.6;\n\nconst CHANCE_MUTATE_RANDOM_WEIGHT: f64 = 0.2;\n\nconst CHANCE_MUTATE_TOGGLE_ENABLED: f64 = 0.05;\n\n\n\nimpl Genome {\n\n pub fn mutate(&mut self) {\n\n use rand::distributions::Distribution;\n\n use MutationType::*;\n\n\n\n enum MutationType {\n\n AddConnection,\n\n ConnectionToNode,\n\n TweakWeight,\n\n RandomizeWeight,\n\n ToggleEnabled,\n", "file_path": "evolvim-lib/src/lib/neat/genome/mutation.rs", "rank": 83, "score": 30088.582938189284 }, { "content": " parent_a: &Vec<NodeGene>,\n\n parent_b: &Vec<NodeGene>,\n\n ) {\n\n use std::collections::HashSet;\n\n\n\n let mut neuron_ids = HashSet::new();\n\n\n\n for i in &self.connection_genome {\n\n neuron_ids.insert(i.from);\n\n neuron_ids.insert(i.to);\n\n }\n\n\n\n for a in parent_a {\n\n if neuron_ids.remove(&a.id) {\n\n self.node_genome.push(a.clone());\n\n }\n\n }\n\n\n\n for b in parent_b {\n\n if neuron_ids.remove(&b.id) {\n\n self.node_genome.push(b.clone());\n\n }\n\n }\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/genome/recombination.rs", "rank": 84, "score": 30088.45540498132 }, { "content": "use super::gene::ConnectionGene;\n\nuse super::Genome;\n\nuse std::iter::Peekable;\n\n\n\npub enum Parent {\n\n A,\n\n B,\n\n}\n\n\n\npub enum RecombinationGeneTypes<'a> {\n\n Matching(&'a ConnectionGene, &'a ConnectionGene),\n\n Disjoint(Parent, &'a ConnectionGene),\n\n Excess(Parent, &'a ConnectionGene),\n\n}\n\n\n\npub struct RecombinationGenomesIterator<'a> {\n\n parent_a: Peekable<std::slice::Iter<'a, ConnectionGene>>,\n\n parent_b: Peekable<std::slice::Iter<'a, ConnectionGene>>,\n\n}\n\n\n", "file_path": "evolvim-lib/src/lib/neat/genome/utils.rs", "rank": 85, "score": 30087.330991838182 }, { "content": "pub type Id = usize;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum NodeType {\n\n Sensor(crate::neat::input::InputType),\n\n Hidden,\n\n Output(crate::neat::output::OutputType),\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct NodeGene {\n\n pub node_type: NodeType,\n\n pub id: Id,\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct ConnectionGene {\n\n pub from: Id,\n\n pub to: Id,\n\n pub weight: f64,\n", "file_path": "evolvim-lib/src/lib/neat/genome/gene.rs", "rank": 86, "score": 30085.9159824063 }, { "content": "\n\n pub fn mutate_tweak_weight(&mut self) {\n\n let connection_id = self.get_random_connection_place();\n\n self.connection_genome[connection_id].weight *= Self::get_random_weight_multiplier();\n\n }\n\n\n\n pub fn mutate_randomize_weight(&mut self) {\n\n let connection_id = self.get_random_connection_place();\n\n self.connection_genome[connection_id].weight = Self::get_random_weight();\n\n }\n\n\n\n pub fn mutate_toggle_gene(&mut self) {\n\n let connection_id = self.get_random_connection_place();\n\n // toggle `enabled`\n\n self.connection_genome[connection_id].toggle_enabled();\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/genome/mutation.rs", "rank": 87, "score": 30085.836518605935 }, { "content": " genome.connection_genome.last_mut().unwrap().weight += b.weight;\n\n genome.connection_genome.last_mut().unwrap().weight /= 2.0;\n\n }\n\n Disjoint(_, gene) => {\n\n genome.connection_genome.push(gene.clone());\n\n }\n\n Excess(_, gene) => {\n\n 
genome.connection_genome.push(gene.clone());\n\n }\n\n }\n\n }\n\n\n\n // Make the node genome\n\n genome.generate_nodes_from_connections(&parent_a.node_genome, &parent_b.node_genome);\n\n\n\n return genome;\n\n }\n\n\n\n fn generate_nodes_from_connections(\n\n &mut self,\n", "file_path": "evolvim-lib/src/lib/neat/genome/recombination.rs", "rank": 88, "score": 30084.829377612277 }, { "content": "\n\n pub enabled: bool,\n\n pub innovation_number: usize,\n\n}\n\n\n\nimpl NodeGene {}\n\n\n\nimpl ConnectionGene {\n\n pub fn disable_and_info(&mut self) -> (Id, Id) {\n\n self.enabled = false;\n\n\n\n return (self.from, self.to);\n\n }\n\n\n\n pub fn toggle_enabled(&mut self) {\n\n self.enabled = !self.enabled;\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/genome/gene.rs", "rank": 89, "score": 30084.39454200396 }, { "content": " let from = self.get_random_node_id();\n\n let to = self.get_random_node_id();\n\n let weight = Self::get_random_weight();\n\n\n\n self.add_connection(from, to, weight);\n\n }\n\n\n\n pub fn mutate_connection_to_node(&mut self) {\n\n let connection_id = self.get_random_connection_place();\n\n let next_node_id = get_next_node_id();\n\n let (from, to) = self.connection_genome[connection_id].disable_and_info();\n\n\n\n self.add_connection(from, next_node_id, Self::get_random_weight());\n\n self.add_connection(next_node_id, to, Self::get_random_weight());\n\n\n\n self.node_genome.push(NodeGene {\n\n node_type: NodeType::Hidden,\n\n id: next_node_id,\n\n });\n\n }\n", "file_path": "evolvim-lib/src/lib/neat/genome/mutation.rs", "rank": 90, "score": 30082.45002631291 }, { "content": " }\n\n\n\n impl MutationType {\n\n const fn get_choices() -> [Self; 5] {\n\n [\n\n AddConnection,\n\n ConnectionToNode,\n\n TweakWeight,\n\n RandomizeWeight,\n\n ToggleEnabled,\n\n ]\n\n }\n\n\n\n const fn get_weights() -> [f64; 5] {\n\n [\n\n CHANCE_MUTATE_NEW_LINK,\n\n CHANCE_MUTATE_LINK_TO_NODE,\n\n CHANCE_MUTATE_TWEAK_WEIGHT,\n\n CHANCE_MUTATE_RANDOM_WEIGHT,\n\n CHANCE_MUTATE_TOGGLE_ENABLED,\n", "file_path": "evolvim-lib/src/lib/neat/genome/mutation.rs", "rank": 91, "score": 30080.449264744948 }, { "content": " ]\n\n }\n\n }\n\n\n\n let dist = rand::distributions::WeightedIndex::new(&MutationType::get_weights()).unwrap();\n\n let mut rng = rand::thread_rng();\n\n let times = self.connection_genome.len() / 2;\n\n\n\n for _i in 0..times {\n\n match MutationType::get_choices()[dist.sample(&mut rng)] {\n\n AddConnection => self.mutate_add_connection(),\n\n ConnectionToNode => self.mutate_connection_to_node(),\n\n TweakWeight => self.mutate_tweak_weight(),\n\n RandomizeWeight => self.mutate_randomize_weight(),\n\n ToggleEnabled => self.mutate_toggle_gene(),\n\n }\n\n }\n\n }\n\n\n\n pub fn mutate_add_connection(&mut self) {\n", "file_path": "evolvim-lib/src/lib/neat/genome/mutation.rs", "rank": 92, "score": 30080.31729400262 }, { "content": " if gene_a.is_none() {\n\n if let Some(b) = self.parent_b.next() {\n\n return Some(Excess(B, b));\n\n } else {\n\n return None;\n\n }\n\n } else if gene_b.is_none() {\n\n if let Some(a) = self.parent_a.next() {\n\n return Some(Excess(A, a));\n\n } else {\n\n return None;\n\n };\n\n }\n\n\n\n use std::cmp::Ordering;\n\n match gene_a\n\n .unwrap()\n\n .innovation_number\n\n .cmp(&gene_b.unwrap().innovation_number)\n\n {\n", "file_path": "evolvim-lib/src/lib/neat/genome/utils.rs", "rank": 93, "score": 30079.906709945335 }, { "content": " Ordering::Equal => {\n\n // matching gene\n\n return Some(Matching(\n\n self.parent_a.next().unwrap(),\n\n 
self.parent_b.next().unwrap(),\n\n ));\n\n }\n\n Ordering::Less => {\n\n // disjoint gene from parent A\n\n return Some(Disjoint(A, self.parent_a.next().unwrap()));\n\n }\n\n Ordering::Greater => {\n\n // disjoint gene from parent B\n\n return Some(Disjoint(B, self.parent_b.next().unwrap()));\n\n }\n\n }\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/genome/utils.rs", "rank": 94, "score": 30078.344466194405 }, { "content": " Matching(a, b) => {\n\n counter_matching += 1;\n\n weight_differences += (a.weight - b.weight).abs();\n\n }\n\n Disjoint(_, _) => counter_disjoint += 1,\n\n Excess(_, _) => counter_excess += 1,\n\n }\n\n }\n\n\n\n let length = (counter_matching + counter_disjoint + counter_excess) as f64;\n\n\n\n return COEFFICIENT_MATCHING * weight_differences / counter_matching as f64\n\n + COEFFICIENT_DISJOINT * counter_disjoint as f64 / length\n\n + COEFFICIENT_EXCESS * counter_excess as f64 / length;\n\n }\n\n}\n", "file_path": "evolvim-lib/src/lib/neat/genome/speciation.rs", "rank": 95, "score": 30077.50479118015 } ]
Rust
research/gaia/pegasus/pegasus/src/communication/channel.rs
tianliplus/GraphScope
0226e00c106d5959d6fdc1637fe1646b16b26136
use crate::api::function::{MultiRouteFunction, RouteFunction}; use crate::channel_id::{ChannelId, SubChannelId}; use crate::communication::decorator::{count::CountedPush, exchange::ExchangePush, DataPush}; use crate::data::{Data, DataSet}; use crate::data_plane::{GeneralPull, GeneralPush, Push}; use crate::dataflow::DataflowBuilder; use crate::errors::BuildJobError; use crate::graph::Edge; enum ChannelKind<T: Data> { Pipeline, Shuffle(Box<dyn RouteFunction<T>>), Broadcast(Option<Box<dyn MultiRouteFunction<T>>>), Aggregate(u64), } pub struct Channel<T: Data> { kind: ChannelKind<T>, allow_cancel: bool, } #[derive(Copy, Clone, Debug)] pub(crate) struct ChannelMeta { pub id: SubChannelId, pub is_local: bool, pub push_peers: usize, pub forbid_cancel: bool, pub is_aggregate: bool, } impl Into<Edge> for ChannelMeta { fn into(self) -> Edge { Edge { id: self.id.index() as usize, source: Default::default(), target: Default::default(), scope_depth: 0, src_peers: self.push_peers, dst_peers: if self.is_aggregate { 1 } else { self.push_peers }, is_local: self.is_local, } } } pub(crate) struct MaterializedChannel<T: Data> { pub meta: ChannelMeta, push: DataPush<T>, pull: GeneralPull<DataSet<T>>, } impl<T: Data> MaterializedChannel<T> { pub fn take(self) -> (DataPush<T>, GeneralPull<DataSet<T>>) { (self.push, self.pull) } } impl<T: Data> Channel<T> { fn new(kind: ChannelKind<T>, allow_cancel: bool) -> Self { Channel { kind, allow_cancel } } pub fn forbid_cancel(&mut self) { self.allow_cancel = false; } pub(crate) fn materialize( self, dfb: &DataflowBuilder, ) -> Result<MaterializedChannel<T>, BuildJobError> { let index = dfb.next_channel_index(); let ch_id = (ChannelId { job_seq: dfb.config.job_id as u64, index }, dfb.worker_id.index).into(); match self.kind { ChannelKind::Pipeline => { let (tx, rx) = crate::data_plane::pipeline::<DataSet<T>>(ch_id); let meta = ChannelMeta { id: ch_id, is_local: true, push_peers: 1, forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let push = CountedPush::new( ch_id, dfb.worker_id, dfb.worker_id, tx.into(), &dfb.event_bus, ); Ok(MaterializedChannel { meta, push: DataPush::Count(push), pull: rx.into() }) } ChannelKind::Shuffle(r) => { let (raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let pushes = decorate_to_count(ch_id, raw, &dfb); let push = ExchangePush::exchange_to_one(dfb.config.batch_size as usize, ch_id, pushes, r); Ok(MaterializedChannel { meta, push: DataPush::Exchange(push), pull: pull.into() }) } ChannelKind::Broadcast(r) => { let (raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let pushes = decorate_to_count(ch_id, raw, &dfb); let push = if let Some(r) = r { ExchangePush::exchange_to_some(dfb.config.batch_size as usize, ch_id, pushes, r) } else { ExchangePush::broadcast(dfb.config.batch_size as usize, ch_id, pushes) }; Ok(MaterializedChannel { meta, push: DataPush::Exchange(push), pull: pull.into() }) } ChannelKind::Aggregate(id) => { let (mut raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: true, }; let push = raw.swap_remove(id as usize); let mut target = dfb.worker_id; 
target.index = id as u32; let push = CountedPush::new(ch_id, dfb.worker_id, target, push, &dfb.event_bus); for mut unused in raw { unused.close().ok(); } Ok(MaterializedChannel { meta, push: DataPush::Count(push), pull: pull.into() }) } } } } #[inline] fn decorate_to_count<T: Data>( ch_id: SubChannelId, raw: Vec<GeneralPush<DataSet<T>>>, dfb: &DataflowBuilder, ) -> Vec<CountedPush<T>> { let mut counts = Vec::with_capacity(raw.len()); let source = dfb.worker_id; for (idx, p) in raw.into_iter().enumerate() { let mut target = source; target.index = idx as u32; let push = CountedPush::new(ch_id, source, target, p, &dfb.event_bus); counts.push(push); } counts } pub struct Pipeline; impl<T: Data> From<Pipeline> for Channel<T> { fn from(_: Pipeline) -> Self { Channel::new(ChannelKind::Pipeline, true) } } impl<T: Data, R: RouteFunction<T>> From<Box<R>> for Channel<T> { fn from(route: Box<R>) -> Self { let kind = ChannelKind::Shuffle(route as Box<dyn RouteFunction<T>>); Channel::new(kind, true) } } impl<T: Data> From<Box<dyn RouteFunction<T>>> for Channel<T> { fn from(route: Box<dyn RouteFunction<T>>) -> Self { let kind = ChannelKind::Shuffle(route); Channel::new(kind, true) } } pub struct Broadcast; impl<T: Data> From<Broadcast> for Channel<T> { fn from(_: Broadcast) -> Self { Channel::new(ChannelKind::Broadcast(None), true) } } impl<T: Data> From<Box<dyn MultiRouteFunction<T>>> for Channel<T> { fn from(route: Box<dyn MultiRouteFunction<T>>) -> Self { let kind = ChannelKind::Broadcast(Some(route)); Channel::new(kind, true) } } pub struct Aggregate(pub u64); impl<T: Data> From<Aggregate> for Channel<T> { fn from(a: Aggregate) -> Self { let kind = ChannelKind::Aggregate(a.0); Channel::new(kind, true) } }
use crate::api::function::{MultiRouteFunction, RouteFunction}; use crate::channel_id::{ChannelId, SubChannelId}; use crate::communication::decorator::{count::CountedPush, exchange::ExchangePush, DataPush}; use crate::data::{Data, DataSet}; use crate::data_plane::{GeneralPull, GeneralPush, Push}; use crate::dataflow::DataflowBuilder; use crate::errors::BuildJobError; use crate::graph::Edge; enum ChannelKind<T: Data> { Pipeline, Shuffle(Box<dyn RouteFunction<T>>), Broadcast(Option<Box<dyn MultiRouteFunction<T>>>), Aggregate(u64), } pub struct Channel<T: Data> { kind: ChannelKind<T>, allow_cancel: bool, } #[derive(Copy, Clone, Debug)] pub(crate) struct ChannelMeta { pub id: SubChannelId, pub is_local: bool, pub push_peers: usize, pub forbid_cancel: bool, pub is_aggregate: bool, } impl Into<Edge> for ChannelMeta { fn into(self) -> Edge { Edge { id: self.id.index() as usize, source: Default::default(), target: Default::default(), scope_depth: 0, src_peers: self.push_peers, dst_peers: if self.is_aggregate { 1 } else { self.push_peers }, is_local: self.is_local, } } } pub(crate) struct MaterializedChannel<T: Data> { pub meta: ChannelMeta, push: DataPush<T>, pull: GeneralPull<DataSet<T>>, } impl<T: Data> MaterializedChannel<T> { pub fn take(self) -> (DataPush<T>, GeneralPull<DataSet<T>>) { (self.push, self.pull) } } impl<T: Data> Channel<T> { fn new(kind: ChannelKind<T>, allow_cancel: bool) -> Self { Channel { kind, allow_cancel } } pub fn forbid_cancel(&mut self) { self.allow_cancel = false; } pub(crate) fn materialize( self, dfb: &DataflowBuilder, ) -> Result<MaterializedChannel<T>, BuildJobError> { let index = dfb.next_channel_index(); let ch_id = (ChannelId { job_seq: dfb.config.job_id as u64, index }, dfb.worker_id.index).into(); match self.kind { ChannelKind::Pipeline => { let (tx, rx) = crate::data_plane::pipeline::<DataSet<T>>(ch_id); let meta = ChannelMeta { id: ch_id, is_local: true, push_peers: 1, forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let push = CountedPush::new( ch_id, dfb.worker_id, dfb.worker_id, tx.into(), &dfb.event_bus, ); Ok(MaterializedChannel { meta, push: DataPush::Count(push), pull: rx.into() }) } ChannelKind::Shuffle(r) => { let (raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let pushes = decorate_to_count(ch_id, raw, &dfb); let push = ExchangePush::exchange_to_one(dfb.config.batch_size as usize, ch_id, pushes, r); Ok(MaterializedChannel { meta, push: DataPush::Exchange(push), pull: pull.into() }) } ChannelKind::Broadcast(r) => { let (raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: false, }; let pushes = decorate_to_count(ch_id, raw, &dfb); let push =
; Ok(MaterializedChannel { meta, push: DataPush::Exchange(push), pull: pull.into() }) } ChannelKind::Aggregate(id) => { let (mut raw, pull) = super::build_channel::<DataSet<T>>(index, &dfb.config)?.take(); let meta = ChannelMeta { id: ch_id, is_local: false, push_peers: raw.len(), forbid_cancel: !self.allow_cancel, is_aggregate: true, }; let push = raw.swap_remove(id as usize); let mut target = dfb.worker_id; target.index = id as u32; let push = CountedPush::new(ch_id, dfb.worker_id, target, push, &dfb.event_bus); for mut unused in raw { unused.close().ok(); } Ok(MaterializedChannel { meta, push: DataPush::Count(push), pull: pull.into() }) } } } } #[inline] fn decorate_to_count<T: Data>( ch_id: SubChannelId, raw: Vec<GeneralPush<DataSet<T>>>, dfb: &DataflowBuilder, ) -> Vec<CountedPush<T>> { let mut counts = Vec::with_capacity(raw.len()); let source = dfb.worker_id; for (idx, p) in raw.into_iter().enumerate() { let mut target = source; target.index = idx as u32; let push = CountedPush::new(ch_id, source, target, p, &dfb.event_bus); counts.push(push); } counts } pub struct Pipeline; impl<T: Data> From<Pipeline> for Channel<T> { fn from(_: Pipeline) -> Self { Channel::new(ChannelKind::Pipeline, true) } } impl<T: Data, R: RouteFunction<T>> From<Box<R>> for Channel<T> { fn from(route: Box<R>) -> Self { let kind = ChannelKind::Shuffle(route as Box<dyn RouteFunction<T>>); Channel::new(kind, true) } } impl<T: Data> From<Box<dyn RouteFunction<T>>> for Channel<T> { fn from(route: Box<dyn RouteFunction<T>>) -> Self { let kind = ChannelKind::Shuffle(route); Channel::new(kind, true) } } pub struct Broadcast; impl<T: Data> From<Broadcast> for Channel<T> { fn from(_: Broadcast) -> Self { Channel::new(ChannelKind::Broadcast(None), true) } } impl<T: Data> From<Box<dyn MultiRouteFunction<T>>> for Channel<T> { fn from(route: Box<dyn MultiRouteFunction<T>>) -> Self { let kind = ChannelKind::Broadcast(Some(route)); Channel::new(kind, true) } } pub struct Aggregate(pub u64); impl<T: Data> From<Aggregate> for Channel<T> { fn from(a: Aggregate) -> Self { let kind = ChannelKind::Aggregate(a.0); Channel::new(kind, true) } }
if let Some(r) = r { ExchangePush::exchange_to_some(dfb.config.batch_size as usize, ch_id, pushes, r) } else { ExchangePush::broadcast(dfb.config.batch_size as usize, ch_id, pushes) }
if_condition
[ { "content": "pub fn pipeline<T>(id: SubChannelId) -> (ThreadPush<T>, ThreadPull<T>) {\n\n let queue = Box::new(VecDeque::new());\n\n let ptr =\n\n NonNull::new(Box::into_raw(queue)).expect(\"inter thread communication_old init failure;\");\n\n let exhaust = Arc::new(CachePadded::new(AtomicBool::new(false)));\n\n let closed = Arc::new(CachePadded::new(AtomicBool::new(false)));\n\n (ThreadPush::new(id, ptr, &exhaust, &closed), ThreadPull::new(id, ptr, exhaust, closed))\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn thread_push_pull() {\n\n let id: SubChannelId = [0, 0, 0].into();\n\n let (mut tx, mut rx) = pipeline::<u64>(id);\n\n for i in 0..65535 {\n\n tx.push(i).unwrap();\n\n }\n", "file_path": "research/gaia/pegasus/pegasus/src/data_plane/intra_thread.rs", "rank": 0, "score": 630869.8736624448 }, { "content": "pub fn is_connected(local_id: u64, remote_id: u64) -> bool {\n\n let states = CONNECTION_STATES.read().expect(\"lock poisoned\");\n\n local_id == remote_id\n\n || states.get(&(local_id, remote_id)).map(|s| s.is_connected()).unwrap_or(false)\n\n}\n\n\n", "file_path": "research/gaia/pegasus/network/src/state.rs", "rank": 1, "score": 490063.97474425746 }, { "content": "#[inline]\n\npub fn is_shutdown(server_id: u64) -> bool {\n\n let lock = SHUTDOWN_HOOK.read().expect(\"SHUTDOWN_HOOK read lock failure;\");\n\n if let Some(hook) = lock.get(&server_id) {\n\n hook.load(Ordering::SeqCst)\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "research/gaia/pegasus/network/src/lib.rs", "rank": 2, "score": 475379.08787405374 }, { "content": "#[inline]\n\nfn encode_channel_id(id: ChannelId, worker_index: u32) -> u128 {\n\n let mut ch_id = (id.job_seq as u128) << 64;\n\n ch_id |= (id.index as u128) << 32;\n\n ch_id |= worker_index as u128;\n\n ch_id\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use pegasus_network::config::ConnectionParams;\n\n use pegasus_network::Server;\n\n\n\n fn push_pull_ch(workers: usize, ch_res: ChannelResource<u64>) {\n\n let ch_id = ch_res.ch_id;\n\n let (mut pushes, mut pull) = ch_res.take();\n\n assert_eq!(pushes.len(), workers);\n\n let limit = workers as u64 * 1024;\n\n for i in 0..limit {\n\n let offset = i as usize % workers;\n", "file_path": "research/gaia/pegasus/pegasus/src/data_plane/mod.rs", "rank": 3, "score": 458176.0424221505 }, { "content": "pub fn start_connection(self_index: usize, remote_addresses: Vec<(usize, String)>, mut retry_times: u64) -> ::std::io::Result<Vec<(usize, String, TcpStream)>> {\n\n let mut result = Vec::with_capacity(remote_addresses.len());\n\n\n\n for (remote_index, remote_address) in remote_addresses {\n\n if remote_address.is_empty() {\n\n continue;\n\n }\n\n assert!(self_index > remote_index);\n\n loop {\n\n match TcpStream::connect(remote_address.as_str()) {\n\n Ok(mut tcp_stream) => {\n\n tcp_stream.set_nodelay(true).map_err(|e| Error::new(e.kind(), format!(\"set_nodelay call failed, caused: {:?}\", e)))?;\n\n unsafe { abomonation::encode(&HANDSHAKE_MAGIC, &mut tcp_stream) }.map_err(|e| Error::new(e.kind(), format!(\"failed to encode/send handshake magic, caused: {:?}\", e)))?;\n\n unsafe { abomonation::encode(&(self_index as u64), &mut tcp_stream) }.map_err(|e| Error::new(e.kind(), format!(\"failed to encode/send worker index, caused: {:?}\", e)))?;\n\n println!(\"worker {} connect to worker {} success!!\", self_index, remote_index);\n\n result.push((remote_index, remote_address, tcp_stream));\n\n break;\n\n },\n\n Err(error) => {\n\n 
sleep(Duration::from_millis(CONNECTION_INTERVAL_TIME));\n", "file_path": "interactive_engine/src/executor/Pegasus/src/network.rs", "rank": 4, "score": 438884.22732181835 }, { "content": "pub fn add_connection(local_id: u64, remote_id: u64, addr: SocketAddr) -> Option<Arc<AtomicBool>> {\n\n let disconnected = Arc::new(AtomicBool::new(false));\n\n {\n\n let mut states = CONNECTION_STATES.write().expect(\"lock poisoned\");\n\n let st = ConnectionState { local_id, remote_id, addr, disconnected: disconnected.clone() };\n\n if let Some(s) = states.get_mut(&(local_id, remote_id)) {\n\n if !s.is_connected() {\n\n *s = st;\n\n } else {\n\n error!(\n\n \"add connection to server[id={},addr={:?}] been refused, \\\n\n server[id={},addr={:?}] is in use;\",\n\n remote_id, addr, s.remote_id, s.addr\n\n );\n\n return None;\n\n }\n\n } else {\n\n states.insert((local_id, remote_id), st);\n\n }\n\n }\n\n {\n\n let mut addr_to_id = ADDR_TO_ID.write().expect(\"lock poisoned\");\n\n addr_to_id.insert(addr, remote_id);\n\n }\n\n Some(disconnected)\n\n}\n\n\n", "file_path": "research/gaia/pegasus/network/src/state.rs", "rank": 5, "score": 429580.1502988103 }, { "content": "pub fn await_connection(self_index: usize, tcp_listener: TcpListener, await_address: Vec<(usize, String)>, mut retry_times: u64) -> ::std::io::Result<Vec<(usize, String, TcpStream)>> {\n\n\n\n if cfg!(target_os = \"linux\") {\n\n tcp_listener.set_nonblocking(true).map_err(|e| Error::new(e.kind(), format!(\"Tcp listener cannot set non-blocking: {:?}\", e)))?;\n\n }\n\n\n\n let mut result = Vec::with_capacity(await_address.len());\n\n\n\n for _ in 0..await_address.len() {\n\n loop {\n\n match tcp_listener.accept() {\n\n Ok((mut tcp_stream, _socket_addr)) => {\n\n tcp_stream.set_nodelay(true).map_err(|e| Error::new(e.kind(), format!(\"Stream set_nodelay call failed, caused: {:?}\", e)))?;\n\n let mut buffer = [0u8; 16];\n\n tcp_stream.read_exact(&mut buffer).map_err(|e| Error::new(e.kind(), format!(\"failed to read worker index, caused: {:?}\", e)))?;\n\n let (magic, mut buffer) = unsafe { abomonation::decode::<u64>(&mut buffer) }.expect(\"failed to decode magic\");\n\n if magic != &HANDSHAKE_MAGIC {\n\n let error = ::std::io::Error::new(::std::io::ErrorKind::InvalidData, \"received incorrect timely handshake\");\n\n eprintln!(\"Worker {}: connected from other workers failed, caused by {}.\", self_index, error);\n\n continue;\n", "file_path": "interactive_engine/src/executor/Pegasus/src/network.rs", "rank": 6, "score": 425865.58332745766 }, { "content": "pub fn get_edge_bool_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<bool> {\n\n if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_bool() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/graph/edge.rs", "rank": 7, "score": 424909.3039021224 }, { "content": "pub fn reconnect(self_index: usize, listener: TcpListener, start_addresses: Vec<(usize, String)>, await_address: Vec<(usize, String)>, retry_times: u64) -> ::std::io::Result<Vec<(usize, String, TcpStream)>> {\n\n let start_task = thread::spawn(move || start_connection(self_index, start_addresses, retry_times));\n\n let await_task = thread::spawn(move || await_connection(self_index, listener, await_address, retry_times));\n\n\n\n let mut result = vec![];\n\n match start_task.join() {\n\n Ok(Ok(sub_result)) => result.extend(sub_result.into_iter()),\n\n Ok(Err(e)) => return Err(e),\n\n Err(_e) => 
return Err(Error::new(ErrorKind::Other, \"Join start connection failed. \")),\n\n };\n\n\n\n match await_task.join() {\n\n Ok(Ok(sub_result)) => result.extend(sub_result.into_iter()),\n\n Ok(Err(e)) => return Err(e),\n\n Err(_e) => return Err(Error::new(ErrorKind::Other, \"Join await connection failed. \")),\n\n };\n\n\n\n return Ok(result);\n\n}\n\n\n\n\n", "file_path": "interactive_engine/src/executor/Pegasus/src/network.rs", "rank": 8, "score": 422155.5556750731 }, { "content": "pub fn get_edge_bool_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<bool> {\n\n if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_bool() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/server/src/filter/mod.rs", "rank": 9, "score": 420329.38261705847 }, { "content": "#[inline]\n\npub fn get_shutdown_hook(server_id: u64) -> Option<Arc<AtomicBool>> {\n\n let lock = SHUTDOWN_HOOK.read().expect(\"SHUTDOWN_HOOK read lock failure;\");\n\n lock.get(&server_id).map(|hook| hook.clone())\n\n}\n\n\n", "file_path": "research/gaia/pegasus/network/src/lib.rs", "rank": 10, "score": 418104.05297923734 }, { "content": "pub fn check_connect(local: u64, remotes: &[u64]) -> bool {\n\n let states = CONNECTION_STATES.read().expect(\"lock poisoned\");\n\n for id in remotes {\n\n if *id != local && !states.get(&(local, *id)).map(|s| s.is_connected()).unwrap_or(false) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n", "file_path": "research/gaia/pegasus/network/src/state.rs", "rank": 11, "score": 406901.93581420043 }, { "content": "#[inline]\n\npub fn is_in_trace() -> bool {\n\n CURRENT_WORKER.with(|w| w.get().map(|w| w.trace_enable)).unwrap_or(false)\n\n || log_enabled!(log::Level::Trace)\n\n}\n\n\n\nmacro_rules! 
inspect_worker {\n\n ($lvl:expr, $arg0: expr) => (\n\n if log_enabled!($lvl) {\n\n if let Some(id) = $crate::worker_id::get_current_worker() {\n\n log!($lvl, concat!(\"{:?}: \", $arg0), id);\n\n } else {\n\n log!($lvl, $arg0);\n\n }\n\n } else if $lvl == log::Level::Info {\n\n if let Some(id) = $crate::worker_id::get_current_worker() {\n\n println!(concat!(\"{:?}: \", $arg0), id);\n\n } else {\n\n println!($arg0);\n\n }\n\n }\n", "file_path": "research/gaia/pegasus/pegasus/src/worker_id.rs", "rank": 12, "score": 403134.8893510351 }, { "content": "#[inline]\n\npub fn set_process_index(index: usize) {\n\n std::env::set_var(PROCESS_INDEX, index.to_string());\n\n}\n\n\n", "file_path": "research/gaia/pegasus/config/src/lib.rs", "rank": 13, "score": 399352.1683876353 }, { "content": "pub fn build_test_route() -> impl Fn(&i64) -> u64 + 'static {\n\n let store_config = StoreConfig {\n\n worker_id: 0,\n\n alive_id: 0,\n\n worker_num: 1,\n\n zk_url: \"\".to_string(),\n\n graph_name: \"\".to_string(),\n\n partition_num: 1,\n\n zk_timeout_ms: 0,\n\n zk_auth_enable: false,\n\n zk_auth_user: \"test\".to_string(),\n\n zk_auth_password: \"test\".to_string(),\n\n hb_interval_ms: 0,\n\n insert_thread_count: 0,\n\n download_thread_count: 0,\n\n hadoop_home: \"\".to_string(),\n\n local_data_root: \"\".to_string(),\n\n load_thread_count: 0,\n\n rpc_thread_count: 0,\n\n rpc_port: 0,\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/test/mod.rs", "rank": 14, "score": 398214.58477140986 }, { "content": "pub fn build_channels<T: Data>(\n\n id: ChannelId, workers: usize, server_index: usize, servers: &[u64],\n\n) -> Result<LinkedList<ChannelResource<T>>, BuildJobError> {\n\n if servers.is_empty() {\n\n return Ok(build_local_channels(id, workers));\n\n }\n\n assert!(\n\n server_index < servers.len(),\n\n \"invalid server index: {} out of bound: {}\",\n\n server_index,\n\n servers.len()\n\n );\n\n if servers.len() == 1 && server_index == 0 {\n\n return Ok(build_local_channels(id, workers));\n\n }\n\n let my_server_id = servers[server_index];\n\n\n\n let worker_offset = workers * server_index as usize;\n\n // prepare local channels;\n\n let mut to_local_pushes = LinkedList::new();\n", "file_path": "research/gaia/pegasus/pegasus/src/data_plane/mod.rs", "rank": 15, "score": 396824.54511413444 }, { "content": "fn keep_vertex<G: IndexType>(vid: G, peers: usize, work_id: usize) -> bool {\n\n vid.index() % peers == work_id\n\n}\n\n\n\nimpl<G: IndexType + Eq + FromStr + Send + Sync, I: IndexType + Send + Sync> GraphLoader<G, I> {\n\n /// Load vertices recorded in the file of `vertex_type` into the database.\n\n /// Return the number of vertices that are successfully loaded.\n\n fn load_vertices_to_db<R: Read>(&mut self, vertex_type: LabelId, mut rdr: Reader<R>) -> usize {\n\n let mut num_vertices = 0_usize;\n\n let graph_db = &mut self.graph_builder;\n\n let schema = self.graph_schema.clone();\n\n let parser =\n\n LDBCParser::<G>::vertex_parser(vertex_type, schema).expect(\"Get vertex parser error!\");\n\n let timer = Instant::now();\n\n let mut start;\n\n let mut end;\n\n for result in rdr.records() {\n\n if let Ok(record) = result {\n\n start = timer.elapsed().as_secs_f64();\n\n let record_iter = record.iter();\n", "file_path": "research/gaia/graph_store/src/ldbc.rs", "rank": 16, "score": 395516.54808957595 }, { "content": "pub fn build_local_channels<T: Data>(\n\n id: ChannelId, workers: usize,\n\n) -> LinkedList<ChannelResource<T>> {\n\n let mut list = LinkedList::new();\n\n if workers == 0 {\n\n 
return list;\n\n }\n\n\n\n if workers == 1 {\n\n let ch_id = (id, 0u32).into();\n\n let (tx, rx) = intra_thread::pipeline::<T>(ch_id);\n\n let pushes = vec![tx.into()];\n\n list.push_back(ChannelResource { ch_id, pushes, pull: rx.into() });\n\n return list;\n\n }\n\n\n\n let mut ch_txs = Vec::with_capacity(workers);\n\n let mut ch_rxs = Vec::with_capacity(workers);\n\n for _ in 0..workers {\n\n let (tx, rx) = pegasus_common::channel::unbound::<T>();\n", "file_path": "research/gaia/pegasus/pegasus/src/data_plane/mod.rs", "rank": 17, "score": 394588.0138996745 }, { "content": "#[inline]\n\npub fn server_id() -> Option<u64> {\n\n LOCAL_SERVER_ID.with(|id| {\n\n if let Some(id) = id.get() {\n\n Some(id)\n\n } else {\n\n let server_id = SERVER_ID.lock().expect(\"lock poisoned\");\n\n if let Some(g_id) = server_id.as_ref() {\n\n id.set(Some(*g_id));\n\n Some(*g_id)\n\n } else {\n\n None\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "research/gaia/pegasus/pegasus/src/lib.rs", "rank": 18, "score": 387419.3888998171 }, { "content": "#[inline]\n\npub fn shutdown(server_id: u64) {\n\n info!(\"server {} shutdown...\", server_id);\n\n let mut lock = SHUTDOWN_HOOK.write().expect(\"SHUTDOWN_HOOK write lock failure;\");\n\n if let Some(hook) = lock.remove(&server_id) {\n\n hook.store(true, Ordering::SeqCst);\n\n }\n\n}\n\n\n", "file_path": "research/gaia/pegasus/network/src/lib.rs", "rank": 19, "score": 387419.388899817 }, { "content": "pub fn read_id<R: ReadExt>(reader: &mut R) -> io::Result<ID> {\n\n reader.read_u128()\n\n}\n\n\n\n#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)]\n\npub enum Label {\n\n Str(String),\n\n Id(LabelId),\n\n}\n\n\n\nimpl Label {\n\n pub fn as_object(&self) -> Object {\n\n match self {\n\n Label::Str(s) => Object::String(s.to_string()),\n\n Label::Id(id) => Object::Primitive(Primitives::Integer(*id as i32)),\n\n }\n\n }\n\n}\n\n\n\nimpl Encode for Label {\n", "file_path": "research/gaia/gremlin/gremlin_core/src/structure/element/mod.rs", "rank": 20, "score": 385764.9555894163 }, { "content": "#[inline]\n\npub fn await_termination(server_id: u64) {\n\n let resources = {\n\n let mut lock = NETWORK_THREADS.lock().expect(\"fetch lock of NETWORK_THREADS failure;\");\n\n lock.remove(&server_id)\n\n };\n\n if let Some(mut resources) = resources {\n\n debug!(\"wait {} resources terminate;\", resources.len());\n\n for g in resources.drain(..) 
{\n\n if let Err(err) = g.join() {\n\n error!(\"network#wait_termination: found error: {:?};\", err);\n\n }\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn add_network_thread(server_id: u64, guard: JoinHandle<()>) {\n\n let mut lock = NETWORK_THREADS.lock().expect(\"fetch lock of NETWORK_THREADS failure;\");\n\n lock.entry(server_id).or_insert_with(|| vec![]).push(guard);\n\n}\n\n\n", "file_path": "research/gaia/pegasus/network/src/lib.rs", "rank": 21, "score": 384932.46676288487 }, { "content": "pub trait Data: Clone + Send + Debug + Encode + Decode + 'static {}\n\nimpl<T: Clone + Send + Debug + Encode + Decode + 'static> Data for T {}\n\n\n\npub struct DataSet<T> {\n\n pub tag: Tag,\n\n data: Vec<T>,\n\n recycle_hook: Option<Sender<Vec<T>>>,\n\n}\n\n\n\nimpl<D> DataSet<D> {\n\n #[inline]\n\n pub fn empty() -> Self {\n\n DataSet { tag: Tag::Root, data: Vec::new(), recycle_hook: None }\n\n }\n\n\n\n pub fn new<T: Into<Tag>>(tag: T, data: Vec<D>) -> Self {\n\n let tag: Tag = tag.into();\n\n DataSet { tag, data, recycle_hook: None }\n\n }\n\n\n", "file_path": "research/gaia/pegasus/pegasus/src/data.rs", "rank": 22, "score": 384492.0334900973 }, { "content": "#[inline]\n\npub fn build_route_fn(store_config: &StoreConfig) -> impl Fn(&i64) -> u64 + 'static {\n\n let worker_per_process = store_config.timely_worker_per_process as u64;\n\n let process_num = store_config.worker_num as u64;\n\n let partition_num = store_config.partition_num as u64;\n\n let partition_per_process = partition_num / process_num;\n\n\n\n move |vid| {\n\n let mut m = vid % partition_num as i64;\n\n if m < 0 { m += partition_num as i64; }\n\n let process_index = m as u64 / partition_per_process;\n\n let worker_index = m as u64 % worker_per_process;\n\n process_index * worker_per_process + worker_index\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/execution/mod.rs", "rank": 23, "score": 383682.6231742881 }, { "content": "pub fn encode_store_e_id(e: &ID) -> EdgeId<DefaultId> {\n\n let index = (*e >> ID_SHIFT_BITS) as usize;\n\n let start_id = (*e & ID_MASK) as DefaultId;\n\n (start_id, index)\n\n}\n\n\n", "file_path": "research/gaia/gremlin/gremlin_core/src/storage.rs", "rank": 24, "score": 380664.8330710785 }, { "content": "#[inline]\n\npub fn ch_mgr<'a>(ch_mgrs: &'a Vec<ChannelEvents>, ch_id: &ChannelId) -> Option<&'a ChannelEvents> {\n\n let index = ch_id.0;\n\n if index - 1 >= ch_mgrs.len() {\n\n None\n\n } else {\n\n Some(&ch_mgrs[index - 1])\n\n }\n\n}\n\n\n\npub struct EventManager {\n\n pub worker_id: WorkerId,\n\n in_chs: Vec<Vec<ChannelId>>,\n\n out_chs: HashMap<usize, Vec<(ChannelId, Port)>>,\n\n ch_mgrs: Vec<ChannelEvents>,\n\n //ch_mgrs: HashMap<ChannelId, ChannelEvents>,\n\n event_caster: EventCaster,\n\n event_buffer: EventsBuffer,\n\n deltas: Vec<i64>\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/Pegasus/src/event.rs", "rank": 26, "score": 369287.0623740674 }, { "content": "#[inline]\n\npub fn get_process_index() -> usize {\n\n *RUNTIME_PROCESSE_INDEX\n\n}\n\n\n", "file_path": "research/gaia/pegasus/config/src/lib.rs", "rank": 27, "score": 365665.9196969472 }, { "content": "#[inline]\n\npub fn used_memory_in_bytes() -> usize {\n\n ALLOCATED_MEM.load(SeqCst)\n\n}\n\n\n\npub struct MemoryStat;\n\n\n\nunsafe impl GlobalAlloc for MemoryStat {\n\n unsafe fn alloc(&self, layout: Layout) -> *mut u8 {\n\n let ret = System.alloc(layout);\n\n if !ret.is_null() {\n\n ALLOCATED_MEM.fetch_add(layout.size(), SeqCst);\n\n }\n\n ret\n\n }\n\n\n\n unsafe fn dealloc(&self, ptr: *mut u8, 
layout: Layout) {\n\n System.dealloc(ptr, layout);\n\n ALLOCATED_MEM.fetch_sub(layout.size(), SeqCst);\n\n }\n\n}\n", "file_path": "interactive_engine/src/executor/Pegasus/src/memory.rs", "rank": 28, "score": 363253.23552575905 }, { "content": "#[inline]\n\npub fn never_clone<T>(raw: T) -> NeverClone<T> {\n\n NeverClone { inner: raw }\n\n}\n\n\n\npub struct NeverClone<T> {\n\n inner: T,\n\n}\n\n\n\nimpl<T> Clone for NeverClone<T> {\n\n fn clone(&self) -> Self {\n\n unreachable!(\"can't clone;\")\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl<T> NeverClone<T> {\n\n pub fn take(self) -> T {\n\n self.inner\n\n }\n\n}\n", "file_path": "research/gaia/pegasus/pegasus/src/operator/concise/mod.rs", "rank": 29, "score": 363135.3428504709 }, { "content": "pub fn sleep_until(flag: &AtomicUsize, target: usize) -> SleepGuard {\n\n while flag.load(Ordering::Relaxed) != target {\n\n sleep_ms(100);\n\n }\n\n SleepGuard::new(flag, target)\n\n}\n\n\n\npub struct SleepGuard<'a> {\n\n flag: &'a AtomicUsize,\n\n target: usize,\n\n}\n\n\n\nimpl<'a> SleepGuard<'a> {\n\n fn new(flag: &'a AtomicUsize, target: usize) -> Self {\n\n SleepGuard {\n\n flag,\n\n target,\n\n }\n\n }\n\n}\n", "file_path": "interactive_engine/src/common/rust/common/src/util/time.rs", "rank": 30, "score": 361073.7978808936 }, { "content": "pub fn exec<D: AnyData>(\n\n stream: &Stream<D>, plan: &[pb::OperatorDef], factory: &Arc<dyn JobCompiler<D>>,\n\n) -> Result<Stream<D>, BuildJobError> {\n\n if plan.is_empty() {\n\n Err(\"should be unreachable, plan length = 0;\")?\n\n }\n\n let mut owned_stream = install(stream, &plan[0], factory)?;\n\n for op in &plan[1..] {\n\n owned_stream = install(&owned_stream, op, factory)?;\n\n }\n\n Ok(owned_stream)\n\n}\n\n\n", "file_path": "research/gaia/pegasus/server/src/materialize.rs", "rank": 31, "score": 359215.742149645 }, { "content": "#[inline]\n\npub fn get_ch_mgr<'a>(ch_mgrs: &'a mut Vec<ChannelEvents>, ch_id: &ChannelId) -> Option<&'a mut ChannelEvents> {\n\n let index = ch_id.0;\n\n if index - 1 >= ch_mgrs.len() {\n\n None\n\n } else {\n\n Some(&mut ch_mgrs[index - 1])\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/Pegasus/src/event.rs", "rank": 32, "score": 357242.6391022754 }, { "content": "#[inline]\n\npub fn new_task(task_id: usize) {\n\n PER_TASK_MONITOR.trace_new_task(task_id);\n\n}\n\n\n", "file_path": "research/gaia/pegasus/memory/src/alloc.rs", "rank": 33, "score": 353981.6749201752 }, { "content": "#[inline]\n\npub fn remove_task(task_id: usize) {\n\n PER_TASK_MONITOR.remove_task(task_id);\n\n}\n\n\n", "file_path": "research/gaia/pegasus/memory/src/alloc.rs", "rank": 34, "score": 353981.6749201752 }, { "content": "pub fn get_handshake(server_id: u64, hb: u32) -> u128 {\n\n let mut value = (PASS_PHRASE as u128) << 96;\n\n let server_id = server_id as u128;\n\n value |= server_id << 32;\n\n value |= hb as u128;\n\n value\n\n}\n\n\n", "file_path": "research/gaia/pegasus/network/src/transport/mod.rs", "rank": 35, "score": 350163.0403868957 }, { "content": "#[inline]\n\npub fn check_task_memory(task_id: usize) -> Option<usize> {\n\n PER_TASK_MONITOR.get_task_memory(task_id)\n\n}\n\n\n", "file_path": "research/gaia/pegasus/memory/src/alloc.rs", "rank": 36, "score": 348982.3298631475 }, { "content": "pub trait Data: Serialize + for<'a> Deserialize<'a> + Clone + Send + Any + Debug + 'static {}\n\n\n\nimpl<T: Serialize + for<'a> Deserialize<'a> + Clone + Send + Any + Debug + 'static> Data for T {}\n\n\n", "file_path": "interactive_engine/src/executor/Pegasus/src/lib.rs", "rank": 
37, "score": 347190.72642933007 }, { "content": "pub fn generate_task_id(frontend_query_id: String) -> usize {\n\n let query_id = if frontend_query_id.chars().nth(0).unwrap() == '-' {\n\n &frontend_query_id[1..frontend_query_id.len()]\n\n } else {\n\n &frontend_query_id\n\n };\n\n\n\n let task_id = query_id.to_owned().parse::<usize>().expect(\"parser query id failed.\");\n\n info!(\"frontend query id: {}, self query id: {}\", frontend_query_id, task_id);\n\n task_id\n\n}\n", "file_path": "interactive_engine/src/executor/runtime/src/rpc/rpc_pegasus/mod.rs", "rank": 38, "score": 345885.2364265568 }, { "content": "#[inline]\n\npub fn reset_current_task(task_id: Option<usize>) {\n\n TASK_ID.with(|id| id.set(task_id));\n\n}\n\n\n", "file_path": "research/gaia/pegasus/memory/src/alloc.rs", "rank": 39, "score": 342287.0251529091 }, { "content": "#[allow(dead_code)]\n\npub fn report_memory(job_id: u64) -> Option<std::thread::JoinHandle<()>> {\n\n let g = std::thread::Builder::new()\n\n .name(format!(\"memory-reporter {}\", job_id))\n\n .spawn(move || {\n\n let mut max_usage = 0;\n\n let mut count_zero_times = 50;\n\n loop {\n\n if let Some(usage) = pegasus_memory::alloc::check_task_memory(job_id as usize) {\n\n if usage > max_usage {\n\n max_usage = usage;\n\n }\n\n } else if max_usage > 0 {\n\n break;\n\n } else if max_usage == 0 {\n\n count_zero_times -= 1;\n\n if count_zero_times <= 0 {\n\n break;\n\n }\n\n }\n\n std::thread::sleep(std::time::Duration::from_millis(10));\n\n }\n\n info!(\"Job {} memory usage: {:.4} MB;\", job_id, max_usage as f64 / 1_000_000.0);\n\n })\n\n .unwrap();\n\n Some(g)\n\n}\n", "file_path": "research/gaia/pegasus/server/src/lib.rs", "rank": 40, "score": 342260.4227304331 }, { "content": "pub fn get_message_bool_prop_value(prop_id: i32,\n\n v: &RawMessage) -> Option<bool> {\n\n if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id) {\n\n if let Ok(val) = prop.get_value().get_bool() {\n\n return Some(val);\n\n }\n\n }\n\n } else if prop_id == 0 {\n\n if let Some(value) = v.get_value() {\n\n if let Ok(val) = value.get_bool() {\n\n return Some(val);\n\n }\n\n }\n\n } else if prop_id == PROP_KEY {\n\n if let Some(entry) = v.get_entry_value() {\n\n if let Some(key) = entry.get_key().get_value() {\n\n if let Ok(val) = key.get_bool() {\n\n return Some(val);\n\n }\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/graph/message.rs", "rank": 41, "score": 339893.5105320128 }, { "content": "#[inline]\n\npub fn new_empty_response(task_id: u64, res_type: ResponseType) -> EmptyResponse {\n\n TaskResponse::<Empty>::empty(task_id, res_type)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::operator::sink::BinarySinker;\n\n use crate::serialize::write_binary;\n\n\n\n struct EchoTask;\n\n impl TaskGenerator for EchoTask {\n\n fn create_task(&self, task: TaskRequest<Option<Bytes>>, _runtime: &Pegasus, sink: &mut Sink) -> Result<(), String> {\n\n let task_id = task.header.task_id;\n\n let res = task.take_body().unwrap();\n\n let echo = Echo::read_from(res).unwrap();\n\n let res = TaskResponse::new(task_id, ResponseType::OK, echo);\n\n let res = write_binary(&res).unwrap();\n\n sink.sink(res).map_err(|err| format!(\"sink error: {:?}\", err))?;\n\n Ok(())\n", "file_path": "interactive_engine/src/executor/Pegasus/src/server/mod.rs", "rank": 42, "score": 339078.12124068046 }, { "content": "pub fn get_vertex_bool_prop_value<V: Vertex>(prop_id: i32, v: &V) -> Option<bool> {\n\n if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as 
PropId) {\n\n if let Ok(val) = prop.get_bool() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/server/src/filter/mod.rs", "rank": 43, "score": 338066.97684529395 }, { "content": "pub fn get_vertex_bool_prop_value<V: Vertex>(prop_id: i32, v: &V) -> Option<bool> {\n\n if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_bool() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/graph/vertex.rs", "rank": 44, "score": 336111.7663434822 }, { "content": "#[inline]\n\npub fn filter_within_value(val: &ValuePayload, list: &Vec<RawMessage>) -> bool {\n\n match val {\n\n ValuePayload::Int(v) => {\n\n for l in list {\n\n if let Some(lv) = l.get_value() {\n\n if let Ok(iv) = lv.get_int() {\n\n if iv == *v {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n ValuePayload::Long(v) => {\n\n for l in list {\n\n if let Some(lv) = l.get_value() {\n\n if let Ok(iv) = lv.get_long() {\n\n if iv == *v {\n\n return true;\n\n }\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/manager/filter.rs", "rank": 45, "score": 333888.15409640083 }, { "content": "#[inline]\n\npub fn filter_without_value(val: &ValuePayload, list: &Vec<RawMessage>) -> bool {\n\n return !filter_within_value(val, list);\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/manager/filter.rs", "rank": 46, "score": 333888.15409640083 }, { "content": "#[inline]\n\nfn gen_channel<D: AnyData>(\n\n ch: Option<&pb::ChannelDef>, factory: &Arc<dyn JobCompiler<D>>,\n\n) -> Result<Channel<D>, BuildJobError> {\n\n Ok(match ch {\n\n Some(ch) => match &ch.ch_kind {\n\n Some(pb::channel_def::ChKind::ToLocal(_)) => Pipeline.into(),\n\n Some(pb::channel_def::ChKind::ToAnother(route)) => {\n\n factory.shuffle(&route.resource)?.into()\n\n }\n\n Some(pb::channel_def::ChKind::ToOne(aggre)) => Aggregate(aggre.target as u64).into(),\n\n Some(pb::channel_def::ChKind::ToOthers(broadcast)) => {\n\n if broadcast.resource.is_empty() {\n\n Broadcast.into()\n\n } else {\n\n let route = factory.broadcast(&broadcast.resource)?;\n\n route.into()\n\n }\n\n }\n\n None => Pipeline.into(),\n\n },\n", "file_path": "research/gaia/pegasus/server/src/materialize.rs", "rank": 47, "score": 332743.7451153309 }, { "content": "pub fn parse_dedup_manager(base: &OperatorBase, debug_flag: bool) -> Option<DedupManager> {\n\n let argument = base.get_argument();\n\n if argument.get_dedup_local_flag() && !argument.get_subquery_flag() {\n\n Some(DedupManager::new(debug_flag))\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/manager/dedup.rs", "rank": 48, "score": 331776.9530117633 }, { "content": "pub fn is_shutdown() -> bool {\n\n SHUTDOWN_HOOK.load(Ordering::SeqCst)\n\n}\n\n\n", "file_path": "research/gaia/pegasus/executor/src/reactor.rs", "rank": 49, "score": 331101.8004547175 }, { "content": "#[inline]\n\npub fn cmp_vertex_edge(a: &RawMessage, b: &RawMessage, orders: &[OrderComparator]) -> Ordering {\n\n if a.get_message_type() == b.get_message_type() {\n\n match a.get_message_type() {\n\n RawMessageType::VERTEX | RawMessageType::EDGE => {\n\n cmp_vertex_edge_class_value(a, b, orders)\n\n }\n\n RawMessageType::ERROR => {\n\n error!(\"not support: {:?}\", a.get_message_type());\n\n Ordering::Equal\n\n }\n\n _ => {\n\n cmp_value_entity_orders(a, b, orders)\n\n }\n\n }\n\n } else {\n\n 
error!(\"class type is not equal, {:?} vs {:?}\", a.get_message_type(), b.get_message_type());\n\n Ordering::Equal\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/manager/order.rs", "rank": 50, "score": 324833.82710678136 }, { "content": "#[inline]\n\npub fn with_tlv<R, P: Fn(&Object) -> R>(func: P) -> Option<R> {\n\n RIGHT_VALUE.with(|tlv| tlv.borrow().as_ref().map(|v| func(v)))\n\n}\n\n\n", "file_path": "research/gaia/gremlin/gremlin_core/src/structure/filter/element/mod.rs", "rank": 51, "score": 324572.9934685305 }, { "content": "pub fn get_edge_int_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<i32> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id() as i32);\n\n } else if prop_id == PROP_ID_LABEL {\n\n return Some(v.get_label_id() as i32);\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_int() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/graph/edge.rs", "rank": 52, "score": 324542.56086655636 }, { "content": "pub fn get_edge_double_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<f64> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id() as f64);\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_double() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/graph/edge.rs", "rank": 53, "score": 324542.56086655636 }, { "content": "pub fn get_edge_float_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<f32> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id() as f32);\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_float() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/graph/edge.rs", "rank": 54, "score": 324542.56086655636 }, { "content": "pub fn get_edge_string_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<String> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id().to_string());\n\n } else if prop_id == PROP_ID_LABEL {\n\n return Some(v.get_label_id().to_string());\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_string() {\n\n return Some(val.to_owned());\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/graph/edge.rs", "rank": 55, "score": 324542.56086655636 }, { "content": "pub fn get_edge_long_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<i64> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id());\n\n } else if prop_id == PROP_ID_LABEL {\n\n return Some(v.get_label_id() as i64);\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_long() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/graph/edge.rs", "rank": 56, "score": 324542.56086655636 }, { "content": "pub fn current_time_millis() -> u64 {\n\n let now = SystemTime::now();\n\n let d = now.duration_since(UNIX_EPOCH).unwrap();\n\n d.as_secs() * 1000 + (d.subsec_nanos() / 1000000) as u64\n\n}\n\n\n", "file_path": "interactive_engine/src/common/rust/common/src/util/time.rs", "rank": 
57, "score": 321764.5176631114 }, { "content": "pub fn current_time_secs() -> u64 {\n\n let now = SystemTime::now();\n\n now.duration_since(UNIX_EPOCH).unwrap().as_secs()\n\n}\n\n\n", "file_path": "interactive_engine/src/common/rust/common/src/util/time.rs", "rank": 58, "score": 321764.5176631114 }, { "content": "struct AnonymityEmitter<D: Data, F: FnMut(&D) -> Option<u32> + Send + Clone + 'static> {\n\n func: F,\n\n _ph: std::marker::PhantomData<D>,\n\n}\n\n\n\nimpl<D: Data, F: FnMut(&D) -> Option<u32> + Send + Clone + 'static> AnonymityEmitter<D, F> {\n\n pub fn new(func: F) -> Self {\n\n AnonymityEmitter { func, _ph: std::marker::PhantomData }\n\n }\n\n}\n\n\n\nimpl<D: Data, F: FnMut(&D) -> Option<u32> + Send + Clone + 'static> Clone\n\n for AnonymityEmitter<D, F>\n\n{\n\n fn clone(&self) -> Self {\n\n AnonymityEmitter { func: self.func.clone(), _ph: std::marker::PhantomData }\n\n }\n\n}\n\n\n\nimpl<D: Data, F: FnMut(&D) -> Option<u32> + Send + Clone + 'static> ScopeInputEmitter<D>\n", "file_path": "research/gaia/pegasus/pegasus/src/operator/multiplex/mod.rs", "rank": 59, "score": 321345.5156258804 }, { "content": "pub fn has_id(id: Option<ID>) -> ElementFilter {\n\n ElementFilter::HasId(HasId::eq(id))\n\n}\n\n\n", "file_path": "research/gaia/gremlin/gremlin_core/src/structure/filter/element/mod.rs", "rank": 60, "score": 320970.15155242186 }, { "content": "pub fn get_edge_float_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<f32> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id() as f32);\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_float() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/server/src/filter/mod.rs", "rank": 61, "score": 319566.8894451895 }, { "content": "pub fn get_edge_string_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<String> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id().to_string());\n\n } else if prop_id == PROP_ID_LABEL {\n\n return Some(v.get_label_id().to_string());\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_string() {\n\n return Some(val.to_owned());\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/server/src/filter/mod.rs", "rank": 62, "score": 319566.8894451895 }, { "content": "pub fn get_edge_int_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<i32> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id() as i32);\n\n } else if prop_id == PROP_ID_LABEL {\n\n return Some(v.get_label_id() as i32);\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_int() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/server/src/filter/mod.rs", "rank": 63, "score": 319566.8894451895 }, { "content": "pub fn get_edge_double_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<f64> {\n\n if prop_id == PROP_ID {\n\n return Some(v.get_edge_id() as f64);\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_double() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n", "file_path": "interactive_engine/src/executor/server/src/filter/mod.rs", "rank": 64, "score": 319566.8894451895 }, { "content": "pub fn get_edge_long_prop_value<E: Edge>(prop_id: i32, v: &E) -> Option<i64> {\n\n 
if prop_id == PROP_ID {\n\n return Some(v.get_edge_id());\n\n } else if prop_id == PROP_ID_LABEL {\n\n return Some(v.get_label_id() as i64);\n\n } else if prop_id > 0 {\n\n if let Some(prop) = v.get_property(prop_id as PropId) {\n\n if let Ok(val) = prop.get_long() {\n\n return Some(val);\n\n }\n\n }\n\n }\n\n return None;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/server/src/filter/mod.rs", "rank": 65, "score": 319566.8894451895 }, { "content": "#[inline]\n\npub fn cmp_vertex_edge_class_value(va: &RawMessage, vb: &RawMessage, orders: &[OrderComparator]) -> Ordering {\n\n let mut ordering = Ordering::Equal;\n\n for order in orders {\n\n let prop_id = order.get_prop_id();\n\n if prop_id > 0 {\n\n let pa = va.get_property(prop_id);\n\n let pb = vb.get_property(prop_id);\n\n ordering = cmp_prop_entity(pa, pb, order);\n\n } else if prop_id < PROP_VALUE {\n\n ordering = cmp_label_entity_value(va.get_label_entity_by_id(prop_id), vb.get_label_entity_by_id(prop_id), order);\n\n } else if prop_id == PROP_ID\n\n || order.get_prop_id() == 0 {\n\n ordering = va.get_id().cmp(&vb.get_id());\n\n } else {\n\n RawMessage::from_error(message::ErrorCode::UNIMPLEMENT, \"not support to compare label/key/value yet\".to_string());\n\n }\n\n if ordering != Ordering::Equal {\n\n break;\n\n }\n\n }\n\n ordering\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/manager/order.rs", "rank": 66, "score": 317073.4412478951 }, { "content": "fn set_server_id(server_id: u64) -> Option<u64> {\n\n let mut id = SERVER_ID.lock().expect(\"lock poisoned\");\n\n if let Some(id) = &*id {\n\n Some(*id)\n\n } else {\n\n id.replace(server_id);\n\n None\n\n }\n\n}\n\n\n", "file_path": "research/gaia/pegasus/pegasus/src/lib.rs", "rank": 67, "score": 315285.0886424512 }, { "content": "#[inline]\n\npub fn sink_task_result(tid: ThreadId, seq: usize, result: Option<ExecError>) {\n\n let lock = TASK_RESULTS.read().expect(\"TASK_RESULTS lock poison\");\n\n if let Some(sink) = lock.get(&tid) {\n\n sink.send((seq, result)).expect(\"sink result failure\");\n\n } else {\n\n error!(\n\n \"abandon result of task submitted by thread {:?} as submit thread disconnected;\",\n\n tid\n\n );\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n // use super::*;\n\n // use std::io::ErrorKind;\n\n //\n\n // struct DirectThread;\n\n //\n\n // impl Executor for DirectThread {\n", "file_path": "research/gaia/pegasus/executor/src/lib.rs", "rank": 68, "score": 313963.62244668085 }, { "content": "#[inline]\n\npub fn complete(id: u32) {\n\n let complete = CURRENT_SCOPE.with(|cur| cur.borrow().as_ref().map(|tag| Tag::inherit(tag, id)));\n\n\n\n if let Some(complete) = complete {\n\n EXTRA_COMPLETES.with(|cpe| cpe.borrow_mut().push(complete));\n\n }\n\n}\n", "file_path": "research/gaia/pegasus/pegasus/src/api/scope/enter.rs", "rank": 69, "score": 311733.6095610075 }, { "content": "pub fn sleep_ms(ms: u64) {\n\n thread::sleep(Duration::from_millis(ms));\n\n}\n\n\n", "file_path": "interactive_engine/src/common/rust/common/src/util/time.rs", "rank": 70, "score": 308572.30509243073 }, { "content": "#[inline]\n\npub fn build_empty_router() -> impl Fn(&i64) -> i32 + 'static {\n\n move |_| {\n\n // info!(\"generate store id with 0\");\n\n 0\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/execution/mod.rs", "rank": 71, "score": 307551.2240964647 }, { "content": "struct SourceOnce<D: Data> {\n\n src: Box<dyn Iterator<Item = D> + Send>,\n\n input_batch: usize,\n\n}\n\n\n\nimpl<D: Data> SourceOnce<D> {\n\n 
pub fn new<I: Iterator<Item = D> + Send + 'static>(input_batch: usize, src: I) -> Self {\n\n SourceOnce {\n\n src: Box::new(src),\n\n input_batch\n\n }\n\n }\n\n}\n\n\n\nimpl<D: Data> OperatorCore for SourceOnce<D> {\n\n // For the source, there is no input;\n\n fn on_receive(&mut self, _inputs: &[RefCell<Box<dyn TaggedInput>>], _outputs: &[RefCell<Box<dyn TaggedOutput>>]) -> FireResult {\n\n unimplemented!()\n\n }\n\n\n", "file_path": "interactive_engine/src/executor/Pegasus/src/operator/source.rs", "rank": 72, "score": 305631.12514136685 }, { "content": "pub trait BinaryNotify<L: Data, R: Data, O: Data>: Send + 'static {\n\n type NotifyResult: IntoIterator<Item = O>;\n\n\n\n fn on_receive(\n\n &mut self, input: &mut BinaryInput<L, R>, output: &mut Output<O>,\n\n ) -> Result<(), JobExecError>;\n\n\n\n fn on_notify(&mut self, n: BinaryNotification) -> Self::NotifyResult;\n\n}\n\n\n", "file_path": "research/gaia/pegasus/pegasus/src/api/primitive/binary.rs", "rank": 73, "score": 304034.91306711326 }, { "content": "pub fn contains_id(ids: HashSet<ID>) -> ElementFilter {\n\n ElementFilter::ContainsId(ContainsId::with_in(ids))\n\n}\n\n\n", "file_path": "research/gaia/gremlin/gremlin_core/src/structure/filter/element/mod.rs", "rank": 74, "score": 302667.1074712631 }, { "content": "/// Verify if a given folder stores vertex or edge\n\npub fn is_vertex_file(fname: &str) -> bool {\n\n fname.find('_').is_none()\n\n}\n\n\n\n/// `LDBCParser` defines parsing from the original LDBC-generated raw files.\n\n#[derive(Clone)]\n\npub enum LDBCParser<G = DefaultId> {\n\n Vertex(LDBCVertexParser<G>),\n\n Edge(LDBCEdgeParser<G>),\n\n}\n\n\n\nimpl<G> LDBCParser<G> {\n\n pub fn vertex_parser(vertex_type_id: LabelId, schema: Arc<dyn Schema>) -> GDBResult<Self> {\n\n let header = schema.get_vertex_schema(vertex_type_id).ok_or(GDBError::InvalidTypeError)?;\n\n let id_field = header.get(ID_FIELD).ok_or(GDBError::FieldNotExistError)?;\n\n let label_field = header.get(LABEL_FIELD);\n\n\n\n Ok(LDBCParser::Vertex(LDBCVertexParser {\n\n vertex_type: vertex_type_id,\n\n id_index: id_field.1,\n", "file_path": "research/gaia/graph_store/src/ldbc.rs", "rank": 75, "score": 301944.7146022772 }, { "content": "/// Verify if a given file is a hidden file in Unix system.\n\npub fn is_hidden_file(fname: &str) -> bool {\n\n fname.starts_with('.')\n\n}\n\n\n", "file_path": "research/gaia/graph_store/src/ldbc.rs", "rank": 76, "score": 301937.8588136108 }, { "content": "#[inline]\n\npub fn get_current_worker_uncheck() -> WorkerId {\n\n CURRENT_WORKER.with(|w| w.get()).expect(\"current worker lost;\")\n\n}\n\n\n", "file_path": "research/gaia/pegasus/pegasus/src/worker_id.rs", "rank": 77, "score": 300219.4584547793 }, { "content": "pub fn graph_step_from(\n\n gremlin_step: &mut pb::GremlinStep, num_servers: usize,\n\n) -> Result<GraphVertexStep, BuildJobError> {\n\n if let Some(option) = gremlin_step.step.take() {\n\n match option {\n\n pb::gremlin_step::Step::GraphStep(mut opt) => {\n\n let requirements_pb = unsafe { std::mem::transmute(opt.traverser_requirements) };\n\n let requirements = Requirement::from_pb(requirements_pb)?;\n\n let return_type = unsafe { std::mem::transmute(opt.return_type) };\n\n let mut step = GraphVertexStep::new(return_type, requirements);\n\n step.set_tags(gremlin_step.get_tags());\n\n let mut ids = vec![];\n\n for id in opt.ids {\n\n let id = ID::from_str(&id).unwrap();\n\n ids.push(id);\n\n }\n\n if !ids.is_empty() {\n\n step.set_src(ids, num_servers, return_type);\n\n }\n\n let labels = 
std::mem::replace(&mut opt.labels, vec![]);\n", "file_path": "research/gaia/gremlin/gremlin_core/src/process/traversal/step/source.rs", "rank": 78, "score": 299855.5889083855 }, { "content": "pub fn duration_to_nanos(d: &Duration) -> u64 {\n\n d.as_secs() * 1000_000_000 + d.subsec_nanos() as u64\n\n}\n\n\n", "file_path": "interactive_engine/src/common/rust/common/src/util/time.rs", "rank": 79, "score": 298301.0686184223 }, { "content": "#[inline(always)]\n\npub fn new_output_session<'a, D: Data>(\n\n generic: &'a Box<dyn OutputProxy>, tag: &Tag,\n\n) -> OutputSession<'a, D> {\n\n RefWrapOutput::<D>::downcast(generic).new_session(tag)\n\n}\n", "file_path": "research/gaia/pegasus/pegasus/src/communication/output/mod.rs", "rank": 80, "score": 298031.90189788747 }, { "content": "#[inline]\n\npub fn new_input_session<'a, D: Data>(\n\n raw: &'a Box<dyn InputProxy>, tag: &Tag,\n\n) -> InputSession<'a, D> {\n\n RefWrapInput::<D>::downcast(raw).new_session(tag.clone())\n\n}\n\n\n\n#[inline]\n\npub(crate) fn new_input<D: Data>(\n\n meta: ChannelMeta, scope_depth: usize, event_bus: &EventBus, pull: GeneralPull<DataSet<D>>,\n\n) -> Box<dyn InputProxy> {\n\n let input = InboundChannel::new(meta, scope_depth, event_bus.clone(), pull);\n\n Box::new(RefWrapInput::wrap(input)) as Box<dyn InputProxy>\n\n}\n", "file_path": "research/gaia/pegasus/pegasus/src/communication/input/mod.rs", "rank": 81, "score": 298031.90189788747 }, { "content": "pub fn parse_property(data: &str, data_type: DataType) -> Property {\n\n match data_type {\n\n DataType::Bool => {\n\n match data {\n\n \"true\" => Property::Bool(true),\n\n \"false\" => Property::Bool(false),\n\n _ => Property::Unknown,\n\n }\n\n }\n\n DataType::Char => {\n\n match data.len() {\n\n 1 => Property::Char(data.as_bytes()[0]),\n\n _ => Property::Unknown,\n\n }\n\n }\n\n DataType::Short => {\n\n match data.parse::<i16>() {\n\n Ok(x) => Property::Short(x),\n\n _ => Property::Unknown,\n\n }\n", "file_path": "interactive_engine/src/executor/store/src/api/property.rs", "rank": 82, "score": 297620.50827736966 }, { "content": "#[derive(Copy,Clone)]\n\nstruct Slice(pub *mut u8, pub usize);\n\n\n\nimpl Slice {\n\n pub fn as_slice(&self) -> &[u8] {\n\n unsafe {\n\n ::std::slice::from_raw_parts(self.0, self.1)\n\n }\n\n }\n\n\n\n pub fn as_mut_slice(&mut self) -> &mut [u8] {\n\n unsafe {\n\n ::std::slice::from_raw_parts_mut(self.0, self.1)\n\n }\n\n }\n\n}\n\n\n\npub struct Bytes {\n\n slice: Slice,\n\n backend: Arc<Box<dyn Any>>\n\n}\n", "file_path": "interactive_engine/src/executor/Pegasus/src/common/mod.rs", "rank": 83, "score": 295176.0847978978 }, { "content": "#[inline]\n\npub fn unary_chain<V, VI, E, EI>(graph: Arc<dyn MVGraph<V=V, VI=VI, E=E, EI=EI>>, partition_ids: Arc<Vec<PartitionId>>, base: &OperatorBase) -> impl Fn(FlowMessage) -> Vec<FlowMessage>\n\n where V: 'static + Vertex, VI: 'static + Iterator<Item=V>, E: 'static + Edge, EI: 'static + Iterator<Item=E> {\n\n build_unary_chain(graph, partition_ids, base.get_chained_function().to_vec())\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/execution/chain.rs", "rank": 84, "score": 293954.09632192296 }, { "content": "pub trait BinaryState<L: Data, R: Data, O: Data, S: State>: Send + 'static {\n\n type NotifyResult: IntoIterator<Item = O>;\n\n\n\n fn on_receive(\n\n &self, input: &mut BinaryInput<L, R>, output: &mut Output<O>, state: &mut S,\n\n ) -> Result<(), JobExecError>;\n\n\n\n fn on_notify(&self, state: S) -> Self::NotifyResult;\n\n}\n\n\n\npub enum BinaryNotification {\n\n 
Left(Tag),\n\n Right(Tag),\n\n}\n", "file_path": "research/gaia/pegasus/pegasus/src/api/primitive/binary.rs", "rank": 85, "score": 292966.799833906 }, { "content": "#[inline]\n\nfn filter_list_value(compare: &CompareType, prop_id: i32, message: &RawMessage, value: &Value, schema: &Schema) -> bool {\n\n match value.get_value_type() {\n\n VariantType::VT_INTEGER_LIST | VariantType::VT_INTEGER => {\n\n if let Some(vallist) = get_message_int_list_prop_value(prop_id, message) {\n\n return filter_int_list(&vallist, compare, value);\n\n }\n\n }\n\n VariantType::VT_LONG_LIST | VariantType::VT_LONG => {\n\n if let Some(vallist) = get_message_long_list_prop_value(prop_id, message) {\n\n return filter_long_list(&vallist, compare, value);\n\n }\n\n }\n\n VariantType::VT_STRING_LIST | VariantType::VT_STRING => {\n\n if let Some(vallist) = get_message_string_list_prop_value(prop_id, message, schema) {\n\n return filter_string_list(&vallist, compare, value);\n\n }\n\n }\n\n _ => {}\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/dataflow/manager/filter.rs", "rank": 86, "score": 292314.1280241357 }, { "content": "struct Message(Vec<u64>);\n\n\n", "file_path": "interactive_engine/src/executor/Pegasus/examples/channel_perf.rs", "rank": 87, "score": 292121.5810960045 }, { "content": "#[inline]\n\nfn sink_with_encoder<D: Data, O: Output + Clone>(\n\n stream: &Stream<D>, ec: Box<dyn EncodeFunction<D>>, output: JobResultSink<O>,\n\n) -> Result<(), BuildJobError> {\n\n stream.sink_by(|_meta| {\n\n move |_tag, result| match result {\n\n ResultSet::Data(data) => {\n\n let bytes = ec.encode(data);\n\n output.on_next(bytes);\n\n }\n\n ResultSet::End => {\n\n output.close();\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "research/gaia/pegasus/server/src/service.rs", "rank": 89, "score": 291665.57435982174 }, { "content": "#[inline]\n\npub fn get_current_worker() -> Option<WorkerId> {\n\n CURRENT_WORKER.with(|w| w.get())\n\n}\n\n\n", "file_path": "research/gaia/pegasus/pegasus/src/worker_id.rs", "rank": 90, "score": 291428.30136232614 }, { "content": "#[inline]\n\npub fn build_unary_chain<V, VI, E, EI>(graph: Arc<dyn MVGraph<V=V, VI=VI, E=E, EI=EI>>, partition_ids: Arc<Vec<PartitionId>>, functions: Vec<ChainedFunction>) -> impl Fn(FlowMessage) -> Vec<FlowMessage>\n\n where V: 'static + Vertex, VI: 'static + Iterator<Item=V>, E: 'static + Edge, EI: 'static + Iterator<Item=E> {\n\n let mut chained_funcs = vec![];\n\n for func in functions {\n\n chained_funcs.push(build_chain(graph.clone(), partition_ids.clone(), func));\n\n }\n\n move |message| {\n\n let mut input = vec![message];\n\n for chained_func in &chained_funcs {\n\n if input.is_empty() {\n\n break;\n\n }\n\n input = chained_func(input);\n\n }\n\n input\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/runtime/src/execution/chain.rs", "rank": 91, "score": 290329.3748018698 }, { "content": "#[inline]\n\npub fn get_cluster_process_peers() -> usize {\n\n *RUNTIME_PROCESSE_PEERS\n\n}\n\n\n", "file_path": "research/gaia/pegasus/config/src/lib.rs", "rank": 92, "score": 290055.49874720024 }, { "content": "struct BufferedPush<D: Data> {\n\n batch_size: usize,\n\n push: CountedPush<D>,\n\n buffer: Vec<D>,\n\n recycle: BufferRecycle<D>,\n\n}\n\n\n\nimpl<D: Data> BufferedPush<D> {\n\n fn push_batch(&mut self, msg: DataSet<D>) -> IOResult<()> {\n\n self.push.push(msg)\n\n }\n\n\n\n fn push(&mut self, msg: D) -> bool {\n\n self.buffer.push(msg);\n\n self.buffer.len() == self.batch_size\n\n }\n\n\n\n fn flush(&mut 
self, tag: Tag) -> IOResult<()> {\n\n let new_buffer =\n\n self.recycle.try_recycle().unwrap_or_else(|| Vec::with_capacity(self.batch_size));\n", "file_path": "research/gaia/pegasus/pegasus/src/communication/decorator/exchange.rs", "rank": 93, "score": 290002.3018836869 }, { "content": "#[inline]\n\npub fn guard(worker_id: WorkerId) -> CurWorkerGuard {\n\n CurWorkerGuard::new(worker_id)\n\n}\n\n\n", "file_path": "research/gaia/pegasus/pegasus/src/worker_id.rs", "rank": 94, "score": 287704.5766995275 }, { "content": "pub fn create_prop_def(prop_id: PropId,\n\n name: &str,\n\n data_type: DataType,\n\n comment: &str) -> PropDef {\n\n PropDef::from(&create_prop_def_proto(prop_id, name, data_type, comment))\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/store/src/schema/test_util.rs", "rank": 95, "score": 286671.64443817036 }, { "content": "pub fn write_id<W: WriteExt>(id: ID, writer: &mut W) -> io::Result<()> {\n\n writer.write_u128(id)\n\n}\n", "file_path": "research/gaia/gremlin/gremlin_core/src/structure/element/mod.rs", "rank": 96, "score": 285894.5116014965 }, { "content": "#[clonable]\n\npub trait DynType: Any + Send + Sync + Clone + Debug {\n\n fn to_bytes(&self) -> io::Result<Vec<u8>>;\n\n}\n", "file_path": "research/gaia/dyn_type/src/lib.rs", "rank": 97, "score": 285213.6655365835 }, { "content": "#[inline]\n\npub fn get_current_task_and_memory() -> Option<(usize, usize)> {\n\n TASK_ID.with(|id| id.get()).and_then(|id| check_task_memory(id).map(|m| (id, m)))\n\n}\n\n\n\npub struct TaskMemoryTrace {\n\n pub mask: usize,\n\n shards: Box<[ShardedLock<HashMap<usize, AtomicUsize>>]>,\n\n}\n\n\n\nimpl TaskMemoryTrace {\n\n pub fn new(shard_size: usize) -> Self {\n\n let _s = ShadeMemTrace::new();\n\n let mask = shard_size - 1;\n\n assert_eq!(shard_size & mask, 0, \"invalid shard size {};\", shard_size);\n\n let mut shards = Vec::with_capacity(shard_size);\n\n for _ in 0..shard_size {\n\n shards.push(ShardedLock::new(HashMap::new()));\n\n }\n\n TaskMemoryTrace { mask, shards: shards.into_boxed_slice() }\n\n }\n", "file_path": "research/gaia/pegasus/memory/src/alloc.rs", "rank": 98, "score": 285152.7364782746 }, { "content": "pub fn create_prop_def_proto(prop_id: PropId,\n\n name: &str,\n\n data_type: DataType,\n\n comment: &str) -> PropertyDefProto {\n\n let mut proto = PropertyDefProto::new();\n\n proto.set_id(prop_id as i32);\n\n proto.set_name(name.to_owned());\n\n proto.set_dataType(data_type as i32);\n\n proto.set_comment(comment.to_owned());\n\n proto\n\n}\n\n\n", "file_path": "interactive_engine/src/executor/store/src/schema/test_util.rs", "rank": 99, "score": 285131.7370864123 } ]
Rust
neqo-transport/src/connection/tests/resumption.rs
hawkinsw/neqo
197c69b613cae40283c801fc2eb338f5482f3808
use super::{
    connect, connect_with_rtt, default_client, default_server, exchange_ticket, get_tokens,
    send_something, AT_LEAST_PTO,
};
use crate::addr_valid::{AddressValidation, ValidateAddress};

use std::cell::RefCell;
use std::rc::Rc;
use std::time::Duration;
use test_fixture::{self, assertions, now};

#[test]
fn resume() {
    let mut client = default_client();
    let mut server = default_server();
    connect(&mut client, &mut server);

    let token = exchange_ticket(&mut client, &mut server, now());
    let mut client = default_client();
    client
        .enable_resumption(now(), token)
        .expect("should set token");
    let mut server = default_server();
    connect(&mut client, &mut server);
    assert!(client.tls_info().unwrap().resumed());
    assert!(server.tls_info().unwrap().resumed());
}

#[test]
fn remember_smoothed_rtt() {
    const RTT1: Duration = Duration::from_millis(130);
    const RTT2: Duration = Duration::from_millis(70);

    let mut client = default_client();
    let mut server = default_server();
    let now = connect_with_rtt(&mut client, &mut server, now(), RTT1);
    assert_eq!(client.loss_recovery.rtt(), RTT1);

    let token = exchange_ticket(&mut client, &mut server, now);
    let mut client = default_client();
    let mut server = default_server();
    client.enable_resumption(now, token).unwrap();
    assert_eq!(
        client.loss_recovery.rtt(),
        RTT1,
        "client should remember previous RTT"
    );

    connect_with_rtt(&mut client, &mut server, now, RTT2);
    assert_eq!(
        client.loss_recovery.rtt(),
        RTT2,
        "previous RTT should be completely erased"
    );
}

#[test]
fn address_validation_token_resume() {
    const RTT: Duration = Duration::from_millis(10);

    let mut client = default_client();
    let mut server = default_server();
    let validation = AddressValidation::new(now(), ValidateAddress::Always).unwrap();
    let validation = Rc::new(RefCell::new(validation));
    server.set_validation(Rc::clone(&validation));
    let mut now = connect_with_rtt(&mut client, &mut server, now(), RTT);

    let token = exchange_ticket(&mut client, &mut server, now);
    let mut client = default_client();
    client.enable_resumption(now, token).unwrap();
    let mut server = default_server();

    let dgram = client.process(None, now).dgram();
    assertions::assert_initial(dgram.as_ref().unwrap(), true);

    now += AT_LEAST_PTO;
    connect_with_rtt(&mut client, &mut server, now, RTT);
    assert!(client.crypto.tls.info().unwrap().resumed());
    assert!(server.crypto.tls.info().unwrap().resumed());
}

fn can_resume(token: impl AsRef<[u8]>, initial_has_token: bool) {
    let mut client = default_client();
    client.enable_resumption(now(), token).unwrap();
    let initial = client.process_output(now()).dgram();
    assertions::assert_initial(initial.as_ref().unwrap(), initial_has_token);
}

#[test]
fn two_tickets_on_timer() {
    let mut client = default_client();
    let mut server = default_server();
    connect(&mut client, &mut server);

    server.send_ticket(now(), &[]).expect("send ticket1");
    server.send_ticket(now(), &[]).expect("send ticket2");
    let pkt = send_something(&mut server, now());

    assert!(client.process(Some(pkt), now()).dgram().is_some());
    assert_eq!(get_tokens(&mut client).len(), 0);

    let mut now = now() + 3 * client.get_pto();
    let _ = client.process(None, now);
    let mut recv_tokens = get_tokens(&mut client);
    assert_eq!(recv_tokens.len(), 1);
    let token1 = recv_tokens.pop().unwrap();

    now += 3 * client.get_pto();
    let _ = client.process(None, now);
    let mut recv_tokens = get_tokens(&mut client);
    assert_eq!(recv_tokens.len(), 1);
    let token2 = recv_tokens.pop().unwrap();

    now += 3 * client.get_pto();
    let _ = client.process(None, now);
    assert_eq!(get_tokens(&mut client).len(), 0);

    assert_ne!(token1.as_ref(), token2.as_ref());
    can_resume(&token1, false);
    can_resume(&token2, false);
}

#[test]
fn two_tickets_with_new_token() {
    let mut client = default_client();
    let mut server = default_server();
    let validation = AddressValidation::new(now(), ValidateAddress::Always).unwrap();
    let validation = Rc::new(RefCell::new(validation));
    server.set_validation(Rc::clone(&validation));
    connect(&mut client, &mut server);

    server.send_ticket(now(), &[]).expect("send ticket1");
    server.send_ticket(now(), &[]).expect("send ticket2");
    let pkt = send_something(&mut server, now());

    client.process_input(pkt, now());
    let mut all_tokens = get_tokens(&mut client);
    assert_eq!(all_tokens.len(), 2);
    let token1 = all_tokens.pop().unwrap();
    let token2 = all_tokens.pop().unwrap();
    assert_ne!(token1.as_ref(), token2.as_ref());
    can_resume(&token1, true);
    can_resume(&token2, true);
}

#[test]
fn take_token() {
    let mut client = default_client();
    let mut server = default_server();
    connect(&mut client, &mut server);

    server.send_ticket(now(), &[]).unwrap();
    let dgram = server.process(None, now()).dgram();
    client.process_input(dgram.unwrap(), now());

    let tokens = get_tokens(&mut client);
    assert_eq!(tokens.len(), 0);
    let token = client.take_resumption_token(now()).unwrap();
    can_resume(&token, false);
}
use super::{
    connect, connect_with_rtt, default_client, default_server, exchange_ticket, get_tokens,
    send_something, AT_LEAST_PTO,
};
use crate::addr_valid::{AddressValidation, ValidateAddress};

use std::cell::RefCell;
use std::rc::Rc;
use std::time::Duration;
use test_fixture::{self, assertions, now};

#[test]
fn resume() {
    let mut client = default_client();
    let mut server = default_server();
    connect(&mut client, &mut server);

    let token = exchange_ticket(&mut client, &mut server, now());
    let mut client = default_client();
    client
        .enable_resumption(now(), token)
        .expect("should set token");
    let mut server = default_server();
    connect(&mut client, &mut server);
    assert!(client.tls_info().unwrap().resumed());
    assert!(server.tls_info().unwrap().resumed());
}

#[test]
fn remember_smoothed_rtt() {
    const RTT1: Duration = Duration::from_millis(130);
    const RTT2: Duration = Duration::from_millis(70);

    let mut client = default_client();
    let mut server = default_server();
    let now = connect_with_rtt(&mut client, &mut server, now(), RTT1);
    assert_eq!(client.loss_recovery.rtt(), RTT1);

    let token = exchange_ticket(&mut client, &mut server, now);
    let mut client = default_client();
    let mut server = default_server();
    client.enable_resumption(now, token).unwrap();
    assert_eq!(
        client.loss_recovery.rtt(),
        RTT1,
        "client should remember previous RTT"
    );

    connect_with_rtt(&mut client, &mut server, now, RTT2);
    assert_eq!(
        client.loss_recovery.rtt(),
        RTT2,
        "previous RTT should be completely erased"
    );
}

#[test]
fn address_validation_token_resume() {
    const RTT: Duration = Duration::from_millis(10);

    let mut client = default_client();
    let mut server = default_server();
    let validation = AddressValidation::new(now(), ValidateAddress::Always).unwrap();
    let validation = Rc::new(RefCell::new(validation));
    server.set_validation(Rc::clone(&validation));
    let mut now = connect_with_rtt(&mut client, &mut server, now(), RTT);

    let token = exchange_ticket(&mut client, &mut server, now);
    let mut client = default_client();
    client.enable_resumption(now, token).unwrap();
    let mut server = default_server();

    let dgram = client.process(None, now).dgram();
    assertions::assert_initial(dgram.as_ref().unwrap(), true);

    now += AT_LEAST_PTO;
    connect_with_rtt(&mut client, &mut server, now, RTT);
    assert!(client.crypto.tls.info().unwrap().resumed());
    assert!(server.crypto.tls.info().unwrap().resumed());
}

fn can_resume(token: impl AsRef<[u8]>, initial_has_token: bool) {
    let mut client = default_client();
    client.enable_resumption(now(), token).unwrap();
    let initial = client.process_output(now()).dgram();
    assertions::assert_initial(initial.as_ref().unwrap(), initial_has_token);
}

#[test]
fn two_tickets_on_timer() {
    let mut client = default_client();
    let mut server = default_server();
    connect(&mut client, &mut server);

    server.send_ticket(now(), &[]).expect("send ticket1");
    server.send_ticket(now(), &[]).expect("send ticket2");
    let pkt = send_something(&mut server, now());

    assert!(client.process(Some(pkt), now()).dgram().is_some());
    assert_eq!(get_tokens(&mut client).len(), 0);

    let mut now = now() + 3 * client.get_pto();
    let _ = client.process(None, now);
    let mut recv_tokens = get_tokens(&mut client);
    assert_eq!(recv_tokens.len(), 1);
    let token1 = recv_tokens.pop().unwrap();

    now += 3 * client.get_pto();
    let _ = client.process(None, now);
    let mut recv_tokens = get_tokens(&mut client);
    assert_eq!(recv_tokens.len(), 1);
    let token2 = recv_tokens.pop().unwrap();

    now += 3 * client.get_pto();
    let _ = client.process(None, now);
    assert_eq!(get_tokens(&mut client).len(), 0);

    assert_ne!(token1.as_ref(), token2.as_ref());
    can_resume(&token1, false);
    can_resume(&token2, false);
}

#[test]
fn two_tickets_with_new_token() {
    let mut client = default_client();
    let mut server = default_server();
    let validation = AddressValidation::new(now(), ValidateAddress::Always).unwrap();
    let validation = Rc::new(RefCell::new(validation));
    server.set_validation(Rc::clone(&validation));
    connect(&mut client, &mut server);

    server.send_ticket(now(), &[]).expect("send ticket1");
    server.send_ticket(now(), &[]).expect("send ticket2");
    let pkt = send_something(&mut server, now());

    client.process_input(pkt, now());
    let mut all_tokens = get_tokens(&mut client);
    assert_eq!(all_tokens.len(), 2);
    let token1 = all_tokens.pop().unwrap();
    let token2 = all_tokens.pop().unwrap();
    assert_ne!(token1.as_ref(), token2.as_ref());
    can_resume(&token1, true);
    can_resume(&token2, true);
}

#[test]
fn take_token() {
    let mut client = default_client();
    let mut server = default_server();
    connect(&mut client, &mut server);

    server.send_ticket(now(), &[]).unwrap();
    let dgram = server.process(None, now()).dgram();
    client.process_input(dgram.unwrap(), now());

    let tokens = get_tokens(&mut client);
    assert_eq!(tokens.len(), 0);
    let token = client.take_resumption_token(now()).unwrap();
    can_resume(&token, false);
}
function_block-full_function
[ { "content": "/// Connect with an RTT and then force both peers to be idle.\n\nfn connect_rtt_idle(client: &mut Connection, server: &mut Connection, rtt: Duration) -> Instant {\n\n let now = connect_with_rtt(client, server, now(), rtt);\n\n let now = force_idle(client, server, rtt, now);\n\n // Drain events from both as well.\n\n let _ = client.events().count();\n\n let _ = server.events().count();\n\n now\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 0, "score": 455940.05266876175 }, { "content": "/// Connect. This returns a reference to the server connection.\n\nfn connect(client: &mut Connection, server: &mut Server) -> ActiveConnectionRef {\n\n server.set_validation(ValidateAddress::Never);\n\n\n\n assert_eq!(*client.state(), State::Init);\n\n let dgram = client.process(None, now()).dgram(); // ClientHello\n\n assert!(dgram.is_some());\n\n let dgram = server.process(dgram, now()).dgram(); // ServerHello...\n\n assert!(dgram.is_some());\n\n\n\n // Ingest the server Certificate.\n\n let dgram = client.process(dgram, now()).dgram();\n\n assert!(dgram.is_some()); // This should just be an ACK.\n\n let dgram = server.process(dgram, now()).dgram();\n\n assert!(dgram.is_none()); // So the server should have nothing to say.\n\n\n\n // Now mark the server as authenticated.\n\n client.authenticated(AuthenticationStatus::Ok, now());\n\n let dgram = client.process(None, now()).dgram();\n\n assert!(dgram.is_some());\n\n assert_eq!(*client.state(), State::Connected);\n", "file_path": "neqo-transport/tests/server.rs", "rank": 2, "score": 422410.7933122815 }, { "content": "fn connect(client: &mut Connection, server: &mut Connection) {\n\n connect_with_rtt(client, server, now(), Duration::new(0, 0));\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 3, "score": 407451.5330499614 }, { "content": "fn expect_no_migration(client: &mut Connection, server: &mut Connection) {\n\n let dgram = fast_handshake(client, server);\n\n\n\n // The client won't probe now, though it could; it remains idle.\n\n let out = client.process(dgram, now());\n\n assert_ne!(out.callback(), Duration::new(0, 0));\n\n\n\n // Data continues on the main path for the client.\n\n let data = send_something(client, now());\n\n assert_v6_path(&data, false);\n\n assert_eq!(client.stats().frame_tx.path_challenge, 0);\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/migration.rs", "rank": 4, "score": 373977.43141327944 }, { "content": "pub fn connect(client: &mut SecretAgent, server: &mut SecretAgent) {\n\n connect_at(now(), client, server);\n\n}\n\n\n", "file_path": "neqo-crypto/tests/handshake.rs", "rank": 5, "score": 373622.6727051537 }, { "content": "fn connect_force_idle(client: &mut Connection, server: &mut Connection) {\n\n connect_rtt_idle(client, server, Duration::new(0, 0));\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 6, "score": 373162.9736597634 }, { "content": "pub fn handshake(client: &mut Connection, server: &mut Connection) {\n\n let mut a = client;\n\n let mut b = server;\n\n let mut datagram = None;\n\n let is_done = |c: &Connection| matches!(c.state(), State::Confirmed | State::Closing { .. 
} | State::Closed(..));\n\n while !is_done(a) {\n\n let _ = maybe_authenticate(a);\n\n let d = a.process(datagram, now());\n\n datagram = d.dgram();\n\n mem::swap(&mut a, &mut b);\n\n }\n\n}\n\n\n", "file_path": "test-fixture/src/lib.rs", "rank": 7, "score": 372562.2626203791 }, { "content": "/// Drive the handshake in the most expeditious fashion.\n\n/// Returns the packet containing `HANDSHAKE_DONE` from the server.\n\nfn fast_handshake(client: &mut Connection, server: &mut Connection) -> Option<Datagram> {\n\n let dgram = client.process_output(now()).dgram();\n\n let dgram = server.process(dgram, now()).dgram();\n\n client.process_input(dgram.unwrap(), now());\n\n assert!(maybe_authenticate(client));\n\n let dgram = client.process_output(now()).dgram();\n\n server.process(dgram, now()).dgram()\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/migration.rs", "rank": 8, "score": 354998.54207106086 }, { "content": "pub fn connect_fail(client: &mut SecretAgent, server: &mut SecretAgent) {\n\n handshake(now(), client, server);\n\n assert!(!client.state().is_connected());\n\n assert!(!server.state().is_connected());\n\n}\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub enum Resumption {\n\n WithoutZeroRtt,\n\n WithZeroRtt,\n\n}\n\n\n\npub const ZERO_RTT_TOKEN_DATA: &[u8] = b\"zero-rtt-token\";\n\n\n\n#[derive(Debug)]\n\npub struct PermissiveZeroRttChecker {\n\n resuming: bool,\n\n}\n\n\n\nimpl Default for PermissiveZeroRttChecker {\n", "file_path": "neqo-crypto/tests/handshake.rs", "rank": 9, "score": 345135.69051341753 }, { "content": "fn get_tokens(client: &mut Connection) -> Vec<ResumptionToken> {\n\n client\n\n .events()\n\n .filter_map(|e| {\n\n if let ConnectionEvent::ResumptionToken(token) = e {\n\n Some(token)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 10, "score": 337054.8754047901 }, { "content": "pub fn connect_at(now: Instant, client: &mut SecretAgent, server: &mut SecretAgent) {\n\n handshake(now, client, server);\n\n qinfo!(\"client: {:?}\", client.state());\n\n qinfo!(\"server: {:?}\", server.state());\n\n assert!(client.state().is_connected());\n\n assert!(server.state().is_connected());\n\n}\n\n\n", "file_path": "neqo-crypto/tests/handshake.rs", "rank": 11, "score": 331531.67792287207 }, { "content": "// Check that there is at least one connection. Returns a ref to the first confirmed connection.\n\nfn connected_server(server: &mut Server) -> ActiveConnectionRef {\n\n let server_connections = server.active_connections();\n\n // Find confirmed connections. 
There should only be one.\n\n let mut confirmed = server_connections\n\n .iter()\n\n .filter(|c: &&ActiveConnectionRef| *c.borrow().state() == State::Confirmed);\n\n let c = confirmed.next().expect(\"one confirmed\");\n\n assert!(confirmed.next().is_none(), \"only one confirmed\");\n\n c.clone()\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 12, "score": 330157.10840222094 }, { "content": "// This is used for filtering send_streams and recv_Streams with a stream_ids less than a given id.\n\n// Only the same type (bidirectional or unidirectional) streams are filtered.\n\nfn id_lt<U>(base: StreamId) -> impl FnMut(&u64, &mut U) -> bool\n\nwhere\n\n U: ?Sized,\n\n{\n\n let mut f = id_gte(base);\n\n move |id, v| f((id, v)).is_none()\n\n}\n\n\n", "file_path": "neqo-http3/src/connection_client.rs", "rank": 13, "score": 327184.0251627879 }, { "content": "fn assert_v6_path(dgram: &Datagram, padded: bool) {\n\n assert_path(dgram, addr());\n\n if padded {\n\n assert_eq!(dgram.len(), PATH_MTU_V6);\n\n }\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/migration.rs", "rank": 14, "score": 322614.14767961786 }, { "content": "fn assert_v4_path(dgram: &Datagram, padded: bool) {\n\n assert_path(dgram, addr_v4());\n\n if padded {\n\n assert_eq!(dgram.len(), PATH_MTU_V4);\n\n }\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/migration.rs", "rank": 15, "score": 322614.14767961786 }, { "content": "fn get_ticket(server: &mut Server) -> ResumptionToken {\n\n let mut client = default_client();\n\n let mut server_conn = connect(&mut client, server);\n\n\n\n server_conn.borrow_mut().send_ticket(now(), &[]).unwrap();\n\n let dgram = server.process(None, now()).dgram();\n\n client.process_input(dgram.unwrap(), now()); // Consume ticket, ignore output.\n\n\n\n // Calling active_connections clears the set of active connections.\n\n assert_eq!(server.active_connections().len(), 1);\n\n client\n\n .events()\n\n .find_map(|e| {\n\n if let ConnectionEvent::ResumptionToken(token) = e {\n\n Some(token)\n\n } else {\n\n None\n\n }\n\n })\n\n .unwrap()\n\n}\n\n\n\n// Attempt a retry with 0-RTT, and have 0-RTT packets sent with the second ClientHello.\n", "file_path": "neqo-transport/tests/server.rs", "rank": 16, "score": 320443.60765210533 }, { "content": "fn migration(mut client: Connection) {\n\n let mut server = default_server();\n\n connect_force_idle(&mut client, &mut server);\n\n let now = now();\n\n\n\n client\n\n .migrate(Some(addr_v4()), Some(addr_v4()), false, now)\n\n .unwrap();\n\n\n\n let probe = client.process_output(now).dgram().unwrap();\n\n assert_v4_path(&probe, true); // Contains PATH_CHALLENGE.\n\n assert_eq!(client.stats().frame_tx.path_challenge, 1);\n\n\n\n let resp = server.process(Some(probe), now).dgram().unwrap();\n\n assert_v4_path(&resp, true);\n\n assert_eq!(server.stats().frame_tx.path_response, 1);\n\n assert_eq!(server.stats().frame_tx.path_challenge, 1);\n\n\n\n // Data continues to be exchanged on the new path.\n\n let client_data = send_something(&mut client, now);\n", "file_path": "neqo-transport/src/connection/tests/migration.rs", "rank": 17, "score": 311414.22542234 }, { "content": "pub fn assert_initial(payload: &[u8], expect_token: bool) {\n\n assert_eq!(payload[0] & PACKET_TYPE_MASK, 0b1000_0000);\n\n\n\n // Check that it has a token.\n\n let mut dec = Decoder::from(payload);\n\n dec.skip(5); // Skip type and version.\n\n dec.skip_vec(1); // Destination Connection ID.\n\n dec.skip_vec(1); // Source Connection ID.\n\n let token = 
dec.decode_vvec().unwrap();\n\n assert_eq!(expect_token, !token.is_empty());\n\n}\n\n\n", "file_path": "test-fixture/src/assertions.rs", "rank": 18, "score": 311270.9763196166 }, { "content": "fn handshake(now: Instant, client: &mut SecretAgent, server: &mut SecretAgent) {\n\n let mut a = client;\n\n let mut b = server;\n\n let mut records = a.handshake_raw(now, None).unwrap();\n\n let is_done = |agent: &mut SecretAgent| agent.state().is_final();\n\n while !is_done(b) {\n\n records = if let Ok(r) = forward_records(now, &mut b, records) {\n\n r\n\n } else {\n\n // TODO(mt) take the alert generated by the failed handshake\n\n // and allow it to be sent to the peer.\n\n return;\n\n };\n\n\n\n if *b.state() == HandshakeState::AuthenticationPending {\n\n b.authenticated(AuthenticationStatus::Ok);\n\n records = b.handshake_raw(now, None).unwrap();\n\n }\n\n mem::swap(&mut a, &mut b);\n\n }\n\n}\n\n\n", "file_path": "neqo-crypto/tests/handshake.rs", "rank": 19, "score": 302479.3289009558 }, { "content": "/// If state is `AuthenticationNeeded` call `authenticated()`. This function will\n\n/// consume all outstanding events on the connection.\n\npub fn maybe_authenticate(conn: &mut Connection) -> bool {\n\n let authentication_needed = |e| matches!(e, ConnectionEvent::AuthenticationNeeded);\n\n if conn.events().any(authentication_needed) {\n\n conn.authenticated(AuthenticationStatus::Ok, now());\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 22, "score": 288324.83021663566 }, { "content": "#[must_use]\n\npub fn maybe_authenticate(conn: &mut Connection) -> bool {\n\n let authentication_needed = |e| matches!(e, ConnectionEvent::AuthenticationNeeded);\n\n if conn.events().any(authentication_needed) {\n\n conn.authenticated(AuthenticationStatus::Ok, now());\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "test-fixture/src/lib.rs", "rank": 23, "score": 285006.72351609985 }, { "content": "fn assert_update_blocked(c: &mut Connection) {\n\n assert_eq!(\n\n c.initiate_key_update().unwrap_err(),\n\n Error::KeyUpdateBlocked\n\n )\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/keys.rs", "rank": 24, "score": 277222.0537758427 }, { "content": "fn process_server_events(server: &mut Http3Server) {\n\n let mut request_found = false;\n\n while let Some(event) = server.next_event() {\n\n if let Http3ServerEvent::Headers {\n\n mut request,\n\n headers,\n\n fin,\n\n } = event\n\n {\n\n assert_eq!(\n\n headers,\n\n vec![\n\n (String::from(\":method\"), String::from(\"GET\")),\n\n (String::from(\":scheme\"), String::from(\"https\")),\n\n (String::from(\":authority\"), String::from(\"something.com\")),\n\n (String::from(\":path\"), String::from(\"/\"))\n\n ]\n\n );\n\n assert_eq!(fin, true);\n\n request\n", "file_path": "neqo-http3/tests/httpconn.rs", "rank": 25, "score": 272241.3363071925 }, { "content": "/// Take a pair of connections in any state and complete the handshake.\n\n/// The `datagram` argument is a packet that was received from the server.\n\n/// See `connect` for what this returns.\n\nfn complete_connection(\n\n client: &mut Connection,\n\n server: &mut Server,\n\n mut datagram: Option<Datagram>,\n\n) -> ActiveConnectionRef {\n\n let is_done = |c: &Connection| matches!(c.state(), State::Confirmed | State::Closing { .. 
} | State::Closed(..));\n\n while !is_done(client) {\n\n let _ = test_fixture::maybe_authenticate(client);\n\n let out = client.process(datagram, now());\n\n let out = server.process(out.dgram(), now());\n\n datagram = out.dgram();\n\n }\n\n\n\n assert_eq!(*client.state(), State::Confirmed);\n\n connected_server(server)\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 26, "score": 267725.9967964697 }, { "content": "#[test]\n\nfn same_initial_after_connected() {\n\n let mut server = default_server();\n\n let mut client = default_client();\n\n\n\n let client_initial = client.process(None, now()).dgram();\n\n assert!(client_initial.is_some());\n\n\n\n let server_initial = server.process(client_initial.clone(), now()).dgram();\n\n assert!(server_initial.is_some());\n\n complete_connection(&mut client, &mut server, server_initial);\n\n // This removes the connection from the active set until something happens to it.\n\n assert_eq!(server.active_connections().len(), 0);\n\n\n\n // Now make a new connection using the exact same initial as before.\n\n // The server should respond to an attempt to connect with the same Initial.\n\n let dgram = server.process(client_initial, now()).dgram();\n\n assert!(dgram.is_some());\n\n // The server should make a new connection object.\n\n assert_eq!(server.active_connections().len(), 1);\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 27, "score": 267669.2685789084 }, { "content": "#[test]\n\nfn new_token_0rtt() {\n\n let mut server = default_server();\n\n let token = get_ticket(&mut server);\n\n server.set_validation(ValidateAddress::NoToken);\n\n\n\n let mut client = default_client();\n\n client.enable_resumption(now(), &token).unwrap();\n\n\n\n let client_stream = client.stream_create(StreamType::UniDi).unwrap();\n\n client.stream_send(client_stream, &[1, 2, 3]).unwrap();\n\n\n\n let dgram = client.process(None, now()).dgram(); // Initial w/0-RTT\n\n assert!(dgram.is_some());\n\n assertions::assert_initial(dgram.as_ref().unwrap(), true);\n\n assertions::assert_coalesced_0rtt(dgram.as_ref().unwrap());\n\n let dgram = server.process(dgram, now()).dgram(); // Initial\n\n assert!(dgram.is_some());\n\n assertions::assert_initial(dgram.as_ref().unwrap(), false);\n\n\n\n let dgram = client.process(dgram, now()).dgram();\n\n // Note: the client doesn't need to authenticate the server here\n\n // as there is no certificate; authentication is based on the ticket.\n\n assert!(dgram.is_some());\n\n assert_eq!(*client.state(), State::Connected);\n\n let dgram = server.process(dgram, now()).dgram(); // (done)\n\n assert!(dgram.is_some());\n\n connected_server(&mut server);\n\n assert!(client.tls_info().unwrap().resumed());\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 28, "score": 263857.33557240333 }, { "content": "#[test]\n\nfn bad_client_initial() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n\n\n let dgram = client.process(None, now()).dgram().expect(\"a datagram\");\n\n let (header, d_cid, s_cid, payload) = decode_initial_header(&dgram);\n\n let (aead, hp) = client_initial_aead_and_hp(d_cid);\n\n let (fixed_header, pn) = remove_header_protection(&hp, header, payload);\n\n let payload = &payload[(fixed_header.len() - header.len())..];\n\n\n\n let mut plaintext_buf = vec![0; dgram.len()];\n\n let plaintext = aead\n\n .decrypt(pn, &fixed_header, payload, &mut plaintext_buf)\n\n .unwrap();\n\n\n\n let mut payload_enc = Encoder::from(plaintext);\n\n 
payload_enc.encode(&[0x08, 0x02, 0x00, 0x00]); // Add a stream frame.\n\n\n\n // Make a new header with a 1 byte packet number length.\n\n let mut header_enc = Encoder::new();\n", "file_path": "neqo-transport/tests/server.rs", "rank": 29, "score": 262951.4013849366 }, { "content": "fn test_h9(nctx: &NetworkCtx, client: &mut Connection) -> Result<(), String> {\n\n let client_stream_id = client.stream_create(StreamType::BiDi).unwrap();\n\n let req: String = \"GET /10\\r\\n\".to_string();\n\n client\n\n .stream_send(client_stream_id, req.as_bytes())\n\n .unwrap();\n\n let mut hc = H9Handler::default();\n\n hc.streams.insert(client_stream_id);\n\n let res = process_loop(nctx, client, &mut hc);\n\n\n\n if let Err(e) = res {\n\n return Err(format!(\"ERROR: {}\", e));\n\n }\n\n if hc.rbytes == 0 {\n\n return Err(String::from(\"Empty response\"));\n\n }\n\n if !hc.rsfin {\n\n return Err(String::from(\"No FIN\"));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "neqo-interop/src/main.rs", "rank": 30, "score": 262721.59409191064 }, { "content": "fn connect() -> (Http3Client, Http3Server, Option<Datagram>) {\n\n let mut hconn_c = default_http3_client();\n\n let mut hconn_s = default_http3_server();\n\n\n\n assert_eq!(hconn_c.state(), Http3State::Initializing);\n\n let out = hconn_c.process(None, now()); // Initial\n\n let out = hconn_s.process(out.dgram(), now()); // Initial + Handshake\n\n let out = hconn_c.process(out.dgram(), now()); // ACK\n\n let _ = hconn_s.process(out.dgram(), now()); //consume ACK\n\n let authentication_needed = |e| matches!(e, Http3ClientEvent::AuthenticationNeeded);\n\n assert!(hconn_c.events().any(authentication_needed));\n\n hconn_c.authenticated(AuthenticationStatus::Ok, now());\n\n let out = hconn_c.process(None, now()); // Handshake\n\n assert_eq!(hconn_c.state(), Http3State::Connected);\n\n let out = hconn_s.process(out.dgram(), now()); // Handshake\n\n let out = hconn_c.process(out.dgram(), now());\n\n let out = hconn_s.process(out.dgram(), now());\n\n // assert_eq!(hconn_s.settings_received, true);\n\n let out = hconn_c.process(out.dgram(), now());\n\n // assert_eq!(hconn_c.settings_received, true);\n\n\n\n (hconn_c, hconn_s, out.dgram())\n\n}\n\n\n", "file_path": "neqo-http3/tests/httpconn.rs", "rank": 31, "score": 260742.62366398016 }, { "content": "fn connect_with_rtt(\n\n client: &mut Connection,\n\n server: &mut Connection,\n\n now: Instant,\n\n rtt: Duration,\n\n) -> Instant {\n\n let now = handshake(client, server, now, rtt);\n\n assert_eq!(*client.state(), State::Confirmed);\n\n assert_eq!(*server.state(), State::Confirmed);\n\n\n\n assert_eq!(client.loss_recovery.rtt(), rtt);\n\n assert_eq!(server.loss_recovery.rtt(), rtt);\n\n now\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 32, "score": 260630.3071658169 }, { "content": "fn check_discarded(peer: &mut Connection, pkt: Datagram, dropped: usize, dups: usize) {\n\n // Make sure to flush any saved datagrams before doing this.\n\n let _ = peer.process_output(now());\n\n\n\n let before = peer.stats();\n\n let out = peer.process(Some(pkt), now());\n\n assert!(out.as_dgram_ref().is_none());\n\n let after = peer.stats();\n\n assert_eq!(dropped, after.dropped_rx - before.dropped_rx);\n\n assert_eq!(dups, after.dups_rx - before.dups_rx);\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/keys.rs", "rank": 33, "score": 258183.55754581688 }, { "content": "fn make_default_server(alpn: &[impl AsRef<str>]) -> Connection {\n\n fixture_init();\n\n\n\n let mut c = 
Connection::new_server(\n\n DEFAULT_KEYS,\n\n alpn,\n\n Rc::new(RefCell::new(CountingConnectionIdGenerator::default())),\n\n ConnectionParameters::default(),\n\n )\n\n .expect(\"create a default server\");\n\n c.server_enable_0rtt(&anti_replay(), AllowZeroRtt {})\n\n .expect(\"enable 0-RTT\");\n\n c\n\n}\n\n\n\n/// If state is `AuthenticationNeeded` call `authenticated()`.\n\n/// This funstion will consume all outstanding events on the connection.\n", "file_path": "test-fixture/src/lib.rs", "rank": 34, "score": 253814.92254742733 }, { "content": "fn assert_path(dgram: &Datagram, path_addr: SocketAddr) {\n\n assert_eq!(dgram.source(), path_addr);\n\n assert_eq!(dgram.destination(), path_addr);\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/migration.rs", "rank": 35, "score": 248595.3220427965 }, { "content": "// Decode the header of a client Initial packet, returning three values:\n\n// * the entire header short of the packet number,\n\n// * just the DCID,\n\n// * just the SCID, and\n\n// * the protected payload including the packet number.\n\n// Any token is thrown away.\n\nfn decode_initial_header(dgram: &Datagram) -> (&[u8], &[u8], &[u8], &[u8]) {\n\n let mut dec = Decoder::new(&dgram[..]);\n\n let type_and_ver = dec.decode(5).unwrap().to_vec();\n\n assert_eq!(type_and_ver[0] & 0xf0, 0xc0);\n\n let dest_cid = dec.decode_vec(1).unwrap();\n\n let src_cid = dec.decode_vec(1).unwrap();\n\n dec.skip_vvec(); // Ignore any the token.\n\n\n\n // Need to read of the length separately so that we can find the packet number.\n\n let payload_len = usize::try_from(dec.decode_varint().unwrap()).unwrap();\n\n let pn_offset = dgram.len() - dec.remaining();\n\n (\n\n &dgram[..pn_offset],\n\n dest_cid,\n\n src_cid,\n\n dec.decode(payload_len).unwrap(),\n\n )\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 36, "score": 248496.87474044712 }, { "content": "fn can_create_streams(c: &mut Connection, t: StreamType, n: u64) {\n\n for _ in 0..n {\n\n c.stream_create(t).unwrap();\n\n }\n\n assert_eq!(c.stream_create(t), Err(Error::StreamLimitError));\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 37, "score": 248268.89043975034 }, { "content": "#[test]\n\nfn zero_rtt_before_resumption_token() {\n\n let mut client = default_client();\n\n assert!(client.stream_create(StreamType::BiDi).is_err());\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/zerortt.rs", "rank": 38, "score": 246096.54988452772 }, { "content": "fn assert_default_version(dec: &mut Decoder) {\n\n let version: QuicVersion = u32::try_from(dec.decode_uint(4).unwrap())\n\n .unwrap()\n\n .try_into()\n\n .unwrap();\n\n assert_eq!(version, QuicVersion::default());\n\n}\n\n\n", "file_path": "test-fixture/src/assertions.rs", "rank": 40, "score": 240189.67780881343 }, { "content": "fn process_client_events(conn: &mut Http3Client) {\n\n let mut response_header_found = false;\n\n let mut response_data_found = false;\n\n while let Some(event) = conn.next_event() {\n\n match event {\n\n Http3ClientEvent::HeaderReady { headers, fin, .. 
} => {\n\n assert_eq!(\n\n headers,\n\n vec![\n\n (String::from(\":status\"), String::from(\"200\")),\n\n (String::from(\"content-length\"), String::from(\"3\")),\n\n ]\n\n );\n\n assert_eq!(fin, false);\n\n response_header_found = true;\n\n }\n\n Http3ClientEvent::DataReadable { stream_id } => {\n\n let mut buf = [0u8; 100];\n\n let (amount, fin) = conn.read_response_data(now(), stream_id, &mut buf).unwrap();\n\n assert_eq!(fin, true);\n", "file_path": "neqo-http3/tests/httpconn.rs", "rank": 41, "score": 235341.47950208708 }, { "content": "fn test_h3(nctx: &NetworkCtx, peer: &Peer, client: Connection, test: &Test) -> Result<(), String> {\n\n let mut hc = connect_h3(nctx, peer, client)?;\n\n\n\n let client_stream_id = hc\n\n .h3\n\n .fetch(Instant::now(), \"GET\", \"https\", &hc.host, &hc.path, &[])\n\n .unwrap();\n\n let _ = hc.h3.stream_close_send(client_stream_id);\n\n\n\n hc.streams.insert(client_stream_id);\n\n if let Err(e) = process_loop_h3(nctx, &mut hc, false, *test != Test::D) {\n\n return Err(format!(\"ERROR: {}\", e));\n\n }\n\n\n\n if *test == Test::D {\n\n // Send another request, when the first one was send we probably did not have the peer's qpack parameter.\n\n let client_stream_id = hc\n\n .h3\n\n .fetch(\n\n Instant::now(),\n", "file_path": "neqo-interop/src/main.rs", "rank": 42, "score": 233956.04498372998 }, { "content": "// This is used for filtering send_streams and recv_Streams with a stream_ids greater than or equal a given id.\n\n// Only the same type (bidirectional or unidirectionsl) streams are filtered.\n\nfn id_gte<U>(base: StreamId) -> impl FnMut((&u64, &U)) -> Option<u64> + 'static\n\nwhere\n\n U: ?Sized,\n\n{\n\n move |(id, _)| {\n\n if *id >= base.as_u64() && !(StreamId::from(*id).is_bidi() ^ base.is_bidi()) {\n\n Some(*id)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "neqo-http3/src/connection_client.rs", "rank": 43, "score": 228907.1558210125 }, { "content": "// Generate an AEAD and header protection object for a client Initial.\n\nfn client_initial_aead_and_hp(dcid: &[u8]) -> (Aead, HpKey) {\n\n const INITIAL_SALT: &[u8] = &[\n\n 0xaf, 0xbf, 0xec, 0x28, 0x99, 0x93, 0xd2, 0x4c, 0x9e, 0x97, 0x86, 0xf1, 0x9c, 0x61, 0x11,\n\n 0xe0, 0x43, 0x90, 0xa8, 0x99,\n\n ];\n\n let initial_secret = hkdf::extract(\n\n TLS_VERSION_1_3,\n\n TLS_AES_128_GCM_SHA256,\n\n Some(\n\n hkdf::import_key(TLS_VERSION_1_3, TLS_AES_128_GCM_SHA256, INITIAL_SALT)\n\n .as_ref()\n\n .unwrap(),\n\n ),\n\n hkdf::import_key(TLS_VERSION_1_3, TLS_AES_128_GCM_SHA256, dcid)\n\n .as_ref()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n let secret = hkdf::expand_label(\n", "file_path": "neqo-transport/tests/server.rs", "rank": 44, "score": 225073.76126183575 }, { "content": "#[test]\n\nfn connect() {\n\n let (_client, _server) = test_fixture::connect();\n\n}\n\n\n", "file_path": "neqo-transport/tests/connection.rs", "rank": 45, "score": 223942.32658273267 }, { "content": "// Receive multiple packets and generate an ack-only packet.\n\nfn ack_bytes<D>(dest: &mut Connection, stream: u64, in_dgrams: D, now: Instant) -> Vec<Datagram>\n\nwhere\n\n D: IntoIterator<Item = Datagram>,\n\n D::IntoIter: ExactSizeIterator,\n\n{\n\n let mut srv_buf = [0; 4_096];\n\n\n\n let in_dgrams = in_dgrams.into_iter();\n\n qdebug!([dest], \"ack_bytes {} datagrams\", in_dgrams.len());\n\n for dgram in in_dgrams {\n\n dest.process_input(dgram, now);\n\n }\n\n\n\n loop {\n\n let (bytes_read, _fin) = dest.stream_recv(stream, &mut srv_buf).unwrap();\n\n qtrace!([dest], \"ack_bytes read {} bytes\", bytes_read);\n\n 
if bytes_read == 0 {\n\n break;\n\n }\n\n }\n", "file_path": "neqo-transport/src/connection/tests/cc.rs", "rank": 46, "score": 221690.17809859518 }, { "content": "#[test]\n\nfn duplicate_initial() {\n\n let mut server = default_server();\n\n let mut client = default_client();\n\n\n\n assert_eq!(*client.state(), State::Init);\n\n let initial = client.process(None, now()).dgram();\n\n assert!(initial.is_some());\n\n\n\n // The server should ignore a packets with the same remote address and\n\n // destination connection ID as an existing connection attempt.\n\n let server_initial = server.process(initial.clone(), now()).dgram();\n\n assert!(server_initial.is_some());\n\n let dgram = server.process(initial, now()).dgram();\n\n assert!(dgram.is_none());\n\n\n\n assert_eq!(server.active_connections().len(), 1);\n\n complete_connection(&mut client, &mut server, server_initial);\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 47, "score": 221056.9185736855 }, { "content": "#[test]\n\nfn retry_after_initial() {\n\n let mut server = default_server();\n\n let mut retry_server = default_server();\n\n retry_server.set_validation(ValidateAddress::Always);\n\n let mut client = default_client();\n\n\n\n let cinit = client.process(None, now()).dgram(); // Initial\n\n assert!(cinit.is_some());\n\n let server_flight = server.process(cinit.clone(), now()).dgram(); // Initial\n\n assert!(server_flight.is_some());\n\n\n\n // We need to have the client just process the Initial.\n\n let (server_initial, _other) = split_datagram(server_flight.as_ref().unwrap());\n\n let dgram = client.process(Some(server_initial), now()).dgram();\n\n assert!(dgram.is_some());\n\n assert!(*client.state() != State::Connected);\n\n\n\n let retry = retry_server.process(cinit, now()).dgram(); // Retry!\n\n assert!(retry.is_some());\n\n assertions::assert_retry(&retry.as_ref().unwrap());\n", "file_path": "neqo-transport/tests/server.rs", "rank": 48, "score": 221056.9185736855 }, { "content": "#[test]\n\nfn retry_0rtt() {\n\n let mut server = default_server();\n\n let token = get_ticket(&mut server);\n\n server.set_validation(ValidateAddress::Always);\n\n\n\n let mut client = default_client();\n\n client.enable_resumption(now(), &token).unwrap();\n\n\n\n let client_stream = client.stream_create(StreamType::UniDi).unwrap();\n\n client.stream_send(client_stream, &[1, 2, 3]).unwrap();\n\n\n\n let dgram = client.process(None, now()).dgram(); // Initial w/0-RTT\n\n assert!(dgram.is_some());\n\n assertions::assert_coalesced_0rtt(dgram.as_ref().unwrap());\n\n let dgram = server.process(dgram, now()).dgram(); // Retry\n\n assert!(dgram.is_some());\n\n assertions::assert_retry(dgram.as_ref().unwrap());\n\n\n\n // After retry, there should be a token and still coalesced 0-RTT.\n\n let dgram = client.process(dgram, now()).dgram();\n", "file_path": "neqo-transport/tests/server.rs", "rank": 49, "score": 221050.4178568392 }, { "content": "#[test]\n\nfn single_client() {\n\n let mut server = default_server();\n\n let mut client = default_client();\n\n connect(&mut client, &mut server);\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 50, "score": 220138.8381180772 }, { "content": "// Different than the one in the fixture, which is a single connection.\n\nfn default_server() -> Server {\n\n Server::new(\n\n now(),\n\n test_fixture::DEFAULT_KEYS,\n\n test_fixture::DEFAULT_ALPN,\n\n test_fixture::anti_replay(),\n\n Box::new(AllowZeroRtt {}),\n\n Rc::new(RefCell::new(CountingConnectionIdGenerator::default())),\n\n 
ConnectionParameters::default(),\n\n )\n\n .expect(\"should create a server\")\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 51, "score": 218994.10755165687 }, { "content": "#[test]\n\nfn reorder_05rtt_with_0rtt() {\n\n const RTT: Duration = Duration::from_millis(100);\n\n\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n let validation = AddressValidation::new(now(), ValidateAddress::NoToken).unwrap();\n\n let validation = Rc::new(RefCell::new(validation));\n\n server.set_validation(Rc::clone(&validation));\n\n let mut now = connect_with_rtt(&mut client, &mut server, now(), RTT);\n\n\n\n // Include RTT in sending the ticket or the ticket age reported by the\n\n // client is wrong, which causes the server to reject 0-RTT.\n\n now += RTT / 2;\n\n server.send_ticket(now, &[]).unwrap();\n\n let ticket = server.process_output(now).dgram().unwrap();\n\n now += RTT / 2;\n\n client.process_input(ticket, now);\n\n\n\n let token = get_tokens(&mut client).pop().unwrap();\n\n let mut client = default_client();\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 52, "score": 218304.94212500955 }, { "content": "#[test]\n\nfn retry_bad_token() {\n\n let mut client = default_client();\n\n let mut retry_server = default_server();\n\n retry_server.set_validation(ValidateAddress::Always);\n\n let mut server = default_server();\n\n\n\n // Send a retry to one server, then replay it to the other.\n\n let client_initial1 = client.process(None, now()).dgram();\n\n assert!(client_initial1.is_some());\n\n let retry = retry_server.process(client_initial1, now()).dgram();\n\n assert!(retry.is_some());\n\n let client_initial2 = client.process(retry, now()).dgram();\n\n assert!(client_initial2.is_some());\n\n\n\n let dgram = server.process(client_initial2, now()).dgram();\n\n assert!(dgram.is_none());\n\n}\n\n\n\n// This is really a client test, but we need a server with Retry to test it.\n\n// In this test, the client sends Initial on PTO. The Retry should cause\n\n// all loss recovery timers to be reset, but we had a bug where the PTO timer\n\n// was not properly reset. This tests that the client generates a new Initial\n\n// in response to receiving a Retry, even after it sends the Initial on PTO.\n", "file_path": "neqo-transport/tests/server.rs", "rank": 53, "score": 216315.55194715748 }, { "content": "#[test]\n\nfn new_token_expired() {\n\n let mut server = default_server();\n\n let token = get_ticket(&mut server);\n\n server.set_validation(ValidateAddress::NoToken);\n\n\n\n let mut client = default_client();\n\n client.enable_resumption(now(), &token).unwrap();\n\n\n\n let dgram = client.process(None, now()).dgram(); // Initial\n\n assert!(dgram.is_some());\n\n assertions::assert_initial(dgram.as_ref().unwrap(), true);\n\n\n\n // Now move into the future.\n\n // We can't go too far or we'll overflow our field. Not when checking,\n\n // but when trying to generate another Retry. 
A month is fine.\n\n let the_future = now() + Duration::from_secs(60 * 60 * 24 * 30);\n\n let d = dgram.unwrap();\n\n let src = SocketAddr::new(d.source().ip(), d.source().port() + 1);\n\n let dgram = Some(Datagram::new(src, d.destination(), &d[..]));\n\n let dgram = server.process(dgram, the_future).dgram(); // Retry\n\n assert!(dgram.is_some());\n\n assertions::assert_retry(dgram.as_ref().unwrap());\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 54, "score": 216315.55194715748 }, { "content": "#[test]\n\nfn different_initials_same_path() {\n\n let mut server = default_server();\n\n let mut client1 = default_client();\n\n let mut client2 = default_client();\n\n\n\n let client_initial1 = client1.process(None, now()).dgram();\n\n assert!(client_initial1.is_some());\n\n let client_initial2 = client2.process(None, now()).dgram();\n\n assert!(client_initial2.is_some());\n\n\n\n // The server should respond to both as these came from different addresses.\n\n let server_initial1 = server.process(client_initial1, now()).dgram();\n\n assert!(server_initial1.is_some());\n\n\n\n let server_initial2 = server.process(client_initial2, now()).dgram();\n\n assert!(server_initial2.is_some());\n\n\n\n assert_eq!(server.active_connections().len(), 2);\n\n complete_connection(&mut client1, &mut server, server_initial1);\n\n complete_connection(&mut client2, &mut server, server_initial2);\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 55, "score": 216298.66832110845 }, { "content": "#[test]\n\nfn drop_non_initial() {\n\n const CID: &[u8] = &[55; 8]; // not a real connection ID\n\n let mut server = default_server();\n\n\n\n // This is big enough to look like an Initial, but it uses the Retry type.\n\n let mut header = neqo_common::Encoder::with_capacity(1200);\n\n header\n\n .encode_byte(0xfa)\n\n .encode_uint(4, QuicVersion::default().as_u32())\n\n .encode_vec(1, CID)\n\n .encode_vec(1, CID);\n\n let mut bogus_data: Vec<u8> = header.into();\n\n bogus_data.resize(1200, 66);\n\n\n\n let bogus = Datagram::new(test_fixture::addr(), test_fixture::addr(), bogus_data);\n\n assert!(server.process(Some(bogus), now()).dgram().is_none());\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 56, "score": 216298.66832110845 }, { "content": "#[must_use]\n\nfn send_something(sender: &mut Connection, now: Instant) -> Datagram {\n\n let stream_id = sender.stream_create(StreamType::UniDi).unwrap();\n\n assert!(sender.stream_send(stream_id, DEFAULT_STREAM_DATA).is_ok());\n\n assert!(sender.stream_close_send(stream_id).is_ok());\n\n qdebug!([sender], \"send_something on {}\", stream_id);\n\n let dgram = sender.process(None, now).dgram();\n\n dgram.expect(\"should have something to send\")\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 58, "score": 214361.37807561358 }, { "content": "pub fn assert_no_1rtt(payload: &[u8]) {\n\n let mut dec = Decoder::from(payload);\n\n while let Some(b1) = dec.decode_byte() {\n\n // If this is just padding, that's OK. 
Check.\n\n if payload.iter().skip(dec.offset()).all(|b| *b == 0) {\n\n return;\n\n }\n\n assert_eq!(b1 & 0x80, 0x80); // This has to be a long header.\n\n assert_ne!(b1 & 0b0011_0000, 0b0011_0000); // This can't be Retry.\n\n assert_default_version(&mut dec);\n\n dec.skip_vec(1); // DCID\n\n dec.skip_vec(1); // SCID\n\n if (b1 & 0b0011_0000) == 0b0000_0000 {\n\n dec.skip_vvec(); // Initial token.\n\n }\n\n dec.skip_vvec(); // Skip the payload.\n\n }\n\n}\n", "file_path": "test-fixture/src/assertions.rs", "rank": 59, "score": 214195.14707235215 }, { "content": "fn emit_packet(socket: &mut UdpSocket, out_dgram: Datagram) {\n\n let sent = socket\n\n .send_to(&out_dgram, &out_dgram.destination())\n\n .expect(\"Error sending datagram\");\n\n if sent != out_dgram.len() {\n\n eprintln!(\"Unable to send all {} bytes of datagram\", out_dgram.len());\n\n }\n\n}\n\n\n", "file_path": "neqo-server/src/main.rs", "rank": 60, "score": 213955.09255523706 }, { "content": "fn create_vn(initial_pkt: &[u8], versions: &[u32]) -> Vec<u8> {\n\n let mut dec = Decoder::from(&initial_pkt[5..]); // Skip past version.\n\n let dst_cid = dec.decode_vec(1).expect(\"client DCID\");\n\n let src_cid = dec.decode_vec(1).expect(\"client SCID\");\n\n\n\n let mut encoder = Encoder::default();\n\n encoder.encode_byte(PACKET_BIT_LONG);\n\n encoder.encode(&[0; 4]); // Zero version == VN.\n\n encoder.encode_vec(1, dst_cid);\n\n encoder.encode_vec(1, src_cid);\n\n\n\n for v in versions {\n\n encoder.encode_uint(4, *v);\n\n }\n\n encoder.into()\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/vn.rs", "rank": 61, "score": 213596.54400513452 }, { "content": "/// This fills the congestion window from a single source.\n\n/// As the pacer will interfere with this, this moves time forward\n\n/// as `Output::Callback` is received. 
Because it is hard to tell\n\n/// from the return value whether a timeout is an ACK delay, PTO, or\n\n/// pacing, this looks at the congestion window to tell when to stop.\n\n/// Returns a list of datagrams and the new time.\n\nfn fill_cwnd(src: &mut Connection, stream: u64, mut now: Instant) -> (Vec<Datagram>, Instant) {\n\n const BLOCK_SIZE: usize = 4_096;\n\n let mut total_dgrams = Vec::new();\n\n\n\n qtrace!(\n\n \"fill_cwnd starting cwnd: {}\",\n\n src.loss_recovery.cwnd_avail()\n\n );\n\n\n\n loop {\n\n let bytes_sent = src.stream_send(stream, &[0x42; BLOCK_SIZE]).unwrap();\n\n qtrace!(\"fill_cwnd wrote {} bytes\", bytes_sent);\n\n if bytes_sent < BLOCK_SIZE {\n\n break;\n\n }\n\n }\n\n\n\n loop {\n\n let pkt = src.process_output(now);\n\n qtrace!(\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 62, "score": 213223.15426017332 }, { "content": "#[test]\n\nfn new_token_different_ip() {\n\n let mut server = default_server();\n\n let token = get_ticket(&mut server);\n\n server.set_validation(ValidateAddress::NoToken);\n\n\n\n let mut client = default_client();\n\n client.enable_resumption(now(), &token).unwrap();\n\n\n\n let dgram = client.process(None, now()).dgram(); // Initial\n\n assert!(dgram.is_some());\n\n assertions::assert_initial(dgram.as_ref().unwrap(), true);\n\n\n\n // Now rewrite the source address.\n\n let d = dgram.unwrap();\n\n let src = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 2)), d.source().port());\n\n let dgram = Some(Datagram::new(src, d.destination(), &d[..]));\n\n let dgram = server.process(dgram, now()).dgram(); // Retry\n\n assert!(dgram.is_some());\n\n assertions::assert_retry(dgram.as_ref().unwrap());\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 63, "score": 211786.4168969747 }, { "content": "#[test]\n\nfn new_token_different_port() {\n\n let mut server = default_server();\n\n let token = get_ticket(&mut server);\n\n server.set_validation(ValidateAddress::NoToken);\n\n\n\n let mut client = default_client();\n\n client.enable_resumption(now(), &token).unwrap();\n\n\n\n let dgram = client.process(None, now()).dgram(); // Initial\n\n assert!(dgram.is_some());\n\n assertions::assert_initial(dgram.as_ref().unwrap(), true);\n\n\n\n // Now rewrite the source port, which should not change that the token is OK.\n\n let d = dgram.unwrap();\n\n let src = SocketAddr::new(d.source().ip(), d.source().port() + 1);\n\n let dgram = Some(Datagram::new(src, d.destination(), &d[..]));\n\n let dgram = server.process(dgram, now()).dgram(); // Retry\n\n assert!(dgram.is_some());\n\n assertions::assert_initial(dgram.as_ref().unwrap(), false);\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 64, "score": 211786.4168969747 }, { "content": "#[test]\n\nfn duplicate_initial_new_path() {\n\n let mut server = default_server();\n\n let mut client = default_client();\n\n\n\n assert_eq!(*client.state(), State::Init);\n\n let initial = client.process(None, now()).dgram().unwrap();\n\n let other = Datagram::new(\n\n SocketAddr::new(initial.source().ip(), initial.source().port() ^ 23),\n\n initial.destination(),\n\n &initial[..],\n\n );\n\n\n\n // The server should respond to both as these came from different addresses.\n\n let dgram = server.process(Some(other), now()).dgram();\n\n assert!(dgram.is_some());\n\n\n\n let server_initial = server.process(Some(initial), now()).dgram();\n\n assert!(server_initial.is_some());\n\n\n\n assert_eq!(server.active_connections().len(), 2);\n\n complete_connection(&mut client, &mut 
server, server_initial);\n\n}\n\n\n", "file_path": "neqo-transport/tests/server.rs", "rank": 65, "score": 211769.94047633704 }, { "content": "#[test]\n\nfn process_client_initial_29() {\n\n process_client_initial(QuicVersion::Draft29, &INITIAL_PACKET_29);\n\n}\n", "file_path": "neqo-transport/tests/conn_vectors.rs", "rank": 66, "score": 211343.3503399631 }, { "content": "#[test]\n\nfn process_client_initial_27() {\n\n process_client_initial(QuicVersion::Draft27, &INITIAL_PACKET_27);\n\n}\n\n\n", "file_path": "neqo-transport/tests/conn_vectors.rs", "rank": 67, "score": 211343.3503399631 }, { "content": "// Do a simple decode of the datagram to verify that it is coalesced.\n\npub fn assert_coalesced_0rtt(payload: &[u8]) {\n\n assert!(payload.len() >= 1200);\n\n let mut dec = Decoder::from(payload);\n\n let initial_type = dec.decode_byte().unwrap(); // Initial\n\n assert_eq!(initial_type & PACKET_TYPE_MASK, 0b1000_0000);\n\n assert_default_version(&mut dec);\n\n dec.skip_vec(1); // DCID\n\n dec.skip_vec(1); // SCID\n\n dec.skip_vvec();\n\n let initial_len = dec.decode_varint().unwrap();\n\n dec.skip(initial_len.try_into().unwrap());\n\n let zrtt_type = dec.decode_byte().unwrap();\n\n assert_eq!(zrtt_type & PACKET_TYPE_MASK, 0b1001_0000);\n\n}\n\n\n", "file_path": "test-fixture/src/assertions.rs", "rank": 68, "score": 210257.54778589137 }, { "content": "#[test]\n\nfn corrupted_initial() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n let d = client.process(None, now()).dgram().unwrap();\n\n let mut corrupted = Vec::from(&d[..]);\n\n // Find the last non-zero value and corrupt that.\n\n let (idx, _) = corrupted\n\n .iter()\n\n .enumerate()\n\n .rev()\n\n .find(|(_, &v)| v != 0)\n\n .unwrap();\n\n corrupted[idx] ^= 0x76;\n\n let dgram = Datagram::new(d.source(), d.destination(), corrupted);\n\n server.process_input(dgram, now());\n\n // The server should have received two packets,\n\n // the first should be dropped, the second saved.\n\n assert_eq!(server.stats().packets_rx, 2);\n\n assert_eq!(server.stats().dropped_rx, 2);\n\n assert_eq!(server.stats().saved_datagrams, 0);\n\n}\n\n\n\n#[test]\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 70, "score": 210179.7277056288 }, { "content": "#[test]\n\nfn pto_initial() {\n\n const INITIAL_PTO: Duration = Duration::from_millis(300);\n\n let mut now = now();\n\n\n\n qdebug!(\"---- client: generate CH\");\n\n let mut client = default_client();\n\n let pkt1 = client.process(None, now).dgram();\n\n assert!(pkt1.is_some());\n\n assert_eq!(pkt1.clone().unwrap().len(), PATH_MTU_V6);\n\n\n\n let delay = client.process(None, now).callback();\n\n assert_eq!(delay, INITIAL_PTO);\n\n\n\n // Resend initial after PTO.\n\n now += delay;\n\n let pkt2 = client.process(None, now).dgram();\n\n assert!(pkt2.is_some());\n\n assert_eq!(pkt2.unwrap().len(), PATH_MTU_V6);\n\n\n\n let pkt3 = client.process(None, now).dgram();\n", "file_path": "neqo-transport/src/connection/tests/recovery.rs", "rank": 71, "score": 210179.7277056288 }, { "content": "#[test]\n\nfn send_05rtt() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n\n\n let c1 = client.process(None, now()).dgram();\n\n assert!(c1.is_some());\n\n let s1 = server.process(c1, now()).dgram().unwrap();\n\n assert_eq!(s1.len(), PATH_MTU_V6);\n\n\n\n // The server should accept writes at this point.\n\n let s2 = send_something(&mut server, now());\n\n\n\n // Complete the handshake at the client.\n\n client.process_input(s1, 
now());\n\n maybe_authenticate(&mut client);\n\n assert_eq!(*client.state(), State::Connected);\n\n\n\n // The client should receive the 0.5-RTT data now.\n\n client.process_input(s2, now());\n\n let mut buf = vec![0; DEFAULT_STREAM_DATA.len() + 1];\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 72, "score": 210173.5405620718 }, { "content": "#[test]\n\nfn reorder_1rtt() {\n\n const RTT: Duration = Duration::from_millis(100);\n\n const PACKETS: usize = 6; // Many, but not enough to overflow cwnd.\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n let mut now = now();\n\n\n\n let c1 = client.process(None, now).dgram();\n\n assert!(c1.is_some());\n\n\n\n now += RTT / 2;\n\n let s1 = server.process(c1, now).dgram();\n\n assert!(s1.is_some());\n\n\n\n now += RTT / 2;\n\n client.process_input(s1.unwrap(), now);\n\n maybe_authenticate(&mut client);\n\n let c2 = client.process(None, now).dgram();\n\n assert!(c2.is_some());\n\n\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 73, "score": 210173.5405620718 }, { "content": "#[test]\n\nfn reorder_05rtt() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n\n\n let c1 = client.process(None, now()).dgram();\n\n assert!(c1.is_some());\n\n let s1 = server.process(c1, now()).dgram().unwrap();\n\n\n\n // The server should accept writes at this point.\n\n let s2 = send_something(&mut server, now());\n\n\n\n // We can't use the standard facility to complete the handshake, so\n\n // drive it as aggressively as possible.\n\n client.process_input(s2, now());\n\n assert_eq!(client.stats().saved_datagrams, 1);\n\n\n\n // After processing the first packet, the client should go back and\n\n // process the 0.5-RTT packet data, which should make data available.\n\n client.process_input(s1, now());\n\n // We can't use `maybe_authenticate` here as that consumes events.\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 74, "score": 210173.5405620718 }, { "content": "#[test]\n\nfn coalesce_05rtt() {\n\n const RTT: Duration = Duration::from_millis(100);\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n let mut now = now();\n\n\n\n // The first exchange doesn't offer a chance for the server to send.\n\n // So drop the server flight and wait for the PTO.\n\n let c1 = client.process(None, now).dgram();\n\n assert!(c1.is_some());\n\n now += RTT / 2;\n\n let s1 = server.process(c1, now).dgram();\n\n assert!(s1.is_some());\n\n\n\n // Drop the server flight. 
Then send some data.\n\n let stream_id = server.stream_create(StreamType::UniDi).unwrap();\n\n assert!(server.stream_send(stream_id, DEFAULT_STREAM_DATA).is_ok());\n\n assert!(server.stream_close_send(stream_id).is_ok());\n\n\n\n // Now after a PTO the client can send another packet.\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 75, "score": 210173.5405620718 }, { "content": "pub fn default_server() -> Connection {\n\n new_server(ConnectionParameters::default())\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 76, "score": 209044.4169087901 }, { "content": "pub fn default_client() -> Connection {\n\n new_client(ConnectionParameters::default())\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 77, "score": 208637.45717699025 }, { "content": "#[test]\n\nfn pto_handshake_complete() {\n\n let mut now = now();\n\n // start handshake\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n\n\n let pkt = client.process(None, now).dgram();\n\n let cb = client.process(None, now).callback();\n\n assert_eq!(cb, Duration::from_millis(300));\n\n\n\n now += Duration::from_millis(10);\n\n let pkt = server.process(pkt, now).dgram();\n\n\n\n now += Duration::from_millis(10);\n\n let pkt = client.process(pkt, now).dgram();\n\n\n\n let cb = client.process(None, now).callback();\n\n // The client now has a single RTT estimate (20ms), so\n\n // the handshake PTO is set based on that.\n\n assert_eq!(cb, Duration::from_millis(60));\n", "file_path": "neqo-transport/src/connection/tests/recovery.rs", "rank": 78, "score": 205944.1911499071 }, { "content": "#[test]\n\nfn discarded_initial_keys() {\n\n qdebug!(\"---- client: generate CH\");\n\n let mut client = default_client();\n\n let init_pkt_c = client.process(None, now()).dgram();\n\n assert!(init_pkt_c.is_some());\n\n assert_eq!(init_pkt_c.as_ref().unwrap().len(), PATH_MTU_V6);\n\n\n\n qdebug!(\"---- server: CH -> SH, EE, CERT, CV, FIN\");\n\n let mut server = default_server();\n\n let init_pkt_s = server.process(init_pkt_c.clone(), now()).dgram();\n\n assert!(init_pkt_s.is_some());\n\n\n\n qdebug!(\"---- client: cert verification\");\n\n let out = client.process(init_pkt_s.clone(), now()).dgram();\n\n assert!(out.is_some());\n\n\n\n // The client has received handshake packet. It will remove the Initial keys.\n\n // We will check this by processing init_pkt_s a second time.\n\n // The initial packet should be dropped. The packet contains a Handshake packet as well, which\n\n // will be marked as dup. 
And it will contain padding, which will be \"dropped\".\n", "file_path": "neqo-transport/src/connection/tests/keys.rs", "rank": 79, "score": 205901.75635141152 }, { "content": "#[test]\n\nfn extra_initial_hs() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n let mut now = now();\n\n\n\n let c_init = client.process(None, now).dgram();\n\n assert!(c_init.is_some());\n\n now += DEFAULT_RTT / 2;\n\n let s_init = server.process(c_init, now).dgram();\n\n assert!(s_init.is_some());\n\n now += DEFAULT_RTT / 2;\n\n\n\n // Drop the Initial packet, keep only the Handshake.\n\n let (_, undecryptable) = split_datagram(&s_init.unwrap());\n\n assert!(undecryptable.is_some());\n\n\n\n // Feed the same undecryptable packet into the client a few times.\n\n // Do that EXTRA_INITIALS times and each time the client will emit\n\n // another Initial packet.\n\n for _ in 0..=super::super::EXTRA_INITIALS {\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 80, "score": 205901.75635141152 }, { "content": "#[test]\n\nfn zero_rtt_negotiate() {\n\n // Note that the two servers in this test will get different anti-replay filters.\n\n // That's OK because we aren't testing anti-replay.\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n connect(&mut client, &mut server);\n\n\n\n let token = exchange_ticket(&mut client, &mut server, now());\n\n let mut client = default_client();\n\n client\n\n .enable_resumption(now(), token)\n\n .expect(\"should set token\");\n\n let mut server = default_server();\n\n connect(&mut client, &mut server);\n\n assert!(client.tls_info().unwrap().early_data_accepted());\n\n assert!(server.tls_info().unwrap().early_data_accepted());\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/zerortt.rs", "rank": 81, "score": 205895.7149173505 }, { "content": "#[test]\n\nfn dup_server_flight1() {\n\n qdebug!(\"---- client: generate CH\");\n\n let mut client = default_client();\n\n let out = client.process(None, now());\n\n assert!(out.as_dgram_ref().is_some());\n\n assert_eq!(out.as_dgram_ref().unwrap().len(), PATH_MTU_V6);\n\n qdebug!(\"Output={:0x?}\", out.as_dgram_ref());\n\n\n\n qdebug!(\"---- server: CH -> SH, EE, CERT, CV, FIN\");\n\n let mut server = default_server();\n\n let out_to_rep = server.process(out.dgram(), now());\n\n assert!(out_to_rep.as_dgram_ref().is_some());\n\n qdebug!(\"Output={:0x?}\", out_to_rep.as_dgram_ref());\n\n\n\n qdebug!(\"---- client: cert verification\");\n\n let out = client.process(Some(out_to_rep.as_dgram_ref().unwrap().clone()), now());\n\n assert!(out.as_dgram_ref().is_some());\n\n qdebug!(\"Output={:0x?}\", out.as_dgram_ref());\n\n\n\n let out = server.process(out.dgram(), now());\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 82, "score": 205465.0830708519 }, { "content": "#[must_use]\n\npub fn default_server() -> Connection {\n\n make_default_server(DEFAULT_ALPN)\n\n}\n\n\n\n/// Create a transport server with default configuration.\n", "file_path": "test-fixture/src/lib.rs", "rank": 83, "score": 205341.65416543253 }, { "content": "#[must_use]\n\npub fn connect() -> (Connection, Connection) {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n handshake(&mut client, &mut server);\n\n assert_eq!(*client.state(), State::Confirmed);\n\n assert_eq!(*server.state(), State::Confirmed);\n\n (client, server)\n\n}\n\n\n\n/// Create a http3 client with default configuration.\n", "file_path": 
"test-fixture/src/lib.rs", "rank": 84, "score": 205131.2749916296 }, { "content": "#[test]\n\nfn client_fin_reorder() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n\n\n // Send ClientHello.\n\n let client_hs = client.process(None, now());\n\n assert!(client_hs.as_dgram_ref().is_some());\n\n\n\n let server_hs = server.process(client_hs.dgram(), now());\n\n assert!(server_hs.as_dgram_ref().is_some()); // ServerHello, etc...\n\n\n\n let client_ack = client.process(server_hs.dgram(), now());\n\n assert!(client_ack.as_dgram_ref().is_some());\n\n\n\n let server_out = server.process(client_ack.dgram(), now());\n\n assert!(server_out.as_dgram_ref().is_none());\n\n\n\n assert!(maybe_authenticate(&mut client));\n\n assert_eq!(*client.state(), State::Connected);\n\n\n", "file_path": "neqo-transport/src/connection/tests/stream.rs", "rank": 85, "score": 205048.5392883724 }, { "content": "#[test]\n\nfn stateless_reset_client() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n server\n\n .set_local_tparam(\n\n tparams::STATELESS_RESET_TOKEN,\n\n TransportParameter::Bytes(vec![77; 16]),\n\n )\n\n .unwrap();\n\n connect_force_idle(&mut client, &mut server);\n\n\n\n client.process_input(Datagram::new(addr(), addr(), vec![77; 21]), now());\n\n assert_draining(&client, &Error::StatelessReset);\n\n}\n", "file_path": "neqo-transport/src/connection/tests/close.rs", "rank": 86, "score": 205048.5392883724 }, { "content": "#[test]\n\nfn key_update_client() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n connect_force_idle(&mut client, &mut server);\n\n let mut now = now();\n\n\n\n assert_eq!(client.get_epochs(), (Some(3), Some(3))); // (write, read)\n\n assert_eq!(server.get_epochs(), (Some(3), Some(3)));\n\n\n\n assert!(client.initiate_key_update().is_ok());\n\n assert_update_blocked(&mut client);\n\n\n\n // Initiating an update should only increase the write epoch.\n\n assert_eq!(\n\n Output::Callback(LOCAL_IDLE_TIMEOUT),\n\n client.process(None, now)\n\n );\n\n assert_eq!(client.get_epochs(), (Some(4), Some(3)));\n\n\n\n // Send something to propagate the update.\n", "file_path": "neqo-transport/src/connection/tests/keys.rs", "rank": 87, "score": 205048.53928837238 }, { "content": "#[test]\n\nfn preferred_address_client() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n\n\n client\n\n .set_local_tparam(\n\n tparams::PREFERRED_ADDRESS,\n\n TransportParameter::Bytes(SAMPLE_PREFERRED_ADDRESS.to_vec()),\n\n )\n\n .unwrap();\n\n\n\n connect_fail(\n\n &mut client,\n\n &mut server,\n\n Error::PeerError(Error::TransportParameterError.code()),\n\n Error::TransportParameterError,\n\n );\n\n}\n\n\n\n/// Test that migration isn't permitted if the connection isn't in the right state.\n", "file_path": "neqo-transport/src/connection/tests/migration.rs", "rank": 88, "score": 205048.53928837238 }, { "content": "fn assert_error(c: &Connection, err: &ConnectionError) {\n\n match c.state() {\n\n State::Closing { error, .. } | State::Draining { error, .. 
} | State::Closed(error) => {\n\n assert_eq!(*error, *err);\n\n }\n\n _ => panic!(\"bad state {:?}\", c.state()),\n\n }\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 89, "score": 204924.66079785023 }, { "content": "#[must_use]\n\npub fn default_client() -> Connection {\n\n fixture_init();\n\n Connection::new_client(\n\n DEFAULT_SERVER_NAME,\n\n DEFAULT_ALPN,\n\n Rc::new(RefCell::new(CountingConnectionIdGenerator::default())),\n\n addr(),\n\n addr(),\n\n ConnectionParameters::default(),\n\n )\n\n .expect(\"create a default client\")\n\n}\n\n\n\n/// Create a transport server with default configuration.\n", "file_path": "test-fixture/src/lib.rs", "rank": 90, "score": 204915.06402905862 }, { "content": "fn already_initialized() -> bool {\n\n unsafe { nss::NSS_IsInitialized() != 0 }\n\n}\n\n\n", "file_path": "neqo-crypto/src/lib.rs", "rank": 91, "score": 203379.025984013 }, { "content": "fn exchange_ticket(\n\n client: &mut Connection,\n\n server: &mut Connection,\n\n now: Instant,\n\n) -> ResumptionToken {\n\n let validation = AddressValidation::new(now, ValidateAddress::NoToken).unwrap();\n\n let validation = Rc::new(RefCell::new(validation));\n\n server.set_validation(Rc::clone(&validation));\n\n server.send_ticket(now, &[]).expect(\"can send ticket\");\n\n let ticket = server.process_output(now).dgram();\n\n assert!(ticket.is_some());\n\n client.process_input(ticket.unwrap(), now);\n\n assert_eq!(*client.state(), State::Confirmed);\n\n get_tokens(client).pop().expect(\"should have token\")\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 92, "score": 202573.58482818783 }, { "content": "fn assert_draining(c: &Connection, expected: &Error) {\n\n assert!(c.state().closed());\n\n if let State::Draining {\n\n error: ConnectionError::Transport(error),\n\n ..\n\n } = c.state()\n\n {\n\n assert_eq!(error, expected);\n\n } else {\n\n panic!();\n\n }\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/close.rs", "rank": 93, "score": 202529.59968531167 }, { "content": "// Absent path PTU discovery, max v6 packet size should be PATH_MTU_V6.\n\nfn verify_pkt_honors_mtu() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n connect_force_idle(&mut client, &mut server);\n\n\n\n let now = now();\n\n\n\n let res = client.process(None, now);\n\n assert_eq!(res, Output::Callback(LOCAL_IDLE_TIMEOUT));\n\n\n\n // Try to send a large stream and verify first packet is correctly sized\n\n let stream_id = client.stream_create(StreamType::UniDi).unwrap();\n\n assert_eq!(client.stream_send(stream_id, &[0xbb; 2000]).unwrap(), 2000);\n\n let pkt0 = client.process(None, now);\n\n assert!(matches!(pkt0, Output::Datagram(_)));\n\n assert_eq!(pkt0.as_dgram_ref().unwrap().len(), PATH_MTU_V6);\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 94, "score": 201852.26696577886 }, { "content": "#[test]\n\nfn extra_initial_invalid_cid() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n let mut now = now();\n\n\n\n let c_init = client.process(None, now).dgram();\n\n assert!(c_init.is_some());\n\n now += DEFAULT_RTT / 2;\n\n let s_init = server.process(c_init, now).dgram();\n\n assert!(s_init.is_some());\n\n now += DEFAULT_RTT / 2;\n\n\n\n // If the client receives a packet that contains the wrong connection\n\n // ID, it won't send another Initial.\n\n let (_, hs) = split_datagram(&s_init.unwrap());\n\n let hs = hs.unwrap();\n\n let mut copy = 
hs.to_vec();\n\n assert_ne!(copy[5], 0); // The DCID should be non-zero length.\n\n copy[6] ^= 0xc4;\n\n let dgram_copy = Datagram::new(hs.destination(), hs.source(), copy);\n\n let nothing = client.process(Some(dgram_copy), now).dgram();\n\n assert!(nothing.is_none());\n\n}\n\n\n", "file_path": "neqo-transport/src/connection/tests/handshake.rs", "rank": 95, "score": 201820.64446786395 }, { "content": "#[test]\n\nfn zero_rtt_send_coalesce() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n connect(&mut client, &mut server);\n\n\n\n let token = exchange_ticket(&mut client, &mut server, now());\n\n let mut client = default_client();\n\n client\n\n .enable_resumption(now(), token)\n\n .expect(\"should set token\");\n\n let mut server = default_server();\n\n\n\n // Write 0-RTT before generating any packets.\n\n // This should result in a datagram that coalesces Initial and 0-RTT.\n\n let client_stream_id = client.stream_create(StreamType::UniDi).unwrap();\n\n client.stream_send(client_stream_id, &[1, 2, 3]).unwrap();\n\n let client_0rtt = client.process(None, now());\n\n assert!(client_0rtt.as_dgram_ref().is_some());\n\n\n\n assertions::assert_coalesced_0rtt(&client_0rtt.as_dgram_ref().unwrap()[..]);\n", "file_path": "neqo-transport/src/connection/tests/zerortt.rs", "rank": 96, "score": 201814.74203818289 }, { "content": "#[test]\n\nfn zero_rtt_send_reject() {\n\n const MESSAGE: &[u8] = &[1, 2, 3];\n\n\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n connect(&mut client, &mut server);\n\n\n\n let token = exchange_ticket(&mut client, &mut server, now());\n\n let mut client = default_client();\n\n client\n\n .enable_resumption(now(), token)\n\n .expect(\"should set token\");\n\n let mut server = Connection::new_server(\n\n test_fixture::DEFAULT_KEYS,\n\n test_fixture::DEFAULT_ALPN,\n\n Rc::new(RefCell::new(CountingConnectionIdGenerator::default())),\n\n ConnectionParameters::default(),\n\n )\n\n .unwrap();\n\n // Using a freshly initialized anti-replay context\n", "file_path": "neqo-transport/src/connection/tests/zerortt.rs", "rank": 97, "score": 201814.7420381829 }, { "content": "#[test]\n\nfn zero_rtt_send_recv() {\n\n let mut client = default_client();\n\n let mut server = default_server();\n\n connect(&mut client, &mut server);\n\n\n\n let token = exchange_ticket(&mut client, &mut server, now());\n\n let mut client = default_client();\n\n client\n\n .enable_resumption(now(), token)\n\n .expect(\"should set token\");\n\n let mut server = default_server();\n\n\n\n // Send ClientHello.\n\n let client_hs = client.process(None, now());\n\n assert!(client_hs.as_dgram_ref().is_some());\n\n\n\n // Now send a 0-RTT packet.\n\n let client_stream_id = client.stream_create(StreamType::UniDi).unwrap();\n\n client.stream_send(client_stream_id, &[1, 2, 3]).unwrap();\n\n let client_0rtt = client.process(None, now());\n", "file_path": "neqo-transport/src/connection/tests/zerortt.rs", "rank": 98, "score": 201814.7420381829 }, { "content": "pub fn new_server(params: ConnectionParameters) -> Connection {\n\n fixture_init();\n\n\n\n let mut c = Connection::new_server(\n\n test_fixture::DEFAULT_KEYS,\n\n test_fixture::DEFAULT_ALPN,\n\n Rc::new(RefCell::new(CountingConnectionIdGenerator::default())),\n\n params,\n\n )\n\n .expect(\"create a default server\");\n\n c.server_enable_0rtt(&test_fixture::anti_replay(), AllowZeroRtt {})\n\n .expect(\"enable 0-RTT\");\n\n c\n\n}\n", "file_path": "neqo-transport/src/connection/tests/mod.rs", "rank": 99, 
"score": 201195.99125843594 } ]
Rust
demo-kitferret/src/st7735.rs
GuiAmPm/kit-ferret-rs
51e042dd591095ce51d18d8b4c31c93c47ac0efd
pub mod instruction; use crate::spi_controller::SpiController; use ferret_rs::system::ScreenTrait; use crate::st7735::instruction::Instruction; use embedded_hal::blocking::delay::DelayMs; use embedded_hal::digital::v2::OutputPin; pub struct ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { spi: SPI, dc: DC, rst: RST, rgb: bool, inverted: bool, pub width: u16, pub height: u16, buffer: Option<&'a mut [u8]>, interlace: bool, interlace_even: bool } impl<'a, SPI, DC, RST> ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { pub fn new( spi: SPI, dc: DC, rst: RST, rgb: bool, inverted: bool, width: u16, height: u16, ) -> Self { let display = ST7735 { spi, dc, rst, rgb, inverted, width, height, buffer: None, interlace: false, interlace_even: false }; display } pub fn set_buffer(&mut self, buffer: Option<&'a mut [u8]>) { self.buffer = buffer; } pub fn set_interlace(&mut self, value: bool) { if !value { self.set_address_window(0, 0, self.width - 1, self.height - 1) } self.interlace = value; } fn update_entire_screen(&mut self) -> Result<(), ()> { if self.buffer != None { self.write_command(Instruction::RAMWR, &[])?; self.start_data()?; let buffer = self.buffer.as_ref().unwrap(); self.spi.write(&buffer); Ok(()) } else { todo!() } } fn update_screen_interlace(&mut self) -> Result<(), ()> { if self.buffer != None { let width = self.width; let height = self.height; let even = self.interlace_even; let start = if even { 0 } else { 1 }; for y in (start..height).step_by(2) { self.set_address_window(0, y, 160, y + 1); self.write_command(Instruction::RAMWR, &[])?; self.start_data()?; let buffer = self.buffer.as_ref().unwrap(); let start_y = y as usize * 160 * 2; let end_y = (y as usize + 1) * 160 * 2; self.spi.write(&buffer[(start_y..end_y)]); } self.interlace_even = !self.interlace_even; Ok(()) } else { todo!() } } pub fn init<DELAY>(&mut self, delay: &mut DELAY) where DELAY: DelayMs<u8>, { log::info!("Initialising screen"); self.hard_reset(delay); log::info!("SWRESET"); self.write_command(Instruction::SWRESET, &[]); delay.delay_ms(200); log::info!("SLPOUT"); self.write_command(Instruction::SLPOUT, &[]); delay.delay_ms(200); log::info!("FRMCTR1"); self.write_command(Instruction::FRMCTR1, &[0x01, 0x2C, 0x2D]); log::info!("FRMCTR2"); self.write_command(Instruction::FRMCTR2, &[0x01, 0x2C, 0x2D]); log::info!("FRMCTR3"); self.write_command(Instruction::FRMCTR3, &[0x01, 0x2C, 0x2D, 0x01, 0x2C, 0x2D]); log::info!("INVCTR"); self.write_command(Instruction::INVCTR, &[0x07]); log::info!("PWCTR1"); self.write_command(Instruction::PWCTR1, &[0xA2, 0x02, 0x84]); log::info!("PWCTR2"); self.write_command(Instruction::PWCTR2, &[0xC5]); log::info!("PWCTR3"); self.write_command(Instruction::PWCTR3, &[0x0A, 0x00]); log::info!("PWCTR4"); self.write_command(Instruction::PWCTR4, &[0x8A, 0x2A]); log::info!("PWCTR5"); self.write_command(Instruction::PWCTR5, &[0x8A, 0xEE]); log::info!("VMCTR1"); self.write_command(Instruction::VMCTR1, &[0x0E]); if self.inverted { log::info!("INVON"); self.write_command(Instruction::INVON, &[]); } else { log::info!("INVOFF"); self.write_command(Instruction::INVOFF, &[]); } if self.rgb { log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x00]); } else { log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x08]); } log::info!("COLMOD"); self.write_command(Instruction::COLMOD, &[0x05]); log::info!("COLMOD"); self.write_command(Instruction::DISPON, &[]); log::info!("MADCTL"); self.write_command(Instruction::MADCTL, 
&[0x60]); self.set_address_window(0, 0, self.width - 1, self.height - 1); delay.delay_ms(200); } pub fn hard_reset<DELAY>(&mut self, delay: &mut DELAY) -> Result<(), ()> where DELAY: DelayMs<u8>, { self.rst.set_high().map_err(|_| ())?; delay.delay_ms(10); self.rst.set_low().map_err(|_| ())?; delay.delay_ms(10); self.rst.set_high().map_err(|_| ()) } pub fn set_address_window(&mut self, sx: u16, sy: u16, ex: u16, ey: u16) { self.write_command(Instruction::CASET, &[]); self.start_data(); self.write_word(sx); self.write_word(ex); self.write_command(Instruction::RASET, &[]); self.start_data(); self.write_word(sy); self.write_word(ey); } fn write_word(&mut self, value: u16) { self.write_data(&value.to_be_bytes()); } fn write_command(&mut self, command: Instruction, params: &[u8]) -> Result<(), ()> { self.dc.set_low().map_err(|_| ())?; self.spi.write(&[command as u8]); if !params.is_empty() { self.start_data()?; self.write_data(params); } Ok(()) } fn start_data(&mut self) -> Result<(), ()> { self.dc.set_high().map_err(|_| ()) } fn write_data(&mut self, data: &[u8]) { self.spi.write(data); } fn set_pixel_internal(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) { let width = self.width; if x > self.width as u16 || y > self.height as u16 { return; } if let Some(buffer) = &mut self.buffer { let index = (y * width as u16 + x) as usize; let r = ((((r as u16) * 31 / 255) & 0b0011_1111) as u16) << 11; let g = ((((g as u16) * 63 / 255) & 0b0111_1111) as u16) << 5; let b = ((((b as u16) * 31 / 255) & 0b0011_1111) as u16) << 0; let color = r+g+b; let bytes = color.to_be_bytes(); buffer[index * 2 + 0] = bytes[0]; buffer[index * 2 + 1] = bytes[1]; } else { } } fn clear_internal(&mut self, red: u8, green: u8, blue: u8) { if let Some(buffer) = &mut self.buffer { for x in (0..buffer.len()).step_by(2) { let r = ((((red as u16) * 31 / 255) & 0b0011_1111) as u16) << 11; let g = ((((green as u16) * 63 / 255) & 0b0111_1111) as u16) << 5; let b = ((((blue as u16) * 31 / 255) & 0b0011_1111) as u16) << 0; let color = r+g+b; let bytes = color.to_be_bytes(); buffer[x + 0] = bytes[0]; buffer[x + 1] = bytes[1]; } } } } impl<'a, SPI, DC, RST> ScreenTrait for ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { fn get_width(&self) -> u16 { self.width } fn get_height(&self) -> u16 { self.height } fn set_pixel(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) { self.set_pixel_internal(x, y, r, g, b); } fn clear(&mut self, r: u8, g: u8, b: u8) { self.clear_internal(r, g, b); } fn update_screen(&mut self) -> core::result::Result<(), ()> { if self.interlace { self.update_screen_interlace() } else { self.update_entire_screen() } } }
pub mod instruction; use crate::spi_controller::SpiController; use ferret_rs::system::ScreenTrait; use crate::st7735::instruction::Instruction; use embedded_hal::blocking::delay::DelayMs; use embedded_hal::digital::v2::OutputPin; pub struct ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { spi: SPI, dc: DC, rst: RST, rgb: bool, inverted: bool, pub width: u16, pub height: u16, buffer: Option<&'a mut [u8]>, interlace: bool, interlace_even: bool } impl<'a, SPI, DC, RST> ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { pub fn new( spi: SPI, dc: DC, rst: RST, rgb: bool, inverted: bool, width: u16, height: u16, ) -> Self { let display = ST7735 { spi, dc, rst, rgb, inverted, width, height, buffer: None, interlace: false, interlace_even: false }; display } pub fn set_buffer(&mut self, buffer: Option<&'a mut [u8]>) { self.buffer = buffer; } pub fn set_interlace(&mut self, value: bool) { if !value { self.set_address_window(0, 0, self.width - 1, self.height - 1) } self.interlace = value; }
fn update_screen_interlace(&mut self) -> Result<(), ()> { if self.buffer != None { let width = self.width; let height = self.height; let even = self.interlace_even; let start = if even { 0 } else { 1 }; for y in (start..height).step_by(2) { self.set_address_window(0, y, 160, y + 1); self.write_command(Instruction::RAMWR, &[])?; self.start_data()?; let buffer = self.buffer.as_ref().unwrap(); let start_y = y as usize * 160 * 2; let end_y = (y as usize + 1) * 160 * 2; self.spi.write(&buffer[(start_y..end_y)]); } self.interlace_even = !self.interlace_even; Ok(()) } else { todo!() } } pub fn init<DELAY>(&mut self, delay: &mut DELAY) where DELAY: DelayMs<u8>, { log::info!("Initialising screen"); self.hard_reset(delay); log::info!("SWRESET"); self.write_command(Instruction::SWRESET, &[]); delay.delay_ms(200); log::info!("SLPOUT"); self.write_command(Instruction::SLPOUT, &[]); delay.delay_ms(200); log::info!("FRMCTR1"); self.write_command(Instruction::FRMCTR1, &[0x01, 0x2C, 0x2D]); log::info!("FRMCTR2"); self.write_command(Instruction::FRMCTR2, &[0x01, 0x2C, 0x2D]); log::info!("FRMCTR3"); self.write_command(Instruction::FRMCTR3, &[0x01, 0x2C, 0x2D, 0x01, 0x2C, 0x2D]); log::info!("INVCTR"); self.write_command(Instruction::INVCTR, &[0x07]); log::info!("PWCTR1"); self.write_command(Instruction::PWCTR1, &[0xA2, 0x02, 0x84]); log::info!("PWCTR2"); self.write_command(Instruction::PWCTR2, &[0xC5]); log::info!("PWCTR3"); self.write_command(Instruction::PWCTR3, &[0x0A, 0x00]); log::info!("PWCTR4"); self.write_command(Instruction::PWCTR4, &[0x8A, 0x2A]); log::info!("PWCTR5"); self.write_command(Instruction::PWCTR5, &[0x8A, 0xEE]); log::info!("VMCTR1"); self.write_command(Instruction::VMCTR1, &[0x0E]); if self.inverted { log::info!("INVON"); self.write_command(Instruction::INVON, &[]); } else { log::info!("INVOFF"); self.write_command(Instruction::INVOFF, &[]); } if self.rgb { log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x00]); } else { log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x08]); } log::info!("COLMOD"); self.write_command(Instruction::COLMOD, &[0x05]); log::info!("COLMOD"); self.write_command(Instruction::DISPON, &[]); log::info!("MADCTL"); self.write_command(Instruction::MADCTL, &[0x60]); self.set_address_window(0, 0, self.width - 1, self.height - 1); delay.delay_ms(200); } pub fn hard_reset<DELAY>(&mut self, delay: &mut DELAY) -> Result<(), ()> where DELAY: DelayMs<u8>, { self.rst.set_high().map_err(|_| ())?; delay.delay_ms(10); self.rst.set_low().map_err(|_| ())?; delay.delay_ms(10); self.rst.set_high().map_err(|_| ()) } pub fn set_address_window(&mut self, sx: u16, sy: u16, ex: u16, ey: u16) { self.write_command(Instruction::CASET, &[]); self.start_data(); self.write_word(sx); self.write_word(ex); self.write_command(Instruction::RASET, &[]); self.start_data(); self.write_word(sy); self.write_word(ey); } fn write_word(&mut self, value: u16) { self.write_data(&value.to_be_bytes()); } fn write_command(&mut self, command: Instruction, params: &[u8]) -> Result<(), ()> { self.dc.set_low().map_err(|_| ())?; self.spi.write(&[command as u8]); if !params.is_empty() { self.start_data()?; self.write_data(params); } Ok(()) } fn start_data(&mut self) -> Result<(), ()> { self.dc.set_high().map_err(|_| ()) } fn write_data(&mut self, data: &[u8]) { self.spi.write(data); } fn set_pixel_internal(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) { let width = self.width; if x > self.width as u16 || y > self.height as u16 { return; } if let Some(buffer) = &mut self.buffer { let 
index = (y * width as u16 + x) as usize; let r = ((((r as u16) * 31 / 255) & 0b0011_1111) as u16) << 11; let g = ((((g as u16) * 63 / 255) & 0b0111_1111) as u16) << 5; let b = ((((b as u16) * 31 / 255) & 0b0011_1111) as u16) << 0; let color = r+g+b; let bytes = color.to_be_bytes(); buffer[index * 2 + 0] = bytes[0]; buffer[index * 2 + 1] = bytes[1]; } else { } } fn clear_internal(&mut self, red: u8, green: u8, blue: u8) { if let Some(buffer) = &mut self.buffer { for x in (0..buffer.len()).step_by(2) { let r = ((((red as u16) * 31 / 255) & 0b0011_1111) as u16) << 11; let g = ((((green as u16) * 63 / 255) & 0b0111_1111) as u16) << 5; let b = ((((blue as u16) * 31 / 255) & 0b0011_1111) as u16) << 0; let color = r+g+b; let bytes = color.to_be_bytes(); buffer[x + 0] = bytes[0]; buffer[x + 1] = bytes[1]; } } } } impl<'a, SPI, DC, RST> ScreenTrait for ST7735<'a, SPI, DC, RST> where SPI: SpiController, DC: OutputPin, RST: OutputPin, { fn get_width(&self) -> u16 { self.width } fn get_height(&self) -> u16 { self.height } fn set_pixel(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) { self.set_pixel_internal(x, y, r, g, b); } fn clear(&mut self, r: u8, g: u8, b: u8) { self.clear_internal(r, g, b); } fn update_screen(&mut self) -> core::result::Result<(), ()> { if self.interlace { self.update_screen_interlace() } else { self.update_entire_screen() } } }
fn update_entire_screen(&mut self) -> Result<(), ()> { if self.buffer != None { self.write_command(Instruction::RAMWR, &[])?; self.start_data()?; let buffer = self.buffer.as_ref().unwrap(); self.spi.write(&buffer); Ok(()) } else { todo!() } }
function_block-full_function
[ { "content": "// ported from: https://www.geeksforgeeks.org/implement-itoa/\n\npub fn integer_to_string<T>(mut num: T, buffer: &mut [char], base: u8)\n\nwhere T: IntegerType {\n\n let mut index = 0;\n\n let mut is_negative = false;\n\n\n\n if num.is_zero() {\n\n buffer[index] = '0';\n\n buffer[index + 1] = '\\0';\n\n return;\n\n }\n\n\n\n if num.is_lt_zero() && base == 10 {\n\n is_negative = true;\n\n num.invert_sign();\n\n }\n\n\n\n while !num.is_zero() {\n\n let unit = num.modulus(base);\n\n buffer[index] =\n\n if unit > 9 {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 0, "score": 138284.89068759777 }, { "content": "// https://gist.github.com/Linaiz/4e27ea8d9760050008e3638a6fcf8be8\n\npub fn float_to_string<T>(mut num: T, buffer: &mut [char], decimal_count: u8)\n\nwhere T: FloatType + Copy + Clone\n\n{\n\n let mut count = 0;\n\n\n\n if num.is_negative() {\n\n buffer[count] = '-';\n\n num.invert_sign();\n\n count += 1;\n\n }\n\n\n\n let start = count;\n\n let mut temp = num;\n\n\n\n while temp.is_ge_one() {\n\n let res = temp.int_modulus(10);\n\n buffer[count] = (res + '0' as u8) as char;\n\n count += 1;\n\n temp.div_set(10.0);\n\n }\n", "file_path": "ferret-utils/src/convert/floats.rs", "rank": 1, "score": 135762.36005057185 }, { "content": "pub fn main() -> ! {\n\n let sdl_context = sdl2::init().expect(\"Failed to create SDL2 context.\");\n\n let video_subsystem = sdl_context.video().expect(\"Failed to create video subsystem.\");\n\n\n\n let window = video_subsystem\n\n .window(\"Ferret - Desktop Demo\", 160 * 5, 128 * 5)\n\n .position_centered()\n\n .build()\n\n .unwrap();\n\n\n\n let canvas = window.into_canvas().build().expect(\"Failed to create canvas.\");\n\n let event_pump = sdl_context.event_pump().expect(\"Failed to create event pump\");\n\n\n\n let controller = SDL2Controller::new(event_pump);\n\n let mut color_buffer = [0u8; 160 * 128 * 4];\n\n let screen = SDL2Screen::new(canvas, 160, 128, &mut color_buffer);\n\n let timer = SDL2Timer::new();\n\n\n\n let depth_buffer = &mut [0.0f32; 160 * 128];\n\n\n\n let mut game_loop =\n\n GameLoop::new(\n\n controller,\n\n screen,\n\n timer,\n\n depth_buffer\n\n );\n\n\n\n game_loop.start();\n\n}", "file_path": "demo-desktop/src/main.rs", "rank": 2, "score": 74519.94896019979 }, { "content": "pub trait SpiController {\n\n fn write(&mut self, buffer: &[u8]);\n\n}\n\n\n\npub struct BlockingSpiController<SPI>\n\nwhere SPI: spi::Write<u8> {\n\n spi: SPI\n\n}\n\n\n\nimpl<SPI> BlockingSpiController<SPI>\n\nwhere SPI: spi::Write<u8>\n\n{\n\n pub fn new(spi: SPI) -> Self {\n\n Self {\n\n spi\n\n }\n\n }\n\n}\n\n\n\nimpl<SPI> SpiController for BlockingSpiController<SPI>\n", "file_path": "demo-kitferret/src/spi_controller.rs", "rank": 3, "score": 65417.83316448155 }, { "content": "#[derive(num_derive::FromPrimitive, num_derive::ToPrimitive)]\n\npub enum Instruction {\n\n NOP = 0x00,\n\n SWRESET = 0x01,\n\n RDDID = 0x04,\n\n RDDST = 0x09,\n\n SLPIN = 0x10,\n\n SLPOUT = 0x11,\n\n PTLON = 0x12,\n\n NORON = 0x13,\n\n INVOFF = 0x20,\n\n INVON = 0x21,\n\n DISPOFF = 0x28,\n\n DISPON = 0x29,\n\n CASET = 0x2A,\n\n RASET = 0x2B,\n\n RAMWR = 0x2C,\n\n RAMRD = 0x2E,\n\n PTLAR = 0x30,\n\n COLMOD = 0x3A,\n", "file_path": "demo-kitferret/src/st7735/instruction.rs", "rank": 4, "score": 53101.6354834966 }, { "content": " MADCTL = 0x36,\n\n FRMCTR1 = 0xB1,\n\n FRMCTR2 = 0xB2,\n\n FRMCTR3 = 0xB3,\n\n INVCTR = 0xB4,\n\n DISSET5 = 0xB6,\n\n PWCTR1 = 0xC0,\n\n PWCTR2 = 0xC1,\n\n PWCTR3 = 0xC2,\n\n PWCTR4 = 0xC3,\n\n PWCTR5 = 0xC4,\n\n VMCTR1 = 
0xC5,\n\n RDID1 = 0xDA,\n\n RDID2 = 0xDB,\n\n RDID3 = 0xDC,\n\n RDID4 = 0xDD,\n\n PWCTR6 = 0xFC,\n\n GMCTRP1 = 0xE0,\n\n GMCTRN1 = 0xE1,\n\n}", "file_path": "demo-kitferret/src/st7735/instruction.rs", "rank": 5, "score": 53100.249888427636 }, { "content": "/// Initialize the USB logging system, and prepares the\n\n/// USB ISR with the poller\n\n///\n\n/// When `init` returns, the USB interrupt will be enabled,\n\n/// and the host may begin to interface the device.\n\n/// You should only call this once.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if the imxrt-ral USB1 instance is already taken.\n\npub fn init() -> Result<bsp::usb::Reader, bsp::usb::Error> {\n\n let inst = USB1::take().unwrap();\n\n bsp::usb::init(inst, Default::default()).map(|(poller, reader)| {\n\n setup(poller);\n\n reader\n\n })\n\n}\n\n\n", "file_path": "demo-kitferret/src/usb_io.rs", "rank": 6, "score": 52091.168492371115 }, { "content": "/// Split the USB logging system, and prepares the\n\n/// USB ISR with the poller\n\n///\n\n/// When `split` returns, the USB interrupt will be enabled,\n\n/// and the host may begin to interface the device.\n\n/// You should only call this once.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if the imxrt-ral USB1 instance is already taken.\n\npub fn split() -> Result<(bsp::usb::Reader, bsp::usb::Writer), bsp::usb::Error> {\n\n let inst = USB1::take().unwrap();\n\n bsp::usb::split(inst).map(|(poller, reader, writer)| {\n\n setup(poller);\n\n (reader, writer)\n\n })\n\n}\n\n\n", "file_path": "demo-kitferret/src/usb_io.rs", "rank": 7, "score": 46696.01448571468 }, { "content": "struct SimpleVertexShader {\n\n pub model_view_matrix: Matrix4<f32>,\n\n}\n\n\n\nimpl SimpleVertexShader {\n\n pub fn new() -> Self {\n\n Self {\n\n model_view_matrix: Matrix4::identity()\n\n }\n\n }\n\n}\n\n\n\nimpl VertexShaderTrait<6, 7> for SimpleVertexShader {\n\n fn process(&self, vec: &Vector6<f32>) -> SVector<f32, 7> {\n\n let vec4 = Vector4::new(vec.x, vec.y, vec.z, 1.0);\n\n let vec4_ = self.model_view_matrix * vec4;\n\n let x = vec4_.x;\n\n let y = vec4_.y;\n\n let z = vec4_.z;\n\n let w = vec4_.w;\n\n return nalgebra::vector!(x, y, z, w, vec[3], vec[4], vec[5]);\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 8, "score": 44179.31488800523 }, { "content": "struct SimplePixelShader<'a> {\n\n pub texture: Option<&'a [u8]>,\n\n pub tex_wid: u16,\n\n pub tex_hei: u16\n\n}\n\n\n\nimpl<'a> SimplePixelShader<'a> {\n\n pub fn new() -> Self {\n\n Self {\n\n texture: None,\n\n tex_wid: 0,\n\n tex_hei: 0\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> PixelShaderTrait<7> for SimplePixelShader<'a> {\n\n\n\n fn process(&self, v: &PixelData<7>) -> Vector3<f32> {\n\n if let Some(texture) = self.texture {\n", "file_path": "src/lib.rs", "rank": 9, "score": 42556.185838360514 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let mut peripherals = bsp::Peripherals::take().unwrap();\n\n let mut systick = bsp::SysTick::new(cortex_m::Peripherals::take().unwrap().SYST);\n\n usb_io::init().unwrap();\n\n\n\n systick.delay(2000);\n\n peripherals.ccm\n\n .pll1\n\n .set_arm_clock(bsp::hal::ccm::PLL1::ARM_HZ, &mut peripherals.ccm.handle, &mut peripherals.dcdc);\n\n\n\n let pins = bsp::t40::into_pins(peripherals.iomuxc);\n\n\n\n let mut buffer: [u8; (SCREEN_WIDTH * SCREEN_HEIGHT * 2) as usize]\n\n = [0x00u8; (SCREEN_WIDTH * SCREEN_HEIGHT * 2) as usize];\n\n\n\n peripherals.ccm.pll1.set_arm_clock(\n\n bsp::hal::ccm::PLL1::ARM_HZ,\n\n &mut peripherals.ccm.handle,\n\n &mut peripherals.dcdc,\n\n );\n", "file_path": "demo-kitferret/src/main.rs", "rank": 10, "score": 41846.51161695087 }, { "content": "pub trait TimerTrait {\n\n fn delay(&mut self, millis: u32);\n\n fn measure<F>(&self, act: F) -> u128 where F: FnOnce();\n\n}", "file_path": "src/system/system_traits.rs", "rank": 11, "score": 34197.73937015262 }, { "content": "pub trait ControllerTrait {\n\n fn update(&mut self);\n\n fn get_button_status(&self, button: ControllerButton) -> ButtonState;\n\n}\n\n\n", "file_path": "src/system/system_traits.rs", "rank": 12, "score": 34197.73937015262 }, { "content": "pub trait ScreenTrait {\n\n fn get_width(&self) -> u16;\n\n fn get_height(&self) -> u16;\n\n\n\n fn set_pixel(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8);\n\n fn clear(&mut self, r: u8, g: u8, b: u8);\n\n\n\n fn update_screen(&mut self) -> Result<(), ()>;\n\n}", "file_path": "ferret-graphics/src/screen_trait.rs", "rank": 13, "score": 33233.73108828549 }, { "content": "pub trait FloatType {\n\n fn is_ge_one(&self) -> bool;\n\n fn mul_set(&mut self, value: f32);\n\n fn div_set(&mut self, value: f32);\n\n fn minus_self_as_int(&mut self);\n\n fn is_negative(&self) -> bool;\n\n fn invert_sign(&mut self);\n\n fn int_modulus(&self, value: u8) -> u8;\n\n}\n\n\n\nimpl FloatType for f32 {\n\n fn is_ge_one(&self) -> bool {\n\n *self >= 1.0\n\n }\n\n\n\n fn mul_set(&mut self, value: f32) {\n\n *self *= value\n\n }\n\n\n\n fn div_set(&mut self, value: f32) {\n", "file_path": "ferret-utils/src/convert/floats.rs", "rank": 14, "score": 33233.73108828549 }, { "content": "pub trait IntegerType {\n\n fn is_zero(&self) -> bool;\n\n fn is_lt_zero(&self) -> bool;\n\n fn invert_sign(&mut self);\n\n fn div_set(&mut self, value: u8);\n\n fn modulus(&mut self, value: u8) -> u8;\n\n}\n\n\n\nimpl IntegerType for u8 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 15, "score": 33233.73108828549 }, { "content": "/// Setup the USB ISR with the USB poller\n\nfn setup(poller: bsp::usb::Poller) {\n\n static POLLER: Mutex<RefCell<Option<bsp::usb::Poller>>> = Mutex::new(RefCell::new(None));\n\n\n\n #[cortex_m_rt::interrupt]\n\n fn USB_OTG1() {\n\n cortex_m::interrupt::free(|cs| {\n\n POLLER\n\n .borrow(cs)\n\n .borrow_mut()\n\n .as_mut()\n\n .map(|poller| poller.poll());\n\n });\n\n }\n\n\n\n cortex_m::interrupt::free(|cs| {\n\n *POLLER.borrow(cs).borrow_mut() = Some(poller);\n\n // Safety: invoked in a critical section that also prepares the ISR\n\n // shared memory. 
ISR memory is ready by the time the ISR runs.\n\n unsafe { cortex_m::peripheral::NVIC::unmask(bsp::interrupt::USB_OTG1) };\n\n });\n\n}\n", "file_path": "demo-kitferret/src/usb_io.rs", "rank": 16, "score": 32501.35507197612 }, { "content": "where SPI: spi::Write<u8> {\n\n\n\n fn write(&mut self, buffer: &[u8]) {\n\n self.spi.write(buffer);\n\n }\n\n}\n\n\n\npub struct DmaTransferSpiController<F>\n\nwhere F: FnMut(&[u8])\n\n{\n\n do_write: F\n\n}\n\n\n\nimpl<F> DmaTransferSpiController<F>\n\nwhere F: FnMut(&[u8])\n\n{\n\n pub fn new(do_write: F) -> Self\n\n {\n\n Self {\n\n do_write\n", "file_path": "demo-kitferret/src/spi_controller.rs", "rank": 32, "score": 26760.410957916774 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<F> SpiController for DmaTransferSpiController<F>\n\nwhere F: FnMut(&[u8])\n\n{\n\n fn write(&mut self, buffer: &[u8]) {\n\n (self.do_write)(buffer);\n\n }\n\n}", "file_path": "demo-kitferret/src/spi_controller.rs", "rank": 33, "score": 26753.6938973432 }, { "content": "use embedded_hal::blocking::spi;\n\n\n", "file_path": "demo-kitferret/src/spi_controller.rs", "rank": 34, "score": 26744.734945678854 }, { "content": "pub trait PixelShaderTrait<const TVERTEX_INPUT_SIZE: usize> {\n\n fn process(&self, pixel_data: &PixelData<TVERTEX_INPUT_SIZE>) -> Vector3<f32>;\n\n}", "file_path": "ferret-graphics/src/ferret_3d/pixel_shader_trait.rs", "rank": 35, "score": 25436.413301877918 }, { "content": "pub trait VertexShaderTrait<const TVERTEX_IN_SIZE: usize, const TVERTEX_OUT_SIZE: usize> {\n\n fn process(&self, v: &SVector<f32, TVERTEX_IN_SIZE>) -> SVector<f32, TVERTEX_OUT_SIZE>;\n\n}\n", "file_path": "ferret-graphics/src/ferret_3d/vertex_shader_trait.rs", "rank": 36, "score": 22574.323647213394 }, { "content": " ControllerButton::R => self.r,\n\n }\n\n }\n\n}\n\n\n\npub struct SDL2Screen<'a> {\n\n canvas: Canvas<sdl2::video::Window>,\n\n screen_buffer: Texture,\n\n width: u16,\n\n height: u16,\n\n color_buffer: &'a mut [u8]\n\n}\n\n\n\nimpl<'a> SDL2Screen<'a> {\n\n pub fn new(canvas: Canvas<sdl2::video::Window>, width: u16, height: u16, color_buffer: &'a mut [u8]) -> Self {\n\n let screen_buffer = canvas.create_texture(\n\n PixelFormatEnum::RGBA32,\n\n TextureAccess::Static,\n\n width as u32,\n\n height as u32\n", "file_path": "demo-desktop/src/sdl2_interface.rs", "rank": 37, "score": 21.67329710016904 }, { "content": " ).unwrap();\n\n\n\n Self {\n\n canvas,\n\n screen_buffer,\n\n width,\n\n height,\n\n color_buffer\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> ScreenTrait for SDL2Screen<'a> {\n\n fn get_width(&self) -> u16 { self.width }\n\n fn get_height(&self) -> u16 { self.height }\n\n\n\n fn set_pixel(&mut self, x: u16, y: u16, r: u8, g: u8, b: u8) {\n\n let index = (y * self.get_width() + x) as usize;\n\n\n\n self.color_buffer[index * 4 + 0] = r;\n", "file_path": "demo-desktop/src/sdl2_interface.rs", "rank": 38, "score": 20.440174373503456 }, { "content": " *self /= value as Self\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n\nimpl IntegerType for i128 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n\n *self /= value as Self\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n\n\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 39, "score": 19.272081085061433 }, { "content": " *self /= value\n\n }\n\n\n\n fn 
minus_self_as_int(&mut self) {\n\n *self -= (*self as u32) as Self;\n\n }\n\n\n\n fn is_negative(&self) -> bool {\n\n *self < 0.0\n\n }\n\n\n\n fn invert_sign(&mut self) {\n\n *self = -*self\n\n }\n\n\n\n fn int_modulus(&self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n", "file_path": "ferret-utils/src/convert/floats.rs", "rank": 40, "score": 18.99076110317459 }, { "content": " *self /= value as Self\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n\nimpl IntegerType for i32 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 41, "score": 18.807420581399157 }, { "content": " *self /= value as Self\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n\nimpl IntegerType for i64 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 42, "score": 18.807420581399157 }, { "content": " *self /= value as Self\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n\nimpl IntegerType for i8 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 43, "score": 18.807420581399157 }, { "content": " *self /= value as Self\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n\nimpl IntegerType for u32 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 44, "score": 18.807420581399157 }, { "content": " *self /= value as Self\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n\nimpl IntegerType for u128 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 45, "score": 18.807420581399157 }, { "content": " *self /= value as Self\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n\nimpl IntegerType for u64 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 46, "score": 18.807420581399157 }, { "content": " *self /= value\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n *self % value\n\n }\n\n}\n\n\n\nimpl IntegerType for u16 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 
47, "score": 18.33972631062349 }, { "content": " let p9 = GPIO::new(pins.p9).output();\n\n let p8 = GPIO::new(pins.p8).output();\n\n\n\n let spi_controller = BlockingSpiController::new(spi4);\n\n let mut st7735 = ST7735::new(spi_controller, p9, p8, true, false, SCREEN_WIDTH, SCREEN_HEIGHT);\n\n\n\n st7735.init(&mut systick);\n\n\n\n // Turn on the Backlight\n\n GPIO::new(pins.p7).output().set();\n\n\n\n st7735.set_buffer(Some(&mut buffer));\n\n st7735.set_interlace(true);\n\n\n\n let mut control = Controller::init(\n\n pins.p0,\n\n pins.p1,\n\n pins.p2,\n\n pins.p3,\n\n pins.p4,\n", "file_path": "demo-kitferret/src/main.rs", "rank": 48, "score": 18.219833813627965 }, { "content": "#![no_std]\n\n#![no_main]\n\n\n\nmod usb_io;\n\nmod st7735;\n\nmod controller;\n\nmod timer;\n\nmod spi_controller;\n\n\n\nuse crate::spi_controller::BlockingSpiController;\n\nuse crate::controller::Controller;\n\nuse crate::st7735::ST7735;\n\nuse cortex_m_rt::{entry};\n\nuse ferret_rs::GameLoop;\n\nuse teensy4_panic as _;\n\nuse teensy4_bsp as bsp;\n\nuse bsp::hal::gpio::GPIO;\n\nuse timer::Timer;\n\n\n\nconst SCREEN_WIDTH: u16 = 160;\n\nconst SCREEN_HEIGHT: u16 = 128;\n\n\n\n#[entry]\n", "file_path": "demo-kitferret/src/main.rs", "rank": 49, "score": 18.1949223450003 }, { "content": " screen,\n\n depth_buffer,\n\n depth_test: false\n\n }\n\n }\n\n\n\n pub fn set_depth_test(&mut self, value: bool) {\n\n self.depth_test = value;\n\n }\n\n\n\n pub fn clear_color_buffer(&mut self, color: Color) {\n\n let rgb = color.as_rgb888();\n\n self.screen.clear(rgb.0, rgb.1, rgb.2);\n\n }\n\n\n\n pub fn clear_depth_buffer(&mut self, depth: f32) {\n\n for x in 0..self.depth_buffer.len() {\n\n self.depth_buffer[x] = depth;\n\n }\n\n }\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 50, "score": 17.227243837100577 }, { "content": "\n\n fn invert_sign(&mut self) {\n\n *self = -*self\n\n }\n\n\n\n fn int_modulus(&self, value: u8) -> u8 {\n\n (*self % value as Self) as u8\n\n }\n\n}\n\n\n", "file_path": "ferret-utils/src/convert/floats.rs", "rank": 51, "score": 16.159565415262133 }, { "content": " *self = (*self as i16 / value as i16) as i8;\n\n }\n\n\n\n fn modulus(&mut self, value: u8) -> u8 {\n\n (*self as i16 % value as i16) as u8\n\n }\n\n}\n\n\n\nimpl IntegerType for i16 {\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n\n\n fn is_lt_zero(&self) -> bool {\n\n false\n\n }\n\n\n\n fn invert_sign(&mut self) {}\n\n\n\n fn div_set(&mut self, value: u8) {\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 52, "score": 15.469641169945563 }, { "content": "\n\n self.canvas.copy(&self.screen_buffer, None, None).unwrap();\n\n self.canvas.present();\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\npub struct SDL2Timer {\n\n}\n\n\n\nimpl SDL2Timer {\n\n pub fn new() -> Self {\n\n Self {}\n\n }\n\n}\n\n\n\nimpl TimerTrait for SDL2Timer {\n\n fn delay(&mut self, millis: u32) {\n\n thread::sleep(Duration::from_millis(millis as u64));\n", "file_path": "demo-desktop/src/sdl2_interface.rs", "rank": 53, "score": 14.06765396662922 }, { "content": " self.color_buffer[index * 4 + 1] = g;\n\n self.color_buffer[index * 4 + 2] = b;\n\n self.color_buffer[index * 4 + 3] = 255;\n\n }\n\n\n\n fn clear(&mut self, r: u8, g: u8, b: u8) {\n\n for index in (0..self.color_buffer.len()).step_by(4) {\n\n self.color_buffer[index + 0] = r;\n\n self.color_buffer[index + 1] = g;\n\n self.color_buffer[index + 2] = b;\n\n self.color_buffer[index + 3] = 255;\n\n }\n\n }\n\n\n\n fn update_screen(&mut self) -> std::result::Result<(), ()> {\n\n 
self.screen_buffer.update(\n\n None,\n\n &self.color_buffer,\n\n (4 * self.get_width()) as usize\n\n ).unwrap();\n", "file_path": "demo-desktop/src/sdl2_interface.rs", "rank": 54, "score": 12.849847157514214 }, { "content": "impl FloatType for f64 {\n\n fn is_ge_one(&self) -> bool {\n\n *self >= 1.0\n\n }\n\n\n\n fn mul_set(&mut self, value: f32) {\n\n *self *= value as f64\n\n }\n\n\n\n fn div_set(&mut self, value: f32) {\n\n *self /= value as f64\n\n }\n\n\n\n fn minus_self_as_int(&mut self) {\n\n *self -= (*self as u32) as Self;\n\n }\n\n\n\n fn is_negative(&self) -> bool {\n\n *self < 0.0\n\n }\n", "file_path": "ferret-utils/src/convert/floats.rs", "rank": 55, "score": 12.802349815438276 }, { "content": "\n\nconst BLOCK_SIZE: usize = 8;\n\nconst DEPTH_RANGE_FAR: f32 = 1.0;\n\nconst DEPTH_RANGE_NEAR: f32 = 0.0;\n\n\n\npub struct FerretGraphics<'a, TScreen>\n\nwhere\n\n TScreen: ScreenTrait\n\n{\n\n screen: TScreen,\n\n depth_buffer: &'a mut [f32],\n\n depth_test: bool\n\n}\n\n\n\nimpl<'a, TScreen> FerretGraphics<'a, TScreen>\n\nwhere\n\n TScreen: ScreenTrait\n\n{\n\n pub fn new(screen: TScreen, depth_buffer: &'a mut [f32]) -> Self {\n\n Self {\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 56, "score": 12.531857797995883 }, { "content": "\n\n self.draw_char(x, y, ch, font, font_color, bg_color);\n\n x += 6;\n\n\n\n if x >= self.screen.get_width() {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n pub fn draw_char_array(\n\n &mut self,\n\n mut x: u16,\n\n mut y: u16,\n\n ch_array: &[char],\n\n start: usize,\n\n font: &[u8],\n\n font_color: Color,\n\n bg_color: Option<Color>\n\n ) {\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 57, "score": 11.716694713083431 }, { "content": "use crate::ferret_3d::edge_data::EdgeData;\n\nuse crate::ferret_3d::triangle_equations::TriangleEquations;\n\n\n\npub struct TriangleEdgeTest<const TPARAMETER_COUNT: usize>(bool, bool, bool);\n\n\n\nimpl<const TPARAMETER_COUNT: usize> TriangleEdgeTest<TPARAMETER_COUNT> {\n\n pub fn new(triangle: &TriangleEquations<TPARAMETER_COUNT>, edge: &EdgeData<TPARAMETER_COUNT>) -> Self {\n\n Self(\n\n triangle.e0.test(edge.ev0),\n\n triangle.e1.test(edge.ev1),\n\n triangle.e2.test(edge.ev2)\n\n )\n\n }\n\n\n\n pub fn all_true(&self) -> bool {\n\n self.0 && self.1 && self.2\n\n }\n\n\n\n pub fn all_same(&self) -> bool {\n\n self.0 == self.1 && self.0 == self.2\n\n }\n\n}", "file_path": "ferret-graphics/src/ferret_3d/triangle_edge_test.rs", "rank": 58, "score": 11.701734235097156 }, { "content": "\n\n pixel_out.step_y(&triangle);\n\n\n\n if TEST_EDGES {\n\n edge.step_y_by_one(&triangle);\n\n }\n\n }\n\n }\n\n\n\n fn depth_test(&self, x: u32, y: u32, z: f32) -> bool {\n\n !self.depth_test || z > self.get_depth_value(x, y)\n\n }\n\n\n\n fn get_depth_value(&self, x: u32, y: u32) -> f32 {\n\n let index = y * self.screen.get_width() as u32 + x;\n\n self.depth_buffer[index as usize]\n\n }\n\n\n\n fn set_depth_value(&mut self, x: u32, y: u32, value: f32) {\n\n let index = y * self.screen.get_width() as u32 + x;\n\n self.depth_buffer[index as usize] = value;\n\n }\n\n}", "file_path": "ferret-graphics/src/lib.rs", "rank": 59, "score": 11.361155781745264 }, { "content": "use ferret_rs::system::TimerTrait;\n\nuse teensy4_bsp::{SysTick, hal::gpt::GPT};\n\n\n\npub struct Timer {\n\n systick: SysTick,\n\n system_timer: GPT\n\n}\n\n\n\nimpl Timer {\n\n pub fn new(systick: SysTick, system_timer: GPT) -> Self {\n\n Self {\n\n systick,\n\n system_timer\n\n }\n\n }\n\n}\n\n\n\nimpl TimerTrait for Timer {\n\n fn delay(&mut self, millis: u32) 
{\n\n self.systick.delay(millis)\n\n }\n\n\n\n fn measure<F>(&self, act: F) -> u128\n\n where F: FnOnce()\n\n {\n\n let (_, period) = self.system_timer.time(act);\n\n period.as_millis()\n\n }\n\n}", "file_path": "demo-kitferret/src/timer.rs", "rank": 60, "score": 10.832387293552419 }, { "content": "\n\n pub fn draw_string<'b>\n\n (\n\n &mut self,\n\n mut x: u16,\n\n mut y: u16,\n\n text: &'b str,\n\n font: &[u8],\n\n font_color: Color,\n\n bg_color: Option<Color>\n\n )\n\n {\n\n let original_x = x;\n\n for ch in text.chars() {\n\n\n\n if ch == '\\n' {\n\n x = original_x;\n\n y += 9;\n\n continue;\n\n }\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 61, "score": 10.73969914477576 }, { "content": " &mut self,\n\n x: u16,\n\n y: u16,\n\n c: char,\n\n font: &[u8],\n\n font_color: Color,\n\n bg_color: Option<Color>\n\n ) {\n\n let font_color = font_color.as_rgb888();\n\n let mut mask: u8 = 0x01;\n\n\n\n if let Some(bg_color) = bg_color {\n\n let bg_color = bg_color.as_rgb888();\n\n // solid bg\n\n let mut color: (u8, u8, u8);\n\n\n\n for yc in 0..8 {\n\n\n\n // If out of bounds, stop\n\n if yc + y >= self.screen.get_height() {\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 62, "score": 10.723523486415028 }, { "content": "mod floats;\n\nmod integers;\n\n\n\npub use floats::float_to_string;\n\npub use integers::integer_to_string;", "file_path": "ferret-utils/src/convert.rs", "rank": 63, "score": 10.478166403176484 }, { "content": " pins.p5,\n\n pins.p6);\n\n\n\n let depth_buffer = &mut [0.0f32; (SCREEN_WIDTH * SCREEN_HEIGHT) as usize];\n\n\n\n let (_, ipg_hz) = peripherals.ccm.pll1.set_arm_clock(\n\n bsp::hal::ccm::PLL1::ARM_HZ,\n\n &mut peripherals.ccm.handle,\n\n &mut peripherals.dcdc,\n\n );\n\n\n\n let mut cfg = peripherals.ccm.perclk.configure(\n\n &mut peripherals.ccm.handle,\n\n bsp::hal::ccm::perclk::PODF::DIVIDE_1,\n\n bsp::hal::ccm::perclk::CLKSEL::IPG(ipg_hz),\n\n );\n\n\n\n let mut gpt1 = peripherals.gpt1.clock(&mut cfg);\n\n gpt1.set_mode(bsp::hal::gpt::Mode::FreeRunning);\n\n gpt1.set_enable(true);\n\n\n\n let timer = Timer::new(systick, gpt1);\n\n let mut game_loop = GameLoop::new(control, st7735, timer, depth_buffer);\n\n\n\n game_loop.start();\n\n}\n", "file_path": "demo-kitferret/src/main.rs", "rank": 64, "score": 9.928381214642995 }, { "content": "#![no_std]\n\n\n\npub mod color;\n\npub mod fonts;\n\npub mod ferret_3d;\n\npub mod rect;\n\npub mod screen_trait;\n\n\n\nuse crate::screen_trait::ScreenTrait;\n\nuse crate::ferret_3d::pixel_data::PixelData;\n\nuse crate::ferret_3d::triangle_edge_test::TriangleEdgeTest;\n\nuse crate::ferret_3d::edge_data::EdgeData;\n\nuse crate::ferret_3d::triangle_equations::TriangleEquations;\n\nuse crate::ferret_3d::vertex_shader_trait::VertexShaderTrait;\n\nuse crate::ferret_3d::pixel_shader_trait::PixelShaderTrait;\n\nuse crate::color::Color;\n\n\n\nuse nalgebra::SVector;\n\n\n\npub use rect::Rect;\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 65, "score": 9.895686007458815 }, { "content": "use nalgebra::SVector;\n\n\n\npub struct EdgeEquation<const TPARAMETER_COUNT: usize> {\n\n pub a: f32,\n\n pub b: f32,\n\n pub c: f32,\n\n\n\n pub tie: bool\n\n}\n\n\n\nimpl<const TPARAMETER_COUNT: usize> EdgeEquation<TPARAMETER_COUNT> {\n\n pub fn new(v0: &SVector<f32, TPARAMETER_COUNT>, v1: &SVector<f32, TPARAMETER_COUNT>) -> Self {\n\n let a = v0[1] - v1[1];\n\n let b = v1[0] - v0[0];\n\n let c = - (a * (v0[0] + v1[0]) + b * (v0[1] + v1[1])) * 0.5;\n\n\n\n let tie = if a != 0.0 { a > 0.0 } else { b > 0.0 };\n\n\n\n EdgeEquation 
{\n\n a, b, c, tie\n", "file_path": "ferret-graphics/src/ferret_3d/edge_equation.rs", "rank": 66, "score": 9.695162045056192 }, { "content": "pub mod system_traits;\n\n\n\npub use system_traits::ControllerTrait;\n\npub use ferret_graphics::screen_trait::ScreenTrait;\n\npub use system_traits::TimerTrait;\n\npub use system_traits::ButtonState;\n\npub use system_traits::ControllerButton;\n", "file_path": "src/system.rs", "rank": 67, "score": 9.463115715662553 }, { "content": "{\n\n controller: TController,\n\n graphics: FerretGraphics<'a, TScreen>,\n\n timer: TTimer\n\n}\n\n\n\nimpl<'a, TController, TScreen, TTimer> GameLoop<'a, TController, TScreen, TTimer>\n\nwhere TController: ControllerTrait,\n\n TScreen: ScreenTrait,\n\n TTimer: TimerTrait\n\n{\n\n pub fn new(\n\n controller: TController,\n\n screen: TScreen,\n\n timer: TTimer,\n\n depth_buffer: &'a mut [f32]\n\n ) -> Self {\n\n let graphics = FerretGraphics::new(screen, depth_buffer);\n\n\n\n Self {\n", "file_path": "src/lib.rs", "rank": 68, "score": 9.360486273500957 }, { "content": "\n\n buffer[count + decimal_count as usize] = '\\0';\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn convert_float_zero() {\n\n let expected = ['0', '.', '0', '\\0'];\n\n let mut result = ['\\0'; 4];\n\n\n\n float_to_string(0.0, &mut result, 1);\n\n\n\n assert_eq!(result, expected);\n\n }\n\n\n\n #[test]\n\n fn convert_float_positive() {\n", "file_path": "ferret-utils/src/convert/floats.rs", "rank": 69, "score": 9.14051796526925 }, { "content": "\n\n let d = 1.0/pixel_in.data[3];\n\n if (!TEST_EDGES || edge_in.test(&triangle)) && self.depth_test(x_pos as u32, y_pos as u32, d) {\n\n\n\n let pixel_value = pixel_shader.process(&pixel_in);\n\n\n\n let r = (pixel_value.x * 255.0) as u8;\n\n let g = (pixel_value.y * 255.0) as u8;\n\n let b = (pixel_value.z * 255.0) as u8;\n\n\n\n self.screen.set_pixel(x_pos as u16, y_pos as u16, r, g, b);\n\n self.set_depth_value(x_pos as u32, y_pos as u32, d);\n\n }\n\n\n\n pixel_in.step_x(&triangle);\n\n\n\n if TEST_EDGES {\n\n edge_in.step_x_by_one(&triangle);\n\n }\n\n }\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 70, "score": 8.874133840554707 }, { "content": "use crate::ferret_3d::parameter_equation::ParameterEquation;\n\nuse crate::ferret_3d::edge_equation::EdgeEquation;\n\n\n\nuse nalgebra::SVector;\n\n\n\npub struct TriangleEquations<const TPARAMETER_COUNT: usize> {\n\n pub area2: f32,\n\n pub e0: EdgeEquation<TPARAMETER_COUNT>,\n\n pub e1: EdgeEquation<TPARAMETER_COUNT>,\n\n pub e2: EdgeEquation<TPARAMETER_COUNT>,\n\n pub a_var: [ParameterEquation<TPARAMETER_COUNT>; TPARAMETER_COUNT]\n\n}\n\n\n\nimpl<const TPARAMETER_COUNT: usize> TriangleEquations<TPARAMETER_COUNT> {\n\n pub fn new(v0: &SVector<f32, TPARAMETER_COUNT>, v1: &SVector<f32, TPARAMETER_COUNT>, v2: &SVector<f32, TPARAMETER_COUNT>) -> Self{\n\n let e0 = EdgeEquation::new(v0, v1);\n\n let e1 = EdgeEquation::new(v1, v2);\n\n let e2 = EdgeEquation::new(v2, v0);\n\n\n\n let area2 = e0.c + e1.c + e2.c;\n", "file_path": "ferret-graphics/src/ferret_3d/triangle_equations.rs", "rank": 71, "score": 8.686892302879809 }, { "content": "pub struct Rect<T> {\n\n pub left: T,\n\n pub top: T,\n\n pub right: T,\n\n pub bottom: T\n\n}\n\n\n\nimpl<T> Rect<T> {\n\n pub fn new(left: T, top: T, right: T, bottom: T) -> Self {\n\n Self {\n\n left, top, right, bottom\n\n }\n\n }\n\n}\n\n\n\nimpl<T> From<(T, T, T, T)> for Rect<T> {\n\n fn from(tuple: (T, T, T, T)) -> Self {\n\n Rect::new(tuple.0, tuple.1, tuple.2, tuple.3)\n\n }\n\n}", 
"file_path": "ferret-graphics/src/rect.rs", "rank": 72, "score": 8.559085867725361 }, { "content": " }\n\n\n\n self.graphics.draw_string(\n\n 10,\n\n 10,\n\n \"This is a test\\nsecond line \",\n\n &GLCD_FONT,\n\n Color(1.0, 1.0, 1.0),\n\n None);\n\n\n\n let mut string_buffer = ['\\0';20];\n\n\n\n integer_to_string(period, &mut string_buffer, 3);\n\n\n\n self.graphics.draw_char_array(\n\n 10,\n\n 27,\n\n &string_buffer,\n\n 0,\n\n &GLCD_FONT,\n", "file_path": "src/lib.rs", "rank": 73, "score": 8.55248418710975 }, { "content": "use crate::ferret_3d::triangle_equations::TriangleEquations;\n\n\n\n#[derive(Clone)]\n\npub struct PixelData<const TPARAMETER_COUNT: usize> {\n\n pub data: [f32; TPARAMETER_COUNT]\n\n}\n\n\n\nimpl<const TPARAMETER_COUNT: usize> PixelData<TPARAMETER_COUNT> {\n\n pub fn new(tri_eq: &TriangleEquations<TPARAMETER_COUNT>, x: f32, y: f32) -> Self {\n\n\n\n #[allow(deprecated)] // TODO: find another way to create uninitialized array\n\n let mut data: [f32; TPARAMETER_COUNT] = unsafe { core::mem::uninitialized() };\n\n\n\n for index in 0..TPARAMETER_COUNT {\n\n data[index] = tri_eq.a_var[index].evaluate(x, y)\n\n }\n\n\n\n Self {\n\n data\n\n }\n", "file_path": "ferret-graphics/src/ferret_3d/pixel_data.rs", "rank": 74, "score": 8.531451870892903 }, { "content": "use crate::ferret_3d::triangle_equations::TriangleEquations;\n\n\n\n#[derive(Clone)]\n\npub struct EdgeData<const TPARAMETER_COUNT: usize> {\n\n pub x: f32,\n\n pub y: f32,\n\n pub ev0: f32,\n\n pub ev1: f32,\n\n pub ev2: f32\n\n}\n\n\n\nimpl<const TPARAMETER_COUNT: usize> EdgeData<TPARAMETER_COUNT> {\n\n pub fn new(tri_eq: &TriangleEquations<TPARAMETER_COUNT>, x: f32, y: f32) -> Self {\n\n Self {\n\n x, y,\n\n ev0: tri_eq.e0.evaluate(x, y),\n\n ev1: tri_eq.e1.evaluate(x, y),\n\n ev2: tri_eq.e2.evaluate(x, y)\n\n }\n\n }\n", "file_path": "ferret-graphics/src/ferret_3d/edge_data.rs", "rank": 75, "score": 8.495568434545923 }, { "content": " controller,\n\n graphics,\n\n timer\n\n }\n\n }\n\n\n\n pub fn start(&mut self) -> ! 
{\n\n let vertices_0 = [\n\n Vector6::new(-1.0, -1.0, -1.0, 0.0, 0.0, 0.0), // 0\n\n Vector6::new(1.0, -1.0, -1.0, 1.0, 0.0, 0.0), // 1\n\n Vector6::new(-1.0, 1.0, -1.0, 0.0, 1.0, 0.0), // 2\n\n Vector6::new(1.0, 1.0, -1.0, 1.0, 1.0, 0.0), // 3\n\n Vector6::new(1.0, -1.0, 1.0, 0.0, 0.0, 1.0), // 4\n\n Vector6::new(1.0, 1.0, 1.0, 1.0, 0.0, 1.0), // 5\n\n Vector6::new(-1.0, -1.0, 1.0, 0.0, 1.0, 1.0), // 6\n\n Vector6::new(-1.0, 1.0, 1.0, 1.0, 1.0, 1.0), // 7\n\n\n\n Vector6::new(-10.0, 10.0, 100.0, 0.0, 0.0, 1.0), // 8\n\n Vector6::new(10.0, 10.0, 100.0, 1.0, 0.0, 1.0), // 9\n\n Vector6::new(-10.0, 0.0, 0.0, 0.0, 1.0, 1.0), // 10\n", "file_path": "src/lib.rs", "rank": 76, "score": 8.380965325992761 }, { "content": "pub mod edge_data;\n\npub mod edge_equation;\n\npub mod parameter_equation;\n\npub mod pixel_data;\n\npub mod pixel_shader_trait;\n\npub mod triangle_edge_test;\n\npub mod triangle_equations;\n\npub mod vertex_shader_trait;", "file_path": "ferret-graphics/src/ferret_3d.rs", "rank": 77, "score": 8.134443355592786 }, { "content": "#[derive(Copy, Clone)]\n\npub struct Color(pub f32, pub f32, pub f32);\n\n\n\nimpl Color {\n\n pub fn as_rgb888(&self) -> (u8, u8, u8) {\n\n let r = (self.0 * 255.0) as u8;\n\n let g = (self.1 * 255.0) as u8;\n\n let b = (self.2 * 255.0) as u8;\n\n\n\n (r, g, b)\n\n }\n\n}", "file_path": "ferret-graphics/src/color.rs", "rank": 78, "score": 8.002456908019742 }, { "content": "\n\n pub up: ButtonState,\n\n pub down: ButtonState,\n\n pub left: ButtonState,\n\n pub right: ButtonState,\n\n\n\n pub select: ButtonState,\n\n pub start: ButtonState,\n\n pub l: ButtonState,\n\n pub r: ButtonState,\n\n\n\n pub a: ButtonState,\n\n pub b: ButtonState,\n\n pub c: ButtonState,\n\n pub d: ButtonState,\n\n}\n\n\n\nimpl SDL2Controller {\n\n pub fn new(event_pump: EventPump) -> Self {\n\n Self {\n", "file_path": "demo-desktop/src/sdl2_interface.rs", "rank": 79, "score": 7.983868599756142 }, { "content": "\n\n\n\n if count == 0 {\n\n buffer[0] = '0';\n\n count = 1;\n\n } else {\n\n buffer[start..count].reverse();\n\n }\n\n\n\n buffer[count] = '.';\n\n count += 1;\n\n\n\n temp = num;\n\n temp.minus_self_as_int();\n\n\n\n for index in 0..decimal_count {\n\n temp.mul_set(10.0);\n\n let res = temp.int_modulus(10);\n\n buffer[index as usize + count] = (res + '0' as u8) as char;\n\n }\n", "file_path": "ferret-utils/src/convert/floats.rs", "rank": 80, "score": 7.591512615206429 }, { "content": " Color(1.0, 0.0, 0.0),\n\n None\n\n );\n\n\n\n float_to_string(t_z, &mut string_buffer, 10);\n\n self.graphics.draw_char_array(10, 100, &string_buffer, 0, &GLCD_FONT, Color(1.0, 1.0, 1.0), None);\n\n\n\n if self.controller.get_button_status(ControllerButton::Start).is_down() {\n\n self.graphics.draw_string(\n\n 10,\n\n 50,\n\n \"START\",\n\n &GLCD_FONT,\n\n Color(1.0, 1.0, 1.0),\n\n None);\n\n }\n\n\n\n if self.controller.get_button_status(ControllerButton::Select).is_down() {\n\n self.graphics.draw_string(\n\n 50,\n", "file_path": "src/lib.rs", "rank": 81, "score": 7.472712752795594 }, { "content": "use crate::ferret_3d::edge_equation::EdgeEquation;\n\n\n\n#[derive(Copy, Clone)]\n\npub struct ParameterEquation<const TPARAMETER_COUNT: usize> {\n\n pub a: f32,\n\n pub b: f32,\n\n pub c: f32\n\n}\n\n\n\nimpl<const TPARAMETER_COUNT: usize> ParameterEquation<TPARAMETER_COUNT> {\n\n pub fn new(v0: f32,\n\n v1: f32,\n\n v2: f32,\n\n e0: &EdgeEquation<TPARAMETER_COUNT>,\n\n e1: &EdgeEquation<TPARAMETER_COUNT>,\n\n e2: &EdgeEquation<TPARAMETER_COUNT>,\n\n factor: f32\n\n ) -> Self {\n\n let a = factor 
* (v2 * e0.a + v0 * e1.a + v1 * e2.a);\n\n let b = factor * (v2 * e0.b + v0 * e1.b + v1 * e2.b);\n", "file_path": "ferret-graphics/src/ferret_3d/parameter_equation.rs", "rank": 82, "score": 7.04400978463796 }, { "content": " (unit - 10 + 'a' as u8) as char\n\n } else {\n\n (unit + '0' as u8) as char\n\n };\n\n\n\n num.div_set(base);\n\n\n\n index += 1;\n\n }\n\n\n\n if is_negative {\n\n buffer[index] = '-';\n\n index += 1;\n\n }\n\n\n\n buffer[index] = '\\0';\n\n buffer[0..index-1].reverse()\n\n}\n", "file_path": "ferret-utils/src/convert/integers.rs", "rank": 83, "score": 6.918173059564523 }, { "content": "pub mod glcd;", "file_path": "ferret-graphics/src/fonts.rs", "rank": 84, "score": 6.872971751641044 }, { "content": "#![no_std]\n\n\n\npub mod convert;", "file_path": "ferret-utils/src/lib.rs", "rank": 85, "score": 6.714181092054634 }, { "content": "\n\n pub fn empty() -> Self {\n\n Self {\n\n x: 0.0,\n\n y: 0.0,\n\n ev0: 0.0,\n\n ev1: 0.0,\n\n ev2: 0.0\n\n }\n\n }\n\n\n\n pub fn step_x(&mut self, tri_eq: &TriangleEquations<TPARAMETER_COUNT>, step_size: f32) {\n\n self.ev0 = tri_eq.e0.step_x(self.ev0, step_size);\n\n self.ev1 = tri_eq.e1.step_x(self.ev1, step_size);\n\n self.ev2 = tri_eq.e2.step_x(self.ev2, step_size);\n\n }\n\n\n\n pub fn step_x_by_one(&mut self, tri_eq: &TriangleEquations<TPARAMETER_COUNT>) {\n\n self.ev0 = tri_eq.e0.step_x_by_one(self.ev0);\n\n self.ev1 = tri_eq.e1.step_x_by_one(self.ev1);\n", "file_path": "ferret-graphics/src/ferret_3d/edge_data.rs", "rank": 86, "score": 6.628727952039947 }, { "content": " Pressed = 2,\n\n Held = 3\n\n}\n\n\n\nimpl ButtonState {\n\n #[inline(always)]\n\n pub fn is_down(&self) -> bool {\n\n *self == ButtonState::Pressed\n\n || *self == ButtonState::Held\n\n }\n\n\n\n #[inline(always)]\n\n pub fn is_up(&self) -> bool {\n\n !self.is_down()\n\n }\n\n}\n\n\n", "file_path": "src/system/system_traits.rs", "rank": 87, "score": 6.486269741445838 }, { "content": " pin_selector_1: Ps1,\n\n pin_selector_2: Ps2,\n\n mut pin_input_0: Pi0,\n\n mut pin_input_1: Pi1,\n\n mut pin_input_2: Pi2,\n\n mut pin_input_3: Pi3,\n\n ) -> Self {\n\n const PULL_DOWN: Config = Config::zero()\n\n .set_hysteresis(Hysteresis::Enabled)\n\n .set_pull_keep(PullKeep::Enabled)\n\n .set_pull_keep_select(PullKeepSelect::Pull)\n\n .set_pullupdown(PullUpDown::Pulldown100k);\n\n\n\n configure(&mut pin_input_0, PULL_DOWN);\n\n configure(&mut pin_input_1, PULL_DOWN);\n\n configure(&mut pin_input_2, PULL_DOWN);\n\n configure(&mut pin_input_3, PULL_DOWN);\n\n\n\n let selector0 = GPIO::new(pin_selector_0);\n\n let selector1 = GPIO::new(pin_selector_1);\n", "file_path": "demo-kitferret/src/controller.rs", "rank": 88, "score": 6.480423413424816 }, { "content": "\n\n let (_, _, _, spi4_builder) = peripherals.spi.clock(\n\n &mut peripherals.ccm.handle,\n\n bsp::hal::ccm::spi::ClockSelect::Pll2,\n\n bsp::hal::ccm::spi::PrescalarSelect::LPSPI_PODF_0,\n\n );\n\n\n\n // TODO: use DMA to better performance maybe? 
https://github.com/mciantyre/teensy4-rs/blob/master/examples/dma_spi.rs\n\n let mut spi4 = spi4_builder.build(pins.p11, pins.p12, pins.p13);\n\n spi4.enable_chip_select_0(pins.p10);\n\n\n\n match spi4.set_clock_speed(bsp::hal::spi::ClockSpeed(130_000_000)) {\n\n Ok(()) => {}\n\n Err(_) => {\n\n loop {\n\n core::hint::spin_loop()\n\n }\n\n }\n\n };\n\n\n", "file_path": "demo-kitferret/src/main.rs", "rank": 89, "score": 6.309334681126725 }, { "content": "#[derive(PartialEq, Eq)]\n\npub enum InputState {\n\n Idle,\n\n Released,\n\n Pressed,\n\n Held\n\n}\n\n\n\nimpl InputState {\n\n #[inline(always)]\n\n pub fn is_down(&self) -> bool {\n\n *self == InputState::Pressed || *self == InputState::Held\n\n }\n\n\n\n #[inline(always)]\n\n pub fn is_up(&self) -> bool {\n\n !self.is_down()\n\n }\n\n}", "file_path": "demo-kitferret/src/input/input_state.rs", "rank": 90, "score": 6.3061434554046 }, { "content": "use ferret_rs::GameLoop;\n\n\n\nmod sdl2_interface;\n\n\n\nuse sdl2_interface::SDL2Controller;\n\nuse sdl2_interface::SDL2Screen;\n\nuse sdl2_interface::SDL2Timer;\n\n\n", "file_path": "demo-desktop/src/main.rs", "rank": 91, "score": 6.248968247776082 }, { "content": "#![no_std]\n\n\n\npub mod system;\n\n\n\nuse nalgebra::SVector;\n\nuse ferret_utils::convert::float_to_string;\n\nuse ferret_graphics::ferret_3d::pixel_data::PixelData;\n\nuse ferret_graphics::fonts::glcd::GLCD_FONT;\n\nuse nalgebra::Vector3;\n\nuse nalgebra::Vector4;\n\nuse nalgebra::Matrix4;\n\nuse ferret_graphics::ferret_3d::vertex_shader_trait::VertexShaderTrait;\n\nuse nalgebra::Vector6;\n\nuse ferret_graphics::ferret_3d::pixel_shader_trait::PixelShaderTrait;\n\nuse ferret_graphics::color::Color;\n\nuse ferret_graphics::FerretGraphics;\n\nuse ferret_utils::convert::integer_to_string;\n\n\n\nuse crate::system::ControllerTrait;\n\nuse crate::system::ControllerButton;\n\nuse crate::system::ButtonState;\n\nuse crate::system::ScreenTrait;\n\nuse crate::system::TimerTrait;\n\n\n", "file_path": "src/lib.rs", "rank": 92, "score": 6.092808033813268 }, { "content": " Vector6::new(10.0, 0.0, 0.0, 1.0, 1.0, 1.0), // 11\n\n ];\n\n\n\n let indexes = [(0, 1, 2), (2, 1, 3),\n\n (1, 4, 3), (3, 4, 5),\n\n (4, 6, 5), (5, 6, 7),\n\n (6, 0, 7), (7, 0, 2),\n\n (0, 4, 1), (4, 0, 6),\n\n (2, 3, 5), (5, 7, 2),\n\n // (8, 9, 10), (10, 9, 8),\n\n ];\n\n\n\n let texture = include_bytes!(\"../assets/box.raw\");\n\n\n\n let mut vertex_shader = SimpleVertexShader::new();\n\n let mut pixel_shader = SimplePixelShader::new();\n\n\n\n pixel_shader.texture = Some(texture);\n\n pixel_shader.tex_wid = 128;\n\n pixel_shader.tex_hei = 128;\n", "file_path": "src/lib.rs", "rank": 93, "score": 5.8524985247342665 }, { "content": "\n\n let mut index = 0;\n\n\n\n let mut rot_x = 0.0f32;\n\n let mut rot_y = 0.0f32;\n\n let mut t_x = 0.0;\n\n let mut t_y = 0.0;\n\n let mut t_z = 3.0;\n\n let mut s = 1.0;\n\n let mut auto_rotate = false;\n\n\n\n let view_matrix = Matrix4::<f32>::identity();\n\n\n\n let fovy = 3.1419 * 90.0 / 180.0;\n\n let aspect = 160.0/128.0;\n\n let near = 2.0;\n\n let far = 100.0;\n\n\n\n let projection_matrix =\n\n nalgebra::Perspective3::new(aspect, fovy, near, far).into_inner()\n", "file_path": "src/lib.rs", "rank": 94, "score": 5.776365803246682 }, { "content": " }\n\n }\n\n\n\n pub fn evaluate(&self, x: f32, y: f32) -> f32 {\n\n self.a * x + self.b * y + self.c\n\n }\n\n\n\n pub fn test(&self, v: f32) -> bool {\n\n v > 0.0 || (v == 0.0 && self.tie)\n\n }\n\n\n\n pub fn step_x_by_one(&self, v: f32) -> f32 {\n\n v + self.a\n\n }\n\n\n\n pub fn 
step_x(&self, v: f32, step_size: f32) -> f32 {\n\n v + self.a * step_size\n\n }\n\n\n\n pub fn step_y_by_one(&self, v: f32) -> f32 {\n\n v + self.b\n\n }\n\n\n\n pub fn step_y(&self, v: f32, step_size: f32) -> f32 {\n\n v + self.b * step_size\n\n }\n\n}\n\n\n", "file_path": "ferret-graphics/src/ferret_3d/edge_equation.rs", "rank": 95, "score": 5.589041700102353 }, { "content": " * Matrix4::new_nonuniform_scaling(&nalgebra::vector!(1.0, -1.0, -1.0));\n\n\n\n let mut period = 0;\n\n\n\n self.graphics.set_depth_test(true);\n\n\n\n loop {\n\n period = self.timer.measure(|| {\n\n self.controller.update();\n\n\n\n if index == 0 {\n\n index = 1;\n\n } else {\n\n index = 0;\n\n }\n\n\n\n self.graphics.clear_color_buffer(Color (0.0, 0.0, 0.0));\n\n\n\n if self.controller.get_button_status(ControllerButton::R).is_down() {\n\n if self.controller.get_button_status(ControllerButton::B).is_down() {\n", "file_path": "src/lib.rs", "rank": 96, "score": 5.338552630880262 }, { "content": " let c = factor * (v2 * e0.c + v0 * e1.c + v1 * e2.c);\n\n\n\n Self {\n\n a, b, c\n\n }\n\n }\n\n\n\n pub fn evaluate(&self, x: f32, y: f32) -> f32 {\n\n self.a * x + self.b * y + self.c\n\n }\n\n\n\n pub fn step_x(&self, v: f32, step_size: f32) -> f32 {\n\n v + self.a * step_size\n\n }\n\n\n\n pub fn step_y(&self, v: f32, step_size: f32) -> f32 {\n\n v + self.b * step_size\n\n }\n\n}", "file_path": "ferret-graphics/src/ferret_3d/parameter_equation.rs", "rank": 97, "score": 5.267691600279711 }, { "content": " break;\n\n }\n\n\n\n for xc in 0..5 {\n\n\n\n // If out of bounds, stop\n\n if xc + x >= self.screen.get_width() {\n\n break;\n\n }\n\n\n\n if font[c as usize * 5 + xc as usize] & mask != 0 {\n\n color = font_color;\n\n } else {\n\n color = bg_color;\n\n }\n\n\n\n self.screen.set_pixel(\n\n x + xc as u16,\n\n y + yc as u16,\n\n color.0,\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 98, "score": 5.24498827714654 }, { "content": " let mut edge01 = edge00.clone();\n\n edge01.step_y(&triangle, step_size);\n\n\n\n let mut edge10 = edge00.clone();\n\n edge10.step_x(&triangle, step_size);\n\n\n\n let mut edge11 = edge01.clone();\n\n edge11.step_x(&triangle, step_size);\n\n\n\n let e00 = TriangleEdgeTest::new(&triangle, &edge00);\n\n let e01 = TriangleEdgeTest::new(&triangle, &edge01);\n\n let e10 = TriangleEdgeTest::new(&triangle, &edge10);\n\n let e11 = TriangleEdgeTest::new(&triangle, &edge11);\n\n\n\n let e00_all_true = e00.all_true();\n\n let e01_all_true = e01.all_true();\n\n let e10_all_true = e10.all_true();\n\n let e11_all_true = e11.all_true();\n\n\n\n let all_test_false =\n", "file_path": "ferret-graphics/src/lib.rs", "rank": 99, "score": 5.238716443456275 } ]
Rust
macros/src/command/slash.rs
noituri/poise
45f27a86990a9443077c0cb457bd7600af625b3b
use syn::spanned::Spanned as _;

use super::{extract_option_type, extract_vec_type, Invocation};

fn generate_options(inv: &Invocation) -> proc_macro2::TokenStream {
    let check = match &inv.more.check {
        Some(check) => {
            quote::quote! { Some(|ctx| Box::pin(#check(ctx.into()))) }
        }
        None => quote::quote! { None },
    };
    let on_error = match &inv.more.on_error {
        Some(on_error) => quote::quote! {
            Some(|err, ctx| Box::pin(#on_error(err, ::poise::CommandErrorContext::Application(ctx))))
        },
        None => quote::quote! { None },
    };
    let ephemeral = inv.more.ephemeral;

    quote::quote! {
        ::poise::ApplicationCommandOptions {
            check: #check,
            on_error: #on_error,
            ephemeral: #ephemeral,
        }
    }
}

pub fn generate_slash_command_spec(
    inv: &Invocation,
) -> Result<proc_macro2::TokenStream, darling::Error> {
    let command_name = &inv.command_name;
    let description = inv.description.as_ref().ok_or_else(|| {
        syn::Error::new(
            inv.function.sig.span(),
            "slash commands must have a description (doc comment)",
        )
    })?;

    let mut parameter_structs = Vec::new();
    for param in inv.parameters {
        let description = param.more.description.as_ref().ok_or_else(|| {
            syn::Error::new(
                param.span,
                "slash command parameters must have a description",
            )
        })?;

        let (mut required, type_) =
            match extract_option_type(&param.type_).or_else(|| extract_vec_type(&param.type_)) {
                Some(t) => (false, t),
                None => (true, &param.type_),
            };

        if param.more.flag {
            required = false;
        }

        let param_name = &param.name;
        let autocomplete_callback = match &param.more.autocomplete {
            Some(autocomplete_fn) => {
                quote::quote! {
                    Some(|
                        ctx: poise::ApplicationContext<'_, _, _>,
                        interaction: &poise::serenity_prelude::AutocompleteInteraction,
                        options: &[poise::serenity_prelude::ApplicationCommandInteractionDataOption],
                    | Box::pin(async move {
                        use ::poise::futures::{Stream, StreamExt};

                        let choice = match options
                            .iter()
                            .find(|option| option.focused && option.name == stringify!(#param_name))
                        {
                            Some(x) => x,
                            None => return Ok(()),
                        };

                        let json_value = choice.value
                            .as_ref()
                            .ok_or(::poise::SlashArgError::CommandStructureMismatch("expected argument value"))?;
                        let partial_input = (&&&&&std::marker::PhantomData::<#type_>).extract_partial(json_value)?;

                        let choices_stream = ::poise::into_stream!(
                            #autocomplete_fn(ctx.into(), partial_input).await
                        );
                        let choices_json = choices_stream
                            .take(25)
                            .map(|value| poise::AutocompleteChoice::from(value))
                            .map(|choice| serde_json::json!({
                                "name": choice.name,
                                "value": (&&&&&std::marker::PhantomData::<#type_>).into_json(choice.value),
                            }))
                            .collect()
                            .await;
                        let choices_json = poise::serde_json::Value::Array(choices_json);

                        if let Err(e) = interaction
                            .create_autocomplete_response(
                                &ctx.discord.http,
                                |b| b.set_choices(choices_json),
                            )
                            .await
                        {
                            println!("Warning: couldn't send autocomplete response: {}", e);
                        }

                        Ok(())
                    }))
                }
            }
            None => quote::quote! { None },
        };

        let is_autocomplete = param.more.autocomplete.is_some();
        parameter_structs.push((
            quote::quote! {
                ::poise::SlashCommandParameter {
                    builder: |o| (&&&&&std::marker::PhantomData::<#type_>).create(o)
                        .required(#required)
                        .name(stringify!(#param_name))
                        .description(#description)
                        .set_autocomplete(#is_autocomplete),
                    autocomplete_callback: #autocomplete_callback,
                }
            },
            required,
        ));
    }
    parameter_structs.sort_by_key(|(_, required)| !required);
    let parameter_structs = parameter_structs
        .into_iter()
        .map(|(builder, _)| builder)
        .collect::<Vec<_>>();

    let param_names = inv.parameters.iter().map(|p| &p.name).collect::<Vec<_>>();
    let param_types = inv
        .parameters
        .iter()
        .map(|p| match p.more.flag {
            true => syn::parse_quote! { FLAG },
            false => p.type_.clone(),
        })
        .collect::<Vec<_>>();
    let options = generate_options(inv);
    Ok(quote::quote! {
        ::poise::SlashCommand {
            name: #command_name,
            description: #description,
            parameters: {
                use ::poise::{SlashArgumentHack, AutocompletableHack};
                vec![ #( #parameter_structs, )* ]
            },
            action: |ctx, args| Box::pin(async move {
                #[allow(clippy::needless_question_mark)]
                let ( #( #param_names, )* ) = ::poise::parse_slash_args!(
                    ctx.discord, ctx.interaction.guild_id(), ctx.interaction.channel_id(),
                    args => #( (#param_names: #param_types), )*
                ).await?;

                inner(ctx.into(), #( #param_names, )*).await
            }),
            id: std::sync::Arc::clone(&command_id),
            options: #options,
        }
    })
}

pub fn generate_context_menu_command_spec(
    inv: &Invocation,
    name: &str,
) -> Result<proc_macro2::TokenStream, darling::Error> {
    if inv.parameters.len() != 1 {
        return Err(syn::Error::new(
            inv.function.sig.inputs.span(),
            "Context menu commands require exactly one parameter",
        )
        .into());
    }

    let param_type = &inv.parameters[0].type_;

    let options = generate_options(inv);
    Ok(quote::quote! {
        ::poise::ContextMenuCommand {
            name: #name,
            action: <#param_type as ::poise::ContextMenuParameter<_, _>>::to_action(|ctx, value| {
                Box::pin(async move { inner(ctx.into(), value).await })
            }),
            id: std::sync::Arc::clone(&command_id),
            options: #options,
        }
    })
}
use syn::spanned::Spanned as _;

use super::{extract_option_type, extract_vec_type, Invocation};

fn generate_options(inv: &Invocation) -> proc_macro2::TokenStream {
    let check = match &inv.more.check {
        Some(check) => {
            quote::quote! { Some(|ctx| Box::pin(#check(ctx.into()))) }
        }
        None => quote::quote! { None },
    };
    let on_error = match &inv.more.on_error {
        Some(on_error) => quote::quote! {
            Some(|err, ctx| Box::pin(#on_error(err, ::poise::CommandErrorContext::Application(ctx))))
        },
        None => quote::quote! { None },
    };
    let ephemeral = inv.more.ephemeral;

    quote::quote! {
        ::poise::ApplicationCommandOptions {
            check: #check,
            on_error: #on_error,
            ephemeral: #ephemeral,
        }
    }
}

pub fn generate_slash_command_spec(
    inv: &Invocation,
) -> Result<proc_macro2::TokenStream, darling::Error> {
    let command_name = &inv.command_name;
    let description = inv.description.as_ref().ok_or_else(|| {
        syn::Error::new(
            inv.function.sig.span(),
            "slash commands must have a description (doc comment)",
        )
    })?;

    let mut parameter_structs = Vec::new();
    for param in inv.parameters {
        let description = param.more.description.as_ref().ok_or_else(|| {
            syn::Error::new(
                param.span,
                "slash command parameters must have a description",
            )
        })?;

        let (mut required, type_) =
            match extract_option_type(&param.type_).or_else(|| extract_vec_type(&param.type_)) {
                Some(t) => (false, t),
                None => (true, &param.type_),
            };

        if param.more.flag {
            required = false;
        }

        let param_name = &param.name;
        let autocomplete_callback = match &param.more.autocomplete {
            Some(autocomplete_fn) => {
                quote::quote! {
                    Some(|
                        ctx: poise::ApplicationContext<'_, _, _>,
                        interaction: &poise::serenity_prelude::AutocompleteInteraction,
                        options: &[poise::serenity_prelude::ApplicationCommandInteractionDataOption],
                    | Box::pin(async move {
                        use ::poise::futures::{Stream, StreamExt};

                        let choice = match options
                            .iter()
                            .find(|option| option.focused && option.name == stringify!(#param_name))
                        {
                            Some(x) => x,
                            None => return Ok(()),
                        };

                        let json_value = choice.value
                            .as_ref()
                            .ok_or(::poise::SlashArgError::CommandStructureMismatch("expected argument value"))?;
                        let partial_input = (&&&&&std::marker::PhantomData::<#type_>).extract_partial(json_value)?;

                        let choices_stream = ::poise::into_stream!(
                            #autocomplete_fn(ctx.into(), partial_input).await
                        );
                        let choices_json = choices_stream
                            .take(25)
                            .map(|value| poise::AutocompleteChoice::from(value))
                            .map(|choice| serde_json::json!({
                                "name": choice.name,
                                "value": (&&&&&std::marker::PhantomData::<#type_>).into_json(choice.value),
                            }))
                            .collect()
                            .await;
                        let choices_json = poise::serde_json::Value::Array(choices_json);

                        if let Err(e) = interaction
                            .create_autocomplete_response(
                                &ctx.discord.http,
                                |b| b.set_choices(choices_json),
                            )
                            .await
                        {
                            println!("Warning: couldn't send autocomplete response: {}", e);
                        }

                        Ok(())
                    }))
                }
            }
            None => quote::quote! { None },
        };

        let is_autocomplete = param.more.autocomplete.is_some();
        parameter_structs.push((
            quote::quote! {
                ::poise::SlashCommandParameter {
                    builder: |o| (&&&&&std::marker::PhantomData::<#type_>).create(o)
                        .required(#required)
                        .name(stringify!(#param_name))
                        .description(#description)
                        .set_autocomplete(#is_autocomplete),
                    autocomplete_callback: #autocomplete_callback,
                }
            },
            required,
        ));
    }
    parameter_structs.sort_by_key(|(_, required)| !required);
    let parameter_structs = parameter_structs
        .into_iter()
        .map(|(builder, _)| builder)
        .collect::<Vec<_>>();

    let param_names = inv.parameters.iter().map(|p| &p.name).collect::<Vec<_>>();
    let param_types = inv
        .parameters
        .iter()
        .map(|p| match p.more.flag {
            true => syn::parse_quote! { FLAG },
            false => p.type_.clone(),
        })
        .collect::<Vec<_>>();
    let options = generate_options(inv);
    Ok(quote::quote! {
        ::poise::SlashCommand {
            name: #command_name,
            description: #description,
            parameters: {
                use ::poise::{SlashArgumentHack, AutocompletableHack};
                vec![ #( #parameter_structs, )* ]
            },
            action: |ctx, args| Box::pin(async move {
                #[allow(clippy::needless_question_mark)]
                let ( #( #param_names, )* ) = ::poise::parse_slash_args!(
                    ctx.discord, ctx.interaction.guild_id(), ctx.interaction.channel_id(),
                    args => #( (#param_names: #param_types), )*
                ).await?;

                inner(ctx.into(), #( #param_names, )*).await
            }),
            id: std::sync::Arc::clone(&command_id),
            options: #options,
        }
    })
}

pub fn generate_context_menu_command_spec(
    inv: &Invocation,
    name: &str,
) -> Result<proc_macro2::TokenStream, darling::Error> {
    if inv.parameters.len() != 1 {
        return Err(syn::Error::new(
            inv.function.sig.inputs.span(),
            "Context menu commands require exactly one parameter",
        )
        .into());
    }

    let param_type = &inv.parameters[0].type_;

    let options = generate_options(inv);
}
Ok(quote::quote! {
        ::poise::ContextMenuCommand {
            name: #name,
            action: <#param_type as ::poise::ContextMenuParameter<_, _>>::to_action(|ctx, value| {
                Box::pin(async move { inner(ctx.into(), value).await })
            }),
            id: std::sync::Arc::clone(&command_id),
            options: #options,
        }
    })
call_expression
[ { "content": "/// Implemented for all types that can be used in a context menu command\n\npub trait ContextMenuParameter<U, E> {\n\n /// Convert an action function pointer that takes Self as an argument into the appropriate\n\n /// [`crate::ContextMenuCommandAction`] variant.\n\n fn to_action(\n\n action: fn(crate::ApplicationContext<'_, U, E>, Self) -> BoxFuture<'_, Result<(), E>>,\n\n ) -> crate::ContextMenuCommandAction<U, E>;\n\n}\n\n\n\nimpl<U, E> ContextMenuParameter<U, E> for serenity::User {\n\n fn to_action(\n\n action: fn(crate::ApplicationContext<'_, U, E>, Self) -> BoxFuture<'_, Result<(), E>>,\n\n ) -> crate::ContextMenuCommandAction<U, E> {\n\n crate::ContextMenuCommandAction::User(action)\n\n }\n\n}\n\n\n\nimpl<U, E> ContextMenuParameter<U, E> for serenity::Message {\n\n fn to_action(\n\n action: fn(crate::ApplicationContext<'_, U, E>, Self) -> BoxFuture<'_, Result<(), E>>,\n\n ) -> crate::ContextMenuCommandAction<U, E> {\n\n crate::ContextMenuCommandAction::Message(action)\n\n }\n\n}\n", "file_path": "src/slash/argument/context_menu.rs", "rank": 0, "score": 221056.37470939048 }, { "content": "fn find_matching_application_command<'a, 'b, U, E>(\n\n framework: &'a crate::Framework<U, E>,\n\n interaction: &'b serenity::ApplicationCommandInteractionData,\n\n) -> Option<(\n\n crate::ApplicationCommand<'a, U, E>,\n\n &'b [serenity::ApplicationCommandInteractionDataOption],\n\n)> {\n\n let commands = &framework.options.application_options.commands;\n\n commands.iter().find_map(|cmd| match cmd {\n\n crate::ApplicationCommandTree::ContextMenu(cmd) => {\n\n let application_command_type = match &cmd.action {\n\n crate::ContextMenuCommandAction::User(_) => serenity::ApplicationCommandType::User,\n\n crate::ContextMenuCommandAction::Message(_) => {\n\n serenity::ApplicationCommandType::Message\n\n }\n\n };\n\n if cmd.name == interaction.name && interaction.kind == application_command_type {\n\n Some((\n\n crate::ApplicationCommand::ContextMenu(cmd),\n\n &*interaction.options,\n", "file_path": "src/framework/dispatch/slash.rs", "rank": 3, "score": 171355.13441681798 }, { "content": "pub fn command(\n\n args: CommandOptions,\n\n mut function: syn::ItemFn,\n\n) -> Result<TokenStream, darling::Error> {\n\n // Verify that the function is marked async. 
Not strictly needed, but avoids confusion\n\n if function.sig.asyncness.is_none() {\n\n return Err(syn::Error::new(function.sig.span(), \"command function must be async\").into());\n\n }\n\n\n\n // Verify that at least one command type was enabled\n\n if !args.prefix_command && !args.slash_command && args.context_menu_command.is_none() {\n\n let err_msg = \"you must enable at least one of `prefix_command`, `slash_command` or \\\n\n `context_menu_command`\";\n\n return Err(syn::Error::new(proc_macro2::Span::call_site(), err_msg).into());\n\n }\n\n\n\n // Collect argument names/types/attributes to insert into generated function\n\n let mut parameters = Vec::new();\n\n for command_param in function.sig.inputs.iter_mut().skip(1) {\n\n let pattern = match command_param {\n", "file_path": "macros/src/command/mod.rs", "rank": 5, "score": 156338.036321713 }, { "content": "#[doc(hidden)]\n\npub trait AutocompletableHack<T> {\n\n type Partial;\n\n\n\n fn extract_partial(self, value: &serde_json::Value) -> Result<Self::Partial, SlashArgError>;\n\n\n\n fn into_json(self, value: T) -> serde_json::Value;\n\n}\n\n\n\n/// Handles arbitrary types that can be parsed from string.\n\n#[async_trait::async_trait]\n\nimpl<T> AutocompletableHack<T> for PhantomData<T>\n\nwhere\n\n T: serenity::ArgumentConvert + ToString + Send + Sync,\n\n T::Err: std::error::Error + Send + Sync + 'static,\n\n{\n\n type Partial = String;\n\n\n\n fn extract_partial(self, value: &serde_json::Value) -> Result<String, SlashArgError> {\n\n let string = value\n\n .as_str()\n", "file_path": "src/slash/argument/autocomplete/autocompletable.rs", "rank": 6, "score": 154868.09890079664 }, { "content": "fn make_command_id(inv: &Invocation) -> proc_macro2::TokenStream {\n\n let identifying_name = &inv.more.identifying_name;\n\n let identifying_name = identifying_name.as_ref().unwrap_or(&inv.command_name);\n\n\n\n let description = wrap_option(inv.description);\n\n let hide_in_help = &inv.more.hide_in_help;\n\n let category = wrap_option(inv.more.category.as_ref());\n\n\n\n let global_cooldown = wrap_option(inv.more.global_cooldown);\n\n let user_cooldown = wrap_option(inv.more.user_cooldown);\n\n let guild_cooldown = wrap_option(inv.more.guild_cooldown);\n\n let channel_cooldown = wrap_option(inv.more.channel_cooldown);\n\n let member_cooldown = wrap_option(inv.more.member_cooldown);\n\n\n\n let required_permissions = inv.required_permissions;\n\n let required_bot_permissions = inv.required_bot_permissions;\n\n let owners_only = inv.more.owners_only;\n\n\n\n quote::quote! 
{\n\n ::poise::CommandId {\n", "file_path": "macros/src/command/mod.rs", "rank": 7, "score": 152872.95259692945 }, { "content": "pub fn slash_choice_parameter(input: syn::DeriveInput) -> Result<TokenStream, darling::Error> {\n\n let enum_ = match input.data {\n\n syn::Data::Enum(x) => x,\n\n _ => {\n\n return Err(syn::Error::new(\n\n input.ident.span(),\n\n \"Only enums can be used for choice parameters\",\n\n )\n\n .into())\n\n }\n\n };\n\n\n\n let mut variant_idents: Vec<proc_macro2::Ident> = Vec::new();\n\n let mut display_strings: Vec<String> = Vec::new();\n\n let mut more_display_strings = Vec::new();\n\n\n\n for variant in enum_.variants {\n\n if !matches!(&variant.fields, syn::Fields::Unit) {\n\n return Err(syn::Error::new(\n\n variant.fields.span(),\n", "file_path": "macros/src/slash_choice_parameter.rs", "rank": 8, "score": 146712.56453507027 }, { "content": "fn extract_help_from_doc_comments(attrs: &[syn::Attribute]) -> (Option<String>, Option<String>) {\n\n let mut doc_lines = String::new();\n\n for attr in attrs {\n\n if attr.path == quote::format_ident!(\"doc\").into() {\n\n for token in attr.tokens.clone() {\n\n if let Ok(literal) = syn::parse2::<syn::LitStr>(token.into()) {\n\n let literal = literal.value();\n\n let literal = literal.strip_prefix(' ').unwrap_or(&literal);\n\n\n\n doc_lines += literal;\n\n doc_lines += \"\\n\";\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Apply newline escapes\n\n let doc_lines = doc_lines.trim().replace(\"\\\\\\n\", \"\");\n\n\n\n if doc_lines.is_empty() {\n\n return (None, None);\n\n }\n\n\n\n let mut paragraphs = doc_lines.splitn(2, \"\\n\\n\");\n\n let inline_help = paragraphs.next().unwrap().replace(\"\\n\", \" \");\n\n let multiline_help = paragraphs.next().map(|x| x.to_owned());\n\n\n\n (Some(inline_help), multiline_help)\n\n}\n\n\n", "file_path": "macros/src/command/mod.rs", "rank": 9, "score": 146340.6046568732 }, { "content": "/// Types that can be marked autocompletable in a slash command parameter.\n\n///\n\n/// Includes almost all types that can be used as a slash command parameter in general,\n\n/// except some built-in model types (User, Member, Role...)\n\npub trait Autocompletable {\n\n /// Type of the partial input. This should be `Self` except in cases where a partial input\n\n /// cannot be parsed into `Self` (e.g. 
an IP address)\n\n type Partial;\n\n\n\n /// Try extracting the partial input from the JSON value\n\n ///\n\n /// Equivalent to [`crate::SlashArgument::extract`]\n\n fn extract_partial(value: &serde_json::Value) -> Result<Self::Partial, SlashArgError>;\n\n\n\n /// Serialize an autocompletion choice as a JSON value.\n\n ///\n\n /// This is the counterpart to [`Self::extract_partial`]\n\n fn into_json(self) -> serde_json::Value;\n\n}\n\n\n", "file_path": "src/slash/argument/autocomplete/autocompletable.rs", "rank": 10, "score": 144719.19077394033 }, { "content": "#[proc_macro_derive(SlashChoiceParameter, attributes(name))]\n\npub fn slash_choice_parameter(input: TokenStream) -> TokenStream {\n\n let enum_ = syn::parse_macro_input!(input as syn::DeriveInput);\n\n\n\n match slash_choice_parameter::slash_choice_parameter(enum_) {\n\n Ok(x) => x,\n\n Err(e) => e.write_errors().into(),\n\n }\n\n}\n", "file_path": "macros/src/lib.rs", "rank": 11, "score": 144325.2509983882 }, { "content": "fn prepare_command_definition<U, E>(\n\n definition: crate::CommandDefinition<U, E>,\n\n meta_builder: impl FnOnce(&mut CommandBuilder<U, E>) -> &mut CommandBuilder<U, E>,\n\n) -> CommandBuilder<U, E> {\n\n // Unpack command implementations\n\n let crate::CommandDefinition {\n\n prefix: mut prefix_command,\n\n slash: mut slash_command,\n\n context_menu: mut context_menu_command,\n\n } = definition;\n\n\n\n // Make sure every implementation points to the same CommandId (they may have different\n\n // IDs if each implemented comes from a different function, like rustbot's rustify)\n\n let id = if let Some(prefix_command) = &prefix_command {\n\n prefix_command.id.clone()\n\n } else if let Some(slash_command) = &slash_command {\n\n slash_command.id.clone()\n\n } else if let Some(context_menu_command) = &context_menu_command {\n\n context_menu_command.id.clone()\n\n } else {\n", "file_path": "src/structs/framework_options.rs", "rank": 12, "score": 139194.32441644947 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_key_value_args() {\n\n for &(string, pairs, remaining_args) in &[\n\n (\n\n r#\"key1=value1 key2=value2\"#,\n\n &[(\"key1\", \"value1\"), (\"key2\", \"value2\")][..],\n\n \"\",\n\n ),\n\n (\n\n r#\"\"key 1\"=value\\ 1 key\\ 2=\"value 2\"\"#,\n\n &[(\"key 1\", \"value 1\"), (\"key 2\", \"value 2\")],\n\n \"\",\n\n ),\n\n (\n\n r#\"key1\"=value1 key2=value2\"#,\n\n &[],\n\n r#\"key1\"=value1 key2=value2\"#,\n\n ),\n\n (r#\"dummyval\"#, &[], \"dummyval\"),\n\n (r#\"dummyval=\"#, &[(\"dummyval\", \"\")], \"\"),\n\n ] {\n", "file_path": "src/prefix/argument/key_value_args.rs", "rank": 13, "score": 139087.71503089127 }, { "content": "// ngl this is ugly\n\n// transforms a type of form `OuterType<T>` into `T`\n\nfn extract_type_parameter<'a>(outer_type: &str, t: &'a syn::Type) -> Option<&'a syn::Type> {\n\n if let syn::Type::Path(path) = t {\n\n if path.path.segments.len() == 1 {\n\n let path = &path.path.segments[0];\n\n if path.ident == outer_type {\n\n if let syn::PathArguments::AngleBracketed(generics) = &path.arguments {\n\n if generics.args.len() == 1 {\n\n if let syn::GenericArgument::Type(t) = &generics.args[0] {\n\n return Some(t);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "macros/src/command/mod.rs", "rank": 14, "score": 136421.04241012444 }, { "content": "#[proc_macro_attribute]\n\npub fn command(args: TokenStream, function: TokenStream) -> TokenStream {\n\n let args = syn::parse_macro_input!(args as Vec<syn::NestedMeta>);\n\n let args = match <command::CommandOptions as 
darling::FromMeta>::from_list(&args) {\n\n Ok(x) => x,\n\n Err(e) => return e.write_errors().into(),\n\n };\n\n\n\n let function = syn::parse_macro_input!(function as syn::ItemFn);\n\n\n\n match command::command(args, function) {\n\n Ok(x) => x,\n\n Err(e) => e.write_errors().into(),\n\n }\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 15, "score": 129844.76592401996 }, { "content": "pub fn generate_prefix_command_spec(\n\n inv: &Invocation,\n\n) -> Result<proc_macro2::TokenStream, darling::Error> {\n\n let explanation = match &inv.more.explanation_fn {\n\n Some(explanation_fn) => quote::quote! { Some(#explanation_fn) },\n\n None => match &inv.explanation {\n\n Some(extracted_explanation) => quote::quote! { Some(|| #extracted_explanation.into()) },\n\n None => quote::quote! { None },\n\n },\n\n };\n\n\n\n // Box::pin the check and on_error callbacks in order to store them in a struct\n\n let check = match &inv.more.check {\n\n Some(check) => {\n\n quote::quote! { Some(|ctx| Box::pin(#check(ctx.into()))) }\n\n }\n\n None => quote::quote! { None },\n\n };\n\n let on_error = match &inv.more.on_error {\n\n Some(on_error) => {\n", "file_path": "macros/src/command/prefix.rs", "rank": 16, "score": 124655.8702008181 }, { "content": "#[async_trait::async_trait]\n\npub trait SlashArgument: Sized {\n\n /// Extract a Rust value of type T from the slash command argument, given via a\n\n /// [`serde_json::Value`].\n\n async fn extract(\n\n ctx: &serenity::Context,\n\n guild: Option<serenity::GuildId>,\n\n channel: Option<serenity::ChannelId>,\n\n value: &serde_json::Value,\n\n ) -> Result<Self, SlashArgError>;\n\n\n\n /// Create a slash command parameter equivalent to type T.\n\n ///\n\n /// Only fields about the argument type are filled in. The caller is still responsible for\n\n /// filling in `name()`, `description()`, and possibly `required()` or other fields.\n\n fn create(\n\n builder: &mut serenity::CreateApplicationCommandOption,\n\n ) -> &mut serenity::CreateApplicationCommandOption;\n\n}\n\n\n\n/// Implemented for all types that can be used as a function parameter in a slash command.\n\n///\n\n/// Currently marked `#[doc(hidden)]` because implementing this trait requires some jank due to a\n\n/// `PhantomData` hack and the auto-deref specialization hack.\n", "file_path": "src/slash/argument/slash.rs", "rank": 17, "score": 121046.02798878659 }, { "content": "use crate::serenity_prelude as serenity;\n\nuse crate::BoxFuture;\n\n\n\n/// Implemented for all types that can be used in a context menu command\n", "file_path": "src/slash/argument/context_menu.rs", "rank": 18, "score": 120329.85995165931 }, { "content": "fn extract_vec_type(t: &syn::Type) -> Option<&syn::Type> {\n\n extract_type_parameter(\"Vec\", t)\n\n}\n\n\n", "file_path": "macros/src/command/mod.rs", "rank": 19, "score": 119244.54956694684 }, { "content": "#[doc(hidden)]\n\n#[async_trait::async_trait]\n\npub trait SlashArgumentHack<T> {\n\n async fn extract(\n\n self,\n\n ctx: &serenity::Context,\n\n guild: Option<serenity::GuildId>,\n\n channel: Option<serenity::ChannelId>,\n\n value: &serde_json::Value,\n\n ) -> Result<T, SlashArgError>;\n\n\n\n fn create(\n\n self,\n\n builder: &mut serenity::CreateApplicationCommandOption,\n\n ) -> &mut serenity::CreateApplicationCommandOption;\n\n}\n\n\n\n/// Handles arbitrary types that can be parsed from string.\n\n#[async_trait::async_trait]\n\nimpl<T> SlashArgumentHack<T> for PhantomData<T>\n\nwhere\n\n T: serenity::ArgumentConvert + Send + Sync,\n", "file_path": 
"src/slash/argument/slash.rs", "rank": 20, "score": 118182.51118280685 }, { "content": "#[doc(hidden)]\n\npub trait ConvertStreamFrom<T> {\n\n type Output;\n\n fn converter(self) -> fn(T) -> Self::Output;\n\n}\n\n\n\nimpl<T: IntoIterator> ConvertStreamFrom<T> for &IntoStreamWrap<'_, T> {\n\n type Output = futures::stream::Iter<T::IntoIter>;\n\n fn converter(self) -> fn(T) -> Self::Output {\n\n |iter| futures::stream::iter(iter)\n\n }\n\n}\n\n\n\nimpl<T: futures::Stream> ConvertStreamFrom<T> for &&IntoStreamWrap<'_, T> {\n\n type Output = T;\n\n fn converter(self) -> fn(T) -> Self::Output {\n\n |stream| stream\n\n }\n\n}\n\n\n\n// Takes an expression that is either an IntoIterator or a Stream, and converts it to a Stream\n", "file_path": "src/slash/argument/autocomplete/into_stream_hack.rs", "rank": 21, "score": 115573.7255987646 }, { "content": "/// Find a command within nested PrefixCommandMeta's by the user message string. Also returns\n\n/// the arguments, i.e. the remaining string.\n\n///\n\n/// May throw an error if a command check fails\n\nfn find_command<'a, U, E>(\n\n framework: &'a crate::Framework<U, E>,\n\n ctx: &'a serenity::Context,\n\n msg: &'a serenity::Message,\n\n prefix: &'a str,\n\n commands: &'a [crate::PrefixCommandMeta<U, E>],\n\n remaining_message: &'a str,\n\n) -> crate::BoxFuture<\n\n 'a,\n\n Result<\n\n Option<(&'a crate::PrefixCommandMeta<U, E>, &'a str)>,\n\n (E, crate::PrefixCommandErrorContext<'a, U, E>),\n\n >,\n\n>\n\nwhere\n\n U: Send + Sync,\n\n{\n\n Box::pin(_find_command(\n\n framework,\n\n ctx,\n", "file_path": "src/framework/dispatch/prefix.rs", "rank": 22, "score": 113808.41130914156 }, { "content": "fn send_as_initial_response(\n\n data: crate::CreateReply<'_>,\n\n allowed_mentions: Option<&serenity::CreateAllowedMentions>,\n\n f: &mut serenity::CreateInteractionResponseData,\n\n) {\n\n let crate::CreateReply {\n\n content,\n\n embed,\n\n attachments: _, // discord doesn't support attachments in initial response :(\n\n components,\n\n ephemeral,\n\n } = data;\n\n\n\n if let Some(content) = content {\n\n f.content(content);\n\n }\n\n if let Some(embed) = embed {\n\n f.add_embed(embed);\n\n }\n\n if let Some(allowed_mentions) = allowed_mentions {\n", "file_path": "src/slash/mod.rs", "rank": 23, "score": 106635.57008890115 }, { "content": "fn extract_option_type(t: &syn::Type) -> Option<&syn::Type> {\n\n extract_type_parameter(\"Option\", t)\n\n}\n\n\n", "file_path": "macros/src/command/mod.rs", "rank": 24, "score": 106059.21346909211 }, { "content": "fn send_as_followup_response<'a>(\n\n data: crate::CreateReply<'a>,\n\n allowed_mentions: Option<&serenity::CreateAllowedMentions>,\n\n f: &mut serenity::CreateInteractionResponseFollowup<'a>,\n\n) {\n\n let crate::CreateReply {\n\n content,\n\n embed,\n\n attachments,\n\n components,\n\n ephemeral,\n\n } = data;\n\n\n\n if let Some(content) = content {\n\n f.content(content);\n\n }\n\n if let Some(embed) = embed {\n\n f.add_embed(embed);\n\n }\n\n if let Some(components) = components {\n", "file_path": "src/slash/mod.rs", "rank": 25, "score": 104814.59750847923 }, { "content": " .ok_or(SlashArgError::CommandStructureMismatch(\"expected string\"))?;\n\n Ok(string.to_owned())\n\n }\n\n\n\n fn into_json(self, value: T) -> serde_json::Value {\n\n serde_json::Value::String(value.to_string())\n\n }\n\n}\n\n\n\n// Handles all integers, signed and unsigned.\n\n#[async_trait::async_trait]\n\nimpl<T: TryFrom<i64> + Into<serde_json::Number> + Send + Sync> AutocompletableHack<T>\n\n for &PhantomData<T>\n\n{\n\n type 
Partial = T;\n\n\n\n fn extract_partial(self, value: &serde_json::Value) -> Result<T, SlashArgError> {\n\n value\n\n .as_i64()\n\n .ok_or(SlashArgError::CommandStructureMismatch(\"expected integer\"))?\n", "file_path": "src/slash/argument/autocomplete/autocompletable.rs", "rank": 26, "score": 99389.31105569415 }, { "content": " .try_into()\n\n .map_err(|_| SlashArgError::IntegerOutOfBounds)\n\n }\n\n\n\n fn into_json(self, value: T) -> serde_json::Value {\n\n serde_json::Value::Number(value.into())\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl AutocompletableHack<f32> for &&PhantomData<f32> {\n\n type Partial = f32;\n\n\n\n fn extract_partial(self, value: &serde_json::Value) -> Result<f32, SlashArgError> {\n\n Ok(value\n\n .as_f64()\n\n .ok_or(SlashArgError::CommandStructureMismatch(\"expected float\"))? as f32)\n\n }\n\n\n\n fn into_json(self, value: f32) -> serde_json::Value {\n", "file_path": "src/slash/argument/autocomplete/autocompletable.rs", "rank": 27, "score": 99389.16458585561 }, { "content": "use crate::{serenity_prelude as serenity, SlashArgError};\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::marker::PhantomData;\n\n\n\n/// Types that can be marked autocompletable in a slash command parameter.\n\n///\n\n/// Includes almost all types that can be used as a slash command parameter in general,\n\n/// except some built-in model types (User, Member, Role...)\n", "file_path": "src/slash/argument/autocomplete/autocompletable.rs", "rank": 28, "score": 99387.81621130244 }, { "content": " serde_json::Value::Number(\n\n serde_json::Number::from_f64(value as _).unwrap_or_else(|| serde_json::Number::from(0)),\n\n )\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl AutocompletableHack<f64> for &&PhantomData<f64> {\n\n type Partial = f64;\n\n\n\n fn extract_partial(self, value: &serde_json::Value) -> Result<f64, SlashArgError> {\n\n value\n\n .as_f64()\n\n .ok_or(SlashArgError::CommandStructureMismatch(\"expected float\"))\n\n }\n\n\n\n fn into_json(self, value: f64) -> serde_json::Value {\n\n serde_json::Value::Number(\n\n serde_json::Number::from_f64(value).unwrap_or_else(|| serde_json::Number::from(0)),\n\n )\n", "file_path": "src/slash/argument/autocomplete/autocompletable.rs", "rank": 29, "score": 99384.04726290374 }, { "content": " }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl<T: Autocompletable> AutocompletableHack<T> for &&PhantomData<T> {\n\n type Partial = T::Partial;\n\n\n\n fn extract_partial(self, value: &serde_json::Value) -> Result<T::Partial, SlashArgError> {\n\n <T as Autocompletable>::extract_partial(value)\n\n }\n\n\n\n fn into_json(self, value: T) -> serde_json::Value {\n\n value.into_json()\n\n }\n\n}\n", "file_path": "src/slash/argument/autocomplete/autocompletable.rs", "rank": 30, "score": 99381.06893247458 }, { "content": "// Convert None => None and Some(T) => Some(T)\n\nfn wrap_option<T: quote::ToTokens>(literal: Option<T>) -> syn::Expr {\n\n match literal {\n\n Some(literal) => syn::parse_quote! { Some(#literal) },\n\n None => syn::parse_quote! { None },\n\n }\n\n}\n\n\n", "file_path": "macros/src/command/mod.rs", "rank": 31, "score": 98464.103709975 }, { "content": "\n\n let enum_ident = &input.ident;\n\n let indices1 = 0_u64..(variant_idents.len() as _);\n\n let indices2 = 0_i32..(variant_idents.len() as _);\n\n Ok(quote::quote! 
{\n\n #[poise::async_trait]\n\n impl poise::SlashArgument for #enum_ident {\n\n async fn extract(\n\n _: &poise::serenity_prelude::Context,\n\n _: Option<poise::serenity_prelude::GuildId>,\n\n _: Option<poise::serenity_prelude::ChannelId>,\n\n value: &poise::serde_json::Value,\n\n ) -> Result<Self, poise::SlashArgError> {\n\n let choice_key = value\n\n .as_u64()\n\n .ok_or(poise::SlashArgError::CommandStructureMismatch(\n\n \"expected u64\",\n\n ))?;\n\n\n\n match choice_key {\n", "file_path": "macros/src/slash_choice_parameter.rs", "rank": 32, "score": 90764.70385693824 }, { "content": " #( #indices1 => Ok(Self::#variant_idents), )*\n\n _ => Err(poise::SlashArgError::CommandStructureMismatch(\"out of bounds choice key\")),\n\n }\n\n }\n\n\n\n fn create(\n\n builder: &mut poise::serenity_prelude::CreateApplicationCommandOption,\n\n ) -> &mut poise::serenity_prelude::CreateApplicationCommandOption {\n\n builder\n\n .kind(poise::serenity_prelude::ApplicationCommandOptionType::Integer)\n\n #( .add_int_choice(#display_strings, #indices2 as i32) )*\n\n }\n\n }\n\n\n\n impl std::str::FromStr for #enum_ident {\n\n type Err = poise::InvalidChoice;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n #(\n\n if s.eq_ignore_ascii_case(#display_strings)\n", "file_path": "macros/src/slash_choice_parameter.rs", "rank": 33, "score": 90764.52921213319 }, { "content": " \"Slash choice parameters cannot have fields\",\n\n )\n\n .into());\n\n }\n\n\n\n let attrs = variant\n\n .attrs\n\n .into_iter()\n\n .map(|attr| attr.parse_meta().map(syn::NestedMeta::Meta))\n\n .collect::<Result<Vec<_>, _>>()?;\n\n let mut names = <VariantAttribute as darling::FromMeta>::from_list(&attrs)?.name;\n\n\n\n if names.is_empty() {\n\n return Err(syn::Error::new(variant.ident.span(), \"Missing `name` attribute\").into());\n\n }\n\n\n\n variant_idents.push(variant.ident);\n\n display_strings.push(names.remove(0));\n\n more_display_strings.push(names);\n\n }\n", "file_path": "macros/src/slash_choice_parameter.rs", "rank": 34, "score": 90757.62716205903 }, { "content": "//! 
Implements the #[derive(SlashChoiceParameter)] derive macro\n\n\n\nuse proc_macro::TokenStream;\n\nuse syn::spanned::Spanned as _;\n\n\n\n/// Representation of the function parameter attribute arguments\n\n#[derive(Debug, darling::FromMeta)]\n", "file_path": "macros/src/slash_choice_parameter.rs", "rank": 35, "score": 90751.04336722917 }, { "content": " #( || s.eq_ignore_ascii_case(#more_display_strings) )*\n\n {\n\n Ok(Self::#variant_idents)\n\n } else\n\n )* {\n\n Err(poise::InvalidChoice)\n\n }\n\n }\n\n }\n\n }\n\n .into())\n\n}\n", "file_path": "macros/src/slash_choice_parameter.rs", "rank": 36, "score": 90744.0206064246 }, { "content": "mod autocompletable;\n\npub use autocompletable::*;\n\n\n\nmod into_stream_hack;\n\npub use into_stream_hack::*;\n\n\n\n/// A single autocomplete choice, displayed in Discord UI\n\n///\n\n/// This type should be returned by functions set via the `#[autocomplete = ]` attribute on slash\n\n/// command parameters.\n\npub struct AutocompleteChoice<T> {\n\n /// Name of the choice, displayed in the Discord UI\n\n pub name: String,\n\n /// Value of the choice, sent to the bot\n\n pub value: T,\n\n}\n\n\n\nimpl<T: ToString> From<T> for AutocompleteChoice<T> {\n\n fn from(value: T) -> Self {\n\n Self {\n\n name: value.to_string(),\n\n value,\n\n }\n\n }\n\n}\n", "file_path": "src/slash/argument/autocomplete/mod.rs", "rank": 37, "score": 89714.80495084982 }, { "content": "#[derive(Default, Debug, darling::FromMeta)]\n\n#[darling(default)]\n\nstruct ParamOptions {\n\n description: Option<String>,\n\n autocomplete: Option<syn::Path>,\n\n lazy: bool,\n\n flag: bool,\n\n rest: bool,\n\n}\n\n\n", "file_path": "macros/src/command/mod.rs", "rank": 38, "score": 87398.04876804423 }, { "content": "pub struct KeyValueArgs(pub std::collections::HashMap<String, String>);\n\n\n\nimpl KeyValueArgs {\n\n /// Retrieve a single value by its key\n\n pub fn get(&self, key: &str) -> Option<&str> {\n\n self.0.get(key).map(|x| x.as_str())\n\n }\n\n\n\n fn pop_single_key_value_pair<'a>(\n\n args: &ArgString<'a>,\n\n ) -> Option<(ArgString<'a>, (String, String))> {\n\n // TODO: share quote parsing machinery with PopArgumentAsync impl for String\n\n\n\n if args.0.is_empty() {\n\n return None;\n\n }\n\n\n\n let mut key = String::new();\n\n let mut inside_string = false;\n\n let mut escaping = false;\n", "file_path": "src/prefix/argument/key_value_args.rs", "rank": 39, "score": 87142.93717924441 }, { "content": " let args = ArgString(chars.as_str());\n\n // `args` used to contain \"key=value ...\", now it contains \"value ...\", so pop the value off\n\n let (args, value) = String::pop_from(&args).unwrap_or((args, String::new()));\n\n\n\n Some((args, (key, value)))\n\n }\n\n}\n\n\n\nimpl<'a> PopArgument<'a> for KeyValueArgs {\n\n type Err = std::convert::Infallible;\n\n\n\n fn pop_from(args: &ArgString<'a>) -> Result<(ArgString<'a>, Self), Self::Err> {\n\n let mut pairs = std::collections::HashMap::new();\n\n\n\n let mut args = args.clone();\n\n while let Some((new_args, (key, value))) = Self::pop_single_key_value_pair(&args) {\n\n args = new_args;\n\n pairs.insert(key, value);\n\n }\n\n\n\n Ok((args, Self(pairs)))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[test]\n", "file_path": "src/prefix/argument/key_value_args.rs", "rank": 40, "score": 87139.10522037707 }, { "content": "use super::*;\n\n\n\n/// A command parameter type for key-value args\n\n///\n\n/// For example `key1=value1 key2=\"value2 with spaces\"`.\n\n///\n\n/// ```rust\n\n/// use poise::PopArgument;\n\n///\n\n/// let string = 
r#\"key1=value key2=\"value with spaces\" \"key with spaces\"=\"value with \\\"quotes\\\"\"\"#;\n\n/// let key_value_args = poise::KeyValueArgs::pop_from(&poise::ArgString(string)).unwrap().1;\n\n///\n\n/// let mut expected_result = std::collections::HashMap::new();\n\n/// expected_result.insert(\"key1\".into(), \"value\".into());\n\n/// expected_result.insert(\"key2\".into(), \"value with spaces\".into());\n\n/// expected_result.insert(\"key with spaces\".into(), r#\"value with \"quotes\"\"#.into());\n\n///\n\n/// assert_eq!(key_value_args.0, expected_result);\n\n/// ```\n\n#[derive(Clone, PartialEq, Eq, Debug, Default)]\n", "file_path": "src/prefix/argument/key_value_args.rs", "rank": 41, "score": 87135.12017943963 }, { "content": " let (args, kv_args) = KeyValueArgs::pop_from(&ArgString(string)).unwrap();\n\n\n\n assert_eq!(\n\n kv_args.0,\n\n pairs\n\n .iter()\n\n .map(|&(k, v)| (k.to_owned(), v.to_owned()))\n\n .collect(),\n\n );\n\n assert_eq!(args.0, remaining_args);\n\n }\n\n}\n", "file_path": "src/prefix/argument/key_value_args.rs", "rank": 42, "score": 87127.82403119694 }, { "content": "\n\n let mut chars = args.0.trim_start().chars();\n\n loop {\n\n let c = chars.next()?;\n\n if escaping {\n\n key.push(c);\n\n escaping = false;\n\n } else if !inside_string && c.is_whitespace() {\n\n return None;\n\n } else if c == '\"' {\n\n inside_string = !inside_string;\n\n } else if c == '\\\\' {\n\n escaping = true;\n\n } else if !inside_string && c == '=' {\n\n break;\n\n } else {\n\n key.push(c);\n\n }\n\n }\n\n\n", "file_path": "src/prefix/argument/key_value_args.rs", "rank": 43, "score": 87125.36565182284 }, { "content": "#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! into_stream {\n\n ($e:expr) => {\n\n match $e {\n\n value => {\n\n use $crate::ConvertStreamFrom;\n\n (&&$crate::IntoStreamWrap(&value)).converter()(value)\n\n }\n\n }\n\n };\n\n}\n", "file_path": "src/slash/argument/autocomplete/into_stream_hack.rs", "rank": 44, "score": 85719.0314772623 }, { "content": "#[doc(hidden)]\n\npub struct IntoStreamWrap<'a, T>(pub &'a T);\n\n\n\n#[doc(hidden)]\n", "file_path": "src/slash/argument/autocomplete/into_stream_hack.rs", "rank": 45, "score": 85711.91113737461 }, { "content": "#[derive(Debug, darling::FromMeta)]\n\nstruct VariantAttribute {\n\n #[darling(multiple)]\n\n name: Vec<String>,\n\n}\n\n\n", "file_path": "macros/src/slash_choice_parameter.rs", "rank": 46, "score": 83030.57950592694 }, { "content": "/// Superset of [`PopArgumentAsync`] without Discord context available and no async support.\n\n///\n\n/// Similar in spirit to [`std::str::FromStr`].\n\npub trait PopArgument<'a>: Sized {\n\n /// This error type should implement [`std::error::Error`] most of the time\n\n type Err;\n\n\n\n /// Parse [`Self`] from the front of the given string and return a tuple of the remaining string\n\n /// and [`Self`].\n\n fn pop_from(args: &ArgString<'a>) -> Result<(ArgString<'a>, Self), Self::Err>;\n\n}\n\n\n\n/// Parse a value out of a string by popping off the front of the string. Discord message context\n\n/// is available for parsing, and IO may be done as part of the parsing.\n\n///\n\n/// Implementors should assume that a string never starts with whitespace, and fail to parse if it\n\n/// does. 
This is for consistency's\n\n/// sake and also because it keeps open the possibility of parsing whitespace.\n", "file_path": "src/prefix/argument/mod.rs", "rank": 47, "score": 79235.15522296226 }, { "content": "#[doc(hidden)]\n\n#[async_trait::async_trait]\n\npub trait PrefixArgumentHack<'a, T> {\n\n type Err;\n\n\n\n async fn pop(\n\n self,\n\n args: &ArgString<'a>,\n\n ctx: &serenity::Context,\n\n msg: &serenity::Message,\n\n ) -> Result<(ArgString<'a>, T), Self::Err>;\n\n}\n\n\n\n/// When attempting to parse a string, it can fail either because it's empty, or because it's\n\n/// invalid in some way. This error type covers both cases\n\n#[derive(Debug)]\n\npub enum MaybeEmptyError<E> {\n\n /// If the input was empty and no string could be passed to the underlying type parser\n\n EmptyArgs(crate::EmptyArgs),\n\n /// The underlying type threw a parse error\n\n ParseError(E),\n\n}\n", "file_path": "src/prefix/argument/mod.rs", "rank": 48, "score": 77203.39381468874 }, { "content": "#[async_trait::async_trait]\n\npub trait PopArgumentAsync<'a>: Sized {\n\n /// This error type should implement [`std::error::Error`] most of the time\n\n type Err;\n\n\n\n /// Parse [`Self`] from the front of the given string and return a tuple of the remaining string\n\n /// and [`Self`].\n\n async fn async_pop_from(\n\n ctx: &serenity::Context,\n\n msg: &serenity::Message,\n\n args: &ArgString<'a>,\n\n ) -> Result<(ArgString<'a>, Self), Self::Err>;\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl<'a, T> PopArgumentAsync<'a> for T\n\nwhere\n\n T: PopArgument<'a>,\n\n{\n\n type Err = <Self as PopArgument<'a>>::Err;\n\n\n", "file_path": "src/prefix/argument/mod.rs", "rank": 49, "score": 77199.23097680569 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_pop_string() {\n\n // Test that trailing whitespace is not consumed\n\n assert_eq!(\n\n String::pop_from(&ArgString(\"AA BB\")).unwrap().0,\n\n ArgString(\" BB\")\n\n );\n\n\n\n for &(string, arg) in &[\n\n (r#\"AA BB\"#, r#\"AA\"#),\n\n (r#\"\"AA BB\"\"#, r#\"AA BB\"#),\n\n (r#\"\"AA BB\"#, r#\"AA BB\"#),\n\n (r#\"\"AA \"BB\"#, r#\"AA BB\"#),\n\n (r#\"\"\"\"\"A\"\"A\" \"B\"\"\"B\"#, r#\"AA BB\"#),\n\n (r#\"\\\"AA BB\\\"\"#, r#\"\"AA\"#),\n\n (r#\"\\\"AA\\ BB\\\"\"#, r#\"\"AA BB\"\"#),\n\n (r#\"\"\\\"AA BB\\\"\"\"#, r#\"\"AA BB\"\"#),\n\n ] {\n\n assert_eq!(String::pop_from(&ArgString(string)).unwrap().1, arg);\n\n }\n\n}\n", "file_path": "src/prefix/argument/string.rs", "rank": 50, "score": 75129.52328659187 }, { "content": "#[derive(Debug, Default)]\n\nstruct StringList(Vec<String>);\n\n\n\nimpl darling::FromMeta for StringList {\n\n fn from_list(items: &[::syn::NestedMeta]) -> darling::Result<Self> {\n\n items\n\n .iter()\n\n .map(|item| String::from_nested_meta(item))\n\n .collect::<darling::Result<Vec<String>>>()\n\n .map(Self)\n\n }\n\n}\n\n\n\n/// Representation of the command attribute arguments (`#[command(...)]`)\n\n#[derive(Default, Debug, darling::FromMeta)]\n\n#[darling(default)]\n\npub struct CommandOptions {\n\n prefix_command: bool,\n\n slash_command: bool,\n\n context_menu_command: Option<String>,\n\n\n", "file_path": "macros/src/command/mod.rs", "rank": 51, "score": 73483.72753164038 }, { "content": " .await?\n\n )\n\n } else {\n\n None\n\n }\n\n };\n\n\n\n // Extract Vec<T> (delegating to Option<T> because slash commands don't support variadic\n\n // arguments right now)\n\n ($ctx:ident, $guild_id:ident, $channel_id:ident, $args:ident => $name:ident: Vec<$type:ty $(,)*>) => {\n\n match $crate::_parse_slash!($ctx, $guild_id, $channel_id, $args 
=> $name: Option<$type>) {\n\n Some(value) => vec![value],\n\n None => vec![],\n\n }\n\n };\n\n\n\n // Extract #[flag]\n\n ($ctx:ident, $guild_id:ident, $channel_id:ident, $args:ident => $name:ident: FLAG) => {\n\n $crate::_parse_slash!($ctx, $guild_id, $channel_id, $args => $name: Option<bool>)\n\n .unwrap_or(false)\n", "file_path": "src/slash/argument/slash.rs", "rank": 52, "score": 71054.99382172998 }, { "content": " T::Err: std::error::Error + Send + Sync + 'static,\n\n{\n\n async fn extract(\n\n self,\n\n ctx: &serenity::Context,\n\n guild: Option<serenity::GuildId>,\n\n channel: Option<serenity::ChannelId>,\n\n value: &serde_json::Value,\n\n ) -> Result<T, SlashArgError> {\n\n let string = value\n\n .as_str()\n\n .ok_or(SlashArgError::CommandStructureMismatch(\"expected string\"))?;\n\n T::convert(ctx, guild, channel, string)\n\n .await\n\n .map_err(|e| SlashArgError::Parse(e.into()))\n\n }\n\n\n\n fn create(\n\n self,\n\n builder: &mut serenity::CreateApplicationCommandOption,\n", "file_path": "src/slash/argument/slash.rs", "rank": 53, "score": 71053.12628239708 }, { "content": " guild: Option<serenity::GuildId>,\n\n channel: Option<serenity::ChannelId>,\n\n value: &serde_json::Value,\n\n ) -> Result<T, SlashArgError> {\n\n <T as SlashArgument>::extract(ctx, guild, channel, value).await\n\n }\n\n\n\n fn create(\n\n self,\n\n builder: &mut serenity::CreateApplicationCommandOption,\n\n ) -> &mut serenity::CreateApplicationCommandOption {\n\n <T as SlashArgument>::create(builder)\n\n }\n\n}\n\n\n\n// Implement slash argument for a model type that is represented in interactions via an ID\n\nmacro_rules! impl_slash_argument {\n\n ($type:ty, $slash_param_type:ident) => {\n\n #[async_trait::async_trait]\n\n impl SlashArgumentHack<$type> for &&PhantomData<$type> {\n", "file_path": "src/slash/argument/slash.rs", "rank": 54, "score": 71049.21542461477 }, { "content": " };\n\n\n\n // Extract T\n\n ($ctx:ident, $guild_id:ident, $channel_id:ident, $args:ident => $name:ident: $($type:tt)*) => {\n\n $crate::_parse_slash!($ctx, $guild_id, $channel_id, $args => $name: Option<$($type)*>)\n\n .ok_or($crate::SlashArgError::CommandStructureMismatch(\"a required argument is missing\"))?\n\n };\n\n}\n\n\n\n/**\n\nMacro for extracting and parsing slash command arguments out of an array of\n\n[`serenity::ApplicationCommandInteractionDataOption`].\n\n\n\nAn invocation of this macro is generated by `crate::command`, so you usually don't need this macro\n\ndirectly.\n\n\n\n```rust,no_run\n\n# #[tokio::main] async fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n# use poise::serenity_prelude as serenity;\n\nlet ctx: serenity::Context = todo!();\n", "file_path": "src/slash/argument/slash.rs", "rank": 55, "score": 71048.8796781161 }, { "content": " async fn extract(\n\n self,\n\n ctx: &serenity::Context,\n\n guild: Option<serenity::GuildId>,\n\n channel: Option<serenity::ChannelId>,\n\n value: &serde_json::Value,\n\n ) -> Result<$type, SlashArgError> {\n\n // We can parse IDs by falling back to the generic serenity::ArgumentConvert impl\n\n PhantomData::<$type>\n\n .extract(ctx, guild, channel, value)\n\n .await\n\n }\n\n\n\n fn create(\n\n self,\n\n builder: &mut serenity::CreateApplicationCommandOption,\n\n ) -> &mut serenity::CreateApplicationCommandOption {\n\n builder.kind(serenity::ApplicationCommandOptionType::$slash_param_type)\n\n }\n\n }\n", "file_path": "src/slash/argument/slash.rs", "rank": 56, "score": 71048.48133730503 }, { "content": "let guild_id: Option<serenity::GuildId> = 
todo!();\n\nlet channel_id: serenity::ChannelId = todo!();\n\nlet args: &[serenity::ApplicationCommandInteractionDataOption] = todo!();\n\n\n\nlet (arg1, arg2) = poise::parse_slash_args!(\n\n &ctx, guild_id, channel_id,\n\n args => (arg1: String), (arg2: Option<u32>)\n\n).await?;\n\n\n\n# Ok(()) }\n\n```\n\n*/\n\n#[macro_export]\n\nmacro_rules! parse_slash_args {\n\n ($ctx:expr, $guild_id:expr, $channel_id:expr, $args:expr => $(\n\n ( $name:ident: $($type:tt)* )\n\n ),* $(,)? ) => {\n\n async /* not move! */ {\n\n use $crate::SlashArgumentHack;\n\n\n\n let (ctx, guild_id, channel_id, args) = ($ctx, $guild_id, $channel_id, $args);\n\n\n\n Ok::<_, $crate::SlashArgError>(( $(\n\n $crate::_parse_slash!( ctx, guild_id, channel_id, args => $name: $($type)* ),\n\n )* ))\n\n }\n\n };\n\n}\n", "file_path": "src/slash/argument/slash.rs", "rank": 57, "score": 71047.04127937369 }, { "content": " value: &serde_json::Value,\n\n ) -> Result<f64, SlashArgError> {\n\n value\n\n .as_f64()\n\n .ok_or(SlashArgError::CommandStructureMismatch(\"expected float\"))\n\n }\n\n\n\n fn create(\n\n self,\n\n builder: &mut serenity::CreateApplicationCommandOption,\n\n ) -> &mut serenity::CreateApplicationCommandOption {\n\n builder.kind(serenity::ApplicationCommandOptionType::Number)\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl<T: SlashArgument + Sync> SlashArgumentHack<T> for &&PhantomData<T> {\n\n async fn extract(\n\n self,\n\n ctx: &serenity::Context,\n", "file_path": "src/slash/argument/slash.rs", "rank": 58, "score": 71044.88996667363 }, { "content": " .ok_or(SlashArgError::IntegerOutOfBounds)\n\n }\n\n\n\n fn create(\n\n self,\n\n builder: &mut serenity::CreateApplicationCommandOption,\n\n ) -> &mut serenity::CreateApplicationCommandOption {\n\n builder.kind(serenity::ApplicationCommandOptionType::Integer)\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl SlashArgumentHack<f32> for &&PhantomData<f32> {\n\n async fn extract(\n\n self,\n\n _: &serenity::Context,\n\n _: Option<serenity::GuildId>,\n\n _: Option<serenity::ChannelId>,\n\n value: &serde_json::Value,\n\n ) -> Result<f32, SlashArgError> {\n", "file_path": "src/slash/argument/slash.rs", "rank": 59, "score": 71044.70301407104 }, { "content": " ) -> &mut serenity::CreateApplicationCommandOption {\n\n builder.kind(serenity::ApplicationCommandOptionType::String)\n\n }\n\n}\n\n\n\n// Handles all integers, signed and unsigned, via TryFrom<i64>.\n\n#[async_trait::async_trait]\n\nimpl<T: TryFrom<i64> + Send + Sync> SlashArgumentHack<T> for &PhantomData<T> {\n\n async fn extract(\n\n self,\n\n _: &serenity::Context,\n\n _: Option<serenity::GuildId>,\n\n _: Option<serenity::ChannelId>,\n\n value: &serde_json::Value,\n\n ) -> Result<T, SlashArgError> {\n\n value\n\n .as_i64()\n\n .ok_or(SlashArgError::CommandStructureMismatch(\"expected integer\"))?\n\n .try_into()\n\n .ok()\n", "file_path": "src/slash/argument/slash.rs", "rank": 60, "score": 71044.57303370287 }, { "content": " };\n\n}\n\nimpl_slash_argument!(serenity::Member, User);\n\nimpl_slash_argument!(serenity::User, User);\n\nimpl_slash_argument!(serenity::Channel, Channel);\n\nimpl_slash_argument!(serenity::GuildChannel, Channel);\n\nimpl_slash_argument!(serenity::Role, Role);\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! 
_parse_slash {\n\n // Extract Option<T>\n\n ($ctx:ident, $guild_id:ident, $channel_id:ident, $args:ident => $name:ident: Option<$type:ty $(,)*>) => {\n\n if let Some(arg) = $args.iter().find(|arg| arg.name == stringify!($name)) {\n\n let arg = arg.value\n\n .as_ref()\n\n .ok_or($crate::SlashArgError::CommandStructureMismatch(\"expected argument value\"))?;\n\n Some(\n\n (&&&&&std::marker::PhantomData::<$type>)\n\n .extract($ctx, $guild_id, Some($channel_id), arg)\n", "file_path": "src/slash/argument/slash.rs", "rank": 61, "score": 71044.29498162196 }, { "content": " Ok(value\n\n .as_f64()\n\n .ok_or(SlashArgError::CommandStructureMismatch(\"expected float\"))? as f32)\n\n }\n\n\n\n fn create(\n\n self,\n\n builder: &mut serenity::CreateApplicationCommandOption,\n\n ) -> &mut serenity::CreateApplicationCommandOption {\n\n builder.kind(serenity::ApplicationCommandOptionType::Number)\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl SlashArgumentHack<f64> for &&PhantomData<f64> {\n\n async fn extract(\n\n self,\n\n _: &serenity::Context,\n\n _: Option<serenity::GuildId>,\n\n _: Option<serenity::ChannelId>,\n", "file_path": "src/slash/argument/slash.rs", "rank": 62, "score": 71043.82980389861 }, { "content": "}\n\nimpl std::fmt::Display for SlashArgError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::CommandStructureMismatch(detail) => {\n\n write!(\n\n f,\n\n \"Bot author did not register their commands correctly ({})\",\n\n detail\n\n )\n\n }\n\n Self::Parse(e) => write!(f, \"Failed to parse argument: {}\", e),\n\n Self::IntegerOutOfBounds => write!(f, \"Integer out of bounds for target type\"),\n\n }\n\n }\n\n}\n\nimpl std::error::Error for SlashArgError {\n\n fn cause(&self) -> Option<&dyn std::error::Error> {\n\n match self {\n\n Self::Parse(e) => Some(&**e),\n\n Self::CommandStructureMismatch(_) => None,\n\n Self::IntegerOutOfBounds => None,\n\n }\n\n }\n\n}\n\n\n\n/// Implement this trait on types that you want to use as a slash command parameter.\n\n#[async_trait::async_trait]\n", "file_path": "src/slash/argument/slash.rs", "rank": 63, "score": 71036.44795836025 }, { "content": "//! 
Parse received slash command arguments into Rust types.\n\n\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::marker::PhantomData;\n\n\n\nuse crate::serenity_prelude as serenity;\n\n\n\n/// Possible errors when parsing slash command arguments\n\n#[derive(Debug)]\n\npub enum SlashArgError {\n\n /// Expected a certain argument type at a certain position in the unstructured list of\n\n /// arguments, but found something else.\n\n ///\n\n /// Most often the result of the bot not having registered the command in Discord, so Discord\n\n /// stores an outdated version of the command and its parameters.\n\n CommandStructureMismatch(&'static str),\n\n /// A string parameter was found, but it could not be parsed into the target type.\n\n Parse(Box<dyn std::error::Error + Send + Sync>),\n\n /// An integer parameter was found, but it did not fit into the target integer type.\n\n IntegerOutOfBounds,\n", "file_path": "src/slash/argument/slash.rs", "rank": 64, "score": 71036.38149118074 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_pop_code_block() {\n\n for &(string, code, language) in &[\n\n (\"`hello world`\", \"hello world\", None),\n\n (\"` `\", \" \", None),\n\n (\"``` hi ```\", \" hi \", None),\n\n (\"```rust```\", \"rust\", None),\n\n (\"```rust\\nhi```\", \"hi\", Some(\"rust\")),\n\n (\"```rust hi```\", \"rust hi\", None),\n\n (\"```rust\\n\\n\\n\\n\\nhi\\n\\n\\n\\n```\", \"hi\", Some(\"rust\")),\n\n ] {\n\n assert_eq!(\n\n CodeBlock::pop_from(&ArgString(string)).unwrap().1,\n\n CodeBlock {\n\n code: code.into(),\n\n language: language.map(|x| x.into())\n\n }\n\n );\n\n }\n\n\n\n assert_eq!(\n\n CodeBlock::pop_from(&ArgString(\"``\")),\n\n Err(CodeBlockError::Malformed)\n\n );\n\n assert_eq!(\n\n CodeBlock::pop_from(&ArgString(\"``````\")),\n\n Err(CodeBlockError::Malformed)\n\n );\n\n}\n", "file_path": "src/prefix/argument/code_block.rs", "rank": 65, "score": 70820.72410862839 }, { "content": "fn update_message(message: &mut serenity::Message, update: serenity::MessageUpdateEvent) {\n\n message.id = update.id;\n\n message.channel_id = update.channel_id;\n\n message.guild_id = update.guild_id;\n\n\n\n if let Some(kind) = update.kind {\n\n message.kind = kind;\n\n }\n\n if let Some(content) = update.content {\n\n message.content = content;\n\n }\n\n if let Some(tts) = update.tts {\n\n message.tts = tts;\n\n }\n\n if let Some(pinned) = update.pinned {\n\n message.pinned = pinned;\n\n }\n\n if let Some(timestamp) = update.timestamp {\n\n message.timestamp = timestamp;\n\n }\n", "file_path": "src/prefix/track_edits.rs", "rank": 66, "score": 67805.17842558147 }, { "content": "/// Part of the Invocation struct. 
Represents a single parameter of a Discord command.\n\nstruct CommandParameter {\n\n name: syn::Ident,\n\n type_: syn::Type,\n\n more: ParamOptions,\n\n span: proc_macro2::Span,\n\n}\n\n\n\n/// Passed to prefix and slash command spec generators; contains info to be included in command spec\n\npub struct Invocation<'a> {\n\n command_name: String,\n\n parameters: &'a [CommandParameter],\n\n description: Option<&'a str>,\n\n explanation: Option<&'a str>,\n\n function: &'a syn::ItemFn,\n\n required_permissions: &'a syn::Expr,\n\n required_bot_permissions: &'a syn::Expr,\n\n more: &'a CommandOptions,\n\n}\n\n\n", "file_path": "macros/src/command/mod.rs", "rank": 67, "score": 67793.05164601348 }, { "content": "mod slash;\n\npub use slash::*;\n\n\n\nmod context_menu;\n\npub use context_menu::*;\n\n\n\nmod autocomplete;\n\npub use autocomplete::*;\n", "file_path": "src/slash/argument/mod.rs", "rank": 75, "score": 61961.354549998876 }, { "content": "use crate::{Context, Error};\n\nuse poise::serenity_prelude as serenity;\n\n\n\n/// Query information about a Discord profile\n\n#[poise::command(context_menu_command = \"User information\", slash_command)]\n\npub async fn user_info(\n\n ctx: Context<'_>,\n\n #[description = \"Discord profile to query information about\"] user: serenity::User,\n\n) -> Result<(), Error> {\n\n let response = format!(\n\n \"**Name**: {}\\n**Created**: {}\",\n\n user.name,\n\n user.created_at()\n\n );\n\n\n\n ctx.say(response).await?;\n\n Ok(())\n\n}\n\n\n\n/// Echo content of a message\n\n#[poise::command(context_menu_command = \"Echo\", slash_command)]\n\npub async fn echo(\n\n ctx: Context<'_>,\n\n #[description = \"Message to echo (enter a link or ID)\"] msg: serenity::Message,\n\n) -> Result<(), Error> {\n\n ctx.say(&msg.content).await?;\n\n Ok(())\n\n}\n", "file_path": "examples/framework_usage/context_menu.rs", "rank": 76, "score": 61289.7442801248 }, { "content": "type Context<'a> = poise::Context<'a, Data, Error>;\n\n\n\n// Custom user data passed to all command functions\n\npub struct Data {\n\n votes: Mutex<HashMap<String, u32>>,\n\n}\n\n\n\n/// Show this help menu\n\n#[poise::command(prefix_command, track_edits, slash_command)]\n\nasync fn help(\n\n ctx: Context<'_>,\n\n #[description = \"Specific command to show help about\"]\n\n #[autocomplete = \"poise::builtins::autocomplete_command\"]\n\n command: Option<String>,\n\n) -> Result<(), Error> {\n\n poise::builtins::help(\n\n ctx,\n\n command.as_deref(),\n\n \"This is an example bot made to showcase features of my custom Discord bot framework\",\n\n poise::builtins::HelpResponseMode::Ephemeral,\n", "file_path": "examples/framework_usage/main.rs", "rank": 77, "score": 57312.54688857206 }, { "content": "/// This type alias will save us some typing, because the Context type is needed often\n\ntype Context<'a> = poise::Context<'a, Data, Error>;\n\n\n\nasync fn event_listener(\n\n _ctx: &serenity::Context,\n\n event: &poise::Event<'_>,\n\n _framework: &poise::Framework<Data, Error>,\n\n _user_data: &Data,\n\n) -> Result<(), Error> {\n\n match event {\n\n poise::Event::Ready { data_about_bot } => {\n\n println!(\"{} is connected!\", data_about_bot.user.name)\n\n }\n\n _ => {}\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n// INFO: poise doesn't yet support sophisticated groups like this\n\n/*\n\n// Sets multiple prefixes for a group.\n\n// This requires us to call commands in this group\n\n// via `~emoji` (or `~em`) instead of just `~`.\n\n#[prefixes(\"emoji\", \"em\")]\n\n// Set a description to appear if a user wants to display a 
single group\n\n// e.g. via help using the group-name or one of its prefixes.\n\n#[description = \"A group with commands providing an emoji as response.\"]\n\n// Summary only appears when listing multiple groups.\n\n#[summary = \"Do emoji fun!\"]\n\n// Sets a command that will be executed if only a group-prefix was passed.\n", "file_path": "examples/serenity_example_port/main.rs", "rank": 78, "score": 56368.95442923225 }, { "content": "#[doc(hidden)]\n\npub trait _GetGenerics {\n\n type U;\n\n type E;\n\n}\n\nimpl<U, E> _GetGenerics for Context<'_, U, E> {\n\n type U = U;\n\n type E = E;\n\n}\n\n\n\n/// A reference to either a prefix or application command.\n\npub enum CommandRef<'a, U, E> {\n\n /// Prefix command\n\n Prefix(&'a crate::PrefixCommand<U, E>),\n\n /// Application command\n\n Application(crate::ApplicationCommand<'a, U, E>),\n\n}\n\n\n\nimpl<U, E> Clone for CommandRef<'_, U, E> {\n\n fn clone(&self) -> Self {\n\n match *self {\n", "file_path": "src/structs/mod.rs", "rank": 79, "score": 48804.15426967411 }, { "content": "type BoxErrorSendSync = Box<dyn std::error::Error + Send + Sync>;\n\n\n\n/// An error handler that prints the error into the console and also into the Discord chat.\n\n/// If the user invoked the command wrong\n\n/// (i.e. an [`crate::ArgumentParseError`]), the command help is displayed and the user is directed\n\n/// to the help menu.\n\npub async fn on_error<D>(e: BoxErrorSendSync, ctx: crate::ErrorContext<'_, D, BoxErrorSendSync>) {\n\n println!(\"Encountered an error: {:?}\", e);\n\n match ctx {\n\n crate::ErrorContext::Command(ctx) => {\n\n let user_error_msg = if let Some(crate::ArgumentParseError(e)) = e.downcast_ref() {\n\n // If we caught an argument parse error, give a helpful error message with the\n\n // command explanation if available\n\n\n\n let mut usage = \"Please check the help menu for usage information\".into();\n\n if let crate::CommandErrorContext::Prefix(ctx) = &ctx {\n\n if let Some(multiline_help) = &ctx.command.options.multiline_help {\n\n usage = multiline_help();\n\n }\n\n }\n", "file_path": "src/builtins.rs", "rank": 80, "score": 46493.76658543285 }, { "content": "fn _dispatch_error_no_macro<'fut>(\n\n ctx: &'fut mut Context,\n\n msg: &'fut Message,\n\n error: DispatchError,\n\n) -> BoxFuture<'fut, ()> {\n\n async move {\n\n if let DispatchError::Ratelimited(info) = error {\n\n if info.is_first_try {\n\n let _ = msg\n\n .channel_id\n\n .say(&ctx.http, &format!(\"Try this again in {} seconds.\", info.as_secs()))\n\n .await;\n\n }\n\n };\n\n }\n\n .boxed()\n\n}*/\n\n\n\n#[tokio::main]\n\nasync fn main() {\n", "file_path": "examples/serenity_example_port/main.rs", "rank": 81, "score": 43948.742443621915 }, { "content": "// Types used by all command functions\n\ntype Error = Box<dyn std::error::Error + Send + Sync>;\n", "file_path": "examples/framework_usage/main.rs", "rank": 82, "score": 37184.593337054655 }, { "content": "/// This Error type is used throughout all commands and callbacks\n\ntype Error = Box<dyn std::error::Error + Send + Sync>;\n\n\n", "file_path": "examples/serenity_example_port/main.rs", "rank": 83, "score": 36399.14113017206 }, { "content": "use crate::serenity_prelude as serenity;\n\nuse crate::BoxFuture;\n\n\n\n/// A builder to configure and run a framework.\n\n///\n\n/// If one of the following required values is missing, the builder will panic on start:\n\n/// - [`Self::token`]\n\n/// - [`Self::user_data_setup`]\n\n/// - [`Self::options`]\n\n///\n\n/// Before starting, the builder will make an HTTP request to 
retrieve the bot's application ID and\n\n/// owner.\n\npub struct FrameworkBuilder<U, E> {\n\n user_data_setup: Option<\n\n Box<\n\n dyn Send\n\n + Sync\n\n + for<'a> FnOnce(\n\n &'a serenity::Context,\n\n &'a serenity::Ready,\n", "file_path": "src/framework/builder.rs", "rank": 84, "score": 33847.231618233716 }, { "content": " E: Send + 'static,\n\n {\n\n // Aggregate required values or panic if not provided\n\n let token = self.token.expect(\"No token was provided to the framework\");\n\n let user_data_setup = self\n\n .user_data_setup\n\n .expect(\"No user data setup function was provided to the framework\");\n\n let mut options = self.options.expect(\"No framework options provided\");\n\n\n\n // Retrieve application info via HTTP\n\n let application_info = serenity::Http::new_with_token(&token)\n\n .get_current_application_info()\n\n .await?;\n\n\n\n // Build framework options by concatenating user-set options with commands and owner\n\n for (command, meta_builder) in self.commands {\n\n options.command(command, meta_builder);\n\n }\n\n options.owners.insert(application_info.owner.id);\n\n\n", "file_path": "src/framework/builder.rs", "rank": 85, "score": 33840.58096458954 }, { "content": " /// # ;\n\n /// ```\n\n pub fn commands(\n\n mut self,\n\n commands: impl IntoIterator<Item = fn() -> crate::CommandDefinition<U, E>> + 'static,\n\n ) -> Self {\n\n // Can't use Vec::extend() due to ??? compile errors\n\n for command in commands {\n\n let definition = (command)();\n\n self.commands.push((definition, Box::new(|f| f)));\n\n }\n\n self\n\n }\n\n\n\n /// Build the framework with the specified configuration.\n\n ///\n\n /// For more information, see [`FrameworkBuilder`]\n\n pub async fn build(self) -> Result<std::sync::Arc<crate::Framework<U, E>>, serenity::Error>\n\n where\n\n U: Send + Sync + 'static,\n", "file_path": "src/framework/builder.rs", "rank": 86, "score": 33840.47102173044 }, { "content": " /// [`FrameworkBuilder`]; use [`FrameworkBuilder::token`] to supply a token.\n\n pub fn client_settings(\n\n mut self,\n\n f: impl FnOnce(serenity::ClientBuilder) -> serenity::ClientBuilder + 'static,\n\n ) -> Self {\n\n self.client_settings = Some(Box::new(f));\n\n self\n\n }\n\n\n\n /// The bot token\n\n pub fn token(mut self, token: impl Into<String>) -> Self {\n\n self.token = Some(token.into());\n\n self\n\n }\n\n\n\n /// Add a new command to the framework\n\n pub fn command(\n\n mut self,\n\n definition: crate::CommandDefinition<U, E>,\n\n meta_builder: impl FnOnce(&mut crate::CommandBuilder<U, E>) -> &mut crate::CommandBuilder<U, E>\n", "file_path": "src/framework/builder.rs", "rank": 87, "score": 33840.41659576042 }, { "content": " &'a crate::Framework<U, E>,\n\n ) -> BoxFuture<'a, Result<U, E>>,\n\n >,\n\n >,\n\n options: Option<crate::FrameworkOptions<U, E>>,\n\n client_settings: Option<Box<dyn FnOnce(serenity::ClientBuilder) -> serenity::ClientBuilder>>,\n\n token: Option<String>,\n\n intents: Option<serenity::GatewayIntents>,\n\n commands: Vec<(\n\n crate::CommandDefinition<U, E>,\n\n Box<dyn FnOnce(&mut crate::CommandBuilder<U, E>) -> &mut crate::CommandBuilder<U, E>>,\n\n )>,\n\n}\n\n\n\nimpl<U, E> Default for FrameworkBuilder<U, E> {\n\n fn default() -> Self {\n\n Self {\n\n user_data_setup: Default::default(),\n\n options: Default::default(),\n\n client_settings: Default::default(),\n", "file_path": "src/framework/builder.rs", "rank": 88, "score": 33839.272220878505 }, { "content": " + for<'a> FnOnce(\n\n &'a serenity::Context,\n\n &'a serenity::Ready,\n\n &'a 
crate::Framework<U, E>,\n\n ) -> BoxFuture<'a, Result<U, E>>,\n\n {\n\n self.user_data_setup = Some(Box::new(user_data_setup) as _);\n\n self\n\n }\n\n\n\n /// Configure framework options\n\n pub fn options(mut self, options: crate::FrameworkOptions<U, E>) -> Self {\n\n self.options = Some(options);\n\n self\n\n }\n\n\n\n /// Configure serenity client settings, like gateway intents, by supplying a custom\n\n /// client builder\n\n ///\n\n /// Note: the builder's token will be overridden by the\n", "file_path": "src/framework/builder.rs", "rank": 89, "score": 33836.33373221572 }, { "content": " token: Default::default(),\n\n intents: Default::default(),\n\n commands: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl<U, E> FrameworkBuilder<U, E> {\n\n /// Set a prefix for commands\n\n #[deprecated = \"Please set the prefix via FrameworkOptions::prefix_options::prefix\"]\n\n pub fn prefix(self, _prefix: impl Into<String>) -> Self {\n\n panic!(\"Please set the prefix via FrameworkOptions::prefix_options::prefix\");\n\n }\n\n\n\n /// Set a callback to be invoked to create the user data instance\n\n pub fn user_data_setup<F>(mut self, user_data_setup: F) -> Self\n\n where\n\n F: Send\n\n + Sync\n\n + 'static\n", "file_path": "src/framework/builder.rs", "rank": 90, "score": 33834.39388112258 }, { "content": " + 'static,\n\n ) -> Self {\n\n self.commands.push((definition, Box::new(meta_builder)));\n\n self\n\n }\n\n\n\n /// Add multiple new commands to the framework. Shorthand for calling [`Self::command`] multiple\n\n /// times with the builder left to defaults, i.e. no command category or subcommands\n\n ///\n\n /// ```rust\n\n /// # type Error = Box<dyn std::error::Error + Send + Sync>;\n\n /// # #[poise::command(prefix_command)]\n\n /// # async fn command1(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> { Ok(()) }\n\n /// # #[poise::command(prefix_command)]\n\n /// # async fn command2(ctx: poise::Context<'_, (), Error>) -> Result<(), Error> { Ok(()) }\n\n ///\n\n /// poise::Framework::build()\n\n /// // framework setup...\n\n /// .commands([command1, command2])\n\n /// // framework startup...\n", "file_path": "src/framework/builder.rs", "rank": 91, "score": 33833.20547851769 }, { "content": "\n\n /// Start the framework with the specified configuration.\n\n ///\n\n /// [`FrameworkBuilder::run`] is just a shorthand that calls [`FrameworkBuilder::build`] and\n\n /// starts the returned framework\n\n pub async fn run(self) -> Result<(), serenity::Error>\n\n where\n\n U: Send + Sync + 'static,\n\n E: Send + 'static,\n\n {\n\n self.build().await?.start().await\n\n }\n\n}\n", "file_path": "src/framework/builder.rs", "rank": 92, "score": 33832.1463859034 }, { "content": " // Create serenity client\n\n let mut client_builder = serenity::ClientBuilder::new(token)\n\n .application_id(application_info.id.0)\n\n .intents(\n\n self.intents\n\n .unwrap_or_else(serenity::GatewayIntents::non_privileged),\n\n );\n\n if let Some(client_settings) = self.client_settings {\n\n client_builder = client_settings(client_builder);\n\n }\n\n\n\n // Create framework with specified settings\n\n crate::Framework::new(\n\n serenity::ApplicationId(application_info.id.0),\n\n client_builder,\n\n user_data_setup,\n\n options,\n\n )\n\n .await\n\n }\n", "file_path": "src/framework/builder.rs", "rank": 93, "score": 33830.50121467285 }, { "content": " id &= !0 >> 42;\n\n\n\n // Calculate Discord's datetime representation (millis since Discord epoch) and\n\n // insert those bits into the ID\n\n id |= 
((edited_timestamp.timestamp_millis() - 1420070400000) as u64) << 22;\n\n }\n\n id\n\n }\n\n }\n\n }\n\n\n\n /// Returns a reference to the command.\n\n pub fn command(&self) -> Option<crate::CommandRef<'a, U, E>> {\n\n Some(match self {\n\n Self::Prefix(x) => crate::CommandRef::Prefix(x.command?),\n\n Self::Application(x) => crate::CommandRef::Application(x.command),\n\n })\n\n }\n\n\n\n /// Returns the prefix this command was invoked with, or a slash (`/`), if this is an\n\n /// application command.\n\n pub fn prefix(&self) -> &'a str {\n\n match self {\n\n Context::Prefix(ctx) => ctx.prefix,\n\n Context::Application(_) => \"/\",\n\n }\n\n }\n\n}\n", "file_path": "src/structs/context.rs", "rank": 94, "score": 33641.237849563404 }, { "content": " }\n\n\n\n /// See [`Self::defer()`]\n\n ///\n\n /// This will make the response ephemeral; to make it public, use [`Self::defer()`].\n\n pub async fn defer_ephemeral(self) -> Result<(), serenity::Error> {\n\n if let Self::Application(ctx) = self {\n\n ctx.defer_response(true).await?;\n\n }\n\n Ok(())\n\n }\n\n\n\n /// If this is an application command, [`Self::defer()`] is called\n\n ///\n\n /// If this is a prefix command, a typing broadcast is started until the return value is\n\n /// dropped.\n\n // #[must_use = \"The typing broadcast will only persist if you store it\"] // currently doesn't work\n\n pub async fn defer_or_broadcast(self) -> Result<Option<serenity::Typing>, serenity::Error> {\n\n Ok(match self {\n\n Self::Application(ctx) => {\n", "file_path": "src/structs/context.rs", "rank": 95, "score": 33637.97285077865 }, { "content": " crate::send_reply(self, builder).await\n\n }\n\n}\n\n\n\nimpl<'a, U, E> Context<'a, U, E> {\n\n /// Return the stored [`serenity::Context`] within the underlying context type.\n\n pub fn discord(&self) -> &'a serenity::Context {\n\n match self {\n\n Self::Application(ctx) => ctx.discord,\n\n Self::Prefix(ctx) => ctx.discord,\n\n }\n\n }\n\n\n\n /// Return a read-only reference to [`crate::Framework`].\n\n pub fn framework(&self) -> &'a crate::Framework<U, E> {\n\n match self {\n\n Self::Application(ctx) => ctx.framework,\n\n Self::Prefix(ctx) => ctx.framework,\n\n }\n\n }\n", "file_path": "src/structs/context.rs", "rank": 96, "score": 33637.9239110827 }, { "content": " ctx.defer_response(false).await?;\n\n None\n\n }\n\n Self::Prefix(ctx) => Some(ctx.msg.channel_id.start_typing(&ctx.discord.http)?),\n\n })\n\n }\n\n\n\n /// Shorthand of [`crate::say_reply`]\n\n pub async fn say(\n\n self,\n\n text: impl Into<String>,\n\n ) -> Result<Option<crate::ReplyHandle<'a>>, serenity::Error> {\n\n crate::say_reply(self, text).await\n\n }\n\n\n\n /// Shorthand of [`crate::send_reply`]\n\n pub async fn send<'b>(\n\n self,\n\n builder: impl for<'c> FnOnce(&'c mut crate::CreateReply<'b>) -> &'c mut crate::CreateReply<'b>,\n\n ) -> Result<Option<crate::ReplyHandle<'a>>, serenity::Error> {\n", "file_path": "src/structs/context.rs", "rank": 97, "score": 33636.75992299131 }, { "content": "\n\n /// Return a reference to your custom user data\n\n pub fn data(&self) -> &'a U {\n\n match self {\n\n Self::Application(ctx) => ctx.data,\n\n Self::Prefix(ctx) => ctx.data,\n\n }\n\n }\n\n\n\n /// Return the channel ID of this context\n\n pub fn channel_id(&self) -> serenity::ChannelId {\n\n match self {\n\n Self::Application(ctx) => ctx.interaction.channel_id(),\n\n Self::Prefix(ctx) => ctx.msg.channel_id,\n\n }\n\n }\n\n\n\n /// Returns the guild ID of this context, if we are inside a guild\n\n pub fn guild_id(&self) -> 
Option<serenity::GuildId> {\n\n match self {\n", "file_path": "src/structs/context.rs", "rank": 98, "score": 33636.5431249854 }, { "content": "\n\n /// Get the author of the command message or application command.\n\n pub fn author(&self) -> &'a serenity::User {\n\n match self {\n\n Self::Application(ctx) => ctx.interaction.user(),\n\n Self::Prefix(ctx) => &ctx.msg.author,\n\n }\n\n }\n\n\n\n /// Return a ID that uniquely identifies this command invocation.\n\n pub fn id(&self) -> u64 {\n\n match self {\n\n Self::Application(ctx) => ctx.interaction.id().0,\n\n Self::Prefix(ctx) => {\n\n let mut id = ctx.msg.id.0;\n\n if let Some(edited_timestamp) = ctx.msg.edited_timestamp {\n\n // We replace the 42 datetime bits with msg.timestamp_edited so that the ID is\n\n // unique even after edits\n\n\n\n // Set existing datetime bits to zero\n", "file_path": "src/structs/context.rs", "rank": 99, "score": 33636.317380784414 } ]
Rust
src/tokenizer.rs
quail-lang/quail
696b6f11b65776843320468fdad0acc9dfad1312
use std::fmt;
use std::collections::HashMap;

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Token {
    Ident(Loc, String),
    Hole(Loc, Option<String>, Option<String>),
    Lambda(Loc),
    Let(Loc),
    Def(Loc),
    Equals(Loc),
    In(Loc),
    Arrow(Loc),
    FatArrow(Loc),
    LeftParen(Loc),
    RightParen(Loc),
    LeftCurly(Loc),
    RightCurly(Loc),
    Match(Loc),
    With(Loc),
    Import(Loc),
    Colon(Loc),
    Dollar(Loc),
    As(Loc),
    Str(Loc, String),
    Nat(Loc, usize),
}

pub struct Tokenizer {
    input: Vec<char>,
    cur: usize,
    loc: Loc,
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Loc {
    pub path: Option<String>,
    pub line: usize,
    pub col: usize,
}

type TokenizeErr = String;

impl Token {
    pub fn name(&self) -> &'static str {
        use Token::*;
        match self {
            Ident(_loc, _x) => "IDENT",
            Hole(_loc, _x, _contents) => "HOLE",
            Lambda(_loc) => "LAMBDA",
            Let(_loc) => "LET",
            Def(_loc) => "DEF",
            Equals(_loc) => "EQUALS",
            In(_loc) => "IN",
            Arrow(_loc) => "ARROW",
            FatArrow(_loc) => "FATARROW",
            LeftParen(_loc) => "LEFTPAREN",
            RightParen(_loc) => "RIGHTPAREN",
            LeftCurly(_loc) => "LEFTCURLY",
            RightCurly(_loc) => "RIGHTCURLY",
            Match(_loc) => "MATCH",
            With(_loc) => "WITH",
            Import(_loc) => "IMPORT",
            Colon(_loc) => "COLON",
            Dollar(_loc) => "DOLLAR",
            As(_loc) => "AS",
            Str(_loc, _val) => "STR",
            Nat(_loc, _val) => "NAT",
        }
    }

    pub fn show(&self) -> String {
        use Token::*;
        match self {
            Ident(_loc, x) => format!("IDENT({})", x),
            Hole(_loc, x, _contents) => format!("HOLE({:?}, ...)", x),
            Lambda(_loc) => format!("LAMBDA"),
            Let(_loc) => format!("LET"),
            Def(_loc) => format!("DEF"),
            Equals(_loc) => format!("EQUALS"),
            In(_loc) => format!("IN"),
            Arrow(_loc) => format!("ARROW"),
            FatArrow(_loc) => format!("FATARROW"),
            LeftParen(_loc) => format!("LEFTPAREN"),
            RightParen(_loc) => format!("RIGHTPAREN"),
            LeftCurly(_loc) => format!("LEFTCURLY"),
            RightCurly(_loc) => format!("RIGHTCURLY"),
            Match(_loc) => format!("MATCH"),
            With(_loc) => format!("WITH"),
            Import(_loc) => format!("IMPORT"),
            Colon(_loc) => format!("COLON"),
            Dollar(_loc) => format!("DOLLAR"),
            As(_loc) => format!("AS"),
            Str(_loc, val) => format!("STR({})", val),
            Nat(_loc, val) => format!("NAT({})", val),
        }
    }

    pub fn loc(&self) -> &Loc {
        use Token::*;
        match self {
            Ident(loc, _x) => loc,
            Hole(loc, _x, _contents) => loc,
            Lambda(loc) => loc,
            Let(loc) => loc,
            Def(loc) => loc,
            Equals(loc) => loc,
            In(loc) => loc,
            Arrow(loc) => loc,
            FatArrow(loc) => loc,
            LeftParen(loc) => loc,
            RightParen(loc) => loc,
            LeftCurly(loc) => loc,
            RightCurly(loc) => loc,
            Match(loc) => loc,
            With(loc) => loc,
            Import(loc) => loc,
            Colon(loc) => loc,
            Dollar(loc) => loc,
            As(loc) => loc,
            Str(loc, _val) => loc,
            Nat(loc, _val) => loc,
        }
    }
}

impl Tokenizer {
    pub fn new(source: Option<String>, input: &str) -> Self {
        Tokenizer {
            input: input.chars().collect(),
            cur: 0,
            loc: Loc::new(source),
        }
    }

    pub fn tokenize(&mut self) -> Result<Vec<Token>, TokenizeErr> {
        let mut tokens = Vec::new();
        while let Some(token) = self.token()? {
            tokens.push(token);
        }
        Ok(tokens)
    }

    fn tokenize_lines(&mut self) -> Result<Vec<Vec<Token>>, TokenizeErr> {
        let toks: Vec<Token> = self.tokenize()?;
        let mut lines: Vec<Vec<Token>> = Vec::new();
        let mut cur_line: Vec<Token> = Vec::new();
        let mut line_no = 0;
        for tok in toks {
            while tok.loc().line > line_no {
                line_no += 1;
                lines.push(cur_line);
                cur_line = Vec::new();
            }
            cur_line.push(tok);
        }
        lines.push(cur_line);
        Ok(lines)
    }

    fn double_character_token(&mut self) -> Option<Token> {
        let head_char = self.peek()?;
        let next_char = self.peek_ahead(1)?;
        let chars = format!("{}{}", head_char, next_char);

        macro_rules! double_char_token {
            ($characters:literal, $tok:ident) => {
                if chars == $characters {
                    self.consume();
                    self.consume();
                    return Some(Token::$tok(self.loc.clone()));
                }
            }
        }

        double_char_token!("->", Arrow);
        double_char_token!("=>", FatArrow);
        return None;
    }

    fn single_character_token(&mut self) -> Option<Token> {
        let head_char = self.peek()?;

        macro_rules! single_char_token {
            ($character:literal, $tok:ident) => {
                if head_char == $character {
                    self.consume();
                    return Some(Token::$tok(self.loc.clone()));
                }
            }
        }

        single_char_token!('(', LeftParen);
        single_char_token!(')', RightParen);
        single_char_token!('{', LeftCurly);
        single_char_token!('}', RightCurly);
        single_char_token!(':', Colon);
        single_char_token!('$', Dollar);
        single_char_token!('=', Equals);
        return None;
    }

    fn token(&mut self) -> Result<Option<Token>, TokenizeErr> {
        while let Some(head_char) = self.peek() {
            if head_char.is_ascii_whitespace() {
                self.consume();
            } else if head_char == '#' {
                self.consume_comment();
            } else {
                break;
            }
        }

        match self.peek() {
            Some(head_char) => {
                if let Some(tok) = self.double_character_token() {
                    Ok(Some(tok))
                } else if let Some(tok) = self.single_character_token() {
                    Ok(Some(tok))
                } else if head_char.is_ascii_alphabetic() {
                    let token = self.tokenize_identifier()?;
                    Ok(Some(token))
                } else if head_char == '?' {
                    Ok(Some(self.tokenize_hole()?))
                } else if head_char == '"' {
                    Ok(Some(self.tokenize_str()?))
                } else if head_char.is_ascii_digit() {
                    Ok(Some(self.tokenize_nat()?))
                } else {
                    Err(format!("Unexpected character while parsing: {}", head_char))
                }
            },
            None => Ok(None),
        }
    }

    fn tokenize_hole(&mut self) -> Result<Token, TokenizeErr> {
        let loc = self.loc.clone();
        assert_eq!(self.consume(), Some('?'));
        let peek_char : char;
        let name: Option<String>;
        match self.peek() {
            None => return Ok(Token::Hole(loc, None, None)),
            Some(chr) => peek_char = chr,
        }

        if peek_char.is_ascii_alphabetic() {
            if let Token::Ident(_, token_name) = self.tokenize_identifier()? {
                name = Some(token_name);
            } else {
                unreachable!();
            }
        } else {
            name = None;
        }

        if let Some('{') = self.peek() {
            let mut level = 1;
            let mut contents = String::new();
            self.consume();
            while let Some(peek_char) = self.consume() {
                if peek_char == '{' {
                    level += 1;
                } else if peek_char == '}' {
                    level -= 1;
                }

                if level == 0 {
                    break;
                } else {
                    contents.push(peek_char);
                }
            }
            if level != 0 {
                Err("Mismatch curly braces.".to_string())
            } else {
                Ok(Token::Hole(loc, name, Some(contents)))
            }
        } else {
            Ok(Token::Hole(loc, name, None))
        }
    }

    fn tokenize_str(&mut self) -> Result<Token, TokenizeErr> {
        #![allow(irrefutable_let_patterns)]
        let loc = self.loc.clone();
        assert_eq!(self.consume(), Some('"'));
        let mut buffer = String::new();
        while let consume_char = self.consume() {
            match consume_char {
                None => return Err("Expected \" but found end of file. Good luck!".to_string()),
                Some(chr) => {
                    if chr == '"' {
                        break;
                    } else {
                        buffer.push(chr);
                    }
                },
            }
        }
        Ok(Token::Str(loc, buffer))
    }

    fn tokenize_nat(&mut self) -> Result<Token, TokenizeErr> {
        let loc = self.loc.clone();
        let mut buffer = String::new();
        match self.peek() {
            None => return Err("Expected digit but found end of file. Good luck!".to_owned()),
            Some(ch) => {
                if !ch.is_ascii_digit() {
                    return Err(format!("Expected digit but found {}.", ch));
                }
                while let Some(ch) = self.peek() {
                    if ch.is_ascii_digit() {
                        self.consume();
                        buffer.push(ch);
                    } else {
                        break;
                    }
                }
            }
        }
        let n = buffer.parse::<usize>().unwrap();
        Ok(Token::Nat(loc, n))
    }

    fn tokenize_identifier(&mut self) -> Result<Token, TokenizeErr> {
        let keywords: HashMap<String, Token> = vec![
            ("fun".to_string(), Token::Lambda(self.loc.clone())),
            ("let".to_string(), Token::Let(self.loc.clone())),
            ("def".to_string(), Token::Def(self.loc.clone())),
            ("in".to_string(), Token::In(self.loc.clone())),
            ("match".to_string(), Token::Match(self.loc.clone())),
            ("with".to_string(), Token::With(self.loc.clone())),
            ("import".to_string(), Token::Import(self.loc.clone())),
            ("as".to_string(), Token::As(self.loc.clone())),
        ].iter().cloned().collect();

        let loc = self.loc.clone();
        let mut first_char = '\0';
        match self.peek() {
            Some(chr) => {
                self.consume();
                first_char = chr;
            },
            None => assert!(first_char.is_ascii_alphabetic()),
        }

        let mut token_string = String::new();
        token_string.push(first_char);

        while let Some(peek_char) = self.peek() {
            if peek_char.is_ascii_alphabetic() || peek_char == '_' {
                self.consume();
                token_string.push(peek_char);
            } else {
                break;
            }
        }

        while let Some(peek_char) = self.peek() {
            if peek_char == '\'' {
                self.consume();
                token_string.push(peek_char);
            } else {
                break;
            }
        }

        match keywords.get(&token_string) {
            Some(token) => Ok(token.clone()),
            None => Ok(Token::Ident(loc, token_string))
        }
    }

    fn peek(&self) -> Option<char> {
        self.peek_ahead(0)
    }

    fn peek_ahead(&self, k: usize) -> Option<char> {
        match self.input.get(self.cur + k) {
            Some(c) => Some(*c),
            None => None,
        }
    }

    #[allow(dead_code)]
    fn preview(&self, len: usize) -> String {
        let mut s = String::new();
        for i in 0..len {
            if let Some(ch) = self.peek_ahead(i) {
                s.push(ch);
            } else {
                break;
            }
        }
        s
    }

    fn consume(&mut self) -> Option<char> {
        match self.peek() {
            Some(peek_char) => {
                if peek_char == '\n' {
                    self.loc.next_line();
                } else {
                    self.loc.next_col();
                }
                self.cur += 1;
                Some(peek_char)
            },
            None => None,
        }
    }

    fn consume_comment(&mut self) {
        while let Some(consume_char) = self.consume() {
            if consume_char == '\n' {
                break
            }
        }
    }
}

impl Loc {
    fn new(source: Option<String>) -> Self {
        Loc {
            path: source,
            line: 0,
            col: 0,
        }
    }

    fn next_line(&mut self) {
        self.line += 1;
        self.col = 0;
    }

    fn next_col(&mut self) {
        self.col += 1;
    }
}

impl fmt::Display for Loc {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Line {} col {}", self.line + 1, self.col + 1)?;
        if let Some(path) = &self.path {
            write!(f, " at {}", path)?;
        }
        Ok(())
    }
}

pub fn tokenize(source: Option<String>, input: &str) -> Result<Vec<Token>, TokenizeErr> {
    let mut tokenizer = Tokenizer::new(source, input);
    tokenizer.tokenize()
}

pub fn tokenize_lines(source: Option<String>, input: &str) -> Result<Vec<Vec<Token>>, TokenizeErr> {
    let mut tokenizer = Tokenizer::new(source, input);
    tokenizer.tokenize_lines()
}
use std::fmt; use std::collections::HashMap; #[derive(Debug, PartialEq, Eq, Clone)] pub enum Token { Ident(Loc, String), Hole(Loc, Option<String>, Option<String>), Lambda(Loc), Let(Loc), Def(Loc), Equals(Loc), In(Loc), Arrow(Loc), FatArrow(Loc), LeftParen(Loc), RightParen(Loc), LeftCurly(Loc), RightCurly(Loc), Match(Loc), With(Loc), Import(Loc), Colon(Loc), Dollar(Loc), As(Loc), Str(Loc, String), Nat(Loc, usize), } pub struct Tokenizer { input: Vec<char>, cur: usize, loc: Loc, } #[derive(Debug, PartialEq, Eq, Clone)] pub struct Loc { pub path: Option<String>, pub line: usize, pub col: usize, } type TokenizeErr = String; impl Token { pub fn name(&self) -> &'static str { use Token::*; match self { Ident(_loc, _x) => "IDENT", Hole(_loc, _x, _contents) => "HOLE", Lambda(_loc) => "LAMBDA", Let(_loc) => "LET", Def(_loc) => "DEF", Equals(_loc) => "EQUALS", In(_loc) => "IN", Arrow(_loc) => "ARROW", FatArrow(_loc) => "FATARROW", LeftParen(_loc) => "LEFTPAREN", RightParen(_loc) => "RIGHTPAREN", LeftCurly(_loc) => "LEFTCURLY", RightCurly(_loc) => "RIGHTCURLY", Match(_loc) => "MATCH", With(_loc) => "WITH", Import(_loc) => "IMPORT", Colon(_loc) => "COLON", Dollar(_loc) => "DOLLAR", As(_loc) => "AS", Str(_loc, _val) => "STR", Nat(_loc, _val) => "NAT", } } pub fn show(&self) -> String { use Token::*; match self { Ident(_loc, x) => format!("IDENT({})", x), Hole(_loc, x, _contents) => format!("HOLE({:?}, ...)", x), Lambda(_loc) => format!("LAMBDA"), Let(_loc) => format!("LET"), Def(_loc) => format!("DEF"), Equals(_loc) => format!("EQUALS"), In(_loc) => format!("IN"), Arrow(_loc) => format!("ARROW"), FatArrow(_loc) => format!("FATARROW"), LeftParen(_loc) => format!("LEFTPAREN"), RightParen(_loc) => format!("RIGHTPAREN"), LeftCurly(_loc) => format!("LEFTCURLY"), RightCurly(_loc) => format!("RIGHTCURLY"), Match(_loc) => format!("MATCH"), With(_loc) => format!("WITH"), Import(_loc) => format!("IMPORT"), Colon(_loc) => format!("COLON"), Dollar(_loc) => format!("DOLLAR"), As(_loc) => format!("AS"), Str(_loc, val) => format!("STR({})", val), Nat(_loc, val) => format!("NAT({})", val), } } pub fn loc(&self) -> &Loc { use Token::*; match self { Ident(loc, _x) => loc, Hole(loc, _x, _contents) => loc, Lambda(loc) => loc, Let(loc) => loc, Def(loc) => loc, Equals(loc) => loc, In(loc) =>
ng(), Token::With(self.loc.clone())), ("import".to_string(), Token::Import(self.loc.clone())), ("as".to_string(), Token::As(self.loc.clone())), ].iter().cloned().collect(); let loc = self.loc.clone(); let mut first_char = '\0'; match self.peek() { Some(chr) => { self.consume(); first_char = chr; }, None => assert!(first_char.is_ascii_alphabetic()), } let mut token_string = String::new(); token_string.push(first_char); while let Some(peek_char) = self.peek() { if peek_char.is_ascii_alphabetic() || peek_char == '_' { self.consume(); token_string.push(peek_char); } else { break; } } while let Some(peek_char) = self.peek() { if peek_char == '\'' { self.consume(); token_string.push(peek_char); } else { break; } } match keywords.get(&token_string) { Some(token) => Ok(token.clone()), None => Ok(Token::Ident(loc, token_string)) } } fn peek(&self) -> Option<char> { self.peek_ahead(0) } fn peek_ahead(&self, k: usize) -> Option<char> { match self.input.get(self.cur + k) { Some(c) => Some(*c), None => None, } } #[allow(dead_code)] fn preview(&self, len: usize) -> String { let mut s = String::new(); for i in 0..len { if let Some(ch) = self.peek_ahead(i) { s.push(ch); } else { break; } } s } fn consume(&mut self) -> Option<char> { match self.peek() { Some(peek_char) => { if peek_char == '\n' { self.loc.next_line(); } else { self.loc.next_col(); } self.cur += 1; Some(peek_char) }, None => None, } } fn consume_comment(&mut self) { while let Some(consume_char) = self.consume() { if consume_char == '\n' { break } } } } impl Loc { fn new(source: Option<String>) -> Self { Loc { path: source, line: 0, col: 0, } } fn next_line(&mut self) { self.line += 1; self.col = 0; } fn next_col(&mut self) { self.col += 1; } } impl fmt::Display for Loc { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Line {} col {}", self.line + 1, self.col + 1)?; if let Some(path) = &self.path { write!(f, " at {}", path)?; } Ok(()) } } pub fn tokenize(source: Option<String>, input: &str) -> Result<Vec<Token>, TokenizeErr> { let mut tokenizer = Tokenizer::new(source, input); tokenizer.tokenize() } pub fn tokenize_lines(source: Option<String>, input: &str) -> Result<Vec<Vec<Token>>, TokenizeErr> { let mut tokenizer = Tokenizer::new(source, input); tokenizer.tokenize_lines() }
loc, Arrow(loc) => loc, FatArrow(loc) => loc, LeftParen(loc) => loc, RightParen(loc) => loc, LeftCurly(loc) => loc, RightCurly(loc) => loc, Match(loc) => loc, With(loc) => loc, Import(loc) => loc, Colon(loc) => loc, Dollar(loc) => loc, As(loc) => loc, Str(loc, _val) => loc, Nat(loc, _val) => loc, } } } impl Tokenizer { pub fn new(source: Option<String>, input: &str) -> Self { Tokenizer { input: input.chars().collect(), cur: 0, loc: Loc::new(source), } } pub fn tokenize(&mut self) -> Result<Vec<Token>, TokenizeErr> { let mut tokens = Vec::new(); while let Some(token) = self.token()? { tokens.push(token); } Ok(tokens) } fn tokenize_lines(&mut self) -> Result<Vec<Vec<Token>>, TokenizeErr> { let toks: Vec<Token> = self.tokenize()?; let mut lines: Vec<Vec<Token>> = Vec::new(); let mut cur_line: Vec<Token> = Vec::new(); let mut line_no = 0; for tok in toks { while tok.loc().line > line_no { line_no += 1; lines.push(cur_line); cur_line = Vec::new(); } cur_line.push(tok); } lines.push(cur_line); Ok(lines) } fn double_character_token(&mut self) -> Option<Token> { let head_char = self.peek()?; let next_char = self.peek_ahead(1)?; let chars = format!("{}{}", head_char, next_char); macro_rules! double_char_token { ($characters:literal, $tok:ident) => { if chars == $characters { self.consume(); self.consume(); return Some(Token::$tok(self.loc.clone())); } } } double_char_token!("->", Arrow); double_char_token!("=>", FatArrow); return None; } fn single_character_token(&mut self) -> Option<Token> { let head_char = self.peek()?; macro_rules! single_char_token { ($character:literal, $tok:ident) => { if head_char == $character { self.consume(); return Some(Token::$tok(self.loc.clone())); } } } single_char_token!('(', LeftParen); single_char_token!(')', RightParen); single_char_token!('{', LeftCurly); single_char_token!('}', RightCurly); single_char_token!(':', Colon); single_char_token!('$', Dollar); single_char_token!('=', Equals); return None; } fn token(&mut self) -> Result<Option<Token>, TokenizeErr> { while let Some(head_char) = self.peek() { if head_char.is_ascii_whitespace() { self.consume(); } else if head_char == '#' { self.consume_comment(); } else { break; } } match self.peek() { Some(head_char) => { if let Some(tok) = self.double_character_token() { Ok(Some(tok)) } else if let Some(tok) = self.single_character_token() { Ok(Some(tok)) } else if head_char.is_ascii_alphabetic() { let token = self.tokenize_identifier()?; Ok(Some(token)) } else if head_char == '?' { Ok(Some(self.tokenize_hole()?)) } else if head_char == '"' { Ok(Some(self.tokenize_str()?)) } else if head_char.is_ascii_digit() { Ok(Some(self.tokenize_nat()?)) } else { Err(format!("Unexpected character while parsing: {}", head_char)) } }, None => Ok(None), } } fn tokenize_hole(&mut self) -> Result<Token, TokenizeErr> { let loc = self.loc.clone(); assert_eq!(self.consume(), Some('?')); let peek_char : char; let name: Option<String>; match self.peek() { None => return Ok(Token::Hole(loc, None, None)), Some(chr) => peek_char = chr, } if peek_char.is_ascii_alphabetic() { if let Token::Ident(_, token_name) = self.tokenize_identifier()? 
{ name = Some(token_name); } else { unreachable!(); } } else { name = None; } if let Some('{') = self.peek() { let mut level = 1; let mut contents = String::new(); self.consume(); while let Some(peek_char) = self.consume() { if peek_char == '{' { level += 1; } else if peek_char == '}' { level -= 1; } if level == 0 { break; } else { contents.push(peek_char); } } if level != 0 { Err("Mismatch curly braces.".to_string()) } else { Ok(Token::Hole(loc, name, Some(contents))) } } else { Ok(Token::Hole(loc, name, None)) } } fn tokenize_str(&mut self) -> Result<Token, TokenizeErr> { #![allow(irrefutable_let_patterns)] let loc = self.loc.clone(); assert_eq!(self.consume(), Some('"')); let mut buffer = String::new(); while let consume_char = self.consume() { match consume_char { None => return Err("Expected \" but found end of file. Good luck!".to_string()), Some(chr) => { if chr == '"' { break; } else { buffer.push(chr); } }, } } Ok(Token::Str(loc, buffer)) } fn tokenize_nat(&mut self) -> Result<Token, TokenizeErr> { let loc = self.loc.clone(); let mut buffer = String::new(); match self.peek() { None => return Err("Expected digit but found end of file. Good luck!".to_owned()), Some(ch) => { if !ch.is_ascii_digit() { return Err(format!("Expected digit but found {}.", ch)); } while let Some(ch) = self.peek() { if ch.is_ascii_digit() { self.consume(); buffer.push(ch); } else { break; } } } } let n = buffer.parse::<usize>().unwrap(); Ok(Token::Nat(loc, n)) } fn tokenize_identifier(&mut self) -> Result<Token, TokenizeErr> { let keywords: HashMap<String, Token> = vec![ ("fun".to_string(), Token::Lambda(self.loc.clone())), ("let".to_string(), Token::Let(self.loc.clone())), ("def".to_string(), Token::Def(self.loc.clone())), ("in".to_string(), Token::In(self.loc.clone())), ("match".to_string(), Token::Match(self.loc.clone())), ("with".to_stri
random
[ { "content": "pub fn parse_import(source: Option<String>, input: &str) -> Result<Import, ParseErr> {\n\n let mut toker = Tokenizer::new(source, input);\n\n let tokens = toker.tokenize()?;\n\n\n\n let mut parser = Parser::new(tokens);\n\n parser.parse_import()\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 2, "score": 232842.88736517314 }, { "content": "pub fn parse_def(source: Option<String>, input: &str) -> Result<Def, ParseErr> {\n\n let mut toker = Tokenizer::new(source, input);\n\n let tokens = toker.tokenize()?;\n\n\n\n let mut parser = Parser::new(tokens);\n\n parser.parse_def()\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 3, "score": 232711.67263363372 }, { "content": "pub fn parse_type(source: Option<String>, input: &str) -> Result<Type, ParseErr> {\n\n let mut toker = Tokenizer::new(source, input);\n\n let tokens = toker.tokenize()?;\n\n\n\n let mut parser = Parser::new(tokens);\n\n\n\n let term = parser.parse_type()?;\n\n Ok(term)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 4, "score": 231714.10437127724 }, { "content": "pub fn parse_variable(source: Option<String>, input: &str) -> Result<Variable, ParseErr> {\n\n let mut toker = Tokenizer::new(source, input);\n\n let tokens = toker.tokenize()?;\n\n\n\n let mut parser = Parser::new(tokens);\n\n\n\n if let TermNode::Var(var) = parser.parse_variable()?.as_node() {\n\n Ok(var.clone())\n\n } else {\n\n unreachable!()\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 5, "score": 183400.83863836445 }, { "content": "pub fn parse_term(source: Option<String>, input: &str) -> Result<Term, ParseErr> {\n\n let mut toker = Tokenizer::new(source, input);\n\n let tokens = toker.tokenize()?;\n\n\n\n let mut parser = Parser::new(tokens);\n\n\n\n let term = parser.parse_term()?;\n\n Ok(term)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 6, "score": 183400.83863836445 }, { "content": "pub fn parse_module(source: Option<String>, input: &str) -> Result<Module, ParseErr> {\n\n let mut toker = Tokenizer::new(source, input);\n\n let tokens = toker.tokenize()?;\n\n\n\n let mut parser = Parser::new(tokens);\n\n\n\n let module = parser.parse_module()?;\n\n Ok(module)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 7, "score": 183400.83863836445 }, { "content": "pub fn parse_typedef(types_text_lines: &mut dyn Iterator<Item=&str>) -> Option<TypeDef> {\n\n let first_line = types_text_lines.next()?;\n\n let flavor = match first_line.split(\" \").next()? 
{\n\n \"inductive\" => Flavor::Inductive,\n\n \"coinductive\" => Flavor::Coinductive,\n\n _ => panic!(\"Illegal flavor in type declaration: {}\", first_line),\n\n };\n\n\n\n let mut ctors: Vec<(String, Vec<Type>)> = Vec::new();\n\n\n\n while let Some(line) = types_text_lines.next() {\n\n let line = line.trim();\n\n if line == \"\" {\n\n break;\n\n }\n\n\n\n let parts: Vec<&str> = line.split(\" \").collect();\n\n let (ctor_name, arg_type_names) = parts.split_first()?;\n\n\n\n let mut arg_types = Vec::new();\n", "file_path": "src/runtime/builtins.rs", "rank": 9, "score": 168689.3084764578 }, { "content": "pub fn infer_type(t: &TermNode, ctx: Context<Type>, inductive_typedefs: &HashMap<String, TypeDef>) -> Result<Type, TypeErr> {\n\n match t {\n\n TermNode::Var(v) => {\n\n let x = &v.name;\n\n let k = v.layer;\n\n\n\n match ctx.lookup(x, k) {\n\n None => Err(format!(\"Variable {} not found in context\", x)),\n\n Some(typ) => Ok(typ),\n\n }\n\n },\n\n TermNode::Lam(_y, _body) => Err(\"Can't infer type of functions.\".to_string()),\n\n TermNode::App(f, vs) => {\n\n let mut result = infer_type(&f, ctx.clone(), inductive_typedefs)?;\n\n\n\n for v in vs.iter() {\n\n match result.as_ref() {\n\n TypeNode::Atom(_) => return Err(\"Expected function type.\".to_string()),\n\n TypeNode::Arrow(dom, cod) => {\n\n check_type(&v, ctx.clone(), inductive_typedefs, dom.clone())?;\n", "file_path": "src/types/check.rs", "rank": 10, "score": 166909.9998936032 }, { "content": "pub fn check_type(t: &TermNode, ctx: Context<Type>, inductive_typedefs: &HashMap<String, TypeDef>, typ: Type) -> Result<(), TypeErr> {\n\n match t {\n\n TermNode::Var(v) => {\n\n let x = &v.name;\n\n let k = v.layer;\n\n\n\n match ctx.lookup(x, k) {\n\n Some(x_typ) => {\n\n if x_typ == typ {\n\n Ok(())\n\n } else {\n\n Err(format!(\"Term {} does not have type {:?} in context: {:?}\", x, &typ, &ctx))\n\n }\n\n },\n\n None => Err(format!(\"{} does not appear in context.\", x)),\n\n }\n\n },\n\n TermNode::Lam(x, body) => {\n\n match typ.as_ref() {\n\n TypeNode::Atom(atom) => Err(format!(\"Functions need function types, but we got {}\", atom)),\n", "file_path": "src/types/check.rs", "rank": 11, "score": 163235.54616563194 }, { "content": "fn repl_line_import(runtime: &mut Runtime, line: &str) {\n\n let mut import_resolver = resolver::FileImportResolver::new(\"examples\");\n\n\n\n match parser::parse_import(None, line) {\n\n Ok(Import(module_name)) => {\n\n match runtime.import(&module_name, &mut import_resolver, false) {\n\n Ok(()) => println!(\"import successful\"),\n\n Err(msg) => println!(\"{:?}\", msg),\n\n }\n\n },\n\n Err(e) => println!(\"There was an error {:?}\", e),\n\n }\n\n}\n\n\n", "file_path": "src/interpreter.rs", "rank": 12, "score": 157442.5522666286 }, { "content": "fn repl_line_def(runtime: &mut Runtime, line: &str) {\n\n match parser::parse_def(None, &line) {\n\n Ok(definition) => {\n\n match runtime.define(&definition) {\n\n Ok(()) => {\n\n let Def(name, typ, _body) = definition;\n\n println!(\"=> {} : {}\", name, *typ);\n\n },\n\n Err(err) => println!(\"Error: {:?}\", err),\n\n }\n\n },\n\n Err(e) => println!(\"There was an error {:?}\", e),\n\n }\n\n}\n\n\n", "file_path": "src/interpreter.rs", "rank": 13, "score": 157338.06879187815 }, { "content": "pub fn check_type_match(\n\n discriminee: &TermNode,\n\n match_arms: &[MatchArm],\n\n ctx: Context<Type>,\n\n inductive_typedefs: &HashMap<String, TypeDef>,\n\n typ: Type,\n\n) -> Result<(), TypeErr> {\n\n let match_tags: Vec<Tag> = match_arms.iter().map(|MatchArm(pat, 
_arm_term)| pat[0].to_string()).collect();\n\n // TODO: handle bottom type\n\n if let Some(first_ctor_tag) = &match_tags.iter().cloned().collect::<Vec<Tag>>().get(0) {\n\n match lookup_typedef_by_ctor_tag(first_ctor_tag, inductive_typedefs) {\n\n None => Err(format!(\"Unknown ctor {:?}\", first_ctor_tag)),\n\n Some(inductive_typedef) => {\n\n let typedef_tags = inductive_typedef.ctor_tags();\n\n analyze_coverage(&typedef_tags, &match_tags)?;\n\n check_type(\n\n discriminee,\n\n ctx.clone(),\n\n inductive_typedefs,\n\n TypeNode::Atom(inductive_typedef.name.to_string()).into(),\n", "file_path": "src/types/check.rs", "rank": 14, "score": 152302.75622118637 }, { "content": "pub fn builtins_type_ctx() -> Context<Type> {\n\n let mut ctx = Context::empty();\n\n\n\n for primdef in builtin_primdefs() {\n\n ctx = ctx.extend(\n\n &primdef.name.to_string(),\n\n primdef.typ,\n\n );\n\n }\n\n ctx\n\n}", "file_path": "src/runtime/builtins.rs", "rank": 15, "score": 138821.64181603055 }, { "content": "fn ctor_type_from_signature(name: &str, ctor_signature: &[Type]) -> Type {\n\n let mut typ: Type = TypeNode::Atom(name.to_string()).into();\n\n for sig_typ in ctor_signature.iter().rev() {\n\n typ = TypeNode::Arrow(sig_typ.clone(), typ).into();\n\n }\n\n typ\n\n}\n\n\n", "file_path": "src/runtime/builtins.rs", "rank": 16, "score": 138703.32962686935 }, { "content": "pub fn heap_to_string(heap: &Heap) -> String {\n\n let mut s = String::new();\n\n for (k, v) in heap.map.iter() {\n\n s.push_str(&format!(\"{} ~> {:?}\\n\", k, v));\n\n }\n\n s\n\n}\n", "file_path": "src/stg/heap.rs", "rank": 17, "score": 135618.20434462948 }, { "content": "fn repl_line(runtime: &mut Runtime, line: &str) {\n\n let line = line.trim();\n\n\n\n if line.is_empty() {\n\n ()\n\n } else if line.starts_with(\"import\") {\n\n repl_line_import(runtime, line);\n\n } else if line.starts_with(\"def\") {\n\n repl_line_def(runtime, line);\n\n } else {\n\n repl_line_term(runtime, line);\n\n }\n\n}\n\n\n", "file_path": "src/interpreter.rs", "rank": 18, "score": 135144.51124749365 }, { "content": "fn repl_line_term(runtime: &mut Runtime, line: &str) {\n\n match parser::parse_term(None, &line) {\n\n Ok(term) => {\n\n let type_context = runtime.builtin_type_ctx.append(runtime.definition_type_ctx.clone());\n\n match check::infer_type(\n\n &term,\n\n type_context,\n\n &runtime.inductive_typedefs,\n\n ) {\n\n Ok(typ) => {\n\n let value = runtime.eval(&term, Context::empty());\n\n println!(\"=> {:?} : {}\", &value, *typ);\n\n },\n\n Err(type_error) => println!(\"Type Error: {:?}\", &type_error),\n\n }\n\n },\n\n Err(e) => println!(\"There was an error {:?}\", e),\n\n }\n\n}\n", "file_path": "src/interpreter.rs", "rank": 19, "score": 132663.49878272283 }, { "content": "///\n\n/// Returns a list of inductive typedefs which are considered \"built-in\" in Quail.\n\n///\n\npub fn builtin_inductive_typedefs() -> Vec<TypeDef> {\n\n let mut typedefs = vec![];\n\n let mut types_text_lines = include_str!(\"../../assets/types.txt\").lines();\n\n while let Some(typedef) = parse_typedef(&mut types_text_lines) {\n\n typedefs.push(typedef);\n\n }\n\n\n\n typedefs\n\n}\n\n\n", "file_path": "src/runtime/builtins.rs", "rank": 20, "score": 128895.33504738999 }, { "content": "type PrimFn = Rc<dyn Fn(&[usize]) -> usize>;\n\n\n\n#[derive(Clone)]\n\npub struct PrimOp {\n\n pub name: String,\n\n pub arg_count: usize,\n\n pub op: PrimFn,\n\n }\n\n\n\n\n\n impl std::fmt::Debug for PrimOp {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, 
\"PRIMOP({})\", self.name)\n\n }\n\n}\n\n\n\nimpl PrimOp {\n\n pub fn new(name: &str, arg_count: usize, op: Rc<dyn Fn(&[usize]) -> usize>) -> Self {\n\n PrimOp {\n\n name: name.to_string(),\n", "file_path": "src/stg/machine.rs", "rank": 21, "score": 120906.8930868004 }, { "content": "fn lookup_typedef_by_ctor_tag<'a>(ctor_tag: &Tag, inductive_typedefs: &'a HashMap<String, TypeDef>) -> Option<&'a TypeDef> {\n\n for (_typename, inductive_typedef) in inductive_typedefs.iter() {\n\n let ctor_tags: Vec<Tag> = inductive_typedef.ctor_types.keys().cloned().collect();\n\n if ctor_tags.contains(&ctor_tag) {\n\n return Some(inductive_typedef);\n\n }\n\n }\n\n None\n\n}\n", "file_path": "src/types/check.rs", "rank": 22, "score": 119052.52690755694 }, { "content": "pub fn find_matching_arm(tag: &Tag, match_arms: &[MatchArm]) -> MatchArm {\n\n for match_arm in match_arms {\n\n let MatchArm(pat, _body) = match_arm;\n\n if pat[0] == *tag {\n\n return match_arm.clone();\n\n }\n\n }\n\n panic!(format!(\"No matching arm found for tag {:?}\", tag))\n\n}\n\n\n\nimpl From<TypeNode> for Type {\n\n fn from(tn: TypeNode) -> Self {\n\n Type(rc::Rc::new(tn))\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for Type {\n\n type Error = parser::ParseErr;\n\n\n\n fn try_from(typ: &str) -> Result<Self, Self::Error> {\n", "file_path": "src/ast/ast.rs", "rank": 23, "score": 107667.0846969105 }, { "content": "pub fn builtin_primdefs() -> Vec<PrimDef> {\n\n let mut primdefs = Vec::new();\n\n\n\n macro_rules! primdef {\n\n ($name:ident, $type:literal) => {\n\n primdefs.push(PrimDef::new(\n\n stringify!($name).to_string(),\n\n $type.try_into().unwrap(),\n\n Box::new(super::prims::$name),\n\n ));\n\n }\n\n }\n\n\n\n primdef!(println, \"Str -> Top\");\n\n primdef!(show, \"Nat -> Str\");\n\n primdef!(show_list, \"List -> Str\");\n\n primdef!(cat, \"Str -> Str -> Str\");\n\n\n\n primdefs\n\n}\n\n\n", "file_path": "src/runtime/builtins.rs", "rank": 24, "score": 107058.8605398256 }, { "content": "fn transform_term_let(x: &str, s: q::Term, t: q::Term, program: &q::Module) -> m::Expr {\n\n let mut free_vars = gather_free_vars(s.clone(), program);\n\n free_vars.retain(|fv| fv != x);\n\n\n\n m::ExprNode::Let(\n\n m::LetType::NonRecursive,\n\n vec![\n\n m::Binding(\n\n x.to_owned(),\n\n m::LambdaForm(\n\n free_vars,\n\n decide_updatability(),\n\n vec![], // TODO should this be fused if the target is a lambda?\n\n transform_term(s.clone(), program),\n\n )\n\n ),\n\n ],\n\n transform_term(t.clone(), program),\n\n ).into()\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 25, "score": 105475.57364817351 }, { "content": "fn check_type_match_arm(\n\n match_arm: &MatchArm,\n\n inductive_typedef: &TypeDef,\n\n ctx: &Context<Type>,\n\n inductive_typedefs: &HashMap<String, TypeDef>,\n\n typ: &Type,\n\n) -> Result<(), TypeErr> {\n\n let MatchArm(pat, body) = match_arm;\n\n let ctor_tag = pat[0].to_string();\n\n let mut ctor_typ = inductive_typedef.ctor_types.get(&ctor_tag).unwrap();\n\n\n\n let pattern_names: Vec<String> = (&pat[1..]).iter().cloned().collect();\n\n let mut pattern_types: Vec<Type> = Vec::new();\n\n\n\n while let TypeNode::Arrow(dom, cod) = ctor_typ.as_ref() {\n\n pattern_types.push(dom.clone());\n\n ctor_typ = &cod;\n\n }\n\n\n\n if pattern_names.len() != pattern_types.len() {\n\n Err(format!(\"Pattern has the wrong number of variables: {:?} is more than {}\", pattern_names, pattern_types.len()))\n\n } else {\n\n let zipped: Vec<(String, Type)> = pattern_names.into_iter().zip(pattern_types).collect();\n\n let extended_ctx = 
ctx.extend_many(&zipped);\n\n check_type(&body, extended_ctx.clone(), inductive_typedefs, typ.clone())\n\n }\n\n}\n\n\n", "file_path": "src/types/check.rs", "rank": 26, "score": 101769.561946733 }, { "content": "fn usize_to_nat_term(v: usize) -> Term {\n\n let mut result: Term = TermNode::Var(Variable { name: \"zero\".to_owned(), layer: 0 }).into();\n\n\n\n for _ in 0..v {\n\n result = TermNode::App(\n\n TermNode::Var(Variable { name: \"succ\".to_owned(), layer: 0 }).into(),\n\n vec![result.clone()],\n\n ).into();\n\n }\n\n\n\n result\n\n}\n", "file_path": "src/parser.rs", "rank": 27, "score": 100569.301458537 }, { "content": "#[derive(Debug)]\n\nstruct ContextNode<T>(Vec<(String, T)>);\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Context<T>(rc::Rc<ContextNode<T>>);\n\n\n\nimpl<T: Clone> Context<T> {\n\n pub fn empty() -> Self {\n\n Context(rc::Rc::new(ContextNode(Vec::new())))\n\n }\n\n\n\n pub fn lookup(&self, x: &str, k: usize) -> Option<T> {\n\n let Context(rc_ctx_node) = self;\n\n let ContextNode(var_typ_list) = rc_ctx_node.as_ref();\n\n for (y, typ) in var_typ_list.iter().rev() {\n\n if x == y {\n\n if k == 0 {\n\n return Some(typ.clone());\n\n } else {\n\n return self.lookup(x, k - 1);\n\n }\n", "file_path": "src/context.rs", "rank": 28, "score": 96705.07976394478 }, { "content": "fn debug(msg: &str) {\n\n if DEBUG {\n\n eprintln!(\"{}\", msg);\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Context {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {\n\n writeln!(f, \"{:?}\", self.0)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Instr {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {\n\n match self {\n\n Instr::Eval(e, env) => write!(f, \"Eval {} {}\", e, env),\n\n Instr::Enter(a) => write!(f, \"Enter {}\", a),\n\n Instr::RetCtor(c, ws) => write!(f, \"ReturnCon {} {:?}\", c, ws),\n\n Instr::RetInt(k) => write!(f, \"ReturnInt {}\", k),\n\n }\n\n }\n\n}\n", "file_path": "src/stg/machine.rs", "rank": 29, "score": 89510.28606720315 }, { "content": "fn transform_term_lam(x: &str, t: q::Term, program: &q::Module) -> m::Expr {\n\n let gensym = \"gensym_0\".to_owned();\n\n let mut free_vars = gather_free_vars(t.clone(), program);\n\n free_vars.retain(|fv| fv != &x);\n\n\n\n m::ExprNode::Let(\n\n m::LetType::NonRecursive,\n\n vec![m::Binding(\n\n gensym.clone(),\n\n m::LambdaForm(\n\n free_vars,\n\n false,\n\n vec![x.to_owned()],\n\n transform_term(t.clone(), program),\n\n ),\n\n )],\n\n m::ExprNode::App(m::AppType::Fun, gensym, vec![]).into(),\n\n ).into()\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 30, "score": 87860.94188430923 }, { "content": "fn is_ctor(var: &str) -> bool {\n\n vec![\n\n \"succ\",\n\n \"zero\",\n\n \"true\",\n\n \"false\",\n\n \"nil\",\n\n \"cons\",\n\n ].contains(&var)\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 31, "score": 85708.94680074464 }, { "content": "pub fn repl(interpreter: &mut Interpreter) {\n\n loop {\n\n match interpreter.readline() {\n\n Ok(line) => {\n\n repl_line(&mut interpreter.runtime, &line);\n\n },\n\n Err(ReadlineError::Interrupted) => (),\n\n Err(ReadlineError::Eof) => std::process::exit(1),\n\n Err(err) => {\n\n println!(\"Error: {:?}\", err);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/interpreter.rs", "rank": 32, "score": 83713.41563323057 }, { "content": "pub fn builtins_ctx() -> Context<Value> {\n\n let mut ctx = Context::empty();\n\n\n\n for primdef in builtin_primdefs() {\n\n ctx = ctx.extend(\n\n &primdef.name.to_string(),\n\n 
Value::Prim(rc::Rc::new(primdef.code)),\n\n );\n\n }\n\n ctx\n\n}\n\n\n", "file_path": "src/runtime/builtins.rs", "rank": 33, "score": 83317.07641519271 }, { "content": "type PrimFn = dyn Fn(&mut Runtime, Vec<Value>) -> Value;\n\n\n\nimpl fmt::Debug for Value {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Value::Ctor(tag, contents) => {\n\n write!(f, \"{}\", &tag)?;\n\n for value in contents {\n\n write!(f, \" ({:?})\", value)?;\n\n }\n\n Ok(())\n\n },\n\n Value::CoCtor(tag, contents) => {\n\n write!(f, \"{}\", &tag)?;\n\n for value in contents {\n\n write!(f, \" ({:?})\", value)?;\n\n }\n\n Ok(())\n\n },\n\n Value::Str(s) => write!(f, \"{:?}\", s),\n\n Value::Fun(_, _, _) => write!(f, \"<fun>\"),\n\n Value::Prim(_) => write!(f, \"<prim>\"),\n\n Value::Thunk(_, _) => write!(f, \"<thunk>\"),\n\n }\n\n }\n\n}\n", "file_path": "src/runtime/value.rs", "rank": 34, "score": 81417.9526884777 }, { "content": "fn is_free(v: &Variable, ctx: &[String]) -> bool {\n\n let mut layers_left = v.layer;\n\n\n\n for name in ctx {\n\n if name == &v.name {\n\n if layers_left == 0 {\n\n return false;\n\n } else {\n\n layers_left -= 1\n\n }\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n\nimpl Term {\n\n pub fn free_vars(&self) -> HashSet<Variable> {\n\n self.free_vars_in_ctx(&[])\n\n }\n", "file_path": "src/ast/ast.rs", "rank": 35, "score": 81082.31113841251 }, { "content": "pub trait ImportResolver {\n\n fn resolve(&mut self, import_name: &str) -> Result<ResolvedImport, io::Error>;\n\n}\n\n\n\npub struct ResolvedImport {\n\n pub reader: Box<dyn io::Read>,\n\n pub source: String,\n\n}\n\n\n\nimpl ResolvedImport {\n\n pub fn text(&mut self) -> String {\n\n let mut result = String::new();\n\n self.reader.read_to_string(&mut result).unwrap();\n\n result\n\n }\n\n}\n\n\n\npub struct FileImportResolver(PathBuf);\n\n\n\nimpl FileImportResolver {\n", "file_path": "src/resolver/resolver.rs", "rank": 36, "score": 80924.57300613876 }, { "content": "fn get_var_name(t: q::Term) -> Option<String> {\n\n if let q::TermNode::Var(v) = t.as_ref() {\n\n Some(v.name.clone())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 37, "score": 79282.88927735356 }, { "content": "pub fn transform(module: q::Module) -> m::Program {\n\n assert!(module.imports.is_empty()); // TODO deal with non-empty imports later\n\n\n\n let mut bindings = vec![];\n\n\n\n for q::Def(var, typ, term) in &module.definitions {\n\n let e = transform_term(term.clone(), &module);\n\n let updatable = var == \"main\";\n\n let lf = m::LambdaForm(vec![], updatable, vec![], e);\n\n bindings.push(m::Binding(var.clone(), lf));\n\n }\n\n\n\n let ctors = [\n\n (\"zero\", vec![]),\n\n (\"succ\", vec![\"n\"]),\n\n (\"nil\", vec![]),\n\n (\"cons\", vec![\"x\", \"xs\"]),\n\n (\"true\", vec![]),\n\n (\"false\", vec![]),\n\n ];\n", "file_path": "src/stg/transform.rs", "rank": 38, "score": 79118.3317041652 }, { "content": "fn analyze_coverage(typedef_tags: &Vec<Tag>, match_tags: &Vec<Tag>) -> Result<(), TypeErr> {\n\n let match_tags_set: HashSet<_> = match_tags.iter().cloned().collect();\n\n let typedef_tags_set: HashSet<_> = typedef_tags.iter().cloned().collect();\n\n\n\n let unexpected_tags: HashSet<_> = match_tags_set.difference(&typedef_tags_set).collect();\n\n let missing_tags: HashSet<_> = typedef_tags_set.difference(&match_tags_set).collect();\n\n\n\n let mut sorted_match_tags = match_tags.clone();\n\n sorted_match_tags.sort();\n\n let mut duplicate_tags: HashSet<Tag> = HashSet::new();\n\n let match_tag_with_next: 
Vec<_> = sorted_match_tags\n\n .iter()\n\n .zip(sorted_match_tags[1..].iter())\n\n .collect();\n\n\n\n for (cur, next) in match_tag_with_next.into_iter() {\n\n if cur == next {\n\n duplicate_tags.insert(cur.to_string());\n\n }\n\n }\n", "file_path": "src/types/check.rs", "rank": 39, "score": 77706.61716566463 }, { "content": "type PrimCode = Box<dyn Fn(&mut Runtime, Vec<Value>) -> Value>;\n\n\n\npub struct PrimDef {\n\n pub name: String,\n\n pub typ: Type,\n\n pub code: PrimCode,\n\n}\n\n\n\nimpl std::fmt::Debug for PrimDef {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n write!(f, \"PrimDef {{ name: {:?}, typ: {:?}, code: ? }}\", self.name, self.typ)\n\n }\n\n\n\n}\n\n\n\n///\n\n/// Inductive Types consist of a name (such as `Nat`) and a list of constructor tags\n\n/// (`zero` and `succ`) together with their types (`Nat` and `Nat -> Nat`).\n\n///\n\nimpl TypeDef {\n", "file_path": "src/runtime/builtins.rs", "rank": 40, "score": 70120.03218312022 }, { "content": "fn transform_term_match(t: q::Term, match_arms: &[q::MatchArm], program: &q::Module) -> m::Expr {\n\n let t_expr = transform_term(t.clone(), program);\n\n let arm_exprs = m::Alts(match_arms.iter().map(|q::MatchArm(pat, s)| {\n\n let (ctor, xs) = pat.split_first().unwrap(); // TODO\n\n let xs = xs.iter().map(|x| x.to_owned()).collect();\n\n let s_expr = transform_term(s.clone(), program);\n\n m::Alt::Ctor(ctor.clone(), xs, s_expr)\n\n }).collect());\n\n\n\n m::ExprNode::Case(\n\n t_expr,\n\n arm_exprs,\n\n ).into()\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 41, "score": 64249.679335140485 }, { "content": "fn nat_to_u64(v: Value) -> u64 {\n\n let mut val = v;\n\n let mut result = 0;\n\n\n\n loop {\n\n match val {\n\n Value::Ctor(tag, contents) => {\n\n if tag == \"zero\" {\n\n break\n\n } else if tag == \"succ\" {\n\n val = contents[0].clone();\n\n result += 1;\n\n } else {\n\n panic!(\"This isn't a nat.\")\n\n }\n\n },\n\n _ => panic!(\"This isn't a nat.\"),\n\n }\n\n }\n\n\n\n result\n\n}", "file_path": "src/runtime/prims.rs", "rank": 42, "score": 64056.411213669424 }, { "content": "struct Parser {\n\n tokens: Vec<Token>,\n\n cur: usize,\n\n next_hole_id: HoleId,\n\n hole_count: u64,\n\n}\n\n\n\nmacro_rules! 
consume_expected_token {\n\n ($tokenizer:expr, $ctor:ident, $token:literal) => {\n\n let expected_token = $token;\n\n match $tokenizer.peek() {\n\n Some(Token::$ctor(_)) => {\n\n $tokenizer.consume();\n\n },\n\n Some(peek_token) => return Err(format!(\"Expected {:?} but found {:?}.\", expected_token, peek_token)),\n\n None => return Err(format!(\"Expected {:?} but found end of input.\", expected_token)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 43, "score": 60476.95909826462 }, { "content": "fn pprint_indent_prefix(f: &mut std::fmt::Formatter, indent: usize) -> Result<(), std::fmt::Error> {\n\n for _ in 0..indent {\n\n write!(f, \" \")?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/stg/ast.rs", "rank": 44, "score": 47472.71509731548 }, { "content": "fn pprint_lf_header(f: &mut std::fmt::Formatter, lf: &LambdaForm) -> Result<(), std::fmt::Error> {\n\n let LambdaForm(vs, pi, xs, _e) = lf;\n\n pprint_list(f, vs)?;\n\n if *pi {\n\n write!(f, \" \\\\u \")?;\n\n } else {\n\n write!(f, \" \\\\n \")?;\n\n }\n\n pprint_list(f, xs)\n\n}\n\n\n", "file_path": "src/stg/ast.rs", "rank": 45, "score": 46429.52698962393 }, { "content": "fn pprint_exprnode(f: &mut std::fmt::Formatter, e: &ExprNode, indent: usize) -> Result<(), std::fmt::Error> {\n\n match e {\n\n ExprNode::App(_app_type, fun, vs) => {\n\n write!(f, \"{} \", fun)?;\n\n let args = vs.iter().map(|v| {\n\n match v {\n\n Atom::Var(var) => var.to_string(),\n\n Atom::Lit(k) => k.to_string(),\n\n }\n\n }).collect::<Vec<String>>();\n\n pprint_list(f, &args)?;\n\n },\n\n ExprNode::Let(let_type, bindings, e) => {\n\n match let_type {\n\n LetType::NonRecursive => write!(f, \"let \")?,\n\n LetType::Recursive => write!(f, \"letrc \")?,\n\n }\n\n\n\n for (i, Binding(name, lf)) in bindings.iter().enumerate() {\n\n if i > 0 {\n", "file_path": "src/stg/ast.rs", "rank": 46, "score": 45883.89788008244 }, { "content": "#[test]\n\nfn run_examples() {\n\n let paths = fs::read_dir(\"examples\").expect(\"Could not open examples/ directory\");\n\n for path in paths {\n\n let filename = path.expect(\"Couldn't open file\").path();\n\n println!(\"{:?}\", filename);\n\n let mut runtime = Runtime::new();\n\n let mut import_resolver = FileImportResolver::new(\"examples\");\n\n let import_name = filename.file_stem().unwrap().to_str().unwrap();\n\n runtime.import(&import_name, &mut import_resolver, true).unwrap();\n\n runtime.exec();\n\n }\n\n}\n", "file_path": "src/tests.rs", "rank": 47, "score": 41620.396792348685 }, { "content": "#[test]\n\nfn test_transform() {\n\n let mut import_resolver = FileImportResolver::new(\"stg_examples\");\n\n let text = import_resolver.resolve(\"nat\").unwrap().text();\n\n let module = parse_module(None, &text).unwrap();\n\n let program = transform(module);\n\n\n\n eprintln!(\"{}\", program);\n\n}\n\n\n\n\n", "file_path": "src/stg/tests.rs", "rank": 48, "score": 40422.95247480807 }, { "content": "#[test]\n\nfn free_vars() {\n\n macro_rules! 
assert_free_vars_in_term {\n\n ($text:expr, $vs:expr) => {\n\n let term = parse_term(None, &$text).unwrap();\n\n\n\n let mut frees: HashSet<ast::Variable> = HashSet::new();\n\n for v in ($vs as &[&str]) {\n\n let var = (*v).try_into().unwrap();\n\n frees.insert(var);\n\n }\n\n assert_eq!(term.free_vars(), frees);\n\n }\n\n }\n\n\n\n // bare variables (even with type annotations) are free\n\n assert_free_vars_in_term!(\"x\", &[\"x\"]);\n\n assert_free_vars_in_term!(\"x as Nat\", &[\"x\"]);\n\n\n\n // literals and holes have no free variables\n\n assert_free_vars_in_term!(\"\\\"hello\\\"\", &[]);\n", "file_path": "src/ast/tests.rs", "rank": 49, "score": 40422.95247480807 }, { "content": "#[test]\n\nfn test_stg_works() {\n\n let xs = LambdaForm(vec![], false, vec![],\n\n ExprNode::App(\n\n AppType::Ctor,\n\n \"Nil\".to_owned(),\n\n vec![],\n\n ).into(),\n\n );\n\n\n\n let pure = LambdaForm(vec![], false, vec![\"a\".to_owned()],\n\n ExprNode::App(\n\n AppType::Ctor,\n\n \"Cons\".to_owned(),\n\n vec![\"a\".into(), \"xs\".into()],\n\n ).into(),\n\n );\n\n\n\n let map = LambdaForm(vec![], false, vec![\"f\".to_owned(), \"xs\".to_owned()],\n\n ExprNode::Case(\n\n ExprNode::App(AppType::Fun, \"xs\".to_owned(), vec![]).into(),\n", "file_path": "src/stg/tests.rs", "rank": 50, "score": 39319.03310195773 }, { "content": "#[test]\n\nfn test_transform_run() {\n\n let mut import_resolver = FileImportResolver::new(\"stg_examples\");\n\n let text = import_resolver.resolve(\"nat\").unwrap().text();\n\n let module = parse_module(None, &text).unwrap();\n\n let program = transform(module);\n\n let mut m = StgMachine::new(&program, Some(\"main\"));\n\n println!(\"********************************************************************************\");\n\n println!(\"PROGRAM:\");\n\n println!(\"{}\", program);\n\n eprintln!(\"GLOBALS:\");\n\n for g in m.globals.iter() {\n\n eprintln!(\" {:?}\", g);\n\n }\n\n\n\n while !m.is_halted() {\n\n m.step();\n\n }\n\n\n\n eprintln!(\"GLOBALS:\");\n\n for g in m.globals.iter() {\n", "file_path": "src/stg/tests.rs", "rank": 51, "score": 39319.03310195773 }, { "content": "#[test]\n\nfn free_vars_examples() {\n\n let mut import_resolver = FileImportResolver::new(\"examples\");\n\n let text = import_resolver.resolve(\"hello\").unwrap().text();\n\n let module = parse_module(None, &text).unwrap();\n\n let ast::Def(_name, _typ, body) = module.definition(\"main\").unwrap();\n\n assert_eq!(body.free_vars(), vec![\"println\".try_into().unwrap()].into_iter().collect());\n\n}\n\n\n\n\n", "file_path": "src/ast/tests.rs", "rank": 52, "score": 39319.03310195773 }, { "content": "fn decide_updatability() -> bool {\n\n true\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 53, "score": 38637.48117749057 }, { "content": "fn list_to_vec(v: Value) -> Vec<Value> {\n\n match v {\n\n Value::Ctor(tag, contents) => {\n\n if tag == \"nil\" {\n\n Vec::new()\n\n } else if tag == \"cons\" {\n\n let head = &contents[0];\n\n let tail = &contents[1];\n\n let mut result = list_to_vec(tail.clone());\n\n result.insert(0, head.clone());\n\n result\n\n } else {\n\n panic!(\"This isn't a list.\")\n\n }\n\n },\n\n _ => panic!(\"This isn't a list.\"),\n\n }\n\n}\n\n\n", "file_path": "src/runtime/prims.rs", "rank": 54, "score": 35017.4660030109 }, { "content": "fn print_stuff(m: &StgMachine, addr: Addr) {\n\n let heap = &m.heap;\n\n let Closure(LambdaForm(vs, _pi, xs, e), ws) = heap.lookup(addr);\n\n let ctx = Context::from(vs.clone(), ws.clone());\n\n\n\n if xs.len() > 0 {\n\n println!(\"<THUNK>\");\n\n } else 
{\n\n if let ExprNode::App(AppType::Ctor, f, ts) = e.as_ref() {\n\n print!(\"{}\", f);\n\n for (i, t) in ts.iter().enumerate() {\n\n if i > 0 {\n\n print!(\",\");\n\n }\n\n match t {\n\n Atom::Lit(k) => print!(\" {}\", k),\n\n Atom::Var(name) => {\n\n let value = m.lookup_var(name, &ctx);\n\n match value {\n\n Value::Addr(a) => {\n", "file_path": "src/stg/tests.rs", "rank": 55, "score": 34136.694993029756 }, { "content": "fn top_level_vars(module: &q::Module) -> Vec<m::Var> {\n\n let mut vars = vec![];\n\n for q::Def(var, _typ, _term) in module.definitions.iter() {\n\n vars.push(var.clone());\n\n }\n\n vars\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 78, "score": 32455.195525179413 }, { "content": " result = cod.clone();\n\n },\n\n TypeNode::Forall(_, _) => return Err(\"Can't apply to a forall.\".to_string()),\n\n }\n\n }\n\n Ok(result)\n\n },\n\n TermNode::Let(x, v, body) => {\n\n let x_typ = infer_type(&v, ctx.clone(), inductive_typedefs)?;\n\n infer_type(&body, ctx.extend(x, x_typ), inductive_typedefs)\n\n },\n\n TermNode::Match(_t, _match_arms) => {\n\n Err(\"Can't infer type of match statements. (Yet?)\".to_string())\n\n },\n\n TermNode::Hole(_hole_info) => Err(\"Can't infer type of a hole.\".to_string()),\n\n TermNode::StrLit(_contents) => { Ok(TypeNode::Atom(\"Str\".to_string()).into())},\n\n TermNode::As(term, typ) => {\n\n check_type(&term, ctx, inductive_typedefs, typ.clone())?;\n\n Ok(typ.clone())\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/types/check.rs", "rank": 79, "score": 31940.028916811647 }, { "content": "use std::collections::HashSet;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::ast::TermNode;\n\nuse crate::ast::Tag;\n\nuse crate::ast::MatchArm;\n\nuse crate::runtime::TypeDef;\n\nuse crate::ast::Type;\n\nuse crate::ast::TypeNode;\n\nuse crate::context::Context;\n\n\n\npub type TypeErr = String;\n\n\n", "file_path": "src/types/check.rs", "rank": 80, "score": 31936.46836790252 }, { "content": " TypeNode::Arrow(dom, cod) => check_type(&body, ctx.extend(x, dom.clone()), inductive_typedefs, cod.clone()),\n\n TypeNode::Forall(_name, _cod) => Err(format!(\"Functions need function types, but we got a forall\")),\n\n }\n\n },\n\n TermNode::App(_f, _vs) => {\n\n let inferred_typ = infer_type(&t, ctx, inductive_typedefs)?;\n\n if &inferred_typ == &typ {\n\n Ok(())\n\n } else {\n\n Err(format!(\"Type mismatch during application: {:?} vs {:?}\", &inferred_typ, &typ))\n\n }\n\n },\n\n TermNode::StrLit(_contents) => {\n\n if typ == TypeNode::Atom(\"Str\".to_string()).into() {\n\n Ok(())\n\n } else {\n\n Err(format!(\"Type of string literal can't be {:?}\", &typ))\n\n }\n\n },\n\n TermNode::Let(x, v, body) => {\n", "file_path": "src/types/check.rs", "rank": 81, "score": 31935.040535052154 }, { "content": " )?;\n\n for match_arm in match_arms {\n\n check_type_match_arm(match_arm, inductive_typedef, &ctx, inductive_typedefs, &typ)?;\n\n }\n\n Ok(())\n\n },\n\n }\n\n } else {\n\n // NOTE: There is an assumption here that Bot is the only empty type!\n\n check_type(\n\n discriminee,\n\n ctx.clone(),\n\n inductive_typedefs,\n\n TypeNode::Atom(\"Bot\".to_string()).into(),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/types/check.rs", "rank": 82, "score": 31932.178545430656 }, { "content": " let x_typ = infer_type(&v, ctx.clone(), inductive_typedefs)?;\n\n check_type(&body, ctx.extend(&x, x_typ), inductive_typedefs, typ)\n\n },\n\n TermNode::Match(t, match_arms) => check_type_match(&t, &match_arms, ctx, inductive_typedefs, typ),\n\n TermNode::Hole(_hole_info) => Ok(()),\n\n 
TermNode::As(term, as_typ) => {\n\n if &typ == as_typ {\n\n check_type(&term, ctx, inductive_typedefs, typ)\n\n } else {\n\n Err(format!(\"Type mismatch during ascription: {:?} vs {:?}\", &as_typ, &typ))\n\n }\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/types/check.rs", "rank": 83, "score": 31931.97179412294 }, { "content": "pub mod check;\n", "file_path": "src/types/mod.rs", "rank": 84, "score": 31923.3540282407 }, { "content": "\n\n if !unexpected_tags.is_empty() {\n\n Err(format!(\"Found unexpected tags: {:?}\", unexpected_tags))\n\n } else if !missing_tags.is_empty() {\n\n Err(format!(\"Expected missing tags: {:?}\", missing_tags))\n\n } else if !duplicate_tags.is_empty() {\n\n Err(format!(\"Duplicate tags: {:?}\", duplicate_tags))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/types/check.rs", "rank": 85, "score": 31919.914763551827 }, { "content": "fn transform_term(term: q::Term, program: &q::Module) -> m::Expr {\n\n use q::TermNode::*;\n\n\n\n match term.as_ref() {\n\n As(t, typ) => transform_term(t.clone(), program),\n\n StrLit(_s) => todo!(),\n\n Hole(_s) => todo!(),\n\n Let(x, s, t) => transform_term_let(x, s.clone(), t.clone(), program),\n\n Var(var) => transform_term_var(var, program),\n\n Lam(x, t) => transform_term_lam(&x, t.clone(), program),\n\n App(t, vs) => transform_term_app(t.clone(), vs, program),\n\n Match(t, match_arms) => transform_term_match(t.clone(), &match_arms, program),\n\n }\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 86, "score": 31733.524462800753 }, { "content": "fn transform_term_var(var: &q::Variable, program: &q::Module) -> m::Expr {\n\n dbg!(&var.name);\n\n assert_eq!(var.layer, 0); // TODO deal with this\n\n if is_ctor(&var.name) {\n\n m::ExprNode::App(m::AppType::Ctor, var.name.clone(), vec![]).into()\n\n } else {\n\n m::ExprNode::App(m::AppType::Fun, var.name.clone(), vec![]).into()\n\n }\n\n}\n\n\n", "file_path": "src/stg/transform.rs", "rank": 87, "score": 30965.91684846995 }, { "content": "fn gather_free_vars(t: q::Term, module: &q::Module) -> Vec<m::Var> {\n\n let free_vars = t.free_vars().iter().map(|v| v.name.clone()).collect::<HashSet<String>>();\n\n let top_level_vars = top_level_vars(module).iter().map(|s| s.to_owned()).collect::<HashSet<String>>();\n\n let ctors: HashSet<String> = [\n\n \"zero\".to_owned(),\n\n \"succ\".to_owned(),\n\n \"nil\".to_owned(),\n\n \"cons\".to_owned(),\n\n \"true\".to_owned(),\n\n \"false\".to_owned(),\n\n ].iter().cloned().collect::<HashSet<_>>();\n\n\n\n let free_vars: Vec<m::Var> = free_vars.difference(&top_level_vars).cloned().collect::<HashSet<_>>().difference(&ctors).cloned().collect::<Vec<_>>();\n\n assert!(!free_vars.contains(&\"succ\".to_owned()));\n\n free_vars\n\n}\n", "file_path": "src/stg/transform.rs", "rank": 88, "score": 30646.983367973757 }, { "content": "fn transform_term_app(t: q::Term, vs: &[q::Term], program: &q::Module) -> m::Expr {\n\n let mut i = 0;\n\n let mut temps = vec![];\n\n\n\n let f = get_var_name(t.clone()).unwrap_or_else(|| {\n\n let name = format!(\"gensym_{}\", i);\n\n temps.push((name.clone(), t.clone()));\n\n i += 1;\n\n name\n\n });\n\n\n\n let vs_expr = vs.iter().map(|v| {\n\n m::Atom::Var(get_var_name(v.clone()).unwrap_or_else(|| {\n\n let name = format!(\"gensym_{}\", i);\n\n temps.push((name.clone(), v.clone()));\n\n i += 1;\n\n name\n\n }))\n\n }).collect();\n\n\n", "file_path": "src/stg/transform.rs", "rank": 89, "score": 29358.830818753704 }, { "content": "fn pprint_list<V: std::fmt::Display>(f: &mut std::fmt::Formatter, vs: &[V]) -> 
Result<(), std::fmt::Error> {\n\n write!(f, \"{{\")?;\n\n for (i, v) in vs.iter().enumerate() {\n\n if i > 0 {\n\n write!(f, \",\")?;\n\n }\n\n\n\n write!(f, \" {}\", v)?\n\n }\n\n if vs.len() > 0 {\n\n\n\n write!(f, \" \")?\n\n }\n\n write!(f, \"}}\")\n\n}\n\n\n\n\n\nimpl Alts {\n\n pub fn find_alt_for_ctor(&self, ctor_tag: &Ctor) -> Option<&Alt> {\n\n for alt in self.0.iter() {\n", "file_path": "src/stg/ast.rs", "rank": 90, "score": 24444.85090637598 }, { "content": "\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct HoleInfo {\n\n pub hole_id: HoleId,\n\n pub name: Option<String>,\n\n pub contents: Option<String>,\n\n pub loc: Loc,\n\n}\n\n\n\npub type HoleId = usize;\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct MatchArm(pub Pattern, pub Term);\n\n\n\npub type Pattern = Vec<String>;\n\n\n\npub type Tag = String;\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Type(rc::Rc<TypeNode>);\n", "file_path": "src/ast/ast.rs", "rank": 91, "score": 27.734888159050094 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Term(Box<TermNode>);\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\npub struct Variable {\n\n pub name: String,\n\n pub layer: usize,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum TermNode {\n\n Var(Variable),\n\n Lam(String, Term),\n\n App(Term, Vec<Term>),\n\n Let(String, Term, Term),\n\n Match(Term, Vec<MatchArm>),\n\n Hole(HoleInfo),\n\n As(Term, Type),\n\n StrLit(String),\n\n}\n", "file_path": "src/ast/ast.rs", "rank": 92, "score": 27.459208848764064 }, { "content": "use std::collections::HashSet;\n\nuse std::rc;\n\n\n\nuse crate::parser;\n\nuse crate::tokenizer::Loc;\n\n\n\nuse std::convert::TryFrom;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Module {\n\n pub definitions: Vec<Def>,\n\n pub imports: Vec<Import>,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Def(pub String, pub Type, pub Term);\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Import(pub String);\n\n\n", "file_path": "src/ast/ast.rs", "rank": 93, "score": 26.89654030824821 }, { "content": "impl Parser {\n\n pub fn new(tokens: Vec<Token>) -> Self {\n\n Parser {\n\n tokens,\n\n cur: 0,\n\n next_hole_id: 0,\n\n hole_count: 0,\n\n }\n\n }\n\n\n\n fn peek(&mut self) -> Option<Token> {\n\n self.peek_ahead(0)\n\n }\n\n\n\n fn peek_ahead(&mut self, k: usize) -> Option<Token> {\n\n match self.tokens.get(self.cur + k) {\n\n Some(t) => Some(t.clone()),\n\n None => None,\n\n }\n\n }\n", "file_path": "src/parser.rs", "rank": 94, "score": 25.455732554297803 }, { "content": " if definition_name == name {\n\n return Some(d.clone())\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\nimpl TryFrom<&str> for Variable {\n\n type Error = parser::ParseErr;\n\n\n\n fn try_from(vn: &str) -> Result<Self, Self::Error> {\n\n parser::parse_variable(None, vn)\n\n }\n\n}\n\n\n\n\n\nimpl HoleInfo {\n\n pub fn new(hole_id: HoleId, name: Option<String>, contents: Option<String>, loc: Loc) -> Self {\n\n HoleInfo {\n", "file_path": "src/ast/ast.rs", "rank": 95, "score": 24.036286336381636 }, { "content": "use crate::tokenizer::Token;\n\nuse crate::tokenizer::Tokenizer;\n\nuse crate::ast;\n\n\n\nuse ast::HoleId;\n\nuse ast::HoleInfo;\n\nuse ast::Term;\n\nuse ast::TermNode;\n\nuse ast::Module;\n\nuse ast::MatchArm;\n\nuse ast::Def;\n\nuse ast::Import;\n\nuse ast::Pattern;\n\nuse ast::Variable;\n\nuse ast::Type;\n\nuse ast::TypeNode;\n\n\n\npub type ParseErr = String;\n\n\n", "file_path": "src/parser.rs", "rank": 96, "score": 23.724543134719195 }, { "content": "pub enum LetType {\n\n 
Recursive,\n\n NonRecursive,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Alt {\n\n Ctor(Ctor, Vec<Var>, Expr),\n\n Lit(usize, Expr),\n\n Default(Var, Expr),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Alts(pub Vec<Alt>);\n\n\n\nimpl From<&str> for Atom {\n\n fn from(name: &str) -> Self {\n\n Atom::Var(name.to_owned())\n\n }\n\n}\n", "file_path": "src/stg/ast.rs", "rank": 97, "score": 21.63499200047736 }, { "content": "\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum TypeNode {\n\n Atom(String),\n\n Arrow(Type, Type),\n\n Forall(String, Type),\n\n}\n\n\n\nimpl std::ops::Deref for Term {\n\n type Target = TermNode;\n\n\n\n fn deref(&self) -> &TermNode {\n\n use std::borrow::Borrow;\n\n let Term(rc_tn) = self;\n\n rc_tn.borrow()\n\n }\n\n}\n\n\n\nimpl AsRef<TermNode> for Term {\n\n fn as_ref(&self) -> &TermNode {\n", "file_path": "src/ast/ast.rs", "rank": 98, "score": 21.465725961196846 }, { "content": " use std::borrow::Borrow;\n\n let Term(rc_tn) = self;\n\n rc_tn.borrow()\n\n }\n\n}\n\n\n\nimpl From<TermNode> for Term {\n\n fn from(tn: TermNode) -> Self {\n\n Term(Box::new(tn))\n\n }\n\n}\n\n\n\nimpl Module {\n\n pub fn new(definitions: Vec<Def>, imports: Vec<Import>) -> Self {\n\n Module { definitions, imports }\n\n }\n\n\n\n pub fn definition(&self, name: &str) -> Option<Def> {\n\n for d in &self.definitions {\n\n let Def(definition_name, _typ, _body) = d;\n", "file_path": "src/ast/ast.rs", "rank": 99, "score": 20.30375037356466 } ]
Rust
src/structs.rs
th0rex/pe_load
edf8caf2a2ae97f48c711398894d5e92a4b1a09b
use std::marker::PhantomData; use std::mem::size_of; use std::os::raw::{c_char, c_void}; use std::slice; use super::rva::{Pointer, RVA}; #[repr(u16)] pub enum Machine { X64 = 0x8864, I386 = 0x14c, } #[repr(u16)] pub enum OptionalHeaderSignature { X64 = 523, X86 = 267, ROM = 263, } #[repr(u32)] #[derive(Clone, Copy)] pub enum Characteristics { NoPad = 0x8, CntCode = 0x20, CntInitializedData = 0x40, CntUninitializedData = 0x80, Gprel = 0x8000, NumRelocationsOverflow = 0x1000000, MemExecute = 0x20000000, MemRead = 0x40000000, MemWrite = 0x80000000, } #[repr(u16)] pub enum FileCharacteristics { RelocsStripped = 0x1, ExecutableImage = 0x2, LineNumsStripped = 0x4, LocalSymsStripped = 0x8, LargeAddressAware = 0x20, X86Machine = 0x100, DebugStripped = 0x200, RemovableRunFromSwap = 0x400, NetRunFromSwap = 0x800, System = 0x1000, Dll = 0x2000, SingleProcessorOnly = 0x4000, } #[repr(u16)] pub enum Subsystem { Unknown = 0, Native = 1, WindowsGUI = 2, WindowsCUI = 3, OS2Cui = 5, PosixCui = 7, WindowsCEGui = 9, EFIApplication = 10, EFIBootServiceDriver = 11, EFIRuntimeDriver = 12, EFIRom = 13, XBox = 14, WindowsBootApplication = 16, } #[repr(u16)] pub enum DirectoryEntry { Export = 0, Import = 1, Resource = 2, Exception = 3, Security = 4, Basereloc = 5, Debug = 6, Architecture = 7, Globalptr = 8, Tls = 9, LoadConfig = 10, BoundImport = 11, Iat = 12, DelayImport = 13, ComDescriptor = 14, } #[derive(Debug, PartialEq)] pub struct RelocationType(u16); pub const RelocateAbsolute: RelocationType = RelocationType(0); pub const RelocateHighLow: RelocationType = RelocationType(3); pub const RelocateDir64: RelocationType = RelocationType(10); #[repr(u16)] pub enum DllCharacteristics { DynamicBase = 0x40, ForceIntegrity = 0x80, NXCompat = 0x100, NoIsolation = 0x200, NoSEH = 0x400, NoBind = 0x800, WDMDriver = 0x2000, TerminalServerAware = 0x8000, } pub union MiscUnion { physical_address: u32, virtual_size: u32, } #[repr(C)] pub struct ImageSectionHeader { pub name: [c_char; 8], pub misc: MiscUnion, pub(crate) virtual_address: RVA<u32, Pointer<*mut u8>>, pub size_of_raw_data: u32, pub p_raw_data: u32, pub p_reloc: u32, pub p_line_nums: u32, pub num_relocations: u16, pub num_line_nums: u16, pub characteristics: Characteristics, } #[repr(C)] pub struct ImageBaseRelocation { pub(crate) virtual_address: RVA<u32, Pointer<*mut u64>>, pub size_of_block: u32, } impl ImageBaseRelocation { pub fn base_relocations<'a>(&'a self) -> BaseRelocationIterator<'a> { BaseRelocationIterator::new(self) } pub fn relocations(&self) -> RelocationIterator { RelocationIterator::new(self) } pub fn next_relocation(&self) -> Option<&ImageBaseRelocation> { let relocations_start = unsafe { (self as *const _).offset(1) as *const u16 }; let count = (self.size_of_block as usize - size_of::<ImageBaseRelocation>()) / 2; let next_base_relocation = unsafe { &*(relocations_start.offset(count as _) as *const ImageBaseRelocation) }; if next_base_relocation.virtual_address.value == 0 && next_base_relocation.size_of_block == 0 { None } else { Some(next_base_relocation) } } } pub type RelocationOffset = u16; pub struct BaseRelocationIterator<'a> { current: Option<&'a ImageBaseRelocation>, } impl<'a> BaseRelocationIterator<'a> { fn new(current: &'a ImageBaseRelocation) -> Self { Self { current: Some(current), } } } impl<'a> Iterator for BaseRelocationIterator<'a> { type Item = &'a ImageBaseRelocation; fn next(&mut self) -> Option<Self::Item> { if let Some(c) = self.current { self.current = c.next_relocation(); Some(c) } else { None } } } pub struct 
RelocationIterator { relocation: *const u16, current: usize, count: usize, } impl RelocationIterator { fn new(base_relocation: &ImageBaseRelocation) -> Self { Self { relocation: unsafe { (base_relocation as *const _).offset(1) as *const _ }, current: 0, count: (base_relocation.size_of_block as usize - size_of::<ImageBaseRelocation>()) / 2, } } } impl Iterator for RelocationIterator { type Item = (RelocationType, RelocationOffset); fn next(&mut self) -> Option<Self::Item> { if self.current >= self.count { None } else { let value = unsafe { &*self.relocation.offset(self.current as _) }; self.current += 1; Some(( RelocationType((value >> 12) & 0b00001111u16), value & 0x0FFFu16, )) } } } pub(crate) struct ThunkIterator<'a> { current: Pointer<*mut ThunkData>, _p: PhantomData<&'a u32>, } impl<'a> ThunkIterator<'a> { fn new(c: &'a ImportDescriptor, base: u64) -> Self { Self { current: c.first_thunk.resolve(base), _p: PhantomData, } } } impl<'a> Iterator for ThunkIterator<'a> { type Item = &'a mut ThunkData; fn next(&mut self) -> Option<Self::Item> { if unsafe { self.current.address_of_data.value } != 0 { let current = self.current.p; self.current = Pointer { p: unsafe { current.offset(1) }, }; Some(unsafe { &mut *current }) } else { None } } } #[repr(C)] pub struct ImportDescriptor { pub imports_by_name: u32, pub time_stamp: u32, pub forwarder_chain: u32, pub(crate) name: RVA<u32, Pointer<*const c_char>>, pub(crate) first_thunk: RVA<u32, Pointer<*mut ThunkData>>, } impl ImportDescriptor { pub(crate) fn thunk_iterator<'a>(&'a self, base: u64) -> ThunkIterator<'a> { ThunkIterator::new(self, base) } pub(crate) fn import_iterator<'a>(&'a self) -> ImportIterator<'a> { ImportIterator::new(self) } } pub(crate) struct ImportIterator<'a> { p: *const ImportDescriptor, _p: PhantomData<&'a ImportDescriptor>, } impl<'a> ImportIterator<'a> { fn new(i: &'a ImportDescriptor) -> Self { Self { p: i as *const _, _p: PhantomData, } } } impl<'a> Iterator for ImportIterator<'a> { type Item = &'a ImportDescriptor; fn next(&mut self) -> Option<Self::Item> { unsafe { if (*self.p).name.value == 0 { None } else { let item = &(*self.p); self.p = self.p.offset(1); Some(item) } } } } pub fn image_snap_by_ordinal(ordinal: u64) -> bool { (ordinal & 0x8000000000000000) != 0 } pub fn image_ordinal(ordinal: u64) -> u64 { ordinal & 0xffff } #[repr(C)] pub union ThunkData { pub forwarder_string: u64, pub function: u64, pub ordinal: u64, pub(crate) address_of_data: RVA<u64, Pointer<*const ImageImportByName>>, } #[repr(C)] pub struct TlsDirectory { pub address_of_raw_data: u64, pub end_address_of_raw_data: u64, pub address_of_index: u64, pub address_of_callbacks: u64, } pub type TlsCallback = Option<extern "stdcall" fn(*mut c_void, u32, *mut c_void)>; #[repr(C)] pub struct ImageImportByName { pub hint: u16, pub name: c_char, } #[repr(C)] pub struct DataEntry<T> { pub(crate) virtual_address: RVA<u32, Pointer<*mut T>>, pub size: u32, } #[repr(C)] pub struct OptionalHeader { pub signature: OptionalHeaderSignature, pub _major_linker_version: c_char, pub _minor_linker_version: c_char, pub size_of_code: u32, pub size_of_initialized_data: u32, pub size_of_uninitialized_data: u32, pub address_of_entry_point: u32, pub base_of_code: u32, pub image_base: u64, pub section_alignment: u32, pub file_alignment: u32, pub major_os_version: u16, pub minor_os_version: u16, pub major_image_version: u16, pub minor_image_version: u16, pub major_subsystem_version: u16, pub minor_subsystem_version: u16, pub win32_version: u32, pub size_of_image: u32, pub 
size_of_headers: u32, pub checksum: u32, pub subsystem: Subsystem, pub dll_characteristics: DllCharacteristics, pub size_of_stack_reserve: u64, pub size_of_stack_commit: u64, pub size_of_heap_reserve: u64, pub size_of_heap_commit: u64, pub __loader_flags: u32, pub num_of_rva_and_sizes: u32, } impl OptionalHeader { fn data_entries_start(&self) -> *const u8 { unsafe { (self as *const _ as *const u8).offset(size_of::<OptionalHeader>() as _) } } fn data_entry<T>(&self, e: DirectoryEntry) -> &DataEntry<T> { unsafe { let ptr = (self.data_entries_start() as *const DataEntry<T>).offset(e as _); &*ptr } } pub(crate) fn get_import_descriptor( &self, base: u64, ) -> Option<Pointer<*const ImportDescriptor>> { let entry = self.data_entry::<ImportDescriptor>(DirectoryEntry::Import); if entry.virtual_address.value == 0 || entry.size == 0 { None } else { Some(entry.virtual_address.resolve(base).into()) } } pub(crate) fn get_relocation_entries( &self, base: u64, ) -> Option<Pointer<*mut ImageBaseRelocation>> { let entry = self.data_entry::<ImageBaseRelocation>(DirectoryEntry::Basereloc); if entry.virtual_address.value == 0 { None } else { let reloc = entry.virtual_address.resolve(base); if reloc.size_of_block as usize <= size_of::<ImageBaseRelocation>() { None } else { Some(reloc) } } } pub(crate) fn get_tls_entries(&self, base: u64) -> Option<Pointer<*const TlsDirectory>> { let entry = self.data_entry::<TlsDirectory>(DirectoryEntry::Tls); if entry.virtual_address.value == 0 { None } else { Some(entry.virtual_address.resolve(base).into()) } } /*pub fn get_data_entries(&self) -> &[DataEntry] { unsafe { let self_ptr = self as *const _ as *const c_char; slice::from_raw_parts( self_ptr.offset(size_of::<OptionalHeader>() as _) as *const _, self.num_of_rva_and_sizes as _, ) } }*/ } #[repr(C)] pub struct FileHeader { pub machine: Machine, pub num_sections: u16, pub time_date: u32, pub p_symbol_table: u32, pub num_symbols: u32, pub size_optional_header: u16, pub characteristics: u16, } impl FileHeader { pub fn get_sections(&self) -> &[ImageSectionHeader] { unsafe { let self_ptr = self as *const _ as *const c_char; slice::from_raw_parts( self_ptr.offset((size_of::<FileHeader>() + self.size_optional_header as usize) as _) as *const _, self.num_sections as _, ) } } } #[repr(C)] pub struct PeHeader { pub signature: [c_char; 4], pub file_header: FileHeader, pub optional_header: OptionalHeader, } #[repr(C)] pub struct DosHeader { pub signature: [c_char; 2], pub not_needed: [c_char; 58], pub offset_to_pe_header: u32, } impl DosHeader { pub fn get_pe_header(&self) -> &PeHeader { unsafe { let self_ptr = self as *const _ as *const c_char; &*(self_ptr.offset(self.offset_to_pe_header as _) as *const _) } } }
use std::marker::PhantomData; use std::mem::size_of; use std::os::raw::{c_char, c_void}; use std::slice; use super::rva::{Pointer, RVA}; #[repr(u16)] pub enum Machine { X64 = 0x8864, I386 = 0x14c, } #[repr(u16)] pub enum OptionalHeaderSignature { X64 = 523, X86 = 267, ROM = 263, } #[repr(u32)] #[derive(Clone, Copy)] pub enum Characteristics { NoPad = 0x8, CntCode = 0x20, CntInitializedData = 0x40, CntUninitializedData = 0x80, Gprel = 0x8000, NumRelocationsOverflow = 0x1000000, MemExecute = 0x20000000, MemRead = 0x40000000, MemWrite = 0x80000000, } #[repr(u16)] pub enum FileCharacteristics { RelocsStripped = 0x1, ExecutableImage = 0x2, LineNumsStripped = 0x4, LocalSymsStripped = 0x8, LargeAddressAware = 0x20, X86Machine = 0x100, DebugStripped = 0x200, RemovableRunFromSwap = 0x400, NetRunFromSwap = 0x800, System = 0x1000, Dll = 0x2000, SingleProcessorOnly = 0x4000, } #[repr(u16)] pub enum Subsystem { Unknown = 0, Native = 1, WindowsGUI = 2, WindowsCUI = 3, OS2Cui = 5, PosixCui = 7, WindowsCEGui = 9, EFIApplication = 10, EFIBootServiceDriver = 11, EFIRuntimeDriver = 12, EFIRom = 13, XBox = 14, WindowsBootApplication = 16, } #[repr(u16)] pub enum DirectoryEntry { Export = 0, Import = 1, Resource = 2, Exception = 3, Security = 4, Basereloc = 5, Debug = 6, Architecture = 7, Globalptr = 8, Tls = 9, LoadConfig = 10, BoundImport = 11, Iat = 12, DelayImport = 13, ComDescriptor = 14, } #[derive(Debug, PartialEq)] pub struct RelocationType(u16); pub const RelocateAbsolute: RelocationType = RelocationType(0); pub const RelocateHighLow: RelocationType = RelocationType(3); pub const RelocateDir64: RelocationType = RelocationType(10); #[repr(u16)] pub enum DllCharacteristics { DynamicBase = 0x40, ForceIntegrity = 0x80, NXCompat = 0x100, NoIsolation = 0x200, NoSEH = 0x400, NoBind = 0x800, WDMDriver = 0x2000, TerminalServerAware = 0x8000, } pub union MiscUnion { physical_address: u32, virtual_size: u32, } #[repr(C)] pub struct ImageSectionHeader { pub name: [c_char; 8], pub misc: MiscUnion, pub(crate) virtual_address: RVA<u32, Pointer<*mut u8>>, pub size_of_raw_data: u32, pub p_raw_data: u32, pub p_reloc: u32, pub p_line_nums: u32, pub num_relocations: u16, pub num_line_nums: u16, pub characteristics: Characteristics, } #[repr(C)] pub struct ImageBaseRelocation { pub(crate) virtual_address: RVA<u32, Pointer<*mut u64>>, pub size_of_block: u32, } impl ImageBaseRelocation { pub fn base_relocations<'a>(&'a self) -> BaseRelocationIterator<'a> { BaseRelocationIterator::new(self) } pub fn relocations(&self) -> RelocationIterator { RelocationIterator::new(self) } pub fn next_relocation(&self) -> Option<&ImageBaseRelocation> { let relocations_start = unsafe { (self as *const _).offset(1) as *const u16 }; let count = (self.size_of_block as usize - size_of::<ImageBaseRelocation>()) / 2; let next_base_relocation = unsafe { &*(relocations_start.offset(count as _) as *const ImageBaseRelocation) }; if next_base_relocation.virtual_address.value == 0 && next_base_relocation.size_of_block == 0 { None } else { Some(next_base_relocation) } } } pub type RelocationOffset = u16; pub struct BaseRelocationIterator<'a> { current: Option<&'a ImageBaseRelocation>, } impl<'a> BaseRelocationIterator<'a> { fn new(current: &'a ImageBaseRelocation) -> Self { Self { current: Some(current), } } } impl<'a> Iterator for BaseRelocationIterator<'a> { type Item = &'a ImageBaseRelocation; fn next(&mut self) -> Option<Self::Item> { if let Some(c) = self.current { self.current = c.next_relocation(); Some(c) } else { None } } } pub struct 
RelocationIterator { relocation: *const u16, current: usize, count: usize, } impl RelocationIterator { fn new(base_relocation: &ImageBaseRelocation) -> Self { Self { relocation: unsafe { (base_relocation as *const _).offset(1) as *const _ }, current: 0, count: (base_relocation.size_of_block as usize - size_of::<ImageBaseRelocation>()) / 2, } } } impl Iterator for RelocationIterator { type Item = (RelocationType, RelocationOffset); fn next(&mut self) -> Option<Self::Item> { if self.current >= self.count { None } else { let value = unsafe { &*self.relocation.offset(self.current as _) }; self.current += 1; Some(( RelocationType((value >> 12) & 0b00001111u16), value & 0x0FFFu16, )) } } } pub(crate) struct ThunkIterator<'a> { current: Pointer<*mut ThunkData>, _p: PhantomData<&'a u32>, } impl<'a> ThunkIterator<'a> { fn new(c: &'a ImportDescriptor, base: u64) -> Self { Self { current: c.first_thunk.resolve(base), _p: PhantomData, } } } impl<'a> Iterator for ThunkIterator<'a> { type Item = &'a mut ThunkData; fn next(&mut self) -> Option<Self::Item> { if unsafe { self.current.address_of_data.value } != 0 { let current = self.current.p; self.current = Pointer { p: unsafe { current.offset(1) }, }; Some(unsafe { &mut *current }) } else { None } } } #[repr(C)] pub struct ImportDescriptor { pub imports_by_name: u32, pub time_stamp: u32, pub forwarder_chain: u32, pub(crate) name: RVA<u32, Pointer<*const c_char>>, pub(crate) first_thunk: RVA<u32, Pointer<*mut ThunkData>>, } impl ImportDescriptor { pub(crate) fn thunk_iterator<'a>(&'a self, base: u64) -> ThunkIterator<'a> { ThunkIterator::new(self, base) } pub(crate) fn import_iterator<'a>(&'a self) -> ImportIterator<'a> { ImportIterator::new(self) } } pub(crate) struct ImportIterator<'a> { p: *const ImportDescriptor, _p: PhantomData<&'a ImportDescriptor>, } impl<'a> ImportIterator<'a> { fn new(i: &'a ImportDescriptor) -> Self { Self { p: i as *const _, _p: PhantomData, } } } impl<'a> Iterator for ImportIterator<'a> { type Item = &'a ImportDescriptor; fn next(&mut self) -> Option<Self::Item> { unsafe { if (*self.p).name.value == 0 { None } else { let item = &(*self.p); self.p = self.p.offset(1); Some(item) } } } } pub fn image_snap_by_ordinal(ordinal: u64) -> bool { (ordinal & 0x8000000000000000) != 0 } pub fn image_ordinal(ordinal: u64) -> u64 { ordinal & 0xffff } #[repr(C)] pub union ThunkData { pub forwarder_string: u64, pub function: u64, pub ordinal: u64, pub(crate) address_of_data: RVA<u64, Pointer<*const ImageImportByName>>, } #[repr(C)] pub struct TlsDirectory { pub address_of_raw_data: u64, pub end_address_of_raw_data: u64, pub address_of_index: u64, pub address_of_callbacks: u64, } pub type TlsCallback = Option<extern "stdcall" fn(*mut c_void, u32, *mut c_void)>; #[repr(C)] pub struct ImageImportByName { pub hint: u16, pub name: c_char, } #[repr(C)] pub struct DataEntry<T> { pub(crate) virtual_address: RVA<u32, Pointer<*mut T>>, pub size: u32, } #[repr(C)] pub struct OptionalHeader { pub signature: OptionalHeaderSignature, pub _major_linker_version: c_char, pub _minor_linker_version: c_char, pub size_of_code: u32, pub size_of_initialized_data: u32, pub size_of_uninitialized_data: u32, pub address_of_entry_point: u32, pub base_of_code: u32, pub image_base: u64, pub sect
[c_char; 2], pub not_needed: [c_char; 58], pub offset_to_pe_header: u32, } impl DosHeader { pub fn get_pe_header(&self) -> &PeHeader { unsafe { let self_ptr = self as *const _ as *const c_char; &*(self_ptr.offset(self.offset_to_pe_header as _) as *const _) } } }
ion_alignment: u32, pub file_alignment: u32, pub major_os_version: u16, pub minor_os_version: u16, pub major_image_version: u16, pub minor_image_version: u16, pub major_subsystem_version: u16, pub minor_subsystem_version: u16, pub win32_version: u32, pub size_of_image: u32, pub size_of_headers: u32, pub checksum: u32, pub subsystem: Subsystem, pub dll_characteristics: DllCharacteristics, pub size_of_stack_reserve: u64, pub size_of_stack_commit: u64, pub size_of_heap_reserve: u64, pub size_of_heap_commit: u64, pub __loader_flags: u32, pub num_of_rva_and_sizes: u32, } impl OptionalHeader { fn data_entries_start(&self) -> *const u8 { unsafe { (self as *const _ as *const u8).offset(size_of::<OptionalHeader>() as _) } } fn data_entry<T>(&self, e: DirectoryEntry) -> &DataEntry<T> { unsafe { let ptr = (self.data_entries_start() as *const DataEntry<T>).offset(e as _); &*ptr } } pub(crate) fn get_import_descriptor( &self, base: u64, ) -> Option<Pointer<*const ImportDescriptor>> { let entry = self.data_entry::<ImportDescriptor>(DirectoryEntry::Import); if entry.virtual_address.value == 0 || entry.size == 0 { None } else { Some(entry.virtual_address.resolve(base).into()) } } pub(crate) fn get_relocation_entries( &self, base: u64, ) -> Option<Pointer<*mut ImageBaseRelocation>> { let entry = self.data_entry::<ImageBaseRelocation>(DirectoryEntry::Basereloc); if entry.virtual_address.value == 0 { None } else { let reloc = entry.virtual_address.resolve(base); if reloc.size_of_block as usize <= size_of::<ImageBaseRelocation>() { None } else { Some(reloc) } } } pub(crate) fn get_tls_entries(&self, base: u64) -> Option<Pointer<*const TlsDirectory>> { let entry = self.data_entry::<TlsDirectory>(DirectoryEntry::Tls); if entry.virtual_address.value == 0 { None } else { Some(entry.virtual_address.resolve(base).into()) } } /*pub fn get_data_entries(&self) -> &[DataEntry] { unsafe { let self_ptr = self as *const _ as *const c_char; slice::from_raw_parts( self_ptr.offset(size_of::<OptionalHeader>() as _) as *const _, self.num_of_rva_and_sizes as _, ) } }*/ } #[repr(C)] pub struct FileHeader { pub machine: Machine, pub num_sections: u16, pub time_date: u32, pub p_symbol_table: u32, pub num_symbols: u32, pub size_optional_header: u16, pub characteristics: u16, } impl FileHeader { pub fn get_sections(&self) -> &[ImageSectionHeader] { unsafe { let self_ptr = self as *const _ as *const c_char; slice::from_raw_parts( self_ptr.offset((size_of::<FileHeader>() + self.size_optional_header as usize) as _) as *const _, self.num_sections as _, ) } } } #[repr(C)] pub struct PeHeader { pub signature: [c_char; 4], pub file_header: FileHeader, pub optional_header: OptionalHeader, } #[repr(C)] pub struct DosHeader { pub signature:
random
[ { "content": "pub fn wrapped_dll_main(ep: extern \"C\" fn()) -> impl FnOnce() -> () {\n\n let x: extern \"stdcall\" fn(HINSTANCE, u32, *mut c_void) = unsafe { mem::transmute(ep) };\n\n let y = move || {\n\n // TODO: Use the mapped address as the HMODULE parameter (i.e. loader.image_base)\n\n x(ptr::null_mut(), DLL_PROCESS_ATTACH, ptr::null_mut());\n\n };\n\n y\n\n}\n\n\n\n/*\n\nTODO:\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 78286.0845117622 }, { "content": "fn resolve_raw(base: u64, offset: isize) -> u64 {\n\n base + offset as u64\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> Loader<T> {\n\n pub fn new(pe_buffer: T) -> Self {\n\n Self {\n\n pe_buffer,\n\n image_base: 0,\n\n }\n\n }\n\n\n\n pub fn load(mut self) -> Result<LoadedPEFile, LoadError> {\n\n let mapped_module = self.map_module()?;\n\n self.image_base = mapped_module.raw as _;\n\n\n\n self.relocate()?;\n\n self.resolve_imports()?;\n\n self.mem_protect()?;\n\n self.tls_callbacks()?;\n", "file_path": "src/lib.rs", "rank": 3, "score": 72729.92785621632 }, { "content": "struct LoaderConfig<DllLoadFunc: Fn(...)..., TLSLoadFunc: Fn(...)...> {\n\n dll: DllLoadFunc,\n\n tls: TLSLoadFunc,\n\n preferred_base: PrefferedBase\n\n}\n\n\n\nimpl<..., ...> LoaderConfig<..., ...> {\n\n fn default() -> LoaderConfig<..., ...>;\n\n\n\n fn set_dll_func<NewFunc: Fn(...)...>(self) -> LoaderConfig<NewFunc, old_type_params...>\n\n}\n\n\n\n*/\n\n\n\n// TODO: Use RVA's\n\n\n\npub struct Loader<T: AsRef<[u8]>> {\n\n pe_buffer: T,\n\n image_base: u64,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 56583.18054772056 }, { "content": "fn resolve<T>(base: &WindowsBox<T>, offset: isize) -> *mut T {\n\n unsafe { base.raw.offset(offset) }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 53512.344002083526 }, { "content": "enum PreferredBase {\n\n Exact(u64), // error if cant get\n\n Default, // try default image base, if dont succeed use any\n\n DefaultExact, // fail if can't get default\n\n Any, // no preference\n\n TryExact(u64), // try to get specified address, but don't error if cant get it and just use any\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 6, "score": 50084.14744537385 }, { "content": "fn main() {\n\n let mut file = File::open(PATH).expect(\"couldn't open file\");\n\n let mut contents = vec![];\n\n file.read_to_end(&mut contents).expect(\"couldn't read file\");\n\n let loader = pe_load::Loader::new(contents);\n\n let loaded = loader.load().expect(\"couldn't load .exe\");\n\n let entry = loaded.entry_point.expect(\"entry point is empty\");\n\n pe_load::wrapped_dll_main(entry)();\n\n println!(\"done\");\n\n}\n", "file_path": "examples/dll.rs", "rank": 7, "score": 46657.37711465666 }, { "content": "fn main() {\n\n let mut file = File::open(PATH).expect(\"couldn't open file\");\n\n let mut contents = vec![];\n\n file.read_to_end(&mut contents).expect(\"couldn't read file\");\n\n let loader = pe_load::Loader::new(contents);\n\n let loaded = loader.load().expect(\"couldn't load .exe\");\n\n let entry = loaded.entry_point.expect(\"entry point is empty\");\n\n entry();\n\n println!(\"done\");\n\n}\n", "file_path": "examples/simple.rs", "rank": 8, "score": 27889.203446735708 }, { "content": "struct WindowsBox<T> {\n\n raw: *mut T,\n\n}\n\n\n\nimpl<T> WindowsBox<T> {\n\n fn new(raw: *mut T) -> Self {\n\n Self { raw }\n\n }\n\n\n\n fn alloc(size: u64, flags: u32, protection: u32) -> Result<Self, LoadError> {\n\n let result = unsafe { VirtualAlloc(ptr::null_mut(), size, flags, protection) };\n\n\n\n if result == ptr::null_mut() {\n\n 
Err(LoadError::NoMemory)\n\n } else {\n\n Ok(Self::new(result as *mut _))\n\n }\n\n }\n\n\n\n fn get(&self) -> &T {\n", "file_path": "src/lib.rs", "rank": 9, "score": 26380.376906674424 }, { "content": "\n\nuse std::marker::PhantomData;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse super::Loader;\n\n\n\n#[repr(C)]\n\n#[derive(Clone, Copy)]\n\npub(crate) struct RVA<StorageType: Copy + Into<u64>, ResolvedType: From<u64>> {\n\n pub(crate) value: StorageType,\n\n _p: PhantomData<ResolvedType>,\n\n}\n\n\n\nimpl<StorageType: Copy + Into<u64>, ResolvedType: From<u64>> RVA<StorageType, ResolvedType> {\n\n pub(crate) fn resolve(&self, base: u64) -> ResolvedType {\n\n (base + (self.value.into())).into()\n\n }\n\n}\n\n\n\n// Sadly *{const|mut} T: From<u64> is not implemented.\n", "file_path": "src/rva.rs", "rank": 10, "score": 20410.216260973586 }, { "content": "impl<T> From<u64> for Pointer<*mut T> {\n\n fn from(address: u64) -> Self {\n\n Self { p: address as _ }\n\n }\n\n}\n\n\n\nimpl<T> Deref for Pointer<*const T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n unsafe { &*self.p }\n\n }\n\n}\n\n\n\nimpl<T> Deref for Pointer<*mut T> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n unsafe { &*self.p }\n\n }\n\n}\n\n\n\nimpl<T> DerefMut for Pointer<*mut T> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n unsafe { &mut *self.p }\n\n }\n\n}\n", "file_path": "src/rva.rs", "rank": 11, "score": 20405.283780195834 }, { "content": "// This kind of fixes it\n\n\n\n// The underlying pointer *must* be valid, because it will be dereferenced.\n\n#[derive(Clone, Copy)]\n\npub(crate) struct Pointer<T> {\n\n pub(crate) p: T,\n\n}\n\n\n\nimpl<T> Into<Pointer<*const T>> for Pointer<*mut T> {\n\n fn into(self) -> Pointer<*const T> {\n\n Pointer { p: self.p as _ }\n\n }\n\n}\n\n\n\nimpl<T> From<u64> for Pointer<*const T> {\n\n fn from(address: u64) -> Self {\n\n Self { p: address as _ }\n\n }\n\n}\n\n\n", "file_path": "src/rva.rs", "rank": 12, "score": 20404.988875995034 }, { "content": "extern crate pe_load;\n\n\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\n\n\nconst PATH: &'static str = \"F:\\\\Programming\\\\C++\\\\call\\\\build\\\\Debug\\\\call.dll\";\n\n\n", "file_path": "examples/dll.rs", "rank": 13, "score": 20289.25502485937 }, { "content": " for import_descriptor in import_descriptor.import_iterator() {\n\n let dll_name = import_descriptor.name.resolve(self.image_base);\n\n let hmod = unsafe { LoadLibraryA(dll_name.p) };\n\n\n\n if hmod.is_null() {\n\n return Err(LoadError::LoadLibraryFailed);\n\n }\n\n\n\n for thunk in import_descriptor.thunk_iterator(self.image_base) {\n\n let function = if image_snap_by_ordinal(unsafe { thunk.ordinal }) {\n\n unsafe {\n\n GetProcAddress(hmod, image_ordinal(thunk.ordinal) as *const c_char)\n\n }\n\n } else {\n\n let name = unsafe { thunk.address_of_data }.resolve(self.image_base);\n\n unsafe { GetProcAddress(hmod, &name.name as *const c_char) }\n\n };\n\n\n\n thunk.function = function as u64;\n\n }\n", "file_path": "src/lib.rs", "rank": 37, "score": 15.479567236918344 }, { "content": " } else if relocation_type == RelocateHighLow {\n\n *address += delta & u32::max_value() as u64;\n\n } else {\n\n return Err(LoadError::UnsupporrtedRelocationType(relocation_type));\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n fn resolve_imports(&mut self) -> Result<(), LoadError> {\n\n let dos_header = self.get_dos_header();\n\n let optional_header = &dos_header.get_pe_header().optional_header;\n\n\n\n match 
optional_header.get_import_descriptor(self.image_base) {\n\n None => Ok(()),\n\n Some(import_descriptor) => {\n", "file_path": "src/lib.rs", "rank": 38, "score": 14.199760825707395 }, { "content": " unsafe { &*self.raw }\n\n }\n\n\n\n fn get_mut(&mut self) -> &mut T {\n\n unsafe { &mut *self.raw }\n\n }\n\n}\n\n\n\nimpl<T> Drop for WindowsBox<T> {\n\n fn drop(&mut self) {\n\n unsafe { VirtualFree(self.raw as *mut _, 0, MEM_RELEASE) };\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum LoadError {\n\n LoadLibraryFailed,\n\n NoMemory,\n\n UnsupporrtedRelocationType(RelocationType),\n\n VirtualProtectFailed,\n\n}\n\n\n\npub struct LoadedPEFile {\n\n pub entry_point: Option<extern \"C\" fn()>,\n\n memory: WindowsBox<u8>,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 39, "score": 13.383915606651549 }, { "content": "#![feature(conservative_impl_trait)]\n\n\n\nextern crate kernel32;\n\nextern crate winapi;\n\n\n\npub(crate) mod rva;\n\nmod structs;\n\n\n\nuse std::borrow::Borrow;\n\nuse std::mem;\n\nuse std::os::raw::{c_char, c_void};\n\nuse std::ptr;\n\n\n\nuse kernel32::{GetNativeSystemInfo, GetProcAddress, LoadLibraryA, VirtualAlloc, VirtualFree,\n\n VirtualProtect};\n\nuse winapi::{HINSTANCE, MEM_COMMIT, MEM_RELEASE, MEM_RESERVE, PAGE_EXECUTE, PAGE_EXECUTE_READ,\n\n PAGE_EXECUTE_READWRITE, PAGE_EXECUTE_WRITECOPY, PAGE_NOACCESS, PAGE_READONLY,\n\n PAGE_READWRITE, PAGE_WRITECOPY};\n\n\n\nuse structs::*;\n\n\n\nconst DLL_PROCESS_ATTACH: u32 = 1;\n\n\n\nunsafe fn get_native_page_size() -> u32 {\n\n let mut sys_info = mem::zeroed();\n\n GetNativeSystemInfo(&mut sys_info);\n\n sys_info.dwPageSize\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 40, "score": 13.225146000687113 }, { "content": "\n\n fn map_module(&mut self) -> Result<WindowsBox<u8>, LoadError> {\n\n let dos_header = self.get_dos_header();\n\n let file_header = &dos_header.get_pe_header().file_header;\n\n\n\n let size = file_header\n\n .get_sections()\n\n .iter()\n\n .filter(|&s| s.virtual_address.value != 0)\n\n .map(|x| x.virtual_address.value + x.size_of_raw_data)\n\n .max()\n\n .unwrap();\n\n let page_size = unsafe { get_native_page_size() } - 1;\n\n let size = (size + page_size) & !page_size;\n\n\n\n let base = WindowsBox::alloc(size as _, MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE)?;\n\n\n\n // TODO: We probably (99.9999%) don't need this, even when loading dlls with LoadLibraryA\n\n unsafe {\n\n ptr::copy(\n", "file_path": "src/lib.rs", "rank": 41, "score": 11.518823195617834 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n fn mem_protect(&mut self) -> Result<(), LoadError> {\n\n let dos_header = self.get_dos_header();\n\n let file_header = &dos_header.get_pe_header().file_header;\n\n let mut old_protect = 0u32;\n\n\n\n for section in file_header\n\n .get_sections()\n\n .iter()\n\n .filter(|&s| s.virtual_address.value != 0 && s.size_of_raw_data != 0)\n\n {\n\n let section_base = resolve_raw(self.image_base, section.virtual_address.value as _);\n\n let characteristics = section.characteristics as u32;\n\n\n", "file_path": "src/lib.rs", "rank": 42, "score": 11.371991687048952 }, { "content": " if self.image_base == optional_header.image_base {\n\n return Ok(());\n\n }\n\n\n\n match optional_header.get_relocation_entries(self.image_base) {\n\n None => Ok(()),\n\n Some(base_relocation) => {\n\n let delta = self.image_base - optional_header.image_base;\n\n\n\n for base_reloc in base_relocation.base_relocations() {\n\n for (relocation_type, offset) in base_reloc.relocations() {\n\n let mut address = base_reloc\n\n 
.virtual_address\n\n .resolve(self.image_base + offset as u64);\n\n\n\n if relocation_type == RelocateAbsolute {\n\n // TODO what to even do lul ?\n\n //return Err(LoadError::UnsupporrtedRelocationType(relocation_type));\n\n } else if relocation_type == RelocateDir64 {\n\n *address += delta;\n", "file_path": "src/lib.rs", "rank": 43, "score": 11.046868087252678 }, { "content": " };\n\n } else {\n\n let source = unsafe {\n\n self.pe_buffer\n\n .as_ref()\n\n .as_ptr()\n\n .offset(section.p_raw_data as _)\n\n };\n\n unsafe { ptr::copy(source, p, section.size_of_raw_data as _) };\n\n }\n\n }\n\n\n\n Ok(base)\n\n }\n\n\n\n fn relocate(&mut self) -> Result<(), LoadError> {\n\n let dos_header = self.get_dos_header();\n\n let optional_header = &dos_header.get_pe_header().optional_header;\n\n\n\n // We don't need to relocate if we managed to load the image at the preferred base address.\n", "file_path": "src/lib.rs", "rank": 44, "score": 9.236393228675905 }, { "content": " let flags = match (\n\n characteristics & Characteristics::MemExecute as u32,\n\n characteristics & Characteristics::MemRead as u32,\n\n characteristics & Characteristics::MemWrite as u32,\n\n ) {\n\n (0, 0, 0) => PAGE_NOACCESS,\n\n (0, 0, _) => PAGE_WRITECOPY,\n\n (0, _, 0) => PAGE_READONLY,\n\n (0, _, _) => PAGE_READWRITE,\n\n (_, 0, 0) => PAGE_EXECUTE,\n\n (_, 0, _) => PAGE_EXECUTE_WRITECOPY,\n\n (_, _, 0) => PAGE_EXECUTE_READ,\n\n (_, _, _) => PAGE_EXECUTE_READWRITE,\n\n };\n\n\n\n if unsafe {\n\n VirtualProtect(\n\n section_base as _,\n\n section.size_of_raw_data as _,\n\n flags,\n", "file_path": "src/lib.rs", "rank": 45, "score": 8.665835358672032 }, { "content": " if !callback.is_null() {\n\n while let &Some(f) = unsafe { &*callback } {\n\n f(\n\n self.image_base as *mut _,\n\n DLL_PROCESS_ATTACH,\n\n ptr::null_mut(),\n\n );\n\n callback = unsafe { callback.offset(1) };\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 46, "score": 8.15939751888189 }, { "content": " self.pe_buffer.as_ref().as_ptr(),\n\n base.raw,\n\n dos_header.get_pe_header().optional_header.size_of_headers as _,\n\n )\n\n };\n\n\n\n for section in file_header\n\n .get_sections()\n\n .iter()\n\n .filter(|&s| s.virtual_address.value != 0)\n\n {\n\n let p = section.virtual_address.resolve(base.raw as _).p;\n\n if section.size_of_raw_data == 0 {\n\n // ???\n\n unsafe {\n\n ptr::write_bytes(\n\n p,\n\n 0,\n\n dos_header.get_pe_header().optional_header.section_alignment as _,\n\n )\n", "file_path": "src/lib.rs", "rank": 47, "score": 7.340563385385521 }, { "content": "extern crate pe_load;\n\n\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\n\n\nconst PATH: &'static str = \"F:\\\\Programming\\\\C++\\\\Tmp\\\\Malloc\\\\x64\\\\Debug\\\\Malloc.exe\";\n\n\n", "file_path": "examples/simple.rs", "rank": 48, "score": 6.149101147150243 }, { "content": " &mut old_protect,\n\n )\n\n } == 0\n\n {\n\n return Err(LoadError::VirtualProtectFailed);\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn tls_callbacks(&mut self) -> Result<(), LoadError> {\n\n let dos_header = self.get_dos_header();\n\n let optional_header = &dos_header.get_pe_header().optional_header;\n\n\n\n match optional_header.get_tls_entries(self.image_base) {\n\n None => Ok(()),\n\n Some(entry) => {\n\n let mut callback = entry.address_of_callbacks as *const TlsCallback;\n\n\n", "file_path": "src/lib.rs", "rank": 49, "score": 4.535878572141307 }, { "content": "\n\n // TODO: We're leaking all mapped sections, lul\n\n // we actually aren't VirtualFree is best\n\n let 
dos_header = self.get_dos_header();\n\n let file_header = &dos_header.get_pe_header().file_header;\n\n let optional_header = &dos_header.get_pe_header().optional_header;\n\n let address = resolve_raw(self.image_base, optional_header.address_of_entry_point as _);\n\n\n\n Ok(LoadedPEFile {\n\n memory: mapped_module,\n\n entry_point: match address {\n\n 0 => None,\n\n x => unsafe { Some(mem::transmute(x)) },\n\n },\n\n })\n\n }\n\n\n\n fn get_dos_header<'a>(&'a self) -> &'a DosHeader {\n\n unsafe { &*(self.pe_buffer.as_ref().as_ptr() as *const _) }\n\n }\n", "file_path": "src/lib.rs", "rank": 50, "score": 3.5627721041120712 } ]
Rust
src/invoker/src/controller.rs
MikailBag/jjs
bf00423f70f8a6ed508bbcb8b38840225e43cc73
mod notify;
mod task_loading;
mod toolchains;

use crate::{
    scheduler::Scheduler,
    worker::{JudgeOutcome, Request, Response},
};
use anyhow::Context;
use notify::Notifier;
use std::{
    path::{Path, PathBuf},
    sync::Arc,
};
use tracing::{debug, info, instrument};
use uuid::Uuid;

#[derive(Debug)]
struct LoweredJudgeRequestExtensions {
    notifier: Notifier,
    invocation_dir: PathBuf,
}

pub enum InvocationFinishReason {
    Fault,
    CompileError,
    TestingDone,
}

pub struct JudgeRequestAndCallbacks {
    pub request: invoker_api::JudgeRequest,
    pub callbacks: Arc<dyn JudgeResponseCallbacks>,
}

impl std::fmt::Debug for JudgeRequestAndCallbacks {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("JudgeRequestAndCallbacks")
            .field("request", &self.request)
            .field("handler", &"..")
            .finish()
    }
}

#[async_trait::async_trait]
pub trait JudgeResponseCallbacks: Send + Sync {
    async fn set_finished(
        &self,
        invocation_id: Uuid,
        reason: InvocationFinishReason,
    ) -> anyhow::Result<()>;

    async fn add_outcome_header(
        &self,
        invocation_id: Uuid,
        header: invoker_api::JudgeOutcomeHeader,
    ) -> anyhow::Result<()>;

    async fn deliver_live_status_update(
        &self,
        invocation_id: Uuid,
        lsu: invoker_api::LiveStatusUpdate,
    ) -> anyhow::Result<()>;
}

#[derive(Clone)]
pub struct Controller {
    scheduler: Arc<Scheduler>,
    problem_loader: Arc<problem_loader::Loader>,
    toolchains_dir: Arc<Path>,
    _config: Arc<crate::config::InvokerConfig>,
    _temp_dir: Arc<tempfile::TempDir>,
    toolchain_loader: Arc<toolchains::ToolchainLoader>,
}

fn get_num_cpus() -> usize {
    static CACHE: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(0);
    let old = CACHE.load(std::sync::atomic::Ordering::Relaxed);
    if old != 0 {
        return old;
    }
    let corr = num_cpus::get();
    assert_ne!(corr, 0);
    CACHE.store(corr, std::sync::atomic::Ordering::Relaxed);
    corr
}

impl Controller {
    pub async fn new(
        cfg_data: util::cfg::CfgData,
        config: Arc<crate::config::InvokerConfig>,
    ) -> anyhow::Result<Controller> {
        let worker_count = match config.workers {
            Some(cnt) => cnt,
            None => get_num_cpus(),
        };
        info!("Using {} workers", worker_count);
        let mut scheduler = Scheduler::new(&config).context("failed to initialize Scheduler")?;
        for _ in 0..worker_count {
            scheduler
                .add_worker()
                .await
                .context("failed to start a worker")?;
        }
        let scheduler = Arc::new(scheduler);
        let temp_dir = tempfile::TempDir::new().context("can not find temporary dir")?;
        let problem_loader =
            problem_loader::Loader::from_config(&config.problems, temp_dir.path().join("problems"))
                .await
                .context("can not create ProblemLoader")?;
        let toolchain_loader = Arc::new(
            toolchains::ToolchainLoader::new()
                .await
                .context("toolchain loader initialization error")?,
        );
        Ok(Controller {
            scheduler,
            problem_loader: Arc::new(problem_loader),
            toolchains_dir: cfg_data.data_dir.join("opt").into(),
            _config: config,
            _temp_dir: Arc::new(temp_dir),
            toolchain_loader,
        })
    }

    #[instrument(skip(self, chan))]
    pub fn exec_on(self, chan: async_mpmc::Receiver<JudgeRequestAndCallbacks>) {
        chan.process_all(move |req| {
            let this = self.clone();
            async move {
                let request_id = req.request.request_id;
                if let Err(err) = this.process_request(req).await {
                    tracing::warn!(
                        request_id = %request_id,
                        err = %format_args!("{:#}", err),
                        "Failed to process a judge request"
                    );
                }
            }
        });
    }

    #[instrument(skip(self, req), fields(request_id=%req.request.request_id))]
    async fn process_request(&self, req: JudgeRequestAndCallbacks) -> anyhow::Result<()> {
        let (low_req, mut exts) = self
            .lower_judge_request(&req)
            .await
            .context("request preprocessing failed")?;
        debug!(lowered_judge_request = ?low_req, "created a lowered judge request");
        let worker = self.scheduler.find_free_worker().await;
        let mut responses = worker
            .send(Request::Judge(low_req))
            .await
            .context("failed to submit lowered judge request")?;
        loop {
            let message = responses
                .next()
                .await
                .context("failed to receive next worker message")?;
            match message {
                Response::JudgeDone(judge_outcome) => {
                    debug!("Publishing: JudgeOutcome {:?}", &judge_outcome);
                    let reason = match judge_outcome {
                        JudgeOutcome::Fault => InvocationFinishReason::Fault,
                        JudgeOutcome::TestingDone => InvocationFinishReason::TestingDone,
                        JudgeOutcome::CompileError(_) => InvocationFinishReason::CompileError,
                    };
                    req.callbacks
                        .set_finished(req.request.request_id, reason)
                        .await
                        .context("failed to set run outcome in DB")?;
                    break;
                }
                Response::LiveScore(score) => {
                    exts.notifier.set_score(score).await;
                }
                Response::LiveTest(test) => {
                    exts.notifier.set_test(test).await;
                }
                Response::OutcomeHeader(header) => {
                    req.callbacks
                        .add_outcome_header(req.request.request_id, header)
                        .await?;
                }
            }
        }
        Ok(())
    }
}
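For illustration only, a minimal no-op implementation of the `JudgeResponseCallbacks` trait defined above could look like the following sketch. The `NoopCallbacks` name is hypothetical and does not appear in the codebase; the sketch assumes the same `async_trait`, `uuid::Uuid`, `anyhow`, and `invoker_api` types the module already imports.

// Hypothetical example: a callbacks implementation that ignores all events.
// It only sketches the trait surface; it is not part of the original module.
struct NoopCallbacks;

#[async_trait::async_trait]
impl JudgeResponseCallbacks for NoopCallbacks {
    async fn set_finished(
        &self,
        _invocation_id: Uuid,
        _reason: InvocationFinishReason,
    ) -> anyhow::Result<()> {
        Ok(())
    }

    async fn add_outcome_header(
        &self,
        _invocation_id: Uuid,
        _header: invoker_api::JudgeOutcomeHeader,
    ) -> anyhow::Result<()> {
        Ok(())
    }

    async fn deliver_live_status_update(
        &self,
        _invocation_id: Uuid,
        _lsu: invoker_api::LiveStatusUpdate,
    ) -> anyhow::Result<()> {
        Ok(())
    }
}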
mod notify;
mod task_loading;
mod toolchains;

use crate::{
    scheduler::Scheduler,
    worker::{JudgeOutcome, Request, Response},
};
use anyhow::Context;
use notify::Notifier;
use std::{
    path::{Path, PathBuf},
    sync::Arc,
};
use tracing::{debug, info, instrument};
use uuid::Uuid;

#[derive(Debug)]
struct LoweredJudgeRequestExtensions {
    notifier: Notifier,
    invocation_dir: PathBuf,
}

pub enum InvocationFinishReason {
    Fault,
    CompileError,
    TestingDone,
}

pub struct JudgeRequestAndCallbacks {
    pub request: invoker_api::JudgeRequest,
    pub callbacks: Arc<dyn JudgeResponseCallbacks>,
}

impl std::fmt::Debug for JudgeRequestAndCallbacks {
}

#[async_trait::async_trait]
pub trait JudgeResponseCallbacks: Send + Sync {
    async fn set_finished(
        &self,
        invocation_id: Uuid,
        reason: InvocationFinishReason,
    ) -> anyhow::Result<()>;

    async fn add_outcome_header(
        &self,
        invocation_id: Uuid,
        header: invoker_api::JudgeOutcomeHeader,
    ) -> anyhow::Result<()>;

    async fn deliver_live_status_update(
        &self,
        invocation_id: Uuid,
        lsu: invoker_api::LiveStatusUpdate,
    ) -> anyhow::Result<()>;
}

#[derive(Clone)]
pub struct Controller {
    scheduler: Arc<Scheduler>,
    problem_loader: Arc<problem_loader::Loader>,
    toolchains_dir: Arc<Path>,
    _config: Arc<crate::config::InvokerConfig>,
    _temp_dir: Arc<tempfile::TempDir>,
    toolchain_loader: Arc<toolchains::ToolchainLoader>,
}

fn get_num_cpus() -> usize {
    static CACHE: std::sync::atomic::AtomicUsize = std::sync::atomic::AtomicUsize::new(0);
    let old = CACHE.load(std::sync::atomic::Ordering::Relaxed);
    if old != 0 {
        return old;
    }
    let corr = num_cpus::get();
    assert_ne!(corr, 0);
    CACHE.store(corr, std::sync::atomic::Ordering::Relaxed);
    corr
}

impl Controller {
    pub async fn new(
        cfg_data: util::cfg::CfgData,
        config: Arc<crate::config::InvokerConfig>,
    ) -> anyhow::Result<Controller> {
        let worker_count = match config.workers {
            Some(cnt) => cnt,
            None => get_num_cpus(),
        };
        info!("Using {} workers", worker_count);
        let mut scheduler = Scheduler::new(&config).context("failed to initialize Scheduler")?;
        for _ in 0..worker_count {
            scheduler
                .add_worker()
                .await
                .context("failed to start a worker")?;
        }
        let scheduler = Arc::new(scheduler);
        let temp_dir = tempfile::TempDir::new().context("can not find temporary dir")?;
        let problem_loader =
            problem_loader::Loader::from_config(&config.problems, temp_dir.path().join("problems"))
                .await
                .context("can not create ProblemLoader")?;
        let toolchain_loader = Arc::new(
            toolchains::ToolchainLoader::new()
                .await
                .context("toolchain loader initialization error")?,
        );
        Ok(Controller {
            scheduler,
            problem_loader: Arc::new(problem_loader),
            toolchains_dir: cfg_data.data_dir.join("opt").into(),
            _config: config,
            _temp_dir: Arc::new(temp_dir),
            toolchain_loader,
        })
    }

    #[instrument(skip(self, chan))]
    pub fn exec_on(self, chan: async_mpmc::Receiver<JudgeRequestAndCallbacks>) {
        chan.process_all(move |req| {
            let this = self.clone();
            async move {
                let request_id = req.request.request_id;
                if let Err(err) = this.process_request(req).await {
                    tracing::warn!(
                        request_id = %request_id,
                        err = %format_args!("{:#}", err),
                        "Failed to process a judge request"
                    );
                }
            }
        });
    }

    #[instrument(skip(self, req), fields(request_id=%req.request.request_id))]
    async fn process_request(&self, req: JudgeRequestAndCallbacks) -> anyhow::Result<()> {
        let (low_req, mut exts) = self
            .lower_judge_request(&req)
            .await
            .context("request preprocessing failed")?;
        debug!(lowered_judge_request = ?low_req, "created a lowered judge request");
        let worker = self.scheduler.find_free_worker().await;
        let mut responses = worker
            .send(Request::Judge(low_req))
            .await
            .context("failed to submit lowered judge request")?;
        loop {
            let message = responses
                .next()
                .await
                .context("failed to receive next worker message")?;
            match message {
                Response::JudgeDone(judge_outcome) => {
                    debug!("Publishing: JudgeOutcome {:?}", &judge_outcome);
                    let reason = match judge_outcome {
                        JudgeOutcome::Fault => InvocationFinishReason::Fault,
                        JudgeOutcome::TestingDone => InvocationFinishReason::TestingDone,
                        JudgeOutcome::CompileError(_) => InvocationFinishReason::CompileError,
                    };
                    req.callbacks
                        .set_finished(req.request.request_id, reason)
                        .await
                        .context("failed to set run outcome in DB")?;
                    break;
                }
                Response::LiveScore(score) => {
                    exts.notifier.set_score(score).await;
                }
                Response::LiveTest(test) => {
                    exts.notifier.set_test(test).await;
                }
                Response::OutcomeHeader(header) => {
                    req.callbacks
                        .add_outcome_header(req.request.request_id, header)
                        .await?;
                }
            }
        }
        Ok(())
    }
}
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("JudgeRequestAndCallbacks")
            .field("request", &self.request)
            .field("handler", &"..")
            .finish()
    }
function_block-function_prefix_line
[ { "content": " /// HTTP Request.\n\n pub trait Request {\n\n type Form: Form;\n\n\n\n /// Sets the header with the given key and value.\n\n fn header(self, name: &'static str, value: &str) -> Self;\n\n\n\n /// Sets body using the given vector of bytes.\n\n ///\n\n /// **NOTE:** Appropriate `Content-Type` header must be set\n\n /// after calling this method.\n\n fn body_bytes(self, body: Vec<u8>) -> Self;\n\n\n\n /// Sets JSON body based on the given value.\n\n fn json<T: serde::Serialize>(self, value: &T) -> Self;\n\n\n\n /// Sets `multipart/form-data` body using the given form.\n\n fn multipart_form_data(self, form: Self::Form) -> Self;\n\n\n\n /// Sets/adds query parameters based on the given value.\n\n ///\n", "file_path": "src/gen-api-client/lib.rs", "rank": 0, "score": 152697.51952865592 }, { "content": "#[async_trait]\n\npub trait Component: std::fmt::Display {\n\n type Error: std::error::Error + Send + Sync;\n\n async fn state(&self) -> Result<StateKind, Self::Error>;\n\n async fn upgrade(&self) -> Result<(), Self::Error>;\n\n fn name(&self) -> &'static str;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum StateKind {\n\n UpToDate,\n\n Upgradable,\n\n Errored,\n\n}\n\n\n\nimpl std::fmt::Display for StateKind {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n std::fmt::Debug::fmt(self, f)\n\n }\n\n}\n", "file_path": "src/setup/src/lib.rs", "rank": 1, "score": 134069.8673101418 }, { "content": "#[derive(Clone)]\n\nstruct ToolchainSpec {\n\n dir: PathBuf,\n\n name: String,\n\n cfg: config::ToolchainConfig,\n\n}\n\nmod tpl_info_impls {\n\n use super::*;\n\n use std::{cmp::*, hash::*};\n\n\n\n impl Hash for ToolchainSpec {\n\n fn hash<H: Hasher>(&self, hasher: &mut H) {\n\n self.name.hash(hasher);\n\n }\n\n }\n\n\n\n impl PartialEq for ToolchainSpec {\n\n fn eq(&self, that: &ToolchainSpec) -> bool {\n\n self.name == that.name\n\n }\n\n }\n\n\n\n impl Eq for ToolchainSpec {}\n\n}\n\n\n", "file_path": "src/configure-toolchains/src/main.rs", "rank": 2, "score": 134033.29984826554 }, { "content": "/// SValuer is pure. Only `ValuerDriver` actually performs some IO, interacting with environment, such as JJS invoker.\n\npub trait ValuerDriver: std::fmt::Debug {\n\n /// Retrieves `ProblemInfo`. 
Will be called once.\n\n fn problem_info(&mut self) -> Result<ProblemInfo>;\n\n /// Sends valuer response\n\n fn send_command(&mut self, cmd: &ValuerResponse) -> Result<()>;\n\n /// Polls notification about test finish\n\n fn poll_notification(&mut self) -> Result<Option<TestDoneNotification>>;\n\n}\n\n\n\n/// SValuer itself\n\n#[derive(Debug)]\n\npub struct SimpleValuer<'a> {\n\n driver: &'a mut dyn ValuerDriver,\n\n /// Amount of tests that are currently running.\n\n running_tests: u32,\n\n /// How many fibers did not emit judge log yet\n\n running_fibers: usize,\n\n /// Amount of tests that were requested to run.\n\n /// It is used for caching purposes.\n\n used_tests: HashSet<TestId>,\n", "file_path": "src/svaluer/src/lib.rs", "rank": 3, "score": 132360.07949773784 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct ToolchainGetBuilder1Container {\n\n param_toolchain_id: Option<String>,\n\n}\n\n\n\nimpl<ToolchainId> ToolchainGetBuilder1<ToolchainId> {\n\n #[inline]\n\n pub fn toolchain_id(mut self, value: impl Into<String>) -> ToolchainGetBuilder1<crate::generics::ToolchainIdExists> {\n\n self.inner.param_toolchain_id = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n}\n\n\n\nimpl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for ToolchainGetBuilder1<crate::generics::ToolchainIdExists> {\n\n type Output = Toolchain;\n\n\n\n const METHOD: http::Method = http::Method::GET;\n\n\n\n fn rel_path(&self) -> std::borrow::Cow<'static, str> {\n\n format!(\"/toolchains/{toolchain_id}\", toolchain_id=self.inner.param_toolchain_id.as_ref().expect(\"missing parameter toolchain_id?\")).into()\n\n }\n\n}\n", "file_path": "src/gen-api-client/toolchain.rs", "rank": 4, "score": 130825.19835766571 }, { "content": "pub fn connect_memory() -> Result<crate::DbConn> {\n\n let opts = ConnectOptions {\n\n pg: None,\n\n redis: None,\n\n };\n\n connect(opts).now_or_never().unwrap()\n\n}\n", "file_path": "src/db/src/connect.rs", "rank": 5, "score": 130731.99523548465 }, { "content": "#[derive(Clone, Copy)]\n\nenum Tracer {\n\n Strace,\n\n Lxtrace,\n\n}\n\n\n", "file_path": "src/configure-toolchains/src/trace.rs", "rank": 6, "score": 126336.39610989532 }, { "content": "struct WorkerInfo {\n\n state: WorkerState,\n\n child_stdout: Mutex<tokio::io::BufReader<tokio::process::ChildStdout>>,\n\n child_stdin: Mutex<tokio::process::ChildStdin>,\n\n}\n\n\n\nimpl WorkerInfo {\n\n pub async fn recv(&self) -> anyhow::Result<Response> {\n\n let mut line = String::new();\n\n let mut child_stdout = self.child_stdout.lock().await;\n\n\n\n child_stdout.read_line(&mut line).await?;\n\n Ok(serde_json::from_str(&line).context(\"parse error\")?)\n\n }\n\n\n\n pub async fn send(&self, req: Request) -> anyhow::Result<()> {\n\n let mut data = serde_json::to_vec(&req)?;\n\n data.push(b'\\n');\n\n self.child_stdin.lock().await.write_all(&data).await?;\n\n Ok(())\n", "file_path": "src/invoker/src/scheduler.rs", "rank": 7, "score": 126002.1898810959 }, { "content": "#[derive(StructOpt, Clone)]\n\nstruct Options {\n\n /// Template files dir\n\n tpls_dir: PathBuf,\n\n /// Out dir (without trailing opt, e.g. 
/home/jjs)\n\n out: PathBuf,\n\n /// Trace log\n\n #[structopt(long, short = \"t\")]\n\n trace: Option<PathBuf>,\n\n /// Only listed toolchains will be processed (overrides `skip`)\n\n #[structopt(long)]\n\n toolchains: Vec<String>,\n\n /// (strategy=trace) Do not treat symlinks like regular files\n\n #[structopt(long)]\n\n copy_symlinks: bool,\n\n\n\n /// (strategy=trace) Allow copying directories\n\n #[structopt(long)]\n\n copy_dirs: bool,\n\n\n\n /// Instead of populating target dir with files, log all actions to file\n", "file_path": "src/configure-toolchains/src/main.rs", "rank": 8, "score": 125632.0754276283 }, { "content": "#[derive(Debug)]\n\nstruct TcsState {\n\n installed: Vec<String>,\n\n extra: Vec<String>,\n\n}\n\n\n\n#[async_trait]\n\nimpl<'a> crate::Component for Toolchains<'a> {\n\n type Error = Error;\n\n\n\n fn name(&self) -> &'static str {\n\n \"toolchains\"\n\n }\n\n\n\n async fn state(&self) -> Result<crate::StateKind, Error> {\n\n if self.state.extra.is_empty() {\n\n Ok(crate::StateKind::UpToDate)\n\n } else {\n\n Ok(crate::StateKind::Upgradable)\n\n }\n\n }\n", "file_path": "src/setup/src/toolchains.rs", "rank": 9, "score": 125626.87165502625 }, { "content": "// double Arc, but who cares?\n\nstruct Callbacks {\n\n inner: Arc<Inner>,\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl JudgeResponseCallbacks for Callbacks {\n\n async fn set_finished(\n\n &self,\n\n invocation_id: uuid::Uuid,\n\n _reason: InvocationFinishReason,\n\n ) -> anyhow::Result<()> {\n\n let run_id = self\n\n .inner\n\n .run_mapping\n\n .lock()\n\n .await\n\n .remove(&invocation_id)\n\n .context(\"corrupted run_id_mapping\")?;\n\n let patch = client::models::RunPatch::patch_run().run_id(run_id);\n\n /* TODO\n", "file_path": "src/invoker/src/sources/api_source.rs", "rank": 10, "score": 124066.33959964375 }, { "content": "struct Callbacks;\n\n\n\n#[async_trait::async_trait]\n\nimpl JudgeResponseCallbacks for Callbacks {\n\n async fn set_finished(\n\n &self,\n\n invocation_id: Uuid,\n\n reason: InvocationFinishReason,\n\n ) -> anyhow::Result<()> {\n\n let reason = match reason {\n\n InvocationFinishReason::CompileError => \"CompileError\",\n\n InvocationFinishReason::TestingDone => \"TestingDone\",\n\n InvocationFinishReason::Fault => \"Fault\",\n\n };\n\n print_message(Message::Finish(FinishedMessage {\n\n invocation_id,\n\n reason,\n\n }))\n\n .await\n\n }\n", "file_path": "src/invoker/src/sources/cli_source.rs", "rank": 11, "score": 124066.33959964375 }, { "content": "#[derive(Deserialize)]\n\nstruct DebootstrapConfig {\n\n packages: Vec<String>,\n\n}\n\n\n\nimpl DebootstrapResolver {\n\n pub(super) fn new(opt: &Options) -> DebootstrapResolver {\n\n DebootstrapResolver {\n\n packages: HashSet::new(),\n\n options: opt.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl Resolver for DebootstrapResolver {\n\n fn strategy(&self) -> Strategy {\n\n Strategy::Debootstrap\n\n }\n\n\n\n fn strategy_name(&self) -> &'static str {\n\n \"debootstrap\"\n", "file_path": "src/configure-toolchains/src/debootstrap.rs", "rank": 12, "score": 123698.9886839414 }, { "content": "#[async_trait]\n\npub trait RunsRepo: std::fmt::Debug + Send + Sync {\n\n async fn run_new(&self, run_data: NewRun) -> Result<Run>;\n\n async fn run_try_load(&self, run_id: RunId) -> Result<Option<Run>>;\n\n async fn run_load(&self, run_id: RunId) -> Result<Run> {\n\n match self.run_try_load(run_id).await? 
{\n\n Some(run) => Ok(run),\n\n None => bail!(\"run_load: unknown run_id\"),\n\n }\n\n }\n\n async fn run_update(&self, run_id: RunId, patch: RunPatch) -> Result<()>;\n\n async fn run_delete(&self, run_id: RunId) -> Result<()>;\n\n async fn run_select(&self, with_run_id: Option<RunId>, limit: Option<u32>) -> Result<Vec<Run>>;\n\n}\n\n\n", "file_path": "src/db/src/repo.rs", "rank": 13, "score": 122561.4681228447 }, { "content": "pub fn print_error(err: &dyn std::error::Error) {\n\n eprintln!(\"error: {}\", err);\n\n let mut iter = err.source();\n\n while let Some(cause) = iter {\n\n eprintln!(\"caused by: {}\", cause);\n\n iter = cause.source();\n\n }\n\n}\n", "file_path": "src/util/src/lib.rs", "rank": 14, "score": 122561.4681228447 }, { "content": "struct DetectScriptOutput {\n\n env: std::collections::HashMap<String, String>,\n\n}\n\n\n\nimpl std::str::FromStr for DetectScriptOutput {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> anyhow::Result<Self> {\n\n let mut this = Self {\n\n env: std::collections::HashMap::new(),\n\n };\n\n for line in s.lines() {\n\n if line.starts_with(\"set-env:\") {\n\n let cmd = line.trim_start_matches(\"set-env:\");\n\n let parts: Vec<_> = cmd.splitn(2, '=').collect();\n\n if parts.len() != 2 {\n\n bail!(\"set-env command does not look like var_name=var_value\");\n\n }\n\n this.env.insert(parts[0].to_string(), parts[1].to_string());\n\n } else {\n\n bail!(\"unknown command: {}\", line);\n\n }\n\n }\n\n Ok(this)\n\n }\n\n}\n\n\n", "file_path": "src/configure-toolchains/src/trace.rs", "rank": 16, "score": 121851.25505595605 }, { "content": "pub fn ensure_exists(path: impl AsRef<Path>) -> anyhow::Result<()> {\n\n use std::io::ErrorKind::*;\n\n match fs::create_dir_all(path) {\n\n Ok(_) => (),\n\n Err(e) => match e.kind() {\n\n AlreadyExists => (),\n\n _ => return Err(e.into()),\n\n },\n\n };\n\n\n\n Ok(())\n\n}\n", "file_path": "src/dist-builder/src/fs_util.rs", "rank": 17, "score": 115544.51000582722 }, { "content": "pub fn detect_build_type() -> BuildInfo {\n\n (*BUILD_INFO).clone()\n\n}\n", "file_path": "src/devtool/src/ci.rs", "rank": 18, "score": 112442.82964928073 }, { "content": "class Toolchain(pydantic.BaseModel):\n\n id: str\n\n description: str\n", "file_path": "src/apiserver/api_models.py", "rank": 20, "score": 82831.4770926297 }, { "content": "#[derive(StructOpt, Copy, Clone)]\n\nenum Subcommand {\n\n Describe,\n\n Upgrade,\n\n}\n\n\n\nimpl Subcommand {\n\n fn is_upgrade(self) -> bool {\n\n match self {\n\n Subcommand::Describe => false,\n\n Subcommand::Upgrade => true,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/setup/src/main.rs", "rank": 21, "score": 82007.54945484035 }, { "content": "#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\nenum Workflow {\n\n Pr,\n\n Deploy,\n\n}\n\n\n\nimpl Workflow {\n\n fn detect() -> Workflow {\n\n let workflow_name = var(\"GITHUB_WORKFLOW\").expect(\"GITHUB_WORKFLOW not exists\");\n\n match workflow_name.as_str() {\n\n \"deploy\" => Workflow::Deploy,\n\n \"ci\" => Workflow::Pr,\n\n other => panic!(\"Unknown workflow name: {}\", other),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct BuildInfo {\n\n ty: BuildType,\n\n}\n", "file_path": "src/devtool/src/ci.rs", "rank": 22, "score": 82002.38230179495 }, { "content": "#[derive(thiserror::Error, Debug)]\n\nenum Error {\n\n #[error(\"failed to read {filename}: {source}\")]\n\n ReadFile {\n\n filename: String,\n\n #[source]\n\n source: std::io::Error,\n\n },\n\n #[error(\"userlist is malformed: {}\", source)]\n\n Format {\n\n #[from]\n\n source: 
list_parse::ParseError,\n\n },\n\n #[error(\"api error: {source}\")]\n\n Api {\n\n #[from]\n\n source: client::Error,\n\n },\n\n #[error(\"invalid base64 string\")]\n\n Base64,\n\n}\n\n\n", "file_path": "src/userlist/src/main.rs", "rank": 23, "score": 82002.38230179495 }, { "content": "#[derive(StructOpt)]\n\nstruct Opts {\n\n profile: PathBuf,\n\n #[structopt(subcommand)]\n\n action: Subcommand,\n\n}\n\n\n", "file_path": "src/setup/src/main.rs", "rank": 24, "score": 81294.39360257768 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Argv {\n\n #[structopt(long = \"cgroupfs\", short = \"c\", default_value = \"/sys/fs/cgroup\")]\n\n cgroupfs: String,\n\n\n\n #[structopt(long = \"root\", short = \"r\")]\n\n root: String,\n\n\n\n #[structopt(long = \"jail\", short = \"j\")]\n\n jail_id: String,\n\n}\n\n\n", "file_path": "src/cleanup/src/main.rs", "rank": 25, "score": 81294.35646027152 }, { "content": "struct Run {\n\n inner: client::models::Run,\n\n current_score: i64,\n\n current_test: i64,\n\n}\n\n\n\nimpl Run {\n\n fn new(inner: client::models::Run) -> Run {\n\n Run {\n\n inner,\n\n current_score: 0,\n\n current_test: 0,\n\n }\n\n }\n\n\n\n fn into_inner(self) -> client::models::Run {\n\n self.inner\n\n }\n\n\n\n async fn poll(\n", "file_path": "src/cli/src/submit.rs", "rank": 26, "score": 81289.15268766948 }, { "content": "#[derive(Clone)]\n\nstruct State {\n\n task_tx: async_mpmc::Sender<JudgeRequestAndCallbacks>,\n\n cancel_token: tokio::sync::CancellationToken,\n\n}\n\n\n\nasync fn route_ping() -> impl Responder {\n\n HttpResponse::Ok()\n\n .content_type(\"text/plain\")\n\n .body(\"hello, world!\")\n\n}\n\n\n\nasync fn route_ready() -> impl Responder {\n\n \"\"\n\n}\n\n\n\nasync fn route_shutdown(state: web::Data<State>) -> impl Responder {\n\n tracing::info!(\"invoker api: got shutdown request\");\n\n state.cancel_token.cancel();\n\n \"cancellation triggered\"\n\n}\n", "file_path": "src/invoker/src/api.rs", "rank": 27, "score": 81289.15268766948 }, { "content": "struct TestgenSession {\n\n int test_id = 0;\n\n Generator gen;\n\n\n\n TestgenSession(uint64_t _seed);\n\n};\n\n\n\n/// Call this first in test generator\n\nTestgenSession init();\n\n} // namespace testgen\n", "file_path": "src/jtl/include/testgen.h", "rank": 28, "score": 81289.15268766948 }, { "content": "#[derive(Clap)]\n\n#[clap(author, about)]\n\nstruct Opt {\n\n #[clap(subcommand)]\n\n sub: SubOpt,\n\n}\n\n\n", "file_path": "src/cli/src/main.rs", "rank": 29, "score": 81289.15268766948 }, { "content": "#[derive(Debug)]\n\nenum State {\n\n Building,\n\n Running(RunningState),\n\n Waiting(WaitingState),\n\n Skipped(SkippedState),\n\n Finished(FinishedState),\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Group {\n\n id: SubtaskId,\n\n dep_groups: Vec<u32>,\n\n test_vis_flags: TestVisibleComponents,\n\n subtask_vis_flags: SubtaskVisibleComponents,\n\n run_all_tests: bool,\n\n state: State,\n\n tests: Vec<TestId>,\n\n score: u32,\n\n}\n\n\n", "file_path": "src/svaluer/src/fiber/group.rs", "rank": 30, "score": 80990.60221211906 }, { "content": "enum SystemHealth {\n\n Ok,\n\n Error,\n\n}\n\n\n\nasync fn process_component<C: Component>(\n\n component: C,\n\n upgrade: bool,\n\n) -> Result<SystemHealth, C::Error> {\n\n let kind = component.state().await?;\n\n let name = component.name();\n\n let health = if matches!(kind, setup::StateKind::Errored) {\n\n SystemHealth::Error\n\n } else {\n\n SystemHealth::Ok\n\n };\n\n if upgrade {\n\n match kind {\n\n setup::StateKind::Errored => {\n\n eprintln!(\n", "file_path": 
"src/setup/src/main.rs", "rank": 31, "score": 80990.60221211906 }, { "content": "#[derive(Clap)]\n\nenum CliArgs {\n\n /// Lint project\n\n Check(check::CheckOpts),\n\n /// Run all tests\n\n Test(TestArgs),\n\n /// Clean all build files except Cargo's\n\n Clean,\n\n /// Perform build & install\n\n Build(build::RawBuildOpts),\n\n /// remove target files, related to JJS. This should prevent cache invalidation\n\n CiClean,\n\n /// Format C++ code\n\n FmtCpp,\n\n /// Launch development version of JJS\n\n Run(run::Opts),\n\n /// Generate some code\n\n Codegen(codegen::Opts),\n\n}\n\n\n", "file_path": "src/devtool/src/main.rs", "rank": 32, "score": 80990.60221211906 }, { "content": "#[derive(Debug)]\n\nenum DatabaseVersion {\n\n RevisionInfoMissing,\n\n Missing,\n\n Version {\n\n current: String,\n\n latest: Option<String>,\n\n },\n\n}\n\n\n\npub struct Database<'a> {\n\n version: DatabaseVersion,\n\n cx: DbContext<'a>,\n\n}\n\n\n\n#[async_trait]\n\nimpl<'a> crate::Component for Database<'a> {\n\n type Error = Error;\n\n\n\n async fn state(&self) -> Result<crate::StateKind, Error> {\n\n let status = match &self.version {\n", "file_path": "src/setup/src/db.rs", "rank": 33, "score": 80990.60221211906 }, { "content": "#[derive(PartialEq, Eq, Clone, Debug)]\n\nenum BuildType {\n\n /// not a CI build\n\n NotCi,\n\n /// PR build,`bors try` or `bors r+`\n\n Check { ty: CheckJobType, privileged: bool },\n\n /// we are on master, want to build something special\n\n Deploy(DeployKind),\n\n}\n\n\n\n#[derive(Eq, PartialEq, Clone, Debug, Copy)]\n\npub enum DeployKind {\n\n Docker,\n\n Man,\n\n Deb,\n\n}\n\n\n\nimpl DeployKind {\n\n fn detect() -> DeployKind {\n\n let e = var(\"JJS_DT_DEPLOY\").expect(\"JJS_DT_DEPLOY missing\");\n\n match e.as_str() {\n\n \"docker\" => DeployKind::Docker,\n\n \"man\" => DeployKind::Man,\n\n \"deb\" => DeployKind::Deb,\n\n _ => unreachable!(\"unknown deploy kind: {}\", &e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/devtool/src/ci.rs", "rank": 34, "score": 80990.60221211906 }, { "content": "#[derive(Clap)]\n\nenum SubOpt {\n\n Submit(submit::Opt),\n\n ManageRuns(runs::Opt),\n\n Login(login::Opt),\n\n Toolchains(toolchains::Opt),\n\n Wait(wait::Opt),\n\n Problems(problems::Opt),\n\n Completion(completion::Opt),\n\n ApiVersion,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n util::log::setup();\n\n if let Err(err) = real_main().await {\n\n eprintln!(\"Error: {:#}\", err);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "src/cli/src/main.rs", "rank": 35, "score": 80990.60221211906 }, { "content": "#[derive(Copy, Clone)]\n\nenum ItemState {\n\n OnlyCert,\n\n OnlyKey,\n\n Both,\n\n}\n\n\n", "file_path": "src/setup/src/certs.rs", "rank": 36, "score": 80990.60221211906 }, { "content": "#[derive(Copy, Clone, Hash, Eq, PartialEq)]\n\nenum ItemKind {\n\n CertificateAuthority,\n\n Invoker,\n\n Root,\n\n}\n\n\n\nimpl ItemKind {\n\n fn all() -> impl Iterator<Item = ItemKind> {\n\n const ALL: &[ItemKind] = &[\n\n ItemKind::CertificateAuthority,\n\n ItemKind::Invoker,\n\n ItemKind::Root,\n\n ];\n\n ALL.iter().copied()\n\n }\n\n\n\n fn file_stem(self) -> &'static str {\n\n match self {\n\n ItemKind::CertificateAuthority => \"ca\",\n\n ItemKind::Invoker => \"invoker\",\n\n ItemKind::Root => \"root\",\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/setup/src/certs.rs", "rank": 37, "score": 80990.60221211906 }, { "content": "struct CheckerData {\n\n checker::CheckerInput inp;\n\n FILE* out_file = nullptr;\n\n FILE* comment_file = nullptr;\n\n};\n\n\n\nCheckerData 
CHECKER;\n\n\n\nchecker::CheckerInput checker::init(bool open_files) {\n\n checker::CheckerInput inp;\n\n if (open_files) {\n\n inp.corr_answer = get_env_file(\"JJS_CORR\", \"r\");\n\n inp.sol_answer = get_env_file(\"JJS_SOL\", \"r\");\n\n inp.test = get_env_file(\"JJS_TEST\", \"r\");\n\n } else {\n\n inp.fd_corr = get_env_int(\"JJS_CORR\");\n\n inp.fd_sol = get_env_int(\"JJS_SOL\");\n\n inp.fd_test = get_env_int(\"JJS_TEST\");\n\n }\n\n CHECKER.out_file = get_env_file(\"JJS_CHECKER_OUT\", \"w\");\n", "file_path": "src/jtl/src/checker.cpp", "rank": 38, "score": 80281.07775725001 }, { "content": "struct CertsState {\n\n items: HashMap<ItemKind, ItemState>,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Context<'a> {\n\n pub data_dir: &'a Path,\n\n pub can_create_ca: bool,\n\n}\n\n\n\nasync fn get_state(cx: Context<'_>) -> Result<CertsState, Error> {\n\n let mut items = HashMap::new();\n\n let pki_dir = cx.data_dir.join(\"etc/pki\");\n\n for kind in ItemKind::all() {\n\n let certificate_path = pki_dir.join(format!(\"{}.crt\", kind.file_stem()));\n\n let key_path = pki_dir.join(format!(\"{}.key\", kind.file_stem()));\n\n let has_cert = certificate_path.exists();\n\n let has_key = key_path.exists();\n\n let state = match (has_cert, has_key) {\n\n (true, true) => Some(ItemState::Both),\n", "file_path": "src/setup/src/certs.rs", "rank": 39, "score": 80281.07775725001 }, { "content": "#[derive(Debug)]\n\nstruct TermDriver {\n\n current_tests: HashSet<TestId>,\n\n full_judge_log: Option<invoker_api::valuer_proto::JudgeLog>,\n\n}\n\n\n\nmod term_driver {\n\n use super::TermDriver;\n\n use anyhow::{Context, Result};\n\n use invoker_api::valuer_proto;\n\n use pom::TestId;\n\n use std::{\n\n io::{stdin, stdout, Write},\n\n str::FromStr,\n\n };\n\n fn read_value<T: FromStr>(what: impl AsRef<str>) -> Result<T>\n\n where\n\n <T as FromStr>::Err: std::error::Error,\n\n {\n\n let mut user_input = String::new();\n\n loop {\n", "file_path": "src/svaluer/src/main.rs", "rank": 40, "score": 80281.07775725001 }, { "content": "#[derive(Clone)]\n\nstruct OptionStorage {\n\n base64: bool,\n\n groups: Vec<String>,\n\n ignore_fail: bool,\n\n}\n\n\n\nimpl OptionStorage {\n\n fn new() -> OptionStorage {\n\n OptionStorage {\n\n base64: false,\n\n groups: Vec::new(),\n\n ignore_fail: false,\n\n }\n\n }\n\n\n\n fn flag(&mut self, flag: &str) {\n\n match flag {\n\n \"base64\" => {\n\n self.base64 = true;\n\n }\n", "file_path": "src/userlist/src/main.rs", "rank": 41, "score": 80281.07775725001 }, { "content": "#[derive(Debug)]\n\nstruct MockDriver {\n\n tests: VecDeque<TestMock>,\n\n pending_notifications: VecDeque<TestDoneNotification>,\n\n live_scores: VecDeque<u32>,\n\n problem_info: ProblemInfo,\n\n judge_logs: Vec<JudgeLog>,\n\n}\n\nimpl MockDriver {\n\n fn new(problem_info: ProblemInfo) -> Self {\n\n Self {\n\n tests: VecDeque::new(),\n\n problem_info,\n\n live_scores: VecDeque::new(),\n\n pending_notifications: VecDeque::new(),\n\n judge_logs: Vec::new(),\n\n }\n\n }\n\n\n\n fn add_test(&mut self, test_id: u32, live: bool, ok: bool) -> &mut Self {\n\n let mock = TestMock {\n", "file_path": "src/svaluer/src/tests.rs", "rank": 42, "score": 80281.07775725001 }, { "content": "struct ProblemsState {\n\n config_problems: Vec<String>,\n\n installable_problems: Vec<(String, PathBuf)>,\n\n copyable_problems: Vec<(String, PathBuf)>,\n\n}\n\n\n\nimpl ProblemsState {\n\n fn filter_iterator<'a>(\n\n &'a self,\n\n iter: impl Iterator<Item = &'a (String, PathBuf)> + 'a,\n\n ) -> impl Iterator<Item = (&'a str, &'a Path)> + 'a 
{\n\n iter.filter(move |s| !self.config_problems.contains(&s.0))\n\n .map(|(s, p)| (s.as_str(), p.as_path()))\n\n }\n\n\n\n fn extra_installable<'a>(&'a self) -> impl Iterator<Item = (&'a str, &'a Path)> + 'a {\n\n self.filter_iterator(self.installable_problems.iter())\n\n }\n\n\n\n fn extra_copyable<'a>(&'a self) -> impl Iterator<Item = (&'a str, &'a Path)> + 'a {\n", "file_path": "src/setup/src/problems.rs", "rank": 43, "score": 80281.07775725001 }, { "content": "#[derive(Debug, Default)]\n\nstruct Data {\n\n // None if run was deleted\n\n runs: Vec<Option<Run>>,\n\n invs: Vec<Invocation>,\n\n users: Vec<User>,\n\n kv: std::collections::HashMap<String, Vec<u8>>,\n\n parts: Vec<Participation>,\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct MemoryRepo {\n\n conn: Mutex<Data>,\n\n}\n\n\n\nimpl MemoryRepo {\n\n pub fn new() -> Self {\n\n // TODO duplicates db/migrations/<initial>/up.sql\n\n let this: Self = Self::default();\n\n this.user_new(NewUser {\n\n username: \"Global/Root\".to_string(),\n", "file_path": "src/db/src/repo/memory.rs", "rank": 44, "score": 80281.07775725001 }, { "content": "struct ConfigState {\n\n items: Vec<(String, ConfigStateItem)>,\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Context<'a> {\n\n pub data_dir: &'a Path,\n\n pub install_dir: &'a Path,\n\n}\n\n\n\npub struct Cfg<'a> {\n\n cx: Context<'a>,\n\n state: ConfigState,\n\n}\n\n\n\nimpl std::fmt::Display for Cfg<'_> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n for item in &self.state.items {\n\n let state_description = match item.1 {\n\n ConfigStateItem::CanCopy => \"copyable\",\n", "file_path": "src/setup/src/config.rs", "rank": 45, "score": 80281.07775725001 }, { "content": "#[derive(Debug)]\n\nstruct TestMock {\n\n test_id: TestId,\n\n live: bool,\n\n status: Status,\n\n}\n\n\n", "file_path": "src/svaluer/src/tests.rs", "rank": 46, "score": 80281.07775725001 }, { "content": "#[derive(clap::Clap)]\n\nstruct Opt {\n\n /// Directory used for build files\n\n #[clap(long = \"build-dir\", default_value = \"target\")]\n\n build_dir: PathBuf,\n\n /// Components and sections to enable\n\n ///\n\n /// Available sections: tools, daemons, suggested.\n\n /// Available components: apiserver, invoker.\n\n #[clap(long = \"enable\")]\n\n enable: Vec<String>,\n\n /// Cargo path\n\n #[clap(long, env = \"CARGO\")]\n\n cargo: Option<String>,\n\n /// CMake path\n\n #[clap(long, env = \"CMAKE\")]\n\n cmake: Option<String>,\n\n /// Target triple\n\n #[clap(long = \"target\", short = 'T')]\n\n target: Option<String>,\n\n /// Optimization\n", "file_path": "src/dist-builder/src/main.rs", "rank": 47, "score": 80281.07775725001 }, { "content": "#[derive(Copy, Clone, Eq, PartialEq)]\n\nenum GroupVisPreset {\n\n Full,\n\n Brief,\n\n Hidden,\n\n}\n\n\n\nimpl GroupVisPreset {\n\n fn subtask_flags_for(self, k: JudgeLogKind) -> SubtaskVisibleComponents {\n\n let mut out = SubtaskVisibleComponents::empty();\n\n if self == GroupVisPreset::Full || k == JudgeLogKind::Full {\n\n out |= SubtaskVisibleComponents::all();\n\n }\n\n if self == GroupVisPreset::Brief || k == JudgeLogKind::Full {\n\n out |= SubtaskVisibleComponents::SCORE;\n\n }\n\n out\n\n }\n\n\n\n fn test_flags_for(self, k: JudgeLogKind) -> TestVisibleComponents {\n\n let mut out = TestVisibleComponents::empty();\n", "file_path": "src/svaluer/src/fiber.rs", "rank": 48, "score": 80021.77855481431 }, { "content": "#[derive(Copy, Clone)]\n\nenum DataLayoutState {\n\n Exists,\n\n NotExists,\n\n Unknown,\n\n}\n\n\n\npub struct DataLayout<'a> {\n\n cx: 
Context<'a>,\n\n state: DataLayoutState,\n\n}\n\n\n\nimpl std::fmt::Display for DataLayout<'_> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self.state {\n\n DataLayoutState::Exists => write!(f, \"exists and non-empty\"),\n\n DataLayoutState::NotExists => write!(f, \"does not exist\"),\n\n DataLayoutState::Unknown => write!(f, \"unexpected\"),\n\n }\n\n }\n\n}\n", "file_path": "src/setup/src/data.rs", "rank": 49, "score": 80021.77855481431 }, { "content": "enum ImportKind {\n\n Problem,\n\n Contest,\n\n}\n\n\n", "file_path": "src/pps/server/src/import.rs", "rank": 50, "score": 80021.77855481431 }, { "content": "enum ConfigStateItem {\n\n Exists,\n\n CanCopy,\n\n}\n\n\n", "file_path": "src/setup/src/config.rs", "rank": 51, "score": 80021.77855481431 }, { "content": "#[derive(PartialEq, Eq, Clone, Debug)]\n\nenum CheckJobType {\n\n EndToEnd,\n\n __Other,\n\n}\n\n\n\nimpl CheckJobType {\n\n fn detect() -> Option<CheckJobType> {\n\n std::env::var(\"JOB\")\n\n .ok()\n\n .and_then(|name| match name.as_str() {\n\n \"e2e\" => Some(CheckJobType::EndToEnd),\n\n _ => panic!(\"unknown job name: {}\", name),\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/devtool/src/ci.rs", "rank": 52, "score": 80021.77855481431 }, { "content": "enum WorkerStateKind {\n\n /// Worker is ready for new tasks\n\n Idle,\n\n /// Worker is ready, but it is locked by a WorkerHandle\n\n Locked,\n\n /// Worker is juding run\n\n Judge,\n\n /// Worker has crashed\n\n Crash,\n\n}\n\n\n", "file_path": "src/invoker/src/scheduler.rs", "rank": 53, "score": 80021.77855481431 }, { "content": "#[derive(clap::Clap)]\n\nstruct Opts {\n\n /// Build testlib\n\n #[clap(long)]\n\n testlib: bool,\n\n /// Build user manual\n\n #[clap(long)]\n\n man: bool,\n\n /// Build api docs\n\n #[clap(long)]\n\n apidoc: bool,\n\n /// Build rustdoc\n\n #[clap(long)]\n\n rustdoc: bool,\n\n /// Generate env activate script\n\n #[clap(long)]\n\n envscript: bool,\n\n /// Source dir\n\n #[clap(long, default_value = \".\")]\n\n source: PathBuf,\n\n /// Build dir\n", "file_path": "src/dist-files-generator/src/main.rs", "rank": 54, "score": 79315.80195187307 }, { "content": "struct Introspector {\n\n gen: schemars::gen::SchemaGenerator,\n\n}\n\n\n\nimpl Introspector {\n\n fn new() -> Introspector {\n\n let mut settings = schemars::gen::SchemaSettings::openapi3();\n\n settings.meta_schema = None;\n\n settings.definitions_path = \"#/components/schemas/\".to_string();\n\n Introspector {\n\n gen: settings.into_generator(),\n\n }\n\n }\n\n\n\n fn add_object<T: crate::api::ApiObject>(&mut self) -> &mut Self {\n\n let schema = self.gen.subschema_for::<T>();\n\n assert!(schema.is_ref());\n\n let name = <T as crate::api::ApiObject>::name().to_string();\n\n let qual_ty_name = std::any::type_name::<T>();\n\n let ty_name = qual_ty_name.rsplit(\"::\").next().unwrap();\n", "file_path": "src/apiserver_old/engine/src/introspect.rs", "rank": 55, "score": 79315.80195187307 }, { "content": "// TODO: cache expiration, checksum, etc\n\n/// Stores cached problem information\n\nstruct ProblemCache {\n\n /// Maps problem name to problem cache.\n\n items: HashMap<String, ProblemCacheItem>,\n\n}\n\n\n\nimpl ProblemCache {\n\n fn new() -> ProblemCache {\n\n ProblemCache {\n\n items: HashMap::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/problem-loader/src/lib.rs", "rank": 56, "score": 79315.80195187307 }, { "content": "#[derive(Debug)]\n\nstruct FinishedState {\n\n score: u32,\n\n success: bool,\n\n tests: Vec<(TestId, Status)>,\n\n}\n\n\n", "file_path": 
"src/svaluer/src/fiber/group.rs", "rank": 57, "score": 79315.80195187307 }, { "content": "struct Inner {\n\n api: client::ApiClient,\n\n run_mapping: tokio::sync::Mutex<std::collections::HashMap<uuid::Uuid, String>>,\n\n}\n\n\n\n/// Fetches tasks from JJS API\n\npub struct ApiSource {\n\n inner: Arc<Inner>,\n\n chan: async_mpmc::Sender<JudgeRequestAndCallbacks>,\n\n}\n\n\n", "file_path": "src/invoker/src/sources/api_source.rs", "rank": 58, "score": 79315.80195187307 }, { "content": "#[derive(Debug)]\n\nstruct RunningState {\n\n queued_tests: BTreeSet<TestId>,\n\n succeeded_tests: BTreeSet<(TestId, Status)>,\n\n failed_tests: BTreeSet<(TestId, Status)>,\n\n running_tests: BTreeSet<TestId>,\n\n}\n\n\n", "file_path": "src/svaluer/src/fiber/group.rs", "rank": 59, "score": 79315.80195187307 }, { "content": "struct Params {\n\n source: PathBuf,\n\n build: PathBuf,\n\n output: PathBuf,\n\n cmake_build_type: String,\n\n}\n\n\n", "file_path": "src/dist-files-generator/src/main.rs", "rank": 60, "score": 79315.80195187307 }, { "content": "#[derive(Debug)]\n\nstruct WaitingState {\n\n deps: BTreeSet<u32>,\n\n}\n\n\n", "file_path": "src/svaluer/src/fiber/group.rs", "rank": 61, "score": 79315.80195187307 }, { "content": "#[derive(Debug)]\n\nstruct SkippedState {\n\n failed_dep: u32,\n\n}\n\n\n", "file_path": "src/svaluer/src/fiber/group.rs", "rank": 62, "score": 79315.80195187307 }, { "content": "#[derive(Debug)]\n\nstruct EmptyError;\n\n\n\nimpl std::fmt::Display for EmptyError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n f.write_str(\"internal error\")\n\n }\n\n}\n\n\n\nimpl std::error::Error for EmptyError {}\n\n\n", "file_path": "src/apiserver_old/engine/src/api.rs", "rank": 63, "score": 78390.65629073785 }, { "content": "#[derive(Parser)]\n\n#[grammar = \"gram.pest\"]\n\nstruct RawAstParser;\n\n\n\npub(crate) type ParseError = pest::error::Error<Rule>;\n\n\n\npub(crate) enum Option {\n\n Flag(String),\n\n Setting(String, String),\n\n}\n\n\n\npub(crate) enum StatementData {\n\n AddUser {\n\n username: String,\n\n password: String,\n\n options: Vec<Option>,\n\n },\n\n SetOpt {\n\n options: Vec<Option>,\n\n },\n\n}\n\n\n\npub struct Statement {\n\n pub(crate) data: StatementData,\n\n}\n\n\n", "file_path": "src/userlist/src/list_parse.rs", "rank": 64, "score": 78390.65629073785 }, { "content": "struct ProblemCacheItem {\n\n assets: PathBuf,\n\n manifest: pom::Problem,\n\n}\n\n\n\npub struct Loader {\n\n registries: Vec<Box<dyn Registry>>,\n\n cache: tokio::sync::Mutex<ProblemCache>,\n\n /// Each problem will be represented by ${cache_dir}/${problem_name}\n\n cache_dir: PathBuf,\n\n}\n\n\n\nimpl Loader {\n\n pub async fn from_config(conf: &LoaderConfig, cache_dir: PathBuf) -> anyhow::Result<Loader> {\n\n let mut loader = Loader {\n\n registries: vec![],\n\n cache_dir: cache_dir.to_path_buf(),\n\n cache: tokio::sync::Mutex::new(ProblemCache::new()),\n\n };\n\n if let Some(fs) = &conf.fs {\n", "file_path": "src/problem-loader/src/lib.rs", "rank": 65, "score": 78390.65629073785 }, { "content": "struct Random;\n\n\n", "file_path": "src/jtl/src/builtin/checker-cmp-tokens.cpp", "rank": 66, "score": 78390.65629073785 }, { "content": "struct Args {\n\n bool enable_epsilon = false;\n\n long double epsilon = 0.0;\n\n bool ignore_case = false;\n\n};\n\n\n\nbool is_float(const char* s) {\n\n const size_t n = strlen(s);\n\n size_t cnt = 0;\n\n for (size_t i = 0; i < n; ++i) {\n\n if (s[i] != '.' 
&& (s[i] < '0' || s[i] > '9')) {\n\n return false;\n\n }\n\n if (s[i] == '.') {\n\n ++cnt;\n\n }\n\n }\n\n return cnt <= 1 && s[0] != '.' && s[n - 1] != '.';\n\n}\n\n\n", "file_path": "src/jtl/src/builtin/checker-cmp-tokens.cpp", "rank": 67, "score": 78390.65629073785 }, { "content": "struct RunOutcome {\n\n var: RunOutcomeVar,\n\n resource_usage: minion::ResourceUsageData,\n\n}\n\n\n", "file_path": "src/invoker/src/worker/exec_test.rs", "rank": 68, "score": 78390.65629073785 }, { "content": "enum RunOutcomeVar {\n\n Success { out_data_path: PathBuf },\n\n Fail(Status),\n\n}\n\n\n", "file_path": "src/invoker/src/worker/exec_test.rs", "rank": 69, "score": 78202.50357408529 }, { "content": "enum FileCategory {\n\n Validator,\n\n Checker,\n\n Generator,\n\n}\n\n\n\nimpl FileCategory {\n\n fn derive(name: &str) -> Option<FileCategory> {\n\n if name == \"check\" || name == \"checker\" {\n\n return Some(FileCategory::Checker);\n\n }\n\n\n\n if name == \"validator\" {\n\n return Some(FileCategory::Validator);\n\n }\n\n\n\n if name.starts_with(\"gen\") {\n\n return Some(FileCategory::Generator);\n\n }\n\n\n", "file_path": "src/pps/server/src/import/problem_importer.rs", "rank": 70, "score": 78202.50357408529 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct RunGetBuilder2Container {\n\n param_run_id: Option<String>,\n\n}\n\n\n\nimpl<RunId> RunGetBuilder2<RunId> {\n\n #[inline]\n\n pub fn run_id(mut self, value: impl Into<String>) -> RunGetBuilder2<crate::generics::RunIdExists> {\n\n self.inner.param_run_id = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n}\n\n\n\nimpl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for RunGetBuilder2<crate::generics::RunIdExists> {\n\n type Output = Run;\n\n\n\n const METHOD: http::Method = http::Method::GET;\n\n\n\n fn rel_path(&self) -> std::borrow::Cow<'static, str> {\n\n format!(\"/runs/{run_id}\", run_id=self.inner.param_run_id.as_ref().expect(\"missing parameter run_id?\")).into()\n\n }\n\n}\n", "file_path": "src/gen-api-client/run.rs", "rank": 71, "score": 77503.1891931067 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct ContestGetBuilder1Container {\n\n param_contest_name: Option<String>,\n\n}\n\n\n\nimpl<ContestName> ContestGetBuilder1<ContestName> {\n\n #[inline]\n\n pub fn contest_name(mut self, value: impl Into<String>) -> ContestGetBuilder1<crate::generics::ContestNameExists> {\n\n self.inner.param_contest_name = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n}\n\n\n\nimpl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for ContestGetBuilder1<crate::generics::ContestNameExists> {\n\n type Output = Contest;\n\n\n\n const METHOD: http::Method = http::Method::GET;\n\n\n\n fn rel_path(&self) -> std::borrow::Cow<'static, str> {\n\n format!(\"/contests/{contest_name}\", contest_name=self.inner.param_contest_name.as_ref().expect(\"missing parameter contest_name?\")).into()\n\n }\n\n}\n", "file_path": "src/gen-api-client/contest.rs", "rank": 72, "score": 77503.1891931067 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct RunPostBuilderContainer {\n\n param_limit: Option<i64>,\n\n}\n\n\n\nimpl<Limit> RunPostBuilder<Limit> {\n\n #[inline]\n\n pub fn limit(mut self, value: impl Into<i64>) -> RunPostBuilder<crate::generics::LimitExists> {\n\n self.inner.param_limit = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n}\n\n\n\nimpl<Client: crate::client::ApiClient + Sync + 'static> 
crate::client::Sendable<Client> for RunPostBuilder<crate::generics::LimitExists> {\n\n type Output = Vec<Run>;\n\n\n\n const METHOD: http::Method = http::Method::POST;\n\n\n\n fn rel_path(&self) -> std::borrow::Cow<'static, str> {\n\n \"/queue\".into()\n\n }\n", "file_path": "src/gen-api-client/run.rs", "rank": 73, "score": 77503.1891931067 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct MiscellaneousGetBuilder1Container {\n\n param_run_id: Option<String>,\n\n}\n\n\n\nimpl<RunId> MiscellaneousGetBuilder1<RunId> {\n\n #[inline]\n\n pub fn run_id(mut self, value: impl Into<String>) -> MiscellaneousGetBuilder1<crate::generics::RunIdExists> {\n\n self.inner.param_run_id = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n}\n\n\n\nimpl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for MiscellaneousGetBuilder1<crate::generics::RunIdExists> {\n\n type Output = String;\n\n\n\n const METHOD: http::Method = http::Method::GET;\n\n\n\n fn rel_path(&self) -> std::borrow::Cow<'static, str> {\n\n format!(\"/runs/{run_id}/source\", run_id=self.inner.param_run_id.as_ref().expect(\"missing parameter run_id?\")).into()\n\n }\n", "file_path": "src/gen-api-client/miscellaneous.rs", "rank": 74, "score": 77503.1891931067 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct ProblemGetBuilderContainer {\n\n param_contest_name: Option<String>,\n\n}\n\n\n\nimpl<ContestName> ProblemGetBuilder<ContestName> {\n\n #[inline]\n\n pub fn contest_name(mut self, value: impl Into<String>) -> ProblemGetBuilder<crate::generics::ContestNameExists> {\n\n self.inner.param_contest_name = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n}\n\n\n\nimpl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for ProblemGetBuilder<crate::generics::ContestNameExists> {\n\n type Output = Vec<Problem>;\n\n\n\n const METHOD: http::Method = http::Method::GET;\n\n\n\n fn rel_path(&self) -> std::borrow::Cow<'static, str> {\n\n format!(\"/contests/{contest_name}/problems\", contest_name=self.inner.param_contest_name.as_ref().expect(\"missing parameter contest_name?\")).into()\n\n }\n\n}\n", "file_path": "src/gen-api-client/problem.rs", "rank": 75, "score": 77503.1891931067 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct MiscellaneousPutBuilderContainer {\n\n param_problem_id: Option<String>,\n\n param_problem_assets: Option<String>,\n\n param_problem_manifest: Option<String>,\n\n}\n\n\n\nimpl<ProblemId, ProblemAssets, ProblemManifest> MiscellaneousPutBuilder<ProblemId, ProblemAssets, ProblemManifest> {\n\n #[inline]\n\n pub fn problem_id(mut self, value: impl Into<String>) -> MiscellaneousPutBuilder<crate::generics::ProblemIdExists, ProblemAssets, ProblemManifest> {\n\n self.inner.param_problem_id = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n\n\n #[inline]\n\n pub fn problem_assets(mut self, value: impl Into<String>) -> MiscellaneousPutBuilder<ProblemId, crate::generics::ProblemAssetsExists, ProblemManifest> {\n\n self.inner.param_problem_assets = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n\n\n #[inline]\n", "file_path": "src/gen-api-client/miscellaneous.rs", "rank": 76, "score": 77503.1891931067 }, { "content": "pub fn setup() {\n\n static ONCE: Once = Once::new();\n\n ONCE.call_once(|| {\n\n tracing_subscriber::fmt()\n\n .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())\n\n 
.with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE)\n\n .with_ansi(false)\n\n // TODO allow customization\n\n .without_time()\n\n .with_writer(std::io::stderr)\n\n .init();\n\n });\n\n}\n", "file_path": "src/util/src/log.rs", "rank": 77, "score": 76935.9631466778 }, { "content": " pub trait Seal {}\n\n}\n\nuse seal::Seal;\n", "file_path": "src/entity/src/entities.rs", "rank": 78, "score": 76935.9631466778 }, { "content": "/// Marker trait for whether the delimiting unit struct can be used by\n\n/// iterators. This is not implemented by `multi` - Multiple instances are\n\n/// allowed only in form data and query, and we need something for parsing\n\n/// stuff from CLI. At the same time, we also cannot allow serializing this\n\n/// container in the same way as others.\n\npub trait Allowed {}\n\n\n\nmacro_rules! impl_delim {\n\n ($ty:ident => $delim:expr) => {\n\n #[derive(Debug, Clone)]\n\n pub struct $ty;\n\n\n\n impl Delimiting for $ty {\n\n const DELIMITER: char = $delim;\n\n }\n\n };\n\n}\n\n\n\nimpl_delim!(Csv => ',');\n\nimpl Allowed for Csv {}\n\n\n\nimpl_delim!(Ssv => ' ');\n\nimpl Allowed for Ssv {}\n\n\n\nimpl_delim!(Tsv => '\\t');\n", "file_path": "src/gen-api-client/util.rs", "rank": 79, "score": 75978.12509919953 }, { "content": "/// Marker trait for delimiting. We represent each type of delimiting\n\n/// with an unit struct and implement this\n\npub trait Delimiting {\n\n const DELIMITER: char;\n\n}\n\n\n", "file_path": "src/gen-api-client/util.rs", "rank": 80, "score": 75974.45045998573 }, { "content": "/// Builds a `Monitor`, given list of all runs\n\n/// # Panics\n\n/// Panics if provided arguments are invalid\n\n// Probably, later some means to build this incrementally will be implemented\n\npub fn build_monitor(\n\n runs: &[Run],\n\n problems: &[(ProblemId, ProblemConfig)],\n\n parties: &[PartyId],\n\n _config: &Config,\n\n) -> Monitor {\n\n let mut party_info = BTreeMap::new();\n\n let mut runs_by_party_and_problem = BTreeMap::new();\n\n for (i, run) in runs.iter().enumerate() {\n\n let k = (run.party, run.problem);\n\n runs_by_party_and_problem\n\n .entry(k)\n\n .or_insert_with(Vec::new)\n\n .push(i);\n\n }\n\n let mut cell_by_party_and_problem = BTreeMap::new();\n\n\n\n let mut stats = StatsRow {\n\n problems: BTreeMap::new(),\n\n };\n", "file_path": "src/ranker/src/lib.rs", "rank": 81, "score": 75969.42477234796 }, { "content": "pub trait CommandExt {\n\n fn run_on(&mut self, runner: &Runner);\n\n\n\n fn try_exec(&mut self) -> Result<(), anyhow::Error>;\n\n fn try_exec_with_output(&mut self) -> Result<std::process::Output, anyhow::Error>;\n\n\n\n fn cargo_color(&mut self);\n\n}\n\n\n\nimpl CommandExt for Command {\n\n fn run_on(&mut self, runner: &Runner) {\n\n runner.exec(self);\n\n }\n\n\n\n fn cargo_color(&mut self) {\n\n if atty::is(atty::Stream::Stdout) {\n\n self.args(&[\"--color\", \"always\"]);\n\n self.env(\"RUST_LOG_STYLE\", \"always\");\n\n }\n\n }\n", "file_path": "src/util/src/cmd.rs", "rank": 82, "score": 75969.42477234796 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct RunPatchPatchBuilderContainer {\n\n body: self::RunPatch,\n\n param_run_id: Option<String>,\n\n}\n\n\n\nimpl<RunId> RunPatchPatchBuilder<RunId> {\n\n #[inline]\n\n pub fn run_id(mut self, value: impl Into<String>) -> RunPatchPatchBuilder<crate::generics::RunIdExists> {\n\n self.inner.param_run_id = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n\n\n #[inline]\n\n pub fn binary(mut self, value: impl Into<String>) -> Self {\n\n 
self.inner.body.binary = Some(value.into());\n\n self\n\n }\n\n\n\n #[inline]\n\n pub fn status(mut self, value: impl Iterator<Item = impl Iterator<Item = impl Into<String>>>) -> Self {\n", "file_path": "src/gen-api-client/run_patch.rs", "rank": 83, "score": 75832.44375174197 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct LiveStatusGetBuilderContainer {\n\n param_run_id: Option<String>,\n\n}\n\n\n\nimpl<RunId> LiveStatusGetBuilder<RunId> {\n\n #[inline]\n\n pub fn run_id(mut self, value: impl Into<String>) -> LiveStatusGetBuilder<crate::generics::RunIdExists> {\n\n self.inner.param_run_id = Some(value.into());\n\n unsafe { std::mem::transmute(self) }\n\n }\n\n}\n\n\n\nimpl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for LiveStatusGetBuilder<crate::generics::RunIdExists> {\n\n type Output = LiveStatus;\n\n\n\n const METHOD: http::Method = http::Method::GET;\n\n\n\n fn rel_path(&self) -> std::borrow::Cow<'static, str> {\n\n format!(\"/runs/{run_id}/live\", run_id=self.inner.param_run_id.as_ref().expect(\"missing parameter run_id?\")).into()\n\n }\n\n}\n", "file_path": "src/gen-api-client/live_status.rs", "rank": 84, "score": 75832.44375174197 }, { "content": " #[async_trait::async_trait]\n\n pub trait ApiClient {\n\n type Request: Request + Send;\n\n type Response: Response;\n\n\n\n /// Consumes a method and a relative path and produces a request builder for a single API call.\n\n fn request_builder(&self, method: http::Method, rel_path: &str) -> Self::Request;\n\n\n\n /// Performs the HTTP request using the given `Request` object\n\n /// and returns a `Response` future.\n\n async fn make_request(\n\n &self,\n\n req: Self::Request,\n\n ) -> Result<Self::Response, ApiError<Self::Response>>;\n\n }\n\n\n\n /// Defines api key that will be used for all requests.\n\n #[derive(Clone)]\n\n pub struct ApiKey {\n\n /// Key will be sent in this HTTP header\n\n pub header_name: String,\n", "file_path": "src/gen-api-client/lib.rs", "rank": 85, "score": 75043.06903199795 }, { "content": "/// Contains WorkerStateKind\n\nstruct WorkerState(AtomicU8);\n\nconst WORKER_STATE_IDLE: u8 = 0;\n\nconst WORKER_STATE_LOCKED: u8 = 1;\n\nconst WORKER_STATE_CRASH: u8 = 2;\n\nconst WORKER_STATE_JUDGE: u8 = 3;\n\nimpl WorkerState {\n\n fn new(kind: WorkerStateKind) -> Self {\n\n let this = WorkerState(AtomicU8::new(0));\n\n this.store(kind);\n\n this\n\n }\n\n\n\n fn store(&self, kind: WorkerStateKind) {\n\n let value = match kind {\n\n WorkerStateKind::Idle => WORKER_STATE_IDLE,\n\n WorkerStateKind::Locked => WORKER_STATE_LOCKED,\n\n WorkerStateKind::Crash => WORKER_STATE_CRASH,\n\n WorkerStateKind::Judge => WORKER_STATE_JUDGE,\n\n };\n\n self.0.store(value, SeqCst);\n", "file_path": "src/invoker/src/scheduler.rs", "rank": 86, "score": 74809.24002073839 }, { "content": "struct Visitor<'a> {\n\n config: &'a mut svaluer::Config,\n\n tests_info: std::collections::HashMap<u32, String>,\n\n warnings: Vec<String>,\n\n}\n\n\n\nimpl<'a> Visitor<'a> {\n\n fn warn(&mut self, msg: String) {\n\n self.warnings.push(msg);\n\n }\n\n\n\n fn warn_not_sup(&mut self, feat: &str) {\n\n self.warn(format!(\"not supported feature: {}\", feat));\n\n }\n\n\n\n fn visit_global_def(&mut self, _node: pest::iterators::Pair<'a, Rule>) {\n\n // all global options are not supported currently\n\n self.warn_not_sup(\"GlobalDefinitions\");\n\n }\n\n\n", "file_path": "src/pps/server/src/import/valuer_cfg.rs", "rank": 87, "score": 74809.24002073839 }, { "content": "struct 
BuildOpts(RawBuildOpts);\n\n\n\nimpl BuildOpts {\n\n fn full(&self) -> bool {\n\n let deploy_wants_full = detect_build_type().is_deploy()\n\n && (detect_build_type().deploy_info() != Some(DeployKind::Man));\n\n deploy_wants_full || self.0.full\n\n }\n\n\n\n fn should_build_deb(&self) -> bool {\n\n detect_build_type().is_pr_e2e()\n\n || (detect_build_type().deploy_info() == Some(DeployKind::Deb))\n\n || self.0.deb\n\n }\n\n\n\n fn should_build_doc(&self) -> bool {\n\n let bt = detect_build_type();\n\n bt.deploy_info().contains(&DeployKind::Man) || self.0.full || self.0.docs\n\n }\n\n\n", "file_path": "src/devtool/src/build.rs", "rank": 88, "score": 73921.77292310723 }, { "content": " /// Form object for building multipart request body.\n\n pub trait Form: Sized {\n\n /// Creates a new builder.\n\n fn new() -> Self;\n\n\n\n /// Adds the given key and value as text.\n\n fn text<T, U>(self, key: T, value: U) -> Self\n\n where\n\n T: Into<Cow<'static, str>>,\n\n U: Into<Cow<'static, str>>;\n\n\n\n /// Adds the file from the given path for streaming.\n\n fn file<K>(self, key: K, path: &Path) -> std::io::Result<Self>\n\n where\n\n K: Into<Cow<'static, str>>;\n\n }\n\n\n", "file_path": "src/gen-api-client/lib.rs", "rank": 89, "score": 72118.41528131379 }, { "content": " #[async_trait::async_trait]\n\n pub trait Sendable<Client>\n\n where\n\n Client: ApiClient + Sync + 'static,\n\n Self: Sized,\n\n {\n\n /// The output object from this API request.\n\n type Output: serde::de::DeserializeOwned;\n\n\n\n /// HTTP method used by this call.\n\n const METHOD: http::Method;\n\n\n\n /// Relative URL for this API call formatted appropriately with parameter values.\n\n ///\n\n /// **NOTE:** This URL **must** begin with `/`.\n\n fn rel_path(&self) -> std::borrow::Cow<'static, str>;\n\n\n\n /// Modifier for this object. 
Builders override this method if they\n\n /// wish to add query parameters, set body, etc.\n\n fn modify(\n\n &self,\n", "file_path": "src/gen-api-client/lib.rs", "rank": 90, "score": 72113.36298773129 }, { "content": "pub fn introspect() -> Introspection {\n\n let mut introspector = Introspector::new();\n\n introspector\n\n .add_object::<crate::api::auth::SessionToken>()\n\n .add_object::<crate::api::auth::SimpleAuthParams>()\n\n .add_object::<crate::api::contests::Contest>()\n\n .add_object::<crate::api::contests::Problem>()\n\n .add_object::<crate::api::contests::Participation>()\n\n .add_object::<crate::api::misc::ApiVersion>()\n\n .add_object::<crate::api::runs::Run>()\n\n .add_object::<crate::api::runs::InvokeStatus>()\n\n .add_object::<crate::api::runs::RunPatch>()\n\n .add_object::<crate::api::runs::RunLiveStatusUpdate>()\n\n .add_object::<crate::api::runs::RunSimpleSubmitParams>()\n\n .add_object::<crate::api::toolchains::Toolchain>()\n\n .add_object::<crate::api::users::User>()\n\n .add_object::<crate::api::users::UserCreateParams>();\n\n\n\n introspector.into_introspection()\n\n}\n", "file_path": "src/apiserver_old/engine/src/introspect.rs", "rank": 91, "score": 71224.73509398576 }, { "content": "pub fn init() -> anyhow::Result<()> {\n\n check_system().context(\"system configuration problem detected\")?;\n\n unshare().context(\"failed to create namespaces\")?;\n\n Ok(())\n\n}\n", "file_path": "src/invoker/src/init.rs", "rank": 92, "score": 69683.62815667034 }, { "content": "#[async_trait]\n\npub trait KvRepo: Send + Sync {\n\n async fn kv_put_raw(&self, key: &str, value: &[u8]) -> Result<()>;\n\n\n\n async fn kv_get_raw(&self, key: &str) -> Result<Option<Vec<u8>>>;\n\n\n\n async fn kv_del(&self, key: &str) -> Result<()>;\n\n}\n\n\n", "file_path": "src/db/src/repo.rs", "rank": 93, "score": 68795.0002629248 }, { "content": "#[async_trait]\n\npub trait Registry: Send + Sync {\n\n fn name(&self) -> &'static str {\n\n std::any::type_name::<Self>()\n\n }\n\n /// Tries to fetch problem manifest and download assets to given path.\n\n /// Returns None if problem was not found.\n\n async fn get_problem(\n\n &self,\n\n problem_name: &str,\n\n assets_path: &Path,\n\n ) -> anyhow::Result<Option<pom::Problem>>;\n\n}\n\n\n\n/// Resolves problems from filesystem\n\n#[derive(Debug)]\n\npub struct FsRegistry {\n\n /// Directory containing all problems\n\n problems_dir: PathBuf,\n\n}\n\n\n", "file_path": "src/problem-loader/src/registry.rs", "rank": 94, "score": 68795.0002629248 }, { "content": "#[async_trait]\n\npub trait ParticipationsRepo: Send + Sync {\n\n async fn part_new(&self, part_data: NewParticipation) -> Result<Participation>;\n\n async fn part_find(&self, id: ParticipationId) -> Result<Option<Participation>>;\n\n async fn part_lookup(&self, user_id: UserId, contest_id: &str)\n\n -> Result<Option<Participation>>;\n\n}\n", "file_path": "src/db/src/repo.rs", "rank": 95, "score": 68795.0002629248 }, { "content": "#[async_trait]\n\npub trait UsersRepo: Send + Sync {\n\n async fn user_new(&self, user_data: NewUser) -> Result<User>;\n\n async fn user_try_load_by_login(&self, login: &str) -> Result<Option<User>>;\n\n}\n\n\n", "file_path": "src/db/src/repo.rs", "rank": 96, "score": 68795.0002629248 }, { "content": "pub fn close(h: i64) {\n\n nix::unistd::close(h as i32).unwrap()\n\n}\n", "file_path": "src/invoker/src/worker/os_util.rs", "rank": 97, "score": 68795.0002629248 }, { "content": "pub fn make_pipe() -> (i64, i64) {\n\n let (a, b) = nix::unistd::pipe().unwrap();\n\n (i64::from(a), 
i64::from(b))\n\n}\n\n\n", "file_path": "src/invoker/src/worker/os_util.rs", "rank": 98, "score": 67122.06950694774 }, { "content": " #[async_trait::async_trait]\n\n pub trait Response: Debug + Send + Sized {\n\n type Bytes: AsRef<[u8]>;\n\n type Error;\n\n\n\n /// Gets the value for the given header name, if any.\n\n fn header(&self, name: &'static str) -> Option<&str>;\n\n\n\n /// Takes all headers from the response.\n\n fn take_headers(&mut self) -> http::header::HeaderMap;\n\n\n\n /// Status code for this response.\n\n fn status(&self) -> http::status::StatusCode;\n\n\n\n /// Media type for this response body (if any).\n\n fn media_type(&self) -> Option<mime::MediaType>;\n\n\n\n /// Response body as a stream.\n\n fn stream(self) -> Box<dyn Stream<Item = Result<Self::Bytes, Self::Error>> + Unpin>;\n\n\n\n /// Vector of bytes from the response body.\n", "file_path": "src/gen-api-client/lib.rs", "rank": 99, "score": 65894.15538875616 } ]
Rust
src/lib.rs
Thog/blz-nx-rs
3b7a3fc587f064ab5c86da0aaff8e7decea26be1
#![no_std] use byteorder::ByteOrder; use byteorder::LittleEndian; #[derive(Debug)] pub enum Error { Unknown, InvalidBlz, DecompressionBufferTooSmall, CompressionBufferTooSmall, } const BLZ_SHIFT: u8 = 1; const BLZ_MASK: u8 = 0x80; const BLZ_THRESHOLD: usize = 2; const BLZ_MAX_OFFSET: usize = 0x1002; const BLZ_MAX_CODED: usize = (1 << 4) + BLZ_THRESHOLD; pub type BlzResult<T> = core::result::Result<T, Error>; #[inline] pub fn get_worst_compression_buffer_size(raw_len: usize) -> usize { raw_len + ((raw_len + 7) / 8) + 15 } fn get_size_for_decompression(data: &[u8]) -> BlzResult<(u32, u32, u32)> { if data.len() < 4 { return Err(Error::InvalidBlz); } let inc_len = LittleEndian::read_u32(&data[data.len() - 4..]); if inc_len == 0 { let raw_len = data.len() as u32 - 4; Ok((raw_len, 0, raw_len)) } else { if data.len() < 8 { return Err(Error::InvalidBlz); } let header_len = LittleEndian::read_u32(&data[data.len() - 8..]); if data.len() <= header_len as usize { return Err(Error::InvalidBlz); } let enc_len = LittleEndian::read_u32(&data[data.len() - 12..]); let dec_len = data.len() as u32 - enc_len; let pak_len = enc_len - header_len; let raw_len = dec_len + enc_len + inc_len; Ok((dec_len, pak_len, raw_len)) } } pub fn get_decompression_buffer_size(data: &[u8]) -> BlzResult<usize> { Ok(get_size_for_decompression(data)?.2 as usize) } fn invert_slice(data: &mut [u8]) { let mut top_position = 0; let mut bottom_position = data.len() - 1; while top_position < bottom_position { let tmp = data[top_position]; data[top_position] = data[bottom_position]; data[bottom_position] = tmp; bottom_position -= 1; top_position += 1; } } fn compression_search(data: &[u8], current_position: usize) -> (usize, usize) { let mut len = BLZ_THRESHOLD as usize; let mut pos = 0; let max = if current_position >= BLZ_MAX_OFFSET { BLZ_MAX_OFFSET } else { current_position }; for tmp_pos in 3..=max { let mut tmp_len = 0; while tmp_len < BLZ_MAX_CODED { if tmp_len == data[current_position..].len() || tmp_len >= tmp_pos { break; } if data[current_position + tmp_len] != data[current_position + tmp_len - tmp_pos] { break; } tmp_len += 1; } if tmp_len > len { pos = tmp_pos; len = tmp_len; if len == BLZ_MAX_CODED { break; } } } (len, pos) } pub fn compress_raw( decompressed_buffer: &mut [u8], compression_buffer: &mut [u8], ) -> BlzResult<usize> { if compression_buffer.len() < get_worst_compression_buffer_size(decompressed_buffer.len()) { return Err(Error::CompressionBufferTooSmall); } invert_slice(decompressed_buffer); let mut compressed_size_tmp = 0; let mut decompressed_size_tmp = decompressed_buffer.len(); let mut mask = 0; let mut decompressed_pos = 0; let mut compressed_pos = 0; let mut flag_pos = 0; while decompressed_pos < decompressed_buffer.len() { mask >>= BLZ_SHIFT; if mask == 0 { flag_pos = compressed_pos; compression_buffer[flag_pos] = 0; compressed_pos += 1; mask = BLZ_MASK; } let (mut len_best, pos_best) = compression_search(&decompressed_buffer, decompressed_pos); if len_best > BLZ_THRESHOLD { if decompressed_pos + len_best < decompressed_buffer.len() { decompressed_pos += len_best; let (mut len_next, _) = compression_search(&decompressed_buffer, decompressed_pos); decompressed_pos -= len_best - 1; let (mut len_post, _) = compression_search(&decompressed_buffer, decompressed_pos); decompressed_pos -= 1; if len_next <= BLZ_THRESHOLD { len_next = 1; } if len_post <= BLZ_THRESHOLD { len_post = 1; } if len_best + len_next <= 1 + len_post { len_best = 1; } } } compression_buffer[flag_pos] <<= 1; if len_best > BLZ_THRESHOLD { 
decompressed_pos += len_best; compression_buffer[flag_pos] |= 1; compression_buffer[compressed_pos] = (((len_best - (BLZ_THRESHOLD + 1)) << 4) | ((pos_best - 3) >> 8)) as u8; compression_buffer[compressed_pos + 1] = ((pos_best - 3) & 0xFF) as u8; compressed_pos += 2; } else { compression_buffer[compressed_pos] = decompressed_buffer[decompressed_pos]; compressed_pos += 1; decompressed_pos += 1; } if compressed_pos + decompressed_buffer.len() - decompressed_pos < compressed_size_tmp + decompressed_size_tmp { compressed_size_tmp = compressed_pos; decompressed_size_tmp = decompressed_buffer.len() - decompressed_pos; } } while mask != 0 && mask != 1 { mask >>= BLZ_SHIFT; compression_buffer[flag_pos] <<= 1; } let compressed_size = compressed_pos; invert_slice(decompressed_buffer); invert_slice(&mut compression_buffer[0..compressed_size]); let result_size; if compressed_size_tmp == 0 || (decompressed_buffer.len() + 4 < ((compressed_size_tmp + decompressed_size_tmp + 3) & 0xFFFFFFFC) + 8) { &(compression_buffer[0..decompressed_buffer.len()]).copy_from_slice(&decompressed_buffer); compressed_pos = decompressed_buffer.len(); while (compressed_pos & 3) != 0 { compression_buffer[compressed_pos] = 0; compressed_pos += 1; } LittleEndian::write_u32(&mut compression_buffer[compressed_pos..], 0); compressed_pos += 4; result_size = compressed_pos; } else { let mut i = 0; while i < compressed_size_tmp { compression_buffer[decompressed_size_tmp + i] = compression_buffer[i + compressed_pos - compressed_size_tmp]; i += 1; } (&mut compression_buffer[0..decompressed_size_tmp]) .copy_from_slice(&decompressed_buffer[0..decompressed_size_tmp]); compressed_pos = decompressed_size_tmp + compressed_size_tmp; let compressed_len = compressed_size_tmp; let mut header_size = 12; let inc_len = decompressed_buffer.len() - compressed_len - decompressed_size_tmp; while (compressed_pos & 3) != 0 { compression_buffer[compressed_pos] = 0xFF; compressed_pos += 1; header_size += 1; } LittleEndian::write_u32( &mut compression_buffer[compressed_pos..], (compressed_len + header_size) as u32, ); LittleEndian::write_u32( &mut compression_buffer[compressed_pos + 4..], header_size as u32, ); LittleEndian::write_u32( &mut compression_buffer[compressed_pos + 8..], (inc_len - header_size) as u32, ); compressed_pos += 12; result_size = compressed_pos; } Ok(result_size) } pub fn decompress_raw( compressed_data: &mut [u8], decompression_buffer: &mut [u8], ) -> BlzResult<usize> { let (dec_len, pak_len, raw_len) = get_size_for_decompression(compressed_data)?; if (decompression_buffer.len() as u32) < raw_len { return Err(Error::DecompressionBufferTooSmall); } let mut pak_buffer = &mut compressed_data[0..(dec_len + pak_len) as usize]; let mut raw_buffer = &mut decompression_buffer[0..raw_len as usize]; (&mut raw_buffer[0..dec_len as usize]).copy_from_slice(&pak_buffer[0..dec_len as usize]); pak_buffer = &mut pak_buffer[dec_len as usize..]; raw_buffer = &mut raw_buffer[dec_len as usize..]; invert_slice(pak_buffer); let mut mask = 0; let mut decompression_buffer_position: usize = 0; let mut pak_position: usize = 0; let pak_position_end: usize = pak_len as usize; let mut flags = 0u8; while decompression_buffer_position < raw_buffer.len() { mask >>= BLZ_SHIFT; if mask == 0 { if pak_position == pak_position_end { break; } flags = pak_buffer[pak_position]; pak_position += 1; mask = BLZ_MASK; } if (flags & mask) == 0 { if pak_position == pak_position_end { break; } raw_buffer[decompression_buffer_position] = pak_buffer[pak_position]; 
decompression_buffer_position += 1; pak_position += 1; } else { if pak_position + 1 >= pak_position_end { break; } let mut pos: u32 = (u32::from(pak_buffer[pak_position]) << 8) | u32::from(pak_buffer[pak_position + 1]); pak_position += 2; let mut len: u32 = (pos >> 12) + BLZ_THRESHOLD as u32 + 1; if decompression_buffer_position + len as usize > raw_buffer.len() { len = (raw_buffer.len() - decompression_buffer_position) as u32; } pos = (pos & 0xFFF) + 3; while len != 0 { raw_buffer[decompression_buffer_position] = raw_buffer[decompression_buffer_position - pos as usize]; decompression_buffer_position += 1; len -= 1; } } } invert_slice(raw_buffer); debug_assert!( decompression_buffer_position == raw_buffer.len(), "Unexpected end of decompression" ); Ok(decompression_buffer_position + dec_len as usize) }
#![no_std] use byteorder::ByteOrder; use byteorder::LittleEndian; #[derive(Debug)] pub enum Error { Unknown, InvalidBlz, DecompressionBufferTooSmall, CompressionBufferTooSmall, } const BLZ_SHIFT: u8 = 1; const BLZ_MASK: u8 = 0x80; const BLZ_THRESHOLD: usize = 2; const BLZ_MAX_OFFSET: usize = 0x1002; const BLZ_MAX_CODED: usize = (1 << 4) + BLZ_THRESHOLD; pub type BlzResult<T> = core::result::Result<T, Error>; #[inline] pub fn get_worst_compression_buffer_size(raw_len: usize) -> usize { raw_len + ((raw_len + 7) / 8) + 15 } fn get_size_for_decompression(data: &[u8]) -> BlzResult<(u32, u32, u32)> { if data.len() < 4 { return Err(Error::InvalidBlz); } let inc_len = LittleEndian::read_u32(&data[data.len() - 4..]); if inc_len == 0 { let raw_len = data.len() as u32 - 4; Ok((raw_len, 0, raw_len)) } else { if data.len() < 8 { return Err(Error::InvalidBlz); } let header_len = LittleEndian::read_u32(&data[data.len() - 8..]); if data.len() <= header_len as usize { return Err(Error::InvalidBlz); } let enc_len = LittleEndian::read_u32(&data[data.len() - 12..]); let dec_len = data.len() as u32 - enc_len; let pak_len = enc_len - header_len; let raw_len = dec_len + enc_len + inc_len; Ok((dec_len, pak_len, raw_len)) } } pub fn get_decompression_buffer_size(data: &[u8]) -> BlzResult<usize> { Ok(get_size_for_decompression(data)?.2 as usize) } fn invert_slice(data: &mut [u8]) { let mut top_position = 0; l
(compressed_len + header_size) as u32, ); LittleEndian::write_u32( &mut compression_buffer[compressed_pos + 4..], header_size as u32, ); LittleEndian::write_u32( &mut compression_buffer[compressed_pos + 8..], (inc_len - header_size) as u32, ); compressed_pos += 12; result_size = compressed_pos; } Ok(result_size) } pub fn decompress_raw( compressed_data: &mut [u8], decompression_buffer: &mut [u8], ) -> BlzResult<usize> { let (dec_len, pak_len, raw_len) = get_size_for_decompression(compressed_data)?; if (decompression_buffer.len() as u32) < raw_len { return Err(Error::DecompressionBufferTooSmall); } let mut pak_buffer = &mut compressed_data[0..(dec_len + pak_len) as usize]; let mut raw_buffer = &mut decompression_buffer[0..raw_len as usize]; (&mut raw_buffer[0..dec_len as usize]).copy_from_slice(&pak_buffer[0..dec_len as usize]); pak_buffer = &mut pak_buffer[dec_len as usize..]; raw_buffer = &mut raw_buffer[dec_len as usize..]; invert_slice(pak_buffer); let mut mask = 0; let mut decompression_buffer_position: usize = 0; let mut pak_position: usize = 0; let pak_position_end: usize = pak_len as usize; let mut flags = 0u8; while decompression_buffer_position < raw_buffer.len() { mask >>= BLZ_SHIFT; if mask == 0 { if pak_position == pak_position_end { break; } flags = pak_buffer[pak_position]; pak_position += 1; mask = BLZ_MASK; } if (flags & mask) == 0 { if pak_position == pak_position_end { break; } raw_buffer[decompression_buffer_position] = pak_buffer[pak_position]; decompression_buffer_position += 1; pak_position += 1; } else { if pak_position + 1 >= pak_position_end { break; } let mut pos: u32 = (u32::from(pak_buffer[pak_position]) << 8) | u32::from(pak_buffer[pak_position + 1]); pak_position += 2; let mut len: u32 = (pos >> 12) + BLZ_THRESHOLD as u32 + 1; if decompression_buffer_position + len as usize > raw_buffer.len() { len = (raw_buffer.len() - decompression_buffer_position) as u32; } pos = (pos & 0xFFF) + 3; while len != 0 { raw_buffer[decompression_buffer_position] = raw_buffer[decompression_buffer_position - pos as usize]; decompression_buffer_position += 1; len -= 1; } } } invert_slice(raw_buffer); debug_assert!( decompression_buffer_position == raw_buffer.len(), "Unexpected end of decompression" ); Ok(decompression_buffer_position + dec_len as usize) }
et mut bottom_position = data.len() - 1; while top_position < bottom_position { let tmp = data[top_position]; data[top_position] = data[bottom_position]; data[bottom_position] = tmp; bottom_position -= 1; top_position += 1; } } fn compression_search(data: &[u8], current_position: usize) -> (usize, usize) { let mut len = BLZ_THRESHOLD as usize; let mut pos = 0; let max = if current_position >= BLZ_MAX_OFFSET { BLZ_MAX_OFFSET } else { current_position }; for tmp_pos in 3..=max { let mut tmp_len = 0; while tmp_len < BLZ_MAX_CODED { if tmp_len == data[current_position..].len() || tmp_len >= tmp_pos { break; } if data[current_position + tmp_len] != data[current_position + tmp_len - tmp_pos] { break; } tmp_len += 1; } if tmp_len > len { pos = tmp_pos; len = tmp_len; if len == BLZ_MAX_CODED { break; } } } (len, pos) } pub fn compress_raw( decompressed_buffer: &mut [u8], compression_buffer: &mut [u8], ) -> BlzResult<usize> { if compression_buffer.len() < get_worst_compression_buffer_size(decompressed_buffer.len()) { return Err(Error::CompressionBufferTooSmall); } invert_slice(decompressed_buffer); let mut compressed_size_tmp = 0; let mut decompressed_size_tmp = decompressed_buffer.len(); let mut mask = 0; let mut decompressed_pos = 0; let mut compressed_pos = 0; let mut flag_pos = 0; while decompressed_pos < decompressed_buffer.len() { mask >>= BLZ_SHIFT; if mask == 0 { flag_pos = compressed_pos; compression_buffer[flag_pos] = 0; compressed_pos += 1; mask = BLZ_MASK; } let (mut len_best, pos_best) = compression_search(&decompressed_buffer, decompressed_pos); if len_best > BLZ_THRESHOLD { if decompressed_pos + len_best < decompressed_buffer.len() { decompressed_pos += len_best; let (mut len_next, _) = compression_search(&decompressed_buffer, decompressed_pos); decompressed_pos -= len_best - 1; let (mut len_post, _) = compression_search(&decompressed_buffer, decompressed_pos); decompressed_pos -= 1; if len_next <= BLZ_THRESHOLD { len_next = 1; } if len_post <= BLZ_THRESHOLD { len_post = 1; } if len_best + len_next <= 1 + len_post { len_best = 1; } } } compression_buffer[flag_pos] <<= 1; if len_best > BLZ_THRESHOLD { decompressed_pos += len_best; compression_buffer[flag_pos] |= 1; compression_buffer[compressed_pos] = (((len_best - (BLZ_THRESHOLD + 1)) << 4) | ((pos_best - 3) >> 8)) as u8; compression_buffer[compressed_pos + 1] = ((pos_best - 3) & 0xFF) as u8; compressed_pos += 2; } else { compression_buffer[compressed_pos] = decompressed_buffer[decompressed_pos]; compressed_pos += 1; decompressed_pos += 1; } if compressed_pos + decompressed_buffer.len() - decompressed_pos < compressed_size_tmp + decompressed_size_tmp { compressed_size_tmp = compressed_pos; decompressed_size_tmp = decompressed_buffer.len() - decompressed_pos; } } while mask != 0 && mask != 1 { mask >>= BLZ_SHIFT; compression_buffer[flag_pos] <<= 1; } let compressed_size = compressed_pos; invert_slice(decompressed_buffer); invert_slice(&mut compression_buffer[0..compressed_size]); let result_size; if compressed_size_tmp == 0 || (decompressed_buffer.len() + 4 < ((compressed_size_tmp + decompressed_size_tmp + 3) & 0xFFFFFFFC) + 8) { &(compression_buffer[0..decompressed_buffer.len()]).copy_from_slice(&decompressed_buffer); compressed_pos = decompressed_buffer.len(); while (compressed_pos & 3) != 0 { compression_buffer[compressed_pos] = 0; compressed_pos += 1; } LittleEndian::write_u32(&mut compression_buffer[compressed_pos..], 0); compressed_pos += 4; result_size = compressed_pos; } else { let mut i = 0; while i < compressed_size_tmp { 
compression_buffer[decompressed_size_tmp + i] = compression_buffer[i + compressed_pos - compressed_size_tmp]; i += 1; } (&mut compression_buffer[0..decompressed_size_tmp]) .copy_from_slice(&decompressed_buffer[0..decompressed_size_tmp]); compressed_pos = decompressed_size_tmp + compressed_size_tmp; let compressed_len = compressed_size_tmp; let mut header_size = 12; let inc_len = decompressed_buffer.len() - compressed_len - decompressed_size_tmp; while (compressed_pos & 3) != 0 { compression_buffer[compressed_pos] = 0xFF; compressed_pos += 1; header_size += 1; } LittleEndian::write_u32( &mut compression_buffer[compressed_pos..],
random
[ { "content": "# blz-nx-rs\n\n\n\n[![Travis Build](https://img.shields.io/travis/com/Thog/blz-nx-rs.svg?logo=travis)](https://travis-ci.com/Thog/blz-nx-rs) [![Dependabot Status](https://api.dependabot.com/badges/status?host=github&repo=Thog/blz-nx-rs)](https://dependabot.com)\n\n\n\nAn implementation of the Bottom LZ variant used on the Nintendo Switch.\n\n\n\n## License\n\n\n\nblz-nx-rs is distributed under the terms of either the MIT license or the Apache\n\nLicense (Version 2.0), at the user's choice.\n\n\n", "file_path": "README.md", "rank": 17, "score": 0.48107898830381 } ]
Rust
tests/substrate_tests/function_types.rs
reeftotem/solang
56047c48da5f836a23661c092b35a55713346871
use crate::build_solidity; use parity_scale_codec::Encode; use parity_scale_codec_derive::{Decode, Encode}; #[test] fn simple_test() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft { function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = mul; } else { func = add; } return func(a, b); } }"##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 1000u32.encode()); } #[test] fn internal_function_type_in_contract_storage() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(u32, u32); let mut runtime = build_solidity( r##" contract ft { function(int32,int32) internal returns (int32) func; function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function set_op(bool action) public { if (action) { func = mul; } else { func = add; } } function test(int32 a, int32 b) public returns (int32) { return func(a, b); } }"##, ); runtime.function("set_op", false.encode()); runtime.function("test", Args(100, 10).encode()); assert_eq!(runtime.vm.output, 110u32.encode()); } #[test] #[should_panic] fn internal_function_not_init_called() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(u32, u32); let mut runtime = build_solidity( r##" contract ft { function(int32,int32) internal returns (int32) func; function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function set_op(bool action) public { if (action) { func = mul; } else { func = add; } } function test(int32 a, int32 b) public returns (int32) { return func(a, b); } }"##, ); runtime.function("test", Args(100, 10).encode()); } #[test] fn base_contract_function() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft is Arith { function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = Arith.mul; } else { func = Arith.add; } return func(a, b); } } contract Arith { function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } } "##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 1000u32.encode()); } #[test] fn virtual_contract_function() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft is Arith { function mul(int32 a, int32 b) internal override returns (int32) { return a * b * 10; } function add(int32 a, int32 b) internal override returns (int32) { return a + b + 10; } } contract Arith { function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = mul; } else { func = add; } return func(a, b); } function mul(int32 a, int32 b) internal virtual returns (int32) { return a * b; } function add(int32 a, int32 b) internal virtual returns (int32) { return a + b; } } "##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 10000u32.encode()); } #[test] fn 
ext() { let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (bool) func = this.foo; assert(address(this) == func.address); assert(func.selector == hex"42761137"); } function foo(int32) public returns (bool) { return false; } }"##, ); runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; assert(func(102) == 0xabbaabba); } function foo(int32) public returns (uint64) { return 0xabbaabba; } }"##, ); runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) internal { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) internal { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); println!("return external function type from public function"); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; this.bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) public { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); println!("external function type in storage"); let mut runtime = build_solidity( r##" contract ft { function(int32) external returns (uint64) func; function test1() public { func = this.foo; } function test2() public { this.bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) public { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test1", Vec::new()); runtime.function("test2", Vec::new()); }
use crate::build_solidity; use parity_scale_codec::Encode; use parity_scale_codec_derive::{Decode, Encode}; #[test] fn simple_test() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft { function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = mul; } else { func = add; } return func(a, b); } }"##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 1000u32.encode()); } #[test] fn internal_function_type_in_contract_storage() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(u32, u32); let mut runtime = build_solidity( r##" contract ft { function(int32,int32) internal returns (int32) func; function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function set_op(bool action) public { if (action) { func = mul; } else { func = add; } } function test(int32 a, int32 b) public returns (int32) { return func(a, b); } }"##, ); runtime.function("set_op", false.encode()); runtime.function("test", Args(100, 10).encode()); assert_eq!(runtime.vm.output, 110u32.encode()); } #[test] #[should_panic] fn internal_function_not_init_called() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(u32, u32); let mut runtime = build_solidity( r##" contract ft { function(int32,int32) internal returns (int32) func; function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } function set_op(bool action) public { if (action) { func = mul; } else { func = add; } } function test(int32 a, int32 b) public returns (int32) { return func(a, b); } }"##, ); runtime.function("test", Args(100, 10).encode()); } #[test] fn base_contract_function() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft is Arith { function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = Arith.mul; } else { func = Arith.add; } return func(a, b); } } contract Arith { function mul(int32 a, int32 b) internal returns (int32) { return a * b; } function add(int32 a, int32 b) internal returns (int32) { return a + b; } } "##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 1000u32.encode()); } #[test] fn virtual_contract_function() { #[derive(Debug, PartialEq, Encode, Decode)] struct Args(bool, u32, u32); let mut runtime = build_solidity( r##" contract ft is Arith { function mul(int32 a, int32 b) internal override returns (int32) { return a * b * 10; } function add(int32 a, int32 b) internal override returns (int32) { return a + b + 10; } } contract Arith { function test(bool action, int32 a, int32 b) public returns (int32) { function(int32,int32) internal returns (int32) func; if (action) { func = mul; } else { func = add; } return func(a, b); } function mul(int32 a, int32 b) internal virtual returns (int32) { return a * b; } function add(int32 a, int32 b) internal virtual returns (int32) { return a + b; } } "##, ); runtime.function("test", Args(true, 100, 10).encode()); assert_eq!(runtime.vm.output, 10000u32.encode()); } #[test] fn 
ext() { let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (bool) func = this.foo; assert(address(this) == func.address); assert(func.selector == hex"42761137"); } function foo(int32) public returns (bool) { return false; } }"##, ); runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; assert(func(102) == 0xabbaabba); } function foo(int32) public returns (uint64) { return 0xabbaabba; } }"##, ); runtime.function("test", Vec::new());
runtime.function("test", Vec::new()); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) internal { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); println!("return external function type from public function"); let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; this.bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) public { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test", Vec::new()); println!("external function type in storage"); let mut runtime = build_solidity( r##" contract ft { function(int32) external returns (uint64) func; function test1() public { func = this.foo; } function test2() public { this.bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) public { assert(f(102) == 0xabbaabba); } }"##, ); runtime.function("test1", Vec::new()); runtime.function("test2", Vec::new()); }
let mut runtime = build_solidity( r##" contract ft { function test() public { function(int32) external returns (uint64) func = this.foo; bar(func); } function foo(int32) public returns (uint64) { return 0xabbaabba; } function bar(function(int32) external returns (uint64) f) internal { assert(f(102) == 0xabbaabba); } }"##, );
assignment_statement
[]
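The function_types.rs row above drives every contract call through SCALE-encoded argument tuples and compares runtime.vm.output against .encode() of the expected value. A small standalone sketch of that encoding convention, using the same derives as the tests (the byte values shown are the standard SCALE little-endian layout, not taken from the repo):

use parity_scale_codec::Encode;
use parity_scale_codec_derive::{Decode, Encode};

#[derive(Debug, PartialEq, Encode, Decode)]
struct Args(bool, u32, u32);

fn main() {
    // bool encodes to one byte (0x01 for true); each u32 to four little-endian bytes.
    assert_eq!(
        Args(true, 100, 10).encode(),
        vec![0x01, 0x64, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00]
    );
    // Return values follow the same rule, e.g. 1000u32 -> e8 03 00 00,
    // which is what runtime.vm.output is compared against in simple_test().
    assert_eq!(1000u32.encode(), vec![0xe8, 0x03, 0x00, 0x00]);
}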
Rust
cli/src/workflow/render/producer/typescript/render_static.rs
DmitryAstafyev/clibri
9cf501e0274d5cc1aae13fcf8cb50ed7820f8503
use super::{helpers, workflow::event::Event}; use std::include_str; use std::{ fs, path::{Path, PathBuf}, }; #[allow(non_upper_case_globals)] mod paths { pub mod events { pub const connected: &str = "connected.ts"; pub const disconnected: &str = "disconnected.ts"; pub const error: &str = "error.ts"; pub const ready: &str = "ready.ts"; pub const shutdown: &str = "shutdown.ts"; pub const dest: &str = "events"; } pub mod consumer { pub const module: &str = "index.ts"; pub const filter: &str = "filter.ts"; pub const dest: &str = "implementation/consumer"; } pub mod emitters { pub const connected: &str = "connected.ts"; pub const disconnected: &str = "disconnected.ts"; pub const error: &str = "error.ts"; pub const ready: &str = "ready.ts"; pub const shutdown: &str = "shutdown.ts"; pub const dest: &str = "implementation/events"; } pub mod scope { pub const module: &str = "index.ts"; pub const dest: &str = "implementation/scope"; } pub mod index { pub const module: &str = "index.ts"; pub const dest: &str = ""; } pub mod context { pub const module: &str = "context.ts"; pub const dest: &str = ""; } } pub struct Render {} impl Default for Render { fn default() -> Self { Self::new() } } impl Render { pub fn new() -> Self { Self {} } pub fn render(&self, base: &Path, events: &[Event]) -> Result<(), String> { if !events.iter().any(|event| match event.get_reference() { Ok(reference) => reference == "connected", Err(_) => false, }) { if !self .get_dest_file(base, paths::events::dest, paths::events::connected)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::connected)?, include_str!("./static/events/connected.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::connected)?, include_str!("./static/implementation/events/connected.ts").to_owned(), true, )?; } if !events.iter().any(|event| match event.get_reference() { Ok(reference) => reference == "disconnected", Err(_) => false, }) { if !self .get_dest_file(base, paths::events::dest, paths::events::disconnected)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::disconnected)?, include_str!("./static/events/disconnected.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::disconnected)?, include_str!("./static/implementation/events/disconnected.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::error)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::error)?, include_str!("./static/events/error.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::ready)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::ready)?, include_str!("./static/events/ready.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::shutdown)? 
.exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::shutdown)?, include_str!("./static/events/shutdown.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::consumer::dest, paths::consumer::module)?, include_str!("./static/implementation/consumer/index.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::consumer::dest, paths::consumer::filter)?, include_str!("./static/implementation/consumer/filter.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::scope::dest, paths::scope::module)?, include_str!("./static/implementation/scope/index.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::error)?, include_str!("./static/implementation/events/error.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::ready)?, include_str!("./static/implementation/events/ready.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::shutdown)?, include_str!("./static/implementation/events/shutdown.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::index::dest, paths::index::module)?, include_str!("./static/index.ts").to_owned(), true, )?; let context_dest = self.get_dest_file(base, paths::context::dest, paths::context::module)?; if !context_dest.exists() { helpers::fs::write( self.get_dest_file(base, paths::context::dest, paths::context::module)?, include_str!("./static/context.ts").to_owned(), true, )?; } Ok(()) } fn get_dest_file(&self, base: &Path, path: &str, file_name: &str) -> Result<PathBuf, String> { let dest = base.join(path); if !dest.exists() { if let Err(e) = fs::create_dir(&dest) { return Err(format!( "Fail to create dest folder {}. Error: {}", dest.to_string_lossy(), e )); } } Ok(dest.join(file_name)) } }
use super::{helpers, workflow::event::Event}; use std::include_str; use std::{ fs, path::{Path, PathBuf}, }; #[allow(non_upper_case_globals)] mod paths
helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::ready)?, include_str!("./static/events/ready.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::shutdown)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::shutdown)?, include_str!("./static/events/shutdown.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::consumer::dest, paths::consumer::module)?, include_str!("./static/implementation/consumer/index.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::consumer::dest, paths::consumer::filter)?, include_str!("./static/implementation/consumer/filter.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::scope::dest, paths::scope::module)?, include_str!("./static/implementation/scope/index.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::error)?, include_str!("./static/implementation/events/error.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::ready)?, include_str!("./static/implementation/events/ready.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::shutdown)?, include_str!("./static/implementation/events/shutdown.ts").to_owned(), true, )?; helpers::fs::write( self.get_dest_file(base, paths::index::dest, paths::index::module)?, include_str!("./static/index.ts").to_owned(), true, )?; let context_dest = self.get_dest_file(base, paths::context::dest, paths::context::module)?; if !context_dest.exists() { helpers::fs::write( self.get_dest_file(base, paths::context::dest, paths::context::module)?, include_str!("./static/context.ts").to_owned(), true, )?; } Ok(()) } fn get_dest_file(&self, base: &Path, path: &str, file_name: &str) -> Result<PathBuf, String> { let dest = base.join(path); if !dest.exists() { if let Err(e) = fs::create_dir(&dest) { return Err(format!( "Fail to create dest folder {}. Error: {}", dest.to_string_lossy(), e )); } } Ok(dest.join(file_name)) } }
{ pub mod events { pub const connected: &str = "connected.ts"; pub const disconnected: &str = "disconnected.ts"; pub const error: &str = "error.ts"; pub const ready: &str = "ready.ts"; pub const shutdown: &str = "shutdown.ts"; pub const dest: &str = "events"; } pub mod consumer { pub const module: &str = "index.ts"; pub const filter: &str = "filter.ts"; pub const dest: &str = "implementation/consumer"; } pub mod emitters { pub const connected: &str = "connected.ts"; pub const disconnected: &str = "disconnected.ts"; pub const error: &str = "error.ts"; pub const ready: &str = "ready.ts"; pub const shutdown: &str = "shutdown.ts"; pub const dest: &str = "implementation/events"; } pub mod scope { pub const module: &str = "index.ts"; pub const dest: &str = "implementation/scope"; } pub mod index { pub const module: &str = "index.ts"; pub const dest: &str = ""; } pub mod context { pub const module: &str = "context.ts"; pub const dest: &str = ""; } } pub struct Render {} impl Default for Render { fn default() -> Self { Self::new() } } impl Render { pub fn new() -> Self { Self {} } pub fn render(&self, base: &Path, events: &[Event]) -> Result<(), String> { if !events.iter().any(|event| match event.get_reference() { Ok(reference) => reference == "connected", Err(_) => false, }) { if !self .get_dest_file(base, paths::events::dest, paths::events::connected)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::connected)?, include_str!("./static/events/connected.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::connected)?, include_str!("./static/implementation/events/connected.ts").to_owned(), true, )?; } if !events.iter().any(|event| match event.get_reference() { Ok(reference) => reference == "disconnected", Err(_) => false, }) { if !self .get_dest_file(base, paths::events::dest, paths::events::disconnected)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::disconnected)?, include_str!("./static/events/disconnected.ts").to_owned(), true, )?; } helpers::fs::write( self.get_dest_file(base, paths::emitters::dest, paths::emitters::disconnected)?, include_str!("./static/implementation/events/disconnected.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::error)? .exists() { helpers::fs::write( self.get_dest_file(base, paths::events::dest, paths::events::error)?, include_str!("./static/events/error.ts").to_owned(), true, )?; } if !self .get_dest_file(base, paths::events::dest, paths::events::ready)? .exists() {
random
[ { "content": "pub fn write(filename: PathBuf, content: String, overwrite: bool) -> Result<(), String> {\n\n if filename.exists() && overwrite {\n\n if let Err(e) = remove_file(filename.clone()) {\n\n return Err(e.to_string());\n\n }\n\n } else if filename.exists() && !overwrite {\n\n return Err(format!(\"File {} exists\", filename.to_string_lossy()));\n\n }\n\n match OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .open(filename.clone())\n\n {\n\n Ok(mut file) => if let Err(e) = file.write_all(content.as_bytes()) {\n\n Err(e.to_string())\n\n } else {\n\n println!(\n\n \"[OK] {:?} has been written\",\n\n filename\n\n );\n\n Ok(())\n\n }\n\n Err(e) => Err(e.to_string())\n\n }\n\n} ", "file_path": "cli/src/helpers/fs.rs", "rank": 0, "score": 123548.89407896421 }, { "content": "const path = require(\"path\");\n", "file_path": "examples/consumer/typescript/webpack.config.js", "rank": 1, "score": 110712.30766633812 }, { "content": "const path = require(\"path\");\n", "file_path": "tests/workflow/consumer/typescript/webpack.config.js", "rank": 2, "score": 109808.1962269393 }, { "content": "const path = require(\"path\");\n", "file_path": "experimental/workflow_mt/consumer/typescript/webpack.config.js", "rank": 3, "score": 108931.64546073333 }, { "content": "pub fn read_file(path: PathBuf) -> Result<Vec<u8>, String> {\n\n if !path.exists() {\n\n return Err(format!(\"File {:?} doesn't exist\", path));\n\n }\n\n let mut file = match File::open(path.clone()) {\n\n Ok(f) => f,\n\n Err(e) => {\n\n return Err(format!(\"Fail to open file {:?} due error: {}\", path, e));\n\n }\n\n };\n\n let mut buffer = Vec::new();\n\n // read the whole file\n\n if let Err(e) = file.read_to_end(&mut buffer) {\n\n Err(format!(\"Fail to read file {:?} due error: {}\", path, e))\n\n } else {\n\n Ok(buffer)\n\n }\n\n}\n\n\n", "file_path": "tests/protocol/rust/src/reader.rs", "rank": 4, "score": 104103.50884889378 }, { "content": "fn mkdir(dest: &Path) -> Result<(), String> {\n\n if !dest.exists() {\n\n if let Err(e) = fs::create_dir(&dest) {\n\n return Err(format!(\n\n \"Fail to create dest folder {}. 
Error: {}\",\n\n dest.to_string_lossy(),\n\n e\n\n ));\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "cli/src/workflow/render/render.rs", "rank": 5, "score": 82003.80621977194 }, { "content": "pub fn get_root_dir() -> Result<PathBuf, String> {\n\n if let Ok(exe) = std::env::current_exe() {\n\n if let Some(dest) = exe.as_path().parent() {\n\n let dest = dest.join(\"../../binary\");\n\n if !dest.exists() {\n\n if let Err(e) = create_dir(dest.clone()) {\n\n return Err(format!(\"{}\", e));\n\n }\n\n }\n\n Ok(dest)\n\n } else {\n\n Err(\"Fail to find exe-path\".to_string())\n\n }\n\n } else {\n\n Err(\"Fail to find exe-path\".to_string())\n\n }\n\n}\n\n\n", "file_path": "tests/protocol/rust/src/writer.rs", "rank": 6, "score": 79373.24574957696 }, { "content": "pub fn get_ts_bin_dir() -> Result<PathBuf, String> {\n\n if let Ok(exe) = std::env::current_exe() {\n\n if let Some(dest) = exe.as_path().parent() {\n\n let dest = dest.join(\"../../../typescript/binary\");\n\n if !dest.exists() {\n\n if let Err(e) = create_dir(dest.clone()) {\n\n return Err(format!(\"{}\", e));\n\n }\n\n }\n\n Ok(dest)\n\n } else {\n\n Err(\"Fail to find ts-bin path\".to_string())\n\n }\n\n } else {\n\n Err(\"Fail to find ts-bin path\".to_string())\n\n }\n\n}\n\n\n", "file_path": "tests/protocol/rust/src/reader.rs", "rank": 7, "score": 78548.16584747015 }, { "content": "pub fn get(filename: &Path) -> Result<String, String> {\n\n let input = File::open(filename).map_err(|e| e.to_string())?;\n\n let reader = BufReader::new(input);\n\n let digest = calc_sha256_digest(reader)?;\n\n Ok(HEXUPPER.encode(digest.as_ref()))\n\n}\n\n\n", "file_path": "cli/src/helpers/hash.rs", "rank": 8, "score": 76852.48382896066 }, { "content": "fn write_file(mut dest: PathBuf, buf: &[u8]) -> Result<(), String> {\n\n let dest: PathBuf = match state::state.lock() {\n\n Ok(state) => {\n\n if state.middleware {\n\n dest.set_extension(\"middleware\");\n\n }\n\n dest\n\n }\n\n Err(e) => {\n\n stop!(\"Fail get state due error {}\", e);\n\n }\n\n };\n\n if dest.exists() {\n\n if let Err(err) = remove_file(dest.clone()) {\n\n return Err(format!(\"Fail to remove file {:?} due error: {}\", dest, err));\n\n }\n\n }\n\n match OpenOptions::new()\n\n .write(true)\n\n .create(true)\n", "file_path": "tests/protocol/rust/src/writer.rs", "rank": 9, "score": 71902.37929761577 }, { "content": "module PATHS\n\n self::CLI = \"./cli\"\n\n self::LIB = \"./lib\"\n\n self::PROTOCOL_TEST = \"./tests/protocol\"\n\n self::WORKFLOW_TEST = \"./tests/workflow\"\n\n self::EXAMPLES = \"./examples\"\n\n self::TRANSPORT = \"./environment/transport\"\n\nend\n\n\n\nnamespace :cli do\n\n desc 'Build CLI'\n\n task :build do\n\n Dir.chdir(PATHS::CLI) do\n\n sh 'cargo build --release'\n\n end\n\n end\n\nend\n\n\n\nnamespace :lib do\n\n\n", "file_path": "rakefile.rb", "rank": 10, "score": 66817.05907413602 }, { "content": "use std::{\n\n fs::{\n\n OpenOptions,\n\n remove_file,\n\n },\n\n path::{\n\n PathBuf,\n\n }\n\n};\n\nuse std::io::prelude::*;\n\n\n", "file_path": "cli/src/helpers/fs.rs", "rank": 11, "score": 64527.721942407676 }, { "content": "pub mod chars;\n\npub mod fs;\n\npub mod hash;\n\npub mod output;\n\npub mod render;\n\npub mod string;\n", "file_path": "cli/src/helpers/mod.rs", "rank": 12, "score": 63214.23894393682 }, { "content": "pub mod render_beacon;\n\npub mod render_event;\n\npub mod render_request;\n\n\n\nuse super::{\n\n helpers, helpers::render as tools, workflow, workflow::store::Store as WorkflowStore, Protocol,\n\n};\n\nuse render_beacon::RenderBeacons;\n\nuse 
render_event::RenderEvent;\n\nuse render_request::RenderRequest;\n\nuse std::path::Path;\n\nmod templates {\n\n pub const MODULE: &str = r#\"@startuml\n\n\n\n collections Consumers as Consumers\n\n [[content]]\n\n@enduml\"#;\n\n}\n\npub struct PumlRender {}\n\n\n", "file_path": "cli/src/workflow/render/puml/mod.rs", "rank": 13, "score": 61100.49526001059 }, { "content": "use super::helpers;\n\nuse super::protocol::enums::{Enum, EnumItem};\n\nuse super::protocol::fields::Field;\n\nuse super::protocol::groups::Group;\n\nuse super::protocol::store::Store;\n\nuse super::protocol::structs::Struct;\n\nuse super::protocol::types::PrimitiveTypes;\n\nuse super::Render;\n\nuse regex::Regex;\n\nuse std::{include_str, path::Path};\n\npub struct RustRender {\n\n embedded: bool,\n\n signature: u16,\n\n}\n\n\n\nimpl RustRender {\n\n fn groups(&self, group: &Group, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}pub mod {} {{\\n\", self.spaces(level), group.name);\n\n body = format!(\"{}{}use super::*;\\n\", body, self.spaces(level + 1));\n\n body = format!(\"{}{}use std::io::Cursor;\\n\", body, self.spaces(level + 1));\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 14, "score": 61097.64181236275 }, { "content": "#[path = \"./rust/render.rs\"]\n\npub mod rust;\n\n\n\n#[path = \"./typescript/render.rs\"]\n\npub mod typescript;\n\n\n\nuse super::{\n\n helpers,\n\n ImplementationRender,\n\n workflow,\n\n Protocol,\n\n ProtocolRender,\n\n ProtocolRustRender,\n\n ProtocolTypescriptRender,\n\n};", "file_path": "cli/src/workflow/render/producer/mod.rs", "rank": 15, "score": 61096.362040944594 }, { "content": "#[path = \"./rust/render.rs\"]\n\npub mod rust;\n\n\n\n#[path = \"./typescript/render.rs\"]\n\npub mod typescript;\n\n\n\nuse super::{\n\n helpers,\n\n ImplementationRender,\n\n workflow,\n\n Protocol,\n\n ProtocolRender,\n\n ProtocolRustRender,\n\n ProtocolTypescriptRender,\n\n};", "file_path": "cli/src/workflow/render/consumer/mod.rs", "rank": 16, "score": 61096.362040944594 }, { "content": "use super::protocol::enums::Enum;\n\nuse super::protocol::fields::Field;\n\nuse super::protocol::groups::Group;\n\nuse super::protocol::store::Store;\n\nuse super::protocol::structs::Struct;\n\nuse super::protocol::types::PrimitiveTypes;\n\nuse super::{helpers, stop, Render};\n\nuse regex::Regex;\n\nuse std::{include_str, path::Path};\n\n\n\npub struct TypescriptRender {\n\n embedded: bool,\n\n signature: u16,\n\n}\n\n\n\nimpl TypescriptRender {\n\n fn groups(&self, group: &Group, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}export namespace {} {{\\n\", self.spaces(level), group.name);\n\n body = format!(\n\n \"{}{}\",\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 17, "score": 61094.74869538434 }, { "content": "pub mod context;\n\npub mod implementation;\n\n\n\npub use context::Context;\n\npub use implementation::producer::run;\n\npub use implementation::producer::Options;\n", "file_path": "examples/producer/rust/src/producer/mod.rs", "rank": 18, "score": 61093.94228208775 }, { "content": "pub mod broadcasts;\n\npub mod context;\n\npub mod events;\n\npub mod implementation;\n\n\n\npub use context::Context;\n\npub use implementation::{\n\n connect,\n\n consumer::options::{Options, ReconnectionStrategy},\n\n protocol, Consumer, ConsumerError,\n\n};\n", "file_path": "examples/consumer/rust/src/consumer/mod.rs", "rank": 19, "score": 61093.524205371454 }, { "content": " } else {\n\n break;\n\n }\n\n }\n\n path.reverse();\n\n 
path\n\n }\n\n\n\n fn get_full_name(&self, name: String, parent: usize, store: &mut Store) -> String {\n\n let path: Vec<String> = self.get_path(parent, store);\n\n if path.is_empty() {\n\n name\n\n } else {\n\n format!(\"{}::{}\", path.join(\"::\"), name)\n\n }\n\n }\n\n\n\n fn get_entity_path(&self, parent: usize, store: &mut Store) -> Vec<String> {\n\n let mut path: Vec<String> = vec![];\n\n let mut parent = parent;\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 20, "score": 61088.56941710643 }, { "content": " break;\n\n }\n\n }\n\n path.reverse();\n\n path\n\n }\n\n\n\n fn get_available_entity(&self, parent: usize, name: &str, store: &mut Store) -> String {\n\n let mut result = String::from(\"\");\n\n //GroupB: { GroupC: { StructExampleB: instance } }\n\n let path = self.get_entity_path(parent, store);\n\n if path.is_empty() {\n\n result = format!(\"{}: instance \", name);\n\n } else {\n\n for part in path.iter() {\n\n result = format!(\"{}{}: {{ \", result, part);\n\n }\n\n result = format!(\"{}{}: instance {}\", result, name, \"} \".repeat(path.len()));\n\n }\n\n result\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 21, "score": 61088.256280367605 }, { "content": " loop {\n\n if parent == 0 {\n\n break;\n\n }\n\n if let Some(group) = store.get_group(parent) {\n\n path.push(group.name.clone());\n\n parent = group.parent;\n\n } else {\n\n break;\n\n }\n\n }\n\n path.reverse();\n\n path\n\n }\n\n\n\n fn get_available_entity(&self, parent: usize, name: &str, store: &mut Store) -> String {\n\n let mut result = String::from(\"\");\n\n let path = self.get_entity_path(parent, store);\n\n if path.is_empty() {\n\n result = format!(\"AvailableMessages::{}(m)\", name);\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 22, "score": 61088.20120815615 }, { "content": " let mut body = format!(\"{}{} {{\\n\", path, strct.name);\n\n for field in &strct.fields {\n\n body = format!(\n\n \"{}{}{}\\n\",\n\n body,\n\n self.spaces(level),\n\n self.field_default(field, &mut store.clone(), level)\n\n );\n\n }\n\n format!(\"{}{}}}\", body, self.spaces(level - 1))\n\n } else if let Some(enums) = store.get_enum(entity_id) {\n\n let path = if !enums.path.is_empty() {\n\n format!(\"{}::\", enums.path.join(\"::\"))\n\n } else {\n\n String::new()\n\n };\n\n format!(\"{}{}::Defaults\", path, enums.name)\n\n } else {\n\n panic!(\"Fail to find a struct/enum id: {}\", entity_id);\n\n }\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 23, "score": 61087.906870466955 }, { "content": " );\n\n for enums in &store.enums {\n\n if enums.parent == 0 {\n\n body = format!(\"{}{}\\n\", body, self.enums(enums, 0));\n\n }\n\n }\n\n for strct in &store.structs {\n\n if strct.parent == 0 {\n\n body = format!(\"{}{}\\n\", body, self.structs(strct, &mut store.clone(), 0));\n\n }\n\n }\n\n for group in &store.groups {\n\n if group.parent == 0 {\n\n body = format!(\"{}{}\\n\", body, self.groups(group, &mut store.clone(), 0));\n\n }\n\n }\n\n body = format!(\"{}{}\\n\", body, self.buffer(&mut store.clone()));\n\n body = format!(\n\n \"{}pub fn hash() -> String {{ String::from(\\\"{}\\\") }}\\n\",\n\n body,\n\n store.get_hash()\n\n );\n\n helpers::fs::write(dest.to_path_buf(), body, true)\n\n }\n\n}\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 24, "score": 61087.599750613976 }, { "content": " for request in &store.requests {\n\n output = format!(\n\n \"{}\\n{}\\n\",\n\n output,\n\n tools::inject_tabs(1, RenderRequest::new().render(request)?),\n\n );\n\n 
}\n\n for event in &store.events {\n\n output = format!(\n\n \"{}\\n{}\\n\",\n\n output,\n\n tools::inject_tabs(1, RenderEvent::new().render(event)?),\n\n );\n\n }\n\n output = format!(\n\n \"{}\\n{}\\n\",\n\n output,\n\n tools::inject_tabs(1, RenderBeacons::new().render(&store.beacons)?),\n\n );\n\n output = templates::MODULE.replace(\"[[content]]\", &output);\n\n helpers::fs::write(dest.to_path_buf(), output, true)\n\n }\n\n}\n", "file_path": "cli/src/workflow/render/puml/mod.rs", "rank": 25, "score": 61087.599750613976 }, { "content": " format!(\"{}{}\\n\", body, self.enums(enums, &mut store.clone(), 0)).to_string();\n\n }\n\n }\n\n for strct in &store.structs {\n\n if strct.parent == 0 {\n\n body =\n\n format!(\"{}{}\\n\", body, self.structs(strct, &mut store.clone(), 0)).to_string();\n\n }\n\n }\n\n for group in &store.groups {\n\n if group.parent == 0 {\n\n body =\n\n format!(\"{}{}\\n\", body, self.groups(group, &mut store.clone(), 0)).to_string();\n\n }\n\n }\n\n body = format!(\"{}{}\\n\", body, self.buffer(&mut store.clone()));\n\n body = format!(\n\n \"{}export function hash(): string {{ return `{}`; }}\\n\",\n\n body,\n\n store.get_hash()\n\n );\n\n helpers::fs::write(dest.to_path_buf(), body, true)\n\n }\n\n}\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 26, "score": 61087.482718186104 }, { "content": " group.name\n\n );\n\n }\n\n }\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n }\n\n body\n\n }\n\n\n\n fn get_entity_path(&self, parent: usize, store: &mut Store) -> Vec<String> {\n\n let mut path: Vec<String> = vec![];\n\n let mut parent = parent;\n\n loop {\n\n if parent == 0 {\n\n break;\n\n }\n\n if let Some(group) = store.get_group(parent) {\n\n path.push(group.name.clone());\n\n parent = group.parent;\n\n } else {\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 27, "score": 61087.21818362483 }, { "content": " self.spaces(level + 1),\n\n group.name,\n\n group.name\n\n );\n\n }\n\n }\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n }\n\n body\n\n }\n\n\n\n fn get_path(&self, mut parent: usize, store: &mut Store) -> Vec<String> {\n\n let mut path: Vec<String> = vec![];\n\n loop {\n\n if parent == 0 {\n\n break;\n\n }\n\n if let Some(group) = store.get_group(parent) {\n\n path.push(group.name.clone());\n\n parent = group.parent;\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 28, "score": 61087.18340458896 }, { "content": " PrimitiveTypes::ETypes::Ef64 => \"f64\",\n\n PrimitiveTypes::ETypes::Ebool => \"bool\",\n\n PrimitiveTypes::ETypes::Estr => \"String\",\n\n _ => {\n\n panic!(\"Unknown type ref {:?} for {}\", type_ref, item.name);\n\n }\n\n }\n\n .to_string();\n\n } else {\n\n item.get_full_name()\n\n }\n\n }\n\n\n\n fn entity_default(&self, entity_id: usize, store: &mut Store, level: u8) -> String {\n\n if let Some(strct) = store.get_struct(entity_id) {\n\n let path = if !strct.path.is_empty() {\n\n format!(\"{}::\", strct.path.join(\"::\"))\n\n } else {\n\n String::new()\n\n };\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 29, "score": 61086.76737158108 }, { "content": " if self.embedded {\n\n format!(\n\n \"{}{}{}{}{}{}{}\\n\",\n\n self.get_injectable(include_str!(\"./static/protocol.uses.rs\")),\n\n self.get_injectable(include_str!(\"./static/protocol.sizes.mod.rs\")),\n\n self.get_injectable(include_str!(\"./static/protocol.decode.rs\")),\n\n self.get_injectable(include_str!(\"./static/protocol.encode.rs\")),\n\n 
self.get_injectable(include_str!(\"./static/protocol.storage.rs\")),\n\n self.get_injectable(include_str!(\"./static/protocol.packing.rs\")),\n\n self.get_injectable(include_str!(\"./static/protocol.buffer.rs\")),\n\n )\n\n } else {\n\n String::new()\n\n }\n\n }\n\n\n\n fn get_injectable(&self, content: &str) -> String {\n\n let re = Regex::new(r\"^([\\n\\r]|.)*(//\\s?injectable)\").unwrap();\n\n re.replace_all(content, \"\").to_string()\n\n }\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 30, "score": 61086.5289891912 }, { "content": " } else {\n\n let mut chain = String::from(\"\");\n\n for part in path.iter() {\n\n result = format!(\"{}{}AvailableMessages::{}(\", result, chain, part);\n\n chain = format!(\"{}{}::\", chain, part);\n\n }\n\n result = format!(\n\n \"{}{}AvailableMessages::{}(m){}\",\n\n result,\n\n chain,\n\n name,\n\n \")\".repeat(path.len())\n\n );\n\n }\n\n result\n\n }\n\n\n\n fn buffer(&self, store: &mut Store) -> String {\n\n let mut body = format!(\n\n \"{}impl DecodeBuffer<AvailableMessages> for Buffer<AvailableMessages> {{\\n\",\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 31, "score": 61086.34725720326 }, { "content": " self.get_field_decode(field, store, level + 1)\n\n );\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level));\n\n body\n\n } else {\n\n self.get_field_decode(field, store, level)\n\n }\n\n }\n\n\n\n fn get_field_decode(&self, field: &Field, store: &mut Store, level: u8) -> String {\n\n let mut body: String;\n\n if let Some(entity_id) = field.ref_type_id {\n\n if let Some(strct) = store.get_struct(entity_id) {\n\n if field.repeated {\n\n body = format!(\n\n \"{}const arr{}Inst: {} = {}.defaults();\",\n\n self.spaces(level),\n\n field.name,\n\n store.get_struct_path(strct.id).join(\".\"),\n\n store.get_struct_path(strct.id).join(\".\")\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 32, "score": 61085.5627310493 }, { "content": "impl PumlRender {}\n\n\n\nimpl Default for PumlRender {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl PumlRender {\n\n pub fn new() -> Self {\n\n PumlRender {}\n\n }\n\n\n\n pub fn render(\n\n &self,\n\n dest: &Path,\n\n store: &WorkflowStore,\n\n _protocol: &mut Protocol,\n\n ) -> Result<(), String> {\n\n let mut output: String = String::new();\n", "file_path": "cli/src/workflow/render/puml/mod.rs", "rank": 33, "score": 61085.229759858106 }, { "content": "}\n\n\n\nimpl Render for TypescriptRender {\n\n fn new(embedded: bool, signature: u16) -> Self {\n\n TypescriptRender {\n\n embedded,\n\n signature,\n\n }\n\n }\n\n\n\n fn render(&self, store: &mut Store, dest: &Path) -> Result<(), String> {\n\n let mut body = format!(\"{}\\n\", self.includes());\n\n body = format!(\n\n \"{}{}\",\n\n body,\n\n self.get_messages_list(None, &mut store.clone(), 0)\n\n );\n\n for enums in &store.enums {\n\n if enums.parent == 0 {\n\n body =\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 34, "score": 61085.102170761085 }, { "content": "\n\n fn spaces(&self, level: u8) -> String {\n\n \" \".repeat(level as usize)\n\n }\n\n}\n\n\n\nimpl Render for RustRender {\n\n fn new(embedded: bool, signature: u16) -> Self {\n\n RustRender {\n\n embedded,\n\n signature,\n\n }\n\n }\n\n\n\n fn render(&self, store: &mut Store, dest: &Path) -> Result<(), String> {\n\n let mut body = format!(\"{}\\n\", self.includes());\n\n body = format!(\n\n \"{}{}\",\n\n body,\n\n self.get_messages_list(None, &mut store.clone(), 0)\n", "file_path": 
"cli/src/protocol/render/rust/mod.rs", "rank": 35, "score": 61084.945279980784 }, { "content": " } else if let Some(ref_type_id) = variant.ref_type_id {\n\n if let Some(strct) = store.get_struct(ref_type_id) {\n\n body = format!(\n\n \"{}\\n{}case {}: return {}.defaults();\",\n\n body,\n\n self.spaces(level + 1),\n\n pos,\n\n store.get_struct_path(strct.id).join(\".\")\n\n );\n\n } else {\n\n stop!(\n\n \"Unknown type of data in scope of enum {} / {}, ref_type_id: {} \",\n\n enums.name,\n\n variant.name,\n\n ref_type_id\n\n );\n\n }\n\n }\n\n }\n\n body = format!(\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 36, "score": 61084.90821182371 }, { "content": " body = format!(\"{}{}use bytes::{{ Buf }};\\n\", body, self.spaces(level + 1));\n\n body = format!(\n\n \"{}{}\",\n\n body,\n\n self.get_messages_list(Some(group), &mut store.clone(), level + 1)\n\n );\n\n for enum_id in &group.enums {\n\n if let Some(enums) = store.get_enum(*enum_id) {\n\n body = format!(\"{}\\n{}\", body, self.enums(&enums, level + 1));\n\n }\n\n }\n\n for struct_id in &group.structs {\n\n if let Some(strct) = store.get_struct(*struct_id) {\n\n body = format!(\n\n \"{}\\n{}\",\n\n body,\n\n self.structs(&strct, &mut store.clone(), level + 1)\n\n );\n\n }\n\n }\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 37, "score": 61084.8817215869 }, { "content": " }\n\n }\n\n PrimitiveTypes::ETypes::Estr => {\n\n if repeated {\n\n \"Array<string>\"\n\n } else {\n\n \"string\"\n\n }\n\n }\n\n _ => {\n\n stop!(\"Unknown type ref {:?}\", etype);\n\n }\n\n }\n\n .to_string()\n\n }\n\n\n\n fn entity_default(&self, entity_id: usize, store: &mut Store, level: u8) -> String {\n\n if let Some(strct) = store.get_struct(entity_id) {\n\n let mut body = format!(\"new {}({{\", store.get_struct_path(entity_id).join(\".\"));\n\n for field in &strct.fields {\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 38, "score": 61084.87194929082 }, { "content": " field.name,\n\n store.get_struct_path(strct.id).join(\".\")\n\n );\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level));\n\n } else {\n\n body = format!(\n\n \"{}const {}: {} = {};\",\n\n self.spaces(level),\n\n field.name,\n\n field.get_full_name().join(\".\"),\n\n self.entity_default(entity_id, &mut store.clone(), level)\n\n );\n\n body = format!(\n\n \"{}\\n{}const {}Buf: ArrayBufferLike | undefined = storage.get({});\",\n\n body,\n\n self.spaces(level),\n\n field.name,\n\n field.id\n\n );\n\n body = format!(\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 39, "score": 61084.6699114616 }, { "content": " body = format!(\n\n \"{}\\n{}Protocol.Primitives.{}.getSignature(),\",\n\n body,\n\n self.spaces(level),\n\n self.etype(prim_type_ref, variant.repeated)\n\n );\n\n } else if let Some(ref_type_id) = variant.ref_type_id {\n\n if let Some(strct) = store.get_struct(ref_type_id) {\n\n body = format!(\n\n \"{}\\n{}{}.getSignature(),\",\n\n body,\n\n self.spaces(level),\n\n store.get_struct_path(strct.id).join(\".\")\n\n );\n\n } else {\n\n stop!(\n\n \"Unknown type of data in scope of enum {} / {}, ref_type_id: {} \",\n\n enums.name,\n\n variant.name,\n\n ref_type_id\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 40, "score": 61084.51952322467 }, { "content": " \"i64\" => String::from(\"bigint\"),\n\n \"u8\" => String::from(\"number\"),\n\n \"u16\" => String::from(\"number\"),\n\n \"u32\" => String::from(\"number\"),\n\n \"u64\" => String::from(\"bigint\"),\n\n \"f32\" => 
String::from(\"number\"),\n\n \"f64\" => String::from(\"number\"),\n\n \"str\" => String::from(\"string\"),\n\n _ => {\n\n if let Some(ref_type_id) = field.ref_type_id {\n\n if store.get_struct(ref_type_id).is_some() {\n\n store.get_struct_path(ref_type_id).join(\".\")\n\n } else if let Some(enums) = store.get_enum(ref_type_id) {\n\n format!(\"I{}\", enums.name)\n\n } else {\n\n stop!(\n\n \"Fail to find a struct/enum id: {} for field {}\",\n\n ref_type_id,\n\n field.name\n\n );\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 41, "score": 61084.331943183475 }, { "content": " fn enums(&self, enums: &Enum, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\n\n \"{}export interface I{} {{\\n\",\n\n self.spaces(level),\n\n enums.name\n\n );\n\n for variant in &enums.variants {\n\n let variant_type = if let Some(prim_type_ref) = variant.types.clone() {\n\n self.etype_ts(prim_type_ref.clone(), variant.repeated)\n\n } else if let Some(ref_type_id) = variant.ref_type_id {\n\n if let Some(strct) = store.get_struct(ref_type_id) {\n\n store.get_struct_path(strct.id).join(\".\")\n\n } else {\n\n stop!(\n\n \"Unknown type of data in scope of enum {} / {}, ref_type_id: {}\",\n\n enums.name,\n\n variant.name,\n\n ref_type_id\n\n );\n\n }\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 42, "score": 61084.30707695289 }, { "content": " body = format!(\n\n \"{}{}instance = new {}();\\n\",\n\n body,\n\n self.spaces(4),\n\n store.get_enum_path(enums.id).join(\".\")\n\n );\n\n body = format!(\"{}{}err = instance.decode(buffer);\\n\", body, self.spaces(4));\n\n body = format!(\n\n \"{}{}if (err instanceof Error) {{ return err; }}\\n\",\n\n body,\n\n self.spaces(4)\n\n );\n\n body = format!(\n\n \"{}{}enum_instance = instance.get();\\n\",\n\n body,\n\n self.spaces(4)\n\n );\n\n body = format!(\"{}{}instance = enum_instance;\\n\", body, self.spaces(4));\n\n body = format!(\"{}{}return {{ header: {{ id: header.id, sequence: header.sequence, timestamp: header.ts }}, msg: {{ {}}}, getRef: () => instance }};\\n\", body, self.spaces(4), self.get_available_entity(enums.parent, &enums.name, &mut store.clone()));\n\n }\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 43, "score": 61084.235095220865 }, { "content": " self.etype(prim_type_ref.clone(), variant.repeated),\n\n variant.name\n\n )\n\n } else if variant.ref_type_id.is_some() {\n\n format!(\"src.{}\", variant.name)\n\n } else {\n\n stop!(\n\n \"Unknown type of data in scope of enum {} / {}\",\n\n enums.name,\n\n variant.name\n\n );\n\n };\n\n //\n\n let types = if let Some(prim_type_ref) = variant.types.clone() {\n\n self.etype_ts(prim_type_ref, variant.repeated)\n\n } else if let Some(ref_type_id) = variant.ref_type_id {\n\n if let Some(strct) = store.get_struct(ref_type_id) {\n\n store.get_struct_path(strct.id).join(\".\")\n\n } else {\n\n stop!(\"Unknown type of data in scope of enum {} / {}, ref_type_id: {}. Failed to find a struct. 
\", enums.name, variant.name, ref_type_id);\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 44, "score": 61084.12318925874 }, { "content": " self.spaces(level),\n\n field.name,\n\n store.get_struct_path(strct.id).join(\".\"),\n\n if field.repeated { \"true\" } else { \"false\" },\n\n if field.optional { \"true\" } else { \"false\" }\n\n );\n\n } else if let Some(enums) = store.get_enum(entity_id) {\n\n body = format!(\n\n \"{}\\n{}{{ prop: '{}', optional: {}, options: [\",\n\n body,\n\n self.spaces(level),\n\n field.name,\n\n if field.optional { \"true\" } else { \"false\" }\n\n );\n\n for variant in &enums.variants {\n\n if let Some(struct_id) = variant.ref_type_id {\n\n if let Some(strct) = store.get_struct(struct_id) {\n\n body = format!(\"{}\\n{}{{ prop: '{}', types: {}.getValidator({}), optional: false }},\", body, self.spaces(level + 1), variant.name, strct.name, if variant.repeated { \"true\" } else { \"false\" });\n\n } else {\n\n stop!(\"Nested enums aren't supported.\");\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 45, "score": 61084.12318925874 }, { "content": " for structs in &store.structs {\n\n body = format!(\"{}{}case {}:\\n\", body, self.spaces(3), structs.id);\n\n body = format!(\n\n \"{}{}instance = {}.defaults();\\n\",\n\n body,\n\n self.spaces(4),\n\n store.get_struct_path(structs.id).join(\".\")\n\n );\n\n body = format!(\"{}{}err = instance.decode(buffer);\\n\", body, self.spaces(4));\n\n body = format!(\"{}{}return err instanceof Error ? err : {{ header: {{ id: header.id, sequence: header.sequence, timestamp: header.ts }}, msg: {{ {}}}, getRef: () => instance }};\\n\", body, self.spaces(4), self.get_available_entity(structs.parent, &structs.name, &mut store.clone()));\n\n }\n\n body = format!(\n\n \"{}{}default: throw new Error(`Unknown message id=${{header.id}}`);\\n\",\n\n body,\n\n self.spaces(3)\n\n );\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(2));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(1));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(0));\n\n body\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 46, "score": 61083.92539724748 }, { "content": " body = format!(\"{}\\n{}return err;\", body, self.spaces(level + 2));\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level + 1));\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level));\n\n }\n\n body\n\n }\n\n\n\n fn get_enum_decode(&self, enums: &Enum, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}switch (this.getValueIndex()) {{\", self.spaces(level),);\n\n for (pos, variant) in enums.variants.iter().enumerate() {\n\n let types = if let Some(prim_type_ref) = variant.types.clone() {\n\n self.etype_ts(prim_type_ref, variant.repeated)\n\n } else if let Some(ref_type_id) = variant.ref_type_id {\n\n if let Some(strct) = store.get_struct(ref_type_id) {\n\n store.get_struct_path(strct.id).join(\".\")\n\n } else {\n\n stop!(\"Unknown type of data in scope of enum {} / {}, ref_type_id: {}. Failed to find a struct. 
\", enums.name, variant.name, ref_type_id);\n\n }\n\n } else {\n\n stop!(\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 47, "score": 61083.787949898244 }, { "content": " }\n\n } else {\n\n stop!(\n\n \"Unknown type of data in scope of enum {} / {}\",\n\n enums.name,\n\n variant.name\n\n );\n\n };\n\n body = format!(\n\n \"{}\\n{}if (src.{} !== undefined) {{\",\n\n body,\n\n self.spaces(level),\n\n variant.name\n\n );\n\n body = format!(\"{}\\n{}const err: Error | undefined = this.setValue(new Protocol.Primitives.Option<{}>({}, {}));\", body, self.spaces(level + 1), types, pos, value);\n\n body = format!(\n\n \"{}\\n{}if (err instanceof Error) {{\",\n\n body,\n\n self.spaces(level + 1)\n\n );\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 48, "score": 61081.57120855144 }, { "content": " body = format!(\n\n \"{}{}Ok(mut buf) => {{ buffer.append(&mut buf); }},\\n\",\n\n body,\n\n self.spaces(level + 4)\n\n );\n\n body = format!(\n\n \"{}{}Err(e) => {{ return Err(e); }},\\n\",\n\n body,\n\n self.spaces(level + 4)\n\n );\n\n body = format!(\"{}{}}};\\n\", body, self.spaces(level + 3));\n\n body = format!(\"{}{}}} else {{\\n\", body, self.spaces(level + 2));\n\n body = format!(\n\n \"{}{}match get_empty_buffer_val(Some({})) {{\\n\",\n\n body,\n\n self.spaces(level + 3),\n\n field.id\n\n );\n\n body = format!(\n\n \"{}{}Ok(mut buf) => {{ buffer.append(&mut buf); }},\\n\",\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 49, "score": 61081.57120855144 }, { "content": " \"{}: {};\",\n\n field.name,\n\n self.get_declare_type_ref(field, &mut store.clone())\n\n ),\n\n );\n\n }\n\n body = format!(\"{}\\n{}}}\\n\", body, self.spaces(level));\n\n body = format!(\n\n \"{}{}export class {} extends Protocol.Convertor<{}> implements I{}, ISigned<{}> {{\\n\",\n\n body,\n\n self.spaces(level),\n\n strct.name,\n\n strct.name,\n\n strct.name,\n\n strct.name,\n\n );\n\n body = format!(\n\n \"{}\\n{}\",\n\n body,\n\n self.struct_map(strct, &mut store.clone(), level + 1)\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 50, "score": 61081.57120855144 }, { "content": " self.spaces(0)\n\n );\n\n body = format!(\n\n \"{}{}fn get_msg(&self, id: u32, buf: &[u8]) -> Result<AvailableMessages, String> {{\\n\",\n\n body,\n\n self.spaces(1)\n\n );\n\n body = format!(\"{}{}match id {{\\n\", body, self.spaces(2));\n\n for enums in &store.enums {\n\n body = format!(\n\n \"{}{}{} => match {}::extract(buf.to_vec()) {{\\n\",\n\n body,\n\n self.spaces(3),\n\n enums.id,\n\n self.get_full_name(enums.name.clone(), enums.parent, &mut store.clone())\n\n );\n\n body = format!(\n\n \"{}{}Ok(m) => Ok({}),\\n\",\n\n body,\n\n self.spaces(4),\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 51, "score": 61081.57120855144 }, { "content": " );\n\n body = format!(\n\n \"{}{}Ok(mut buf) => {{ buffer.append(&mut buf); }}\\n\",\n\n body,\n\n self.spaces(level + 3)\n\n );\n\n body = format!(\n\n \"{}{}Err(e) => {{ return Err(e) }},\\n\",\n\n body,\n\n self.spaces(level + 3)\n\n );\n\n body = format!(\"{}{}}};\\n\", body, self.spaces(level + 2));\n\n }\n\n body = format!(\"{}{}Ok(buffer)\\n\", body, self.spaces(level + 2));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 1));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n body = format!(\n\n \"{}{}impl PackingStruct for {} {{ }}\\n\",\n\n body,\n\n self.spaces(level),\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 52, "score": 61081.57120855144 }, { 
"content": " body = format!(\"{}{}}},\\n\", body, self.spaces(3));\n\n }\n\n body = format!(\n\n \"{}{}_ => Err(String::from(\\\"No message has been found\\\"))\\n\",\n\n body,\n\n self.spaces(3)\n\n );\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(2));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(1));\n\n body = format!(\n\n \"{}{}fn get_signature(&self) -> u16 {{ {} }}\\n\",\n\n body,\n\n self.spaces(1),\n\n self.signature\n\n );\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(0));\n\n body\n\n }\n\n\n\n fn includes(&self) -> String {\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 53, "score": 61081.57120855144 }, { "content": " );\n\n }\n\n body = format!(\"{}\\n{}}}\\n\", body, self.spaces(level));\n\n body = format!(\"{}{}#[allow(unused_variables)]\\n\", body, self.spaces(level));\n\n body = format!(\"{}{}#[allow(unused_mut)]\\n\", body, self.spaces(level));\n\n body = format!(\n\n \"{}{}impl StructDecode for {} {{\\n\",\n\n body,\n\n self.spaces(level),\n\n strct.name\n\n );\n\n body = format!(\"{}{}fn get_id() -> u32 {{\\n\", body, self.spaces(level + 1));\n\n body = format!(\"{}{}{}\\n\", body, self.spaces(level + 2), strct.id);\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 1));\n\n body = format!(\n\n \"{}{}fn defaults() -> {} {{\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.name\n\n );\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 54, "score": 61081.57120855144 }, { "content": " self.spaces(level + 1),\n\n child.name,\n\n child.name\n\n );\n\n }\n\n }\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n } else {\n\n body = format!(\n\n \"{}{}pub enum AvailableMessages {{\\n\",\n\n body,\n\n self.spaces(level)\n\n );\n\n for enums in &store.enums {\n\n if enums.parent == 0 {\n\n body = format!(\n\n \"{}{}{}({}),\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n enums.name,\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 55, "score": 61081.57120855144 }, { "content": " }\n\n\n\n fn struct_validator(&self, strct: &Struct, level: u8) -> String {\n\n let mut body = format!(\"{}public static getValidator(array: boolean): {{ validate(value: any): Error | undefined }} {{\", self.spaces(level));\n\n body = format!(\"{}\\n{}if (array) {{\", body, self.spaces(level + 1));\n\n body = format!(\n\n \"{}\\n{}return {{ validate(obj: any): Error | undefined {{\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\n\n \"{}\\n{}if (!(obj instanceof Array)) {{\",\n\n body,\n\n self.spaces(level + 3)\n\n );\n\n body = format!(\n\n \"{}\\n{}return new Error(`Expecting Array<{}>`);\",\n\n body,\n\n self.spaces(level + 4),\n\n strct.name\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 56, "score": 61081.57120855144 }, { "content": " \"{}{}impl PackingEnum for {} {{}}\\n\",\n\n body,\n\n self.spaces(level),\n\n enums.name\n\n );\n\n body\n\n }\n\n\n\n fn enum_item_type(&self, item: EnumItem) -> String {\n\n if let Some(type_ref) = item.types {\n\n return match type_ref {\n\n PrimitiveTypes::ETypes::Ei8 => \"i8\",\n\n PrimitiveTypes::ETypes::Ei16 => \"i16\",\n\n PrimitiveTypes::ETypes::Ei32 => \"i32\",\n\n PrimitiveTypes::ETypes::Ei64 => \"i64\",\n\n PrimitiveTypes::ETypes::Eu8 => \"u8\",\n\n PrimitiveTypes::ETypes::Eu16 => \"u16\",\n\n PrimitiveTypes::ETypes::Eu32 => \"u32\",\n\n PrimitiveTypes::ETypes::Eu64 => \"u64\",\n\n PrimitiveTypes::ETypes::Ef32 => \"f32\",\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 57, "score": 61081.57120855144 }, { 
"content": " index\n\n );\n\n }\n\n body = format!(\n\n \"{}{}_ => {{ return Err(String::from(\\\"Not supportable option\\\")); }},\\n\",\n\n body,\n\n self.spaces(level + 3)\n\n );\n\n body = format!(\"{}{}}};\\n\", body, self.spaces(level + 2));\n\n body = format!(\n\n \"{}{}let mut buf = match buf {{\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\"{}{}Ok(buf) => buf,\\n\", body, self.spaces(level + 3));\n\n body = format!(\n\n \"{}{}Err(e) => {{ return Err(e); }},\\n\",\n\n body,\n\n self.spaces(level + 3)\n\n );\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 58, "score": 61081.57120855144 }, { "content": " );\n\n body = format!(\n\n \"{}\\n{}if (storage instanceof Error) {{\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\"{}\\n{}return storage;\", body, self.spaces(level + 3));\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level + 2));\n\n for field in &strct.fields {\n\n body = format!(\n\n \"{}\\n{}\",\n\n body,\n\n self.get_field_decode_wrap(field, &mut store.clone(), level + 2),\n\n );\n\n }\n\n body = format!(\"{}\\n{}return this;\", body, self.spaces(level + 2));\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level + 1));\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n\n \"{}\\n{}public defaults(): {} {{\",\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 59, "score": 61081.57120855144 }, { "content": " let childs = store.get_child_groups(group.id);\n\n for group in childs {\n\n body = format!(\n\n \"{}\\n{}\",\n\n body,\n\n self.groups(&group, &mut store.clone(), level + 1)\n\n );\n\n }\n\n format!(\"{}\\n{}}}\\n\", body, self.spaces(level))\n\n }\n\n\n\n fn structs(&self, strct: &Struct, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}#[derive(Debug, Clone, PartialEq)]\\n\", self.spaces(level));\n\n body = format!(\"{}{}pub struct {} {{\", body, self.spaces(level), strct.name);\n\n for field in &strct.fields {\n\n body = format!(\n\n \"{}\\n{}{}\",\n\n body,\n\n self.spaces(level + 1),\n\n format!(\"pub {}: {},\", field.name, self.get_declare_type_ref(field)),\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 60, "score": 61081.57120855144 }, { "content": " for field in &strct.fields {\n\n body = format!(\n\n \"{}\\n{}{}\",\n\n body,\n\n self.spaces(level + 1),\n\n format!(\n\n \"public {}!: {};\",\n\n field.name,\n\n self.get_declare_type_ref(field, &mut store.clone())\n\n ),\n\n );\n\n }\n\n\n\n for field in &strct.fields {\n\n if let Some(ref_type_id) = field.ref_type_id {\n\n if store.get_enum(ref_type_id).is_some() {\n\n body = format!(\n\n \"{}\\n{}{}\",\n\n body,\n\n self.spaces(level + 1),\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 61, "score": 61081.57120855144 }, { "content": " body = format!(\n\n \"{}\\n{}{},\",\n\n body,\n\n self.spaces(level + 3),\n\n self.get_field_encode(field, &mut store.clone()),\n\n );\n\n }\n\n body = format!(\"{}\\n{}]);\\n\", body, self.spaces(level + 2));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 1));\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n\n \"{}{}public decode(buffer: ArrayBufferLike): Error | {} {{\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.name,\n\n );\n\n body = format!(\n\n \"{}{}const storage = this.getStorage(buffer);\",\n\n body,\n\n self.spaces(level + 2)\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 62, "score": 61081.57120855144 }, { "content": " \"{}constructor(params: I{}) {{\\n\",\n\n 
self.spaces(level),\n\n strct.name\n\n );\n\n body = format!(\"{}{}super();\\n\", body, self.spaces(level + 1));\n\n body = format!(\n\n \"{}{}Object.keys(params).forEach((key: string) => {{\\n\",\n\n body,\n\n self.spaces(level + 1)\n\n );\n\n body = format!(\n\n \"{}{}(this as any)[key] = (params as any)[key];\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\"{}{}}});\", body, self.spaces(level + 1));\n\n for field in &strct.fields {\n\n if let Some(ref_type_id) = field.ref_type_id {\n\n if let Some(enums) = store.get_enum(ref_type_id) {\n\n // -------\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 63, "score": 61081.57120855144 }, { "content": " strct.name\n\n );\n\n body\n\n }\n\n\n\n fn enums(&self, enums: &Enum, level: u8) -> String {\n\n let mut body = format!(\"{}#[derive(Debug, Clone, PartialEq)]\\n\", self.spaces(level));\n\n body = format!(\"{}{}pub enum {} {{\\n\", body, self.spaces(level), enums.name);\n\n for item in &enums.variants {\n\n let item_type = self.enum_item_type(item.clone());\n\n body = format!(\n\n \"{}{}{},\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n format!(\n\n \"{}({})\",\n\n item.name,\n\n if item.repeated {\n\n format!(\"Vec<{}>\", item_type)\n\n } else {\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 64, "score": 61081.57120855144 }, { "content": " }\n\n }\n\n body = format!(\"{}\\n{}}}\\n\", body, self.spaces(level));\n\n body\n\n }\n\n\n\n fn struct_map(&self, strct: &Struct, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\n\n \"{}public static scheme: Protocol.IPropScheme[] = [\",\n\n self.spaces(level)\n\n );\n\n for field in &strct.fields {\n\n body = format!(\n\n \"{}{}\",\n\n body,\n\n self.get_field_map_def(field, &mut store.clone(), level + 1),\n\n );\n\n }\n\n body = format!(\"{}\\n{}];\\n\", body, self.spaces(level));\n\n body\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 65, "score": 61081.57120855144 }, { "content": " body,\n\n self.spaces(level + 1),\n\n self.signature,\n\n );\n\n body = format!(\n\n \"{}{}fn abduct(&mut self) -> Result<Vec<u8>, String> {{\\n\",\n\n body,\n\n self.spaces(level + 1)\n\n );\n\n body = format!(\n\n \"{}{}let (buf, index) = match self {{\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n for (index, item) in enums.variants.iter().enumerate() {\n\n body = format!(\n\n \"{}{}Self::{}(v) => (v.encode(), {}),\\n\",\n\n body,\n\n self.spaces(level + 3),\n\n item.name,\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 66, "score": 61081.57120855144 }, { "content": " );\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level + 3));\n\n body = format!(\"{}\\n{}try {{\", body, self.spaces(level + 3));\n\n body = format!(\n\n \"{}\\n{}obj.forEach((o, index: number) => {{\",\n\n body,\n\n self.spaces(level + 4)\n\n );\n\n body = format!(\n\n \"{}\\n{}if (!(o instanceof {})) {{\",\n\n body,\n\n self.spaces(level + 5),\n\n strct.name\n\n );\n\n body = format!(\n\n \"{}\\n{}throw new Error(`Expecting instance of {} on index #${{index}}`);\",\n\n body,\n\n self.spaces(level + 6),\n\n strct.name\n\n );\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 67, "score": 61081.57120855144 }, { "content": " if field.repeated {\n\n type_str = format!(\"Vec::<{}>\", type_str);\n\n }\n\n if field.optional {\n\n type_str = format!(\"Option::<{}>\", type_str);\n\n }\n\n type_str\n\n }\n\n\n\n fn get_declare_type_ref(&self, field: &Field) -> String {\n\n let mut type_str = 
self.get_type_ref(field);\n\n if field.repeated {\n\n type_str = format!(\"Vec<{}>\", type_str);\n\n }\n\n if field.optional {\n\n type_str = format!(\"Option<{}>\", type_str);\n\n }\n\n type_str\n\n }\n\n\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 68, "score": 61081.57120855144 }, { "content": " self.get_available_entity(enums.parent, &enums.name, &mut store.clone())\n\n );\n\n body = format!(\"{}{}Err(e) => Err(e),\\n\", body, self.spaces(4));\n\n body = format!(\"{}{}}},\\n\", body, self.spaces(3));\n\n }\n\n for structs in &store.structs {\n\n body = format!(\n\n \"{}{}{} => match {}::extract(buf.to_vec()) {{\\n\",\n\n body,\n\n self.spaces(3),\n\n structs.id,\n\n self.get_full_name(structs.name.clone(), structs.parent, &mut store.clone())\n\n );\n\n body = format!(\n\n \"{}{}Ok(m) => Ok({}),\\n\",\n\n body,\n\n self.spaces(4),\n\n self.get_available_entity(structs.parent, &structs.name, &mut store.clone())\n\n );\n\n body = format!(\"{}{}Err(e) => Err(e),\\n\", body, self.spaces(4));\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 69, "score": 61081.57120855144 }, { "content": " body = format!(\"{}{}{} {{\\n\", body, self.spaces(level + 2), strct.name);\n\n for field in &strct.fields {\n\n body = format!(\n\n \"{}{}{}\\n\",\n\n body,\n\n self.spaces(level + 3),\n\n self.field_default(field, &mut store.clone(), level + 3)\n\n );\n\n }\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 2));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 1));\n\n body = format!(\n\n \"{}{}fn extract_from_storage(&mut self, mut storage: Storage) -> Result<(), String> {{\\n\",\n\n body,\n\n self.spaces(level + 1)\n\n );\n\n for field in &strct.fields {\n\n if field.optional {\n\n if let Some(id) = field.ref_type_id {\n\n if let Some(enums) = store.get_enum(id) {\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 70, "score": 61081.57120855144 }, { "content": " item_type\n\n }\n\n ),\n\n );\n\n }\n\n body = format!(\"{}{}Defaults,\\n\", body, self.spaces(level + 1));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n body = format!(\n\n \"{}{}impl EnumDecode for {} {{\\n\",\n\n body,\n\n self.spaces(level),\n\n enums.name\n\n );\n\n body = format!(\n\n \"{}{}fn get_id(&self) -> u32 {{ {} }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n enums.id\n\n );\n\n body = format!(\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 71, "score": 61081.57120855144 }, { "content": " format!(\"private _{}: Primitives.Enum;\", field.name,),\n\n );\n\n }\n\n }\n\n }\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n\n \"{}{}public static getSignature(): string {{ return '{}'; }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.name\n\n );\n\n body = format!(\n\n \"{}{}public static getId(): number {{ return {}; }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.id\n\n );\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 72, "score": 61081.57120855144 }, { "content": " enums.name\n\n );\n\n }\n\n }\n\n for strct in &store.structs {\n\n if strct.parent == 0 {\n\n body = format!(\n\n \"{}{}{}({}),\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.name,\n\n strct.name\n\n );\n\n }\n\n }\n\n for group in &store.groups {\n\n if group.parent == 0 {\n\n body = format!(\n\n \"{}{}{}({}::AvailableMessages),\\n\",\n\n body,\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 73, "score": 61081.57120855144 }, { "content": " 
\"{}{}let mut buffer: Vec<u8> = vec!();\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n for field in &strct.fields {\n\n if field.optional {\n\n if let Some(id) = field.ref_type_id {\n\n if store.get_enum(id).is_some() {\n\n body = format!(\n\n \"{}{}if let Some(mut val) = self.{}.clone() {{\\n\",\n\n body,\n\n self.spaces(level + 2),\n\n field.name\n\n );\n\n body = format!(\n\n \"{}{}match val.get_buf_to_store(Some({})) {{\\n\",\n\n body,\n\n self.spaces(level + 3),\n\n field.id\n\n );\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 74, "score": 61081.57120855144 }, { "content": " body = format!(\"{}{}}};\\n\", body, self.spaces(level + 2));\n\n body = format!(\n\n \"{}{}let mut buffer: Vec<u8> = vec!();\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\n\n \"{}{}buffer.append(&mut (index as u16).to_le_bytes().to_vec());\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\n\n \"{}{}buffer.append(&mut buf);\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\"{}{}Ok(buffer)\\n\", body, self.spaces(level + 2));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 1));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n body = format!(\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 75, "score": 61081.57120855144 }, { "content": " \"{}\\n{}default: throw new Error(`No option with id=${{id}}`);\",\n\n body,\n\n self.spaces(level + 1)\n\n );\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level));\n\n body\n\n }\n\n\n\n fn enum_setter(&self, enums: &Enum, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}if (Object.keys(src).length > 1) {{\", self.spaces(level),);\n\n body = format!(\n\n \"{}\\n{}return new Error(`Option cannot have more then 1 value.`);\",\n\n body,\n\n self.spaces(level + 1)\n\n );\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level));\n\n for (pos, variant) in enums.variants.iter().enumerate() {\n\n let value = if let Some(prim_type_ref) = variant.types.clone() {\n\n format!(\n\n \"new Protocol.Primitives.{}(src.{})\",\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 76, "score": 61081.57120855144 }, { "content": " body = format!(\n\n \"{}\\n{}this._{} = new {}()\",\n\n body,\n\n self.spaces(level + 1),\n\n field.name,\n\n enums.name,\n\n );\n\n body = format!(\n\n \"{}\\n{}{}this._{}.set(this.{});\",\n\n body,\n\n self.spaces(level + 1),\n\n if field.optional {\n\n format!(\"this.{} !== undefined && \", field.name)\n\n } else {\n\n \"\".to_string()\n\n },\n\n field.name,\n\n field.name,\n\n );\n\n }\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 77, "score": 61081.57120855144 }, { "content": " }\n\n\n\n fn field_default(&self, field: &Field, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}: \", field.name);\n\n if field.repeated && !field.optional {\n\n body = format!(\"{}vec![],\", body);\n\n } else if field.optional {\n\n body = format!(\"{}None,\", body);\n\n } else if let Some(default_value) = self.type_default_value(&field.kind) {\n\n body = format!(\"{}{},\", body, default_value);\n\n } else if let Some(struct_id) = field.ref_type_id {\n\n body = format!(\n\n \"{}{},\",\n\n body,\n\n self.entity_default(struct_id, store, level + 1)\n\n );\n\n }\n\n body\n\n }\n\n\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 78, "score": 61081.57120855144 }, { "content": " \"Unknown type of data in scope of enum {} / {}\",\n\n enums.name,\n\n 
variant.name\n\n );\n\n };\n\n body = format!(\n\n \"{}\\n{}case {}: target.{} = this.getValue<{}>(); break;\",\n\n body,\n\n self.spaces(level + 1),\n\n pos,\n\n variant.name,\n\n types\n\n );\n\n }\n\n body = format!(\"{}\\n{}}}\", body, self.spaces(level));\n\n body\n\n }\n\n\n\n fn struct_constructor(&self, strct: &Struct, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 79, "score": 61081.57120855144 }, { "content": " );\n\n }\n\n }\n\n }\n\n body = format!(\"{}\\n{}]\", body, self.spaces(level - 1));\n\n body\n\n }\n\n\n\n fn enum_getter(&self, enums: &Enum, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}switch (id) {{\", self.spaces(level));\n\n for (pos, variant) in enums.variants.iter().enumerate() {\n\n if let Some(prim_type_ref) = variant.types.clone() {\n\n body = format!(\n\n \"{}\\n{}case {}: return new Protocol.Primitives.{}({});\",\n\n body,\n\n self.spaces(level + 1),\n\n pos,\n\n self.etype(prim_type_ref.clone(), variant.repeated),\n\n self.etype_def(prim_type_ref, variant.repeated)\n\n );\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 80, "score": 61081.57120855144 }, { "content": " body,\n\n self.get_messages_list(Some(group), &mut store.clone(), level + 1)\n\n );\n\n for enum_id in &group.enums {\n\n if let Some(enums) = store.get_enum(*enum_id) {\n\n body = format!(\n\n \"{}\\n{}\",\n\n body,\n\n self.enums(&enums, &mut store.clone(), level + 1)\n\n );\n\n }\n\n }\n\n for struct_id in &group.structs {\n\n if let Some(strct) = store.get_struct(*struct_id) {\n\n body = format!(\n\n \"{}\\n{}\",\n\n body,\n\n self.structs(&strct, &mut store.clone(), level + 1)\n\n );\n\n }\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 81, "score": 61081.57120855144 }, { "content": " \"{}{}public get(): {} {{ return this; }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.name\n\n );\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n\n \"{}{}public getId(): number {{ return {}; }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.id\n\n );\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n\n \"{}{}public encode(): ArrayBufferLike {{\\n\",\n\n body,\n\n self.spaces(level + 1)\n\n );\n\n body = format!(\"{}{}return this.collect([\", body, self.spaces(level + 2));\n\n for field in &strct.fields {\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 82, "score": 61081.57120855144 }, { "content": " }\n\n }\n\n }\n\n\n\n fn get_messages_list(&self, group: Option<&Group>, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}#[derive(Debug, Clone)]\\n\", self.spaces(level));\n\n if let Some(group) = group {\n\n body = format!(\n\n \"{}{}pub enum AvailableMessages {{\\n\",\n\n body,\n\n self.spaces(level)\n\n );\n\n for enum_id in &group.enums {\n\n if let Some(enums) = store.get_enum(*enum_id) {\n\n body = format!(\n\n \"{}{}{}({}),\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n enums.name,\n\n enums.name\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 83, "score": 61081.57120855144 }, { "content": " );\n\n }\n\n }\n\n for struct_id in &group.structs {\n\n if let Some(strct) = store.get_struct(*struct_id) {\n\n body = format!(\n\n \"{}{}{}({}),\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.name,\n\n strct.name\n\n );\n\n }\n\n }\n\n let childs = store.get_child_groups(group.id);\n\n for child in childs {\n\n if child.parent == group.id {\n\n body = 
format!(\n\n \"{}{}{}({}::AvailableMessages),\\n\",\n\n body,\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 84, "score": 61081.57120855144 }, { "content": " body,\n\n self.spaces(level + 4)\n\n );\n\n body = format!(\n\n \"{}{}Err(e) => {{ return Err(e); }},\\n\",\n\n body,\n\n self.spaces(level + 4)\n\n );\n\n body = format!(\"{}{}}};\\n\", body, self.spaces(level + 3));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 2));\n\n continue;\n\n }\n\n }\n\n }\n\n body = format!(\n\n \"{}{}match self.{}.get_buf_to_store(Some({})) {{\\n\",\n\n body,\n\n self.spaces(level + 2),\n\n field.name,\n\n field.id\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 85, "score": 61081.57120855144 }, { "content": " fn type_default_value(&self, type_ref: &str) -> Option<&str> {\n\n match type_ref {\n\n \"bool\" => Some(\"true\"),\n\n \"i8\" => Some(\"0\"),\n\n \"i16\" => Some(\"0\"),\n\n \"i32\" => Some(\"0\"),\n\n \"i64\" => Some(\"0\"),\n\n \"u8\" => Some(\"0\"),\n\n \"u16\" => Some(\"0\"),\n\n \"u32\" => Some(\"0\"),\n\n \"u64\" => Some(\"0\"),\n\n \"f32\" => Some(\"0.0\"),\n\n \"f64\" => Some(\"0.0\"),\n\n \"str\" => Some(\"String::from(\\\"\\\")\"),\n\n _ => None,\n\n }\n\n }\n\n\n\n fn get_decode_type_ref(&self, field: &Field) -> String {\n\n let mut type_str = self.get_type_ref(field);\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 86, "score": 61081.57120855144 }, { "content": " self.get_decode_type_ref(field),\n\n field.id\n\n );\n\n body = format!(\"{}{}Ok(val) => val,\\n\", body, self.spaces(level + 3));\n\n body = format!(\n\n \"{}{}Err(e) => {{ return Err(e) }},\\n\",\n\n body,\n\n self.spaces(level + 3)\n\n );\n\n body = format!(\"{}{}}};\\n\", body, self.spaces(level + 2));\n\n }\n\n body = format!(\"{}{}Ok(())\\n\", body, self.spaces(level + 2));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 1));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n body = format!(\"{}{}#[allow(unused_variables)]\\n\", body, self.spaces(level));\n\n body = format!(\"{}{}#[allow(unused_mut)]\\n\", body, self.spaces(level));\n\n body = format!(\n\n \"{}{}impl StructEncode for {} {{\\n\",\n\n body,\n\n self.spaces(level),\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 87, "score": 61081.57120855144 }, { "content": " }\n\n let childs = store.get_child_groups(group.id);\n\n for group in childs {\n\n body = format!(\n\n \"{}\\n{}\",\n\n body,\n\n self.groups(&group, &mut store.clone(), level + 1)\n\n );\n\n }\n\n format!(\"{}\\n{}}}\\n\", body, self.spaces(level))\n\n }\n\n\n\n fn structs(&self, strct: &Struct, store: &mut Store, level: u8) -> String {\n\n let mut body = format!(\"{}export interface I{} {{\", self.spaces(level), strct.name);\n\n for field in &strct.fields {\n\n body = format!(\n\n \"{}\\n{}{}\",\n\n body,\n\n self.spaces(level + 1),\n\n format!(\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 88, "score": 61081.57120855144 }, { "content": " );\n\n\n\n body = format!(\n\n \"{}\\n{}public static defaults(): {} {{\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.name\n\n );\n\n body = format!(\n\n \"{}\\n{}return {};\",\n\n body,\n\n self.spaces(level + 2),\n\n self.entity_default(strct.id, &mut store.clone(), level + 2)\n\n );\n\n body = format!(\"{}\\n{}}}\\n\", body, self.spaces(level + 1));\n\n\n\n body = format!(\"{}\\n{}\", body, self.struct_validator(strct, level + 1));\n\n\n\n body = format!(\"{}\\n{}\", body, self.struct_from(strct, level + 1));\n\n\n", "file_path": 
"cli/src/protocol/render/typescript/mod.rs", "rank": 89, "score": 61081.57120855144 }, { "content": " self.spaces(level + 2)\n\n );\n\n\n\n body = format!(\n\n \"{}{}let mut body_buf = vec![0; buf.len() - sizes::U16_LEN];\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\n\n \"{}{}body_buf.copy_from_slice(&buf[sizes::U16_LEN..]);\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\"{}{}match index {{\\n\", body, self.spaces(level + 2));\n\n for (index, item) in enums.variants.iter().enumerate() {\n\n let item_type = self.enum_item_type(item.clone());\n\n body = format!(\n\n \"{}{}{} => match {}::decode(&body_buf) {{\\n\",\n\n body,\n\n self.spaces(level + 3),\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 90, "score": 61081.57120855144 }, { "content": " index,\n\n if item.repeated {\n\n format!(\"Vec::<{}>\", item_type)\n\n } else {\n\n item_type\n\n }\n\n );\n\n body = format!(\n\n \"{}{}Ok(v) => Ok({}::{}(v)),\\n\",\n\n body,\n\n self.spaces(level + 4),\n\n enums.name,\n\n item.name\n\n );\n\n body = format!(\"{}{}Err(e) => Err(e)\\n\", body, self.spaces(level + 4));\n\n body = format!(\"{}{}}},\\n\", body, self.spaces(level + 3));\n\n }\n\n body = format!(\n\n \"{}{}_ => Err(String::from(\\\"Fail to find relevant value for {}\\\")),\\n\",\n\n body,\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 91, "score": 61081.57120855144 }, { "content": " \"{}{}fn extract(buf: Vec<u8>) -> Result<{}, String> {{\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n enums.name\n\n );\n\n body = format!(\n\n \"{}{}if buf.len() <= sizes::U16_LEN {{\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\"{}{}return Err(String::from(\\\"Fail to extract value for {} because buffer too small\\\"));\\n\", body, self.spaces(level + 3), enums.name);\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 2));\n\n body = format!(\n\n \"{}{}let mut cursor: Cursor<&[u8]> = Cursor::new(&buf);\\n\",\n\n body,\n\n self.spaces(level + 2)\n\n );\n\n body = format!(\n\n \"{}{}let index = cursor.get_u16_le();\\n\",\n\n body,\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 92, "score": 61081.57120855144 }, { "content": " );\n\n body = format!(\"{}{}}};\\n\", body, self.spaces(level + 4));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 3));\n\n body = format!(\"{}{}}} else {{\\n\", body, self.spaces(level + 2));\n\n body = format!(\n\n \"{}{}return Err(\\\"Buffer for property {} isn't found\\\".to_string());\\n\",\n\n body,\n\n self.spaces(level + 3),\n\n field.name\n\n );\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 2));\n\n continue;\n\n }\n\n }\n\n }\n\n body = format!(\n\n \"{}{}self.{} = match {}::get_from_storage(Source::Storage(&mut storage), Some({})) {{\\n\",\n\n body,\n\n self.spaces(level + 2),\n\n field.name,\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 93, "score": 61081.57120855144 }, { "content": " strct.name\n\n );\n\n body = format!(\n\n \"{}{}fn get_id(&self) -> u32 {{ {} }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.id\n\n );\n\n body = format!(\n\n \"{}{}fn get_signature(&self) -> u16 {{ {} }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n self.signature,\n\n );\n\n body = format!(\n\n \"{}{}fn abduct(&mut self) -> Result<Vec<u8>, String> {{\\n\",\n\n body,\n\n self.spaces(level + 1)\n\n );\n\n body = format!(\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 94, "score": 61081.57120855144 }, { "content": " \"{}\\n{}\",\n\n 
body,\n\n self.struct_constructor(strct, &mut store.clone(), level + 1)\n\n );\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n\n \"{}{}public signature(): number {{ return {}; }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n self.signature\n\n );\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n\n \"{}{}public getSignature(): string {{ return '{}'; }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n strct.name\n\n );\n\n body = format!(\"{}\\n\", body);\n\n body = format!(\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 95, "score": 61081.57120855144 }, { "content": " fn get_type_ref(&self, field: &Field) -> String {\n\n match field.kind.clone().as_str() {\n\n \"bool\" => String::from(\"bool\"),\n\n \"i8\" => String::from(\"i8\"),\n\n \"i16\" => String::from(\"i16\"),\n\n \"i32\" => String::from(\"i32\"),\n\n \"i64\" => String::from(\"i64\"),\n\n \"u8\" => String::from(\"u8\"),\n\n \"u16\" => String::from(\"u16\"),\n\n \"u32\" => String::from(\"u32\"),\n\n \"u64\" => String::from(\"u64\"),\n\n \"f32\" => String::from(\"f32\"),\n\n \"f64\" => String::from(\"f64\"),\n\n \"str\" => String::from(\"String\"),\n\n _ => {\n\n if let Some(_ref_type_id) = field.ref_type_id {\n\n field.get_full_name().join(\"::\")\n\n } else {\n\n panic!(\"Invalid type definition for field {}\", field.name);\n\n }\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 96, "score": 61081.57120855144 }, { "content": " self.spaces(level + 3),\n\n enums.name\n\n );\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 2));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level + 1));\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n body = format!(\n\n \"{}{}impl EnumEncode for {} {{\\n\",\n\n body,\n\n self.spaces(level),\n\n enums.name\n\n );\n\n body = format!(\n\n \"{}{}fn get_id(&self) -> u32 {{ {} }}\\n\",\n\n body,\n\n self.spaces(level + 1),\n\n enums.id,\n\n );\n\n body = format!(\n\n \"{}{}fn get_signature(&self) -> u16 {{ {} }}\\n\",\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 97, "score": 61081.57120855144 }, { "content": " body = format!(\n\n \"{}{}if let Some(buf) = storage.get({}) {{\\n\",\n\n body,\n\n self.spaces(level + 2),\n\n field.id\n\n );\n\n body = format!(\"{}{}if buf.is_empty() {{\\n\", body, self.spaces(level + 3));\n\n body = format!(\n\n \"{}{}self.{} = None;\\n\",\n\n body,\n\n self.spaces(level + 4),\n\n field.name\n\n );\n\n body = format!(\"{}{}}} else {{\\n\", body, self.spaces(level + 3));\n\n body = format!(\"{}{}self.{} = match {}::get_from_storage(Source::Storage(&mut storage), Some({})) {{\\n\", body, self.spaces(level + 4), field.name, enums.name, field.id);\n\n body = format!(\"{}{}Ok(val) => Some(val),\\n\", body, self.spaces(level + 5));\n\n body = format!(\n\n \"{}{}Err(e) => {{ return Err(e) }},\\n\",\n\n body,\n\n self.spaces(level + 5)\n", "file_path": "cli/src/protocol/render/rust/mod.rs", "rank": 98, "score": 61081.57120855144 }, { "content": " body,\n\n self.spaces(level + 1),\n\n strct.name\n\n );\n\n body = format!(\n\n \"{}\\n{}return {}.defaults();\",\n\n body,\n\n self.spaces(level + 2),\n\n strct.name\n\n );\n\n body = format!(\"{}\\n{}}}\\n\", body, self.spaces(level + 1));\n\n\n\n body = format!(\"{}{}}}\\n\", body, self.spaces(level));\n\n body\n\n }\n\n\n\n fn enum_declaration(&self, enums: &Enum, store: &mut Store, level: u8) -> String {\n\n let mut body = \"[\".to_string();\n\n for variant in &enums.variants {\n\n if let Some(prim_type_ref) = variant.types.clone() 
{\n", "file_path": "cli/src/protocol/render/typescript/mod.rs", "rank": 99, "score": 61081.57120855144 } ]
Rust
lib/oxrdf/src/interning.rs
etiennept/oxigraph
cbccdfba867204ce4b20b6fc16e37a30719f90eb
use crate::*; use lasso::{Key, Rodeo, Spur}; use std::collections::HashMap; #[derive(Debug, Default)] pub struct Interner { strings: Rodeo, #[cfg(feature = "rdf-star")] triples: HashMap<InternedTriple, Triple>, } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub struct InternedNamedNode { id: Spur, } impl InternedNamedNode { pub fn encoded_into(named_node: NamedNodeRef<'_>, interner: &mut Interner) -> Self { Self { id: interner.strings.get_or_intern(named_node.as_str()), } } pub fn encoded_from(named_node: NamedNodeRef<'_>, interner: &Interner) -> Option<Self> { Some(Self { id: interner.strings.get(named_node.as_str())?, }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> NamedNodeRef<'a> { NamedNodeRef::new_unchecked(interner.strings.resolve(&self.id)) } pub fn first() -> Self { Self { id: fist_spur() } } pub fn next(self) -> Self { Self { id: next_spur(self.id), } } pub fn impossible() -> Self { Self { id: impossible_spur(), } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub struct InternedBlankNode { id: Spur, } impl InternedBlankNode { pub fn encoded_into(blank_node: BlankNodeRef<'_>, interner: &mut Interner) -> Self { Self { id: interner.strings.get_or_intern(blank_node.as_str()), } } pub fn encoded_from(blank_node: BlankNodeRef<'_>, interner: &Interner) -> Option<Self> { Some(Self { id: interner.strings.get(blank_node.as_str())?, }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> BlankNodeRef<'a> { BlankNodeRef::new_unchecked(interner.strings.resolve(&self.id)) } pub fn next(self) -> Self { Self { id: next_spur(self.id), } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub enum InternedLiteral { String { value_id: Spur, }, LanguageTaggedString { value_id: Spur, language_id: Spur, }, TypedLiteral { value_id: Spur, datatype: InternedNamedNode, }, } impl InternedLiteral { pub fn encoded_into(literal: LiteralRef<'_>, interner: &mut Interner) -> Self { let value_id = interner.strings.get_or_intern(literal.value()); if literal.is_plain() { if let Some(language) = literal.language() { Self::LanguageTaggedString { value_id, language_id: interner.strings.get_or_intern(language), } } else { Self::String { value_id } } } else { Self::TypedLiteral { value_id, datatype: InternedNamedNode::encoded_into(literal.datatype(), interner), } } } pub fn encoded_from(literal: LiteralRef<'_>, interner: &Interner) -> Option<Self> { let value_id = interner.strings.get(literal.value())?; Some(if literal.is_plain() { if let Some(language) = literal.language() { Self::LanguageTaggedString { value_id, language_id: interner.strings.get(language)?, } } else { Self::String { value_id } } } else { Self::TypedLiteral { value_id, datatype: InternedNamedNode::encoded_from(literal.datatype(), interner)?, } }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> LiteralRef<'a> { match self { InternedLiteral::String { value_id } => { LiteralRef::new_simple_literal(interner.strings.resolve(value_id)) } InternedLiteral::LanguageTaggedString { value_id, language_id, } => LiteralRef::new_language_tagged_literal_unchecked( interner.strings.resolve(value_id), interner.strings.resolve(language_id), ), InternedLiteral::TypedLiteral { value_id, datatype } => LiteralRef::new_typed_literal( interner.strings.resolve(value_id), datatype.decode_from(interner), ), } } pub fn next(&self) -> Self { match self { Self::String { value_id } => Self::String { value_id: next_spur(*value_id), }, Self::LanguageTaggedString { value_id, language_id, 
} => Self::LanguageTaggedString { value_id: *value_id, language_id: next_spur(*language_id), }, Self::TypedLiteral { value_id, datatype } => Self::TypedLiteral { value_id: *value_id, datatype: datatype.next(), }, } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedSubject { NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), #[cfg(feature = "rdf-star")] Triple(Box<InternedTriple>), } impl InternedSubject { pub fn encoded_into(node: SubjectRef<'_>, interner: &mut Interner) -> Self { match node { SubjectRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_into(node, interner)) } SubjectRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_into(node, interner)) } #[cfg(feature = "rdf-star")] SubjectRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_into( triple.as_ref(), interner, ))), } } pub fn encoded_from(node: SubjectRef<'_>, interner: &Interner) -> Option<Self> { Some(match node { SubjectRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_from(node, interner)?) } SubjectRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_from(node, interner)?) } #[cfg(feature = "rdf-star")] SubjectRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_from( triple.as_ref(), interner, )?)), }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> SubjectRef<'a> { match self { Self::NamedNode(node) => SubjectRef::NamedNode(node.decode_from(interner)), Self::BlankNode(node) => SubjectRef::BlankNode(node.decode_from(interner)), #[cfg(feature = "rdf-star")] Self::Triple(triple) => SubjectRef::Triple(&interner.triples[triple.as_ref()]), } } pub fn first() -> Self { Self::NamedNode(InternedNamedNode::first()) } pub fn next(&self) -> Self { match self { Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), #[cfg(feature = "rdf-star")] Self::Triple(triple) => Self::Triple(Box::new(triple.next())), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedGraphName { DefaultGraph, NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), } impl InternedGraphName { pub fn encoded_into(node: GraphNameRef<'_>, interner: &mut Interner) -> Self { match node { GraphNameRef::DefaultGraph => Self::DefaultGraph, GraphNameRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_into(node, interner)) } GraphNameRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_into(node, interner)) } } } pub fn encoded_from(node: GraphNameRef<'_>, interner: &Interner) -> Option<Self> { Some(match node { GraphNameRef::DefaultGraph => Self::DefaultGraph, GraphNameRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_from(node, interner)?) } GraphNameRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_from(node, interner)?) 
} }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> GraphNameRef<'a> { match self { Self::DefaultGraph => GraphNameRef::DefaultGraph, Self::NamedNode(node) => GraphNameRef::NamedNode(node.decode_from(interner)), Self::BlankNode(node) => GraphNameRef::BlankNode(node.decode_from(interner)), } } pub fn first() -> Self { Self::DefaultGraph } pub fn next(&self) -> Self { match self { Self::DefaultGraph => Self::NamedNode(InternedNamedNode::first()), Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedTerm { NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), Literal(InternedLiteral), #[cfg(feature = "rdf-star")] Triple(Box<InternedTriple>), } impl InternedTerm { pub fn encoded_into(term: TermRef<'_>, interner: &mut Interner) -> Self { match term { TermRef::NamedNode(term) => { Self::NamedNode(InternedNamedNode::encoded_into(term, interner)) } TermRef::BlankNode(term) => { Self::BlankNode(InternedBlankNode::encoded_into(term, interner)) } TermRef::Literal(term) => Self::Literal(InternedLiteral::encoded_into(term, interner)), #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_into( triple.as_ref(), interner, ))), } } pub fn encoded_from(term: TermRef<'_>, interner: &Interner) -> Option<Self> { Some(match term { TermRef::NamedNode(term) => { Self::NamedNode(InternedNamedNode::encoded_from(term, interner)?) } TermRef::BlankNode(term) => { Self::BlankNode(InternedBlankNode::encoded_from(term, interner)?) } TermRef::Literal(term) => Self::Literal(InternedLiteral::encoded_from(term, interner)?), #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_from( triple.as_ref(), interner, )?)), }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> TermRef<'a> { match self { Self::NamedNode(term) => TermRef::NamedNode(term.decode_from(interner)), Self::BlankNode(term) => TermRef::BlankNode(term.decode_from(interner)), Self::Literal(term) => TermRef::Literal(term.decode_from(interner)), #[cfg(feature = "rdf-star")] Self::Triple(triple) => TermRef::Triple(&interner.triples[triple.as_ref()]), } } pub fn first() -> Self { Self::NamedNode(InternedNamedNode::first()) } pub fn next(&self) -> Self { match self { Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), Self::Literal(node) => Self::Literal(node.next()), #[cfg(feature = "rdf-star")] Self::Triple(triple) => Self::Triple(Box::new(triple.next())), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub struct InternedTriple { pub subject: InternedSubject, pub predicate: InternedNamedNode, pub object: InternedTerm, } #[cfg(feature = "rdf-star")] impl InternedTriple { pub fn encoded_into(triple: TripleRef<'_>, interner: &mut Interner) -> Self { let interned_triple = Self { subject: InternedSubject::encoded_into(triple.subject, interner), predicate: InternedNamedNode::encoded_into(triple.predicate, interner), object: InternedTerm::encoded_into(triple.object, interner), }; interner .triples .insert(interned_triple.clone(), triple.into_owned()); interned_triple } pub fn encoded_from(triple: TripleRef<'_>, interner: &Interner) -> Option<Self> { let interned_triple = Self 
{ subject: InternedSubject::encoded_from(triple.subject, interner)?, predicate: InternedNamedNode::encoded_from(triple.predicate, interner)?, object: InternedTerm::encoded_from(triple.object, interner)?, }; if interner.triples.contains_key(&interned_triple) { Some(interned_triple) } else { None } } pub fn next(&self) -> Self { Self { subject: self.subject.clone(), predicate: self.predicate, object: self.object.next(), } } } fn fist_spur() -> Spur { Spur::try_from_usize(0).unwrap() } fn next_spur(value: Spur) -> Spur { Spur::try_from_usize(value.into_usize() + 1).unwrap() } fn impossible_spur() -> Spur { Spur::try_from_usize((u32::MAX - 10).try_into().unwrap()).unwrap() }
use crate::*; use lasso::{Key, Rodeo, Spur}; use std::collections::HashMap; #[derive(Debug, Default)] pub struct Interner { strings: Rodeo, #[cfg(feature = "rdf-star")] triples: HashMap<InternedTriple, Triple>, } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub struct InternedNamedNode { id: Spur, } impl InternedNamedNode { pub fn encoded_into(named_node: NamedNodeRef<'_>, interner: &mut Interner) -> Self { Self { id: interner.strings.get_or_intern(named_node.as_str()), } } pub fn encoded_from(named_node: NamedNodeRef<'_>, interner: &Interner) -> Option<Self> { Some(Self { id: interner.strings.get(named_node.as_str())?, }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> NamedNodeRef<'a> { NamedNodeRef::new_unchecked(interner.strings.resolve(&self.id)) } pub fn first() -> Self { Self { id: fist_spur() } } pub fn next(self) -> Self { Self { id: next_spur(self.id), } } pub fn impossible() -> Self { Self { id: impossible_spur(), } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub struct InternedBlankNode { id: Spur, } impl InternedBlankNode { pub fn encoded_into(blank_node: BlankNodeRef<'_>, interner: &mut Interner) -> Self { Self { id: interner.strings.get_or_intern(blank_node.as_str()), } } pub fn encoded_from(blank_node: BlankNodeRef<'_>, interner: &Interner) -> Option<Self> { Some(Self { id: interner.strings.get(blank_node.as_str())?, }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> BlankNodeRef<'a> { BlankNodeRef::new_unchecked(interner.strings.resolve(&self.id)) } pub fn next(self) -> Self { Self { id: next_spur(self.id), } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] pub enum InternedLiteral { String { value_id: Spur, }, LanguageTaggedString { value_id: Spur, language_id: Spur, }, TypedLiteral { value_id: Spur, datatype: InternedNamedNode, }, } impl InternedLiteral { pub fn encoded_into(literal: LiteralRef<'_>, interner: &mut Interner) -> Self { let value_id = interner.strings.get_or_intern(literal.value()); if literal.is_plain() { if let Some(language) = literal.language() { Self::LanguageTaggedString { value_id, language_id: interner.strings.get_or_intern(language), } } else { Self::String { value_id } } } else { Self::TypedLiteral { value_id, datatype: InternedNamedNode::encoded_into(literal.datatype(), interner), } } } pub fn encoded_from(literal: LiteralRef<'_>, interner: &Interner) -> Option<Self> { let value_id = interner.strings.get(literal.value())?; Some(if literal.is_plain() { if let Some(language) = literal.language() { Self::LanguageTaggedString { value_id, language_id: interner.strings.get(language)?, } } else { Self::String { value_id } } } else { Self::TypedLiteral { value_id, datatype: InternedNamedNode::encoded_from(literal.datatype(), interner)?, } }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> LiteralRef<'a> { match self { InternedLiteral::String { value_id } => { LiteralRef::new_simple_literal(interner.strings.resolve(value_id)) } InternedLiteral::LanguageTaggedString { value_id, language_id, } => LiteralRef::new_language_tagged_literal_unchecked( interner.strings.resolve(value_id), interner.strings.resolve(language_id), ), InternedLiteral::TypedLiteral { value_id, datatype } => LiteralRef::new_typed_literal( interner.strings.resolve(value_id), datatype.decode_from(interner), ), } } pub fn next(&self) -> Self { match self { Self::String { value_id } => Self::String { value_id: next_spur(*value_id), }, Self::LanguageTaggedString { value_id, language_id, 
} => Self::LanguageTaggedString { value_id: *value_id, language_id: next_spur(*language_id), }, Self::TypedLiteral { value_id, datatype } => Self::TypedLiteral { value_id: *value_id, datatype: datatype.next(), }, } } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedSubject { NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), #[cfg(feature = "rdf-star")] Triple(Box<InternedTriple>), } impl InternedSubject { pub fn encoded_into(node: SubjectRef<'_>, interner: &mut Interner) -> Self { match node { SubjectRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_into(node, interner)) } SubjectRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_into(node, interner)) } #[cfg(feature = "rdf-star")] SubjectRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_into( triple.as_ref(), interner, ))), } } pub fn encoded_from(node: SubjectRef<'_>, interner: &Interner) -> Option<Self> { Some(match node { SubjectRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_from(node, interner)?) } SubjectRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_from(node, interner)?) } #[cfg(feature = "rdf-star")] SubjectRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_from( triple.as_ref(), interner, )?)), }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> SubjectRef<'a> { match self { Self::NamedNode(node) => SubjectRef::NamedNode(node.decode_from(interner)), Self::BlankNode(node) => SubjectRef::BlankNode(node.decode_from(interner)), #[cfg(feature = "rdf-star")] Self::Triple(triple) => SubjectRef::Triple(&interner.triples[triple.as_ref()]), } } pub fn first() -> Self { Self::NamedNode(InternedNamedNode::first()) } pub fn next(&self) -> Self { match self { Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), #[cfg(feature = "rdf-star")] Self::Triple(triple) => Self::Triple(Box::new(triple.next())), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedGraphName { DefaultGraph, NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), } impl InternedGraphName { pub fn encoded_into(node: GraphNameRef<'_>, interner: &mut Interner) -> Self { match node { GraphNameRef::DefaultGraph => Self::DefaultGraph, GraphNameRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_into(node, interner)) } GraphNameRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_into(node, interner)) } } } pub fn encoded_from(node: GraphNameRef<'_>, interner: &Interner) -> Option<Self> { Some(match node { GraphNameRef::DefaultGraph => Self::DefaultGraph, GraphNameRef::NamedNode(node) => { Self::NamedNode(InternedNamedNode::encoded_from(node, interner)?) } GraphNameRef::BlankNode(node) => { Self::BlankNode(InternedBlankNode::encoded_from(node, interner)?) } }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> GraphNameRef<'a> { match self { Self::DefaultGraph => GraphNameRef::DefaultGraph, Self::NamedNode(node) => GraphNameRef::NamedNode(node.decode_from(interner)), Self::BlankNode(node) => GraphNameRef::BlankNode(node.decode_from(interner)), } } pub fn first() -> Self { Self::DefaultGraph } pub fn next(&self) -> Self { match self { Self::DefaultGraph => Self::NamedNode(InternedNamedNode::first()), Self::NamedNode(node) => Self::NamedNode(node.next()
interner, ))), } } pub fn encoded_from(term: TermRef<'_>, interner: &Interner) -> Option<Self> { Some(match term { TermRef::NamedNode(term) => { Self::NamedNode(InternedNamedNode::encoded_from(term, interner)?) } TermRef::BlankNode(term) => { Self::BlankNode(InternedBlankNode::encoded_from(term, interner)?) } TermRef::Literal(term) => Self::Literal(InternedLiteral::encoded_from(term, interner)?), #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_from( triple.as_ref(), interner, )?)), }) } pub fn decode_from<'a>(&self, interner: &'a Interner) -> TermRef<'a> { match self { Self::NamedNode(term) => TermRef::NamedNode(term.decode_from(interner)), Self::BlankNode(term) => TermRef::BlankNode(term.decode_from(interner)), Self::Literal(term) => TermRef::Literal(term.decode_from(interner)), #[cfg(feature = "rdf-star")] Self::Triple(triple) => TermRef::Triple(&interner.triples[triple.as_ref()]), } } pub fn first() -> Self { Self::NamedNode(InternedNamedNode::first()) } pub fn next(&self) -> Self { match self { Self::NamedNode(node) => Self::NamedNode(node.next()), Self::BlankNode(node) => Self::BlankNode(node.next()), Self::Literal(node) => Self::Literal(node.next()), #[cfg(feature = "rdf-star")] Self::Triple(triple) => Self::Triple(Box::new(triple.next())), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub struct InternedTriple { pub subject: InternedSubject, pub predicate: InternedNamedNode, pub object: InternedTerm, } #[cfg(feature = "rdf-star")] impl InternedTriple { pub fn encoded_into(triple: TripleRef<'_>, interner: &mut Interner) -> Self { let interned_triple = Self { subject: InternedSubject::encoded_into(triple.subject, interner), predicate: InternedNamedNode::encoded_into(triple.predicate, interner), object: InternedTerm::encoded_into(triple.object, interner), }; interner .triples .insert(interned_triple.clone(), triple.into_owned()); interned_triple } pub fn encoded_from(triple: TripleRef<'_>, interner: &Interner) -> Option<Self> { let interned_triple = Self { subject: InternedSubject::encoded_from(triple.subject, interner)?, predicate: InternedNamedNode::encoded_from(triple.predicate, interner)?, object: InternedTerm::encoded_from(triple.object, interner)?, }; if interner.triples.contains_key(&interned_triple) { Some(interned_triple) } else { None } } pub fn next(&self) -> Self { Self { subject: self.subject.clone(), predicate: self.predicate, object: self.object.next(), } } } fn fist_spur() -> Spur { Spur::try_from_usize(0).unwrap() } fn next_spur(value: Spur) -> Spur { Spur::try_from_usize(value.into_usize() + 1).unwrap() } fn impossible_spur() -> Spur { Spur::try_from_usize((u32::MAX - 10).try_into().unwrap()).unwrap() }
), Self::BlankNode(node) => Self::BlankNode(node.next()), } } pub fn impossible() -> Self { Self::NamedNode(InternedNamedNode::impossible()) } } #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] pub enum InternedTerm { NamedNode(InternedNamedNode), BlankNode(InternedBlankNode), Literal(InternedLiteral), #[cfg(feature = "rdf-star")] Triple(Box<InternedTriple>), } impl InternedTerm { pub fn encoded_into(term: TermRef<'_>, interner: &mut Interner) -> Self { match term { TermRef::NamedNode(term) => { Self::NamedNode(InternedNamedNode::encoded_into(term, interner)) } TermRef::BlankNode(term) => { Self::BlankNode(InternedBlankNode::encoded_into(term, interner)) } TermRef::Literal(term) => Self::Literal(InternedLiteral::encoded_into(term, interner)), #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => Self::Triple(Box::new(InternedTriple::encoded_into( triple.as_ref(),
random
[ { "content": "fn named_node_repr(node: NamedNodeRef<'_>, buffer: &mut String) {\n\n buffer.push_str(\"<NamedNode value=\");\n\n buffer.push_str(node.as_str());\n\n buffer.push('>');\n\n}\n\n\n", "file_path": "python/src/model.rs", "rank": 1, "score": 289960.8145970217 }, { "content": "fn blank_node_repr(node: BlankNodeRef<'_>, buffer: &mut String) {\n\n buffer.push_str(\"<BlankNode value=\");\n\n buffer.push_str(node.as_str());\n\n buffer.push('>');\n\n}\n\n\n", "file_path": "python/src/model.rs", "rank": 2, "score": 289960.8145970217 }, { "content": "fn triple_repr(triple: TripleRef<'_>, buffer: &mut String) {\n\n buffer.push_str(\"<Triple subject=\");\n\n term_repr(triple.subject.into(), buffer);\n\n buffer.push_str(\" predicate=\");\n\n named_node_repr(triple.predicate, buffer);\n\n buffer.push_str(\" object=\");\n\n term_repr(triple.object, buffer);\n\n buffer.push('>');\n\n}\n\n\n\n#[pyclass(module = \"oxigraph\")]\n\npub struct TripleComponentsIter {\n\n inner: IntoIter<Term>,\n\n}\n\n\n\n#[pymethods]\n\nimpl TripleComponentsIter {\n\n fn __iter__(slf: PyRef<'_, Self>) -> Py<Self> {\n\n slf.into()\n\n }\n", "file_path": "python/src/model.rs", "rank": 3, "score": 268875.6648477781 }, { "content": "pub fn term_repr(term: TermRef<'_>, buffer: &mut String) {\n\n match term {\n\n TermRef::NamedNode(node) => named_node_repr(node, buffer),\n\n TermRef::BlankNode(node) => blank_node_repr(node, buffer),\n\n TermRef::Literal(literal) => literal_repr(literal, buffer),\n\n TermRef::Triple(triple) => triple_repr(triple.as_ref(), buffer),\n\n }\n\n}\n\n\n", "file_path": "python/src/model.rs", "rank": 4, "score": 268648.32737398165 }, { "content": "#[allow(unused_must_use)]\n\npub fn build_report(results: impl IntoIterator<Item = TestResult>) -> String {\n\n let mut buffer = String::new();\n\n writeln!(&mut buffer, \"@prefix dc: <http://purl.org/dc/terms/> .\");\n\n writeln!(\n\n &mut buffer,\n\n \"@prefix doap: <http://usefulinc.com/ns/doap#> .\"\n\n );\n\n writeln!(&mut buffer, \"@prefix earl: <http://www.w3.org/ns/earl#> .\");\n\n writeln!(&mut buffer, \"@prefix foaf: <http://xmlns.com/foaf/0.1/> .\");\n\n writeln!(\n\n &mut buffer,\n\n \"@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\"\n\n );\n\n writeln!(\n\n &mut buffer,\n\n \"@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\"\n\n );\n\n writeln!(&mut buffer);\n\n writeln!(&mut buffer, \"<> foaf:primaryTopic <http://oxigraph.org/> ;\");\n\n writeln!(\n", "file_path": "testsuite/src/report.rs", "rank": 5, "score": 265551.89086735545 }, { "content": "pub fn to_err(e: impl ToString) -> JsValue {\n\n JsValue::from(Error::new(&e.to_string()))\n\n}\n", "file_path": "js/src/utils.rs", "rank": 6, "score": 262230.6570343904 }, { "content": "#[wasm_bindgen(js_name = namedNode)]\n\npub fn named_node(value: String) -> Result<JsNamedNode, JsValue> {\n\n NamedNode::new(value)\n\n .map(|v| v.into())\n\n .map_err(|v| UriError::new(&v.to_string()).into())\n\n}\n\n\n", "file_path": "js/src/model.rs", "rank": 8, "score": 252950.15425219256 }, { "content": "fn hash(t: &impl Hash) -> u64 {\n\n let mut s = DefaultHasher::new();\n\n t.hash(&mut s);\n\n s.finish()\n\n}\n\n\n", "file_path": "python/src/model.rs", "rank": 9, "score": 250393.6222133567 }, { "content": "#[wasm_bindgen(js_name = blankNode)]\n\npub fn blank_node(value: Option<String>) -> Result<JsBlankNode, JsValue> {\n\n Ok(if let Some(value) = value {\n\n BlankNode::new(value).map_err(to_err)?\n\n } else {\n\n BlankNode::default()\n\n }\n\n .into())\n\n}\n\n\n", "file_path": 
"js/src/model.rs", "rank": 10, "score": 246110.1469459007 }, { "content": "fn hash_deduplicate<T: Eq + Hash + Clone>(\n\n iter: impl Iterator<Item = Result<T, EvaluationError>>,\n\n) -> impl Iterator<Item = Result<T, EvaluationError>> {\n\n let mut already_seen = HashSet::with_capacity(iter.size_hint().0);\n\n iter.filter(move |e| {\n\n if let Ok(e) = e {\n\n if already_seen.contains(e) {\n\n false\n\n } else {\n\n already_seen.insert(e.clone());\n\n true\n\n }\n\n } else {\n\n true\n\n }\n\n })\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 11, "score": 232010.76038377197 }, { "content": "fn write_escaped_csv_string(s: &str, sink: &mut impl Write) -> io::Result<()> {\n\n if s.bytes().any(|c| matches!(c, b'\"' | b',' | b'\\n' | b'\\r')) {\n\n sink.write_all(b\"\\\"\")?;\n\n for c in s.bytes() {\n\n if c == b'\\\"' {\n\n sink.write_all(b\"\\\"\\\"\")\n\n } else {\n\n sink.write_all(&[c])\n\n }?;\n\n }\n\n sink.write_all(b\"\\\"\")\n\n } else {\n\n sink.write_all(s.as_bytes())\n\n }\n\n}\n\n\n", "file_path": "lib/sparesults/src/csv.rs", "rank": 12, "score": 230569.70680727175 }, { "content": "fn generate_uuid(buffer: &mut String) {\n\n let mut uuid = random::<u128>().to_ne_bytes();\n\n uuid[6] = (uuid[6] & 0x0F) | 0x40;\n\n uuid[8] = (uuid[8] & 0x3F) | 0x80;\n\n\n\n write_hexa_bytes(&uuid[0..4], buffer);\n\n buffer.push('-');\n\n write_hexa_bytes(&uuid[4..6], buffer);\n\n buffer.push('-');\n\n write_hexa_bytes(&uuid[6..8], buffer);\n\n buffer.push('-');\n\n write_hexa_bytes(&uuid[8..10], buffer);\n\n buffer.push('-');\n\n write_hexa_bytes(&uuid[10..16], buffer);\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 13, "score": 227427.41128494992 }, { "content": "pub fn read_file_to_string(url: &str) -> Result<String> {\n\n let mut buf = String::new();\n\n read_file(url)?.read_to_string(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "testsuite/src/files.rs", "rank": 15, "score": 223124.03277027645 }, { "content": "pub fn insert_term<F: FnMut(&StrHash, &str) -> Result<(), StorageError>>(\n\n term: TermRef<'_>,\n\n encoded: &EncodedTerm,\n\n insert_str: &mut F,\n\n) -> Result<(), StorageError> {\n\n match term {\n\n TermRef::NamedNode(node) => {\n\n if let EncodedTerm::NamedNode { iri_id } = encoded {\n\n insert_str(iri_id, node.as_str())\n\n } else {\n\n unreachable!(\"Invalid term encoding {:?} for {}\", encoded, term)\n\n }\n\n }\n\n TermRef::BlankNode(node) => match encoded {\n\n EncodedTerm::BigBlankNode { id_id } => insert_str(id_id, node.as_str()),\n\n EncodedTerm::SmallBlankNode(..) | EncodedTerm::NumericalBlankNode { .. 
} => Ok(()),\n\n _ => unreachable!(\"Invalid term encoding {:?} for {}\", encoded, term),\n\n },\n\n TermRef::Literal(literal) => match encoded {\n\n EncodedTerm::BigStringLiteral { value_id }\n", "file_path": "lib/src/storage/numeric_encoder.rs", "rank": 16, "score": 214559.66048977012 }, { "content": "fn build_string_literal_from_id(id: SmallStringOrId) -> EncodedTerm {\n\n match id {\n\n SmallStringOrId::Small(value) => EncodedTerm::SmallStringLiteral(value),\n\n SmallStringOrId::Big(value_id) => EncodedTerm::BigStringLiteral { value_id },\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 17, "score": 213864.1239125153 }, { "content": "fn get_required_str<L: StrLookup>(lookup: &L, id: &StrHash) -> Result<String, StorageError> {\n\n Ok(lookup.get_str(id)?.ok_or_else(|| {\n\n CorruptionError::new(format!(\n\n \"Not able to find the string with id {:?} in the string store\",\n\n id\n\n ))\n\n })?)\n\n}\n", "file_path": "lib/src/storage/numeric_encoder.rs", "rank": 18, "score": 213805.84221549073 }, { "content": "pub fn register_parser_tests(evaluator: &mut TestEvaluator) {\n\n evaluator.register(\n\n \"http://www.w3.org/ns/rdftest#TestNTriplesPositiveSyntax\",\n\n evaluate_positive_syntax_test,\n\n );\n\n evaluator.register(\n\n \"http://www.w3.org/ns/rdftest#TestNQuadsPositiveSyntax\",\n\n evaluate_positive_syntax_test,\n\n );\n\n evaluator.register(\n\n \"http://www.w3.org/ns/rdftest#TestTurtlePositiveSyntax\",\n\n evaluate_positive_syntax_test,\n\n );\n\n evaluator.register(\n\n \"http://www.w3.org/ns/rdftest#TestTrigPositiveSyntax\",\n\n evaluate_positive_syntax_test,\n\n );\n\n evaluator.register(\n\n \"http://www.w3.org/ns/rdftest#TestNTriplesNegativeSyntax\",\n\n evaluate_negative_syntax_test,\n", "file_path": "testsuite/src/parser_evaluator.rs", "rank": 19, "score": 212900.6836138304 }, { "content": "pub fn register_sparql_tests(evaluator: &mut TestEvaluator) {\n\n evaluator.register(\n\n \"http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest\",\n\n evaluate_positive_syntax_test,\n\n );\n\n evaluator.register(\n\n \"http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest11\",\n\n evaluate_positive_syntax_test,\n\n );\n\n evaluator.register(\n\n \"http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#NegativeSyntaxTest\",\n\n evaluate_negative_syntax_test,\n\n );\n\n evaluator.register(\n\n \"http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#NegativeSyntaxTest11\",\n\n evaluate_negative_syntax_test,\n\n );\n\n evaluator.register(\n\n \"http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#QueryEvaluationTest\",\n\n evaluate_evaluation_test,\n", "file_path": "testsuite/src/sparql_evaluator.rs", "rank": 20, "score": 212900.6836138304 }, { "content": "#[wasm_bindgen(js_name = defaultGraph)]\n\npub fn default_graph() -> JsDefaultGraph {\n\n JsDefaultGraph {}\n\n}\n\n\n", "file_path": "js/src/model.rs", "rank": 21, "score": 210343.1015581863 }, { "content": "fn write_hexa_bytes(bytes: &[u8], buffer: &mut String) {\n\n for b in bytes {\n\n let high = b / 16;\n\n buffer.push(char::from(if high < 10 {\n\n b'0' + high\n\n } else {\n\n b'a' + (high - 10)\n\n }));\n\n let low = b % 16;\n\n buffer.push(char::from(if low < 10 {\n\n b'0' + low\n\n } else {\n\n b'a' + (low - 10)\n\n }));\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 22, "score": 208989.4525028415 }, { "content": "fn literal_repr(literal: LiteralRef<'_>, buffer: &mut String) {\n\n buffer.push_str(\"<Literal value=\");\n\n 
buffer.push_str(literal.value());\n\n if let Some(language) = literal.language() {\n\n buffer.push_str(\" language=\");\n\n buffer.push_str(language);\n\n } else {\n\n buffer.push_str(\" datatype=\");\n\n named_node_repr(literal.datatype(), buffer);\n\n }\n\n buffer.push('>');\n\n}\n\n\n", "file_path": "python/src/model.rs", "rank": 23, "score": 207309.6959130693 }, { "content": "pub fn dataset_diff(expected: &Dataset, actual: &Dataset) -> String {\n\n let (_, changeset) = diff(\n\n &normalize_dataset_text(expected),\n\n &normalize_dataset_text(actual),\n\n \"\\n\",\n\n );\n\n let mut ret = String::new();\n\n ret.push_str(\"Note: missing quads in yellow and extra quads in blue\\n\");\n\n for seq in changeset {\n\n match seq {\n\n Difference::Same(x) => {\n\n ret.push_str(&x);\n\n ret.push('\\n');\n\n }\n\n Difference::Add(x) => {\n\n ret.push_str(\"\\x1B[94m\");\n\n ret.push_str(&x);\n\n ret.push_str(\"\\x1B[0m\");\n\n ret.push('\\n');\n\n }\n", "file_path": "testsuite/src/report.rs", "rank": 24, "score": 206667.45739783562 }, { "content": "fn validate_blank_node_identifier(id: &str) -> Result<(), BlankNodeIdParseError> {\n\n let mut chars = id.chars();\n\n let front = chars.next().ok_or(BlankNodeIdParseError {})?;\n\n match front {\n\n '0'..='9'\n\n | '_'\n\n | ':'\n\n | 'A'..='Z'\n\n | 'a'..='z'\n\n | '\\u{00C0}'..='\\u{00D6}'\n\n | '\\u{00D8}'..='\\u{00F6}'\n\n | '\\u{00F8}'..='\\u{02FF}'\n\n | '\\u{0370}'..='\\u{037D}'\n\n | '\\u{037F}'..='\\u{1FFF}'\n\n | '\\u{200C}'..='\\u{200D}'\n\n | '\\u{2070}'..='\\u{218F}'\n\n | '\\u{2C00}'..='\\u{2FEF}'\n\n | '\\u{3001}'..='\\u{D7FF}'\n\n | '\\u{F900}'..='\\u{FDCF}'\n\n | '\\u{FDF0}'..='\\u{FFFD}'\n", "file_path": "lib/oxrdf/src/blank_node.rs", "rank": 25, "score": 204521.61830306397 }, { "content": "pub fn read_file(url: &str) -> Result<impl BufRead> {\n\n let mut path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.push(if url.starts_with(\"http://w3c.github.io/rdf-tests/\") {\n\n Ok(url.replace(\"http://w3c.github.io/rdf-tests/\", \"rdf-tests/\"))\n\n } else if url.starts_with(\"http://www.w3.org/2013/RDFXMLTests/\") {\n\n Ok(url.replace(\"http://www.w3.org/2013/RDFXMLTests/\", \"rdf-tests/rdf-xml/\"))\n\n } else if url.starts_with(\"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/\") {\n\n Ok(url.replace(\n\n \"http://www.w3.org/2001/sw/DataAccess/tests/\",\n\n \"rdf-tests/sparql11/\",\n\n ))\n\n } else if url.starts_with(\"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/\") {\n\n Ok(url.replace(\n\n \"http://www.w3.org/2009/sparql/docs/tests/\",\n\n \"rdf-tests/sparql11/\",\n\n ))\n\n } else if url.starts_with(\"https://w3c.github.io/rdf-star/\") {\n\n Ok(url.replace(\"https://w3c.github.io/\", \"\"))\n\n } else if url.starts_with(\"https://github.com/oxigraph/oxigraph/tests/\") {\n\n Ok(url.replace(\n\n \"https://github.com/oxigraph/oxigraph/tests/\",\n\n \"oxigraph-tests/\",\n\n ))\n\n } else {\n\n Err(anyhow!(\"Not supported url for file: {}\", url))\n\n }?);\n\n Ok(BufReader::new(File::open(&path)?))\n\n}\n\n\n", "file_path": "testsuite/src/files.rs", "rank": 26, "score": 204142.31952573394 }, { "content": "#[wasm_bindgen(js_name = variable)]\n\npub fn variable(value: String) -> Result<JsVariable, JsValue> {\n\n Ok(Variable::new(value).map_err(to_err)?.into())\n\n}\n\n\n", "file_path": "js/src/model.rs", "rank": 27, "score": 204047.42397898392 }, { "content": "fn graph_name_repr(term: GraphNameRef<'_>, buffer: &mut String) {\n\n match term {\n\n GraphNameRef::NamedNode(node) => named_node_repr(node, buffer),\n\n 
GraphNameRef::BlankNode(node) => blank_node_repr(node, buffer),\n\n GraphNameRef::DefaultGraph => buffer.push_str(\"<DefaultGraph>\"),\n\n }\n\n}\n\n\n", "file_path": "python/src/model.rs", "rank": 28, "score": 202168.1698152223 }, { "content": "pub fn load_to_graph(url: &str, graph: &mut Graph) -> Result<()> {\n\n let format = url\n\n .rsplit_once('.')\n\n .and_then(|(_, extension)| GraphFormat::from_extension(extension))\n\n .ok_or_else(|| anyhow!(\"Serialization type not found for {}\", url))?;\n\n let parser = GraphParser::from_format(format).with_base_iri(url)?;\n\n for t in parser.read_triples(read_file(url)?)? {\n\n graph.insert(&t?);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "testsuite/src/files.rs", "rank": 29, "score": 200039.23641675478 }, { "content": "#[inline]\n\nfn to_integer_id(id: &str) -> Option<u128> {\n\n let digits = id.as_bytes();\n\n let mut value: u128 = 0;\n\n if let None | Some(b'0') = digits.first() {\n\n return None; // No empty string or leading zeros\n\n }\n\n for digit in digits {\n\n value = value.checked_mul(16)?.checked_add(\n\n match *digit {\n\n b'0'..=b'9' => digit - b'0',\n\n b'a'..=b'f' => digit - b'a' + 10,\n\n _ => return None,\n\n }\n\n .into(),\n\n )?;\n\n }\n\n Some(value)\n\n}\n\n\n\n/// An error raised during [`BlankNode`] IDs validation.\n", "file_path": "lib/oxrdf/src/blank_node.rs", "rank": 30, "score": 198775.5996495739 }, { "content": "fn write_csv_term<'a>(term: impl Into<TermRef<'a>>, sink: &mut impl Write) -> io::Result<()> {\n\n match term.into() {\n\n TermRef::NamedNode(uri) => sink.write_all(uri.as_str().as_bytes()),\n\n TermRef::BlankNode(bnode) => {\n\n sink.write_all(b\"_:\")?;\n\n sink.write_all(bnode.as_str().as_bytes())\n\n }\n\n TermRef::Literal(literal) => write_escaped_csv_string(literal.value(), sink),\n\n #[cfg(feature = \"rdf-star\")]\n\n TermRef::Triple(triple) => {\n\n write_csv_term(&triple.subject, sink)?;\n\n sink.write_all(b\" \")?;\n\n write_csv_term(&triple.predicate, sink)?;\n\n sink.write_all(b\" \")?;\n\n write_csv_term(&triple.object, sink)\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/sparesults/src/csv.rs", "rank": 31, "score": 197713.97273307474 }, { "content": "fn write_tsv_term<'a>(term: impl Into<TermRef<'a>>, sink: &mut impl Write) -> io::Result<()> {\n\n //TODO: full Turtle serialization\n\n match term.into() {\n\n TermRef::NamedNode(node) => write!(sink, \"<{}>\", node.as_str()),\n\n TermRef::BlankNode(node) => write!(sink, \"_:{}\", node.as_str()),\n\n TermRef::Literal(literal) => match literal.datatype() {\n\n xsd::BOOLEAN => match literal.value() {\n\n \"true\" | \"1\" => sink.write_all(b\"true\"),\n\n \"false\" | \"0\" => sink.write_all(b\"false\"),\n\n _ => sink.write_all(literal.to_string().as_bytes()),\n\n },\n\n xsd::INTEGER => {\n\n if literal.value().bytes().all(|c| matches!(c, b'0'..=b'9')) {\n\n sink.write_all(literal.value().as_bytes())\n\n } else {\n\n sink.write_all(literal.to_string().as_bytes())\n\n }\n\n }\n\n _ => sink.write_all(literal.to_string().as_bytes()),\n\n },\n", "file_path": "lib/sparesults/src/csv.rs", "rank": 32, "score": 197713.97273307474 }, { "content": "pub fn write_term(sink: &mut Vec<u8>, term: &EncodedTerm) {\n\n match term {\n\n EncodedTerm::DefaultGraph => (),\n\n EncodedTerm::NamedNode { iri_id } => {\n\n sink.push(TYPE_NAMED_NODE_ID);\n\n sink.extend_from_slice(&iri_id.to_be_bytes());\n\n }\n\n EncodedTerm::NumericalBlankNode { id } => {\n\n sink.push(TYPE_NUMERICAL_BLANK_NODE_ID);\n\n sink.extend_from_slice(&id.to_be_bytes())\n\n }\n\n 
EncodedTerm::SmallBlankNode(id) => {\n\n sink.push(TYPE_SMALL_BLANK_NODE_ID);\n\n sink.extend_from_slice(&id.to_be_bytes())\n\n }\n\n EncodedTerm::BigBlankNode { id_id } => {\n\n sink.push(TYPE_BIG_BLANK_NODE_ID);\n\n sink.extend_from_slice(&id_id.to_be_bytes());\n\n }\n\n EncodedTerm::SmallStringLiteral(value) => {\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 33, "score": 192773.41374243586 }, { "content": "pub fn write_posg_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.predicate);\n\n write_term(sink, &quad.object);\n\n write_term(sink, &quad.subject);\n\n write_term(sink, &quad.graph_name);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 34, "score": 190531.55144607255 }, { "content": "pub fn write_spog_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.subject);\n\n write_term(sink, &quad.predicate);\n\n write_term(sink, &quad.object);\n\n write_term(sink, &quad.graph_name);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 35, "score": 190531.55144607255 }, { "content": "pub fn write_gosp_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.graph_name);\n\n write_term(sink, &quad.object);\n\n write_term(sink, &quad.subject);\n\n write_term(sink, &quad.predicate);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 36, "score": 190531.55144607255 }, { "content": "pub fn write_gpos_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.graph_name);\n\n write_term(sink, &quad.predicate);\n\n write_term(sink, &quad.object);\n\n write_term(sink, &quad.subject);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 37, "score": 190531.55144607255 }, { "content": "pub fn write_osp_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.object);\n\n write_term(sink, &quad.subject);\n\n write_term(sink, &quad.predicate);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 38, "score": 190531.55144607255 }, { "content": "pub fn write_spo_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.subject);\n\n write_term(sink, &quad.predicate);\n\n write_term(sink, &quad.object);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 39, "score": 190531.55144607255 }, { "content": "pub fn write_gspo_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.graph_name);\n\n write_term(sink, &quad.subject);\n\n write_term(sink, &quad.predicate);\n\n write_term(sink, &quad.object);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 40, "score": 190531.55144607255 }, { "content": "pub fn write_pos_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.predicate);\n\n write_term(sink, &quad.object);\n\n write_term(sink, &quad.subject);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 41, "score": 190531.55144607255 }, { "content": "pub fn write_ospg_quad(sink: &mut Vec<u8>, quad: &EncodedQuad) {\n\n write_term(sink, &quad.object);\n\n write_term(sink, &quad.subject);\n\n write_term(sink, &quad.predicate);\n\n write_term(sink, &quad.graph_name);\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 42, "score": 190531.55144607255 }, { "content": "fn copy_graph(from: impl Into<GraphName>, to: impl Into<GraphNamePattern>) -> GraphUpdateOperation {\n\n let bgp = GraphPattern::Bgp {\n\n patterns: vec![TriplePattern::new(\n\n Variable::new_unchecked(\"s\"),\n\n 
Variable::new_unchecked(\"p\"),\n\n Variable::new_unchecked(\"o\"),\n\n )],\n\n };\n\n GraphUpdateOperation::DeleteInsert {\n\n delete: Vec::new(),\n\n insert: vec![QuadPattern::new(\n\n Variable::new_unchecked(\"s\"),\n\n Variable::new_unchecked(\"p\"),\n\n Variable::new_unchecked(\"o\"),\n\n to,\n\n )],\n\n using: None,\n\n pattern: Box::new(match from.into() {\n\n GraphName::NamedNode(from) => GraphPattern::Graph {\n\n name: from.into(),\n\n inner: Box::new(bgp),\n\n },\n\n GraphName::DefaultGraph => bgp,\n\n }),\n\n }\n\n}\n\n\n", "file_path": "lib/spargebra/src/parser.rs", "rank": 43, "score": 184899.0794644289 }, { "content": "fn bnode_key(blank_nodes: &mut Vec<BlankNode>, blank_node: &BlankNode) -> usize {\n\n match slice_key(blank_nodes, blank_node) {\n\n Some(key) => key,\n\n None => {\n\n blank_nodes.push(blank_node.clone());\n\n blank_nodes.len() - 1\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/plan_builder.rs", "rank": 44, "score": 184773.5276041075 }, { "content": "fn internal_server_error(message: impl fmt::Display) -> Response {\n\n eprintln!(\"Internal server error: {}\", message);\n\n error(Status::INTERNAL_SERVER_ERROR, message)\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 45, "score": 183328.60863547513 }, { "content": "#[derive(Eq, PartialEq, Clone, Copy)]\n\nenum SmallStringOrId {\n\n Small(SmallString),\n\n Big(StrHash),\n\n}\n\n\n\nimpl From<SmallString> for SmallStringOrId {\n\n fn from(value: SmallString) -> Self {\n\n Self::Small(value)\n\n }\n\n}\n\n\n\nimpl From<StrHash> for SmallStringOrId {\n\n fn from(value: StrHash) -> Self {\n\n Self::Big(value)\n\n }\n\n}\n\n\n\npub enum ComparatorFunction {\n\n Asc(Rc<dyn Fn(&EncodedTuple) -> Option<EncodedTerm>>),\n\n Desc(Rc<dyn Fn(&EncodedTuple) -> Option<EncodedTerm>>),\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 46, "score": 182698.7368190525 }, { "content": "fn transitive_closure<T: Clone + Eq + Hash, NI: Iterator<Item = Result<T, EvaluationError>>>(\n\n start: impl IntoIterator<Item = Result<T, EvaluationError>>,\n\n next: impl Fn(T) -> NI,\n\n) -> impl Iterator<Item = Result<T, EvaluationError>> {\n\n //TODO: optimize\n\n let mut all = HashSet::<T>::default();\n\n let mut errors = Vec::default();\n\n let mut current = start\n\n .into_iter()\n\n .filter_map(|e| match e {\n\n Ok(e) => {\n\n all.insert(e.clone());\n\n Some(e)\n\n }\n\n Err(error) => {\n\n errors.push(error);\n\n None\n\n }\n\n })\n\n .collect::<Vec<_>>();\n", "file_path": "lib/src/sparql/eval.rs", "rank": 47, "score": 180198.74880240814 }, { "content": "fn write_json_term(term: TermRef<'_>, writer: &mut JsonWriter<impl Write>) -> io::Result<()> {\n\n match term {\n\n TermRef::NamedNode(uri) => {\n\n writer.write_event(JsonEvent::StartObject)?;\n\n writer.write_event(JsonEvent::ObjectKey(\"type\"))?;\n\n writer.write_event(JsonEvent::String(\"uri\"))?;\n\n writer.write_event(JsonEvent::ObjectKey(\"value\"))?;\n\n writer.write_event(JsonEvent::String(uri.as_str()))?;\n\n writer.write_event(JsonEvent::EndObject)?;\n\n }\n\n TermRef::BlankNode(bnode) => {\n\n writer.write_event(JsonEvent::StartObject)?;\n\n writer.write_event(JsonEvent::ObjectKey(\"type\"))?;\n\n writer.write_event(JsonEvent::String(\"bnode\"))?;\n\n writer.write_event(JsonEvent::ObjectKey(\"value\"))?;\n\n writer.write_event(JsonEvent::String(bnode.as_str()))?;\n\n writer.write_event(JsonEvent::EndObject)?;\n\n }\n\n TermRef::Literal(literal) => {\n\n writer.write_event(JsonEvent::StartObject)?;\n", "file_path": "lib/sparesults/src/json.rs", "rank": 
48, "score": 177570.02552590385 }, { "content": "#[inline]\n\nfn fmt_sse_unary_expression(f: &mut impl fmt::Write, name: &str, e: &Expression) -> fmt::Result {\n\n write!(f, \"({} \", name)?;\n\n e.fmt_sse(f)?;\n\n write!(f, \")\")\n\n}\n\n\n", "file_path": "lib/spargebra/src/algebra.rs", "rank": 49, "score": 176999.56682445947 }, { "content": "#[derive(PartialEq, Eq, Debug, Clone, Hash)]\n\nstruct IdStr([u8; 32]);\n\n\n\nimpl IdStr {\n\n #[inline]\n\n fn new(id: u128) -> Self {\n\n let mut str = [0; 32];\n\n write!(&mut str[..], \"{:x}\", id).unwrap();\n\n Self(str)\n\n }\n\n\n\n #[inline]\n\n fn as_str(&self) -> &str {\n\n let len = self.0.iter().position(|x| x == &0).unwrap_or(32);\n\n str::from_utf8(&self.0[..len]).unwrap()\n\n }\n\n}\n\n\n", "file_path": "lib/oxrdf/src/blank_node.rs", "rank": 50, "score": 174089.72715574078 }, { "content": "fn to_simple_string_id(term: &EncodedTerm) -> Option<SmallStringOrId> {\n\n match term {\n\n EncodedTerm::SmallStringLiteral(value) => Some((*value).into()),\n\n EncodedTerm::BigStringLiteral { value_id } => Some((*value_id).into()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 51, "score": 173711.53141273826 }, { "content": "pub fn write_boolean_csv_result<W: Write>(mut sink: W, value: bool) -> io::Result<W> {\n\n sink.write_all(if value { b\"true\" } else { b\"false\" })?;\n\n Ok(sink)\n\n}\n\n\n\npub struct CsvSolutionsWriter<W: Write> {\n\n sink: W,\n\n variables: Vec<Variable>,\n\n}\n\n\n\nimpl<W: Write> CsvSolutionsWriter<W> {\n\n pub fn start(mut sink: W, variables: Vec<Variable>) -> io::Result<Self> {\n\n let mut start_vars = true;\n\n for variable in &variables {\n\n if start_vars {\n\n start_vars = false;\n\n } else {\n\n sink.write_all(b\",\")?;\n\n }\n\n sink.write_all(variable.as_str().as_bytes())?;\n", "file_path": "lib/sparesults/src/csv.rs", "rank": 52, "score": 172167.6733503336 }, { "content": "pub fn write_boolean_tsv_result<W: Write>(mut sink: W, value: bool) -> io::Result<W> {\n\n sink.write_all(if value { b\"true\" } else { b\"false\" })?;\n\n Ok(sink)\n\n}\n\n\n\npub struct TsvSolutionsWriter<W: Write> {\n\n sink: W,\n\n variables: Vec<Variable>,\n\n}\n\n\n\nimpl<W: Write> TsvSolutionsWriter<W> {\n\n pub fn start(mut sink: W, variables: Vec<Variable>) -> io::Result<Self> {\n\n let mut start_vars = true;\n\n for variable in &variables {\n\n if start_vars {\n\n start_vars = false;\n\n } else {\n\n sink.write_all(b\"\\t\")?;\n\n }\n\n sink.write_all(b\"?\")?;\n", "file_path": "lib/sparesults/src/csv.rs", "rank": 53, "score": 172167.6733503336 }, { "content": "fn build_string_id(dataset: &DatasetView, value: &str) -> SmallStringOrId {\n\n if let Ok(value) = SmallString::try_from(value) {\n\n value.into()\n\n } else {\n\n let id = StrHash::new(value);\n\n dataset.insert_str(&id, value);\n\n SmallStringOrId::Big(id)\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 54, "score": 170177.76026723132 }, { "content": "fn build_lang_string_literal_from_id(\n\n value_id: SmallStringOrId,\n\n language_id: SmallStringOrId,\n\n) -> EncodedTerm {\n\n match (value_id, language_id) {\n\n (SmallStringOrId::Small(value), SmallStringOrId::Small(language)) => {\n\n EncodedTerm::SmallSmallLangStringLiteral { value, language }\n\n }\n\n (SmallStringOrId::Small(value), SmallStringOrId::Big(language_id)) => {\n\n EncodedTerm::SmallBigLangStringLiteral { value, language_id }\n\n }\n\n (SmallStringOrId::Big(value_id), SmallStringOrId::Small(language)) => {\n\n EncodedTerm::BigSmallLangStringLiteral { 
value_id, language }\n\n }\n\n (SmallStringOrId::Big(value_id), SmallStringOrId::Big(language_id)) => {\n\n EncodedTerm::BigBigLangStringLiteral {\n\n value_id,\n\n language_id,\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 55, "score": 169547.9164891656 }, { "content": "fn to_string_id(dataset: &DatasetView, term: &EncodedTerm) -> Option<SmallStringOrId> {\n\n match term {\n\n EncodedTerm::NamedNode { iri_id } => Some((*iri_id).into()),\n\n EncodedTerm::DefaultGraph\n\n | EncodedTerm::NumericalBlankNode { .. }\n\n | EncodedTerm::SmallBlankNode { .. }\n\n | EncodedTerm::BigBlankNode { .. }\n\n | EncodedTerm::Triple(_) => None,\n\n EncodedTerm::SmallStringLiteral(value)\n\n | EncodedTerm::SmallSmallLangStringLiteral { value, .. }\n\n | EncodedTerm::SmallBigLangStringLiteral { value, .. }\n\n | EncodedTerm::SmallTypedLiteral { value, .. } => Some((*value).into()),\n\n EncodedTerm::BigStringLiteral { value_id }\n\n | EncodedTerm::BigSmallLangStringLiteral { value_id, .. }\n\n | EncodedTerm::BigBigLangStringLiteral { value_id, .. }\n\n | EncodedTerm::BigTypedLiteral { value_id, .. } => Some((*value_id).into()),\n\n EncodedTerm::BooleanLiteral(value) => Some(build_string_id(\n\n dataset,\n\n if *value { \"true\" } else { \"false\" },\n\n )),\n", "file_path": "lib/src/sparql/eval.rs", "rank": 56, "score": 166856.76850853785 }, { "content": "#[pyfunction]\n\n#[pyo3(text_signature = \"(input, /, mime_type, *, base_iri = None)\")]\n\npub fn parse(\n\n input: PyObject,\n\n mime_type: &str,\n\n base_iri: Option<&str>,\n\n py: Python<'_>,\n\n) -> PyResult<PyObject> {\n\n let input = PyFileLike::open(input, py).map_err(map_io_err)?;\n\n if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {\n\n let mut parser = GraphParser::from_format(graph_format);\n\n if let Some(base_iri) = base_iri {\n\n parser = parser\n\n .with_base_iri(base_iri)\n\n .map_err(|e| PyValueError::new_err(e.to_string()))?;\n\n }\n\n Ok(PyTripleReader {\n\n inner: py.allow_threads(|| parser.read_triples(input).map_err(map_parse_error))?,\n\n }\n\n .into_py(py))\n\n } else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) {\n\n let mut parser = DatasetParser::from_format(dataset_format);\n", "file_path": "python/src/io.rs", "rank": 57, "score": 165713.73859904875 }, { "content": "#[wasm_bindgen]\n\npub fn literal(\n\n value: Option<String>,\n\n language_or_datatype: &JsValue,\n\n) -> Result<JsLiteral, JsValue> {\n\n if language_or_datatype.is_null() || language_or_datatype.is_undefined() {\n\n Ok(Literal::new_simple_literal(value.unwrap_or_default()).into())\n\n } else if language_or_datatype.is_string() {\n\n Ok(Literal::new_language_tagged_literal(\n\n value.unwrap_or_default(),\n\n language_or_datatype.as_string().unwrap_or_default(),\n\n )\n\n .map_err(to_err)?\n\n .into())\n\n } else if let JsTerm::NamedNode(datatype) = FROM_JS.with(|c| c.to_term(language_or_datatype))? 
{\n\n Ok(Literal::new_typed_literal(value.unwrap_or_default(), datatype).into())\n\n } else {\n\n Err(format_err!(\"The literal datatype should be a NamedNode\"))\n\n }\n\n}\n\n\n", "file_path": "js/src/model.rs", "rank": 58, "score": 165713.73859904875 }, { "content": "#[wasm_bindgen(start)]\n\npub fn main() {\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "js/src/lib.rs", "rank": 59, "score": 165713.73859904875 }, { "content": "#[wasm_bindgen(js_name = quad)]\n\npub fn quad(\n\n subject: &JsValue,\n\n predicate: &JsValue,\n\n object: &JsValue,\n\n graph: &JsValue,\n\n) -> Result<JsQuad, JsValue> {\n\n Ok(FROM_JS\n\n .with(|c| c.to_quad_from_parts(subject, predicate, object, graph))?\n\n .into())\n\n}\n\n\n", "file_path": "js/src/model.rs", "rank": 60, "score": 165713.73859904875 }, { "content": "pub fn parse_query(\n\n query: &str,\n\n base_iri: Option<&str>,\n\n use_default_graph_as_union: bool,\n\n default_graph: Option<&PyAny>,\n\n named_graphs: Option<&PyAny>,\n\n) -> PyResult<Query> {\n\n let mut query = allow_threads_unsafe(|| Query::parse(query, base_iri))\n\n .map_err(|e| map_evaluation_error(e.into()))?;\n\n\n\n if use_default_graph_as_union && default_graph.is_some() {\n\n return Err(PyValueError::new_err(\n\n \"The query() method use_default_graph_as_union and default_graph arguments should not be set at the same time\",\n\n ));\n\n }\n\n\n\n if use_default_graph_as_union {\n\n query.dataset_mut().set_default_graph_as_union();\n\n }\n\n\n", "file_path": "python/src/sparql.rs", "rank": 61, "score": 163455.9476685096 }, { "content": "pub fn load_to_dataset<'a>(\n\n url: &str,\n\n dataset: &mut Dataset,\n\n to_graph_name: impl Into<GraphNameRef<'a>>,\n\n) -> Result<()> {\n\n let to_graph_name = to_graph_name.into();\n\n let extension = url.rsplit_once('.').map(|(_, ext)| ext);\n\n if let Some(format) = extension.and_then(GraphFormat::from_extension) {\n\n let parser = GraphParser::from_format(format).with_base_iri(url)?;\n\n for t in parser.read_triples(read_file(url)?)? {\n\n dataset.insert(&t?.in_graph(to_graph_name));\n\n }\n\n Ok(())\n\n } else if let Some(format) = extension.and_then(DatasetFormat::from_extension) {\n\n let parser = DatasetParser::from_format(format).with_base_iri(url)?;\n\n for q in parser.read_quads(read_file(url)?)? {\n\n dataset.insert(&q?);\n\n }\n\n Ok(())\n\n } else {\n\n Err(anyhow!(\"Serialization type not found for {}\", url))\n\n }\n\n}\n\n\n", "file_path": "testsuite/src/files.rs", "rank": 62, "score": 158950.66739658755 }, { "content": "pub fn load_to_store<'a>(\n\n url: &str,\n\n store: &Store,\n\n to_graph_name: impl Into<GraphNameRef<'a>>,\n\n) -> Result<()> {\n\n if url.ends_with(\".nt\") {\n\n store.load_graph(\n\n read_file(url)?,\n\n GraphFormat::NTriples,\n\n to_graph_name,\n\n Some(url),\n\n )?\n\n } else if url.ends_with(\".ttl\") {\n\n store.load_graph(\n\n read_file(url)?,\n\n GraphFormat::Turtle,\n\n to_graph_name,\n\n Some(url),\n\n )?\n\n } else if url.ends_with(\".rdf\") {\n", "file_path": "testsuite/src/files.rs", "rank": 63, "score": 158950.66739658755 }, { "content": "struct UnsafeEnv(*mut rocksdb_env_t);\n\n\n\n// Hack for lazy_static. 
OK because only written in lazy static and used in a thread-safe way by RocksDB\n\nunsafe impl Sync for UnsafeEnv {}\n\n\n", "file_path": "lib/src/storage/backend/rocksdb.rs", "rank": 64, "score": 157644.13551865087 }, { "content": "pub fn encode_term_quad(\n\n t1: &EncodedTerm,\n\n t2: &EncodedTerm,\n\n t3: &EncodedTerm,\n\n t4: &EncodedTerm,\n\n) -> Vec<u8> {\n\n let mut vec = Vec::with_capacity(4 * WRITTEN_TERM_MAX_SIZE);\n\n write_term(&mut vec, t1);\n\n write_term(&mut vec, t2);\n\n write_term(&mut vec, t3);\n\n write_term(&mut vec, t4);\n\n vec\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 65, "score": 157261.52756187343 }, { "content": "pub fn extract_quads_pattern<'a>(\n\n subject: &'a PyAny,\n\n predicate: &'a PyAny,\n\n object: &'a PyAny,\n\n graph_name: Option<&'a PyAny>,\n\n) -> PyResult<(\n\n Option<PySubjectRef<'a>>,\n\n Option<PyNamedNodeRef<'a>>,\n\n Option<PyTermRef<'a>>,\n\n Option<PyGraphNameRef<'a>>,\n\n)> {\n\n Ok((\n\n if subject.is_none() {\n\n None\n\n } else {\n\n Some(TryFrom::try_from(subject)?)\n\n },\n\n if predicate.is_none() {\n\n None\n\n } else {\n", "file_path": "python/src/store.rs", "rank": 66, "score": 156793.67651210236 }, { "content": "pub fn unescape_characters<'a>(\n\n input: &'a str,\n\n characters: &'static [u8],\n\n replacement: &'static StaticCharSliceMap,\n\n) -> Cow<'a, str> {\n\n if needs_unescape_characters(input, characters) {\n\n UnescapeCharsIterator::new(input, replacement).collect()\n\n } else {\n\n input.into()\n\n }\n\n}\n\n\n", "file_path": "lib/spargebra/src/parser.rs", "rank": 67, "score": 156793.67651210236 }, { "content": "fn store_load(c: &mut Criterion) {\n\n {\n\n let mut data = Vec::new();\n\n read_data(\"explore-1000.nt.zst\")\n\n .read_to_end(&mut data)\n\n .unwrap();\n\n\n\n let mut group = c.benchmark_group(\"store load\");\n\n group.throughput(Throughput::Bytes(data.len() as u64));\n\n group.sample_size(10);\n\n group.bench_function(\"load BSBM explore 1000 in memory\", |b| {\n\n b.iter(|| {\n\n let store = Store::new().unwrap();\n\n do_load(&store, &data);\n\n })\n\n });\n\n group.bench_function(\"load BSBM explore 1000 in on disk\", |b| {\n\n b.iter(|| {\n\n let path = TempDir::default();\n\n let store = Store::open(&path.0).unwrap();\n", "file_path": "lib/benches/store.rs", "rank": 68, "score": 153609.6660744248 }, { "content": "#[wasm_bindgen(js_name = triple)]\n\npub fn triple(subject: &JsValue, predicate: &JsValue, object: &JsValue) -> Result<JsQuad, JsValue> {\n\n quad(subject, predicate, object, &JsValue::UNDEFINED)\n\n}\n\n\n", "file_path": "js/src/model.rs", "rank": 69, "score": 152534.18470728645 }, { "content": "pub fn encode_term_triple(t1: &EncodedTerm, t2: &EncodedTerm, t3: &EncodedTerm) -> Vec<u8> {\n\n let mut vec = Vec::with_capacity(3 * WRITTEN_TERM_MAX_SIZE);\n\n write_term(&mut vec, t1);\n\n write_term(&mut vec, t2);\n\n write_term(&mut vec, t3);\n\n vec\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 70, "score": 152444.57467962685 }, { "content": "fn build_language_id(dataset: &DatasetView, value: &EncodedTerm) -> Option<SmallStringOrId> {\n\n let mut language = to_simple_string(dataset, value)?;\n\n language.make_ascii_lowercase();\n\n Some(build_string_id(\n\n dataset,\n\n LanguageTag::parse(language).ok()?.as_str(),\n\n ))\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 71, "score": 151969.48348893732 }, { "content": "fn compare_str_ids(dataset: &DatasetView, a: &StrHash, b: &StrHash) -> Option<Ordering> {\n\n 
Some(dataset.get_str(a).ok()??.cmp(&dataset.get_str(b).ok()??))\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 72, "score": 151959.3831197027 }, { "content": "fn store_query_and_update(c: &mut Criterion) {\n\n let mut data = Vec::new();\n\n read_data(\"explore-1000.nt.zst\")\n\n .read_to_end(&mut data)\n\n .unwrap();\n\n\n\n let operations = read_data(\"mix-exploreAndUpdate-1000.tsv.zst\")\n\n .lines()\n\n .map(|l| {\n\n let l = l.unwrap();\n\n let mut parts = l.trim().split('\\t');\n\n let kind = parts.next().unwrap();\n\n let operation = parts.next().unwrap();\n\n match kind {\n\n \"query\" => Operation::Query(Query::parse(operation, None).unwrap()),\n\n \"update\" => Operation::Update(Update::parse(operation, None).unwrap()),\n\n _ => panic!(\"Unexpected operation kind {}\", kind),\n\n }\n\n })\n\n .collect::<Vec<_>>();\n", "file_path": "lib/benches/store.rs", "rank": 73, "score": 151544.08402670774 }, { "content": "pub fn parse_value<'a, T>(\n\n mut f: impl FnMut(&'a str) -> XsdResult<'a, T>,\n\n input: &'a str,\n\n) -> Result<T, XsdParseError> {\n\n let (left, result) = f(input)?;\n\n if left.is_empty() {\n\n Ok(result)\n\n } else {\n\n Err(XsdParseError {\n\n kind: XsdParseErrorKind::TooMuchData { count: left.len() },\n\n })\n\n }\n\n}\n\n\n\n//TODO: check every computation\n\n\n", "file_path": "lib/src/xsd/parser.rs", "rank": 74, "score": 150794.69044083662 }, { "content": "fn normalize_dataset_text(store: &Dataset) -> String {\n\n let mut quads: Vec<_> = store.iter().map(|q| q.to_string()).collect();\n\n quads.sort();\n\n quads.join(\"\\n\")\n\n}\n\n\n", "file_path": "testsuite/src/report.rs", "rank": 75, "score": 149779.17463548292 }, { "content": "pub fn evaluate_update<'a, 'b: 'a>(\n\n transaction: &'a mut StorageWriter<'b>,\n\n update: &Update,\n\n options: &UpdateOptions,\n\n) -> Result<(), EvaluationError> {\n\n SimpleUpdateEvaluator {\n\n transaction,\n\n base_iri: update.inner.base_iri.clone().map(Rc::new),\n\n options: options.clone(),\n\n client: Client::new(options.query_options.http_timeout),\n\n }\n\n .eval_all(&update.inner.operations, &update.using_datasets)\n\n}\n\n\n", "file_path": "lib/src/sparql/update.rs", "rank": 76, "score": 149388.95844141865 }, { "content": "fn encode_named_node(dataset: &DatasetView, node: NamedNodeRef<'_>) -> EncodedTerm {\n\n dataset.encode_term(node)\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 77, "score": 147898.20103407186 }, { "content": "fn sparql_w3c_syntax_bench(c: &mut Criterion) {\n\n let manifest_urls = vec![\n\n \"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/manifest-syntax.ttl\",\n\n \"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/manifest-sparql11-query.ttl\",\n\n ];\n\n let queries: Vec<_> = TestManifest::new(manifest_urls)\n\n .flat_map(|test| {\n\n let test = test.unwrap();\n\n if test.kind == \"http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest\"\n\n || test.kind\n\n == \"http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest11\" {\n\n test.action.map(|query| (read_file_to_string(&query).unwrap(), query))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n\n\n\n c.bench_function(\"query parser\", |b| {\n\n b.iter(|| {\n\n for (query, base) in &queries {\n\n Query::parse(query, Some(base)).unwrap();\n\n }\n\n })\n\n });\n\n}\n", "file_path": "testsuite/benches/sparql_query.rs", "rank": 78, "score": 147672.21201526772 }, { "content": "pub fn main() -> std::io::Result<()> {\n\n let matches = Args::parse();\n\n let store = if let 
Some(path) = &matches.location {\n\n Store::open(path)\n\n } else {\n\n Store::new()\n\n }?;\n\n\n\n match matches.command {\n\n Command::Load { file, lenient } => {\n\n let handles = file\n\n .iter()\n\n .map(|file| {\n\n let store = store.clone();\n\n let file = file.to_string();\n\n spawn(move || {\n\n let f = file.clone();\n\n let start = Instant::now();\n\n let mut loader = store.bulk_loader().on_progress(move |size| {\n\n let elapsed = start.elapsed();\n", "file_path": "server/src/main.rs", "rank": 79, "score": 147326.16460772688 }, { "content": "fn content_type(request: &Request) -> Option<String> {\n\n let value = request.header(&HeaderName::CONTENT_TYPE)?.to_str().ok()?;\n\n Some(\n\n value\n\n .split_once(';')\n\n .map_or(value, |(b, _)| b)\n\n .trim()\n\n .to_ascii_lowercase(),\n\n )\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 80, "score": 146259.582701238 }, { "content": "fn to_string(dataset: &DatasetView, term: &EncodedTerm) -> Option<String> {\n\n match term {\n\n EncodedTerm::SmallStringLiteral(value)\n\n | EncodedTerm::SmallSmallLangStringLiteral { value, .. }\n\n | EncodedTerm::SmallBigLangStringLiteral { value, .. } => Some((*value).into()),\n\n EncodedTerm::BigStringLiteral { value_id }\n\n | EncodedTerm::BigSmallLangStringLiteral { value_id, .. }\n\n | EncodedTerm::BigBigLangStringLiteral { value_id, .. } => {\n\n dataset.get_str(value_id).ok()?\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 81, "score": 145690.99311383592 }, { "content": "#[derive(PartialEq, Eq, Debug, Clone, Hash)]\n\nenum BlankNodeContent {\n\n Named(String),\n\n Anonymous { id: u128, str: IdStr },\n\n}\n\n\n\nimpl BlankNode {\n\n /// Creates a blank node from a unique identifier.\n\n ///\n\n /// The blank node identifier must be valid according to N-Triples, Turtle, and SPARQL grammars.\n\n ///\n\n /// In most cases, it is much more convenient to create a blank node using [`BlankNode::default()`]\n\n ///that creates a random ID that could be easily inlined by Oxigraph stores.\n\n pub fn new(id: impl Into<String>) -> Result<Self, BlankNodeIdParseError> {\n\n let id = id.into();\n\n validate_blank_node_identifier(&id)?;\n\n Ok(Self::new_unchecked(id))\n\n }\n\n\n\n /// Creates a blank node from a unique identifier without validation.\n\n ///\n", "file_path": "lib/oxrdf/src/blank_node.rs", "rank": 82, "score": 144506.50009486958 }, { "content": "fn read_data(file: &str) -> impl BufRead {\n\n if !Path::new(file).exists() {\n\n let mut client = oxhttp::Client::new();\n\n client.set_redirection_limit(5);\n\n let url = format!(\n\n \"https://github.com/Tpt/bsbm-tools/releases/download/v0.2/{}\",\n\n file\n\n );\n\n let request = Request::builder(Method::GET, url.parse().unwrap()).build();\n\n let response = client.request(request).unwrap();\n\n assert_eq!(\n\n response.status(),\n\n Status::OK,\n\n \"{}\",\n\n response.into_body().to_string().unwrap()\n\n );\n\n std::io::copy(&mut response.into_body(), &mut File::create(file).unwrap()).unwrap();\n\n }\n\n BufReader::new(zstd::Decoder::new(File::open(file).unwrap()).unwrap())\n\n}\n\n\n", "file_path": "lib/benches/store.rs", "rank": 83, "score": 144465.8425964408 }, { "content": "fn to_simple_string(dataset: &DatasetView, term: &EncodedTerm) -> Option<String> {\n\n match term {\n\n EncodedTerm::SmallStringLiteral(value) => Some((*value).into()),\n\n EncodedTerm::BigStringLiteral { value_id } => dataset.get_str(value_id).ok()?,\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", 
"rank": 84, "score": 144205.10339479375 }, { "content": "pub fn add_to_module(module: &PyModule) -> PyResult<()> {\n\n module.add_wrapped(wrap_pyfunction!(parse))?;\n\n module.add_wrapped(wrap_pyfunction!(serialize))\n\n}\n\n\n\n/// Parses RDF graph and dataset serialization formats.\n\n///\n\n/// It currently supports the following formats:\n\n///\n\n/// * `N-Triples <https://www.w3.org/TR/n-triples/>`_ (``application/n-triples``)\n\n/// * `N-Quads <https://www.w3.org/TR/n-quads/>`_ (``application/n-quads``)\n\n/// * `Turtle <https://www.w3.org/TR/turtle/>`_ (``text/turtle``)\n\n/// * `TriG <https://www.w3.org/TR/trig/>`_ (``application/trig``)\n\n/// * `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_ (``application/rdf+xml``)\n\n///\n\n/// It supports also some MIME type aliases.\n\n/// For example, ``application/turtle`` could also be used for `Turtle <https://www.w3.org/TR/turtle/>`_\n\n/// and ``application/xml`` for `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_.\n\n///\n\n/// :param input: The binary I/O object or file path to read from. For example, it could be a file path as a string or a file reader opened in binary mode with ``open('my_file.ttl', 'rb')``.\n", "file_path": "python/src/io.rs", "rank": 85, "score": 141645.0543250661 }, { "content": "fn bad_request(message: impl fmt::Display) -> Response {\n\n error(Status::BAD_REQUEST, message)\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 86, "score": 141339.7602962821 }, { "content": "fn base_url(request: &Request) -> Result<String, Response> {\n\n let mut url = request.url().clone();\n\n if let Some(host) = request.url().host_str() {\n\n url.set_host(Some(host)).map_err(bad_request)?;\n\n }\n\n url.set_query(None);\n\n url.set_fragment(None);\n\n Ok(url.into())\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 87, "score": 141240.61931256147 }, { "content": "pub fn load_dataset(url: &str) -> Result<Dataset> {\n\n let mut dataset = Dataset::new();\n\n load_to_dataset(url, &mut dataset, GraphNameRef::DefaultGraph)?;\n\n Ok(dataset)\n\n}\n", "file_path": "testsuite/src/files.rs", "rank": 88, "score": 140379.98673867894 }, { "content": "pub fn load_graph(url: &str) -> Result<Graph> {\n\n let mut graph = Graph::new();\n\n load_to_graph(url, &mut graph)?;\n\n Ok(graph)\n\n}\n\n\n", "file_path": "testsuite/src/files.rs", "rank": 89, "score": 140379.98673867894 }, { "content": "#[derive(PartialEq, Eq, Debug, Clone, Copy, Hash)]\n\nenum BlankNodeRefContent<'a> {\n\n Named(&'a str),\n\n Anonymous { id: u128, str: &'a str },\n\n}\n\n\n\nimpl<'a> BlankNodeRef<'a> {\n\n /// Creates a blank node from a unique identifier.\n\n ///\n\n /// The blank node identifier must be valid according to N-Triples, Turtle, and SPARQL grammars.\n\n ///\n\n /// In most cases, it is much more convenient to create a blank node using [`BlankNode::default()`].\n\n /// that creates a random ID that could be easily inlined by Oxigraph stores.\n\n pub fn new(id: &'a str) -> Result<Self, BlankNodeIdParseError> {\n\n validate_blank_node_identifier(id)?;\n\n Ok(Self::new_unchecked(id))\n\n }\n\n\n\n /// Creates a blank node from a unique identifier without validation.\n\n ///\n\n /// It is the caller's responsibility to ensure that `id` is a valid blank node identifier\n", "file_path": "lib/oxrdf/src/blank_node.rs", "rank": 90, "score": 139868.94526252896 }, { "content": "fn compare_str_id_str(dataset: &DatasetView, a: &StrHash, b: &str) -> Option<Ordering> {\n\n Some(dataset.get_str(a).ok()??.as_str().cmp(b))\n\n}\n\n\n", "file_path": 
"lib/src/sparql/eval.rs", "rank": 91, "score": 139119.145048917 }, { "content": "fn compare_str_str_id(dataset: &DatasetView, a: &str, b: &StrHash) -> Option<Ordering> {\n\n Some(a.cmp(dataset.get_str(b).ok()??.as_str()))\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 92, "score": 139119.145048917 }, { "content": "fn validate_variable_identifier(id: &str) -> Result<(), VariableNameParseError> {\n\n let mut chars = id.chars();\n\n let front = chars.next().ok_or(VariableNameParseError {})?;\n\n match front {\n\n '0'..='9'\n\n | '_'\n\n | ':'\n\n | 'A'..='Z'\n\n | 'a'..='z'\n\n | '\\u{00C0}'..='\\u{00D6}'\n\n | '\\u{00D8}'..='\\u{00F6}'\n\n | '\\u{00F8}'..='\\u{02FF}'\n\n | '\\u{0370}'..='\\u{037D}'\n\n | '\\u{037F}'..='\\u{1FFF}'\n\n | '\\u{200C}'..='\\u{200D}'\n\n | '\\u{2070}'..='\\u{218F}'\n\n | '\\u{2C00}'..='\\u{2FEF}'\n\n | '\\u{3001}'..='\\u{D7FF}'\n\n | '\\u{F900}'..='\\u{FDCF}'\n\n | '\\u{FDF0}'..='\\u{FFFD}'\n", "file_path": "lib/oxrdf/src/variable.rs", "rank": 93, "score": 137772.85731764685 }, { "content": "fn unbind_variables(binding: &mut EncodedTuple, variables: &[usize]) {\n\n for var in variables {\n\n binding.unset(*var)\n\n }\n\n}\n\n\n", "file_path": "lib/src/sparql/eval.rs", "rank": 94, "score": 137493.25561941744 }, { "content": "pub fn encode_term(t: &EncodedTerm) -> Vec<u8> {\n\n let mut vec = Vec::with_capacity(WRITTEN_TERM_MAX_SIZE);\n\n write_term(&mut vec, t);\n\n vec\n\n}\n\n\n", "file_path": "lib/src/storage/binary_encoder.rs", "rank": 95, "score": 136823.9311934692 }, { "content": "fn to_io_err(error: impl Into<PyErr>) -> io::Error {\n\n io::Error::new(io::ErrorKind::Other, error.into())\n\n}\n\n\n\npub(crate) fn map_io_err(error: io::Error) -> PyErr {\n\n if error.get_ref().map_or(false, |s| s.is::<PyErr>()) {\n\n *error.into_inner().unwrap().downcast().unwrap()\n\n } else {\n\n PyIOError::new_err(error.to_string())\n\n }\n\n}\n\n\n\npub(crate) fn map_parse_error(error: ParseError) -> PyErr {\n\n match error {\n\n ParseError::Syntax(error) => PySyntaxError::new_err(error.to_string()),\n\n ParseError::Io(error) => map_io_err(error),\n\n }\n\n}\n\n\n\n/// Release the GIL\n", "file_path": "python/src/io.rs", "rank": 96, "score": 136727.57617311025 }, { "content": "fn handle_request(request: &mut Request, store: Store) -> Response {\n\n match (request.url().path(), request.method().as_ref()) {\n\n (\"/\", \"HEAD\") => Response::builder(Status::OK)\n\n .with_header(HeaderName::CONTENT_TYPE, \"text_html\")\n\n .unwrap()\n\n .build(),\n\n (\"/\", \"GET\") => Response::builder(Status::OK)\n\n .with_header(HeaderName::CONTENT_TYPE, \"text_html\")\n\n .unwrap()\n\n .with_body(HTML_ROOT_PAGE),\n\n (\"/logo.svg\", \"HEAD\") => Response::builder(Status::OK)\n\n .with_header(HeaderName::CONTENT_TYPE, \"image/svg+xml\")\n\n .unwrap()\n\n .build(),\n\n (\"/logo.svg\", \"GET\") => Response::builder(Status::OK)\n\n .with_header(HeaderName::CONTENT_TYPE, \"image/svg+xml\")\n\n .unwrap()\n\n .with_body(LOGO),\n\n (\"/query\", \"GET\") => {\n\n configure_and_evaluate_sparql_query(store, &[url_query(request)], None, request)\n", "file_path": "server/src/main.rs", "rank": 97, "score": 136455.25825365505 }, { "content": "fn add_triple_to_triple_or_path_patterns(\n\n subject: TermPattern,\n\n predicate: impl Into<NamedNodePattern>,\n\n object: AnnotatedTermPath,\n\n patterns: &mut Vec<TripleOrPathPattern>,\n\n) -> Result<(), &'static str> {\n\n let triple = TriplePattern::new(subject, predicate, object.term);\n\n #[cfg(feature = \"rdf-star\")]\n\n for (p, os) in 
object.annotations {\n\n for o in os {\n\n add_to_triple_or_path_patterns(triple.clone().into(), p.clone(), o, patterns)?\n\n }\n\n }\n\n #[cfg(not(feature = \"rdf-star\"))]\n\n if !object.annotations.is_empty() {\n\n return Err(\"Embedded triples are only available in SPARQL-star\");\n\n }\n\n patterns.push(triple.into());\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/spargebra/src/parser.rs", "rank": 98, "score": 135582.86919540566 }, { "content": "enum TripleOrPathPattern {\n\n Triple(TriplePattern),\n\n Path {\n\n subject: TermPattern,\n\n path: PropertyPathExpression,\n\n object: TermPattern,\n\n },\n\n}\n\n\n\nimpl From<TriplePattern> for TripleOrPathPattern {\n\n fn from(tp: TriplePattern) -> Self {\n\n Self::Triple(tp)\n\n }\n\n}\n\n\n", "file_path": "lib/spargebra/src/parser.rs", "rank": 99, "score": 134876.69228251302 } ]
Rust
src/media/audio/lib/test/hermetic_audio_environment/rust/src/lib.rs
mehulagg/fuchsia
3f56175ee594da6b287d5fb19f2f0eccea2897f0
pub mod virtual_audio; pub mod prelude { pub use fidl; pub use fidl_fuchsia_virtualaudio::*; pub use fuchsia_async as fasync; pub type Result<T> = std::result::Result<T, failure::Error>; pub use crate::Environment; pub use fidl_fuchsia_media::*; pub use fuchsia_component::client; pub use fuchsia_zircon as zx; pub use futures::{self, future, FutureExt, SinkExt, StreamExt, TryStreamExt}; pub use test_util::assert_matches; pub use zx::AsHandleRef; } use fidl::endpoints::{create_endpoints, DiscoverableService}; use fidl_fuchsia_io::DirectoryMarker; use fidl_fuchsia_sys::LauncherProxy; use fuchsia_component::client::App; use fuchsia_component::server::*; use lazy_static::lazy_static; use maplit::hashmap; use prelude::*; use std::collections::HashMap; type ServiceName = &'static str; type ComponentUrl = &'static str; struct ComponentLaunchInfo { services: Vec<ServiceName>, arguments: Option<Vec<String>>, } lazy_static! { static ref SERVICES: HashMap<ComponentUrl, ComponentLaunchInfo> = hashmap! { "fuchsia-pkg://fuchsia.com/audio_core#meta/audio_core_nodevfs.cmx" => ComponentLaunchInfo { services: vec![ AudioCoreMarker::SERVICE_NAME, UsageReporterMarker::SERVICE_NAME, AudioDeviceEnumeratorMarker::SERVICE_NAME ], arguments: Some(vec![ "--disable-device-settings-writeback".to_string() ]) }, "fuchsia-pkg://fuchsia.com/virtual_audio_service#meta/virtual_audio_service_nodevfs.cmx" => ComponentLaunchInfo { services: vec![ InputMarker::SERVICE_NAME, OutputMarker::SERVICE_NAME, ], arguments: None }, }; } #[derive(Debug)] struct ConnectRequest { service: ServiceName, component_url: ComponentUrl, channel: zx::Channel, } fn register_services<'a>(fs: &mut ServiceFs<ServiceObj<'a, ConnectRequest>>) { for (component_url, info) in SERVICES.iter() { for service in info.services.iter().copied() { fs.add_service_at(service, move |channel| { Some(ConnectRequest { service, component_url, channel }) }); } } } fn launch_components(launcher: &LauncherProxy) -> Result<HashMap<ComponentUrl, App>> { const TEST_DEV_MGR_URL: &str = "fuchsia-pkg://fuchsia.com/audio_test_devmgr#meta/audio_test_devmgr.cmx"; const TEST_DEV_MGR_NAME: &str = "fuchsia.media.AudioTestDevmgr"; let test_dev_mgr = client::AppBuilder::new(TEST_DEV_MGR_URL).spawn(launcher)?; let mut launched = SERVICES .iter() .map(|(url, launch_info)| { use zx::HandleBased; let test_dev_mgr_handle = { let (dev_enum, directory_request) = create_endpoints::<DirectoryMarker>()?; test_dev_mgr .pass_to_named_service(TEST_DEV_MGR_NAME, directory_request.into_channel())?; dev_enum.into_channel().into_handle() }; let builder = client::AppBuilder::new(*url) .add_handle_to_namespace("/dev".to_string(), test_dev_mgr_handle) .stdout(client::Stdio::Inherit) .stderr(client::Stdio::Inherit); let builder = if let Some(arguments) = launch_info.arguments.as_ref() { builder.args(arguments) } else { builder }; let app = builder.spawn(launcher)?; Ok((*url, app)) }) .collect::<Result<HashMap<ComponentUrl, App>>>()?; launched.insert(TEST_DEV_MGR_URL, test_dev_mgr); Ok(launched) } pub struct Environment { env: NestedEnvironment, } impl Environment { pub fn new() -> Result<Self> { use fidl_fuchsia_logger::LogSinkMarker; let mut fs = ServiceFs::new(); register_services(&mut fs); fs.add_proxy_service::<LogSinkMarker, ConnectRequest>(); let env = fs.create_salted_nested_environment("environment")?; let launched_components = launch_components(env.launcher())?; fasync::spawn(fs.for_each_concurrent(None, move |request| { match launched_components.get(request.component_url) { Some(component) => { 
component.pass_to_named_service(request.service, request.channel).expect( &format!( "Component {} does not serve {}", request.component_url, request.service ), ); } None => panic!("Unknown component: {:?}", request.component_url), } future::ready(()) })); Ok(Self { env }) } pub fn connect_to_service<S: DiscoverableService>(&self) -> Result<S::Proxy> { self.env.connect_to_service::<S>() } }
pub mod virtual_audio; pub mod prelude { pub use fidl; pub use fidl_fuchsia_virtualaudio::*; pub use fuchsia_async as fasync; pub type Result<T> = std::result::Result<T, failure::Error>; pub use crate::Environment; pub use fidl_fuchsia_media::*; pub use fuchsia_component::client; pub use fuchsia_zircon as zx; pub use futures::{self, future, FutureExt, SinkExt, StreamExt, TryStreamExt}; pub use test_util::assert_matches; pub use zx::AsHandleRef; } use fidl::endpoints::{create_endpoints, DiscoverableService}; use fidl_fuchsia_io::DirectoryMarker; use fidl_fuchsia_sys::LauncherProxy; use fuchsia_component::client::App; use fuchsia_component::server::*; use lazy_static::lazy_static; use maplit::hashmap; use prelude::*; use std::collections::HashMap; type ServiceName = &'static str; type ComponentUrl = &'static str; struct ComponentLaunchInfo { services: Vec<ServiceName>, arguments: Option<Vec<String>>, } lazy_static! { static ref SERVICES: HashMap<ComponentUrl, ComponentLaunchInfo> = hashmap! { "fuchsia-pkg://fuchsia.com/audio_core#meta/audio_core_nodevfs.cmx" => ComponentLaunchInfo { services: vec![ AudioCoreMarker::SERVICE_NAME, UsageReporterMarker::SERVICE_NAME, AudioDeviceEnumeratorMarker::SERVICE_NAME ], arguments: Some(vec![ "--disable-device-settings-writeback".to_string() ]) }, "fuchsia-pkg://fuchsia.com/virtual_audio_service#meta/virtual_audio_service_nodevfs.cmx" => ComponentLaunchInfo { services: vec![ InputMarker::SERVICE_NAME, OutputMarker::SERVICE_NAME, ], arguments: None }, }; } #[derive(Debug)] struct ConnectRequest { service: ServiceName, component_url: ComponentUrl, channel: zx::Channel, } fn register_services<'a>(fs: &mut ServiceFs<ServiceObj<'a, ConnectRequest>>) { for (component_url, info) in SERVICES.iter() { for service in info.services.iter().copied() { fs.add_service_at(service, move |channel| { Some(ConnectRequest { service, component_url, channel }) }); } } } fn launch_components(launcher: &LauncherProxy) -> Result<HashMap<ComponentUrl, App>> { const TEST_DEV_MGR_URL: &str = "fuchsia-pkg://fuchsia.com/audio_test_devmgr#meta/audio_test_devmgr.cmx"; const TEST_DEV_MGR_NAME: &str = "fuchsia.media.AudioTestDevmgr"; let test_dev_mgr = client::AppBuilder::new(TEST_DEV_MGR_URL).spawn(launcher)?; let mut launched = SERVICES .iter() .map(|(url, launch_info)| { use zx::HandleBased; let test_dev_mgr_handle = { let (dev_enum, directory_request) = create_endpoints::<DirectoryMarker>()?; test_dev_mgr .pass_to_named_service(TEST_DEV_MGR_NAME, directory_request.into_channel())?; dev_enum.into_channel().into_handle() }; let builder = client::AppBuilder::new(*url) .add_handle_to_namespace("/dev".to_string(), test_dev_mgr_handle) .stdout(client::Stdio::Inherit) .stderr(client::Stdio::Inherit); let builder = if let Some(arguments) = launch_info.arguments.as_ref() { builder.args(arguments) } else { builder }; let app = builder.spawn(launcher)?; Ok((*url, app)) }) .collect::<Result<HashMap<ComponentUrl, App>>>()?; launched.insert(TEST_DEV_MGR_URL, test_dev_mgr); Ok(launched) } pub struct Environment { env: NestedEnvironment, } impl Environment {
pub fn connect_to_service<S: DiscoverableService>(&self) -> Result<S::Proxy> { self.env.connect_to_service::<S>() } }
pub fn new() -> Result<Self> { use fidl_fuchsia_logger::LogSinkMarker; let mut fs = ServiceFs::new(); register_services(&mut fs); fs.add_proxy_service::<LogSinkMarker, ConnectRequest>(); let env = fs.create_salted_nested_environment("environment")?; let launched_components = launch_components(env.launcher())?; fasync::spawn(fs.for_each_concurrent(None, move |request| { match launched_components.get(request.component_url) { Some(component) => { component.pass_to_named_service(request.service, request.channel).expect( &format!( "Component {} does not serve {}", request.component_url, request.service ), ); } None => panic!("Unknown component: {:?}", request.component_url), } future::ready(()) })); Ok(Self { env }) }
function_block-full_function
[]
Rust
hsp3-analyzer-mini/ham-core/src/analysis/preproc.rs
honobonosun/hsp3-ginger
d2788085d71c8d8fdf31e445a8e262c08e18fba8
use super::{a_scope::*, a_symbol::*}; use crate::{parse::*, source::DocId}; use std::{collections::HashMap, mem::replace}; #[derive(Default)] struct Ctx { doc: DocId, symbols: Vec<ASymbolData>, scope: ALocalScope, modules: HashMap<AModule, AModuleData>, module_len: usize, deffunc_len: usize, } impl Ctx { fn deffunc_scope(&self) -> AScope { AScope::Local(self.scope.clone()) } fn module_scope(&self) -> AScope { AScope::Local(ALocalScope { module_opt: self.scope.module_opt.clone(), deffunc_opt: None, }) } fn privacy_scope_or_local(&self, privacy_opt: &Option<(PPrivacy, PToken)>) -> AScope { match privacy_opt { Some((PPrivacy::Global, _)) => AScope::Global, _ => self.module_scope(), } } fn privacy_scope_or_global(&self, privacy_opt: &Option<(PPrivacy, PToken)>) -> AScope { match privacy_opt { Some((PPrivacy::Local, _)) => self.module_scope(), _ => AScope::Global, } } fn add_symbol(&mut self, kind: ASymbolKind, leader: &PToken, name: &PToken, scope: AScope) { add_symbol(kind, leader, name, scope, &mut self.symbols); } } fn add_symbol( kind: ASymbolKind, leader: &PToken, name: &PToken, scope: AScope, symbols: &mut Vec<ASymbolData>, ) { symbols.push(ASymbolData { kind, name: name.body.text.clone(), def_sites: vec![name.body.loc.clone()], use_sites: vec![], leader: leader.clone(), scope, }); } fn on_stmt(stmt: &PStmt, ctx: &mut Ctx) { match stmt { PStmt::Label(PLabel { star, name_opt }) => { if let Some(name) = name_opt { ctx.add_symbol(ASymbolKind::Label, star, name, ctx.module_scope()); } } PStmt::Assign(_) | PStmt::Command(_) | PStmt::Invoke(_) => {} PStmt::Const(PConstStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::Const, hash, name, scope); } } PStmt::Define(PDefineStmt { hash, privacy_opt, ctype_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); let ctype = ctype_opt.is_some(); ctx.add_symbol(ASymbolKind::Macro { ctype }, hash, name, scope); } } PStmt::Enum(PEnumStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::Enum, hash, name, scope); } } PStmt::DefFunc(PDefFuncStmt { hash, keyword: _, kind, privacy_opt, name_opt, onexit_opt, params, stmts, behind: _, .. }) => { ctx.deffunc_len += 1; let deffunc = ADefFunc::new(ctx.deffunc_len); let kind = match *kind { PDefFuncKind::DefFunc => ASymbolKind::DefFunc, PDefFuncKind::DefCFunc => ASymbolKind::DefCFunc, PDefFuncKind::ModInit | PDefFuncKind::ModTerm | PDefFuncKind::ModFunc => { ASymbolKind::ModFunc } PDefFuncKind::ModCFunc => ASymbolKind::ModCFunc, }; if let Some(name) = name_opt { if onexit_opt.is_none() { let scope = ctx.privacy_scope_or_global(privacy_opt); ctx.add_symbol(kind, hash, name, scope); } } let parent_deffunc = replace(&mut ctx.scope.deffunc_opt, Some(deffunc)); for param in params { if let Some(name) = &param.name_opt { let param_ty = param.param_ty_opt.as_ref().map(|&(t, _)| t); ctx.add_symbol( ASymbolKind::Param(param_ty), hash, name, ctx.deffunc_scope(), ); } } for stmt in stmts { on_stmt(stmt, ctx); } ctx.scope.deffunc_opt = parent_deffunc; } PStmt::UseLib(_) => {} PStmt::LibFunc(PLibFuncStmt { hash, privacy_opt, name_opt, onexit_opt, .. 
}) => { if let Some(name) = name_opt { if onexit_opt.is_none() { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::LibFunc, hash, name, scope); } } } PStmt::UseCom(PUseComStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::ComInterface, hash, name, scope); } } PStmt::ComFunc(PComFuncStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_global(privacy_opt); ctx.add_symbol(ASymbolKind::ComFunc, hash, name, scope); } } PStmt::RegCmd(_) => {} PStmt::Cmd(PCmdStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::PluginCmd, hash, name, scope); } } PStmt::Module(PModuleStmt { hash, keyword, name_opt, fields, stmts, behind, .. }) => { let module = AModule::new(ctx.doc, &mut ctx.module_len, name_opt); ctx.modules.insert( module.clone(), AModuleData { keyword_loc: keyword.body.loc.clone(), content_loc: hash.body.loc.unite(&behind), }, ); let parent_scope = replace( &mut ctx.scope, ALocalScope { module_opt: Some(module), deffunc_opt: None, }, ); if let Some(name) = name_opt { ctx.add_symbol(ASymbolKind::Module, hash, name, AScope::Global); } for field in fields.iter().filter_map(|param| param.name_opt.as_ref()) { ctx.add_symbol(ASymbolKind::Field, field, field, ctx.module_scope()); } for stmt in stmts { on_stmt(stmt, ctx); } ctx.scope = parent_scope; } PStmt::Global(_) => {} PStmt::Include(_) => {} PStmt::UnknownPreProc(_) => {} } } pub(crate) struct PreprocAnalysisResult { pub(crate) symbols: Vec<ASymbolData>, pub(crate) modules: HashMap<AModule, AModuleData>, } pub(crate) fn analyze_preproc(doc: DocId, root: &PRoot) -> PreprocAnalysisResult { let mut ctx = Ctx::default(); ctx.doc = doc; for stmt in &root.stmts { on_stmt(stmt, &mut ctx); } let Ctx { symbols, modules, .. } = ctx; PreprocAnalysisResult { symbols, modules } }
use super::{a_scope::*, a_symbol::*}; use crate::{parse::*, source::DocId}; use std::{collections::HashMap, mem::replace}; #[derive(Default)] struct Ctx { doc: DocId, symbols: Vec<ASymbolData>, scope: ALocalScope, modules: HashMap<AModule, AModuleData>, module_len: usize, deffunc_len: usize, } impl Ctx { fn deffunc_scope(&self) -> AScope { AScope::Local(self.scope.clone()) } fn module_scope(&self) -> AScope { AScope::Local(ALocalScope { module_opt: self.scope.module_opt.clone(), deffunc_opt: None, }) } fn privacy_scope_or_local(&self, privacy_opt: &Option<(PPrivacy, PToken)>) -> AScope { match privacy_opt { Some((PPrivacy::Global, _)) => AScope::Global, _ => self.module_scope(), } } fn privacy_scope_or_global(&self, privacy_opt: &Option<(PPrivacy, PToken)>) -> AScope { match privacy_opt { Some((PPrivacy::Local, _)) => self.module_scope(), _ => AScope::Global, } } fn add_symbol(&mut self, kind: ASymbolKind, leader: &PToken, name: &PToken, scope: AScope) { add_symbol(kind, leader, name, scope, &mut self.symbols); } } fn add_symbol( kind: ASymbolKind, leader: &PToken, name: &PToken, scope: AScope, symbols: &mut Vec<ASymbolData>, ) { symbols.push(ASymbolData { kind, name: name.body.text.clone(), def_sites: vec![name.body.loc.clone()], use_sites: vec![], leader: leader.clone(), scope, }); } fn on_stmt(stmt: &PStmt, ctx: &mut Ctx) { match stmt { PStmt::Label(PLabel { star, name_opt }) => { if let Some(name) = name_opt { ctx.add_symbol(ASymbolKind::Label, star, name, ctx.module_scope()); } } PStmt::Assign(_) | PStmt::Command(_) | PStmt::Invoke(_) => {} PStmt::Const(PConstStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::Const, hash, name, scope); } } PStmt::Define(PDefineStmt { hash, privacy_opt, ctype_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); let ctype = ctype_opt.is_some(); ctx.add_symbol(ASymbolKind::Macro { ctype }, hash, name, scope); } } PStmt::Enum(PEnumStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::Enum, hash, name, scope); } } PStmt::DefFunc(PDefFuncStmt { hash, keyword: _, kind, privacy_opt, name_opt, onexit_opt, params, stmts, behind: _, .. }) => { ctx.deffunc_len += 1; let deffunc = ADefFunc::new(ctx.deffunc_len); let kind = match *kind { PDefFuncKind::DefFunc => ASymbolKind::DefFunc, PDefFuncKind::DefCFunc => ASymbolKind::DefCFunc, PDefFuncKind::ModInit | PDefFuncKind::ModTerm | PDefFuncKind::ModFunc => { ASymbolKind::ModFunc } PDefFuncKind::ModCFunc => ASymbolKind::ModCFunc, }; if let Some(name) = name_opt { if onexit_opt.is_none() { let scope = ctx.privacy_scope_or_global(privacy_opt); ctx.add_symbol(kind, hash, name, scope); } } let parent_deffunc = replace(&mut ctx.scope.deffunc_opt, Some(deffunc)); for param in params { if let Some(name) = &param.name_opt { let param_ty = param.param_ty_opt.as_ref().map(|&(t, _)| t); ctx.add_symbol( ASymbolKind::Param(param_ty), hash, name, ctx.deffunc_scope(), ); } } for stmt in stmts { on_stmt(stmt, ctx); } ctx.scope.deffunc_opt = parent_deffunc; } PStmt::UseLib(_) => {} PStmt::LibFunc(PLibFuncStmt { hash, privacy_opt, name_opt, onexit_opt, .. 
}) => { if let Some(name) = name_opt { if onexit_opt.is_none() { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::LibFunc, hash, name, scope); } } } PStmt::UseCom(PUseComStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::ComInterface, hash, name, scope); } } PStmt::ComFunc(PComFuncStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_global(privacy_opt); ctx.add_symbol(ASymbolKind::ComFunc, hash, name, scope); } } PStmt::RegCmd(_) => {} PStmt::Cmd(PCmdStmt { hash, privacy_opt, name_opt, .. }) => { if let Some(name) = name_opt { let scope = ctx.privacy_scope_or_local(privacy_opt); ctx.add_symbol(ASymbolKind::PluginCmd, hash, name, scope); } } PStmt::Module(PModuleStmt { hash, keyword, name_opt, fields, stmts, behind, .. }) => { let module = AModule::new(ctx.doc, &mut ctx.module_len, name_opt); ctx.modules.insert( module.clone(), AModuleData { keyword_loc: keyword.body.loc.clone(), content_loc: hash.body.loc.unite(&behind), }, ); let parent_scope = replace( &mut ctx.scope, ALocalScope { module_opt: Some(module), deffunc_opt: None, }, ); if let Some(name) = name_opt { ctx.add_symbol(ASymbolKind::Module, hash, name, AScope::Global); } for field in fields.iter().filter_map(|param| param.name_opt.as_ref()) { ctx.add_symbol(ASymbolKind::Field, field, field, ctx.module_scope()); } for stmt in stmts { on_stmt(stmt, ctx); } ctx.scope = parent_scope; } PStmt::Global(_) => {} PStmt::Include(_) => {} PStmt::UnknownPreProc(_) => {} } } pub(crate) struct PreprocAnalysisResult { pub(crate) symbols: Vec<ASymbolData>, pub(crate) modules: HashMap<AModule, AModuleData>, } pub(crate) fn analyze_preproc(doc: DocId, root: &PRoot) -> PreprocAnalysisResul
t { let mut ctx = Ctx::default(); ctx.doc = doc; for stmt in &root.stmts { on_stmt(stmt, &mut ctx); } let Ctx { symbols, modules, .. } = ctx; PreprocAnalysisResult { symbols, modules } }
function_block-function_prefixed
[ { "content": "fn add_symbol(kind: ASymbolKind, name: &PToken, def_site: bool, ctx: &mut Ctx) {\n\n // 新しいシンボルを登録する。\n\n let symbol = ASymbol::new(ctx.symbols.len());\n\n\n\n let mut symbol_data = ASymbolData {\n\n kind,\n\n name: name.body.text.clone(),\n\n def_sites: vec![],\n\n use_sites: vec![],\n\n leader: name.clone(),\n\n scope: ctx.module_scope(),\n\n };\n\n\n\n if def_site {\n\n symbol_data.def_sites.push(name.body.loc);\n\n } else {\n\n symbol_data.use_sites.push(name.body.loc);\n\n }\n\n\n\n ctx.symbols.push(symbol_data);\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 0, "score": 443580.44844873896 }, { "content": "fn on_symbol_use(name: &PToken, is_var: bool, ctx: &mut Ctx) {\n\n match resolve_candidate(name.body_text(), &ctx.scope, &ctx.public.env, &ctx.env) {\n\n Some(ws_symbol) if ws_symbol.doc != ctx.doc => {\n\n ctx.public.use_sites.push((ws_symbol, name.body.loc));\n\n }\n\n Some(ws_symbol) => {\n\n assert_eq!(ws_symbol.doc, ctx.doc);\n\n ctx.symbols[ws_symbol.symbol.get()]\n\n .use_sites\n\n .push(name.body.loc);\n\n }\n\n None => {\n\n let kind = if is_var {\n\n ASymbolKind::StaticVar\n\n } else {\n\n ASymbolKind::Unresolved\n\n };\n\n add_symbol(kind, name, USE_SITE, ctx);\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 1, "score": 395653.87791454716 }, { "content": "fn on_symbol_def(name: &PToken, ctx: &mut Ctx) {\n\n match resolve_candidate(name.body_text(), &ctx.scope, &ctx.public.env, &ctx.env) {\n\n Some(ws_symbol) if ws_symbol.doc != ctx.doc => {\n\n ctx.public.def_sites.push((ws_symbol, name.body.loc));\n\n }\n\n Some(ws_symbol) => {\n\n assert_eq!(ws_symbol.doc, ctx.doc);\n\n ctx.symbols[ws_symbol.symbol.get()]\n\n .def_sites\n\n .push(name.body.loc);\n\n }\n\n None => add_symbol(ASymbolKind::StaticVar, name, DEF_SITE, ctx),\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 2, "score": 371433.9779614182 }, { "content": "fn parse_deffunc_like_stmt(hash: PToken, kind: PDefFuncKind, px: &mut Px) -> PDefFuncStmt {\n\n assert!(DEFFUNC_LIKE_KEYWORDS.contains(&px.next_token().body_text()));\n\n\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let name_opt = if kind.is_anonymous() {\n\n None\n\n } else {\n\n px.eat(TokenKind::Ident)\n\n };\n\n\n\n let onexit_opt = eat_ident(\"onexit\", px);\n\n let params = parse_deffunc_params(px);\n\n parse_end_of_preproc(px);\n\n\n\n let mut stmts = vec![];\n\n let behind = loop {\n\n match px.next() {\n\n TokenKind::Eof => break px.next_token().behind(),\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 3, "score": 361571.2820110128 }, { "content": "fn on_stmt(stmt: &PStmt, ctx: &mut Ctx) {\n\n match stmt {\n\n PStmt::Label(PLabel { name_opt, .. }) => {\n\n if let Some(name) = name_opt {\n\n add_symbol(ASymbolKind::Label, name, DEF_SITE, ctx);\n\n }\n\n }\n\n PStmt::Assign(PAssignStmt {\n\n left,\n\n op_opt: _,\n\n args,\n\n }) => {\n\n // FIXME: def/use は演算子の種類による\n\n on_compound_def(left, ctx);\n\n on_args(args, ctx);\n\n }\n\n PStmt::Command(PCommandStmt { command, args, .. 
}) => {\n\n on_symbol_use(command, false, ctx);\n\n\n\n static COMMANDS: &[&str] = &[\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 4, "score": 343158.42047175375 }, { "content": "fn create_param_infos(deffunc: &Symbol, symbols: &Symbols) -> Vec<String> {\n\n let mut params = vec![];\n\n let mut s = String::new();\n\n\n\n for param in symbols.params(deffunc) {\n\n if let Some(param_ty_token) = symbols.param_node(&param).param_ty() {\n\n // 引数を受け取らないパラメータは無視する。\n\n if !ParamTy::from_str(param_ty_token.text())\n\n .map_or(false, |param_ty| param_ty.takes_arg())\n\n {\n\n continue;\n\n }\n\n\n\n s += param_ty_token.text();\n\n s += \" \";\n\n }\n\n\n\n match symbols.unqualified_name(&param) {\n\n Some(name) => s += name,\n\n None => s += \"???\",\n\n }\n\n\n\n params.push(s.clone());\n\n s.clear();\n\n }\n\n\n\n params\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/get_signature_help.rs", "rank": 6, "score": 331370.0962147484 }, { "content": "fn parse_module_stmt(hash: PToken, px: &mut Px) -> PModuleStmt {\n\n assert_eq!(px.next_token().body_text(), \"module\");\n\n\n\n let keyword = px.bump();\n\n\n\n let name_opt = match px.next() {\n\n TokenKind::Ident | TokenKind::Str => Some(px.bump()),\n\n _ => None,\n\n };\n\n\n\n let fields = parse_deffunc_params(px);\n\n parse_end_of_preproc(px);\n\n\n\n let mut stmts = vec![];\n\n let (global_opt, behind) = loop {\n\n match px.next() {\n\n TokenKind::Eof => break (None, px.next_token().behind()),\n\n TokenKind::Eos | TokenKind::LeftBrace | TokenKind::RightBrace | TokenKind::Colon => {\n\n px.skip();\n\n }\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 7, "score": 330025.1413224584 }, { "content": "fn parse_include_stmt(hash: PToken, kind: PIncludeKind, px: &mut Px) -> PIncludeStmt {\n\n let keyword = px.bump();\n\n let file_path_opt = px.eat(TokenKind::Str);\n\n parse_end_of_preproc(px);\n\n\n\n PIncludeStmt {\n\n hash,\n\n keyword,\n\n kind,\n\n file_path_opt,\n\n }\n\n}\n\n\n\npub(crate) fn parse_preproc_stmt(px: &mut Px) -> Option<PStmt> {\n\n let hash = px.eat(TokenKind::Hash)?;\n\n\n\n let stmt = match px.next_token().body_text() {\n\n \"const\" => PStmt::Const(parse_const_stmt(hash, px)),\n\n \"enum\" => PStmt::Enum(parse_enum_stmt(hash, px)),\n\n \"define\" => PStmt::Define(parse_define_stmt(hash, px)),\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 8, "score": 326428.36355215264 }, { "content": "fn parse_uselib_stmt(hash: PToken, px: &mut Px) -> PUseLibStmt {\n\n assert_eq!(px.next_token().body_text(), \"uselib\");\n\n\n\n let keyword = px.bump();\n\n let file_path_opt = px.eat(TokenKind::Str);\n\n parse_end_of_preproc(px);\n\n\n\n PUseLibStmt {\n\n hash,\n\n keyword,\n\n file_path_opt,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 9, "score": 314727.4405849392 }, { "content": "fn parse_deffunc_params(px: &mut Px) -> Vec<PParam> {\n\n let mut params = vec![];\n\n\n\n loop {\n\n match px.next() {\n\n TokenKind::Eof | TokenKind::Eos => break,\n\n TokenKind::Comma => {\n\n let comma = px.bump();\n\n\n\n params.push(PParam {\n\n param_ty_opt: None,\n\n name_opt: None,\n\n comma_opt: Some(comma),\n\n });\n\n }\n\n TokenKind::Ident => {\n\n let param_ty_opt = parse_param_ty(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let comma_opt = px.eat(TokenKind::Comma);\n\n let comma_seen = comma_opt.is_some();\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", 
"rank": 10, "score": 310786.42313022335 }, { "content": "fn lookahead_after_paren(mut i: usize, px: &mut Px) -> ExprLikeStmtKind {\n\n let mut balance = 1;\n\n\n\n loop {\n\n let kind = px.nth(i);\n\n i += 1;\n\n\n\n match kind {\n\n TokenKind::LeftParen => balance += 1,\n\n TokenKind::RightParen => match balance {\n\n 0 | 1 => break,\n\n _ => balance -= 1,\n\n },\n\n TokenKind::Comma if balance == 1 => {\n\n // カッコの直下にカンマがあるなら添字のカッコなので、代入文で確定。\n\n return ExprLikeStmtKind::Assign;\n\n }\n\n TokenKind::SlimArrow => {\n\n return ExprLikeStmtKind::Invoke;\n\n }\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 11, "score": 300917.9136635652 }, { "content": "fn on_compound_use(compound: &PCompound, ctx: &mut Ctx) {\n\n match compound {\n\n PCompound::Name(name) => on_symbol_use(name, true, ctx),\n\n PCompound::Paren(PNameParen { name, args, .. }) => {\n\n on_symbol_use(name, true, ctx);\n\n\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n }\n\n PCompound::Dots(PNameDot { name, args }) => {\n\n on_symbol_use(name, true, ctx);\n\n\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 12, "score": 280253.57248475554 }, { "content": "fn parse_enum_stmt(hash: PToken, px: &mut Px) -> PEnumStmt {\n\n assert_eq!(px.next_token().body_text(), \"enum\");\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n\n\n let equal_opt = px.eat(TokenKind::Equal);\n\n let init_opt = parse_expr(px);\n\n parse_end_of_preproc(px);\n\n\n\n PEnumStmt {\n\n hash,\n\n keyword,\n\n privacy_opt,\n\n name_opt,\n\n equal_opt,\n\n init_opt,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 13, "score": 272014.05235002976 }, { "content": "fn parse_define_stmt(hash: PToken, px: &mut Px) -> PDefineStmt {\n\n assert_eq!(px.next_token().body_text(), \"define\");\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let ctype_opt = eat_ident(\"ctype\", px);\n\n\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let has_params = {\n\n // マクロ名と `(` の間にスペースがないときだけパラメータリストとみなす。\n\n px.next() == TokenKind::LeftParen\n\n && name_opt.as_ref().map_or(false, |name| {\n\n name.body.loc.end() == px.next_token().body.loc.start()\n\n })\n\n };\n\n let (left_paren_opt, params, right_paren_opt) = if has_params {\n\n let left_paren_opt = px.eat(TokenKind::LeftParen);\n\n let params = parse_macro_params(px);\n\n let right_paren_opt = px.eat(TokenKind::RightParen);\n\n (left_paren_opt, params, right_paren_opt)\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 14, "score": 272014.05235002976 }, { "content": "fn parse_const_stmt(hash: PToken, px: &mut Px) -> PConstStmt {\n\n assert_eq!(px.next_token().body_text(), \"const\");\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let ty_opt = parse_const_ty(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let init_opt = parse_expr(px);\n\n parse_end_of_preproc(px);\n\n\n\n PConstStmt {\n\n hash,\n\n keyword,\n\n privacy_opt,\n\n ty_opt,\n\n name_opt,\n\n init_opt,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 15, "score": 272014.05235002976 }, { "content": "fn parse_cmd_stmt(hash: PToken, px: &mut Px) -> PCmdStmt {\n\n assert_eq!(px.next_token().body_text(), \"cmd\");\n\n\n\n let 
keyword = px.bump();\n\n let privacy_opt = parse_privacy(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let command_id_opt = px.eat(TokenKind::Number);\n\n parse_end_of_preproc(px);\n\n\n\n PCmdStmt {\n\n hash,\n\n keyword,\n\n privacy_opt,\n\n name_opt,\n\n command_id_opt,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 16, "score": 272014.05235002976 }, { "content": "fn parse_global_stmt(hash: PToken, px: &mut Px) -> PGlobalStmt {\n\n assert_eq!(px.next_token().body_text(), \"global\");\n\n\n\n let keyword = px.bump();\n\n parse_end_of_preproc(px);\n\n\n\n PGlobalStmt { hash, keyword }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 17, "score": 272014.05235002976 }, { "content": "fn parse_regcmd_stmt(hash: PToken, px: &mut Px) -> PRegCmdStmt {\n\n assert_eq!(px.next_token().body_text(), \"regcmd\");\n\n\n\n let keyword = px.bump();\n\n let args = parse_args(px);\n\n parse_end_of_preproc(px);\n\n\n\n PRegCmdStmt {\n\n hash,\n\n keyword,\n\n args,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 18, "score": 269999.81739693903 }, { "content": "fn parse_lib_func_stmt(hash: PToken, px: &mut Px) -> PLibFuncStmt {\n\n let keyword = px.bump();\n\n\n\n let privacy_opt = parse_privacy(px);\n\n let name_opt = px.eat(TokenKind::Ident);\n\n let onexit_opt = eat_ident(\"onexit\", px);\n\n\n\n let func_name_opt = match px.next() {\n\n TokenKind::Ident | TokenKind::Str => Some(px.bump()),\n\n _ => None,\n\n };\n\n let type_id_opt = px.eat(TokenKind::Number);\n\n let params = parse_deffunc_params(px);\n\n parse_end_of_preproc(px);\n\n\n\n PLibFuncStmt {\n\n hash,\n\n keyword,\n\n privacy_opt,\n\n name_opt,\n\n onexit_opt,\n\n func_name_opt,\n\n type_id_opt,\n\n params,\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 19, "score": 268028.5165016239 }, { "content": "fn parse_macro_params(px: &mut Px) -> Vec<PMacroParam> {\n\n let mut params = vec![];\n\n let mut init = vec![];\n\n\n\n loop {\n\n if let TokenKind::Eof | TokenKind::Eos | TokenKind::RightParen = px.next() {\n\n break;\n\n }\n\n\n\n let percent_opt = px.eat(TokenKind::Percent);\n\n let number_opt = px.eat(TokenKind::Number);\n\n\n\n // 既定値\n\n let equal_opt = px.eat(TokenKind::Equal);\n\n if equal_opt.is_some() {\n\n init.extend(px.eat(TokenKind::Percent));\n\n match px.next() {\n\n TokenKind::Eof\n\n | TokenKind::Eos\n\n | TokenKind::LeftParen\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 20, "score": 261718.92061028615 }, { "content": "fn look_ahead_stmt(p: &mut Px) -> StmtKind {\n\n assert_eq!(p.next(), Token::Ident);\n\n\n\n let second = p.nth(1);\n\n\n\n if second == Token::Minus && p.nth(2).at_end_of_stmt() {\n\n return StmtKind::Assign;\n\n }\n\n\n\n if second == Token::Minus || second == Token::Star {\n\n // 曖昧な文。notes.md を参照。\n\n return StmtKind::Command;\n\n }\n\n\n\n // mes \"hello\" のように識別子の直後に原子式があるケースは、代入文ではない。\n\n // また `on goto ...` のように jump modifier があるケースは命令文に確定。\n\n if (second != Token::LeftParen && second.is_atom_expr_first()) || second.is_jump_modifier() {\n\n return StmtKind::Command;\n\n }\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 21, "score": 258604.55076306147 }, { "content": "fn lookahead_stmt(px: &mut Px) -> ExprLikeStmtKind {\n\n match px.nth(1) {\n\n TokenKind::LeftParen => lookahead_after_paren(2, px),\n\n TokenKind::Dot => 
ExprLikeStmtKind::Assign,\n\n TokenKind::SlimArrow => ExprLikeStmtKind::Invoke,\n\n TokenKind::Plus | TokenKind::Minus if px.nth(2).is_end_of_stmt() => {\n\n ExprLikeStmtKind::Assign\n\n }\n\n second => match second.to_op_kind() {\n\n None | Some(POpKind::Infix) | Some(POpKind::PrefixOrInfixOrAssign) => {\n\n ExprLikeStmtKind::Command\n\n }\n\n Some(POpKind::InfixOrAssign) | Some(POpKind::Assign) => ExprLikeStmtKind::Assign,\n\n },\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 22, "score": 252132.5992152415 }, { "content": "fn parse_module_stmt_contents(p: &mut Px) {\n\n assert!(p.next_data().text() == \"module\");\n\n\n\n p.bump();\n\n\n\n match p.next() {\n\n Token::Ident => parse_name(p),\n\n Token::StrStart => parse_str_literal(p),\n\n _ => {}\n\n }\n\n\n\n // FIXME: メンバ変数のリスト\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_pp.rs", "rank": 23, "score": 248723.0570943442 }, { "content": "fn create_global_env(symbols: &Symbols, env: &mut Env) {\n\n for symbol in symbols.iter() {\n\n if symbols.kind(&symbol) == SymbolKind::Param {\n\n continue;\n\n }\n\n\n\n if let Some(name) = symbols.unqualified_name(&symbol) {\n\n env.insert(name.to_string(), symbol.clone());\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/name_resolution.rs", "rank": 24, "score": 246710.60737987672 }, { "content": "fn parse_deffunc_like_stmt_contents(p: &mut Px) {\n\n assert!(at_deffunc_like_keyword(p));\n\n\n\n p.bump();\n\n\n\n if !p.eat_ident(\"global\") {\n\n p.eat_ident(\"local\");\n\n }\n\n\n\n // modinit/modterm のときは名前は不要\n\n if p.next() == Token::Ident {\n\n parse_name(p);\n\n }\n\n\n\n if !p.eat_ident(\"onexit\") {\n\n parse_params(p);\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_pp.rs", "rank": 25, "score": 246218.29649194723 }, { "content": "fn on_args(args: &[PArg], ctx: &mut Ctx) {\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 26, "score": 233851.4993942848 }, { "content": "fn on_expr(expr: &PExpr, ctx: &mut Ctx) {\n\n match expr {\n\n PExpr::Literal(_) => {}\n\n PExpr::Label(PLabel { star: _, name_opt }) => {\n\n if let Some(name) = name_opt {\n\n on_symbol_use(name, false, ctx);\n\n }\n\n }\n\n PExpr::Compound(compound) => on_compound_use(compound, ctx),\n\n PExpr::Paren(PParenExpr { body_opt, .. 
}) => on_expr_opt(body_opt.as_deref(), ctx),\n\n PExpr::Prefix(PPrefixExpr { prefix: _, arg_opt }) => on_expr_opt(arg_opt.as_deref(), ctx),\n\n PExpr::Infix(PInfixExpr {\n\n infix: _,\n\n left,\n\n right_opt,\n\n }) => {\n\n on_expr(left, ctx);\n\n on_expr_opt(right_opt.as_deref(), ctx);\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 27, "score": 233851.4993942848 }, { "content": "fn close_module(node_opt: Option<&SyntaxNode>, gsc: &mut GlobalSymbolCollection) {\n\n let module_symbol = match gsc.current_module_opt.take() {\n\n None => return,\n\n Some(x) => x,\n\n };\n\n\n\n gsc.symbols.define_module(&module_symbol, node_opt.cloned());\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/get_global_symbols.rs", "rank": 28, "score": 232469.0694175302 }, { "content": "fn close_deffunc(node_opt: Option<&SyntaxNode>, gsc: &mut GlobalSymbolCollection) {\n\n let deffunc_symbol = match gsc.current_deffunc_opt.take() {\n\n None => return,\n\n Some(x) => x,\n\n };\n\n\n\n gsc.symbols\n\n .define_deffunc(&deffunc_symbol, node_opt.cloned());\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/get_global_symbols.rs", "rank": 29, "score": 232465.5313132056 }, { "content": "fn on_compound_def(compound: &PCompound, ctx: &mut Ctx) {\n\n match compound {\n\n PCompound::Name(name) => on_symbol_def(name, ctx),\n\n PCompound::Paren(PNameParen { name, args, .. }) => {\n\n on_symbol_def(name, ctx);\n\n\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n }\n\n PCompound::Dots(PNameDot { name, args }) => {\n\n on_symbol_def(name, ctx);\n\n\n\n for arg in args {\n\n on_expr_opt(arg.expr_opt.as_ref(), ctx);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 30, "score": 232157.08236130956 }, { "content": "fn with_global(f: impl FnOnce(&mut Global)) {\n\n let mut lock = match GLOBAL.lock() {\n\n Err(err) => {\n\n warn!(\"can't lock global {:?}\", err);\n\n return;\n\n }\n\n Ok(lock) => lock,\n\n };\n\n\n\n let global = match lock.as_mut() {\n\n None => {\n\n warn!(\"before initialization\");\n\n return;\n\n }\n\n Some(global) => global,\n\n };\n\n\n\n f(global);\n\n}\n\n\n", "file_path": "hsp3-debug-spider/spider-server/src/lib.rs", "rank": 31, "score": 227241.56448816275 }, { "content": "fn eat_arbitrary_tokens(px: &mut Px) -> Vec<PToken> {\n\n let mut tokens = vec![];\n\n while !px.next().is_end_of_preproc() {\n\n tokens.push(px.bump());\n\n }\n\n tokens\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 32, "score": 226117.91399009107 }, { "content": "fn on_expr_opt(expr_opt: Option<&PExpr>, ctx: &mut Ctx) {\n\n if let Some(expr) = expr_opt {\n\n on_expr(expr, ctx);\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 33, "score": 223215.94386669132 }, { "content": "fn parse_expr_like_stmt(px: &mut Px) -> Option<PStmt> {\n\n match lookahead_stmt(px) {\n\n ExprLikeStmtKind::Assign => parse_assign_stmt(px).map(PStmt::Assign),\n\n ExprLikeStmtKind::Command => parse_command_stmt(px).map(PStmt::Command),\n\n ExprLikeStmtKind::Invoke => parse_invoke_stmt(px).map(PStmt::Invoke),\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 34, "score": 221155.63806277484 }, { "content": "fn parse_stmt(p: &mut Px) {\n\n match p.next() {\n\n Token::Ident => parse_ambiguous_stmt(p),\n\n Token::Star => parse_label_stmt(p),\n\n Token::Hash => parse_pp_stmt(p),\n\n _ if 
p.next().is_control_keyword() => parse_command_stmt(p),\n\n _ => {\n\n // assert!(p.next().at_end_of_stmt(), \"is_stmt_first/at_end_of_stmt bug\");\n\n parse_end_of_stmt(p);\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn parse_root(p: &mut Px) {\n\n while !p.at_eof() {\n\n // エラー回復\n\n if !p.next().is_stmt_first() && !p.next().at_end_of_stmt() {\n\n p.start_node();\n\n\n\n while !p.at_eof() && !p.next().is_stmt_first() {\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 35, "score": 214875.8234940161 }, { "content": "fn parse_label_stmt(p: &mut Px) {\n\n assert_eq!(p.next(), Token::Star);\n\n\n\n p.start_node();\n\n\n\n parse_label_literal(p);\n\n parse_end_of_stmt(p);\n\n\n\n p.end_node(NodeKind::LabelStmt);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 36, "score": 213060.95596958368 }, { "content": "/// メソッド起動文のパース。(`objects(i)->\"method\" a, b, c` など)\n\nfn parse_invoke_stmt(p: &mut Px) {\n\n assert_eq!(p.next(), Token::Ident);\n\n\n\n p.start_node();\n\n parse_call_expr(p);\n\n\n\n // エラー回復\n\n if !p.next().at_end_of_stmt() && p.next() != Token::SlimArrow {\n\n p.start_node();\n\n while !p.next().at_end_of_stmt() && p.next() != Token::SlimArrow {\n\n p.bump();\n\n }\n\n p.end_node(NodeKind::Other);\n\n }\n\n\n\n p.eat(Token::SlimArrow);\n\n\n\n parse_args(p);\n\n\n\n parse_end_of_stmt(p);\n\n p.end_node(NodeKind::InvokeStmt);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 37, "score": 213060.95596958368 }, { "content": "fn parse_assign_stmt(p: &mut Px) {\n\n assert_eq!(p.next(), Token::Ident);\n\n\n\n p.start_node();\n\n parse_call_expr(p);\n\n\n\n // エラー回復\n\n if !p.next().at_end_of_stmt() && !p.next().is_assignment_operator() {\n\n p.start_node();\n\n while !p.next().at_end_of_stmt() && !p.next().is_assignment_operator() {\n\n p.bump();\n\n }\n\n p.end_node(NodeKind::Other);\n\n }\n\n\n\n if p.next().is_assignment_operator() {\n\n p.bump();\n\n }\n\n\n\n parse_args(p);\n\n\n\n parse_end_of_stmt(p);\n\n p.end_node(NodeKind::AssignStmt);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 38, "score": 213060.95596958368 }, { "content": "fn parse_end_of_stmt(p: &mut Px) {\n\n if !p.at_eof() && !p.next().at_end_of_stmt() {\n\n p.start_node();\n\n\n\n while !p.at_eof() && !p.next().at_end_of_stmt() {\n\n p.bump();\n\n }\n\n\n\n p.end_node(NodeKind::Other);\n\n }\n\n\n\n if !p.at_eof() {\n\n assert!(p.next().at_end_of_stmt());\n\n p.bump();\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 39, "score": 213060.95596958368 }, { "content": "fn parse_ambiguous_stmt(p: &mut Px) {\n\n assert_eq!(p.next(), Token::Ident);\n\n\n\n match look_ahead_stmt(p) {\n\n StmtKind::Assign => parse_assign_stmt(p),\n\n StmtKind::Command => parse_command_stmt(p),\n\n StmtKind::Invoke => parse_invoke_stmt(p),\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 40, "score": 213060.95596958368 }, { "content": "fn parse_command_stmt(p: &mut Px) {\n\n assert!(p.next().is_command_first());\n\n\n\n p.start_node();\n\n\n\n if p.next().is_control_keyword() {\n\n p.start_node();\n\n p.bump();\n\n p.end_node(NodeKind::Ident);\n\n } else {\n\n parse_name(p);\n\n }\n\n\n\n parse_command_stmt_contents(p);\n\n\n\n parse_end_of_stmt(p);\n\n p.end_node(NodeKind::CommandStmt);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 41, "score": 213060.95596958368 }, { "content": "fn parse_command_stmt_contents(p: &mut 
Px) {\n\n if p.next().is_jump_modifier() {\n\n p.bump();\n\n }\n\n\n\n if p.next().is_arg_first() {\n\n parse_args(p);\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_stmt.rs", "rank": 42, "score": 211287.93735276643 }, { "content": "fn parse_end_of_stmt(px: &mut Px) {\n\n while !px.next().is_end_of_stmt() {\n\n px.skip();\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 43, "score": 209555.28644092003 }, { "content": "fn parse_param_ty(px: &mut Px) -> Option<(PParamTy, PToken)> {\n\n if px.next() != TokenKind::Ident {\n\n return None;\n\n }\n\n\n\n let param_ty = PParamTy::parse(px.next_token().body_text())?;\n\n let token = px.bump();\n\n Some((param_ty, token))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 44, "score": 207027.54092729726 }, { "content": "fn parse_args_in_paren(px: &mut Px) -> Option<(PToken, Vec<PArg>, Option<PToken>)> {\n\n let left_paren = px.eat(TokenKind::LeftParen)?;\n\n let args = parse_args(px);\n\n let right_paren_opt = px.eat(TokenKind::RightParen);\n\n Some((left_paren, args, right_paren_opt))\n\n}\n\n\n\npub(crate) fn parse_compound(px: &mut Px) -> Option<PCompound> {\n\n let name = px.eat(TokenKind::Ident)?;\n\n\n\n match px.next() {\n\n TokenKind::Dot => {\n\n let mut args = vec![];\n\n while let Some(dot) = px.eat(TokenKind::Dot) {\n\n let expr_opt = parse_expr(px);\n\n args.push(PDotArg { dot, expr_opt });\n\n }\n\n Some(PCompound::Dots(PNameDot { name, args }))\n\n }\n\n TokenKind::LeftParen => {\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_expr.rs", "rank": 45, "score": 204253.21802514076 }, { "content": "fn parse_params(p: &mut Px) {\n\n // 引数の省略がある parse_args とは異なる方法でカンマや構文エラーを処理する。\n\n\n\n loop {\n\n // エラー回復\n\n if !p.at_eof() && p.next() != Token::Ident && !p.next().at_end_of_pp() {\n\n p.start_node();\n\n while !p.at_eof() && p.next() != Token::Ident && !p.next().at_end_of_pp() {\n\n p.bump();\n\n }\n\n p.end_node(NodeKind::Other);\n\n }\n\n\n\n if p.next() != Token::Ident {\n\n break;\n\n }\n\n\n\n p.start_node();\n\n parse_param_type(p);\n\n\n\n if p.next() == Token::Ident {\n\n parse_name(p);\n\n }\n\n\n\n p.eat(Token::Comma);\n\n p.end_node(NodeKind::Param);\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_pp.rs", "rank": 46, "score": 203343.04908970091 }, { "content": "fn parse_param_type(p: &mut Px) {\n\n if p.next() == Token::Ident {\n\n let text = p.next_data().text();\n\n if PARAM_TY_TABLE.iter().any(|&(_, word)| word == text) {\n\n p.bump();\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_pp.rs", "rank": 47, "score": 201368.80950597455 }, { "content": "fn parse_command_stmt(px: &mut Px) -> Option<PCommandStmt> {\n\n let command = px.bump();\n\n let jump_modifier_opt = parse_jump_modifier(px);\n\n let args = parse_args(px);\n\n parse_end_of_stmt(px);\n\n\n\n Some(PCommandStmt {\n\n command,\n\n jump_modifier_opt,\n\n args,\n\n })\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 48, "score": 199561.43397858163 }, { "content": "fn parse_assign_stmt(px: &mut Px) -> Option<PAssignStmt> {\n\n let left = parse_compound(px)?;\n\n\n\n let op_opt = if px.next().is_assign_op() {\n\n Some(px.bump())\n\n } else {\n\n None\n\n };\n\n\n\n let args = parse_args(px);\n\n parse_end_of_stmt(px);\n\n\n\n Some(PAssignStmt { left, op_opt, args })\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 49, 
"score": 199561.43397858163 }, { "content": "fn parse_invoke_stmt(px: &mut Px) -> Option<PInvokeStmt> {\n\n let left = parse_compound(px)?;\n\n let arrow_opt = px.eat(TokenKind::SlimArrow);\n\n let method_opt = parse_atomic_expr(px);\n\n let args = parse_args(px);\n\n parse_end_of_stmt(px);\n\n\n\n Some(PInvokeStmt {\n\n left,\n\n arrow_opt,\n\n method_opt,\n\n args,\n\n })\n\n}\n\n\n\npub(crate) fn parse_stmt(px: &mut Px) -> Option<PStmt> {\n\n let stmt_opt = match px.next() {\n\n TokenKind::Ident => parse_expr_like_stmt(px),\n\n TokenKind::Star => parse_label(px).map(PStmt::Label),\n\n TokenKind::Hash => parse_preproc_stmt(px),\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 50, "score": 199561.43397858163 }, { "content": "fn parse_global_stmt_contents(p: &mut Px) {\n\n assert!(p.next_data().text() == \"global\");\n\n\n\n p.bump();\n\n}\n\n\n\npub(crate) fn parse_pp_stmt(p: &mut Px) {\n\n assert_eq!(p.next(), Token::Hash);\n\n\n\n p.start_node();\n\n\n\n p.bump();\n\n\n\n let kind = match p.next_data().text() {\n\n \"deffunc\" | \"defcfunc\" => {\n\n parse_deffunc_like_stmt_contents(p);\n\n NodeKind::DeffuncPp\n\n }\n\n \"module\" => {\n\n parse_module_stmt_contents(p);\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_pp.rs", "rank": 51, "score": 199387.93294065638 }, { "content": "fn go(node: SyntaxNode, gsc: &mut GlobalSymbolCollection) {\n\n for child in node.child_nodes() {\n\n match child.kind() {\n\n NodeKind::Ident => {\n\n let name = AName::cast(&child).unwrap();\n\n gsc.name_context.set_enclosures(\n\n name,\n\n gsc.current_deffunc_opt.clone(),\n\n gsc.current_module_opt.clone(),\n\n );\n\n }\n\n NodeKind::LabelStmt => {\n\n // gsc.symbols.push(GlobalSymbol::Label {\n\n // label_stmt: Rc::new(child.clone()),\n\n // module_stmt_opt: gsc.current_module_opt.clone(),\n\n // });\n\n }\n\n NodeKind::Param => {\n\n let enclosing_deffunc = gsc.current_deffunc_opt.clone();\n\n gsc.symbols\n", "file_path": "hsp3-forgery/hf_core/src/analysis/get_global_symbols.rs", "rank": 52, "score": 195365.39601525036 }, { "content": "fn parse_jump_modifier(px: &mut Px) -> Option<(PJumpModifier, PToken)> {\n\n if px.next() != TokenKind::Ident {\n\n return None;\n\n }\n\n\n\n let jump_modifier = PJumpModifier::parse(px.next_token().body_text())?;\n\n let token = px.bump();\n\n Some((jump_modifier, token))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_stmt.rs", "rank": 53, "score": 194228.55493097004 }, { "content": "struct ModuleCloseFn {\n\n void operator()(HMODULE h) const {\n\n FreeLibrary(h);\n\n }\n\n};\n\n\n\nusing ModuleHandle = std::unique_ptr<std::remove_pointer<HMODULE>::type, ModuleCloseFn>;\n\n\n\nstatic auto load_library(std::wstring const& full_path) {\n\n auto handle = LoadLibrary(full_path.c_str());\n\n if (handle == nullptr) {\n\n fail_with(L\"Couldn't load library \" + full_path);\n\n }\n\n\n\n return ModuleHandle{ handle };\n\n}\n\n\n", "file_path": "hsp3-debug-ginger/hsp3debug/hsp3-debug-ginger/dllmain.cpp", "rank": 54, "score": 192516.68369641702 }, { "content": "/// ディレクトリにあるヘルプソースファイルを列挙する\n\nfn read_dir(hsphelp_dir: &Path, out: &mut Vec<PathBuf>) -> io::Result<()> {\n\n for entry in fs::read_dir(&hsphelp_dir)? 
{\n\n let entry = entry?;\n\n\n\n if entry.path().extension().map_or(true, |ext| ext != \"hs\") {\n\n continue;\n\n }\n\n\n\n out.push(hsphelp_dir.join(entry.path()));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/help_source.rs", "rank": 55, "score": 189284.79711012324 }, { "content": "type Env = HashMap<String, Symbol>;\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/name_resolution.rs", "rank": 58, "score": 176124.6893855652 }, { "content": "fn at_deffunc_like_keyword(p: &Px) -> bool {\n\n p.next() == Token::Ident && {\n\n match p.next_data().text() {\n\n \"deffunc\" => true,\n\n \"defcfunc\" => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_pp.rs", "rank": 59, "score": 175491.739329395 }, { "content": "struct Ctx<'a> {\n\n public: &'a mut APublicState,\n\n\n\n doc: DocId,\n\n\n\n /// ドキュメント内のシンボル\n\n symbols: Vec<ASymbolData>,\n\n\n\n /// ドキュメント内の環境\n\n env: HashMap<ALocalScope, AEnv>,\n\n\n\n deffunc_len: usize,\n\n module_len: usize,\n\n scope: ALocalScope,\n\n}\n\n\n\nimpl Ctx<'_> {\n\n fn module_scope(&self) -> AScope {\n\n AScope::Local(self.module_local_scope())\n\n }\n\n\n\n fn module_local_scope(&self) -> ALocalScope {\n\n ALocalScope {\n\n module_opt: self.scope.module_opt.clone(),\n\n deffunc_opt: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/analysis/var.rs", "rank": 60, "score": 172506.32616864197 }, { "content": "fn do_scan_all(watched_dir: &Path, changed_files: &mut HashSet<PathBuf>) -> Option<()> {\n\n let glob_pattern = format!(\"{}/**/*.hsp\", watched_dir.to_str()?);\n\n\n\n debug!(\"ファイルリストの取得 '{}'\", glob_pattern);\n\n\n\n let entries = match glob::glob(&glob_pattern) {\n\n Err(err) => {\n\n warn!(\"ファイルリストの取得 {:?}\", err);\n\n return None;\n\n }\n\n Ok(entries) => entries,\n\n };\n\n\n\n for entry in entries {\n\n match entry {\n\n Ok(path) => {\n\n changed_files.insert(path);\n\n }\n\n Err(err) => warn!(\"ファイルエントリの取得 {:?}\", err),\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/lang_service/file_watcher.rs", "rank": 61, "score": 169950.70621703548 }, { "content": "fn is_preproc_statement(doc: DocId, pos: Pos16, wa: &AWorkspaceAnalysis) -> bool {\n\n let tokens = match wa.doc_syntax_map.get(&doc) {\n\n Some(syntax) => &syntax.tokens,\n\n None => return false,\n\n };\n\n\n\n // '#' から文末の間においてプリプロセッサ関連の補完を有効化する。行継続に注意。判定が難しいので構文木を使ったほうがいいかもしれない。\n\n\n\n let row = pos.row as usize;\n\n\n\n // 次の行の最初のトークンを探す。\n\n let upperbound =\n\n match tokens.binary_search_by_key(&(row + 1), |token| token.body.loc.start_row()) {\n\n Ok(it) | Err(it) => it,\n\n };\n\n\n\n // 近くにあるトークンと補完位置の位置関係を調べる。\n\n // (補完位置の付近にトークンがないとき、次の '#' の検索だけだとプリプロセッサ行の後ろがすべて引っかかってしまう。)\n\n let last = tokens.get(upperbound.saturating_sub(1));\n\n let touched = last.map_or(false, |t| pos <= t.behind().end());\n", "file_path": "hsp3-analyzer-mini/ham-core/src/assists/completion.rs", "rank": 62, "score": 159628.88610934428 }, { "content": "\tSTRUCTDAT *param;\t\t\t\t\t// 引数パラメーターリスト\n", "file_path": "hsp3-debug-self/hsp3-debug-self/hspsdk/hsp3struct.h", "rank": 63, "score": 158364.6665404722 }, { "content": "fn threads() -> Vec<dap::Thread> {\n\n vec![dap::Thread {\n\n id: MAIN_THREAD_ID,\n\n name: MAIN_THREAD_NAME.to_owned(),\n\n }]\n\n}\n\n\n\n/// グローバル変数からなるスコープの変数参照Id\n\nconst GLOBAL_SCOPE_REF: i64 = 1;\n\n\n\n/// HSP の変数や変数の要素、あるいは変数をまとめるもの (モジュールなど) を指し示すもの。\n\n#[derive(Clone, Debug)]\n\npub(crate) enum VarPath {\n\n Globals,\n\n 
Static(usize),\n\n}\n\n\n\n/// Variables reference. VSCode が変数や変数要素を指し示すために使う整数値。\n\npub(crate) type VarRef = i64;\n\n\n", "file_path": "hsp3-debug-ginger/adapter/src/app.rs", "rank": 64, "score": 155830.7845342757 }, { "content": "fn parse_factor(p: &mut Px) {\n\n match p.next() {\n\n Token::Ident => parse_call_expr(p),\n\n Token::LeftParen => parse_group_expr(p),\n\n Token::CharStart => parse_char_literal(p),\n\n Token::FloatInt => parse_double_literal(p),\n\n Token::Minus => parse_unary_expr(p),\n\n Token::Star => parse_label_literal(p),\n\n Token::StrStart => parse_str_literal(p),\n\n _ if p.next().is_int_literal_first() => parse_int_literal(p),\n\n _ => unreachable!(\"is_expr_first\"),\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_expr.rs", "rank": 65, "score": 152739.10008675026 }, { "content": "fn try_resolve_name(\n\n name: &AName,\n\n symbols: &mut Symbols,\n\n env: &mut Env,\n\n name_context: &mut NameContext,\n\n) -> Option<Symbol> {\n\n // FIXME: スコープを考慮する\n\n let unqualified_name = name.unqualified_name();\n\n\n\n // パラメータか?\n\n if let Some(param) = name_context\n\n .enclosing_deffunc(&name)\n\n .into_iter()\n\n .flat_map(|deffunc| symbols.params(&deffunc))\n\n .filter_map(|param| {\n\n let param_name = symbols.unqualified_name(&param)?;\n\n if unqualified_name == param_name {\n\n Some(param)\n\n } else {\n\n None\n", "file_path": "hsp3-forgery/hf_core/src/analysis/name_resolution.rs", "rank": 66, "score": 152104.0948550818 }, { "content": "#[derive(Default)]\n\nstruct GlobalSymbolCollection {\n\n current_module_opt: Option<Symbol>,\n\n current_deffunc_opt: Option<Symbol>,\n\n name_context: NameContext,\n\n symbols: Symbols,\n\n}\n\n\n\nimpl GlobalSymbolCollection {\n\n fn new() -> Self {\n\n Default::default()\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/analysis/get_global_symbols.rs", "rank": 67, "score": 151753.11769902188 }, { "content": "fn tokenize_binary(t: &mut TokenizeContext) {\n\n while char_is_binary(t.next()) {\n\n t.bump();\n\n }\n\n t.commit(Token::Binary);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 68, "score": 151424.96481417006 }, { "content": "fn parse_group_expr(p: &mut Px) {\n\n assert_eq!(p.next(), Token::LeftParen);\n\n\n\n p.start_node();\n\n p.bump();\n\n\n\n if p.next().is_expr_first() {\n\n parse_expr(p);\n\n }\n\n\n\n p.eat(Token::RightParen);\n\n p.end_node(NodeKind::GroupExpr);\n\n}\n\n\n\npub(crate) fn parse_call_expr(p: &mut Px) {\n\n assert_eq!(p.next(), Token::Ident);\n\n\n\n parse_name(p);\n\n\n\n // FIXME: . 
記法\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_expr.rs", "rank": 69, "score": 151424.96481417006 }, { "content": "fn parse_end_of_pp(p: &mut Px) {\n\n if !p.at_eof() && !p.next().at_end_of_pp() {\n\n p.start_node();\n\n while !p.at_eof() && !p.next().at_end_of_pp() {\n\n p.bump();\n\n }\n\n p.end_node(NodeKind::Other);\n\n }\n\n\n\n if !p.at_eof() {\n\n p.bump();\n\n }\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_pp.rs", "rank": 70, "score": 151424.96481417006 }, { "content": "fn parse_binary_expr(p: &mut Px) {\n\n assert!(p.next().is_expr_first());\n\n\n\n do_parse_binary_expr(Some(BinaryOpLevel::LOWEST), p);\n\n}\n\n\n\npub(crate) fn parse_expr(p: &mut Px) {\n\n parse_binary_expr(p)\n\n}\n\n\n\n/// 引数リスト (カンマ区切りの式の並び) を解析する。\n\npub(crate) fn parse_args(p: &mut Px) {\n\n let mut ends_with_comma = false;\n\n\n\n loop {\n\n // エラー回復\n\n if !p.at_eof() && !p.next().is_arg_first() && !p.next().at_end_of_args() {\n\n p.start_node();\n\n while !p.at_eof() && !p.next().is_arg_first() && !p.next().at_end_of_args() {\n\n p.bump();\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_expr.rs", "rank": 71, "score": 151424.96481417006 }, { "content": "/// 小数部を字句解析する。(`3.14` などの `14` の部分)\n\nfn tokenize_fraction(t: &mut TokenizeContext) {\n\n assert!(t.next().is_ascii_digit());\n\n\n\n while t.next().is_ascii_digit() {\n\n t.bump();\n\n }\n\n t.commit(Token::Fraction);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 72, "score": 151424.96481417006 }, { "content": "fn tokenize_hex(t: &mut TokenizeContext) {\n\n while t.next().is_ascii_hexdigit() {\n\n t.bump();\n\n }\n\n t.commit(Token::Hex);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 73, "score": 151424.96481417006 }, { "content": "/// 指数部を字句解析する。(`1e+9` などの `e+9` の部分)\n\nfn tokenize_exponent(t: &mut TokenizeContext) {\n\n assert!(t.next() == 'e' || t.next() == 'E');\n\n\n\n t.bump();\n\n t.commit(Token::ExpChar);\n\n\n\n // 指数部の符号\n\n let exp_sign = t.eat(\"+\") || t.eat(\"-\");\n\n if exp_sign {\n\n t.commit(Token::ExpSign);\n\n }\n\n\n\n // 指数部の数値\n\n if !exp_sign && !t.next().is_ascii_digit() {\n\n return;\n\n }\n\n\n\n while t.next().is_ascii_digit() {\n\n t.bump();\n\n }\n\n t.commit(Token::ExpDigit);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 74, "score": 151424.96481417006 }, { "content": "/// プリプロセッサ命令における改行のエスケープや、\n\n/// 複数行コメントや複数行文字列リテラルの中に改行を\n\nfn tokenize_segment(t: &mut TokenizeContext) {\n\n // この時点で t は行頭に位置する。\n\n // 行頭のスペースやコメントを除去する。(複数行コメントの中に改行があっても1行とみなす。)\n\n tokenize_spaces_comments(false, t);\n\n\n\n let pp = if t.eat(\"#\") {\n\n t.commit(Token::Hash);\n\n true\n\n } else {\n\n false\n\n };\n\n\n\n while !t.at_eof() && !tokenize_eol(t) {\n\n let ok = tokenize_space(pp, t)\n\n || tokenize_comment(t)\n\n || tokenize_number(pp, t)\n\n || tokenize_char(t)\n\n || tokenize_str(t)\n\n || tokenize_multiline_str(t)\n\n || tokenize_ident(t)\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 75, "score": 151424.96481417006 }, { "content": "fn parse_unary_expr(p: &mut Px) {\n\n assert_eq!(p.next(), Token::Minus);\n\n\n\n p.start_node();\n\n p.bump();\n\n\n\n if p.next().is_factor_first() {\n\n parse_factor(p);\n\n }\n\n p.end_node(NodeKind::UnaryExpr);\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/parse/parse_expr.rs", "rank": 76, "score": 151424.96481417006 }, { "content": "fn parse_privacy(px: &mut Px) -> Option<(PPrivacy, PToken)> {\n\n if 
px.next() != TokenKind::Ident {\n\n return None;\n\n }\n\n\n\n let privacy = PPrivacy::parse(px.next_token().body_text())?;\n\n let token = px.bump();\n\n Some((privacy, token))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 77, "score": 151235.25496268575 }, { "content": "/// 改行でない空白文字を読み飛ばす。\n\nfn eat_blank(tx: &mut Tx) {\n\n loop {\n\n match tx.next() {\n\n ' ' | '\\t' | '\\u{3000}' => {\n\n tx.bump();\n\n }\n\n '\\r' => match tx.nth(1) {\n\n '\\n' => break,\n\n _ => tx.bump(),\n\n },\n\n '\\n' => break,\n\n c if c.is_whitespace() => {\n\n tx.bump();\n\n }\n\n _ => break,\n\n }\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 78, "score": 150144.67260716038 }, { "content": "/// 行末まで読み飛ばす。改行自体は読まない。\n\nfn eat_line(tx: &mut Tx) {\n\n match tx.find(\"\\n\") {\n\n Some(mut len) => {\n\n // CRLF の LF が見つかったときは CR の前に戻る。\n\n if len >= 1 && tx.nth_byte(len - 1) == b'\\r' {\n\n len -= 1;\n\n }\n\n\n\n tx.bump_many(len)\n\n }\n\n\n\n // 改行が見つからない場合は、いま最終行なので、ファイルの末尾まで読む。\n\n None => tx.bump_all(),\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 79, "score": 150144.67260716038 }, { "content": "fn eat_digits(tx: &mut Tx) {\n\n while tx.next().is_ascii_digit() {\n\n tx.bump();\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 80, "score": 150144.67260716038 }, { "content": "#[allow(unused)]\n\nfn utf8_to_cp932(s: &str) -> Vec<u8> {\n\n let cp932 = encoding::label::encoding_from_windows_code_page(932).unwrap();\n\n let mut buf = cp932.encode(s, encoding::EncoderTrap::Replace).unwrap();\n\n buf.push(0);\n\n buf\n\n}\n\n\n\nmod tests {\n\n #[test]\n\n fn test_encoding_cp932() {\n\n let hello = vec![\n\n 130, 177, 130, 241, 130, 201, 130, 191, 130, 205, 144, 162, 138, 69, 0,\n\n ];\n\n assert_eq!(super::utf8_to_cp932(\"こんにちは世界\"), hello);\n\n assert_eq!(super::cp932_to_utf8(&hello), \"こんにちは世界\");\n\n\n\n assert_eq!(super::cp932_to_utf8(&super::utf8_to_cp932(\"✔\")), \"?\");\n\n }\n\n}\n", "file_path": "hsp3-debug-ginger/adapter/src/helpers.rs", "rank": 81, "score": 149334.17714879566 }, { "content": "fn parse_const_ty(px: &mut Px) -> Option<(PConstTy, PToken)> {\n\n if px.next() != TokenKind::Ident {\n\n return None;\n\n }\n\n\n\n let const_ty = PConstTy::parse(px.next_token().body_text())?;\n\n let token = px.bump();\n\n Some((const_ty, token))\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 82, "score": 149056.88269120484 }, { "content": "#[allow(unused)]\n\nfn cp932_to_utf8(mut s: &[u8]) -> String {\n\n let cp932 = encoding::label::encoding_from_windows_code_page(932).unwrap();\n\n\n\n while let Some(0) = s.last() {\n\n s = &s[0..s.len() - 1];\n\n }\n\n cp932.decode(s, encoding::DecoderTrap::Replace).unwrap()\n\n}\n\n\n\n/// utf-8 の文字列を utf-8 版ではない HSP の文字列 (cp932 エンコード) に変換する。\n", "file_path": "hsp3-debug-ginger/adapter/src/helpers.rs", "rank": 83, "score": 148997.7648486288 }, { "content": "fn eat_hex_digits(tx: &mut Tx) {\n\n while tx.next().is_ascii_hexdigit() {\n\n tx.bump();\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 84, "score": 148896.93273971768 }, { "content": "fn eat_binary_digits(tx: &mut Tx) {\n\n while let '0' | '1' = tx.next() {\n\n tx.bump();\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 85, "score": 148896.93273971768 }, { "content": "fn 
parse_end_of_preproc(px: &mut Px) {\n\n while !px.next().is_end_of_preproc() {\n\n px.skip()\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 86, "score": 148896.93273971768 }, { "content": "fn switch_on_args(mut args: ArgsOs) {\n\n // Skip self path.\n\n args.next();\n\n\n\n args.next()\n\n .filter(|a| a == \"--hsp\")\n\n .expect(\"Expected --hsp\");\n\n\n\n let hsp3_home = PathBuf::from(args.next().unwrap());\n\n\n\n let arg = parse_args(args).unwrap_or_else(|err| {\n\n eprintln!(\"{}\", err);\n\n exit_with_help();\n\n });\n\n\n\n match arg {\n\n Arg::Version => exit_with_version(),\n\n Arg::Help => exit_with_help(),\n\n Arg::Lsp => start_lsp_server(hsp3_home),\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-lsp-server-exe/src/main.rs", "rank": 87, "score": 148896.93273971768 }, { "content": "#[cfg(windows)]\n\nfn ansi_to_wide_string(s: &[u8]) -> Vec<u16> {\n\n let size = unsafe {\n\n winapi::um::stringapiset::MultiByteToWideChar(\n\n winapi::um::winnls::CP_ACP,\n\n 0,\n\n s.as_ptr() as *mut i8,\n\n s.len() as i32,\n\n ptr::null_mut(),\n\n 0,\n\n )\n\n } as usize;\n\n\n\n let buf = vec![0; size];\n\n unsafe {\n\n winapi::um::stringapiset::MultiByteToWideChar(\n\n winapi::um::winnls::CP_ACP,\n\n 0,\n\n s.as_ptr() as *mut i8,\n\n s.len() as i32,\n\n buf.as_ptr() as *mut u16,\n\n buf.len() as i32,\n\n )\n\n };\n\n\n\n buf\n\n}\n\n\n", "file_path": "hsp3-debug-ginger/adapter/src/helpers.rs", "rank": 88, "score": 147982.53506939707 }, { "content": "#[cfg(windows)]\n\nfn ansi_from_wide_string(s: &[u16]) -> Vec<u8> {\n\n let size = unsafe {\n\n winapi::um::stringapiset::WideCharToMultiByte(\n\n winapi::um::winnls::CP_ACP,\n\n 0,\n\n s.as_ptr() as *mut u16,\n\n s.len() as i32,\n\n ptr::null_mut(),\n\n 0,\n\n ptr::null(),\n\n ptr::null_mut(),\n\n )\n\n } as usize;\n\n\n\n let buf = vec![0; size + 1];\n\n unsafe {\n\n winapi::um::stringapiset::WideCharToMultiByte(\n\n winapi::um::winnls::CP_ACP,\n\n 0,\n\n s.as_ptr() as *mut u16,\n", "file_path": "hsp3-debug-ginger/adapter/src/helpers.rs", "rank": 89, "score": 147982.53506939707 }, { "content": "/// 10進数の数字の直後にある、小数部や指数部を字句解析する。\n\nfn tokenize_digit_suffix(tx: &mut TokenizeContext) {\n\n // 小数部\n\n if tx.eat(\".\") {\n\n eat_digits(tx);\n\n }\n\n\n\n // 指数部\n\n if let 'e' | 'E' = tx.next() {\n\n tx.bump();\n\n\n\n if let '+' | '-' = tx.next() {\n\n tx.bump();\n\n }\n\n\n\n eat_digits(tx);\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 90, "score": 147680.51929715415 }, { "content": "fn eat_ident(pattern: &str, px: &mut Px) -> Option<PToken> {\n\n if px.next() == TokenKind::Ident && px.next_token().body_text() == pattern {\n\n Some(px.bump())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "hsp3-analyzer-mini/ham-core/src/parse/parse_preproc.rs", "rank": 91, "score": 146768.50719287246 }, { "content": "fn tokenize_pun(t: &mut TokenizeContext) -> bool {\n\n for &(token, pun_text) in PUN_TABLE {\n\n if t.eat(pun_text) {\n\n t.commit(token);\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 92, "score": 145053.99915316913 }, { "content": "fn tokenize_comment(t: &mut TokenizeContext) -> bool {\n\n if t.eat(\";\") || t.eat(\"//\") {\n\n while !t.at_eof() && !char_is_eol(t.next()) {\n\n t.bump();\n\n }\n\n t.commit(Token::Comment);\n\n return true;\n\n }\n\n\n\n if t.eat(\"/*\") {\n\n while !t.at_eof() && !t.eat(\"*/\") {\n\n t.bump();\n\n }\n\n 
t.commit(Token::Comment);\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 93, "score": 145053.99915316913 }, { "content": "fn tokenize_char(t: &mut TokenizeContext) -> bool {\n\n if t.eat(\"'\") {\n\n t.commit(Token::CharStart);\n\n\n\n tokenize_char_or_str_content(t, '\\'');\n\n\n\n if t.eat(\"'\") {\n\n t.commit(Token::CharEnd);\n\n }\n\n\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 94, "score": 145053.99915316913 }, { "content": "fn tokenize_ident(t: &mut TokenizeContext) -> bool {\n\n if char_is_ident_first(t.next()) || t.next() == '@' {\n\n while char_is_ident(t.next()) {\n\n t.bump();\n\n }\n\n\n\n let token = Token::parse_keyword(t.current_text()).unwrap_or(Token::Ident);\n\n t.commit(token);\n\n\n\n // モジュール名を指定する `@` と直後の名前を字句解析する。(念のため `a@b@c` みたいなのも解釈する。)\n\n loop {\n\n if t.eat(\"@\") {\n\n t.commit(Token::IdentAtSign);\n\n continue;\n\n }\n\n\n\n if char_is_ident_first(t.next()) {\n\n while char_is_ident(t.next()) {\n\n t.bump();\n\n }\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 95, "score": 145053.99915316913 }, { "content": "fn tokenize_eol(t: &mut TokenizeContext) -> bool {\n\n if t.is_followed_by(\"\\r\\n\") || t.next() == '\\n' {\n\n t.commit(Token::Semi);\n\n\n\n loop {\n\n while char_is_space(t.next()) {\n\n t.bump();\n\n }\n\n\n\n if t.eat(\"\\r\\n\") || t.eat(\"\\n\") {\n\n continue;\n\n }\n\n\n\n break;\n\n }\n\n\n\n t.commit(Token::Eol);\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 96, "score": 145053.99915316913 }, { "content": "fn tokenize_str(t: &mut TokenizeContext) -> bool {\n\n if t.eat(\"\\\"\") {\n\n t.commit(Token::StrStart);\n\n\n\n tokenize_char_or_str_content(t, '\"');\n\n\n\n if t.eat(\"\\\"\") {\n\n t.commit(Token::StrEnd);\n\n }\n\n\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 97, "score": 145053.99915316913 }, { "content": "/// 何文字か先読みして、次の字句を決定する。\n\nfn lookahead(tx: &mut Tx) -> Lookahead {\n\n match tx.next() {\n\n '\\0' => Lookahead::Eof,\n\n '\\r' => match tx.nth(1) {\n\n '\\n' => Lookahead::CrLf,\n\n _ => Lookahead::Cr,\n\n },\n\n '\\n' => Lookahead::Lf,\n\n ' ' | '\\t' | '\\u{3000}' => {\n\n // U+3000: 全角空白\n\n Lookahead::Blank\n\n }\n\n '0' => match tx.nth(1) {\n\n 'b' | 'B' => Lookahead::ZeroB,\n\n 'x' | 'X' => Lookahead::ZeroX,\n\n _ => Lookahead::Digit,\n\n },\n\n '$' => Lookahead::Dollar,\n\n '\\'' => Lookahead::SingleQuote,\n\n '\"' => Lookahead::DoubleQuote,\n", "file_path": "hsp3-analyzer-mini/ham-core/src/token/tokenize_rules.rs", "rank": 98, "score": 145053.99915316913 }, { "content": "fn tokenize_multiline_str(t: &mut TokenizeContext) -> bool {\n\n if t.eat(\"{\\\"\") {\n\n t.commit(Token::StrStart);\n\n\n\n // FIXME: 各行の最初のタブ文字は文字列リテラルの値に含まれないので、Token::Space にする。\n\n while !t.at_eof() && !t.is_followed_by(\"\\\"}\") {\n\n t.bump();\n\n }\n\n t.commit(Token::StrVerbatim);\n\n\n\n if t.eat(\"\\\"}\") {\n\n t.commit(Token::StrEnd);\n\n }\n\n\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "hsp3-forgery/hf_core/src/token/tokenize_rules.rs", "rank": 99, "score": 143806.25928572644 } ]
Rust
src/history.rs
nuta/nsh
4e90833e8d205d5311fbb118076568b810557c84
use crate::fuzzy::FuzzyVec; use crate::theme::ThemeColor; use std::collections::HashMap; use std::fs::{File, OpenOptions}; use std::io::{BufRead, BufReader, Write}; use std::path::{Path, PathBuf}; pub struct History { path: PathBuf, history: FuzzyVec, path2cwd: HashMap<String, PathBuf>, } impl History { pub fn new(history_file: &Path) -> History { let mut warned = false; let mut path2cwd = HashMap::new(); let mut history = FuzzyVec::new(); if let Ok(file) = File::open(history_file) { for (i, line) in BufReader::new(file).lines().enumerate() { if let Ok(line) = line { let cwd = line.split('\t').nth(1); let cmd = line.split('\t').nth(2); match (cwd, cmd, warned) { (Some(cwd), Some(cmd), _) => { path2cwd.insert(cmd.to_string(), PathBuf::from(cwd)); history.append(cmd.to_string()); } (_, _, false) => { print_err!( "nsh: warning: failed to parse ~/.nsh_history: at line {}", i + 1 ); warned = true; } (_, _, _) => (), } } } } History { path: history_file.to_owned(), history, path2cwd, } } pub fn len(&self) -> usize { self.history.len() } pub fn nth_last(&self, nth: usize) -> Option<String> { self.history.nth_last(nth) } pub fn search(&self, query: &str, filter_by_cwd: bool) -> Vec<(Option<ThemeColor>, &str)> { if filter_by_cwd { let cwd = std::env::current_dir().unwrap(); self.history .search(query) .iter() .filter(|(_, cmd)| match self.path2cwd.get(*cmd) { Some(path) if *path == cwd => true, Some(path) => { info!("path='{}' {}", path.display(), cwd.display()); false } _ => false, }) .cloned() .collect() } else { self.history.search(query) } } pub fn append(&mut self, cmd: &str) { if cmd.is_empty() { return; } if cmd.len() < 8 { return; } if let Some(last) = self.history.nth_last(0) { if last.as_str() == cmd { return; } } let cwd = std::env::current_dir().unwrap(); if let Ok(mut file) = OpenOptions::new().append(true).open(&self.path) { let time = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .expect("failed to get the UNIX timestamp") .as_secs() as usize; let dir = cwd.to_str().unwrap().to_owned(); file.write(format!("{}\t{}\t{}\n", time, dir, cmd).as_bytes()) .ok(); } self.history.append(cmd.to_string()); self.path2cwd.insert(cmd.to_string(), cwd); } } pub struct HistorySelector { offset: usize, input: String, } impl HistorySelector { pub fn new() -> HistorySelector { HistorySelector { offset: 0, input: String::new(), } } pub fn reset(&mut self) { self.offset = 0; } pub fn current(&self, history: &History) -> String { if self.offset == 0 { self.input.clone() } else { history.nth_last(self.offset - 1).unwrap() } } pub fn prev(&mut self, history: &History, input: &str) { if self.offset == 0 { self.input = input.to_string(); } let hist_len = history.len(); self.offset += 1; if self.offset >= hist_len { self.offset = hist_len; } } pub fn next(&mut self) { if self.offset > 0 { self.offset -= 1; } } }
use crate::fuzzy::FuzzyVec; use crate::theme::ThemeColor; use std::collections::HashMap; use std::fs::{File, OpenOptions}; use std::io::{BufRead, BufReader, Write}; use std::path::{Path, PathBuf}; pub struct History { path: PathBuf, history: FuzzyVec, path2cwd: HashMap<String, PathBuf>, } impl History { pub fn new(history_file: &Path) -> History { let mut warned = false; let mut path2cwd = HashMap::new(); let mut history = FuzzyVec::new(); if let Ok(file) = File::open(history_file) { for (i, line) in BufReader::new(file).lines().enumerate() { if let Ok(line) = line { let cwd = line.split('\t').nth(1); let cmd = line.split('\t').nth(2); match (cwd, cmd, warned) { (Some(cwd), Some(cmd), _) => { path2cwd.insert(cmd.to_string(), PathBuf::from(cwd)); history.append(cmd.to_string()); } (_, _, false) => { print_err!( "nsh: warning: failed to parse ~/.nsh_history: at line {}", i + 1 ); warned = true; } (_, _, _) => (), } } } } History { path: history_file.to_owned(), history, path2cwd, } } pub fn len(&self) -> usize { self.history.len() } pub fn nth_last(&self, nth: usize) -> Option<String> { self.history.nth_last(nth) } pub fn search(&self, query: &str, filter_by_cwd: bool) -> Vec<(Option<ThemeColor>, &str)> { if filter_by_cwd { let cwd = std::env::current_dir().unwrap(); self.history .search(query) .iter() .filter(|(_, cmd)| match self.path2cwd.get(*cmd) { Some(path) if *path == cwd => true, Some(path) => { info!("path='{}' {}", path.display(), cwd.display()); false } _ => false, }) .cloned() .collect() } else { self.history.search(query) } } pub fn append(&mut self, cmd: &str) { if cmd.is_empty() { return; } if cmd.len() < 8 { return; } if let Some(last) = self.history.nth_last(0) { if last.as_str() == cmd { return; } } let cwd = std::env::current_dir().unwrap(); if let Ok(mut file) = OpenOptions::new().append(true).open(&self.path) { let time = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .expect("failed to get the UNIX timestamp") .as_secs() as usize; let dir = cwd.to_str().unwrap().to_owned(); file.write(format!("{}\t{}\t{}\n", time, dir, cmd).as_bytes()) .ok(); } self.history.append(cmd.to_string()); self.path2cwd.insert(cmd.to_string(), cwd); } } pub struct HistorySelector { offset: usize, input: String, } impl HistorySelector {
pub fn reset(&mut self) { self.offset = 0; } pub fn current(&self, history: &History) -> String { if self.offset == 0 { self.input.clone() } else { history.nth_last(self.offset - 1).unwrap() } } pub fn prev(&mut self, history: &History, input: &str) { if self.offset == 0 { self.input = input.to_string(); } let hist_len = history.len(); self.offset += 1; if self.offset >= hist_len { self.offset = hist_len; } } pub fn next(&mut self) { if self.offset > 0 { self.offset -= 1; } } }
pub fn new() -> HistorySelector { HistorySelector { offset: 0, input: String::new(), } }
function_block-full_function
[ { "content": "fn path_completion(pattern: &str, only_dirs: bool) -> FuzzyVec {\n\n let home_dir = dirs::home_dir().unwrap();\n\n let current_dir = std::env::current_dir().unwrap();\n\n let mut dir = if pattern.is_empty() {\n\n current_dir.clone()\n\n } else if let Some(pattern) = pattern.strip_prefix('~') {\n\n home_dir.join(&pattern.trim_start_matches('/'))\n\n } else {\n\n PathBuf::from(pattern)\n\n };\n\n\n\n // \"/usr/loca\" -> \"/usr\"\n\n dir = if dir.is_dir() {\n\n dir\n\n } else {\n\n dir.pop();\n\n if dir.to_str().unwrap().is_empty() {\n\n current_dir.clone()\n\n } else {\n\n dir\n", "file_path": "src/mainloop.rs", "rank": 0, "score": 204274.7612708144 }, { "content": "pub fn parse(input: &str, cursor: usize) -> InputContext {\n\n let parser = ContextParser::new(input, cursor);\n\n parser.parse()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn short() {\n\n let input = \"\".to_owned();\n\n let cursor = 0;\n\n assert_eq!(\n\n parse(&input, cursor),\n\n InputContext {\n\n spans: vec![Span::Argv0(\"\".to_owned()),],\n\n nested: vec![],\n\n current_literal: Some(0..0),\n", "file_path": "src/context_parser.rs", "rank": 1, "score": 203694.5279248767 }, { "content": "fn get_repo_branch(git_dir: &str) -> String {\n\n let rebase_i_file = Path::new(git_dir).join(\"rebase-merge/head-name\");\n\n if rebase_i_file.exists() {\n\n // TODO: remove `refs/<type>/` prefixes.\n\n return std::fs::read_to_string(rebase_i_file)\n\n .unwrap()\n\n .trim()\n\n .to_owned();\n\n }\n\n\n\n let result = std::process::Command::new(\"git\")\n\n .arg(\"rev-parse\")\n\n .arg(\"--abbrev-ref\")\n\n .arg(\"HEAD\")\n\n .output();\n\n\n\n if let Ok(output) = result {\n\n String::from_utf8_lossy(&output.stdout)\n\n .into_owned()\n\n .trim()\n\n .to_owned()\n\n } else {\n\n \"\".to_owned()\n\n }\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 2, "score": 200559.01390817395 }, { "content": "pub fn match_pattern(pattern: &PatternWord, text: &str) -> bool {\n\n pattern_word_match(pattern, text).is_some()\n\n}\n\n\n", "file_path": "src/pattern.rs", "rank": 3, "score": 192578.66840235709 }, { "content": "pub fn match_pattern_all(pattern: &PatternWord, text: &str) -> bool {\n\n match pattern_word_match(pattern, text) {\n\n Some(MatchResult { start, end }) => start == 0 && end == text.len() - 1,\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "src/pattern.rs", "rank": 4, "score": 192578.66840235709 }, { "content": "pub fn highlight(ctx: &InputContext, shell: &mut Shell) -> String {\n\n use std::fmt::Write;\n\n\n\n let argv0_color = SetForegroundColor(Color::Green);\n\n let invalid_argv0_color = SetForegroundColor(Color::Red);\n\n let option_color = SetForegroundColor(Color::Yellow);\n\n let brace_color = SetForegroundColor(Color::Green);\n\n let quote_color = SetForegroundColor(Color::Cyan);\n\n let command_sep_color = SetForegroundColor(Color::Blue);\n\n let bold = SetAttribute(Attribute::Bold);\n\n let reset = SetAttribute(Attribute::Reset);\n\n\n\n let mut buf = String::new();\n\n let mut in_quote = false;\n\n for span in &ctx.spans {\n\n if in_quote {\n\n write!(buf, \"{}\", quote_color).ok();\n\n }\n\n\n\n match span {\n", "file_path": "src/highlight.rs", "rank": 5, "score": 187393.59575032155 }, { "content": "fn look_for_comp_file(cmd_name: &str) -> Option<String> {\n\n for dir in COMP_DIRS {\n\n for suffix in COMP_SUFFIXES {\n\n let s = format!(\"{}/{}{}\", dir, cmd_name, suffix);\n\n if is_normal_file(&s) {\n\n return Some(s);\n\n }\n\n }\n\n 
}\n\n\n\n None\n\n}\n\n\n", "file_path": "src/bash_server.rs", "rank": 6, "score": 184745.81411608105 }, { "content": "fn is_normal_file(s: &str) -> bool {\n\n let path = std::path::Path::new(s);\n\n path.exists() && path.is_file()\n\n}\n\n\n", "file_path": "src/bash_server.rs", "rank": 7, "score": 183184.35876938334 }, { "content": "// Guesses the completion function (assuming its name is \"_{cmd_name}\").\n\nfn guess_completion_cmd_name(cmd_name: &str) -> String {\n\n match cmd_name {\n\n \"git\" => \"__git_main\".to_owned(),\n\n _ => format!(\"_{}\", &cmd_name.replace('-', \"_\")),\n\n }\n\n}\n\n\n", "file_path": "src/bash_server.rs", "rank": 8, "score": 176501.47825226712 }, { "content": "fn get_git_dir() -> String {\n\n let result = std::process::Command::new(\"git\")\n\n .arg(\"rev-parse\")\n\n .arg(\"--git-dir\")\n\n .output();\n\n\n\n if let Ok(output) = result {\n\n String::from_utf8_lossy(&output.stdout)\n\n .into_owned()\n\n .trim()\n\n .to_owned()\n\n } else {\n\n warn!(\"failed to get the git dir\");\n\n \"\".to_owned()\n\n }\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 9, "score": 175949.27554172074 }, { "content": "#[inline]\n\nfn truncate(s: &str, len: usize) -> String {\n\n // TODO: Return &str\n\n s.chars().take(len).collect()\n\n}\n\n\n", "file_path": "src/mainloop.rs", "rank": 10, "score": 172461.00524709822 }, { "content": "/// Expands a word into a string. Words in a command span `\"$(echo foo bar)\"` are\n\n/// joined by a whitespace.\n\npub fn expand_word_into_string(shell: &mut Shell, word: &Word) -> Result<String> {\n\n let ws: Vec<String> = expand_word_into_vec(shell, word, &shell.ifs())?\n\n .into_iter()\n\n .map(|w| w.into_string())\n\n .collect();\n\n\n\n Ok(ws.join(\"\"))\n\n}\n\n\n", "file_path": "src/expand.rs", "rank": 11, "score": 162220.62713012777 }, { "content": "/// Returns the length of the last line excluding escape sequences.\n\npub fn draw_prompt(prompt: &Prompt) -> (String, usize) {\n\n let mut len = 0;\n\n let mut buf = String::new();\n\n for span in &prompt.spans {\n\n match span {\n\n Span::Literal(s) => {\n\n len += s.len();\n\n buf.push_str(s)\n\n }\n\n Span::Color(c) => match c {\n\n Color::Red => buf.push_str(\"\\x1b[31m\"),\n\n Color::Blue => buf.push_str(\"\\x1b[34m\"),\n\n Color::Yellow => buf.push_str(\"\\x1b[33m\"),\n\n Color::Green => buf.push_str(\"\\x1b[32m\"),\n\n Color::Cyan => buf.push_str(\"\\x1b[36m\"),\n\n Color::Magenta => buf.push_str(\"\\x1b[35m\"),\n\n Color::Bold => buf.push_str(\"\\x1b[1m\"),\n\n Color::Underline => buf.push_str(\"\\x1b[4m\"),\n\n Color::Reset => buf.push_str(\"\\x1b[0m\"),\n\n },\n", "file_path": "src/prompt.rs", "rank": 12, "score": 161965.62929185102 }, { "content": "fn escape(s: &str) -> String {\n\n s.replace('\\\\', \"\\\\\\\\\").replace('$', \"\\\\$\")\n\n}\n\n\n", "file_path": "src/bash_server.rs", "rank": 13, "score": 157810.73160383292 }, { "content": "fn get_repo_action(git_dir: &str) -> Option<&'static str> {\n\n let git_dir = Path::new(git_dir);\n\n if git_dir.join(\"rebase-merge/interactive\").exists() {\n\n return Some(\"rebase-i\");\n\n }\n\n\n\n if git_dir.join(\"MERGE_HEAD\").exists() {\n\n return Some(\"merge\");\n\n }\n\n\n\n if git_dir.join(\"BISECT_LOG\").exists() {\n\n return Some(\"bisect\");\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 14, "score": 155976.92975010956 }, { "content": "/// Waits for an *any* process, i.e. `waitpid(-1)`, and then updates\n\n/// the process state recorded in the `shell`. 
Returns `None` it\n\n/// would block.\n\npub fn wait_for_any_process(shell: &mut Shell, no_block: bool) -> Option<Pid> {\n\n let options = if no_block {\n\n WaitPidFlag::WUNTRACED | WaitPidFlag::WNOHANG\n\n } else {\n\n WaitPidFlag::WUNTRACED\n\n };\n\n\n\n let result = waitpid(None, Some(options));\n\n let (pid, state) = match result {\n\n Ok(WaitStatus::Exited(pid, status)) => {\n\n trace!(\"exited: pid={} status={}\", pid, status);\n\n (pid, ProcessState::Completed(status))\n\n }\n\n Ok(WaitStatus::Signaled(pid, _signal, _)) => {\n\n // The `pid` process has been killed by `_signal`.\n\n (pid, ProcessState::Completed(-1))\n\n }\n\n Ok(WaitStatus::Stopped(pid, _signal)) => (pid, ProcessState::Stopped(pid)),\n\n Err(nix::errno::Errno::ECHILD) | Ok(WaitStatus::StillAlive) => {\n\n // No childs to be reported.\n", "file_path": "src/process.rs", "rank": 15, "score": 154813.48350965965 }, { "content": "/// Run CondEx command (`[[ ... ]]`).\n\npub fn evaluate_cond(shell: &mut Shell, cond: &CondExpr) -> Result<bool> {\n\n macro_rules! eval_as_string {\n\n ($expr:expr) => {\n\n evaluate_cond_primary(shell, $expr)?\n\n };\n\n }\n\n\n\n macro_rules! unwrap_word {\n\n ($expr:expr) => {\n\n match $expr {\n\n CondExpr::Word(word) => word,\n\n _ => {\n\n return Err(format_err!(\"cond: expected word\"));\n\n }\n\n }\n\n };\n\n }\n\n\n\n macro_rules! eval_as_bool {\n\n ($expr:expr) => {\n", "file_path": "src/eval.rs", "rank": 16, "score": 151494.3278287313 }, { "content": "#[inline]\n\nfn truncate_and_fill(s: &str, len: usize, fill: char) -> String {\n\n let mut s = truncate(s, len);\n\n for _ in s.len()..(len) {\n\n s.push(fill);\n\n }\n\n\n\n s\n\n}\n\n\n", "file_path": "src/mainloop.rs", "rank": 17, "score": 151071.31157864042 }, { "content": "fn get_hostname() -> String {\n\n let mut hostname_buf = [0u8; 128];\n\n let hostname_cstr = unistd::gethostname(&mut hostname_buf).expect(\"failed to get hostname\");\n\n let hostname = hostname_cstr\n\n .to_str()\n\n .expect(\"Hostname is not valid utf-8 string\");\n\n hostname.to_owned()\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 18, "score": 149678.66503227007 }, { "content": "/// Run CondEx command (`[[ ... ]]`).\n\npub fn evaluate_cond_primary(shell: &mut Shell, cond: &CondExpr) -> Result<String> {\n\n match cond {\n\n CondExpr::Word(word) => expand_word_into_string(shell, word),\n\n _ => Err(format_err!(\"cond: expected word\")),\n\n }\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 19, "score": 148366.10979380383 }, { "content": "pub fn run_in_background(shell: &mut Shell, job: &Rc<Job>, sigcont: bool) {\n\n shell.set_last_back_job(job.clone());\n\n shell.background_jobs_mut().insert(job.clone());\n\n\n\n if sigcont {\n\n kill_process_group(job.pgid, Signal::SIGCONT).expect(\"failed to kill(SIGCONT)\");\n\n }\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 20, "score": 147856.91623885924 }, { "content": "pub fn continue_job(shell: &mut Shell, job: &Rc<Job>, background: bool) {\n\n // Mark all stopped processes as running.\n\n for proc in &job.processes {\n\n if let ProcessState::Stopped(_) = shell.get_process_state(*proc).unwrap() {\n\n shell.set_process_state(*proc, ProcessState::Running);\n\n }\n\n }\n\n\n\n if background {\n\n run_in_background(shell, job, true);\n\n } else {\n\n run_in_foreground(shell, job, true);\n\n }\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 21, "score": 147856.91623885924 }, { "content": "/// Expands words into a `Vec<String>`. 
A pattern in a word are expanded as a\n\n/// file path globbing.\n\npub fn expand_words(shell: &mut Shell, words: &[Word]) -> Result<Vec<String>> {\n\n let mut evaluated = Vec::new();\n\n for word in words {\n\n let mut ws = Vec::new();\n\n for w in expand_word_into_vec(shell, word, &shell.ifs())? {\n\n for f in w.expand_glob()? {\n\n ws.push(f);\n\n }\n\n }\n\n\n\n evaluated.extend(ws);\n\n }\n\n\n\n Ok(evaluated)\n\n}\n\n\n", "file_path": "src/expand.rs", "rank": 22, "score": 147831.24961039558 }, { "content": "// FIXME: remove unsafe or use external crate\n\nfn get_current_username() -> String {\n\n let mut passwd_buf = Vec::with_capacity(512);\n\n let mut passwd: libc::passwd = unsafe { std::mem::zeroed() };\n\n let mut result = std::ptr::null_mut();\n\n unsafe {\n\n libc::getpwuid_r(\n\n libc::getuid(),\n\n &mut passwd,\n\n passwd_buf.as_mut_ptr(),\n\n passwd_buf.capacity(),\n\n &mut result,\n\n );\n\n }\n\n\n\n if result.is_null() {\n\n \"\".to_owned()\n\n } else {\n\n let ptr = passwd.pw_name as *const _;\n\n unsafe {\n\n let cstr = std::ffi::CStr::from_ptr(ptr);\n\n cstr.to_string_lossy().into_owned()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 23, "score": 145242.3482537181 }, { "content": "// TODO: Support other systems like SVN.\n\nfn get_repo_info() -> String {\n\n let git_dir = get_git_dir();\n\n let mut columns = Vec::with_capacity(2);\n\n\n\n let mut branch = get_repo_branch(&git_dir);\n\n if is_repo_modified() {\n\n branch.push('*');\n\n }\n\n columns.push(branch);\n\n\n\n if let Some(action) = get_repo_action(&git_dir) {\n\n columns.push(action.to_owned());\n\n }\n\n\n\n columns.join(\"|\")\n\n}\n\n\n\nlazy_static! {\n\n // Use lazy_static to cache the result.\n\n static ref IN_REMOTE: bool = {\n\n std::env::var(\"SSH_CLIENT\").is_ok()\n\n };\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 24, "score": 145238.151240745 }, { "content": "#[test]\n\npub fn test_string_literal() {\n\n assert_eq!(\n\n parse(\"echo \\\"hello\\\"\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"echo \\\"hello\\\"\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![lit!(\"echo\"), lit!(\"hello\")],\n\n assignments: vec![],\n\n redirects: vec![],\n\n }]\n\n }]\n\n }]\n\n })\n\n );\n\n\n\n assert_eq!(\n", "file_path": "src/parser.rs", "rank": 25, "score": 143680.00726323886 }, { "content": "fn handle_escape_sequence(escaped_arg: &str) -> String {\n\n let mut s = String::new();\n\n let mut escape = false;\n\n for ch in escaped_arg.chars() {\n\n match (escape, ch) {\n\n (true, 'n') => {\n\n s.push('\\n');\n\n escape = false;\n\n }\n\n (true, 't') => {\n\n s.push('\\t');\n\n escape = false;\n\n }\n\n (true, 'e') => {\n\n s.push('\\u{1b}');\n\n escape = false;\n\n }\n\n (true, ch) => {\n\n s.push('\\\\');\n\n s.push(ch);\n", "file_path": "src/builtins/echo.rs", "rank": 26, "score": 143553.84384359943 }, { "content": "fn str_slice_or_empty(slice: &str, start: usize) -> &str {\n\n if slice.len() < start {\n\n \"\"\n\n } else {\n\n &slice[start..]\n\n }\n\n}\n\n\n", "file_path": "src/pattern.rs", "rank": 27, "score": 139965.932736495 }, { "content": "/// Checks if background jobs have been terminated and notify the user that some jobs\n\n/// have been finished.\n\npub fn check_background_jobs(shell: &mut Shell) {\n\n while let Some(pid) = wait_for_any_process(shell, true) {\n\n let job = shell.get_job_by_pid(pid).unwrap().clone();\n\n if job.completed(shell) {\n\n destroy_job(shell, 
&job);\n\n } else if job.stopped(shell) {\n\n println!(\"[{}] Done: {}\", job.id, job.cmd);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 28, "score": 139288.26763005677 }, { "content": "pub fn run_in_foreground(shell: &mut Shell, job: &Rc<Job>, sigcont: bool) -> ProcessState {\n\n shell.last_fore_job = Some(job.clone());\n\n shell.background_jobs_mut().remove(job);\n\n set_terminal_process_group(job.pgid);\n\n\n\n if sigcont {\n\n if let Some(ref termios) = *job.termios.borrow() {\n\n restore_terminal_attrs(termios);\n\n }\n\n kill_process_group(job.pgid, Signal::SIGCONT).expect(\"failed to kill(SIGCONT)\");\n\n trace!(\"sent sigcont\");\n\n }\n\n\n\n // Wait for the job to exit or stop.\n\n let status = wait_for_job(shell, job);\n\n\n\n // Save the current terminal status.\n\n job.termios\n\n .replace(Some(tcgetattr(0).expect(\"failed to tcgetattr\")));\n\n\n\n // Go back into the shell.\n\n set_terminal_process_group(shell.shell_pgid);\n\n restore_terminal_attrs(shell.shell_termios.as_ref().unwrap());\n\n\n\n status\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 29, "score": 138654.90923055832 }, { "content": "/// Computes the similarity. Lower is more similar.\n\nfn compute_score(entry: &str, query: &str) -> u8 {\n\n let mut score = std::u8::MAX;\n\n\n\n if entry == query {\n\n score -= 100;\n\n }\n\n\n\n if entry.starts_with(query) {\n\n score -= 10;\n\n }\n\n\n\n score\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_fuzzy_search() {\n", "file_path": "src/fuzzy.rs", "rank": 30, "score": 137156.26255733258 }, { "content": "/// A regex engine based on @nadrane's work: https://nickdrane.com/build-your-own-regex/\n\n/// Returns the index of the matched part or None.\n\nfn regex_match(pattern: &[RegexSpan], text: &str, index: usize) -> Option<usize> {\n\n trace!(\n\n \"regex: match: pattern = {:?}, text='{}', index = {}\",\n\n pattern,\n\n text,\n\n index\n\n );\n\n\n\n match pattern.get(0) {\n\n Some(RegexSpan::AnyChar) | Some(RegexSpan::Literal(_)) => {\n\n if text.is_empty() {\n\n return None;\n\n }\n\n\n\n if !match_one(&pattern[0], text.chars().next().unwrap()) {\n\n return None;\n\n }\n\n\n\n regex_match(\n\n slice_or_empty(pattern, 1),\n", "file_path": "src/pattern.rs", "rank": 31, "score": 132418.74731018586 }, { "content": "/// Expands a word int a `Vec`.\n\npub fn expand_word_into_vec(shell: &mut Shell, word: &Word, ifs: &str) -> Result<Vec<PatternWord>> {\n\n let mut words = Vec::new();\n\n let mut current_word = Vec::new();\n\n for span in word.spans() {\n\n let (frags, expand) = match span {\n\n Span::LiteralChars(..) => {\n\n // Internally used by the parser.\n\n unreachable!()\n\n }\n\n Span::Literal(s) => (vec![LiteralOrGlob::Literal(s.clone())], false),\n\n Span::Parameter { name, op, quoted } => {\n\n let mut frags = Vec::new();\n\n for value in expand_param(shell, name, op)? 
{\n\n let frag = value.unwrap_or_else(|| \"\".to_owned());\n\n frags.push(LiteralOrGlob::Literal(frag));\n\n }\n\n (frags, !quoted)\n\n }\n\n Span::ArrayParameter {\n\n name,\n", "file_path": "src/expand.rs", "rank": 32, "score": 130452.8805280916 }, { "content": "fn run_bash(bash: &mut Option<Child>, words: Vec<String>, current_word: usize) -> Option<FuzzyVec> {\n\n if words.is_empty() {\n\n return None;\n\n }\n\n\n\n let cmd_name = &words[0];\n\n let needs_comp_file = !PRELOADED_COMPS.contains(cmd_name.as_str());\n\n let comp_file = if needs_comp_file {\n\n if let Some(comp_file) = look_for_comp_file(cmd_name) {\n\n Some(comp_file)\n\n } else {\n\n return None;\n\n }\n\n } else {\n\n None\n\n };\n\n\n\n let mut bash = bash.take().unwrap();\n\n\n\n // Define $COMP_CWORD and $COMP_WORDS.\n", "file_path": "src/bash_server.rs", "rank": 33, "score": 129051.10415747053 }, { "content": "pub fn parse(script: &str) -> Result<Ast, ParseError> {\n\n let mut parser = ShellParser::new();\n\n parser.parse(script)\n\n}\n\n\n\n#[allow(unused)]\n\nmacro_rules! literal_word_vec {\n\n ($($x:expr), *) => {\n\n vec![$( Word(vec![Span::Literal($x.to_string())]), )*]\n\n };\n\n}\n\n\n\n#[allow(unused)]\n\nmacro_rules! lit {\n\n ($x:expr) => {\n\n Word(vec![Span::Literal($x.to_string())])\n\n };\n\n}\n\n\n\n#[allow(unused)]\n\nmacro_rules! param {\n\n ($name:expr, $op:expr, $quoted:expr) => {\n\n Word(vec![Span::Parameter {\n\n name: $name.to_string(),\n\n op: $op,\n\n quoted: $quoted,\n\n }])\n\n };\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 34, "score": 127757.81943016638 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n trace!(\"cd: argv={:?}\", ctx.argv);\n\n let old_dir = std::env::current_dir().expect(\"failed to getcwd()\");\n\n let (dir, pushd) = match ctx.argv.get(1).map(|s| s.as_str()) {\n\n Some(\"-\") => {\n\n if let Some(d) = ctx.shell.popd() {\n\n (d, false)\n\n } else {\n\n return ExitStatus::ExitedWith(1);\n\n }\n\n }\n\n Some(dir) if dir.starts_with('/') => (dir.to_string(), true),\n\n Some(dir) => {\n\n (\n\n // relative path\n\n Path::new(&old_dir)\n\n .join(dir.to_string())\n\n .to_string_lossy()\n\n .into_owned(),\n\n true,\n", "file_path": "src/builtins/cd.rs", "rank": 35, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n trace!(\"wait: argv={:?}\", ctx.argv);\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(opts) => {\n\n if opts.job_id.is_some() {\n\n match parse_job_id(ctx, opts.job_id) {\n\n Ok(job) => {\n\n wait_for_job(ctx.shell, &job);\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(status) => status,\n\n }\n\n } else {\n\n // Wait for all jobs.\n\n let jobs: Vec<Rc<Job>> = ctx.shell.jobs().values().cloned().collect();\n\n for job in &jobs {\n\n wait_for_job(ctx.shell, job);\n\n }\n\n\n\n ExitStatus::ExitedWith(0)\n\n }\n\n }\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"wait: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/wait.rs", "rank": 36, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n if let Some(filepath) = ctx.argv.get(1) {\n\n match ctx.shell.run_file(std::path::PathBuf::from(&filepath)) {\n\n Ok(status) => status,\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"nsh: failed open the file: {:?}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n } else {\n\n writeln!(ctx.stderr, \"nsh: source: filename argument required\").ok();\n\n 
ctx.stderr.flush().ok();\n\n ExitStatus::ExitedWith(0)\n\n }\n\n}\n", "file_path": "src/builtins/source.rs", "rank": 37, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n let mut no_newline = false;\n\n let mut escape = false;\n\n let skip = match ctx.argv.get(1).map(|s| s.as_str()) {\n\n Some(\"-e\") => {\n\n escape = true;\n\n true\n\n }\n\n Some(\"-n\") => {\n\n no_newline = true;\n\n true\n\n }\n\n Some(\"-ne\") | Some(\"-en\") => {\n\n escape = true;\n\n no_newline = true;\n\n true\n\n }\n\n _ => false,\n\n };\n\n\n", "file_path": "src/builtins/echo.rs", "rank": 38, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n let exit_with = if let Some(exit_with) = ctx.argv.get(1) {\n\n exit_with.parse().unwrap_or(1)\n\n } else {\n\n 0\n\n };\n\n\n\n std::process::exit(exit_with);\n\n}\n", "file_path": "src/builtins/exit.rs", "rank": 39, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n if ctx.argv.is_empty() {\n\n for name in ctx.shell.exported_names() {\n\n if let Some(var) = ctx.shell.get(name) {\n\n writeln!(ctx.stdout, \"{}={}\", name, var.as_str()).ok();\n\n }\n\n }\n\n\n\n return ExitStatus::ExitedWith(0);\n\n }\n\n\n\n for arg in ctx.argv {\n\n let frags: Vec<&str> = arg.splitn(2, '=').collect();\n\n let mut iter = frags.iter();\n\n match (iter.next(), iter.next()) {\n\n (Some(name), Some(value)) => {\n\n ctx.shell.export(name);\n\n ctx.shell\n\n .set(name, Value::String(value.to_owned().to_string()), false);\n\n }\n\n (Some(name), None) => {\n\n ctx.shell.export(name);\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n ExitStatus::ExitedWith(0)\n\n}\n", "file_path": "src/builtins/export.rs", "rank": 40, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n // Concatenate arguemts into a string.\n\n let mut program = String::new();\n\n for arg in ctx.argv.iter().skip(1) {\n\n program += arg;\n\n program.push(' ');\n\n }\n\n\n\n ctx.shell.run_str(&program)\n\n}\n", "file_path": "src/builtins/eval.rs", "rank": 41, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n trace!(\"alias: argv={:?}\", ctx.argv);\n\n if let Some(alias) = ctx.argv.get(1) {\n\n match parse_alias(alias) {\n\n Ok(Alias { name, body }) => {\n\n ctx.shell.add_alias(&name, body);\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(parser::ParseError::Fatal(err)) => {\n\n writeln!(ctx.stderr, \"nsh: alias: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n Err(parser::ParseError::Empty) => {\n\n writeln!(ctx.stderr, \"nsh: alias: alias can't be empty string\").ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n } else {\n\n // List defined aliases.\n\n for (name, cmd) in ctx.shell.aliases() {\n\n writeln!(ctx.stdout, \"{}='{}'\", name, cmd).ok();\n\n }\n\n\n\n ExitStatus::ExitedWith(0)\n\n }\n\n}\n", "file_path": "src/builtins/alias.rs", "rank": 42, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n trace!(\"jobs: argv={:?}\", ctx.argv);\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(_) => {\n\n for job in ctx.shell.jobs().values() {\n\n writeln!(\n\n ctx.stdout,\n\n \"[{}] {}: {}\",\n\n job.id(),\n\n job.state(ctx.shell),\n\n job.cmd()\n\n )\n\n .ok();\n\n }\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"fg: {}\", err).ok();\n\n 
ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/jobs.rs", "rank": 43, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(_opts) => match ctx.shell.popd() {\n\n Some(dir) => match std::env::set_current_dir(&dir) {\n\n Ok(_) => ExitStatus::ExitedWith(0),\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"nsh: popd: {}: `{}'\", err, dir).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n },\n\n None => {\n\n writeln!(ctx.stderr, \"nsh: popd: directory stack empty\").ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n },\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"nsh: popd: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/popd.rs", "rank": 44, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(opts) => {\n\n let dir = if let Some(dir) = opts.dir {\n\n dir\n\n } else {\n\n std::env::current_dir()\n\n .unwrap()\n\n .to_str()\n\n .unwrap()\n\n .to_owned()\n\n };\n\n\n\n ctx.shell.pushd(dir);\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"nsh: pushd: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/pushd.rs", "rank": 45, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n // TODO: Support more options\n\n // TODO: Support +e, +u, ...\n\n\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(opts) => {\n\n ctx.shell.errexit = opts.errexit;\n\n ctx.shell.nounset = opts.nounset;\n\n ctx.shell.noexec = opts.noexec;\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"nsh: set: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/set.rs", "rank": 46, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n trace!(\"read: argv={:?}\", ctx.argv);\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(opts) => {\n\n if ctx.shell.interactive() {\n\n if let Some(prompt) = opts.prompt {\n\n write!(ctx.stderr, \"{}\", prompt).ok();\n\n ctx.stderr.flush().ok();\n\n }\n\n }\n\n\n\n match ctx.stdin.read_line() {\n\n Some(line) => {\n\n let trimed_value = line.trim_end();\n\n let value = Value::String(trimed_value.to_owned());\n\n ctx.shell.set(&opts.var_name, value, false);\n\n ExitStatus::ExitedWith(0)\n\n }\n\n None => {\n\n // EOF\n", "file_path": "src/builtins/read.rs", "rank": 47, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n trace!(\"bg: argv={:?}\", ctx.argv);\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(opts) => match parse_job_id(ctx, opts.job_id) {\n\n Ok(job) => {\n\n continue_job(ctx.shell, &job, true);\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(status) => status,\n\n },\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"bg: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/bg.rs", "rank": 48, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n trace!(\"fg: argv={:?}\", ctx.argv);\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(opts) => match parse_job_id(ctx, opts.job_id) {\n\n Ok(job) => {\n\n continue_job(ctx.shell, &job, false);\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(status) => status,\n\n },\n\n 
Err(err) => {\n\n writeln!(ctx.stderr, \"nsh: fg: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/fg.rs", "rank": 49, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n ctx.shell.path_table_mut().rehash();\n\n ExitStatus::ExitedWith(0)\n\n}\n", "file_path": "src/builtins/rehash.rs", "rank": 50, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n if let Some(command) = ctx.argv.get(1) {\n\n let command = match CString::new(command.as_bytes()) {\n\n Ok(args) => args,\n\n Err(_) => {\n\n writeln!(ctx.stderr, \"nsh: exec: invalid command (perhaps it includes a NUL character?)\").ok();\n\n return ExitStatus::Return;\n\n }\n\n };\n\n // args should include `command`\n\n let args = match ctx.argv[1..]\n\n .into_iter()\n\n .map(|s| CString::new(s.as_bytes()))\n\n .collect::<Result<Vec<_>, _>>()\n\n {\n\n Ok(args) => args,\n\n Err(_) => {\n\n writeln!(ctx.stderr, \"nsh: exec: invalid command (perhaps it includes a NUL character?)\").ok();\n\n return ExitStatus::Return;\n\n }\n", "file_path": "src/builtins/exec.rs", "rank": 51, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n // TODO: Support -f, -v, and -n\n\n\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(opts) => {\n\n ctx.shell.remove(&opts.name);\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"nsh: unset: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/unset.rs", "rank": 52, "score": 125877.62713612948 }, { "content": "pub fn command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n trace!(\"shift: argv={:?}\", ctx.argv);\n\n match Opt::from_iter_safe(ctx.argv) {\n\n Ok(opts) => {\n\n let current = ctx.shell.current_frame_mut();\n\n let mut args = Vec::new();\n\n for i in 1.. {\n\n if let Some(var) = current.get_nth_arg(i) {\n\n args.push(var);\n\n current.remove_nth_arg(i);\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n for (i, var) in args.iter().skip(opts.n.unwrap_or(1)).enumerate() {\n\n let value = Value::String(var.as_str().to_string());\n\n current.set_nth_arg(i + 1, value);\n\n }\n\n\n\n ExitStatus::ExitedWith(0)\n\n }\n\n Err(err) => {\n\n writeln!(ctx.stderr, \"shift: {}\", err).ok();\n\n ExitStatus::ExitedWith(1)\n\n }\n\n }\n\n}\n", "file_path": "src/builtins/shift.rs", "rank": 53, "score": 125877.62713612948 }, { "content": "/// A super powerful hidden command for some cryptographers.\n\n/// https://xkcd.com/221/\n\npub fn xkcd_rand_command(ctx: &mut InternalCommandContext) -> ExitStatus {\n\n writeln!(ctx.stdout, \"4\").ok();\n\n ctx.stdout.flush().ok();\n\n ExitStatus::ExitedWith(0)\n\n}\n\n\n", "file_path": "src/builtins/mod.rs", "rank": 54, "score": 121096.54483820565 }, { "content": "pub fn destroy_job(shell: &mut Shell, job: &Rc<Job>) {\n\n // TODO: Remove processes from shell.pid_job_mapping\n\n // TODO: I suppose this function should be Drop::drop().\n\n\n\n if shell.background_jobs_mut().remove(job) {\n\n // The job was a background job. 
Notify the user that the job\n\n // has finished.\n\n println!(\"[{}] Done: {}\", job.id, job.cmd);\n\n }\n\n\n\n shell.jobs_mut().remove(&job.id).unwrap();\n\n\n\n if let Some(ref last_job) = shell.last_fore_job {\n\n if job.id == last_job.id {\n\n shell.last_fore_job = None;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 55, "score": 120952.04153982611 }, { "content": "pub fn evaluate_expr(shell: &mut Shell, expr: &Expr) -> i32 {\n\n match expr {\n\n Expr::Expr(sub_expr) => evaluate_expr(shell, sub_expr),\n\n Expr::Literal(value) => *value,\n\n Expr::Parameter { name } => shell.get_var_as_i32(name).unwrap_or(0),\n\n Expr::Add(BinaryExpr { lhs, rhs }) => evaluate_expr(shell, lhs) + evaluate_expr(shell, rhs),\n\n Expr::Sub(BinaryExpr { lhs, rhs }) => evaluate_expr(shell, lhs) - evaluate_expr(shell, rhs),\n\n Expr::Mul(BinaryExpr { lhs, rhs }) => evaluate_expr(shell, lhs) * evaluate_expr(shell, rhs),\n\n Expr::Div(BinaryExpr { lhs, rhs }) => evaluate_expr(shell, lhs) / evaluate_expr(shell, rhs),\n\n Expr::Assign { name, rhs } => {\n\n let value = evaluate_expr(shell, rhs);\n\n shell.assign(name, Value::String(value.to_string()));\n\n value\n\n }\n\n Expr::Eq(lhs, rhs) => bool_to_int!(evaluate_expr(shell, lhs) == evaluate_expr(shell, rhs)),\n\n Expr::Ne(lhs, rhs) => bool_to_int!(evaluate_expr(shell, lhs) != evaluate_expr(shell, rhs)),\n\n Expr::Lt(lhs, rhs) => bool_to_int!(evaluate_expr(shell, lhs) < evaluate_expr(shell, rhs)),\n\n Expr::Le(lhs, rhs) => bool_to_int!(evaluate_expr(shell, lhs) <= evaluate_expr(shell, rhs)),\n\n Expr::Gt(lhs, rhs) => bool_to_int!(evaluate_expr(shell, lhs) > evaluate_expr(shell, rhs)),\n\n Expr::Ge(lhs, rhs) => bool_to_int!(evaluate_expr(shell, lhs) >= evaluate_expr(shell, rhs)),\n", "file_path": "src/eval.rs", "rank": 56, "score": 120952.04153982611 }, { "content": "/// Runs commands.\n\npub fn eval(\n\n shell: &mut Shell,\n\n ast: &Ast,\n\n stdin: RawFd,\n\n stdout: RawFd,\n\n stderr: RawFd,\n\n) -> ExitStatus {\n\n trace!(\"ast: {:#?}\", ast);\n\n run_terms(shell, &ast.terms, stdin, stdout, stderr)\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 57, "score": 119216.5999559969 }, { "content": "fn is_repo_modified() -> bool {\n\n std::process::Command::new(\"git\")\n\n .arg(\"status\")\n\n .arg(\"--porcelain\")\n\n .stderr(std::process::Stdio::null())\n\n .output()\n\n .map(|output| !output.stdout.is_empty())\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 58, "score": 117546.98197411094 }, { "content": "/// Expands a parameter (`$foo` in e.g. `echo $foo`). It returns `Vec` since\n\n/// `op` can be array expansion. 
`None` value represents *null*.\n\npub fn expand_param(\n\n shell: &mut Shell,\n\n name: &str,\n\n op: &ExpansionOp,\n\n) -> Result<Vec<Option<String>>> {\n\n match name {\n\n \"?\" => {\n\n return Ok(vec![Some(shell.last_status().to_string())]);\n\n }\n\n \"!\" => {\n\n let pgid = match shell.last_back_job() {\n\n Some(job) => job.pgid.to_string(),\n\n None => 0.to_string(),\n\n };\n\n\n\n return Ok(vec![Some(pgid)]);\n\n }\n\n \"0\" => {\n\n return Ok(vec![Some(shell.script_name.clone())]);\n\n }\n", "file_path": "src/expand.rs", "rank": 59, "score": 116001.43763143581 }, { "content": "pub fn replace_pattern(\n\n shell: &mut Shell,\n\n pattern: &Word,\n\n text: &str,\n\n replacement: &Word,\n\n replace_all: bool,\n\n) -> Result<String> {\n\n let pat = expand_into_single_pattern_word(shell, pattern)?;\n\n let dst = expand_word_into_string(shell, replacement)?;\n\n Ok(crate::pattern::replace_pattern(\n\n &pat,\n\n text,\n\n &dst,\n\n replace_all,\n\n ))\n\n}\n", "file_path": "src/expand.rs", "rank": 60, "score": 115997.80498330566 }, { "content": "/// Runs pipelines.\n\npub fn run_terms(\n\n shell: &mut Shell,\n\n terms: &[parser::Term],\n\n stdin: RawFd,\n\n stdout: RawFd,\n\n stderr: RawFd,\n\n) -> ExitStatus {\n\n let mut last_status = ExitStatus::ExitedWith(0);\n\n for term in terms {\n\n for pipeline in &term.pipelines {\n\n // Should we execute the pipline?\n\n match (last_status, &pipeline.run_if) {\n\n (ExitStatus::ExitedWith(0), RunIf::Success) => (),\n\n (ExitStatus::ExitedWith(_), RunIf::Failure) => (),\n\n (ExitStatus::Break, _) => return ExitStatus::Break,\n\n (ExitStatus::Continue, _) => return ExitStatus::Continue,\n\n (ExitStatus::Return, _) => return ExitStatus::Return,\n\n (_, RunIf::Always) => (),\n\n _ => continue,\n\n }\n", "file_path": "src/eval.rs", "rank": 61, "score": 115997.80498330566 }, { "content": "#[test]\n\npub fn test_tilde() {\n\n assert_eq!(\n\n parse(\"echo ~ ~/usr ~seiya ~seiya/usr a/~/b\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"echo ~ ~/usr ~seiya ~seiya/usr a/~/b\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![\n\n Word(vec![Span::Literal(\"echo\".into())]),\n\n Word(vec![Span::Tilde(None)]),\n\n Word(vec![Span::Tilde(None), Span::Literal(\"/usr\".into())]),\n\n Word(vec![Span::Tilde(Some(\"seiya\".into()))]),\n\n Word(vec![\n\n Span::Tilde(Some(\"seiya\".into())),\n\n Span::Literal(\"/usr\".into()),\n\n ]),\n\n Word(vec![Span::Literal(\"a/~/b\".into())]),\n\n ],\n\n redirects: vec![],\n\n assignments: vec![],\n\n }],\n\n }],\n\n }],\n\n })\n\n );\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 62, "score": 115997.80498330566 }, { "content": "#[test]\n\npub fn test_expansions() {\n\n assert_eq!(\n\n parse(\"ls `echo -l`\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"ls `echo -l`\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![\n\n Word(vec![Span::Literal(\"ls\".into())]),\n\n Word(vec![Span::Command {\n\n quoted: false,\n\n body: vec![Term {\n\n code: \"echo -l\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n", "file_path": "src/parser.rs", "rank": 63, "score": 115997.80498330566 }, { "content": "pub fn replace_pattern(\n\n pattern: &PatternWord,\n\n text: &str,\n\n replacement: &str,\n\n replace_all: bool,\n\n) -> String {\n\n let mut remaining = 
text;\n\n let mut text = String::new();\n\n loop {\n\n if let Some(m) = pattern_word_match(pattern, remaining) {\n\n text += &remaining[..m.start];\n\n text += replacement;\n\n\n\n if remaining.len() < m.end + 1 {\n\n // Reached to the end of text.\n\n remaining = \"\";\n\n break;\n\n }\n\n\n\n remaining = &remaining[(m.end + 1)..];\n", "file_path": "src/pattern.rs", "rank": 64, "score": 115997.80498330566 }, { "content": "#[test]\n\npub fn test_heredoc() {\n\n assert_eq!(\n\n parse(concat!(\n\n \"cat << EOF > file.txt\\n\",\n\n \"hello world\\n\",\n\n \"from\\n\",\n\n \"heredoc!\\n\",\n\n \"EOF\\n\"\n\n )),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"cat << EOF > file.txt\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![lit!(\"cat\")],\n\n redirects: vec![\n\n Redirection {\n\n fd: 0,\n", "file_path": "src/parser.rs", "rank": 65, "score": 115997.80498330566 }, { "content": "#[test]\n\npub fn test_assignments() {\n\n assert_eq!(\n\n parse(\"foo=bar\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"foo=bar\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::Assignment {\n\n assignments: vec![Assignment {\n\n name: \"foo\".into(),\n\n initializer: Initializer::String(Word(vec![Span::Literal(\n\n \"bar\".into()\n\n )])),\n\n index: None,\n\n }],\n\n }],\n\n }],\n\n }],\n", "file_path": "src/parser.rs", "rank": 66, "score": 115997.80498330566 }, { "content": "#[test]\n\npub fn test_comments() {\n\n assert_eq!(\n\n parse(\"foo bar # this is comment\\n#comment line\\nls -G /tmp # hello world\\n\"),\n\n Ok(Ast {\n\n terms: vec![\n\n Term {\n\n code: \"foo bar # this is comment\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: literal_word_vec![\"foo\", \"bar\"],\n\n redirects: vec![],\n\n assignments: vec![],\n\n }],\n\n }],\n\n },\n\n Term {\n\n code: \"ls -G /tmp # hello world\".into(),\n\n background: false,\n", "file_path": "src/parser.rs", "rank": 67, "score": 115997.80498330566 }, { "content": "#[test]\n\npub fn test_patterns() {\n\n assert_eq!(\n\n parse(\"echo * a?c\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"echo * a?c\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![\n\n Word(vec![Span::Literal(\"echo\".into())]),\n\n Word(vec![Span::AnyString { quoted: false }]),\n\n Word(vec![\n\n Span::Literal(\"a\".into()),\n\n Span::AnyChar { quoted: false },\n\n Span::Literal(\"c\".into()),\n\n ]),\n\n ],\n\n redirects: vec![],\n\n assignments: vec![],\n\n }],\n\n }],\n\n }],\n\n })\n\n );\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 68, "score": 115997.80498330566 }, { "content": "/// Evaluates a variable initializer.\n\n///\n\n/// ```\n\n/// this_is_string=hello_world\n\n/// ^^^^^^^^^^^ a string initializer\n\n/// this_is_array=(a b c)\n\n/// ^^^^^^^ an array initializer\n\n/// ```\n\n///\n\npub fn evaluate_initializer(shell: &mut Shell, initializer: &Initializer) -> Result<Value> {\n\n match initializer {\n\n Initializer::String(ref word) => Ok(Value::String(expand_word_into_string(shell, word)?)),\n\n Initializer::Array(ref words) => {\n\n let elems = expand_words(shell, words)?;\n\n match (elems.len(), elems.get(0)) {\n\n (1, Some(body)) if body.is_empty() => {\n\n // Make `foo=()' an empty array.\n\n 
Ok(Value::Array(vec![]))\n\n }\n\n _ => Ok(Value::Array(elems)),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 69, "score": 115230.76295838394 }, { "content": "fn match_one(pat: &RegexSpan, ch: char) -> bool {\n\n trace!(\"regex: one: pattern = {:?}, ch={:?}\", pat, ch);\n\n\n\n match pat {\n\n RegexSpan::Literal(span_ch) => *span_ch == ch,\n\n RegexSpan::AnyChar => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct MatchResult {\n\n start: usize,\n\n end: usize,\n\n}\n\n\n", "file_path": "src/pattern.rs", "rank": 70, "score": 113788.09738269757 }, { "content": "fn pattern_word_match(pattern: &PatternWord, text: &str) -> Option<MatchResult> {\n\n trace!(\"pattern_word_match: text = '{}'\", text);\n\n let mut spans = Vec::new();\n\n for frag in &pattern.fragments {\n\n match frag {\n\n LiteralOrGlob::AnyChar => {\n\n spans.push(RegexSpan::AnyChar);\n\n }\n\n LiteralOrGlob::AnyString => {\n\n spans.push(RegexSpan::AnyString);\n\n }\n\n LiteralOrGlob::Literal(s) => {\n\n for ch in s.chars() {\n\n spans.push(RegexSpan::Literal(ch));\n\n }\n\n }\n\n }\n\n }\n\n\n\n for start in 0..text.len() {\n", "file_path": "src/pattern.rs", "rank": 71, "score": 113676.62852622489 }, { "content": "#[test]\n\npub fn test_escape_sequences() {\n\n assert_eq!(\n\n parse(r#\"echo \"\\e[1m\" \\$a\"b;\\n\\\"c\"d \\\\n \\this_\\i\\s_\\normal\"#),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"echo \\\"\\\\e[1m\\\" \\\\$a\\\"b;\\\\n\\\\\\\"c\\\"d \\\\\\\\n \\\\this_\\\\i\\\\s_\\\\normal\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![\n\n lit!(\"echo\"),\n\n lit!(\"\\\\e[1m\"),\n\n Word(vec![\n\n Span::Literal(\"$a\".into()),\n\n Span::Literal(\"b;\\\\n\\\"c\".into()),\n\n Span::Literal(\"d\".into()),\n\n ]),\n\n lit!(\"\\\\n\"),\n\n lit!(\"this_is_normal\")\n\n ],\n\n assignments: vec![],\n\n redirects: vec![],\n\n }]\n\n }]\n\n }]\n\n })\n\n );\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 72, "score": 113078.81749387164 }, { "content": "/// Spawn a child process and execute a command.\n\npub fn run_external_command(\n\n shell: &mut Shell,\n\n ctx: &Context,\n\n argv: Vec<String>,\n\n redirects: &[parser::Redirection],\n\n assignments: &[parser::Assignment],\n\n) -> Result<ExitStatus> {\n\n let mut fds = Vec::new();\n\n for r in redirects {\n\n match r.target {\n\n parser::RedirectionType::Fd(ref fd) => {\n\n fds.push((*fd, r.fd as RawFd));\n\n }\n\n parser::RedirectionType::File(ref wfilepath) => {\n\n let mut options = OpenOptions::new();\n\n match &r.direction {\n\n parser::RedirectionDirection::Input => {\n\n options.read(true);\n\n }\n\n parser::RedirectionDirection::Output => {\n", "file_path": "src/process.rs", "rank": 73, "score": 113078.81749387164 }, { "content": "#[test]\n\npub fn test_compound_commands() {\n\n assert_eq!(\n\n parse(\"if true; then echo it works; fi\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"if true; then echo it works; fi\".into(),\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::If {\n\n condition: vec![Term {\n\n code: \"true\".into(),\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: literal_word_vec![\"true\"],\n\n redirects: vec![],\n\n assignments: vec![],\n\n }],\n\n }],\n\n background: false,\n", "file_path": "src/parser.rs", "rank": 74, "score": 113078.81749387164 }, { "content": "#[test]\n\npub fn test_cond_ex() 
{\n\n assert_eq!(\n\n parse(\"hello=world; [[ $hello == world ]]\"),\n\n Ok(Ast {\n\n terms: vec![\n\n Term {\n\n code: \"hello=world\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::Assignment {\n\n assignments: vec![Assignment {\n\n name: \"hello\".into(),\n\n initializer: Initializer::String(lit!(\"world\")),\n\n index: None,\n\n }],\n\n }],\n\n }],\n\n },\n\n Term {\n", "file_path": "src/parser.rs", "rank": 75, "score": 113078.81749387164 }, { "content": "#[test]\n\npub fn test_process_substitution() {\n\n assert_eq!(\n\n parse(\"cat <(echo hello from a file)\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"cat <(echo hello from a file)\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![\n\n Word(vec![Span::Literal(\"cat\".into())]),\n\n Word(vec![Span::ProcSubst {\n\n subst_type: ProcSubstType::StdoutToFile,\n\n body: vec![Term {\n\n code: \"echo hello from a file\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n", "file_path": "src/parser.rs", "rank": 76, "score": 113078.81749387164 }, { "content": "#[test]\n\npub fn test_courner_cases() {\n\n assert_eq!(parse(\"\"), Err(ParseError::Empty));\n\n assert_eq!(parse(\"\\n\"), Err(ParseError::Empty));\n\n assert_eq!(parse(\"\\n\\n\\n\"), Err(ParseError::Empty));\n\n assert_eq!(parse(\"\\n\\t\\n\"), Err(ParseError::Empty));\n\n assert_eq!(parse(\" \"), Err(ParseError::Empty));\n\n assert!(parse(\";;;;;;\").is_err());\n\n assert!(parse(\"||\").is_err());\n\n assert!(parse(\"& &&\").is_err());\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 77, "score": 113078.81749387164 }, { "content": "/// Runs an internal (builtin) command.\n\npub fn run_internal_command(\n\n shell: &mut Shell,\n\n argv: &[String],\n\n mut stdin: RawFd,\n\n mut stdout: RawFd,\n\n mut stderr: RawFd,\n\n redirects: &[parser::Redirection],\n\n) -> Result<ExitStatus> {\n\n let func = match INTERNAL_COMMANDS.get(argv[0].as_str()) {\n\n Some(func) => func,\n\n _ => return Err(Error::from(InternalCommandError::NotFound)),\n\n };\n\n\n\n let mut opened_fds = Vec::new();\n\n for r in redirects {\n\n match r.target {\n\n parser::RedirectionType::Fd(ref fd) => match r.fd {\n\n 0 => stdin = *fd,\n\n 1 => stdout = *fd,\n\n 2 => stderr = *fd,\n", "file_path": "src/process.rs", "rank": 78, "score": 113078.81749387164 }, { "content": "#[test]\n\npub fn test_arith_expr() {\n\n assert_eq!(\n\n parse(\"echo $(( 1 + 2+(-3) ))\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"echo $(( 1 + 2+(-3) ))\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![\n\n Word(vec![Span::Literal(\"echo\".into())]),\n\n Word(vec![Span::ArithExpr {\n\n expr: Expr::Add(BinaryExpr {\n\n lhs: Box::new(Expr::Literal(1)),\n\n rhs: Box::new(Expr::Add(BinaryExpr {\n\n lhs: Box::new(Expr::Literal(2)),\n\n rhs: Box::new(Expr::Expr(Box::new(Expr::Literal(-3)))),\n\n })),\n\n }),\n", "file_path": "src/parser.rs", "rank": 79, "score": 113078.81749387164 }, { "content": "#[test]\n\npub fn test_simple_commands() {\n\n assert_eq!(\n\n parse(\"ls -G /tmp\\n\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"ls -G /tmp\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: 
literal_word_vec![\"ls\", \"-G\", \"/tmp\"],\n\n redirects: vec![],\n\n assignments: vec![],\n\n }],\n\n }],\n\n }],\n\n })\n\n );\n\n\n\n assert_eq!(\n", "file_path": "src/parser.rs", "rank": 80, "score": 113078.81749387164 }, { "content": "pub fn evaluate_heredoc(shell: &mut Shell, heredoc: &HereDoc) -> Result<RawFd> {\n\n let mut lines = Vec::new();\n\n for line in heredoc.lines() {\n\n let mut words = Vec::new();\n\n for word in line {\n\n words.push(expand_word_into_string(shell, word)?);\n\n }\n\n\n\n lines.push(words.join(\" \"));\n\n }\n\n\n\n let mut body = lines.join(\"\\n\");\n\n body += \"\\n\";\n\n\n\n let (pipe_out, pipe_in) = pipe().expect(\"failed to create a pipe\");\n\n unsafe {\n\n let mut file = File::from_raw_fd(pipe_in);\n\n file.write_all(body.as_bytes()).ok();\n\n // Ensure that pipe_in is closed.\n\n drop(file);\n\n };\n\n\n\n Ok(pipe_out)\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 81, "score": 112751.63577431443 }, { "content": "/// Waits for all processes in the job to exit. Note that the job will be\n\n/// deleted from `shell` if the process has exited.\n\npub fn wait_for_job(shell: &mut Shell, job: &Rc<Job>) -> ProcessState {\n\n loop {\n\n if job.completed(shell) || job.stopped(shell) {\n\n break;\n\n }\n\n\n\n wait_for_any_process(shell, false);\n\n }\n\n\n\n // Get the exit status of the last process.\n\n let state = shell\n\n .get_process_state(*job.processes.iter().last().unwrap())\n\n .cloned();\n\n\n\n match state {\n\n Some(ProcessState::Completed(_)) => {\n\n // Remove the job and processes from the list.\n\n destroy_job(shell, job);\n\n state.unwrap()\n\n }\n\n Some(ProcessState::Stopped(_)) => {\n\n print_err!(\"[{}] Stopped: {}\", job.id, job.cmd);\n\n state.unwrap()\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/process.rs", "rank": 82, "score": 112751.63577431443 }, { "content": "#[test]\n\npub fn test_assign_like_prefix() {\n\n assert_eq!(\n\n parse(\"./configure --prefix=~/usr in\\\\valid=~\"),\n\n Ok(Ast {\n\n terms: vec![Term {\n\n code: \"./configure --prefix=~/usr in\\\\valid=~\".into(),\n\n background: false,\n\n pipelines: vec![Pipeline {\n\n run_if: RunIf::Always,\n\n commands: vec![Command::SimpleCommand {\n\n argv: vec![\n\n Word(vec![Span::Literal(\"./configure\".into())]),\n\n Word(vec![\n\n Span::Literal(\"--prefix=\".into()),\n\n Span::Tilde(None),\n\n Span::Literal(\"/usr\".into()),\n\n ]),\n\n Word(vec![Span::Literal(\"invalid=~\".into())]),\n\n ],\n\n redirects: vec![],\n\n assignments: vec![],\n\n }],\n\n }],\n\n }],\n\n })\n\n );\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 83, "score": 110419.61408341941 }, { "content": "pub fn parse_prompt(prompt: &str) -> Result<Prompt, pest::error::Error<Rule>> {\n\n PromptParser::parse(Rule::prompt, prompt).map(pairs2prompt)\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 84, "score": 110412.97541760652 }, { "content": "/// Expands and merges all pattern words into a single pattern word.\n\npub fn expand_into_single_pattern_word(shell: &mut Shell, pattern: &Word) -> Result<PatternWord> {\n\n let mut frags = Vec::new();\n\n let ifs = \"\"; /* all whitespaces are treated as a literal */\n\n for word in expand_word_into_vec(shell, pattern, ifs)? 
{\n\n for frag in word.fragments() {\n\n frags.push(frag.clone());\n\n }\n\n }\n\n\n\n Ok(PatternWord::new(frags))\n\n}\n\n\n", "file_path": "src/expand.rs", "rank": 85, "score": 108289.86145435536 }, { "content": "/// Runs commands in a subshell (`$()` or `<()`).\n\npub fn eval_in_subshell(shell: &mut Shell, terms: &[parser::Term]) -> Result<(i32, i32)> {\n\n let (pipe_out, pipe_in) = pipe().expect(\"failed to create a pipe\");\n\n\n\n let ctx = Context {\n\n stdin: 0,\n\n stdout: pipe_in,\n\n stderr: 2,\n\n pgid: None,\n\n background: false,\n\n interactive: false,\n\n };\n\n\n\n let pid = spawn_subshell(shell, terms, &ctx)?;\n\n close(pipe_in).ok();\n\n let status = wait_child(pid).unwrap_or(1);\n\n Ok((status, pipe_out))\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 86, "score": 105360.26385798739 }, { "content": "fn evaluate_condition(cond: &Condition) -> bool {\n\n match cond {\n\n Condition::InRepo => {\n\n // TODO: Support other systems like SVN.\n\n std::process::Command::new(\"git\")\n\n .arg(\"rev-parse\")\n\n .arg(\"--is-inside-work-tree\")\n\n .stdout(std::process::Stdio::null())\n\n .stderr(std::process::Stdio::null())\n\n .status()\n\n .map(|status| status.success())\n\n .unwrap_or(false)\n\n }\n\n Condition::InRemote => *IN_REMOTE,\n\n }\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 87, "score": 101675.83420391122 }, { "content": "fn is_whitespace(ch: char) -> bool {\n\n \" \\t\".contains(ch)\n\n}\n\n\n", "file_path": "src/context_parser.rs", "rank": 88, "score": 101675.83420391122 }, { "content": "#[derive(Clone)]\n\nstruct UserInput {\n\n cursor: usize,\n\n input: String,\n\n indices: Vec<usize>,\n\n word_split: &'static str,\n\n}\n\n\n\nimpl UserInput {\n\n pub fn new() -> UserInput {\n\n UserInput {\n\n cursor: 0,\n\n input: String::with_capacity(256),\n\n indices: Vec::with_capacity(256),\n\n word_split: \" \\t/\",\n\n }\n\n }\n\n\n\n pub fn reset(&mut self, input: String) {\n\n self.input = input;\n\n self.update_indices();\n", "file_path": "src/mainloop.rs", "rank": 89, "score": 99666.74960871243 }, { "content": "fn is_varname_char(ch: char) -> bool {\n\n \"@*?!$_\".contains(ch) || ch.is_ascii_alphanumeric()\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub struct InputContext {\n\n // The input string.\n\n pub input: String,\n\n // The cursor position.\n\n pub cursor: usize,\n\n // Words of a command where the cursor is in (`$COMP_WORDS`).\n\n pub words: Vec<String>,\n\n // The fragments of the input. Primarily used for syntax highlighting.\n\n pub spans: Vec<Span>,\n\n // The context of the input. It is not empty if input is incomplete. For\n\n // example, parsing \"if true\" returns nested=[BlockType::If] because it\n\n // does not contain `fi`.\n\n pub nested: Vec<BlockType>,\n\n // The range of the current *literal-like* part over the cursor in `input`.\n\n // Primarily used by completion to extract and replace the current word\n\n // string. 
It is `None` if the cursor is not at a literal-like `Span` such\n\n // as `Span::Param`, CmdSubstStart, etc.\n\n pub current_literal: Option<Range<usize>>,\n\n // The index of the word where the cursor is at in `words` (`$COMP_CWORD`).\n\n pub current_word: usize,\n\n // The index of the span where the cursor is at in `words`.\n\n pub current_span: Option<usize>,\n\n}\n\n\n", "file_path": "src/context_parser.rs", "rank": 90, "score": 99223.6490848928 }, { "content": "fn is_word_separator(span: &Span) -> bool {\n\n matches!(\n\n span,\n\n Span::Space(_) | Span::CommandSep(_) | Span::CmdSubstStart\n\n )\n\n}\n\n\n", "file_path": "src/context_parser.rs", "rank": 91, "score": 99223.6490848928 }, { "content": "struct DirColorEntry {\n\n bold: bool,\n\n color: String,\n\n}\n\n\n\npub struct DirColor {\n\n map: HashMap<String, DirColorEntry>,\n\n}\n\n\n\nimpl DirColor {\n\n pub fn new() -> DirColor {\n\n DirColor {\n\n map: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn load(&mut self, dircolors: &str) {\n\n for part in dircolors.trim().split(':') {\n\n let mut columns = part.splitn(2, '=');\n\n if let (Some(key), Some(value)) = (columns.next(), columns.next()) {\n", "file_path": "src/dircolor.rs", "rank": 92, "score": 96340.96110500858 }, { "content": "fn pairs2prompt(mut pairs: Pairs<Rule>) -> Prompt {\n\n visit_prompt(pairs.next().unwrap())\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 93, "score": 91652.30535522838 }, { "content": "pub fn wait_child(pid: Pid) -> Result<i32> {\n\n let wait_status = waitpid(pid, None)?;\n\n match wait_status {\n\n WaitStatus::Exited(_, status) => Ok(status),\n\n // TODO: Handle errors.\n\n _ => {\n\n let err = format_err!(\"waitpid returned an unexpected value: {:?}\", wait_status);\n\n\n\n warn!(\"waitpid: {}\", err);\n\n Err(err)\n\n }\n\n }\n\n}\n\n\n\n/// The exit status or reason why the command exited.\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq)]\n\npub enum ExitStatus {\n\n ExitedWith(i32),\n\n Running(Pid /* pgid */),\n\n Break,\n", "file_path": "src/process.rs", "rank": 94, "score": 88839.3393778706 }, { "content": "type InternalCommand = fn(&mut InternalCommandContext) -> ExitStatus;\n\npub static INTERNAL_COMMANDS: phf::Map<&'static str, InternalCommand> = phf_map! 
{\n\n \"xkcd-true-random-number\" => xkcd_rand_command,\n\n \"alias\" => crate::builtins::alias::command,\n\n \"echo\" => crate::builtins::echo::command,\n\n \"cd\" => crate::builtins::cd::command,\n\n \"source\" => crate::builtins::source::command,\n\n \"exit\" => crate::builtins::exit::command,\n\n \"exec\" => crate::builtins::exec::command,\n\n \"export\" => crate::builtins::export::command,\n\n \"set\" => crate::builtins::set::command,\n\n \"fg\" => crate::builtins::fg::command,\n\n \"bg\" => crate::builtins::bg::command,\n\n \"wait\" => crate::builtins::wait::command,\n\n \"jobs\" => crate::builtins::jobs::command,\n\n \"shift\" => crate::builtins::shift::command,\n\n \"read\" => crate::builtins::read::command,\n\n \"unset\" => crate::builtins::unset::command,\n\n \"pushd\" => crate::builtins::pushd::command,\n\n \"popd\" => crate::builtins::popd::command,\n\n \"eval\" => crate::builtins::eval::command,\n\n \"rehash\" => crate::builtins::rehash::command,\n\n};\n", "file_path": "src/builtins/mod.rs", "rank": 95, "score": 86828.16982428051 }, { "content": "fn parse_alias(alias: &str) -> Result<Alias, parser::ParseError> {\n\n AliasParser::parse(Rule::alias, alias)\n\n .map_err(|err| parser::ParseError::Fatal(err.to_string()))\n\n .map(|mut pairs| {\n\n let mut inner = pairs.next().unwrap().into_inner();\n\n let name = inner.next().unwrap().as_span().as_str().to_owned();\n\n let body = inner.next().unwrap().as_str().to_owned();\n\n Alias { name, body }\n\n })\n\n}\n\n\n", "file_path": "src/builtins/alias.rs", "rank": 96, "score": 81368.3478772383 }, { "content": "/// TODO: Aliases should be expanded in the parser in order to support\n\n/// compound lists, e.g. alias cowsay-or-echo=\"cowsay hi || echo hi\".\n\n///\n\n/// That said, I believe this feature is not widely-used feature.\n\npub fn expand_alias(shell: &Shell, argv: &[Word]) -> Vec<Word> {\n\n argv\n\n // Get the first word.\n\n .get(0)\n\n // Get the first span in the first word.\n\n .and_then(|word| word.spans().get(0))\n\n // Make sure that the span is a literal (not parameters, etc.).\n\n .and_then(|span| match span {\n\n Span::Literal(lit) => Some(lit),\n\n _ => None,\n\n })\n\n // The very first span is literal. Search the registered aliases.\n\n .and_then(|lit| shell.lookup_alias(lit.as_str()))\n\n .map(|alias_str| {\n\n // Found the alias. 
Split the alias string by whitespace into words.\n\n let mut alias_words: Vec<Word> = alias_str\n\n .trim()\n\n .split(' ')\n\n .map(|w| {\n\n let span = Span::Literal(w.to_owned());\n", "file_path": "src/expand.rs", "rank": 97, "score": 79897.53693695538 }, { "content": "pub fn bash_server(tx_event: mpsc::Sender<Event>) -> mpsc::Sender<BashRequest> {\n\n let (tx, rx) = mpsc::channel();\n\n std::thread::spawn(move || {\n\n let mut bash = None;\n\n loop {\n\n if bash.is_none() {\n\n bash = Some(preload_bash());\n\n }\n\n\n\n let req = rx.recv().unwrap();\n\n match req {\n\n BashRequest::Complete {\n\n words,\n\n current_word,\n\n } => {\n\n trace!(\"completion: query={:?}\", words);\n\n let started_at = SystemTime::now();\n\n\n\n match run_bash(&mut bash, words, current_word) {\n\n Some(comps) => {\n", "file_path": "src/bash_server.rs", "rank": 98, "score": 71518.49723487491 }, { "content": "fn spawn_subshell(shell: &mut Shell, terms: &[parser::Term], ctx: &Context) -> Result<Pid> {\n\n match unsafe { fork() }.expect(\"failed to fork\") {\n\n ForkResult::Parent { child } => Ok(child),\n\n ForkResult::Child => {\n\n let status = match run_terms(shell, terms, ctx.stdin, ctx.stdout, ctx.stderr) {\n\n ExitStatus::ExitedWith(status) => status,\n\n _ => 1,\n\n };\n\n\n\n std::process::exit(status);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/eval.rs", "rank": 99, "score": 70039.28939756886 } ]
Rust
shell_automaton/src/peer/message/write/peer_message_write_effects.rs
simplestaking/tezos-rs
d859dff0a8db4f5adb4885e4885217d5284f7861
use std::net::SocketAddr; use tezos_encoding::binary_writer::BinaryWriterError; use tezos_messages::p2p::binary_message::BinaryWrite; use tezos_messages::p2p::encoding::peer::{PeerMessage, PeerMessageResponse}; use crate::peer::binary_message::write::{ PeerBinaryMessageWriteSetContentAction, PeerBinaryMessageWriteState, }; use crate::peer::message::write::{PeerMessageWriteErrorAction, PeerMessageWriteSuccessAction}; use crate::peers::graylist::{PeerGraylistReason, PeersGraylistAddressAction}; use crate::service::{Service, StatisticsService}; use crate::{Action, ActionId, ActionWithMeta, State, Store}; use super::{PeerMessageWriteError, PeerMessageWriteNextAction}; fn binary_message_write_init<S: Service>( store: &mut Store<S>, address: SocketAddr, message: &PeerMessageResponse, encoded_message: Result<Vec<u8>, BinaryWriterError>, ) -> bool { match encoded_message { Ok(bytes) => store.dispatch(PeerBinaryMessageWriteSetContentAction { address, message: bytes, }), Err(err) => store.dispatch(PeerMessageWriteErrorAction { address, error: PeerMessageWriteError::Encode(format!( "error: {:?}, message: {:?}", err, message )), }), } } fn stats_message_write_start( _: &State, stats_service: Option<&mut StatisticsService>, message: &PeerMessage, action_id: ActionId, ) { if let Some(stats) = stats_service { let time: u64 = action_id.into(); match message { PeerMessage::GetBlockHeaders(m) => m.get_block_headers().iter().for_each(|b| { stats.block_header_download_start(b, time); }), PeerMessage::GetOperationsForBlocks(m) => m .get_operations_for_blocks() .iter() .for_each(|b| stats.block_operations_download_start(b.block_hash(), time)), _ => {} } } } pub fn peer_message_write_effects<S>(store: &mut Store<S>, action: &ActionWithMeta) where S: Service, { match &action.action { Action::PeerMessageWriteNext(content) => { let peer = match store.state.get().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(v) => v, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Init { .. } = &peer.message_write.current { if let Some(next_message) = peer.message_write.queue.front() { stats_message_write_start( store.state.get(), store.service.statistics(), next_message.message(), action.id, ); let message_encode_result = next_message.as_bytes(); let next_message = next_message.clone(); binary_message_write_init( store, content.address, &next_message, message_encode_result, ); } } } Action::PeerMessageWriteInit(content) => { let peer = match store.state.get().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(v) => v, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Init { .. } = &peer.message_write.current { stats_message_write_start( store.state.get(), store.service.statistics(), content.message.message(), action.id, ); let message = content.message.clone(); binary_message_write_init( store, content.address, &message, content.message.as_bytes(), ); } } Action::PeerBinaryMessageWriteReady(content) => { let peer = match store.state().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(handshaked) => handshaked, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Ready { .. 
} = &peer.message_write.current { store.dispatch(PeerMessageWriteSuccessAction { address: content.address, }); } } Action::PeerMessageWriteSuccess(content) => { store.dispatch(PeerMessageWriteNextAction { address: content.address, }); } Action::PeerMessageWriteError(content) => { store.dispatch(PeersGraylistAddressAction { address: content.address, reason: PeerGraylistReason::MessageWriteError(content.error.clone()), }); } _ => {} } }
use std::net::SocketAddr; use tezos_encoding::binary_writer::BinaryWriterError; use tezos_messages::p2p::binary_message::BinaryWrite; use tezos_messages::p2p::encoding::peer::{PeerMessage, PeerMessageResponse}; use crate::peer::binary_message::write::{ PeerBinaryMessageWriteSetContentAction, PeerBinaryMessageWriteState, }; use crate::peer::message::write::{PeerMessageWriteErrorAction, PeerMessageWriteSuccessAction}; use crate::peers::graylist::{PeerGraylistReason, PeersGraylistAddressAction}; use crate::service::{Service, StatisticsService}; use crate::{Action, ActionId, ActionWithMeta, State, Store}; use super::{PeerMessageWriteError, PeerMessageWriteNextAction}; fn binary_message_write_init<S: Service>( store: &mut Store<S>, address: SocketAddr, message: &PeerMessageResponse, encoded_message: Result<Vec<u8>, BinaryWriterError>, ) -> bool { match encoded_message { Ok(bytes) => store.dispatch(PeerBinaryMessageWriteSetContentAction { address, message: bytes, }), Err(err) => store.dispatch(PeerMessageWriteErrorAction { address, error: PeerMessageWriteError::Encode(format!( "error: {:?}, message: {:?}", err, message )), }), } } fn stats_message_write_start( _: &State, stats_service: Option<&mut StatisticsService>, message: &PeerMessage, action_id: ActionId, ) { if let Some(stats) = stats_service { let time: u64 = action_id.into(); match message { PeerMessage::GetBlockHeaders(m) => m.get_block_headers().iter().for_each(|b| { stats.block_header_download_start(b, time); }), PeerMessage::GetOperationsForBlocks(m) => m .get_operations_for_blocks() .iter() .for_each(|b| stats.block_operations_download_start(b.block_hash(), time)), _ => {} } } } pub fn peer_message_write_effects<S>(store: &mut Store<S>, action: &ActionWithMeta) where S: Service, { match &action.action { Action::PeerMessageWriteNext(content) => { let peer = match store.state.get().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(v) => v, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Init { .. } = &peer.message_write.current { if let Some(next_message) = peer.message_write.queue.front() { stats_message_write_start( store.state.get(), store.service.statistics(), next_message.message(), action.id, ); let message_encode_result = next_message.as_bytes(); let next_message = next_message.clone();
; } } } Action::PeerMessageWriteInit(content) => { let peer = match store.state.get().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(v) => v, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Init { .. } = &peer.message_write.current { stats_message_write_start( store.state.get(), store.service.statistics(), content.message.message(), action.id, ); let message = content.message.clone(); binary_message_write_init( store, content.address, &message, content.message.as_bytes(), ); } } Action::PeerBinaryMessageWriteReady(content) => { let peer = match store.state().peers.get(&content.address) { Some(peer) => match peer.status.as_handshaked() { Some(handshaked) => handshaked, None => return, }, None => return, }; if let PeerBinaryMessageWriteState::Ready { .. } = &peer.message_write.current { store.dispatch(PeerMessageWriteSuccessAction { address: content.address, }); } } Action::PeerMessageWriteSuccess(content) => { store.dispatch(PeerMessageWriteNextAction { address: content.address, }); } Action::PeerMessageWriteError(content) => { store.dispatch(PeersGraylistAddressAction { address: content.address, reason: PeerGraylistReason::MessageWriteError(content.error.clone()), }); } _ => {} } }
binary_message_write_init( store, content.address, &next_message, message_encode_result, )
call_expression
[ { "content": "pub fn peer_message_read_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerMessageReadInit(content) => {\n\n let peer = match state\n\n .peers\n\n .list\n\n .get_mut(&content.address)\n\n .and_then(|v| v.status.as_handshaked_mut())\n\n {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n if let PeerMessageReadState::Success {\n\n read_crypto,\n\n message,\n\n ..\n\n } = &mut peer.message_read\n\n {\n\n peer.crypto = PeerCrypto::unsplit_after_reading(\n\n read_crypto.clone(),\n", "file_path": "shell_automaton/src/peer/message/read/peer_message_read_reducer.rs", "rank": 0, "score": 558434.2087212028 }, { "content": "pub fn peer_message_write_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerMessageWriteInit(content) => {\n\n let peer = match state\n\n .peers\n\n .get_mut(&content.address)\n\n .and_then(|v| v.status.as_handshaked_mut())\n\n {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n peer.message_write.queue.push_back(content.message.clone());\n\n\n\n if !matches!(\n\n &peer.message_write.current,\n\n PeerBinaryMessageWriteState::Init { .. }\n\n ) {\n\n return;\n\n }\n\n if let PeerMessage::GetBlockHeaders(m) = content.message.message() {\n", "file_path": "shell_automaton/src/peer/message/write/peer_message_write_reducer.rs", "rank": 1, "score": 558434.2087212028 }, { "content": "pub fn peer_binary_message_write_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerBinaryMessageWriteSetContent(action) => {\n\n if let Some(peer) = state.peers.get_mut(&action.address) {\n\n let binary_message_state = match &mut peer.status {\n\n PeerStatus::Handshaking(PeerHandshaking { status, .. }) => match status {\n\n PeerHandshakingStatus::MetadataMessageWritePending {\n\n binary_message_state,\n\n ..\n\n }\n\n | PeerHandshakingStatus::AckMessageWritePending {\n\n binary_message_state,\n\n ..\n\n } => binary_message_state,\n\n _ => return,\n\n },\n\n PeerStatus::Handshaked(PeerHandshaked { message_write, .. }) => {\n\n &mut message_write.current\n\n }\n\n _ => return,\n", "file_path": "shell_automaton/src/peer/binary_message/write/peer_binary_message_write_reducer.rs", "rank": 2, "score": 544138.1955121068 }, { "content": "pub fn peer_binary_message_read_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerBinaryMessageReadInit(action) => {\n\n if let Some(peer) = state.peers.get_mut(&action.address) {\n\n let binary_message_state = match &mut peer.status {\n\n PeerStatus::Handshaking(PeerHandshaking { status, .. 
}) => match status {\n\n PeerHandshakingStatus::MetadataMessageReadPending {\n\n binary_message_state,\n\n ..\n\n }\n\n | PeerHandshakingStatus::AckMessageReadPending {\n\n binary_message_state,\n\n ..\n\n } => binary_message_state,\n\n _ => return,\n\n },\n\n PeerStatus::Handshaked(PeerHandshaked {\n\n message_read:\n\n PeerMessageReadState::Pending {\n\n binary_message_read,\n", "file_path": "shell_automaton/src/peer/binary_message/read/peer_binary_message_read_reducer.rs", "rank": 3, "score": 544138.1955121068 }, { "content": "pub fn peer_message_read_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerMessageReadInit(content) => {\n\n store.dispatch(PeerBinaryMessageReadInitAction {\n\n address: content.address,\n\n });\n\n }\n\n Action::PeerBinaryMessageReadReady(content) => {\n\n match store.state().peers.get(&content.address) {\n\n Some(peer) => match peer.status.as_handshaked() {\n\n Some(_handshaked) => (),\n\n None => return,\n\n },\n\n None => return,\n\n };\n\n\n\n match PeerMessageResponse::from_bytes(&content.message) {\n", "file_path": "shell_automaton/src/peer/message/read/peer_message_read_effects.rs", "rank": 4, "score": 531187.1896075075 }, { "content": "pub fn peer_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerTryReadLoopStart(PeerTryReadLoopStartAction { address }) => {\n\n if let Some(peer) = state.peers.get_mut(address) {\n\n peer.try_read_loop = PeerIOLoopState::Started {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n }\n\n Action::PeerTryReadLoopFinish(PeerTryReadLoopFinishAction { address, result }) => {\n\n if let Some(peer) = state.peers.get_mut(address) {\n\n peer.try_read_loop = PeerIOLoopState::Finished {\n\n time: action.time_as_nanos(),\n\n result: result.clone(),\n\n };\n\n }\n\n }\n\n Action::PeerTryWriteLoopStart(PeerTryWriteLoopStartAction { address }) => {\n\n if let Some(peer) = state.peers.get_mut(address) {\n\n peer.try_write_loop = PeerIOLoopState::Started {\n", "file_path": "shell_automaton/src/peer/peer_reducer.rs", "rank": 6, "score": 524552.4630396002 }, { "content": "pub fn peer_binary_message_write_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerBinaryMessageWriteSetContent(action) => {\n\n if let Some(peer) = store.state.get().peers.get(&action.address) {\n\n let binary_message_state = match &peer.status {\n\n PeerStatus::Handshaking(PeerHandshaking { status, .. }) => match status {\n\n PeerHandshakingStatus::MetadataMessageWritePending {\n\n binary_message_state,\n\n ..\n\n }\n\n | PeerHandshakingStatus::AckMessageWritePending {\n\n binary_message_state,\n\n ..\n\n } => binary_message_state,\n\n _ => return,\n\n },\n\n PeerStatus::Handshaked(PeerHandshaked { message_write, .. }) => {\n", "file_path": "shell_automaton/src/peer/binary_message/write/peer_binary_message_write_effects.rs", "rank": 7, "score": 517688.65241982765 }, { "content": "pub fn peer_binary_message_read_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerBinaryMessageReadInit(action) => {\n\n if let Some(peer) = store.state.get().peers.get(&action.address) {\n\n let binary_message_state = match &peer.status {\n\n PeerStatus::Handshaking(PeerHandshaking { status, .. 
}) => match status {\n\n PeerHandshakingStatus::MetadataMessageReadPending {\n\n binary_message_state,\n\n ..\n\n }\n\n | PeerHandshakingStatus::AckMessageReadPending {\n\n binary_message_state,\n\n ..\n\n } => binary_message_state,\n\n _ => return,\n\n },\n\n PeerStatus::Handshaked(PeerHandshaked {\n", "file_path": "shell_automaton/src/peer/binary_message/read/peer_binary_message_read_effects.rs", "rank": 8, "score": 517688.65241982765 }, { "content": "pub fn peer_handshaking_reducer(state: &mut State, action: &ActionWithMeta) {\n\n let action_time = action.time_as_nanos();\n\n\n\n match &action.action {\n\n Action::PeerHandshakingInit(action) => {\n\n if let Some(peer) = state.peers.get_mut(&action.address) {\n\n match peer.status {\n\n PeerStatus::Connecting(PeerConnectionState::Outgoing(\n\n PeerConnectionOutgoingState::Success { token, .. },\n\n )) => {\n\n peer.status = PeerStatus::Handshaking(PeerHandshaking {\n\n token,\n\n incoming: false,\n\n status: PeerHandshakingStatus::Init { time: action_time },\n\n nack_motive: None,\n\n since: action_time,\n\n });\n\n }\n\n PeerStatus::Connecting(PeerConnectionState::Incoming(\n\n PeerConnectionIncomingState::Success { token, .. },\n", "file_path": "shell_automaton/src/peer/handshaking/peer_handshaking_reducer.rs", "rank": 9, "score": 509437.8951690834 }, { "content": "pub fn peer_disconnection_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerDisconnect(action) => {\n\n let peer = match state.peers.get_mut(&action.address) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n\n\n peer.status = match &peer.status {\n\n PeerStatus::Potential => return,\n\n PeerStatus::Connecting(state) => {\n\n if let Some(token) = state.token() {\n\n PeerDisconnecting { token }.into()\n\n } else {\n\n PeerStatus::Disconnected\n\n }\n\n }\n\n PeerStatus::Handshaking(state) => PeerDisconnecting { token: state.token }.into(),\n\n PeerStatus::Handshaked(state) => PeerDisconnecting { token: state.token }.into(),\n\n PeerStatus::Disconnecting(_) => return,\n", "file_path": "shell_automaton/src/peer/disconnection/peer_disconnection_reducer.rs", "rank": 10, "score": 509437.8951690834 }, { "content": "pub fn peers_graylist_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeersGraylistIpAdd(action_content) => {\n\n state.peers.ip_blacklist_entry(action_content.ip).or_insert(\n\n PeerBlacklistState::Graylisted {\n\n since: action.time_as_nanos(),\n\n },\n\n );\n\n }\n\n Action::PeersGraylistIpRemove(action_content) => {\n\n state.peers.remove_blacklisted_ip(&action_content.ip);\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/peers/graylist/peers_graylist_reducer.rs", "rank": 11, "score": 509437.8951690834 }, { "content": "pub fn peers_add_reducer(state: &mut State, action: &ActionWithMeta) {\n\n if let Action::PeersAddIncomingPeer(PeersAddIncomingPeerAction { address, token }) =\n\n &action.action\n\n {\n\n if let Ok(entry) = state.peers.entry(*address) {\n\n entry.or_insert_with(|| Peer {\n\n status: PeerStatus::Connecting(\n\n PeerConnectionIncomingState::Pending {\n\n time: action.time_as_nanos(),\n\n token: *token,\n\n }\n\n .into(),\n\n ),\n\n try_read_loop: PeerIOLoopState::Idle,\n\n try_write_loop: PeerIOLoopState::Idle,\n\n });\n\n }\n\n }\n\n}\n", "file_path": "shell_automaton/src/peers/add/peers_add_reducer.rs", "rank": 12, "score": 509437.8951690834 }, { "content": "pub fn peers_remove_reducer(state: &mut State, action: &ActionWithMeta) {\n\n if let 
Action::PeersRemove(action) = &action.action {\n\n if let Some(peer) = state.peers.get(&action.address) {\n\n // we aren't allowed to remove peer until peer is disconnected.\n\n if matches!(&peer.status, PeerStatus::Disconnected) {\n\n state.peers.remove(&action.address);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "shell_automaton/src/peers/remove/peers_remove_reducer.rs", "rank": 13, "score": 509437.8951690834 }, { "content": "pub fn peers_graylist_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeersGraylistAddress(action) => {\n\n if store.state.get().config.peers_graylist_disable {\n\n store.dispatch(PeerDisconnectAction {\n\n address: action.address,\n\n });\n\n return;\n\n }\n\n\n\n store.dispatch(PeersGraylistIpAddAction {\n\n ip: action.address.ip(),\n\n });\n\n }\n\n Action::PeersGraylistIpAdd(action) => {\n\n store.dispatch(PeersGraylistIpAddedAction { ip: action.ip });\n\n }\n\n Action::PeersGraylistIpAdded(action) => {\n\n let peers = &store.state.get().peers;\n\n // find all peers with same ip and disconnect/remove them.\n", "file_path": "shell_automaton/src/peers/graylist/peers_graylist_effects.rs", "rank": 14, "score": 509208.9221547573 }, { "content": "pub fn peers_init_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n if let Action::PeersInit(_) = &action.action {\n\n let list = store.state().config.peers_dns_lookup_addresses.clone();\n\n\n\n // Do dns lookups to gather some potential peers.\n\n for (address, port) in list.into_iter() {\n\n store.dispatch(PeersDnsLookupInitAction { address, port });\n\n }\n\n\n\n // Try connecting to potential peers if we need peers.\n\n store.dispatch(PeerConnectionOutgoingRandomInitAction {});\n\n }\n\n}\n", "file_path": "shell_automaton/src/peers/init/peers_init_effects.rs", "rank": 15, "score": 509208.9221547573 }, { "content": "pub fn mempool_effects<S>(store: &mut Store<State, S, Action>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n // println!(\"{:#?}\", action);\n\n if store.state().config.disable_mempool {\n\n match &action.action {\n\n Action::MempoolOperationInject(MempoolOperationInjectAction { rpc_id, .. 
}) => {\n\n let json = serde_json::Value::String(\"disabled\".to_string());\n\n store.service().rpc().respond(*rpc_id, json);\n\n }\n\n Action::MempoolGetPendingOperations(MempoolGetPendingOperationsAction { rpc_id }) => {\n\n store\n\n .service()\n\n .rpc()\n\n .respond(*rpc_id, MempoolOperations::default());\n\n }\n\n _ => (),\n\n }\n\n return;\n", "file_path": "shell_automaton/src/mempool/mempool_effects.rs", "rank": 16, "score": 497071.9647541711 }, { "content": "pub fn peer_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n // Handle peer related mio event.\n\n Action::P2pPeerEvent(event) => {\n\n let address = event.address();\n\n\n\n if event.is_closed() {\n\n return {\n\n store.dispatch(PeerConnectionClosedAction { address });\n\n };\n\n }\n\n\n\n if event.is_writable() {\n\n // when we receive first writable event from mio,\n\n // that's when we know that we successfuly connected\n\n // to the peer.\n\n let peer = match store.state.get().peers.get(&address) {\n", "file_path": "shell_automaton/src/peer/peer_effects.rs", "rank": 17, "score": 496737.8680790291 }, { "content": "fn update_operation_sent_stats(state: &mut State, address: SocketAddr, time: u64) {\n\n let peer = match state.peers.get(&address) {\n\n Some(v) => match v.status.as_handshaked() {\n\n Some(v) => v,\n\n None => return,\n\n },\n\n None => return,\n\n };\n\n let msg = match peer.message_write.queue.front() {\n\n Some(v) => v.message(),\n\n None => return,\n\n };\n\n\n\n match msg {\n\n PeerMessage::CurrentHead(msg) => {\n\n let block_header = msg.current_block_header();\n\n let mempool = msg.current_mempool();\n\n let op_hash_iter = mempool\n\n .pending()\n\n .iter()\n", "file_path": "shell_automaton/src/mempool/mempool_reducer.rs", "rank": 18, "score": 496102.58493818145 }, { "content": "pub fn peer_connection_incoming_reducer(state: &mut State, action: &ActionWithMeta) {\n\n let action_time = action.time_as_nanos();\n\n\n\n match &action.action {\n\n Action::PeerConnectionIncomingError(action) => {\n\n if let Some(peer) = state.peers.get_mut(&action.address) {\n\n if let PeerStatus::Connecting(PeerConnectionState::Incoming(incoming)) =\n\n &peer.status\n\n {\n\n peer.status = PeerStatus::Connecting(\n\n PeerConnectionIncomingState::Error {\n\n time: action_time,\n\n error: action.error.clone(),\n\n token: incoming.token(),\n\n }\n\n .into(),\n\n );\n\n }\n\n }\n\n }\n", "file_path": "shell_automaton/src/peer/connection/incoming/peer_connection_incoming_reducer.rs", "rank": 19, "score": 495765.95665551955 }, { "content": "pub fn peers_add_multi_reducer(state: &mut State, action: &ActionWithMeta) {\n\n if let Action::PeersAddMulti(PeersAddMultiAction { addresses }) = &action.action {\n\n let max_len = state\n\n .config\n\n .peers_potential_max\n\n .saturating_sub(state.peers.potential_len());\n\n\n\n for address in addresses.iter().take(max_len) {\n\n if let Ok(entry) = state.peers.entry(*address) {\n\n entry.or_insert_with(|| Peer {\n\n status: PeerStatus::Potential,\n\n try_read_loop: PeerIOLoopState::Idle,\n\n try_write_loop: PeerIOLoopState::Idle,\n\n });\n\n }\n\n }\n\n }\n\n}\n", "file_path": "shell_automaton/src/peers/add/multi/peers_add_multi_reducer.rs", "rank": 20, "score": 495765.95665551955 }, { "content": "pub fn peer_connection_outgoing_reducer(state: &mut State, action: &ActionWithMeta) {\n\n let action_time = action.time_as_nanos();\n\n\n\n match &action.action {\n\n Action::PeerConnectionOutgoingInit(action) => {\n\n if let 
Some(peer) = state.peers.get_mut(&action.address) {\n\n if matches!(peer.status, PeerStatus::Potential) {\n\n peer.status = PeerStatus::Connecting(\n\n PeerConnectionOutgoingState::Idle { time: action_time }.into(),\n\n );\n\n }\n\n }\n\n }\n\n Action::PeerConnectionOutgoingPending(action) => {\n\n if let Some(peer) = state.peers.get_mut(&action.address) {\n\n if matches!(\n\n peer.status,\n\n PeerStatus::Connecting(PeerConnectionState::Outgoing(\n\n PeerConnectionOutgoingState::Idle { .. }\n\n ))\n", "file_path": "shell_automaton/src/peer/connection/outgoing/peer_connection_outgoing_reducer.rs", "rank": 21, "score": 495765.95665551943 }, { "content": "pub fn peers_check_timeouts_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeersCheckTimeoutsInit(_) => {\n\n if matches!(\n\n &state.peers.check_timeouts,\n\n PeersCheckTimeoutsState::Idle { .. }\n\n ) {\n\n state.peers.check_timeouts = PeersCheckTimeoutsState::Init {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n }\n\n Action::PeersCheckTimeoutsSuccess(action_content) => {\n\n state.peers.check_timeouts = PeersCheckTimeoutsState::Success {\n\n time: action.time_as_nanos(),\n\n peer_timeouts: action_content.peer_timeouts.clone(),\n\n graylist_timeouts: action_content.graylist_timeouts.clone(),\n\n };\n\n }\n\n Action::PeersCheckTimeoutsCleanup(_) => {\n\n state.peers.check_timeouts = PeersCheckTimeoutsState::Idle {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/peers/check/timeouts/peers_check_timeouts_reducer.rs", "rank": 22, "score": 495765.95665551955 }, { "content": "pub fn peers_dns_lookup_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeersDnsLookupInit(action) => {\n\n state.peers.dns_lookup = Some(PeersDnsLookupState {\n\n address: action.address.clone(),\n\n port: action.port,\n\n status: PeersDnsLookupStatus::Init,\n\n });\n\n }\n\n Action::PeersDnsLookupError(action) => {\n\n if let Some(dns_lookup_state) = state.peers.dns_lookup.as_mut() {\n\n if let PeersDnsLookupStatus::Init = dns_lookup_state.status {\n\n dns_lookup_state.status = PeersDnsLookupStatus::Error {\n\n error: action.error,\n\n };\n\n }\n\n }\n\n }\n\n Action::PeersDnsLookupSuccess(action) => {\n\n if let Some(dns_lookup_state) = state.peers.dns_lookup.as_mut() {\n", "file_path": "shell_automaton/src/peers/dns_lookup/peers_dns_lookup_reducer.rs", "rank": 23, "score": 495765.95665551943 }, { "content": "pub fn peer_chunk_read_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerChunkReadInit(action) => {\n\n if let Some(peer) = state.peers.get_mut(&action.address) {\n\n let chunk_state = match &mut peer.status {\n\n PeerStatus::Handshaking(PeerHandshaking { status, .. }) => match status {\n\n PeerHandshakingStatus::ConnectionMessageReadPending {\n\n chunk_state, ..\n\n } => chunk_state,\n\n PeerHandshakingStatus::MetadataMessageReadPending {\n\n binary_message_state,\n\n ..\n\n }\n\n | PeerHandshakingStatus::AckMessageReadPending {\n\n binary_message_state,\n\n ..\n\n } => match binary_message_state {\n\n PeerBinaryMessageReadState::PendingFirstChunk { chunk }\n\n | PeerBinaryMessageReadState::Pending { chunk, .. 
} => &mut chunk.state,\n\n _ => return,\n", "file_path": "shell_automaton/src/peer/chunk/read/peer_chunk_read_reducer.rs", "rank": 24, "score": 495765.95665551955 }, { "content": "pub fn peer_chunk_write_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerChunkWriteSetContent(action) => {\n\n if let Some(peer) = state.peers.get_mut(&action.address) {\n\n let chunk_state = match &mut peer.status {\n\n PeerStatus::Handshaking(PeerHandshaking { status, .. }) => match status {\n\n PeerHandshakingStatus::ConnectionMessageWritePending {\n\n chunk_state,\n\n ..\n\n } => chunk_state,\n\n PeerHandshakingStatus::MetadataMessageWritePending {\n\n binary_message_state: PeerBinaryMessageWriteState::Pending { chunk, .. },\n\n ..\n\n }\n\n | PeerHandshakingStatus::AckMessageWritePending {\n\n binary_message_state: PeerBinaryMessageWriteState::Pending { chunk, .. },\n\n ..\n\n } => &mut chunk.state,\n\n _ => return,\n\n },\n", "file_path": "shell_automaton/src/peer/chunk/write/peer_chunk_write_reducer.rs", "rank": 25, "score": 495765.95665551955 }, { "content": "pub fn peers_dns_lookup_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeersDnsLookupInit(action) => {\n\n let result = store\n\n .service()\n\n .dns()\n\n .resolve_dns_name_to_peer_address(&action.address, action.port);\n\n match result {\n\n Ok(addresses) => store.dispatch(PeersDnsLookupSuccessAction { addresses }),\n\n Err(err) => store.dispatch(PeersDnsLookupErrorAction { error: err.into() }),\n\n };\n\n }\n\n Action::PeersDnsLookupSuccess(_) => {\n\n let dns_lookup_state = match store.state.get().peers.dns_lookup.as_ref() {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n if let PeersDnsLookupStatus::Success { addresses } = &dns_lookup_state.status {\n\n let addresses = addresses.clone();\n\n store.dispatch(PeersAddMultiAction { addresses });\n\n }\n\n store.dispatch(PeersDnsLookupCleanupAction {});\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/peers/dns_lookup/peers_dns_lookup_effects.rs", "rank": 26, "score": 494674.3345338118 }, { "content": "pub fn peers_add_multi_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n if let Action::PeersAddMulti(_) = &action.action {\n\n store.dispatch(PeerConnectionOutgoingRandomInitAction {});\n\n }\n\n}\n", "file_path": "shell_automaton/src/peers/add/multi/peers_add_multi_effects.rs", "rank": 27, "score": 494674.3345338118 }, { "content": "pub fn peer_requests_potential_peers_get_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerRequestsPotentialPeersGetInit(content) => {\n\n let peer = match state.peers.get_handshaked_mut(&content.address) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n peer.requests.potential_peers_get = PeerRequestsPotentialPeersGetState::Init {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::PeerRequestsPotentialPeersGetPending(content) => {\n\n let peer = match state.peers.get_handshaked_mut(&content.address) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n peer.requests.potential_peers_get = PeerRequestsPotentialPeersGetState::Pending {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n", "file_path": "shell_automaton/src/peer/requests/potential_peers_get/peer_requests_potential_peers_get_reducer.rs", "rank": 28, "score": 484335.3753536282 }, { "content": "pub fn peer_connection_incoming_accept_reducer(state: &mut State, action: &ActionWithMeta) {\n\n let action_time = 
action.time_as_nanos();\n\n\n\n match &action.action {\n\n Action::PeerConnectionIncomingAcceptSuccess(action) => {\n\n match &state.peer_connection_incoming_accept {\n\n PeerConnectionIncomingAcceptState::Idle { .. } => {}\n\n _ => return,\n\n }\n\n state.peer_connection_incoming_accept = PeerConnectionIncomingAcceptState::Success {\n\n time: action_time,\n\n token: action.token,\n\n address: action.address,\n\n };\n\n }\n\n Action::PeerConnectionIncomingAcceptError(action) => {\n\n if matches!(&action.error, PeerConnectionIncomingAcceptError::WouldBlock) {\n\n return;\n\n }\n\n state.peer_connection_incoming_accept = PeerConnectionIncomingAcceptState::Error {\n", "file_path": "shell_automaton/src/peer/connection/incoming/accept/peer_connection_incoming_accept_reducer.rs", "rank": 29, "score": 483330.95829521026 }, { "content": "pub fn peer_disconnection_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerDisconnect(action) => {\n\n let address = action.address;\n\n let peer = match store.state.get().peers.get(&address) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n\n\n match &peer.status {\n\n PeerStatus::Disconnecting(disconnection_state) => {\n\n let peer_token = disconnection_state.token;\n\n store.service().mio().peer_disconnect(peer_token);\n\n store.dispatch(PeerDisconnectedAction { address });\n\n }\n\n PeerStatus::Disconnected => {\n\n store.dispatch(PeerDisconnectedAction { address });\n", "file_path": "shell_automaton/src/peer/disconnection/peer_disconnection_effects.rs", "rank": 30, "score": 482604.36845785833 }, { "content": "pub fn peer_handshaking_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerHandshakingInit(action) => {\n\n let nonce = store.service().randomness().get_nonce(action.address);\n\n let config = &store.state().config;\n\n match ConnectionMessage::try_new(\n\n config.port,\n\n &config.identity.public_key,\n\n &config.identity.proof_of_work_stamp,\n\n nonce,\n\n config.shell_compatibility_version.to_network_version(),\n\n ) {\n\n Ok(connection_message) => {\n\n store.dispatch(PeerHandshakingConnectionMessageInitAction {\n\n address: action.address,\n\n message: connection_message,\n\n });\n", "file_path": "shell_automaton/src/peer/handshaking/peer_handshaking_effects.rs", "rank": 31, "score": 482604.36845785833 }, { "content": "pub fn effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n // these four effects must be first and in this order!\n\n // if action.action.as_ref().starts_with(\"Rights\") {\n\n // slog::debug!(store.state().log, \"Rights action\"; \"action\" => format!(\"{:#?}\", action.action));\n\n // }\n\n\n\n logger_effects(store, action);\n\n last_action_effects(store, action);\n\n applied_actions_count_effects(store, action);\n\n\n\n paused_loops_effects(store, action);\n\n\n\n stats_current_head_effects(store, action);\n\n\n\n protocol_runner_effects(store, action);\n\n\n\n protocol_runner_spawn_server_effects(store, action);\n\n\n\n protocol_runner_latest_context_hashes_effects(store, action);\n\n\n", "file_path": "shell_automaton/src/effects.rs", "rank": 32, "score": 475776.4663304592 }, { "content": "pub fn reducer(state: &mut State, action: &ActionWithMeta) {\n\n if let Action::StorageStateSnapshotCreateInit(_) = &action.action {\n\n // This action shouldn't cause changes in the state, so that in the\n\n // effects, we will save exact same state that was 
before calling\n\n // this action.\n\n return;\n\n }\n\n\n\n chain_reducers!(\n\n state,\n\n action,\n\n paused_loops_reducer,\n\n protocol_runner_latest_context_hashes_reducer,\n\n protocol_runner_spawn_server_reducer,\n\n protocol_runner_init_reducer,\n\n protocol_runner_init_runtime_reducer,\n\n protocol_runner_init_context_reducer,\n\n protocol_runner_init_context_ipc_server_reducer,\n\n protocol_runner_reducer,\n\n current_head_reducer,\n", "file_path": "shell_automaton/src/reducer.rs", "rank": 33, "score": 471871.2518268393 }, { "content": "pub fn peer_connection_outgoing_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerConnectionOutgoingRandomInit(_) => {\n\n let state = store.state.get();\n\n let potential_peers = state.peers.potential_iter().collect::<Vec<_>>();\n\n\n\n if state.peers.connected_len() >= state.config.peers_connected_max {\n\n return;\n\n }\n\n\n\n if let Some(address) = store.service.randomness().choose_peer(&potential_peers) {\n\n store.dispatch(PeerConnectionOutgoingInitAction { address });\n\n }\n\n }\n\n Action::PeerConnectionOutgoingInit(action) => {\n\n let address = action.address;\n\n let result = store.service().mio().peer_connection_init(address);\n", "file_path": "shell_automaton/src/peer/connection/outgoing/peer_connection_outgoing_effects.rs", "rank": 34, "score": 469772.0180568823 }, { "content": "pub fn peer_chunk_read_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerChunkReadInit(action) => {\n\n if let Some(peer) = store.state.get().peers.get(&action.address) {\n\n if peer.try_read_loop.can_be_started() {\n\n store.dispatch(PeerTryReadLoopStartAction {\n\n address: action.address,\n\n });\n\n }\n\n }\n\n }\n\n Action::PeerChunkReadPart(action) => {\n\n if let Some(peer) = store.state.get().peers.get(&action.address) {\n\n let binary_message_state = match &peer.status {\n\n PeerStatus::Handshaking(PeerHandshaking { status, .. }) => match status {\n\n PeerHandshakingStatus::ConnectionMessageReadPending {\n\n chunk_state, ..\n", "file_path": "shell_automaton/src/peer/chunk/read/peer_chunk_read_effects.rs", "rank": 35, "score": 469772.0180568823 }, { "content": "pub fn peer_connection_incoming_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerConnectionIncomingSuccess(action) => {\n\n store.dispatch(PeerHandshakingInitAction {\n\n address: action.address,\n\n });\n\n }\n\n Action::PeerConnectionIncomingError(action) => {\n\n store.dispatch(PeersGraylistAddressAction {\n\n address: action.address,\n\n reason: PeerGraylistReason::ConnectionIncomingError,\n\n });\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/peer/connection/incoming/peer_connection_incoming_effects.rs", "rank": 36, "score": 469772.0180568823 }, { "content": "pub fn peers_check_timeouts_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n let state = store.state.get();\n\n let current_time = state.time_as_nanos();\n\n\n\n match &action.action {\n\n Action::PeersCheckTimeoutsInit(_) => {\n\n if !matches!(\n\n &state.peers.check_timeouts,\n\n PeersCheckTimeoutsState::Init { .. 
}\n\n ) {\n\n return;\n\n }\n\n\n\n let peer_connecting_timeout = state.config.peer_connecting_timeout.as_nanos() as u64;\n\n let peer_handshaking_timeout = state.config.peer_handshaking_timeout.as_nanos() as u64;\n\n\n\n let peer_timeouts = state\n", "file_path": "shell_automaton/src/peers/check/timeouts/peers_check_timeouts_effects.rs", "rank": 37, "score": 469772.0180568823 }, { "content": "pub fn peer_connection_closed_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n if let Action::PeerConnectionClosed(action) = &action.action {\n\n store.dispatch(PeerDisconnectAction {\n\n address: action.address,\n\n });\n\n }\n\n}\n", "file_path": "shell_automaton/src/peer/connection/closed/peer_connection_closed_effects.rs", "rank": 38, "score": 469772.0180568823 }, { "content": "pub fn peer_chunk_write_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerChunkWriteSetContent(action) => {\n\n if let Some(peer) = store.state.get().peers.get(&action.address) {\n\n let binary_message_state = match &peer.status {\n\n PeerStatus::Handshaking(PeerHandshaking { status, .. }) => match status {\n\n PeerHandshakingStatus::ConnectionMessageWritePending {\n\n chunk_state: PeerChunkWriteState::UnencryptedContent { content },\n\n ..\n\n } => {\n\n return match BinaryChunk::from_content(content) {\n\n Ok(chunk) => {\n\n store.dispatch(PeerChunkWriteCreateChunkAction {\n\n address: action.address,\n\n chunk,\n\n });\n\n }\n", "file_path": "shell_automaton/src/peer/chunk/write/peer_chunk_write_effects.rs", "rank": 39, "score": 469772.0180568823 }, { "content": "pub fn last_action_reducer(state: &mut State, action: &ActionWithMeta) {\n\n state.set_last_action(action);\n\n}\n\n\n", "file_path": "shell_automaton/src/reducer.rs", "rank": 40, "score": 469563.1633621496 }, { "content": "pub fn rights_reducer(state: &mut State, action: &ActionWithMeta<Action>) {\n\n let requests = &mut state.rights.requests;\n\n match &action.action {\n\n // RPC actions\n\n Action::RightsRpcGet(RightsRpcGetAction { key, rpc_id }) => {\n\n state\n\n .rights\n\n .rpc_requests\n\n .entry(key.clone())\n\n .or_default()\n\n .push(*rpc_id);\n\n }\n\n Action::RightsPruneRpcRequest(RightsRpcPruneAction { key }) => {\n\n state.rights.rpc_requests.remove(key);\n\n }\n\n\n\n // Auxiliary actions\n\n Action::RightsInit(RightsInitAction { key }) if !requests.contains_key(key) => {\n\n requests.insert(key.clone(), RightsRequest::Init { start: action.id });\n\n }\n", "file_path": "shell_automaton/src/rights/rights_reducer.rs", "rank": 41, "score": 463914.23291164084 }, { "content": "pub fn applied_actions_count_reducer(state: &mut State, _: &ActionWithMeta) {\n\n state.applied_actions_count += 1;\n\n}\n\n\n", "file_path": "shell_automaton/src/reducer.rs", "rank": 42, "score": 462262.87062779616 }, { "content": "pub fn peer_remote_requests_block_header_get_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerRemoteRequestsBlockHeaderGetEnqueue(content) => {\n\n let peer = match state.peers.get_handshaked_mut(&content.address) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n\n\n peer.remote_requests\n\n .block_header_get\n\n .queue\n\n .insert(content.block_hash.clone());\n\n }\n\n Action::PeerRemoteRequestsBlockHeaderGetPending(content) => {\n\n let peer = match state.peers.get_handshaked_mut(&content.address) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n\n\n let queue = &mut 
peer.remote_requests.block_header_get.queue;\n", "file_path": "shell_automaton/src/peer/remote_requests/block_header_get/peer_remote_requests_block_header_get_reducer.rs", "rank": 43, "score": 461532.8104814122 }, { "content": "pub fn peer_remote_requests_current_branch_get_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PeerRemoteRequestsCurrentBranchGetInit(content) => {\n\n let peer = match state.peers.get_handshaked_mut(&content.address) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n\n\n peer.remote_requests.current_branch_get =\n\n PeerRemoteRequestsCurrentBranchGetState::Init {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::PeerRemoteRequestsCurrentBranchGetPending(content) => {\n\n let peer = match state.peers.get_handshaked_mut(&content.address) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n let current_head = match state.current_head.get() {\n\n Some(v) => v,\n", "file_path": "shell_automaton/src/peer/remote_requests/current_branch_get/peer_remote_requests_current_branch_get_reducer.rs", "rank": 44, "score": 461532.81048141216 }, { "content": "#[allow(unused)]\n\npub fn logger_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n // eprintln!(\"[+] Action: {}\", action.action.as_ref());\n\n // eprintln!(\"[+] Action: {:#?}\", &action);\n\n // eprintln!(\"[+] State: {:#?}\\n\", store.state());\n\n\n\n let state = store.state.get();\n\n let log = &state.log;\n\n\n\n match &action.action {\n\n Action::CurrentHeadUpdate(content) => {\n\n slog::info!(log, \"CurrentHead Updated\";\n\n \"level\" => content.new_head.header.level(),\n\n \"hash\" => content.new_head.hash.to_string(),\n\n \"fitness\" => display_fitness(content.new_head.header.fitness()),\n\n \"payload_hash\" => format!(\"{:?}\", content.payload_hash.as_ref()));\n\n slog::debug!(log, \"CurrentHead Updated - full header\";\n\n \"new_head\" => slog::FnValue(|_| format!(\"{:?}\", content.new_head)));\n\n }\n\n Action::PeerCurrentHeadUpdate(content) => {\n\n slog::info!(log, \"Peer CurrentHead Updated\";\n", "file_path": "shell_automaton/src/logger/logger_effects.rs", "rank": 45, "score": 460127.712479907 }, { "content": "pub fn rpc_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::WakeupEvent(_) => {\n\n let wakeup_timestamp = Instant::now();\n\n while let Ok((msg, rpc_id)) = store.service().rpc().try_recv_stream() {\n\n match msg {\n\n RpcRequestStream::Bootstrapped => {\n\n store.dispatch(RpcBootstrappedAction { rpc_id });\n\n }\n\n RpcRequestStream::ValidBlocks(query) => {\n\n store.dispatch(RpcMonitorValidBlocksAction { query, rpc_id });\n\n }\n\n RpcRequestStream::GetOperations {\n\n applied,\n\n refused,\n\n branch_delayed,\n\n branch_refused,\n\n outdated,\n\n } => {\n\n store.dispatch(MempoolRegisterOperationsStreamAction {\n", "file_path": "shell_automaton/src/rpc/rpc_effects.rs", "rank": 46, "score": 460127.712479907 }, { "content": "pub fn actors_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n if let Action::WakeupEvent(_) = &action.action {\n\n while let Ok(msg) = store.service.actors().try_recv() {\n\n match msg {\n\n ActorsMessageFrom::Shutdown => {\n\n store.dispatch(ShutdownInitAction {});\n\n }\n\n ActorsMessageFrom::P2pInit => {\n\n store.dispatch(PeersInitAction {});\n\n }\n\n ActorsMessageFrom::ApplyBlock {\n\n block_hash,\n\n callback,\n\n ..\n\n } => {\n\n store\n\n .service\n\n .actors()\n\n .register_apply_block_callback(block_hash.clone(), 
callback);\n\n store.dispatch(BlockApplierEnqueueBlockAction {\n\n block_hash,\n\n injector_rpc_id: None,\n\n });\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "shell_automaton/src/actors/actors_effects.rs", "rank": 47, "score": 460127.712479907 }, { "content": "pub fn peer_requests_potential_peers_get_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::PeerRequestsPotentialPeersGetInit(content) => {\n\n store.dispatch(PeerMessageWriteInitAction {\n\n address: content.address,\n\n message: Arc::new(PeerMessage::Bootstrap.into()),\n\n });\n\n store.dispatch(PeerRequestsPotentialPeersGetPendingAction {\n\n address: content.address,\n\n });\n\n }\n\n Action::MioTimeoutEvent(_)\n\n | Action::PeerDisconnected(_)\n\n | Action::PeerRequestsPotentialPeersGetError(_) => {\n\n request_potential_peers_from_any_peer(store);\n\n }\n\n Action::PeerRequestsPotentialPeersGetSuccess(content) => {\n", "file_path": "shell_automaton/src/peer/requests/potential_peers_get/peer_requests_potential_peers_get_effects.rs", "rank": 48, "score": 459994.1909558134 }, { "content": "pub fn storage_state_snapshot_create_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::StorageStateSnapshotCreateInit(_) => {\n\n let req_payload =\n\n StorageRequestPayload::StateSnapshotPut(Box::new(store.state().clone()));\n\n let req = StorageRequest::new(None, req_payload).subscribe();\n\n let _ = store.service.storage().request_send(req);\n\n\n\n let action = StorageStateSnapshotCreatePendingAction {\n\n action_id: store.state().last_action.id(),\n\n applied_actions_count: store.state().applied_actions_count,\n\n };\n\n store.dispatch(action);\n\n }\n\n Action::StorageStateSnapshotCreateError(_)\n\n | Action::StorageStateSnapshotCreateSuccess(_) => {\n\n store.dispatch(StorageStateSnapshotCreateInitAction {});\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/storage/state_snapshot/create/storage_state_snapshot_create_effects.rs", "rank": 49, "score": 458178.79818769643 }, { "content": "pub fn peer_connection_incoming_accept_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::P2pServerEvent(_) => {\n\n store.dispatch(PeerConnectionIncomingAcceptAction {});\n\n }\n\n Action::PeerConnectionIncomingAccept(_) => {\n\n let state = store.state.get();\n\n\n\n match store.service.mio().peer_connection_incoming_accept() {\n\n Ok((peer_token, peer)) => {\n\n let peer_address = peer.address;\n\n\n\n if state.peers.connected_len() >= state.config.peers_connected_max {\n\n store.dispatch(PeerConnectionIncomingRejectedAction {\n\n token: peer_token,\n\n address: peer_address,\n\n reason:\n", "file_path": "shell_automaton/src/peer/connection/incoming/accept/peer_connection_incoming_accept_effects.rs", "rank": 50, "score": 458061.8780250275 }, { "content": "pub fn prechecker_reducer(state: &mut State, action: &ActionWithMeta) {\n\n // let PrecheckerState {\n\n // cached_operations,\n\n // operations,\n\n // proto_cache,\n\n // ..\n\n // } = &mut state.prechecker;\n\n let operations = &mut state.prechecker.operations;\n\n let cached_operations = &mut state.prechecker.cached_operations;\n\n let proto_cache = &mut state.prechecker.proto_cache;\n\n //let rights = &mut state.prechecker.rights;\n\n let rights = &mut state.rights;\n\n match &action.action {\n\n 
Action::PrecheckerCurrentHeadUpdate(PrecheckerCurrentHeadUpdateAction { head, .. }) => {\n\n let min_level = head.header.level().saturating_sub(2);\n\n let old_operations = cached_operations.remove_older(min_level);\n\n for old_operation in old_operations {\n\n let op_state = operations.remove(&old_operation);\n\n slog::debug!(\n\n state.log,\n", "file_path": "shell_automaton/src/prechecker/prechecker_reducer.rs", "rank": 51, "score": 457323.1835712466 }, { "content": "pub fn bootstrap_reducer(state: &mut State, action: &ActionWithMeta) {\n\n log_stats(state);\n\n match &action.action {\n\n Action::BootstrapInit(_) => {\n\n state.bootstrap = BootstrapState::Init {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::BootstrapPeersConnectPending(_) => {\n\n state.bootstrap = BootstrapState::PeersConnectPending {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::BootstrapPeersConnectSuccess(_) => {\n\n state.bootstrap = BootstrapState::PeersConnectSuccess {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::BootstrapPeersMainBranchFindPending(_) => {\n\n state.bootstrap = BootstrapState::PeersMainBranchFindPending {\n", "file_path": "shell_automaton/src/bootstrap/bootstrap_reducer.rs", "rank": 52, "score": 457323.1835712467 }, { "content": "pub fn shutdown_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::ShutdownInit(_) => {\n\n state.shutdown = ShutdownState::Init {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::ShutdownPending(_) => {\n\n state.shutdown = ShutdownState::pending(action.time_as_nanos());\n\n }\n\n Action::ShutdownSuccess(_) => {\n\n state.shutdown = ShutdownState::Success {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::ProtocolRunnerShutdownSuccess(_) => {\n\n if let ShutdownState::Pending(state) = &mut state.shutdown {\n\n state.protocol_runner_shutdown = true\n\n }\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/shutdown/shutdown_reducer.rs", "rank": 53, "score": 457323.1835712467 }, { "content": "pub fn mempool_reducer(state: &mut State, action: &ActionWithMeta) {\n\n if state.config.disable_mempool {\n\n return;\n\n }\n\n let mempool_state = &mut state.mempool;\n\n\n\n match &action.action {\n\n Action::MempoolValidatorValidateSuccess(content) => {\n\n let current_head_level = state.current_head.get().map(|v| v.header.level());\n\n\n\n if let Some(rpc_id) = mempool_state.injecting_rpc_ids.remove(&content.op_hash) {\n\n mempool_state.injected_rpc_ids.push(rpc_id);\n\n }\n\n\n\n match &content.result {\n\n MempoolValidatorValidateResult::Applied(v) => {\n\n if let Some(op) = mempool_state.pending_operations.remove(&v.hash) {\n\n mempool_state\n\n .validated_operations\n\n .ops\n", "file_path": "shell_automaton/src/mempool/mempool_reducer.rs", "rank": 54, "score": 457323.1835712467 }, { "content": "pub fn storage_request_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::StorageRequestCreate(content) => {\n\n state.storage.requests.add(StorageRequestState {\n\n status: StorageRequestStatus::Idle {\n\n time: action.time_as_nanos(),\n\n },\n\n payload: content.payload.clone(),\n\n requestor: content.requestor.clone(),\n\n });\n\n }\n\n Action::StorageRequestPending(content) => {\n\n if let Some(req) = state.storage.requests.get_mut(content.req_id) {\n\n if let StorageRequestStatus::Idle { .. 
} = &req.status {\n\n req.status = StorageRequestStatus::Pending {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n }\n\n }\n", "file_path": "shell_automaton/src/storage/request/storage_request_reducer.rs", "rank": 55, "score": 444331.1382702426 }, { "content": "pub fn paused_loops_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::PausedLoopsAdd(action) => {\n\n state.paused_loops.add(action.data.clone());\n\n }\n\n Action::PausedLoopsResumeNextInit(_) => {\n\n if matches!(&state.paused_loops.current, PausedLoopCurrent::Init(_)) {\n\n return;\n\n }\n\n\n\n if let Some(next_loop) = state.paused_loops.pop_front() {\n\n state.paused_loops.current = PausedLoopCurrent::Init(next_loop);\n\n }\n\n }\n\n Action::PausedLoopsResumeNextSuccess(_) => {\n\n let current = &mut state.paused_loops.current;\n\n\n\n if matches!(current, PausedLoopCurrent::Init(_)) {\n\n *current = PausedLoopCurrent::Success;\n\n }\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/paused_loops/paused_loops_reducer.rs", "rank": 56, "score": 444331.1382702426 }, { "content": "pub fn mempool_validator_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::MempoolValidatorInit(_) => {\n\n state.mempool.validator = MempoolValidatorState::Init {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::MempoolValidatorPending(_) => {\n\n let block_hash = match state.current_head.get() {\n\n Some(v) => v.hash.clone(),\n\n None => return,\n\n };\n\n state.mempool.validator = MempoolValidatorState::Pending {\n\n time: action.time_as_nanos(),\n\n block_hash,\n\n };\n\n }\n\n Action::MempoolValidatorSuccess(content) => {\n\n if let MempoolValidatorState::Pending {\n\n time, block_hash, ..\n", "file_path": "shell_automaton/src/mempool/validator/mempool_validator_reducer.rs", "rank": 57, "score": 444331.1382702426 }, { "content": "pub fn block_applier_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::BlockApplierEnqueueBlock(content) => {\n\n state\n\n .block_applier\n\n .queue\n\n .push_back((content.block_hash.clone(), content.injector_rpc_id));\n\n }\n\n Action::BlockApplierApplyInit(content) => {\n\n if let Some((block_hash, _)) = state.block_applier.queue.front() {\n\n if block_hash == &content.block_hash {\n\n state.block_applier.queue.pop_front();\n\n }\n\n }\n\n state.block_applier.current = BlockApplierApplyState::Init {\n\n time: action.time_as_nanos(),\n\n block_hash: content.block_hash.clone(),\n\n injector_rpc_id: content.injector_rpc_id,\n\n };\n\n }\n", "file_path": "shell_automaton/src/block_applier/block_applier_reducer.rs", "rank": 58, "score": 444331.1382702426 }, { "content": "pub fn current_head_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::CurrentHeadRehydrateInit(_) => {\n\n state.current_head = CurrentHeadState::RehydrateInit {\n\n time: action.time_as_nanos(),\n\n };\n\n }\n\n Action::CurrentHeadRehydratePending(content) => {\n\n state.current_head = CurrentHeadState::RehydratePending {\n\n time: action.time_as_nanos(),\n\n storage_req_id: content.storage_req_id,\n\n };\n\n }\n\n Action::CurrentHeadRehydrateError(content) => {\n\n state.current_head = CurrentHeadState::RehydrateError {\n\n time: action.time_as_nanos(),\n\n error: content.error.clone(),\n\n };\n\n }\n\n Action::CurrentHeadRehydrateSuccess(content) => {\n", "file_path": "shell_automaton/src/current_head/current_head_reducer.rs", "rank": 59, "score": 
444331.1382702426 }, { "content": "pub fn protocol_runner_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::ProtocolRunnerReady(_) => {\n\n let (genesis_commit_hash, latest_context_hashes) = match &state.protocol_runner {\n\n ProtocolRunnerState::LatestContextHashesGet(\n\n ProtocolRunnerLatestContextHashesState::Success {\n\n genesis_commit_hash,\n\n latest_context_hashes,\n\n ..\n\n }\n\n | ProtocolRunnerLatestContextHashesState::Error {\n\n genesis_commit_hash,\n\n latest_context_hashes,\n\n ..\n\n },\n\n ) => (genesis_commit_hash.clone(), latest_context_hashes.clone()),\n\n ProtocolRunnerState::Init(ProtocolRunnerInitState::Success {\n\n genesis_commit_hash,\n\n }) => (genesis_commit_hash.clone(), Vec::new()),\n\n _ => return,\n", "file_path": "shell_automaton/src/protocol_runner/protocol_runner_reducer.rs", "rank": 60, "score": 444331.1382702426 }, { "content": "pub fn storage_state_snapshot_create_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::StorageStateSnapshotCreatePending(action) => {\n\n let state = &mut state.storage.state_snapshot.create;\n\n if !state.is_pending() {\n\n *state = StorageStateSnapshotCreateState::Pending {\n\n action_id: action.action_id,\n\n applied_actions_count: action.applied_actions_count,\n\n };\n\n }\n\n }\n\n Action::StorageStateSnapshotCreateError(action) => {\n\n let state = &mut state.storage.state_snapshot.create;\n\n let (action_id, applied_actions_count) = match state {\n\n StorageStateSnapshotCreateState::Pending {\n\n action_id,\n\n applied_actions_count,\n\n } => (*action_id, *applied_actions_count),\n\n _ => return,\n\n };\n", "file_path": "shell_automaton/src/storage/state_snapshot/create/storage_state_snapshot_create_reducer.rs", "rank": 61, "score": 439830.1281726993 }, { "content": "pub fn protocol_runner_init_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::ProtocolRunnerInit(_) => {\n\n state.protocol_runner = ProtocolRunnerInitState::Init {}.into();\n\n }\n\n Action::ProtocolRunnerInitCheckGenesisApplied(_) => {\n\n state.protocol_runner = ProtocolRunnerInitState::CheckGenesisApplied {}.into();\n\n }\n\n Action::ProtocolRunnerInitCheckGenesisAppliedSuccess(content) => {\n\n state.protocol_runner = ProtocolRunnerInitState::CheckGenesisAppliedSuccess {\n\n is_applied: content.is_applied,\n\n }\n\n .into();\n\n }\n\n Action::ProtocolRunnerInitSuccess(_) => {\n\n let genesis_commit_hash = match &state.protocol_runner {\n\n ProtocolRunnerState::Init(ProtocolRunnerInitState::ContextIpcServer((\n\n result,\n\n _,\n\n ))) => result.genesis_commit_hash.clone(),\n", "file_path": "shell_automaton/src/protocol_runner/init/protocol_runner_init_reducer.rs", "rank": 62, "score": 432651.8020667112 }, { "content": "pub fn rights_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n let requests = &store.state.get().rights.requests;\n\n let cache = &store.state.get().rights.cache;\n\n let log = &store.state.get().log;\n\n match &action.action {\n\n // Main entry action\n\n Action::RightsGet(RightsGetAction { key }) => {\n\n match &key.0 {\n\n RightsInput::Baking(_) => {\n\n if let Some((_, baking_rights)) = key\n\n .level()\n\n .as_ref()\n\n .and_then(|level| cache.baking.get(level))\n\n .cloned()\n\n {\n\n trace!(log, \"Baking rights using cache\"; \"key\" => FnValue(|_| format!(\"{:?}\", key)));\n\n store.dispatch(RightsBakingOldReadyAction {\n", "file_path": 
"shell_automaton/src/rights/rights_effects.rs", "rank": 63, "score": 430938.6533732443 }, { "content": "pub fn shutdown_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::ShutdownInit(_) => {\n\n store.dispatch(ProtocolRunnerShutdownInitAction {});\n\n store.dispatch(ShutdownPendingAction {});\n\n }\n\n Action::ShutdownPending(_) | Action::ProtocolRunnerShutdownSuccess(_) => {\n\n // Enabling condition for `ShutdownSuccessAction` will be checked\n\n // and if indeed shutdown was successful, this action will be dispatched.\n\n store.dispatch(ShutdownSuccessAction {});\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/shutdown/shutdown_effects.rs", "rank": 64, "score": 430938.6533732443 }, { "content": "pub fn bootstrap_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::BootstrapInit(_) => {\n\n store.dispatch(BootstrapPeersConnectPendingAction {});\n\n }\n\n Action::BootstrapPeersConnectPending(_) => {\n\n store.dispatch(BootstrapPeersConnectSuccessAction {});\n\n }\n\n Action::BootstrapPeersConnectSuccess(_) => {\n\n store.dispatch(BootstrapPeersMainBranchFindInitAction {});\n\n }\n\n Action::PeerHandshakingFinish(content) => {\n\n if let BootstrapState::PeersConnectPending { .. } = &store.state().bootstrap {\n\n store.dispatch(BootstrapPeersConnectSuccessAction {});\n\n return;\n\n }\n\n let message = GetCurrentBranchMessage::new(store.state().config.chain_id.clone());\n", "file_path": "shell_automaton/src/bootstrap/bootstrap_effects.rs", "rank": 65, "score": 430938.6533732443 }, { "content": "pub fn prechecker_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n let prechecker_state = &store.state.get().prechecker;\n\n let prechecker_state_operations = &prechecker_state.operations;\n\n match &action.action {\n\n Action::PrecheckerPrecheckOperation(action) => {\n\n match prechecker_state_operations.get(&action.hash) {\n\n Some(Ok(_)) => {\n\n store.dispatch(PrecheckerDecodeOperationAction::from(&action.hash));\n\n }\n\n Some(Err(_)) => {\n\n store.dispatch(PrecheckerErrorAction::from(&action.hash));\n\n store.dispatch(PrecheckerPruneOperationAction::from(&action.hash));\n\n }\n\n _ => {}\n\n }\n\n }\n\n Action::PrecheckerRevalidateOperation(action) => {\n", "file_path": "shell_automaton/src/prechecker/prechecker_effects.rs", "rank": 66, "score": 430938.6533732443 }, { "content": "pub fn rpc_reducer(state: &mut crate::State, action: &crate::ActionWithMeta) {\n\n match &action.action {\n\n Action::RpcBootstrapped(RpcBootstrappedAction { rpc_id }) => {\n\n let bootstrapped = &mut state.rpc.bootstrapped;\n\n bootstrapped.requests.insert(*rpc_id);\n\n if bootstrapped.state.is_none() {\n\n if let BlockApplierApplyState::Success { block, .. 
} = &state.block_applier.current\n\n {\n\n bootstrapped.state = Some(BootstrapState {\n\n json: serde_json::json!({\n\n \"block\": block.hash,\n\n \"timestamp\": block.header.timestamp(),\n\n }),\n\n is_bootstrapped: true,\n\n });\n\n }\n\n }\n\n }\n\n Action::RpcBootstrappedNewBlock(RpcBootstrappedNewBlockAction {\n\n block,\n", "file_path": "shell_automaton/src/rpc/rpc_reducer.rs", "rank": 67, "score": 430724.5758107852 }, { "content": "pub fn protocol_runner_init_context_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::ProtocolRunnerInitContext(_) => {\n\n let apply_genesis = match &state.protocol_runner {\n\n ProtocolRunnerState::Init(\n\n ProtocolRunnerInitState::CheckGenesisAppliedSuccess { is_applied },\n\n ) => !is_applied || state.config.init_storage_data.replay.is_some(),\n\n _ => return,\n\n };\n\n state.protocol_runner = ProtocolRunnerState::Init(\n\n ProtocolRunnerInitContextState::Init { apply_genesis }.into(),\n\n );\n\n }\n\n Action::ProtocolRunnerInitContextPending(content) => {\n\n let init_context_state = match &mut state.protocol_runner {\n\n ProtocolRunnerState::Init(ProtocolRunnerInitState::Context(v)) => v,\n\n _ => return,\n\n };\n\n let apply_genesis = init_context_state.apply_genesis();\n\n *init_context_state = ProtocolRunnerInitContextState::Pending {\n", "file_path": "shell_automaton/src/protocol_runner/init/context/protocol_runner_init_context_reducer.rs", "rank": 68, "score": 422091.06020768866 }, { "content": "pub fn storage_blocks_genesis_init_reducer(state: &mut State, action: &ActionWithMeta) {\n\n if let Action::StorageBlocksGenesisInitSuccess(_) = &action.action {\n\n state.storage.blocks.genesis.init = StorageBlocksGenesisInitState::Success;\n\n }\n\n}\n", "file_path": "shell_automaton/src/storage/blocks/genesis/init/storage_blocks_genesis_init_reducer.rs", "rank": 69, "score": 422091.06020768866 }, { "content": "pub fn protocol_runner_init_runtime_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::ProtocolRunnerInitRuntime(_) => {\n\n state.protocol_runner =\n\n ProtocolRunnerState::Init(ProtocolRunnerInitRuntimeState::Init.into());\n\n }\n\n Action::ProtocolRunnerInitRuntimePending(content) => {\n\n state.protocol_runner = ProtocolRunnerState::Init(\n\n ProtocolRunnerInitRuntimeState::Pending {\n\n token: content.token,\n\n }\n\n .into(),\n\n );\n\n }\n\n Action::ProtocolRunnerInitRuntimeError(content) => {\n\n state.protocol_runner = ProtocolRunnerState::Init(\n\n ProtocolRunnerInitRuntimeState::Error {\n\n token: content.token,\n\n }\n\n .into(),\n", "file_path": "shell_automaton/src/protocol_runner/init/runtime/protocol_runner_init_runtime_reducer.rs", "rank": 70, "score": 422091.06020768866 }, { "content": "pub fn protocol_runner_spawn_server_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::ProtocolRunnerSpawnServerInit(_) => {\n\n state.protocol_runner = ProtocolRunnerSpawnServerState::Init.into();\n\n }\n\n Action::ProtocolRunnerSpawnServerPending(_) => {\n\n state.protocol_runner = ProtocolRunnerSpawnServerState::Pending {}.into();\n\n }\n\n Action::ProtocolRunnerSpawnServerError(content) => {\n\n state.protocol_runner = ProtocolRunnerSpawnServerState::Error {\n\n error: content.error.clone(),\n\n }\n\n .into();\n\n }\n\n Action::ProtocolRunnerSpawnServerSuccess(_) => {\n\n state.protocol_runner = ProtocolRunnerSpawnServerState::Success {}.into();\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": 
"shell_automaton/src/protocol_runner/spawn_server/protocol_runner_spawn_server_reducer.rs", "rank": 71, "score": 422091.06020768866 }, { "content": "pub fn current_head_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::CurrentHeadRehydrateInit(_) => {\n\n let chain_id = store.state().config.chain_id.clone();\n\n let level_override = store.state().config.current_head_level_override;\n\n let storage_req_id = store.state().storage.requests.next_req_id();\n\n\n\n let latest_context_hashes = match &store.state().protocol_runner {\n\n ProtocolRunnerState::Ready(state) => state.latest_context_hashes.clone(),\n\n _ => Vec::new(),\n\n };\n\n\n\n store.dispatch(StorageRequestCreateAction {\n\n payload: StorageRequestPayload::CurrentHeadGet(\n\n chain_id,\n\n level_override,\n\n latest_context_hashes,\n", "file_path": "shell_automaton/src/current_head/current_head_effects.rs", "rank": 72, "score": 418839.7047552796 }, { "content": "pub fn mempool_validator_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::MempoolValidatorInit(_) => {\n\n let chain_id = store.state().config.chain_id.clone();\n\n let req = match &store.state().current_head {\n\n CurrentHeadState::Rehydrated { head, .. } => BeginConstructionRequest {\n\n chain_id,\n\n predecessor: (*head.header).clone(),\n\n predecessor_hash: head.hash.clone(),\n\n protocol_data: None,\n\n },\n\n _ => return,\n\n };\n\n store.service().prevalidator().begin_construction(req);\n\n\n\n store.dispatch(MempoolValidatorPendingAction {});\n\n }\n", "file_path": "shell_automaton/src/mempool/validator/mempool_validator_effects.rs", "rank": 73, "score": 418839.7047552796 }, { "content": "pub fn protocol_runner_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::ProtocolRunnerStart(_) => {\n\n store.dispatch(ProtocolRunnerSpawnServerInitAction {});\n\n }\n\n Action::ProtocolRunnerSpawnServerSuccess(_) => {\n\n store.dispatch(ProtocolRunnerInitAction {});\n\n }\n\n Action::ProtocolRunnerInitSuccess(_) => {\n\n let is_irmin_only = matches!(\n\n &store.state.get().config.protocol_runner.storage,\n\n TezosContextStorageConfiguration::IrminOnly(_)\n\n );\n\n\n\n if is_irmin_only {\n\n // irmin doesn't support getting its latest context hashes\n\n store.dispatch(ProtocolRunnerReadyAction {});\n", "file_path": "shell_automaton/src/protocol_runner/protocol_runner_effects.rs", "rank": 74, "score": 418839.70475527947 }, { "content": "pub fn storage_request_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::StorageRequestCreate(_) => {\n\n store.dispatch(StorageRequestInitAction {\n\n req_id: store.state().storage.requests.last_added_req_id(),\n\n });\n\n }\n\n Action::StorageRequestInit(action) => {\n\n let req = match store.state.get().storage.requests.get(action.req_id) {\n\n Some(v) => v,\n\n None => return,\n\n };\n\n match req.status {\n\n StorageRequestStatus::Idle { .. 
} => {}\n\n _ => return,\n\n }\n\n // TODO: handle send error in case of mpsc disconnection.\n", "file_path": "shell_automaton/src/storage/request/storage_request_effects.rs", "rank": 75, "score": 418839.7047552796 }, { "content": "pub fn block_applier_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::BlockApplierEnqueueBlock(_) => {\n\n start_applying_next_block(store);\n\n }\n\n Action::BlockApplierApplyInit(content) => {\n\n let chain_id = store.state().config.chain_id.clone();\n\n let storage_req_id = store.state().storage.requests.next_req_id();\n\n store.dispatch(StorageRequestCreateAction {\n\n payload: StorageRequestPayload::PrepareApplyBlockData {\n\n chain_id: chain_id.into(),\n\n block_hash: content.block_hash.clone(),\n\n },\n\n requestor: StorageRequestor::BlockApplier,\n\n });\n\n\n\n store.dispatch(BlockApplierApplyPrepareDataPendingAction { storage_req_id });\n", "file_path": "shell_automaton/src/block_applier/block_applier_effects.rs", "rank": 76, "score": 418839.7047552796 }, { "content": "pub fn paused_loops_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n let state = store.state.get();\n\n\n\n match &action.action {\n\n Action::PausedLoopsResumeAll(_) => {\n\n for _ in 0..state.paused_loops.len() {\n\n store.dispatch(PausedLoopsResumeNextInitAction {});\n\n }\n\n }\n\n Action::PausedLoopsResumeNextInit(_) => {\n\n let paused_loop = match &state.paused_loops.current {\n\n PausedLoopCurrent::Init(v) => v,\n\n _ => return,\n\n };\n\n\n\n match paused_loop {\n\n PausedLoop::PeerTryWrite { peer_address } => {\n", "file_path": "shell_automaton/src/paused_loops/paused_loops_effects.rs", "rank": 77, "score": 418839.70475527947 }, { "content": "fn last_action_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n if let Some(stats) = store.service.statistics() {\n\n stats.action_new(action)\n\n }\n\n\n\n if !store.state.get().config.record_actions {\n\n return;\n\n }\n\n\n\n let _ = store.service.storage().request_send(StorageRequest::new(\n\n None,\n\n StorageRequestPayload::ActionPut(Box::new(action.clone())),\n\n ));\n\n}\n\n\n", "file_path": "shell_automaton/src/effects.rs", "rank": 78, "score": 413633.6762183575 }, { "content": "pub fn storage_blocks_genesis_check_applied_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::StorageBlocksGenesisCheckAppliedInit(_) => {\n\n state.storage.blocks.genesis.check_applied =\n\n StorageBlocksGenesisCheckAppliedState::GetMetaInit;\n\n }\n\n Action::StorageBlocksGenesisCheckAppliedGetMetaPending(_) => {\n\n let req_id = state.storage.requests.last_added_req_id();\n\n state.storage.blocks.genesis.check_applied =\n\n StorageBlocksGenesisCheckAppliedState::GetMetaPending { req_id };\n\n }\n\n Action::StorageBlocksGenesisCheckAppliedGetMetaError(_) => {\n\n state.storage.blocks.genesis.check_applied =\n\n StorageBlocksGenesisCheckAppliedState::GetMetaError {};\n\n }\n\n Action::StorageBlocksGenesisCheckAppliedGetMetaSuccess(content) => {\n\n state.storage.blocks.genesis.check_applied =\n\n StorageBlocksGenesisCheckAppliedState::GetMetaSuccess {\n\n meta: content.meta.clone(),\n\n };\n", "file_path": "shell_automaton/src/storage/blocks/genesis/check_applied/storage_blocks_genesis_check_applied_reducer.rs", "rank": 79, "score": 412491.9814687291 }, { "content": "pub fn protocol_runner_latest_context_hashes_reducer(state: &mut State, action: &ActionWithMeta) {\n\n 
match &action.action {\n\n Action::ProtocolRunnerLatestContextHashesInit(_) => {\n\n let genesis_commit_hash = match &state.protocol_runner {\n\n ProtocolRunnerState::Init(ProtocolRunnerInitState::Success {\n\n genesis_commit_hash,\n\n }) => genesis_commit_hash.clone(),\n\n _ => return,\n\n };\n\n\n\n state.protocol_runner = ProtocolRunnerLatestContextHashesState::Init {\n\n genesis_commit_hash,\n\n }\n\n .into();\n\n }\n\n Action::ProtocolRunnerLatestContextHashesPending(content) => {\n\n let genesis_commit_hash = match &state.protocol_runner {\n\n ProtocolRunnerState::LatestContextHashesGet(\n\n ProtocolRunnerLatestContextHashesState::Init {\n\n genesis_commit_hash,\n", "file_path": "shell_automaton/src/protocol_runner/latest_context_hashes/protocol_runner_latest_context_hashes_reducer.rs", "rank": 80, "score": 412491.9814687291 }, { "content": "fn applied_actions_count_effects<S: Service>(store: &mut Store<S>, action: &ActionWithMeta) {\n\n if !matches!(&action.action, Action::StorageStateSnapshotCreateInit(_)) {\n\n store.dispatch(StorageStateSnapshotCreateInitAction {});\n\n }\n\n}\n\n\n", "file_path": "shell_automaton/src/effects.rs", "rank": 81, "score": 409431.4059015969 }, { "content": "pub fn protocol_runner_init_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::ProtocolRunnerInit(_) => {\n\n store.dispatch(ProtocolRunnerInitRuntimeAction {});\n\n }\n\n Action::ProtocolRunnerInitRuntimeSuccess(_) => {\n\n store.dispatch(ProtocolRunnerInitCheckGenesisAppliedAction {});\n\n }\n\n Action::ProtocolRunnerInitCheckGenesisApplied(_) => {\n\n store.dispatch(StorageBlocksGenesisCheckAppliedInitAction {});\n\n }\n\n Action::StorageBlocksGenesisCheckAppliedSuccess(content) => {\n\n store.dispatch(ProtocolRunnerInitCheckGenesisAppliedSuccessAction {\n\n is_applied: content.is_applied,\n\n });\n\n }\n\n Action::ProtocolRunnerInitCheckGenesisAppliedSuccess(_) => {\n", "file_path": "shell_automaton/src/protocol_runner/init/protocol_runner_init_effects.rs", "rank": 82, "score": 407919.86000910867 }, { "content": "pub fn rights_cycle_delegates_reducer(state: &mut crate::State, action: &crate::ActionWithMeta) {\n\n let delegates_state = &mut state.rights.cycle_delegates;\n\n match &action.action {\n\n Action::RightsCycleDelegatesGet(RightsCycleDelegatesGetAction { cycle, .. }) => {\n\n delegates_state.insert(\n\n *cycle,\n\n CycleDelegatesQuery {\n\n state: CycleDelegatesQueryState::Init,\n\n },\n\n );\n\n }\n\n Action::RightsCycleDelegatesRequested(RightsCycleDelegatesRequestedAction {\n\n cycle,\n\n token,\n\n }) => {\n\n if let Some(req) = delegates_state.get_mut(cycle) {\n\n req.state = CycleDelegatesQueryState::ContextRequested(*token);\n\n }\n\n }\n\n Action::RightsCycleDelegatesSuccess(RightsCycleDelegatesSuccessAction {\n", "file_path": "shell_automaton/src/rights/cycle_delegates/rights_cycle_delegates_reducer.rs", "rank": 83, "score": 407716.842511329 }, { "content": "pub fn current_head_precheck_reducer(state: &mut crate::State, action: &crate::ActionWithMeta) {\n\n match &action.action {\n\n Action::CurrentHeadReceived(CurrentHeadReceivedAction {\n\n block_hash,\n\n block_header,\n\n ..\n\n }) => {\n\n let candidates = &mut state.current_heads.candidates;\n\n candidates\n\n .entry(block_hash.clone())\n\n .or_insert(super::CurrentHeadState::Received {\n\n block_header: block_header.clone(),\n\n });\n\n }\n\n Action::CurrentHeadPrecheck(CurrentHeadPrecheckAction { block_hash, .. 
}) => {\n\n let chain_id = &state.config.chain_id;\n\n let baking_cache = &state.rights.cache.baking;\n\n let applied_head = match state.current_head.get() {\n\n Some(v) => &v.header,\n\n None => return,\n", "file_path": "shell_automaton/src/current_head_precheck/current_head_precheck_reducer.rs", "rank": 84, "score": 407716.8425113289 }, { "content": "pub fn stats_current_head_reducer(state: &mut crate::State, action: &crate::ActionWithMeta) {\n\n match &action.action {\n\n Action::StatsCurrentHeadPrepareSend(StatsCurrentHeadPrepareSendAction {\n\n address,\n\n message,\n\n }) => {\n\n state\n\n .stats\n\n .current_head\n\n .pending_messages\n\n .insert(*address, message.clone());\n\n }\n\n Action::StatsCurrentHeadSent(StatsCurrentHeadSentAction { address })\n\n | Action::StatsCurrentHeadSentError(StatsCurrentHeadSentErrorAction { address }) => {\n\n state.stats.current_head.pending_messages.remove(address);\n\n }\n\n _ => (),\n\n }\n\n}\n", "file_path": "shell_automaton/src/stats/current_head/stats_current_head_reducer.rs", "rank": 85, "score": 407716.842511329 }, { "content": "pub fn rights_cycle_eras_reducer(state: &mut crate::State, action: &crate::ActionWithMeta) {\n\n let eras_state = &mut state.rights.cycle_eras;\n\n match &action.action {\n\n Action::RightsCycleErasGet(RightsCycleErasGetAction {\n\n block_hash,\n\n block_header,\n\n protocol_hash,\n\n }) => {\n\n eras_state.insert(\n\n protocol_hash.clone(),\n\n CycleErasQuery {\n\n block_hash: block_hash.clone(),\n\n block_header: block_header.clone(),\n\n state: CycleErasQueryState::PendingKV,\n\n },\n\n );\n\n }\n\n Action::RightsCycleErasKVSuccess(RightsCycleErasKVSuccessAction {\n\n protocol_hash,\n\n cycle_eras,\n", "file_path": "shell_automaton/src/rights/cycle_eras/rights_cycle_eras_reducer.rs", "rank": 86, "score": 407716.842511329 }, { "content": "pub fn baker_reducer<S, A>(state: &mut S, action: &ActionWithMeta<A>)\n\nwhere\n\n S: AsMut<Option<BakerState>>,\n\n A: AsRef<Option<BakerAction>>,\n\n{\n\n match action.action.as_ref() {\n\n // not our action\n\n None => (),\n\n Some(baker_action) => {\n\n if baker_action.is_event() {\n\n let now = tb::Timestamp {\n\n unix_epoch: Duration::from_nanos(action.time_as_nanos()),\n\n };\n\n\n\n let event = EventWithTime {\n\n action: baker_action.clone(),\n\n now,\n\n };\n\n let baker_state = state\n\n .as_mut()\n\n .take()\n\n .expect(\"baker state should not be empty outside of this reducer\");\n\n let new_baker_state = baker_state.handle_event(event);\n\n *state.as_mut() = Some(new_baker_state);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "apps/baker/src/machine/reducer.rs", "rank": 87, "score": 403945.87351075746 }, { "content": "pub fn storage_blocks_genesis_init_header_put_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::StorageBlocksGenesisInitHeaderPutInit(_) => {\n\n state.storage.blocks.genesis.init = StorageBlocksGenesisInitHeaderPutState::Init.into();\n\n }\n\n Action::StorageBlocksGenesisInitHeaderPutPending(_) => {\n\n let req_id = state.storage.requests.last_added_req_id();\n\n state.storage.blocks.genesis.init =\n\n StorageBlocksGenesisInitHeaderPutState::Pending { req_id }.into();\n\n }\n\n Action::StorageBlocksGenesisInitHeaderPutError(_) => {\n\n state.storage.blocks.genesis.init =\n\n StorageBlocksGenesisInitHeaderPutState::Error {}.into();\n\n }\n\n Action::StorageBlocksGenesisInitHeaderPutSuccess(content) => {\n\n state.storage.blocks.genesis.init = StorageBlocksGenesisInitHeaderPutState::Success {\n\n is_new_block: 
content.is_new_block,\n\n }\n\n .into();\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/storage/blocks/genesis/init/header_put/storage_blocks_genesis_init_header_put_reducer.rs", "rank": 88, "score": 403726.1968039631 }, { "content": "pub fn protocol_runner_init_context_ipc_server_reducer(state: &mut State, action: &ActionWithMeta) {\n\n match &action.action {\n\n Action::ProtocolRunnerInitContextIpcServer(_) => {\n\n let result = match &state.protocol_runner {\n\n ProtocolRunnerState::Init(ProtocolRunnerInitState::Context(\n\n ProtocolRunnerInitContextState::Success { result, .. },\n\n )) => result,\n\n _ => return,\n\n }\n\n .clone();\n\n state.protocol_runner =\n\n ProtocolRunnerState::Init(ProtocolRunnerInitState::ContextIpcServer((\n\n result,\n\n ProtocolRunnerInitContextIpcServerState::Init {},\n\n )));\n\n }\n\n Action::ProtocolRunnerInitContextIpcServerPending(content) => {\n\n if let ProtocolRunnerState::Init(ProtocolRunnerInitState::ContextIpcServer((\n\n _,\n\n state,\n", "file_path": "shell_automaton/src/protocol_runner/init/context_ipc_server/protocol_runner_init_context_ipc_server_reducer.rs", "rank": 89, "score": 403726.1968039631 }, { "content": "pub fn storage_blocks_genesis_init_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n match &action.action {\n\n Action::StorageBlocksGenesisInit(content) => {\n\n store.dispatch(StorageBlocksGenesisInitHeaderPutInitAction {\n\n genesis_commit_hash: content.genesis_commit_hash.clone(),\n\n });\n\n }\n\n Action::StorageBlocksGenesisInitHeaderPutSuccess(_) => {\n\n store.dispatch(StorageBlocksGenesisInitAdditionalDataPutInitAction {});\n\n }\n\n Action::StorageBlocksGenesisInitAdditionalDataPutSuccess(_) => {\n\n store.dispatch(StorageBlocksGenesisInitCommitResultGetInitAction {});\n\n }\n\n Action::StorageBlocksGenesisInitCommitResultGetSuccess(_) => {\n\n store.dispatch(StorageBlocksGenesisInitCommitResultPutInitAction {});\n\n }\n\n Action::StorageBlocksGenesisInitCommitResultPutSuccess(_) => {\n\n store.dispatch(StorageBlocksGenesisInitSuccessAction {});\n\n }\n\n Action::StorageBlocksGenesisInitSuccess(_) => {\n\n store.dispatch(CurrentHeadRehydrateInitAction {});\n\n store.dispatch(ProtocolRunnerNotifyStatusAction {});\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "shell_automaton/src/storage/blocks/genesis/init/storage_blocks_genesis_init_effects.rs", "rank": 90, "score": 398010.84208994085 }, { "content": "pub fn protocol_runner_init_context_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n if let Action::ProtocolRunnerInitContext(_) = &action.action {\n\n let state = store.state.get();\n\n let apply_genesis = match &state.protocol_runner {\n\n ProtocolRunnerState::Init(ProtocolRunnerInitState::Context(\n\n ProtocolRunnerInitContextState::Init { apply_genesis },\n\n )) => *apply_genesis,\n\n _ => return,\n\n };\n\n let res = store.service.protocol_runner().init_context(\n\n state.config.protocol_runner.storage.clone(),\n\n &state.config.protocol_runner.environment,\n\n apply_genesis,\n\n state.config.protocol_runner.enable_testchain,\n\n false,\n\n state.config.init_storage_data.patch_context.clone(),\n\n state.config.init_storage_data.context_stats_db_path.clone(),\n\n );\n\n match res {\n\n Ok(token) => store.dispatch(ProtocolRunnerInitContextPendingAction { token }),\n\n Err(error) => {\n\n store.dispatch(ProtocolRunnerInitContextErrorAction { token: None, error })\n\n }\n\n };\n\n }\n\n}\n", "file_path": 
"shell_automaton/src/protocol_runner/init/context/protocol_runner_init_context_effects.rs", "rank": 91, "score": 398010.84208994085 }, { "content": "pub fn protocol_runner_spawn_server_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n if let Action::ProtocolRunnerSpawnServerInit(_) = &action.action {\n\n store.service.protocol_runner().spawn_server();\n\n store.dispatch(ProtocolRunnerSpawnServerPendingAction {});\n\n }\n\n}\n", "file_path": "shell_automaton/src/protocol_runner/spawn_server/protocol_runner_spawn_server_effects.rs", "rank": 92, "score": 398010.84208994085 }, { "content": "pub fn protocol_runner_init_runtime_effects<S>(store: &mut Store<S>, action: &ActionWithMeta)\n\nwhere\n\n S: Service,\n\n{\n\n if let Action::ProtocolRunnerInitRuntime(_) = &action.action {\n\n let config = store\n\n .state()\n\n .config\n\n .protocol_runner\n\n .runtime_configuration\n\n .clone();\n\n let token = store.service.protocol_runner().init_runtime(config);\n\n store.dispatch(ProtocolRunnerInitRuntimePendingAction { token });\n\n }\n\n}\n", "file_path": "shell_automaton/src/protocol_runner/init/runtime/protocol_runner_init_runtime_effects.rs", "rank": 93, "score": 398010.84208994085 }, { "content": "pub fn baker_effects<S, Srv, A>(store: &mut Store<S, Srv, A>, action: &ActionWithMeta<A>)\n\nwhere\n\n S: AsRef<Option<BakerState>>,\n\n Srv: TimeService + BakerService,\n\n A: AsRef<Option<BakerAction>> + From<BakerAction>,\n\n{\n\n let act = action.action.as_ref();\n\n\n\n if act.as_ref().map(|a| a.is_event()).unwrap_or(false) {\n\n if let Some(baker_state) = store.state.get().as_ref() {\n\n let to_dispatch = baker_state.as_ref().actions.clone();\n\n for action in to_dispatch {\n\n store.dispatch(action);\n\n }\n\n }\n\n }\n\n\n\n let st = store.state.get().as_ref().as_ref().unwrap().as_ref();\n\n\n\n match act {\n", "file_path": "apps/baker/src/machine/effects.rs", "rank": 94, "score": 393511.8199963088 }, { "content": "fn is_action_enabled(action: Action, state: &State) -> bool {\n\n action.is_enabled(state)\n\n}\n\n\n", "file_path": "shell_automaton/tests/action_fuzz.rs", "rank": 95, "score": 390511.22651846695 }, { "content": "pub fn stats_current_head_effects<S>(store: &mut crate::Store<S>, action: &crate::ActionWithMeta)\n\nwhere\n\n S: crate::Service,\n\n{\n\n match &action.action {\n\n Action::BlockInject(BlockInjectAction {\n\n chain_id: _,\n\n block_hash,\n\n block_header,\n\n injected_timestamp,\n\n }) => {\n\n let node_id = store.state.get().config.identity.peer_id.clone();\n\n if let Some(s) = store.service.statistics() {\n\n s.block_new(\n\n block_hash.clone(),\n\n block_header.level(),\n\n block_header.timestamp().into(),\n\n block_header.validation_pass(),\n\n block_header.fitness().round(),\n\n action.time_as_nanos(),\n", "file_path": "shell_automaton/src/stats/current_head/stats_current_head_effects.rs", "rank": 96, "score": 386302.2349118703 }, { "content": "pub fn rights_cycle_eras_effects<S>(store: &mut crate::Store<S>, action: &crate::ActionWithMeta)\n\nwhere\n\n S: crate::Service,\n\n{\n\n let log = &store.state.get().log;\n\n let cycle_eras = &store.state.get().rights.cycle_eras;\n\n match &action.action {\n\n Action::RightsCycleErasGet(RightsCycleErasGetAction { protocol_hash, .. 
}) => {\n\n store.dispatch(kv_cycle_eras::StorageCycleErasGetAction::new(\n\n protocol_hash.clone(),\n\n ));\n\n }\n\n Action::StorageCycleErasOk(kv_cycle_eras::StorageCycleErasOkAction { key, value }) => {\n\n if matches!(\n\n cycle_eras.get_state(key),\n\n Some(CycleErasQueryState::PendingKV)\n\n ) {\n\n store.dispatch(RightsCycleErasKVSuccessAction {\n\n protocol_hash: key.clone(),\n\n cycle_eras: value.iter().map(CycleEra::from).collect(),\n", "file_path": "shell_automaton/src/rights/cycle_eras/rights_cycle_eras_effects.rs", "rank": 97, "score": 386302.2349118703 }, { "content": "pub fn current_head_precheck_effects<S>(store: &mut crate::Store<S>, action: &crate::ActionWithMeta)\n\nwhere\n\n S: crate::Service,\n\n{\n\n match &action.action {\n\n Action::PeerMessageReadSuccess(PeerMessageReadSuccessAction { message, .. }) => {\n\n if !block_prechecking_enabled(store.state()) {\n\n return;\n\n }\n\n let current_head = if let PeerMessage::CurrentHead(current_head) = message.message() {\n\n current_head\n\n } else {\n\n return;\n\n };\n\n\n\n let current_block_header = current_head.current_block_header();\n\n let block_hash = match current_block_header.message_typed_hash::<BlockHash>() {\n\n Ok(v) => v,\n\n Err(_) => return,\n\n };\n", "file_path": "shell_automaton/src/current_head_precheck/current_head_precheck_effects.rs", "rank": 98, "score": 386302.2349118704 }, { "content": "/// All the actions which trigger checking for timeouts are called here.\n\npub fn check_timeouts<S: Service>(store: &mut Store<S>) {\n\n store.dispatch(PeersCheckTimeoutsInitAction {});\n\n store.dispatch(BootstrapCheckTimeoutsInitAction {});\n\n store.dispatch(MempoolTimeoutsInitAction {});\n\n}\n\n\n", "file_path": "shell_automaton/src/effects.rs", "rank": 99, "score": 378784.71243895666 } ]
Rust
src/uu/seq/src/extendedbigdecimal.rs
353fc443/coreutils
ec386fa460e4fe4dfb7e6a0ec0ddcfabe0c41985
use std::cmp::Ordering;
use std::fmt::Display;
use std::ops::Add;

use bigdecimal::BigDecimal;
use num_bigint::BigInt;
use num_bigint::ToBigInt;
use num_traits::One;
use num_traits::Zero;

use crate::extendedbigint::ExtendedBigInt;

#[derive(Debug, Clone)]
pub enum ExtendedBigDecimal {
    BigDecimal(BigDecimal),
    Infinity,
    MinusInfinity,
    MinusZero,
    Nan,
}

fn ceil(x: BigDecimal) -> BigInt {
    if x.is_integer() {
        x.to_bigint().unwrap()
    } else {
        (x + BigDecimal::one().half()).round(0).to_bigint().unwrap()
    }
}

fn floor(x: BigDecimal) -> BigInt {
    if x.is_integer() {
        x.to_bigint().unwrap()
    } else {
        (x - BigDecimal::one().half()).round(0).to_bigint().unwrap()
    }
}

impl ExtendedBigDecimal {
    pub fn ceil(self) -> ExtendedBigInt {
        match self {
            ExtendedBigDecimal::BigDecimal(x) => ExtendedBigInt::BigInt(ceil(x)),
            other => From::from(other),
        }
    }

    pub fn floor(self) -> ExtendedBigInt {
        match self {
            ExtendedBigDecimal::BigDecimal(x) => ExtendedBigInt::BigInt(floor(x)),
            other => From::from(other),
        }
    }
}

impl From<ExtendedBigInt> for ExtendedBigDecimal {
    fn from(big_int: ExtendedBigInt) -> Self {
        match big_int {
            ExtendedBigInt::BigInt(n) => Self::BigDecimal(BigDecimal::from(n)),
            ExtendedBigInt::Infinity => ExtendedBigDecimal::Infinity,
            ExtendedBigInt::MinusInfinity => ExtendedBigDecimal::MinusInfinity,
            ExtendedBigInt::MinusZero => ExtendedBigDecimal::MinusZero,
            ExtendedBigInt::Nan => ExtendedBigDecimal::Nan,
        }
    }
}

impl Display for ExtendedBigDecimal {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ExtendedBigDecimal::BigDecimal(x) => {
                let (n, p) = x.as_bigint_and_exponent();
                match p {
                    0 => ExtendedBigDecimal::BigDecimal(BigDecimal::new(n * 10, 1)).fmt(f),
                    _ => x.fmt(f),
                }
            }
            ExtendedBigDecimal::Infinity => f32::INFINITY.fmt(f),
            ExtendedBigDecimal::MinusInfinity => f32::NEG_INFINITY.fmt(f),
            ExtendedBigDecimal::MinusZero => {
                (0.0f32).fmt(f)
            }
            ExtendedBigDecimal::Nan => "nan".fmt(f),
        }
    }
}

impl Zero for ExtendedBigDecimal {
    fn zero() -> Self {
        ExtendedBigDecimal::BigDecimal(BigDecimal::zero())
    }
    fn is_zero(&self) -> bool {
        match self {
            Self::BigDecimal(n) => n.is_zero(),
            Self::MinusZero => true,
            _ => false,
        }
    }
}

impl Add for ExtendedBigDecimal {
    type Output = Self;

    fn add(self, other: Self) -> Self {
        match (self, other) {
            (Self::BigDecimal(m), Self::BigDecimal(n)) => Self::BigDecimal(m.add(n)),
            (Self::BigDecimal(_), Self::MinusInfinity) => Self::MinusInfinity,
            (Self::BigDecimal(_), Self::Infinity) => Self::Infinity,
            (Self::BigDecimal(_), Self::Nan) => Self::Nan,
            (Self::BigDecimal(m), Self::MinusZero) => Self::BigDecimal(m),
            (Self::Infinity, Self::BigDecimal(_)) => Self::Infinity,
            (Self::Infinity, Self::Infinity) => Self::Infinity,
            (Self::Infinity, Self::MinusZero) => Self::Infinity,
            (Self::Infinity, Self::MinusInfinity) => Self::Nan,
            (Self::Infinity, Self::Nan) => Self::Nan,
            (Self::MinusInfinity, Self::BigDecimal(_)) => Self::MinusInfinity,
            (Self::MinusInfinity, Self::MinusInfinity) => Self::MinusInfinity,
            (Self::MinusInfinity, Self::MinusZero) => Self::MinusInfinity,
            (Self::MinusInfinity, Self::Infinity) => Self::Nan,
            (Self::MinusInfinity, Self::Nan) => Self::Nan,
            (Self::Nan, _) => Self::Nan,
            (Self::MinusZero, other) => other,
        }
    }
}

impl PartialEq for ExtendedBigDecimal {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::BigDecimal(m), Self::BigDecimal(n)) => m.eq(n),
            (Self::BigDecimal(_), Self::MinusInfinity) => false,
            (Self::BigDecimal(_), Self::Infinity) => false,
            (Self::BigDecimal(_), Self::Nan) => false,
            (Self::BigDecimal(_), Self::MinusZero) => false,
            (Self::Infinity, Self::BigDecimal(_)) => false,
            (Self::Infinity, Self::Infinity) => true,
            (Self::Infinity, Self::MinusZero) => false,
            (Self::Infinity, Self::MinusInfinity) => false,
            (Self::Infinity, Self::Nan) => false,
            (Self::MinusInfinity, Self::BigDecimal(_)) => false,
            (Self::MinusInfinity, Self::Infinity) => false,
            (Self::MinusInfinity, Self::MinusZero) => false,
            (Self::MinusInfinity, Self::MinusInfinity) => true,
            (Self::MinusInfinity, Self::Nan) => false,
            (Self::Nan, _) => false,
            (Self::MinusZero, Self::BigDecimal(_)) => false,
            (Self::MinusZero, Self::Infinity) => false,
            (Self::MinusZero, Self::MinusZero) => true,
            (Self::MinusZero, Self::MinusInfinity) => false,
            (Self::MinusZero, Self::Nan) => false,
        }
    }
}

impl PartialOrd for ExtendedBigDecimal {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        match (self, other) {
            (Self::BigDecimal(m), Self::BigDecimal(n)) => m.partial_cmp(n),
            (Self::BigDecimal(_), Self::MinusInfinity) => Some(Ordering::Greater),
            (Self::BigDecimal(_), Self::Infinity) => Some(Ordering::Less),
            (Self::BigDecimal(_), Self::Nan) => None,
            (Self::BigDecimal(m), Self::MinusZero) => m.partial_cmp(&BigDecimal::zero()),
            (Self::Infinity, Self::BigDecimal(_)) => Some(Ordering::Greater),
            (Self::Infinity, Self::Infinity) => Some(Ordering::Equal),
            (Self::Infinity, Self::MinusZero) => Some(Ordering::Greater),
            (Self::Infinity, Self::MinusInfinity) => Some(Ordering::Greater),
            (Self::Infinity, Self::Nan) => None,
            (Self::MinusInfinity, Self::BigDecimal(_)) => Some(Ordering::Less),
            (Self::MinusInfinity, Self::Infinity) => Some(Ordering::Less),
            (Self::MinusInfinity, Self::MinusZero) => Some(Ordering::Less),
            (Self::MinusInfinity, Self::MinusInfinity) => Some(Ordering::Equal),
            (Self::MinusInfinity, Self::Nan) => None,
            (Self::Nan, _) => None,
            (Self::MinusZero, Self::BigDecimal(n)) => BigDecimal::zero().partial_cmp(n),
            (Self::MinusZero, Self::Infinity) => Some(Ordering::Less),
            (Self::MinusZero, Self::MinusZero) => Some(Ordering::Equal),
            (Self::MinusZero, Self::MinusInfinity) => Some(Ordering::Greater),
            (Self::MinusZero, Self::Nan) => None,
        }
    }
}

#[cfg(test)]
mod tests {
    use bigdecimal::BigDecimal;
    use num_traits::Zero;

    use crate::extendedbigdecimal::ExtendedBigDecimal;

    #[test]
    fn test_addition_infinity() {
        let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero());
        let summand2 = ExtendedBigDecimal::Infinity;
        assert_eq!(summand1 + summand2, ExtendedBigDecimal::Infinity);
    }

    #[test]
    fn test_addition_minus_infinity() {
        let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero());
        let summand2 = ExtendedBigDecimal::MinusInfinity;
        assert_eq!(summand1 + summand2, ExtendedBigDecimal::MinusInfinity);
    }

    #[test]
    fn test_addition_nan() {
        let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero());
        let summand2 = ExtendedBigDecimal::Nan;
        let sum = summand1 + summand2;
        match sum {
            ExtendedBigDecimal::Nan => (),
            _ => unreachable!(),
        }
    }

    #[test]
    fn test_display() {
        assert_eq!(
            format!("{}", ExtendedBigDecimal::BigDecimal(BigDecimal::zero())),
            "0.0"
        );
        assert_eq!(format!("{}", ExtendedBigDecimal::Infinity), "inf");
        assert_eq!(format!("{}", ExtendedBigDecimal::MinusInfinity), "-inf");
        assert_eq!(format!("{}", ExtendedBigDecimal::Nan), "nan");
    }
}
use std::cmp::Ordering;
use std::fmt::Display;
use std::ops::Add;

use bigdecimal::BigDecimal;
use num_bigint::BigInt;
use num_bigint::ToBigInt;
use num_traits::One;
use num_traits::Zero;

use crate::extendedbigint::ExtendedBigInt;

#[derive(Debug, Clone)]
pub enum ExtendedBigDecimal {
    BigDecimal(BigDecimal),
    Infinity,
    MinusInfinity,
    MinusZero,
    Nan,
}

fn ceil(x: BigDecimal) -> BigInt {
    if x.is_integer() {
        x.to_bigint().unwrap()
    } else {
        (x + BigDecimal::one().half()).round(0).to_bigint().unwrap()
    }
}

fn floor(x: BigDecimal) -> BigInt {
    if x.is_integer() {
        x.to_bigint().unwrap()
    } else {
        (x - BigDecimal::one().half()).round(0).to_bigint().unwrap()
    }
}

impl ExtendedBigDecimal {
    pub fn ceil(self) -> ExtendedBigInt {
        match self {
            ExtendedBigDecimal::BigDecimal(x) => ExtendedBigInt::BigInt(ceil(x)),
            other => From::from(other),
        }
    }

    pub fn floor(self) -> ExtendedBigInt {
        match self {
            ExtendedBigDecimal::BigDecimal(x) => ExtendedBigInt::BigInt(floor(x)),
            other => From::from(other),
        }
    }
}

impl From<ExtendedBigInt> for ExtendedBigDecimal {
    fn from(big_int: ExtendedBigInt) -> Self {
        match big_int {
            ExtendedBigInt::BigInt(n) => Self::BigDecimal(BigDecimal::from(n)),
            ExtendedBigInt::Infinity => ExtendedBigDecimal::Infinity,
            ExtendedBigInt::MinusInfinity => ExtendedBigDecimal::MinusInfinity,
            ExtendedBigInt::MinusZero => ExtendedBigDecimal::MinusZero,
            ExtendedBigInt::Nan => ExtendedBigDecimal::Nan,
        }
    }
}

impl Display for ExtendedBigDecimal {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ExtendedBigDecimal::BigDecimal(x) => {
                let (n, p) = x.as_bigint_and_exponent();
                match p {
                    0 => ExtendedBigDecimal::BigDecimal(BigDecimal::new(n * 10, 1)).fmt(f),
                    _ => x.fmt(f),
                }
            }
            ExtendedBigDecimal::Infinity => f32::INFINITY.fmt(f),
            ExtendedBigDecimal::MinusInfinity => f32::NEG_INFINITY.fmt(f),
            ExtendedBigDecimal::MinusZero => {
                (0.0f32).fmt(f)
            }
            ExtendedBigDecimal::Nan => "nan".fmt(f),
        }
    }
}

impl Zero for ExtendedBigDecimal {
    fn zero() -> Self {
        ExtendedBigDecimal::BigDecimal(BigDecimal::zero())
    }
}

impl Add for ExtendedBigDecimal {
    type Output = Self;

    fn add(self, other: Self) -> Self {
        match (self, other) {
            (Self::BigDecimal(m), Self::BigDecimal(n)) => Self::BigDecimal(m.add(n)),
            (Self::BigDecimal(_), Self::MinusInfinity) => Self::MinusInfinity,
            (Self::BigDecimal(_), Self::Infinity) => Self::Infinity,
            (Self::BigDecimal(_), Self::Nan) => Self::Nan,
            (Self::BigDecimal(m), Self::MinusZero) => Self::BigDecimal(m),
            (Self::Infinity, Self::BigDecimal(_)) => Self::Infinity,
            (Self::Infinity, Self::Infinity) => Self::Infinity,
            (Self::Infinity, Self::MinusZero) => Self::Infinity,
            (Self::Infinity, Self::MinusInfinity) => Self::Nan,
            (Self::Infinity, Self::Nan) => Self::Nan,
            (Self::MinusInfinity, Self::BigDecimal(_)) => Self::MinusInfinity,
            (Self::MinusInfinity, Self::MinusInfinity) => Self::MinusInfinity,
            (Self::MinusInfinity, Self::MinusZero) => Self::MinusInfinity,
            (Self::MinusInfinity, Self::Infinity) => Self::Nan,
            (Self::MinusInfinity, Self::Nan) => Self::Nan,
            (Self::Nan, _) => Self::Nan,
            (Self::MinusZero, other) => other,
        }
    }
}

impl PartialEq for ExtendedBigDecimal {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::BigDecimal(m), Self::BigDecimal(n)) => m.eq(n),
            (Self::BigDecimal(_), Self::MinusInfinity) => false,
            (Self::BigDecimal(_), Self::Infinity) => false,
            (Self::BigDecimal(_), Self::Nan) => false,
            (Self::BigDecimal(_), Self::MinusZero) => false,
            (Self::Infinity, Self::BigDecimal(_)) => false,
            (Self::Infinity, Self::Infinity) => true,
            (Self::Infinity, Self::MinusZero) => false,
            (Self::Infinity, Self::MinusInfinity) => false,
            (Self::Infinity, Self::Nan) => false,
            (Self::MinusInfinity, Self::BigDecimal(_)) => false,
            (Self::MinusInfinity, Self::Infinity) => false,
            (Self::MinusInfinity, Self::MinusZero) => false,
            (Self::MinusInfinity, Self::MinusInfinity) => true,
            (Self::MinusInfinity, Self::Nan) => false,
            (Self::Nan, _) => false,
            (Self::MinusZero, Self::BigDecimal(_)) => false,
            (Self::MinusZero, Self::Infinity) => false,
            (Self::MinusZero, Self::MinusZero) => true,
            (Self::MinusZero, Self::MinusInfinity) => false,
            (Self::MinusZero, Self::Nan) => false,
        }
    }
}

impl PartialOrd for ExtendedBigDecimal {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        match (self, other) {
            (Self::BigDecimal(m), Self::BigDecimal(n)) => m.partial_cmp(n),
            (Self::BigDecimal(_), Self::MinusInfinity) => Some(Ordering::Greater),
            (Self::BigDecimal(_), Self::Infinity) => Some(Ordering::Less),
            (Self::BigDecimal(_), Self::Nan) => None,
            (Self::BigDecimal(m), Self::MinusZero) => m.partial_cmp(&BigDecimal::zero()),
            (Self::Infinity, Self::BigDecimal(_)) => Some(Ordering::Greater),
            (Self::Infinity, Self::Infinity) => Some(Ordering::Equal),
            (Self::Infinity, Self::MinusZero) => Some(Ordering::Greater),
            (Self::Infinity, Self::MinusInfinity) => Some(Ordering::Greater),
            (Self::Infinity, Self::Nan) => None,
            (Self::MinusInfinity, Self::BigDecimal(_)) => Some(Ordering::Less),
            (Self::MinusInfinity, Self::Infinity) => Some(Ordering::Less),
            (Self::MinusInfinity, Self::MinusZero) => Some(Ordering::Less),
            (Self::MinusInfinity, Self::MinusInfinity) => Some(Ordering::Equal),
            (Self::MinusInfinity, Self::Nan) => None,
            (Self::Nan, _) => None,
            (Self::MinusZero, Self::BigDecimal(n)) => BigDecimal::zero().partial_cmp(n),
            (Self::MinusZero, Self::Infinity) => Some(Ordering::Less),
            (Self::MinusZero, Self::MinusZero) => Some(Ordering::Equal),
            (Self::MinusZero, Self::MinusInfinity) => Some(Ordering::Greater),
            (Self::MinusZero, Self::Nan) => None,
        }
    }
}

#[cfg(test)]
mod tests {
    use bigdecimal::BigDecimal;
    use num_traits::Zero;

    use crate::extendedbigdecimal::ExtendedBigDecimal;

    #[test]
    fn test_addition_infinity() {
        let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero());
        let summand2 = ExtendedBigDecimal::Infinity;
        assert_eq!(summand1 + summand2, ExtendedBigDecimal::Infinity);
    }

    #[test]
    fn test_addition_minus_infinity() {
        let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero());
        let summand2 = ExtendedBigDecimal::MinusInfinity;
        assert_eq!(summand1 + summand2, ExtendedBigDecimal::MinusInfinity);
    }

    #[test]
    fn test_addition_nan() {
        let summand1 = ExtendedBigDecimal::BigDecimal(BigDecimal::zero());
        let summand2 = ExtendedBigDecimal::Nan;
        let sum = summand1 + summand2;
        match sum {
            ExtendedBigDecimal::Nan => (),
            _ => unreachable!(),
        }
    }

    #[test]
    fn test_display() {
        assert_eq!(
            format!("{}", ExtendedBigDecimal::BigDecimal(BigDecimal::zero())),
            "0.0"
        );
        assert_eq!(format!("{}", ExtendedBigDecimal::Infinity), "inf");
        assert_eq!(format!("{}", ExtendedBigDecimal::MinusInfinity), "-inf");
        assert_eq!(format!("{}", ExtendedBigDecimal::Nan), "nan");
    }
}
    fn is_zero(&self) -> bool {
        match self {
            Self::BigDecimal(n) => n.is_zero(),
            Self::MinusZero => true,
            _ => false,
        }
    }
function_block-full_function
[ { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(mut args: impl uucore::Args) -> UResult<()> {\n\n let program = args.next().unwrap_or_else(|| OsString::from(\"test\"));\n\n let binary_name = uucore::util_name();\n\n let mut args: Vec<_> = args.collect();\n\n\n\n if binary_name.ends_with('[') {\n\n // If invoked as [ we should recognize --help and --version (but not -h or -v)\n\n if args.len() == 1 && (args[0] == \"--help\" || args[0] == \"--version\") {\n\n // Let clap pretty-print help and version\n\n App::new(binary_name)\n\n .version(crate_version!())\n\n .usage(USAGE)\n\n .after_help(AFTER_HELP)\n\n // Disable printing of -h and -v as valid alternatives for --help and --version,\n\n // since we don't recognize -h and -v as help/version flags.\n\n .setting(AppSettings::NeedsLongHelp)\n\n .setting(AppSettings::NeedsLongVersion)\n\n .get_matches_from(std::iter::once(program).chain(args.into_iter()));\n\n return Ok(());\n\n }\n", "file_path": "src/uu/test/src/test.rs", "rank": 0, "score": 289916.5874599728 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(mut args: impl uucore::Args) -> UResult<()> {\n\n // if there is no program name for some reason, default to \"hashsum\"\n\n let program = args.next().unwrap_or_else(|| OsString::from(NAME));\n\n let binary_name = Path::new(&program)\n\n .file_name()\n\n .unwrap_or_else(|| OsStr::new(NAME))\n\n .to_string_lossy();\n\n\n\n let args = iter::once(program.clone()).chain(args);\n\n\n\n // Default binary in Windows, text mode otherwise\n\n let binary_flag_default = cfg!(windows);\n\n\n\n let app = uu_app(&binary_name);\n\n\n\n // FIXME: this should use get_matches_from_safe() and crash!(), but at the moment that just\n\n // causes \"error: \" to be printed twice (once from crash!() and once from clap). 
With\n\n // the current setup, the name of the utility is not printed, but I think this is at\n\n // least somewhat better from a user's perspective.\n\n let matches = app.get_matches_from(args);\n", "file_path": "src/uu/hashsum/src/hashsum.rs", "rank": 1, "score": 289916.5874599728 }, { "content": "pub fn factor(mut n: u64) -> Factors {\n\n #[cfg(feature = \"coz\")]\n\n coz::begin!(\"factorization\");\n\n let mut factors = Factors::one();\n\n\n\n if n < 2 {\n\n return factors;\n\n }\n\n\n\n let n_zeros = n.trailing_zeros();\n\n if n_zeros > 0 {\n\n factors.add(2, n_zeros as Exponent);\n\n n >>= n_zeros;\n\n }\n\n\n\n if n == 1 {\n\n #[cfg(feature = \"coz\")]\n\n coz::end!(\"factorization\");\n\n return factors;\n\n }\n", "file_path": "src/uu/factor/src/factor.rs", "rank": 2, "score": 274541.8262842762 }, { "content": "fn read_n_lines(input: &mut impl std::io::BufRead, n: usize, zero: bool) -> std::io::Result<()> {\n\n // Read the first `n` lines from the `input` reader.\n\n let separator = if zero { b'\\0' } else { b'\\n' };\n\n let mut reader = take_lines(input, n, separator);\n\n\n\n // Write those bytes to `stdout`.\n\n let stdout = std::io::stdout();\n\n let stdout = stdout.lock();\n\n let mut writer = BufWriter::with_capacity(BUFWRITER_CAPACITY, stdout);\n\n\n\n io::copy(&mut reader, &mut writer)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/head/src/head.rs", "rank": 3, "score": 267209.88410913735 }, { "content": "fn trim_left_matches_to_start<P: FnMut(char) -> bool>(s: &str, inner: P) {\n\n replace!(s.trim_left_matches(inner) => s.trim_start_matches(inner));\n\n replace!(s.trim_right_matches(inner) => s.trim_end_matches(inner));\n\n}\n", "file_path": "util/rewrite_rules.rs", "rank": 4, "score": 259194.29951491288 }, { "content": "pub fn is_symlink<P: AsRef<Path>>(path: P) -> bool {\n\n match fs::symlink_metadata(path) {\n\n Ok(m) => m.file_type().is_symlink(),\n\n Err(_) => false,\n\n }\n\n}\n", "file_path": "src/uu/chmod/src/chmod.rs", "rank": 5, "score": 245648.51980216906 }, { "content": "pub fn is_symlink<P: AsRef<Path>>(path: P) -> bool {\n\n match fs::symlink_metadata(path) {\n\n Ok(m) => m.file_type().is_symlink(),\n\n Err(_) => false,\n\n }\n\n}\n", "file_path": "src/uu/ln/src/ln.rs", "rank": 6, "score": 245648.51980216906 }, { "content": "pub fn factor(num: &mut u64, factors: &mut Factors) {\n\n for &(prime, inv, ceil) in PRIME_INVERSIONS_U64 {\n\n if *num == 1 {\n\n break;\n\n }\n\n\n\n // inv = prime^-1 mod 2^64\n\n // ceil = floor((2^64-1) / prime)\n\n // if (num * inv) mod 2^64 <= ceil, then prime divides num\n\n // See https://math.stackexchange.com/questions/1251327/\n\n // for a nice explanation.\n\n let mut k = 0;\n\n loop {\n\n let x = num.wrapping_mul(inv);\n\n\n\n // While prime divides num\n\n if x <= ceil {\n\n *num = x;\n\n k += 1;\n\n #[cfg(feature = \"coz\")]\n", "file_path": "src/uu/factor/src/table.rs", "rank": 7, "score": 245426.63016349476 }, { "content": "pub fn gcd(mut u: u64, mut v: u64) -> u64 {\n\n // Stein's binary GCD algorithm\n\n // Base cases: gcd(n, 0) = gcd(0, n) = n\n\n if u == 0 {\n\n return v;\n\n } else if v == 0 {\n\n return u;\n\n }\n\n\n\n // gcd(2ⁱ u, 2ʲ v) = 2ᵏ gcd(u, v) with u, v odd and k = min(i, j)\n\n // 2ᵏ is the greatest power of two that divides both u and v\n\n let k = {\n\n let i = u.trailing_zeros();\n\n let j = v.trailing_zeros();\n\n u >>= i;\n\n v >>= j;\n\n min(i, j)\n\n };\n\n\n\n loop {\n", "file_path": "src/uu/factor/src/numeric/gcd.rs", "rank": 8, "score": 241010.813013667 }, { "content": "/// Return the 
canonical, absolute form of a path.\n\n///\n\n/// This function is a generalization of [`std::fs::canonicalize`] that\n\n/// allows controlling how symbolic links are resolved and how to deal\n\n/// with missing components. It returns the canonical, absolute form of\n\n/// a path.\n\n/// The `miss_mode` parameter controls how missing path elements are handled\n\n///\n\n/// * [`MissingHandling::Normal`] makes this function behave like\n\n/// [`std::fs::canonicalize`], resolving symbolic links and returning\n\n/// an error if the path does not exist.\n\n/// * [`MissingHandling::Missing`] makes this function ignore non-final\n\n/// components of the path that could not be resolved.\n\n/// * [`MissingHandling::Existing`] makes this function return an error\n\n/// if the final component of the path does not exist.\n\n///\n\n/// The `res_mode` parameter controls how symbolic links are\n\n/// resolved:\n\n///\n\n/// * [`ResolveMode::None`] makes this function not try to resolve\n\n/// any symbolic links.\n\n/// * [`ResolveMode::Physical`] makes this function resolve symlinks as they\n\n/// are encountered\n\n/// * [`ResolveMode::Logical`] makes this function resolve '..' components\n\n/// before symlinks\n\n///\n\npub fn canonicalize<P: AsRef<Path>>(\n\n original: P,\n\n miss_mode: MissingHandling,\n\n res_mode: ResolveMode,\n\n) -> IOResult<PathBuf> {\n\n // Create an absolute path\n\n let original = original.as_ref();\n\n let original = if original.is_absolute() {\n\n original.to_path_buf()\n\n } else {\n\n dunce::canonicalize(env::current_dir().unwrap())\n\n .unwrap()\n\n .join(original)\n\n };\n\n\n\n let mut result = PathBuf::new();\n\n let mut parts = vec![];\n\n\n\n // Split path by directory separator; add prefix (Windows-only) and root\n\n // directory to final path buffer; add remaining parts to temporary\n", "file_path": "src/uucore/src/lib/features/fs.rs", "rank": 9, "score": 239881.18160484126 }, { "content": "/// Perform the change of owner on a path\n\n/// with the various options\n\n/// and error messages management\n\npub fn wrap_chown<P: AsRef<Path>>(\n\n path: P,\n\n meta: &Metadata,\n\n dest_uid: Option<u32>,\n\n dest_gid: Option<u32>,\n\n follow: bool,\n\n verbosity: Verbosity,\n\n) -> Result<String, String> {\n\n let dest_uid = dest_uid.unwrap_or_else(|| meta.uid());\n\n let dest_gid = dest_gid.unwrap_or_else(|| meta.gid());\n\n let path = path.as_ref();\n\n let mut out: String = String::new();\n\n\n\n if let Err(e) = chown(path, dest_uid, dest_gid, follow) {\n\n match verbosity.level {\n\n VerbosityLevel::Silent => (),\n\n level => {\n\n out = format!(\n\n \"changing {} of {}: {}\",\n\n if verbosity.groups_only {\n", "file_path": "src/uucore/src/lib/features/perms.rs", "rank": 10, "score": 236678.01969321648 }, { "content": "pub fn version_cmp(mut a: &str, mut b: &str) -> Ordering {\n\n let str_cmp = a.cmp(b);\n\n if str_cmp == Ordering::Equal {\n\n return str_cmp;\n\n }\n\n\n\n // Special cases:\n\n // 1. Empty strings\n\n match (a.is_empty(), b.is_empty()) {\n\n (true, false) => return Ordering::Less,\n\n (false, true) => return Ordering::Greater,\n\n (true, true) => unreachable!(),\n\n (false, false) => {}\n\n }\n\n // 2. 
Dots\n\n match (a == \".\", b == \".\") {\n\n (true, false) => return Ordering::Less,\n\n (false, true) => return Ordering::Greater,\n\n (true, true) => unreachable!(),\n\n (false, false) => {}\n", "file_path": "src/uucore/src/lib/mods/version_cmp.rs", "rank": 11, "score": 235505.34469018687 }, { "content": "// parse_options loads the options into the settings, returning an array of\n\n// error messages.\n\npub fn parse_options(settings: &mut crate::Settings, opts: &clap::ArgMatches) -> Vec<String> {\n\n // This vector holds error messages encountered.\n\n let mut errs: Vec<String> = vec![];\n\n settings.renumber = !opts.is_present(options::NO_RENUMBER);\n\n match opts.value_of(options::NUMBER_SEPARATOR) {\n\n None => {}\n\n Some(val) => {\n\n settings.number_separator = val.to_owned();\n\n }\n\n }\n\n match opts.value_of(options::NUMBER_FORMAT) {\n\n None => {}\n\n Some(val) => match val {\n\n \"ln\" => {\n\n settings.number_format = crate::NumberFormat::Left;\n\n }\n\n \"rn\" => {\n\n settings.number_format = crate::NumberFormat::Right;\n\n }\n\n \"rz\" => {\n", "file_path": "src/uu/nl/src/helper.rs", "rank": 12, "score": 230345.11404407662 }, { "content": "pub fn factor_chunk(n_s: &mut [u64; CHUNK_SIZE], f_s: &mut [Factors; CHUNK_SIZE]) {\n\n for &(prime, inv, ceil) in PRIME_INVERSIONS_U64 {\n\n if n_s[0] == 1 && n_s[1] == 1 && n_s[2] == 1 && n_s[3] == 1 {\n\n break;\n\n }\n\n\n\n for (num, factors) in n_s.iter_mut().zip(f_s.iter_mut()) {\n\n if *num == 1 {\n\n continue;\n\n }\n\n let mut k = 0;\n\n loop {\n\n let x = num.wrapping_mul(inv);\n\n\n\n // While prime divides num\n\n if x <= ceil {\n\n *num = x;\n\n k += 1;\n\n } else {\n\n if k > 0 {\n", "file_path": "src/uu/factor/src/table.rs", "rank": 13, "score": 228866.05816096655 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let usage = usage();\n\n let after_help = get_long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&after_help[..])\n\n .get_matches_from(args);\n\n\n\n let files: Vec<String> = matches\n\n .values_of(options::FILE)\n\n .map(|v| v.map(ToString::to_string).collect())\n\n .unwrap_or_default();\n\n\n\n // If true, attempt to canonicalize hostnames via a DNS lookup.\n\n let do_lookup = matches.is_present(options::LOOKUP);\n", "file_path": "src/uu/who/src/who.rs", "rank": 14, "score": 228125.31628646358 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let mut buff = String::new();\n\n let silent = matches.is_present(options::SILENT);\n\n if let Some(files) = matches.values_of(options::FILES) {\n\n let mut stdout = setup_term();\n\n let length = files.len();\n\n\n\n let mut files_iter = files.peekable();\n\n while let (Some(file), next_file) = (files_iter.next(), files_iter.peek()) {\n\n let file = Path::new(file);\n\n if file.is_dir() {\n\n terminal::disable_raw_mode().unwrap();\n\n return Err(UUsageError::new(\n\n 1,\n\n format!(\"{} is a directory.\", file.quote()),\n\n ));\n\n }\n\n if !file.exists() {\n", "file_path": "src/uu/more/src/more.rs", "rank": 15, "score": 228125.31628646358 }, { "content": "pub fn args_os() -> impl Iterator<Item = OsString> {\n\n ARGV.iter().cloned()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::ffi::OsStr;\n\n\n\n fn make_os_vec(os_str: &OsStr) -> Vec<OsString> {\n\n 
vec![\n\n OsString::from(\"test\"),\n\n OsString::from(\"สวัสดี\"), // spell-checker:disable-line\n\n os_str.to_os_string(),\n\n ]\n\n }\n\n\n\n fn collect_os_str(vec: Vec<OsString>, handling: InvalidEncodingHandling) -> ConversionResult {\n\n vec.into_iter().collect_str(handling)\n\n }\n", "file_path": "src/uucore/src/lib/lib.rs", "rank": 16, "score": 227293.97839704665 }, { "content": "#[cfg(unix)]\n\npub fn statfs<P: AsRef<Path>>(path: P) -> Result<StatFs, String>\n\nwhere\n\n Vec<u8>: From<P>,\n\n{\n\n match CString::new(path) {\n\n Ok(p) => {\n\n let mut buffer: StatFs = unsafe { mem::zeroed() };\n\n unsafe {\n\n match statfs_fn(p.as_ptr(), &mut buffer) {\n\n 0 => Ok(buffer),\n\n _ => {\n\n let errno = IOError::last_os_error().raw_os_error().unwrap_or(0);\n\n Err(CString::from_raw(strerror(errno))\n\n .into_string()\n\n .unwrap_or_else(|_| \"Unknown Error\".to_owned()))\n\n }\n\n }\n\n }\n\n }\n\n Err(e) => Err(e.to_string()),\n\n }\n\n}\n\n\n", "file_path": "src/uucore/src/lib/features/fsext.rs", "rank": 17, "score": 226585.12739608754 }, { "content": "pub fn supports_pid_checks(pid: self::Pid) -> bool {\n\n unsafe { !(libc::kill(pid, 0) != 0 && get_errno() == libc::ENOSYS) }\n\n}\n\n\n", "file_path": "src/uu/tail/src/platform/unix.rs", "rank": 18, "score": 224650.58338120894 }, { "content": "pub fn supports_pid_checks(_pid: self::Pid) -> bool {\n\n true\n\n}\n", "file_path": "src/uu/tail/src/platform/windows.rs", "rank": 19, "score": 224650.58338120894 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let files = matches.values_of_os(ARG_FILES).unwrap();\n\n\n\n let (mut atime, mut mtime) =\n\n if let Some(reference) = matches.value_of_os(options::sources::REFERENCE) {\n\n stat(Path::new(reference), !matches.is_present(options::NO_DEREF))?\n\n } else {\n\n let timestamp = if let Some(date) = matches.value_of(options::sources::DATE) {\n\n parse_date(date)?\n\n } else if let Some(current) = matches.value_of(options::sources::CURRENT) {\n\n parse_timestamp(current)?\n\n } else {\n\n local_tm_to_filetime(time::now())\n\n };\n\n (timestamp, timestamp)\n\n };\n", "file_path": "src/uu/touch/src/touch.rs", "rank": 20, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\n#[allow(clippy::cognitive_complexity)]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let summarize = matches.is_present(options::SUMMARIZE);\n\n\n\n let max_depth = parse_depth(matches.value_of(options::MAX_DEPTH), summarize)?;\n\n\n\n let options = Options {\n\n all: matches.is_present(options::ALL),\n\n util_name: uucore::util_name().to_owned(),\n\n max_depth,\n\n total: matches.is_present(options::TOTAL),\n\n separate_dirs: matches.is_present(options::SEPARATE_DIRS),\n\n one_file_system: matches.is_present(options::ONE_FILE_SYSTEM),\n", "file_path": "src/uu/du/src/du.rs", "rank": 21, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = get_usage();\n\n\n\n chown_base(\n\n uu_app().usage(&usage[..]),\n\n args,\n\n options::ARG_OWNER,\n\n parse_gid_uid_and_filter,\n\n false,\n\n )\n\n}\n\n\n", "file_path": "src/uu/chown/src/chown.rs", "rank": 
22, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let complement = matches.is_present(options::COMPLEMENT);\n\n\n\n let mode_parse = match (\n\n matches.value_of(options::BYTES),\n\n matches.value_of(options::CHARACTERS),\n\n matches.value_of(options::FIELDS),\n\n ) {\n\n (Some(byte_ranges), None, None) => list_to_ranges(byte_ranges, complement).map(|ranges| {\n\n Mode::Bytes(\n\n ranges,\n\n Options {\n\n out_delim: Some(\n\n matches\n", "file_path": "src/uu/cut/src/cut.rs", "rank": 23, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n run_env(args)\n\n}\n", "file_path": "src/uu/env/src/env.rs", "rank": 24, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n /* the list of files */\n\n\n\n let paths: Vec<PathBuf> = matches\n\n .values_of(ARG_FILES)\n\n .unwrap()\n\n .map(PathBuf::from)\n\n .collect();\n\n\n\n let strip = matches.is_present(OPT_STRIP);\n\n let zero = matches.is_present(OPT_ZERO);\n\n let quiet = matches.is_present(OPT_QUIET);\n\n let logical = matches.is_present(OPT_LOGICAL);\n\n let can_mode = if matches.is_present(OPT_CANONICALIZE_EXISTING) {\n\n MissingHandling::Existing\n\n } else if matches.is_present(OPT_CANONICALIZE_MISSING) {\n", "file_path": "src/uu/realpath/src/realpath.rs", "rank": 25, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n #[cfg(windows)]\n\n let _handle = wsa::start().map_err_context(|| \"failed to start Winsock\".to_owned())?;\n\n\n\n match matches.value_of_os(OPT_HOST) {\n\n None => display_hostname(&matches),\n\n Some(host) => hostname::set(host).map_err_context(|| \"failed to set hostname\".to_owned()),\n\n }\n\n}\n\n\n", "file_path": "src/uu/hostname/src/hostname.rs", "rank": 26, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let result =\n\n parse_options(&matches).and_then(|options| match matches.values_of(options::NUMBER) {\n\n Some(values) => handle_args(values, options),\n\n None => handle_stdin(options),\n\n });\n\n\n\n match result {\n\n Err(e) => {\n\n std::io::stdout().flush().expect(\"error flushing stdout\");\n\n // TODO Change `handle_args()` and `handle_stdin()` so that\n\n // they return `UResult`.\n\n return Err(USimpleError::new(1, e));\n\n }\n\n _ => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "src/uu/numfmt/src/numfmt.rs", "rank": 27, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n // For expr utility we do not want getopts.\n\n // The following usage should work without escaping hyphens: `expr -15 = 1 + 2 \\* \\( 3 - -4 \\)`\n\n\n\n if maybe_handle_help_or_version(&args) {\n\n 
Ok(())\n\n } else {\n\n let token_strings = args[1..].to_vec();\n\n\n\n match process_expr(&token_strings) {\n\n Ok(expr_result) => print_expr_ok(&expr_result),\n\n Err(expr_error) => Err(USimpleError::new(2, &expr_error)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/uu/expr/src/expr.rs", "rank": 28, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let format = Format::Base32;\n\n let usage = usage();\n\n\n\n let config: base_common::Config = base_common::parse_base_cmd_args(args, ABOUT, &usage)?;\n\n\n\n // Create a reference to stdin so we can return a locked stdin from\n\n // parse_base_cmd_args\n\n let stdin_raw = stdin();\n\n let mut input: Box<dyn Read> = base_common::get_input(&config, &stdin_raw)?;\n\n\n\n base_common::handle_input(\n\n &mut input,\n\n format,\n\n config.wrap_cols,\n\n config.ignore_garbage,\n\n config.decode,\n\n )\n\n}\n\n\n", "file_path": "src/uu/base32/src/base32.rs", "rank": 29, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n // set working mode\n\n let is_posix = matches.values_of(options::POSIX).is_some();\n\n let is_posix_special = matches.values_of(options::POSIX_SPECIAL).is_some();\n\n let is_portability = matches.values_of(options::PORTABILITY).is_some();\n\n\n\n let mode = if (is_posix && is_posix_special) || is_portability {\n\n Mode::Both\n\n } else if is_posix {\n\n Mode::Basic\n\n } else if is_posix_special {\n\n Mode::Extra\n\n } else {\n", "file_path": "src/uu/pathchk/src/pathchk.rs", "rank": 30, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let syntax = format!(\n\n \"{0} [OPTION]... [+FORMAT]...\n\n {0} [OPTION]... 
[MMDDhhmm[[CC]YY][.ss]]\",\n\n NAME\n\n );\n\n let matches = uu_app().usage(&syntax[..]).get_matches_from(args);\n\n\n\n let format = if let Some(form) = matches.value_of(OPT_FORMAT) {\n\n if !form.starts_with('+') {\n\n return Err(USimpleError::new(\n\n 1,\n\n format!(\"invalid date {}\", form.quote()),\n\n ));\n\n }\n\n let form = form[1..].to_string();\n\n Format::Custom(form)\n\n } else if let Some(fmt) = matches\n\n .values_of(OPT_ISO_8601)\n\n .map(|mut iter| iter.next().unwrap_or(DATE).into())\n", "file_path": "src/uu/date/src/date.rs", "rank": 31, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = get_usage();\n\n\n\n let config = uu_app().usage(usage.as_ref());\n\n\n\n let options = match parse_command_line(config, args) {\n\n Ok(r) => r,\n\n Err(r) => {\n\n if let Error::CommandLine(ref r) = r {\n\n match r.kind {\n\n clap::ErrorKind::HelpDisplayed | clap::ErrorKind::VersionDisplayed => {\n\n println!(\"{}\", r);\n\n return Ok(());\n\n }\n\n _ => {}\n\n }\n\n }\n\n return Err(UUsageError::new(\n\n error_exit_status::ANOTHER_ERROR,\n\n format!(\"{}\", r),\n", "file_path": "src/uu/runcon/src/runcon.rs", "rank": 32, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n // A mutable settings object, initialized with the defaults.\n\n let mut settings = Settings {\n\n header_numbering: NumberingStyle::NumberForNone,\n\n body_numbering: NumberingStyle::NumberForAll,\n\n footer_numbering: NumberingStyle::NumberForNone,\n\n section_delimiter: ['\\\\', ':'],\n\n starting_line_number: 1,\n\n line_increment: 1,\n\n join_blank_lines: 1,\n\n number_width: 6,\n\n number_format: NumberFormat::Right,\n\n renumber: true,\n\n number_separator: String::from(\"\\t\"),\n", "file_path": "src/uu/nl/src/nl.rs", "rank": 33, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let mut ignore = match matches.value_of(OPT_IGNORE) {\n\n Some(numstr) => match numstr.parse() {\n\n Ok(num) => num,\n\n Err(e) => {\n\n return Err(USimpleError::new(\n\n 1,\n\n format!(\"{} is not a valid number: {}\", numstr.quote(), e),\n\n ));\n\n }\n\n },\n\n None => 0,\n\n };\n\n\n\n if !matches.is_present(OPT_ALL) {\n\n // OMP_NUM_THREADS doesn't have an impact on --all\n\n ignore += match env::var(\"OMP_NUM_THREADS\") {\n", "file_path": "src/uu/nproc/src/nproc.rs", "rank": 34, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n let usage = usage();\n\n //\n\n // Argument parsing\n\n //\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n // too few arguments\n\n if !matches.is_present(options::NAME) {\n\n return Err(UUsageError::new(1, \"missing operand\".to_string()));\n\n }\n\n\n\n let opt_suffix = matches.is_present(options::SUFFIX);\n\n let opt_multiple = matches.is_present(options::MULTIPLE);\n\n let opt_zero = matches.is_present(options::ZERO);\n\n let multiple_paths = opt_suffix || opt_multiple;\n\n // too many 
arguments\n", "file_path": "src/uu/basename/src/basename.rs", "rank": 35, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app()\n\n .after_help(&*format!(\n\n \"{}\\n{}\",\n\n LONG_HELP,\n\n backup_control::BACKUP_CONTROL_LONG_HELP\n\n ))\n\n .usage(&usage[..])\n\n .get_matches_from(args);\n\n\n\n let files: Vec<OsString> = matches\n\n .values_of_os(ARG_FILES)\n\n .unwrap_or_default()\n\n .map(|v| v.to_os_string())\n\n .collect();\n\n\n\n let overwrite_mode = determine_overwrite_mode(&matches);\n\n let backup_mode = backup_control::determine_backup_mode(&matches)?;\n", "file_path": "src/uu/mv/src/mv.rs", "rank": 36, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n // get the file to split\n\n let file_name = matches.value_of(options::FILE).unwrap();\n\n\n\n // get the patterns to split on\n\n let patterns: Vec<String> = matches\n\n .values_of(options::PATTERN)\n\n .unwrap()\n\n .map(str::to_string)\n\n .collect();\n\n let patterns = patterns::get_patterns(&patterns[..])?;\n\n let options = CsplitOptions::new(&matches);\n\n if file_name == \"-\" {\n", "file_path": "src/uu/csplit/src/csplit.rs", "rank": 37, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n if args.len() <= 1 {\n\n return Err(UUsageError::new(1, \"missing operand\"));\n\n }\n\n let formatstr = &args[1];\n\n\n\n if formatstr == \"--help\" {\n\n print!(\"{} {}\", LONGHELP_LEAD, LONGHELP_BODY);\n\n } else if formatstr == \"--version\" {\n\n println!(\"{} {}\", uucore::util_name(), crate_version!());\n\n } else {\n\n let printf_args = &args[2..];\n\n memo::Memo::run_all(formatstr, printf_args);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/printf/src/printf.rs", "rank": 38, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n if matches.is_present(options::CONTEXT) {\n\n return Err(USimpleError::new(1, \"--context is not implemented\"));\n\n }\n\n if matches.is_present(options::SE_LINUX_SECURITY_CONTEXT) {\n\n return Err(USimpleError::new(1, \"-Z is not implemented\"));\n\n }\n\n\n\n let mode = match matches.value_of(options::MODE) {\n\n Some(m) => match usize::from_str_radix(m, 8) {\n\n Ok(m) => m,\n\n Err(e) => return Err(USimpleError::new(1, format!(\"invalid mode: {}\", e))),\n\n },\n\n None => 0o666,\n", "file_path": "src/uu/mkfifo/src/mkfifo.rs", "rank": 39, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = get_usage();\n\n\n\n chown_base(\n\n uu_app().usage(&usage[..]),\n\n args,\n\n options::ARG_GROUP,\n\n parse_gid_and_uid,\n\n true,\n\n )\n\n}\n\n\n", "file_path": "src/uu/chgrp/src/chgrp.rs", "rank": 40, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl 
uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let after_help = get_description();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&after_help[..])\n\n .get_matches_from(args);\n\n\n\n let users: Vec<String> = matches\n\n .values_of(options::ARG_USERS)\n\n .map(|v| v.map(ToString::to_string).collect())\n\n .unwrap_or_default();\n\n\n\n let mut state = State {\n\n nflag: matches.is_present(options::OPT_NAME),\n\n uflag: matches.is_present(options::OPT_EFFECTIVE_USER),\n\n gflag: matches.is_present(options::OPT_GROUP),\n\n gsflag: matches.is_present(options::OPT_GROUPS),\n\n rflag: matches.is_present(options::OPT_REAL_ID),\n", "file_path": "src/uu/id/src/id.rs", "rank": 41, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let (config, format) = parse_cmd_args(args)?;\n\n // Create a reference to stdin so we can return a locked stdin from\n\n // parse_base_cmd_args\n\n let stdin_raw = stdin();\n\n let mut input: Box<dyn Read> = base_common::get_input(&config, &stdin_raw)?;\n\n\n\n base_common::handle_input(\n\n &mut input,\n\n format,\n\n config.wrap_cols,\n\n config.ignore_garbage,\n\n config.decode,\n\n )\n\n}\n", "file_path": "src/uu/basenc/src/basenc.rs", "rank": 42, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(&args);\n\n\n\n let files = matches\n\n .values_of(options::FILE)\n\n .map_or(vec![], |file_values| file_values.collect());\n\n\n\n // clap provides .conflicts_with / .conflicts_with_all, but we want to\n\n // manually handle conflicts so we can match the output of GNU coreutils\n\n if (matches.is_present(options::C_SHELL) || matches.is_present(options::BOURNE_SHELL))\n\n && matches.is_present(options::PRINT_DATABASE)\n\n {\n\n return Err(UUsageError::new(\n\n 1,\n", "file_path": "src/uu/dircolors/src/dircolors.rs", "rank": 43, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let mut args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n // Before we can parse 'args' with clap (and previously getopts),\n\n // a possible MODE prefix '-' needs to be removed (e.g. 
\"chmod -x FILE\").\n\n let mode_had_minus_prefix = strip_minus_from_mode(&mut args);\n\n\n\n let usage = usage();\n\n let after_help = get_long_usage();\n\n\n\n // Linux-specific options, not implemented\n\n // opts.optflag(\"Z\", \"context\", \"set SELinux security context\" +\n\n // \" of each created directory to CTX\"),\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&after_help[..])\n\n .get_matches_from(args);\n\n\n\n let dirs = matches.values_of_os(options::DIRS).unwrap_or_default();\n\n let verbose = matches.is_present(options::VERBOSE);\n\n let recursive = matches.is_present(options::PARENTS);\n\n\n\n match get_mode(&matches, mode_had_minus_prefix) {\n\n Ok(mode) => exec(dirs, recursive, mode, verbose),\n\n Err(f) => Err(USimpleError::new(1, f)),\n\n }\n\n}\n\n\n", "file_path": "src/uu/mkdir/src/mkdir.rs", "rank": 44, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let mut args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n let obs_signal = handle_obsolete(&mut args);\n\n\n\n let usage = format!(\"{} [OPTIONS]... PID...\", uucore::execution_phrase());\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let mode = if matches.is_present(options::TABLE) || matches.is_present(options::TABLE_OLD) {\n\n Mode::Table\n\n } else if matches.is_present(options::LIST) {\n\n Mode::List\n\n } else {\n\n Mode::Kill\n\n };\n\n\n\n let pids_or_signals: Vec<String> = matches\n\n .values_of(options::PIDS_OR_SIGNALS)\n\n .map(|v| v.map(ToString::to_string).collect())\n", "file_path": "src/uu/kill/src/kill.rs", "rank": 45, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let opts = Opts {\n\n ignore: matches.is_present(OPT_IGNORE_FAIL_NON_EMPTY),\n\n parents: matches.is_present(OPT_PARENTS),\n\n verbose: matches.is_present(OPT_VERBOSE),\n\n };\n\n\n\n for path in matches\n\n .values_of_os(ARG_DIRS)\n\n .unwrap_or_default()\n\n .map(Path::new)\n\n {\n\n if let Err(error) = remove(path, opts) {\n\n let Error { error, path } = error;\n\n\n\n if opts.ignore && dir_not_empty(&error, path) {\n", "file_path": "src/uu/rmdir/src/rmdir.rs", "rank": 46, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let format = Format::Base64;\n\n let usage = usage();\n\n\n\n let config: base_common::Config = base_common::parse_base_cmd_args(args, ABOUT, &usage)?;\n\n\n\n // Create a reference to stdin so we can return a locked stdin from\n\n // parse_base_cmd_args\n\n let stdin_raw = stdin();\n\n let mut input: Box<dyn Read> = base_common::get_input(&config, &stdin_raw)?;\n\n\n\n base_common::handle_input(\n\n &mut input,\n\n format,\n\n config.wrap_cols,\n\n config.ignore_garbage,\n\n config.decode,\n\n )\n\n}\n", "file_path": "src/uu/base64/src/base64.rs", "rank": 47, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n // Linux-specific options, not implemented\n\n // opts.optflag(\"Z\", \"\", \"set the SELinux security context to default type\");\n\n // opts.optopt(\"\", \"context\", \"like -Z, or if CTX is specified 
then set the SELinux or SMACK security context to CTX\");\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let mode = get_mode(&matches).map_err(|e| USimpleError::new(1, e))?;\n\n\n\n let file_name = matches.value_of(\"name\").expect(\"Missing argument 'NAME'\");\n\n\n\n // Only check the first character, to allow mnemonic usage like\n\n // 'mknod /dev/rst0 character 18 0'.\n\n let ch = matches\n\n .value_of(\"type\")\n\n .expect(\"Missing argument 'TYPE'\")\n\n .chars()\n", "file_path": "src/uu/mknod/src/mknod.rs", "rank": 48, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let usage = usage();\n\n\n\n let app = uu_app().usage(&usage[..]);\n\n\n\n let matches = app.get_matches_from(args);\n\n\n\n if !matches.is_present(options::FILE) {\n\n return Err(UUsageError::new(1, \"missing file operand\"));\n\n }\n\n\n\n let iterations = match matches.value_of(options::ITERATIONS) {\n\n Some(s) => match s.parse::<usize>() {\n\n Ok(u) => u,\n\n Err(_) => {\n\n return Err(USimpleError::new(\n", "file_path": "src/uu/shred/src/shred.rs", "rank": 49, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let _ = uu_app().usage(usage()).get_matches_from(args);\n\n\n\n match get_userlogin() {\n\n Some(userlogin) => println!(\"{}\", userlogin),\n\n None => show_error!(\"no login name\"),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/logname/src/logname.rs", "rank": 50, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let mut no_newline = matches.is_present(OPT_NO_NEWLINE);\n\n let use_zero = matches.is_present(OPT_ZERO);\n\n let silent = matches.is_present(OPT_SILENT) || matches.is_present(OPT_QUIET);\n\n let verbose = matches.is_present(OPT_VERBOSE);\n\n\n\n let res_mode = if matches.is_present(OPT_CANONICALIZE)\n\n || matches.is_present(OPT_CANONICALIZE_EXISTING)\n\n || matches.is_present(OPT_CANONICALIZE_MISSING)\n\n {\n\n ResolveMode::Logical\n\n } else {\n\n ResolveMode::None\n\n };\n\n\n\n let can_mode = if matches.is_present(OPT_CANONICALIZE_EXISTING) {\n\n MissingHandling::Existing\n", "file_path": "src/uu/readlink/src/readlink.rs", "rank": 51, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n uu_app().get_matches_from(args);\n\n Err(1.into())\n\n}\n\n\n", "file_path": "src/uu/false/src/false.rs", "rank": 52, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let template = matches.value_of(ARG_TEMPLATE).unwrap();\n\n let tmpdir = matches.value_of(OPT_TMPDIR).unwrap_or_default();\n\n\n\n let (template, mut tmpdir) = if matches.is_present(OPT_TMPDIR)\n\n && !PathBuf::from(tmpdir).is_dir() // if a temp dir is provided, it must be an actual path\n\n && tmpdir.contains(\"XXX\")\n\n // If this is a template, it has to contain at least 3 X\n\n && template == DEFAULT_TEMPLATE\n\n // 
That means that clap does not think we provided a template\n\n {\n\n // Special case to workaround a limitation of clap when doing\n\n // mktemp --tmpdir apt-key-gpghome.XXX\n\n // The behavior should be\n\n // mktemp --tmpdir $TMPDIR apt-key-gpghome.XX\n\n // As --tmpdir is empty\n", "file_path": "src/uu/mktemp/src/mktemp.rs", "rank": 53, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n let filename1 = matches.value_of(options::FILE_1).unwrap();\n\n let filename2 = matches.value_of(options::FILE_2).unwrap();\n\n let mut f1 = open_file(filename1).map_err_context(|| filename1.to_string())?;\n\n let mut f2 = open_file(filename2).map_err_context(|| filename2.to_string())?;\n\n\n\n comm(&mut f1, &mut f2, &matches);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/comm/src/comm.rs", "rank": 54, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let paths: Vec<String> = matches\n\n .values_of(ARG_FILES)\n\n .map(|v| v.map(ToString::to_string).collect())\n\n .unwrap_or_default();\n\n\n\n check_unimplemented(&matches)?;\n\n\n\n let behavior = behavior(&matches)?;\n\n\n\n match behavior.main_function {\n\n MainFunction::Directory => directory(paths, behavior),\n\n MainFunction::Standard => standard(paths, behavior),\n\n }\n\n}\n\n\n", "file_path": "src/uu/install/src/install.rs", "rank": 55, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\n#[allow(clippy::cognitive_complexity)]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let mut files: Vec<String> = matches\n\n .values_of(ARG_FILES)\n\n .map(|v| v.map(ToString::to_string).collect())\n\n .unwrap_or_default();\n\n\n\n let mut fmt_opts = FmtOptions {\n\n crown: false,\n\n tagged: false,\n\n mail: false,\n\n uniform: false,\n\n quick: false,\n\n split_only: false,\n\n use_prefix: false,\n\n prefix: String::new(),\n\n xprefix: false,\n", "file_path": "src/uu/fmt/src/fmt.rs", "rank": 56, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(uucore::InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n let mut opts = getopts::Options::new();\n\n\n\n opts.opt(\n\n \"\",\n\n options::PAGE_RANGE_OPTION,\n\n \"Begin and stop printing with page FIRST_PAGE[:LAST_PAGE]\",\n\n \"FIRST_PAGE[:LAST_PAGE]\",\n\n HasArg::Yes,\n\n Occur::Optional,\n\n );\n\n\n\n opts.opt(\n\n options::STRING_HEADER_OPTION,\n\n \"header\",\n\n \"Use the string header to replace the file name \\\n\n in the header line.\",\n", "file_path": "src/uu/pr/src/pr.rs", "rank": 57, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let long_usage = get_long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&long_usage[..])\n\n .get_matches_from(args);\n\n\n\n let stater = Stater::new(matches)?;\n\n let exit_status = stater.exec();\n\n if 
exit_status == 0 {\n\n Ok(())\n\n } else {\n\n Err(exit_status.into())\n\n }\n\n}\n\n\n", "file_path": "src/uu/stat/src/stat.rs", "rank": 58, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = match HeadOptions::get_from(args) {\n\n Ok(o) => o,\n\n Err(s) => {\n\n return Err(USimpleError::new(1, s));\n\n }\n\n };\n\n uu_head(&args)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::ffi::OsString;\n\n\n\n use super::*;\n\n fn options(args: &str) -> Result<HeadOptions, String> {\n\n let combined = \"head \".to_owned() + args;\n\n let args = combined.split_whitespace();\n\n HeadOptions::get_from(args.map(OsString::from))\n\n }\n", "file_path": "src/uu/head/src/head.rs", "rank": 59, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n expand(Options::new(&matches)).map_err_context(|| \"failed to write output\".to_string())\n\n}\n\n\n", "file_path": "src/uu/expand/src/expand.rs", "rank": 60, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n uu_app().get_matches_from(args);\n\n let username = platform::get_username().map_err_context(|| \"failed to get username\".into())?;\n\n println_verbatim(&username).map_err_context(|| \"failed to print username\".into())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/whoami/src/whoami.rs", "rank": 61, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n uu_app().get_matches_from(args);\n\n hostid();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/hostid/src/hostid.rs", "rank": 62, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n uu_app().get_matches_from(args);\n\n\n\n let uts = PlatformInfo::new().map_err_context(|| \"cannot get system name\".to_string())?;\n\n println!(\"{}\", uts.machine().trim());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/arch/src/arch.rs", "rank": 63, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let clap_opts = uu_app();\n\n\n\n let clap_matches = clap_opts\n\n .clone() // Clone to reuse clap_opts to print help\n\n .get_matches_from(args.clone());\n\n\n\n let od_options = OdOptions::new(clap_matches, args)?;\n\n\n\n let mut input_offset =\n\n InputOffset::new(od_options.radix, od_options.skip_bytes, od_options.label);\n\n\n\n let mut input = open_input_peek_reader(\n\n &od_options.input_strings,\n\n od_options.skip_bytes,\n\n od_options.read_bytes,\n", "file_path": "src/uu/od/src/od.rs", "rank": 64, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let long_usage = get_long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&long_usage[..])\n\n .get_matches_from(args);\n\n\n\n let mut settings = Settings {\n\n prefix: \"\".to_owned(),\n\n numeric_suffix: false,\n\n suffix_length: 0,\n\n additional_suffix: \"\".to_owned(),\n\n input: \"\".to_owned(),\n\n filter: None,\n\n strategy: 
Strategy::Lines(1000),\n\n verbose: false,\n\n };\n\n\n", "file_path": "src/uu/split/src/split.rs", "rank": 65, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n let usage = usage();\n\n let mut settings: GlobalSettings = Default::default();\n\n\n\n let matches = match uu_app().usage(&usage[..]).get_matches_from_safe(args) {\n\n Ok(t) => t,\n\n Err(e) => {\n\n // not all clap \"Errors\" are because of a failure to parse arguments.\n\n // \"--version\" also causes an Error to be returned, but we should not print to stderr\n\n // nor return with a non-zero exit code in this case (we should print to stdout and return 0).\n\n // This logic is similar to the code in clap, but we return 2 as the exit code in case of real failure\n\n // (clap returns 1).\n\n if e.use_stderr() {\n\n eprintln!(\"{}\", e.message);\n\n set_exit_code(2);\n\n } else {\n\n println!(\"{}\", e.message);\n", "file_path": "src/uu/sort/src/sort.rs", "rank": 66, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let dashed_args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any()\n\n .into_iter()\n\n .fold(Vec::new(), append_dashes_if_not_present);\n\n\n\n let matches = uu_app()\n\n //.after_help(TODO: Add note about multiplier strings here.)\n\n .get_matches_from(dashed_args);\n\n\n\n match (\n\n matches.is_present(options::INFILE),\n\n matches.is_present(options::OUTFILE),\n\n ) {\n\n (true, true) => {\n\n let i = Input::<File>::new(&matches)?;\n\n let o = Output::<File>::new(&matches)?;\n\n o.dd_out(i)\n\n }\n", "file_path": "src/uu/dd/src/dd.rs", "rank": 67, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let files: Vec<_> = matches\n\n .values_of_os(options::FILES)\n\n .unwrap_or_default()\n\n .collect();\n\n let old = Path::new(files[0]);\n\n let new = Path::new(files[1]);\n\n\n\n hard_link(old, new)\n\n .map_err_context(|| format!(\"cannot create link {} to {}\", new.quote(), old.quote()))\n\n}\n\n\n", "file_path": "src/uu/link/src/link.rs", "rank": 68, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n unexpand(Options::new(matches)).map_err_context(String::new)\n\n}\n\n\n", "file_path": "src/uu/unexpand/src/unexpand.rs", "rank": 69, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let variables: Vec<String> = matches\n\n .values_of(ARG_VARIABLES)\n\n .map(|v| v.map(ToString::to_string).collect())\n\n .unwrap_or_default();\n\n\n\n let separator = if matches.is_present(OPT_NULL) {\n\n \"\\x00\"\n\n } else {\n\n \"\\n\"\n\n };\n\n\n\n if variables.is_empty() {\n\n for (env_var, value) in env::vars() {\n\n print!(\"{}={}{}\", env_var, value, separator);\n\n }\n", "file_path": "src/uu/printenv/src/printenv.rs", "rank": 70, "score": 
221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let no_newline = matches.is_present(options::NO_NEWLINE);\n\n let escaped = matches.is_present(options::ENABLE_BACKSLASH_ESCAPE);\n\n let values: Vec<String> = match matches.values_of(options::STRING) {\n\n Some(s) => s.map(|s| s.to_string()).collect(),\n\n None => vec![\"\".to_string()],\n\n };\n\n\n\n execute(no_newline, escaped, values).map_err_context(|| \"could not write to stdout\".to_string())\n\n}\n\n\n", "file_path": "src/uu/echo/src/echo.rs", "rank": 71, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let users: Vec<String> = matches\n\n .values_of(options::USERS)\n\n .map(|v| v.map(ToString::to_string).collect())\n\n .unwrap_or_default();\n\n\n\n if users.is_empty() {\n\n let gids = match get_groups_gnu(None) {\n\n Ok(v) => v,\n\n Err(_) => return Err(GroupsError::GetGroupsFailed.into()),\n\n };\n\n let groups: Vec<String> = gids.iter().map(infallible_gid2grp).collect();\n\n println!(\"{}\", groups.join(\" \"));\n\n return Ok(());\n\n }\n\n\n", "file_path": "src/uu/groups/src/groups.rs", "rank": 72, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n let (args, obs_width) = handle_obsolete(&args[..]);\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let bytes = matches.is_present(options::BYTES);\n\n let spaces = matches.is_present(options::SPACES);\n\n let poss_width = match matches.value_of(options::WIDTH) {\n\n Some(v) => Some(v.to_owned()),\n\n None => obs_width,\n\n };\n\n\n\n let width = match poss_width {\n\n Some(inp_width) => inp_width.parse::<usize>().map_err(|e| {\n\n USimpleError::new(\n\n 1,\n\n format!(\"illegal width value ({}): {}\", inp_width.quote(), e),\n", "file_path": "src/uu/fold/src/fold.rs", "rank": 73, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let mut niceness = unsafe {\n\n nix::errno::Errno::clear();\n\n libc::getpriority(PRIO_PROCESS, 0)\n\n };\n\n if Error::last_os_error().raw_os_error().unwrap() != 0 {\n\n return Err(USimpleError::new(\n\n 125,\n\n format!(\"getpriority: {}\", Error::last_os_error()),\n\n ));\n\n }\n\n\n\n let adjustment = match matches.value_of(options::ADJUSTMENT) {\n\n Some(nstr) => {\n\n if !matches.is_present(options::COMMAND) {\n\n return Err(UUsageError::new(\n", "file_path": "src/uu/nice/src/nice.rs", "rank": 74, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let options = Options {\n\n append: matches.is_present(options::APPEND),\n\n ignore_interrupts: matches.is_present(options::IGNORE_INTERRUPTS),\n\n files: matches\n\n .values_of(options::FILE)\n\n .map(|v| 
v.map(ToString::to_string).collect())\n\n .unwrap_or_default(),\n\n };\n\n\n\n match tee(options) {\n\n Ok(_) => Ok(()),\n\n Err(_) => Err(1.into()),\n\n }\n\n}\n\n\n", "file_path": "src/uu/tee/src/tee.rs", "rank": 75, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let to = Path::new(matches.value_of(options::TO).unwrap()).to_path_buf(); // required\n\n let from = match matches.value_of(options::FROM) {\n\n Some(p) => Path::new(p).to_path_buf(),\n\n None => env::current_dir().unwrap(),\n\n };\n\n let absto = canonicalize(to, MissingHandling::Normal, ResolveMode::Logical)\n\n .map_err_context(String::new)?;\n\n let absfrom = canonicalize(from, MissingHandling::Normal, ResolveMode::Logical)\n\n .map_err_context(String::new)?;\n\n\n\n if matches.is_present(options::DIR) {\n\n let base = Path::new(&matches.value_of(options::DIR).unwrap()).to_path_buf();\n", "file_path": "src/uu/relpath/src/relpath.rs", "rank": 76, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let after_help = get_long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&after_help[..])\n\n .get_matches_from(args);\n\n\n\n let files: Vec<&Path> = matches\n\n .values_of_os(ARG_FILES)\n\n .map(|v| v.map(AsRef::as_ref).collect())\n\n .unwrap_or_default();\n\n\n\n let filename = if !files.is_empty() {\n\n files[0]\n\n } else {\n\n utmpx::DEFAULT_FILE.as_ref()\n\n };\n\n\n", "file_path": "src/uu/users/src/users.rs", "rank": 77, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let default_shell: &'static str = \"/bin/sh\";\n\n let default_option: &'static str = \"-i\";\n\n let user_shell = std::env::var(\"SHELL\");\n\n\n\n let newroot: &Path = match matches.value_of(options::NEWROOT) {\n\n Some(v) => Path::new(v),\n\n None => return Err(ChrootError::MissingNewRoot.into()),\n\n };\n\n\n\n if !newroot.is_dir() {\n\n return Err(ChrootError::NoSuchDirectory(format!(\"{}\", newroot.display())).into());\n\n }\n\n\n", "file_path": "src/uu/chroot/src/chroot.rs", "rank": 78, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n let usage = usage();\n\n let after_help = get_long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&after_help[..])\n\n .get_matches_from(args);\n\n\n\n let separator = if matches.is_present(options::ZERO) {\n\n \"\\0\"\n\n } else {\n\n \"\\n\"\n\n };\n\n\n\n let dirnames: Vec<String> = matches\n", "file_path": "src/uu/dirname/src/dirname.rs", "rank": 79, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n if let Some(values) = matches.values_of(options::NUMBER) {\n\n let 
numbers = values.collect();\n\n return sleep(numbers);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/sleep/src/sleep.rs", "rank": 80, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let string = if let Some(values) = matches.values_of(\"STRING\") {\n\n let mut result = values.fold(String::new(), |res, s| res + s + \" \");\n\n result.pop();\n\n result.push('\\n');\n\n Cow::from(result)\n\n } else {\n\n Cow::from(\"y\\n\")\n\n };\n\n\n\n let mut buffer = [0; BUF_SIZE];\n\n let bytes = prepare_buffer(&string, &mut buffer);\n\n\n\n match exec(bytes) {\n\n Ok(()) => Ok(()),\n\n Err(err) if err.kind() == io::ErrorKind::BrokenPipe => Ok(()),\n\n Err(err) => Err(USimpleError::new(1, format!(\"standard output: {}\", err))),\n\n }\n\n}\n\n\n", "file_path": "src/uu/yes/src/yes.rs", "rank": 81, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = format!(\"{} [OPTION]...\", uucore::execution_phrase());\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let uname =\n\n PlatformInfo::new().map_err_context(|| \"failed to create PlatformInfo\".to_string())?;\n\n let mut output = String::new();\n\n\n\n let all = matches.is_present(options::ALL);\n\n let kernelname = matches.is_present(options::KERNELNAME);\n\n let nodename = matches.is_present(options::NODENAME);\n\n let kernelrelease = matches.is_present(options::KERNELRELEASE);\n\n let kernelversion = matches.is_present(options::KERNELVERSION);\n\n let machine = matches.is_present(options::MACHINE);\n\n let processor = matches.is_present(options::PROCESSOR);\n\n let hwplatform = matches.is_present(options::HWPLATFORM);\n\n let os = matches.is_present(options::OS);\n\n\n\n let none = !(all\n\n || kernelname\n", "file_path": "src/uu/uname/src/uname.rs", "rank": 82, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let usage = usage();\n\n let after_help = get_long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&after_help[..])\n\n .get_matches_from(args);\n\n\n\n let users: Vec<String> = matches\n\n .values_of(options::USER)\n\n .map(|v| v.map(ToString::to_string).collect())\n\n .unwrap_or_default();\n\n\n\n // If true, display the hours:minutes since each user has touched\n\n // the keyboard, or blank if within the last minute, or days followed\n", "file_path": "src/uu/pinky/src/pinky.rs", "rank": 83, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let matches = uu_app()\n\n .after_help(&*format!(\n\n \"{}\\n{}\",\n\n LONG_HELP,\n\n backup_control::BACKUP_CONTROL_LONG_HELP\n\n ))\n\n .usage(&usage[..])\n\n .get_matches_from(args);\n\n\n\n let options = Options::from_matches(&matches)?;\n\n\n\n if options.overwrite == OverwriteMode::NoClobber && options.backup != BackupMode::NoBackup {\n\n show_usage_error!(\"options --backup and --no-clobber are mutually exclusive\");\n\n return Err(ExitCode(EXIT_ERR).into());\n\n }\n\n\n\n let paths: Vec<String> = matches\n\n .values_of(options::PATHS)\n", "file_path": "src/uu/cp/src/cp.rs", "rank": 84, "score": 221874.97365858784 }, { 
"content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n let usage = usage();\n\n\n\n let app = uu_app().usage(&usage[..]);\n\n\n\n let matches = app.get_matches_from(args);\n\n\n\n let config = Config::from(matches);\n\n timeout(\n\n &config.command,\n\n config.duration,\n\n config.signal,\n\n config.kill_after,\n\n config.foreground,\n\n config.preserve_status,\n\n config.verbose,\n\n )\n\n}\n\n\n", "file_path": "src/uu/timeout/src/timeout.rs", "rank": 85, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let files: Vec<String> = match matches.values_of(options::FILE) {\n\n Some(v) => v.clone().map(|v| v.to_owned()).collect(),\n\n None => vec![],\n\n };\n\n\n\n if files.is_empty() {\n\n let (crc, size) = cksum(\"-\")?;\n\n println!(\"{} {}\", crc, size);\n\n return Ok(());\n\n }\n\n\n\n for fname in &files {\n\n match cksum(fname.as_ref()).map_err_context(|| format!(\"{}\", fname.maybe_quote())) {\n\n Ok((crc, size)) => println!(\"{} {} {}\", crc, size, fname),\n\n Err(err) => show!(err),\n\n };\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/cksum/src/cksum.rs", "rank": 86, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let (boot_time, user_count) = process_utmpx();\n\n let uptime = get_uptime(boot_time);\n\n if uptime < 0 {\n\n Err(USimpleError::new(1, \"could not retrieve system uptime\"))\n\n } else {\n\n if matches.is_present(options::SINCE) {\n\n let initial_date = Local.timestamp(Utc::now().timestamp() - uptime, 0);\n\n println!(\"{}\", initial_date.format(\"%Y-%m-%d %H:%M:%S\"));\n\n return Ok(());\n\n }\n\n\n\n print_time();\n\n let upsecs = uptime;\n\n print_uptime(upsecs);\n\n print_nusers(user_count);\n\n print_loadavg();\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/uu/uptime/src/uptime.rs", "rank": 87, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n let cwd = if matches.is_present(OPT_LOGICAL) {\n\n logical_path()\n\n } else {\n\n physical_path()\n\n }\n\n .map_err_context(|| \"failed to get current directory\".to_owned())?;\n\n\n\n // \\\\?\\ is a prefix Windows gives to paths under certain circumstances,\n\n // including when canonicalizing them.\n\n // With the right extension trait we can remove it non-lossily, but\n\n // we print it lossily anyway, so no reason to bother.\n\n #[cfg(windows)]\n\n let cwd = cwd\n\n .to_string_lossy()\n\n .strip_prefix(r\"\\\\?\\\")\n\n .map(Into::into)\n\n .unwrap_or(cwd);\n\n\n\n println_verbatim(&cwd).map_err_context(|| \"failed to print current directory\".to_owned())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/uu/pwd/src/pwd.rs", "rank": 88, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let matches = uu_app().usage(&usage[..]).get_matches_from(args);\n\n\n\n let numbers 
= matches.values_of(ARG_NUMBERS).unwrap().collect::<Vec<_>>();\n\n\n\n let options = SeqOptions {\n\n separator: matches.value_of(OPT_SEPARATOR).unwrap_or(\"\\n\").to_string(),\n\n terminator: matches.value_of(OPT_TERMINATOR).unwrap_or(\"\\n\").to_string(),\n\n widths: matches.is_present(OPT_WIDTHS),\n\n };\n\n\n\n let first = if numbers.len() > 1 {\n\n match numbers[0].parse() {\n\n Ok(num) => num,\n\n Err(e) => return Err(SeqError::ParseError(numbers[0].to_string(), e).into()),\n\n }\n\n } else {\n\n PreciseNumber::one()\n\n };\n", "file_path": "src/uu/seq/src/seq.rs", "rank": 89, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let serial = matches.is_present(options::SERIAL);\n\n let delimiters = matches.value_of(options::DELIMITER).unwrap().to_owned();\n\n let files = matches\n\n .values_of(options::FILE)\n\n .unwrap()\n\n .map(|s| s.to_owned())\n\n .collect();\n\n paste(files, serial, delimiters)\n\n}\n\n\n", "file_path": "src/uu/paste/src/paste.rs", "rank": 90, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let before = matches.is_present(options::BEFORE);\n\n let regex = matches.is_present(options::REGEX);\n\n let raw_separator = matches.value_of(options::SEPARATOR).unwrap_or(\"\\n\");\n\n let separator = if raw_separator.is_empty() {\n\n \"\\0\"\n\n } else {\n\n raw_separator\n\n };\n\n\n\n let files: Vec<&str> = match matches.values_of(options::FILE) {\n\n Some(v) => v.collect(),\n\n None => vec![\"-\"],\n\n };\n\n\n\n tac(files, before, regex, separator)\n\n}\n\n\n", "file_path": "src/uu/tac/src/tac.rs", "rank": 91, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n\n\n let app = uu_app().usage(&usage[..]);\n\n\n\n let matches = app.get_matches_from(args);\n\n\n\n let config = Config::from(&matches)?;\n\n\n\n let locs = matches\n\n .values_of_os(options::PATHS)\n\n .map(|v| v.map(Path::new).collect())\n\n .unwrap_or_else(|| vec![Path::new(\".\")]);\n\n\n\n list(locs, config)\n\n}\n\n\n", "file_path": "src/uu/ls/src/ls.rs", "rank": 92, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let long_usage = get_long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&long_usage[..])\n\n .get_matches_from(args);\n\n\n\n let files: Vec<String> = matches\n\n .values_of(ARG_FILES)\n\n .map(|v| v.map(ToString::to_string).collect())\n\n .unwrap_or_default();\n\n\n\n let force = matches.is_present(OPT_FORCE);\n\n\n\n if files.is_empty() && !force {\n\n // Still check by hand and not use clap\n\n // Because \"rm -f\" is a thing\n\n return Err(UUsageError::new(1, \"missing operand\"));\n", "file_path": "src/uu/rm/src/rm.rs", "rank": 93, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let matches = uu_app().get_matches_from(args);\n\n let stdout = stdout();\n\n // We use a smaller buffer here to pass a gnu test. 
4KiB appears to be the default pipe size for bash.\n\n let mut w = io::BufWriter::with_capacity(4 * 1024, stdout.lock());\n\n let mut factors_buffer = String::new();\n\n\n\n if let Some(values) = matches.values_of(options::NUMBER) {\n\n for number in values {\n\n if let Err(e) = print_factors_str(number, &mut w, &mut factors_buffer) {\n\n show_warning!(\"{}: {}\", number.maybe_quote(), e);\n\n }\n\n }\n\n } else {\n\n let stdin = stdin();\n\n\n\n for line in stdin.lock().lines() {\n\n for number in line.unwrap().split_whitespace() {\n\n if let Err(e) = print_factors_str(number, &mut w, &mut factors_buffer) {\n\n show_warning!(\"{}: {}\", number.maybe_quote(), e);\n", "file_path": "src/uu/factor/src/cli.rs", "rank": 94, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let mut args = args\n\n .collect_str(InvalidEncodingHandling::ConvertLossy)\n\n .accept_any();\n\n\n\n // Before we can parse 'args' with clap (and previously getopts),\n\n // a possible MODE prefix '-' needs to be removed (e.g. \"chmod -x FILE\").\n\n let mode_had_minus_prefix = mode::strip_minus_from_mode(&mut args);\n\n\n\n let usage = usage();\n\n let after_help = get_long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&after_help[..])\n\n .get_matches_from(args);\n\n\n\n let changes = matches.is_present(options::CHANGES);\n\n let quiet = matches.is_present(options::QUIET);\n\n let verbose = matches.is_present(options::VERBOSE);\n", "file_path": "src/uu/chmod/src/chmod.rs", "rank": 95, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = usage();\n\n let long_usage = long_usage();\n\n\n\n let matches = uu_app()\n\n .usage(&usage[..])\n\n .after_help(&*format!(\n\n \"{}\\n{}\",\n\n long_usage,\n\n backup_control::BACKUP_CONTROL_LONG_HELP\n\n ))\n\n .get_matches_from(args);\n\n\n\n /* the list of files */\n\n\n\n let paths: Vec<PathBuf> = matches\n\n .values_of(ARG_FILES)\n\n .unwrap()\n\n .map(PathBuf::from)\n\n .collect();\n", "file_path": "src/uu/ln/src/ln.rs", "rank": 96, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let usage = get_usage();\n\n\n\n let config = uu_app().usage(usage.as_ref());\n\n\n\n let options = match parse_command_line(config, args) {\n\n Ok(r) => r,\n\n Err(r) => {\n\n if let Error::CommandLine(r) = &r {\n\n match r.kind {\n\n clap::ErrorKind::HelpDisplayed | clap::ErrorKind::VersionDisplayed => {\n\n println!(\"{}\", r);\n\n return Ok(());\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n return Err(UUsageError::new(libc::EXIT_FAILURE, format!(\"{}.\\n\", r)));\n\n }\n", "file_path": "src/uu/chcon/src/chcon.rs", "rank": 97, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let number_mode = if matches.is_present(options::NUMBER_NONBLANK) {\n\n NumberingMode::NonEmpty\n\n } else if matches.is_present(options::NUMBER) {\n\n NumberingMode::All\n\n } else {\n\n NumberingMode::None\n\n };\n\n\n\n let show_nonprint = vec![\n\n options::SHOW_ALL.to_owned(),\n\n options::SHOW_NONPRINTING_ENDS.to_owned(),\n\n options::SHOW_NONPRINTING_TABS.to_owned(),\n\n 
options::SHOW_NONPRINTING.to_owned(),\n", "file_path": "src/uu/cat/src/cat.rs", "rank": 98, "score": 221874.97365858784 }, { "content": "#[uucore_procs::gen_uumain]\n\npub fn uumain(args: impl uucore::Args) -> UResult<()> {\n\n let args = args\n\n .collect_str(InvalidEncodingHandling::Ignore)\n\n .accept_any();\n\n\n\n // let mut opts = Options::new();\n\n let matches = uu_app().get_matches_from(args);\n\n\n\n let input_files: Vec<String> = match &matches.values_of(options::FILE) {\n\n Some(v) => v.clone().map(|v| v.to_owned()).collect(),\n\n None => vec![\"-\".to_string()],\n\n };\n\n\n\n let config = get_config(&matches)?;\n\n let word_filter = WordFilter::new(&matches, &config)?;\n\n let file_map = read_input(&input_files, &config).map_err_context(String::new)?;\n\n let word_set = create_word_set(&config, &word_filter, &file_map);\n\n let output_file = if !config.gnu_ext && matches.args.len() == 2 {\n\n matches.value_of(options::FILE).unwrap_or(\"-\").to_string()\n\n } else {\n\n \"-\".to_owned()\n\n };\n\n write_traditional_output(&config, &file_map, &word_set, &output_file)\n\n}\n\n\n", "file_path": "src/uu/ptx/src/ptx.rs", "rank": 99, "score": 221874.97365858784 } ]
Rust
gears_core/src/gear/mod.rs
XBagon/gears
5c42708dece4f0a288b2415a8efa23d3a35a5964
use crate::gear::special::GearSpecial; use crate::gear::{ command::{GearCommand, GearGenericCommand}, compound::GearCompound, internal::GearInternal, special::{io::Input, io::Output, literal::Literal}, }; use crate::ty::*; use crate::util::LiftSlotMap; use enum_dispatch::enum_dispatch; use slotmap::{new_key_type, SlotMap}; use thiserror::Error; pub mod command; pub mod compound; pub mod internal; pub mod special; new_key_type! { pub struct GearId; } impl Geared for TemplateGearId { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { register.template_gears[*self].evaluate(register, input) } } new_key_type! { pub struct TemplateGearId; } type GearMap = LiftSlotMap<GearId, Gear>; type TemplateGearMap = SlotMap<TemplateGearId, Gear>; pub struct GearRegister { pub gears: GearMap, pub template_gears: TemplateGearMap, pub internal: internal::Gears, pub special: special::Gears, pub command: command::Gears, } impl GearRegister { pub fn init() -> Self { let mut template_gears = SlotMap::with_key(); Self { internal: internal::Gears::init(&mut template_gears), special: special::Gears::init(&mut template_gears), command: command::Gears::init(&mut template_gears), gears: LiftSlotMap::with_key(), template_gears, } } pub fn register(&mut self, gear: Gear) -> GearId { self.gears.insert(gear) } pub fn register_template(&mut self, gear: Gear) -> TemplateGearId { self.template_gears.insert(gear) } pub fn duplicate(&mut self, gear_id: GearId) -> GearId { let clone = self.gears[gear_id].clone(); self.gears.insert(clone) } pub fn instantiate(&mut self, template_gear_id: TemplateGearId) -> GearId { self.register(self.gear_from_template(template_gear_id)) } pub fn instantiator(&mut self, template_gear_id: TemplateGearId) -> GearBuilder { GearBuilder { gear: self.gear_from_template(template_gear_id), register: self, } } pub fn builder(&mut self, implementation: GearImplementation) -> GearBuilder { let gear = Gear { name: String::new(), inputs: vec![], outputs: vec![], implementation, }; GearBuilder { gear, register: self, } } fn gear_from_template(&self, template_gear_id: TemplateGearId) -> Gear { let template = &self.template_gears[template_gear_id]; Gear { name: template.name.clone(), inputs: template.inputs.clone(), outputs: template.outputs.clone(), implementation: GearImplementation::Template(template_gear_id), } } pub fn get_mut_implementation(&mut self, gear_id: GearId) -> Option<&mut GearImplementation> { let mut gear = &mut self.gears[gear_id]; if let GearImplementation::Template(template_gear_id) = gear.implementation { let template_gear = &self.template_gears[template_gear_id]; gear.implementation = template_gear.implementation.clone(); } Some(&mut gear.implementation) } pub fn get_template_gear_id(&self, gear_id: GearId) -> Option<TemplateGearId> { if let GearImplementation::Template(template_gear_id) = self.gears[gear_id].implementation { Some(template_gear_id) } else { None } } pub fn evaluate(&self, gear_id: GearId, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { self.gears[gear_id].evaluate(self, input) } } #[must_use] pub struct GearBuilder<'a> { register: &'a mut GearRegister, pub gear: Gear, } impl<'a> GearBuilder<'a> { pub fn instantiate(self) -> GearId { self.register.register(self.gear) } pub fn templatize(self) -> TemplateGearId { self.register.register_template(self.gear) } pub fn name(mut self, name: String) -> Self { self.gear.name = name; self } pub fn input(mut self, io_info: IOInformation) -> Self { self.gear.inputs.push(io_info); self 
} pub fn output(mut self, io_info: IOInformation) -> Self { self.gear.outputs.push(io_info); self } } impl Default for GearRegister { fn default() -> Self { Self::init() } } #[derive(Clone)] pub struct Gear { pub name: String, pub inputs: Vec<IOInformation>, pub outputs: Vec<IOInformation>, pub implementation: GearImplementation, } impl Geared for Gear { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { self.implementation.evaluate(register, input) } } #[derive(Clone)] pub struct IOInformation { pub name: String, pub ty: TypeDiscriminant, } impl IOInformation { pub fn new(name: String, ty: TypeDiscriminant) -> Self { IOInformation { name, ty } } } #[enum_dispatch] #[derive(Clone)] pub enum GearImplementation { GearInternal, GearCompound, GearSpecial, GearCommand, GearGenericCommand, Template(TemplateGearId), } #[derive(Error, Debug)] pub enum Error { #[error("Error occurred in evaluation")] GearInternalError(#[from] Box<dyn std::error::Error>), #[error("IOError occured")] IOError(#[from] std::io::Error), #[error("IOError occured")] FromUTF8Error(#[from] std::string::FromUtf8Error), #[error("This `GearSpecial` isn't evaluable")] NonEvaluable, #[error("Terminated by signal: {0}")] TerminatedBySignal(i32), } pub type Result<T> = std::result::Result<T, Error>; #[enum_dispatch(GearImplementation, GearSpecial)] pub trait Geared { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>>; }
use crate::gear::special::GearSpecial; use crate::gear::{ command::{GearCommand, GearGenericCommand}, compound::GearCompound, internal::GearInternal, special::{io::Input, io::Output, literal::Literal}, }; use crate::ty::*; use crate::util::LiftSlotMap; use enum_dispatch::enum_dispatch; use slotmap::{new_key_type, SlotMap}; use thiserror::Error; pub mod command; pub mod compound; pub mod internal; pub mod special; new_key_type! { pub struct GearId; } impl Geared for TemplateGearId { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { register.template_gears[*self].evaluate(register, input) } } new_key_type! { pub struct TemplateGearId; } type GearMap = LiftSlotMap<GearId, Gear>; type TemplateGearMap = SlotMap<TemplateGearId, Gear>; pub struct GearRegister { pub gears: GearMap, pub template_gears: TemplateGearMap, pub internal: internal::Gears, pub special: special::Gears, pub command: command::Gears, } impl GearRegister {
pub fn register(&mut self, gear: Gear) -> GearId { self.gears.insert(gear) } pub fn register_template(&mut self, gear: Gear) -> TemplateGearId { self.template_gears.insert(gear) } pub fn duplicate(&mut self, gear_id: GearId) -> GearId { let clone = self.gears[gear_id].clone(); self.gears.insert(clone) } pub fn instantiate(&mut self, template_gear_id: TemplateGearId) -> GearId { self.register(self.gear_from_template(template_gear_id)) } pub fn instantiator(&mut self, template_gear_id: TemplateGearId) -> GearBuilder { GearBuilder { gear: self.gear_from_template(template_gear_id), register: self, } } pub fn builder(&mut self, implementation: GearImplementation) -> GearBuilder { let gear = Gear { name: String::new(), inputs: vec![], outputs: vec![], implementation, }; GearBuilder { gear, register: self, } } fn gear_from_template(&self, template_gear_id: TemplateGearId) -> Gear { let template = &self.template_gears[template_gear_id]; Gear { name: template.name.clone(), inputs: template.inputs.clone(), outputs: template.outputs.clone(), implementation: GearImplementation::Template(template_gear_id), } } pub fn get_mut_implementation(&mut self, gear_id: GearId) -> Option<&mut GearImplementation> { let mut gear = &mut self.gears[gear_id]; if let GearImplementation::Template(template_gear_id) = gear.implementation { let template_gear = &self.template_gears[template_gear_id]; gear.implementation = template_gear.implementation.clone(); } Some(&mut gear.implementation) } pub fn get_template_gear_id(&self, gear_id: GearId) -> Option<TemplateGearId> { if let GearImplementation::Template(template_gear_id) = self.gears[gear_id].implementation { Some(template_gear_id) } else { None } } pub fn evaluate(&self, gear_id: GearId, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { self.gears[gear_id].evaluate(self, input) } } #[must_use] pub struct GearBuilder<'a> { register: &'a mut GearRegister, pub gear: Gear, } impl<'a> GearBuilder<'a> { pub fn instantiate(self) -> GearId { self.register.register(self.gear) } pub fn templatize(self) -> TemplateGearId { self.register.register_template(self.gear) } pub fn name(mut self, name: String) -> Self { self.gear.name = name; self } pub fn input(mut self, io_info: IOInformation) -> Self { self.gear.inputs.push(io_info); self } pub fn output(mut self, io_info: IOInformation) -> Self { self.gear.outputs.push(io_info); self } } impl Default for GearRegister { fn default() -> Self { Self::init() } } #[derive(Clone)] pub struct Gear { pub name: String, pub inputs: Vec<IOInformation>, pub outputs: Vec<IOInformation>, pub implementation: GearImplementation, } impl Geared for Gear { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> { self.implementation.evaluate(register, input) } } #[derive(Clone)] pub struct IOInformation { pub name: String, pub ty: TypeDiscriminant, } impl IOInformation { pub fn new(name: String, ty: TypeDiscriminant) -> Self { IOInformation { name, ty } } } #[enum_dispatch] #[derive(Clone)] pub enum GearImplementation { GearInternal, GearCompound, GearSpecial, GearCommand, GearGenericCommand, Template(TemplateGearId), } #[derive(Error, Debug)] pub enum Error { #[error("Error occurred in evaluation")] GearInternalError(#[from] Box<dyn std::error::Error>), #[error("IOError occured")] IOError(#[from] std::io::Error), #[error("IOError occured")] FromUTF8Error(#[from] std::string::FromUtf8Error), #[error("This `GearSpecial` isn't evaluable")] NonEvaluable, #[error("Terminated by signal: {0}")] 
TerminatedBySignal(i32), } pub type Result<T> = std::result::Result<T, Error>; #[enum_dispatch(GearImplementation, GearSpecial)] pub trait Geared { fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>>; }
pub fn init() -> Self { let mut template_gears = SlotMap::with_key(); Self { internal: internal::Gears::init(&mut template_gears), special: special::Gears::init(&mut template_gears), command: command::Gears::init(&mut template_gears), gears: LiftSlotMap::with_key(), template_gears, } }
function_block-function_prefix_line
[ { "content": "use super::*;\n\n\n\npub mod io;\n\npub mod literal;\n\n\n\npub struct Gears {\n\n pub io: io::Gears,\n\n}\n\n\n\nimpl Gears {\n\n pub fn init(template_gears: &mut TemplateGearMap) -> Self {\n\n Self {\n\n io: io::Gears::init(template_gears),\n\n }\n\n }\n\n}\n\n\n\n#[enum_dispatch]\n\n#[derive(Clone)]\n\npub enum GearSpecial {\n\n Input,\n\n Output,\n\n Literal,\n\n}\n", "file_path": "gears_core/src/gear/special/mod.rs", "rank": 3, "score": 40367.83580593576 }, { "content": "}\n\n\n\nimpl GearInternal {\n\n pub fn new(\n\n function: fn(\n\n Vec<TypedValue>,\n\n ) -> std::result::Result<Vec<TypedValue>, Box<dyn std::error::Error>>,\n\n ) -> Self {\n\n Self { function }\n\n }\n\n}\n\n\n\nimpl Geared for GearInternal {\n\n fn evaluate(\n\n &self,\n\n _register: &GearRegister,\n\n input: Vec<TypedValue>,\n\n ) -> Result<Vec<TypedValue>> {\n\n Ok((self.function)(input)?)\n\n }\n", "file_path": "gears_core/src/gear/internal/mod.rs", "rank": 4, "score": 40257.12262550262 }, { "content": "use super::*;\n\n\n\nmod math;\n\n\n\npub struct Gears {\n\n pub math_gears: math::Gears,\n\n}\n\n\n\nimpl Gears {\n\n pub fn init(template_gears: &mut TemplateGearMap) -> Self {\n\n Self {\n\n math_gears: math::Gears::init(template_gears),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct GearInternal {\n\n pub function:\n\n fn(Vec<TypedValue>) -> std::result::Result<Vec<TypedValue>, Box<dyn std::error::Error>>,\n", "file_path": "gears_core/src/gear/internal/mod.rs", "rank": 5, "score": 40255.51067841048 }, { "content": "\n\n Ok(vec![$(TypedValue::$outty($outname)),*])\n\n }\n\n\n\n fn template() -> Gear {\n\n Gear {\n\n name: String::from(stringify!($name)),\n\n inputs: vec![$(IOInformation::new(String::from(stringify!($inname)), TypedValue::$inty(Default::default()).ty())),*],\n\n outputs: vec![$(IOInformation::new(String::from(stringify!($outname)), TypedValue::$outty(Default::default()).ty())),*],\n\n implementation: GearImplementation::GearInternal(GearInternal::new(Self::function)),\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\npub(crate) use template;\n", "file_path": "gears_core/src/gear/internal/mod.rs", "rank": 6, "score": 40253.35087712365 }, { "content": "}\n\n\n\nmacro_rules! 
template {\n\n ($name:ident ($($inname:ident: $inty:ident),*) -> ($($outname:ident: $outty:ident),*) {$code:block}) => {\n\n struct $name;\n\n impl $name {\n\n fn function(input: Vec<TypedValue>) -> std::result::Result<Vec<TypedValue>, Box<dyn std::error::Error>> {\n\n let mut input = input.iter();\n\n $(let $inname = if let &TypedValue::$inty($inname) = input.next().unwrap() {\n\n $inname\n\n } else { unreachable!() };)*\n\n drop(input);\n\n\n\n\n\n $(\n\n #[allow(unused_mut)]\n\n let mut $outname;\n\n )*\n\n\n\n $code\n", "file_path": "gears_core/src/gear/internal/mod.rs", "rank": 7, "score": 40249.65794988216 }, { "content": "#[cfg(not(target_family = \"unix\"))]\n\nfn extract_exit_code(status: ExitStatus) -> Result<i32> {\n\n Ok(status.code().unwrap())\n\n}\n", "file_path": "gears_core/src/gear/command.rs", "rank": 8, "score": 38442.67476258777 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn lift_test() {\n\n use crate::gear::GearId;\n\n\n\n let mut map = LiftSlotMap::<GearId, _>::with_key();\n\n let a = map.insert(String::from(\"abc\"));\n\n let b = map.insert(String::from(\"def\"));\n\n let c = map.insert(String::from(\"ghi\"));\n\n map.do_lifted(a, |map, a| {\n\n for (_key, val) in map.iter_mut() {\n\n val.push(a.pop().unwrap())\n\n }\n\n });\n\n assert_eq!(map[a].len(), 1);\n\n assert_eq!(map[b].len(), 4);\n\n assert_eq!(map[c].len(), 4);\n\n}\n", "file_path": "gears_core/src/util.rs", "rank": 9, "score": 31722.72258478368 }, { "content": "use super::*;\n\nuse slotmap::{Key, SecondaryMap};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Clone)]\n\npub struct GearCompound {\n\n pub connections: HashMap<GearId, Vec<(GearId, usize)>>,\n\n pub input_id: GearId,\n\n pub output_id: GearId,\n\n}\n\n\n\nimpl GearCompound {\n\n pub fn new(register: &mut GearRegister, num_inputs: usize, num_outputs: usize) -> Self {\n\n let mut input = register.instantiator(register.special.io.input);\n\n input.gear.outputs =\n\n vec![IOInformation::new(String::from(\"in\"), TypedValue::None.ty()); num_inputs];\n\n let input_id = input.instantiate();\n\n\n\n let mut output = register.instantiator(register.special.io.output);\n\n output.gear.inputs =\n", "file_path": "gears_core/src/gear/compound.rs", "rank": 10, "score": 24282.505805447938 }, { "content": " }\n\n vec[in_index] = (out_gear_id, out_index);\n\n }\n\n\n\n pub fn evaluate_instance(\n\n &self,\n\n register: &GearRegister,\n\n gear_id: GearId,\n\n input: Vec<TypedValue>,\n\n ) -> Result<Vec<TypedValue>> {\n\n register.gears[gear_id].evaluate(register, input)\n\n }\n\n}\n\n\n\nimpl Geared for GearCompound {\n\n fn evaluate(&self, register: &GearRegister, input: Vec<TypedValue>) -> Result<Vec<TypedValue>> {\n\n //Post-Order DFS with cache for evaluations\n\n let mut stack = Vec::new();\n\n let mut cache: SecondaryMap<GearId, Vec<TypedValue>> = SecondaryMap::new(); //capacity known to serdeble wrapper around GearCompound\n\n let mut visited: SecondaryMap<GearId, ()> = SecondaryMap::new(); //capacity known to serdeble wrapper around GearCompound\n", "file_path": "gears_core/src/gear/compound.rs", "rank": 11, "score": 24278.112449601664 }, { "content": "\n\n stack.push(self.output_id);\n\n cache.insert(self.input_id, input);\n\n\n\n while let Some(&current_gear_id) = stack.last() {\n\n if !visited.contains_key(current_gear_id) {\n\n //FIXME: somehow fill before? 
or at least use entry API\n\n visited.insert(current_gear_id, ());\n\n\n\n if let Some(connections) = self.connections.get(&current_gear_id) {\n\n for &(connected_id, _out_index) in connections {\n\n if !visited.contains_key(connected_id) {\n\n stack.push(connected_id);\n\n }\n\n }\n\n }\n\n } else {\n\n stack.pop();\n\n if !cache.contains_key(current_gear_id) {\n\n let gear = &register.gears[current_gear_id];\n", "file_path": "gears_core/src/gear/compound.rs", "rank": 12, "score": 24272.54496784556 }, { "content": "\n\n dbg!(&self.connections);\n\n let inputs = if let Some(connections) = &self.connections.get(&current_gear_id)\n\n {\n\n connections\n\n .iter()\n\n .map(|&(gear_id, out_index)| cache[gear_id][out_index].clone())\n\n .collect()\n\n } else {\n\n Vec::new()\n\n };\n\n dbg!(&inputs);\n\n cache.insert(current_gear_id, gear.evaluate(register, inputs)?);\n\n //? -> early return/abort\n\n }\n\n }\n\n }\n\n\n\n Ok(cache.remove(self.output_id).unwrap())\n\n }\n\n}\n", "file_path": "gears_core/src/gear/compound.rs", "rank": 13, "score": 24272.228212888793 }, { "content": " vec![IOInformation::new(String::from(\"out\"), TypedValue::None.ty()); num_outputs];\n\n let output_id = output.instantiate();\n\n\n\n Self {\n\n connections: HashMap::new(),\n\n input_id,\n\n output_id,\n\n }\n\n }\n\n\n\n pub fn connect(\n\n &mut self,\n\n out_gear_id: GearId,\n\n out_index: usize,\n\n in_gear_id: GearId,\n\n in_index: usize,\n\n ) {\n\n let vec = self.connections.entry(in_gear_id).or_default();\n\n if vec.len() <= in_index {\n\n vec.resize(in_index + 1, (GearId::null(), 0)); //TODO: set up right size when adding gear\n", "file_path": "gears_core/src/gear/compound.rs", "rank": 14, "score": 24271.961845002123 }, { "content": " name: String::from(\"GenericCommand\"),\n\n inputs: Vec::new(),\n\n outputs: Vec::new(),\n\n implementation: GearImplementation::GearGenericCommand(GearGenericCommand),\n\n }\n\n }\n\n}\n\n\n\nimpl Geared for GearGenericCommand {\n\n fn evaluate(\n\n &self,\n\n _register: &GearRegister,\n\n input: Vec<TypedValue>,\n\n ) -> Result<Vec<TypedValue>> {\n\n let mut input = input.into_iter().map(|input| {\n\n if let TypedValue::String(s) = input {\n\n s\n\n } else {\n\n unreachable!()\n\n }\n", "file_path": "gears_core/src/gear/command.rs", "rank": 15, "score": 24162.213909868693 }, { "content": "use super::*;\n\nuse std::process::{Command, ExitStatus};\n\n\n\npub struct Gears {\n\n pub generic_command: TemplateGearId,\n\n}\n\n\n\nimpl Gears {\n\n pub fn init(template_gears: &mut TemplateGearMap) -> Self {\n\n Self {\n\n generic_command: template_gears.insert(GearGenericCommand::template()),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct GearGenericCommand;\n\nimpl GearGenericCommand {\n\n pub fn template() -> Gear {\n\n Gear {\n", "file_path": "gears_core/src/gear/command.rs", "rank": 16, "score": 24161.090184174867 }, { "content": "\n\nimpl Geared for GearCommand {\n\n fn evaluate(\n\n &self,\n\n _register: &GearRegister,\n\n input: Vec<TypedValue>,\n\n ) -> Result<Vec<TypedValue>> {\n\n let output = Command::new(&self.program)\n\n .args(input.into_iter().map(|input| {\n\n if let TypedValue::String(s) = input {\n\n s\n\n } else {\n\n unreachable!()\n\n }\n\n }))\n\n .output()?;\n\n Ok(vec![\n\n TypedValue::I32(extract_exit_code(output.status)?),\n\n TypedValue::String(String::from_utf8(output.stdout)?),\n\n TypedValue::String(String::from_utf8(output.stderr)?),\n\n ])\n\n }\n\n}\n\n\n\n#[cfg(target_family = \"unix\")]\n", "file_path": "gears_core/src/gear/command.rs", 
"rank": 17, "score": 24160.67760349745 }, { "content": " });\n\n let output = Command::new(input.next().unwrap()).args(input).output()?;\n\n Ok(vec![\n\n TypedValue::I32(extract_exit_code(output.status)?),\n\n TypedValue::String(String::from_utf8(output.stdout)?),\n\n TypedValue::String(String::from_utf8(output.stderr)?),\n\n ])\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct GearCommand {\n\n program: String,\n\n}\n\n\n\nimpl GearCommand {\n\n pub fn new(program: String) -> Self {\n\n Self { program }\n\n }\n\n}\n", "file_path": "gears_core/src/gear/command.rs", "rank": 18, "score": 24160.32932837188 }, { "content": "use super::*;\n\n\n\n#[derive(Clone)]\n\npub struct Literal(pub TypedValue);\n\nimpl Literal {\n\n pub fn instantiate(register: &mut GearRegister, value: TypedValue) -> GearId {\n\n register.register(Gear {\n\n name: String::from(\"Literal\"),\n\n inputs: Vec::new(),\n\n outputs: vec![IOInformation::new(String::from(\"value\"), value.ty())],\n\n implementation: GearImplementation::GearSpecial(GearSpecial::Literal(Literal(value))),\n\n })\n\n }\n\n}\n\n\n\nimpl Geared for Literal {\n\n fn evaluate(\n\n &self,\n\n _register: &GearRegister,\n\n _input: Vec<TypedValue>,\n\n ) -> Result<Vec<TypedValue>> {\n\n Ok(vec![self.0.clone()])\n\n }\n\n}\n", "file_path": "gears_core/src/gear/special/literal.rs", "rank": 19, "score": 23248.39394877274 }, { "content": "#[derive(Clone)]\n\npub struct Output;\n\nimpl Output {\n\n pub fn template() -> Gear {\n\n Gear {\n\n name: String::from(\"Output\"),\n\n inputs: Vec::new(),\n\n outputs: Vec::new(),\n\n implementation: GearImplementation::GearSpecial(GearSpecial::Output(Output)),\n\n }\n\n }\n\n}\n\n\n\nimpl Geared for Output {\n\n fn evaluate(\n\n &self,\n\n _register: &GearRegister,\n\n input: Vec<TypedValue>,\n\n ) -> Result<Vec<TypedValue>> {\n\n Ok(input)\n\n }\n\n}\n", "file_path": "gears_core/src/gear/special/io.rs", "rank": 20, "score": 23247.71824267707 }, { "content": " pub fn template() -> Gear {\n\n Gear {\n\n name: String::from(\"Input\"),\n\n inputs: Vec::new(),\n\n outputs: Vec::new(),\n\n implementation: GearImplementation::GearSpecial(GearSpecial::Input(Input)),\n\n }\n\n }\n\n}\n\n\n\nimpl Geared for Input {\n\n fn evaluate(\n\n &self,\n\n _register: &GearRegister,\n\n _input: Vec<TypedValue>,\n\n ) -> Result<Vec<TypedValue>> {\n\n Err(NonEvaluable)\n\n }\n\n}\n\n\n", "file_path": "gears_core/src/gear/special/io.rs", "rank": 21, "score": 23247.535933874253 }, { "content": "use super::*;\n\nuse crate::gear::Error::NonEvaluable;\n\n\n\npub struct Gears {\n\n pub input: TemplateGearId,\n\n pub output: TemplateGearId,\n\n}\n\n\n\nimpl Gears {\n\n pub fn init(template_gears: &mut TemplateGearMap) -> Self {\n\n Self {\n\n input: template_gears.insert(Input::template()),\n\n output: template_gears.insert(Output::template()),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Input;\n\nimpl Input {\n", "file_path": "gears_core/src/gear/special/io.rs", "rank": 22, "score": 23244.88721867066 }, { "content": "use super::*;\n\n\n\npub struct Gears {\n\n pub add: TemplateGearId,\n\n pub sub: TemplateGearId,\n\n pub mul: TemplateGearId,\n\n pub div: TemplateGearId,\n\n}\n\n\n\nimpl Gears {\n\n pub fn init(template_gears: &mut TemplateGearMap) -> Self {\n\n Self {\n\n add: template_gears.insert(Add::template()),\n\n sub: template_gears.insert(Sub::template()),\n\n mul: template_gears.insert(Mul::template()),\n\n div: template_gears.insert(Div::template()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "gears_core/src/gear/internal/math.rs", 
"rank": 23, "score": 23130.024483348963 }, { "content": "//TODO: define for other num types\n\n\n\ntemplate!(Add (summand1: U64, summand2: U64) -> (sum: U64) {{\n\n sum = summand1 + summand2\n\n}});\n\n\n\ntemplate!(Sub (minuend: U64, subtrahend: U64) -> (difference: U64) {{\n\n difference = minuend - subtrahend\n\n}});\n\n\n\ntemplate!(Mul (factor1: U64, factor2: U64) -> (product: U64) {{\n\n product = factor1 * factor2\n\n}});\n\n\n\ntemplate!(Div (dividend: U64, divisor: U64) -> (fraction: U64) {{\n\n fraction = dividend / divisor\n\n}});\n", "file_path": "gears_core/src/gear/internal/math.rs", "rank": 24, "score": 23123.51868329669 }, { "content": "pub mod gear;\n\npub mod ty;\n\npub mod util;\n\n//FIXME: endless loop when connecting wrong index => add checks + typechecks\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::gear::command::GearCommand;\n\n use crate::gear::compound::GearCompound;\n\n use crate::gear::special::literal::Literal;\n\n use crate::gear::*;\n\n use crate::ty::*;\n\n\n\n fn squared_gear_template(register: &mut GearRegister) -> TemplateGearId {\n\n let mut compound = GearCompound::new(register, 1, 1);\n\n let mul = register.instantiate(register.internal.math_gears.mul);\n\n\n\n compound.connect(compound.input_id, 0, mul, 0);\n\n compound.connect(compound.input_id, 0, mul, 1);\n\n compound.connect(mul, 0, compound.output_id, 0);\n", "file_path": "gears_core/src/lib.rs", "rank": 34, "score": 3777.615860193352 }, { "content": " let gear = register.instantiate(squared);\n\n assert_eq!(\n\n register.evaluate(gear, vec![TypedValue::U64(5)]).unwrap()[0],\n\n TypedValue::U64(25)\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_x2_plus_y2() {\n\n let mut register = GearRegister::init();\n\n let squared = squared_gear_template(&mut register);\n\n\n\n let mut compound = GearCompound::new(&mut register, 1, 1);\n\n let squared_x = register.instantiate(squared);\n\n let squared_y = register.instantiate(squared);\n\n let add = register.instantiate(register.internal.math_gears.add);\n\n\n\n compound.connect(compound.input_id, 0, squared_x, 0);\n\n compound.connect(compound.input_id, 1, squared_y, 0);\n\n compound.connect(squared_x, 0, add, 0);\n", "file_path": "gears_core/src/lib.rs", "rank": 35, "score": 3771.0896072913874 }, { "content": " .instantiator(register.internal.math_gears.add)\n\n .instantiate();\n\n\n\n let one = Literal::instantiate(&mut register, TypedValue::U64(1));\n\n\n\n compound.connect(compound.input_id, 0, add, 0);\n\n compound.connect(one, 0, add, 1);\n\n compound.connect(add, 0, compound.output_id, 0);\n\n\n\n let gear = register\n\n .builder(compound.into())\n\n .name(String::from(\"Increment\"))\n\n .input(IOInformation::new(\n\n String::from(\"number\"),\n\n TypedValue::I32(Default::default()).ty(),\n\n ))\n\n .output(IOInformation::new(\n\n String::from(\"incremented\"),\n\n TypedValue::I32(Default::default()).ty(),\n\n ))\n\n .instantiate();\n\n\n\n assert_eq!(\n\n register.evaluate(gear, vec![TypedValue::U64(0)]).unwrap()[0],\n\n TypedValue::U64(1)\n\n );\n\n }\n\n}\n", "file_path": "gears_core/src/lib.rs", "rank": 36, "score": 3770.48725429919 }, { "content": "\n\n #[test]\n\n fn test_cargo() {\n\n let mut register = GearRegister::init();\n\n let gear = register.instantiate(register.command.generic_command);\n\n\n\n assert_eq!(\n\n register\n\n .evaluate(gear, vec![TypedValue::String(String::from(\"cargo\"))])\n\n .unwrap()[0],\n\n TypedValue::I32(0)\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_increment() {\n\n let mut register = GearRegister::init();\n\n\n\n 
let mut compound = GearCompound::new(&mut register, 1, 1);\n\n let add = register\n", "file_path": "gears_core/src/lib.rs", "rank": 37, "score": 3770.4040326090308 }, { "content": " register\n\n .evaluate(gear, vec![TypedValue::U64(4), TypedValue::U64(6)])\n\n .unwrap()[0],\n\n TypedValue::U64(52)\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_echo() {\n\n let mut register = GearRegister::init();\n\n let command = GearCommand::new(String::from(\"echo\"));\n\n\n\n let gear = register\n\n .builder(command.into())\n\n .name(String::from(\"Echo\"))\n\n .input(IOInformation::new(\n\n String::from(\"text\"),\n\n TypedValue::String(Default::default()).ty(),\n\n ))\n\n .output(IOInformation::new(\n", "file_path": "gears_core/src/lib.rs", "rank": 38, "score": 3769.6194086108194 }, { "content": "\n\n register\n\n .builder(compound.into())\n\n .name(String::from(\"Squared\"))\n\n .input(IOInformation::new(\n\n String::from(\"base\"),\n\n TypedValue::U64(Default::default()).ty(),\n\n ))\n\n .output(IOInformation::new(\n\n String::from(\"square\"),\n\n TypedValue::U64(Default::default()).ty(),\n\n ))\n\n .templatize()\n\n }\n\n\n\n #[test]\n\n fn test_squared_gear() {\n\n let mut register = GearRegister::init();\n\n let squared = squared_gear_template(&mut register);\n\n\n", "file_path": "gears_core/src/lib.rs", "rank": 39, "score": 3768.2661056094294 }, { "content": " compound.connect(squared_y, 0, add, 1);\n\n compound.connect(add, 0, compound.output_id, 0);\n\n\n\n let gear = register\n\n .builder(compound.into())\n\n .name(String::from(\"x² + y² = z²\"))\n\n .input(IOInformation::new(\n\n String::from(\"x\"),\n\n TypedValue::U64(Default::default()).ty(),\n\n ))\n\n .input(IOInformation::new(\n\n String::from(\"y\"),\n\n TypedValue::U64(Default::default()).ty(),\n\n ))\n\n .output(IOInformation::new(\n\n String::from(\"z²\"),\n\n TypedValue::U64(Default::default()).ty(),\n\n ))\n\n .instantiate();\n\n assert_eq!(\n", "file_path": "gears_core/src/lib.rs", "rank": 40, "score": 3767.983738418685 }, { "content": "use core::default::Default;\n\nuse std::fmt::{Display, Formatter};\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum TypedValue {\n\n None,\n\n U32(u32),\n\n U64(u64),\n\n I32(i32),\n\n I64(i64),\n\n F64(f64),\n\n String(String),\n\n}\n\n\n\nimpl Default for TypedValue {\n\n fn default() -> Self {\n\n TypedValue::None\n\n }\n\n}\n\n\n", "file_path": "gears_core/src/ty.rs", "rank": 41, "score": 3766.833705169202 }, { "content": "impl Display for TypedValue {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n TypedValue::None => write!(f, \"\"),\n\n TypedValue::U32(n) => n.fmt(f),\n\n TypedValue::U64(n) => n.fmt(f),\n\n TypedValue::I32(n) => n.fmt(f),\n\n TypedValue::I64(n) => n.fmt(f),\n\n TypedValue::F64(n) => n.fmt(f),\n\n TypedValue::String(s) => s.fmt(f),\n\n }\n\n }\n\n}\n\n\n\npub type TypeDiscriminant = std::mem::Discriminant<TypedValue>;\n\n\n\n//TODO: use lazy_static to create constant `TypeDiscriminant`s for each type, until `std::mem::discriminant` is const on stable\n\nimpl TypedValue {\n\n pub fn ty(&self) -> TypeDiscriminant {\n\n std::mem::discriminant(self)\n\n }\n\n}\n", "file_path": "gears_core/src/ty.rs", "rank": 42, "score": 3766.7170910727023 }, { "content": "use slotmap::SlotMap;\n\nuse std::ops::{Index, IndexMut};\n\n\n\npub struct LiftSlotMap<K: slotmap::Key, V>(SlotMap<K, Option<V>>);\n\n\n\nimpl<K: slotmap::Key, V> From<SlotMap<K, Option<V>>> for LiftSlotMap<K, V> {\n\n fn from(slot_map: SlotMap<K, Option<V>>) -> Self {\n\n 
Self(slot_map)\n\n }\n\n}\n\n\n\nimpl<K: slotmap::Key, V> Index<K> for LiftSlotMap<K, V> {\n\n type Output = V;\n\n\n\n fn index(&self, index: K) -> &Self::Output {\n\n self.0.index(index).as_ref().expect(\"Accessed lifted slot!\")\n\n }\n\n}\n\n\n\nimpl<K: slotmap::Key, V> IndexMut<K> for LiftSlotMap<K, V> {\n", "file_path": "gears_core/src/util.rs", "rank": 43, "score": 3766.427094694057 }, { "content": " fn index_mut(&mut self, index: K) -> &mut Self::Output {\n\n self.0\n\n .index_mut(index)\n\n .as_mut()\n\n .expect(\"Accessed lifted slot!\")\n\n }\n\n}\n\n\n\nimpl<'a, K: slotmap::Key, V> LiftSlotMap<K, V> {\n\n pub fn with_key() -> Self {\n\n Self(SlotMap::with_key())\n\n }\n\n\n\n pub fn insert(&mut self, value: V) -> K {\n\n self.0.insert(Some(value))\n\n }\n\n\n\n pub fn iter(&mut self) -> impl Iterator<Item = (K, &V)> {\n\n self.0\n\n .iter()\n", "file_path": "gears_core/src/util.rs", "rank": 44, "score": 3765.117815119402 }, { "content": " .map(|(k, v)| v.as_ref().map(|v| (k, v)))\n\n .flatten()\n\n }\n\n\n\n pub fn iter_mut(&mut self) -> impl Iterator<Item = (K, &mut V)> {\n\n self.0\n\n .iter_mut()\n\n .map(|(k, v)| v.as_mut().map(|v| (k, v)))\n\n .flatten()\n\n }\n\n\n\n pub fn do_lifted(&mut self, key: K, mut f: impl FnMut(&mut Self, &mut V)) {\n\n let mut value = self.0.get_mut(key).unwrap().take().unwrap();\n\n f(self, &mut value);\n\n *self.0.get_mut(key).unwrap() = Some(value);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[test]\n", "file_path": "gears_core/src/util.rs", "rank": 45, "score": 3764.814004678371 }, { "content": " String::from(\"exit code\"),\n\n TypedValue::String(Default::default()).ty(),\n\n ))\n\n .output(IOInformation::new(\n\n String::from(\"stdout\"),\n\n TypedValue::String(Default::default()).ty(),\n\n ))\n\n .output(IOInformation::new(\n\n String::from(\"stderr\"),\n\n TypedValue::String(Default::default()).ty(),\n\n ))\n\n .instantiate();\n\n\n\n assert_eq!(\n\n register\n\n .evaluate(gear, vec![TypedValue::String(String::from(\"Hello world!\"))])\n\n .unwrap()[1],\n\n TypedValue::String(String::from(\"Hello world!\\n\"))\n\n );\n\n }\n", "file_path": "gears_core/src/lib.rs", "rank": 46, "score": 3764.445548653962 } ]
Rust
atomics/src/atomic_primitive/atomic_f32.rs
BrianMcDonaldWS/rpc-perf
e36466ce611d151757cf4e8dcfebb1f7f32263d7
use crate::{AtomicPrimitive, Ordering}; #[cfg(feature = "serde")] use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; pub struct AtomicF32 { pub(crate) inner: core::sync::atomic::AtomicU32, } impl AtomicPrimitive for AtomicF32 { type Primitive = f32; fn new(value: Self::Primitive) -> Self { let value = unsafe { std::mem::transmute(value) }; Self { inner: core::sync::atomic::AtomicU32::new(value), } } fn get_mut(&mut self) -> &mut Self::Primitive { unsafe { &mut *(self.inner.get_mut() as *mut u32 as *mut f32) } } fn into_inner(self) -> Self::Primitive { f32::from_bits(self.inner.into_inner()) } fn load(&self, order: Ordering) -> Self::Primitive { f32::from_bits(self.inner.load(order)) } fn store(&self, value: Self::Primitive, order: Ordering) { self.inner.store(value.to_bits(), order) } fn swap(&self, value: Self::Primitive, order: Ordering) -> Self::Primitive { f32::from_bits(self.inner.swap(value.to_bits(), order)) } fn compare_and_swap( &self, current: Self::Primitive, new: Self::Primitive, order: Ordering, ) -> Self::Primitive { f32::from_bits( self.inner .compare_and_swap(current.to_bits(), new.to_bits(), order), ) } fn compare_exchange( &self, current: Self::Primitive, new: Self::Primitive, success: Ordering, failure: Ordering, ) -> Result<Self::Primitive, Self::Primitive> { self.inner .compare_exchange(current.to_bits(), new.to_bits(), success, failure) .map(f32::from_bits) .map_err(f32::from_bits) } fn compare_exchange_weak( &self, current: Self::Primitive, new: Self::Primitive, success: Ordering, failure: Ordering, ) -> Result<Self::Primitive, Self::Primitive> { self.inner .compare_exchange_weak(current.to_bits(), new.to_bits(), success, failure) .map(f32::from_bits) .map_err(f32::from_bits) } } impl Default for AtomicF32 { fn default() -> Self { Self::new(Default::default()) } } impl PartialEq for AtomicF32 { fn eq(&self, other: &Self) -> bool { self.load(Ordering::SeqCst) == other.load(Ordering::SeqCst) } } impl Eq for AtomicF32 {} impl std::fmt::Debug for AtomicF32 { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.load(Ordering::Relaxed)) } } #[cfg(feature = "serde")] struct AtomicF32Visitor; #[cfg(feature = "serde")] impl<'de> Visitor<'de> for AtomicF32Visitor { type Value = AtomicF32; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a 32bit floating point number") } fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_i32<E>(self, value: i32) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = i16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = i16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_u8<E>(self, value: u8) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_u16<E>(self, value: u16) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_u32<E>(self, value: u32) -> 
Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = u16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = u16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_f32<E>(self, value: f32) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(value)) } fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E> where E: serde::de::Error, { if value >= f64::from(std::f32::MIN) && value <= f64::from(std::f32::MAX) { Ok(Self::Value::new(value as f32)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } } #[cfg(feature = "serde")] impl<'de> Deserialize<'de> for AtomicF32 { fn deserialize<D>(deserializer: D) -> Result<AtomicF32, D::Error> where D: Deserializer<'de>, { deserializer.deserialize_any(AtomicF32Visitor) } } #[cfg(feature = "serde")] impl Serialize for AtomicF32 { #[inline] fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_some(&self.load(Ordering::SeqCst)) } } #[cfg(test)] mod tests { use super::*; #[test] fn load() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.load(Ordering::SeqCst), 0.0); } #[test] fn store() { let atomic = AtomicF32::new(0.0); atomic.store(3.14, Ordering::SeqCst); assert_eq!(atomic.into_inner(), 3.14); } #[test] fn swap() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.swap(3.14, Ordering::SeqCst), 0.0); } #[test] fn compare_and_swap() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.compare_and_swap(0.0, 3.14, Ordering::SeqCst), 0.0); assert_eq!(atomic.compare_and_swap(0.0, 42.0, Ordering::SeqCst), 3.14); } #[test] fn compare_exchange() { let atomic = AtomicF32::new(0.0); assert_eq!( atomic.compare_exchange(0.0, 3.14, Ordering::SeqCst, Ordering::SeqCst), Ok(0.0) ); assert_eq!( atomic.compare_exchange(0.0, 42.0, Ordering::SeqCst, Ordering::SeqCst), Err(3.14) ); } #[test] fn compare_exchange_weak() { let atomic = AtomicF32::new(0.0); loop { if atomic .compare_exchange(0.0, 3.14, Ordering::SeqCst, Ordering::SeqCst) .is_ok() { break; } } assert_eq!(atomic.into_inner(), 3.14); } }
use crate::{AtomicPrimitive, Ordering}; #[cfg(feature = "serde")] use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; pub struct AtomicF32 { pub(crate) inner: core::sync::atomic::AtomicU32, } impl AtomicPrimitive for AtomicF32 { type Primitive = f32; fn new(value: Self::Primitive) -> Self { let value = unsafe { std::mem::transmute(value) }; Self { inner: core::sync::atomic::AtomicU32::new(value), } } fn get_mut(&mut self) -> &mut Self::Primitive { unsafe { &mut *(self.inner.get_mut() as *mut u32 as *mut f32) } } fn into_inner(self) -> Self::Primitive { f32::from_bits(self.inner.into_inner()) } fn load(&self, order: Ordering) -> Self::Primitive { f32::from_bits(self.inner.load(order)) } fn store(&self, value: Self::Primitive, order: Ordering) { self.inner.store(value.to_bits(), order) } fn swap(&self, value: Self::Primitive, order: Ordering) -> Self::Primitive { f32::from_bits(self.inner.swap(value.to_bits(), order)) } fn compare_and_swap( &self, current: Self::Primitive, new: Self::Primitive, order: Ordering, ) -> Self::Primitive { f32::from_bits( self.inner .compare_and_swap(current.to_bits(), new.to_bits(), order), ) } fn compare_exchange( &self, current: Self::Primitive, new: Self::Primitive, success: Ordering, failure: Ordering, ) -> Result<Self::Primitive, Self::Primitive> { self.inner .compare_exchange(current.to_bits(), new.to_bits(), success, failure) .map(f32::from_bits) .map_err(f32::from_bits) } fn compare_exchange_weak( &self, current: Self::Primitive, new: Self::Primitive, success: Ordering, failure: Ordering, ) -> Result<Self::Primitive, Self::Primitive> { self.inner .compare_exchange_weak(current.to_bits(), new.to_bits(), success, failure) .map(f32::from_bits) .map_err(f32::from_bits) } } impl Default for AtomicF32 { fn default() -> Self { Self::new(Default::default()) } } impl PartialEq for AtomicF32 { fn eq(&self, other: &Self) -> bool { self.load(Ordering::SeqCst) == other.load(Ordering::SeqCst) } } impl Eq for AtomicF32 {} impl std::fmt::Debug for AtomicF32 { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:?}", self.load(Ordering::Relaxed)) } } #[cfg(feature = "serde")] struct AtomicF32Visitor; #[cfg(feature = "serde")] impl<'de> Visitor<'de> for AtomicF32Visitor { type Value = AtomicF32; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("a 32bit floating point number") } fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_i32<E>(self, value: i32) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = i16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom;
} fn visit_u8<E>(self, value: u8) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_u16<E>(self, value: u16) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(f32::from(value))) } fn visit_u32<E>(self, value: u32) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = u16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E> where E: serde::de::Error, { use std::convert::TryFrom; if let Ok(value) = u16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } fn visit_f32<E>(self, value: f32) -> Result<Self::Value, E> where E: serde::de::Error, { Ok(Self::Value::new(value)) } fn visit_f64<E>(self, value: f64) -> Result<Self::Value, E> where E: serde::de::Error, { if value >= f64::from(std::f32::MIN) && value <= f64::from(std::f32::MAX) { Ok(Self::Value::new(value as f32)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) } } } #[cfg(feature = "serde")] impl<'de> Deserialize<'de> for AtomicF32 { fn deserialize<D>(deserializer: D) -> Result<AtomicF32, D::Error> where D: Deserializer<'de>, { deserializer.deserialize_any(AtomicF32Visitor) } } #[cfg(feature = "serde")] impl Serialize for AtomicF32 { #[inline] fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_some(&self.load(Ordering::SeqCst)) } } #[cfg(test)] mod tests { use super::*; #[test] fn load() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.load(Ordering::SeqCst), 0.0); } #[test] fn store() { let atomic = AtomicF32::new(0.0); atomic.store(3.14, Ordering::SeqCst); assert_eq!(atomic.into_inner(), 3.14); } #[test] fn swap() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.swap(3.14, Ordering::SeqCst), 0.0); } #[test] fn compare_and_swap() { let atomic = AtomicF32::new(0.0); assert_eq!(atomic.compare_and_swap(0.0, 3.14, Ordering::SeqCst), 0.0); assert_eq!(atomic.compare_and_swap(0.0, 42.0, Ordering::SeqCst), 3.14); } #[test] fn compare_exchange() { let atomic = AtomicF32::new(0.0); assert_eq!( atomic.compare_exchange(0.0, 3.14, Ordering::SeqCst, Ordering::SeqCst), Ok(0.0) ); assert_eq!( atomic.compare_exchange(0.0, 42.0, Ordering::SeqCst, Ordering::SeqCst), Err(3.14) ); } #[test] fn compare_exchange_weak() { let atomic = AtomicF32::new(0.0); loop { if atomic .compare_exchange(0.0, 3.14, Ordering::SeqCst, Ordering::SeqCst) .is_ok() { break; } } assert_eq!(atomic.into_inner(), 3.14); } }
if let Ok(value) = i16::try_from(value).map(f32::from) { Ok(Self::Value::new(value)) } else { Err(E::custom(format!("f32 is out of range: {}", value))) }
if_condition
[ { "content": "/// Counter primitives are capable of saturating addition and subtraction\n\npub trait CounterPrimitive: PartialEq + Copy + Default {\n\n /// Perform saturating addition on this `CounterPrimitive`\n\n fn saturating_add(self, rhs: Self) -> Self;\n\n /// Perform saturating subtraction on this `CounterPrimitive`\n\n fn saturating_sub(self, rhs: Self) -> Self;\n\n}\n\n\n", "file_path": "datastructures/src/counter/counter_primitive/mod.rs", "rank": 0, "score": 204556.2822712138 }, { "content": "/// Run a function over each metric and collect the result into a container.\n\n///\n\n/// Due to the underlying API limitations of evmap this is the only way to\n\n/// introspect existing metrics.\n\npub fn for_each_metric<C, F, R>(func: F) -> C\n\nwhere\n\n C: std::iter::FromIterator<R>,\n\n F: FnMut(&str, &MetricInstance) -> R,\n\n{\n\n match State::get() {\n\n Some(state) => state.for_each_metric(func),\n\n None => C::from_iter(std::iter::empty()),\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub mod export {\n\n use super::*;\n\n\n\n pub fn create_metadata(attributes: &'static [(&'static str, &'static str)]) -> Metadata {\n\n Metadata::new(attributes)\n\n }\n\n\n\n pub fn current_time() -> Instant {\n", "file_path": "metrics-core/src/lib.rs", "rank": 2, "score": 169413.1958180343 }, { "content": "/// Set the error function.\n\n///\n\n/// Due to the impracticality of having every single metric return a `Result`\n\n/// this library instead opts to have an internal error function that is called\n\n/// whenever an error occurs.\n\n///\n\n/// The default error function will log a warning when an error occurrs.\n\npub fn set_error_fn(err_fn: impl Fn(MetricError) + Send + Sync + 'static) {\n\n use std::sync::Arc;\n\n\n\n State::get_force().set_error_fn(Arc::new(err_fn));\n\n}\n\n\n", "file_path": "metrics-core/src/lib.rs", "rank": 3, "score": 163608.4662251863 }, { "content": "fn default_tcp_nodelay() -> bool {\n\n false\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 4, "score": 157964.5572163189 }, { "content": "fn default_soft_timeout() -> bool {\n\n false\n\n}\n\n\n\n#[derive(Copy, Clone, Deserialize, Debug)]\n\n#[serde(rename_all = \"snake_case\")]\n\n#[serde(deny_unknown_fields)]\n\npub enum Protocol {\n\n Memcache,\n\n PelikanRds,\n\n Ping,\n\n Echo,\n\n RedisResp,\n\n RedisInline,\n\n ThriftCache,\n\n}\n\n\n\nimpl Default for Protocol {\n\n fn default() -> Protocol {\n\n Protocol::Memcache\n\n }\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 5, "score": 157964.5572163189 }, { "content": "fn current_time(b: &mut Bencher) {\n\n b.iter(|| Instant::now());\n\n}\n\n\n", "file_path": "metrics-core/benches/benches.rs", "rank": 6, "score": 152372.15174476404 }, { "content": "fn std_current_time(b: &mut Bencher) {\n\n b.iter(|| std::time::Instant::now());\n\n}\n\n\n", "file_path": "metrics-core/benches/benches.rs", "rank": 7, "score": 149090.18496421984 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicBoolVisitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicBoolVisitor {\n\n type Value = AtomicBool;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a boolean value\")\n\n }\n\n\n\n fn visit_bool<E>(self, value: bool) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(AtomicBool::new(value))\n\n }\n\n}\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Deserialize<'de> for AtomicBool {\n", "file_path": 
"atomics/src/atomic_primitive/atomic_bool.rs", "rank": 8, "score": 148184.1765478906 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicI32Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicI32Visitor {\n\n type Value = AtomicI32;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a signed 32bit integer\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(Self::Value::new(i32::from(value)))\n\n }\n\n\n\n fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n", "file_path": "atomics/src/atomic_primitive/atomic_i32.rs", "rank": 9, "score": 148151.23162862283 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicI64Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicI64Visitor {\n\n type Value = AtomicI64;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a signed 64bit integer\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(Self::Value::new(i64::from(value)))\n\n }\n\n\n\n fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n", "file_path": "atomics/src/atomic_primitive/atomic_i64.rs", "rank": 10, "score": 148151.23162862283 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicI16Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicI16Visitor {\n\n type Value = AtomicI16;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a signed 16bit integer\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(Self::Value::new(i16::from(value)))\n\n }\n\n\n\n fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n", "file_path": "atomics/src/atomic_primitive/atomic_i16.rs", "rank": 11, "score": 148144.63090984258 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicI8Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicI8Visitor {\n\n type Value = AtomicI8;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a signed 8bit integer\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(Self::Value::new(value))\n\n }\n\n\n\n fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n", "file_path": "atomics/src/atomic_primitive/atomic_i8.rs", "rank": 12, "score": 148144.63090984258 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicU32Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicU32Visitor {\n\n type Value = AtomicU32;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"an unsigned 32bit integer\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n use std::convert::TryFrom;\n\n if let Ok(value) = u32::try_from(value) {\n\n Ok(Self::Value::new(value))\n\n } else {\n\n Err(E::custom(format!(\"u32 is out of range: {}\", value)))\n\n }\n", "file_path": "atomics/src/atomic_primitive/atomic_u32.rs", "rank": 13, "score": 
148144.63090984258 }, { "content": "/// This trait is used to define the functions which are available on types\n\n/// which may be used as atomic primitives, allowing for them to be used as\n\n/// generic types.\n\npub trait AtomicPrimitive: Send + Sync + Debug + PartialEq {\n\n type Primitive;\n\n\n\n /// Create a new `AtomicPrimitive` from a primitive type\n\n fn new(value: Self::Primitive) -> Self;\n\n\n\n /// Get a mutable reference to the underlying primitive\n\n ///\n\n /// This is safe because the mutable reference guarantees no other threads\n\n /// are concurrently accessing the atomic data\n\n fn get_mut(&mut self) -> &mut Self::Primitive;\n\n\n\n /// Consumes the `AtomicPrimitive` and returns the underlying primitive\n\n ///\n\n /// This is safe because passing `self` by value guarantees that no other\n\n /// threads are concurrently accessing the atomic data\n\n fn into_inner(self) -> Self::Primitive;\n\n\n\n /// Loads the value from the `AtomicPrimitive`\n\n ///\n", "file_path": "atomics/src/atomic_primitive/mod.rs", "rank": 14, "score": 147419.61277041136 }, { "content": "/// Unregister an existing metric.\n\n///\n\n/// If there is no such metric returns an error.\n\npub fn unregister_metric(name: impl AsRef<str>) -> Result<(), UnregisterError> {\n\n match State::get() {\n\n Some(state) => state.unregister_metric(name.as_ref()),\n\n None => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "metrics-core/src/lib.rs", "rank": 15, "score": 144200.9332492353 }, { "content": "/// A trait that describes all functionality of a `Counter`\n\npub trait Counter: Default + AtomicCounter + AtomicPrimitive\n\nwhere\n\n <Self as AtomicPrimitive>::Primitive: Default + PartialEq + Copy + Saturating,\n\n{\n\n /// Convenience function to do a relaxed read\n\n fn get(&self) -> Self::Primitive {\n\n self.load(Ordering::Relaxed)\n\n }\n\n\n\n /// Convenience function to do a squentially consistent write\n\n fn set(&self, value: Self::Primitive) {\n\n self.store(value, Ordering::SeqCst)\n\n }\n\n\n\n /// Convenience function to do a relaxed wrapping add\n\n fn add(&self, value: Self::Primitive) -> Self::Primitive {\n\n self.fetch_add(value, Ordering::Relaxed)\n\n }\n\n\n\n /// Convenience function to do a relaxed wrapping sub\n", "file_path": "datastructures/src/counter/mod.rs", "rank": 16, "score": 143949.89580097693 }, { "content": "pub fn runner(runtime: f64, measurement_type: MeasurementType, label: String) {\n\n for single_channel in [true, false].iter() {\n\n for i in [1, 2, 4, 8, 16, 32, 64].iter() {\n\n timed_run(\n\n *i,\n\n runtime,\n\n measurement_type,\n\n *single_channel,\n\n format!(\"{} (threads: {})\", label, i),\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "metrics/examples/benchmarks.rs", "rank": 17, "score": 140072.30928462802 }, { "content": "fn default_value_class() -> Class {\n\n Class::Alphanumeric\n\n}\n\n\n\nimpl Value {\n\n pub fn length(&self) -> usize {\n\n self.length\n\n }\n\n\n\n pub fn weight(&self) -> usize {\n\n self.weight\n\n }\n\n}\n\n\n\nimpl Config {\n\n /// parse command line options and return `Config`\n\n pub fn new() -> Config {\n\n let app = App::new(NAME)\n\n .version(VERSION)\n\n .author(\"Brian Martin <bmartin@twitter.com>\")\n", "file_path": "rpc-perf/src/config/mod.rs", "rank": 18, "score": 130129.06591399411 }, { "content": "pub fn main() {\n\n println!(\"A simple demo of the logger\");\n\n\n\n logger::Logger::new()\n\n .label(\"demo\")\n\n .level(logger::Level::Trace)\n\n .init()\n\n .expect(\"Failed to initialize logger\");\n\n trace!(\"Some tracing 
message\");\n\n debug!(\"Some debugging message\");\n\n info!(\"Just some general info\");\n\n warn!(\"You might want to know this\");\n\n error!(\"You need to know this\");\n\n fatal!(\"Something really bad happened! Terminating program\");\n\n // code below would be unreachable\n\n}\n", "file_path": "logger/examples/demo.rs", "rank": 19, "score": 129501.40103116428 }, { "content": "pub fn main() {\n\n let runtime = 5.0;\n\n\n\n runner(\n\n runtime,\n\n Structure::Counter,\n\n Operation::Increment,\n\n \"Counter Incr/s\".to_string(),\n\n );\n\n runner(\n\n runtime,\n\n Structure::Histogram,\n\n Operation::Increment,\n\n \"Histogram Incr/s\".to_string(),\n\n );\n\n runner(\n\n runtime,\n\n Structure::Histogram,\n\n Operation::Percentile,\n\n \"Histogram Percentile/s\".to_string(),\n\n );\n\n}\n\n\n", "file_path": "datastructures/examples/benchmarks.rs", "rank": 20, "score": 129501.40103116428 }, { "content": "pub fn main() {\n\n let runtime = 10.0;\n\n\n\n runner(runtime, MeasurementType::Counter, \"Counter\".to_string());\n\n runner(\n\n runtime,\n\n MeasurementType::Distribution,\n\n \"Distribution\".to_string(),\n\n );\n\n runner(runtime, MeasurementType::Gauge, \"Gauge\".to_string());\n\n runner(runtime, MeasurementType::Increment, \"Increment\".to_string());\n\n runner(\n\n runtime,\n\n MeasurementType::TimeInterval,\n\n \"Time Interval\".to_string(),\n\n );\n\n}\n\n\n", "file_path": "metrics/examples/benchmarks.rs", "rank": 21, "score": 129501.40103116428 }, { "content": "pub fn timed_run(\n\n threads: usize,\n\n runtime: f64,\n\n measurement_type: MeasurementType,\n\n single_channel: bool,\n\n label: String,\n\n) {\n\n let max = 100_000;\n\n let duration = sized_run(threads, max, measurement_type, single_channel);\n\n let rate = max as f64 / duration;\n\n let max = (runtime * rate) as usize;\n\n let duration = sized_run(threads, max, measurement_type, single_channel);\n\n let rate = max as f64 / duration;\n\n println!(\n\n \"{} (single channel: {}): {:.2e} updates/s\",\n\n label, single_channel, rate\n\n );\n\n}\n\n\n", "file_path": "metrics/examples/benchmarks.rs", "rank": 22, "score": 126866.82110998119 }, { "content": "pub fn main() {\n\n let config = config::Config::new();\n\n\n\n Logger::new()\n\n .label(\"rpc_perf\")\n\n .level(config.logging())\n\n .init()\n\n .expect(\"Failed to initialize logger\");\n\n\n\n let metrics = Metrics::new(&config);\n\n\n\n stats::register_stats(&metrics);\n\n\n\n let mut stats_stdout = stats::StandardOut::new(metrics.clone(), config.interval() as u64);\n\n\n\n let readings = Arc::new(Mutex::new(Vec::<Reading>::new()));\n\n if let Some(stats_listen) = config.listen() {\n\n let mut stats_http = stats::Http::new(stats_listen, metrics.clone());\n\n let _ = thread::Builder::new()\n\n .name(\"http\".to_string())\n", "file_path": "rpc-perf/src/main.rs", "rank": 23, "score": 126866.82110998119 }, { "content": "pub fn sized_run(\n\n threads: usize,\n\n max: usize,\n\n measurement_type: MeasurementType,\n\n single_channel: bool,\n\n) -> f64 {\n\n let metrics = Arc::new(Metrics::<AtomicU64>::new());\n\n let mut thread_pool = Vec::new();\n\n let t0 = time::Instant::now();\n\n for id in 0..threads {\n\n let metrics = metrics.clone();\n\n let id = if !single_channel { id } else { 0 };\n\n let statistic = match measurement_type {\n\n MeasurementType::Counter => TestStat::Counter(id),\n\n MeasurementType::Distribution => TestStat::Distribution(id),\n\n MeasurementType::Gauge => TestStat::Gauge(id),\n\n MeasurementType::Increment => TestStat::Counter(id),\n\n 
MeasurementType::TimeInterval => TestStat::TimeInterval(id),\n\n };\n\n metrics.register(&statistic, Some(Summary::histogram(2_000_000_000, 3, None)));\n", "file_path": "metrics/examples/benchmarks.rs", "rank": 24, "score": 126866.82110998119 }, { "content": "pub fn sized_run(\n\n threads: usize,\n\n max: usize,\n\n structure: Structure,\n\n operation: Operation,\n\n contended: bool,\n\n) -> f64 {\n\n let mut thread_pool = Vec::new();\n\n let t0 = time::Instant::now();\n\n match structure {\n\n Structure::Counter => {\n\n if contended {\n\n let counter = Arc::new(AtomicU64::default());\n\n for _ in 0..threads {\n\n let counter = counter.clone();\n\n match operation {\n\n Operation::Increment => {\n\n thread_pool.push(thread::spawn(move || {\n\n for _ in 0..(max / threads) {\n\n counter.add(1);\n", "file_path": "datastructures/examples/benchmarks.rs", "rank": 25, "score": 126866.82110998119 }, { "content": "pub fn timed_run(\n\n threads: usize,\n\n runtime: f64,\n\n structure: Structure,\n\n operation: Operation,\n\n single_channel: bool,\n\n label: String,\n\n) {\n\n let max = 100_000;\n\n let duration = sized_run(threads, max, structure, operation, single_channel);\n\n let rate = max as f64 / duration;\n\n let max = (runtime * rate) as usize;\n\n let duration = sized_run(threads, max, structure, operation, single_channel);\n\n let rate = max as f64 / duration;\n\n println!(\n\n \"{} (contended: {}): {:.2e} ops\",\n\n label, single_channel, rate\n\n );\n\n}\n\n\n", "file_path": "datastructures/examples/benchmarks.rs", "rank": 26, "score": 126866.82110998119 }, { "content": "/// Register a new counter.\n\n///\n\n/// If a metric has already been registered under the same name, then it will\n\n/// return an error.\n\npub fn register_counter(\n\n name: impl Into<Cow<'static, str>>,\n\n counter: impl Into<DynCow<'static, dyn Counter>>,\n\n metadata: Metadata,\n\n) -> Result<(), RegisterError> {\n\n State::get_force().register_metric(name.into(), Metric::Counter(counter.into()), metadata)\n\n}\n\n\n", "file_path": "metrics-core/src/lib.rs", "rank": 27, "score": 124401.97958721408 }, { "content": "/// Register a new summary.\n\n///\n\n/// If a metric has already been registered under the same name, then it will\n\n/// return an error.\n\npub fn register_summary(\n\n name: impl Into<Cow<'static, str>>,\n\n summary: impl Into<DynCow<'static, dyn Summary>>,\n\n metadata: Metadata,\n\n) -> Result<(), RegisterError> {\n\n State::get_force().register_metric(name.into(), Metric::Summary(summary.into()), metadata)\n\n}\n\n\n", "file_path": "metrics-core/src/lib.rs", "rank": 28, "score": 124401.97958721408 }, { "content": "/// Register a new gauge.\n\n///\n\n/// If a metric has already been registered under the same name, then it will\n\n/// return an error.\n\npub fn register_gauge(\n\n name: impl Into<Cow<'static, str>>,\n\n gauge: impl Into<DynCow<'static, dyn Gauge>>,\n\n metadata: Metadata,\n\n) -> Result<(), RegisterError> {\n\n State::get_force().register_metric(name.into(), Metric::Gauge(gauge.into()), metadata)\n\n}\n\n\n", "file_path": "metrics-core/src/lib.rs", "rank": 29, "score": 124401.97958721408 }, { "content": "fn bench_all(b: &mut Criterion) {\n\n b.bench_function(\"mutex_lock\", mutex_lock);\n\n b.bench_function(\"atomic_add\", atomic_add);\n\n b.bench_function(\"thread_id\", thread_id);\n\n b.bench_function(\"current_time\", current_time);\n\n b.bench_function(\"std_current_time\", std_current_time);\n\n b.bench_function(\"set_noop_metric\", set_noop_metric);\n\n 
b.bench_function(\"increment_counter\", increment_counter);\n\n b.bench_function(\"noop_metric_external_counter\", noop_metric_external_counter);\n\n}\n\n\n\ncriterion_group!(\n\n name = benches;\n\n config = Criterion::default();\n\n targets = bench_all\n\n);\n\n\n\ncriterion_main!(benches);\n", "file_path": "metrics-core/benches/benches.rs", "rank": 30, "score": 117467.63018979205 }, { "content": "pub fn simulate(shape: Shape) {\n\n println!(\"simulating for {:?}\", shape);\n\n let duration = 60;\n\n\n\n let heatmap = Heatmap::<AtomicU64>::new(SECOND, 3, SECOND, duration * SECOND);\n\n\n\n let cauchy = Cauchy::new(200_000.0, 25_000.0).unwrap();\n\n let normal = Normal::new(200_000.0, 25_000.0).unwrap();\n\n let uniform = Uniform::new_inclusive(175_000.0, 225_000.0);\n\n\n\n let start = std::time::Instant::now();\n\n let mut latch = std::time::Instant::now();\n\n\n\n let mut rng = thread_rng();\n\n\n\n loop {\n\n let now = std::time::Instant::now();\n\n if now - start >= std::time::Duration::new(duration, 0) {\n\n break;\n\n }\n", "file_path": "waterfall/examples/simulator.rs", "rank": 31, "score": 117353.89946040297 }, { "content": "fn decode_incomplete_benchmark(c: &mut Criterion) {\n\n redis_decode_benchmark(c, \"redis decode incomplete\", b\"$7\\r\\nHELLO\\r\\n\");\n\n}\n\n\n", "file_path": "codec/benches/redis.rs", "rank": 32, "score": 115148.57317689797 }, { "content": "fn decode_ok_benchmark(c: &mut Criterion) {\n\n ping_decode_benchmark(c, \"ping decode ok\", b\"+PONG\\r\\n\");\n\n}\n\n\n", "file_path": "codec/benches/ping.rs", "rank": 33, "score": 115148.57317689797 }, { "content": "fn decode_unknown_benchmark(c: &mut Criterion) {\n\n ping_decode_benchmark(c, \"ping decode unknown\", b\"+PONG\\r\\nDEADBEEF\\r\\n\");\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n decode_incomplete_benchmark,\n\n decode_ok_benchmark,\n\n decode_unknown_benchmark,\n\n encode_ping_benchmark,\n\n);\n\ncriterion_main!(benches);\n", "file_path": "codec/benches/ping.rs", "rank": 34, "score": 115148.57317689797 }, { "content": "fn encode_get_benchmark(c: &mut Criterion) {\n\n let codec = Memcache::new();\n\n let mut buf = BytesMut::new();\n\n c.bench_function(\"memcache encode get\", move |b| {\n\n b.iter(|| {\n\n codec.get(&mut buf, b\"0\");\n\n buf.clear();\n\n })\n\n });\n\n}\n\n\n", "file_path": "codec/benches/memcache.rs", "rank": 35, "score": 115148.57317689797 }, { "content": "fn encode_ping_benchmark(c: &mut Criterion) {\n\n let codec = Ping::new();\n\n\n\n c.bench_function(\"ping encode\", move |b| {\n\n b.iter(|| {\n\n let mut buf = BytesMut::new();\n\n codec.ping(&mut buf);\n\n })\n\n });\n\n}\n\n\n", "file_path": "codec/benches/ping.rs", "rank": 36, "score": 115148.57317689797 }, { "content": "fn decode_hit_benchmark(c: &mut Criterion) {\n\n memcache_decode_benchmark(\n\n c,\n\n \"memcache decode hit\",\n\n b\"VALUE 0 0 8\\r\\nDEADBEEF\\r\\nEND\\r\\n\",\n\n );\n\n}\n\n\n", "file_path": "codec/benches/memcache.rs", "rank": 37, "score": 115148.57317689797 }, { "content": "fn atomic_add(b: &mut Bencher) {\n\n let ctr = AtomicU64::new(0);\n\n\n\n b.iter(|| ctr.fetch_add(37, Ordering::Relaxed));\n\n}\n\n\n", "file_path": "metrics-core/benches/benches.rs", "rank": 38, "score": 115148.57317689797 }, { "content": "fn decode_ok_benchmark(c: &mut Criterion) {\n\n redis_decode_benchmark(c, \"redis decode ok\", b\"+OK\\r\\n\");\n\n}\n\n\n", "file_path": "codec/benches/redis.rs", "rank": 39, "score": 115148.57317689797 }, { "content": "fn decode_incomplete_benchmark(c: &mut Criterion) {\n\n 
ping_decode_benchmark(c, \"ping decode incomplete\", b\"+PONG\");\n\n}\n\n\n", "file_path": "codec/benches/ping.rs", "rank": 40, "score": 115148.57317689797 }, { "content": "fn decode_miss_benchmark(c: &mut Criterion) {\n\n memcache_decode_benchmark(c, \"memcache decode miss\", b\"NOT_FOUND\\r\\n\");\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n decode_hit_benchmark,\n\n decode_incomplete_benchmark,\n\n decode_miss_benchmark,\n\n decode_ok_benchmark,\n\n encode_get_benchmark,\n\n encode_set_benchmark,\n\n);\n\ncriterion_main!(benches);\n", "file_path": "codec/benches/memcache.rs", "rank": 41, "score": 115148.57317689797 }, { "content": "fn decode_incomplete_benchmark(c: &mut Criterion) {\n\n echo_decode_benchmark(c, \"echo decode incomplete\", b\"\");\n\n}\n\n\n", "file_path": "codec/benches/echo.rs", "rank": 42, "score": 115148.57317689797 }, { "content": "fn decode_hit_benchmark(c: &mut Criterion) {\n\n redis_decode_benchmark(c, \"redis decode hit\", b\"$8\\r\\nDEADBEEF\\r\\n\");\n\n}\n\n\n", "file_path": "codec/benches/redis.rs", "rank": 43, "score": 115148.57317689797 }, { "content": "fn encode_echo_benchmark(c: &mut Criterion) {\n\n let codec = Echo::new();\n\n c.bench_function(\"echo encode\", move |b| {\n\n b.iter(|| codec.echo(&mut BytesMut::new(), b\"0\"))\n\n });\n\n}\n\n\n", "file_path": "codec/benches/echo.rs", "rank": 44, "score": 115148.57317689797 }, { "content": "fn encode_set_benchmark(c: &mut Criterion) {\n\n let codec = Memcache::new();\n\n let mut buf = BytesMut::new();\n\n c.bench_function(\"memcache encode set\", move |b| {\n\n b.iter(|| {\n\n codec.set(&mut buf, b\"0\", b\"0\", None, None);\n\n buf.clear();\n\n })\n\n });\n\n}\n\n\n", "file_path": "codec/benches/memcache.rs", "rank": 45, "score": 115148.57317689797 }, { "content": "fn decode_miss_benchmark(c: &mut Criterion) {\n\n redis_decode_benchmark(c, \"redis decode miss\", b\"$-1\\r\\n\");\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n decode_hit_benchmark,\n\n decode_incomplete_benchmark,\n\n decode_miss_benchmark,\n\n decode_ok_benchmark,\n\n encode_inline_get_benchmark,\n\n encode_inline_set_benchmark,\n\n encode_resp_get_benchmark,\n\n encode_resp_set_benchmark,\n\n);\n\ncriterion_main!(benches);\n", "file_path": "codec/benches/redis.rs", "rank": 46, "score": 115148.57317689797 }, { "content": "fn decode_incomplete_benchmark(c: &mut Criterion) {\n\n memcache_decode_benchmark(\n\n c,\n\n \"memcache decode incomplete\",\n\n b\"VALUE 0 0 0\\r\\nSOME DATA GOES HERE\\r\\n\",\n\n );\n\n}\n\n\n", "file_path": "codec/benches/memcache.rs", "rank": 47, "score": 115148.57317689797 }, { "content": "fn decode_ok_benchmark(c: &mut Criterion) {\n\n memcache_decode_benchmark(c, \"memcache decode ok\", b\"OK\\r\\n\");\n\n}\n\n\n", "file_path": "codec/benches/memcache.rs", "rank": 48, "score": 115148.57317689797 }, { "content": "fn decode_ok_benchmark(c: &mut Criterion) {\n\n echo_decode_benchmark(c, \"echo decode ok\", &[0, 1, 2, 8, 84, 137, 127, 13, 10]);\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n decode_error_benchmark,\n\n decode_incomplete_benchmark,\n\n decode_ok_benchmark,\n\n encode_echo_benchmark,\n\n);\n\ncriterion_main!(benches);\n", "file_path": "codec/benches/echo.rs", "rank": 49, "score": 115148.57317689797 }, { "content": "fn decode_error_benchmark(c: &mut Criterion) {\n\n echo_decode_benchmark(c, \"echo decode error\", b\"3421780262\\r\\n\");\n\n}\n\n\n", "file_path": "codec/benches/echo.rs", "rank": 50, "score": 115148.57317689797 }, { "content": "fn mutex_lock(b: &mut Bencher) {\n\n let mutex = 
std::sync::Mutex::new(());\n\n\n\n b.iter(|| mutex.lock());\n\n}\n\n\n", "file_path": "metrics-core/benches/benches.rs", "rank": 51, "score": 115148.57317689797 }, { "content": "fn increment_counter(bench: &mut Bencher) {\n\n let counter = AtomicCounter::new();\n\n let _scoped =\n\n unsafe { ScopedMetric::counter(\"test.metric\", &counter, Metadata::empty()).unwrap() };\n\n\n\n bench.iter(|| {\n\n increment!(\"test.metric\", MetricValue::Unsigned(10));\n\n })\n\n}\n\n\n", "file_path": "metrics-core/benches/benches.rs", "rank": 52, "score": 112966.57375583716 }, { "content": "fn encode_inline_set_benchmark(c: &mut Criterion) {\n\n let codec = Redis::new(RedisMode::Inline);\n\n let mut buf = BytesMut::new();\n\n c.bench_function(\"redis inline encode set\", move |b| {\n\n b.iter(|| {\n\n codec.set(&mut buf, b\"0\", b\"0\", None);\n\n buf.clear();\n\n })\n\n });\n\n}\n\n\n", "file_path": "codec/benches/redis.rs", "rank": 53, "score": 112966.57375583716 }, { "content": "fn thread_id(bencher: &mut Bencher) {\n\n bencher.iter(|| std::thread::current().id())\n\n}\n\n\n", "file_path": "metrics-core/benches/benches.rs", "rank": 54, "score": 112966.57375583716 }, { "content": "fn encode_inline_get_benchmark(c: &mut Criterion) {\n\n let codec = Redis::new(RedisMode::Inline);\n\n let mut buf = BytesMut::new();\n\n c.bench_function(\"redis inline encode get\", move |b| {\n\n b.iter(|| {\n\n codec.get(&mut buf, b\"0\");\n\n buf.clear();\n\n })\n\n });\n\n}\n\n\n", "file_path": "codec/benches/redis.rs", "rank": 55, "score": 112966.57375583716 }, { "content": "fn encode_resp_set_benchmark(c: &mut Criterion) {\n\n let codec = Redis::new(RedisMode::Resp);\n\n let mut buf = BytesMut::new();\n\n c.bench_function(\"redis resp encode set\", move |b| {\n\n b.iter(|| {\n\n codec.set(&mut buf, b\"0\", b\"0\", None);\n\n buf.clear();\n\n })\n\n });\n\n}\n\n\n", "file_path": "codec/benches/redis.rs", "rank": 56, "score": 112966.57375583716 }, { "content": "fn encode_resp_get_benchmark(c: &mut Criterion) {\n\n let codec = Redis::new(RedisMode::Resp);\n\n let mut buf = BytesMut::new();\n\n c.bench_function(\"redis resp encode get\", move |b| {\n\n b.iter(|| {\n\n codec.get(&mut buf, b\"0\");\n\n buf.clear();\n\n })\n\n });\n\n}\n\n\n", "file_path": "codec/benches/redis.rs", "rank": 57, "score": 112966.57375583716 }, { "content": "fn set_noop_metric(bench: &mut Bencher) {\n\n let counter = Noop;\n\n let _scoped =\n\n unsafe { ScopedMetric::counter(\"test.noop\", &counter, Metadata::empty()).unwrap() };\n\n\n\n bench.iter(|| {\n\n value!(\"test.noop\", MetricValue::Unsigned(56));\n\n })\n\n}\n\n\n", "file_path": "metrics-core/benches/benches.rs", "rank": 58, "score": 110909.8303925602 }, { "content": "pub fn register_stats(metrics: &Metrics) {\n\n for statistic in &[\n\n Stat::CommandsDelete,\n\n Stat::CommandsGet,\n\n Stat::CommandsRange,\n\n Stat::CommandsSet,\n\n Stat::KeySize,\n\n Stat::ValueSize,\n\n Stat::Window,\n\n Stat::RequestsEnqueued,\n\n Stat::RequestsDequeued,\n\n Stat::ConnectionsTotal,\n\n Stat::ConnectionsOpened,\n\n Stat::ConnectionsClosed,\n\n Stat::ConnectionsError,\n\n Stat::ConnectionsClientClosed,\n\n Stat::ConnectionsServerClosed,\n\n Stat::ConnectionsTimeout,\n\n Stat::ResponsesTotal,\n\n Stat::ResponsesOk,\n", "file_path": "rpc-perf/src/stats/mod.rs", "rank": 59, "score": 110797.035292045 }, { "content": "fn noop_metric_external_counter(bench: &mut Bencher) {\n\n let counter = Noop;\n\n let _scoped = unsafe {\n\n ScopedMetric::counter(\"test.noop.external-time\", &counter, 
Metadata::empty()).unwrap()\n\n };\n\n\n\n let time = Instant::now();\n\n\n\n bench.iter(|| {\n\n value!(\n\n \"test.noop.external-time\",\n\n MetricValue::Unsigned(56),\n\n time = time\n\n )\n\n })\n\n}\n\n\n", "file_path": "metrics-core/benches/benches.rs", "rank": 60, "score": 108967.8586656911 }, { "content": "/// Describes a counter as holding only unsigned integer values\n\npub trait UnsignedCounterPrimitive: CounterPrimitive {}\n\n\n\nimpl CounterPrimitive for i8 {\n\n fn saturating_add(self, rhs: Self) -> Self {\n\n self.saturating_add(rhs)\n\n }\n\n fn saturating_sub(self, rhs: Self) -> Self {\n\n self.saturating_sub(rhs)\n\n }\n\n}\n\n\n\nimpl CounterPrimitive for i16 {\n\n fn saturating_add(self, rhs: Self) -> Self {\n\n self.saturating_add(rhs)\n\n }\n\n fn saturating_sub(self, rhs: Self) -> Self {\n\n self.saturating_sub(rhs)\n\n }\n\n}\n\n\n", "file_path": "datastructures/src/counter/counter_primitive/mod.rs", "rank": 61, "score": 106423.26859405408 }, { "content": "fn default_error_fn(err: MetricError) {\n\n warn!(\"A metric error occurred: {}\", err);\n\n}\n\n\n", "file_path": "metrics-core/src/state.rs", "rank": 62, "score": 101646.68708926413 }, { "content": "pub fn render(shape: Shape, heatmap: Heatmap<AtomicU64>) {\n\n let mut labels = HashMap::new();\n\n labels.insert(100, \"100ns\".to_string());\n\n labels.insert(200, \"200ns\".to_string());\n\n labels.insert(400, \"400ns\".to_string());\n\n labels.insert(1_000, \"1us\".to_string());\n\n labels.insert(2_000, \"2us\".to_string());\n\n labels.insert(4_000, \"4us\".to_string());\n\n labels.insert(10_000, \"10us\".to_string());\n\n labels.insert(20_000, \"20us\".to_string());\n\n labels.insert(40_000, \"40us\".to_string());\n\n labels.insert(100_000, \"100us\".to_string());\n\n labels.insert(200_000, \"200us\".to_string());\n\n labels.insert(400_000, \"400us\".to_string());\n\n labels.insert(1_000_000, \"1ms\".to_string());\n\n labels.insert(2_000_000, \"2ms\".to_string());\n\n labels.insert(4_000_000, \"4ms\".to_string());\n\n labels.insert(10_000_000, \"10ms\".to_string());\n\n labels.insert(20_000_000, \"20ms\".to_string());\n\n labels.insert(40_000_000, \"40ms\".to_string());\n", "file_path": "waterfall/examples/simulator.rs", "rank": 63, "score": 101453.04349543562 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicU64Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicU64Visitor {\n\n type Value = AtomicU64;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"an unsigned 64bit integer\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n use std::convert::TryFrom;\n\n if let Ok(value) = u64::try_from(value) {\n\n Ok(Self::Value::new(value))\n\n } else {\n\n Err(E::custom(format!(\"u64 is out of range: {}\", value)))\n\n }\n", "file_path": "atomics/src/atomic_primitive/atomic_u64.rs", "rank": 64, "score": 100680.97817146433 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicIsizeVisitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicIsizeVisitor {\n\n type Value = AtomicIsize;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a signed integer matching the pointer width\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(Self::Value::new(isize::from(value)))\n\n }\n\n\n\n fn 
visit_i16<E>(self, value: i16) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n", "file_path": "atomics/src/atomic_primitive/atomic_isize.rs", "rank": 65, "score": 100680.97817146433 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicU16Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicU16Visitor {\n\n type Value = AtomicU16;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"an unsigned 16bit integer\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n use std::convert::TryFrom;\n\n if let Ok(value) = u16::try_from(value) {\n\n Ok(Self::Value::new(value))\n\n } else {\n\n Err(E::custom(format!(\"u16 is out of range: {}\", value)))\n\n }\n", "file_path": "atomics/src/atomic_primitive/atomic_u16.rs", "rank": 66, "score": 100680.97817146433 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicF64Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicF64Visitor {\n\n type Value = AtomicF64;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a 64bit floating point number\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(Self::Value::new(f64::from(value)))\n\n }\n\n\n\n fn visit_i16<E>(self, value: i16) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n", "file_path": "atomics/src/atomic_primitive/atomic_f64.rs", "rank": 67, "score": 100680.97817146433 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicU8Visitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicU8Visitor {\n\n type Value = AtomicU8;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"an unsigned 8bit integer\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n use std::convert::TryFrom;\n\n if let Ok(value) = u8::try_from(value) {\n\n Ok(Self::Value::new(value))\n\n } else {\n\n Err(E::custom(format!(\"u8 is out of range: {}\", value)))\n\n }\n", "file_path": "atomics/src/atomic_primitive/atomic_u8.rs", "rank": 68, "score": 100680.97817146433 }, { "content": "#[cfg(feature = \"serde\")]\n\nstruct AtomicUsizeVisitor;\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl<'de> Visitor<'de> for AtomicUsizeVisitor {\n\n type Value = AtomicUsize;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a signed integer matching the pointer width\")\n\n }\n\n\n\n fn visit_i8<E>(self, value: i8) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n use std::convert::TryFrom;\n\n if let Ok(value) = usize::try_from(value) {\n\n Ok(Self::Value::new(value))\n\n } else {\n\n Err(E::custom(format!(\"usize is out of range: {}\", value)))\n\n }\n", "file_path": "atomics/src/atomic_primitive/atomic_usize.rs", "rank": 69, "score": 100680.97817146433 }, { "content": "fn saturating_inc<T>(loc: &mut T, val: T)\n\nwhere\n\n T: Saturating,\n\n{\n\n *loc = loc.saturating_add(val);\n\n}\n\n\n\nimpl<T> Extend<u64> for DenseDDSketch<T>\n\nwhere\n\n T: Saturating + AddAssign<T> + Default + PartialEq + Copy + Into<u64> + TryFrom<u64>,\n\n <T as TryFrom<u64>>::Error: std::fmt::Debug,\n\n{\n\n fn extend<I: IntoIterator<Item = u64>>(&mut self, iter: I) {\n\n let one = 
T::try_from(1).expect(\"1 is not convertable to T\");\n\n for item in iter {\n\n self.increment(item, one)\n\n }\n\n }\n\n}\n\n\n", "file_path": "datastructures/src/ddsketch/dense.rs", "rank": 70, "score": 99710.53638732119 }, { "content": "fn string_buffer(string: &str, size: f32) -> ImageBuffer<ColorRgb> {\n\n // load font\n\n let font_data = dejavu::sans_mono::regular();\n\n let collection = FontCollection::from_bytes(font_data as &[u8]).unwrap();\n\n let font = collection.into_font().unwrap();\n\n\n\n // size and scaling\n\n let height: f32 = size;\n\n let pixel_height = height.ceil() as usize;\n\n let scale = Scale {\n\n x: height * 1.0,\n\n y: height,\n\n };\n\n\n\n let v_metrics = font.v_metrics(scale);\n\n let offset = point(0.0, v_metrics.ascent);\n\n\n\n let glyphs: Vec<PositionedGlyph> = font.layout(string, scale, offset).collect();\n\n\n\n let width = glyphs\n", "file_path": "waterfall/src/lib.rs", "rank": 71, "score": 99321.3198122853 }, { "content": "fn ping_decode_benchmark(c: &mut Criterion, label: &str, msg: &[u8]) {\n\n let codec = Ping::new();\n\n let mut buf = BytesMut::with_capacity(1024);\n\n buf.extend_from_slice(msg);\n\n let buf = buf.freeze();\n\n c.bench_function(label, move |b| b.iter(|| codec.decode(&buf)));\n\n}\n\n\n", "file_path": "codec/benches/ping.rs", "rank": 72, "score": 97795.95623419395 }, { "content": "fn echo_decode_benchmark(c: &mut Criterion, label: &str, msg: &[u8]) {\n\n let codec = Echo::new();\n\n let mut buf = BytesMut::with_capacity(1024);\n\n buf.extend_from_slice(msg);\n\n let buf = buf.freeze();\n\n c.bench_function(label, move |b| b.iter(|| codec.decode(&buf)));\n\n}\n\n\n", "file_path": "codec/benches/echo.rs", "rank": 73, "score": 97795.95623419395 }, { "content": "fn redis_decode_benchmark(c: &mut Criterion, label: &str, msg: &[u8]) {\n\n let codec = Redis::new(RedisMode::Inline);\n\n let mut buf = BytesMut::with_capacity(1024);\n\n buf.extend_from_slice(msg);\n\n let buf = buf.freeze();\n\n c.bench_function(label, move |b| b.iter(|| codec.decode(&buf)));\n\n}\n\n\n", "file_path": "codec/benches/redis.rs", "rank": 74, "score": 97795.95623419395 }, { "content": "fn memcache_decode_benchmark(c: &mut Criterion, label: &str, msg: &[u8]) {\n\n let codec = Memcache::new();\n\n let mut buf = BytesMut::with_capacity(1024);\n\n buf.extend_from_slice(msg);\n\n let buf = buf.freeze();\n\n c.bench_function(label, move |b| b.iter(|| codec.decode(&buf)));\n\n}\n\n\n", "file_path": "codec/benches/memcache.rs", "rank": 75, "score": 97795.95623419395 }, { "content": "fn default_interval() -> usize {\n\n 60\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 76, "score": 96216.25645235192 }, { "content": "fn default_poolsize() -> usize {\n\n 1\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 77, "score": 96216.25645235192 }, { "content": "fn default_clients() -> usize {\n\n 1\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 78, "score": 96216.25645235192 }, { "content": "/// Render and save a waterfall from a `Heatmap` to a file. You can specify\n\n/// `labels` for the value axis. 
And spacing of labels on the time axis is\n\n/// specified by the `interval` in nanoseconds.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use datastructures::*;\n\n/// use waterfall;\n\n///\n\n/// use std::collections::HashMap;\n\n///\n\n/// // create a heatmap with appropriate configuration for your dataset\n\n/// let heatmap = Heatmap::<AtomicU64>::new(1_000_000, 2, 1_000_000, 5_000_000_000);\n\n///\n\n/// // add data into the heatmap\n\n///\n\n/// // decide on labels and generate waterfall\n\n/// let mut labels = HashMap::new();\n\n/// labels.insert(0, \"0\".to_string());\n\n/// labels.insert(100, \"100\".to_string());\n\n/// labels.insert(1000, \"1000\".to_string());\n\n/// labels.insert(10000, \"10000\".to_string());\n\n/// labels.insert(100000, \"100000\".to_string());\n\n/// waterfall::save_waterfall(&heatmap, \"waterfall.png\", labels, 1_000_000_000);\n\n/// ```\n\npub fn save_waterfall<S: ::std::hash::BuildHasher, T: 'static>(\n\n heatmap: &Heatmap<T>,\n\n file: &str,\n\n labels: HashMap<u64, String, S>,\n\n interval: u64,\n\n) where\n\n T: Counter + Unsigned,\n\n <T as AtomicPrimitive>::Primitive: Default + PartialEq + Copy + Saturating,\n\n u64: From<<T as AtomicPrimitive>::Primitive>,\n\n{\n\n debug!(\"saving waterfall\");\n\n let height = heatmap.slices();\n\n let width = heatmap.buckets();\n\n\n\n // create image buffer\n\n let mut buffer = ImageBuffer::<ColorRgb>::new(width, height);\n\n\n\n let histogram = Histogram::<AtomicU64>::new(heatmap.highest_count(), 3, None, None);\n\n for slice in heatmap {\n\n for b in slice.histogram().into_iter() {\n", "file_path": "waterfall/src/lib.rs", "rank": 79, "score": 95984.03848166042 }, { "content": "fn default_logging_level() -> Level {\n\n Level::Info\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 80, "score": 94030.87045421539 }, { "content": "fn default_request_timeout() -> usize {\n\n 200 * MILLISECOND / MICROSECOND\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 81, "score": 94030.87045421539 }, { "content": "fn default_connect_timeout() -> usize {\n\n 200 * MILLISECOND / MICROSECOND\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 82, "score": 94030.87045421539 }, { "content": "fn default_request_distribution() -> Refill {\n\n Refill::Smooth\n\n}\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 83, "score": 94030.87045421539 }, { "content": "/// This trait is used to define the functions which are available on types\n\n/// which may be used as atomic counters, allowing for them to be used as\n\n/// generic types.\n\npub trait AtomicCounter: AtomicPrimitive\n\nwhere\n\n Self::Primitive: Default + PartialEq + Copy,\n\n{\n\n /// Adds to the current value, returning the previous value.\n\n ///\n\n /// This wraps around on overflow.\n\n ///\n\n /// `fetch_add` take an `Ordering` argument which describes the memory\n\n /// ordering of the operation. All ordering modes are possible. Note that\n\n /// using `Acquire` makes the store part of this operation `Relaxed`, and\n\n /// using `Release` makes the load part of this operation `Relaxed`.\n\n fn fetch_add(&self, value: Self::Primitive, order: Ordering) -> Self::Primitive;\n\n\n\n /// Subtracts from the current value, returning the previous value.\n\n ///\n\n /// This wraps around on overflow.\n\n ///\n\n /// `fetch_sub` take an `Ordering` argument which describes the memory\n\n /// ordering of the operation. All ordering modes are possible. 
Note that\n", "file_path": "atomics/src/atomic_counter/mod.rs", "rank": 84, "score": 92925.21633318454 }, { "content": "pub fn runner(runtime: f64, structure: Structure, operation: Operation, label: String) {\n\n match operation {\n\n Operation::Increment => {\n\n for single_channel in [true, false].iter() {\n\n for i in [1, 2, 4, 8, 16, 32, 64].iter() {\n\n timed_run(\n\n *i,\n\n runtime,\n\n structure,\n\n operation,\n\n *single_channel,\n\n format!(\"{} (threads: {})\", label, i),\n\n );\n\n }\n\n }\n\n }\n\n Operation::Percentile => {\n\n for i in [1, 2, 4, 8, 16, 32, 64].iter() {\n\n timed_run(\n\n *i,\n", "file_path": "datastructures/examples/benchmarks.rs", "rank": 85, "score": 92729.76755421083 }, { "content": "fn default_windows() -> Option<usize> {\n\n Some(5)\n\n}\n\n\n", "file_path": "rpc-perf/src/config/general.rs", "rank": 86, "score": 92277.96627339328 }, { "content": "/// maps a value to a color based on a low point, mid point, and high point\n\n/// values below low will clip to black\n\n/// mid point is the transition between luminosity (black-blue) and hue (blue->red) ramps\n\n/// values above high will clip to red\n\nfn color_from_value(value: u64, low: u64, mid: u64, high: u64) -> ColorRgb {\n\n let hsl = if value < low {\n\n HSL {\n\n h: 250.0,\n\n s: 1.0,\n\n l: 0.0,\n\n }\n\n } else if value < mid {\n\n HSL {\n\n h: 250.0,\n\n s: 1.0,\n\n l: (value as f64 / mid as f64) * 0.5,\n\n }\n\n } else if value < high {\n\n HSL {\n\n h: 250.0 - (250.0 * (value - mid) as f64 / high as f64),\n\n s: 1.0,\n\n l: 0.5,\n\n }\n\n } else {\n", "file_path": "waterfall/src/lib.rs", "rank": 87, "score": 86116.94166890069 }, { "content": "// Copyright 2019 Twitter, Inc.\n\n// Licensed under the Apache License, Version 2.0\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nuse crate::{AtomicPrimitive, Ordering};\n\n\n\n#[cfg(feature = \"serde\")]\n\nuse serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n/// A boolean type which can be safely shared between threads.\n\npub struct AtomicBool {\n\n pub(crate) inner: core::sync::atomic::AtomicBool,\n\n}\n\n\n\nimpl AtomicPrimitive for AtomicBool {\n\n type Primitive = bool;\n\n\n\n fn new(value: Self::Primitive) -> Self {\n\n Self {\n\n inner: core::sync::atomic::AtomicBool::new(value),\n", "file_path": "atomics/src/atomic_primitive/atomic_bool.rs", "rank": 88, "score": 78123.51766367986 }, { "content": " }\n\n\n\n fn compare_exchange_weak(\n\n &self,\n\n current: Self::Primitive,\n\n new: Self::Primitive,\n\n success: Ordering,\n\n failure: Ordering,\n\n ) -> Result<Self::Primitive, Self::Primitive> {\n\n self.inner\n\n .compare_exchange_weak(current, new, success, failure)\n\n }\n\n}\n\n\n\nimpl Default for AtomicBool {\n\n fn default() -> Self {\n\n Self::new(Default::default())\n\n }\n\n}\n\n\n", "file_path": "atomics/src/atomic_primitive/atomic_bool.rs", "rank": 89, "score": 78122.74195423715 }, { "content": " self.inner.swap(value, order)\n\n }\n\n\n\n fn compare_and_swap(\n\n &self,\n\n current: Self::Primitive,\n\n new: Self::Primitive,\n\n order: Ordering,\n\n ) -> Self::Primitive {\n\n self.inner.compare_and_swap(current, new, order)\n\n }\n\n\n\n fn compare_exchange(\n\n &self,\n\n current: Self::Primitive,\n\n new: Self::Primitive,\n\n success: Ordering,\n\n failure: Ordering,\n\n ) -> Result<Self::Primitive, Self::Primitive> {\n\n self.inner.compare_exchange(current, new, success, failure)\n", "file_path": "atomics/src/atomic_primitive/atomic_bool.rs", "rank": 90, "score": 78117.43351413027 }, { 
"content": "impl PartialEq for AtomicBool {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.load(Ordering::SeqCst) == other.load(Ordering::SeqCst)\n\n }\n\n}\n\n\n\nimpl Eq for AtomicBool {}\n\n\n\nimpl std::fmt::Debug for AtomicBool {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{:?}\", self.inner)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"serde\")]\n", "file_path": "atomics/src/atomic_primitive/atomic_bool.rs", "rank": 91, "score": 78110.00401062243 }, { "content": " }\n\n }\n\n\n\n fn get_mut(&mut self) -> &mut Self::Primitive {\n\n self.inner.get_mut()\n\n }\n\n\n\n fn into_inner(self) -> Self::Primitive {\n\n self.inner.into_inner()\n\n }\n\n\n\n fn load(&self, order: Ordering) -> Self::Primitive {\n\n self.inner.load(order)\n\n }\n\n\n\n fn store(&self, value: Self::Primitive, order: Ordering) {\n\n self.inner.store(value, order);\n\n }\n\n\n\n fn swap(&self, value: Self::Primitive, order: Ordering) -> Self::Primitive {\n", "file_path": "atomics/src/atomic_primitive/atomic_bool.rs", "rank": 92, "score": 78106.54496456694 }, { "content": " fn deserialize<D>(deserializer: D) -> Result<AtomicBool, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_bool(AtomicBoolVisitor)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"serde\")]\n\nimpl Serialize for AtomicBool {\n\n #[inline]\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_some(&self.load(Ordering::SeqCst))\n\n }\n\n}\n", "file_path": "atomics/src/atomic_primitive/atomic_bool.rs", "rank": 93, "score": 78100.05832795468 }, { "content": "// Copyright 2019 Twitter, Inc.\n\n// Licensed under the Apache License, Version 2.0\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nuse crate::{AtomicPrimitive, Ordering};\n\n\n\n#[cfg(feature = \"serde\")]\n\nuse serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n/// An integer type which can be safely shared between threads.\n\npub struct AtomicI64 {\n\n pub(crate) inner: core::sync::atomic::AtomicI64,\n\n}\n\n\n\nimpl AtomicPrimitive for AtomicI64 {\n\n type Primitive = i64;\n\n\n\n fn new(value: Self::Primitive) -> Self {\n\n Self {\n\n inner: core::sync::atomic::AtomicI64::new(value),\n", "file_path": "atomics/src/atomic_primitive/atomic_i64.rs", "rank": 95, "score": 78096.06197387804 }, { "content": "// Copyright 2019 Twitter, Inc.\n\n// Licensed under the Apache License, Version 2.0\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n\n\nuse crate::{AtomicPrimitive, Ordering};\n\n\n\n#[cfg(feature = \"serde\")]\n\nuse serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};\n\n\n\n/// An integer type which can be safely shared between threads.\n\npub struct AtomicI32 {\n\n pub(crate) inner: core::sync::atomic::AtomicI32,\n\n}\n\n\n\nimpl AtomicPrimitive for AtomicI32 {\n\n type Primitive = i32;\n\n\n\n fn new(value: Self::Primitive) -> Self {\n\n Self {\n\n inner: core::sync::atomic::AtomicI32::new(value),\n", "file_path": "atomics/src/atomic_primitive/atomic_i32.rs", "rank": 96, "score": 78096.04753155349 }, { "content": " }\n\n\n\n fn compare_exchange_weak(\n\n &self,\n\n current: Self::Primitive,\n\n new: Self::Primitive,\n\n success: Ordering,\n\n failure: Ordering,\n\n ) -> Result<Self::Primitive, Self::Primitive> {\n\n self.inner\n\n .compare_exchange_weak(current, new, success, failure)\n\n }\n\n}\n\n\n\nimpl Default for AtomicI64 {\n\n fn default() -> Self {\n\n Self::new(Default::default())\n\n 
}\n\n}\n\n\n", "file_path": "atomics/src/atomic_primitive/atomic_i64.rs", "rank": 97, "score": 78095.28513037496 }, { "content": " }\n\n\n\n fn compare_exchange_weak(\n\n &self,\n\n current: Self::Primitive,\n\n new: Self::Primitive,\n\n success: Ordering,\n\n failure: Ordering,\n\n ) -> Result<Self::Primitive, Self::Primitive> {\n\n self.inner\n\n .compare_exchange_weak(current, new, success, failure)\n\n }\n\n}\n\n\n\nimpl Default for AtomicI32 {\n\n fn default() -> Self {\n\n Self::new(Default::default())\n\n }\n\n}\n\n\n", "file_path": "atomics/src/atomic_primitive/atomic_i32.rs", "rank": 98, "score": 78095.2755425007 } ]
Rust
rita/src/rita_exit/database/sms.rs
darkdrgn2k/althea_rs
a331a1b73339f770c1d2baa1c1e75e825493ed8f
use crate::rita_exit::database::database_tools::text_sent; use crate::rita_exit::database::database_tools::verify_client; use crate::rita_exit::database::get_exit_info; use crate::rita_exit::database::struct_tools::texts_sent; use althea_types::{ExitClientDetails, ExitClientIdentity, ExitState}; use diesel; use diesel::prelude::*; use failure::Error; use phonenumber::PhoneNumber; use reqwest; use settings::exit::PhoneVerifSettings; use std::time::Duration; #[derive(Serialize)] pub struct SmsCheck { api_key: String, verification_code: String, phone_number: String, country_code: String, } fn check_text(number: String, code: String, api_key: String) -> Result<bool, Error> { trace!("About to check text message status for {}", number); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .get("https://api.authy.com/protected/json/phones/verification/check") .form(&SmsCheck { api_key, verification_code: code, phone_number: number.national().to_string(), country_code: number.code().value().to_string(), }) .send()?; Ok(res.status().is_success()) } #[derive(Serialize)] pub struct SmsRequest { api_key: String, via: String, phone_number: String, country_code: String, } fn send_text(number: String, api_key: String) -> Result<(), Error> { info!("Sending message for {}", number); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .post("https://api.authy.com/protected/json/phones/verification/start") .form(&SmsRequest { api_key, via: "sms".to_string(), phone_number: number.national().to_string(), country_code: number.code().value().to_string(), }) .send()?; if res.status().is_success() { Ok(()) } else { bail!("SMS API failure! Maybe bad number?") } } pub fn handle_sms_registration( client: &ExitClientIdentity, their_record: &exit_db::models::Client, api_key: String, conn: &PgConnection, ) -> Result<ExitState, Error> { trace!("Handling phone registration for {:?}", client); let text_num = texts_sent(their_record); let sent_more_than_allowed_texts = text_num > 10; match ( client.reg_details.phone.clone(), client.reg_details.phone_code.clone(), sent_more_than_allowed_texts, ) { (Some(number), Some(code), true) => { if check_text(number, code, api_key)? { verify_client(&client, true, conn)?; Ok(ExitState::Registered { our_details: ExitClientDetails { client_internal_ip: their_record.internal_ip.parse()?, }, general_details: get_exit_info(), message: "Registration OK".to_string(), }) } else { Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } } (Some(_number), None, true) => Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }), (Some(number), None, false) => { send_text(number, api_key)?; text_sent(&client, &conn, text_num)?; Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } (Some(number), Some(code), false) => { if check_text(number, code, api_key)? 
{ verify_client(&client, true, conn)?; Ok(ExitState::Registered { our_details: ExitClientDetails { client_internal_ip: their_record.internal_ip.parse()?, }, general_details: get_exit_info(), message: "Registration OK".to_string(), }) } else { Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } } (None, _, _) => Ok(ExitState::Denied { message: "This exit requires a phone number to register!".to_string(), }), } } #[derive(Serialize)] pub struct SmsNotification { #[serde(rename = "To")] to: String, #[serde(rename = "From")] from: String, #[serde(rename = "Body")] body: String, } pub fn send_low_balance_sms(number: &str, phone: PhoneVerifSettings) -> Result<(), Error> { info!("Sending low balance message for {}", number); let url = format!( "https://api.twilio.com/2010-04-01/Accounts/{}/Messages.json", phone.twillio_account_id ); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .post(&url) .basic_auth(phone.twillio_account_id, Some(phone.twillio_auth_token)) .form(&SmsNotification { to: number.to_string(), from: phone.notification_number, body: phone.balance_notification_body, }) .send()?; if res.status().is_success() { Ok(()) } else { bail!("SMS API failure! Maybe bad number?") } }
use crate::rita_exit::database::database_tools::text_sent; use crate::rita_exit::database::database_tools::verify_client; use crate::rita_exit::database::get_exit_info; use crate::rita_exit::database::struct_tools::texts_sent; use althea_types::{ExitClientDetails, ExitClientIdentity, ExitState}; use diesel; use diesel::prelude::*; use failure::Error; use phonenumber::PhoneNumber; use reqwest; use settings::exit::PhoneVerifSettings; use std::time::Duration; #[derive(Serialize)] pub struct SmsCheck { api_key: String, verification_code: String, phone_number: String, country_code: String, } fn check_text(number: String, code: String, api_key: String) -> Result<bool, Error> { trace!("About to check text message status for {}", number); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .get("https://api.authy.com/protected/json/phones/verification/check") .form(&SmsCheck { api_key, verification_code: code, phone_number: number.national().to_string(), country_code: number.code().value().to_string(), }) .send()?; Ok(res.status().is_success()) } #[derive(Serialize)] pub struct SmsRequest { api_key: String, via: String, phone_number: String, country_code: String, } fn send_text(number: String, api_key: String) -> Result<(), Error> { info!("Sending message for {}", number); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build()?; let res = client .post("https://api.authy.com/protected/json/phones/verification/start") .form(&SmsRequest { api_key, via: "sms".to_string(), phone_number: number.national().to_string(), country_code: number.code().value().to_string(), }) .send()?; if res.status().is_success() { Ok(()) } else { bail!("SMS API failure! Maybe bad number?") } } pub fn handle_sms_registration( client: &ExitClientIdentity, their_record: &exit_db::models::Client, api_key: String, conn: &PgConnection, ) -> Result<ExitState, Error> { trace!("Handling phone registration for {:?}", client); let text_num = texts_sent(their_record); let sent_more_than_allowed_texts = text_num > 10; match ( client.reg_details.phone.clone(), client.reg_details.phone_code.clone(), sent_more_than_allowed_texts, ) { (Some(number), Some(code), true) => { if check_text(number, code, api_key)? { verify_client(&client, true, conn)?; Ok(ExitState::Registered { our_details: ExitClientDetails { client_internal_ip: their_record.internal_ip.parse()?, }, general_details: get_exit_info(), message: "Registration OK".to_string(), }) } else { Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } } (Some(_number), None, true) => Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }), (Some(number), None, false) => { send_text(number, api_key)?; text_sent(&client, &conn, text_num)?; Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } (Some(number), Some(code), false) => { if check_text(number, code, api_key)? 
{ verify_client(&client, true, conn)?; Ok(ExitState::Registered { our_details: ExitClientDetails { client_internal_ip: their_record.internal_ip.parse()?, }, general_details: get_exit_info(), message: "Registration OK".to_string(), }) } else { Ok(ExitState::Pending { general_details: get_exit_info(), message: "awaiting phone verification".to_string(), email_code: None, phone_code: None, }) } } (None, _, _) => Ok(ExitState::Denied { message: "This exit requires a phone number to register!".to_string(), }), } } #[derive(Serialize)] pub struct SmsNotification { #[serde(rename = "To")] to: String, #[serde(rename = "From")] from: String, #[serde(rename = "Body")] body: String, } pub fn send_low_balance_sms(number: &str, phone: PhoneVerifSettings) -> Result<(), Error> { info!("Sending low balance message for {}", number); let url = format!( "https://api.twilio.com/2010-04-01/Accounts/{}/Messages.json", phone.twillio_account_id ); let number: PhoneNumber = number.parse()?; let client = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .
build()?; let res = client .post(&url) .basic_auth(phone.twillio_account_id, Some(phone.twillio_auth_token)) .form(&SmsNotification { to: number.to_string(), from: phone.notification_number, body: phone.balance_notification_body, }) .send()?; if res.status().is_success() { Ok(()) } else { bail!("SMS API failure! Maybe bad number?") } }
function_block-function_prefix_line
[]
Rust
src/stm32f407.rs
SweGecko/vectrex-cart
8f24cfab5c53c418487d872878d98afc0fdbe5ec
/* * Custom wrappers for some CPU registers */ use volatile_register::{RO, RW, WO}; #[repr(C)] pub struct Syscfg { _fsmc: u32, _pmc: u32, pub exticr1: RW<u16>, _reserved: u16, } const SYSCFG_ADDR: u32 = 0x4001_3800; impl Syscfg { pub fn syscfg() -> &'static mut Syscfg { unsafe { &mut *(SYSCFG_ADDR as *mut Syscfg) } } } #[repr(C)] pub struct Exti { pub imr: RW<u32>, _emr: u32, pub rtsr: RW<u32>, pub ftsr: RW<u32>, _swier: u32, pub pr: RW<u32>, } const EXTI_ADDR: u32 = 0x4001_3C00; impl Exti { pub fn exti() -> &'static mut Exti { unsafe { &mut *(EXTI_ADDR as *mut Exti) } } } #[repr(C)] pub struct Gpio { pub moder0: RW<u16>, pub moder1: RW<u16>, pub otyper0: RW<u8>, pub otyper1: RW<u8>, _reserved1: u16, pub ospeedr0: RW<u16>, pub ospeedr1: RW<u16>, pub pupdr0: RW<u16>, pub pupdr1: RW<u16>, pub idr: RO<u16>, _reserved2: u16, pub odr0: WO<u8>, pub odr1: WO<u8>, _reserved3: u16, pub bsrr: WO<u32>, _lckr: u32, _afrl: u32, _afrh: u32, } impl Gpio { pub fn gpioa() -> &'static mut Gpio { unsafe { &mut *(0x4002_0000 as *mut Gpio) } } pub fn gpiob() -> &'static mut Gpio { unsafe { &mut *(0x4002_0400 as *mut Gpio) } } pub fn gpiod() -> &'static mut Gpio { unsafe { &mut *(0x4002_0C00 as *mut Gpio) } } pub fn gpioe() -> &'static mut Gpio { unsafe { &mut *(0x4002_1000 as *mut Gpio) } } } /* macro_rules! rpt { ( 0; $x:block ) => {}; ( 1; $x:block ) => { $x; }; ( 2; $x:block ) => { $x; $x; }; ( 3; $x:block ) => { rpt!(2; $x); $x; }; ( 4; $x:block) => { rpt!(2; $x); rpt!(2; $x); }; ( 5; $x:block ) => { rpt!(4; $x); $x; }; ( 6 ; $x:block) => { rpt!(4; $x); rpt!(2; $x); }; ( 7 ; $x:block) => { rpt!(6; $x); $x; }; ( 8 ; $x:block) => { rpt!(6; $x); rpt!(2; $x); }; ( 9; $x:block ) => { rpt!(8; $x); $x; }; ( 10; $x:block ) => { rpt!(8; $x); rpt!(2; $x); }; // Repeat $block n*m times (0 <= n <= 10, 0 <= m <= 10) ( $n:tt; $m:tt; $block:block ) => { rpt!($n; { rpt!($m; $block); }); }; } macro_rules! rpt_nop { // Nop n times (0 <= n <= 10) ( $n:tt ) => { rpt!($n; { asm::nop() }) }; // Nop n*m times (0 <= n <= 10, 0 <= m <= 10) ( $n:tt; $m:tt ) => { rpt!($n; $m; { asm::nop() }) }; } */
/* * Custom wrappers for some CPU registers */ use volatile_register::{RO, RW, WO}; #[repr(C)] pub struct Syscfg { _fsmc: u32, _pmc: u32, pub exticr1: RW<u16>, _reserved: u16, } const SYSCFG_ADDR: u32 = 0x4001_3800; impl Syscfg { pub fn syscfg() -> &'static mut Syscfg { unsafe { &mut *(SYSCFG_ADDR as *mut Syscfg) } } } #[repr(C)] pub struct Exti { pub imr: RW<u32>, _emr: u32, pub rtsr: RW<u32>, pub ftsr: RW<u32>, _swier: u32, pub pr: RW<u32>, } const EXTI_ADDR: u32 = 0x4001_3C00; impl Exti { pub fn exti() -> &'static mut Exti { unsafe { &mut *(EXTI_ADDR as *mut Exti) } } } #[repr(C)] pub struct Gpio { pub moder0: RW<u16>, pub moder1: RW<u16>, pub otyper0: RW<u8>, pub otyper1: RW<u8>, _reserved1: u16, pub ospeedr0: RW<u16>, pub ospeedr1: RW<u16>, pub pupdr0: RW<u16>, pub pupdr1: RW<u16>, pub idr: RO<u16>, _reserved2: u16, pub odr0: WO<u8>, pub odr1: WO<u8>, _reserved3: u16, pub bsrr: WO<u32>, _lckr: u32, _afrl: u32, _afrh: u32, } impl Gpio { pub fn gpioa() -> &'static mut Gpio { unsafe { &mut *(0x4002_0000 as *mut Gpio) } } pub fn gpiob() -> &'static mut Gpio { unsafe { &mut *(0x4002_0400 as *mut Gpio) } } pub fn gpiod() -> &'static mut Gpio { unsafe { &mut *(0x4002_0C00 as *mut Gpio) } } pub fn gpioe() -> &'static mut Gpio { unsafe { &mut *(0x4002_1000 as *mut Gpio) } } } /* macro_rules! rpt { ( 0; $x:block )
<= 10, 0 <= m <= 10) ( $n:tt; $m:tt ) => { rpt!($n; $m; { asm::nop() }) }; } */
=> {}; ( 1; $x:block ) => { $x; }; ( 2; $x:block ) => { $x; $x; }; ( 3; $x:block ) => { rpt!(2; $x); $x; }; ( 4; $x:block) => { rpt!(2; $x); rpt!(2; $x); }; ( 5; $x:block ) => { rpt!(4; $x); $x; }; ( 6 ; $x:block) => { rpt!(4; $x); rpt!(2; $x); }; ( 7 ; $x:block) => { rpt!(6; $x); $x; }; ( 8 ; $x:block) => { rpt!(6; $x); rpt!(2; $x); }; ( 9; $x:block ) => { rpt!(8; $x); $x; }; ( 10; $x:block ) => { rpt!(8; $x); rpt!(2; $x); }; // Repeat $block n*m times (0 <= n <= 10, 0 <= m <= 10) ( $n:tt; $m:tt; $block:block ) => { rpt!($n; { rpt!($m; $block); }); }; } macro_rules! rpt_nop { // Nop n times (0 <= n <= 10) ( $n:tt ) => { rpt!($n; { asm::nop() }) }; // Nop n*m times (0 <= n
random
[ { "content": "#[derive(Debug, PartialEq, PartialOrd, Eq, Ord)]\n\nstruct CartHdr {\n\n name: Vec<u8>,\n\n year: u32,\n\n path: PathBuf,\n\n}\n\n\n", "file_path": "build.rs", "rank": 0, "score": 29200.684055384212 }, { "content": "fn main() {\n\n // Put the linker script somewhere the linker can find it\n\n let out_dir = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out_dir.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n\n\n println!(\"cargo:rustc-link-search={}\", out_dir.display());\n\n\n\n // Only re-run the build script when memory.x is changed,\n\n // instead of when any part of the source code changes.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n\n\n // rebuild multicart.bin if changed\n\n println!(\"cargo:rerun-if-changed=multicart/multicart.asm\");\n\n match std::process::Command::new(\"make\")\n\n .args(&[\"-C\", \"multicart\"])\n\n .status()\n\n {\n", "file_path": "build.rs", "rank": 1, "score": 25119.278354810285 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n // Configure LED outputs, using HAL\n\n let p = stm32::Peripherals::take().unwrap();\n\n let cp = Peripherals::take().unwrap();\n\n let gpiod = p.GPIOD.split();\n\n let mut green = gpiod\n\n .pd12\n\n .into_push_pull_output()\n\n .set_speed(Speed::VeryHigh);\n\n let mut blue = gpiod\n\n .pd15\n\n .into_push_pull_output()\n\n .set_speed(Speed::VeryHigh);\n\n\n\n // Enable GPIO clocks (Needed when bypassing the HAL)\n\n use crate::stm32::RCC;\n\n let rcc = unsafe { &(*RCC::ptr()) };\n\n rcc.ahb1enr.modify(|_, w| w.gpioaen().set_bit());\n\n rcc.ahb1enr.modify(|_, w| w.gpioben().set_bit());\n\n rcc.ahb1enr.modify(|_, w| w.gpioden().set_bit());\n", "file_path": "src/main.rs", "rank": 2, "score": 23806.490907423115 }, { "content": "#[interrupt]\n\nfn EXTI1() {\n\n static mut BOOT_TRIGGERED: u8 = 0;\n\n\n\n let gpioa = Gpio::gpioa();\n\n let gpiob = Gpio::gpiob();\n\n let gpiod = Gpio::gpiod();\n\n let gpioe = Gpio::gpioe();\n\n let exti = Exti::exti();\n\n\n\n // unsafe { gpioe.odr1.write(0b11111111) } // Indicate interrupt start\n\n\n\n // Clear interrupt pending bit\n\n unsafe { exti.pr.write(0b00000000_00000000_00000000_00000010) }\n\n\n\n // Decode address lines (mixed between multiple GPIOs\n\n const AMASK_PD: u16 = 0b0000_1111_1100_1111; /* PD0-3 + PD6-11 */\n\n const AMASK_PE: u16 = 0b0000_0000_0011_0000; /* PE4-5 */\n\n const AMASK_PB: u16 = 0b0111_0000_0000_0000; /* PB12-14 */\n\n let addr: u16 = (gpiod.idr.read() & AMASK_PD)\n\n | (gpioe.idr.read() & AMASK_PE)\n", "file_path": "src/main.rs", "rank": 3, "score": 23806.490907423115 }, { "content": "fn parse_cart_header(path: &PathBuf) -> Option<CartHdr> {\n\n if let Ok(file) = File::open(path) {\n\n let mut r = BufReader::new(file);\n\n let mut copyright = Vec::new();\n\n let mut name = Vec::new();\n\n if r.read_until(0x80, &mut copyright).is_err()\n\n || copyright.len() < 5\n\n || &copyright[0..=1] != b\"g \"\n\n {\n\n println!(\n\n \"cargo:warning={:?}: not a cart (© = {:?})?\",\n\n path,\n\n String::from_utf8_lossy(&copyright)\n\n );\n\n return None;\n\n }\n\n\n\n std::io::copy(&mut r.by_ref().take(6), &mut std::io::sink()).unwrap(); // Skip next six bytes\n\n if r.take(64).read_until(0x80, &mut name).is_err() || name.len() < 1 {\n\n println!(\n", "file_path": "build.rs", "rank": 4, "score": 15814.918286031736 }, { "content": "fn make_cart_table(carts: &Vec<CartHdr>) -> Vec<u8> {\n\n let max_len = carts\n\n .iter()\n\n .map(|c| c.name.len())\n\n .max()\n\n .unwrap()\n\n 
.min(MAX_NAME_LEN);\n\n\n\n let pointer_list_len = ((carts.len() + 1) * std::mem::size_of::<u16>()) as u16;\n\n let mut string_ptr = FILEDATA + pointer_list_len;\n\n let mut pointers: Vec<u8> = Vec::new();\n\n let mut strings: Vec<u8> = Vec::new();\n\n for cart in carts {\n\n pointers.extend_from_slice(&string_ptr.to_be_bytes());\n\n\n\n let mut string = cart.name.clone();\n\n while string.len() < max_len {\n\n string.push(b' ');\n\n }\n\n while string.len() > max_len {\n", "file_path": "build.rs", "rank": 5, "score": 15484.864549228067 }, { "content": "# vectrex-cart\n\nA programmable Vectrex multicart based on ARM SOC\n\n\n\n- [ ] TODO: beautiful image of cart\n\n\n\nThis hack uses a cheap-ish STM32F407G development kit (STM32F407G-DISC1) to emulate vectrex carts. The carts are loaded from the internal flash of the STM32F4.\n\n\n\n## Hardware\n\n\n\nI used an official STM32F407G-DISC1 board from STM. It is easy to source, and has a built-in ST-link programming / debug interface which is nice. There are smaller and cheaper development kits out there that may work as well.\n\n\n\nThe exact development board used shouldn't matter much. However, if you don't use a STM32F407G-DISC1,\n\nmake sure the used GPIOs aren't connected to board components such as LEDs, USB, IMU, etc...\n\n\n\n(The plentiful on-board stuff on the STM32F407G-DISC1 is the reason why the address lines are such a mess)\n\n\n\nFor the physical Vectrex interface I simply desoldered the ROM from an old cart and replaced it with wires to the corresponding STM32F GPIO pins. This doesn't look good, but it works and requires only a minimum of soldering. \n\n\n\n### Pin mapping\n\nVectrex | STM32F407 | Note\n\n--------|-----------|-----\n\n*OE | PA1 | \"inverse CPU E clock\". Triggers cart emulation interrupt once every cycle.\n\n*CE | PA2 | \"inverse Chip Enable\". A15 on CPU? Only push data when low.\n\n*WE | PA3 | \"inverse Write Enable\". Not used (yet)\n\nA0 | PD0\n\nA1 | PD1\n\nA2 | PD2\n\nA3 | PD3\n\nA4 | PE4\n\nA5 | PE5\n\nA6 | PD6\n\nA7 | PD7\n\nA8 | PD8\n\nA9 | PD9\n\nA10 | PD10\n\nA11 | PD11\n\nA12 | PB12\n\nA13 | PB13\n\nA14 | PB14\n\nX | PB15 | \"Software controlled line\" - Not used (yet)\n\nD0 | PE8\n\n... | ...\n\nD7 | PE15\n\nGND | GND\n\n+5V | +5V | Powers the board. Just make sure to unplug the USB cable!\n\n*HALT | -\n\n*CART | -\n\n*NMI | -\n\n*IRQ | -\n\n\n", "file_path": "README.md", "rank": 6, "score": 10115.556996817408 }, { "content": "### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any\n\nadditional terms or conditions.\n\n\n\n### 3rd party components\n\n\n\nJeroen Domburg's [multicart](multicart/multicart.asm) is licensed under GNU LGPL v3. See that file for more information.\n\n\n", "file_path": "README.md", "rank": 7, "score": 10111.992576355256 }, { "content": "## Software\n\n\n\nTo build the ARM code you need an Cortex-M4F rust toolchain. See the Embedded Rust Book for installation instructions: https://rust-embedded.github.io/book/intro/tooling.html\n\n\n\nThe main work is done by an interrupt routing that incidentally has just about good enough timing to handle the data. There should be a few cycles left outside to add more stuff later.\n\n\n\nThe code is very far from idiomatic rust, but I didn't feel the need for safe abstractions for this little project. 
Suggestions on improvements are welcome.\n\n\n\nI chose to continue on Jeroen Domburg's *Extreme Multicart* protocol and multicart loader ROM, so there is a simple protocol implemented to pass data back.\n\nFor assembling the 6809 multicart code you need a [http://www.6809.org.uk/asm6809/dl/2.3/](6809 assembler).\n\n\n\n## References\n\n* STM32F407 Reference Manual - *ST Microelectronics* - https://www.st.com/content/ccc/resource/technical/document/reference_manual/3d/6d/5a/66/b4/99/40/d4/DM00031020.pdf/files/DM00031020.pdf/jcr:content/translations/en.DM00031020.pdf\n\n* STM32F407G-DISC1 User Manual - *ST Microelectronics* - https://www.st.com/content/ccc/resource/technical/document/user_manual/70/fe/4a/3f/e7/e1/4f/7d/DM00039084.pdf/files/DM00039084.pdf/jcr:content/translations/en.DM00039084.pdf\n\n* How to construct a Vectrex Multicart (Very simple) - *Tursi (M. Brent)* - http://www.harmlesslion.com/text/vectrex_multicart.htm\n\n* Emulating a GameBoy Cartridge with an STM32F4 - *Dhole* - http://dhole.github.io/post/gameboy_cartridge_emu_1/\n\n* Extreme Vectrex multicart - *Jeroen \"Sprite\" Domburg* - https://spritesmods.com/?art=veccart&page=1\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n", "file_path": "README.md", "rank": 8, "score": 10111.992576355256 }, { "content": "const LOADER_ROM: *const u8 = include_bytes!(concat!(env!(\"OUT_DIR\"), \"/loader.bin\")) as *const u8;\n\n\n\n// ROM ptr shared with Interrupt handler.\n\n// May point at any cart in flash, or even in RAM\n\nstatic mut CART_MEMORY: *const u8 = LOADER_ROM;\n\n\n\n// Flash addresses of available carts\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/carts.rs\"));\n\n\n\nunsafe fn setup_gpio_pins() {\n\n let gpioa = Gpio::gpioa();\n\n let gpiob = Gpio::gpiob();\n\n let gpiod = Gpio::gpiod();\n\n let gpioe = Gpio::gpioe();\n\n\n\n // Configure data output pins PE8-PE15 (bypass the HAL, though)\n\n gpioe.otyper1.write(0b1111_1111); // 1: output open-drain\n\n gpioe.ospeedr1.write(0b11111111_11111111); // 11: Very High Speed\n\n gpioe.pupdr1.write(0b00000000_00000000); // 00: No pull-up/pull-down\n\n gpioe.odr1.write(0xff);\n", "file_path": "src/main.rs", "rank": 13, "score": 16.34767555687861 }, { "content": " //gpioe.pupdr1.write(0b01010101_01010101); // 01: Pull-up\n\n //gpioe.pupdr1.write(0b10101010_10101010); // 10: Pull-down\n\n gpioe.moder1.write(0b01010101_01010101); // 01: General Purpose output mode\n\n\n\n // Configure address input pins\n\n gpioa\n\n .moder0\n\n .modify(|v| (v & 0b11111111_00000011) | 0b00000000_00000000); // 00: Input\n\n gpioa\n\n .pupdr0\n\n .modify(|v| (v & 0b11111111_00000011) | 0b00000000_01010100); // 01: Pull-up\n\n\n\n gpiob\n\n .moder1\n\n .modify(|v| (v & 0b00000000_11111111) | 0b00000000_00000000); // 00: Input\n\n gpiob\n\n .pupdr1\n\n .modify(|v| (v & 0b00000000_11111111) | 0b01010101_00000000); // 01: Pull-up\n\n\n\n gpiod\n", "file_path": "src/main.rs", "rank": 15, "score": 14.834929046744914 }, { "content": " gpioe.otyper1.write(0b0000_0000); // 0: output push-pull\n\n };\n\n }\n\n (false, true, addr) if addr <= 0xC7FF - 0x8000 => {\n\n // RAM extension write\n\n unsafe {\n\n gpioe.otyper1.write(0b1111_1111); // 1: output open-drain\n\n gpioe.odr1.write(0xff);\n\n };\n\n\n\n // Reconfigure data pins for reading\n\n unsafe {\n\n 
gpioe.moder1.write(0b00000000_00000000); // 00: Input\n\n gpioe.pupdr1.write(0b01010101_01010100); // 01: Pull-up\n\n }\n\n\n\n unsafe { gpiod.bsrr.write(0b10000000_00000000_00000000_00000000) }\n\n\n\n let ram = unsafe { read_volatile(&EXTENDED_MEMORY) };\n\n let byte = (gpioe.idr.read() >> 8) as u8;\n", "file_path": "src/main.rs", "rank": 16, "score": 14.230838341965624 }, { "content": " // Set up GPIOA Interrupt on PA1 (/OE)\n\n let syscfg = Syscfg::syscfg();\n\n let exti = Exti::exti();\n\n syscfg.exticr1.modify(|v| (v & !(0xf << 4)) | (0b0000 << 4));\n\n\n\n // Trigger on rising edge\n\n let line_1_bit = 1 << 1;\n\n exti.rtsr.modify(|v| v | line_1_bit);\n\n exti.ftsr.modify(|v| v & !line_1_bit);\n\n // Enable interrupt\n\n exti.imr.modify(|v| v | line_1_bit);\n\n}\n\n\n\n#[entry]\n", "file_path": "src/main.rs", "rank": 17, "score": 13.498218314147616 }, { "content": " .moder0\n\n .modify(|v| (v & 0b00001111_00000000) | 0b00000000_00000000); // 00: Input\n\n gpiod\n\n .pupdr0\n\n .modify(|v| (v & 0b00001111_00000000) | 0b01010000_01010101); // 01: Pull-up\n\n\n\n gpiod\n\n .moder1\n\n .modify(|v| (v & 0b11111111_00000000) | 0b00000000_00000000); // 00: Input\n\n gpiod\n\n .pupdr1\n\n .modify(|v| (v & 0b11111111_00000000) | 0b00000000_01010101); // 01: Pull-up\n\n\n\n gpioe\n\n .moder0\n\n .modify(|v| (v & 0b11110000_11111111) | 0b00000000_00000000); // 00: Input\n\n gpioe\n\n .pupdr0\n\n .modify(|v| (v & 0b11110000_11111111) | 0b00000101_00000000); // 01: Pull-up\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 12.257891358992403 }, { "content": " unsafe {\n\n gpioe.otyper1.write(0b1111_1111); // 1: output open-drain\n\n gpioe.odr1.write(0xff);\n\n };\n\n\n\n let rom = unsafe { core::ptr::read_volatile(&CART_MEMORY) };\n\n if rom == LOADER_ROM {\n\n // Ignore bootups from multicart loader\n\n *BOOT_TRIGGERED = 0;\n\n } else if *BOOT_TRIGGERED > 0 {\n\n // Turn off blue led for fun and profit\n\n unsafe { gpiod.bsrr.write(0b10000000_00000000_00000000_00000000) }\n\n\n\n // Swap in multicart loader under the hood\n\n unsafe { core::ptr::write_volatile(&mut CART_MEMORY, LOADER_ROM) }\n\n *BOOT_TRIGGERED = 0;\n\n } else {\n\n *BOOT_TRIGGERED = 1;\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 19, "score": 10.35986844412025 }, { "content": " | (gpiob.idr.read() & AMASK_PB);\n\n\n\n // Check if this is a cart access (*ce == low)\n\n let pa = gpioa.idr.read();\n\n let we = (pa & 0b1000) == 0; // Read *WE\n\n let ce = (pa & 0b0100) == 0; // Read *CE\n\n match (ce, we, addr) {\n\n (true, false, addr) => {\n\n // Normal cart ROM read. 
Critical path\n\n unsafe {\n\n let rom = read_volatile(&CART_MEMORY);\n\n gpioe.odr1.write(*rom.offset(addr as isize));\n\n gpioe.otyper1.write(0b0000_0000); // 0: output push-pull\n\n };\n\n }\n\n (false, false, addr) if addr <= 0xC7FF - 0x8000 => {\n\n // RAM extension read\n\n unsafe {\n\n let ram = read_volatile(&EXTENDED_MEMORY);\n\n gpioe.odr1.write(*ram.offset(addr as isize));\n", "file_path": "src/main.rs", "rank": 20, "score": 10.164675249436856 }, { "content": " if addr == 0xC7FF - 0x8000 {\n\n match byte {\n\n // RPC command\n\n 0x01 => unsafe {\n\n // Switch to the selected cart\n\n let param = *ram.offset((0xC7FE - 0x8000) as isize);\n\n write_volatile(&mut CART_MEMORY, CARTS[param as usize]);\n\n },\n\n _ => {}\n\n }\n\n }\n\n unsafe { *ram.offset(addr as isize) = byte }\n\n\n\n unsafe {\n\n gpioe.moder1.write(0b01010101_01010101); // 01: General Purpose output mode\n\n gpioe.pupdr1.write(0b00000000_00000000); // 00: No pull-up/pull-down\n\n }\n\n }\n\n (false, false, 0x7000) => {\n\n // \"Cold_Boot\" read, used to detect reboots\n", "file_path": "src/main.rs", "rank": 21, "score": 8.779785605024971 }, { "content": " _ => {\n\n // Anything else\n\n unsafe {\n\n gpioe.otyper1.write(0b1111_1111); // 1: output open-drain\n\n gpioe.odr1.write(0xff);\n\n };\n\n }\n\n };\n\n}\n", "file_path": "src/main.rs", "rank": 22, "score": 8.68858931889826 }, { "content": "use cortex_m_rt::entry;\n\n// use cortex_m_semihosting::hprintln;\n\n\n\nuse board::gpio::Speed;\n\nuse board::hal::delay::Delay;\n\nuse board::hal::prelude::*;\n\nuse board::hal::stm32::{self, interrupt, Interrupt};\n\n\n\n// use cortex_m::asm;\n\nuse cortex_m::peripheral::Peripherals;\n\n\n\nuse core::ptr::{null_mut, read_volatile, write_volatile};\n\n\n\nmod stm32f407;\n\nuse stm32f407::*;\n\n\n\n// RAM ptr to memory extension and RPC command buffer\n\nstatic mut EXTENDED_MEMORY: *mut u8 = null_mut();\n\n\n\n// ROM ptr to the generated loader binary\n", "file_path": "src/main.rs", "rank": 23, "score": 7.298239777868629 }, { "content": " .freeze();\n\n\n\n // 18432 bytes RAM expansion\n\n let mut extension: [u8; 0xC800 - 0x8000] = [0; 0xC800 - 0x8000];\n\n unsafe { write_volatile(&mut EXTENDED_MEMORY, &mut extension[0] as *mut u8) }\n\n\n\n unsafe { board::NVIC::unmask(Interrupt::EXTI1) }\n\n\n\n // Get delay provider\n\n let mut delay = Delay::new(cp.SYST, clocks);\n\n\n\n loop {\n\n blue.set_high();\n\n green.set_high();\n\n delay.delay_ms(500_u16);\n\n green.set_low();\n\n delay.delay_ms(500_u16);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 6.009422491148931 }, { "content": " rcc.ahb1enr.modify(|_, w| w.gpioeen().set_bit());\n\n\n\n unsafe { setup_gpio_pins() }\n\n\n\n // Constrain clock registers\n\n let rcc = p.RCC.constrain();\n\n\n\n // Helps speed\n\n let flash = p.FLASH;\n\n flash.acr.modify(|_, w| w.prften().set_bit());\n\n flash.acr.modify(|_, w| w.dcen().set_bit());\n\n flash.acr.modify(|_, w| w.icen().set_bit());\n\n\n\n // Configure clock to 168 MHz (i.e. 
the maximum) and freeze it\n\n let clocks = rcc\n\n .cfgr\n\n //.use_hse(8.mhz())\n\n .sysclk(168.mhz())\n\n //.pclk1(42.mhz())\n\n //.pclk2(84.mhz())\n", "file_path": "src/main.rs", "rank": 25, "score": 5.122356452938481 }, { "content": " string.pop().unwrap();\n\n }\n\n string.extend_from_slice(&format!(\" {}\", cart.year).as_bytes());\n\n string.push(0x80); // String terminator\n\n\n\n /*\n\n println!(\n\n \"cargo:warning={:04x}: {:?} ({})\",\n\n string_ptr,\n\n String::from_utf8_lossy(&string),\n\n cart.year\n\n );\n\n */\n\n\n\n string_ptr += string.len() as u16;\n\n strings.append(&mut string);\n\n }\n\n pointers.push(0x00); // String list terminator\n\n pointers.push(0x00);\n\n pointers.append(&mut strings);\n\n\n\n pointers\n\n}\n\n\n", "file_path": "build.rs", "rank": 26, "score": 3.195824250088471 }, { "content": " \"cargo:warning={:?}: not a cart (name = {:?})?\",\n\n path,\n\n String::from_utf8_lossy(&name)\n\n );\n\n return None;\n\n }\n\n name.pop().unwrap();\n\n\n\n let year = &copyright[copyright.len() - 5..copyright.len() - 1];\n\n if let Ok(year) = String::from_utf8_lossy(&year).parse() {\n\n return Some(CartHdr {\n\n path: path.clone(),\n\n year,\n\n name,\n\n });\n\n }\n\n }\n\n None\n\n}\n\n\n\nconst FILEDATA: u16 = 0x400; // From multicart.asm\n\nconst MAX_NAME_LEN: usize = 18; // With default font used in multicart.asm\n\n\n", "file_path": "build.rs", "rank": 29, "score": 2.7913718082562546 }, { "content": "use std::env;\n\nuse std::fs::{read_dir, File};\n\nuse std::io::{BufRead, BufReader, BufWriter, Read, Write};\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Debug, PartialEq, PartialOrd, Eq, Ord)]\n", "file_path": "build.rs", "rank": 31, "score": 2.6373161957544053 }, { "content": " )\n\n .unwrap();\n\n for cart in carts {\n\n writeln!(\n\n &mut writer,\n\n \"include_bytes!(\\\"{}\\\") as *const u8,\\n\",\n\n cart.path.to_str().unwrap(),\n\n )\n\n .unwrap();\n\n }\n\n writeln!(&mut writer, \"];\\n\").unwrap();\n\n}\n", "file_path": "build.rs", "rank": 32, "score": 2.2797587918315334 }, { "content": " let cart_table = make_cart_table(&carts);\n\n let multicart = include_bytes!(concat!(\n\n env!(\"CARGO_MANIFEST_DIR\"),\n\n \"/multicart/multicart.bin\"\n\n ));\n\n let cart_path = out_dir.join(\"loader.bin\");\n\n let mut cart_file = File::create(&cart_path).unwrap();\n\n cart_file\n\n .write_all(&multicart[0..FILEDATA as usize])\n\n .unwrap();\n\n cart_file.write_all(&cart_table).unwrap();\n\n\n\n // Generate code for embedding carts in stm flash\n\n let source_path = out_dir.join(\"carts.rs\");\n\n let source_file = File::create(&source_path).unwrap();\n\n let mut writer = BufWriter::new(&source_file);\n\n writeln!(\n\n &mut writer,\n\n \"const CARTS: [*const u8; {}] = [\\n\",\n\n carts.len()\n", "file_path": "build.rs", "rank": 33, "score": 1.8392156195443026 }, { "content": " Err(e) => println!(\"cargo:warning=failed to rebuild multicart: {:?}\", e),\n\n _ => (),\n\n }\n\n\n\n // Read carts directory\n\n let root_dir = &PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap()).join(\"carts\");\n\n let mut carts: Vec<CartHdr> = read_dir(&root_dir)\n\n .expect(\"carts directory:\")\n\n .flatten()\n\n .map(|e| parse_cart_header(&e.path()))\n\n .flatten()\n\n .collect();\n\n\n\n // Rerun if carts directory changes\n\n println!(\"cargo:rerun-if-changed=carts\");\n\n\n\n // Sort by name,year,path\n\n carts.sort();\n\n\n\n // Generate multicart loader with the table\n", "file_path": "build.rs", "rank": 34, "score": 1.155146350754729 } ]
Rust
src/api/context/propagation/composite_propagator.rs
bnjjj/opentelemetry-rust
8a828a81b3c9750ce4ccebe47d104e69929e8ee6
use crate::api::{self, HttpTextFormat}; use std::fmt::Debug; #[derive(Debug)] pub struct HttpTextCompositePropagator { propagators: Vec<Box<dyn HttpTextFormat + Send + Sync>>, } impl HttpTextCompositePropagator { pub fn new(propagators: Vec<Box<dyn HttpTextFormat + Send + Sync>>) -> Self { HttpTextCompositePropagator { propagators } } } impl HttpTextFormat for HttpTextCompositePropagator { fn inject_context(&self, context: &api::Context, carrier: &mut dyn api::Carrier) { for propagator in &self.propagators { propagator.inject_context(context, carrier) } } fn extract_with_context(&self, cx: &api::Context, carrier: &dyn api::Carrier) -> api::Context { self.propagators .iter() .fold(cx.clone(), |current_cx, propagator| { propagator.extract_with_context(&current_cx, carrier) }) } } #[cfg(test)] mod tests { use super::*; use crate::api::trace::b3_propagator::B3Encoding; use crate::api::TraceContextExt; use crate::api::{B3Propagator, Context, SpanContext, SpanId, TraceContextPropagator, TraceId}; use std::collections::HashMap; fn test_data() -> Vec<(&'static str, &'static str)> { vec![ ( "b3", "00000000000000000000000000000001-0000000000000001-0", ), ( "traceparent", "00-00000000000000000000000000000001-0000000000000001-00", ), ] } #[derive(Debug)] struct TestSpan(api::SpanContext); impl api::Span for TestSpan { fn add_event_with_timestamp( &self, _name: String, _timestamp: std::time::SystemTime, _attributes: Vec<api::KeyValue>, ) { } fn span_context(&self) -> api::SpanContext { self.0.clone() } fn is_recording(&self) -> bool { false } fn set_attribute(&self, _attribute: api::KeyValue) {} fn set_status(&self, _code: api::StatusCode, _message: String) {} fn update_name(&self, _new_name: String) {} fn end(&self) {} } #[test] fn inject_multiple_propagators() { let b3 = B3Propagator::with_encoding(B3Encoding::SingleHeader); let trace_context = TraceContextPropagator::new(); let composite_propagator = HttpTextCompositePropagator { propagators: vec![Box::new(b3), Box::new(trace_context)], }; let cx = Context::default().with_span(TestSpan(SpanContext::new( TraceId::from_u128(1), SpanId::from_u64(1), 0, false, ))); let mut carrier = HashMap::new(); composite_propagator.inject_context(&cx, &mut carrier); for (header_name, header_value) in test_data() { assert_eq!(carrier.get(header_name), Some(&header_value.to_string())); } } #[test] fn extract_multiple_propagators() { let b3 = B3Propagator::with_encoding(B3Encoding::SingleHeader); let trace_context = TraceContextPropagator::new(); let composite_propagator = HttpTextCompositePropagator { propagators: vec![Box::new(b3), Box::new(trace_context)], }; for (header_name, header_value) in test_data() { let mut carrier = HashMap::new(); carrier.insert(header_name.to_string(), header_value.to_string()); assert_eq!( composite_propagator.extract(&carrier).remote_span_context(), Some(&SpanContext::new( TraceId::from_u128(1), SpanId::from_u64(1), 0, true, )) ); } } }
use crate::api::{self, HttpTextFormat}; use std::fmt::Debug; #[derive(Debug)] pub struct HttpTextCompositePropagator { propagators: Vec<Box<dyn HttpTextFormat + Send + Sync>>, } impl HttpTextCompositePropagator { pub fn new(propagators: Vec<Box<dyn HttpTextFormat + Send + Sync>>) -> Self { HttpTextCompositePropagator { propagators } } } impl HttpTextFormat for HttpTextCompositePropagator { fn inject_context(&self, context: &api::Context, carrier: &mut dyn api::Carrier) { for propagator in &self.propagators { propagator.inject_context(context, carrier) } } fn extract_with_context(&self, cx: &api::Context, carrier: &dyn api::Carrier) -> api::Context { self.propagators .iter() .fold(cx.clone(), |current_cx, propagator| { propagator.extract_with_context(&current_cx, carrier) }) } } #[cfg(test)] mod tests { use super::*; use crate::api::trace::b3_propagator::B3Encoding; use crate::api::TraceContextExt; use crate::api::{B3Propagator, Context, SpanContext, SpanId, TraceContextPropagator, TraceId}; use std::collections::HashMap; fn test_data() -> Vec<(&'static str, &'static str)> { vec![ ( "b3", "00000000000000000000000000000001-0000000000000001-0", ), ( "traceparent", "00-00000000000000000000000000000001-0000000000000001-00", ), ] } #[derive(Debug)] struct TestSpan(api::SpanContext); impl api::Span for TestSpan { fn add_event_with_timestamp( &self, _name: String, _timestamp: std::time::SystemTime, _attributes: Vec<api::KeyValue>, ) { } fn span_context(&self) -> api::SpanContext { self.0.clone() } fn is_recording(&self) -> bool { false } fn set_attribute(&self, _attribute: api::KeyValue) {} fn set_status(&self, _code: api::StatusCode, _message: String) {} fn update_name(&self, _new_name: String) {} fn end(&self) {} } #[test] fn inject_multiple_propagators() { let b3 = B3Propagator::with_encoding(B3Encoding::SingleHeader); let trace_context = TraceContextPropagator::new(); let composite_propagato
ace_context = TraceContextPropagator::new(); let composite_propagator = HttpTextCompositePropagator { propagators: vec![Box::new(b3), Box::new(trace_context)], }; for (header_name, header_value) in test_data() { let mut carrier = HashMap::new(); carrier.insert(header_name.to_string(), header_value.to_string()); assert_eq!( composite_propagator.extract(&carrier).remote_span_context(), Some(&SpanContext::new( TraceId::from_u128(1), SpanId::from_u64(1), 0, true, )) ); } } }
r = HttpTextCompositePropagator { propagators: vec![Box::new(b3), Box::new(trace_context)], }; let cx = Context::default().with_span(TestSpan(SpanContext::new( TraceId::from_u128(1), SpanId::from_u64(1), 0, false, ))); let mut carrier = HashMap::new(); composite_propagator.inject_context(&cx, &mut carrier); for (header_name, header_value) in test_data() { assert_eq!(carrier.get(header_name), Some(&header_value.to_string())); } } #[test] fn extract_multiple_propagators() { let b3 = B3Propagator::with_encoding(B3Encoding::SingleHeader); let tr
random
[ { "content": "/// Sets the given [`HttpTextFormat`] propagator as the current global propagator.\n\n///\n\n/// [`HttpTextFormat`]: ../api/context/propagation/trait.HttpTextFormat.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use opentelemetry::{api, global};\n\n///\n\n/// // create your http text propagator\n\n/// let propagator = api::TraceContextPropagator::new();\n\n///\n\n/// // assign it as the global propagator\n\n/// global::set_http_text_propagator(propagator);\n\n/// ```\n\npub fn set_http_text_propagator<P: api::HttpTextFormat + Send + Sync + 'static>(propagator: P) {\n\n let _lock = GLOBAL_HTTP_TEXT_PROPAGATOR\n\n .write()\n\n .map(|mut global_propagator| *global_propagator = Box::new(propagator));\n\n}\n\n\n", "file_path": "src/global.rs", "rank": 0, "score": 280949.38915224915 }, { "content": "struct Span(Box<dyn api::Span + Send + Sync>);\n", "file_path": "src/api/trace/context.rs", "rank": 1, "score": 243380.32871435134 }, { "content": "/// Carriers provide an interface for adding and removing fields from an\n\n/// underlying struct like `HashMap`.\n\npub trait Carrier {\n\n /// Get a value for a key from the underlying data.\n\n fn get(&self, key: &str) -> Option<&str>;\n\n /// Add a key and value to the underlying.\n\n fn set(&mut self, key: &str, value: String);\n\n}\n\n\n\nimpl<S: std::hash::BuildHasher> api::Carrier for HashMap<String, String, S> {\n\n /// Get a value for a key from the HashMap.\n\n fn get(&self, key: &str) -> Option<&str> {\n\n self.get(key).map(|v| v.as_str())\n\n }\n\n\n\n /// Set a key and value in the HashMap.\n\n fn set(&mut self, key: &str, value: String) {\n\n self.insert(String::from(key), value);\n\n }\n\n}\n\n\n\n#[cfg(feature = \"http\")]\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 2, "score": 233082.83171177443 }, { "content": "/// Creates a named instance of [`Tracer`] via the configured [`GlobalProvider`].\n\n///\n\n/// If the name is an empty string, the provider will use a default name.\n\n///\n\n/// This is a more convenient way of expressing `global::trace_provider().get_tracer(name)`.\n\n///\n\n/// [`Tracer`]: ../api/trace/tracer/trait.Tracer.html\n\n/// [`GlobalProvider`]: struct.GlobalProvider.html\n\npub fn tracer(name: &'static str) -> BoxedTracer {\n\n trace_provider().get_tracer(name)\n\n}\n\n\n", "file_path": "src/global.rs", "rank": 3, "score": 218730.00187537645 }, { "content": "/// Interface for a single operation within a trace.\n\npub trait Span: fmt::Debug + 'static + Send + Sync {\n\n /// An API to record events in the context of a given `Span`.\n\n ///\n\n /// Events have a time associated with the moment when they are\n\n /// added to the `Span`.\n\n ///\n\n /// Events SHOULD preserve the order in which they're set. 
This will typically match\n\n /// the ordering of the events' timestamps.\n\n ///\n\n /// Note that the OpenTelemetry project documents certain [\"standard event names and\n\n /// keys\"](https://github.com/open-telemetry/opentelemetry-specification/tree/v0.5.0/specification/trace/semantic_conventions/README.md)\n\n /// which have prescribed semantic meanings.\n\n fn add_event(&self, name: String, attributes: Vec<api::KeyValue>) {\n\n self.add_event_with_timestamp(name, SystemTime::now(), attributes)\n\n }\n\n\n\n /// Convenience method to record an exception/error as an `Event`\n\n ///\n\n /// An exception SHOULD be recorded as an Event on the span during which it occurred.\n\n /// The name of the event MUST be \"exception\".\n", "file_path": "src/api/trace/span.rs", "rank": 4, "score": 208931.10797682888 }, { "content": "/// Executes a closure with a reference to this thread's current context.\n\n///\n\n/// Note: This function will panic if you attempt to attach another context\n\n/// while the context is still borrowed.\n\nfn get_current<F: FnMut(&Context) -> T, T>(mut f: F) -> T {\n\n CURRENT_CONTEXT\n\n .try_with(|cx| f(&*cx.borrow()))\n\n .unwrap_or_else(|_| DEFAULT_CONTEXT.with(|cx| f(&*cx)))\n\n}\n\n\n\n/// With TypeIds as keys, there's no need to hash them. They are already hashes\n\n/// themselves, coming from the compiler. The IdHasher holds the u64 of\n\n/// the TypeId, and then returns it, instead of doing any bit fiddling.\n", "file_path": "src/api/context/mod.rs", "rank": 5, "score": 201624.581858665 }, { "content": "/// `SpanExporter` defines the interface that protocol-specific exporters must\n\n/// implement so that they can be plugged into OpenTelemetry SDK and support\n\n/// sending of telemetry data.\n\n///\n\n/// The goals of the interface are:\n\n///\n\n/// - Minimize burden of implementation for protocol-dependent telemetry\n\n/// exporters. The protocol exporter is expected to be primarily a simple\n\n/// telemetry data encoder and transmitter.\n\n/// - Allow implementing helpers as composable components that use the same\n\n/// chainable Exporter interface. SDK authors are encouraged to implement common\n\n/// functionality such as queuing, batching, tagging, etc. as helpers. This\n\n/// functionality will be applicable regardless of what protocol exporter is used.\n\npub trait SpanExporter: Send + Sync + std::fmt::Debug {\n\n /// Exports a batch of telemetry data. Protocol exporters that will implement\n\n /// this function are typically expected to serialize and transmit the data\n\n /// to the destination.\n\n ///\n\n /// This function will never be called concurrently for the same exporter\n\n /// instance. It can be called again only after the current call returns.\n\n ///\n\n /// This function must not block indefinitely, there must be a reasonable\n\n /// upper limit after which the call must time out with an error result.\n\n fn export(&self, batch: Vec<Arc<SpanData>>) -> ExportResult;\n\n\n\n /// Shuts down the exporter. Called when SDK is shut down. This is an\n\n /// opportunity for exporter to do any cleanup required.\n\n ///\n\n /// `shutdown` should be called only once for each Exporter instance. After\n\n /// the call to `shutdown`, subsequent calls to `SpanExport` are not allowed\n\n /// and should return an error.\n\n ///\n\n /// Shutdown should not block indefinitely (e.g. 
if it attempts to flush the\n", "file_path": "src/exporter/trace/mod.rs", "rank": 6, "score": 184979.53509724297 }, { "content": "/// Executes a closure with a reference to the current global [`HttpTextFormat`] propagator.\n\n///\n\n/// [`HttpTextFormat`]: ../api/context/propagation/trait.HttpTextFormat.html\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use opentelemetry::{api, api::HttpTextFormat, global};\n\n/// use std::collections::HashMap;\n\n///\n\n/// let example_carrier = HashMap::new();\n\n///\n\n/// // create your http text propagator\n\n/// let tc_propagator = api::TraceContextPropagator::new();\n\n/// global::set_http_text_propagator(tc_propagator);\n\n///\n\n/// // use the global http text propagator to extract contexts\n\n/// let _cx = global::get_http_text_propagator(|propagator| propagator.extract(&example_carrier));\n\n/// ```\n\npub fn get_http_text_propagator<T, F>(mut f: F) -> T\n\nwhere\n\n F: FnMut(&dyn api::HttpTextFormat) -> T,\n\n{\n\n GLOBAL_HTTP_TEXT_PROPAGATOR\n\n .read()\n\n .map(|propagator| f(&**propagator))\n\n .unwrap_or_else(|_| f(&*DEFAULT_HTTP_TEXT_PROPAGATOR as &dyn api::HttpTextFormat))\n\n}\n\n\n", "file_path": "src/global.rs", "rank": 7, "score": 180589.06955037094 }, { "content": "type DynSpan = dyn api::Span + Send + Sync;\n\n\n\nimpl api::Span for BoxedSpan {\n\n /// Records events at a specific time in the context of a given `Span`.\n\n ///\n\n /// Note that the OpenTelemetry project documents certain [\"standard event names and\n\n /// keys\"](https://github.com/open-telemetry/opentelemetry-specification/tree/v0.5.0/specification/trace/semantic_conventions/README.md)\n\n /// which have prescribed semantic meanings.\n\n fn add_event_with_timestamp(\n\n &self,\n\n name: String,\n\n timestamp: SystemTime,\n\n attributes: Vec<api::KeyValue>,\n\n ) {\n\n self.0.add_event_with_timestamp(name, timestamp, attributes)\n\n }\n\n\n\n /// Returns the `SpanContext` for the given `Span`.\n\n fn span_context(&self) -> api::SpanContext {\n\n self.0.span_context()\n", "file_path": "src/global.rs", "rank": 8, "score": 177934.6636278588 }, { "content": "/// Interface for generating IDs\n\npub trait IdGenerator: Send + Sync + fmt::Debug {\n\n /// Generate a new `TraceId`\n\n fn new_trace_id(&self) -> api::TraceId;\n\n\n\n /// Generate a new `SpanId`\n\n fn new_span_id(&self) -> api::SpanId;\n\n}\n", "file_path": "src/api/trace/id_generator.rs", "rank": 9, "score": 156567.79506234382 }, { "content": "/// The `Sampler` interface allows implementations to provide samplers which will\n\n/// return a sampling `SamplingResult` based on information that is typically\n\n/// available just before the `Span` was created.\n\npub trait Sampler: Send + Sync + std::fmt::Debug {\n\n /// Returns the `SamplingDecision` for a `Span` to be created.\n\n #[allow(clippy::too_many_arguments)]\n\n fn should_sample(\n\n &self,\n\n parent_context: Option<&api::SpanContext>,\n\n trace_id: api::TraceId,\n\n name: &str,\n\n span_kind: &api::SpanKind,\n\n attributes: &[api::KeyValue],\n\n links: &[api::Link],\n\n ) -> SamplingResult;\n\n}\n\n\n\n/// The result of sampling logic for a given `Span`.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct SamplingResult {\n\n /// `SamplingDecision` reached by this result\n\n pub decision: SamplingDecision,\n\n /// Extra attributes added by this result\n", "file_path": "src/api/trace/sampler.rs", "rank": 10, "score": 155983.44341895377 }, { "content": "fn trace_benchmark_group<F: Fn(&sdk::Tracer)>(c: &mut Criterion, name: &str, f: F) {\n\n let mut 
group = c.benchmark_group(name);\n\n\n\n group.bench_function(\"always-sample\", |b| {\n\n let always_sample = sdk::Provider::builder()\n\n .with_config(sdk::Config {\n\n default_sampler: Box::new(sdk::Sampler::AlwaysOn),\n\n ..Default::default()\n\n })\n\n .build()\n\n .get_tracer(\"always-sample\");\n\n\n\n b.iter(|| f(&always_sample));\n\n });\n\n\n\n group.bench_function(\"never-sample\", |b| {\n\n let never_sample = sdk::Provider::builder()\n\n .with_config(sdk::Config {\n\n default_sampler: Box::new(sdk::Sampler::AlwaysOff),\n\n ..Default::default()\n", "file_path": "benches/trace.rs", "rank": 11, "score": 155703.23611832742 }, { "content": "/// Methods to inject and extract a value as text into carriers that travel\n\n/// in-band across process boundaries.\n\npub trait HttpTextFormat: Debug {\n\n /// Properly encodes the values of the current [`Context`] and injects them into\n\n /// the [`Carrier`].\n\n ///\n\n /// [`Context`]: ../../struct.Context.html\n\n /// [`Carrier`]: ../trait.Carrier.html\n\n fn inject(&self, carrier: &mut dyn api::Carrier) {\n\n self.inject_context(&Context::current(), carrier)\n\n }\n\n\n\n /// Properly encodes the values of the [`Context`] and injects them into the\n\n /// [`Carrier`].\n\n ///\n\n /// [`Context`]: ../../struct.Context.html\n\n /// [`Carrier`]: ../trait.Carrier.html\n\n fn inject_context(&self, cx: &Context, carrier: &mut dyn api::Carrier);\n\n\n\n /// Retrieves encoded data using the provided [`Carrier`]. If no data for this\n\n /// format was retrieved OR if the retrieved data is invalid, then the current\n\n /// [`Context`] is returned.\n", "file_path": "src/api/context/propagation/text_propagator.rs", "rank": 12, "score": 155645.92157234877 }, { "content": "/// `SpanProcessor`s allow finished spans to be processed.\n\npub trait SpanProcessor: Send + Sync + std::fmt::Debug {\n\n /// `on_start` method is invoked when a `Span` is started.\n\n fn on_start(&self, span: Arc<exporter::trace::SpanData>);\n\n /// `on_end` method is invoked when a `Span` is ended.\n\n fn on_end(&self, span: Arc<exporter::trace::SpanData>);\n\n /// Shutdown is invoked when SDK shuts down. Use this call to cleanup any\n\n /// processor data. 
No calls to `on_start` and `on_end` method is invoked\n\n /// after `shutdown` call is made.\n\n fn shutdown(&self);\n\n}\n", "file_path": "src/api/trace/span_processor.rs", "rank": 13, "score": 151041.77095560223 }, { "content": "/// Convert from `sdk::LabelSet` to `prometheus`' label format.\n\nfn convert_label_set(label_set: &sdk::LabelSet) -> HashMap<&str, &str> {\n\n label_set\n\n .iter()\n\n .map(|(key, value)| (key.as_ref(), value.as_ref()))\n\n .collect()\n\n}\n\n\n\n/// Convert from list of `Key`s to prometheus' label format.\n\npub(crate) fn convert_labels(labels: &[Key]) -> Vec<&str> {\n\n labels.iter().map(|k| k.as_str()).collect()\n\n}\n\n\n\n/// Prometheus IntCounterHandle\n\n#[derive(Clone)]\n\n#[allow(missing_debug_implementations)]\n\npub struct IntCounterHandle(prometheus::IntCounter);\n\n\n\nimpl api::Counter<i64, sdk::LabelSet> for prometheus::IntCounterVec {\n\n /// Prometheus' `CounterHandle`\n\n type Handle = IntCounterHandle;\n", "file_path": "src/exporter/metrics/prometheus/mod.rs", "rank": 14, "score": 141816.53338817984 }, { "content": "struct Correlations(CorrelationContext);\n\n\n", "file_path": "src/api/correlation/propagation.rs", "rank": 15, "score": 135511.2238001743 }, { "content": "#[derive(Clone, Default, Debug)]\n\nstruct IdHasher(u64);\n\n\n\nimpl Hasher for IdHasher {\n\n fn write(&mut self, _: &[u8]) {\n\n unreachable!(\"TypeId calls write_u64\");\n\n }\n\n\n\n #[inline]\n\n fn write_u64(&mut self, id: u64) {\n\n self.0 = id;\n\n }\n\n\n\n #[inline]\n\n fn finish(&self) -> u64 {\n\n self.0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/api/context/mod.rs", "rank": 16, "score": 135407.16906918067 }, { "content": "/// Used to serialize and deserialize `SpanContext`s to and from a binary\n\n/// representation.\n\npub trait BinaryFormat {\n\n /// Serializes span context into a byte array and returns the array.\n\n fn to_bytes(&self, context: &api::SpanContext) -> [u8; 29];\n\n\n\n /// Deserializes a span context from a byte array.\n\n fn from_bytes(&self, bytes: Vec<u8>) -> api::SpanContext;\n\n}\n\n\n\n/// Extracts and injects `SpanContext`s from byte arrays.\n\n#[derive(Debug, Default)]\n\npub struct BinaryPropagator {}\n\n\n\nimpl BinaryPropagator {\n\n /// Create a new binary propagator.\n\n pub fn new() -> Self {\n\n BinaryPropagator {}\n\n }\n\n}\n\n\n\nimpl BinaryFormat for BinaryPropagator {\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 17, "score": 134352.8055115195 }, { "content": "struct RemoteSpanContext(api::SpanContext);\n\n\n", "file_path": "src/api/trace/context.rs", "rank": 18, "score": 132693.99593686938 }, { "content": "struct TonicMetadataMapCarrier<'a>(&'a mut tonic::metadata::MetadataMap);\n\nimpl<'a> api::Carrier for TonicMetadataMapCarrier<'a> {\n\n fn get(&self, key: &'static str) -> Option<&str> {\n\n self.0.get(key).and_then(|metadata| metadata.to_str().ok())\n\n }\n\n\n\n fn set(&mut self, key: &'static str, value: String) {\n\n if let Ok(key) = tonic::metadata::MetadataKey::from_bytes(key.to_lowercase().as_bytes()) {\n\n self.0.insert(\n\n key,\n\n tonic::metadata::MetadataValue::from_str(&value).unwrap(),\n\n );\n\n }\n\n }\n\n}\n\n\n\n#[instrument]\n\nasync fn greet() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut client = GreeterClient::connect(\"http://[::1]:50051\")\n\n .instrument(info_span!(\"client connect\"))\n", "file_path": "examples/tracing-grpc/src/client.rs", "rank": 19, "score": 131397.45678848712 }, { "content": "/// Methods for soring 
and retrieving correlation data in a context.\n\npub trait CorrelationContextExt {\n\n /// Returns a clone of the current context with the included name / value pairs.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use opentelemetry::api::{Context, CorrelationContextExt, KeyValue, Value};\n\n ///\n\n /// let cx = Context::current_with_correlations(vec![KeyValue::new(\"my-name\", \"my-value\")]);\n\n ///\n\n /// assert_eq!(\n\n /// cx.correlation_context().get(\"my-name\"),\n\n /// Some(&Value::String(\"my-value\".to_string())),\n\n /// )\n\n /// ```\n\n fn current_with_correlations<T: IntoIterator<Item = KeyValue>>(correlations: T) -> Self;\n\n\n\n /// Returns a clone of the given context with the included name / value pairs.\n\n ///\n\n /// # Examples\n", "file_path": "src/api/correlation/propagation.rs", "rank": 20, "score": 129554.44928051904 }, { "content": "#[instrument]\n\nfn expensive_fn(to_print: String) {\n\n for _ in 0..5 {\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n info!(\"{}\", to_print);\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct MyGreeter {}\n\n\n\n#[tonic::async_trait]\n\nimpl Greeter for MyGreeter {\n\n #[instrument]\n\n async fn say_hello(\n\n &self,\n\n request: Request<HelloRequest>, // Accept request of type HelloRequest\n\n ) -> Result<Response<HelloReply>, Status> {\n\n let propagator = api::TraceContextPropagator::new();\n\n let parent_cx = propagator.extract(&HttpHeaderMapCarrier(request.metadata()));\n\n let span = tracing::Span::current();\n", "file_path": "examples/tracing-grpc/src/server.rs", "rank": 21, "score": 128201.36983068335 }, { "content": "fn format_value_array_as_string(v: &[Value]) -> String {\n\n format!(\n\n \"[{}]\",\n\n v.iter()\n\n .map(|elem| match elem {\n\n v @ Value::String(_) | v @ Value::Bytes(_) => format!(r#\"\"{}\"\"#, String::from(v)),\n\n v => String::from(v),\n\n })\n\n .collect::<Vec<_>>()\n\n .join(\",\")\n\n )\n\n}\n\n\n\n/// `KeyValue` pairs are used by `LabelSet`s and `Span` attributes.\n\n#[cfg_attr(feature = \"serialize\", derive(Deserialize, Serialize))]\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct KeyValue {\n\n /// Dimension or event key\n\n pub key: Key,\n\n /// Dimension or event value\n", "file_path": "src/api/core.rs", "rank": 22, "score": 125330.05819125922 }, { "content": "/// Used to serialize and deserialize `SpanContext`s to and from a base64\n\n/// representation.\n\npub trait Base64Format {\n\n /// Serializes span context into a base64 encoded string\n\n fn to_base64(&self, context: &api::SpanContext) -> String;\n\n\n\n /// Deserialize a span context from a base64 encoded string\n\n fn from_base64(&self, base64: &str) -> api::SpanContext;\n\n}\n\n\n\nimpl<Format> Base64Format for Format\n\nwhere\n\n Format: BinaryFormat,\n\n{\n\n fn to_base64(&self, context: &api::SpanContext) -> String {\n\n encode(&self.to_bytes(context))\n\n }\n\n\n\n fn from_base64(&self, base64: &str) -> api::SpanContext {\n\n if let Ok(bytes) = decode(base64.as_bytes()) {\n\n self.from_bytes(bytes)\n\n } else {\n", "file_path": "src/experimental/api/context/propagation/base64_format.rs", "rank": 23, "score": 123840.16785786039 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n trace_benchmark_group(c, \"start-end-span\", |tracer| tracer.start(\"foo\").end());\n\n\n\n trace_benchmark_group(c, \"start-end-span-4-attrs\", |tracer| {\n\n let span = tracer.start(\"foo\");\n\n span.set_attribute(Key::new(\"key1\").bool(false));\n\n 
span.set_attribute(Key::new(\"key2\").string(\"hello\"));\n\n span.set_attribute(Key::new(\"key3\").u64(123));\n\n span.set_attribute(Key::new(\"key4\").f64(123.456));\n\n span.end();\n\n });\n\n\n\n trace_benchmark_group(c, \"start-end-span-8-attrs\", |tracer| {\n\n let span = tracer.start(\"foo\");\n\n span.set_attribute(Key::new(\"key1\").bool(false));\n\n span.set_attribute(Key::new(\"key2\").string(\"hello\"));\n\n span.set_attribute(Key::new(\"key3\").u64(123));\n\n span.set_attribute(Key::new(\"key4\").f64(123.456));\n\n span.set_attribute(Key::new(\"key11\").bool(false));\n\n span.set_attribute(Key::new(\"key12\").string(\"hello\"));\n", "file_path": "benches/trace.rs", "rank": 24, "score": 120899.21887409104 }, { "content": "fn map_from_kvs<T>(kvs: T) -> HashMap<String, String>\n\nwhere\n\n T: IntoIterator<Item = api::KeyValue>,\n\n{\n\n let mut map: HashMap<String, String> = HashMap::new();\n\n for kv in kvs {\n\n map.insert(kv.key.into(), kv.value.into());\n\n }\n\n map\n\n}\n", "file_path": "opentelemetry-zipkin/src/lib.rs", "rank": 25, "score": 119390.59702141861 }, { "content": "/// Returns an instance of the currently configured global [`Provider`] through\n\n/// [`GlobalProvider`].\n\n///\n\n/// [`Provider`]: ../api/trace/provider/trait.Provider.html\n\n/// [`GlobalProvider`]: struct.GlobalProvider.html\n\npub fn trace_provider() -> GlobalProvider {\n\n GLOBAL_TRACER_PROVIDER\n\n .read()\n\n .expect(\"GLOBAL_TRACER_PROVIDER RwLock poisoned\")\n\n .clone()\n\n}\n\n\n", "file_path": "src/global.rs", "rank": 26, "score": 118128.21246201632 }, { "content": "//! with `Get` method as described below.\n\n//!\n\n//! ##### Get\n\n//!\n\n//! The Get function MUST return the first value of the given propagation\n\n//! key or return `None` if the key doesn't exist.\n\n//!\n\n//! Required arguments:\n\n//!\n\n//! - the carrier of propagation fields, such as an HTTP request.\n\n//! - the key of the field.\n\n//!\n\n//! The `get` function is responsible for handling case sensitivity. If\n\n//! the getter is intended to work with an HTTP request object, the getter\n\n//! MUST be case insensitive. To improve compatibility with other text-based\n\n//! protocols, text format implementations MUST ensure to always use the\n\n//! canonical casing for their attributes. NOTE: Canonical casing for HTTP\n\n//! headers is usually title case (e.g. `Content-Type` instead of `content-type`).\n\n//!\n\nuse crate::api;\n\nuse std::collections::HashMap;\n\n\n\npub mod composite_propagator;\n\npub mod text_propagator;\n\n\n\n/// Carriers provide an interface for adding and removing fields from an\n\n/// underlying struct like `HashMap`.\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 27, "score": 115352.7747775152 }, { "content": "}\n\n\n\n#[cfg(feature = \"tonic\")]\n\nimpl api::Carrier for tonic::metadata::MetadataMap {\n\n /// Get a value for a key from the MetadataMap. If the value can't be converted to &str, returns None\n\n fn get(&self, key: &str) -> Option<&str> {\n\n self.get(key).and_then(|metadata| metadata.to_str().ok())\n\n }\n\n\n\n /// Set a key and value in the MetadataMap. 
Does nothing if the key or value are not valid inputs\n\n fn set(&mut self, key: &str, value: String) {\n\n if let Ok(key) = tonic::metadata::MetadataKey::from_bytes(key.to_lowercase().as_bytes()) {\n\n if let Ok(val) = tonic::metadata::MetadataValue::from_str(&value) {\n\n self.insert(key, val);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 28, "score": 115350.02548534697 }, { "content": "impl api::Carrier for http::HeaderMap {\n\n /// Get a value for a key from the HeaderMap. If the value is not valid ASCII, returns None.\n\n fn get(&self, key: &str) -> Option<&str> {\n\n match self.get(key) {\n\n Some(val) => match val.to_str() {\n\n Ok(ascii) => Some(ascii),\n\n Err(_) => None,\n\n },\n\n None => None,\n\n }\n\n }\n\n\n\n /// Set a key and value in the HeaderMap. Does nothing if the key or value are not valid inputs.\n\n fn set(&mut self, key: &str, value: String) {\n\n if let Ok(name) = http::header::HeaderName::from_bytes(key.as_bytes()) {\n\n if let Ok(val) = http::header::HeaderValue::from_str(&value) {\n\n self.insert(name, val);\n\n }\n\n }\n\n }\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 29, "score": 115349.70457581591 }, { "content": "//! ### Fields\n\n//!\n\n//! The propagation fields defined. If your carrier is reused, you should\n\n//! delete the fields here before calling `inject`.\n\n//!\n\n//! For example, if the carrier is a single-use or immutable request object,\n\n//! you don't need to clear fields as they couldn't have been set before.\n\n//! If it is a mutable, retryable object, successive calls should clear\n\n//! these fields first.\n\n//!\n\n//! The use cases of this are:\n\n//!\n\n//! - allow pre-allocation of fields, especially in systems like gRPC\n\n//! Metadata\n\n//! - allow a single-pass over an iterator\n\n//!\n\n//! Returns list of fields that will be used by this formatter.\n\n//!\n\n//! ### Inject\n\n//!\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 30, "score": 115348.70009101224 }, { "content": "//! # OpenTelemetry Propagator interface\n\n//!\n\n//! Propagators API consists of two main formats:\n\n//!\n\n//! - `BinaryFormat` is used to serialize and deserialize a value\n\n//! into a binary representation.\n\n//! - `HttpTextFormat` is used to inject and extract a value as\n\n//! text into carriers that travel in-band across process boundaries.\n\n//!\n\n//! Deserializing must set `is_remote` to true on the returned\n\n//! `SpanContext`.\n\n//!\n\n//! ## Binary Format\n\n//!\n\n//! `BinaryFormat` is a formatter to serialize and deserialize a value\n\n//! into a binary format.\n\n//!\n\n//! `BinaryFormat` MUST expose the APIs that serializes values into bytes,\n\n//! and deserializes values from bytes.\n\n//!\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 31, "score": 115347.28409745806 }, { "content": "//! Injects the value downstream. For example, as http headers.\n\n//!\n\n//! Required arguments:\n\n//!\n\n//! - the `SpanContext` to be injected.\n\n//! - the carrier that holds propagation fields. For example, an outgoing\n\n//! message or http request.\n\n//! - the `Setter` invoked for each propagation key to add or remove.\n\n//!\n\n//! #### Setter argument\n\n//!\n\n//! Setter is an argument in `Inject` that puts value into given field.\n\n//!\n\n//! `Setter` allows a `HttpTextFormat` to set propagated fields into a\n\n//! carrier.\n\n//!\n\n//! `Setter` MUST be stateless and allowed to be saved as a constant to\n\n//! avoid runtime allocations. 
One of the ways to implement it is `Setter`\n\n//! class with `Put` method as described below.\n\n//!\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 32, "score": 115344.91136309462 }, { "content": "//! ##### Put\n\n//!\n\n//! Replaces a propagated field with the given value.\n\n//!\n\n//! Required arguments:\n\n//!\n\n//! - the carrier holds propagation fields. For example, an outgoing message\n\n//! or http request.\n\n//! - the key of the field.\n\n//! - the value of the field.\n\n//!\n\n//! The implementation SHOULD preserve casing (e.g. it should not transform\n\n//! `Content-Type` to `content-type`) if the used protocol is case insensitive,\n\n//! otherwise it MUST preserve casing.\n\n//!\n\n//! ### Extract\n\n//!\n\n//! Extracts the value from upstream. For example, as http headers.\n\n//!\n\n//! If the value could not be parsed, the underlying implementation will\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 33, "score": 115343.98340183664 }, { "content": "//! decide to return an object representing either an empty value, an invalid\n\n//! value, or a valid value.\n\n//!\n\n//! Required arguments:\n\n//!\n\n//! - the carrier holds propagation fields. For example, an outgoing message\n\n//! or http request.\n\n//! - the instance of `Getter` invoked for each propagation key to get.\n\n//!\n\n//! Returns the non-null extracted value.\n\n//!\n\n//! #### Getter argument\n\n//!\n\n//! Getter is an argument in `Extract` that get value from given field\n\n//!\n\n//! `Getter` allows a `HttpTextFormat` to read propagated fields from a\n\n//! carrier.\n\n//!\n\n//! `Getter` MUST be stateless and allowed to be saved as a constant to avoid\n\n//! runtime allocations. One of the ways to implement it is `Getter` class\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 34, "score": 115341.87262211188 }, { "content": "//! - on-the-wire byte representation of the value.\n\n//!\n\n//! Returns a value deserialized from bytes.\n\n//!\n\n//! ## HTTP Text Format\n\n//!\n\n//! `HttpTextFormat` is a formatter that injects and extracts a value\n\n//! as text into carriers that travel in-band across process boundaries.\n\n//!\n\n//! Encoding is expected to conform to the HTTP Header Field semantics.\n\n//! Values are often encoded as RPC/HTTP request headers.\n\n//!\n\n//! The carrier of propagated data on both the client (injector) and\n\n//! server (extractor) side is usually an http request. Propagation is\n\n//! usually implemented via library-specific request interceptors, where\n\n//! the client-side injects values and the server-side extracts them.\n\n//!\n\n//! `HttpTextFormat` MUST expose the APIs that injects values into carriers,\n\n//! and extracts values from carriers.\n\n//!\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 35, "score": 115341.79482278164 }, { "content": "//! ### ToBytes\n\n//!\n\n//! Serializes the given value into the on-the-wire representation.\n\n//!\n\n//! Required arguments:\n\n//!\n\n//! - the value to serialize, can be `SpanContext` or `DistributedContext`.\n\n//!\n\n//! Returns the on-the-wire byte representation of the value.\n\n//!\n\n//! ### FromBytes\n\n//!\n\n//! Creates a value from the given on-the-wire encoded representation.\n\n//!\n\n//! If the value could not be parsed, the underlying implementation\n\n//! SHOULD decide to return ether an empty value, an invalid value, or\n\n//! a valid value.\n\n//!\n\n//! 
Required arguments:\n\n//!\n", "file_path": "src/api/context/propagation/mod.rs", "rank": 36, "score": 115337.35928704581 }, { "content": "/// Allows a specific [`Provider`] to be used generically by the\n\n/// [`GlobalProvider`] by mirroring the interface and boxing the return types.\n\n///\n\n/// [`Provider`]: ../api/trace/provider/trait.Provider.html\n\n/// [`GlobalProvider`]: struct.GlobalProvider.html\n\npub trait GenericProvider: fmt::Debug + 'static {\n\n /// Creates a named tracer instance that is a trait object through the underlying `Provider`.\n\n fn get_tracer_boxed(&self, name: &'static str) -> Box<dyn GenericTracer + Send + Sync>;\n\n}\n\n\n\nimpl<S, T, P> GenericProvider for P\n\nwhere\n\n S: api::Span + Send + Sync,\n\n T: api::Tracer<Span = S> + Send + Sync,\n\n P: api::Provider<Tracer = T>,\n\n{\n\n /// Return a boxed generic tracer\n\n fn get_tracer_boxed(&self, name: &'static str) -> Box<dyn GenericTracer + Send + Sync> {\n\n Box::new(self.get_tracer(name))\n\n }\n\n}\n\n\n\n/// Represents the globally configured [`Provider`] instance for this\n\n/// application. This allows generic tracing through the returned\n\n/// [`BoxedTracer`] instances.\n", "file_path": "src/global.rs", "rank": 37, "score": 113193.70082119483 }, { "content": "/// Allows a specific [`Tracer`] to be used generically by [`BoxedTracer`]\n\n/// instances by mirroring the interface and boxing the return types.\n\n///\n\n/// [`Tracer`]: ../api/trace/tracer/trait.Tracer.html\n\n/// [`BoxedTracer`]: struct.BoxedTracer.html\n\npub trait GenericTracer: fmt::Debug + 'static {\n\n /// Create a new invalid span for use in cases where there are no active spans.\n\n fn invalid_boxed(&self) -> Box<DynSpan>;\n\n\n\n /// Returns a trait object so the underlying implementation can be swapped\n\n /// out at runtime.\n\n fn start_with_context_boxed(&self, name: &str, cx: &api::Context) -> Box<DynSpan>;\n\n\n\n /// Returns a trait object so the underlying implementation can be swapped\n\n /// out at runtime.\n\n fn build_with_context_boxed(\n\n &self,\n\n builder: api::SpanBuilder,\n\n cx: &api::Context,\n\n ) -> Box<DynSpan>;\n\n}\n\n\n\nimpl<S, T> GenericTracer for T\n\nwhere\n\n S: api::Span + Send + Sync,\n", "file_path": "src/global.rs", "rank": 38, "score": 113193.64151121458 }, { "content": "//! ### FromBytes\n\n//!\n\n//! Creates a value from the given on-the-wire encoded representation.\n\n//!\n\n//! If the value could not be parsed, the underlying implementation\n\n//! SHOULD decide to return ether an empty value, an invalid value, or\n\n//! a valid value.\n\n//!\n\n//! Required arguments:\n\n//!\n\n//! - on-the-wire byte representation of the value.\n\n//!\n\n//! Returns a value deserialized from bytes.\n\n//!\n\n\n\n#[cfg(feature = \"base64\")]\n\npub mod base64_format;\n\npub mod binary_propagator;\n", "file_path": "src/experimental/api/context/propagation/mod.rs", "rank": 39, "score": 111859.43816757594 }, { "content": "//! # OpenTelemetry Experimental Propagator interface\n\n//!\n\n//! ## Binary Format\n\n//!\n\n//! `BinaryFormat` is a formatter to serialize and deserialize a value\n\n//! into a binary format.\n\n//!\n\n//! `BinaryFormat` MUST expose the APIs that serializes values into bytes,\n\n//! and deserializes values from bytes.\n\n//!\n\n//! ### ToBytes\n\n//!\n\n//! Serializes the given value into the on-the-wire representation.\n\n//!\n\n//! Required arguments:\n\n//!\n\n//! - the value to serialize, can be `SpanContext` or `DistributedContext`.\n\n//!\n\n//! 
Returns the on-the-wire byte representation of the value.\n\n//!\n", "file_path": "src/experimental/api/context/propagation/mod.rs", "rank": 40, "score": 111854.62862040944 }, { "content": "/// Interface for constructing `Span`s.\n\npub trait Tracer: fmt::Debug + 'static {\n\n /// The `Span` type used by this `Tracer`.\n\n type Span: api::Span;\n\n\n\n /// Returns a span with an invalid `SpanContext`. Used by functions that\n\n /// need to return a default span like `get_active_span` if no span is present.\n\n fn invalid(&self) -> Self::Span;\n\n\n\n /// Starts a new `Span`.\n\n ///\n\n /// By default the currently active `Span` is set as the new `Span`'s\n\n /// parent. The `Tracer` MAY provide other default options for newly\n\n /// created `Span`s.\n\n ///\n\n /// `Span` creation MUST NOT set the newly created `Span` as the currently\n\n /// active `Span` by default, but this functionality MAY be offered additionally\n\n /// as a separate operation.\n\n ///\n\n /// Each span has zero or one parent spans and zero or more child spans, which\n\n /// represent causally related operations. A tree of related spans comprises a\n", "file_path": "src/api/trace/tracer.rs", "rank": 41, "score": 111200.61449929097 }, { "content": "/// An interface to create `Tracer` instances.\n\npub trait Provider: fmt::Debug + 'static {\n\n /// The `Tracer` type that this `Provider` will return.\n\n type Tracer: api::Tracer;\n\n\n\n /// Creates a named tracer instance of `Self::Tracer`.\n\n /// If the name is an empty string then provider uses default name.\n\n fn get_tracer(&self, name: &'static str) -> Self::Tracer;\n\n}\n", "file_path": "src/api/trace/provider.rs", "rank": 42, "score": 111200.61449929097 }, { "content": "/// Returns [`NoopMeter`] for now\n\n///\n\n/// [`NoopMeter`]: ../api/trace/noop/struct.NoopMeter.html\n\npub fn global_meter() -> crate::api::NoopMeter {\n\n crate::api::NoopMeter {}\n\n}\n", "file_path": "src/global.rs", "rank": 43, "score": 108884.29751012381 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tonic_build::compile_protos(\"proto/helloworld.proto\")?;\n\n Ok(())\n\n}\n", "file_path": "examples/grpc/build.rs", "rank": 44, "score": 105521.98126487565 }, { "content": "/// Methods for storing and retrieving trace data in a context.\n\npub trait TraceContextExt {\n\n /// Returns a clone of the current context with the included span.\n\n ///\n\n /// This is useful for building tracers.\n\n fn current_with_span<T: api::Span + Send + Sync>(span: T) -> Self;\n\n\n\n /// Returns a clone of this context with the included span.\n\n ///\n\n /// This is useful for building tracers.\n\n fn with_span<T: api::Span + Send + Sync>(&self, span: T) -> Self;\n\n\n\n /// Returns a reference to this context's span, or the default no-op span if\n\n /// none has been set.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use opentelemetry::{api, api::{Context, Provider, TraceContextExt, Tracer}, sdk};\n\n ///\n\n /// // returns a reference to an empty span by default\n", "file_path": "src/api/trace/context.rs", "rank": 45, "score": 104537.5778814682 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tonic_build::compile_protos(\"proto/helloworld.proto\")?;\n\n Ok(())\n\n}\n", "file_path": "examples/tracing-grpc/build.rs", "rank": 46, "score": 103741.2936446504 }, { "content": "fn tracing_init() -> Result<(), Box<dyn std::error::Error>> {\n\n let builder = opentelemetry_jaeger::Exporter::builder()\n\n 
.with_agent_endpoint(\"127.0.0.1:6831\".parse().unwrap());\n\n\n\n let exporter = builder\n\n .with_process(opentelemetry_jaeger::Process {\n\n service_name: \"grpc-server\".to_string(),\n\n tags: vec![KeyValue::new(\"version\", \"0.1.0\")],\n\n })\n\n .init()?;\n\n\n\n // For the demonstration, use `Sampler::AlwaysOn` sampler to sample all traces. In a production\n\n // application, use `Sampler::ParentOrElse` or `Sampler::Probability` with a desired probability.\n\n let provider = sdk::Provider::builder()\n\n .with_simple_exporter(exporter)\n\n .with_config(sdk::Config {\n\n default_sampler: Box::new(Sampler::AlwaysOn),\n\n ..Default::default()\n\n })\n\n .build();\n", "file_path": "examples/grpc/src/server.rs", "rank": 47, "score": 102051.72705228027 }, { "content": "fn tracing_init() -> Result<(), Box<dyn std::error::Error>> {\n\n let builder = opentelemetry_jaeger::Exporter::builder()\n\n .with_agent_endpoint(\"127.0.0.1:6831\".parse().unwrap());\n\n\n\n let exporter = builder\n\n .with_process(opentelemetry_jaeger::Process {\n\n service_name: \"grpc-client\".to_string(),\n\n tags: vec![KeyValue::new(\"version\", \"0.1.0\")],\n\n })\n\n .init()?;\n\n\n\n // For the demonstration, use `Sampler::AlwaysOn` sampler to sample all traces. In a production\n\n // application, use `Sampler::ParentOrElse` or `Sampler::Probability` with a desired probability.\n\n let provider = sdk::Provider::builder()\n\n .with_simple_exporter(exporter)\n\n .with_config(sdk::Config {\n\n default_sampler: Box::new(Sampler::AlwaysOn),\n\n ..Default::default()\n\n })\n\n .build();\n", "file_path": "examples/grpc/src/client.rs", "rank": 48, "score": 102051.72705228027 }, { "content": "/// Sets the given [`Provider`] instance as the current global provider.\n\n///\n\n/// [`Provider`]: ../api/trace/provider/trait.Provider.html\n\npub fn set_provider<P, T, S>(new_provider: P)\n\nwhere\n\n S: api::Span + Send + Sync,\n\n T: api::Tracer<Span = S> + Send + Sync,\n\n P: api::Provider<Tracer = T> + Send + Sync,\n\n{\n\n let mut global_provider = GLOBAL_TRACER_PROVIDER\n\n .write()\n\n .expect(\"GLOBAL_TRACER_PROVIDER RwLock poisoned\");\n\n *global_provider = GlobalProvider::new(new_provider);\n\n}\n\n\n", "file_path": "src/global.rs", "rank": 49, "score": 101137.37836779881 }, { "content": "fn tracing_init() -> Result<(), Box<dyn std::error::Error>> {\n\n let builder = opentelemetry_jaeger::Exporter::builder()\n\n .with_agent_endpoint(\"127.0.0.1:6831\".parse().unwrap());\n\n\n\n let exporter = builder\n\n .with_process(opentelemetry_jaeger::Process {\n\n service_name: \"grpc-server\".to_string(),\n\n tags: vec![KeyValue::new(\"version\", \"0.1.0\")],\n\n })\n\n .init()?;\n\n\n\n // For the demonstration, use `Sampler::Always` sampler to sample all traces. 
In a production\n\n // application, use `Sampler::Parent` or `Sampler::Probability` with a desired probability.\n\n let provider = sdk::Provider::builder()\n\n .with_simple_exporter(exporter)\n\n .with_config(sdk::Config {\n\n default_sampler: Box::new(Sampler::Always),\n\n ..Default::default()\n\n })\n\n .build();\n\n let tracer = provider.get_tracer(\"grpc-server\");\n\n\n\n let opentelemetry = tracing_opentelemetry::layer().with_tracer(tracer);\n\n tracing_subscriber::registry()\n\n .with(opentelemetry)\n\n .try_init()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/tracing-grpc/src/server.rs", "rank": 50, "score": 100446.46173400714 }, { "content": "fn tracing_init() -> Result<(), Box<dyn std::error::Error>> {\n\n let builder = opentelemetry_jaeger::Exporter::builder()\n\n .with_agent_endpoint(\"127.0.0.1:6831\".parse().unwrap());\n\n\n\n let exporter = builder\n\n .with_process(opentelemetry_jaeger::Process {\n\n service_name: \"grpc-client\".to_string(),\n\n tags: vec![KeyValue::new(\"version\", \"0.1.0\")],\n\n })\n\n .init()?;\n\n\n\n // For the demonstration, use `Sampler::Always` sampler to sample all traces. In a production\n\n // application, use `Sampler::Parent` or `Sampler::Probability` with a desired probability.\n\n let provider = sdk::Provider::builder()\n\n .with_simple_exporter(exporter)\n\n .with_config(sdk::Config {\n\n default_sampler: Box::new(Sampler::Always),\n\n ..Default::default()\n\n })\n\n .build();\n\n let tracer = provider.get_tracer(\"grpc-client\");\n\n\n\n let opentelemetry = tracing_opentelemetry::layer().with_tracer(tracer);\n\n tracing_subscriber::registry()\n\n .with(opentelemetry)\n\n .try_init()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/tracing-grpc/src/client.rs", "rank": 51, "score": 100446.46173400714 }, { "content": "/// Meter is an interface to the metrics portion of the OpenTelemetry SDK.\n\n///\n\n/// The Meter interface allows creating of a registered metric instrument using methods specific to\n\n/// each kind of metric. There are six constructors representing the three kinds of instrument\n\n/// taking either floating point or integer inputs, see the detailed design below.\n\n///\n\n/// Binding instruments to a single Meter instance has two benefits:\n\n///\n\n/// 1. Instruments can be exported from the zero state, prior to first use, with no explicit\n\n/// Register call\n\n/// 2. The component name provided by the named Meter satisfies a namespace requirement\n\n///\n\n/// The recommended practice is to define structures to contain the instruments in use and keep\n\n/// references only to the instruments that are specifically needed.\n\n///\n\n/// We recognize that many existing metric systems support allocating metric instruments statically\n\n/// and providing the Meter interface at the time of use. In this example, typical of statsd\n\n/// clients, existing code may not be structured with a convenient place to store new metric\n\n/// instruments. Where this becomes a burden, it is recommended to use the global meter factory to\n\n/// construct a static named Meter, to construct metric instruments.\n\n///\n\n/// The situation is similar for users of Prometheus clients, where instruments are allocated\n\n/// statically and there is an implicit global. Such code may not have access to the appropriate\n\n/// Meter where instruments are defined. 
Where this becomes a burden, it is recommended to use the\n\n/// global meter factory to construct a static named Meter, to construct metric instruments.\n\n///\n\n/// Applications are expected to construct long-lived instruments. Instruments are considered\n\n/// permanent for the lifetime of a SDK, there is no method to delete them.\n\npub trait Meter {\n\n /// The `LabelSet` data type for this meter.\n\n type LabelSet: LabelSet;\n\n /// The `I64Counter` data type for this meter.\n\n type I64Counter: Counter<i64, Self::LabelSet>;\n\n /// The `F64Counter` data type for this meter.\n\n type F64Counter: Counter<f64, Self::LabelSet>;\n\n /// The `I64Gauge` data type for this meter.\n\n type I64Gauge: Gauge<i64, Self::LabelSet>;\n\n /// The `F64Gauge` data type for this meter.\n\n type F64Gauge: Gauge<f64, Self::LabelSet>;\n\n /// The `I64Measure` data type for this meter.\n\n type I64Measure: Measure<i64, Self::LabelSet>;\n\n /// The `F64Measure` data type for this meter.\n\n type F64Measure: Measure<f64, Self::LabelSet>;\n\n\n\n /// Returns a reference to a set of labels that cannot be read by the application.\n\n fn labels(&self, key_values: Vec<api::KeyValue>) -> Self::LabelSet;\n\n\n\n /// Creates a new `i64` counter with a given name and customized with passed options.\n", "file_path": "src/api/metrics/mod.rs", "rank": 52, "score": 98601.65003848124 }, { "content": "/// `LabelSet` is an implementation-level interface that represents a\n\n/// set of `KeyValue` for use as pre-defined labels in the metrics API.\n\npub trait LabelSet {}\n\n\n\n/// `MetricOptions` contains some options for metrics of any kind.\n\n#[derive(Default, Debug)]\n\npub struct MetricOptions {\n\n /// Description is an optional field describing the metric instrument.\n\n pub description: String,\n\n\n\n /// Unit is an optional field describing the metric instrument.\n\n /// Valid values are specified according to the\n\n /// [UCUM](http://unitsofmeasure.org/ucum.html).\n\n pub unit: api::Unit,\n\n\n\n /// Keys are dimension names for the given metric.\n\n pub keys: Vec<api::Key>,\n\n\n\n /// Alternate defines the property of metric value dependent on\n\n /// a metric type.\n\n ///\n\n /// - for `Counter`, `true` implies that the metric is an up-down\n", "file_path": "src/api/metrics/mod.rs", "rank": 53, "score": 96394.52487467944 }, { "content": "/// The implementation-level interface to Set/Add/Record individual\n\n/// metrics with precomputed labels.\n\npub trait InstrumentHandle {\n\n /// Allows the SDK to observe a single metric event.\n\n fn record_one(&self, value: MeasurementValue);\n\n}\n\n\n", "file_path": "src/api/metrics/mod.rs", "rank": 54, "score": 96390.43747843416 }, { "content": "/// The implementation-level interface to Set/Add/Record individual\n\n/// metrics without precomputed labels.\n\npub trait Instrument<LS> {\n\n /// Allows the SDK to observe a single metric event for a given set of labels.\n\n fn record_one(&self, value: MeasurementValue, label_set: &LS);\n\n}\n\n\n", "file_path": "src/api/metrics/mod.rs", "rank": 55, "score": 94849.47238441941 }, { "content": "pub trait CollectorSyncHandler {\n\n fn handle_submit_batches(&self, batches: Vec<Batch>) -> thrift::Result<Vec<BatchSubmitResponse>>;\n\n}\n\n\n\npub struct CollectorSyncProcessor<H: CollectorSyncHandler> {\n\n handler: H,\n\n}\n\n\n\nimpl <H: CollectorSyncHandler> CollectorSyncProcessor<H> {\n\n pub fn new(handler: H) -> CollectorSyncProcessor<H> {\n\n CollectorSyncProcessor {\n\n handler,\n\n }\n\n }\n\n fn 
process_submit_batches(&self, incoming_sequence_number: i32, i_prot: &mut dyn TInputProtocol, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {\n\n TCollectorProcessFunctions::process_submit_batches(&self.handler, incoming_sequence_number, i_prot, o_prot)\n\n }\n\n}\n\n\n\npub struct TCollectorProcessFunctions;\n", "file_path": "opentelemetry-jaeger/src/thrift/jaeger.rs", "rank": 56, "score": 93091.32738292446 }, { "content": "pub trait AgentSyncHandler {\n\n fn handle_emit_zipkin_batch(&self, spans: Vec<zipkincore::Span>) -> thrift::Result<()>;\n\n fn handle_emit_batch(&self, batch: jaeger::Batch) -> thrift::Result<()>;\n\n}\n\n\n\npub struct AgentSyncProcessor<H: AgentSyncHandler> {\n\n handler: H,\n\n}\n\n\n\nimpl <H: AgentSyncHandler> AgentSyncProcessor<H> {\n\n pub fn new(handler: H) -> AgentSyncProcessor<H> {\n\n AgentSyncProcessor {\n\n handler,\n\n }\n\n }\n\n fn process_emit_zipkin_batch(&self, incoming_sequence_number: i32, i_prot: &mut dyn TInputProtocol, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {\n\n TAgentProcessFunctions::process_emit_zipkin_batch(&self.handler, incoming_sequence_number, i_prot, o_prot)\n\n }\n\n fn process_emit_batch(&self, incoming_sequence_number: i32, i_prot: &mut dyn TInputProtocol, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {\n\n TAgentProcessFunctions::process_emit_batch(&self.handler, incoming_sequence_number, i_prot, o_prot)\n", "file_path": "opentelemetry-jaeger/src/thrift/agent.rs", "rank": 57, "score": 93091.32738292446 }, { "content": "fn build_tags(span_data: &Arc<trace::SpanData>) -> Option<Vec<jaeger::Tag>> {\n\n let mut user_overrides = UserOverrides::default();\n\n // TODO determine if namespacing is required to avoid collisions with set attributes\n\n let mut tags = span_data\n\n .attributes\n\n .iter()\n\n .map(|(k, v)| {\n\n user_overrides.record_attr(k.as_str());\n\n api::KeyValue::new(k.clone(), v.clone()).into()\n\n })\n\n .chain(\n\n span_data\n\n .resource\n\n .iter()\n\n .map(|(k, v)| api::KeyValue::new(k.clone(), v.clone()).into()),\n\n )\n\n .collect::<Vec<_>>();\n\n\n\n // Ensure error status is set\n\n if span_data.status_code != api::StatusCode::OK && !user_overrides.error {\n", "file_path": "opentelemetry-jaeger/src/lib.rs", "rank": 58, "score": 92811.34684053465 }, { "content": "fn events_to_logs(events: &sdk::EvictedQueue<api::Event>) -> Option<Vec<jaeger::Log>> {\n\n if events.is_empty() {\n\n None\n\n } else {\n\n Some(events.iter().cloned().map(Into::into).collect())\n\n }\n\n}\n", "file_path": "opentelemetry-jaeger/src/lib.rs", "rank": 59, "score": 91265.66862186776 }, { "content": "pub trait TCollectorSyncClient {\n\n fn submit_batches(&mut self, batches: Vec<Batch>) -> thrift::Result<Vec<BatchSubmitResponse>>;\n\n}\n\n\n", "file_path": "opentelemetry-jaeger/src/thrift/jaeger.rs", "rank": 60, "score": 91215.1537215801 }, { "content": "pub trait ZipkinCollectorSyncHandler {\n\n fn handle_submit_zipkin_batch(&self, spans: Vec<Span>) -> thrift::Result<Vec<Response>>;\n\n}\n\n\n\npub struct ZipkinCollectorSyncProcessor<H: ZipkinCollectorSyncHandler> {\n\n handler: H,\n\n}\n\n\n\nimpl <H: ZipkinCollectorSyncHandler> ZipkinCollectorSyncProcessor<H> {\n\n pub fn new(handler: H) -> ZipkinCollectorSyncProcessor<H> {\n\n ZipkinCollectorSyncProcessor {\n\n handler,\n\n }\n\n }\n\n fn process_submit_zipkin_batch(&self, incoming_sequence_number: i32, i_prot: &mut dyn TInputProtocol, o_prot: &mut dyn TOutputProtocol) -> thrift::Result<()> {\n\n 
TZipkinCollectorProcessFunctions::process_submit_zipkin_batch(&self.handler, incoming_sequence_number, i_prot, o_prot)\n\n }\n\n}\n\n\n\npub struct TZipkinCollectorProcessFunctions;\n", "file_path": "opentelemetry-jaeger/src/thrift/zipkincore.rs", "rank": 61, "score": 91215.1537215801 }, { "content": "pub trait TAgentSyncClient {\n\n fn emit_zipkin_batch(&mut self, spans: Vec<zipkincore::Span>) -> thrift::Result<()>;\n\n fn emit_batch(&mut self, batch: jaeger::Batch) -> thrift::Result<()>;\n\n}\n\n\n", "file_path": "opentelemetry-jaeger/src/thrift/agent.rs", "rank": 62, "score": 91215.1537215801 }, { "content": "fn links_to_references(links: &sdk::EvictedQueue<api::Link>) -> Option<Vec<jaeger::SpanRef>> {\n\n if !links.is_empty() {\n\n let refs = links\n\n .iter()\n\n .map(|link| {\n\n let span_context = link.span_context();\n\n let trace_id = span_context.trace_id().to_u128();\n\n let trace_id_high = (trace_id >> 64) as i64;\n\n let trace_id_low = trace_id as i64;\n\n\n\n // TODO: properly set the reference type when specs are defined\n\n // see https://github.com/open-telemetry/opentelemetry-specification/issues/65\n\n jaeger::SpanRef::new(\n\n jaeger::SpanRefType::ChildOf,\n\n trace_id_low,\n\n trace_id_high,\n\n span_context.span_id().to_u64() as i64,\n\n )\n\n })\n\n .collect();\n\n Some(refs)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "opentelemetry-jaeger/src/lib.rs", "rank": 63, "score": 89940.8256130107 }, { "content": "pub trait TAgentSyncClientMarker {}\n\n\n\npub struct AgentSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n _i_prot: IP,\n\n _o_prot: OP,\n\n _sequence_number: i32,\n\n}\n\n\n\nimpl <IP, OP> AgentSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n pub fn new(input_protocol: IP, output_protocol: OP) -> AgentSyncClient<IP, OP> {\n\n AgentSyncClient { _i_prot: input_protocol, _o_prot: output_protocol, _sequence_number: 0 }\n\n }\n\n}\n\n\n\nimpl <IP, OP> TThriftClient for AgentSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n fn i_prot_mut(&mut self) -> &mut dyn TInputProtocol { &mut self._i_prot }\n\n fn o_prot_mut(&mut self) -> &mut dyn TOutputProtocol { &mut self._o_prot }\n\n fn sequence_number(&self) -> i32 { self._sequence_number }\n\n fn increment_sequence_number(&mut self) -> i32 { self._sequence_number += 1; self._sequence_number }\n\n}\n", "file_path": "opentelemetry-jaeger/src/thrift/agent.rs", "rank": 64, "score": 89437.5082152256 }, { "content": "pub trait TZipkinCollectorSyncClient {\n\n fn submit_zipkin_batch(&mut self, spans: Vec<Span>) -> thrift::Result<Vec<Response>>;\n\n}\n\n\n", "file_path": "opentelemetry-jaeger/src/thrift/zipkincore.rs", "rank": 65, "score": 89437.5082152256 }, { "content": "pub trait TCollectorSyncClientMarker {}\n\n\n\npub struct CollectorSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n _i_prot: IP,\n\n _o_prot: OP,\n\n _sequence_number: i32,\n\n}\n\n\n\nimpl <IP, OP> CollectorSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n pub fn new(input_protocol: IP, output_protocol: OP) -> CollectorSyncClient<IP, OP> {\n\n CollectorSyncClient { _i_prot: input_protocol, _o_prot: output_protocol, _sequence_number: 0 }\n\n }\n\n}\n\n\n\nimpl <IP, OP> TThriftClient for CollectorSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n fn i_prot_mut(&mut self) -> &mut dyn TInputProtocol { &mut self._i_prot }\n\n fn o_prot_mut(&mut self) -> &mut dyn TOutputProtocol { &mut self._o_prot }\n\n fn sequence_number(&self) 
-> i32 { self._sequence_number }\n\n fn increment_sequence_number(&mut self) -> i32 { self._sequence_number += 1; self._sequence_number }\n\n}\n", "file_path": "opentelemetry-jaeger/src/thrift/jaeger.rs", "rank": 66, "score": 89437.5082152256 }, { "content": "pub trait TZipkinCollectorSyncClientMarker {}\n\n\n\npub struct ZipkinCollectorSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n _i_prot: IP,\n\n _o_prot: OP,\n\n _sequence_number: i32,\n\n}\n\n\n\nimpl <IP, OP> ZipkinCollectorSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n pub fn new(input_protocol: IP, output_protocol: OP) -> ZipkinCollectorSyncClient<IP, OP> {\n\n ZipkinCollectorSyncClient { _i_prot: input_protocol, _o_prot: output_protocol, _sequence_number: 0 }\n\n }\n\n}\n\n\n\nimpl <IP, OP> TThriftClient for ZipkinCollectorSyncClient<IP, OP> where IP: TInputProtocol, OP: TOutputProtocol {\n\n fn i_prot_mut(&mut self) -> &mut dyn TInputProtocol { &mut self._i_prot }\n\n fn o_prot_mut(&mut self) -> &mut dyn TOutputProtocol { &mut self._o_prot }\n\n fn sequence_number(&self) -> i32 { self._sequence_number }\n\n fn increment_sequence_number(&mut self) -> i32 { self._sequence_number += 1; self._sequence_number }\n\n}\n", "file_path": "opentelemetry-jaeger/src/thrift/zipkincore.rs", "rank": 67, "score": 87750.82806622027 }, { "content": " ///\n\n /// [`Context`]: ../../struct.Context.html\n\n /// [`Carrier`]: ../trait.Carrier.html\n\n fn extract(&self, carrier: &dyn api::Carrier) -> Context {\n\n self.extract_with_context(&Context::current(), carrier)\n\n }\n\n\n\n /// Retrieves encoded data using the provided [`Carrier`]. If no data for this\n\n /// format was retrieved OR if the retrieved data is invalid, then the given\n\n /// [`Context`] is returned.\n\n ///\n\n /// [`Context`]: ../../struct.Context.html\n\n /// [`Carrier`]: ../trait.Carrier.html\n\n fn extract_with_context(&self, cx: &Context, carrier: &dyn api::Carrier) -> Context;\n\n}\n", "file_path": "src/api/context/propagation/text_propagator.rs", "rank": 75, "score": 84460.80819828252 }, { "content": "//! # Text Propagator\n\n//!\n\n//! `HttpTextFormat` is a formatter to serialize and deserialize a value into a\n\n//! 
text format.\n\nuse crate::{api, api::Context};\n\nuse std::fmt::Debug;\n\n\n\n/// Methods to inject and extract a value as text into carriers that travel\n\n/// in-band across process boundaries.\n", "file_path": "src/api/context/propagation/text_propagator.rs", "rank": 76, "score": 84454.09835397347 }, { "content": "struct HttpHeaderMapCarrier<'a>(&'a tonic::metadata::MetadataMap);\n\nimpl<'a> api::Carrier for HttpHeaderMapCarrier<'a> {\n\n fn get(&self, key: &'static str) -> Option<&str> {\n\n self.0\n\n .get(key.to_lowercase().as_str())\n\n .and_then(|value| value.to_str().ok())\n\n }\n\n\n\n fn set(&mut self, _key: &'static str, _value: String) {\n\n unimplemented!()\n\n }\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n tracing_init()?;\n\n let addr = \"[::1]:50051\".parse()?;\n\n let greeter = MyGreeter::default();\n\n\n\n Server::builder()\n\n .add_service(GreeterServer::new(greeter))\n\n .serve(addr)\n\n .await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/tracing-grpc/src/server.rs", "rank": 78, "score": 83336.98341465664 }, { "content": " api::SpanId::from_u64(span_id),\n\n trace_flags,\n\n true,\n\n );\n\n\n\n if span_context.is_valid() {\n\n span_context\n\n } else {\n\n api::SpanContext::empty_context()\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[rustfmt::skip]\n\n fn to_bytes_data() -> Vec<(api::SpanContext, [u8; 29])> {\n\n vec![\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 79, "score": 82486.86310210776 }, { "content": " 0x00, 0x00, 0x4b, 0xf9, 0x2f, 0x35, 0x77, 0xb3, 0x4d,\n\n ]),\n\n ]\n\n }\n\n\n\n #[test]\n\n fn to_bytes_conversion() {\n\n let propagator = BinaryPropagator::new();\n\n\n\n for (context, data) in to_bytes_data() {\n\n assert_eq!(propagator.to_bytes(&context), data)\n\n }\n\n }\n\n\n\n #[test]\n\n fn from_bytes_conversion() {\n\n let propagator = BinaryPropagator::new();\n\n\n\n for (context, data) in from_bytes_data() {\n\n assert_eq!(propagator.from_bytes(data), context)\n\n }\n\n }\n\n}\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 80, "score": 82476.5129714257 }, { "content": "//! # Binary Propagator\n\n//!\n\n//! `BinaryFormat` is a formatter to serialize and deserialize a\n\n//! value into a binary format.\n\n//!\n\n//! `BinaryFormat` MUST expose the APIs that serializes values into bytes,\n\n//! 
and deserializes values from bytes.\n\nuse crate::api;\n\nuse std::convert::TryInto;\n\n\n\n/// Used to serialize and deserialize `SpanContext`s to and from a binary\n\n/// representation.\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 81, "score": 82473.42933694946 }, { "content": "\n\n #[rustfmt::skip]\n\n fn from_bytes_data() -> Vec<(api::SpanContext, Vec<u8>)> {\n\n vec![\n\n // Future version of the proto\n\n (api::SpanContext::new(api::TraceId::from_u128(0x4bf9_2f35_77b3_4da6_a3ce_929d_0e0e_4736), api::SpanId::from_u64(0x00f0_67aa_0ba9_02b7), 1, true), vec![\n\n 0x02, 0x00, 0x4b, 0xf9, 0x2f, 0x35, 0x77, 0xb3, 0x4d, 0xa6, 0xa3, 0xce, 0x92, 0x9d, 0x0e, 0x0e, 0x47, 0x36,\n\n 0x01, 0x00, 0xf0, 0x67, 0xaa, 0x0b, 0xa9, 0x02, 0xb7,\n\n 0x02, 0x01,\n\n ]),\n\n // current version with sampled\n\n (api::SpanContext::new(api::TraceId::from_u128(0x4bf9_2f35_77b3_4da6_a3ce_929d_0e0e_4736), api::SpanId::from_u64(0x00f0_67aa_0ba9_02b7), 1, true), vec![\n\n 0x02, 0x00, 0x4b, 0xf9, 0x2f, 0x35, 0x77, 0xb3, 0x4d, 0xa6, 0xa3, 0xce, 0x92, 0x9d, 0x0e, 0x0e, 0x47, 0x36,\n\n 0x01, 0x00, 0xf0, 0x67, 0xaa, 0x0b, 0xa9, 0x02, 0xb7,\n\n 0x02, 0x01,\n\n ]),\n\n // valid context without option\n\n (api::SpanContext::new(api::TraceId::from_u128(0x4bf9_2f35_77b3_4da6_a3ce_929d_0e0e_4736), api::SpanId::from_u64(0x00f0_67aa_0ba9_02b7), 0, true), vec![\n\n 0x00, 0x00, 0x4b, 0xf9, 0x2f, 0x35, 0x77, 0xb3, 0x4d, 0xa6, 0xa3, 0xce, 0x92, 0x9d, 0x0e, 0x0e, 0x47, 0x36,\n\n 0x01, 0x00, 0xf0, 0x67, 0xaa, 0x0b, 0xa9, 0x02, 0xb7,\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 82, "score": 82473.32517446765 }, { "content": " /// Serializes span context into a byte array and returns the array.\n\n fn to_bytes(&self, context: &api::SpanContext) -> [u8; 29] {\n\n let mut res = [0u8; 29];\n\n if !context.is_valid() {\n\n return res;\n\n }\n\n res[2..18].copy_from_slice(&context.trace_id().to_u128().to_be_bytes());\n\n res[18] = 1;\n\n res[19..27].copy_from_slice(&context.span_id().to_u64().to_be_bytes());\n\n res[27] = 2;\n\n res[28] = context.trace_flags();\n\n\n\n res\n\n }\n\n\n\n /// Deserializes a span context from a byte array.\n\n fn from_bytes(&self, bytes: Vec<u8>) -> api::SpanContext {\n\n if bytes.is_empty() {\n\n return api::SpanContext::empty_context();\n\n }\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 83, "score": 82473.03149218654 }, { "content": " let trace_id: u128;\n\n let mut span_id = 0;\n\n let mut trace_flags = 0;\n\n let mut b = &bytes[1..];\n\n if b.len() >= 17 && b[0] == 0 {\n\n trace_id = u128::from_be_bytes(b[1..17].try_into().unwrap());\n\n b = &b[17..];\n\n } else {\n\n return api::SpanContext::empty_context();\n\n }\n\n if b.len() >= 9 && b[0] == 1 {\n\n span_id = u64::from_be_bytes(b[1..9].try_into().unwrap());\n\n b = &b[9..];\n\n }\n\n if b.len() >= 2 && b[0] == 2 {\n\n trace_flags = b[1]\n\n }\n\n\n\n let span_context = api::SpanContext::new(\n\n api::TraceId::from_u128(trace_id),\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 84, "score": 82471.29293589215 }, { "content": " ]),\n\n // zero trace id\n\n (api::SpanContext::empty_context(), vec![\n\n 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\n 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\n 0x02, 0x01,\n\n ]),\n\n // zero span id\n\n (api::SpanContext::empty_context(), vec![\n\n 0x00, 0x00, 0x4b, 0xf9, 
0x2f, 0x35, 0x77, 0xb3, 0x4d, 0xa6, 0xa3, 0xce, 0x92, 0x9d, 0x0e, 0x0e, 0x47, 0x36,\n\n 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\n 0x02, 0x01,\n\n ]),\n\n // wrong trace id field number\n\n (api::SpanContext::empty_context(), vec![\n\n 0x00, 0x01, 0x4b, 0xf9, 0x2f, 0x35, 0x77, 0xb3, 0x4d, 0xa6, 0xa3, 0xce, 0x92, 0x9d, 0x0e, 0x0e, 0x47, 0x36,\n\n 0x01, 0x00, 0xf0, 0x67, 0xaa, 0x0b, 0xa9, 0x02, 0xb7,\n\n ]),\n\n // short byte array\n\n (api::SpanContext::empty_context(), vec![\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 85, "score": 82470.61625285906 }, { "content": " // Context with sampled\n\n (api::SpanContext::new(\n\n api::TraceId::from_u128(0x4bf9_2f35_77b3_4da6_a3ce_929d_0e0e_4736),\n\n api::SpanId::from_u64(0x00f0_67aa_0ba9_02b7), 1, true), [\n\n 0x00, 0x00, 0x4b, 0xf9, 0x2f, 0x35, 0x77, 0xb3, 0x4d, 0xa6, 0xa3, 0xce, 0x92, 0x9d, 0x0e, 0x0e, 0x47, 0x36,\n\n 0x01, 0x00, 0xf0, 0x67, 0xaa, 0x0b, 0xa9, 0x02, 0xb7,\n\n 0x02, 0x01,\n\n ]),\n\n // Context without sampled\n\n (api::SpanContext::new(\n\n api::TraceId::from_u128(0x4bf9_2f35_77b3_4da6_a3ce_929d_0e0e_4736),\n\n api::SpanId::from_u64(0x00f0_67aa_0ba9_02b7), 0, true), [\n\n 0x00, 0x00, 0x4b, 0xf9, 0x2f, 0x35, 0x77, 0xb3, 0x4d, 0xa6, 0xa3, 0xce, 0x92, 0x9d, 0x0e, 0x0e, 0x47, 0x36,\n\n 0x01, 0x00, 0xf0, 0x67, 0xaa, 0x0b, 0xa9, 0x02, 0xb7,\n\n 0x02, 0x00,\n\n ]),\n\n // Invalid context\n\n (api::SpanContext::empty_context(), [0u8; 29]),\n\n ]\n\n }\n", "file_path": "src/experimental/api/context/propagation/binary_propagator.rs", "rank": 86, "score": 82469.67925753411 }, { "content": "#[derive(Clone, Default)]\n\npub struct Context {\n\n entries: HashMap<TypeId, Arc<dyn Any + Sync + Send>, BuildHasherDefault<IdHasher>>,\n\n}\n\n\n\nimpl Context {\n\n /// Creates an empty `Context`.\n\n ///\n\n /// The context is initially created with a capacity of 0, so it will not\n\n /// allocate. 
Use [`with_value`] to create a new context that has entries.\n\n ///\n\n /// [`with_value`]: struct.Context.html#method.with_value\n\n pub fn new() -> Self {\n\n Context::default()\n\n }\n\n\n\n /// Returns an immutable snapshot of the current thread's context.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/api/context/mod.rs", "rank": 87, "score": 79321.98542812423 }, { "content": " /// assert_eq!(cx_with_a.get::<ValueB>(), None);\n\n ///\n\n /// // The second context now contains both values\n\n /// assert_eq!(cx_with_a_and_b.get::<ValueA>(), Some(&ValueA(\"a\")));\n\n /// assert_eq!(cx_with_a_and_b.get::<ValueB>(), Some(&ValueB(42)));\n\n /// ```\n\n pub fn with_value<T: 'static + Send + Sync>(&self, value: T) -> Self {\n\n let mut new_context = self.clone();\n\n new_context\n\n .entries\n\n .insert(TypeId::of::<T>(), Arc::new(value));\n\n\n\n new_context\n\n }\n\n\n\n /// Replaces the current context on this thread with this context.\n\n ///\n\n /// Dropping the returned [`ContextGuard`] will reset the current context to the\n\n /// previous value.\n\n ///\n", "file_path": "src/api/context/mod.rs", "rank": 88, "score": 79320.99294542753 }, { "content": " /// assert_eq!(all_current_and_b.get::<ValueA>(), Some(&ValueA(\"a\")));\n\n /// assert_eq!(all_current_and_b.get::<ValueB>(), Some(&ValueB(42)));\n\n /// ```\n\n pub fn current_with_value<T: 'static + Send + Sync>(value: T) -> Self {\n\n let mut new_context = Context::current();\n\n new_context\n\n .entries\n\n .insert(TypeId::of::<T>(), Arc::new(value));\n\n\n\n new_context\n\n }\n\n\n\n /// Returns a reference to the entry for the corresponding value type.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use opentelemetry::api::Context;\n\n ///\n\n /// // Given some value types defined in your application\n", "file_path": "src/api/context/mod.rs", "rank": 89, "score": 79320.37287302206 }, { "content": " /// ```\n\n /// use opentelemetry::api::Context;\n\n ///\n\n /// #[derive(Debug, PartialEq)]\n\n /// struct ValueA(&'static str);\n\n ///\n\n /// fn do_work() {\n\n /// assert_eq!(Context::current().get(), Some(&ValueA(\"a\")));\n\n /// }\n\n ///\n\n /// let _guard = Context::new().with_value(ValueA(\"a\")).attach();\n\n /// do_work()\n\n /// ```\n\n pub fn current() -> Self {\n\n get_current(|cx| cx.clone())\n\n }\n\n\n\n /// Returns a clone of the current thread's context with the given value.\n\n ///\n\n /// This is a more efficient form of `Context::current().with_value(value)`\n", "file_path": "src/api/context/mod.rs", "rank": 90, "score": 79319.60988129764 }, { "content": "//! let current = Context::current();\n\n//! assert_eq!(current.get::<ValueA>(), Some(&ValueA(\"a\")));\n\n//! assert_eq!(current.get::<ValueB>(), None);\n\n//! ```\n\n\n\nuse std::any::{Any, TypeId};\n\nuse std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::hash::{BuildHasherDefault, Hasher};\n\nuse std::sync::Arc;\n\n\n\npub mod propagation;\n\n\n\nthread_local! 
{\n\n static CURRENT_CONTEXT: RefCell<Context> = RefCell::new(Context::default());\n\n static DEFAULT_CONTEXT: Context = Context::default();\n\n}\n\n\n\n/// An execution-scoped collection of values.\n", "file_path": "src/api/context/mod.rs", "rank": 91, "score": 79319.14035544454 }, { "content": " f.debug_struct(\"Context\")\n\n .field(\"entries\", &self.entries.len())\n\n .finish()\n\n }\n\n}\n\n\n\n/// A guard that resets the current context to the prior context when dropped.\n\n#[allow(missing_debug_implementations)]\n\npub struct ContextGuard(Option<Context>);\n\n\n\nimpl Drop for ContextGuard {\n\n fn drop(&mut self) {\n\n if let Some(previous_cx) = self.0.take() {\n\n let _ = CURRENT_CONTEXT.try_with(|current| current.replace(previous_cx));\n\n }\n\n }\n\n}\n\n\n\n/// Executes a closure with a reference to this thread's current context.\n\n///\n\n/// Note: This function will panic if you attempt to attach another context\n\n/// while the context is still borrowed.\n", "file_path": "src/api/context/mod.rs", "rank": 92, "score": 79315.33915584441 }, { "content": " ///\n\n /// Guards do not need to be explicitly dropped:\n\n ///\n\n /// ```\n\n /// use opentelemetry::api::Context;\n\n ///\n\n /// #[derive(Debug, PartialEq)]\n\n /// struct ValueA(&'static str);\n\n ///\n\n /// fn my_function() -> String {\n\n /// // attach a context the duration of this function.\n\n /// let my_cx = Context::new().with_value(ValueA(\"a\"));\n\n /// // NOTE: a variable name after the underscore is **required** or rust\n\n /// // will drop the guard, restoring the previous context _immediately_.\n\n /// let _guard = my_cx.attach();\n\n ///\n\n /// // anything happening in functions we call can still access my_cx...\n\n /// my_other_function();\n\n ///\n\n /// // returning from the function drops the guard, exiting the span.\n", "file_path": "src/api/context/mod.rs", "rank": 93, "score": 79315.27088901224 }, { "content": " /// return \"Hello world\".to_owned();\n\n /// }\n\n ///\n\n /// fn my_other_function() {\n\n /// // ...\n\n /// }\n\n /// ```\n\n /// Sub-scopes may be created to limit the duration for which the span is\n\n /// entered:\n\n ///\n\n /// ```\n\n /// use opentelemetry::api::Context;\n\n ///\n\n /// #[derive(Debug, PartialEq)]\n\n /// struct ValueA(&'static str);\n\n ///\n\n /// let my_cx = Context::new().with_value(ValueA(\"a\"));\n\n ///\n\n /// {\n\n /// let _guard = my_cx.attach();\n", "file_path": "src/api/context/mod.rs", "rank": 94, "score": 79313.64231289606 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn nested_contexts() {\n\n #[derive(Debug, PartialEq)]\n\n struct ValueA(&'static str);\n\n #[derive(Debug, PartialEq)]\n\n struct ValueB(u64);\n\n let _outer_guard = Context::new().with_value(ValueA(\"a\")).attach();\n\n\n\n // Only value `a` is set\n\n let current = Context::current();\n\n assert_eq!(current.get(), Some(&ValueA(\"a\")));\n\n assert_eq!(current.get::<ValueB>(), None);\n\n\n\n {\n\n let _inner_guard = Context::current_with_value(ValueB(42)).attach();\n\n // Both values are set in inner context\n\n let current = Context::current();\n\n assert_eq!(current.get(), Some(&ValueA(\"a\")));\n", "file_path": "src/api/context/mod.rs", "rank": 95, "score": 79313.53792743589 }, { "content": " /// [`ContextGuard`]: struct.ContextGuard.html\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use opentelemetry::api::Context;\n\n ///\n\n /// #[derive(Debug, PartialEq)]\n\n /// struct ValueA(&'static str);\n\n ///\n\n /// let my_cx = 
Context::new().with_value(ValueA(\"a\"));\n\n ///\n\n /// // Set the current thread context\n\n /// let cx_guard = my_cx.attach();\n\n /// assert_eq!(Context::current().get::<ValueA>(), Some(&ValueA(\"a\")));\n\n ///\n\n /// // Drop the guard to restore the previous context\n\n /// drop(cx_guard);\n\n /// assert_eq!(Context::current().get::<ValueA>(), None);\n\n /// ```\n", "file_path": "src/api/context/mod.rs", "rank": 96, "score": 79313.46093255382 }, { "content": " ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use opentelemetry::api::Context;\n\n ///\n\n /// // Given some value types defined in your application\n\n /// #[derive(Debug, PartialEq)]\n\n /// struct ValueA(&'static str);\n\n /// #[derive(Debug, PartialEq)]\n\n /// struct ValueB(u64);\n\n ///\n\n /// // You can create a context with the first value set to \"a\"\n\n /// let cx_with_a = Context::new().with_value(ValueA(\"a\"));\n\n ///\n\n /// // And create another context based on the fist with a new value\n\n /// let cx_with_a_and_b = cx_with_a.with_value(ValueB(42));\n\n ///\n\n /// // The first context is still available and unmodified\n\n /// assert_eq!(cx_with_a.get::<ValueA>(), Some(&ValueA(\"a\")));\n", "file_path": "src/api/context/mod.rs", "rank": 97, "score": 79312.71794527132 }, { "content": " /// #[derive(Debug, PartialEq)]\n\n /// struct ValueA(&'static str);\n\n /// #[derive(Debug, PartialEq)]\n\n /// struct MyUser();\n\n ///\n\n /// let cx = Context::new().with_value(ValueA(\"a\"));\n\n ///\n\n /// // Values can be queried by type\n\n /// assert_eq!(cx.get::<ValueA>(), Some(&ValueA(\"a\")));\n\n ///\n\n /// // And return none if not yet set\n\n /// assert_eq!(cx.get::<MyUser>(), None);\n\n /// ```\n\n pub fn get<T: 'static>(&self) -> Option<&T> {\n\n self.entries\n\n .get(&TypeId::of::<T>())\n\n .and_then(|rc| (&*rc).downcast_ref())\n\n }\n\n\n\n /// Returns a copy of the context with the new value included.\n", "file_path": "src/api/context/mod.rs", "rank": 98, "score": 79312.54399353091 }, { "content": " ///\n\n /// // the current context can access variables in\n\n /// assert_eq!(Context::current().get::<ValueA>(), Some(&ValueA(\"a\")));\n\n ///\n\n /// // exiting the scope drops the guard, detaching the context.\n\n /// }\n\n ///\n\n /// // this is back in the default empty context\n\n /// assert_eq!(Context::current().get::<ValueA>(), None);\n\n /// ```\n\n pub fn attach(self) -> ContextGuard {\n\n let prior = CURRENT_CONTEXT\n\n .try_with(|current| current.replace(self))\n\n .ok();\n\n ContextGuard(prior)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Context {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/api/context/mod.rs", "rank": 99, "score": 79308.11887324379 } ]
Rust
src/hubris/elaborate/pattern_matching/mod.rs
jroesch/hubris
0333d4d26b7d66924acfe065c94d914e0ba011e4
use super::super::ast::{self}; use super::super::core::{self, Term}; use super::{LocalElabCx, Error}; use std::collections::HashMap; struct PatternMatchCx<'ecx, 'cx: 'ecx> { elab_cx: &'ecx mut LocalElabCx<'cx>, } mod renamer; mod simplify; use self::simplify::*; impl<'ecx, 'cx: 'ecx>PatternMatchCx<'ecx, 'cx> { fn new(elab_cx: &'ecx mut LocalElabCx<'cx>) -> PatternMatchCx<'ecx, 'cx> { PatternMatchCx { elab_cx: elab_cx, } } #[inline] fn enter_pattern_scope<F, R>(&mut self, name_and_type: Vec<(ast::Name, core::Term)>, body: F) -> Result<R, Error> where F: FnOnce(&mut PatternMatchCx, Vec<core::Name>) -> Result<R, Error> { let mut locals = vec![]; let old_context = self.elab_cx.locals.clone(); let old_locals_in_order = self.elab_cx.locals_in_order.clone(); for (name, ty) in name_and_type { let repr = match name.clone().repr { ast::NameKind::Qualified(..) => panic!(), ast::NameKind::Unqualified(s) => s, ast::NameKind::Placeholder => "_".to_string(), }; let local = self.elab_cx.cx.ty_cx.local_with_repr_and_mode(repr, ty, core::BindingMode::Explicit); self.elab_cx.locals.insert(name, local.clone()); self.elab_cx.locals_in_order.push(local.clone()); locals.push(local); } let result = try!(body(self, locals)); self.elab_cx.locals = old_context; self.elab_cx.locals_in_order = old_locals_in_order; Ok(result) } fn elaborate_simple_match(&mut self, simple_match: SimpleMatch) -> Result<core::Term, Error> { let SimpleMatch { scrutinee, cases, pattern_type, } = simple_match; let escrutinee = try!(self.elab_cx.elaborate_term(scrutinee)); let scrutinee_ty = try!(self.elab_cx.cx.ty_cx.type_check_term(&escrutinee, None)).1; let (inductive_ty, args) = scrutinee_ty.uncurry(); let inductive_ty = match inductive_ty { Term::Var { name } => name, other => panic!("{}", other), }; let datatype = match self.elab_cx.cx.ty_cx.types.get(&inductive_ty) { None => panic!("can't fine dt decl"), Some(dt) => dt.clone(), }; let ctor_map : HashMap<_, _> = datatype.ctors .clone() .into_iter() .collect(); let cases : Vec<_> = try!(cases.into_iter() .map(|c| self.elaborate_simple_case(c, &scrutinee_ty, &ctor_map)) .collect()); match pattern_type { PatternType::Cases => { let cases_on = inductive_ty.in_scope("cases_on".to_string()).unwrap(); let head = try!(self.elab_cx.apply_implicit_args(cases_on.to_term())); let mut args = vec![escrutinee]; args.extend(cases.into_iter()); let result = Term::apply_all(head, args); debug!("elaborated_match: {}", result); Ok(result) } } } fn simple_pattern_binders(&mut self, simple_pattern: SimplePattern, scrutinee_ty: &core::Term, ctor_map: &HashMap<core::Name, core::Term>) -> Result<Vec<(ast::Name, core::Term)>, Error> { match simple_pattern { SimplePattern::Name(n) => { let elab_name = try!(self.elab_cx.cx.elaborate_global_name(n.clone())); match ctor_map.get(&elab_name) { None => return Ok(vec![(n, scrutinee_ty.clone())]), Some(ctor_ty) => { return Ok(vec![]); } } } SimplePattern::Constructor(ctor, args) => { let elab_name = try!(self.elab_cx.cx.elaborate_global_name(ctor.clone())); match ctor_map.get(&elab_name) { None => panic!("not the right"), Some(ctor_ty) => { debug!("{:?}", ctor_ty.binders()); let (inductive_ty, i_args) = scrutinee_ty.uncurry(); let mut ctor_ty = ctor_ty.clone(); for arg in i_args { match ctor_ty { Term::Forall { term, .. 
} => { debug!("arg {}", arg); ctor_ty = term.instantiate(&arg); } _ => panic!() } } debug!("ctor_ty {}", ctor_ty); let binders = ctor_ty.binders() .unwrap_or(vec![]) .iter() .cloned() .zip(args.into_iter()) .map(|(t, n)| { (n, t.clone()) }).collect(); return Ok(binders); } } } } } fn elaborate_simple_case(&mut self, simple_case: SimpleCase, scrutinee_ty: &core::Term, ctor_map: &HashMap<core::Name, core::Term>) -> Result<core::Term, Error> { let SimpleCase { pattern, rhs, } = simple_case; debug!("pattern: {} rhs: {}", pattern, rhs); let binders = try!(self.simple_pattern_binders( pattern, scrutinee_ty, ctor_map)); for &(ref n, ref ty) in &binders { debug!("{} {}", n, ty); } self.enter_pattern_scope(binders, move |pat_cx, names| { match rhs { SimpleMatchArm::Term(rhs) => Ok(Term::abstract_lambda(names, try!(pat_cx.elab_cx.elaborate_term(rhs)))), SimpleMatchArm::Match(mat) => pat_cx.elaborate_simple_match(mat) } }) } } pub fn elaborate_pattern_match<'ecx>( elab_cx: &mut LocalElabCx<'ecx>, scrutinee: ast::Term, cases: Vec<ast::Case>) -> Result<Term, Error> { let mut pmcx = PatternMatchCx::new(elab_cx); let simplified_match = simplify_match(scrutinee, cases); debug!("simplified_match: {}", simplified_match); pmcx.elaborate_simple_match(simplified_match) }
use super::super::ast::{self}; use super::super::core::{self, Term}; use super::{LocalElabCx, Error}; use std::collections::HashMap; struct PatternMatchCx<'ecx, 'cx: 'ecx> { elab_cx: &'ecx mut LocalElabCx<'cx>, } mod renamer; mod simplify; use self::simplify::*; impl<'ecx, 'cx: 'ecx>PatternMatchCx<'ecx, 'cx> { fn new(elab_cx: &'ecx mut LocalElabCx<'cx>) -> PatternMatchCx<'ecx, 'cx> { PatternMatchCx { elab_cx: elab_cx, } } #[inline] fn enter_pattern_scope<F, R>(&mut self, name_and_type: Vec<(ast::Name, core::Term)>, body: F) -> Result<R, Error> where F: FnOnce(&mut PatternMatchCx, Vec<core::Name>) -> Result<R, Error> { let mut locals = vec![]; let old_context = self.elab_cx.locals.clone(); let old_locals_in_order = self.elab_cx.locals_in_order.clone(); for (name, ty) in name_and_type { let repr = match name.clone().repr { ast::NameKind::Qualified(..) => panic!(), ast::NameKind::Unqualified(s) => s, ast::NameKind::Placeholder => "_".to_string(), }; let local = self.elab_cx.cx.ty_cx.local_with_repr_and_mode(repr, ty, core::BindingMode::Explicit); self.elab_cx.locals.insert(name, local.clone()); self.elab_cx.locals_in_order.push(local.clone()); locals.push(local); } let result = try!(body(self, locals)); self.elab_cx.locals = old_context; self.elab_cx.locals_in_order = old_locals_in_order; Ok(result) } fn elaborate_simple_match(&mut self, simple_match: SimpleMatch) -> Result<core::Term, Error> { let SimpleMatch { scrutinee, cases, pattern_type, } = simple_match; let escrutinee = try!(self.elab_cx.elaborate_term(scrutinee)); let scrutinee_ty = try!(self.elab_cx.cx.ty_cx.type_check_term(&escrutinee, None)).1; let (inductive_ty, args) = scrutinee_ty.uncurry(); let inductive_ty = match inductive_ty { Term::Var { name } => name, other => panic!("{}", other), }; let datatype = match self.elab_cx.cx.ty_cx.types.get(&inductive_ty) { None => panic!("can't fine dt decl"), Some(dt) => dt.clone(), }; let ctor_map : HashMap<_, _> = datatype.ctors .clone() .into_iter() .collect(); let cases : Vec<_> = try!(cases.into_iter() .map(|c| self.elaborate_simple_case(c, &scrutinee_ty, &ctor_map)) .collect()); match pattern_type { PatternType::Cases => { let cases_on = inductive_ty.in_scope("cases_on".to_string()).unwrap(); let head = try!(self.elab_cx.apply_implicit_args(cases_on.to_term())); let mut args = vec![escrutinee]; args.extend(cases.into_iter()); let result = Term::apply_all(head, args); debug!("elaborated_match: {}", result); Ok(result) } } } fn simple_pattern_binders(&mut self, simple_pattern: SimplePattern, scrutinee_ty: &core::Term, ctor_map: &HashMap<core::Name, core::Term>) -> Result<Vec<(ast::Name, core::Term)>, Error> { match simple_pattern { SimplePattern::Name(n) => { let elab_name = try!(self.elab_cx.cx.elaborate_global_name(n.clone())); match ctor_map.get(&elab_name) { None => return Ok(vec![(n, scrutinee_ty.clone())]), Some(ctor_ty) => { return Ok(vec![]); } } } SimplePattern::Constructor(ctor, args) => { let elab_name = try!(self.elab_cx.cx.elaborate_global_name(ctor.clone())); match ctor_map.get(&elab_name) { None => panic!("not the right"), Some(ctor_ty) => { debug!("{:?}", ctor_ty.binders()); let (inductive_ty, i_args) = scrutinee_ty.uncurry(); let mut ctor_ty = ctor_ty.clone(); for arg in i_args { match ctor_ty { Term::Forall { term, .. } => { debug!("arg {}", arg); ctor_ty = term.instantiate(&arg); } _ => panic!() } } debug!("ctor_ty {}", ctor_ty); let binders = ctor_ty.binders() .unwrap_or(vec![]) .iter() .cloned() .zip(args.into_iter()) .
fn elaborate_simple_case(&mut self, simple_case: SimpleCase, scrutinee_ty: &core::Term, ctor_map: &HashMap<core::Name, core::Term>) -> Result<core::Term, Error> { let SimpleCase { pattern, rhs, } = simple_case; debug!("pattern: {} rhs: {}", pattern, rhs); let binders = try!(self.simple_pattern_binders( pattern, scrutinee_ty, ctor_map)); for &(ref n, ref ty) in &binders { debug!("{} {}", n, ty); } self.enter_pattern_scope(binders, move |pat_cx, names| { match rhs { SimpleMatchArm::Term(rhs) => Ok(Term::abstract_lambda(names, try!(pat_cx.elab_cx.elaborate_term(rhs)))), SimpleMatchArm::Match(mat) => pat_cx.elaborate_simple_match(mat) } }) } } pub fn elaborate_pattern_match<'ecx>( elab_cx: &mut LocalElabCx<'ecx>, scrutinee: ast::Term, cases: Vec<ast::Case>) -> Result<Term, Error> { let mut pmcx = PatternMatchCx::new(elab_cx); let simplified_match = simplify_match(scrutinee, cases); debug!("simplified_match: {}", simplified_match); pmcx.elaborate_simple_match(simplified_match) }
map(|(t, n)| { (n, t.clone()) }).collect(); return Ok(binders); } } } } }
function_block-function_prefix_line
[ { "content": "pub fn simplify_match(scrutinee: ast::Term, cases: Vec<ast::Case>) -> SimpleMatch {\n\n let mut simple_cases = vec![];\n\n\n\n for case in cases {\n\n let rhs = SimpleMatchArm::Term(case.rhs);\n\n simple_cases.push(simplify_pattern(case.pattern, rhs));\n\n }\n\n\n\n let simple_match = SimpleMatch {\n\n scrutinee: scrutinee,\n\n cases: simple_cases,\n\n pattern_type: PatternType::Cases,\n\n };\n\n\n\n let simple_match = match condense(SimpleMatchArm::Match(simple_match)) {\n\n SimpleMatchArm::Match(m) => m,\n\n _ => panic!(\"condensing a match should result in at least one match\")\n\n };\n\n\n\n simple_match\n\n}\n\n\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 1, "score": 272126.1033837206 }, { "content": "/// Construct a recursor for `data_type`.\n\npub fn make_recursor(ty_cx: &mut TyCtxt, data_type: &Data) -> Result<(), Error> {\n\n let mut rcx = InductiveCx::new(ty_cx, data_type);\n\n let recursor = try!(rcx.recursor());\n\n\n\n // Add an axiom with the recursor type, and the associated computation rule.\n\n rcx.ty_cx\n\n .axioms\n\n .insert(recursor.name, super::Axiom {\n\n ty: recursor.ty,\n\n computation_rule: Some(recursor.computation_rule),\n\n });\n\n\n\n // Now setup all the automatically generated constructs.\n\n try!(rcx.make_cases_on());\n\n\n\n Ok(())\n\n}\n", "file_path": "src/hubris/typeck/inductive.rs", "rank": 2, "score": 233972.97129379347 }, { "content": "pub fn rename_term(rename_map: RenameMap, term: &mut ast::Term) {\n\n let mut renamer = Renamer {\n\n rename_map: rename_map,\n\n };\n\n\n\n renamer.visit_mut_term(term);\n\n}\n", "file_path": "src/hubris/elaborate/pattern_matching/renamer.rs", "rank": 3, "score": 225273.1289040369 }, { "content": "#[derive(Clone)]\n\nstruct ChoiceProcedure(Rc<Fn(Term, Term, HashMap<Name, (Term, Justification)>) -> ()>);\n\n\n\nimpl Debug for ChoiceProcedure {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n panic!()\n\n }\n\n}\n\n\n\nimpl Constraint {\n\n /// Categorizes a constraint into one of constraint categories,\n\n /// this will also cannonicalize the constraints so that that\n\n /// the solver does not have to deal with some symmetric cases.\n\n pub fn categorize(self) -> CategorizedConstraint {\n\n use self::Constraint::*;\n\n use self::ConstraintCategory::*;\n\n\n\n match self {\n\n Unification(t, u, j) => {\n\n debug!(\"categorize: t={}\", t);\n\n debug!(\"categorize: u={}\", u);\n", "file_path": "src/hubris/typeck/constraint.rs", "rank": 4, "score": 205576.43987412355 }, { "content": "fn error<T>(c: ErrorCode, l: usize) -> Result<T,Error> {\n\n Err(Error { location: l, code: c })\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum Tok<'input> {\n\n // Keywords;\n\n Def,\n\n Axiom,\n\n End,\n\n Extern,\n\n Forall,\n\n Fun,\n\n Inductive,\n\n In,\n\n Import,\n\n Let,\n\n Match,\n\n Module,\n\n Type,\n", "file_path": "src/hubris_syntax/src/tok/mod.rs", "rank": 5, "score": 185145.68143368216 }, { "content": "pub fn simplify_pattern(pattern: ast::Pattern, mut rhs: SimpleMatchArm) -> SimpleCase {\n\n match pattern {\n\n ast::Pattern::Placeholder => {\n\n panic!()\n\n }\n\n ast::Pattern::Name(n) => {\n\n SimpleCase {\n\n pattern: SimplePattern::Name(n),\n\n rhs: rhs,\n\n }\n\n }\n\n ast::Pattern::Constructor(pat_head, pat_args) => {\n\n println!(\"pattern_head: {}\", pat_head);\n\n\n\n let mut arg_names = vec![];\n\n\n\n for (i, pat_arg) in pat_args.into_iter().enumerate().rev() {\n\n println!(\"pattern_arg {}\", pat_arg);\n\n let arg_name = 
ast::Name::from_str(&format!(\"a{}\", i)[..]);\n\n arg_names.push(arg_name.clone());\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 7, "score": 175985.290116961 }, { "content": "pub fn walk_mut_case<'v, V: VisitorMut<'v>>(visitor: &mut V, case: &'v mut Case) {\n\n let &mut Case {\n\n ref mut span,\n\n ref mut pattern,\n\n ref mut rhs,\n\n } = case;\n\n\n\n visitor.visit_mut_span(span);\n\n visitor.visit_mut_pattern(pattern);\n\n visitor.visit_mut_term(rhs);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 8, "score": 168050.20076619132 }, { "content": "pub fn walk_mut_binder<'v, V: VisitorMut<'v>>(visitor: &mut V, binder: &'v mut Binder) {\n\n visitor.visit_mut_span(&mut binder.span);\n\n for name in &mut binder.names {\n\n visitor.visit_mut_name(name);\n\n }\n\n binder.ty.as_mut().map(|ty| visitor.visit_mut_term(ty));\n\n}\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 9, "score": 167992.59375802768 }, { "content": "pub fn walk_mut_name<'v, V: VisitorMut<'v>>(visitor: &mut V, name: &'v mut Name) {\n\n visitor.visit_mut_span(&mut name.span);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 10, "score": 167622.66279677226 }, { "content": "pub fn walk_mut_term<'v, V: VisitorMut<'v>>(visitor: &mut V, term: &'v mut Term) {\n\n use ast::Term::*;\n\n\n\n match term {\n\n &mut Literal { ref mut span, ref mut lit } => {\n\n visitor.visit_mut_span(span);\n\n visitor.visit_mut_literal(lit);\n\n }\n\n &mut Var { ref mut name, .. } =>\n\n visitor.visit_mut_name(name),\n\n &mut Match { ref mut span, ref mut scrutinee, ref mut cases } => {\n\n visitor.visit_mut_span(span);\n\n visitor.visit_mut_term(scrutinee);\n\n for case in cases {\n\n visitor.visit_mut_case(case);\n\n }\n\n }\n\n\n\n &mut App { ref mut span, ref mut fun, ref mut arg } => {\n\n visitor.visit_mut_span(span);\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 11, "score": 167203.50600535824 }, { "content": "fn term_to_rust(term: &Term) -> Doc {\n\n match term {\n\n &Term::Call(ref f, ref args) => {\n\n let args : Vec<_> = args.iter().map(|x| term_to_rust(x)).collect();\n\n term_to_rust(&**f) + parens(seperate(&args[..], &\",\".pretty()))\n\n }\n\n &Term::Var(ref name) => name_to_rust(name),\n\n &Term::Lambda(ref ns, ref body) => {\n\n let args : Vec<_> =\n\n ns.iter()\n\n .map(|n| name_to_rust(n) + \": Obj\".pretty())\n\n .collect();\n\n to_object(\"|\".pretty() + seperate(&args[..], &\",\".pretty()) + \"|\".pretty() +\n\n block(term_to_rust(body)))\n\n }\n\n &Term::Panic(ref msg) => {\n\n \"panic!\".pretty() + parens(\"\\\"\".pretty() + msg.pretty() + \"\\\"\".pretty())\n\n }\n\n t => panic!(\"{:?}\", t),\n\n }\n\n}\n\n\n", "file_path": "src/hubris/backend/mod.rs", "rank": 12, "score": 160187.6040590164 }, { "content": "fn name_to_rust(name: &core::Name) -> Doc {\n\n match name {\n\n &core::Name::Qual { ref components, .. } => {\n\n let pieces: Vec<_> =\n\n components.iter()\n\n .map(|c| c.pretty())\n\n .collect();\n\n seperate(&pieces[..], &\"_\".pretty())\n\n }\n\n &core::Name::DeBruijn { ref repr, .. } => repr.pretty(),\n\n &core::Name::Local { ref repr, .. } => repr.pretty(),\n\n &core::Name::Meta { .. 
} => panic!(),\n\n }\n\n}\n\n\n", "file_path": "src/hubris/backend/mod.rs", "rank": 13, "score": 155621.85550436538 }, { "content": "fn walk_name<'v, V: Visitor<'v>>(visitor: &mut V, name: &'v Name) {\n\n visitor.visit_span(name.span);\n\n}\n", "file_path": "src/hubris/core/visit.rs", "rank": 14, "score": 154353.92143662146 }, { "content": "fn walk_term<'v, V: Visitor<'v>>(visitor: &mut V, term: &'v Term) {\n\n use ast::Term::*;\n\n\n\n match term {\n\n &Literal { ref span, ref lit } =>\n\n panic!(),\n\n &Var { ref name } =>\n\n visitor.visit_name(name),\n\n &Match { ref span, ref scrutinee, ref cases } =>\n\n panic!(),\n\n &App { span, ref fun, ref arg } => {\n\n visitor.visit_span(span);\n\n visitor.visit_term(fun);\n\n visitor.visit_term(arg);\n\n }\n\n &Forall { span, ref name, ref ty, ref term } => {\n\n visitor.visit_span(span);\n\n visitor.visit_name(name);\n\n visitor.visit_term(ty);\n\n visitor.visit_term(term);\n", "file_path": "src/hubris/core/visit.rs", "rank": 15, "score": 153896.09641132937 }, { "content": "fn name_to_path(name: &Name) -> Option<PathBuf> {\n\n match name {\n\n &Name::Qual { ref components, .. } => {\n\n assert!(components.len() > 0);\n\n let mut cs = components.iter();\n\n let first = cs.next().unwrap();\n\n let mut path = PathBuf::from(first);\n\n\n\n for c in cs {\n\n path = path.join(c);\n\n }\n\n\n\n path.set_extension(\"hbr\");\n\n\n\n Some(path)\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/hubris/typeck/mod.rs", "rank": 16, "score": 153323.7661974805 }, { "content": "pub fn walk_case<'v, V: Visitor<'v>>(visitor: &mut V, case: &'v Case) {\n\n let &Case {\n\n ref span,\n\n ref pattern,\n\n ref rhs,\n\n } = case;\n\n\n\n visitor.visit_span(span);\n\n visitor.visit_pattern(pattern);\n\n visitor.visit_term(rhs);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 17, "score": 146874.1426702598 }, { "content": "pub fn walk_binder<'v, V: Visitor<'v>>(visitor: &mut V, binder: &'v Binder) {\n\n visitor.visit_span(&binder.span);\n\n for name in &binder.names {\n\n visitor.visit_name(name);\n\n }\n\n binder.ty.as_ref().map(|ty| visitor.visit_term(ty));\n\n}\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 18, "score": 146813.53007906154 }, { "content": "fn def_eq_name_modulo(n1: &Name, n2: &Name) -> bool {\n\n debug!(\"equal_name_modulo: {} == {}\", n1, n2);\n\n\n\n match (n1, n2) {\n\n (&Name::Meta { number: number1, .. },\n\n &Name::Meta { number: number2, .. }) => {\n\n number1 == number2\n\n }\n\n (&Name::Meta { .. }, _) => {\n\n false\n\n }\n\n (_, &Name::Meta { .. }) => {\n\n false\n\n }\n\n _ => n1 == n2\n\n }\n\n}\n\n\n", "file_path": "src/hubris/typeck/mod.rs", "rank": 19, "score": 146656.22774241972 }, { "content": "pub fn walk_name<'v, V: Visitor<'v>>(visitor: &mut V, name: &'v Name) {\n\n visitor.visit_span(&name.span);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 20, "score": 146428.37158803036 }, { "content": "pub fn walk_term<'v, V: Visitor<'v>>(visitor: &mut V, term: &'v Term) {\n\n use ast::Term::*;\n\n\n\n match term {\n\n &Literal { ref span, ref lit } => {\n\n visitor.visit_span(span);\n\n visitor.visit_literal(lit);\n\n }\n\n &Var { ref name, .. 
} =>\n\n visitor.visit_name(name),\n\n &Match { ref span, ref scrutinee, ref cases } => {\n\n visitor.visit_span(span);\n\n visitor.visit_term(scrutinee);\n\n for case in cases {\n\n visitor.visit_case(case);\n\n }\n\n },\n\n &App { ref span, ref fun, ref arg } => {\n\n visitor.visit_span(span);\n\n visitor.visit_term(fun);\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 21, "score": 145988.17337720885 }, { "content": "pub fn ensure_success(cmd: &mut Command) -> io::Result<()> {\n\n let output = try!(cmd.output());\n\n if !output.status.success() {\n\n panic!(\"command {:?} failed with: {:?}\", cmd, output.stdout);\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/hubris/backend/llvm.rs", "rank": 22, "score": 143386.8103014992 }, { "content": "struct Renamer {\n\n rename_map: RenameMap,\n\n}\n\n\n\nimpl<'v> VisitorMut<'v> for Renamer {\n\n fn visit_mut_term(&mut self, term: &'v mut Term) {\n\n let replace = match term {\n\n &mut Term::Var { ref name, .. } => {\n\n match self.rename_map.get(&name) {\n\n None => None,\n\n Some(t) => Some(t.clone())\n\n }\n\n }\n\n _ => None,\n\n };\n\n\n\n match replace {\n\n None => {}\n\n Some(t) => *term = t,\n\n }\n\n\n\n walk_mut_term(self, term)\n\n }\n\n}\n\n\n", "file_path": "src/hubris/elaborate/pattern_matching/renamer.rs", "rank": 23, "score": 141572.49204252727 }, { "content": "pub fn condense(simple_match: SimpleMatchArm) -> SimpleMatchArm {\n\n match simple_match {\n\n SimpleMatchArm::Term(rhs) => SimpleMatchArm::Term(rhs),\n\n SimpleMatchArm::Match(simple_match) => {\n\n let SimpleMatch {\n\n scrutinee,\n\n cases,\n\n pattern_type,\n\n } = simple_match;\n\n\n\n // This handles the case in which the simplification pass has generated\n\n // a simple match like `match a with | b => rhs`, we just simplify to\n\n // rhs.\n\n if cases.len() == 1 {\n\n let case = cases[0].clone();\n\n match case.pattern {\n\n SimplePattern::Name(n) => {\n\n let mut name_map = HashMap::new();\n\n name_map.insert(n, scrutinee.clone());\n\n condense(case.rhs).rename(&name_map)\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 24, "score": 140388.60595421033 }, { "content": "fn handler(req: &mut Request) -> IronResult<Response> {\n\n // match req.get_ref::<UrlEncodedQuery>() {\n\n // Err(ref e) => {\n\n // let err_msg = format!(\"{:?}\", e);\n\n // Ok(Response::with((status::Ok, &err_msg[..])))\n\n // },\n\n // Ok(ref hashmap) => {\n\n // println!(\"Parsed GET request query string:\\n {:?}\", hashmap);\n\n // Ok(Response::with((status::Ok, \"he\")))\n\n // }\n\n // }\n\n panic!(\"fix me, something in Iron broke\")\n\n}\n\n\n", "file_path": "src/hubris/server.rs", "rank": 25, "score": 130285.44754413719 }, { "content": "// pub trait Backend {\n\n// fn compile(self)\n\n// }\n\nstruct ModuleCx<'cx, 'm> {\n\n cx: &'cx llvm::Context,\n\n module: llvm::Module,\n\n cps_module: &'m cps::Module,\n\n}\n\n\n\nimpl<'cx, 'm> ModuleCx<'cx, 'm> {\n\n fn new(cx: &'cx llvm::Context, m: &'m cps::Module) -> ModuleCx<'cx, 'm> {\n\n panic!()\n\n // let module = llvm::Module::with_name(m.name.to_string());\n\n // module.set_target(\"x86_64-apple-darwin\".to_string());\n\n //\n\n // ModuleCx {\n\n // cx: cx,\n\n // module: module,\n\n // cps_module: m,\n\n // }\n\n }\n\n\n\n pub fn emit_module<P: AsRef<Path> + Debug>(&self, output: P) -> Result<(), Error> {\n", "file_path": "src/hubris/backend/llvm.rs", "rank": 26, "score": 129698.56265917132 }, { "content": "/// This context is used to do type erasure, and lowering of `core::Term` to 
an\n\n/// untyped lambda calculus.\n\nstruct ErasureCx<'tcx> {\n\n ty_cx: &'tcx TyCtxt\n\n}\n\n\n\nimpl<'tcx> ErasureCx<'tcx> {\n\n pub fn new(ty_cx: &'tcx TyCtxt) -> ErasureCx<'tcx> {\n\n ErasureCx {\n\n ty_cx: ty_cx\n\n }\n\n }\n\n\n\n// fn lower_module(module: core::Module) -> Module {\n\n// Module {\n\n// definitions:\n\n// module.defs\n\n// .into_iter()\n\n// .filter_map(|i| match i {\n\n// core::Item::Fn(d) => Some(lower_def(d)),\n\n// _ => None,\n\n// })\n", "file_path": "src/hubris/backend/mod.rs", "rank": 27, "score": 127808.36634109044 }, { "content": "pub fn to_qualified_name(name: ast::Name) -> Option<core::Name> {\n\n let components = match name.repr {\n\n ast::NameKind::Qualified(components) => components,\n\n ast::NameKind::Unqualified(s) => vec![s],\n\n _ => return None,\n\n };\n\n\n\n Some(core::Name::Qual {\n\n components: components,\n\n span: name.span,\n\n })\n\n}\n", "file_path": "src/hubris/elaborate/util.rs", "rank": 28, "score": 123261.29496571832 }, { "content": "pub fn compile_file<T: AsRef<Path>>(path: T, output: Option<PathBuf>) -> Result<(), Error> {\n\n let module_id = ast::ModuleId(0);\n\n let parser = try!(parser::from_file(path.as_ref(), module_id));\n\n let module = try!(parser.parse());\n\n\n\n let session =\n\n session::Session::from_root(\n\n path.as_ref());\n\n\n\n session.add_source_map_for(\n\n module_id,\n\n parser.source_map);\n\n\n\n let mut ecx =\n\n elaborate::ElabCx::from_module(\n\n module,\n\n session);\n\n\n\n let core_module = ecx.elaborate_module();\n\n\n", "file_path": "src/hubris/lib.rs", "rank": 29, "score": 122012.52055643426 }, { "content": "// Pretty print a list of binders. Groups binders of the same type\n\npub fn pretty_binders<'a>(binders: &[&'a Binder]) -> Doc<'a> {\n\n let mut ds = Vec::new();\n\n for thing in binders.iter() {\n\n ds.push(thing.pretty());\n\n }\n\n // for (ty, g) in binders.iter().group_by(|elt| (&elt.ty, &elt.mode)) {\n\n // println!(\"binder_ty={:?}\", ty);\n\n // if g.len() == 1{\n\n // ds.push(g[0].pretty());\n\n // } else {\n\n // let d = seperate(g.iter().map(|x| x.name.pretty())\n\n // .collect::<Vec<Doc<'a>>>().as_slice()\n\n // , &Doc::text(\" \"))\n\n // + \" : \".pretty() + g[0].ty.pretty();\n\n // if g[0].is_implicit() {\n\n // ds.push(braces(d));\n\n // } else {\n\n // ds.push(parens(d));\n\n // }\n\n // }\n\n // };\n\n seperate(ds.as_slice(), &Doc::text(\" \"))\n\n}\n", "file_path": "src/hubris/core/binder.rs", "rank": 30, "score": 115016.46756660024 }, { "content": "/// For use in the REPL or editor server.\n\npub fn from_string(contents: String, id: ModuleId) -> io::Result<Parser> {\n\n let source_map = SourceMap::from_source(contents);\n\n Ok(Parser { source_map: source_map, id: id })\n\n}\n", "file_path": "src/hubris_syntax/src/parser/mod.rs", "rank": 31, "score": 110986.5791457381 }, { "content": "pub fn walk_mut_axiom<'v, V: VisitorMut<'v>>(visitor: &mut V, a: &'v mut Axiom) {\n\n visitor.visit_mut_span(&mut a.span);\n\n visitor.visit_mut_name(&mut a.name);\n\n visitor.visit_mut_term(&mut a.ty);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 32, "score": 105276.67245198917 }, { "content": "pub fn walk_mut_module<'v, V: VisitorMut<'v>>(visitor: &mut V, module: &'v mut Module) {\n\n visitor.visit_mut_span(&mut module.span);\n\n visitor.visit_mut_name(&mut module.name);\n\n\n\n for item in &mut module.items {\n\n visitor.visit_mut_item(item);\n\n }\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 33, "score": 104338.06976656502 }, { 
"content": "pub fn walk_mut_def<'v, V: VisitorMut<'v>>(visitor: &mut V, def: &'v mut Def) {\n\n visitor.visit_mut_span(&mut def.span);\n\n visitor.visit_mut_name(&mut def.name);\n\n\n\n for binder in &mut def.args {\n\n visitor.visit_mut_binder(binder);\n\n }\n\n\n\n visitor.visit_mut_term(&mut def.ty);\n\n visitor.visit_mut_term(&mut def.body);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 34, "score": 104338.06976656502 }, { "content": "pub fn walk_mut_extern<'v, V: VisitorMut<'v>>(visitor: &mut V, ext: &'v mut Extern) {\n\n visitor.visit_mut_span(&mut ext.span);\n\n visitor.visit_mut_name(&mut ext.name);\n\n visitor.visit_mut_term(&mut ext.term);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 35, "score": 104338.06976656502 }, { "content": "pub fn walk_mut_pattern<'v, V: VisitorMut<'v>>(visitor: &mut V, pattern: &'v mut Pattern) {\n\n use ast::Pattern::*;\n\n\n\n match pattern {\n\n &mut Name(ref mut name) => visitor.visit_mut_name(name),\n\n &mut Constructor(ref mut name, ref mut pats) => {\n\n visitor.visit_mut_name(name);\n\n for pat in pats {\n\n visitor.visit_mut_pattern(pat);\n\n }\n\n }\n\n &mut Placeholder => {}\n\n }\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 36, "score": 104338.06976656502 }, { "content": "pub fn walk_mut_item<'v, V: VisitorMut<'v>>(visitor: &mut V, item: &'v mut Item) {\n\n match item {\n\n &mut Item::Inductive(ref mut d) => visitor.visit_mut_data(d),\n\n &mut Item::Def(ref mut def) => visitor.visit_mut_def(def),\n\n &mut Item::Axiom(ref mut a) => visitor.visi_mut_axiom(a),\n\n &mut Item::Extern(ref mut ext) => panic!(),\n\n &mut Item::Comment(ref mut s) => panic!(),\n\n &mut Item::Import(ref mut n) => visitor.visit_mut_name(n),\n\n }\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 37, "score": 104338.06976656502 }, { "content": "pub fn walk_mut_inductive<'v, V: VisitorMut<'v>>(visitor: &mut V, inductive: &'v mut Inductive) {\n\n visitor.visit_mut_span(&mut inductive.span);\n\n visitor.visit_mut_name(&mut inductive.name);\n\n\n\n for binder in &mut inductive.parameters {\n\n visitor.visit_mut_binder(binder);\n\n }\n\n\n\n visitor.visit_mut_term(&mut inductive.ty);\n\n\n\n for &mut (ref mut n, ref mut t) in &mut inductive.ctors {\n\n visitor.visit_mut_name(n);\n\n visitor.visit_mut_term(t);\n\n }\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit_mut.rs", "rank": 38, "score": 104338.06976656502 }, { "content": "pub fn in_build_path<P: AsRef<Path>, F, R>(build_path: P, f: F) -> R\n\n where F: Fn(&Path) -> R {\n\n\n\n let current_dir = env::current_dir().unwrap();\n\n env::set_current_dir(build_path.as_ref());\n\n let result = f(build_path.as_ref());\n\n env::set_current_dir(current_dir);\n\n result\n\n}\n\n\n", "file_path": "src/hubris/backend/llvm.rs", "rank": 39, "score": 103825.81678109613 }, { "content": "pub fn from_file<T: AsRef<Path>>(path: T, id: ModuleId) -> io::Result<Parser> {\n\n let path = path.as_ref();\n\n\n\n let mut file = try!(File::open(path));\n\n let mut contents = String::new();\n\n\n\n try!(file.read_to_string(&mut contents));\n\n\n\n Ok(Parser {\n\n source_map: SourceMap::from_file(\n\n format!(\"{}\", path.to_owned().display()),\n\n contents),\n\n id: id,\n\n })\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/parser/mod.rs", "rank": 40, "score": 101324.45983747035 }, { "content": "struct Definition {\n\n name: core::Name,\n\n body: Term,\n\n}\n\n\n\nimpl Pretty for Definition {\n\n fn pretty(&self) 
-> Doc {\n\n let &Definition {\n\n ref name,\n\n ref body,\n\n } = self;\n\n\n\n \"def \".pretty() + name.pretty() + \" :=\\n\".pretty() + body.pretty()\n\n }\n\n}\n\n\n\nimpl Display for Definition {\n\n fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {\n\n format(self, formatter)\n\n }\n\n}\n\n\n", "file_path": "src/hubris/backend/mod.rs", "rank": 41, "score": 100493.85895448556 }, { "content": "struct Module {\n\n //constructor: Vec<()>,\n\n definitions: Vec<Definition>,\n\n}\n\n\n", "file_path": "src/hubris/backend/mod.rs", "rank": 42, "score": 100493.85895448556 }, { "content": "#[derive(Debug, Clone)]\n\nenum Term {\n\n Local(core::Name, usize),\n\n Var(core::Name),\n\n // Free(core::)\n\n Switch(Rc<Term>),\n\n Call(Rc<Term>, Vec<Term>),\n\n Lambda(Vec<core::Name>, Box<Term>),\n\n Panic(String),\n\n}\n\n\n\nimpl Pretty for Term {\n\n fn pretty(&self) -> Doc {\n\n use self::Term::*;\n\n\n\n match self {\n\n &Local(_, i) => panic!(),\n\n &Var(ref name) => name.pretty(),\n\n &Switch(ref scrut) => panic!(),\n\n &Call(ref f, ref args) => {\n\n let pargs =\n", "file_path": "src/hubris/backend/mod.rs", "rank": 43, "score": 100247.81450929918 }, { "content": "pub fn llc<P: AsRef<Path> + Debug>(file: P) {\n\n debug!(\"executing llc on {:?}\", file);\n\n Command::new(\"llc\")\n\n .arg(file.as_ref())\n\n .output()\n\n .unwrap_or_else(|e| panic!(\"llc: failed with {:?}\", e));\n\n}\n", "file_path": "src/hubris/llvm/tools/llc.rs", "rank": 44, "score": 91968.54670319939 }, { "content": "fn def_eq_modulo(\n\n t1: &Term,\n\n t2: &Term,\n\n constraints: &mut ConstraintSeq) -> bool {\n\n use core::Term::*;\n\n\n\n debug!(\"equal_modulo: {} == {}\", t1, t2);\n\n\n\n match (t1, t2) {\n\n (&App { fun: ref fun1, arg: ref arg1, .. },\n\n &App { fun: ref fun2, arg: ref arg2, .. }) => {\n\n def_eq_modulo(fun1, fun2, constraints) &&\n\n def_eq_modulo(arg1, arg2, constraints)\n\n }\n\n (&Forall { binder: ref binder1, term: ref term1, .. },\n\n &Forall { binder: ref binder2, term: ref term2, .. }) => {\n\n def_eq_modulo(&*binder1.ty, &*binder2.ty, constraints) &&\n\n def_eq_modulo(term1, term2, constraints)\n\n }\n\n (&Lambda { binder: ref binder1, body: ref body1, .. 
},\n", "file_path": "src/hubris/typeck/mod.rs", "rank": 45, "score": 90834.2618617281 }, { "content": "#[test]\n\nfn test_is_bi_reducible() {\n\n let ty_cx = TyCtxt::new();\n\n panic!()\n\n}\n", "file_path": "src/hubris/typeck/mod.rs", "rank": 46, "score": 90834.2618617281 }, { "content": "struct DummySpanVisitor {\n\n count: usize\n\n}\n\n\n\nimpl<'v> Visitor<'v> for DummySpanVisitor {\n\n fn visit_term(&mut self, term: &'v Term) {\n\n // println!(\"term: {:?}\", term);\n\n walk_term(self, term)\n\n }\n\n\n\n fn visit_span(&mut self, span: &'v Span) {\n\n if span == &Span::dummy() {\n\n self.count += 1;\n\n panic!()\n\n }\n\n }\n\n}\n", "file_path": "src/hubris_syntax/src/parser/dummy_span_debug.rs", "rank": 47, "score": 90492.29500909208 }, { "content": "pub fn subst_meta_binder(\n\n mut b: Binder,\n\n subst_map: &HashMap<Name, (Term, Justification)>,\n\n errs: &mut Vec<Name>) -> Binder {\n\n b.ty = Box::new(replace_metavars_with_err(*b.ty, subst_map, errs));\n\n b\n\n}\n", "file_path": "src/hubris/typeck/solver.rs", "rank": 48, "score": 88698.11291857422 }, { "content": "pub fn annotate_module_id(module: &mut Module, id: ModuleId) {\n\n let mut annotator = ModuleIdAnnotator {\n\n id: id\n\n };\n\n\n\n annotator.visit_mut_module(module)\n\n}\n", "file_path": "src/hubris_syntax/src/parser/annotate_module_id.rs", "rank": 49, "score": 85728.37079008047 }, { "content": "fn walk_def<'v, V: Visitor<'v>>(visitor: &mut V, def: &'v Function) {\n\n visitor.visit_span(def.span);\n\n visitor.visit_name(&def.name);\n\n\n\n for &(ref n, ref t) in &def.args {\n\n visitor.visit_name(n);\n\n visitor.visit_term(t);\n\n }\n\n\n\n visitor.visit_term(&def.ty);\n\n visitor.visit_term(&def.body);\n\n}\n\n\n", "file_path": "src/hubris/core/visit.rs", "rank": 50, "score": 85676.57040431141 }, { "content": "fn walk_module<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Module) {\n\n visitor.visit_span(module.span);\n\n visitor.visit_name(&module.name);\n\n\n\n for item in &module.items {\n\n visitor.visit_item(item);\n\n }\n\n}\n\n\n", "file_path": "src/hubris/core/visit.rs", "rank": 51, "score": 85676.57040431141 }, { "content": "fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {\n\n use ast::Item::*;\n\n\n\n match item {\n\n &Item::Data(ref d) => visitor.visit_data(d),\n\n &Item::Fn(ref d) => visitor.visit_def(d),\n\n &Item::Extern(ref ext) => panic!(),\n\n &Item::Comment(()) => panic!(),\n\n &Item::Import(ref n) => visitor.visit_name(n),\n\n }\n\n}\n\n\n", "file_path": "src/hubris/core/visit.rs", "rank": 52, "score": 85676.57040431141 }, { "content": "fn walk_inductive<'v, V: Visitor<'v>>(visitor: &mut V, inductive: &'v Data) {\n\n visitor.visit_span(inductive.span);\n\n visitor.visit_name(&inductive.name);\n\n\n\n for &(ref n, ref t) in &inductive.parameters {\n\n visitor.visit_name(n);\n\n visitor.visit_term(t);\n\n }\n\n\n\n visitor.visit_term(&inductive.ty);\n\n\n\n for &(ref n, ref t) in &inductive.ctors {\n\n visitor.visit_name(n);\n\n visitor.visit_term(t);\n\n }\n\n}\n\n\n", "file_path": "src/hubris/core/visit.rs", "rank": 53, "score": 85676.57040431141 }, { "content": "fn to_object(value: Doc) -> Doc {\n\n \"Obj::from\".pretty() + parens(value)\n\n}\n\n\n", "file_path": "src/hubris/backend/mod.rs", "rank": 54, "score": 83714.51480895656 }, { "content": "fn block(value: Doc) -> Doc {\n\n \"{\".pretty() + Doc::newline() +\n\n value.nest(4) +\n\n \"}\".pretty()\n\n}\n\n\n", "file_path": "src/hubris/backend/mod.rs", "rank": 55, "score": 83714.51480895656 }, { "content": "fn 
def_to_rust(def: &Definition) -> Doc {\n\n let (args, body) = match &def.body {\n\n &Term::Lambda(ref ns, ref body) => {\n\n let args : Vec<_> =\n\n ns.iter()\n\n .map(|n| name_to_rust(n) + \": Obj\".pretty())\n\n .collect();\n\n\n\n (args, &**body)\n\n }\n\n t => (vec![], t)\n\n };\n\n\n\n \"fn \".pretty() +\n\n name_to_rust(&def.name) +\n\n parens(seperate(&args[..], &\",\".pretty())) + \" -> Obj {\\n\".pretty() +\n\n term_to_rust(body) + \"\\n\".pretty() +\n\n \"}\\n\".pretty()\n\n}\n\n\n", "file_path": "src/hubris/backend/mod.rs", "rank": 56, "score": 81857.4871137599 }, { "content": "pub fn walk_axiom<'v, V: Visitor<'v>>(visitor: &mut V, a: &'v Axiom) {\n\n visitor.visit_span(&a.span);\n\n visitor.visit_name(&a.name);\n\n visitor.visit_term(&a.ty);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 57, "score": 81585.11921009747 }, { "content": "pub fn create_executable<P: AsRef<Path> + Debug>(module: &cps::Module, output: Option<P>) {\n\n let current_dir = env::current_dir().unwrap();\n\n let output = output.as_ref().map(|x| x.as_ref()).unwrap_or(current_dir.as_ref());\n\n let tmp = PathBuf::from(\"/tmp\");\n\n\n\n ensure_success(\n\n Command::new(\"mkdir\")\n\n .arg(\"-p\")\n\n .arg(\"/tmp/hubris\"));\n\n\n\n let exe = in_build_path(tmp.join(\"hubris\"), |build_path| {\n\n let file_name = module.file_name();\n\n\n\n let mut context = llvm::Context::new();\n\n let mcx = ModuleCx::new(&context, module);\n\n\n\n mcx.emit_module(\n\n build_path.join(file_name.with_extension(\"ll\")));\n\n\n\n llvm::tools::llc(\n", "file_path": "src/hubris/backend/llvm.rs", "rank": 58, "score": 80410.53465718823 }, { "content": "pub fn walk_inductive<'v, V: Visitor<'v>>(visitor: &mut V, inductive: &'v Inductive) {\n\n visitor.visit_span(&inductive.span);\n\n visitor.visit_name(&inductive.name);\n\n\n\n for binder in &inductive.parameters {\n\n visitor.visit_binder(binder);\n\n }\n\n\n\n visitor.visit_term(&inductive.ty);\n\n\n\n for &(ref n, ref t) in &inductive.ctors {\n\n visitor.visit_name(n);\n\n visitor.visit_term(t);\n\n }\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 59, "score": 80284.39811796384 }, { "content": "pub fn walk_module<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Module) {\n\n visitor.visit_span(&module.span);\n\n visitor.visit_name(&module.name);\n\n\n\n for item in &module.items {\n\n visitor.visit_item(item);\n\n }\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 60, "score": 80284.39811796384 }, { "content": "pub fn walk_def<'v, V: Visitor<'v>>(visitor: &mut V, def: &'v Def) {\n\n visitor.visit_span(&def.span);\n\n visitor.visit_name(&def.name);\n\n\n\n for binder in &def.args {\n\n visitor.visit_binder(binder);\n\n }\n\n\n\n visitor.visit_term(&def.ty);\n\n visitor.visit_term(&def.body);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 61, "score": 80284.39811796384 }, { "content": "pub fn walk_pattern<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pattern) {\n\n use ast::Pattern::*;\n\n\n\n match pattern {\n\n &Name(ref name) => visitor.visit_name(name),\n\n &Constructor(ref name, ref pats) => {\n\n visitor.visit_name(name);\n\n for pat in pats {\n\n visitor.visit_pattern(pat);\n\n }\n\n }\n\n &Placeholder => {}\n\n }\n\n}\n\n\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 62, "score": 80284.39811796384 }, { "content": "pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {\n\n match item {\n\n &Item::Inductive(ref d) => 
visitor.visit_data(d),\n\n &Item::Def(ref def) => visitor.visit_def(def),\n\n &Item::Axiom(ref a) => visitor.visit_axiom(a),\n\n &Item::Extern(ref ext) => visitor.visit_extern(ext),\n\n &Item::Comment(ref _s) => panic!(),\n\n &Item::Import(ref n) => visitor.visit_name(n),\n\n }\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 63, "score": 80284.39811796384 }, { "content": "pub fn walk_extern<'v, V: Visitor<'v>>(visitor: &mut V, ext: &'v Extern) {\n\n visitor.visit_span(&ext.span);\n\n visitor.visit_name(&ext.name);\n\n visitor.visit_term(&ext.term);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/visit/visit.rs", "rank": 64, "score": 80284.39811796384 }, { "content": "fn is_identifier_start(c: char) -> bool {\n\n UnicodeXID::is_xid_start(c)\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/tok/mod.rs", "rank": 65, "score": 80106.23947715035 }, { "content": "fn is_identifier_continue(c: char) -> bool {\n\n UnicodeXID::is_xid_continue(c)\n\n}\n", "file_path": "src/hubris_syntax/src/tok/mod.rs", "rank": 66, "score": 80106.23947715035 }, { "content": "pub fn ensure_no_dummy_spans(module: &Module) {\n\n let mut dsv = DummySpanVisitor { count: 0 };\n\n dsv.visit_module(module);\n\n assert_eq!(dsv.count, 0);\n\n}\n\n\n", "file_path": "src/hubris_syntax/src/parser/dummy_span_debug.rs", "rank": 67, "score": 75566.21573033104 }, { "content": "use super::super::super::ast::{self, Term};\n\nuse super::super::super::syntax::visit::*;\n\n\n\nuse std::collections::HashMap;\n\n\n\npub type RenameMap = HashMap<ast::Name, ast::Term>;\n\n\n", "file_path": "src/hubris/elaborate/pattern_matching/renamer.rs", "rank": 68, "score": 69987.1000584431 }, { "content": " rhs = SimpleMatchArm::Match(SimpleMatch {\n\n scrutinee: ast::Term::Var { name: arg_name, implicit: false },\n\n cases: vec![simplify_pattern(pat_arg, rhs)],\n\n pattern_type: PatternType::Cases,\n\n })\n\n }\n\n\n\n SimpleCase {\n\n pattern: SimplePattern::Constructor(pat_head, arg_names.into_iter().rev().collect()),\n\n rhs: rhs,\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 69, "score": 69889.56117564191 }, { "content": "impl SimpleMatch {\n\n fn rename(self, rename_map: &RenameMap) -> SimpleMatch {\n\n let mut scrutinee = self.scrutinee;\n\n rename_term(rename_map.clone(), &mut scrutinee);\n\n let cases = self.cases.into_iter().map(|c| c.rename(rename_map)).collect();\n\n\n\n SimpleMatch {\n\n scrutinee: scrutinee,\n\n cases: cases,\n\n pattern_type: self.pattern_type,\n\n }\n\n }\n\n}\n\n\n\nimpl Pretty for SimpleMatch {\n\n fn pretty(&self) -> Doc {\n\n let cases : Vec<_> = self.cases.iter().map(|x| x.pretty()).collect();\n\n \"match \".pretty() + self.scrutinee.pretty() + \" with\\n\".pretty() +\n\n seperate(&cases[..], &\"\\n\".pretty()) + \"\\nend\".pretty()\n\n }\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 70, "score": 69887.5280659566 }, { "content": "use super::super::super::ast::{self};\n\nuse super::renamer::{rename_term, RenameMap};\n\n\n\nuse std::collections::HashMap;\n\nuse std::fmt::{self, Display, Formatter};\n\n\n\nuse pretty::*;\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub enum PatternType {\n\n Cases,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum SimpleMatchArm {\n\n Match(SimpleMatch),\n\n Term(ast::Term),\n\n}\n\n\n\nimpl SimpleMatchArm {\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 71, "score": 69885.88277106553 }, { "content": "}\n\n\n\nimpl Display for SimpleMatch {\n\n fn 
fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {\n\n format(self, formatter)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SimpleCase {\n\n pub pattern: SimplePattern,\n\n pub rhs: SimpleMatchArm,\n\n}\n\n\n\nimpl SimpleCase {\n\n fn rename(self, rename_map: &RenameMap) -> SimpleCase {\n\n panic!()\n\n }\n\n}\n\n\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 72, "score": 69885.81347420975 }, { "content": " &Match(ref m) => m.pretty(),\n\n &Term(ref t) => t.pretty()\n\n }\n\n }\n\n}\n\n\n\nimpl Display for SimpleMatchArm {\n\n fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {\n\n format(self, formatter)\n\n }\n\n}\n\n\n\n/// A struct representing a simple pattern match, i.e one that can not have nested patterns.\n\n#[derive(Debug, Clone)]\n\npub struct SimpleMatch {\n\n pub scrutinee: ast::Term,\n\n pub cases: Vec<SimpleCase>,\n\n pub pattern_type: PatternType,\n\n}\n\n\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 73, "score": 69883.202936455 }, { "content": " use self::SimplePattern::*;\n\n\n\n match self {\n\n &Constructor(ref n, ref ns) => {\n\n let ns: Vec<_> = ns.iter().map(|x| parens(x.pretty())).collect();\n\n n.pretty() + seperate(&ns[..], &\" \".pretty())\n\n },\n\n &Name(ref n) => n.pretty()\n\n }\n\n }\n\n}\n\n\n\nimpl Display for SimplePattern {\n\n fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {\n\n format(self, formatter)\n\n }\n\n}\n\n\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 74, "score": 69882.33078503454 }, { "content": " fn rename(self, rename_map: &RenameMap) -> SimpleMatchArm {\n\n use self::SimpleMatchArm::*;\n\n\n\n match self {\n\n Match(m) => {\n\n Match(m.rename(rename_map))\n\n }\n\n Term(mut t) => {\n\n rename_term(rename_map.clone(), &mut t);\n\n Term(t)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Pretty for SimpleMatchArm {\n\n fn pretty(&self) -> Doc {\n\n use self::SimpleMatchArm::*;\n\n\n\n match self {\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 75, "score": 69880.9606054491 }, { "content": "impl Pretty for SimpleCase {\n\n fn pretty(&self) -> Doc {\n\n \"| \".pretty() + self.pattern.pretty() + \" => \".pretty() + self.rhs.pretty()\n\n }\n\n}\n\n\n\nimpl Display for SimpleCase {\n\n fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {\n\n format(self, formatter)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum SimplePattern {\n\n Constructor(ast::Name, Vec<ast::Name>),\n\n Name(ast::Name),\n\n}\n\n\n\nimpl Pretty for SimplePattern {\n\n fn pretty(&self) -> Doc {\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 76, "score": 69880.17018854817 }, { "content": " }\n\n _ => {\n\n let cases =\n\n cases.into_iter()\n\n .map(|mut case| {\n\n case.rhs = condense(case.rhs);\n\n case\n\n })\n\n .collect();\n\n\n\n SimpleMatchArm::Match(SimpleMatch {\n\n scrutinee: scrutinee,\n\n cases: cases,\n\n pattern_type: pattern_type,\n\n })\n\n }\n\n }\n\n } else {\n\n let new_cases = vec![];\n\n SimpleMatchArm::Match(SimpleMatch {\n\n scrutinee: scrutinee,\n\n cases: new_cases,\n\n pattern_type: PatternType::Cases,\n\n })\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/hubris/elaborate/pattern_matching/simplify.rs", "rank": 77, "score": 69877.41036925802 }, { "content": "#[derive(Debug, Clone)]\n\nenum Error {\n\n UnknownSymbol(String),\n\n}\n\n\n", "file_path": "src/hubris/backend/llvm.rs", "rank": 88, "score": 66742.34259889845 }, { 
"content": "struct Definition {\n\n name: core::Name,\n\n body: Term,\n\n}\n\n\n\nimpl Pretty for Term {\n\n fn pretty(&self) -> Doc {\n\n let &Definition {\n\n ref name,\n\n ref body,\n\n } = self;\n\n\n\n \"def \".pretty() + name.pretty() + \" :=\\n\" + body.pretty()\n\n }\n\n}\n\n\n", "file_path": "src/hubris/backend/ut.rs", "rank": 89, "score": 66255.6103216427 }, { "content": "struct Module {\n\n //constructor: Vec<()>,\n\n definitions: Vec<Definition>,\n\n}\n\n\n", "file_path": "src/hubris/backend/ut.rs", "rank": 90, "score": 66255.6103216427 }, { "content": "/// `Recursor` describes the recursor for a inductive type, each field\n\n/// is split here, enabling easy manipulation of recursors.\n\nstruct Recursor {\n\n motif: Name,\n\n parameters: Vec<Name>,\n\n minor_premises: Vec<Name>,\n\n major_premise: Term,\n\n ty: Term,\n\n name: Name,\n\n computation_rule: ComputationRule,\n\n}\n\n\n\nimpl<'i, 'tcx> InductiveCx<'i, 'tcx> {\n\n ///\n\n fn new(ty_cx: &'tcx mut TyCtxt, inductive_ty: &'i Data) -> InductiveCx<'i, 'tcx> {\n\n\n\n let mut rcx = InductiveCx {\n\n ty_cx: ty_cx,\n\n inductive_ty: inductive_ty,\n\n ind_hyp: inductive_ty.name.clone(),\n\n };\n\n\n", "file_path": "src/hubris/typeck/inductive.rs", "rank": 91, "score": 66255.6103216427 }, { "content": "enum Term {\n\n Var(core::Name),\n\n Switch(Rc<Term>, ),\n\n Call(Rc<Term>, Vec<Term>),\n\n Lambda(Vec<core::Name>, Vec<Term>),\n\n}\n\n\n\nimpl Pretty for Term {\n\n fn pretty(&self) -> Doc {\n\n panic!();\n\n // use self::Term::*;\n\n //\n\n // match self {\n\n // &Var { ref name, .. } => name.pretty(),\n\n // &App { ref fun, ref arg, .. } => {\n\n // let pretty_fun = match &**fun {\n\n // complex @ &Term::Lambda { .. } =>\n\n // parens(complex.pretty()),\n\n // t => t.pretty()\n\n // };\n", "file_path": "src/hubris/backend/ut.rs", "rank": 92, "score": 66001.35308920933 }, { "content": "struct ObjValue {\n\n ptr: *mut usize,\n\n}\n\n\n\npub struct Obj(Rc<ObjValue>);\n\n\n\nimpl Obj {\n\n pub fn from<T>(t: T) -> Obj {\n\n unsafe {\n\n let boxed_val = Box::new(t);\n\n\n\n let val = ObjValue {\n\n ptr: transmute(Box::into_raw(boxed_val)),\n\n };\n\n\n\n Obj(Rc::new(val))\n\n }\n\n }\n\n\n\n pub fn unbox<T>(&self) -> &T {\n\n let ptr: *mut usize = self.0.ptr;\n\n unsafe { transmute(ptr) }\n\n }\n\n}\n", "file_path": "src/hubris_rt/src/lib.rs", "rank": 93, "score": 64001.76188455449 }, { "content": "fn main() {\n\n lalrpop::process_root().unwrap();\n\n}\n", "file_path": "src/hubris_syntax/build.rs", "rank": 94, "score": 60934.47607245489 }, { "content": "pub fn run() {\n\n let mut router = Router::new(); // Alternative syntax:\n\n router.get(\"/check\", handler); // get \"/:query\" => handler);\n\n\n\n Iron::new(router).http(\"127.0.0.1:3000\").unwrap();\n\n}\n", "file_path": "src/hubris/server.rs", "rank": 95, "score": 58952.2215699812 }, { "content": "pub fn replace_metavars(\n\n term: Term,\n\n subst_map: &HashMap<Name, (Term, Justification)>) -> Result<Term, Error> {\n\n let mut errs = vec![];\n\n let term = replace_metavars_with_err(term, subst_map, &mut errs);\n\n\n\n if errs.len() > 0 {\n\n Err(Error::NoSolution(errs, term))\n\n } else {\n\n Ok(term)\n\n }\n\n}\n\n\n", "file_path": "src/hubris/typeck/solver.rs", "rank": 96, "score": 56746.244800429464 }, { "content": "pub fn replace_metavars_with_err(\n\n t: Term, subst_map: &HashMap<Name, (Term, Justification)>,\n\n errs: &mut Vec<Name>) -> Term {\n\n use core::Term::*;\n\n\n\n match t {\n\n App { fun, arg, span } => {\n\n App {\n\n fun: 
Box::new(replace_metavars_with_err(*fun, subst_map, errs)),\n\n arg: Box::new(replace_metavars_with_err(*arg, subst_map, errs)),\n\n span: span,\n\n }\n\n }\n\n Forall { binder, term, span } => {\n\n Forall {\n\n binder: subst_meta_binder(binder, subst_map, errs),\n\n term: Box::new((replace_metavars_with_err(*term, subst_map, errs))),\n\n span: span,\n\n }\n\n }\n", "file_path": "src/hubris/typeck/solver.rs", "rank": 97, "score": 55743.19160487826 }, { "content": "fn lower_module(module: core::Module) -> Module {\n\n Module {\n\n definitions:\n\n module.defs\n\n .into_iter()\n\n .filter_map(|i| match i {\n\n core::Item::Fn(d) => Some(lower_def(d)),\n\n _ => None,\n\n })\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "src/hubris/backend/ut.rs", "rank": 98, "score": 49460.37586231547 }, { "content": "fn lower_def(def: core::Def) -> Definition {\n\n panic!(\"{}\", def);\n\n}\n\n\n\n\n\nimpl Backend for Rust {\n\n fn create_executable<P: AsRef<Path> + Debug>(module: core::Module, output: Option<P>) {\n\n let m = lower_module(module);\n\n for def in m.defs {\n\n println!(\"{}\", def.pretty())\n\n }\n\n }\n\n}\n", "file_path": "src/hubris/backend/ut.rs", "rank": 99, "score": 49460.37586231547 } ]
Rust
src/model/category_lookup.rs
creinig/naday
975eb02a0e2e71bfe63ed8efd29141ddc5d77f7d
use super::Category; use anyhow::{bail, Result}; use std::collections::HashMap; use std::rc::Rc; #[derive(Debug)] pub struct CategoryLookup { categories: HashMap<String, Rc<Category>>, by_name_or_alias: HashMap<String, Rc<Category>>, } impl CategoryLookup { pub fn new() -> CategoryLookup { CategoryLookup { categories: HashMap::new(), by_name_or_alias: HashMap::new(), } } pub fn add(&mut self, category: Category) -> Result<()> { if self.categories.contains_key(&category.name.to_lowercase()) { return Ok(()); } for name in category.all_names() { if self.by_name_or_alias.contains_key(&name.to_lowercase()) { bail!( "Duplicate category key: '{}' is used by '{}' and '{}'", name, category.name, self.find(name).unwrap().name ); } } let cat_rc = Rc::new(category); self.categories .insert(String::from(&cat_rc.name).to_lowercase(), cat_rc.clone()); for name in cat_rc.all_names() { self.by_name_or_alias .insert(name.to_string().to_lowercase(), cat_rc.clone()); } Ok(()) } pub fn find<S: AsRef<str>>(&self, alias_or_name: S) -> Option<Rc<Category>> { let lc = alias_or_name.as_ref().to_lowercase(); match self.by_name_or_alias.get(&lc) { Some(cat) => Some(cat.clone()), None => None, } } #[cfg(test)] pub fn len(&self) -> usize { self.categories.len() } pub fn iter(&self) -> std::collections::hash_map::Values<'_, String, Rc<Category>> { self.categories.values() } } #[cfg(test)] mod tests { use super::*; #[test] fn basic_add_and_find() { let mut lookup = CategoryLookup::new(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Situps", 1.0, vec!["su", "si"])) .unwrap(); lookup .add(Category::new("Burpees", 1.5, vec!["bu", "oof"])) .unwrap(); assert_eq!("Burpees", &(lookup.find("Burpees").unwrap().name)); assert_eq!("Burpees", &(lookup.find("bUrPeEs").unwrap().name)); assert_eq!("Burpees", &(lookup.find("oof").unwrap().name)); assert_eq!("Burpees", &(lookup.find("OOF").unwrap().name)); assert_eq!("Situps", &(lookup.find("su").unwrap().name)); assert_eq!("Pushups", &(lookup.find("push").unwrap().name)); } #[test] fn duplicates() { let mut lookup = CategoryLookup::new(); assert_eq!(0, lookup.len()); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); assert_eq!(1, lookup.len()); } #[test] fn duplicate_alias() { let mut lookup = CategoryLookup::new(); assert_eq!(0, lookup.len()); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushdowns", 1.0, vec!["pd", "push"])) .expect_err("Should return an error for duplicate key"); } }
use super::Category; use anyhow::{bail, Result}; use std::collections::HashMap; use std::rc::Rc; #[derive(Debug)] pub struct CategoryLookup { categories: HashMap<String, Rc<Category>>, by_name_or_alias: HashMap<String, Rc<Category>>, } impl CategoryLookup { pub fn new() -> CategoryLookup { CategoryLookup { categories: HashMap::new(), by_name_or_alias: HashMap::new(), } } pub fn add(&mut self, category: Category) -> Result<()> { if self.categories.contains_key(&category.name.to_lowercase()) { return Ok(()); } for name in category.all_names() { if self.by_name_or_alias.contains_key(&name.to_lowercase()) { bail!( "Duplicate category key: '{}' is used by '{}' and '{}'", name, category.name, self.find(name).unwrap().name ); } } let cat_rc = Rc::new(category); self.categories .insert(String::from(&cat_rc.name).to_lowercase(), cat_rc.clone()); for name in cat_rc.all_names() { self.by_name_or_alias .insert(name.to_string().to_low
nwrap(); lookup .add(Category::new("Pushdowns", 1.0, vec!["pd", "push"])) .expect_err("Should return an error for duplicate key"); } }
ercase(), cat_rc.clone()); } Ok(()) } pub fn find<S: AsRef<str>>(&self, alias_or_name: S) -> Option<Rc<Category>> { let lc = alias_or_name.as_ref().to_lowercase(); match self.by_name_or_alias.get(&lc) { Some(cat) => Some(cat.clone()), None => None, } } #[cfg(test)] pub fn len(&self) -> usize { self.categories.len() } pub fn iter(&self) -> std::collections::hash_map::Values<'_, String, Rc<Category>> { self.categories.values() } } #[cfg(test)] mod tests { use super::*; #[test] fn basic_add_and_find() { let mut lookup = CategoryLookup::new(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Situps", 1.0, vec!["su", "si"])) .unwrap(); lookup .add(Category::new("Burpees", 1.5, vec!["bu", "oof"])) .unwrap(); assert_eq!("Burpees", &(lookup.find("Burpees").unwrap().name)); assert_eq!("Burpees", &(lookup.find("bUrPeEs").unwrap().name)); assert_eq!("Burpees", &(lookup.find("oof").unwrap().name)); assert_eq!("Burpees", &(lookup.find("OOF").unwrap().name)); assert_eq!("Situps", &(lookup.find("su").unwrap().name)); assert_eq!("Pushups", &(lookup.find("push").unwrap().name)); } #[test] fn duplicates() { let mut lookup = CategoryLookup::new(); assert_eq!(0, lookup.len()); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .unwrap(); assert_eq!(1, lookup.len()); } #[test] fn duplicate_alias() { let mut lookup = CategoryLookup::new(); assert_eq!(0, lookup.len()); lookup .add(Category::new("Pushups", 1.0, vec!["pu", "push"])) .u
random
[ { "content": "/// Read all categories and return a populated lookup structure\n\npub fn read_categories(cfg: &Config) -> Result<CategoryLookup> {\n\n let categories = category::read_categories(cfg)?;\n\n let mut lookup = CategoryLookup::new();\n\n\n\n for category in categories {\n\n lookup.add(category)?;\n\n }\n\n\n\n Ok(lookup)\n\n}\n\n\n\n//\n\n// Internals --------------------------------------\n\n//\n\n\n", "file_path": "src/storage/fs.rs", "rank": 0, "score": 151402.98744075585 }, { "content": "/// Read all categories and return a populated lookup structure\n\npub fn read_categories(cfg: &Config) -> Result<CategoryLookup, String> {\n\n match fs::read_categories(cfg) {\n\n Ok(lookup) => Ok(lookup),\n\n Err(error) => Err(format!(\"{:?}\", error)),\n\n }\n\n}\n", "file_path": "src/storage.rs", "rank": 1, "score": 148665.03355330756 }, { "content": "pub fn read_categories(cfg: &Config) -> Result<Vec<Category>> {\n\n let path = &(init_category_file(cfg)?);\n\n let contents = fs::read_to_string(path)\n\n .with_context(|| format!(\"Unable to read category file {:?}\", path))?;\n\n\n\n let mut categories: Vec<Category> = Vec::new();\n\n let mut lines = contents.lines();\n\n\n\n if let Some(preamble) = lines.next() {\n\n if preamble.trim() != PREAMBLE_CATEGORIES_V1 {\n\n bail!(ParseError::new(\n\n \"No valid preamble found - unable to determine category file format\",\n\n ));\n\n }\n\n } else {\n\n bail!(ParseError::new(\"Category file seems to be empty\"));\n\n }\n\n\n\n for line in lines {\n\n let line = line.trim();\n", "file_path": "src/storage/fs/category.rs", "rank": 2, "score": 130979.1996265873 }, { "content": "pub fn sliding_week(category: Option<String>, config: &Config) -> Result<(), String> {\n\n sliding::sliding_days(Local::now().date(), 7, category, config)\n\n}\n", "file_path": "src/report.rs", "rank": 3, "score": 116159.81971588018 }, { "content": "pub fn sliding_month(category: Option<String>, config: &Config) -> Result<(), String> {\n\n sliding::sliding_days(Local::now().date(), 31, category, config)\n\n}\n\n\n", "file_path": "src/report.rs", "rank": 4, "score": 116159.81971588018 }, { "content": "/// Calculate the weighted total repetitions over the the given activities\n\npub fn weighted_total(activities: &[Activity], categories: &CategoryLookup) -> u32 {\n\n let mut total = 0;\n\n\n\n for activity in activities {\n\n let cat = &activity.category;\n\n if let Some(category) = categories.find(cat) {\n\n total += ((activity.reps as f64) * category.weight) as u32;\n\n } else {\n\n // default to a weight of 1 (e.g. 
for categories that don't exist anymore)\n\n total += activity.reps;\n\n }\n\n }\n\n\n\n total\n\n}\n\n\n\n/// Struct representing the aggregated stats for one day\n\n#[derive(Debug)]\n\npub struct DayStats {\n\n pub day: Date<Local>,\n", "file_path": "src/report/common.rs", "rank": 5, "score": 110596.97341184191 }, { "content": "pub fn today(config: &Config) -> Result<(), String> {\n\n today::run(config)\n\n}\n\n\n", "file_path": "src/report.rs", "rank": 6, "score": 109417.43362450215 }, { "content": "pub fn run(ctx: RunContext) -> Result<(), String> {\n\n match ctx.action {\n\n CliAction::Report {\n\n kind,\n\n category,\n\n sliding: _,\n\n } => match kind {\n\n cli::ReportKind::Day => report::today(&ctx.config),\n\n cli::ReportKind::Week => report::sliding_week(category, &ctx.config),\n\n cli::ReportKind::Month => report::sliding_month(category, &ctx.config),\n\n },\n\n CliAction::System => run_system(&ctx.config),\n\n CliAction::AddActivity {\n\n repetitions,\n\n category,\n\n } => run_add_activity(repetitions, category, &ctx.config),\n\n }\n\n}\n\n\n\n//\n\n// Main Command handlers ----------------------------\n\n//\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 107250.68296639374 }, { "content": "/// Print the report for today\n\npub fn run(config: &Config) -> Result<(), String> {\n\n let categories = storage::read_categories(config)?;\n\n let activities = storage::read_today(config)?;\n\n\n\n println!(\"\\n{}\", report(&activities, &categories));\n\n Ok(())\n\n}\n\n\n\n//\n\n// Internals -----------------------------\n\n//\n\n\n", "file_path": "src/report/today.rs", "rank": 8, "score": 107250.68296639374 }, { "content": "/// Store the given activity on the filesystem\n\npub fn store(activity: &Activity, config: &Config) -> Result<()> {\n\n activity::store(activity, config)\n\n}\n\n\n", "file_path": "src/storage/fs.rs", "rank": 9, "score": 103353.41205086136 }, { "content": "fn new_lookup() -> CategoryLookup {\n\n let mut lookup = CategoryLookup::new();\n\n\n\n lookup.add(newcat(\"Pushups\", 1.0)).unwrap();\n\n lookup.add(newcat(\"Burpees\", 1.5)).unwrap();\n\n lookup.add(newcat(\"Steps\", 0.01)).unwrap();\n\n\n\n lookup\n\n}\n", "file_path": "src/report/test_common.rs", "rank": 10, "score": 102247.94626601382 }, { "content": "//\n\n// Main Interface -----------------------\n\n//\n\npub fn store(activity: &Activity, config: &Config) -> Result<(), String> {\n\n match fs::store(activity, config) {\n\n Ok(_) => Ok(()),\n\n Err(error) => Err(format!(\"{:?}\", error)),\n\n }\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 11, "score": 101639.88403481002 }, { "content": "/// Read all activities for today\n\npub fn read_today(config: &Config) -> Result<Vec<Activity>> {\n\n let now = Local::today();\n\n read_day(&now, config)\n\n}\n\n\n", "file_path": "src/storage/fs.rs", "rank": 12, "score": 101448.9992670361 }, { "content": "/// Store the given activity on the filesystem\n\npub fn store(activity: &Activity, config: &Config) -> Result<()> {\n\n let dir_path = super::init_data_dir(&config);\n\n\n\n let file_path = path_for_date(&activity.timestamp.date(), config);\n\n\n\n let mut file: File = init_activity_file(&file_path)\n\n .with_context(|| format!(\"Activity file {:?} could not be initialized\", &dir_path))?;\n\n\n\n writeln!(\n\n &mut file,\n\n \"{};{};{}\",\n\n ts2str(activity.timestamp),\n\n activity.reps,\n\n activity.category\n\n )\n\n .with_context(|| format!(\"Could not write activity to file {:?}\", &dir_path))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": 
"src/storage/fs/activity.rs", "rank": 13, "score": 101448.9992670361 }, { "content": "pub fn read_today(config: &Config) -> Result<Vec<Activity>, String> {\n\n match fs::read_today(config) {\n\n Ok(activities) => Ok(activities),\n\n Err(error) => Err(format!(\"{:?}\", error)),\n\n }\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 14, "score": 99735.47125098477 }, { "content": "fn parse_category(line: &str) -> Result<Category> {\n\n let mut parts = line.split(';');\n\n\n\n let name = match parts.next() {\n\n Some(name) => name.trim(),\n\n None => bail!(\"No category name found\"),\n\n };\n\n\n\n let weight: f64 = match parts.next() {\n\n Some(weight) => weight.parse().with_context(|| {\n\n format!(\n\n \"Unable to parse category weight <{}> in line <{}>\",\n\n weight, line\n\n )\n\n })?,\n\n None => 1.0,\n\n };\n\n\n\n let mut aliases = Vec::new();\n\n for alias in parts {\n", "file_path": "src/storage/fs/category.rs", "rank": 15, "score": 99340.72825502859 }, { "content": "fn newcat(name: &str, weight: f64) -> Category {\n\n Category::new(name, weight, Vec::<String>::new())\n\n}\n\n\n", "file_path": "src/report/test_common.rs", "rank": 16, "score": 92242.51190614153 }, { "content": "fn init_category_file(cfg: &Config) -> Result<PathBuf> {\n\n let mut path = super::init_data_dir(cfg)?;\n\n\n\n path.push(\"categories.txt\");\n\n\n\n if !path.exists() {\n\n let mut file: File = OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .open(&path)\n\n .with_context(|| {\n\n format!(\n\n \"Could not open category file <{}> for writing\",\n\n path.display()\n\n )\n\n })?;\n\n\n\n writeln!(&mut file, \"\\\n\n{}\n\n# List of activity categories and their attributes for the 'naday' tool (https://github.com/creinig/naday).\n", "file_path": "src/storage/fs/category.rs", "rank": 17, "score": 91919.20941380327 }, { "content": "/// Read all activities for a given day\n\npub fn read_day(date: &Date<Local>, config: &Config) -> Result<Vec<Activity>> {\n\n activity::read_day(date, config)\n\n}\n\n\n", "file_path": "src/storage/fs.rs", "rank": 18, "score": 91631.37795365488 }, { "content": "/// Read all activities for the given range of days (both ends inclusive)\n\npub fn read_days(start: &Date<Local>, end: &Date<Local>, config: &Config) -> Result<Vec<Activity>> {\n\n activity::read_days(start, end, config)\n\n}\n\n\n", "file_path": "src/storage/fs.rs", "rank": 19, "score": 83579.80486083566 }, { "content": "pub fn read_days(\n\n start: &Date<Local>,\n\n end: &Date<Local>,\n\n config: &Config,\n\n) -> Result<Vec<Activity>, String> {\n\n match fs::read_days(start, end, config) {\n\n Ok(activities) => Ok(activities),\n\n Err(error) => Err(format!(\"{:?}\", error)),\n\n }\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 20, "score": 81128.11390557831 }, { "content": "pub fn sliding_days(\n\n end_date: Date<Local>,\n\n number_of_days: u32,\n\n category: Option<String>,\n\n config: &Config,\n\n) -> Result<(), String> {\n\n let start_date = end_date - Duration::days((number_of_days - 1).into());\n\n\n\n let categories = storage::read_categories(config)?;\n\n let activities = storage::read_days(&start_date, &end_date, config)?;\n\n let stats = build_stats(&activities, &start_date, &end_date);\n\n\n\n print_stats(&stats, category, &categories);\n\n\n\n Ok(())\n\n}\n\n\n\n//\n\n// Internals -----------------------------------\n\n//\n\n\n", "file_path": "src/report/sliding.rs", "rank": 21, "score": 79357.49029270199 }, { "content": "fn run_add_activity(repetitions: u32, category: String, config: &Config) -> 
Result<(), String> {\n\n let categories = storage::read_categories(config)?;\n\n\n\n let category = match categories.find(&category) {\n\n Some(cat) => cat.name.to_string(),\n\n None => {\n\n eprintln!(\"Activity category '{}' is not known\", category);\n\n process::exit(1);\n\n }\n\n };\n\n\n\n let activity = Activity::new(repetitions, &category);\n\n storage::store(&activity, config)?;\n\n\n\n println!(\"Added {} {}\", repetitions, &category);\n\n report::today(config)?;\n\n Ok(())\n\n}\n", "file_path": "src/lib.rs", "rank": 22, "score": 78497.19509627612 }, { "content": "/// Generate the report for today as string\n\nfn report(activities: &[Activity], categories: &CategoryLookup) -> String {\n\n let mut by_category = HashMap::new();\n\n let mut individual: HashMap<String, Vec<u32>> = HashMap::new();\n\n let total = common::weighted_total(activities, categories);\n\n\n\n for activity in activities {\n\n let cat = &activity.category;\n\n\n\n if by_category.contains_key(cat) {\n\n let parts = individual.get_mut(cat).unwrap();\n\n parts.push(activity.reps);\n\n let sum = by_category.get(cat).unwrap() + activity.reps;\n\n by_category.insert(cat.to_string(), sum);\n\n } else {\n\n let mut parts = Vec::new();\n\n parts.push(activity.reps);\n\n individual.insert(cat.to_string(), parts);\n\n\n\n by_category.insert(cat.to_string(), activity.reps);\n\n };\n", "file_path": "src/report/today.rs", "rank": 23, "score": 76823.89385204585 }, { "content": "fn print_stats(stats: &[DayStats], category: Option<String>, categories: &CategoryLookup) {\n\n match category {\n\n Some(ref cat) => {\n\n let cat_name = &categories.find(cat).unwrap().name;\n\n println!(\n\n \"Report on {} for the past {} days\\n\", cat_name, stats.len()\n\n );\n\n\n\n for day in stats {\n\n println!(\n\n \"{:3}: {:>5} reps ({:>5} total)\",\n\n day.day.weekday(),\n\n day.reps_by_category.get(cat_name).unwrap_or(&0),\n\n day.reps_total(categories)\n\n );\n\n }\n\n }\n\n\n\n None => {\n\n println!(\n", "file_path": "src/report/sliding.rs", "rank": 24, "score": 74089.93984626698 }, { "content": "fn run_system(config: &Config) -> Result<(), String> {\n\n let categories = storage::read_categories(config)?;\n\n\n\n println!(\"Storage directory: {}\", &config.data_dir);\n\n println!(\"Known Categories:\");\n\n for category in categories\n\n .iter()\n\n .sorted_by(|a, b| Ord::cmp(&a.name, &b.name))\n\n {\n\n println!(\n\n \" {:<15} (weight {:<5}), aliases {}\",\n\n &category.name,\n\n &category.weight,\n\n category.aliases.iter().join(\", \")\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 25, "score": 69752.47286450808 }, { "content": "fn parse_report(spec: &str) -> Result<CliAction> {\n\n let groups = match REPORT_PATTERN.captures(spec) {\n\n Some(groups) => groups,\n\n None => {\n\n bail!(ParseError::new(\"Unable to parse report shorthand\",))\n\n }\n\n };\n\n\n\n let kind = match &groups[1] {\n\n \"d\" | \"D\" => ReportKind::Day,\n\n \"w\" | \"W\" => ReportKind::Week,\n\n _ => ReportKind::Month,\n\n };\n\n\n\n Ok(CliAction::Report {\n\n kind,\n\n category: None,\n\n sliding: true,\n\n })\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 26, "score": 68332.04261038064 }, { "content": "fn parse_shorthand(spec: &str) -> Result<CliAction, ()> {\n\n if let Ok(activity) = parse_activity(spec) {\n\n Ok(activity)\n\n } else if let Ok(report) = parse_report(spec) {\n\n Ok(report)\n\n } else {\n\n eprintln!(\"Could not parse shorthand spec '{}'\", spec);\n\n Err(())\n\n }\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 
27, "score": 68332.04261038064 }, { "content": "fn parse_activity(spec: &str) -> Result<CliAction> {\n\n let groups = match ACTIVITY_PATTERN.captures(spec) {\n\n Some(groups) => groups,\n\n None => {\n\n bail!(ParseError::new(\"Unable to parse activity\"));\n\n }\n\n };\n\n\n\n let repetitions: u32 = groups.get(1).unwrap().as_str().parse()?;\n\n let category: String = groups.get(2).unwrap().as_str().to_string();\n\n\n\n Ok(CliAction::AddActivity {\n\n repetitions,\n\n category,\n\n })\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 28, "score": 68332.04261038064 }, { "content": "pub fn cli_parse(args: env::Args) -> RunContext {\n\n match RunContext::new(args) {\n\n Ok(ctx) => ctx,\n\n Err(msg) => {\n\n eprintln!(\"{}\", msg);\n\n std::process::exit(1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 29, "score": 67553.62525316035 }, { "content": "/// parse a single line from an activity file into an Activity struct\n\nfn parse_activity(line: &str) -> Result<Activity> {\n\n let mut parts = line.split(';');\n\n\n\n //let mut timestamp: DateTime<Local> = Local::now();\n\n let mut category: String = String::new();\n\n\n\n let timestamp = match parts.next() {\n\n Some(ts) => str2ts(ts)?,\n\n None => bail!(\"No activity timestamp found\"),\n\n };\n\n\n\n let reps = match parts.next() {\n\n Some(rep_str) => rep_str.trim().parse().with_context(|| {\n\n format!(\n\n \"Repetitions <{}> can not be parsed as whole number\",\n\n rep_str\n\n )\n\n })?,\n\n None => bail!(\"No repetitions found\"),\n\n };\n", "file_path": "src/storage/fs/activity.rs", "rank": 30, "score": 67004.64661199143 }, { "content": "fn parse_cmdline<I, T>(args: I) -> Result<CliAction, ()>\n\nwhere\n\n I: IntoIterator<Item = T>,\n\n T: Into<OsString> + Clone,\n\n{\n\n let app = setup_clap_app();\n\n\n\n let matches = match app.get_matches_from_safe(args) {\n\n Ok(m) => m,\n\n Err(error) => {\n\n eprintln!(\"{}\", error);\n\n return Err(());\n\n }\n\n };\n\n\n\n if let Some(ref report) = matches.subcommand_matches(\"report\") {\n\n return Ok(eval_report(report));\n\n } else if let Some(_system) = matches.subcommand_matches(\"system\") {\n\n return Ok(CliAction::System);\n\n } else if let Some(log) = matches.subcommand_matches(\"log\") {\n", "file_path": "src/cli.rs", "rank": 31, "score": 66053.40504672505 }, { "content": "fn init_data_dir(cfg: &Config) -> Result<PathBuf> {\n\n let path = PathBuf::from(&cfg.data_dir);\n\n std::fs::create_dir_all(&path)\n\n .with_context(|| format!(\"Could not create base directory {:?}\", &path))?;\n\n\n\n Ok(path)\n\n}\n", "file_path": "src/storage/fs.rs", "rank": 32, "score": 65753.30750927293 }, { "content": "/// Open the activity file for the given timestamp.\n\n/// If it doesn't exist, initialize it\n\nfn init_activity_file(path: &Path) -> Result<File> {\n\n if path.exists() {\n\n return Ok(OpenOptions::new().append(true).open(path)?);\n\n }\n\n\n\n let mut file: File = OpenOptions::new().create(true).write(true).open(path)?;\n\n\n\n writeln!(\n\n &mut file,\n\n \"\\\n\n{}\n\n# List of recorded activities for the 'naday' tool (https://github.com/creinig/naday)\n\n# Lines beginning with '#' are comments and are ignored by the tool\n\n# The remaining lines are plain CSV, with one recorded activity per line.\n\n# Separator character is ';', encoding is UTF-8.\n\n# Columns: timestamp (local time zone) ; number of repetitions ; category (excercise)\",\n\n PREAMBLE_ACTIVITIES_V1\n\n )?;\n\n\n\n Ok(file)\n\n}\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 33, "score": 
65753.30750927293 }, { "content": "/// Read all activities for a given day\n\npub fn read_day(date: &Date<Local>, config: &Config) -> ActivitiesOrError {\n\n read_days(date, date, config)\n\n}\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 34, "score": 60739.79805056788 }, { "content": "/// parse string as activity timestamp\n\nfn str2ts<S: AsRef<str>>(raw: S) -> Result<DateTime<Local>> {\n\n let ts = Local\n\n .datetime_from_str(raw.as_ref().trim(), ACTIVITY_TS_FORMAT)\n\n .with_context(|| format!(\"Unable to patse activity timestamp <{}>\", raw.as_ref()))?;\n\n Ok(ts)\n\n}\n\n\n\n//\n\n// Tests ---------------------------------\n\n//\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::model::{Activity, Config};\n\n use chrono::prelude::{DateTime, Local};\n\n use chrono::Duration;\n\n use std::path::Path;\n\n use tempfile::TempDir;\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 35, "score": 58234.49162913064 }, { "content": "/// Read all activities for the days from \"start\" up to \"end\" (inclusive)\n\npub fn read_days(start: &Date<Local>, end: &Date<Local>, config: &Config) -> ActivitiesOrError {\n\n if end < start {\n\n panic!(\"end is before start\");\n\n }\n\n\n\n let mut paths = Vec::new();\n\n let mut day = *start;\n\n while day <= *end {\n\n paths.push(path_for_date(&day, config));\n\n day = day.succ();\n\n }\n\n\n\n let paths = paths.into_iter().unique();\n\n let mut activities = Vec::new();\n\n\n\n for path in paths {\n\n debug!(\"Reading path {:?}\", &path);\n\n let mut for_path = read_activities(&path)\n\n .with_context(|| format!(\"Unable to read activities from file {:?}\", &path))?;\n\n activities.append(&mut for_path);\n", "file_path": "src/storage/fs/activity.rs", "rank": 36, "score": 55217.01464837706 }, { "content": "type ActivitiesOrError = Result<Vec<Activity>>;\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 37, "score": 40012.37095991349 }, { "content": "fn main() {\n\n setup_panic!();\n\n\n\n let ctx = naday::cli_parse(env::args());\n\n\n\n env_logger::builder()\n\n .format_timestamp(None)\n\n .format_module_path(false)\n\n .init();\n\n\n\n if let Err(msg) = naday::run(ctx) {\n\n eprintln!(\"Error: {}\", msg);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 38, "score": 36226.15472562574 }, { "content": "#[test]\n\nfn daystats_basic() {\n\n let mut stats = DayStats::new(&Local::now().date());\n\n let lookup = new_lookup();\n\n\n\n assert_eq!(0, stats.reps_total(&lookup));\n\n stats.add(&Activity::new(15, \"Pushups\"));\n\n stats.add(&Activity::new(20, \"Pushups\"));\n\n stats.add(&Activity::new(23, \"Pullups\"));\n\n stats.add(&Activity::new(20, \"Burpees\"));\n\n stats.add(&Activity::new(1500, \"Steps\"));\n\n\n\n assert_eq!(\n\n stats.reps_total(&lookup),\n\n (15.0 + 20.0 + 23.0 + (20.0 * 1.5) + (1500.0 * 0.01)) as u32\n\n );\n\n\n\n assert_eq!(*stats.reps_by_category.get(\"Pushups\").unwrap(), 15 + 20);\n\n assert_eq!(*stats.reps_by_category.get(\"Pullups\").unwrap(), 23);\n\n assert_eq!(*stats.reps_by_category.get(\"Burpees\").unwrap(), 20);\n\n assert_eq!(*stats.reps_by_category.get(\"Steps\").unwrap(), 1500);\n\n}\n\n\n", "file_path": "src/report/test_common.rs", "rank": 39, "score": 33470.46830240359 }, { "content": "fn default_data_dir() -> String {\n\n let homedir = BaseDirs::new().unwrap();\n\n let homedir = homedir.home_dir();\n\n\n\n homedir.join(\".naday\").to_str().unwrap().to_string()\n\n}\n\n\n\n//\n\n// Tests ---------------------------------------------------------\n\n//\n\n#[cfg(test)]\n\nmod 
tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn simple_commands() {\n\n let ctx = RunContext::new(build_args(vec![\"system\"]).into_iter());\n\n assert_eq!(CliAction::System, ctx.unwrap().action);\n\n\n\n let ctx = RunContext::new(build_args(vec![\"report\"]).into_iter());\n", "file_path": "src/cli.rs", "rank": 40, "score": 32867.99790786393 }, { "content": "#[test]\n\nfn weighted_total_basic() {\n\n let lookup = new_lookup();\n\n\n\n let mut activities = Vec::new();\n\n activities.push(Activity::new(15, \"Pushups\"));\n\n activities.push(Activity::new(20, \"Burpees\"));\n\n activities.push(Activity::new(13, \"Burpees\"));\n\n activities.push(Activity::new(3200, \"Steps\"));\n\n activities.push(Activity::new(28, \"Beers\"));\n\n\n\n assert_eq!(\n\n weighted_total(&activities, &lookup),\n\n (15.0 + ((20.0 + 13.0) * 1.5) + (3200.0 * 0.01) + 28.0) as u32\n\n );\n\n}\n\n\n", "file_path": "src/report/test_common.rs", "rank": 41, "score": 32680.766691890458 }, { "content": "fn setup_clap_app() -> App<'static, 'static> {\n\n App::new(\"naday\")\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(\"A little tool for tracking (physical) excercise of the 'N repetitions a day' variant - 100 pushups per day, 10'000 steps per day etc.\")\n\n .arg(Arg::from_usage(\"[SHORTHAND] 'Shorthand notation for the most common use cases'\")\n\n .long_help(\n\n\"'18pu' is short for 'log 18pu'\n\n'rd' is short for 'report --day'\n\n'rw' is short for 'report --week\")\n\n .conflicts_with_all(&[\"log\", \"system\", \"report\"]))\n\n .subcommand(\n\n App::new(\"log\").about(\"Log an activity\")\n\n .arg(Arg::from_usage(\"[SPEC] 'Shorthand notation of the activity to log'\"))\n\n )\n\n .subcommand(\n\n App::new(\"system\").about(\"Get information on the tool's environment and settings\")\n\n )\n\n .subcommand(\n\n App::new(\"report\").about(\"Generate a report on logged activities\")\n\n .arg(Arg::from_usage(\"-d, --day 'Print detailed report for today'\"))\n\n .arg(Arg::from_usage(\"-w, --week 'Print a report of the current week'\"))\n\n .arg(Arg::from_usage(\"-m, --month 'Print a report of the current month'\"))\n\n .group(ArgGroup::with_name(\"report_kind\").args(&[\"day\", \"week\", \"month\"]).required(false).multiple(false))\n\n .arg(Arg::from_usage(\"-c, --category=<NAME_OR_ALIAS> 'print stats on that category instead of the total'\").required(false))\n\n )\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 42, "score": 30316.290337108534 }, { "content": "fn eval_report(report: &ArgMatches) -> CliAction {\n\n let kind = if report.is_present(\"day\") {\n\n ReportKind::Day\n\n } else if report.is_present(\"week\") {\n\n ReportKind::Week\n\n } else {\n\n ReportKind::Month\n\n };\n\n\n\n let category = match report.value_of(\"category\") {\n\n Some(name) => Some(name.to_string()),\n\n None => None,\n\n };\n\n\n\n CliAction::Report {\n\n kind,\n\n category,\n\n sliding: true,\n\n }\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 43, "score": 29628.356940184065 }, { "content": "/// convert activity timestamp to string\n\nfn ts2str(timestamp: DateTime<Local>) -> String {\n\n timestamp.format(ACTIVITY_TS_FORMAT).to_string()\n\n}\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 44, "score": 28543.820970810986 }, { "content": "/// Read all activities in the given file. 
If the file does not exist an empty list is returned.\n\nfn read_activities(file_path: &Path) -> ActivitiesOrError {\n\n let contents = match fs::read_to_string(file_path) {\n\n Ok(raw) => raw,\n\n Err(error) => match error.kind() {\n\n std::io::ErrorKind::NotFound => return Ok(Vec::new()),\n\n _ => bail!(error),\n\n },\n\n };\n\n\n\n let mut activities = Vec::new();\n\n\n\n let mut lines = contents.lines();\n\n if let Some(preamble) = lines.next() {\n\n if preamble.trim() != PREAMBLE_ACTIVITIES_V1 {\n\n bail!(ParseError::new(\n\n \"No valid preamble found - unable to determine file format\",\n\n ));\n\n }\n\n } else {\n\n bail!(ParseError::new(\"File seems to be empty\"));\n", "file_path": "src/storage/fs/activity.rs", "rank": 45, "score": 28382.18435727471 }, { "content": "/// Get the path (fully qualified filename) of the file containing the activities of the given\n\n/// date.\n\n/// This does not check whether the file or its parent directories exist.\n\nfn path_for_date(date: &Date<Local>, config: &Config) -> PathBuf {\n\n let filename = date.format(ACTIVITY_FILE_FORMAT).to_string();\n\n let file_path: PathBuf = [&config.data_dir, &filename].iter().collect();\n\n file_path\n\n}\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 46, "score": 26068.438863831307 }, { "content": " aliases.push(alias.trim());\n\n }\n\n\n\n Ok(Category::new(name, weight, aliases))\n\n}\n\n\n\n//\n\n// Tests --------------------------------\n\n//\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::model::Config;\n\n use tempfile::TempDir;\n\n\n\n #[test]\n\n fn file_init() {\n\n let tmp_dir = TempDir::new().unwrap();\n\n let cfg = cfg(&tmp_dir);\n\n\n", "file_path": "src/storage/fs/category.rs", "rank": 49, "score": 26008.524429357883 }, { "content": "use crate::model::{Category, Config};\n\n\n\nuse crate::error::ParseError;\n\nuse anyhow::{bail, Context, Result};\n\nuse std::fs;\n\nuse std::fs::{File, OpenOptions};\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "src/storage/fs/category.rs", "rank": 51, "score": 26007.290056723054 }, { "content": " if line.starts_with('#') || line.is_empty() {\n\n continue;\n\n }\n\n\n\n match parse_category(line) {\n\n Ok(category) => categories.push(category),\n\n Err(msg) => eprintln!(\n\n \"Skipping unreadable category <{}> in {}: {}\",\n\n line,\n\n path.to_str().unwrap(),\n\n msg\n\n ),\n\n }\n\n }\n\n\n\n Ok(categories)\n\n}\n\n\n\n//\n\n// Internals ---------------------------\n\n//\n\n\n\nconst PREAMBLE_CATEGORIES_V1: &str = \"naday categories v1\";\n\n\n", "file_path": "src/storage/fs/category.rs", "rank": 56, "score": 26004.2484901136 }, { "content": "# Lines beginning with '#' are comments and are ignored by the tool.\n\n# The remaining lines are basically plain CSV, with one category per line.\n\n# Separator character is ';', encoding is UTF-8.\n\n# Columns: display name ; 'weight' of repetitions in relation to other activities [; alias]*\n\nPushups;1;pu;push\n\nSitups;1;si\n\nBurpees;1.5;bu\n\nPlankSeconds;0.33;pl\n\nWalkingSteps;0.01;wa\n\n# General category for unplanned / one-off strenuous activity\n\nExtra;1;x\n\n\", PREAMBLE_CATEGORIES_V1)?;\n\n }\n\n\n\n // return a readonly handle\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/storage/fs/category.rs", "rank": 57, "score": 26003.68198073564 }, { "content": " let categories = read_categories(&cfg).unwrap();\n\n assert_eq!(6, categories.len());\n\n\n\n assert_eq!(\"Pushups\", &(categories.get(0).unwrap().name));\n\n assert_eq!(1.0, 
categories.get(0).unwrap().weight);\n\n assert_eq!(\n\n \"Category (Pushups, 1, [\\\"pu\\\", \\\"push\\\"])\",\n\n categories.get(0).unwrap().to_string()\n\n );\n\n assert_eq!(\n\n \"Category (Situps, 1, [\\\"si\\\"])\",\n\n categories.get(1).unwrap().to_string()\n\n );\n\n assert_eq!(\n\n \"Category (Burpees, 1.5, [\\\"bu\\\"])\",\n\n categories.get(2).unwrap().to_string()\n\n );\n\n assert_eq!(\n\n \"Category (PlankSeconds, 0.33, [\\\"pl\\\"])\",\n\n categories.get(3).unwrap().to_string()\n", "file_path": "src/storage/fs/category.rs", "rank": 58, "score": 26003.097099671802 }, { "content": " );\n\n }\n\n\n\n fn cfg(tmp: &TempDir) -> Config {\n\n Config {\n\n data_dir: tmp.path().to_str().unwrap().to_string(),\n\n }\n\n }\n\n}\n", "file_path": "src/storage/fs/category.rs", "rank": 59, "score": 25998.709929314988 }, { "content": "/// Build daily statistics for the given category\n\n///\n\n/// # Arguments\n\n/// * `activities`: All recorded activities in the given interval\n\n/// * `start`: Interval start date\n\n/// * `end`: Interval end date\n\n///\n\n/// # Returns\n\n/// A vector with one entry per day in (start..=end), each holding the total number of reps for\n\n/// all categories\n\nfn build_stats(activities: &[Activity], start: &Date<Local>, end: &Date<Local>) -> Vec<DayStats> {\n\n let mut by_day: HashMap<Date<Local>, DayStats> = HashMap::new();\n\n\n\n for activity in activities {\n\n let today = activity.timestamp.date();\n\n\n\n let stats = by_day.entry(today).or_insert_with(|| DayStats::new(&today));\n\n stats.add(&activity);\n\n }\n\n\n\n let mut results = Vec::new();\n\n let mut day = *start;\n\n while day <= *end {\n\n let stats = by_day.remove(&day).unwrap_or_else(|| DayStats::new(&day));\n\n results.push(stats);\n\n day = day.succ();\n\n }\n\n\n\n results\n\n}\n\n\n", "file_path": "src/report/sliding.rs", "rank": 60, "score": 23442.257178588225 }, { "content": " pub fn new<T: Display>(name: &str, weight: f64, aliases: Vec<T>) -> Category {\n\n Category {\n\n name: name.to_string(),\n\n weight,\n\n aliases: aliases.iter().map(|a| a.to_string()).collect(),\n\n }\n\n }\n\n\n\n fn all_names(&self) -> Vec<&str> {\n\n let mut result: Vec<&str> = Vec::new();\n\n result.push(&self.name);\n\n for alias in &self.aliases {\n\n result.push(&alias);\n\n }\n\n\n\n result\n\n }\n\n}\n\n\n\nimpl Display for Category {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"Category ({}, {}, {:?})\",\n\n self.name, self.weight, self.aliases\n\n )\n\n }\n\n}\n", "file_path": "src/model.rs", "rank": 61, "score": 13.290425072686123 }, { "content": "impl PartialEq for Activity {\n\n fn eq(&self, other: &Self) -> bool {\n\n (self.timestamp == other.timestamp)\n\n && (self.category == other.category)\n\n && (self.reps == other.reps)\n\n }\n\n}\n\n\n\n//\n\n// Category ----------------------------\n\n//\n\n\n\n#[derive(Debug)]\n\npub struct Category {\n\n pub name: String,\n\n pub aliases: Vec<String>,\n\n pub weight: f64,\n\n}\n\n\n\nimpl Category {\n", "file_path": "src/model.rs", "rank": 62, "score": 12.518511750069628 }, { "content": " if by_category.len() > 1 {\n\n result.push_str(&format!(\" Weighted total : {}\", total));\n\n }\n\n\n\n result\n\n}\n\n\n\n//\n\n// Tests ------------------------------------\n\n//\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::model::{Activity, Category, CategoryLookup};\n\n\n\n #[test]\n\n fn basic() {\n\n let mut lookup = CategoryLookup::new();\n\n\n\n lookup.add(newcat(\"Pushups\", 1.0)).unwrap();\n", 
"file_path": "src/report/today.rs", "rank": 63, "score": 9.109534904870364 }, { "content": "use std::error::Error;\n\nuse std::fmt;\n\n\n\n//\n\n// Error types --------------------------\n\n//\n\n\n\n#[derive(Debug)]\n\npub struct ParseError {\n\n pub msg: String,\n\n}\n\n\n\nimpl fmt::Display for ParseError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"ParseError: {}\", self.msg)\n\n }\n\n}\n\n\n\nimpl Error for ParseError {}\n\n\n", "file_path": "src/error.rs", "rank": 64, "score": 9.094626352082576 }, { "content": "\n\n#[derive(Debug)]\n\npub struct Activity {\n\n pub timestamp: DateTime<Local>,\n\n pub category: String,\n\n pub reps: u32,\n\n}\n\n\n\nimpl Activity {\n\n pub fn new<S: AsRef<str>>(repetitions: u32, category: S) -> Activity {\n\n let now = Local::now();\n\n\n\n Activity {\n\n timestamp: now,\n\n reps: repetitions,\n\n category: category.as_ref().to_string(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/model.rs", "rank": 65, "score": 9.0767883196816 }, { "content": " }\n\n\n\n let mut result = String::new();\n\n\n\n result.push_str(\"Stats for today:\\n\");\n\n for category in by_category.keys().sorted() {\n\n let reps = by_category.get(category).unwrap();\n\n\n\n let details = if individual.get(category).unwrap().len() > 1 {\n\n format!(\n\n \" ({})\",\n\n individual.get(category).unwrap().iter().join(\" + \")\n\n )\n\n } else {\n\n \"\".to_string()\n\n };\n\n\n\n result.push_str(&format!(\" {:<15}: {} reps{}\\n\", category, reps, details));\n\n }\n\n\n", "file_path": "src/report/today.rs", "rank": 66, "score": 8.959174158819865 }, { "content": " pub reps_by_category: HashMap<String, u32>,\n\n}\n\n\n\nimpl DayStats {\n\n pub fn new(day: &Date<Local>) -> DayStats {\n\n DayStats {\n\n day: *day,\n\n reps_by_category: HashMap::new(),\n\n }\n\n }\n\n\n\n /// Calculate the weighted total of all repetitions in this day\n\n pub fn reps_total(&self, categories: &CategoryLookup) -> u32 {\n\n let mut total = 0;\n\n\n\n for (cat, reps) in self.reps_by_category.iter() {\n\n if let Some(category) = categories.find(cat) {\n\n total += ((*reps as f64) * category.weight) as u32;\n\n } else {\n\n // default to a weight of 1 (e.g. 
for categories that don't exist anymore)\n", "file_path": "src/report/common.rs", "rank": 67, "score": 8.548170421954557 }, { "content": "use chrono::{DateTime, Local};\n\nuse std::fmt;\n\nuse std::fmt::Display;\n\n\n\nmod category_lookup;\n\n\n\npub use category_lookup::CategoryLookup;\n\n\n\n//\n\n// Config -------------------------\n\n//\n\n\n\n#[derive(Debug)]\n\npub struct Config {\n\n pub data_dir: String,\n\n}\n\n\n\n//\n\n// Activity -----------------------\n\n//\n", "file_path": "src/model.rs", "rank": 68, "score": 8.547800106595645 }, { "content": " pub action: CliAction,\n\n}\n\n\n\nimpl RunContext {\n\n pub fn new<T>(args: T) -> Result<RunContext, String>\n\n where\n\n T: Iterator<Item = String>,\n\n {\n\n let mut ctx = RunContext {\n\n config: Config {\n\n data_dir: default_data_dir(),\n\n },\n\n action: CliAction::System,\n\n };\n\n\n\n match parse_cmdline(args) {\n\n Ok(action) => {\n\n ctx.action = action;\n\n Ok(ctx)\n\n }\n\n Err(msg) => Err(format!(\"{:?}\", msg)),\n\n }\n\n }\n\n}\n\n\n\n//\n\n// functions -------------------------------------\n\n//\n\n\n", "file_path": "src/cli.rs", "rank": 69, "score": 8.47635365727722 }, { "content": "use crate::error::ParseError;\n\nuse crate::model::Config;\n\nuse anyhow::{bail, Result};\n\nuse clap::{arg_enum, crate_authors, crate_version, App, Arg, ArgGroup, ArgMatches};\n\nuse directories::BaseDirs;\n\nuse lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse std::ffi::OsString;\n\n\n\nlazy_static! {\n\n static ref ACTIVITY_PATTERN: Regex = Regex::new(r\"^(\\d+)([a-zA-Z_]\\w*)$\").unwrap();\n\n static ref REPORT_PATTERN: Regex = Regex::new(r\"^[rR]([dmwDMW])$\").unwrap();\n\n}\n\n\n\narg_enum! {\n\n #[derive(PartialEq, Debug)]\n\n pub enum ReportKind {\n\n Day,\n\n Week,\n\n Month\n", "file_path": "src/cli.rs", "rank": 70, "score": 8.064297945004466 }, { "content": "use crate::model::{Activity, CategoryLookup, Config};\n\n\n\nuse anyhow::{Context, Result};\n\nuse chrono::prelude::*;\n\nuse std::path::PathBuf;\n\n\n\nmod activity;\n\nmod category;\n\n\n\n/// Store the given activity on the filesystem\n", "file_path": "src/storage/fs.rs", "rank": 71, "score": 7.395583650294589 }, { "content": "use crate::model::{Activity, Config};\n\n\n\nuse crate::error::ParseError;\n\nuse anyhow::{bail, Context, Result};\n\nuse chrono::prelude::*;\n\nuse itertools::Itertools;\n\nuse log::debug;\n\nuse std::fs;\n\nuse std::fs::{File, OpenOptions};\n\nuse std::io::Write;\n\nuse std::path::{Path, PathBuf};\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 72, "score": 6.700049933685139 }, { "content": " lookup.add(newcat(\"Burpees\", 1.5)).unwrap();\n\n lookup.add(newcat(\"Steps\", 0.01)).unwrap();\n\n\n\n let mut activities = Vec::new();\n\n activities.push(Activity::new(15, \"Pushups\"));\n\n activities.push(Activity::new(20, \"Burpees\"));\n\n activities.push(Activity::new(13, \"Burpees\"));\n\n activities.push(Activity::new(3200, \"Steps\"));\n\n activities.push(Activity::new(28, \"Beers\"));\n\n\n\n let report = report(&activities, &lookup);\n\n\n\n assert_eq!(\n\n report,\n\n \"\\\n\nStats for today:\n\n Beers : 28 reps\n\n Burpees : 33 reps (20 + 13)\n\n Pushups : 15 reps\n\n Steps : 3200 reps\n\n Weighted total : 124\"\n\n );\n\n }\n\n\n\n fn newcat(name: &str, weight: f64) -> Category {\n\n Category::new(name, weight, Vec::<String>::new())\n\n }\n\n}\n", "file_path": "src/report/today.rs", "rank": 73, "score": 6.500190693600061 }, { "content": "impl ParseError {\n\n pub fn new<T: AsRef<str>>(msg: T) -> ParseError {\n\n ParseError 
{\n\n msg: msg.as_ref().to_string(),\n\n }\n\n }\n\n}\n\n\n\n//\n\n// Helper Functions ---------------------\n\n//\n", "file_path": "src/error.rs", "rank": 74, "score": 6.472941750849158 }, { "content": "use crate::model::{Activity, Category, CategoryLookup};\n\nuse crate::report::common::*;\n\nuse chrono::Local;\n\n\n\n#[test]\n", "file_path": "src/report/test_common.rs", "rank": 75, "score": 5.845206881258151 }, { "content": " #[test]\n\n fn store_dirinit() -> Result<()> {\n\n let tmp_dir = TempDir::new()?;\n\n let cfg = cfg(&tmp_dir);\n\n let timestamp: DateTime<Local> = Local::now();\n\n\n\n let activity = Activity::new(34, \"Pushups\");\n\n\n\n store(&activity, &cfg)?;\n\n\n\n let filename = timestamp.format(ACTIVITY_FILE_FORMAT).to_string();\n\n let filepath = cfg.data_dir + &(std::path::MAIN_SEPARATOR.to_string()) + &filename;\n\n\n\n assert!(Path::new(&filepath).exists());\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn parse_activity() {\n", "file_path": "src/storage/fs/activity.rs", "rank": 76, "score": 5.302440551003841 }, { "content": "mod tests {\n\n use super::*;\n\n use crate::model::Activity;\n\n use chrono::{Local, TimeZone};\n\n\n\n #[test]\n\n fn build_stats_basic() {\n\n let mut activities = Vec::new();\n\n\n\n // - create activities over multiple days (with multiple A per day & category, and multiple\n\n // categories per day)\n\n activities.push(activity(5, 13, \"Pushups\"));\n\n activities.push(activity(5, 23, \"Pushups\"));\n\n activities.push(activity(5, 23, \"Burpees\"));\n\n activities.push(activity(6, 15, \"Burpees\"));\n\n activities.push(activity(7, 14, \"Burpees\"));\n\n activities.push(activity(7, 23, \"Burpees\"));\n\n\n\n let start = Local.ymd(2020, 7, 1);\n\n let end = Local.ymd(2020, 7, 30);\n", "file_path": "src/report/sliding.rs", "rank": 77, "score": 5.22340805508037 }, { "content": "\n\n if let Some(cat) = parts.next() {\n\n category = cat.trim().to_string();\n\n }\n\n\n\n Ok(Activity {\n\n timestamp,\n\n reps,\n\n category,\n\n })\n\n}\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 78, "score": 5.205069366595891 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum CliAction {\n\n AddActivity {\n\n repetitions: u32,\n\n category: String,\n\n },\n\n Report {\n\n kind: ReportKind,\n\n category: Option<String>,\n\n sliding: bool,\n\n },\n\n System,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RunContext {\n\n pub config: Config,\n", "file_path": "src/cli.rs", "rank": 79, "score": 5.119252737799706 }, { "content": "use crate::model::{Activity, CategoryLookup, Config};\n\nuse crate::report::common;\n\nuse crate::storage;\n\nuse itertools::Itertools;\n\nuse std::collections::HashMap;\n\n\n\n/// Print the report for today\n", "file_path": "src/report/today.rs", "rank": 80, "score": 5.069612476190711 }, { "content": "use crate::model::{Activity, CategoryLookup, Config};\n\nuse chrono::{Date, Local};\n\n\n\nmod fs;\n\n\n\n//\n\n// Main Interface -----------------------\n\n//\n", "file_path": "src/storage.rs", "rank": 81, "score": 4.9673078815547225 }, { "content": "use crate::model::{Activity, CategoryLookup, Config};\n\nuse crate::report::common::DayStats;\n\nuse crate::storage;\n\nuse chrono::{Date, Datelike, Duration, Local};\n\nuse std::collections::HashMap;\n\nuse std::vec::Vec;\n\n\n", "file_path": "src/report/sliding.rs", "rank": 82, "score": 4.937695562156485 }, { "content": "use crate::model::{Activity, CategoryLookup};\n\nuse chrono::{Date, Local};\n\nuse std::collections::HashMap;\n\n\n\n/// Calculate the weighted total 
repetitions over the the given activities\n", "file_path": "src/report/common.rs", "rank": 83, "score": 4.915573440614676 }, { "content": " timestamp: timestamp2,\n\n reps: 20,\n\n category: \"Situps\".to_string(),\n\n };\n\n store(&activity, &cfg)?;\n\n\n\n let activities = read_activities(&path)?;\n\n assert_eq!(2, activities.len());\n\n\n\n assert_eq!(activity, activities[1]);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn multiple_months() {\n\n let tmp_dir = TempDir::new().unwrap();\n\n let cfg = cfg(&tmp_dir);\n\n let start_date = str2ts(\"2020-12-13 14:34:53\").unwrap();\n\n let ndays = 200;\n", "file_path": "src/storage/fs/activity.rs", "rank": 84, "score": 4.5976640916578635 }, { "content": " total += reps;\n\n }\n\n }\n\n\n\n total\n\n }\n\n\n\n /// Add the given activity to the reps in this day\n\n pub fn add(&mut self, activity: &Activity) {\n\n self.reps_by_category\n\n .entry(activity.category.to_string())\n\n .and_modify(|e| *e += activity.reps)\n\n .or_insert(activity.reps);\n\n }\n\n}\n", "file_path": "src/report/common.rs", "rank": 85, "score": 4.314582900807888 }, { "content": " assert_eq!(\n\n CliAction::Report {\n\n kind: ReportKind::Month,\n\n category: None,\n\n sliding: true,\n\n },\n\n ctx.unwrap().action\n\n );\n\n\n\n let ctx =\n\n RunContext::new(build_args(vec![\"report\", \"--week\", \"--category=pu\"]).into_iter());\n\n assert_eq!(\n\n CliAction::Report {\n\n kind: ReportKind::Week,\n\n category: Some(\"pu\".to_string()),\n\n sliding: true,\n\n },\n\n ctx.unwrap().action\n\n );\n\n }\n", "file_path": "src/cli.rs", "rank": 86, "score": 3.753247029539949 }, { "content": "mod cli;\n\nmod error;\n\nmod model;\n\nmod report;\n\nmod storage;\n\n\n\nuse cli::CliAction;\n\nuse cli::RunContext;\n\nuse itertools::Itertools;\n\nuse model::{Activity, Config};\n\nuse std::cmp::Ord;\n\nuse std::env;\n\nuse std::process;\n\n\n", "file_path": "src/lib.rs", "rank": 87, "score": 3.4096341059720516 }, { "content": " );\n\n\n\n let ctx = RunContext::new(build_args(vec![\"rm\"]).into_iter());\n\n assert_eq!(\n\n CliAction::Report {\n\n kind: ReportKind::Month,\n\n category: None,\n\n sliding: true,\n\n },\n\n ctx.unwrap().action\n\n );\n\n }\n\n\n\n fn build_args(raw: Vec<&str>) -> Vec<String> {\n\n let mut args: Vec<String> = Vec::new();\n\n args.push(\"naday\".to_string());\n\n\n\n for item in raw {\n\n args.push(item.to_string());\n\n }\n", "file_path": "src/cli.rs", "rank": 88, "score": 3.380332486255539 }, { "content": "\n\n #[test]\n\n fn activities() {\n\n let ctx = RunContext::new(build_args(vec![\"16pu\"]).into_iter());\n\n assert_eq!(build_activity(16, \"pu\"), ctx.unwrap().action);\n\n\n\n let ctx = RunContext::new(build_args(vec![\"23h2\"]).into_iter());\n\n assert_eq!(build_activity(23, \"h2\"), ctx.unwrap().action);\n\n }\n\n\n\n #[test]\n\n fn shorthand() {\n\n let ctx = RunContext::new(build_args(vec![\"rd\"]).into_iter());\n\n assert_eq!(\n\n CliAction::Report {\n\n kind: ReportKind::Day,\n\n category: None,\n\n sliding: true,\n\n },\n\n ctx.unwrap().action\n", "file_path": "src/cli.rs", "rank": 89, "score": 3.3720279107262945 }, { "content": "use human_panic::setup_panic;\n\nuse std::env;\n\n\n", "file_path": "src/main.rs", "rank": 90, "score": 3.2110740835886675 }, { "content": " let activity = Activity::new(13, \"Burpees\");\n\n\n\n let path = path_for_date(&timestamp.date(), &cfg);\n\n assert!(path.exists() == false);\n\n\n\n store(&activity, &cfg).unwrap();\n\n assert!(path.exists());\n\n\n\n let contents = fs::read_to_string(path).unwrap();\n\n 
assert_eq!(PREAMBLE_ACTIVITIES_V1, contents.lines().next().unwrap());\n\n }\n\n\n\n #[test]\n\n fn activity_roundtrip() -> Result<()> {\n\n let tmp_dir = TempDir::new()?;\n\n let cfg = cfg(&tmp_dir);\n\n let timestamp1 = str2ts(\"2020-12-13 14:34:53\")?;\n\n let path = path_for_date(&timestamp1.date(), &cfg);\n\n\n\n println!(\"Target path = <{:?}>\", &path);\n", "file_path": "src/storage/fs/activity.rs", "rank": 91, "score": 3.2025009153448023 }, { "content": " let spec = log.value_of(\"SPEC\").unwrap(); // required parameter\n\n if let Ok(activity) = parse_activity(&spec) {\n\n return Ok(activity);\n\n } else {\n\n eprintln!(\"{}\", log.usage());\n\n return Err(());\n\n }\n\n } else if let Some(shorthand) = matches.value_of(\"SHORTHAND\") {\n\n return parse_shorthand(&shorthand);\n\n }\n\n\n\n Ok(CliAction::System)\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 92, "score": 3.012963155954653 }, { "content": "```\n\n\n\nThis uses the alias \"pu\" for Pushups defined in `~/.naday/category.txt` to specify what you did.\n\nThe general pattern for this argument is `<repetitions><name_or_alias>`, with case insensitive\n\n`name_or_alias`. So the same could have been\n\nwritten as \"18Pushups\", \"18pushups\", \"18Push\" etc.\n\n\n\n\n\n`naday report --day` will print a little report of today's activities (the same as the info printed\n\nwhen logging an activity):\n\n\n\n```\n\nStats for today:\n\n Burpees : 15 reps\n\n Pushups : 33 reps (16 + 17)\n\n PlankSeconds : 60 reps\n\n Weighted total : 75\n\n```\n\n\n\n`naday report --month --category=pu` will print an overview of pushups for the past month. If you\n\nomit the `--category` option, only the weighted totals will be printed. For now this only lists the total value\n\nper day for the past 31 days, but additional options are being worked on:\n\n\n\n```\n\nReport on Pushups for the past 31 days\n\n\n\nWed: 0 reps ( 0 total)\n\nThu: 0 reps ( 0 total)\n\nFri: 0 reps ( 0 total)\n\nSat: 0 reps ( 0 total)\n\nSun: 0 reps ( 0 total)\n\nMon: 0 reps ( 0 total)\n\nTue: 0 reps ( 0 total)\n\nWed: 23 reps ( 36 total)\n\nThu: 16 reps ( 16 total)\n\nFri: 0 reps ( 0 total)\n\nSat: 0 reps ( 92 total)\n\nSun: 0 reps ( 0 total)\n\nMon: 0 reps ( 87 total)\n\nTue: 0 reps ( 0 total)\n\nWed: 0 reps ( 0 total)\n\nThu: 0 reps ( 0 total)\n\nFri: 0 reps ( 0 total)\n\nSat: 0 reps ( 0 total)\n\nSun: 0 reps ( 0 total)\n\nMon: 0 reps ( 0 total)\n\nTue: 0 reps ( 0 total)\n\nWed: 0 reps ( 0 total)\n\nThu: 0 reps ( 0 total)\n\nFri: 0 reps ( 0 total)\n\nSat: 0 reps ( 0 total)\n\nSun: 0 reps ( 0 total)\n\nMon: 0 reps ( 0 total)\n\nTue: 0 reps ( 0 total)\n\nWed: 0 reps ( 0 total)\n\nThu: 0 reps ( 0 total)\n\nFri: 0 reps ( 0 total)\n\n```\n\n\n\nIf you're just interested in the past 7 days, use `--week` instead of `--month`.\n\n\n\nAdditional and better reports are planned. 
You can also directly load the save files into a \n\nspreadsheet (they are basically plain CSV) and generate your own custom reports.\n", "file_path": "README.md", "rank": 93, "score": 3.003324590155071 }, { "content": "mod common;\n\nmod sliding;\n\nmod today;\n\n\n\n#[cfg(test)]\n\nmod test_common;\n\n\n\nuse crate::model::Config;\n\nuse chrono::Local;\n\n\n", "file_path": "src/report.rs", "rank": 94, "score": 2.8486071333923606 }, { "content": "\n\n args\n\n }\n\n\n\n fn build_activity(repetitions: u32, category: &str) -> CliAction {\n\n CliAction::AddActivity {\n\n repetitions: repetitions,\n\n category: category.to_string(),\n\n }\n\n }\n\n}\n", "file_path": "src/cli.rs", "rank": 95, "score": 2.78750884892318 }, { "content": " }\n\n _ => {\n\n assert_eq!(0, daystat.reps_by_category.len());\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn activity(day_of_month: u32, reps: u32, category: &str) -> Activity {\n\n let time = Local.ymd(2020, 7, day_of_month).and_hms(13, 45, 34);\n\n Activity {\n\n timestamp: time,\n\n reps,\n\n category: category.to_string(),\n\n }\n\n }\n\n}\n", "file_path": "src/report/sliding.rs", "rank": 96, "score": 2.771287988343214 }, { "content": "# N a Day\n\n[![Build Status](https://travis-ci.org/creinig/naday.svg?branch=master)](https://travis-ci.org/creinig/naday)\n\n\n\nThis is a little tool for tracking (physical) excercise of the \"N repetitions a day\" variant -\n\n100 pushups per day, 10'000 steps per day etc.\n\n\n\nSince I not only needed such a tracker, but also needed some playground project for learning [rust](https://www.rust-lang.org/),\n\nthis is implemented as CLI tool. Plus, I suck at GUIs. I personally run it in [termux](https://termux.com/) on my android phone.\n\n\n\n\n\n## Installation\n\n\n\nFor now you have to compile & install it yourself, but proper release builds are planned, starting with milestone 0.1.0.\n\n\n\nManual builds:\n\n\n\n```\n\ngit clone https://github.com/creinig/naday.git\n\ncd naday\n\ncargo build --release\n\ncp target/release/naday ~/.local/bin/\n\n```\n\n\n\n## Usage\n\n\n\n`naday system` prints configuration settings:\n\n\n\n```\n\nStorage directory: /home/creinig/.naday\n\nKnown Categories:\n\n Burpees (weight 1.5 ), aliases bu\n\n PlankSeconds (weight 0.33 ), aliases pl\n\n Pushups (weight 1 ), aliases pu, push\n\n Situps (weight 1 ), aliases si\n\n```\n\n\n\nThis shows some key points that are central to the tool's usage:\n\n\n\n1. All data is stored in plain text files under your home directory. Each of these files\n\n contains a description of its format at the top, so feel free to look at them and edit them manually if needed.\n\n\n\n2. Different types of excercises are called \"categories\", and the tool comes with a few of them preinstalled\n\n (in `~/.naday/categories.txt`). Each category has a display name, optionally a few aliases and a weight \n\n allowing a kind of \"comparison\" between logged repetitions. 
This allows for displaying a \"weighted total\"\n\n if you mix different excercises over the day.\n\n\n\n\n\n`naday 18pu` logs a set of reps:\n\n\n\n```\n\nAdded 18 Pushups\n\n\n\nStats for today:\n\n Pushups: 18 reps\n", "file_path": "README.md", "rank": 97, "score": 2.529552040358534 }, { "content": " }\n\n\n\n for line in lines {\n\n let line = line.trim();\n\n if line.starts_with('#') || line.is_empty() {\n\n continue;\n\n }\n\n\n\n match parse_activity(line) {\n\n Ok(activity) => activities.push(activity),\n\n Err(error) => eprintln!(\n\n \"Skipping unreadable activity <{}> in {}: {:?}\",\n\n line,\n\n file_path.to_str().unwrap(),\n\n error\n\n ),\n\n }\n\n }\n\n\n\n Ok(activities)\n\n}\n\n\n", "file_path": "src/storage/fs/activity.rs", "rank": 98, "score": 2.5261927869609977 }, { "content": " let activity = super::parse_activity(\"2020-12-13 05:43:25;12;Pushups\").unwrap();\n\n assert_eq!(2020, activity.timestamp.year());\n\n assert_eq!(12, activity.timestamp.month());\n\n assert_eq!(13, activity.timestamp.day());\n\n assert_eq!(5, activity.timestamp.hour());\n\n assert_eq!(43, activity.timestamp.minute());\n\n assert_eq!(25, activity.timestamp.second());\n\n assert_eq!(12, activity.reps);\n\n assert_eq!(\"Pushups\", activity.category);\n\n\n\n // Just testing that some cases are parsed at all for now\n\n super::parse_activity(\"2003-01-01 00:00:00 ; 1 ; Burpees\").unwrap();\n\n super::parse_activity(\"2021-12-31 23:59:59 ; 435 ; Plank_Minutes\").unwrap();\n\n }\n\n\n\n #[test]\n\n fn init_activity_file() {\n\n let tmp_dir = TempDir::new().unwrap();\n\n let cfg = cfg(&tmp_dir);\n\n let timestamp: DateTime<Local> = Local::now();\n", "file_path": "src/storage/fs/activity.rs", "rank": 99, "score": 1.956208679145683 } ]
Rust
program/src/orderbook.rs
solindex/orderbook
ed5e4246aa8ec5b5eef84aa0982bc50622cb9b5d
use crate::{ critbit::{LeafNode, Node, NodeHandle, Slab}, error::AoError, processor::new_order, state::{Event, EventQueue, SelfTradeBehavior, Side}, utils::{fp32_div, fp32_mul}, }; use borsh::{BorshDeserialize, BorshSerialize}; use solana_program::{account_info::AccountInfo, msg, program_error::ProgramError}; #[derive(BorshSerialize, BorshDeserialize, Debug)] pub struct OrderSummary { pub posted_order_id: Option<u128>, #[allow(missing_docs)] pub total_base_qty: u64, #[allow(missing_docs)] pub total_quote_qty: u64, #[allow(missing_docs)] pub total_base_qty_posted: u64, } pub const ORDER_SUMMARY_SIZE: u32 = 41; pub(crate) struct OrderBookState<'a> { bids: Slab<'a>, asks: Slab<'a>, callback_id_len: usize, } impl<'ob> OrderBookState<'ob> { pub(crate) fn new_safe( bids_account: &AccountInfo<'ob>, asks_account: &AccountInfo<'ob>, callback_info_len: usize, callback_id_len: usize, ) -> Result<Self, ProgramError> { let bids = Slab::new_from_acc_info(bids_account, callback_info_len); let asks = Slab::new_from_acc_info(asks_account, callback_info_len); if !(bids.check(Side::Bid) && asks.check(Side::Ask)) { return Err(ProgramError::InvalidAccountData); } Ok(Self { bids, asks, callback_id_len, }) } pub fn find_bbo(&self, side: Side) -> Option<NodeHandle> { match side { Side::Bid => self.bids.find_max(), Side::Ask => self.asks.find_min(), } } #[cfg(feature = "no-entrypoint")] pub fn get_spread(&self) -> (Option<u64>, Option<u64>) { let best_bid_price = self .bids .find_max() .map(|h| self.bids.get_node(h).unwrap().as_leaf().unwrap().price()); let best_ask_price = self .asks .find_max() .map(|h| self.asks.get_node(h).unwrap().as_leaf().unwrap().price()); (best_bid_price, best_ask_price) } pub fn get_tree(&mut self, side: Side) -> &mut Slab<'ob> { match side { Side::Bid => &mut self.bids, Side::Ask => &mut self.asks, } } pub(crate) fn commit_changes(&self) { self.bids.write_header(); self.asks.write_header(); } pub(crate) fn new_order( &mut self, params: new_order::Params, event_queue: &mut EventQueue, min_base_order_size: u64, ) -> Result<OrderSummary, AoError> { let new_order::Params { max_base_qty, max_quote_qty, side, limit_price, callback_info, post_only, post_allowed, self_trade_behavior, mut match_limit, } = params; let mut base_qty_remaining = max_base_qty; let mut quote_qty_remaining = max_quote_qty; let mut crossed = true; loop { if match_limit == 0 { break; } let best_bo_h = match self.find_bbo(side.opposite()) { None => { crossed = false; break; } Some(h) => h, }; let mut best_bo_ref = self .get_tree(side.opposite()) .get_node(best_bo_h) .unwrap() .as_leaf() .unwrap() .to_owned(); let trade_price = best_bo_ref.price(); crossed = match side { Side::Bid => limit_price >= trade_price, Side::Ask => limit_price <= trade_price, }; if post_only || !crossed { break; } let offer_size = best_bo_ref.base_quantity; let base_trade_qty = offer_size .min(base_qty_remaining) .min(fp32_div(quote_qty_remaining, best_bo_ref.price())); if base_trade_qty == 0 { break; } if self_trade_behavior != SelfTradeBehavior::DecrementTake { let order_would_self_trade = &callback_info[..self.callback_id_len] == (&self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) as &[u8]); if order_would_self_trade { let best_offer_id = best_bo_ref.order_id(); let cancelled_provide_base_qty; match self_trade_behavior { SelfTradeBehavior::CancelProvide => { cancelled_provide_base_qty = std::cmp::min(base_qty_remaining, best_bo_ref.base_quantity); } SelfTradeBehavior::AbortTransaction => return 
Err(AoError::WouldSelfTrade), SelfTradeBehavior::DecrementTake => unreachable!(), }; let remaining_provide_base_qty = best_bo_ref.base_quantity - cancelled_provide_base_qty; let delete = remaining_provide_base_qty == 0; let provide_out = Event::Out { side: side.opposite(), delete, order_id: best_offer_id, base_size: cancelled_provide_base_qty, callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), }; event_queue .push_back(provide_out) .map_err(|_| AoError::EventQueueFull)?; if delete { self.get_tree(side.opposite()) .remove_by_key(best_offer_id) .unwrap(); } else { best_bo_ref.set_base_quantity(remaining_provide_base_qty); self.get_tree(side.opposite()) .write_node(&Node::Leaf(best_bo_ref), best_bo_h); } continue; } } let quote_maker_qty = fp32_mul(base_trade_qty, trade_price); let maker_fill = Event::Fill { taker_side: side, maker_callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), taker_callback_info: callback_info.clone(), maker_order_id: best_bo_ref.order_id(), quote_size: quote_maker_qty, base_size: base_trade_qty, }; event_queue .push_back(maker_fill) .map_err(|_| AoError::EventQueueFull)?; best_bo_ref.set_base_quantity(best_bo_ref.base_quantity - base_trade_qty); base_qty_remaining -= base_trade_qty; quote_qty_remaining -= quote_maker_qty; if best_bo_ref.base_quantity <= min_base_order_size { let best_offer_id = best_bo_ref.order_id(); let cur_side = side.opposite(); let out_event = Event::Out { side: cur_side, order_id: best_offer_id, base_size: best_bo_ref.base_quantity, callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), delete: true, }; self.get_tree(cur_side) .remove_by_key(best_offer_id) .unwrap(); event_queue .push_back(out_event) .map_err(|_| AoError::EventQueueFull)?; } else { self.get_tree(side.opposite()) .write_node(&Node::Leaf(best_bo_ref), best_bo_h); } match_limit -= 1; } let base_qty_to_post = std::cmp::min( fp32_div(quote_qty_remaining, limit_price), base_qty_remaining, ); if crossed || !post_allowed || base_qty_to_post <= min_base_order_size { return Ok(OrderSummary { posted_order_id: None, total_base_qty: max_base_qty - base_qty_remaining, total_quote_qty: max_quote_qty - quote_qty_remaining, total_base_qty_posted: 0, }); } let new_leaf_order_id = event_queue.gen_order_id(limit_price, side); let callback_info_offset = self .get_tree(side) .write_callback_info(&callback_info) .unwrap(); let new_leaf = Node::Leaf(LeafNode { key: new_leaf_order_id, callback_info_pt: callback_info_offset, base_quantity: base_qty_to_post, }); let insert_result = self.get_tree(side).insert_leaf(&new_leaf); if let Err(AoError::SlabOutOfSpace) = insert_result { msg!("Orderbook is full! 
booting lest aggressive orders..."); let order = match side { Side::Bid => self.get_tree(Side::Bid).remove_min().unwrap(), Side::Ask => self.get_tree(Side::Ask).remove_max().unwrap(), }; let l = order.as_leaf().unwrap(); let out = Event::Out { side: Side::Bid, delete: true, order_id: l.order_id(), base_size: l.base_quantity, callback_info: self .get_tree(side) .get_callback_info(l.callback_info_pt as usize) .to_owned(), }; event_queue .push_back(out) .map_err(|_| AoError::EventQueueFull)?; self.get_tree(side).insert_leaf(&new_leaf).unwrap(); } else { insert_result.unwrap(); } base_qty_remaining -= base_qty_to_post; quote_qty_remaining -= fp32_mul(base_qty_to_post, limit_price); Ok(OrderSummary { posted_order_id: Some(new_leaf_order_id), total_base_qty: max_base_qty - base_qty_remaining, total_quote_qty: max_quote_qty - quote_qty_remaining, total_base_qty_posted: base_qty_to_post, }) } pub fn is_empty(&self) -> bool { self.asks.root().is_none() && self.bids.root().is_none() } }
use crate::{ critbit::{LeafNode, Node, NodeHandle, Slab}, error::AoError, processor::new_order, state::{Event, EventQueue, SelfTradeBehavior, Side}, utils::{fp32_div, fp32_mul}, }; use borsh::{BorshDeserialize, BorshSerialize}; use solana_program::{account_info::AccountInfo, msg, program_error::ProgramError}; #[derive(BorshSerialize, BorshDeserialize, Debug)] pub struct OrderSummary { pub posted_order_id: Option<u128>, #[allow(missing_docs)] pub total_base_qty: u64, #[allow(missing_docs)] pub total_quote_qty: u64, #[allow(missing_docs)] pub total_base_qty_posted: u64, } pub const ORDER_SUMMARY_SIZE: u32 = 41; pub(crate) struct OrderBookState<'a> { bids: Slab<'a>, asks: Slab<'a>, callback_id_len: usize, } impl<'ob> OrderBookState<'ob> { pub(crate) fn new_safe( bids_account: &AccountInfo<'ob>, asks_account: &AccountInfo<'ob>, callback_info_len: usize, callback_id_len: usize, ) -> Result<Self, ProgramError> { let bids = Slab::new_from_acc_info(bids_account, callback_info_len); let asks = Slab::new_from_acc_info(asks_account, callback_info_len); if !(bids.check(Side::Bid) && asks.check(Side::Ask)) { return Err(ProgramError::InvalidAccountData); } Ok(Self { bids, asks, callback_id_len, }) } pub fn find_bbo(&self, side: Side) -> Option<NodeHandle> { match side { Side::Bid => self.bids.find_max(), Side::Ask => self.asks.find_min(), } } #[cfg(feature = "no-entrypoint")] pub fn get_spread(&self) -> (Option<u64>, Option<u64>) { let best_bid_price = self .bids .find_max() .map(|h| self.bids.get_node(h).unwrap().as_leaf().unwrap().price()); let best_ask_price = self .asks .find_max() .map(|h| self.asks.get_node(h).unwrap().as_leaf().unwrap().price()); (best_bid_price, best_ask_price) } pub fn get_tree(&mut self, side: Side) -> &mut Slab<'ob> { match side { Side::Bid => &mut self.bids, Side::Ask => &mut self.asks, } } pub(crate) fn commit_changes(&self) { self.bids.write_header(); self.asks.write_header(); } pub(crate) fn new_order( &mut self, params: new_order::Params, event_queue: &mut EventQueue, min_base_order_size: u64, ) -> Result<OrderSummary, AoError> { let new_order::Params { max_base_qty, max_quote_qty, side, limit_price, callback_info, post_only, post_allowed, self_trade_behavior, mut match_limit, } = params; let mut base_qty_remaining = max_base_qty; let mut quote_qty_remaining = max_quote_qty; let mut crossed = true; loop { if match_limit == 0 { break; } let best_bo_h = match self.find_bbo(side.opposite()) { None => { crossed = false; break; } Some(h) => h, }; let mut best_bo_ref = self .get_tree(side.opposite()) .get_node(best_bo_h) .unwrap() .as_leaf() .unwrap() .to_owned(); let trade_price = best_bo_ref.price(); crossed =
; if post_only || !crossed { break; } let offer_size = best_bo_ref.base_quantity; let base_trade_qty = offer_size .min(base_qty_remaining) .min(fp32_div(quote_qty_remaining, best_bo_ref.price())); if base_trade_qty == 0 { break; } if self_trade_behavior != SelfTradeBehavior::DecrementTake { let order_would_self_trade = &callback_info[..self.callback_id_len] == (&self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) as &[u8]); if order_would_self_trade { let best_offer_id = best_bo_ref.order_id(); let cancelled_provide_base_qty; match self_trade_behavior { SelfTradeBehavior::CancelProvide => { cancelled_provide_base_qty = std::cmp::min(base_qty_remaining, best_bo_ref.base_quantity); } SelfTradeBehavior::AbortTransaction => return Err(AoError::WouldSelfTrade), SelfTradeBehavior::DecrementTake => unreachable!(), }; let remaining_provide_base_qty = best_bo_ref.base_quantity - cancelled_provide_base_qty; let delete = remaining_provide_base_qty == 0; let provide_out = Event::Out { side: side.opposite(), delete, order_id: best_offer_id, base_size: cancelled_provide_base_qty, callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), }; event_queue .push_back(provide_out) .map_err(|_| AoError::EventQueueFull)?; if delete { self.get_tree(side.opposite()) .remove_by_key(best_offer_id) .unwrap(); } else { best_bo_ref.set_base_quantity(remaining_provide_base_qty); self.get_tree(side.opposite()) .write_node(&Node::Leaf(best_bo_ref), best_bo_h); } continue; } } let quote_maker_qty = fp32_mul(base_trade_qty, trade_price); let maker_fill = Event::Fill { taker_side: side, maker_callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), taker_callback_info: callback_info.clone(), maker_order_id: best_bo_ref.order_id(), quote_size: quote_maker_qty, base_size: base_trade_qty, }; event_queue .push_back(maker_fill) .map_err(|_| AoError::EventQueueFull)?; best_bo_ref.set_base_quantity(best_bo_ref.base_quantity - base_trade_qty); base_qty_remaining -= base_trade_qty; quote_qty_remaining -= quote_maker_qty; if best_bo_ref.base_quantity <= min_base_order_size { let best_offer_id = best_bo_ref.order_id(); let cur_side = side.opposite(); let out_event = Event::Out { side: cur_side, order_id: best_offer_id, base_size: best_bo_ref.base_quantity, callback_info: self .get_tree(side.opposite()) .get_callback_info(best_bo_ref.callback_info_pt as usize) .to_owned(), delete: true, }; self.get_tree(cur_side) .remove_by_key(best_offer_id) .unwrap(); event_queue .push_back(out_event) .map_err(|_| AoError::EventQueueFull)?; } else { self.get_tree(side.opposite()) .write_node(&Node::Leaf(best_bo_ref), best_bo_h); } match_limit -= 1; } let base_qty_to_post = std::cmp::min( fp32_div(quote_qty_remaining, limit_price), base_qty_remaining, ); if crossed || !post_allowed || base_qty_to_post <= min_base_order_size { return Ok(OrderSummary { posted_order_id: None, total_base_qty: max_base_qty - base_qty_remaining, total_quote_qty: max_quote_qty - quote_qty_remaining, total_base_qty_posted: 0, }); } let new_leaf_order_id = event_queue.gen_order_id(limit_price, side); let callback_info_offset = self .get_tree(side) .write_callback_info(&callback_info) .unwrap(); let new_leaf = Node::Leaf(LeafNode { key: new_leaf_order_id, callback_info_pt: callback_info_offset, base_quantity: base_qty_to_post, }); let insert_result = self.get_tree(side).insert_leaf(&new_leaf); if let 
Err(AoError::SlabOutOfSpace) = insert_result { msg!("Orderbook is full! booting lest aggressive orders..."); let order = match side { Side::Bid => self.get_tree(Side::Bid).remove_min().unwrap(), Side::Ask => self.get_tree(Side::Ask).remove_max().unwrap(), }; let l = order.as_leaf().unwrap(); let out = Event::Out { side: Side::Bid, delete: true, order_id: l.order_id(), base_size: l.base_quantity, callback_info: self .get_tree(side) .get_callback_info(l.callback_info_pt as usize) .to_owned(), }; event_queue .push_back(out) .map_err(|_| AoError::EventQueueFull)?; self.get_tree(side).insert_leaf(&new_leaf).unwrap(); } else { insert_result.unwrap(); } base_qty_remaining -= base_qty_to_post; quote_qty_remaining -= fp32_mul(base_qty_to_post, limit_price); Ok(OrderSummary { posted_order_id: Some(new_leaf_order_id), total_base_qty: max_base_qty - base_qty_remaining, total_quote_qty: max_quote_qty - quote_qty_remaining, total_base_qty_posted: base_qty_to_post, }) } pub fn is_empty(&self) -> bool { self.asks.root().is_none() && self.bids.root().is_none() } }
match side { Side::Bid => limit_price >= trade_price, Side::Ask => limit_price <= trade_price, }
if_condition
[ { "content": "#[wasm_bindgen]\n\npub fn find_max(data: &mut [u8], callback_info_len: u64, slot_size: u64) -> Option<u32> {\n\n let slab = Slab::new(\n\n Rc::new(RefCell::new(data)),\n\n callback_info_len as usize,\n\n slot_size as usize,\n\n );\n\n slab.find_max()\n\n}\n\n\n", "file_path": "js/wasm/src/lib.rs", "rank": 0, "score": 194195.06087524205 }, { "content": "#[wasm_bindgen]\n\npub fn find_min(data: &mut [u8], callback_info_len: u64, slot_size: u64) -> Option<u32> {\n\n let slab = Slab::new(\n\n Rc::new(RefCell::new(data)),\n\n callback_info_len as usize,\n\n slot_size as usize,\n\n );\n\n slab.find_min()\n\n}\n\n\n", "file_path": "js/wasm/src/lib.rs", "rank": 1, "score": 185511.7891620709 }, { "content": "/// This helper function deduces an order's side from its order_id\n\npub fn get_side_from_order_id(order_id: u128) -> Side {\n\n if ORDER_ID_SIDE_FLAG & order_id != 0 {\n\n Side::Bid\n\n } else {\n\n Side::Ask\n\n }\n\n}\n", "file_path": "program/src/state.rs", "rank": 2, "score": 133798.52820099852 }, { "content": "pub fn create_market(\n\n accounts: create_market::Accounts<Pubkey>,\n\n params: create_market::Params,\n\n) -> Instruction {\n\n accounts.get_instruction(\n\n crate::id(),\n\n AgnosticOrderbookInstruction::CreateMarket as u8,\n\n params,\n\n )\n\n}\n\n/**\n\nExecute a new order on the orderbook.\n\n\n\nDepending on the provided parameters, the program will attempt to match the order with existing entries\n\nin the orderbook, and then optionally post the remaining order.\n\n */\n", "file_path": "program/src/instruction.rs", "rank": 3, "score": 104068.47422434879 }, { "content": "/// Pop a series of events off the event queue.\n\npub fn consume_events(\n\n accounts: consume_events::Accounts<Pubkey>,\n\n params: consume_events::Params,\n\n) -> Instruction {\n\n accounts.get_instruction(\n\n crate::id(),\n\n AgnosticOrderbookInstruction::ConsumeEvents as u8,\n\n params,\n\n )\n\n}\n\n\n", "file_path": "program/src/instruction.rs", "rank": 4, "score": 104068.47422434879 }, { "content": "/// Close an existing market.\n\npub fn close_market(\n\n accounts: close_market::Accounts<Pubkey>,\n\n params: close_market::Params,\n\n) -> Instruction {\n\n accounts.get_instruction(\n\n crate::id(),\n\n AgnosticOrderbookInstruction::CloseMarket as u8,\n\n params,\n\n )\n\n}\n", "file_path": "program/src/instruction.rs", "rank": 5, "score": 104068.47422434879 }, { "content": "/// The entrypoint to the AAOB program\n\npub fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n msg!(\"Entrypoint\");\n\n if let Err(error) = Processor::process_instruction(program_id, accounts, instruction_data) {\n\n // catch the error so we can print it\n\n error.print::<AoError>();\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl PrintProgramError for AoError {\n\n fn print<E>(&self)\n\n where\n\n E: 'static + std::error::Error + DecodeError<E> + PrintProgramError + FromPrimitive,\n\n {\n\n match self {\n", "file_path": "program/src/entrypoint.rs", "rank": 6, "score": 104068.47422434879 }, { "content": "/// Cancel an existing order in the orderbook.\n\npub fn cancel_order(\n\n accounts: cancel_order::Accounts<Pubkey>,\n\n params: cancel_order::Params,\n\n) -> Instruction {\n\n accounts.get_instruction(\n\n crate::id(),\n\n AgnosticOrderbookInstruction::CancelOrder as u8,\n\n params,\n\n )\n\n}\n\n\n", "file_path": "program/src/instruction.rs", "rank": 7, "score": 104068.47422434879 }, { "content": "pub fn 
new_order(accounts: new_order::Accounts<Pubkey>, params: new_order::Params) -> Instruction {\n\n accounts.get_instruction(\n\n crate::id(),\n\n AgnosticOrderbookInstruction::NewOrder as u8,\n\n params,\n\n )\n\n}\n\n\n", "file_path": "program/src/instruction.rs", "rank": 8, "score": 103181.93719155049 }, { "content": "pub fn check_account_owner(\n\n account: &AccountInfo,\n\n owner: &[u8],\n\n error: AoError,\n\n) -> Result<(), AoError> {\n\n if account.owner.to_bytes() != owner {\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "program/src/utils.rs", "rank": 9, "score": 102167.64351671623 }, { "content": "#[wasm_bindgen]\n\npub fn find_l2_depth(\n\n data: &mut [u8],\n\n callback_info_len: u64,\n\n slot_size: u64,\n\n depth: u64,\n\n increasing: bool,\n\n) -> Vec<u64> {\n\n let slab = Slab::new(\n\n Rc::new(RefCell::new(data)),\n\n callback_info_len as usize,\n\n slot_size as usize,\n\n );\n\n slab.find_l2_depth(depth as usize, increasing)\n\n}\n", "file_path": "js/wasm/src/lib.rs", "rank": 10, "score": 100374.24918077227 }, { "content": "/// This util is used to return the orderbook's spread (best_bid_price, best_ask_price) with both values in FP32 format\n\npub fn get_spread<'ob>(\n\n market_state_account: &AccountInfo<'ob>,\n\n bids_account: &AccountInfo<'ob>,\n\n asks_account: &AccountInfo<'ob>,\n\n) -> (Option<u64>, Option<u64>) {\n\n let market_state = MarketState::get(market_state_account).unwrap();\n\n let orderbook = OrderBookState::new_safe(\n\n bids_account,\n\n asks_account,\n\n market_state.callback_info_len as usize,\n\n market_state.callback_id_len as usize,\n\n )\n\n .unwrap();\n\n orderbook.get_spread()\n\n}\n\n\n\n/// a is fp0, b is fp32 and result is a/b fp0\n\npub(crate) fn fp32_div(a: u64, b_fp32: u64) -> u64 {\n\n (((a as u128) << 32) / (b_fp32 as u128)) as u64\n\n}\n", "file_path": "program/src/utils.rs", "rank": 11, "score": 97181.81931854466 }, { "content": "#[derive(BorshDeserialize, BorshSerialize, Debug)]\n\nstruct SlabHeader {\n\n account_tag: AccountTag,\n\n bump_index: u64,\n\n free_list_len: u64,\n\n free_list_head: u32,\n\n callback_memory_offset: u64,\n\n callback_free_list_len: u64,\n\n callback_free_list_head: u64,\n\n callback_bump_index: u64,\n\n\n\n root_node: u32,\n\n leaf_count: u64,\n\n market_address: Pubkey,\n\n}\n\npub const SLAB_HEADER_LEN: usize = 97;\n\npub const PADDED_SLAB_HEADER_LEN: usize = SLAB_HEADER_LEN + 7;\n\n\n\npub struct Slab<'a> {\n\n header: SlabHeader,\n\n pub buffer: Rc<RefCell<&'a mut [u8]>>,\n", "file_path": "program/src/critbit.rs", "rank": 12, "score": 91596.56613214243 }, { "content": "/// Apply the consume_events instruction to the provided accounts\n\npub fn process<'a, 'b: 'a>(\n\n program_id: &Pubkey,\n\n accounts: Accounts<'a, AccountInfo<'b>>,\n\n params: Params,\n\n) -> ProgramResult {\n\n accounts.perform_checks(program_id)?;\n\n let mut market_state = MarketState::get(accounts.market)?;\n\n\n\n check_accounts(&accounts, &market_state)?;\n\n\n\n let header = {\n\n let mut event_queue_data: &[u8] =\n\n &accounts.event_queue.data.borrow()[0..EVENT_QUEUE_HEADER_LEN];\n\n EventQueueHeader::deserialize(&mut event_queue_data).unwrap()\n\n };\n\n let mut event_queue = EventQueue::new_safe(\n\n header,\n\n accounts.event_queue,\n\n market_state.callback_info_len as usize,\n\n )?;\n", "file_path": "program/src/processor/consume_events.rs", "rank": 13, "score": 90248.9387784044 }, { "content": "/// Apply the cancel_order instruction to the provided accounts\n\npub fn process<'a, 'b: 'a>(\n\n program_id: 
&Pubkey,\n\n accounts: Accounts<'a, AccountInfo<'b>>,\n\n params: Params,\n\n) -> ProgramResult {\n\n accounts.perform_checks(program_id)?;\n\n let market_state = MarketState::get(accounts.market)?;\n\n\n\n check_accounts(&accounts, &market_state)?;\n\n\n\n let callback_info_len = market_state.callback_info_len as usize;\n\n\n\n let mut order_book = OrderBookState::new_safe(\n\n accounts.bids,\n\n accounts.asks,\n\n market_state.callback_info_len as usize,\n\n market_state.callback_id_len as usize,\n\n )?;\n\n\n\n let header = {\n", "file_path": "program/src/processor/cancel_order.rs", "rank": 14, "score": 90248.9387784044 }, { "content": "/// Apply the new_order instruction to the provided accounts\n\npub fn process<'a, 'b: 'a>(\n\n program_id: &Pubkey,\n\n accounts: Accounts<'a, AccountInfo<'b>>,\n\n mut params: Params,\n\n) -> ProgramResult {\n\n accounts.perform_checks(program_id)?;\n\n let mut market_state = MarketState::get(accounts.market)?;\n\n\n\n check_accounts(&accounts, &market_state)?;\n\n\n\n // Round price to nearest valid price tick\n\n params.limit_price = round_price(market_state.tick_size, params.limit_price, params.side);\n\n\n\n let callback_info_len = market_state.callback_info_len as usize;\n\n\n\n let mut order_book = OrderBookState::new_safe(\n\n accounts.bids,\n\n accounts.asks,\n\n market_state.callback_info_len as usize,\n\n market_state.callback_id_len as usize,\n", "file_path": "program/src/processor/new_order.rs", "rank": 15, "score": 90248.9387784044 }, { "content": "/// Apply the create_market instruction to the provided accounts\n\npub fn process<'a, 'b: 'a>(\n\n program_id: &Pubkey,\n\n accounts: Accounts<'a, AccountInfo<'b>>,\n\n params: Params,\n\n) -> ProgramResult {\n\n accounts.perform_checks(program_id)?;\n\n let Params {\n\n caller_authority,\n\n callback_info_len,\n\n callback_id_len,\n\n min_base_order_size,\n\n tick_size,\n\n cranker_reward,\n\n } = params;\n\n\n\n check_unitialized(accounts.event_queue)?;\n\n check_unitialized(accounts.bids)?;\n\n check_unitialized(accounts.asks)?;\n\n check_unitialized(accounts.market)?;\n\n\n", "file_path": "program/src/processor/create_market.rs", "rank": 16, "score": 90248.9387784044 }, { "content": "/// Apply the close_market instruction to the provided accounts\n\npub fn process<'a, 'b: 'a>(\n\n program_id: &Pubkey,\n\n accounts: Accounts<'a, AccountInfo<'b>>,\n\n _params: Params,\n\n) -> ProgramResult {\n\n accounts.perform_checks(program_id)?;\n\n let mut market_state = MarketState::get(accounts.market)?;\n\n\n\n check_accounts(&accounts, &market_state)?;\n\n\n\n // Check if there are still orders in the book\n\n let orderbook_state = OrderBookState::new_safe(\n\n accounts.bids,\n\n accounts.asks,\n\n market_state.callback_info_len as usize,\n\n market_state.callback_id_len as usize,\n\n )\n\n .unwrap();\n\n if !orderbook_state.is_empty() {\n\n msg!(\"The orderbook must be empty\");\n", "file_path": "program/src/processor/close_market.rs", "rank": 17, "score": 90248.9387784044 }, { "content": "#[derive(BorshDeserialize, BorshSerialize, Debug)]\n\nstruct SlabHeader {\n\n account_tag: AccountTag,\n\n bump_index: u64,\n\n free_list_len: u64,\n\n free_list_head: u32,\n\n\n\n root_node: u32,\n\n leaf_count: u64,\n\n market_address: [u8; 32],\n\n}\n\npub const SLAB_HEADER_LEN: usize = 65;\n\n\n\npub struct Slab<'a> {\n\n header: SlabHeader,\n\n pub buffer: Rc<RefCell<&'a mut [u8]>>,\n\n pub callback_info_len: usize,\n\n pub slot_size: usize,\n\n}\n\n\n\n// Data access methods\n", "file_path": 
"js/wasm/src/critbit.rs", "rank": 18, "score": 89564.55163763286 }, { "content": "/// This method is used to deserialize the event queue's register\n\n/// without constructing an EventQueue instance\n\n///\n\n/// The nature of the serialized object should be deductible from caller context\n\npub fn read_register<T: BorshSerialize + BorshDeserialize>(\n\n event_q_acc: &AccountInfo,\n\n) -> Result<Register<T>, IoError> {\n\n let mut register =\n\n &event_q_acc.data.borrow()[EVENT_QUEUE_HEADER_LEN..EVENT_QUEUE_HEADER_LEN + REGISTER_SIZE];\n\n Register::deserialize(&mut register)\n\n}\n\n\n\n#[cfg(feature = \"no-entrypoint\")]\n\nimpl<'a, 'b> IntoIterator for &'b EventQueue<'a> {\n\n type Item = Event;\n\n\n\n type IntoIter = QueueIterator<'a, 'b>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.iter()\n\n }\n\n}\n\n#[cfg(feature = \"no-entrypoint\")]\n\n/// Utility struct for iterating over a queue\n", "file_path": "program/src/state.rs", "rank": 19, "score": 83639.22009841006 }, { "content": "pub fn check_unitialized(account: &AccountInfo) -> AoResult {\n\n if account.data.borrow()[0] != 0 {\n\n return Err(AoError::AlreadyInitialized);\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(feature = \"no-entrypoint\")]\n", "file_path": "program/src/utils.rs", "rank": 20, "score": 83636.01271681492 }, { "content": "pub fn check_signer(account: &AccountInfo) -> ProgramResult {\n\n if !(account.is_signer) {\n\n return Err(ProgramError::MissingRequiredSignature);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "program/src/utils.rs", "rank": 21, "score": 83636.01271681492 }, { "content": "// Safety verification functions\n\npub fn check_account_key(account: &AccountInfo, key: &[u8], error: AoError) -> Result<(), AoError> {\n\n if account.key.to_bytes() != key {\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "program/src/utils.rs", "rank": 22, "score": 81297.40650835709 }, { "content": "export class Slab {\n\n header: SlabHeader;\n\n buffer: Buffer;\n\n callBackInfoLen: BN;\n\n orderCapacity: number;\n\n callbackMemoryOffset: BN;\n\n\n\n static NODE_SIZE: number = 32;\n\n static NODE_TAG_SIZE: number = 8;\n\n static SLOT_SIZE: number = Slab.NODE_TAG_SIZE + Slab.NODE_SIZE;\n\n\n\n constructor(arg: {\n\n header: SlabHeader;\n\n buffer: Buffer;\n\n callBackInfoLen: BN;\n\n }) {\n\n this.header = arg.header;\n\n this.buffer = arg.buffer;\n\n this.callBackInfoLen = arg.callBackInfoLen;\n\n\n\n const capacity = new BN(this.buffer.length - SlabHeader.PADDED_LEN);\n\n const size = this.callBackInfoLen.addn(Slab.SLOT_SIZE * 2);\n\n this.orderCapacity = Math.floor(capacity.div(size).toNumber());\n\n this.callbackMemoryOffset = new BN(this.orderCapacity)\n\n .muln(2 * Slab.SLOT_SIZE)\n\n .addn(SlabHeader.PADDED_LEN);\n\n }\n\n\n\n static deserialize(data: Buffer, callBackInfoLen: BN) {\n\n return new Slab({\n\n header: deserializeUnchecked(SlabHeader.schema, SlabHeader, data),\n\n buffer: data,\n\n callBackInfoLen,\n\n });\n\n }\n\n\n\n /**\n\n * Returns a node by its key\n\n * @param key Key of the node to fetch\n\n * @returns A node LeafNode object\n\n */\n\n getNodeByKey(key: BN): LeafNode | undefined {\n\n if (this.header.leafCount.eqn(0)) {\n\n return undefined;\n\n }\n\n let pointer = this.header.rootNode;\n\n while (true) {\n\n const offset = SlabHeader.PADDED_LEN + pointer * Slab.SLOT_SIZE;\n\n let node = parseNode(this.buffer.slice(offset, offset + Slab.SLOT_SIZE));\n\n if (node instanceof InnerNode) {\n\n let common_prefix_len = 128 - node.key.xor(key).bitLength();\n\n if (common_prefix_len < 
node.prefixLen.toNumber()) {\n\n return undefined;\n\n }\n\n const critBitMasks = new BN(1).shln(127 - node.prefixLen.toNumber());\n\n let critBit = key.and(critBitMasks).isZero() ? 0 : 1;\n\n pointer = node.children[critBit];\n\n } else if (node instanceof LeafNode) {\n\n if (node.key.cmp(key) !== 0) {\n\n return undefined;\n\n }\n\n return node;\n\n } else {\n\n throw new Error(\"Couldn't parse node!\");\n\n }\n\n }\n\n }\n\n\n\n // Uncomment if you are using webassembly\n\n // /**\n\n // * Return min or max node of the critbit tree\n\n // * @param max Boolean (false for best asks and true for best bids)\n\n // * @returns Returns the min or max node of the Slab\n\n // */\n\n // getMinMax(max: boolean) {\n\n // let pointer;\n\n // if (max) {\n\n // pointer = find_max(\n\n // this.data,\n\n // BigInt(this.callBackInfoLen),\n\n // BigInt(this.slotSize)\n\n // );\n\n // } else {\n\n // pointer = find_min(\n\n // this.data,\n\n // BigInt(this.callBackInfoLen),\n\n // BigInt(this.slotSize)\n\n // );\n\n // }\n\n // let offset = SlabHeader.LEN;\n\n // if (!pointer) {\n\n // throw new Error(\"Empty slab\");\n\n // }\n\n // let node = parseNode(\n\n // this.callBackInfoLen,\n\n // this.data.slice(\n\n // offset + pointer * this.slotSize,\n\n // offset + (pointer + 1) * this.slotSize\n\n // )\n\n // );\n\n // return node;\n\n // }\n\n\n\n /**\n\n * Walkdown the critbit tree\n\n * @param descending\n\n * @returns\n\n */\n\n *items(descending = false): Generator<LeafNode> {\n\n if (this.header.leafCount.eq(new BN(0))) {\n\n return;\n\n }\n\n const stack = [this.header.rootNode];\n\n while (stack.length > 0) {\n\n const pointer = stack.pop();\n\n if (pointer === undefined) throw new Error(\"unreachable!\");\n\n let offset = SlabHeader.PADDED_LEN + pointer * Slab.SLOT_SIZE;\n\n const node = parseNode(\n\n this.buffer.slice(offset, offset + Slab.SLOT_SIZE)\n\n );\n\n if (node instanceof LeafNode) {\n\n yield node;\n\n } else if (node instanceof InnerNode) {\n\n if (descending) {\n\n stack.push(node.children[0], node.children[1]);\n\n } else {\n\n stack.push(node.children[1], node.children[0]);\n\n }\n\n }\n\n }\n\n }\n\n\n\n [Symbol.iterator]() {\n\n return this.items(false);\n\n }\n\n\n\n // Uncomment if you are using webassembly\n\n // /**\n\n // * Returns an array of [price, size] given a certain depth\n\n // * @param depth Depth to fetch\n\n // * @param max Boolean (false for asks and true for bids)\n\n // * @returns Returns an array made of [price, size] elements\n\n // */\n\n // getL2Depth(depth: number, increasing: boolean): Price[] {\n\n // let raw = find_l2_depth(\n\n // this.data,\n\n // BigInt(this.callBackInfoLen),\n\n // BigInt(this.slotSize),\n\n // BigInt(depth),\n\n // increasing\n\n // );\n\n // let result: Price[] = [];\n\n // for (let i = 0; i < raw.length / 2; i++) {\n\n // result.push({\n\n // size: Number(raw[2 * i]),\n\n // price: Number(raw[2 * i + 1]) / 2 ** 32,\n\n // });\n\n // }\n\n // return result;\n\n // }\n\n\n\n /**\n\n * Returns the top maxNbOrders (not aggregated by price)\n\n * @param maxNbOrders\n\n * @param max Boolean (false for asks and true for bids)\n\n * @returns Returns an array of LeafNode object\n\n */\n\n getMinMaxNodes(maxNbOrders: number, max: boolean) {\n\n const minMaxOrders: LeafNode[] = [];\n\n for (const leafNode of this.items(max)) {\n\n if (minMaxOrders.length === maxNbOrders) {\n\n break;\n\n }\n\n minMaxOrders.push(leafNode);\n\n }\n\n return minMaxOrders;\n\n }\n\n\n\n /**\n\n * Aggregates price levels up to the given depth\n\n * @param depth 
maximum number of price levels\n\n * @param increasing true to return in increasing order\n\n * @returns aggregated quantities at each price level\n\n */\n\n getL2DepthJS(depth: number, increasing: boolean): Price[] {\n\n if (this.header.leafCount.eq(new BN(0))) {\n\n return [];\n\n }\n\n let raw: number[] = [];\n\n let stack = [this.header.rootNode];\n\n while (true) {\n\n const current = stack.pop();\n\n if (current === undefined) break;\n\n let offset = SlabHeader.PADDED_LEN + current * Slab.SLOT_SIZE;\n\n const node = parseNode(\n\n this.buffer.slice(offset, offset + Slab.SLOT_SIZE)\n\n );\n\n if (node instanceof LeafNode) {\n\n const leafPrice = node.getPrice();\n\n if (raw[raw.length - 1] === leafPrice.toNumber()) {\n\n const idx = raw.length - 2;\n\n raw[idx] += node.baseQuantity.toNumber();\n\n } else if (raw.length === 2 * depth) {\n\n // The price has changed and we have enough prices. Note that the\n\n // above branch will be hit even if we already have `depth` prices\n\n // so that we will finish accumulating the current level. For example,\n\n // if we request one level and there are two order at the best price,\n\n // we will accumulate both orders.\n\n break;\n\n } else {\n\n raw.push(node.baseQuantity.toNumber());\n\n raw.push(leafPrice.toNumber());\n\n }\n\n }\n\n if (node instanceof InnerNode) {\n\n stack.push(node.children[increasing ? 1 : 0]);\n\n stack.push(node.children[increasing ? 0 : 1]);\n\n }\n\n }\n\n let result: Price[] = [];\n\n for (let i = 0; i < raw.length / 2; i++) {\n\n result.push({\n\n size: Number(raw[2 * i]),\n\n price: Number(raw[2 * i + 1]),\n\n });\n\n }\n\n return result;\n\n }\n\n\n\n /**\n\n * @param callBackInfoPt a leaf node's callBackInfoPt that gives the offset to\n\n * the info in the appropriate Slab.\n\n * @returns the raw binary callback info for the node\n\n */\n\n getCallBackInfo(callBackInfoPt: BN) {\n\n return this.buffer.slice(\n\n callBackInfoPt.toNumber(),\n\n callBackInfoPt.add(this.callBackInfoLen).toNumber()\n\n );\n\n }\n", "file_path": "js/src/slab.ts", "rank": 23, "score": 69296.31927335376 }, { "content": " getNodeByKey(key: BN): LeafNode | undefined {\n\n if (this.header.leafCount.eqn(0)) {\n\n return undefined;\n\n }\n\n let pointer = this.header.rootNode;\n\n while (true) {\n\n const offset = SlabHeader.PADDED_LEN + pointer * Slab.SLOT_SIZE;\n\n let node = parseNode(this.buffer.slice(offset, offset + Slab.SLOT_SIZE));\n\n if (node instanceof InnerNode) {\n\n let common_prefix_len = 128 - node.key.xor(key).bitLength();\n\n if (common_prefix_len < node.prefixLen.toNumber()) {\n\n return undefined;\n\n }\n\n const critBitMasks = new BN(1).shln(127 - node.prefixLen.toNumber());\n\n let critBit = key.and(critBitMasks).isZero() ? 
0 : 1;\n\n pointer = node.children[critBit];\n\n } else if (node instanceof LeafNode) {\n\n if (node.key.cmp(key) !== 0) {\n\n return undefined;\n\n }\n\n return node;\n\n } else {\n\n throw new Error(\"Couldn't parse node!\");\n\n }\n\n }\n", "file_path": "js/src/slab.ts", "rank": 24, "score": 67785.59858087017 }, { "content": " getMinMaxNodes(maxNbOrders: number, max: boolean) {\n\n const minMaxOrders: LeafNode[] = [];\n\n for (const leafNode of this.items(max)) {\n\n if (minMaxOrders.length === maxNbOrders) {\n\n break;\n\n }\n\n minMaxOrders.push(leafNode);\n\n }\n\n return minMaxOrders;\n", "file_path": "js/src/slab.ts", "rank": 25, "score": 66364.52391409525 }, { "content": "export function parseNode(\n\n data: Buffer\n\n): undefined | FreeNode | LeafNode | InnerNode {\n\n switch (data[0]) {\n\n case 0:\n\n throw new Error(\"node is unitialized\");\n\n case 1:\n\n return deserializeUnchecked(\n\n InnerNode.schema,\n\n InnerNode,\n\n data.slice(Slab.NODE_TAG_SIZE)\n\n );\n\n case 2:\n\n return deserializeUnchecked(\n\n LeafNode.schema,\n\n LeafNode,\n\n data.slice(Slab.NODE_TAG_SIZE)\n\n );\n\n case 3:\n\n return deserializeUnchecked(\n\n FreeNode.schema,\n\n FreeNode,\n\n data.slice(Slab.NODE_TAG_SIZE)\n\n );\n\n case 4:\n\n return deserializeUnchecked(\n\n FreeNode.schema,\n\n FreeNode,\n\n data.slice(Slab.NODE_TAG_SIZE)\n\n );\n\n default:\n\n throw new Error(\"Invalid data\");\n\n }\n", "file_path": "js/src/slab.ts", "rank": 26, "score": 61941.11607574585 }, { "content": "export class FreeNode {\n\n next: number;\n\n\n\n static schema: Schema = new Map([\n\n [\n\n FreeNode,\n\n {\n\n kind: \"struct\",\n\n fields: [[\"next\", \"u32\"]],\n\n },\n\n ],\n\n ]);\n\n\n\n constructor(arg: { next: number }) {\n\n this.next = arg.next;\n\n }\n", "file_path": "js/src/slab.ts", "rank": 27, "score": 61929.44223507866 }, { "content": "export class LeafNode {\n\n key: BN;\n\n callBackInfoPt: BN;\n\n baseQuantity: BN;\n\n\n\n static schema: Schema = new Map([\n\n [\n\n LeafNode,\n\n {\n\n kind: \"struct\",\n\n fields: [\n\n [\"key\", \"u128\"],\n\n [\"callBackInfoPt\", \"u64\"],\n\n [\"baseQuantity\", \"u64\"],\n\n ],\n\n },\n\n ],\n\n ]);\n\n\n\n constructor(arg: { key: BN; callBackInfoPt: BN; baseQuantity: BN }) {\n\n this.key = arg.key;\n\n this.callBackInfoPt = arg.callBackInfoPt;\n\n this.baseQuantity = arg.baseQuantity;\n\n }\n\n\n\n /**\n\n * @return the price of this order\n\n */\n\n getPrice(): BN {\n\n return this.key.shrn(64);\n\n }\n", "file_path": "js/src/slab.ts", "rank": 28, "score": 61929.44223507866 }, { "content": "export class InnerNode {\n\n prefixLen: BN;\n\n key: BN;\n\n children: number[];\n\n\n\n static schema: Schema = new Map([\n\n [\n\n InnerNode,\n\n {\n\n kind: \"struct\",\n\n fields: [\n\n [\"prefixLen\", \"u64\"],\n\n [\"key\", \"u128\"],\n\n [\"children\", [\"u32\", 2]],\n\n ],\n\n },\n\n ],\n\n ]);\n\n\n\n constructor(arg: { prefixLen: BN; key: BN; children: number[] }) {\n\n this.prefixLen = arg.prefixLen;\n\n this.key = arg.key;\n\n this.children = arg.children;\n\n }\n", "file_path": "js/src/slab.ts", "rank": 29, "score": 61929.44223507866 }, { "content": " constructor(arg: { next: number }) {\n\n this.next = arg.next;\n", "file_path": "js/src/slab.ts", "rank": 30, "score": 60227.393594432826 }, { "content": " constructor(arg: { prefixLen: BN; key: BN; children: number[] }) {\n\n this.prefixLen = arg.prefixLen;\n\n this.key = arg.key;\n\n this.children = arg.children;\n", "file_path": "js/src/slab.ts", "rank": 31, "score": 60227.393594432826 }, { "content": " 
constructor(arg: { key: BN; callBackInfoPt: BN; baseQuantity: BN }) {\n\n this.key = arg.key;\n\n this.callBackInfoPt = arg.callBackInfoPt;\n\n this.baseQuantity = arg.baseQuantity;\n", "file_path": "js/src/slab.ts", "rank": 32, "score": 60227.393594432826 }, { "content": " getPrice(): BN {\n\n return this.key.shrn(64);\n", "file_path": "js/src/slab.ts", "rank": 33, "score": 58620.312702264906 }, { "content": " async loadAsksSlab(connection: Connection, commitment?: Commitment) {\n\n const asksInfo = await connection.getAccountInfo(this.asks, commitment);\n\n if (!asksInfo?.data) {\n\n throw new Error(\"Invalid asks account\");\n\n }\n\n return Slab.deserialize(asksInfo.data, this.callBackInfoLen);\n", "file_path": "js/src/market_state.ts", "rank": 34, "score": 55819.020003316735 }, { "content": " async loadBidsSlab(connection: Connection, commitment?: Commitment) {\n\n const bidsInfo = await connection.getAccountInfo(this.bids, commitment);\n\n if (!bidsInfo?.data) {\n\n throw new Error(\"Invalid bids account\");\n\n }\n\n return Slab.deserialize(bidsInfo.data, this.callBackInfoLen);\n", "file_path": "js/src/market_state.ts", "rank": 35, "score": 55819.01669191637 }, { "content": "fn check_accounts<'a, 'b: 'a>(\n\n accounts: &Accounts<'a, AccountInfo<'b>>,\n\n market_state: &MarketState,\n\n) -> ProgramResult {\n\n check_account_key(\n\n accounts.event_queue,\n\n &market_state.event_queue,\n\n AoError::WrongEventQueueAccount,\n\n )?;\n\n check_account_key(accounts.bids, &market_state.bids, AoError::WrongBidsAccount)?;\n\n check_account_key(accounts.asks, &market_state.asks, AoError::WrongAsksAccount)?;\n\n #[cfg(not(feature = \"lib\"))]\n\n check_account_key(\n\n accounts.authority,\n\n &market_state.caller_authority,\n\n AoError::WrongCallerAuthority,\n\n )?;\n\n\n\n Ok(())\n\n}\n", "file_path": "program/src/processor/new_order.rs", "rank": 36, "score": 46543.05528571146 }, { "content": "fn check_accounts<'a, 'b: 'a>(\n\n accounts: &Accounts<'a, AccountInfo<'b>>,\n\n market_state: &MarketState,\n\n) -> ProgramResult {\n\n check_account_key(\n\n accounts.event_queue,\n\n &market_state.event_queue,\n\n AoError::WrongEventQueueAccount,\n\n )?;\n\n check_account_key(accounts.bids, &market_state.bids, AoError::WrongBidsAccount)?;\n\n check_account_key(accounts.asks, &market_state.asks, AoError::WrongAsksAccount)?;\n\n #[cfg(not(feature = \"lib\"))]\n\n check_account_key(\n\n accounts.authority,\n\n &market_state.caller_authority,\n\n AoError::WrongCallerAuthority,\n\n )?;\n\n\n\n Ok(())\n\n}", "file_path": "program/src/processor/cancel_order.rs", "rank": 37, "score": 46543.05528571146 }, { "content": "fn check_accounts<'a, 'b: 'a>(\n\n accounts: &Accounts<'a, AccountInfo<'b>>,\n\n market_state: &MarketState,\n\n) -> ProgramResult {\n\n check_account_key(\n\n accounts.event_queue,\n\n &market_state.event_queue,\n\n AoError::WrongEventQueueAccount,\n\n )?;\n\n #[cfg(not(feature = \"lib\"))]\n\n check_account_key(\n\n accounts.authority,\n\n &market_state.caller_authority,\n\n AoError::WrongCallerAuthority,\n\n )?;\n\n check_account_key(\n\n accounts.event_queue,\n\n &market_state.event_queue,\n\n AoError::WrongEventQueueAccount,\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "program/src/processor/consume_events.rs", "rank": 38, "score": 46543.05528571146 }, { "content": "fn check_accounts<'a, 'b: 'a>(\n\n accounts: &Accounts<'a, AccountInfo<'b>>,\n\n market_state: &MarketState,\n\n) -> ProgramResult {\n\n check_account_key(\n\n accounts.event_queue,\n\n &market_state.event_queue,\n\n 
AoError::WrongEventQueueAccount,\n\n )?;\n\n check_account_key(accounts.bids, &market_state.bids, AoError::WrongBidsAccount)?;\n\n check_account_key(accounts.asks, &market_state.asks, AoError::WrongAsksAccount)?;\n\n #[cfg(not(feature = \"lib\"))]\n\n check_account_key(\n\n accounts.authority,\n\n &market_state.caller_authority,\n\n AoError::WrongCallerAuthority,\n\n )?;\n\n\n\n Ok(())\n\n}\n", "file_path": "program/src/processor/close_market.rs", "rank": 39, "score": 46543.05528571146 }, { "content": " *items(descending = false): Generator<LeafNode> {\n\n if (this.header.leafCount.eq(new BN(0))) {\n\n return;\n\n }\n\n const stack = [this.header.rootNode];\n\n while (stack.length > 0) {\n\n const pointer = stack.pop();\n\n if (pointer === undefined) throw new Error(\"unreachable!\");\n\n let offset = SlabHeader.PADDED_LEN + pointer * Slab.SLOT_SIZE;\n\n const node = parseNode(\n\n this.buffer.slice(offset, offset + Slab.SLOT_SIZE)\n\n );\n\n if (node instanceof LeafNode) {\n\n yield node;\n\n } else if (node instanceof InnerNode) {\n\n if (descending) {\n\n stack.push(node.children[0], node.children[1]);\n\n } else {\n\n stack.push(node.children[1], node.children[0]);\n\n }\n\n }\n\n }\n", "file_path": "js/src/slab.ts", "rank": 40, "score": 39745.097957805636 }, { "content": "export class SlabHeader {\n\n accountTag: AccountTag;\n\n bumpIndex: BN;\n\n freeListLen: BN;\n\n freeListHead: number;\n\n callbackMemoryOffset: BN;\n\n callbackFreeListLen: BN;\n\n callbackFreeListHead: BN;\n\n callbackBumpIndex: BN;\n\n rootNode: number;\n\n leafCount: BN;\n\n marketAddress: PublicKey;\n\n\n\n static LEN: number = 97;\n\n static PADDED_LEN: number = SlabHeader.LEN + 7;\n\n\n\n static schema: Schema = new Map([\n\n [\n\n SlabHeader,\n\n {\n\n kind: \"struct\",\n\n fields: [\n\n [\"accountTag\", \"u8\"],\n\n [\"bumpIndex\", \"u64\"],\n\n [\"freeListLen\", \"u64\"],\n\n [\"freeListHead\", \"u32\"],\n\n [\"callbackMemoryOffset\", \"u64\"],\n\n [\"callbackFreeListLen\", \"u64\"],\n\n [\"callbackFreeListHead\", \"u64\"],\n\n [\"callbackBumpIndex\", \"u64\"],\n\n [\"rootNode\", \"u32\"],\n\n [\"leafCount\", \"u64\"],\n\n [\"marketAddress\", [32]],\n\n ],\n\n },\n\n ],\n\n ]);\n\n\n\n constructor(arg: {\n\n accountTag: number;\n\n bumpIndex: BN;\n\n freeListLen: BN;\n\n freeListHead: number;\n\n callbackMemoryOffset: BN;\n\n callbackFreeListLen: BN;\n\n callbackFreeListHead: BN;\n\n callbackBumpIndex: BN;\n\n rootNode: number;\n\n leafCount: BN;\n\n marketAddress: Uint8Array;\n\n }) {\n\n this.accountTag = arg.accountTag as AccountTag;\n\n this.bumpIndex = arg.bumpIndex;\n\n this.freeListLen = arg.freeListLen;\n\n this.freeListHead = arg.freeListHead;\n\n this.callbackMemoryOffset = arg.callbackMemoryOffset;\n\n this.callbackFreeListLen = arg.callbackFreeListLen;\n\n this.callbackFreeListHead = arg.callbackFreeListHead;\n\n this.callbackBumpIndex = arg.callbackBumpIndex;\n\n this.rootNode = arg.rootNode;\n\n this.leafCount = arg.leafCount;\n\n this.marketAddress = new PublicKey(arg.marketAddress);\n\n }\n", "file_path": "js/src/slab.ts", "rank": 41, "score": 39737.641557036826 }, { "content": " static deserialize(data: Buffer, callBackInfoLen: BN) {\n\n return new Slab({\n\n header: deserializeUnchecked(SlabHeader.schema, SlabHeader, data),\n\n buffer: data,\n\n callBackInfoLen,\n\n });\n", "file_path": "js/src/slab.ts", "rank": 42, "score": 39737.641557036826 }, { "content": " constructor(arg: {\n\n header: SlabHeader;\n\n buffer: Buffer;\n\n callBackInfoLen: BN;\n\n }) {\n\n this.header = 
arg.header;\n\n this.buffer = arg.buffer;\n\n this.callBackInfoLen = arg.callBackInfoLen;\n\n\n\n const capacity = new BN(this.buffer.length - SlabHeader.PADDED_LEN);\n\n const size = this.callBackInfoLen.addn(Slab.SLOT_SIZE * 2);\n\n this.orderCapacity = Math.floor(capacity.div(size).toNumber());\n\n this.callbackMemoryOffset = new BN(this.orderCapacity)\n\n .muln(2 * Slab.SLOT_SIZE)\n\n .addn(SlabHeader.PADDED_LEN);\n", "file_path": "js/src/slab.ts", "rank": 43, "score": 39737.641557036826 }, { "content": " constructor(arg: {\n\n accountTag: number;\n\n bumpIndex: BN;\n\n freeListLen: BN;\n\n freeListHead: number;\n\n callbackMemoryOffset: BN;\n\n callbackFreeListLen: BN;\n\n callbackFreeListHead: BN;\n\n callbackBumpIndex: BN;\n\n rootNode: number;\n\n leafCount: BN;\n\n marketAddress: Uint8Array;\n\n }) {\n\n this.accountTag = arg.accountTag as AccountTag;\n\n this.bumpIndex = arg.bumpIndex;\n\n this.freeListLen = arg.freeListLen;\n\n this.freeListHead = arg.freeListHead;\n\n this.callbackMemoryOffset = arg.callbackMemoryOffset;\n\n this.callbackFreeListLen = arg.callbackFreeListLen;\n\n this.callbackFreeListHead = arg.callbackFreeListHead;\n\n this.callbackBumpIndex = arg.callbackBumpIndex;\n\n this.rootNode = arg.rootNode;\n\n this.leafCount = arg.leafCount;\n\n this.marketAddress = new PublicKey(arg.marketAddress);\n", "file_path": "js/src/slab.ts", "rank": 44, "score": 39026.92831275201 }, { "content": " getCallBackInfo(callBackInfoPt: BN) {\n\n return this.buffer.slice(\n\n callBackInfoPt.toNumber(),\n\n callBackInfoPt.add(this.callBackInfoLen).toNumber()\n\n );\n", "file_path": "js/src/slab.ts", "rank": 45, "score": 37691.71289866915 }, { "content": " getL2DepthJS(depth: number, increasing: boolean): Price[] {\n\n if (this.header.leafCount.eq(new BN(0))) {\n\n return [];\n\n }\n\n let raw: number[] = [];\n\n let stack = [this.header.rootNode];\n\n while (true) {\n\n const current = stack.pop();\n\n if (current === undefined) break;\n\n let offset = SlabHeader.PADDED_LEN + current * Slab.SLOT_SIZE;\n\n const node = parseNode(\n\n this.buffer.slice(offset, offset + Slab.SLOT_SIZE)\n\n );\n\n if (node instanceof LeafNode) {\n\n const leafPrice = node.getPrice();\n\n if (raw[raw.length - 1] === leafPrice.toNumber()) {\n\n const idx = raw.length - 2;\n\n raw[idx] += node.baseQuantity.toNumber();\n\n } else if (raw.length === 2 * depth) {\n\n // The price has changed and we have enough prices. Note that the\n\n // above branch will be hit even if we already have `depth` prices\n\n // so that we will finish accumulating the current level. For example,\n\n // if we request one level and there are two order at the best price,\n\n // we will accumulate both orders.\n\n break;\n\n } else {\n\n raw.push(node.baseQuantity.toNumber());\n\n raw.push(leafPrice.toNumber());\n\n }\n\n }\n\n if (node instanceof InnerNode) {\n\n stack.push(node.children[increasing ? 1 : 0]);\n\n stack.push(node.children[increasing ? 
0 : 1]);\n\n }\n\n }\n\n let result: Price[] = [];\n\n for (let i = 0; i < raw.length / 2; i++) {\n\n result.push({\n\n size: Number(raw[2 * i]),\n\n price: Number(raw[2 * i + 1]),\n\n });\n\n }\n\n return result;\n", "file_path": "js/src/slab.ts", "rank": 46, "score": 37689.923056289124 }, { "content": "import { PublicKey } from \"@solana/web3.js\";\n\nimport { Schema, deserializeUnchecked } from \"borsh\";\n\nimport BN from \"bn.js\";\n\nimport { AccountTag } from \"./market_state\";\n\nimport { Price } from \"./types\";\n\n// Uncomment to use WebAssembly for OB deserialization\n\n// import { find_max, find_min, find_l2_depth } from \"dex-wasm\";\n\n\n\n///////////////////////////////////////////////\n\n////// Nodes and Slab\n\n///////////////////////////////////////////////\n\n\n\nexport class InnerNode {\n\n prefixLen: BN;\n\n key: BN;\n\n children: number[];\n\n\n\n static schema: Schema = new Map([\n\n [\n\n InnerNode,\n\n {\n\n kind: \"struct\",\n\n fields: [\n\n [\"prefixLen\", \"u64\"],\n\n [\"key\", \"u128\"],\n\n [\"children\", [\"u32\", 2]],\n\n ],\n\n },\n\n ],\n\n ]);\n\n\n\n constructor(arg: { prefixLen: BN; key: BN; children: number[] }) {\n\n this.prefixLen = arg.prefixLen;\n\n this.key = arg.key;\n\n this.children = arg.children;\n\n }\n\n}\n\n\n\nexport class LeafNode {\n\n key: BN;\n\n callBackInfoPt: BN;\n\n baseQuantity: BN;\n\n\n\n static schema: Schema = new Map([\n\n [\n\n LeafNode,\n\n {\n\n kind: \"struct\",\n\n fields: [\n\n [\"key\", \"u128\"],\n\n [\"callBackInfoPt\", \"u64\"],\n\n [\"baseQuantity\", \"u64\"],\n\n ],\n\n },\n\n ],\n\n ]);\n\n\n\n constructor(arg: { key: BN; callBackInfoPt: BN; baseQuantity: BN }) {\n\n this.key = arg.key;\n\n this.callBackInfoPt = arg.callBackInfoPt;\n\n this.baseQuantity = arg.baseQuantity;\n\n }\n\n\n\n /**\n\n * @return the price of this order\n\n */\n\n getPrice(): BN {\n\n return this.key.shrn(64);\n\n }\n\n}\n\n\n\nexport class FreeNode {\n\n next: number;\n\n\n\n static schema: Schema = new Map([\n\n [\n\n FreeNode,\n\n {\n\n kind: \"struct\",\n\n fields: [[\"next\", \"u32\"]],\n\n },\n\n ],\n\n ]);\n\n\n\n constructor(arg: { next: number }) {\n\n this.next = arg.next;\n\n }\n\n}\n\n\n\n/**\n\n * Deserializes a node buffer\n\n * @param data Buffer to deserialize\n\n * @returns Returns a node\n\n */\n\nexport function parseNode(\n\n data: Buffer\n\n): undefined | FreeNode | LeafNode | InnerNode {\n\n switch (data[0]) {\n\n case 0:\n\n throw new Error(\"node is unitialized\");\n\n case 1:\n\n return deserializeUnchecked(\n\n InnerNode.schema,\n\n InnerNode,\n\n data.slice(Slab.NODE_TAG_SIZE)\n\n );\n\n case 2:\n\n return deserializeUnchecked(\n\n LeafNode.schema,\n\n LeafNode,\n\n data.slice(Slab.NODE_TAG_SIZE)\n\n );\n\n case 3:\n\n return deserializeUnchecked(\n\n FreeNode.schema,\n\n FreeNode,\n\n data.slice(Slab.NODE_TAG_SIZE)\n\n );\n\n case 4:\n\n return deserializeUnchecked(\n\n FreeNode.schema,\n\n FreeNode,\n\n data.slice(Slab.NODE_TAG_SIZE)\n\n );\n\n default:\n\n throw new Error(\"Invalid data\");\n\n }\n\n}\n\n\n\nexport class SlabHeader {\n\n accountTag: AccountTag;\n\n bumpIndex: BN;\n\n freeListLen: BN;\n\n freeListHead: number;\n\n callbackMemoryOffset: BN;\n\n callbackFreeListLen: BN;\n\n callbackFreeListHead: BN;\n\n callbackBumpIndex: BN;\n\n rootNode: number;\n\n leafCount: BN;\n\n marketAddress: PublicKey;\n\n\n\n static LEN: number = 97;\n\n static PADDED_LEN: number = SlabHeader.LEN + 7;\n\n\n\n static schema: Schema = new Map([\n\n [\n\n SlabHeader,\n\n {\n\n kind: \"struct\",\n\n fields: [\n\n 
[\"accountTag\", \"u8\"],\n\n [\"bumpIndex\", \"u64\"],\n\n [\"freeListLen\", \"u64\"],\n\n [\"freeListHead\", \"u32\"],\n\n [\"callbackMemoryOffset\", \"u64\"],\n\n [\"callbackFreeListLen\", \"u64\"],\n\n [\"callbackFreeListHead\", \"u64\"],\n\n [\"callbackBumpIndex\", \"u64\"],\n\n [\"rootNode\", \"u32\"],\n\n [\"leafCount\", \"u64\"],\n\n [\"marketAddress\", [32]],\n\n ],\n\n },\n\n ],\n\n ]);\n\n\n\n constructor(arg: {\n\n accountTag: number;\n\n bumpIndex: BN;\n\n freeListLen: BN;\n\n freeListHead: number;\n\n callbackMemoryOffset: BN;\n\n callbackFreeListLen: BN;\n\n callbackFreeListHead: BN;\n\n callbackBumpIndex: BN;\n\n rootNode: number;\n\n leafCount: BN;\n\n marketAddress: Uint8Array;\n\n }) {\n\n this.accountTag = arg.accountTag as AccountTag;\n\n this.bumpIndex = arg.bumpIndex;\n\n this.freeListLen = arg.freeListLen;\n\n this.freeListHead = arg.freeListHead;\n\n this.callbackMemoryOffset = arg.callbackMemoryOffset;\n\n this.callbackFreeListLen = arg.callbackFreeListLen;\n\n this.callbackFreeListHead = arg.callbackFreeListHead;\n\n this.callbackBumpIndex = arg.callbackBumpIndex;\n\n this.rootNode = arg.rootNode;\n\n this.leafCount = arg.leafCount;\n\n this.marketAddress = new PublicKey(arg.marketAddress);\n\n }\n\n}\n\n\n\nexport class Slab {\n\n header: SlabHeader;\n\n buffer: Buffer;\n\n callBackInfoLen: BN;\n\n orderCapacity: number;\n\n callbackMemoryOffset: BN;\n\n\n\n static NODE_SIZE: number = 32;\n\n static NODE_TAG_SIZE: number = 8;\n\n static SLOT_SIZE: number = Slab.NODE_TAG_SIZE + Slab.NODE_SIZE;\n\n\n\n constructor(arg: {\n\n header: SlabHeader;\n\n buffer: Buffer;\n\n callBackInfoLen: BN;\n\n }) {\n\n this.header = arg.header;\n\n this.buffer = arg.buffer;\n\n this.callBackInfoLen = arg.callBackInfoLen;\n\n\n\n const capacity = new BN(this.buffer.length - SlabHeader.PADDED_LEN);\n\n const size = this.callBackInfoLen.addn(Slab.SLOT_SIZE * 2);\n\n this.orderCapacity = Math.floor(capacity.div(size).toNumber());\n\n this.callbackMemoryOffset = new BN(this.orderCapacity)\n\n .muln(2 * Slab.SLOT_SIZE)\n\n .addn(SlabHeader.PADDED_LEN);\n\n }\n\n\n\n static deserialize(data: Buffer, callBackInfoLen: BN) {\n\n return new Slab({\n\n header: deserializeUnchecked(SlabHeader.schema, SlabHeader, data),\n\n buffer: data,\n\n callBackInfoLen,\n\n });\n\n }\n\n\n\n /**\n\n * Returns a node by its key\n\n * @param key Key of the node to fetch\n\n * @returns A node LeafNode object\n\n */\n\n getNodeByKey(key: BN): LeafNode | undefined {\n\n if (this.header.leafCount.eqn(0)) {\n\n return undefined;\n\n }\n\n let pointer = this.header.rootNode;\n\n while (true) {\n\n const offset = SlabHeader.PADDED_LEN + pointer * Slab.SLOT_SIZE;\n\n let node = parseNode(this.buffer.slice(offset, offset + Slab.SLOT_SIZE));\n\n if (node instanceof InnerNode) {\n\n let common_prefix_len = 128 - node.key.xor(key).bitLength();\n\n if (common_prefix_len < node.prefixLen.toNumber()) {\n\n return undefined;\n\n }\n\n const critBitMasks = new BN(1).shln(127 - node.prefixLen.toNumber());\n\n let critBit = key.and(critBitMasks).isZero() ? 
0 : 1;\n\n pointer = node.children[critBit];\n\n } else if (node instanceof LeafNode) {\n\n if (node.key.cmp(key) !== 0) {\n\n return undefined;\n\n }\n\n return node;\n\n } else {\n\n throw new Error(\"Couldn't parse node!\");\n\n }\n\n }\n\n }\n\n\n\n // Uncomment if you are using webassembly\n\n // /**\n\n // * Return min or max node of the critbit tree\n\n // * @param max Boolean (false for best asks and true for best bids)\n\n // * @returns Returns the min or max node of the Slab\n\n // */\n\n // getMinMax(max: boolean) {\n\n // let pointer;\n\n // if (max) {\n\n // pointer = find_max(\n\n // this.data,\n\n // BigInt(this.callBackInfoLen),\n\n // BigInt(this.slotSize)\n\n // );\n\n // } else {\n\n // pointer = find_min(\n\n // this.data,\n\n // BigInt(this.callBackInfoLen),\n\n // BigInt(this.slotSize)\n\n // );\n\n // }\n\n // let offset = SlabHeader.LEN;\n\n // if (!pointer) {\n\n // throw new Error(\"Empty slab\");\n\n // }\n\n // let node = parseNode(\n\n // this.callBackInfoLen,\n\n // this.data.slice(\n\n // offset + pointer * this.slotSize,\n\n // offset + (pointer + 1) * this.slotSize\n\n // )\n\n // );\n\n // return node;\n\n // }\n\n\n\n /**\n\n * Walkdown the critbit tree\n\n * @param descending\n\n * @returns\n\n */\n\n *items(descending = false): Generator<LeafNode> {\n\n if (this.header.leafCount.eq(new BN(0))) {\n\n return;\n\n }\n\n const stack = [this.header.rootNode];\n\n while (stack.length > 0) {\n\n const pointer = stack.pop();\n\n if (pointer === undefined) throw new Error(\"unreachable!\");\n\n let offset = SlabHeader.PADDED_LEN + pointer * Slab.SLOT_SIZE;\n\n const node = parseNode(\n\n this.buffer.slice(offset, offset + Slab.SLOT_SIZE)\n\n );\n\n if (node instanceof LeafNode) {\n\n yield node;\n\n } else if (node instanceof InnerNode) {\n\n if (descending) {\n\n stack.push(node.children[0], node.children[1]);\n\n } else {\n\n stack.push(node.children[1], node.children[0]);\n\n }\n\n }\n\n }\n\n }\n\n\n\n [Symbol.iterator]() {\n\n return this.items(false);\n\n }\n\n\n\n // Uncomment if you are using webassembly\n\n // /**\n\n // * Returns an array of [price, size] given a certain depth\n\n // * @param depth Depth to fetch\n\n // * @param max Boolean (false for asks and true for bids)\n\n // * @returns Returns an array made of [price, size] elements\n\n // */\n\n // getL2Depth(depth: number, increasing: boolean): Price[] {\n\n // let raw = find_l2_depth(\n\n // this.data,\n\n // BigInt(this.callBackInfoLen),\n\n // BigInt(this.slotSize),\n\n // BigInt(depth),\n\n // increasing\n\n // );\n\n // let result: Price[] = [];\n\n // for (let i = 0; i < raw.length / 2; i++) {\n\n // result.push({\n\n // size: Number(raw[2 * i]),\n\n // price: Number(raw[2 * i + 1]) / 2 ** 32,\n\n // });\n\n // }\n\n // return result;\n\n // }\n\n\n\n /**\n\n * Returns the top maxNbOrders (not aggregated by price)\n\n * @param maxNbOrders\n\n * @param max Boolean (false for asks and true for bids)\n\n * @returns Returns an array of LeafNode object\n\n */\n\n getMinMaxNodes(maxNbOrders: number, max: boolean) {\n\n const minMaxOrders: LeafNode[] = [];\n\n for (const leafNode of this.items(max)) {\n\n if (minMaxOrders.length === maxNbOrders) {\n\n break;\n\n }\n\n minMaxOrders.push(leafNode);\n\n }\n\n return minMaxOrders;\n\n }\n\n\n\n /**\n\n * Aggregates price levels up to the given depth\n\n * @param depth maximum number of price levels\n\n * @param increasing true to return in increasing order\n\n * @returns aggregated quantities at each price level\n\n */\n\n getL2DepthJS(depth: 
number, increasing: boolean): Price[] {\n\n if (this.header.leafCount.eq(new BN(0))) {\n\n return [];\n\n }\n\n let raw: number[] = [];\n\n let stack = [this.header.rootNode];\n\n while (true) {\n\n const current = stack.pop();\n\n if (current === undefined) break;\n\n let offset = SlabHeader.PADDED_LEN + current * Slab.SLOT_SIZE;\n\n const node = parseNode(\n\n this.buffer.slice(offset, offset + Slab.SLOT_SIZE)\n\n );\n\n if (node instanceof LeafNode) {\n\n const leafPrice = node.getPrice();\n\n if (raw[raw.length - 1] === leafPrice.toNumber()) {\n\n const idx = raw.length - 2;\n\n raw[idx] += node.baseQuantity.toNumber();\n\n } else if (raw.length === 2 * depth) {\n\n // The price has changed and we have enough prices. Note that the\n\n // above branch will be hit even if we already have `depth` prices\n\n // so that we will finish accumulating the current level. For example,\n\n // if we request one level and there are two order at the best price,\n\n // we will accumulate both orders.\n\n break;\n\n } else {\n\n raw.push(node.baseQuantity.toNumber());\n\n raw.push(leafPrice.toNumber());\n\n }\n\n }\n\n if (node instanceof InnerNode) {\n\n stack.push(node.children[increasing ? 1 : 0]);\n\n stack.push(node.children[increasing ? 0 : 1]);\n\n }\n\n }\n\n let result: Price[] = [];\n\n for (let i = 0; i < raw.length / 2; i++) {\n\n result.push({\n\n size: Number(raw[2 * i]),\n\n price: Number(raw[2 * i + 1]),\n\n });\n\n }\n\n return result;\n\n }\n\n\n\n /**\n\n * @param callBackInfoPt a leaf node's callBackInfoPt that gives the offset to\n\n * the info in the appropriate Slab.\n\n * @returns the raw binary callback info for the node\n\n */\n\n getCallBackInfo(callBackInfoPt: BN) {\n\n return this.buffer.slice(\n\n callBackInfoPt.toNumber(),\n\n callBackInfoPt.add(this.callBackInfoLen).toNumber()\n\n );\n\n }\n\n}\n", "file_path": "js/src/slab.ts", "rank": 47, "score": 26363.108286961993 }, { "content": "impl<'a> Slab<'a> {\n\n pub fn check(&self, side: Side) -> bool {\n\n match side {\n\n Side::Bid => self.header.account_tag == AccountTag::Bids,\n\n Side::Ask => self.header.account_tag == AccountTag::Asks,\n\n }\n\n }\n\n\n\n pub fn new(\n\n buffer: Rc<RefCell<&'a mut [u8]>>,\n\n callback_info_len: usize,\n\n slot_size: usize,\n\n ) -> Self {\n\n Self {\n\n header: SlabHeader::deserialize(&mut (&buffer.borrow() as &[u8])).unwrap(),\n\n buffer: Rc::clone(&buffer),\n\n callback_info_len,\n\n slot_size,\n\n }\n\n }\n", "file_path": "js/wasm/src/critbit.rs", "rank": 50, "score": 28.46097583176103 }, { "content": " pub callback_info_len: usize,\n\n}\n\n\n\n// Data access methods\n\nimpl<'a> Slab<'a> {\n\n pub fn check(&self, side: Side) -> bool {\n\n match side {\n\n Side::Bid => self.header.account_tag == AccountTag::Bids,\n\n Side::Ask => self.header.account_tag == AccountTag::Asks,\n\n }\n\n }\n\n pub fn new_from_acc_info(acc_info: &AccountInfo<'a>, callback_info_len: usize) -> Self {\n\n // assert_eq!(len_without_header % slot_size, 0);\n\n Self {\n\n buffer: Rc::clone(&acc_info.data),\n\n callback_info_len,\n\n header: SlabHeader::deserialize(&mut (&acc_info.data.borrow() as &[u8])).unwrap(),\n\n }\n\n }\n\n\n", "file_path": "program/src/critbit.rs", "rank": 51, "score": 25.999003049879718 }, { "content": "\n\n pub(crate) fn write_header(&self) {\n\n self.header\n\n .serialize(&mut &mut self.buffer.borrow_mut()[..SLAB_HEADER_LEN])\n\n .unwrap()\n\n }\n\n\n\n pub fn compute_slot_size(callback_info_len: usize) -> usize {\n\n std::cmp::max(callback_info_len + 8 + 16 + 1, INNER_NODE_SIZE)\n\n 
}\n\n}\n\n\n\n// Tree nodes manipulation methods\n\nimpl<'a> Slab<'a> {\n\n fn capacity(&self) -> u64 {\n\n ((self.buffer.borrow().len() - SLAB_HEADER_LEN) / self.slot_size) as u64\n\n }\n\n\n\n pub fn get_node(&self, key: u32) -> Option<Node> {\n\n let offset = SLAB_HEADER_LEN + (key as usize) * self.slot_size;\n", "file_path": "js/wasm/src/critbit.rs", "rank": 52, "score": 25.739783409217875 }, { "content": " pub fn new(buffer: Rc<RefCell<&'a mut [u8]>>, callback_info_len: usize) -> Self {\n\n Self {\n\n header: SlabHeader::deserialize(&mut (&buffer.borrow() as &[u8])).unwrap(),\n\n buffer: Rc::clone(&buffer),\n\n callback_info_len,\n\n }\n\n }\n\n\n\n pub(crate) fn write_header(&self) {\n\n self.header\n\n .serialize(&mut &mut self.buffer.borrow_mut()[..SLAB_HEADER_LEN])\n\n .unwrap()\n\n }\n\n\n\n pub(crate) fn initialize(\n\n bids_account: &AccountInfo<'a>,\n\n asks_account: &AccountInfo<'a>,\n\n market_address: Pubkey,\n\n callback_info_len: usize,\n\n ) {\n", "file_path": "program/src/critbit.rs", "rank": 54, "score": 25.489164631172574 }, { "content": " pub(crate) fn gen_order_id(&mut self, limit_price: u64, side: Side) -> u128 {\n\n let seq_num = self.gen_seq_num();\n\n let upper = (limit_price as u128) << 64;\n\n let lower = match side {\n\n Side::Bid => !seq_num,\n\n Side::Ask => seq_num,\n\n };\n\n upper | (lower as u128)\n\n }\n\n\n\n fn gen_seq_num(&mut self) -> u64 {\n\n let seq_num = self.header.seq_num;\n\n self.header.seq_num += 1;\n\n seq_num\n\n }\n\n\n\n pub(crate) fn get_buf_len(&self) -> usize {\n\n self.buffer.borrow().len() - EVENT_QUEUE_HEADER_LEN - (REGISTER_SIZE)\n\n }\n\n\n", "file_path": "program/src/state.rs", "rank": 56, "score": 23.09038880957904 }, { "content": " } else {\n\n result.push(current.unwrap());\n\n }\n\n }\n\n result\n\n }\n\n\n\n pub fn find_l2_depth(&self, depth: usize, increasing: bool) -> Vec<u64> {\n\n let root = self.root();\n\n if root.is_none() {\n\n return vec![];\n\n }\n\n let mut result = Vec::with_capacity(2 * depth);\n\n let mut search_stack = vec![root.unwrap()];\n\n while result.len() != 2 * depth {\n\n let current = search_stack.pop();\n\n if current.is_none() {\n\n break;\n\n }\n\n let node = self.get_node(current.unwrap()).unwrap();\n", "file_path": "js/wasm/src/critbit.rs", "rank": 57, "score": 22.269443464881796 }, { "content": " pub fn find_max(&self) -> Option<NodeHandle> {\n\n self.find_min_max(true)\n\n }\n\n\n\n pub fn find_node_sequence(&self, depth: usize, increasing: bool) -> Vec<NodeHandle> {\n\n let root = self.root();\n\n if root.is_none() {\n\n return vec![];\n\n }\n\n let mut result = Vec::with_capacity(depth);\n\n let mut search_stack = vec![root.unwrap()];\n\n while result.len() != depth {\n\n let current = search_stack.pop();\n\n if current.is_none() {\n\n break;\n\n }\n\n let node = self.get_node(current.unwrap()).unwrap();\n\n if let Node::Inner(ref inner) = node {\n\n search_stack.push(inner.children[increasing as usize]);\n\n search_stack.push(inner.children[1 - increasing as usize]);\n", "file_path": "js/wasm/src/critbit.rs", "rank": 58, "score": 22.26793049117354 }, { "content": "\n\n let offset = PADDED_SLAB_HEADER_LEN + (key as usize) * SLOT_SIZE;\n\n match node_type {\n\n NodeTag::Inner => {\n\n *try_from_bytes_mut(&mut self.buffer.borrow_mut()[offset..offset + 8]).unwrap() =\n\n NodeTag::Inner as u64;\n\n }\n\n NodeTag::Leaf => {\n\n *try_from_bytes_mut(&mut self.buffer.borrow_mut()[offset..offset + 8]).unwrap() =\n\n NodeTag::Leaf as u64;\n\n }\n\n _ => panic!(),\n\n }\n\n 
self.header.free_list_head = next_free_list_head;\n\n self.header.free_list_len -= 1;\n\n Ok(key)\n\n }\n\n\n\n fn remove(&mut self, key: u32) {\n\n let offset = PADDED_SLAB_HEADER_LEN + (key as usize) * SLOT_SIZE;\n", "file_path": "program/src/critbit.rs", "rank": 60, "score": 22.23590835847596 }, { "content": " return Err(std::io::ErrorKind::UnexpectedEof.into());\n\n }\n\n\n\n if self.header.bump_index == std::u32::MAX as u64 {\n\n return Err(std::io::ErrorKind::UnexpectedEof.into());\n\n }\n\n let key = self.header.bump_index;\n\n let offset = PADDED_SLAB_HEADER_LEN + (key as usize) * SLOT_SIZE;\n\n self.header.bump_index += 1;\n\n match node_type {\n\n NodeTag::Inner => {\n\n *try_from_bytes_mut(&mut self.buffer.borrow_mut()[offset..offset + 8])\n\n .unwrap() = NodeTag::Inner as u64;\n\n #[cfg(feature = \"debug-asserts\")]\n\n assert_eq!(self.buffer.borrow()[offset], NodeTag::Inner as u8);\n\n }\n\n NodeTag::Leaf => {\n\n *try_from_bytes_mut(&mut self.buffer.borrow_mut()[offset..offset + 8])\n\n .unwrap() = NodeTag::Leaf as u64;\n\n #[cfg(feature = \"debug-asserts\")]\n", "file_path": "program/src/critbit.rs", "rank": 61, "score": 22.011633750046006 }, { "content": " true,\n\n &mut count,\n\n );\n\n }\n\n }\n\n assert_eq!(\n\n count + self.header.free_list_len as u64,\n\n identity(self.header.bump_index)\n\n );\n\n\n\n let mut free_nodes_remaining = self.header.free_list_len;\n\n let mut next_free_node = self.header.free_list_head;\n\n loop {\n\n let contents;\n\n match free_nodes_remaining {\n\n 0 => break,\n\n 1 => {\n\n contents = self.get_node(next_free_node).unwrap();\n\n assert!(matches!(contents, Node::LastFree(_)));\n\n }\n", "file_path": "js/wasm/src/critbit.rs", "rank": 63, "score": 21.745419888102152 }, { "content": "\n\n/// a is fp0, b is fp32 and result is a*b fp0\n\npub(crate) fn fp32_mul(a: u64, b_fp32: u64) -> u64 {\n\n (((a as u128) * (b_fp32 as u128)) >> 32) as u64\n\n}\n\n\n\npub(crate) fn round_price(tick_size: u64, limit_price: u64, side: Side) -> u64 {\n\n match side {\n\n // Round down\n\n Side::Bid => tick_size * (limit_price / tick_size),\n\n // Round up\n\n Side::Ask => tick_size * ((limit_price + tick_size - 1) / tick_size),\n\n }\n\n}", "file_path": "program/src/utils.rs", "rank": 64, "score": 21.155587614509876 }, { "content": "use crate::error::AoError;\n\nuse crate::state::{AccountTag, Side};\n\nuse borsh::{BorshDeserialize, BorshSerialize};\n\nuse bytemuck::{try_from_bytes, try_from_bytes_mut, Pod, Zeroable};\n\nuse num_derive::FromPrimitive;\n\nuse num_traits::FromPrimitive;\n\nuse solana_program::account_info::AccountInfo;\n\nuse solana_program::pubkey::Pubkey;\n\nuse std::cell::{Ref, RefMut};\n\nuse std::convert::TryInto;\n\nuse std::{cell::RefCell, convert::identity, rc::Rc};\n\n// A Slab contains the data for a slab header and an array of nodes of a critbit tree\n\n// whose leafs contain the data referencing an order of the orderbook.\n\n\n\n////////////////////////////////////\n\n// Nodes\n\n\n\npub type NodeHandle = u32;\n\n\n\npub type IoError = std::io::Error;\n", "file_path": "program/src/critbit.rs", "rank": 65, "score": 20.493951976604542 }, { "content": "\n\n pub fn get_node(&self, key: u32) -> Option<NodeRef> {\n\n let mut offset = PADDED_SLAB_HEADER_LEN + (key as usize) * SLOT_SIZE;\n\n // println!(\"key: {:?}, slot_size: {:?}\", key, self.slot_size);\n\n let node_tag = NodeTag::from_u64(u64::from_le_bytes(\n\n self.buffer.borrow()[offset..offset + NODE_TAG_SIZE]\n\n .try_into()\n\n .unwrap(),\n\n ))\n\n .unwrap();\n\n offset += 
NODE_TAG_SIZE;\n\n let node = match node_tag {\n\n NodeTag::Leaf => {\n\n let node: Ref<LeafNode> = Ref::map(self.buffer.borrow(), |s| {\n\n try_from_bytes(&s[offset..offset + NODE_SIZE]).unwrap()\n\n });\n\n NodeRef::Leaf(node)\n\n }\n\n NodeTag::Inner => {\n\n let node: Ref<InnerNode> = Ref::map(self.buffer.borrow(), |s| {\n", "file_path": "program/src/critbit.rs", "rank": 66, "score": 20.338219868936967 }, { "content": "BorshSize,\n\n)]\n\n#[repr(u8)]\n\n#[allow(missing_docs)]\n\npub enum Side {\n\n Bid,\n\n Ask,\n\n}\n\n\n\nimpl Side {\n\n /// Helper function to get the opposite side.\n\n pub fn opposite(&self) -> Self {\n\n match self {\n\n Side::Bid => Side::Ask,\n\n Side::Ask => Side::Bid,\n\n }\n\n }\n\n}\n\n\n\n#[derive(BorshDeserialize, BorshSerialize, Clone, PartialEq, FromPrimitive, BorshSize)]\n", "file_path": "program/src/state.rs", "rank": 67, "score": 20.143237312940013 }, { "content": " }\n\n\n\n fn children(&self) -> Option<Ref<'a, [u32; 2]>> {\n\n match &self {\n\n Self::Inner(i) => Some(Ref::map(Ref::clone(i), |k| &k.children)),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn as_leaf(&self) -> Option<Ref<'a, LeafNode>> {\n\n match &self {\n\n Self::Leaf(leaf_ref) => Some(Ref::clone(leaf_ref)),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn to_owned(&self) -> Node {\n\n match &self {\n\n NodeRef::Uninitialized => Node::Uninitialized,\n\n NodeRef::Inner(n) => Node::Inner(**n),\n", "file_path": "program/src/critbit.rs", "rank": 68, "score": 20.01966353604165 }, { "content": " pub key: u128,\n\n pub callback_info_pt: u64,\n\n pub base_quantity: u64,\n\n}\n\n\n\npub(crate) const NODE_SIZE: usize = 32;\n\npub(crate) const FREE_NODE_SIZE: usize = 4;\n\n\n\npub(crate) const NODE_TAG_SIZE: usize = 8;\n\npub(crate) const SLOT_SIZE: usize = NODE_TAG_SIZE + NODE_SIZE;\n\n\n\nimpl LeafNode {\n\n pub fn price(&self) -> u64 {\n\n (self.key >> 64) as u64\n\n }\n\n\n\n pub fn order_id(&self) -> u128 {\n\n self.key\n\n }\n\n\n", "file_path": "program/src/critbit.rs", "rank": 69, "score": 19.965379834293906 }, { "content": "\n\n /// Used to deserialize an event object from bytes.\n\n pub fn deserialize(buf: &mut &[u8], callback_info_len: usize) -> Self {\n\n match buf[0] {\n\n 0 => Event::Fill {\n\n taker_side: Side::from_u8(buf[1]).unwrap(),\n\n maker_order_id: u128::from_le_bytes(buf[2..18].try_into().unwrap()),\n\n quote_size: u64::from_le_bytes(buf[18..26].try_into().unwrap()),\n\n base_size: u64::from_le_bytes(buf[26..34].try_into().unwrap()),\n\n maker_callback_info: buf[34..34 + callback_info_len].to_owned(),\n\n taker_callback_info: buf[34 + callback_info_len..34 + (callback_info_len << 1)]\n\n .to_owned(),\n\n },\n\n 1 => Event::Out {\n\n side: Side::from_u8(buf[1]).unwrap(),\n\n order_id: u128::from_le_bytes(buf[2..18].try_into().unwrap()),\n\n base_size: u64::from_le_bytes(buf[18..26].try_into().unwrap()),\n\n delete: buf[26] == 1,\n\n callback_info: buf[27..27 + callback_info_len].to_owned(),\n\n },\n", "file_path": "program/src/state.rs", "rank": 70, "score": 19.903540525869946 }, { "content": " assert_eq!(\n\n count + self.header.free_list_len as u64,\n\n identity(self.header.bump_index)\n\n );\n\n\n\n let mut free_nodes_remaining = self.header.free_list_len;\n\n let mut next_free_node = self.header.free_list_head;\n\n loop {\n\n let contents;\n\n match free_nodes_remaining {\n\n 0 => break,\n\n 1 => {\n\n contents = self.get_node(next_free_node).unwrap();\n\n assert!(matches!(contents, NodeRef::LastFree(_)));\n\n }\n\n _ => {\n\n contents = self.get_node(next_free_node).unwrap();\n\n 
assert!(matches!(contents, NodeRef::Free(_)));\n\n }\n\n };\n", "file_path": "program/src/critbit.rs", "rank": 71, "score": 19.34651502296289 }, { "content": " let mut offset = PADDED_SLAB_HEADER_LEN + (key as usize) * SLOT_SIZE;\n\n // println!(\"key: {:?}, slot_size: {:?}\", key, self.slot_size);\n\n let node_tag = NodeTag::from_u64(u64::from_le_bytes(\n\n self.buffer.borrow()[offset..offset + NODE_TAG_SIZE]\n\n .try_into()\n\n .unwrap(),\n\n ))\n\n .unwrap();\n\n offset += NODE_TAG_SIZE;\n\n let node = match node_tag {\n\n NodeTag::Leaf => {\n\n let node: RefMut<LeafNode> = RefMut::map(self.buffer.borrow_mut(), |s| {\n\n try_from_bytes_mut(&mut s[offset..offset + NODE_SIZE]).unwrap()\n\n });\n\n NodeRefMut::Leaf(node)\n\n }\n\n NodeTag::Inner => {\n\n let node: RefMut<InnerNode> = RefMut::map(self.buffer.borrow_mut(), |s| {\n\n try_from_bytes_mut(&mut s[offset..offset + NODE_SIZE]).unwrap()\n\n });\n", "file_path": "program/src/critbit.rs", "rank": 73, "score": 19.16521632854046 }, { "content": "\n\n pub fn as_leaf(&self) -> Option<&LeafNode> {\n\n match &self {\n\n Node::Leaf(leaf_ref) => Some(leaf_ref),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n////////////////////////////////////\n\n// Slabs\n\n\n\n#[derive(BorshDeserialize, BorshSerialize, Debug)]\n", "file_path": "js/wasm/src/critbit.rs", "rank": 75, "score": 18.805286241633176 }, { "content": " &mut self,\n\n new_leaf_node: &Node,\n\n ) -> Result<(NodeHandle, Option<Node>), AoError> {\n\n let new_leaf = new_leaf_node.as_leaf().unwrap();\n\n let mut root: NodeHandle = match self.root() {\n\n Some(h) => h,\n\n None => {\n\n // create a new root if none exists\n\n let new_leaf_key = self\n\n .insert_node(new_leaf_node)\n\n .map_err(|_| AoError::SlabOutOfSpace)?;\n\n self.header.root_node = new_leaf_key;\n\n self.header.leaf_count += 1;\n\n return Ok((new_leaf_key, None));\n\n }\n\n };\n\n let mut parent_node: Option<NodeHandle> = None;\n\n let mut previous_critbit: Option<bool> = None;\n\n loop {\n\n // check if the new node will be a child of the root\n", "file_path": "program/src/critbit.rs", "rank": 76, "score": 18.51834131075676 }, { "content": "use borsh::{BorshDeserialize, BorshSerialize};\n\nuse num_derive::{FromPrimitive, ToPrimitive};\n\nuse std::convert::TryInto;\n\nuse std::io::Write;\n\nuse std::{cell::RefCell, convert::identity, rc::Rc};\n\n// A Slab contains the data for a slab header and an array of nodes of a critbit tree\n\n// whose leafs contain the data referencing an order of the orderbook.\n\n\n\n////////////////////////////////////\n\n// Nodes\n\n\n\npub type NodeHandle = u32;\n\n\n\npub type IoError = std::io::Error;\n\n\n\n#[derive(BorshDeserialize, BorshSerialize, Debug, PartialEq, Clone)]\n\npub struct InnerNode {\n\n prefix_len: u32,\n\n key: u128,\n\n children: [u32; 2],\n", "file_path": "js/wasm/src/critbit.rs", "rank": 77, "score": 18.28546620381482 }, { "content": " *try_from_bytes_mut(&mut self.buffer.borrow_mut()[offset..offset + 8]).unwrap() =\n\n new_tag as u64;\n\n if let NodeRefMut::Free(mut new_free_node) = self.get_node_mut(key).unwrap() {\n\n new_free_node.next = self.header.free_list_head\n\n };\n\n\n\n self.header.free_list_head = key;\n\n self.header.free_list_len += 1;\n\n }\n\n\n\n fn insert_node(&mut self, node: &Node) -> Result<u32, IoError> {\n\n let handle = self.allocate(&node.tag())?;\n\n self.write_node(node, handle);\n\n Ok(handle)\n\n }\n\n\n\n pub fn write_callback_info(&mut self, callback_info: &[u8]) -> Result<u64, IoError> {\n\n let h = if self.header.callback_free_list_len > 0 
{\n\n let next_free_spot = u64::from_le_bytes(\n\n self.buffer.borrow()[self.header.callback_free_list_head as usize\n", "file_path": "program/src/critbit.rs", "rank": 78, "score": 18.18233108761243 }, { "content": " pub fn get_unchecked<'a, 'b: 'a>(account_info: &'a AccountInfo<'b>) -> RefMut<'a, Self> {\n\n let a = RefMut::map(account_info.data.borrow_mut(), |s| {\n\n try_from_bytes_mut::<Self>(&mut s[0..MARKET_STATE_LEN]).unwrap()\n\n });\n\n a\n\n }\n\n}\n\n\n\n////////////////////////////////////////////////////\n\n// Events\n\n#[derive(BorshDeserialize, BorshSerialize, Debug)]\n\n/// Events are the primary output of the asset agnostic orderbook\n\npub enum Event {\n\n /// A fill event describes a match between a taker order and a provider order\n\n Fill {\n\n #[allow(missing_docs)]\n\n taker_side: Side,\n\n /// The order id of the maker order\n\n maker_order_id: u128,\n\n /// The total quote size of the transaction\n", "file_path": "program/src/state.rs", "rank": 79, "score": 18.088310585957824 }, { "content": " }\n\n\n\n pub fn set_base_quantity(&mut self, quantity: u64) {\n\n self.base_quantity = quantity;\n\n }\n\n}\n\n\n\n#[derive(BorshDeserialize, BorshSerialize, Debug, PartialEq, Clone)]\n\npub struct FreeNode {\n\n next: u32,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Node {\n\n Uninitialized,\n\n Inner(InnerNode),\n\n Leaf(LeafNode),\n\n Free(FreeNode),\n\n LastFree(FreeNode),\n\n}\n", "file_path": "js/wasm/src/critbit.rs", "rank": 80, "score": 17.904100000352777 }, { "content": "\n\n if let NodeRefMut::Inner(mut i) = self.get_node_mut(new_root_node_handle).unwrap() {\n\n i.prefix_len = shared_prefix_len as u64;\n\n i.key = new_leaf.key;\n\n i.children[new_leaf_crit_bit as usize] = new_leaf_handle;\n\n i.children[old_root_crit_bit as usize] = root;\n\n } else {\n\n unreachable!()\n\n }\n\n\n\n if let Some(NodeRefMut::Inner(mut i)) =\n\n parent_node.map(|k| self.get_node_mut(k).unwrap())\n\n {\n\n i.children[previous_critbit.unwrap() as usize] = new_root_node_handle;\n\n }\n\n // Split condition here works around borrow checker\n\n if parent_node.is_none() {\n\n self.header.root_node = new_root_node_handle;\n\n }\n\n\n", "file_path": "program/src/critbit.rs", "rank": 81, "score": 17.816513685577437 }, { "content": "\n\n let bids_order_capacity = (bids_account.data.borrow().len() - PADDED_SLAB_HEADER_LEN)\n\n / (SLOT_SIZE * 2 + callback_info_len);\n\n let bids_callback_memory_offset =\n\n PADDED_SLAB_HEADER_LEN + 2 * bids_order_capacity * SLOT_SIZE;\n\n\n\n header.account_tag = AccountTag::Bids;\n\n header.callback_memory_offset = bids_callback_memory_offset as u64;\n\n header\n\n .serialize(&mut ((&mut bids_account.data.borrow_mut()) as &mut [u8]))\n\n .unwrap();\n\n }\n\n}\n\n\n\n// Tree nodes manipulation methods\n\nimpl<'a> Slab<'a> {\n\n fn capacity(&self) -> u64 {\n\n ((self.buffer.borrow().len() - PADDED_SLAB_HEADER_LEN)\n\n / (2 * SLOT_SIZE + self.callback_info_len)) as u64\n\n }\n", "file_path": "program/src/critbit.rs", "rank": 82, "score": 17.586777709413134 }, { "content": "\n\n#[derive(BorshDeserialize, BorshSerialize, Clone, BorshSize)]\n\n/**\n\nThe required arguments for a new_order instruction.\n\n */\n\npub struct Params {\n\n /// The maximum quantity of base to be traded.\n\n pub max_base_qty: u64,\n\n /// The maximum quantity of quote to be traded.\n\n pub max_quote_qty: u64,\n\n /// The limit price of the order. 
This value is understood as a 32-bit fixed point number.\n\n pub limit_price: u64,\n\n /// The order's side.\n\n pub side: Side,\n\n /// The maximum number of orders to match against before performing a partial fill.\n\n ///\n\n /// It is then possible for a caller program to detect a partial fill by reading the [`OrderSummary`][`crate::orderbook::OrderSummary`]\n\n /// in the event queue register.\n\n pub match_limit: u64,\n\n /// The callback information is used to attach metadata to an order. This callback information will be transmitted back through the event queue.\n", "file_path": "program/src/processor/new_order.rs", "rank": 83, "score": 17.56147685516064 }, { "content": " slab: &'a Slab,\n\n sub_root: NodeHandle,\n\n buf: &mut Vec<(Node, S)>,\n\n ) {\n\n let n = slab.get_node(sub_root).unwrap().to_owned();\n\n match n {\n\n Node::Leaf(ref l) => {\n\n let callback_info =\n\n S::from_bytes(&slab.get_callback_info(l.callback_info_pt as usize));\n\n buf.push((n, callback_info));\n\n }\n\n Node::Inner(inner) => {\n\n walk_rec(slab, inner.children[0], buf);\n\n walk_rec(slab, inner.children[1], buf);\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n let mut buf = Vec::with_capacity(self.header.leaf_count as usize);\n", "file_path": "program/src/critbit.rs", "rank": 84, "score": 17.4624179111597 }, { "content": " *r = *l;\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\n// Critbit tree walks\n\nimpl<'a> Slab<'a> {\n\n pub fn root(&self) -> Option<NodeHandle> {\n\n if self.header.leaf_count == 0 {\n\n return None;\n\n }\n\n\n\n Some(self.header.root_node)\n\n }\n\n\n\n fn find_min_max(&self, find_max: bool) -> Option<NodeHandle> {\n\n let mut root: NodeHandle = self.root()?;\n\n loop {\n", "file_path": "program/src/critbit.rs", "rank": 85, "score": 17.012870910751612 }, { "content": " pub fn set_base_quantity(&mut self, quantity: u64) {\n\n self.base_quantity = quantity;\n\n }\n\n}\n\n\n\n#[derive(BorshDeserialize, BorshSerialize, Debug, PartialEq, Clone, Copy, Pod, Zeroable)]\n\n#[repr(C)]\n\npub struct FreeNode {\n\n next: u32,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, FromPrimitive)]\n\npub enum NodeTag {\n\n Uninitialized,\n\n Inner,\n\n Leaf,\n\n Free,\n\n LastFree,\n\n}\n\n\n", "file_path": "program/src/critbit.rs", "rank": 86, "score": 16.99770171108011 }, { "content": " event_size: u64,\n\n seq_num: u64,\n\n}\n\n#[allow(missing_docs)]\n\npub const EVENT_QUEUE_HEADER_LEN: usize = 37;\n\n#[allow(missing_docs)]\n\npub const REGISTER_SIZE: usize = ORDER_SUMMARY_SIZE as usize + 1; // Option<OrderSummary>\n\n\n\nimpl EventQueueHeader {\n\n pub(crate) fn initialize(callback_info_len: usize) -> Self {\n\n Self {\n\n tag: AccountTag::EventQueue,\n\n head: 0,\n\n count: 0,\n\n event_size: Event::compute_slot_size(callback_info_len) as u64,\n\n seq_num: 0,\n\n }\n\n }\n\n\n\n pub(crate) fn check(self) -> Result<Self, ProgramError> {\n", "file_path": "program/src/state.rs", "rank": 87, "score": 16.672683376893225 }, { "content": "\n\nimpl<'a> Node {\n\n pub fn deserialize(buffer: &[u8], callback_info_len: usize) -> Result<Self, IoError> {\n\n match buffer[0] {\n\n 0 => Ok(Node::Uninitialized),\n\n 1 => Ok(Node::Inner(InnerNode::deserialize(&mut &buffer[1..])?)),\n\n 2 => Ok(Node::Leaf(LeafNode::deserialize(\n\n &buffer[1..],\n\n callback_info_len,\n\n )?)),\n\n 3 => Ok(Node::Free(FreeNode::deserialize(&mut &buffer[1..])?)),\n\n 4 => Ok(Node::LastFree(FreeNode::deserialize(&mut &buffer[1..])?)),\n\n _ => Err(std::io::ErrorKind::InvalidData.into()),\n\n }\n\n }\n\n\n\n pub fn 
serialize<W: Write>(&self, w: &mut W) -> Result<(), IoError> {\n\n match self {\n\n Node::Uninitialized => w.write_all(&[0]),\n\n Node::Inner(n) => {\n", "file_path": "js/wasm/src/critbit.rs", "rank": 88, "score": 16.547726228342523 }, { "content": "\n\n /////////////////////////////////////////\n\n // Misc\n\n\n\n #[cfg(test)]\n\n fn find_by_key(&self, search_key: u128) -> Option<NodeHandle> {\n\n let mut node_handle: NodeHandle = self.root()?;\n\n loop {\n\n let node = self.get_node(node_handle).unwrap();\n\n let node_prefix_len = node.prefix_len().unwrap();\n\n let node_key = node.key().unwrap();\n\n let common_prefix_len = (search_key ^ node_key).leading_zeros();\n\n if common_prefix_len < node_prefix_len {\n\n return None;\n\n }\n\n match node {\n\n Node::Leaf(_) => break Some(node_handle),\n\n Node::Inner(inner) => {\n\n let crit_bit_mask = (1u128 << 127) >> node_prefix_len;\n\n let _search_key_crit_bit = (search_key & crit_bit_mask) != 0;\n", "file_path": "js/wasm/src/critbit.rs", "rank": 89, "score": 16.510928439601688 }, { "content": " }\n\n}\n\n\n\npub const INNER_NODE_SIZE: usize = 32;\n\n\n\nimpl LeafNode {\n\n pub fn new(key: u128, callback_info: Vec<u8>, quantity: u64) -> Self {\n\n LeafNode {\n\n key,\n\n callback_info,\n\n base_quantity: quantity,\n\n }\n\n }\n\n\n\n pub fn price(&self) -> u64 {\n\n (self.key >> 64) as u64\n\n }\n\n\n\n pub fn order_id(&self) -> u128 {\n\n self.key\n", "file_path": "js/wasm/src/critbit.rs", "rank": 90, "score": 16.47628204779508 }, { "content": " self.header.leaf_count += 1;\n\n return Ok((new_leaf_handle, None));\n\n }\n\n }\n\n\n\n /// This function corrupts the node's callback information when erasing it!\n\n pub fn remove_by_key(&mut self, search_key: u128) -> Option<Node> {\n\n let mut grandparent_h: Option<NodeHandle> = None;\n\n let mut parent_h = self.root()?;\n\n // We have to initialize the values to work around the type checker\n\n let mut child_h = 0;\n\n let mut crit_bit = false;\n\n let mut prev_crit_bit: Option<bool> = None;\n\n let mut remove_root = None;\n\n {\n\n let n = self.get_node(parent_h).unwrap();\n\n match n {\n\n NodeRef::Leaf(leaf) if leaf.key == search_key => {\n\n assert_eq!(identity(self.header.leaf_count), 1);\n\n let leaf_copy = Node::Leaf(*leaf);\n", "file_path": "program/src/critbit.rs", "rank": 91, "score": 16.460100003650446 }, { "content": " },\n\n };\n\n\n\n let mut model: BTreeMap<u128, (Node, Pubkey)> = BTreeMap::new();\n\n\n\n let mut all_keys = vec![];\n\n\n\n let mut rng = StdRng::seed_from_u64(trial);\n\n\n\n assert_eq!(slab.find_min(), None);\n\n assert_eq!(slab.find_max(), None);\n\n\n\n for i in 0..100 {\n\n let key = rng.gen();\n\n let owner = Pubkey::new_unique();\n\n let qty = rng.gen();\n\n let callback_info_offset = slab.write_callback_info(&owner.to_bytes()).unwrap();\n\n let leaf = Node::Leaf(LeafNode {\n\n key,\n\n callback_info_pt: callback_info_offset,\n", "file_path": "program/src/critbit.rs", "rank": 92, "score": 16.455272784935644 }, { "content": " NodeRefMut::Inner(node)\n\n }\n\n NodeTag::Free | NodeTag::LastFree => {\n\n let node: RefMut<FreeNode> = RefMut::map(self.buffer.borrow_mut(), |s| {\n\n try_from_bytes_mut(&mut s[offset..offset + FREE_NODE_SIZE]).unwrap()\n\n });\n\n match node_tag {\n\n NodeTag::Free => NodeRefMut::Free(node),\n\n NodeTag::LastFree => NodeRefMut::LastFree(node),\n\n _ => unreachable!(),\n\n }\n\n }\n\n NodeTag::Uninitialized => NodeRefMut::Uninitialized,\n\n };\n\n Some(node)\n\n }\n\n\n\n fn allocate(&mut self, node_type: &NodeTag) -> Result<u32, 
IoError> {\n\n if self.header.free_list_len == 0 {\n\n if self.header.bump_index as usize == self.capacity() as usize {\n", "file_path": "program/src/critbit.rs", "rank": 93, "score": 16.376913454152493 }, { "content": " _ => unreachable!(),\n\n }\n\n loop {\n\n match self.get_node(child_h).unwrap() {\n\n Node::Inner(inner) => {\n\n let (grandchild_h, grandchild_crit_bit) = inner.walk_down(search_key);\n\n parent_h = child_h;\n\n child_h = grandchild_h;\n\n crit_bit = grandchild_crit_bit;\n\n continue;\n\n }\n\n Node::Leaf(leaf) => {\n\n if leaf.key != search_key {\n\n return None;\n\n }\n\n\n\n break;\n\n }\n\n _ => unreachable!(),\n\n }\n", "file_path": "js/wasm/src/critbit.rs", "rank": 94, "score": 16.22022368643343 }, { "content": " let order_capacity = (asks_account.data.borrow().len() - PADDED_SLAB_HEADER_LEN)\n\n / (SLOT_SIZE * 2 + callback_info_len);\n\n\n\n let asks_callback_memory_offset = PADDED_SLAB_HEADER_LEN + 2 * order_capacity * SLOT_SIZE;\n\n let mut header = SlabHeader {\n\n account_tag: AccountTag::Asks,\n\n bump_index: 0,\n\n free_list_len: 0,\n\n free_list_head: 0,\n\n root_node: 0,\n\n leaf_count: 0,\n\n market_address,\n\n callback_memory_offset: asks_callback_memory_offset as u64,\n\n callback_bump_index: asks_callback_memory_offset as u64,\n\n callback_free_list_head: 0,\n\n callback_free_list_len: 0,\n\n };\n\n header\n\n .serialize(&mut ((&mut asks_account.data.borrow_mut()) as &mut [u8]))\n\n .unwrap();\n", "file_path": "program/src/critbit.rs", "rank": 95, "score": 16.201028682749143 }, { "content": " // fn test_node_serialization() {\n\n // let mut rng = StdRng::seed_from_u64(42);\n\n // let mut bytes = [0u8; 100];\n\n // let mut w: &mut [u8] = &mut bytes;\n\n // let l = LeafNode::new(rng.gen(), rng.gen::<[u8; 32]>().to_vec(), rng.gen());\n\n // l.serialize(&mut w).unwrap();\n\n // let new_leaf = LeafNode::deserialize(&bytes, 32).unwrap();\n\n // assert_eq!(l, new_leaf);\n\n // let node = NodeTag::Leaf(l);\n\n // w = &mut bytes;\n\n // node.serialize(&mut &mut w).unwrap();\n\n // let new_node = NodeTag::deserialize(&bytes, 32).unwrap();\n\n // assert_eq!(node, new_node);\n\n // }\n\n\n\n #[test]\n\n fn simulate_find_min() {\n\n use std::collections::BTreeMap;\n\n\n\n for trial in 0..10u64 {\n", "file_path": "program/src/critbit.rs", "rank": 96, "score": 16.16323608609097 }, { "content": "\n\n pub fn remove_by_key(&mut self, search_key: u128) -> Option<Node> {\n\n let mut parent_h = self.root()?;\n\n let mut child_h;\n\n let mut crit_bit;\n\n let n = self.get_node(parent_h).unwrap();\n\n match n {\n\n Node::Leaf(ref leaf) if leaf.key == search_key => {\n\n assert_eq!(identity(self.header.leaf_count), 1);\n\n self.header.root_node = 0;\n\n self.header.leaf_count = 0;\n\n let _old_root = self.remove(parent_h).unwrap();\n\n return Some(n);\n\n }\n\n Node::Leaf(_) => return None,\n\n Node::Inner(inner) => {\n\n let (ch, cb) = inner.walk_down(search_key);\n\n child_h = ch;\n\n crit_bit = cb;\n\n }\n", "file_path": "js/wasm/src/critbit.rs", "rank": 97, "score": 16.097452927339585 }, { "content": " .unwrap();\n\n\n\n Slab::initialize(\n\n accounts.bids,\n\n accounts.asks,\n\n *accounts.market.key,\n\n callback_info_len as usize,\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "program/src/processor/create_market.rs", "rank": 98, "score": 16.05343799566282 }, { "content": "\n\n fn clear_callback_info(&mut self, callback_info_pt: usize) {\n\n self.buffer.borrow_mut()[callback_info_pt..callback_info_pt + 8]\n\n 
.copy_from_slice(&self.header.callback_free_list_head.to_le_bytes());\n\n self.header.callback_free_list_head = callback_info_pt as u64;\n\n self.header.callback_free_list_len += 1;\n\n }\n\n\n\n pub fn get_callback_info(&self, callback_info_pt: usize) -> Ref<[u8]> {\n\n Ref::map(self.buffer.borrow(), |r| {\n\n &r[callback_info_pt..callback_info_pt + self.callback_info_len]\n\n })\n\n }\n\n\n\n pub fn write_node(&mut self, node: &Node, handle: NodeHandle) {\n\n match (node, self.get_node_mut(handle)) {\n\n (Node::Inner(i), Some(NodeRefMut::Inner(mut r))) => {\n\n *r = *i;\n\n }\n\n (Node::Leaf(l), Some(NodeRefMut::Leaf(mut r))) => {\n", "file_path": "program/src/critbit.rs", "rank": 99, "score": 16.022362518918637 } ]
Rust
glib/src/char.rs
YaLTeR/gtk-rs
b10a29d60458d33642c05421b0ece8d67582229e
use crate::translate::FromGlib; use crate::translate::ToGlib; use libc::{c_char, c_uchar}; #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct Char(pub c_char); impl Char { pub fn new(c: char) -> Option<Char> { if c as u32 > 255 { None } else { Some(Char(c as c_char)) } } } impl From<Char> for char { fn from(c: Char) -> char { c.0 as u8 as char } } #[doc(hidden)] impl FromGlib<c_char> for Char { unsafe fn from_glib(value: c_char) -> Self { Char(value) } } #[doc(hidden)] impl ToGlib for Char { type GlibType = c_char; fn to_glib(&self) -> c_char { self.0 } } #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct UChar(pub c_uchar); impl UChar { pub fn new(c: char) -> Option<UChar> { if c as u32 > 255 { None } else { Some(UChar(c as c_uchar)) } } } impl From<UChar> for char { fn from(c: UChar) -> char { c.0 as char } } #[doc(hidden)] impl FromGlib<c_uchar> for UChar { unsafe fn from_glib(value: c_uchar) -> Self { UChar(value) } } #[doc(hidden)] impl ToGlib for UChar { type GlibType = c_uchar; fn to_glib(&self) -> c_uchar { self.0 } } #[cfg(test)] mod tests { use super::*; use crate::translate::from_glib; #[test] fn converts_single_byte_chars() { assert_eq!(Char::new(0 as char), Some(Char(0_i8))); assert_eq!(UChar::new(0 as char), Some(UChar(0_u8))); assert_eq!(UChar::new(255 as char), Some(UChar(255_u8))); assert_eq!(UChar::new('ñ'), Some(UChar(241_u8))); } #[test] fn refuses_multibyte_chars() { assert_eq!(Char::new('☔'), None); assert_eq!(UChar::new('☔'), None); } #[test] fn into_i8() { assert_eq!(Char::new('A').unwrap().to_glib(), 65_i8); } #[test] fn into_u8() { assert_eq!(UChar::new('A').unwrap().to_glib(), 65_u8); } #[test] fn into_char() { assert_eq!(char::from(Char(65_i8)), 'A'); assert_eq!('ñ', UChar(241_u8).into()); } #[test] fn convert_from_glib() { assert_eq!(Char(65_i8), unsafe { from_glib(65_i8) }); assert_eq!(UChar(241_u8), unsafe { from_glib(241_u8) }); } }
use crate::translate::FromGlib; use crate::translate::ToGlib; use libc::{c_char, c_uchar}; #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct Char(pub c_char); impl Char { pub fn new(c: char) -> Option<Char> { if c as u32 > 255 { None
har) -> char { c.0 as char } } #[doc(hidden)] impl FromGlib<c_uchar> for UChar { unsafe fn from_glib(value: c_uchar) -> Self { UChar(value) } } #[doc(hidden)] impl ToGlib for UChar { type GlibType = c_uchar; fn to_glib(&self) -> c_uchar { self.0 } } #[cfg(test)] mod tests { use super::*; use crate::translate::from_glib; #[test] fn converts_single_byte_chars() { assert_eq!(Char::new(0 as char), Some(Char(0_i8))); assert_eq!(UChar::new(0 as char), Some(UChar(0_u8))); assert_eq!(UChar::new(255 as char), Some(UChar(255_u8))); assert_eq!(UChar::new('ñ'), Some(UChar(241_u8))); } #[test] fn refuses_multibyte_chars() { assert_eq!(Char::new('☔'), None); assert_eq!(UChar::new('☔'), None); } #[test] fn into_i8() { assert_eq!(Char::new('A').unwrap().to_glib(), 65_i8); } #[test] fn into_u8() { assert_eq!(UChar::new('A').unwrap().to_glib(), 65_u8); } #[test] fn into_char() { assert_eq!(char::from(Char(65_i8)), 'A'); assert_eq!('ñ', UChar(241_u8).into()); } #[test] fn convert_from_glib() { assert_eq!(Char(65_i8), unsafe { from_glib(65_i8) }); assert_eq!(UChar(241_u8), unsafe { from_glib(241_u8) }); } }
} else { Some(Char(c as c_char)) } } } impl From<Char> for char { fn from(c: Char) -> char { c.0 as u8 as char } } #[doc(hidden)] impl FromGlib<c_char> for Char { unsafe fn from_glib(value: c_char) -> Self { Char(value) } } #[doc(hidden)] impl ToGlib for Char { type GlibType = c_char; fn to_glib(&self) -> c_char { self.0 } } #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct UChar(pub c_uchar); impl UChar { pub fn new(c: char) -> Option<UChar> { if c as u32 > 255 { None } else { Some(UChar(c as c_uchar)) } } } impl From<UChar> for char { fn from(c: UC
random
[ { "content": "#[doc(alias = \"gdk_keyval_convert_case\")]\n\npub fn keyval_convert_case(symbol: u32) -> (u32, u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut lower = mem::MaybeUninit::uninit();\n\n let mut upper = mem::MaybeUninit::uninit();\n\n ffi::gdk_keyval_convert_case(symbol, lower.as_mut_ptr(), upper.as_mut_ptr());\n\n let lower = lower.assume_init();\n\n let upper = upper.assume_init();\n\n (lower, upper)\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 0, "score": 270615.813863342 }, { "content": "#[doc(alias = \"gdk_keyval_to_lower\")]\n\npub fn keyval_to_lower(keyval: u32) -> u32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gdk_keyval_to_lower(keyval) }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 1, "score": 269039.76537236234 }, { "content": "#[doc(alias = \"gdk_unicode_to_keyval\")]\n\npub fn unicode_to_keyval(wc: u32) -> u32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gdk_unicode_to_keyval(wc) }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 2, "score": 269039.76537236234 }, { "content": "#[doc(alias = \"gdk_keyval_to_upper\")]\n\npub fn keyval_to_upper(keyval: u32) -> u32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gdk_keyval_to_upper(keyval) }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 3, "score": 269039.76537236234 }, { "content": "pub fn get_interface_age() -> u32 {\n\n skip_assert_initialized!();\n\n unsafe { ffi::gtk_get_interface_age() as u32 }\n\n}\n\n\n", "file_path": "gtk/src/rt.rs", "rank": 4, "score": 266024.3204502407 }, { "content": "#[doc(alias = \"gtk_main_level\")]\n\npub fn main_level() -> u32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gtk_main_level() }\n\n}\n\n\n\n//#[doc(alias = \"gtk_parse_args\")]\n\n//pub fn parse_args(argv: /*Unimplemented*/Vec<glib::GString>) -> bool {\n\n// unsafe { TODO: call ffi:gtk_parse_args() }\n\n//}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 5, "score": 266024.3204502407 }, { "content": "pub fn get_minor_version() -> u32 {\n\n skip_assert_initialized!();\n\n unsafe { ffi::gtk_get_minor_version() as u32 }\n\n}\n\n\n", "file_path": "gtk/src/rt.rs", "rank": 6, "score": 266024.3204502407 }, { "content": "pub fn get_major_version() -> u32 {\n\n skip_assert_initialized!();\n\n unsafe { ffi::gtk_get_major_version() as u32 }\n\n}\n\n\n", "file_path": "gtk/src/rt.rs", "rank": 7, "score": 266024.3204502407 }, { "content": "#[doc(alias = \"g_random_int\")]\n\npub fn random_int() -> u32 {\n\n unsafe { ffi::g_random_int() }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 8, "score": 266024.3204502407 }, { "content": "pub fn get_micro_version() -> u32 {\n\n skip_assert_initialized!();\n\n unsafe { ffi::gtk_get_micro_version() as u32 }\n\n}\n\n\n", "file_path": "gtk/src/rt.rs", "rank": 9, "score": 266024.3204502407 }, { "content": "pub fn get_binary_age() -> u32 {\n\n skip_assert_initialized!();\n\n unsafe { ffi::gtk_get_binary_age() as u32 }\n\n}\n\n\n", "file_path": "gtk/src/rt.rs", "rank": 10, "score": 266024.3204502407 }, { "content": "#[doc(alias = \"g_spaced_primes_closest\")]\n\npub fn spaced_primes_closest(num: u32) -> u32 {\n\n unsafe { ffi::g_spaced_primes_closest(num) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 11, "score": 265076.3962537155 }, { "content": "#[doc(alias = \"g_get_num_processors\")]\n\npub fn get_num_processors() -> u32 {\n\n unsafe { ffi::g_get_num_processors() }\n\n}\n\n\n", "file_path": 
"glib/src/auto/functions.rs", "rank": 12, "score": 261405.38348650758 }, { "content": "#[doc(alias = \"gtk_get_debug_flags\")]\n\npub fn get_debug_flags() -> u32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gtk_get_debug_flags() }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 13, "score": 261405.38348650758 }, { "content": "#[doc(alias = \"gtk_get_current_event_time\")]\n\npub fn get_current_event_time() -> u32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gtk_get_current_event_time() }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 14, "score": 257045.93110228295 }, { "content": "#[doc(alias = \"gtk_set_debug_flags\")]\n\npub fn set_debug_flags(flags: u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_set_debug_flags(flags);\n\n }\n\n}\n\n\n\n//#[doc(alias = \"gtk_show_about_dialog\")]\n\n//pub fn show_about_dialog<P: IsA<Window>>(parent: Option<&P>, first_property_name: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) {\n\n// unsafe { TODO: call ffi:gtk_show_about_dialog() }\n\n//}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 15, "score": 247232.87456944003 }, { "content": "#[doc(alias = \"g_random_set_seed\")]\n\npub fn random_set_seed(seed: u32) {\n\n unsafe {\n\n ffi::g_random_set_seed(seed);\n\n }\n\n}\n\n\n\n//#[doc(alias = \"g_realloc\")]\n\n//pub fn realloc(mem: /*Unimplemented*/Option<Fundamental: Pointer>, n_bytes: usize) -> /*Unimplemented*/Option<Fundamental: Pointer> {\n\n// unsafe { TODO: call ffi:g_realloc() }\n\n//}\n\n\n\n//#[doc(alias = \"g_realloc_n\")]\n\n//pub fn realloc_n(mem: /*Unimplemented*/Option<Fundamental: Pointer>, n_blocks: usize, n_block_bytes: usize) -> /*Unimplemented*/Option<Fundamental: Pointer> {\n\n// unsafe { TODO: call ffi:g_realloc_n() }\n\n//}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 16, "score": 247232.87456944003 }, { "content": "/// This function derives a weak type for a given strong struct and\n\n/// implementations of `Downgrade` and `Upgrade` traits.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust,ignore\n\n/// #[derive(glib::Downgrade)]\n\n/// struct Unnamed(X, Y);\n\n///\n\n/// #[derive(glib::Downgrade)]\n\n/// struct Named {\n\n/// x: X,\n\n/// y: Y,\n\n/// }\n\n/// ```\n\n///\n\n/// Here is what will be derived:\n\n///\n\n/// ```rust,ignore\n\n/// pub struct UnnamedWeak(<X as Downgrade>::Weak, <Y as Downgrade>::Weak);\n\n///\n\n/// impl glib::clone::Downgrade for Unnamed {\n\n/// type Weak = UnnamedWeak;\n\n///\n\n/// fn downgrade(&self) -> Self::Weak {\n\n/// let Self (ref _0, ref _1) = self;\n\n/// UnnamedWeak (\n\n/// glib::clone::Downgrade::downgrade(_0),\n\n/// glib::clone::Downgrade::downgrade(_1),\n\n/// )\n\n/// }\n\n/// }\n\n///\n\n/// impl glib::clone::Upgrade for UnnamedWeak {\n\n/// type Strong = Unnamed;\n\n///\n\n/// fn upgrade(&self) -> Option<Self::Strong> {\n\n/// let Self (ref _0, ref _1) = self;\n\n/// Some(Unnamed (\n\n/// glib::clone::Upgrade::upgrade(_0)?,\n\n/// glib::clone::Upgrade::upgrade(_1)?,\n\n/// ))\n\n/// }\n\n/// }\n\n///\n\n/// pub struct NamedWeak {\n\n/// x: <X as Downgrade>::Weak,\n\n/// y: <Y as Downgrade>::Weak,\n\n/// }\n\n///\n\n/// impl glib::clone::Downgrade for Named {\n\n/// type Weak = NamedWeak;\n\n///\n\n/// fn downgrade(&self) -> Self::Weak {\n\n/// let Self { ref x, ref y } = self;\n\n/// NamedWeak {\n\n/// glib::clone::Downgrade::downgrade(x),\n\n/// glib::clone::Downgrade::downgrade(y),\n\n/// }\n\n/// }\n\n/// }\n\n///\n\n/// impl glib::clone::Upgrade for 
NamedWeak {\n\n/// type Strong = Named;\n\n///\n\n/// fn upgrade(&self) -> Option<Self::Strong> {\n\n/// let Self { ref x, ref y } = self;\n\n/// Some(Named {\n\n/// glib::clone::Upgrade::upgrade(x)?,\n\n/// glib::clone::Upgrade::upgrade(y)?,\n\n/// })\n\n/// }\n\n/// }\n\n/// ```\n\npub fn derive_downgrade_for_struct(\n\n ident: Ident,\n\n generics: Generics,\n\n data_struct: syn::DataStruct,\n\n) -> TokenStream {\n\n let weak_type = format_ident!(\"{}Weak\", ident);\n\n\n\n let DowngradeStructParts {\n\n weak_fields,\n\n end_of_struct,\n\n destruct,\n\n downgrade,\n\n upgrade,\n\n } = derive_downgrade_fields(data_struct.fields);\n\n\n\n let derived = quote! {\n\n pub struct #weak_type #generics #weak_fields #end_of_struct\n\n\n\n impl #generics glib::clone::Downgrade for #ident #generics {\n\n type Weak = #weak_type #generics;\n", "file_path": "glib-macros/src/downgrade_derive/structs.rs", "rank": 17, "score": 245019.40548049167 }, { "content": "#[doc(alias = \"gdk_set_double_click_time\")]\n\npub fn set_double_click_time(msec: u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_set_double_click_time(msec);\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 18, "score": 243111.63803607057 }, { "content": "#[doc(alias = \"pango_unichar_direction\")]\n\npub fn unichar_direction(ch: char) -> Direction {\n\n unsafe { from_glib(ffi::pango_unichar_direction(ch.to_glib())) }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 19, "score": 239124.70551784185 }, { "content": "#[doc(alias = \"pango_is_zero_width\")]\n\npub fn is_zero_width(ch: char) -> bool {\n\n unsafe { from_glib(ffi::pango_is_zero_width(ch.to_glib())) }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 20, "score": 239124.70551784185 }, { "content": "#[doc(alias = \"gdk_keyval_is_upper\")]\n\npub fn keyval_is_upper(keyval: u32) -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib(ffi::gdk_keyval_is_upper(keyval)) }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 21, "score": 239079.83493919694 }, { "content": "#[doc(alias = \"gdk_keyval_is_lower\")]\n\npub fn keyval_is_lower(keyval: u32) -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib(ffi::gdk_keyval_is_lower(keyval)) }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 22, "score": 239079.83493919694 }, { "content": "#[proc_macro]\n\npub fn clone(item: TokenStream) -> TokenStream {\n\n clone::clone_inner(item)\n\n}\n\n\n", "file_path": "glib-macros/src/lib.rs", "rank": 23, "score": 234988.66877909485 }, { "content": "#[doc(alias = \"gdk_keyval_from_name\")]\n\npub fn keyval_from_name(keyval_name: &str) -> u32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gdk_keyval_from_name(keyval_name.to_glib_none().0) }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 24, "score": 234958.59840582748 }, { "content": "#[doc(alias = \"g_bit_storage\")]\n\npub fn bit_storage(number: libc::c_ulong) -> u32 {\n\n unsafe { ffi::g_bit_storage(number) }\n\n}\n\n\n\n//#[doc(alias = \"g_build_filename\")]\n\n//pub fn build_filename<P: AsRef<std::path::Path>>(first_element: P, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> std::path::PathBuf {\n\n// unsafe { TODO: call ffi:g_build_filename() }\n\n//}\n\n\n\n//#[cfg(any(feature = \"v2_56\", feature = \"dox\"))]\n\n//#[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v2_56\")))]\n\n//#[doc(alias = \"g_build_filename_valist\")]\n\n//pub fn build_filename_valist<P: 
AsRef<std::path::Path>>(first_element: P, args: /*Unknown conversion*//*Unimplemented*/Unsupported) -> std::path::PathBuf {\n\n// unsafe { TODO: call ffi:g_build_filename_valist() }\n\n//}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 25, "score": 228077.20219915968 }, { "content": "#[doc(alias = \"gdk_x11_get_server_time\")]\n\npub fn x11_get_server_time(window: &X11Window) -> u32 {\n\n skip_assert_initialized!();\n\n unsafe { ffi::gdk_x11_get_server_time(window.to_glib_none().0) }\n\n}\n\n\n", "file_path": "gdkx11/src/auto/functions.rs", "rank": 26, "score": 227356.71779846694 }, { "content": "#[doc(alias = \"gtk_accelerator_parse\")]\n\npub fn accelerator_parse(accelerator: &str) -> (u32, gdk::ModifierType) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut accelerator_key = mem::MaybeUninit::uninit();\n\n let mut accelerator_mods = mem::MaybeUninit::uninit();\n\n ffi::gtk_accelerator_parse(\n\n accelerator.to_glib_none().0,\n\n accelerator_key.as_mut_ptr(),\n\n accelerator_mods.as_mut_ptr(),\n\n );\n\n let accelerator_key = accelerator_key.assume_init();\n\n let accelerator_mods = accelerator_mods.assume_init();\n\n (accelerator_key, from_glib(accelerator_mods))\n\n }\n\n}\n\n\n\n//#[doc(alias = \"gtk_accelerator_parse_with_keycode\")]\n\n//pub fn accelerator_parse_with_keycode(accelerator: &str, accelerator_codes: Vec<u32>) -> (u32, gdk::ModifierType) {\n\n// unsafe { TODO: call ffi:gtk_accelerator_parse_with_keycode() }\n\n//}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 27, "score": 214589.58135132317 }, { "content": "#[doc(alias = \"gtk_accelerator_valid\")]\n\npub fn accelerator_valid(keyval: u32, modifiers: gdk::ModifierType) -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib(ffi::gtk_accelerator_valid(keyval, modifiers.to_glib())) }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 28, "score": 205984.98936617878 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with second granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` seconds until it\n\n/// returns `Continue(false)`. Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\npub fn timeout_add_seconds<F>(interval: u32, func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n from_glib(ffi::g_timeout_add_seconds_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n interval,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 29, "score": 205984.98936617878 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with second granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` seconds until it\n\n/// returns `Continue(false)`. 
Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `timeout_add_seconds()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\npub fn timeout_add_seconds_local<F>(interval: u32, func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + 'static,\n\n{\n\n unsafe {\n\n assert!(MainContext::default().is_owner());\n\n from_glib(ffi::g_timeout_add_seconds_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n interval,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 30, "score": 202645.04319422247 }, { "content": "pub fn tokens_to_string(parts: Peekable<ProcIter>) -> String {\n\n let mut ret = String::new();\n\n // This is used in case of \"if ident\" or other similar cases.\n\n let mut prev_is_ident = false;\n\n let handle_ident_like = |i: String, ret: &mut String, prev_is_ident: &mut bool| {\n\n if *prev_is_ident {\n\n ret.push(' ');\n\n }\n\n ret.push_str(&i);\n\n *prev_is_ident = true;\n\n };\n\n\n\n for token in parts {\n\n match token {\n\n TokenTree::Punct(p) => {\n\n prev_is_ident = false;\n\n ret.push_str(&p.to_string());\n\n }\n\n TokenTree::Ident(i) => handle_ident_like(i.to_string(), &mut ret, &mut prev_is_ident),\n\n TokenTree::Literal(l) => handle_ident_like(l.to_string(), &mut ret, &mut prev_is_ident),\n", "file_path": "glib-macros/src/clone.rs", "rank": 31, "score": 196334.58205156855 }, { "content": "pub fn init() {\n\n assert_not_initialized!();\n\n unsafe {\n\n ffi::gdk_init(ptr::null_mut(), ptr::null_mut());\n\n set_initialized();\n\n }\n\n}\n", "file_path": "gdk/src/rt.rs", "rank": 32, "score": 194629.2404622686 }, { "content": "#[doc(alias = \"gdk_selection_convert\")]\n\npub fn selection_convert(requestor: &Window, selection: &Atom, target: &Atom, time_: u32) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gdk_selection_convert(\n\n requestor.to_glib_none().0,\n\n selection.to_glib_none().0,\n\n target.to_glib_none().0,\n\n time_,\n\n );\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 33, "score": 194277.81046467042 }, { "content": "pub fn impl_downgrade(input: DeriveInput) -> TokenStream {\n\n match input.data {\n\n Data::Struct(data_struct) => {\n\n structs::derive_downgrade_for_struct(input.ident, input.generics, data_struct)\n\n }\n\n Data::Enum(data_enum) => {\n\n enums::derive_downgrade_for_enum(input.ident, input.generics, data_enum)\n\n }\n\n Data::Union(..) 
=> {\n\n panic!(\"#[derive(Downgrade)] is not available for unions.\");\n\n }\n\n }\n\n}\n", "file_path": "glib-macros/src/downgrade_derive/mod.rs", "rank": 34, "score": 193650.24365611456 }, { "content": "pub fn check_version(\n\n required_major: u32,\n\n required_minor: u32,\n\n required_micro: u32,\n\n) -> Option<String> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_none(ffi::gtk_check_version(\n\n required_major as c_uint,\n\n required_minor as c_uint,\n\n required_micro as c_uint,\n\n ))\n\n }\n\n}\n", "file_path": "gtk/src/rt.rs", "rank": 35, "score": 190931.99892686016 }, { "content": "#[doc(alias = \"gtk_main\")]\n\npub fn main() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_main();\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 36, "score": 190931.99892686016 }, { "content": "pub fn property_change(\n\n window: &super::Window,\n\n property: &super::Atom,\n\n type_: &super::Atom,\n\n format: i32,\n\n mode: super::PropMode,\n\n data: super::ChangeData,\n\n) {\n\n skip_assert_initialized!();\n\n let nelements = data.len();\n\n unsafe {\n\n ffi::gdk_property_change(\n\n window.to_glib_none().0,\n\n property.to_glib_none().0,\n\n type_.to_glib_none().0,\n\n format,\n\n mode.to_glib(),\n\n data.to_glib(),\n\n nelements as i32,\n\n );\n\n }\n\n}\n", "file_path": "gdk/src/functions.rs", "rank": 37, "score": 190931.99892686016 }, { "content": "#[doc(alias = \"pango_itemize\")]\n\npub fn itemize(\n\n context: &Context,\n\n text: &str,\n\n start_index: i32,\n\n length: i32,\n\n attrs: &AttrList,\n\n cached_iter: Option<&AttrIterator>,\n\n) -> Vec<Item> {\n\n unsafe {\n\n FromGlibPtrContainer::from_glib_full(ffi::pango_itemize(\n\n context.to_glib_none().0,\n\n text.to_glib_none().0,\n\n start_index,\n\n length,\n\n attrs.to_glib_none().0,\n\n mut_override(cached_iter.to_glib_none().0),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 38, "score": 190931.99892686016 }, { "content": "#[doc(alias = \"gdk_flush\")]\n\npub fn flush() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_flush();\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 39, "score": 190931.99892686016 }, { "content": "#[cfg(any(feature = \"v1_44\", feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v1_44\")))]\n\npub fn shape_with_flags(\n\n item_text: &str,\n\n paragraph_text: Option<&str>,\n\n analysis: &Analysis,\n\n glyphs: &mut GlyphString,\n\n flags: ShapeFlags,\n\n) {\n\n let item_length = item_text.len() as i32;\n\n let paragraph_length = paragraph_text.map(|t| t.len() as i32).unwrap_or_default();\n\n unsafe {\n\n ffi::pango_shape_with_flags(\n\n item_text.to_glib_none().0,\n\n item_length,\n\n paragraph_text.to_glib_none().0,\n\n paragraph_length,\n\n analysis.to_glib_none().0,\n\n glyphs.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n );\n\n }\n\n}\n", "file_path": "pango/src/functions.rs", "rank": 40, "score": 190931.99892686016 }, { "content": "#[doc(alias = \"gdk_beep\")]\n\npub fn beep() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_beep();\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 41, "score": 190931.99892686016 }, { "content": "pub fn main_quit() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n if ffi::gtk_main_level() > 0 {\n\n ffi::gtk_main_quit();\n\n } else if cfg!(debug_assertions) {\n\n panic!(\"Attempted to quit a GTK main loop when none is running.\");\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"gtk/src/rt.rs", "rank": 42, "score": 190931.99892686016 }, { "content": "pub fn shape_full(\n\n item_text: &str,\n\n paragraph_text: Option<&str>,\n\n analysis: &Analysis,\n\n glyphs: &mut GlyphString,\n\n) {\n\n let paragraph_length = match paragraph_text {\n\n Some(s) => s.len(),\n\n None => 0,\n\n } as i32;\n\n let paragraph_text = paragraph_text.to_glib_none();\n\n let item_length = item_text.len() as i32;\n\n unsafe {\n\n ffi::pango_shape_full(\n\n item_text.to_glib_none().0,\n\n item_length,\n\n paragraph_text.0,\n\n paragraph_length,\n\n analysis.to_glib_none().0,\n\n glyphs.to_glib_none_mut().0,\n\n );\n\n }\n\n}\n\n\n", "file_path": "pango/src/functions.rs", "rank": 43, "score": 190931.99892686016 }, { "content": "#[doc(alias = \"g_dngettext\")]\n\npub fn dngettext(\n\n domain: Option<&str>,\n\n msgid: &str,\n\n msgid_plural: &str,\n\n n: libc::c_ulong,\n\n) -> crate::GString {\n\n unsafe {\n\n from_glib_none(ffi::g_dngettext(\n\n domain.to_glib_none().0,\n\n msgid.to_glib_none().0,\n\n msgid_plural.to_glib_none().0,\n\n n,\n\n ))\n\n }\n\n}\n\n\n\n//#[doc(alias = \"g_double_equal\")]\n\n//pub fn double_equal(v1: /*Unimplemented*/Fundamental: Pointer, v2: /*Unimplemented*/Fundamental: Pointer) -> bool {\n\n// unsafe { TODO: call ffi:g_double_equal() }\n\n//}\n\n\n\n//#[doc(alias = \"g_double_hash\")]\n\n//pub fn double_hash(v: /*Unimplemented*/Fundamental: Pointer) -> u32 {\n\n// unsafe { TODO: call ffi:g_double_hash() }\n\n//}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 44, "score": 190931.99892686016 }, { "content": "pub fn impl_genum(input: &syn::DeriveInput) -> TokenStream {\n\n let name = &input.ident;\n\n\n\n let crate_ident = crate_ident_new();\n\n\n\n let enum_variants = match input.data {\n\n Data::Enum(ref e) => &e.variants,\n\n _ => abort_call_site!(\"GEnum only supports enums\"),\n\n };\n\n\n\n let gtype_name = match parse_type_name(&input, \"genum\") {\n\n Ok(v) => v,\n\n Err(e) => abort_call_site!(\n\n \"{}: derive(GEnum) requires #[genum(type_name = \\\"EnumTypeName\\\")]\",\n\n e\n\n ),\n\n };\n\n let get_type = format_ident!(\"{}_get_type\", name.to_string().to_snake_case());\n\n let from_glib = gen_from_glib(name, enum_variants);\n\n let (genum_values, nb_genum_values) = gen_genum_values(name, enum_variants);\n", "file_path": "glib-macros/src/genum_derive.rs", "rank": 45, "score": 189122.89232196633 }, { "content": "pub fn impl_gboxed(input: &syn::DeriveInput) -> TokenStream {\n\n let name = &input.ident;\n\n\n\n let gtype_name = match parse_type_name(&input, \"gboxed\") {\n\n Ok(v) => v,\n\n Err(e) => abort_call_site!(\n\n \"{}: derive(GBoxed) requires #[gboxed(type_name = \\\"BoxedTypeName\\\")]\",\n\n e\n\n ),\n\n };\n\n\n\n let crate_ident = crate_ident_new();\n\n\n\n let meta = find_attribute_meta(&input.attrs, \"gboxed\")\n\n .unwrap()\n\n .unwrap();\n\n let nullable = find_nested_meta(&meta, \"nullable\").is_some();\n\n\n\n let ptr_to_option = gen_ptr_to_option(name, nullable);\n\n let impl_from_value = if !nullable {\n", "file_path": "glib-macros/src/gboxed_derive.rs", "rank": 46, "score": 189122.89232196633 }, { "content": "/// To set the default print handler, use the [`set_print_handler`] function.\n\npub fn unset_print_handler() {\n\n *PRINT_HANDLER\n\n .lock()\n\n .expect(\"Failed to lock PRINT_HANDLER to remove callback\") = None;\n\n unsafe { ffi::g_set_print_handler(None) };\n\n}\n\n\n\nstatic PRINTERR_HANDLER: Lazy<Mutex<Option<Arc<PrintCallback>>>> = Lazy::new(|| Mutex::new(None));\n\n\n", "file_path": "glib/src/log.rs", "rank": 
47, "score": 187468.44049874262 }, { "content": "/// To set the default print handler, use the [`set_printerr_handler`] function.\n\npub fn unset_printerr_handler() {\n\n *PRINTERR_HANDLER\n\n .lock()\n\n .expect(\"Failed to lock PRINTERR_HANDLER to remove callback\") = None;\n\n unsafe { ffi::g_set_printerr_handler(None) };\n\n}\n\n\n", "file_path": "glib/src/log.rs", "rank": 48, "score": 187468.44049874262 }, { "content": "#[doc(alias = \"g_assert_warning\")]\n\npub fn assert_warning(\n\n log_domain: &str,\n\n file: &str,\n\n line: i32,\n\n pretty_function: &str,\n\n expression: &str,\n\n) {\n\n unsafe {\n\n ffi::g_assert_warning(\n\n log_domain.to_glib_none().0,\n\n file.to_glib_none().0,\n\n line,\n\n pretty_function.to_glib_none().0,\n\n expression.to_glib_none().0,\n\n );\n\n }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 49, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"pango_parse_markup\")]\n\npub fn parse_markup(\n\n markup_text: &str,\n\n accel_marker: char,\n\n) -> Result<(AttrList, glib::GString, char), glib::Error> {\n\n let length = markup_text.len() as i32;\n\n unsafe {\n\n let mut attr_list = ptr::null_mut();\n\n let mut text = ptr::null_mut();\n\n let mut accel_char = mem::MaybeUninit::uninit();\n\n let mut error = ptr::null_mut();\n\n let _ = ffi::pango_parse_markup(\n\n markup_text.to_glib_none().0,\n\n length,\n\n accel_marker.to_glib(),\n\n &mut attr_list,\n\n &mut text,\n\n accel_char.as_mut_ptr(),\n\n &mut error,\n\n );\n\n let accel_char = accel_char.assume_init();\n", "file_path": "pango/src/auto/functions.rs", "rank": 50, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"glib_check_version\")]\n\npub fn check_version(\n\n required_major: u32,\n\n required_minor: u32,\n\n required_micro: u32,\n\n) -> crate::GString {\n\n unsafe {\n\n from_glib_none(ffi::glib_check_version(\n\n required_major,\n\n required_minor,\n\n required_micro,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 51, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"gdk_property_get\")]\n\npub fn property_get(\n\n window: &Window,\n\n property: &Atom,\n\n type_: &Atom,\n\n offset: libc::c_ulong,\n\n length: libc::c_ulong,\n\n pdelete: i32,\n\n) -> Option<(Atom, i32, Vec<u8>)> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n let mut actual_property_type = Atom::uninitialized();\n\n let mut actual_format = mem::MaybeUninit::uninit();\n\n let mut actual_length = mem::MaybeUninit::uninit();\n\n let mut data = ptr::null_mut();\n\n let ret = from_glib(ffi::gdk_property_get(\n\n window.to_glib_none().0,\n\n property.to_glib_none().0,\n\n type_.to_glib_none().0,\n\n offset,\n\n length,\n", "file_path": "gdk/src/auto/functions.rs", "rank": 52, "score": 187461.5760489315 }, { "content": "#[cfg_attr(feature = \"v2_46\", deprecated)]\n\n#[doc(alias = \"g_mem_profile\")]\n\npub fn mem_profile() {\n\n unsafe {\n\n ffi::g_mem_profile();\n\n }\n\n}\n\n\n\n//#[cfg_attr(feature = \"v2_46\", deprecated)]\n\n//#[doc(alias = \"g_mem_set_vtable\")]\n\n//pub fn mem_set_vtable(vtable: /*Ignored*/&mut MemVTable) {\n\n// unsafe { TODO: call ffi:g_mem_set_vtable() }\n\n//}\n\n\n\n//#[doc(alias = \"g_memdup\")]\n\n//pub fn memdup(mem: /*Unimplemented*/Option<Fundamental: Pointer>, byte_size: u32) -> /*Unimplemented*/Option<Fundamental: Pointer> {\n\n// unsafe { TODO: call ffi:g_memdup() }\n\n//}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 53, "score": 187461.5760489315 }, { "content": "#[doc(alias = 
\"gtk_disable_setlocale\")]\n\npub fn disable_setlocale() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_disable_setlocale();\n\n }\n\n}\n\n\n\n//#[doc(alias = \"gtk_distribute_natural_allocation\")]\n\n//pub fn distribute_natural_allocation(extra_space: i32, n_requested_sizes: u32, sizes: /*Ignored*/&mut RequestedSize) -> i32 {\n\n// unsafe { TODO: call ffi:gtk_distribute_natural_allocation() }\n\n//}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 54, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"g_bus_get\")]\n\npub fn bus_get<\n\n P: IsA<Cancellable>,\n\n Q: FnOnce(Result<DBusConnection, glib::Error>) + Send + 'static,\n\n>(\n\n bus_type: BusType,\n\n cancellable: Option<&P>,\n\n callback: Q,\n\n) {\n\n let user_data: Box_<Q> = Box_::new(callback);\n\n unsafe extern \"C\" fn bus_get_trampoline<\n\n Q: FnOnce(Result<DBusConnection, glib::Error>) + Send + 'static,\n\n >(\n\n _source_object: *mut glib::gobject_ffi::GObject,\n\n res: *mut crate::ffi::GAsyncResult,\n\n user_data: glib::ffi::gpointer,\n\n ) {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::g_bus_get_finish(res, &mut error);\n\n let result = if error.is_null() {\n\n Ok(from_glib_full(ret))\n", "file_path": "gio/src/auto/functions.rs", "rank": 55, "score": 187461.5760489315 }, { "content": "#[cfg_attr(feature = \"v1_38\", deprecated)]\n\n#[doc(alias = \"pango_parse_enum\")]\n\npub fn parse_enum(\n\n type_: glib::types::Type,\n\n str: Option<&str>,\n\n warn: bool,\n\n) -> Option<(i32, glib::GString)> {\n\n unsafe {\n\n let mut value = mem::MaybeUninit::uninit();\n\n let mut possible_values = ptr::null_mut();\n\n let ret = from_glib(ffi::pango_parse_enum(\n\n type_.to_glib(),\n\n str.to_glib_none().0,\n\n value.as_mut_ptr(),\n\n warn.to_glib(),\n\n &mut possible_values,\n\n ));\n\n let value = value.assume_init();\n\n if ret {\n\n Some((value, from_glib_full(possible_values)))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 56, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"pango_version_check\")]\n\npub fn version_check(\n\n required_major: i32,\n\n required_minor: i32,\n\n required_micro: i32,\n\n) -> Option<glib::GString> {\n\n unsafe {\n\n from_glib_none(ffi::pango_version_check(\n\n required_major,\n\n required_minor,\n\n required_micro,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 57, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"gtk_accelerator_name\")]\n\npub fn accelerator_name(\n\n accelerator_key: u32,\n\n accelerator_mods: gdk::ModifierType,\n\n) -> Option<glib::GString> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_accelerator_name(\n\n accelerator_key,\n\n accelerator_mods.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 58, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"g_warn_message\")]\n\npub fn warn_message(\n\n domain: Option<&str>,\n\n file: &str,\n\n line: i32,\n\n func: &str,\n\n warnexpr: Option<&str>,\n\n) {\n\n unsafe {\n\n ffi::g_warn_message(\n\n domain.to_glib_none().0,\n\n file.to_glib_none().0,\n\n line,\n\n func.to_glib_none().0,\n\n warnexpr.to_glib_none().0,\n\n );\n\n }\n\n}\n", "file_path": "glib/src/auto/functions.rs", "rank": 59, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"gtk_show_uri\")]\n\npub fn show_uri(\n\n screen: Option<&gdk::Screen>,\n\n uri: &str,\n\n timestamp: u32,\n\n) -> Result<(), glib::Error> {\n\n 
assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let _ = ffi::gtk_show_uri(\n\n screen.to_glib_none().0,\n\n uri.to_glib_none().0,\n\n timestamp,\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(())\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 60, "score": 187461.5760489315 }, { "content": "#[doc(alias = \"g_networking_init\")]\n\npub fn networking_init() {\n\n unsafe {\n\n ffi::g_networking_init();\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 61, "score": 187461.5760489315 }, { "content": "#[cfg(not(windows))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(not(windows))))]\n\npub fn spawn_async_with_pipes<\n\n P: AsRef<std::path::Path>,\n\n T: FromRawFd,\n\n U: FromRawFd,\n\n V: FromRawFd,\n\n>(\n\n working_directory: P,\n\n argv: &[&std::path::Path],\n\n envp: &[&std::path::Path],\n\n flags: SpawnFlags,\n\n child_setup: Option<Box_<dyn FnOnce() + 'static>>,\n\n) -> Result<(Pid, T, U, V), Error> {\n\n let child_setup_data: Box_<Option<Box_<dyn FnOnce() + 'static>>> = Box_::new(child_setup);\n\n unsafe extern \"C\" fn child_setup_func<P: AsRef<std::path::Path>>(user_data: ffi::gpointer) {\n\n let callback: Box_<Option<Box_<dyn FnOnce() + 'static>>> =\n\n Box_::from_raw(user_data as *mut _);\n\n let callback = (*callback).expect(\"cannot get closure...\");\n\n callback()\n\n }\n\n let child_setup = if child_setup_data.is_some() {\n", "file_path": "glib/src/functions.rs", "rank": 62, "score": 187461.5760489315 }, { "content": "/// Create a `Future` that will resolve after the given number of seconds.\n\n///\n\n/// The `Future` must be spawned on an `Executor` backed by a `glib::MainContext`.\n\npub fn timeout_future_seconds(value: u32) -> Pin<Box<dyn Future<Output = ()> + Send + 'static>> {\n\n timeout_future_seconds_with_priority(crate::PRIORITY_DEFAULT, value)\n\n}\n\n\n", "file_path": "glib/src/source_futures.rs", "rank": 63, "score": 184588.90754212876 }, { "content": "/// Create a `Stream` that will provide a value every given number of seconds.\n\n///\n\n/// The `Stream` must be spawned on an `Executor` backed by a `glib::MainContext`.\n\npub fn interval_stream_seconds(value: u32) -> Pin<Box<dyn Stream<Item = ()> + Send + 'static>> {\n\n interval_stream_seconds_with_priority(crate::PRIORITY_DEFAULT, value)\n\n}\n\n\n", "file_path": "glib/src/source_futures.rs", "rank": 64, "score": 184588.90754212876 }, { "content": "/// To set the default print handler, use the [`log_set_default_handler`] function.\n\npub fn log_unset_default_handler() {\n\n *DEFAULT_HANDLER\n\n .lock()\n\n .expect(\"Failed to lock DEFAULT_HANDLER to remove callback\") = None;\n\n unsafe {\n\n ffi::g_log_set_default_handler(Some(ffi::g_log_default_handler), std::ptr::null_mut())\n\n };\n\n}\n\n\n", "file_path": "glib/src/log.rs", "rank": 65, "score": 184204.52090098988 }, { "content": "#[doc(alias = \"g_assertion_message_cmpstr\")]\n\npub fn assertion_message_cmpstr(\n\n domain: &str,\n\n file: &str,\n\n line: i32,\n\n func: &str,\n\n expr: &str,\n\n arg1: &str,\n\n cmp: &str,\n\n arg2: &str,\n\n) {\n\n unsafe {\n\n ffi::g_assertion_message_cmpstr(\n\n domain.to_glib_none().0,\n\n file.to_glib_none().0,\n\n line,\n\n func.to_glib_none().0,\n\n expr.to_glib_none().0,\n\n arg1.to_glib_none().0,\n\n cmp.to_glib_none().0,\n\n arg2.to_glib_none().0,\n\n );\n\n }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 66, "score": 184197.72068036586 }, { "content": "#[doc(alias = 
\"gdk_pixbuf_get_from_surface\")]\n\npub fn pixbuf_get_from_surface(\n\n surface: &cairo::Surface,\n\n src_x: i32,\n\n src_y: i32,\n\n width: i32,\n\n height: i32,\n\n) -> Option<gdk_pixbuf::Pixbuf> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gdk_pixbuf_get_from_surface(\n\n mut_override(surface.to_glib_none().0),\n\n src_x,\n\n src_y,\n\n width,\n\n height,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 67, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"g_resources_enumerate_children\")]\n\npub fn resources_enumerate_children(\n\n path: &str,\n\n lookup_flags: ResourceLookupFlags,\n\n) -> Result<Vec<glib::GString>, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = ffi::g_resources_enumerate_children(\n\n path.to_glib_none().0,\n\n lookup_flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(FromGlibPtrContainer::from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 68, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"g_resources_lookup_data\")]\n\npub fn resources_lookup_data(\n\n path: &str,\n\n lookup_flags: ResourceLookupFlags,\n\n) -> Result<glib::Bytes, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret =\n\n ffi::g_resources_lookup_data(path.to_glib_none().0, lookup_flags.to_glib(), &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 69, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gtk_accelerator_get_label\")]\n\npub fn accelerator_get_label(\n\n accelerator_key: u32,\n\n accelerator_mods: gdk::ModifierType,\n\n) -> Option<glib::GString> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_accelerator_get_label(\n\n accelerator_key,\n\n accelerator_mods.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 70, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gdk_selection_owner_set\")]\n\npub fn selection_owner_set(\n\n owner: Option<&Window>,\n\n selection: &Atom,\n\n time_: u32,\n\n send_event: bool,\n\n) -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gdk_selection_owner_set(\n\n owner.to_glib_none().0,\n\n selection.to_glib_none().0,\n\n time_,\n\n send_event.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 71, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gtk_accelerator_name_with_keycode\")]\n\npub fn accelerator_name_with_keycode(\n\n display: Option<&gdk::Display>,\n\n accelerator_key: u32,\n\n keycode: u32,\n\n accelerator_mods: gdk::ModifierType,\n\n) -> Option<glib::GString> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_accelerator_name_with_keycode(\n\n display.to_glib_none().0,\n\n accelerator_key,\n\n keycode,\n\n accelerator_mods.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 72, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"pango_itemize_with_base_dir\")]\n\npub fn itemize_with_base_dir(\n\n context: &Context,\n\n base_dir: Direction,\n\n text: &str,\n\n start_index: i32,\n\n length: i32,\n\n attrs: &AttrList,\n\n cached_iter: Option<&AttrIterator>,\n\n) -> Vec<Item> {\n\n unsafe {\n\n 
FromGlibPtrContainer::from_glib_full(ffi::pango_itemize_with_base_dir(\n\n context.to_glib_none().0,\n\n base_dir.to_glib(),\n\n text.to_glib_none().0,\n\n start_index,\n\n length,\n\n attrs.to_glib_none().0,\n\n mut_override(cached_iter.to_glib_none().0),\n\n ))\n\n }\n", "file_path": "pango/src/auto/functions.rs", "rank": 73, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gdk_selection_send_notify\")]\n\npub fn selection_send_notify(\n\n requestor: &Window,\n\n selection: &Atom,\n\n target: &Atom,\n\n property: &Atom,\n\n time_: u32,\n\n) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gdk_selection_send_notify(\n\n requestor.to_glib_none().0,\n\n selection.to_glib_none().0,\n\n target.to_glib_none().0,\n\n property.to_glib_none().0,\n\n time_,\n\n );\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 74, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gdk_error_trap_push\")]\n\npub fn error_trap_push() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_error_trap_push();\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 75, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gdk_notify_startup_complete\")]\n\npub fn notify_startup_complete() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_notify_startup_complete();\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 76, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"g_resources_open_stream\")]\n\npub fn resources_open_stream(\n\n path: &str,\n\n lookup_flags: ResourceLookupFlags,\n\n) -> Result<InputStream, glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret =\n\n ffi::g_resources_open_stream(path.to_glib_none().0, lookup_flags.to_glib(), &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 77, "score": 184197.72068036586 }, { "content": "pub fn bus_get_future(\n\n bus_type: BusType,\n\n) -> Pin<Box_<dyn std::future::Future<Output = Result<DBusConnection, glib::Error>> + 'static>> {\n\n Box_::pin(crate::GioFuture::new(&(), move |_obj, send| {\n\n let cancellable = Cancellable::new();\n\n bus_get(bus_type, Some(&cancellable), move |res| {\n\n send.resolve(res);\n\n });\n\n\n\n cancellable\n\n }))\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 78, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"g_return_if_fail_warning\")]\n\npub fn return_if_fail_warning(\n\n log_domain: Option<&str>,\n\n pretty_function: &str,\n\n expression: Option<&str>,\n\n) {\n\n unsafe {\n\n ffi::g_return_if_fail_warning(\n\n log_domain.to_glib_none().0,\n\n pretty_function.to_glib_none().0,\n\n expression.to_glib_none().0,\n\n );\n\n }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 79, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"g_dbus_gvalue_to_gvariant\")]\n\npub fn dbus_gvalue_to_gvariant(\n\n gvalue: &glib::Value,\n\n type_: &glib::VariantTy,\n\n) -> Option<glib::Variant> {\n\n unsafe {\n\n from_glib_full(ffi::g_dbus_gvalue_to_gvariant(\n\n gvalue.to_glib_none().0,\n\n type_.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 80, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gdk_test_simulate_key\")]\n\npub fn test_simulate_key(\n\n window: &Window,\n\n x: i32,\n\n y: i32,\n\n keyval: u32,\n\n modifiers: ModifierType,\n\n key_pressrelease: 
EventType,\n\n) -> bool {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib(ffi::gdk_test_simulate_key(\n\n window.to_glib_none().0,\n\n x,\n\n y,\n\n keyval,\n\n modifiers.to_glib(),\n\n key_pressrelease.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 81, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gdk_test_simulate_button\")]\n\npub fn test_simulate_button(\n\n window: &Window,\n\n x: i32,\n\n y: i32,\n\n button: u32,\n\n modifiers: ModifierType,\n\n button_pressrelease: EventType,\n\n) -> bool {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib(ffi::gdk_test_simulate_button(\n\n window.to_glib_none().0,\n\n x,\n\n y,\n\n button,\n\n modifiers.to_glib(),\n\n button_pressrelease.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 82, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gtk_test_register_all_types\")]\n\npub fn test_register_all_types() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_test_register_all_types();\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 83, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gdk_x11_grab_server\")]\n\npub fn x11_grab_server() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_x11_grab_server();\n\n }\n\n}\n\n\n", "file_path": "gdkx11/src/auto/functions.rs", "rank": 84, "score": 184197.72068036586 }, { "content": "/// Create a `Future` that will resolve once the child process with the given pid exits\n\n///\n\n/// The `Future` will resolve to the pid of the child process and the exit code.\n\n///\n\n/// The `Future` must be spawned on an `Executor` backed by a `glib::MainContext`.\n\npub fn child_watch_future(\n\n pid: crate::Pid,\n\n) -> Pin<Box<dyn Future<Output = (crate::Pid, i32)> + Send + 'static>> {\n\n child_watch_future_with_priority(crate::PRIORITY_DEFAULT, pid)\n\n}\n\n\n", "file_path": "glib/src/source_futures.rs", "rank": 85, "score": 184197.72068036586 }, { "content": "#[cfg(any(feature = \"v2_50\", feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v2_50\")))]\n\n#[doc(alias = \"g_compute_hmac_for_bytes\")]\n\npub fn compute_hmac_for_bytes(\n\n digest_type: ChecksumType,\n\n key: &Bytes,\n\n data: &Bytes,\n\n) -> crate::GString {\n\n unsafe {\n\n from_glib_full(ffi::g_compute_hmac_for_bytes(\n\n digest_type.to_glib(),\n\n key.to_glib_none().0,\n\n data.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 86, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"gdk_x11_ungrab_server\")]\n\npub fn x11_ungrab_server() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_x11_ungrab_server();\n\n }\n\n}\n\n\n", "file_path": "gdkx11/src/auto/functions.rs", "rank": 87, "score": 184197.72068036586 }, { "content": "#[cfg_attr(feature = \"v3_16\", deprecated)]\n\n#[doc(alias = \"gdk_pre_parse_libgtk_only\")]\n\npub fn pre_parse_libgtk_only() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_pre_parse_libgtk_only();\n\n }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 88, "score": 184197.72068036586 }, { "content": "#[doc(alias = \"g_resources_get_info\")]\n\npub fn resources_get_info(\n\n path: &str,\n\n lookup_flags: ResourceLookupFlags,\n\n) -> Result<(usize, u32), glib::Error> {\n\n unsafe {\n\n let mut size = mem::MaybeUninit::uninit();\n\n let mut flags = mem::MaybeUninit::uninit();\n\n let mut error = ptr::null_mut();\n\n let _ = 
ffi::g_resources_get_info(\n\n path.to_glib_none().0,\n\n lookup_flags.to_glib(),\n\n size.as_mut_ptr(),\n\n flags.as_mut_ptr(),\n\n &mut error,\n\n );\n\n let size = size.assume_init();\n\n let flags = flags.assume_init();\n\n if error.is_null() {\n\n Ok((size, flags))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 89, "score": 184197.72068036586 }, { "content": "/// Create a `Future` that will resolve after the given number of milliseconds.\n\n///\n\n/// The `Future` must be spawned on an `Executor` backed by a `glib::MainContext`.\n\npub fn timeout_future_with_priority(\n\n priority: Priority,\n\n value: Duration,\n\n) -> Pin<Box<dyn Future<Output = ()> + Send + 'static>> {\n\n Box::pin(SourceFuture::new(move |send| {\n\n let mut send = Some(send);\n\n crate::timeout_source_new(value, None, priority, move || {\n\n let _ = send.take().unwrap().send(());\n\n Continue(false)\n\n })\n\n }))\n\n}\n\n\n", "file_path": "glib/src/source_futures.rs", "rank": 90, "score": 184197.72068036586 }, { "content": "/// Create a `Stream` that will provide a value every given number of milliseconds.\n\n///\n\n/// The `Future` must be spawned on an `Executor` backed by a `glib::MainContext`.\n\npub fn interval_stream_with_priority(\n\n priority: Priority,\n\n value: Duration,\n\n) -> Pin<Box<dyn Stream<Item = ()> + Send + 'static>> {\n\n Box::pin(SourceStream::new(move |send| {\n\n crate::timeout_source_new(value, None, priority, move || {\n\n if send.unbounded_send(()).is_err() {\n\n Continue(false)\n\n } else {\n\n Continue(true)\n\n }\n\n })\n\n }))\n\n}\n\n\n", "file_path": "glib/src/source_futures.rs", "rank": 91, "score": 184197.72068036586 }, { "content": "#[inline]\n\npub fn is_initialized() -> bool {\n\n skip_assert_initialized!();\n\n INITIALIZED.load(Ordering::Acquire)\n\n}\n\n\n\n/// Returns `true` if GDK has been initialized and this is the main thread.\n", "file_path": "gdkx11/src/rt.rs", "rank": 92, "score": 183012.3710388114 }, { "content": "#[inline]\n\npub fn is_initialized() -> bool {\n\n skip_assert_initialized!();\n\n INITIALIZED.load(Ordering::Acquire)\n\n}\n\n\n\n/// Returns `true` if GDK has been initialized and this is the main thread.\n", "file_path": "gdk/src/rt.rs", "rank": 93, "score": 183012.3710388114 }, { "content": "#[inline]\n\npub fn is_initialized() -> bool {\n\n skip_assert_initialized!();\n\n INITIALIZED.load(Ordering::Acquire)\n\n}\n\n\n\n/// Returns `true` if GTK has been initialized and this is the main thread.\n", "file_path": "gtk/src/rt.rs", "rank": 94, "score": 183012.3710388114 }, { "content": "/// This function generates parts needed to derive Downgrade and Upgrade\n\n/// implementations.\n\n///\n\n/// # Example\n\n///\n\n/// Let's assume following types are declared.\n\n///\n\n/// ```rust,ignore\n\n/// struct Unnamed(X, Y);\n\n///\n\n/// struct Named {\n\n/// x: X,\n\n/// y: Y,\n\n/// }\n\n///\n\n/// enum Choice {\n\n/// This(X, Y),\n\n/// That { x: X, y: Y },\n\n/// }\n\n/// ```\n\n///\n\n/// ## weak_fields\n\n///\n\n/// For the struct `Unnamed` and for a enum's variant `Choice::This`\n\n/// it will be `(<X as Downgrade>::Weak, <Y as Downgrade>::Weak)`.\n\n/// For the struct `Named` and for a enum's variant `Choice::That`\n\n/// it will be `{ x: <X as Downgrade>::Weak, y: <Y as Downgrade>::Weak, }`.\n\n///\n\n/// ## end_of_struct\n\n///\n\n/// It is a semicolon (`;`) for an `Unnamed` and is blank for the rest.\n\n///\n\n/// ## destruct\n\n///\n\n/// For the struct `Unnamed` and for a 
enum's variant `Choice::This`\n\n/// it will be `(ref _0, ref _1)`.\n\n/// For the struct `Named` and for a enum's variant `Choice::That`\n\n/// it will be `{ ref x, ref y }`.\n\n/// So it can be used as a destructuring pattern for values of both types,\n\n/// strong and weak.\n\n///\n\n/// ```rust,ignore\n\n/// let Unnamed (ref _0, ref _1) = <expression>;\n\n/// let Named { ref x, ref y } = <expression>;\n\n///\n\n/// match <expression> {\n\n/// Choise::This (ref _0, ref _1) => ... ,\n\n/// Choise::That { ref x, ref y } => ... ,\n\n/// }\n\n/// ```\n\n///\n\n/// # downgrade\n\n///\n\n/// ```rust,ignore\n\n/// (\n\n/// glib::clone::Downgrade::downgrade(_0),\n\n/// glib::clone::Downgrade::downgrade(_1),\n\n/// )\n\n///\n\n/// {\n\n/// x: glib::clone::Downgrade::downgrade(x),\n\n/// y: glib::clone::Downgrade::downgrade(y),\n\n/// }\n\n/// ```\n\n///\n\n/// # upgrade\n\n///\n\n/// ```rust,ignore\n\n/// (\n\n/// glib::clone::Upgrade::upgrade(_0)?,\n\n/// glib::clone::Upgrade::upgrade(_1)?,\n\n/// )\n\n///\n\n/// {\n\n/// x: glib::clone::Upgrade::upgrade(x)?,\n\n/// y: glib::clone::Upgrade::upgrade(y)?,\n\n/// }\n\n/// ```\n\npub fn derive_downgrade_fields(fields: syn::Fields) -> DowngradeStructParts {\n\n match fields {\n\n Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => {\n\n let fields: Vec<Type> = unnamed\n\n .into_pairs()\n\n .map(|pair| pair.into_value())\n\n .map(|field| field.ty)\n\n .collect();\n\n\n\n let weak_fields: Vec<_> = fields\n\n .iter()\n\n .map(|ty| {\n\n quote! {\n\n <#ty as glib::clone::Downgrade>::Weak\n\n }\n\n })\n\n .collect();\n\n\n\n let field_ident: Vec<Ident> =\n\n (0..fields.len()).map(|i| format_ident!(\"_{}\", i)).collect();\n", "file_path": "glib-macros/src/downgrade_derive/fields.rs", "rank": 95, "score": 182625.09771095516 }, { "content": "pub fn impl_composite_template(input: &syn::DeriveInput) -> TokenStream {\n\n let name = &input.ident;\n\n let crate_ident = crate_ident_new();\n\n\n\n let fields = match input.data {\n\n Data::Struct(ref s) => &s.fields,\n\n _ => abort_call_site!(\"derive(CompositeTemplate) only supports structs\"),\n\n };\n\n\n\n let template_children = gen_template_child_bindings(&fields);\n\n\n\n quote! 
{\n\n impl #crate_ident::subclass::widget::CompositeTemplate for #name {\n\n fn bind_template_children(klass: &mut Self::Class) {\n\n unsafe {\n\n #template_children\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "gtk3-macros/src/composite_template_derive.rs", "rank": 96, "score": 182603.95264949664 }, { "content": "#[doc(alias = \"g_dbus_address_get_stream\")]\n\npub fn dbus_address_get_stream<\n\n P: IsA<Cancellable>,\n\n Q: FnOnce(Result<(IOStream, glib::GString), glib::Error>) + Send + 'static,\n\n>(\n\n address: &str,\n\n cancellable: Option<&P>,\n\n callback: Q,\n\n) {\n\n let user_data: Box_<Q> = Box_::new(callback);\n\n unsafe extern \"C\" fn dbus_address_get_stream_trampoline<\n\n Q: FnOnce(Result<(IOStream, glib::GString), glib::Error>) + Send + 'static,\n\n >(\n\n _source_object: *mut glib::gobject_ffi::GObject,\n\n res: *mut crate::ffi::GAsyncResult,\n\n user_data: glib::ffi::gpointer,\n\n ) {\n\n let mut error = ptr::null_mut();\n\n let mut out_guid = ptr::null_mut();\n\n let ret = ffi::g_dbus_address_get_stream_finish(res, &mut out_guid, &mut error);\n\n let result = if error.is_null() {\n", "file_path": "gio/src/auto/functions.rs", "rank": 97, "score": 181122.52278811723 }, { "content": "#[doc(alias = \"g_keyfile_settings_backend_new\")]\n\npub fn keyfile_settings_backend_new(\n\n filename: &str,\n\n root_path: &str,\n\n root_group: Option<&str>,\n\n) -> Option<SettingsBackend> {\n\n unsafe {\n\n from_glib_full(ffi::g_keyfile_settings_backend_new(\n\n filename.to_glib_none().0,\n\n root_path.to_glib_none().0,\n\n root_group.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 98, "score": 181122.52278811723 }, { "content": "#[doc(alias = \"g_io_scheduler_cancel_all_jobs\")]\n\npub fn io_scheduler_cancel_all_jobs() {\n\n unsafe {\n\n ffi::g_io_scheduler_cancel_all_jobs();\n\n }\n\n}\n\n\n\n//#[doc(alias = \"g_io_scheduler_push_job\")]\n\n//pub fn io_scheduler_push_job<P: IsA<Cancellable>>(job_func: /*Unimplemented*/Fn(/*Ignored*/IOSchedulerJob, Option<&Cancellable>) -> bool, user_data: /*Unimplemented*/Option<Fundamental: Pointer>, io_priority: i32, cancellable: Option<&P>) {\n\n// unsafe { TODO: call ffi:g_io_scheduler_push_job() }\n\n//}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 99, "score": 181122.52278811723 } ]
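The source_futures snippets above (child_watch_future, timeout_future_with_priority, interval_stream_with_priority) all repeat the same caveat: the returned futures only make progress when driven by an executor backed by a glib::MainContext. The sketch below shows one way to drive such a future; it is hypothetical and assumes the glib crate version quoted in these snippets, where timeout_future_with_priority takes a Priority plus a std::time::Duration and PRIORITY_DEFAULT is exported at the crate root.

use std::time::Duration;

fn main() {
    // The default main context of the current thread acts as the executor.
    let ctx = glib::MainContext::default();

    // block_on drives the future to completion on this context.
    ctx.block_on(async {
        // Resolves after roughly 50 ms; the signature is taken from the snippet above.
        glib::timeout_future_with_priority(glib::PRIORITY_DEFAULT, Duration::from_millis(50)).await;
        println!("timeout elapsed");
    });
}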
Rust
iota-conversion/unit_converter.rs
zesterer/bee-p
375357bdfe8f670e4d26b62a7683d97f339f056f
use super::iota_units::IotaUnits; pub fn convert_units(amount: u64, from: IotaUnits, to: IotaUnits) -> u64 { let amount_in_source = amount * 10_u64.pow(u32::from(from.value())); convert_units_helper(amount_in_source, to) } fn convert_units_helper(amount: u64, to: IotaUnits) -> u64 { amount / 10_u64.pow(u32::from(to.value())) } pub fn convert_raw_iota_amount_to_display_text(amount: u64, extended: bool) -> String { let unit = find_optimal_iota_unit_to_display(amount); let amount_in_display_unit = convert_amount_to(amount, unit); create_amount_with_unit_display_text(amount_in_display_unit, unit, extended) } fn create_amount_with_unit_display_text(amount: f64, unit: IotaUnits, extended: bool) -> String { if unit == IotaUnits::Iota { format!("{} {}", amount, unit.unit()) } else if extended { format!("{:.15} {}", amount, unit.unit()) } else { format!("{:.2} {}", amount, unit.unit()) } } pub fn convert_amount_to(amount: u64, target: IotaUnits) -> f64 { amount as f64 / 10_u64.pow(u32::from(target.value())) as f64 } pub fn find_optimal_iota_unit_to_display(amount: u64) -> IotaUnits { let length = amount.to_string().len(); if length >= 1 && length <= 3 { IotaUnits::Iota } else if length > 3 && length <= 6 { IotaUnits::KiloIota } else if length > 6 && length <= 9 { IotaUnits::MegaIota } else if length > 9 && length <= 12 { IotaUnits::GigaIota } else if length > 12 && length <= 15 { IotaUnits::TeraIota } else if length > 15 && length <= 18 { IotaUnits::PetaIota } else { panic!("Invalid number") } } #[cfg(test)] mod tests { use super::*; #[test] fn test_convert_unit_i_to_ki() { assert_eq!(1, convert_units(1000, IotaUnits::Iota, IotaUnits::KiloIota)); } #[test] fn test_convert_unit_ki_to_mi() { assert_eq!( 1, convert_units(1000, IotaUnits::KiloIota, IotaUnits::MegaIota) ); } #[test] fn test_convert_unit_mi_to_gi() { assert_eq!( 1, convert_units(1000, IotaUnits::MegaIota, IotaUnits::GigaIota) ); } #[test] fn test_convert_unit_gi_to_ti() { assert_eq!( 1, convert_units(1000, IotaUnits::GigaIota, IotaUnits::TeraIota) ); } #[test] fn test_convert_unit_ti_to_pi() { assert_eq!( 1, convert_units(1000, IotaUnits::TeraIota, IotaUnits::PetaIota) ); } #[test] fn test_find_optimize_unit_to_display() { assert_eq!(find_optimal_iota_unit_to_display(1), IotaUnits::Iota); assert_eq!(find_optimal_iota_unit_to_display(1000), IotaUnits::KiloIota); assert_eq!( find_optimal_iota_unit_to_display(1000000), IotaUnits::MegaIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000), IotaUnits::GigaIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000000), IotaUnits::TeraIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000000000), IotaUnits::PetaIota ); } #[test] fn test_convert_raw_iota_amount_to_display_text() { assert_eq!(convert_raw_iota_amount_to_display_text(1, false), "1 i"); assert_eq!( convert_raw_iota_amount_to_display_text(1000, false), "1.00 Ki" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000, false), "1.00 Mi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000, false), "1.00 Gi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000000, false), "1.00 Ti" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000000000, false), "1.00 Pi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1900000000000002, true), "1.900000000000002 Pi" ); } }
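The listing above is the complete unit_converter module for this record. As a quick illustration of how its helpers compose — a hypothetical sketch, assuming the module is reachable as iota_conversion::unit_converter and that IotaUnits is exported from iota_conversion::iota_units, as the crate's lib.rs quoted among the context snippets below suggests — consider:

use iota_conversion::iota_units::IotaUnits;
use iota_conversion::unit_converter;

fn main() {
    // 1 Gi expressed as raw iotas: a 10-digit amount.
    let raw: u64 = 1_000_000_000;

    // Ten digits fall in the `length > 9 && length <= 12` bucket, so Gi is chosen.
    let unit = unit_converter::find_optimal_iota_unit_to_display(raw);
    assert_eq!(unit, IotaUnits::GigaIota);

    // Scaling into that unit: 1_000_000_000 / 10^9 = 1.0.
    let scaled = unit_converter::convert_amount_to(raw, unit);
    assert!((scaled - 1.0).abs() < f64::EPSILON);

    // Both steps in one call; non-extended output keeps two decimals, matching the tests above.
    let text = unit_converter::convert_raw_iota_amount_to_display_text(raw, false);
    assert_eq!(text, "1.00 Gi");
}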
use super::iota_units::IotaUnits; pub fn convert_units(amount: u64, from: IotaUnits, to: IotaUnits) -> u64 { let amount_in_source = amount * 10_u64.pow(u32::from(from.value())); convert_units_helper(amount_in_source, to) } fn convert_units_helper(amount: u64, to: IotaUnits) -> u64 { amount / 10_u64.pow(u32::from(to.value())) } pub fn convert_raw_iota_amount_to_display_text(amount: u64, extended: bool) -> String { let unit = find_optimal_iota_unit_to_display(amount); let amount_in_display_unit = convert_amount_to(amount, unit); create_amount_with_unit_display_text(amount_in_display_unit, unit, extended) } fn create_amount_with_unit_display_text(amount: f64, unit: IotaUnits, extended: bool) -> String { if unit == IotaUnits::Iota { format!("{} {}", amount, unit.unit()) } else if extended { format!("{:.15} {}", amount, unit.unit()) } else { format!("{:.2} {}", amount, unit.unit()) } } pub fn convert_amount_to(amount: u64, target: IotaUnits) -> f64 { amount as f64 / 10_u64.pow(u32::from(target.value())) as f64 } pub fn find_optimal_iota_unit_to_display(amount: u64) -> IotaUnits { let length = amount.to_string().len(); if length >= 1 && length <= 3 { IotaUnits::Iota } else if length > 3 && length <= 6 { IotaUnits::KiloIota } else if length > 6 && length <= 9 { IotaUnits::MegaIota } else if length > 9 && length <= 12 { IotaUnits::GigaIota } else if length > 12 && length <= 15 { IotaUnits::TeraIota } else if length > 15 && length <= 18 { IotaUnits::PetaIota } else { panic!("Invalid number") } } #[cfg(test)] mod tests { use super::*; #[test] fn test_convert_unit_i_to_ki() { assert_eq!(1, convert_units(1000, IotaUnits::Iota, IotaUnits::KiloIota)); } #[test] fn test_convert_unit_ki_to_mi() { assert_eq!( 1, convert_units(1000, IotaUnits::KiloIota, IotaUnits::MegaIota) ); } #[test] fn test_convert_unit_mi_to_gi() { assert_eq!( 1, convert_units(1000, IotaUnits::MegaIota, IotaUnits::GigaIota) ); } #[test] fn test_convert_unit_gi_to_ti() { assert_eq!( 1, convert_units(1000, IotaUnits::GigaIota, IotaUnits::TeraIota) ); } #[test] fn test_convert_unit_ti_to_pi() { assert_eq!( 1, convert_units(1000, IotaUnits::TeraIota, IotaUnits::PetaIota) ); } #[tes
_display_text(1000000000000, false), "1.00 Ti" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000000000, false), "1.00 Pi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1900000000000002, true), "1.900000000000002 Pi" ); } }
t] fn test_find_optimize_unit_to_display() { assert_eq!(find_optimal_iota_unit_to_display(1), IotaUnits::Iota); assert_eq!(find_optimal_iota_unit_to_display(1000), IotaUnits::KiloIota); assert_eq!( find_optimal_iota_unit_to_display(1000000), IotaUnits::MegaIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000), IotaUnits::GigaIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000000), IotaUnits::TeraIota ); assert_eq!( find_optimal_iota_unit_to_display(1000000000000000), IotaUnits::PetaIota ); } #[test] fn test_convert_raw_iota_amount_to_display_text() { assert_eq!(convert_raw_iota_amount_to_display_text(1, false), "1 i"); assert_eq!( convert_raw_iota_amount_to_display_text(1000, false), "1.00 Ki" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000, false), "1.00 Mi" ); assert_eq!( convert_raw_iota_amount_to_display_text(1000000000, false), "1.00 Gi" ); assert_eq!( convert_raw_iota_amount_to
random
[ { "content": "/// Converts a tryte-encoded string into a UTF-8 string containing ascii characters\n\npub fn to_string(input_trytes: &str) -> Result<String> {\n\n ensure!(\n\n input_trytes.len() % 2 == 0,\n\n iota_constants::INVALID_TRYTES_INPUT_ERROR\n\n );\n\n let mut tmp = String::new();\n\n let chars: Vec<char> = input_trytes.chars().collect();\n\n for letters in chars.chunks(2) {\n\n let first = match iota_constants::TRYTE_ALPHABET\n\n .iter()\n\n .position(|&x| x == letters[0])\n\n {\n\n Some(x) => x,\n\n None => {\n\n return Err(Error::from(TryteConverterError::StringNotAscii {\n\n string: input_trytes.to_string(),\n\n }))\n\n }\n\n };\n\n let second = match iota_constants::TRYTE_ALPHABET\n", "file_path": "iota-conversion/trytes_converter.rs", "rank": 6, "score": 143372.81120222734 }, { "content": "/// Converts a UTF-8 string containing ascii into a tryte-encoded string\n\npub fn to_trytes(input: &str) -> Result<String> {\n\n let mut trytes = String::new();\n\n let mut tmp_ascii = Vec::new();\n\n for c in input.chars() {\n\n if let Some(ascii) = CHAR_TO_ASCII_MAP.get(&c) {\n\n tmp_ascii.push(ascii);\n\n }\n\n }\n\n for byte in tmp_ascii {\n\n let mut ascii = *byte;\n\n if ascii > 255 {\n\n ascii = 32;\n\n }\n\n let first = ascii % 27;\n\n let second = (ascii - first) / 27;\n\n trytes.push(iota_constants::TRYTE_ALPHABET[first]);\n\n trytes.push(iota_constants::TRYTE_ALPHABET[second]);\n\n }\n\n Ok(trytes)\n\n}\n\n\n", "file_path": "iota-conversion/trytes_converter.rs", "rank": 7, "score": 136054.3308007896 }, { "content": "// TODO: remove\n\n// TODO: documentation\n\npub fn slice_eq(xs: &[i8], ys: &[i8]) -> bool {\n\n for (x, y) in xs.iter().zip(ys.iter()) {\n\n if x != y {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n", "file_path": "bee-signing/src/lib.rs", "rank": 8, "score": 128285.76143660271 }, { "content": "/// Generate a digest\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `normalized_bundle_fragment` - Normalized bundle fragment to digest\n\n/// * `signature_fragment` - Signature fragment to use\n\npub fn digest(\n\n mode: HashMode,\n\n normalized_bundle_fragment: &[i8],\n\n signature_fragment: &[i8],\n\n) -> Result<[i8; HASH_LENGTH]> {\n\n ensure!(\n\n normalized_bundle_fragment.len() == HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS,\n\n \"Invalid normalized bundle fragment length: {}\",\n\n normalized_bundle_fragment.len()\n\n );\n\n ensure!(\n\n signature_fragment.len() == FRAGMENT_LENGTH,\n\n \"Invalid signature fragment length: {}\",\n\n signature_fragment.len()\n\n );\n\n let mut digest = [0; HASH_LENGTH];\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode).unwrap();\n\n digest_in_place(\n", "file_path": "iota-crypto/iss.rs", "rank": 9, "score": 107569.2938494212 }, { "content": "/// Generate a signature fragment\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `normalized_bundle_fragment` - Normalized bundle fragment to sign\n\n/// * `key_fragment` - Key fragment to use\n\npub fn signature_fragment(\n\n mode: HashMode,\n\n normalized_bundle_fragment: &[i8],\n\n key_fragment: &[i8],\n\n) -> Result<Vec<i8>> {\n\n ensure!(\n\n normalized_bundle_fragment.len() == NORMALIZED_FRAGMENT_LENGTH,\n\n \"Invalid normalized bundle fragment length: {}\",\n\n normalized_bundle_fragment.len()\n\n );\n\n ensure!(\n\n key_fragment.len() == FRAGMENT_LENGTH,\n\n \"Invalid key fragment length: {}\",\n\n key_fragment.len()\n\n );\n\n let mut signature_fragment = key_fragment.to_vec();\n\n match mode {\n\n 
HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode).unwrap();\n\n signature_fragment_helper(\n", "file_path": "iota-crypto/iss.rs", "rank": 10, "score": 105402.4156861218 }, { "content": "/// Hash digest in place\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `normalized_bundle_fragment` - Normalized bundle fragment to digest\n\n/// * `signature_fragment` - Signature fragment to use\n\n/// * `digest` - Destination slice to modify in place\n\npub fn digest_in_place(\n\n hash: &mut impl Sponge,\n\n normalized_bundle_fragment: &[i8],\n\n signature_fragment: &[i8],\n\n digest: &mut [i8],\n\n) -> Result<()> {\n\n let mut buffer = signature_fragment[0..FRAGMENT_LENGTH].to_vec();\n\n for (j, trit) in normalized_bundle_fragment\n\n .iter()\n\n .enumerate()\n\n .take(NUMBER_OF_FRAGMENT_CHUNKS)\n\n {\n\n for _ in 0..*trit - iota_constants::MIN_TRYTE_VALUE {\n\n hash.reset();\n\n let offset = j * HASH_LENGTH;\n\n hash.absorb(&buffer[offset..offset + HASH_LENGTH])?;\n\n hash.squeeze(&mut buffer[offset..offset + HASH_LENGTH])?;\n\n }\n\n }\n\n hash.reset();\n\n hash.absorb(&buffer)?;\n\n hash.squeeze(digest)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 11, "score": 105402.2539735102 }, { "content": "/// Generate normalize bundle\n\n///\n\n/// * `bundle` - Bundle to normalize\n\npub fn normalized_bundle(bundle: &[i8]) -> Result<[i8; HASH_LENGTH / TRYTE_WIDTH]> {\n\n ensure!(\n\n bundle.len() == HASH_LENGTH,\n\n \"Invalid bundle length: {}\",\n\n bundle.len()\n\n );\n\n let mut normalized_bundle = [0; HASH_LENGTH / TRYTE_WIDTH];\n\n normalized_bundle_in_place(bundle, &mut normalized_bundle);\n\n Ok(normalized_bundle)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 12, "score": 105190.5328489554 }, { "content": "/// Retrieve the merkle root\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `hash` - Hash to absorb\n\n/// * `trits` - Trits to absorb\n\n/// * `offset` - Trit offset to start at\n\n/// * `index` - Used to alternate the order trits and hash are absorbed\n\n/// * `size` - Number of hash iterations\n\npub fn get_merkle_root(\n\n mode: HashMode,\n\n hash: &[i8],\n\n trits: &mut [i8],\n\n offset: usize,\n\n index: usize,\n\n size: usize,\n\n) -> Result<[i8; HASH_LENGTH]> {\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode).unwrap();\n\n get_merkle_root_helper(&mut curl, hash, trits, offset, index, size)\n\n }\n\n HashMode::Kerl => {\n\n let mut kerl = Kerl::default();\n\n get_merkle_root_helper(&mut kerl, hash, trits, offset, index, size)\n\n }\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 13, "score": 103373.58401898005 }, { "content": "/// Generate address\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `digests` - Digests used to generate address\n\npub fn address(mode: HashMode, digests: &mut [i8]) -> Result<[i8; HASH_LENGTH]> {\n\n ensure!(\n\n !digests.is_empty() && digests.len() % HASH_LENGTH == 0,\n\n \"Invalid key length: {}\",\n\n digests.len()\n\n );\n\n let mut address = [0; HASH_LENGTH];\n\n hash_with_mode(mode, digests, &mut address)?;\n\n Ok(address)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 14, "score": 100571.0272613243 }, { "content": "fn is_null(base: &[u32]) -> bool {\n\n for b in base.iter() {\n\n if *b != 0 {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 15, "score": 100277.94588325758 }, { "content": "fn full_add(ia: u32, ib: u32, carry: bool) -> (u32, bool) {\n\n 
let a = u64::from(ia);\n\n let b = u64::from(ib);\n\n\n\n let mut v = a + b;\n\n let mut l = v >> 32;\n\n let mut r = v & 0xFFFF_FFFF;\n\n\n\n let carry1 = l != 0;\n\n\n\n if carry {\n\n v = r + 1;\n\n }\n\n l = (v >> 32) & 0xFFFF_FFFF;\n\n r = v & 0xFFFF_FFFF;\n\n let carry2 = l != 0;\n\n (r as u32, carry1 || carry2)\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "iota-crypto/kerl.rs", "rank": 16, "score": 97704.06801068 }, { "content": "/// Create a subseed\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `seed` - The generation seed\n\n/// * `index` - How many address permutations to iterate through\n\npub fn subseed(mode: HashMode, seed: &[i8], index: usize) -> Result<[i8; HASH_LENGTH]> {\n\n let mut subseed_preimage = seed.to_vec();\n\n for _ in 0..index {\n\n for trit in &mut subseed_preimage {\n\n *trit += 1;\n\n if *trit > iota_constants::MAX_TRIT_VALUE {\n\n *trit = iota_constants::MIN_TRIT_VALUE;\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n let mut subseed = [0; HASH_LENGTH];\n\n hash_with_mode(mode, &subseed_preimage, &mut subseed)?;\n\n Ok(subseed)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 17, "score": 97566.16144126047 }, { "content": "fn trits_with_length(trits: &[Trit], length: usize) -> Vec<Trit> {\n\n if trits.len() < length {\n\n let mut result = vec![0; length];\n\n result[..trits.len()].copy_from_slice(&trits);\n\n result\n\n } else {\n\n trits[..length].to_vec()\n\n }\n\n}\n", "file_path": "iota-conversion/trinary.rs", "rank": 18, "score": 97001.09810890247 }, { "content": "/// Key a subseed\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `subseed` - Subseed used for key generation\n\n/// * `number_of_fragments` - Number of fragments to generate\n\npub fn key(mode: HashMode, subseed: &mut [i8], number_of_fragments: usize) -> Result<Vec<i8>> {\n\n ensure!(\n\n subseed.len() == HASH_LENGTH,\n\n \"Invalid subseed length: {}\",\n\n subseed.len()\n\n );\n\n\n\n let mut key = vec![0; FRAGMENT_LENGTH * number_of_fragments];\n\n hash_with_mode(mode, subseed, &mut key)?;\n\n\n\n Ok(key)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 19, "score": 94798.90271328672 }, { "content": "/// Converts a slice of trits into a numeric value\n\npub fn value(trits: &[i8]) -> i8 {\n\n trits.iter().rev().fold(0, |acc, trit| acc * 3 + *trit)\n\n}\n\n\n", "file_path": "iota-conversion/lib.rs", "rank": 20, "score": 93553.1179333853 }, { "content": "/// Converts a slice of trits into a numeric value in i64\n\npub fn long_value(trits: &[i8]) -> i64 {\n\n trits\n\n .iter()\n\n .rev()\n\n .fold(0, |acc, trit| acc * 3 + i64::from(*trit))\n\n}\n", "file_path": "iota-conversion/lib.rs", "rank": 21, "score": 91757.07764011352 }, { "content": "/// Normalize a bundle in place\n\n///\n\n/// * `bundle` - Bundle to normalize\n\n/// * `normalized_bundle` - Destination slice to modify in place\n\npub fn normalized_bundle_in_place(bundle: &[i8], normalized_bundle: &mut [i8]) {\n\n for i in 0..NUMBER_OF_SECURITY_LEVELS {\n\n let mut sum = 0;\n\n let offset = HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS;\n\n for j in i * offset..(i + 1) * offset {\n\n normalized_bundle[j] = bundle[j * TRYTE_WIDTH]\n\n + bundle[j * TRYTE_WIDTH + 1] * 3\n\n + bundle[j * TRYTE_WIDTH + 2] * 9;\n\n sum += normalized_bundle[j];\n\n }\n\n if sum > 0 {\n\n while sum > 0 {\n\n for trit in normalized_bundle\n\n .iter_mut()\n\n .skip(i * offset)\n\n .take((i + 1) * offset)\n\n {\n\n if *trit > iota_constants::MIN_TRYTE_VALUE {\n\n *trit -= 1;\n\n break;\n", "file_path": "iota-crypto/iss.rs", 
"rank": 22, "score": 81768.49239163936 }, { "content": "/// Generate digests\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `key` - kKey slice used to generate digests\n\npub fn digests(mode: HashMode, key: &[i8]) -> Result<Vec<i8>> {\n\n ensure!(\n\n !key.is_empty() && key.len() % FRAGMENT_LENGTH == 0,\n\n \"Invalid key length: {}\",\n\n key.len()\n\n );\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode)?;\n\n Ok(digests_helper(&mut curl, key)?)\n\n }\n\n HashMode::Kerl => {\n\n let mut kerl = Kerl::default();\n\n Ok(digests_helper(&mut kerl, key)?)\n\n }\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 23, "score": 78798.91716731488 }, { "content": "/// Allows you to hash `trits` into `out` using the `mode` of your choosing\n\n///```rust\n\n/// use iota_crypto::{self, HashMode};\n\n///\n\n/// let input = [0; 243];\n\n/// let mut out = [0; 243];\n\n/// iota_crypto::hash_with_mode(HashMode::Kerl, &input, &mut out);\n\n///```\n\npub fn hash_with_mode(mode: HashMode, trits: &[i8], out: &mut [i8]) -> Result<()> {\n\n ensure!(\n\n out.len() % 243 == 0,\n\n \"Output slice length isn't a multiple of 243: {}\",\n\n out.len()\n\n );\n\n match mode {\n\n HashMode::CURLP27 | HashMode::CURLP81 => {\n\n let mut curl = Curl::new(mode).unwrap();\n\n curl.absorb(trits)?;\n\n curl.squeeze(out)?;\n\n }\n\n HashMode::Kerl => {\n\n let mut kerl = Kerl::default();\n\n kerl.absorb(trits)?;\n\n kerl.squeeze(out)?;\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "iota-crypto/lib.rs", "rank": 24, "score": 76175.14782477063 }, { "content": "type WithCarry = bool;\n\n\n\npub(crate) struct Curl64State {\n\n hi: [u64; STATE_LEN],\n\n lo: [u64; STATE_LEN],\n\n}\n\n\n\nimpl Curl64State {\n\n pub fn new(init_value: u64) -> Self {\n\n Self {\n\n hi: [init_value; STATE_LEN],\n\n lo: [init_value; STATE_LEN],\n\n }\n\n }\n\n\n\n pub fn set(&mut self, index: usize, hi: u64, lo: u64) {\n\n self.hi[index] = hi;\n\n self.lo[index] = lo;\n\n }\n\n\n", "file_path": "bee-pow/src/curl64.rs", "rank": 25, "score": 57489.24093840878 }, { "content": "type Exhausted = bool;\n\n\n\n#[derive(Clone, Copy, Eq, PartialEq)]\n\npub enum PearlDiverState {\n\n Created,\n\n Searching,\n\n Cancelled,\n\n Completed(Option<NonceTrits>),\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct PearlDiver {\n\n cores: Cores,\n\n difficulty: Difficulty,\n\n state: Arc<RwLock<PearlDiverState>>,\n\n}\n\n\n\nimpl PearlDiver {\n\n pub fn new(cores: Cores, difficulty: Difficulty) -> Self {\n\n Self {\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 26, "score": 57489.24093840878 }, { "content": "/// Trait used to enable conversion to trinary types\n\npub trait Trinary {\n\n /// Provides the trit vector representation of the value\n\n fn trits(&self) -> Vec<Trit>;\n\n /// Provides the trit vector representation of the value with given length, padding with `0` if required\n\n fn trits_with_length(&self, length: usize) -> Vec<Trit>;\n\n /// Provides the tryte string representation of the value\n\n fn trytes(&self) -> Result<Trytes>;\n\n}\n\n\n\n/// Type alias for `i8`\n\npub type Trit = i8;\n\n/// Type alias for `String`\n\npub type Trytes = String;\n\n\n\nimpl Trinary for i64 {\n\n fn trits(&self) -> Vec<Trit> {\n\n let mut trits = Vec::new();\n\n let mut abs = self.abs();\n\n while abs > 0 {\n\n let mut remainder = (abs % i64::from(TRINARY_RADIX as i8)) as i8;\n", "file_path": "iota-conversion/trinary.rs", "rank": 27, "score": 55671.82792278037 }, { "content": "/// The sponge trait specifys the main 
functionality of all\n\n/// sponges used throughout IOTA\n\npub trait Sponge\n\nwhere\n\n Self: Default + Clone + Send + 'static,\n\n{\n\n /// Absorb trits into the sponge\n\n ///\n\n /// * `trits` - A slice of trits whose length is a multiple of 243\n\n fn absorb(&mut self, trits: &[i8]) -> Result<()>;\n\n /// Squeeze trits out of the sponge and copy them into `out`\n\n ///\n\n /// * `out` - A slice of trits whose length is a multiple of 243\n\n fn squeeze(&mut self, out: &mut [i8]) -> Result<()>;\n\n /// Reset the sponge to initial state\n\n fn reset(&mut self);\n\n}\n\n\n", "file_path": "iota-crypto/lib.rs", "rank": 28, "score": 55671.652484544495 }, { "content": "// TODO: documentation\n\npub trait Signature {\n\n // TODO: documentation\n\n fn size(&self) -> usize;\n\n\n\n // TODO: documentation\n\n fn from_bytes(bytes: &[i8]) -> Self;\n\n\n\n // TODO: documentation\n\n fn to_bytes(&self) -> &[i8];\n\n}\n\n\n", "file_path": "bee-signing/src/lib.rs", "rank": 29, "score": 54578.600809215226 }, { "content": "pub trait IsTryte {\n\n fn is_tryte(&self) -> bool;\n\n}\n\n\n\nimpl IsTryte for char {\n\n fn is_tryte(&self) -> bool {\n\n *self == '9' || (*self >= 'A' && *self <= 'Z')\n\n }\n\n}\n", "file_path": "bee-ternary/src/tryte.rs", "rank": 30, "score": 54578.600809215226 }, { "content": "// TODO: documentation\n\npub trait RecoverableSignature {\n\n // TODO: documentation\n\n type PublicKey;\n\n\n\n // TODO: documentation\n\n fn recover_public_key(&self, message: &[i8]) -> Self::PublicKey;\n\n}\n\n\n", "file_path": "bee-signing/src/lib.rs", "rank": 31, "score": 53557.19516980993 }, { "content": "// TODO: documentation\n\npub trait PublicKey {\n\n // TODO: documentation\n\n type Signature;\n\n\n\n // TODO: documentation\n\n fn verify(&self, message: &[i8], signature: &Self::Signature) -> bool;\n\n\n\n // TODO: documentation\n\n fn from_bytes(bytes: &[i8]) -> Self;\n\n\n\n // TODO: documentation\n\n fn to_bytes(&self) -> &[i8];\n\n}\n\n\n", "file_path": "bee-signing/src/lib.rs", "rank": 32, "score": 53557.19516980993 }, { "content": "pub trait RawEncoding {\n\n /// Get the number of trits in this buffer\n\n fn len(&self) -> usize;\n\n\n\n /// Get the trit at the given index\n\n unsafe fn get_unchecked(&self, index: usize) -> Trit;\n\n\n\n /// Set the trit at the given index\n\n unsafe fn set_unchecked(&mut self, index: usize, trit: Trit);\n\n\n\n /// Get a slice of this slice\n\n unsafe fn slice_unchecked(&self, range: Range<usize>) -> &Self;\n\n\n\n /// Get a mutable slice of this slice\n\n unsafe fn slice_unchecked_mut(&mut self, range: Range<usize>) -> &mut Self;\n\n}\n\n\n", "file_path": "bee-ternary/src/raw.rs", "rank": 33, "score": 53557.19516980993 }, { "content": "// TODO: documentation\n\npub trait PrivateKey {\n\n /// The type of the matching public key\n\n type PublicKey;\n\n /// The type of the generated signatures\n\n type Signature;\n\n\n\n /// Returns the public counterpart of a private key\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// # use iota_crypto::Kerl;\n\n /// # use signing::PrivateKeyGenerator;\n\n /// # use signing::seed::Seed;\n\n /// # use signing::wots::WotsPrivateKeyGeneratorBuilder;\n\n /// use signing::PrivateKey;\n\n ///\n\n /// # let seed = Seed::new();\n\n /// # let private_key_generator = WotsPrivateKeyGeneratorBuilder::<Kerl>::default().security_level(2).build().unwrap();\n\n /// # let private_key = private_key_generator.generate(&seed, 0);\n", "file_path": "bee-signing/src/lib.rs", "rank": 34, "score": 53557.19516980993 }, { "content": "fn main() 
{\n\n env::set_var(ENV_VAR, DEBUG);\n\n\n\n let mut prototype = Prototype::from_config(CONFIG);\n\n\n\n assert!(prototype.run().is_ok());\n\n\n\n env::remove_var(ENV_VAR);\n\n}\n", "file_path": "bee-main/src/main.rs", "rank": 35, "score": 53530.299243752015 }, { "content": "pub trait RawEncodingBuf {\n\n type Slice: RawEncoding + ?Sized;\n\n\n\n /// Create a new empty buffer\n\n fn new() -> Self where Self: Sized;\n\n\n\n /// Create a new buffer containing the given trits\n\n fn from_trits<T: Into<Trit> + Clone>(trits: &[T]) -> Self where Self: Sized {\n\n let mut this = Self::new();\n\n for trit in trits {\n\n this.push(trit.clone().into());\n\n }\n\n this\n\n }\n\n\n\n /// Push a trit to the back of this buffer\n\n fn push(&mut self, trit: Trit);\n\n\n\n /// View the trits in this buffer as a slice\n\n fn as_slice(&self) -> &Self::Slice;\n", "file_path": "bee-ternary/src/raw.rs", "rank": 36, "score": 52597.69414316265 }, { "content": "// TODO: documentation\n\npub trait PrivateKeyGenerator {\n\n /// The type of the generated private keys\n\n type PrivateKey;\n\n\n\n /// Deterministically generates and returns a private key\n\n ///\n\n /// # Parameters\n\n ///\n\n /// * `seed` A seed to deterministically derive a private key from\n\n /// * `index` An index to deterministically derive a private key from\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use iota_crypto::Kerl;\n\n /// use signing::PrivateKeyGenerator;\n\n /// use signing::seed::Seed;\n\n /// use signing::wots::WotsPrivateKeyGeneratorBuilder;\n\n ///\n\n /// let seed = Seed::new();\n\n /// let private_key_generator = WotsPrivateKeyGeneratorBuilder::<Kerl>::default().security_level(2).build().unwrap();\n\n /// let private_key = private_key_generator.generate(&seed, 0);\n\n /// ```\n\n fn generate(&self, seed: &Seed, index: u64) -> Self::PrivateKey;\n\n}\n\n\n", "file_path": "bee-signing/src/lib.rs", "rank": 37, "score": 52597.69414316265 }, { "content": "fn signature_fragment_helper(\n\n hash: &mut impl Sponge,\n\n normalized_bundle_fragment: &[i8],\n\n out: &mut [i8],\n\n) -> Result<()> {\n\n for (j, trit) in normalized_bundle_fragment\n\n .iter()\n\n .enumerate()\n\n .take(NUMBER_OF_FRAGMENT_CHUNKS)\n\n {\n\n for _ in 0..iota_constants::MAX_TRYTE_VALUE - *trit {\n\n hash.reset();\n\n let offset = j * HASH_LENGTH;\n\n hash.absorb(&out[offset..offset + HASH_LENGTH])?;\n\n hash.squeeze(&mut out[offset..offset + HASH_LENGTH])?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 38, "score": 52452.955963105414 }, { "content": "fn get_merkle_root_helper(\n\n curl: &mut impl Sponge,\n\n hash: &[i8],\n\n trits: &[i8],\n\n offset: usize,\n\n index: usize,\n\n size: usize,\n\n) -> Result<[i8; HASH_LENGTH]> {\n\n let empty = [0; HASH_LENGTH];\n\n let mut index = index;\n\n let mut tmp = [0; HASH_LENGTH];\n\n for i in 0..size {\n\n curl.reset();\n\n if (index & 1) == 0 {\n\n curl.absorb(hash)?;\n\n let offset = offset + i * HASH_LENGTH;\n\n curl.absorb(&trits[offset..offset + HASH_LENGTH])?;\n\n } else {\n\n let offset = offset + i * HASH_LENGTH;\n\n curl.absorb(&trits[offset..offset + HASH_LENGTH])?;\n", "file_path": "iota-crypto/iss.rs", "rank": 39, "score": 51442.948029185456 }, { "content": "fn bigint_not(base: &mut [u32]) {\n\n for i in base.iter_mut() {\n\n *i = !*i;\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 40, "score": 47227.625312910415 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let mut rng = thread_rng();\n\n let mut trits = [0; HASH_TRINARY_SIZE];\n\n 
for trit in trits.iter_mut() {\n\n *trit = rng.gen_range(-1, 2);\n\n }\n\n c.bench_function(\"Kerl on 243 trits\", move |b| b.iter(|| basic_kerl(trits)));\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "iota-crypto/benches/kerl_benchmark.rs", "rank": 41, "score": 45385.849606459466 }, { "content": "fn trytes(trits: &[Trit]) -> Result<Trytes> {\n\n ensure!(trits.len() % 3 == 0, \"Invalid trit length.\");\n\n\n\n trits\n\n .chunks(iota_constants::TRITS_PER_TRYTE)\n\n .map(trits_to_char)\n\n .collect()\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 42, "score": 45240.38414387329 }, { "content": "fn outer_increment(prestate: &mut Curl64State) {\n\n for i in OUTER_INCR_START..INNER_INCR_START {\n\n let with_carry = prestate.bit_add(i);\n\n if !with_carry {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 43, "score": 44543.89648938227 }, { "content": "fn char_to_trits(tryte: char) -> &'static [Trit] {\n\n match iota_constants::TRYTE_ALPHABET\n\n .iter()\n\n .position(|&x| x == tryte)\n\n {\n\n Some(p) => &TRYTE_TO_TRITS_MAPPINGS[p],\n\n None => &TRYTE_TO_TRITS_MAPPINGS[0],\n\n }\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 44, "score": 44347.402542458156 }, { "content": "fn trits_to_char(trits: &[Trit]) -> Result<char> {\n\n ensure!(\n\n trits.len() <= iota_constants::TRITS_PER_TRYTE,\n\n \"Provided trit slice is too long: {:?}\",\n\n trits\n\n );\n\n Ok(\n\n match TRYTE_TO_TRITS_MAPPINGS.iter().position(|&x| x == trits) {\n\n Some(p) => iota_constants::TRYTE_ALPHABET[p],\n\n None => '-',\n\n },\n\n )\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 45, "score": 44347.402542458156 }, { "content": "fn make_prestate(input: &InputTrits) -> Curl64State {\n\n let mut prestate = Curl64State::new(BITS_1);\n\n let mut tmpstate = Curl64State::new(BITS_1);\n\n\n\n let mut offset = 0;\n\n\n\n for _ in 0..NUM_FULL_CHUNKS_FOR_PRESTATE {\n\n for i in 0..HASH_LEN {\n\n match (*input)[offset] {\n\n 1 => prestate.set(i, BITS_1, BITS_0),\n\n -1 => prestate.set(i, BITS_0, BITS_1),\n\n _ => (),\n\n }\n\n offset += 1;\n\n }\n\n\n\n unsafe {\n\n transform(&mut prestate, &mut tmpstate);\n\n }\n\n }\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 46, "score": 43748.71940482406 }, { "content": "fn xorin(dst: &mut [u8], src: &[u8]) {\n\n assert!(dst.len() <= src.len());\n\n let len = dst.len();\n\n let mut dst_ptr = dst.as_mut_ptr();\n\n let mut src_ptr = src.as_ptr();\n\n for _ in 0..len {\n\n unsafe {\n\n *dst_ptr ^= *src_ptr;\n\n src_ptr = src_ptr.offset(1);\n\n dst_ptr = dst_ptr.offset(1);\n\n }\n\n }\n\n}\n\n\n\n/// Total number of lanes.\n\nconst PLEN: usize = 25;\n\n\n\n/// This structure should be used to create keccak/sha3 hash.\n\n#[derive(Clone, Copy)]\n\npub(crate) struct Keccak {\n", "file_path": "iota-crypto/keccak.rs", "rank": 47, "score": 43436.14831419503 }, { "content": "fn basic_kerl(trits: [i8; HASH_TRINARY_SIZE]) {\n\n let mut kerl = Kerl::default();\n\n kerl.absorb(&trits).unwrap();\n\n let mut bytes = vec![0; HASH_TRINARY_SIZE];\n\n kerl.squeeze(&mut bytes).unwrap();\n\n}\n\n\n", "file_path": "iota-crypto/benches/kerl_benchmark.rs", "rank": 48, "score": 42996.525633704616 }, { "content": "fn inner_increment(prestate: &mut Curl64State) -> Exhausted {\n\n // we have not exhausted the search space until each add\n\n // operation produces a carry\n\n for i in INNER_INCR_START..HASH_LEN {\n\n if {\n\n let with_carry = prestate.bit_add(i);\n\n 
!with_carry\n\n } {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 49, "score": 42710.27234082275 }, { "content": "/// Increments a trit slice in place, only considering trits until index `size`\n\nfn increment(trit_array: &mut [Trit], size: usize) {\n\n for trit in trit_array.iter_mut().take(size) {\n\n *trit += 1;\n\n if *trit > iota_constants::MAX_TRIT_VALUE {\n\n *trit = iota_constants::MIN_TRIT_VALUE;\n\n } else {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "iota-conversion/trinary.rs", "rank": 50, "score": 42594.19519711783 }, { "content": "fn bigint_add(base: &mut [u32], rh: &[u32]) {\n\n let mut carry = false;\n\n\n\n for (a, b) in base.iter_mut().zip(rh.iter()) {\n\n let (v, c) = full_add(*a, *b, carry);\n\n *a = v;\n\n carry = c;\n\n }\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 51, "score": 42594.19519711783 }, { "content": "fn bigint_cmp(lh: &[u32], rh: &[u32]) -> i8 {\n\n for (a, b) in lh.iter().rev().zip(rh.iter().rev()) {\n\n if a < b {\n\n return -1;\n\n } else if a > b {\n\n return 1;\n\n }\n\n }\n\n 0\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 52, "score": 42594.19519711783 }, { "content": "fn bigint_sub(base: &mut [u32], rh: &[u32]) {\n\n let mut noborrow = true;\n\n for (a, b) in base.iter_mut().zip(rh) {\n\n let (v, c) = full_add(*a, !*b, noborrow);\n\n *a = v;\n\n noborrow = c;\n\n }\n\n assert!(noborrow);\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 53, "score": 42594.19519711783 }, { "content": "/// Converts trits to bytes\n\nfn trits_to_bytes(trits: &[i8], bytes: &mut [u8]) -> Result<()> {\n\n ensure!(\n\n trits.len() == HASH_LENGTH,\n\n \"Trit slice should have length {}, but had length: {}\",\n\n HASH_LENGTH,\n\n trits.len()\n\n );\n\n ensure!(\n\n bytes.len() == BYTE_LENGTH,\n\n \"Byte slice should have length {}, but had length: {}\",\n\n BYTE_LENGTH,\n\n bytes.len()\n\n );\n\n\n\n let mut base = [0; INT_LENGTH];\n\n\n\n let mut size = 1;\n\n let mut all_minus_1 = true;\n\n\n\n for t in trits[0..HASH_LENGTH - 1].iter() {\n", "file_path": "iota-crypto/kerl.rs", "rank": 54, "score": 40992.93669397046 }, { "content": "/// Extracts the nonce from the final Curl state and the given slot index.\n\nfn extract_nonce(state: &Curl64State, slot: usize) -> NonceTrits {\n\n let mut nonce = [0; NONCE_LEN];\n\n let mut offset = 0;\n\n let slotmask = 1 << slot;\n\n\n\n for i in CHUNK_NONCE_START..HASH_LEN {\n\n let (hi, lo) = state.get(i);\n\n\n\n match (hi & slotmask, lo & slotmask) {\n\n (1, 0) => nonce[offset] = 1,\n\n (0, 1) => nonce[offset] = -1,\n\n (_, _) => (),\n\n }\n\n offset += 1;\n\n }\n\n\n\n NonceTrits(nonce)\n\n}\n\n\n\nimpl Default for PearlDiver {\n\n fn default() -> Self {\n\n Self {\n\n cores: Cores::default(),\n\n difficulty: Difficulty::default(),\n\n state: Arc::new(RwLock::new(PearlDiverState::Created)),\n\n }\n\n }\n\n}\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 55, "score": 40334.22040521525 }, { "content": "fn setout(src: &[u8], dst: &mut [u8], len: usize) {\n\n dst[..len].copy_from_slice(&src[..len]);\n\n}\n\n\n", "file_path": "iota-crypto/keccak.rs", "rank": 56, "score": 40274.81364184518 }, { "content": "fn bigint_add_base(base: &mut [u32], rh: u32) -> u32 {\n\n let mut res = full_add(base[0], rh, false);\n\n base[0] = res.0;\n\n let mut j = 0;\n\n while res.1 {\n\n res = full_add(base[j], 0, true);\n\n base[j] = res.0;\n\n j += 1;\n\n }\n\n j as u32\n\n}\n\n\n", "file_path": "iota-crypto/kerl.rs", "rank": 57, "score": 40240.742922851015 }, 
{ "content": "/// Converts bytes to trits\n\nfn bytes_to_trits(bytes: &mut [u8], trits: &mut [i8]) -> Result<()> {\n\n ensure!(\n\n trits.len() == HASH_LENGTH,\n\n \"Trit slice should have length {}, but had length: {}\",\n\n HASH_LENGTH,\n\n trits.len()\n\n );\n\n ensure!(\n\n bytes.len() == BYTE_LENGTH,\n\n \"Byte slice should have length {}, but had length: {}\",\n\n BYTE_LENGTH,\n\n bytes.len()\n\n );\n\n\n\n let mut base = vec![0; INT_LENGTH];\n\n trits[HASH_LENGTH - 1] = 0;\n\n\n\n for i in 0..INT_LENGTH {\n\n base[INT_LENGTH - 1 - i] = u32::from(bytes[i * 4]) << 24;\n\n base[INT_LENGTH - 1 - i] |= u32::from(bytes[i * 4 + 1]) << 16;\n", "file_path": "iota-crypto/kerl.rs", "rank": 58, "score": 39522.619870725735 }, { "content": "fn find_nonce(state: &Curl64State, difficulty: &Difficulty) -> Option<NonceTrits> {\n\n let mut nonce_test = BITS_1;\n\n\n\n for i in (HASH_LEN - difficulty.0)..HASH_LEN {\n\n nonce_test &= state.bit_equal(i);\n\n\n\n // If 'nonce_test' ever becomes 0, then this means that none of the current nonce candidates satisfied\n\n // the difficulty setting\n\n if nonce_test == 0 {\n\n return None;\n\n }\n\n }\n\n\n\n for slot in 0..BATCH_SIZE {\n\n if (nonce_test >> slot) & 1 != 0 {\n\n return Some(extract_nonce(&state, slot));\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 59, "score": 38852.07946173537 }, { "content": "fn digests_helper(hash: &mut impl Sponge, key: &[i8]) -> Result<Vec<i8>> {\n\n let mut digests = vec![0; key.len() / FRAGMENT_LENGTH * HASH_LENGTH];\n\n for i in 0..key.len() / FRAGMENT_LENGTH {\n\n let mut buffer = key[i * FRAGMENT_LENGTH..(i + 1) * FRAGMENT_LENGTH].to_vec();\n\n for j in 0..NUMBER_OF_FRAGMENT_CHUNKS {\n\n for _ in 0..iota_constants::MAX_TRYTE_VALUE - iota_constants::MIN_TRYTE_VALUE {\n\n hash.reset();\n\n let offset = j * HASH_LENGTH;\n\n hash.absorb(&buffer[offset..offset + HASH_LENGTH])?;\n\n hash.squeeze(&mut buffer[offset..offset + HASH_LENGTH])?;\n\n }\n\n }\n\n hash.reset();\n\n hash.absorb(&buffer)?;\n\n let offset = i * HASH_LENGTH;\n\n hash.squeeze(&mut digests[offset..offset + HASH_LENGTH])?;\n\n }\n\n Ok(digests)\n\n}\n\n\n", "file_path": "iota-crypto/iss.rs", "rank": 60, "score": 36909.6637272576 }, { "content": "/// Provides an enum representing all units of IOTA\n\n#[derive(PartialEq, Clone, Copy, Debug)]\n\npub enum IotaUnits {\n\n /// Base unit of IOTA\n\n Iota,\n\n /// 1,000 Iota\n\n KiloIota,\n\n /// 1,000,000 Iota\n\n MegaIota,\n\n /// 1,000,000,000 Iota\n\n GigaIota,\n\n /// 1,000,000,000,000 Iota\n\n TeraIota,\n\n /// 1,000,000,000,000,000 Iota\n\n PetaIota,\n\n}\n\n\n\nimpl IotaUnits {\n\n /// Provides the unit string associated with this unit\n\n pub fn unit(self) -> &'static str {\n", "file_path": "iota-conversion/iota_units.rs", "rank": 63, "score": 35258.93607693298 }, { "content": " match self {\n\n IotaUnits::Iota => \"i\",\n\n IotaUnits::KiloIota => \"Ki\",\n\n IotaUnits::MegaIota => \"Mi\",\n\n IotaUnits::GigaIota => \"Gi\",\n\n IotaUnits::TeraIota => \"Ti\",\n\n IotaUnits::PetaIota => \"Pi\",\n\n }\n\n }\n\n\n\n /// Provides the number of significant digits associated with this unit\n\n pub fn value(self) -> u8 {\n\n match self {\n\n IotaUnits::Iota => 0,\n\n IotaUnits::KiloIota => 3,\n\n IotaUnits::MegaIota => 6,\n\n IotaUnits::GigaIota => 9,\n\n IotaUnits::TeraIota => 12,\n\n IotaUnits::PetaIota => 15,\n\n }\n\n }\n\n}\n", "file_path": "iota-conversion/iota_units.rs", "rank": 65, "score": 35257.57602907942 }, { "content": "pub mod 
constants;\n\n\n\nuse common::Trit;\n\n\n\nuse self::constants::CURL_HASH_TRIT_LEN as HASH_LEN;\n\nuse self::constants::CURL_P_81 as NUM_ROUNDS;\n\nuse self::constants::CURL_STAT_TRIT_LEN as STATE_LEN;\n\nuse self::constants::TRUTH_TABLE;\n\n\n\npub struct Curl {\n\n num_rounds: usize,\n\n state: [Trit; STATE_LEN],\n\n scratchpad: [Trit; STATE_LEN],\n\n}\n\n\n\nimpl Curl {\n\n pub fn new(num_rounds: usize) -> Self {\n\n Self {\n\n num_rounds,\n\n ..Self::default()\n", "file_path": "bee-crypto/src/curl/mod.rs", "rank": 69, "score": 33859.59958480155 }, { "content": "\n\n offset += chunk_length;\n\n\n\n if length > chunk_length {\n\n length -= chunk_length;\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.state.iter_mut().for_each(|t| *t = 0);\n\n }\n\n\n\n fn transform(&mut self) {\n\n let mut scratchpad_index = 0;\n\n\n\n for _ in 0..self.num_rounds {\n\n self.scratchpad.copy_from_slice(&self.state);\n", "file_path": "bee-crypto/src/curl/mod.rs", "rank": 70, "score": 33857.49392031665 }, { "content": " }\n\n }\n\n\n\n pub fn absorb(&mut self, trits: &[i8], mut offset: usize, mut length: usize) {\n\n loop {\n\n let chunk_length = {\n\n if length < HASH_LEN {\n\n length\n\n } else {\n\n HASH_LEN\n\n }\n\n };\n\n\n\n self.state[0..chunk_length].copy_from_slice(&trits[offset..offset + chunk_length]);\n\n\n\n self.transform();\n\n\n\n offset += chunk_length;\n\n\n\n if length > chunk_length {\n", "file_path": "bee-crypto/src/curl/mod.rs", "rank": 71, "score": 33857.44237069058 }, { "content": " length -= chunk_length;\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n pub fn squeeze(&mut self, trits: &mut [i8], mut offset: usize, mut length: usize) {\n\n loop {\n\n let chunk_length = {\n\n if length < HASH_LEN {\n\n length\n\n } else {\n\n HASH_LEN\n\n }\n\n };\n\n\n\n trits[offset..offset + chunk_length].copy_from_slice(&self.state[0..chunk_length]);\n\n\n\n self.transform();\n", "file_path": "bee-crypto/src/curl/mod.rs", "rank": 72, "score": 33857.41089840939 }, { "content": " for state_index in 0..STATE_LEN {\n\n let prev_scratchpad_index = scratchpad_index;\n\n\n\n if scratchpad_index < 365 {\n\n scratchpad_index += 364;\n\n } else {\n\n scratchpad_index -= 365;\n\n }\n\n\n\n self.state[state_index] = TRUTH_TABLE[(self.scratchpad[prev_scratchpad_index]\n\n + (self.scratchpad[scratchpad_index] << 2)\n\n + 5) as usize];\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Curl {\n\n fn default() -> Self {\n\n Curl {\n\n num_rounds: NUM_ROUNDS,\n\n state: [0; STATE_LEN],\n\n scratchpad: [0; STATE_LEN],\n\n }\n\n }\n\n}\n", "file_path": "bee-crypto/src/curl/mod.rs", "rank": 73, "score": 33850.191414217945 }, { "content": " }\n\n}\n\n\n\nmacro_rules! forward_sponge_impl {\n\n ($($t:ty),+) => {\n\n\n\n $(\n\n impl $t {\n\n /// Return the number of rounds used in this `CurlP` instacnce.\n\n pub fn rounds(&self) -> usize {\n\n self.0.rounds\n\n }\n\n }\n\n\n\n impl Sponge for $t {\n\n const HASH_LEN: usize = 243;\n\n\n\n fn absorb(&mut self, input: &Trits) {\n\n self.0.absorb(input)\n\n }\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 74, "score": 32766.3346681322 }, { "content": "//! This is a prototype for [PR #21], the RFC introducing the `Kerl` and `CurlP` hash functions\n\n//! implemented in terms of a common `Sponge` trait.\n\n//!\n\n//! The main focus of this prototype are the [`Sponge`] trait, and the [`CurlP`], and [`Kerl`]\n\n//! types. These are cryptographic hash functions that are sponge constructions implemented in\n\n//! 
terms of the trait.\n\n//!\n\n//! [PR #21]: https://github.com/iotaledger/bee-rfcs/pull/21\n\n\n\nuse std::convert::TryFrom;\n\nuse std::default::Default;\n\n\n\n/// The length of a hash as returned by the hash functions implemented in this RFC (in\n\n/// units of binary-coded, balanced trits).\n\nconst HASH_LEN: usize = 243;\n\n\n\n/// The length internal state of the `CurlP` sponge construction (in units of binary-coded,\n\n/// balanced trits).\n\nconst CURLP_STATE_LEN: usize = HASH_LEN * 3;\n\nconst CURLP_HALF_STATE_LEN: usize = CURLP_STATE_LEN / 2;\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 75, "score": 32765.81819658325 }, { "content": "\n\npub struct FromU8Error;\n\npub struct FromI8Error;\n\n\n\n/// Similar impls for `TritsMut` and `TritsBuf`\n\nimpl<'a> Trits<'a> {\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n /// Create a `Trits` from a `&[i8]` slice without verifying that its bytes are\n\n /// correctly binary-coded balanced trits (-1, 0, and +1).\n\n ///\n\n /// This function is intended to be used in hot loops and relies on the user making sure that\n\n /// the bytes are set correctly.\n\n ///\n\n /// **NOTE:** Use the `TryFrom` trait if you want to check that the slice encodes trits\n\n /// correctly before creating `Trits`.\n\n ///\n\n /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 76, "score": 32764.858685804396 }, { "content": " }\n\n\n\n /// Create a `Trits` from a `&[u8]` slice without verifying that its bytes are\n\n /// correctly binary-coded balanced trits (-1, 0, and +1 transmuted to unsigned bytes).\n\n ///\n\n /// This function is intended to be used in hot loops and relies on the user making sure that\n\n /// the bytes are set correctly.\n\n ///\n\n /// **NOTE:** Use the `TryFrom` trait if you want to check that the slice encodes trits\n\n /// correctly before creating `Trits`.\n\n ///\n\n /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the\n\n /// usage of `Trits` might lead to unexpected behaviour.\n\n pub fn from_u8_unchecked(v: &[u8]) -> Self {\n\n Self::from_i8_unchecked(unsafe { &*(v as *const _ as *const [i8]) })\n\n }\n\n}\n\n\n\npub struct Trits<'a>(&'a [i8]);\n\npub struct TritsMut<'a>(&'a mut [i8]);\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 77, "score": 32764.57295447547 }, { "content": " rounds: usize,\n\n\n\n /// The internal state.\n\n state: TritsBuf,\n\n\n\n /// Workspace for performing transformations\n\n work_state: TritsBuf,\n\n}\n\n\n\nimpl CurlP {\n\n /// Create a new `CurlP` sponge with `rounds` of iterations.\n\n pub fn new(rounds: usize) -> Self {\n\n Self {\n\n rounds,\n\n state: TritsBuf::with_capacity(CURLP_STATE_LEN),\n\n work_state: TritsBuf::with_capacity(CURLP_STATE_LEN),\n\n }\n\n }\n\n\n\n /// Return the number of rounds used in this `CurlP` instacnce.\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 78, "score": 32764.528147574958 }, { "content": " }\n\n\n\n pub fn fill(&mut self, v: ValidTrits) {\n\n let v = v.into();\n\n self.0.iter_mut().for_each(|x| *x = v);\n\n }\n\n\n\n /// Create a `Trits` from a `&[i8]` slice without verifying that its bytes are\n\n /// correctly binary-coded balanced trits (-1, 0, and +1).\n\n ///\n\n /// This function is intended to be used in hot loops and relies on the user making sure that\n\n /// the bytes are set correctly.\n\n ///\n\n /// **NOTE:** Use the 
`TryFrom` trait if you want to check that the slice encodes trits\n\n /// correctly before creating `Trits`.\n\n ///\n\n /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the\n\n /// usage of `Trits` might lead to unexpected behaviour.\n\n pub fn from_i8_unchecked(v: &[i8]) -> Self {\n\n Self(v.to_owned())\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 79, "score": 32764.287471422726 }, { "content": " Zero => 0,\n\n }\n\n }\n\n}\n\n\n\nimpl TritsBuf {\n\n /// Create a new `TritsBuf` with a number of `capacity` elements, all\n\n /// initialized to 0;\n\n pub fn with_capacity(capacity: usize) -> Self {\n\n Self(vec![0; capacity])\n\n }\n\n\n\n /// Return a read-only view of the buffer in form of a `Trits`.\n\n pub fn as_trits(&self) -> Trits<'_> {\n\n Trits(&self.0)\n\n }\n\n\n\n /// Return a read-write view of the buffer in form of a `TritsMut`.\n\n pub fn as_trits_mut(&mut self) -> TritsMut<'_> {\n\n TritsMut(&mut self.0)\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 80, "score": 32764.120266738446 }, { "content": "}\n\n\n\nimpl Default for CurlP27 {\n\n fn default() -> Self {\n\n CurlP27::new()\n\n }\n\n}\n\n\n\n/// `CurlP` with a fixed number of 81 rounds.\n\npub struct CurlP81(CurlP);\n\n\n\nimpl CurlP81 {\n\n pub fn new() -> Self {\n\n Self(CurlP::new(81))\n\n }\n\n}\n\n\n\nimpl Default for CurlP81 {\n\n fn default() -> Self {\n\n CurlP81::new()\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 81, "score": 32763.9809677975 }, { "content": "\n\nconst TRUTH_TABLE: [i8; 11] = [1, 0, -1, 2, 1, -1, 0, 2, -1, 1, 0];\n\n\n\n/// An owned, mutable\n\n#[derive(Clone, Debug)]\n\npub struct TritsBuf(Vec<i8>);\n\n\n\npub enum ValidTrits {\n\n MinusOne,\n\n PlusOne,\n\n Zero,\n\n}\n\n\n\nimpl From<ValidTrits> for i8 {\n\n fn from(v: ValidTrits) -> Self {\n\n use ValidTrits::*;\n\n\n\n match v {\n\n MinusOne => -1,\n\n PlusOne => 1,\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 82, "score": 32763.96836809118 }, { "content": " /// usage of `Trits` might lead to unexpected behaviour.\n\n pub fn from_i8_unchecked(v: &'a [i8]) -> Self {\n\n Self(v)\n\n }\n\n\n\n /// Create a `Trits` from a `&[u8]` slice without verifying that its bytes are\n\n /// correctly binary-coded balanced trits (-1, 0, and +1 transmuted to unsigned bytes).\n\n ///\n\n /// This function is intended to be used in hot loops and relies on the user making sure that\n\n /// the bytes are set correctly.\n\n ///\n\n /// **NOTE:** Use the `TryFrom` trait if you want to check that the slice encodes trits\n\n /// correctly before creating `Trits`.\n\n ///\n\n /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the\n\n /// usage of `Trits` might lead to unexpected behaviour.\n\n pub fn from_u8_unchecked(v: &[u8]) -> Self {\n\n Self::from_i8_unchecked(unsafe { &*(v as *const _ as *const [i8]) })\n\n }\n\n}\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 83, "score": 32763.93995297725 }, { "content": " /// Squeeze the sponge by copying the calculated hash into the provided `buf`. 
This will fill\n\n /// the buffer in chunks of `HASH_LEN` at a time.\n\n ///\n\n /// If the last chunk is smaller than `HASH_LEN`, then only the fraction that fits is written\n\n /// into it.\n\n fn squeeze_into(&mut self, buf: &mut TritsMut) {\n\n for chunk in buf.0.chunks_mut(Self::HASH_LEN) {\n\n chunk.copy_from_slice(&self.state.0[0..chunk.len()]);\n\n self.transform()\n\n }\n\n }\n\n}\n\n\n\n/// `CurlP` with a fixed number of 27 rounds.\n\npub struct CurlP27(CurlP);\n\n\n\nimpl CurlP27 {\n\n pub fn new() -> Self {\n\n Self(CurlP::new(27))\n\n }\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 84, "score": 32762.972095444467 }, { "content": " /// Convenience function to absorb `input`, squeeze the sponge into a\n\n /// buffer, and reset the sponge in one go.\n\n fn digest_into(&mut self, input: &Trits, buf: &mut TritsMut) {\n\n self.absorb(input);\n\n self.squeeze_into(buf);\n\n self.reset();\n\n }\n\n\n\n /// Convenience function to absorb `input`, squeeze the sponge, and reset the sponge in one go.\n\n /// Returns an owned versin of the hash.\n\n fn digest(&mut self, input: &Trits) -> TritsBuf {\n\n self.absorb(input);\n\n let output = self.squeeze();\n\n self.reset();\n\n output\n\n }\n\n}\n\n\n\npub struct CurlP {\n\n /// The number of rounds of hashing to apply before a hash is squeezed.\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 85, "score": 32762.20606005777 }, { "content": " for byte in v {\n\n match byte {\n\n 0 | -1 | 1 => {}\n\n _ => Err(FromI8Error)?,\n\n }\n\n }\n\n\n\n Ok(Self::from_i8_unchecked(v))\n\n }\n\n}\n\n\n\nimpl<'a> TritsMut<'a> {\n\n pub fn len(&self) -> usize {\n\n self.0.len()\n\n }\n\n\n\n pub fn from_i8_unchecked(v: &'a mut [i8]) -> Self {\n\n Self(v)\n\n }\n\n\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 86, "score": 32761.796953210032 }, { "content": " pub fn rounds(&self) -> usize {\n\n self.rounds\n\n }\n\n\n\n /// Transforms the internal state of the `CurlP` sponge after the input was copied\n\n /// into the internal state.\n\n ///\n\n /// The essence of this transformation is the application of a so-called substitution box to\n\n /// the internal state, which happens `round` number of times.\n\n fn transform(&mut self) {\n\n fn apply_substitution_box(input: &[i8], output: &mut [i8]) {\n\n assert!(input.len() <= CURLP_STATE_LEN);\n\n assert!(output.len() <= CURLP_STATE_LEN);\n\n\n\n output[0] = TRUTH_TABLE[(input[0] + (input[364] << 2) + 5) as usize];\n\n\n\n for state_index in 0..CURLP_HALF_STATE_LEN {\n\n let rhs_index_a = CURLP_HALF_STATE_LEN - state_index;\n\n let rhs_index_b = CURLP_STATE_LEN - state_index - 1;\n\n\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 87, "score": 32761.715202895724 }, { "content": " pub fn from_u8_unchecked(v: &mut [u8]) -> Self {\n\n Self::from_i8_unchecked(unsafe { &mut *(v as *mut _ as *mut [i8]) })\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<&'a mut [i8]> for TritsMut<'a> {\n\n type Error = FromI8Error;\n\n\n\n fn try_from(v: &'a mut [i8]) -> Result<Self, Self::Error> {\n\n for byte in v.iter() {\n\n match byte {\n\n 0 | -1 | 1 => {}\n\n _ => Err(FromI8Error)?,\n\n }\n\n }\n\n\n\n Ok(Self::from_i8_unchecked(v))\n\n }\n\n}\n\n\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 88, "score": 32760.50347705745 }, { "content": "\n\nimpl<'a> TryFrom<&'a [u8]> for Trits<'a> {\n\n type Error = FromU8Error;\n\n\n\n fn try_from(v: &[u8]) -> Result<Self, Self::Error> {\n\n for byte in v {\n\n match byte {\n\n 0b0000_0000 | 0b1111_1111 | 
0b0000_0001 => {}\n\n _ => Err(FromU8Error)?,\n\n }\n\n }\n\n\n\n Ok(Self::from_u8_unchecked(v))\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<&'a [i8]> for Trits<'a> {\n\n type Error = FromI8Error;\n\n\n\n fn try_from(v: &'a [i8]) -> Result<Self, Self::Error> {\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 89, "score": 32758.226458951412 }, { "content": "impl<'a> TryFrom<&'a mut [u8]> for TritsMut<'a> {\n\n type Error = FromU8Error;\n\n\n\n fn try_from(v: &mut [u8]) -> Result<Self, Self::Error> {\n\n for byte in v.iter() {\n\n match byte {\n\n 0b0000_0000 | 0b1111_1111 | 0b0000_0001 => {}\n\n _ => Err(FromU8Error)?,\n\n }\n\n }\n\n\n\n Ok(Self::from_u8_unchecked(v))\n\n }\n\n}\n\n\n\n/// The common interface of cryptographic hash functions that follow the sponge construction,\n\n/// and that absorb and return binary-coded, balanced ternary.\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 90, "score": 32758.226458951412 }, { "content": "\n\n /// Absorb `input` into the sponge by copying `HASH_LEN` chunks of it into its internal\n\n /// state and transforming the state before moving on to the next chunk.\n\n ///\n\n /// If `input` is not a multiple of `HASH_LEN` with the last chunk having `n < HASH_LEN` trits,\n\n /// the last chunk will be copied to the first `n` slots of the internal state. The remaining\n\n /// data in the internal state is then just the result of the last transformation before the\n\n /// data was copied, and will be reused for the next transformation.\n\n fn absorb(&mut self, input: &Trits) {\n\n for chunk in input.0.chunks(Self::HASH_LEN) {\n\n self.state.0[0..chunk.len()].copy_from_slice(chunk);\n\n self.transform();\n\n }\n\n }\n\n\n\n /// Reset the internal state by overwriting it with zeros.\n\n fn reset(&mut self) {\n\n self.state.fill(ValidTrits::Zero);\n\n }\n\n\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 91, "score": 32758.226458951412 }, { "content": "\n\n fn reset(&mut self) {\n\n self.0.reset()\n\n }\n\n\n\n fn squeeze_into(&mut self, buf: &mut TritsMut) {\n\n self.0.squeeze_into(buf);\n\n }\n\n }\n\n )+\n\n }\n\n}\n\n\n\nforward_sponge_impl!(CurlP27, CurlP81);\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 92, "score": 32758.226458951412 }, { "content": " output[2 * state_index + 1] =\n\n TRUTH_TABLE[{ (input[rhs_index_a] + input[rhs_index_b] << 2) + 5 } as usize];\n\n\n\n let rhs_index_a = 364 - state_index - 1;\n\n output[2 * state_index + 2] =\n\n TRUTH_TABLE[{ (input[rhs_index_b] + input[rhs_index_a] << 2) + 5 } as usize];\n\n }\n\n }\n\n\n\n let (mut lhs, mut rhs) = (&mut self.state.0, &mut self.work_state.0);\n\n\n\n for _ in 0..self.rounds {\n\n apply_substitution_box(lhs, rhs);\n\n std::mem::swap(&mut lhs, &mut rhs);\n\n }\n\n }\n\n}\n\n\n\nimpl Sponge for CurlP {\n\n const HASH_LEN: usize = HASH_LEN;\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 93, "score": 32758.226458951412 }, { "content": "/// The common interface of cryptographic hash functions that follow the sponge construction,\n\n/// and that absorb and return binary-coded, balanced ternary.\n\ntrait Sponge {\n\n const HASH_LEN: usize;\n\n\n\n /// Absorb `input` into the sponge.\n\n fn absorb(&mut self, input: &Trits);\n\n\n\n /// Reset the inner state of the sponge.\n\n fn reset(&mut self);\n\n\n\n /// Squeeze the sponge into a buffer\n\n fn squeeze_into(&mut self, buf: &mut TritsMut);\n\n\n\n /// Convenience function using `Sponge::squeeze_into` to to return an owned\n\n /// version of the hash.\n\n fn 
squeeze(&mut self) -> TritsBuf {\n\n let mut output = TritsBuf::with_capacity(Self::HASH_LEN);\n\n self.squeeze_into(&mut output.as_trits_mut());\n\n output\n\n }\n\n\n", "file_path": "bee-crypto/src/hashes_preview/mod.rs", "rank": 94, "score": 31734.510723963205 }, { "content": "use iota_constants;\n\n\n\nuse crate::Result;\n\n\n\nuse super::curl::Curl;\n\nuse super::kerl::Kerl;\n\nuse super::{hash_with_mode, HashMode, Sponge};\n\nuse iota_constants::HASH_TRINARY_SIZE as HASH_LENGTH;\n\n\n\n/// Number of fragment chunks\n\npub const NUMBER_OF_FRAGMENT_CHUNKS: usize = 27;\n\n/// Length of a fragment\n\npub const FRAGMENT_LENGTH: usize = HASH_LENGTH * NUMBER_OF_FRAGMENT_CHUNKS;\n\n/// The amount of valid security levels\n\npub const NUMBER_OF_SECURITY_LEVELS: usize = 3;\n\n/// The width of tryte\n\npub const TRYTE_WIDTH: usize = 3;\n\n/// Normalized fragment length\n\npub const NORMALIZED_FRAGMENT_LENGTH: usize = HASH_LENGTH / TRYTE_WIDTH / NUMBER_OF_SECURITY_LEVELS;\n\n\n\n/// Create a subseed\n\n///\n\n/// * `mode` - The hashing mode to use\n\n/// * `seed` - The generation seed\n\n/// * `index` - How many address permutations to iterate through\n", "file_path": "iota-crypto/iss.rs", "rank": 95, "score": 21.562301511726705 }, { "content": "#![deny(unused_extern_crates)]\n\n#![warn(\n\n missing_debug_implementations,\n\n missing_docs,\n\n rust_2018_idioms,\n\n unreachable_pub\n\n)]\n\n\n\n//! Trinary and unit conversion traits and methods\n\n\n\n#[macro_use]\n\nextern crate failure;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\n/// Provides useful unit definitions for Iota\n\npub mod iota_units;\n\nmod trinary;\n\n/// Converts between strings and tryte-encoded strings\n\npub mod trytes_converter;\n\n/// Provides converters between various unit representations of Iota\n\npub mod unit_converter;\n\n\n\npub use trinary::*;\n\n\n", "file_path": "iota-conversion/lib.rs", "rank": 96, "score": 20.50799394861677 }, { "content": "use common::Tryte;\n\nuse common::constants::*;\n\nuse common::Result;\n\nuse common::Error;\n\n\n\nuse ternary::IsTryte;\n\n\n\nuse crate::constants::*;\n\n\n\npub struct Payload(pub [Tryte; PAYLOAD.tryte_offset.length]);\n\npub struct Address(pub [Tryte; ADDRESS.tryte_offset.length]);\n\n#[derive(Default, Debug)]\n\npub struct Value(pub i64);\n\npub struct Tag(pub [Tryte; TAG.tryte_offset.length]);\n\n#[derive(Default, Debug)]\n\npub struct Timestamp(pub u64);\n\n#[derive(Default, Debug)]\n\npub struct Index(pub usize);\n\npub struct Hash(pub [Tryte; BUNDLE_HASH.tryte_offset.length]);\n\npub struct Nonce(pub [Tryte; NONCE.tryte_offset.length]);\n", "file_path": "bee-bundle/src/transaction.rs", "rank": 97, "score": 17.957567060972515 }, { "content": " let mut seed = [0; 243];\n\n\n\n seed.copy_from_slice(bytes);\n\n\n\n Seed(seed)\n\n }\n\n\n\n // TODO: documentation\n\n pub fn to_bytes(&self) -> &[i8] {\n\n &self.0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n // TODO super::super ?\n\n use super::super::slice_eq;\n\n // TODO Remove when available in bee\n\n use iota_crypto::{Curl, Kerl};\n", "file_path": "bee-signing/src/seed.rs", "rank": 98, "score": 16.95111630260279 }, { "content": "\n\n let last = trit_length - hash_length * HASH_LENGTH;\n\n out[trit_length - last..].copy_from_slice(&self.state[0..last]);\n\n if trit_length % HASH_LENGTH != 0 {\n\n self.transform();\n\n }\n\n Ok(())\n\n }\n\n\n\n fn reset(&mut self) {\n\n self.state = [0; STATE_LENGTH];\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use 
iota_conversion::Trinary;\n\n const TRYTES: &str = \"RSWWSFXPQJUBJROQBRQZWZXZJWMUBVIVMHPPTYSNW9YQIQQF9RCSJJCVZG9ZWITXNCSBBDHEEKDRBHVTWCZ9SZOOZHVBPCQNPKTWFNZAWGCZ9QDIMKRVINMIRZBPKRKQAIPGOHBTHTGYXTBJLSURDSPEOJ9UKJECUKCCPVIQQHDUYKVKISCEIEGVOQWRBAYXWGSJUTEVG9RPQLPTKYCRAJ9YNCUMDVDYDQCKRJOAPXCSUDAJGETALJINHEVNAARIPONBWXUOQUFGNOCUSSLYWKOZMZUKLNITZIFXFWQAYVJCVMDTRSHORGNSTKX9Z9DLWNHZSMNOYTU9AUCGYBVIITEPEKIXBCOFCMQPBGXYJKSHPXNUKFTXIJVYRFILAVXEWTUICZCYYPCEHNTK9SLGVL9RLAMYTAEPONCBHDXSEQZOXO9XCFUCPPMKEBR9IEJGQOPPILHFXHMIULJYXZJASQEGCQDVYFOM9ETXAGVMSCHHQLFPATWOSMZIDL9AHMSDCE9UENACG9OVFAEIPPQYBCLXDMXXA9UBJFQQBCYKETPNKHNOUKCSSYLWZDLKUARXNVKKKHNRBVSTVKQCZL9RY9BDTDTPUTFUBGRMSTOTXLWUHDMSGYRDSZLIPGQXIDMNCNBOAOI9WFUCXSRLJFIVTIPIAZUK9EDUJJ9B9YCJEZQQELLHVCWDNRH9FUXDGZRGOVXGOKORTCQQA9JXNROLETYCNLRMBGXBL9DQKMOAZCBJGWLNJLGRSTYBKLGFVRUF9QOPZVQFGMDJA9TBVGFJDBAHEVOLW9GNU9NICLCQJBOAJBAHHBZJGOFUCQMBGYQLCWNKSZPPBQMSJTJLM9GXOZHTNDLGIRCSIJAZTENQVQDHFSOQM9WVNWQQJNOPZMEISSCLOADMRNWALBBSLSWNCTOSNHNLWZBVCFIOGFPCPRKQSRGKFXGTWUSCPZSKQNLQJGKDLOXSBJMEHQPDZGSENUKWAHRNONDTBLHNAKGLOMCFYRCGMDOVANPFHMQRFCZIQHCGVORJJNYMTORDKPJPLA9LWAKAWXLIFEVLKHRKCDG9QPQCPGVKIVBENQJTJGZKFTNZHIMQISVBNLHAYSSVJKTIELGTETKPVRQXNAPWOBGQGFRMMK9UQDWJHSQMYQQTCBMVQKUVGJEAGTEQDN9TCRRAZHDPSPIYVNKPGJSJZASZQBM9WXEDWGAOQPPZFLAMZLEZGXPYSOJRWL9ZH9NOJTUKXNTCRRDO9GKULXBAVDRIZBOKJYVJUSHIX9F9O9ACYCAHUKBIEPVZWVJAJGSDQNZNWLIWVSKFJUMOYDMVUFLUXT9CEQEVRFBJVPCTJQCORM9JHLYFSMUVMFDXZFNCUFZZIKREIUIHUSHRPPOUKGFKWX9COXBAZMQBBFRFIBGEAVKBWKNTBMLPHLOUYOXPIQIZQWGOVUWQABTJT9ZZPNBABQFYRCQLXDHDEX9PULVTCQLWPTJLRSVZQEEYVBVY9KCNEZXQLEGADSTJBYOXEVGVTUFKNCNWMEDKDUMTKCMRPGKDCCBDHDVVSMPOPUBZOMZTXJSQNVVGXNPPBVSBL9WWXWQNMHRMQFEQYKWNCSW9URI9FYPT9UZMAFMMGUKFYTWPCQKVJ9DIHRJFMXRZUGI9TMTFUQHGXNBITDSORZORQIAMKY9VRYKLEHNRNFSEFBHF9KXIQAEZEJNQOENJVMWLMHI9GNZPXYUIFAJIVCLAGKUZIKTJKGNQVTXJORWIQDHUPBBPPYOUPFAABBVMMYATXERQHPECDVYGWDGXFJKOMOBXKRZD9MCQ9LGDGGGMYGUAFGMQTUHZOAPLKPNPCIKUNEMQIZOCM9COAOMZSJ9GVWZBZYXMCNALENZ9PRYMHENPWGKX9ULUIGJUJRKFJPBTTHCRZQKEAHT9DC9GSWQEGDTZFHACZMLFYDVOWZADBNMEM9XXEOMHCNJMDSUAJRQTBUWKJF9RZHK9ACGUNI9URFIHLXBXCEODONPXBSCWP9WNAEYNALKQHGULUQGAFL9LB9NBLLCACLQFGQMXRHGBTMI9YKAJKVELRWWKJAPKMSYMJTDYMZ9PJEEYIRXRMMFLRSFSHIXUL9NEJABLRUGHJFL9RASMSKOI9VCFRZ9GWTMODUUESIJBHWWHZYCLDENBFSJQPIOYC9MBGOOXSWEMLVU9L9WJXKZKVDBDMFSVHHISSSNILUMWULMVMESQUIHDGBDXROXGH9MTNFSLWJZRAPOKKRGXAAQBFPYPAAXLSTMNSNDTTJQSDQORNJS9BBGQ9KQJZYPAQ9JYQZJ9B9KQDAXUACZWRUNGMBOQLQZUHFNCKVQGORRZGAHES9PWJUKZWUJSBMNZFILBNBQQKLXITCTQDDBV9UDAOQOUPWMXTXWFWVMCXIXLRMRWMAYYQJPCEAAOFEOGZQMEDAGYGCTKUJBS9AGEXJAFHWWDZRYEN9DN9HVCMLFURISLYSWKXHJKXMHUWZXUQARMYPGKRKQMHVR9JEYXJRPNZINYNCGZHHUNHBAIJHLYZIZGGIDFWVNXZQADLEDJFTIUTQWCQSX9QNGUZXGXJYUUTFSZPQKXBA9DFRQRLTLUJENKESDGTZRGRSLTNYTITXRXRGVLWBTEWPJXZYLGHLQBAVYVOSABIVTQYQM9FIQKCBRRUEMVVTMERLWOK\";\n", "file_path": "iota-crypto/curl.rs", "rank": 99, "score": 16.65203124705082 } ]
Rust
src/core/string.rs
phR0ze/rs
33573ef35ec6964f4aa15340941636fb1a77f6ed
use crate::errors::*;
use std::{ffi::OsStr, path::Path, str};

pub trait StringExt {
    fn size(&self) -> usize;
    fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String;
}

impl StringExt for str {
    fn size(&self) -> usize {
        self.chars().count()
    }

    fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String {
        let target = suffix.into();
        match self.ends_with(&target) {
            true => self[..self.len() - target.len()].to_owned(),
            _ => self.to_owned(),
        }
    }
}

impl StringExt for String {
    fn size(&self) -> usize {
        self.chars().count()
    }

    fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String {
        let target = suffix.into();
        match self.ends_with(&target) {
            true => self[..self.len() - target.len()].to_owned(),
            _ => self.to_owned(),
        }
    }
}

pub trait ToStringExt {
    fn to_string(&self) -> FuResult<String>;
}

impl ToStringExt for Path {
    fn to_string(&self) -> FuResult<String> {
        let _str = self.to_str().ok_or_else(|| PathError::failed_to_string(self))?;
        Ok(String::from(_str))
    }
}

impl ToStringExt for OsStr {
    fn to_string(&self) -> FuResult<String> {
        Ok(String::from(self.to_str().ok_or(StringError::FailedToString)?))
    }
}

#[cfg(test)]
mod tests {
    use crate::prelude::*;
    use std::{
        ffi::OsStr,
        path::{Path, PathBuf},
    };

    #[test]
    fn test_str_size() {
        assert_eq!("foo".size(), 3);
        assert_eq!("ƒoo".len(), 4);
        assert_eq!("ƒoo".size(), 3);
    }

    #[test]
    fn test_string_size() {
        assert_eq!("foo".to_string().size(), 3);
        assert_eq!("ƒoo".to_string().len(), 4);
        assert_eq!("ƒoo".to_string().size(), 3);
    }

    #[test]
    fn test_str_trim_suffix() {
        assert_eq!("foo".trim_suffix("oo"), "f".to_string());
        assert_eq!("ƒoo".trim_suffix("o"), "ƒo".to_string());
    }

    #[test]
    fn test_string_trim_suffix() {
        assert_eq!("foo".to_string().trim_suffix("oo"), "f".to_string());
        assert_eq!("ƒoo".to_string().trim_suffix("o"), "ƒo".to_string());
    }

    #[test]
    fn test_osstr_to_string() {
        assert_eq!(OsStr::new("foo").to_string().unwrap(), "foo".to_string());
    }

    #[test]
    fn test_path_to_string() {
        assert_eq!(Path::new("/foo").to_string().unwrap(), "/foo".to_string());
        assert_eq!(PathBuf::from("/foo").to_string().unwrap(), "/foo".to_string());
    }
}
use crate::errors::*;
use std::{ffi::OsStr, path::Path, str};

pub trait StringExt {
    fn size(&self) -> usize;
    fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String;
}

impl StringExt for str {
    fn size(&self) -> usize {
        self.chars().count()
    }

    fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String {
        let target = suffix.into();
    }
}

impl StringExt for String {
    fn size(&self) -> usize {
        self.chars().count()
    }

    fn trim_suffix<T: Into<String>>(&self, suffix: T) -> String {
        let target = suffix.into();
        match self.ends_with(&target) {
            true => self[..self.len() - target.len()].to_owned(),
            _ => self.to_owned(),
        }
    }
}

pub trait ToStringExt {
    fn to_string(&self) -> FuResult<String>;
}

impl ToStringExt for Path {
    fn to_string(&self) -> FuResult<String> {
        let _str = self.to_str().ok_or_else(|| PathError::failed_to_string(self))?;
        Ok(String::from(_str))
    }
}

impl ToStringExt for OsStr {
    fn to_string(&self) -> FuResult<String> {
        Ok(String::from(self.to_str().ok_or(StringError::FailedToString)?))
    }
}

#[cfg(test)]
mod tests {
    use crate::prelude::*;
    use std::{
        ffi::OsStr,
        path::{Path, PathBuf},
    };

    #[test]
    fn test_str_size() {
        assert_eq!("foo".size(), 3);
        assert_eq!("ƒoo".len(), 4);
        assert_eq!("ƒoo".size(), 3);
    }

    #[test]
    fn test_string_size() {
        assert_eq!("foo".to_string().size(), 3);
        assert_eq!("ƒoo".to_string().len(), 4);
        assert_eq!("ƒoo".to_string().size(), 3);
    }

    #[test]
    fn test_str_trim_suffix() {
        assert_eq!("foo".trim_suffix("oo"), "f".to_string());
        assert_eq!("ƒoo".trim_suffix("o"), "ƒo".to_string());
    }

    #[test]
    fn test_string_trim_suffix() {
        assert_eq!("foo".to_string().trim_suffix("oo"), "f".to_string());
        assert_eq!("ƒoo".to_string().trim_suffix("o"), "ƒo".to_string());
    }

    #[test]
    fn test_osstr_to_string() {
        assert_eq!(OsStr::new("foo").to_string().unwrap(), "foo".to_string());
    }

    #[test]
    fn test_path_to_string() {
        assert_eq!(Path::new("/foo").to_string().unwrap(), "/foo".to_string());
        assert_eq!(PathBuf::from("/foo").to_string().unwrap(), "/foo".to_string());
    }
}
        match self.ends_with(&target) {
            true => self[..self.len() - target.len()].to_owned(),
            _ => self.to_owned(),
        }
if_condition
[ { "content": "/// Set the timezone from the given value\n\npub fn set_timezone(tz: &str) {\n\n env::set_var(\"TZ\", tz);\n\n unsafe {\n\n c::tzset();\n\n }\n\n}\n\n\n\n// libc types specific to `time` not exposed by base libc crate\n\nmod c {\n\n extern \"C\" {\n\n // `gmtime_r` converts the unix `timestamp` into a broken-down format and stores it in\n\n // `result`. `gmtime_r` is the thread safe version and is more than 4x faster than\n\n // `gmtime`. Typically you get an unix timestamp in UTC from calling `time()`.\n\n //\n\n // struct tm* gmtime_r(const time_t* timestamp, struct tm* result);\n\n // https://man.archlinux.org/man/localtime.3p.html\n\n //\n\n // `timestamp` is a pointer to a `time_t` object that contains the time you want to convert\n\n // `result` is a pointer to a `tm` struct where the function can store the converted time\n\n pub(crate) fn gmtime_r(timestamp: *const libc::time_t, result: *mut libc::tm);\n", "file_path": "src/unit/time.rs", "rank": 2, "score": 149630.69972546102 }, { "content": "/// Returns the current running executable's name.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let base = sys::exe().unwrap().base().unwrap();\n\n/// assert_eq!(exec::name().unwrap(), base);\n\n/// ```\n\npub fn name() -> FuResult<String> {\n\n Ok(sys::exe()?.base()?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::prelude::*;\n\n\n\n #[test]\n\n fn test_dir() {\n\n let cwd = sys::cwd().unwrap();\n\n let dir = cwd.mash(\"target/debug/deps\");\n\n assert_eq!(exec::dir().unwrap(), dir);\n\n }\n\n\n\n // Can't modify PATH in parallel\n\n // #[test]\n\n // fn test_lookup() {\n\n // let tmpdir = setup.temp.mash(\"exec_lookup\");\n\n // let file1 = tmpdir.mash(\"file1\");\n", "file_path": "src/sys/exec.rs", "rank": 3, "score": 149333.6965364915 }, { "content": "/// Returns the current user's name.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// println!(\"current user name: {:?}\", user::name().unwrap());\n\n/// ```\n\npub fn name() -> FuResult<String> {\n\n Ok(current()?.name)\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 4, "score": 149330.67537947814 }, { "content": "/// Converts the given value in bytes to a human readable format\n\n/// e.g. 
3195728 = 3.05 MiB\n\npub fn to_human(val: u64) -> String {\n\n let (value, unit) = if val >= TEBIBYTE {\n\n (val as f64 / TEBIBYTE as f64, \"TiB\")\n\n } else if val >= GIBIBYTE {\n\n (val as f64 / GIBIBYTE as f64, \"GiB\")\n\n } else if val >= MEBIBYTE {\n\n (val as f64 / MEBIBYTE as f64, \"MiB\")\n\n } else if val >= KIBIBYTE {\n\n (val as f64 / KIBIBYTE as f64, \"KiB\")\n\n } else {\n\n (val as f64 as f64, \"bytes\")\n\n };\n\n\n\n let result = format!(\"{:.2}\", value);\n\n format!(\"{} {}\", result.trim_suffix(\".00\"), unit)\n\n}\n\n\n", "file_path": "src/unit/bytes.rs", "rank": 5, "score": 145133.67715367675 }, { "content": "/// Returns the first captured string from the given regular expression `rx`.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_extract_string_p\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// let rx = Regex::new(r\"'([^']+)'\\s+\\((\\d{4})\\)\").unwrap();\n\n/// assert!(sys::write(&tmpfile, \"Not my favorite movie: 'Citizen Kane' (1941).\").is_ok());\n\n/// assert_eq!(sys::extract_string(&tmpfile, &rx).unwrap(), \"Citizen Kane\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn extract_string_p<T: AsRef<Path>, U: AsRef<str>>(path: T, rx: U) -> FuResult<String> {\n\n extract_string(path, &Regex::new(rx.as_ref())?)\n\n}\n\n\n\n/// Returns the captured strings from the given regular expression `rx`.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_extract_strings\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// let rx = Regex::new(r\"'([^']+)'\\s+\\((\\d{4})\\)\").unwrap();\n\n/// assert!(sys::write(&tmpfile, \"Not my favorite movie: 'Citizen Kane' (1941).\").is_ok());\n\n/// assert_eq!(sys::extract_strings(&tmpfile, &rx).unwrap(), vec![\"Citizen Kane\", \"1941\"]);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n", "file_path": "src/sys/file.rs", "rank": 6, "score": 143604.18076092243 }, { "content": "/// Returns the captured strings from the given regular expression `rx`.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_extract_strings_p\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::write(&tmpfile, \"Not my favorite movie: 'Citizen Kane' (1941).\").is_ok());\n\n/// assert_eq!(sys::extract_strings_p(&tmpfile, r\"'([^']+)'\\s+\\((\\d{4})\\)\").unwrap(), vec![\"Citizen Kane\", \"1941\"]);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn extract_strings_p<T: AsRef<Path>, U: AsRef<str>>(path: T, rx: U) -> FuResult<Vec<String>> {\n\n extract_strings(path, &Regex::new(rx.as_ref())?)\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 7, "score": 139317.36664641235 }, { "content": "/// Ensure the given closure is executed once the surrounding scope closes despite panics.\n\n/// Inspired by Golang's `defer`, Java's finally and Ruby's `ensure`.\n\n///\n\n/// This provides a mechanism similar to Golang's `defer` that will trigger when the\n\n/// surrounding function goes out of 
scope.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"core_defer_doc\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n///\n\n/// // Create a scope that will trigger defer's destructor\n\n/// {\n\n/// let _defer = defer(|| sys::remove_all(&tmpdir).unwrap());\n\n/// }\n\n/// assert_eq!(tmpdir.exists(), false);\n\n/// ```\n\npub fn defer<T: FnMut()>(f: T) -> impl Drop {\n\n Defer(f)\n\n}\n\n\n\npub struct Defer<T: FnMut()>(T);\n\n\n\nimpl<T: FnMut()> Drop for Defer<T> {\n\n fn drop(&mut self) {\n\n (self.0)();\n\n }\n\n}\n\n\n\n// Unit tests\n\n// -------------------------------------------------------------------------------------------------\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::prelude::*;\n\n use std::{\n\n cell::Cell,\n\n panic::{self, catch_unwind, AssertUnwindSafe},\n", "file_path": "src/core/defer.rs", "rank": 8, "score": 136875.47374885113 }, { "content": "/// Return the current working path trimmed back to the relative dir\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::rel_to(\"home\").unwrap(), PathBuf::from(\"/home\"));\n\n/// ```\n\npub fn rel_to(dir: &str) -> FuResult<PathBuf> {\n\n let cwd = sys::cwd()?;\n\n\n\n // Expand path\n\n let mut path = cwd.expand()?;\n\n\n\n // Check for empty string\n\n if dir.is_empty() {\n\n return Ok(path);\n\n }\n\n\n\n let target = OsStr::new(dir);\n\n while path.last()? != Component::Normal(&target) {\n\n path = path.trim_last();\n\n }\n\n\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 9, "score": 133187.66688766907 }, { "content": "/// Check if the given executable exists in the `PATH` and is executable.\n\npub fn exists<T: AsRef<Path>>(target: T) -> bool {\n\n lookup(target).is_ok()\n\n}\n\n\n", "file_path": "src/sys/exec.rs", "rank": 10, "score": 127064.54634056304 }, { "content": "/// Fetches the environment variable `key` from the current process.\n\n/// Wraps std::env::var\n\n///\n\n/// # Errors\n\n/// * Environment variable is not present\n\n/// * Environment variable is not valid unicode\n\n///\n\n/// # Panics\n\n///\n\n/// This function may panic if `key` is empty, contains an ASCII equals sign\n\n/// `'='` or the NUL character `'\\0'`, or when the value contains the NUL\n\n/// character.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let key = \"KEY\";\n\n/// sys::set_var(key, \"VALUE\");\n\n/// assert_eq!(sys::var(key), Ok(\"VALUE\".to_string()));\n\n/// ```\n\npub fn var<K: AsRef<OsStr>>(key: K) -> std::result::Result<String, env::VarError> {\n\n env::var(key)\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 11, "score": 121463.82629206004 }, { "content": "/// Returns the contents of the `path` as a `String`.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_readstring\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::write(&tmpfile, \"this is a test\").is_ok());\n\n/// assert_eq!(sys::readstring(&tmpfile).unwrap(), \"this is a test\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn readstring<T: AsRef<Path>>(path: T) -> FuResult<String> {\n\n let path = path.as_ref().abs()?;\n\n match std::fs::read_to_string(path) {\n\n 
Ok(data) => Ok(data),\n\n Err(err) => Err(err.into()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 12, "score": 119250.37087157731 }, { "content": "/// Returns the first captured string from the given regular expression `rx`.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_extract_string\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// let rx = Regex::new(r\"'([^']+)'\\s+\\((\\d{4})\\)\").unwrap();\n\n/// assert!(sys::write(&tmpfile, \"Not my favorite movie: 'Citizen Kane' (1941).\").is_ok());\n\n/// assert_eq!(sys::extract_string(&tmpfile, &rx).unwrap(), \"Citizen Kane\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn extract_string<T: AsRef<Path>>(path: T, rx: &Regex) -> FuResult<String> {\n\n let data = readstring(path)?;\n\n let caps = rx.captures(&data).ok_or(FileError::FailedToExtractString)?;\n\n let value = caps.get(1).ok_or(FileError::FailedToExtractString)?;\n\n Ok(value.as_str().to_string())\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 13, "score": 118675.17965508846 }, { "content": "/// Returns the full path of the given executable. Uses given path if resolvable and falls back on\n\n/// the system `PATH` if simply an exec name.\n\n/// ```\n\npub fn lookup<T: AsRef<Path>>(target: T) -> FuResult<PathBuf> {\n\n let path = target.as_ref();\n\n match path.has(\"/\") {\n\n // Target is a path\n\n true => {\n\n let path = path.abs()?;\n\n if !path.exists() {\n\n return Err(PathError::does_not_exist(path).into());\n\n } else if path.is_dir() || !path.is_exec() {\n\n return Err(PathError::is_not_exec(path).into());\n\n }\n\n Ok(path)\n\n },\n\n\n\n // Target is a name\n\n false => {\n\n let base = path.to_string()?;\n\n for dir in user::path_dirs()? {\n\n let path = sys::mash(dir, &base);\n\n if !path.is_dir() && path.is_exec() {\n\n return Ok(path);\n\n }\n\n }\n\n Err(PathError::does_not_exist(target).into())\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/sys/exec.rs", "rank": 14, "score": 117477.16521726559 }, { "content": "/// Returns the full path to a newly created directory in `/tmp` that can be used for temporary\n\n/// work. The returned path will be checked for uniqueness and created with a random suffix and\n\n/// the given `prefix`. 
It is up to the calling code to ensure the directory returned is\n\n/// properly cleaned up when done with.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = user::temp_dir(\"foo\").unwrap();\n\n/// assert_eq!(tmpdir.exists(), true);\n\n/// {\n\n/// let _defer = defer(|| sys::remove_all(&tmpdir).unwrap());\n\n/// }\n\n/// assert_eq!(tmpdir.exists(), false);\n\n/// ```\n\npub fn temp_dir<T: AsRef<str>>(prefix: T) -> FuResult<PathBuf> {\n\n loop {\n\n let suffix: String = iter::repeat_with(fastrand::alphanumeric).take(8).collect();\n\n let dir = PathBuf::from(format!(\"/tmp/{}-{}\", prefix.as_ref(), suffix));\n\n if !dir.exists() {\n\n return sys::mkdir(&dir);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 15, "score": 115318.26911379464 }, { "content": "#[allow(clippy::all)]\n\npub fn extract_strings<T: AsRef<Path>>(path: T, rx: &Regex) -> FuResult<Vec<String>> {\n\n let data = readstring(path)?;\n\n let caps = rx.captures(&data).ok_or(FileError::FailedToExtractString)?;\n\n let values = caps.iter().skip(1).filter_map(|x| x).filter_map(|x| Some(x.as_str().to_string())).collect::<Vec<String>>();\n\n if values.is_empty() {\n\n return Err(FileError::FailedToExtractString)?;\n\n }\n\n Ok(values)\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 16, "score": 114812.10141248678 }, { "content": "/// Write `&Vec<String>` data to a file as lines. Handles path expansion.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_writelines\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// let lines = vec![String::from(\"one\"), String::from(\"two\")];\n\n/// assert!(sys::writelines(&tmpfile, &lines).is_ok());\n\n/// assert_iter_eq(sys::readlines(&tmpfile).unwrap(), lines);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn writelines<T: AsRef<Path>>(path: T, data: &[String]) -> FuResult<()> {\n\n write(path, data.join(\"\\n\"))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 17, "score": 114688.23892133895 }, { "content": "/// Returns all lines from teh file as a `Vec<String>`.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_readlines\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::write(&tmpfile, \"this is a test\").is_ok());\n\n/// assert_iter_eq(sys::readlines(&tmpfile).unwrap(), vec![String::from(\"this is a test\")]);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn readlines<T: AsRef<Path>>(path: T) -> FuResult<Vec<String>> {\n\n match readlines_p(path)?.collect::<io::Result<Vec<String>>>() {\n\n Ok(data) => Ok(data),\n\n Err(err) => Err(err.into()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 18, "score": 114687.53400369597 }, { "content": "// Path extensions\n\n// -------------------------------------------------------------------------------------------------\n\npub trait PathExt {\n\n /// Return the path in an absolute clean form\n\n ///\n\n /// ### Examples\n\n /// ```\n\n /// use fungus::prelude::*;\n\n ///\n\n /// let home = user::home_dir().unwrap();\n\n /// assert_eq!(PathBuf::from(&home), sys::abs(\"~\").unwrap());\n\n /// 
```\n\n fn abs(&self) -> FuResult<PathBuf>;\n\n\n\n /// Returns a new absolute [`PathBuf`] based on the given absolute `Path`. The last element of\n\n /// the given path will be assumed to be a file name.\n\n ///\n\n /// ### Examples\n\n /// ```\n\n /// use fungus::prelude::*;\n\n ///\n\n /// let home = PathBuf::from(\"~\").abs().unwrap();\n", "file_path": "src/sys/path.rs", "rank": 19, "score": 113978.07883025357 }, { "content": "pub trait PathColorExt {\n\n fn black(&self) -> ColorString;\n\n fn red(&self) -> ColorString;\n\n fn green(&self) -> ColorString;\n\n fn yellow(&self) -> ColorString;\n\n fn blue(&self) -> ColorString;\n\n fn magenta(&self) -> ColorString;\n\n fn cyan(&self) -> ColorString;\n\n fn white(&self) -> ColorString;\n\n}\n\nimpl PathColorExt for Path {\n\n fn black(&self) -> ColorString {\n\n self.display().to_string().black()\n\n }\n\n\n\n fn red(&self) -> ColorString {\n\n self.display().to_string().red()\n\n }\n\n\n\n fn green(&self) -> ColorString {\n", "file_path": "src/sys/path.rs", "rank": 20, "score": 111565.26854116413 }, { "content": "/// Parse unix shell pathing e.g. $PATH, $XDG_DATA_DIRS or $XDG_CONFIG_DIRS.\n\n/// List of directories seperated by :\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let paths = vec![PathBuf::from(\"/foo1\"), PathBuf::from(\"/foo2/bar\")];\n\n/// assert_iter_eq(sys::parse_paths(\"/foo1:/foo2/bar\").unwrap(), paths);\n\n/// ```\n\npub fn parse_paths<T: AsRef<str>>(value: T) -> FuResult<Vec<PathBuf>> {\n\n let mut paths: Vec<PathBuf> = Vec::new();\n\n for dir in value.as_ref().split(':') {\n\n // Unix shell semantics: path element \"\" means \".\"\n\n let path = match dir == \"\" {\n\n true => sys::cwd()?,\n\n false => PathBuf::from(dir),\n\n };\n\n paths.push(path);\n\n }\n\n Ok(paths)\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 21, "score": 110993.68981456633 }, { "content": "pub trait OptionExt<T> {\n\n fn has<U>(&self, value: U) -> bool\n\n where\n\n U: PartialEq<T>;\n\n}\n\n\n\nimpl<T> OptionExt<T> for Option<T> {\n\n /// Returns `true` if the option is a [`Some`] value containing the given value.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// use fungus::core::*;\n\n ///\n\n /// let x: Option<u32> = Some(2);\n\n /// assert!(x.has(2));\n\n ///\n\n /// let x: Option<u32> = Some(3);\n\n /// assert!(!x.has(2));\n\n ///\n\n /// let x: Option<u32> = None;\n", "file_path": "src/core/option.rs", "rank": 22, "score": 107734.53646954786 }, { "content": "/// Iterator adaptors to simplify some operations\n\npub trait IteratorExt: Iterator {\n\n /// Consume the entire iterator eagerly up until but not including the last call to\n\n /// get None. Allows caller to then call next and get None.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// use fungus::core::*;\n\n ///\n\n /// assert_eq!(vec![0, 1, 2].into_iter().consume().next(), None);\n\n /// ```\n\n fn consume(self) -> Self\n\n where\n\n Self: Sized;\n\n\n\n /// Drop the first `n` items if positive from the iterator eagerly and then return the\n\n /// iterator. 
Drop the last `|n|` items if negative from the iterator eagerly and then\n\n /// return the iterator.\n\n ///\n\n /// # Examples\n\n /// ```\n", "file_path": "src/core/iter.rs", "rank": 23, "score": 107734.53646954786 }, { "content": "/// Unset an environment variable from the environment of the currently running process.\n\n/// Wraps std::env::remove_var\n\n///\n\n/// Note that while concurrent access to environment variables is safe in Rust,\n\n/// some platforms only expose inherently unsafe non-threadsafe APIs for\n\n/// inspecting the environment. As a result extra care needs to be taken when\n\n/// auditing calls to unsafe external FFI functions to ensure that any external\n\n/// environment accesses are properly synchronized with accesses in Rust.\n\n///\n\n/// Discussion of this unsafety on Unix may be found in:\n\n///\n\n/// - [Austin Group Bugzilla](http://austingroupbugs.net/view.php?id=188)\n\n/// - [GNU C library Bugzilla](https://sourceware.org/bugzilla/show_bug.cgi?id=15607#c2)\n\n///\n\n/// # Panics\n\n///\n\n/// This function may panic if `key` is empty, contains an ASCII equals sign\n\n/// `'='` or the NUL character `'\\0'`, or when the value contains the NUL\n\n/// character.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let key = \"KEY\";\n\n/// sys::set_var(key, \"VALUE\");\n\n/// assert_eq!(sys::var(key), Ok(\"VALUE\".to_string()));\n\n///\n\n/// sys::unset_var(key);\n\n/// assert!(!sys::flag(key));\n\n/// ```\n\npub fn unset_var<K: AsRef<OsStr>>(k: K) {\n\n env::remove_var(k)\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 24, "score": 107638.20448682545 }, { "content": "/// Wraps `writelines` allowing for setting the file's mode.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_writelines_p\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// let lines = vec![String::from(\"one\"), String::from(\"two\")];\n\n/// assert!(sys::writelines_p(&tmpfile, &lines, 0o666).is_ok());\n\n/// assert_iter_eq(sys::readlines(&tmpfile).unwrap(), lines);\n\n/// assert_eq!(tmpfile.mode().unwrap(), 0o100666);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn writelines_p<T: AsRef<Path>>(path: T, data: &[String], mode: u32) -> FuResult<()> {\n\n write(&path, data.join(\"\\n\"))?;\n\n chmod_p(&path)?.recurse(false).mode(mode).chmod()?;\n\n Ok(())\n\n}\n\n\n\n// Unit tests\n\n// -------------------------------------------------------------------------------------------------\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::prelude::*;\n\n\n\n // Test setup\n\n fn setup() -> PathBuf {\n\n let temp = PathBuf::from(\"tests/temp\").abs().unwrap();\n\n sys::mkdir(&temp).unwrap();\n\n temp\n\n }\n\n\n\n #[test]\n", "file_path": "src/sys/file.rs", "rank": 25, "score": 106567.15352815847 }, { "content": "/// Get the value of the given environment variable as a flag.\n\n///\n\n/// The flag will be considered `true` if the environment variable is set and the\n\n/// value is any value other than `0` or a case insensitive version of `false`.\n\n/// The flag will be considered `false` if the environment variable is unset or\n\n/// it is set and the value is a `0` or a case insensitive version of `false`.\n\n///\n\n/// ### Examples\n\n/// ```rust\n\n/// use fungus::prelude::*;\n\n///\n\n/// // Unset variables will be considered false\n\n/// 
assert!(!sys::flag(\"FOOBAR\"));\n\n///\n\n/// // Falsy values will be considered `false`\n\n/// sys::set_var(\"FOOBAR\", \"0\");\n\n/// assert!(!sys::flag(\"FOOBAR\"));\n\n/// sys::set_var(\"FOOBAR\", \"false\");\n\n/// assert!(!sys::flag(\"FOOBAR\"));\n\n/// sys::set_var(\"FOOBAR\", \"faLse\");\n\n/// assert!(!sys::flag(\"FOOBAR\"));\n\n///\n\n/// // Truthy values will be considered `true`\n\n/// sys::set_var(\"FOOBAR\", \"1\");\n\n/// assert!(sys::flag(\"FOOBAR\"));\n\n/// sys::set_var(\"FOOBAR\", \"BOB\");\n\n/// assert!(sys::flag(\"FOOBAR\"));\n\n/// sys::set_var(\"FOOBAR\", \"True\");\n\n/// assert!(sys::flag(\"FOOBAR\"));\n\n/// ```\n\npub fn flag<K: AsRef<OsStr>>(key: K) -> bool {\n\n flag_default(key, false)\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 26, "score": 103367.01323837582 }, { "content": "#[cfg(target_os = \"windows\")]\n\npub fn platform() -> Platform {\n\n Platform::Windows\n\n}\n\n\n", "file_path": "src/sys/os.rs", "rank": 27, "score": 102343.81159798111 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\npub fn arch() -> Arch {\n\n Arch::X86_64\n\n}\n\n\n", "file_path": "src/sys/os.rs", "rank": 28, "score": 102343.81159798111 }, { "content": "/// Determine if the environment has an attached tty\n\n///\n\n/// ### Examples\n\n/// ```rust\n\n/// use fungus::prelude::*;\n\n///\n\n/// println!(\"{:?}\", sys::hastty());\n\n/// ```\n\npub fn hastty() -> bool {\n\n unsafe { libc::isatty(libc::STDOUT_FILENO) != 0 }\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 29, "score": 102342.94322247054 }, { "content": "/// Returns the user ID for the current user.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::getuid() != 0);\n\n/// ```\n\npub fn getuid() -> u32 {\n\n unsafe { libc::getuid() }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 30, "score": 102342.94322247054 }, { "content": "/// Returns the group ID for the current user.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::getgid() != 0);\n\n/// ```\n\npub fn getgid() -> u32 {\n\n unsafe { libc::getgid() }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 31, "score": 102342.94322247054 }, { "content": "/// Returns the group effective ID for the current user.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::getegid() != 0);\n\n/// ```\n\npub fn getegid() -> u32 {\n\n unsafe { libc::getegid() }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 32, "score": 102342.89656337914 }, { "content": "/// Returns the user effective ID for the current user.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::geteuid() != 0);\n\n/// ```\n\npub fn geteuid() -> u32 {\n\n unsafe { libc::geteuid() }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 33, "score": 102342.89656337914 }, { "content": "/// Returns true if the system is a x86_64 system.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::x86_64(), true);\n\n/// ```\n\npub fn x86_64() -> bool {\n\n arch() == Arch::X86_64\n\n}\n\n\n\n/// Type of operating system rust is running on\n\n#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\npub enum Platform {\n\n Linux,\n\n MacOS,\n\n Windows,\n\n}\n\n\n\n/// Detect at runtime the type of operating system we are running.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::platform(), sys::Platform::Linux);\n\n/// 
```\n", "file_path": "src/sys/os.rs", "rank": 34, "score": 102342.85116543801 }, { "content": "/// Returns true if the system is a x86 system.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::x86(), false);\n\n/// ```\n\npub fn x86() -> bool {\n\n arch() == Arch::X86\n\n}\n\n\n", "file_path": "src/sys/os.rs", "rank": 35, "score": 102342.85116543801 }, { "content": "/// Return true if the current user is the root user.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(user::is_root(), false);\n\n/// ```\n\npub fn is_root() -> bool {\n\n getuid() == 0\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 36, "score": 102342.80697819743 }, { "content": "/// True if rust is running on windows\n\npub fn windows() -> bool {\n\n platform() == Platform::Windows\n\n}\n\n\n\n/// Type of operating system rust is running on\n\n#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]\n\npub struct Info {\n\n pub arch: Arch, // System architecture\n\n pub kernel: String, // Kernel version e.g. 5.3.13\n\n pub release: String, // Kernel release e.g. 5.3.13-arch1-1\n\n}\n\n\n", "file_path": "src/sys/os.rs", "rank": 37, "score": 102339.53736544892 }, { "content": "/// True if rust is running on linux\n\npub fn linux() -> bool {\n\n platform() == Platform::Linux\n\n}\n\n\n", "file_path": "src/sys/os.rs", "rank": 38, "score": 102339.53736544892 }, { "content": "/// True if rust is running on macos\n\npub fn macos() -> bool {\n\n platform() == Platform::MacOS\n\n}\n\n\n", "file_path": "src/sys/os.rs", "rank": 39, "score": 102339.53736544892 }, { "content": "/// Switches back to sudo root. Returns and error if not allowed.\n\n///\n\n/// ### Examples\n\n/// ```ignore\n\n/// use fungus::prelude::*;\n\n///\n\n/// user:sudo().unwrap();\n\n/// ```\n\npub fn sudo() -> FuResult<()> {\n\n switchuser(0, 0, 0, 0, 0, 0)\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 40, "score": 99998.84996086903 }, { "content": "/// Sets the environment variable `k` to the value `v` for the currently running\n\n/// process.\n\n/// Wraps std::env::set_var\n\n///\n\n/// Note that while concurrent access to environment variables is safe in Rust,\n\n/// some platforms only expose inherently unsafe non-threadsafe APIs for\n\n/// inspecting the environment. As a result, extra care needs to be taken when\n\n/// auditing calls to unsafe external FFI functions to ensure that any external\n\n/// environment accesses are properly synchronized with accesses in Rust.\n\n///\n\n/// Discussion of this unsafety on Unix may be found in:\n\n///\n\n/// - [Austin Group Bugzilla](http://austingroupbugs.net/view.php?id=188)\n\n/// - [GNU C library Bugzilla](https://sourceware.org/bugzilla/show_bug.cgi?id=15607#c2)\n\n///\n\n/// # Panics\n\n///\n\n/// This function may panic if `key` is empty, contains an ASCII equals sign\n\n/// `'='` or the NUL character `'\\0'`, or when the value contains the NUL\n\n/// character.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// sys::set_var(\"KEY\", \"VALUE\");\n\n/// assert_eq!(sys::var(\"KEY\"), Ok(\"VALUE\".to_string()));\n\n/// ```\n\npub fn set_var<K: AsRef<OsStr>, V: AsRef<OsStr>>(k: K, v: V) {\n\n env::set_var(k, v)\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 41, "score": 99456.73348694622 }, { "content": "/// Creates a new symbolic link. 
Handles path expansion and returns an absolute path to the\n\n/// link while still creating the symbolic link as a relative path to the target.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_symlink\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let file1 = tmpdir.mash(\"file1\");\n\n/// let link1 = tmpdir.mash(\"link1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::touch(&file1).is_ok());\n\n/// assert!(sys::symlink(&link1, &file1).is_ok());\n\n/// assert_eq!(link1.exists(), true);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn symlink<T: AsRef<Path>, U: AsRef<Path>>(link: T, target: U) -> FuResult<PathBuf> {\n\n let path = link.as_ref().abs()?;\n\n if path.exists() {\n\n return Err(PathError::exists_already(path).into());\n\n }\n\n unix::fs::symlink(target, &path)?;\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 42, "score": 98272.31273700966 }, { "content": "/// Switches back to the original user under the sudo mask with no way to go back.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::drop_sudo().is_ok());\n\n/// ```\n\npub fn drop_sudo() -> FuResult<()> {\n\n match getuid() {\n\n 0 => {\n\n let (ruid, rgid) = getrids(0, 0);\n\n switchuser(ruid, ruid, ruid, rgid, rgid, rgid)\n\n },\n\n _ => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 43, "score": 97837.78329675026 }, { "content": "/// Switches back to the original user under the sudo mask. Preserves the ability to raise sudo\n\n/// again.\n\n///\n\n/// ### Examples\n\n/// ```ignore\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::pause_sudo().is_ok());\n\n/// ```\n\npub fn pause_sudo() -> FuResult<()> {\n\n match getuid() {\n\n 0 => {\n\n let (ruid, rgid) = getrids(0, 0);\n\n switchuser(ruid, ruid, 0, rgid, rgid, 0)\n\n },\n\n _ => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 44, "score": 97837.70469510727 }, { "content": "/// Returns the full path to the current user's runtime directory.\n\n/// Used for non-essential, user-specific data files such as sockets, named pipes, etc.\n\n/// Must be owned by the user with an access mode of 0700.\n\n/// Filesystem fully featured by standards of OS.\n\n/// Must be on the local filesystem.\n\n/// May be subject to periodic cleanup.\n\n/// Modified every 6 hours or set sticky bit if persistence is desired.\n\n/// Can only exist for the duration of the user's login.\n\n/// Should not store large files as it may be mounted as a tmpfs.\n\n///\n\n/// Defaults to /tmp if $XDG_RUNTIME_DIR is not set\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// println!(\"runtime directory of the current user: {:?}\", user::runtime_dir());\n\n/// ```\n\npub fn runtime_dir() -> PathBuf {\n\n match sys::var(\"XDG_RUNTIME_DIR\") {\n\n Ok(x) => PathBuf::from(x),\n\n Err(_) => PathBuf::from(\"/tmp\"),\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 45, "score": 97837.39230797125 }, { "content": "/// Returns an iterator of (variable, value) pairs of strings, for all the\n\n/// environment variables of the current process.\n\n/// Wraps std::env::vars\n\n///\n\n/// The returned iterator contains a snapshot of the process's environment\n\n/// variables at the time of this invocation. 
Modifications to environment\n\n/// variables afterwards will not be reflected in the returned iterator.\n\n///\n\n/// # Panics\n\n///\n\n/// While iterating, the returned iterator will panic if any key or value in the\n\n/// environment is not valid unicode. If this is not desired, consider using\n\n/// [`env::vars_os()`].\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// for (key, value) in sys::vars() {\n\n/// println!(\"{}: {}\", key, value);\n\n/// }\n\n/// ```\n\npub fn vars() -> env::Vars {\n\n env::vars()\n\n}\n\n\n\n// Unit tests\n\n// -------------------------------------------------------------------------------------------------\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::prelude::*;\n\n\n\n #[test]\n\n fn test_tty() {\n\n assert!(sys::hastty() || !sys::hastty());\n\n }\n\n\n\n #[test]\n\n fn test_flag() {\n\n sys::unset_var(\"FLAG\");\n\n\n\n // Falsy\n", "file_path": "src/sys/env.rs", "rank": 46, "score": 97073.30919916945 }, { "content": "/// Returns the arguments that this program was started with (normally passed\n\n/// via the command line).\n\n/// Wraps std::env::args\n\n///\n\n/// The first element is traditionally the path of the executable, but it can be\n\n/// set to arbitrary text, and may not even exist.\n\n///\n\n/// # Panics\n\n/// The returned iterator will panic during iteration if any argument to the\n\n/// process is not valid unicode. If this is not desired,\n\n/// use the [`args_os`] function instead.\n\n///\n\n/// ### Examples\n\n/// ```rust\n\n/// use fungus::prelude::*;\n\n///\n\n/// // Prints each argument on a separate line\n\n/// for argument in sys::args() {\n\n/// println!(\"{}\", argument);\n\n/// }\n\n/// ```\n\npub fn args() -> env::Args {\n\n env::args()\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 47, "score": 97071.14456706039 }, { "content": "/// Get the current user\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::current().is_ok());\n\n/// ```\n\npub fn current() -> FuResult<User> {\n\n let user = lookup(unsafe { libc::getuid() })?;\n\n Ok(user)\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 48, "score": 94910.61422115531 }, { "content": "/// Get system information\n\npub fn info() -> FuResult<Info> {\n\n // Extract kernel release and version\n\n let data = sys::readstring(\"/proc/version\")?;\n\n let release = data.split(' ').nth(2).ok_or(OsError::KernelReleaseNotFound)?;\n\n let ver_len = release.find('-').ok_or(OsError::KernelVersionNotFound)?;\n\n let (version, _) = release.split_at(ver_len);\n\n\n\n Ok(Info { arch: arch(), kernel: version.to_string(), release: release.to_string() })\n\n}\n\n\n\n// Substitute stdout and stderr\n\npub struct Stdio<T: io::Write, U: io::Write> {\n\n pub out: T,\n\n pub err: U,\n\n}\n\nimpl<T: io::Write, U: io::Write> Stdio<T, U> {\n\n pub fn new(out: T, err: U) -> Self {\n\n Stdio { out, err }\n\n }\n\n}\n", "file_path": "src/sys/os.rs", "rank": 49, "score": 94907.16039059986 }, { "content": "/// Get the value of the given environment variable as a flag using a default if not set\n\n///\n\n/// The flag will be considered `true` if the environment variable isn't set and the\n\n/// the default is set to `true` or if the variablse is set and the value is any value\n\n/// other than `0` or a case insensitive version of `false`.\n\n///\n\n/// The flag will be considered `false` if the environment variable isn't set and the\n\n/// the default is set to `false` or if the variablse is set and the value is a `0` 
or\n\n/// a case insensitive version of `false`.\n\n///\n\n/// ### Examples\n\n/// ```rust\n\n/// use fungus::prelude::*;\n\n///\n\n/// // Unset variables will be default to the given value\n\n/// assert!(!sys::flag_default(\"FOOBAR\", false));\n\n/// assert!(sys::flag_default(\"FOOBAR\", true));\n\n///\n\n/// // Disabled variables will always be `false` despite default\n\n/// sys::set_var(\"FOOBAR\", \"0\");\n\n/// assert!(!sys::flag_default(\"FOOBAR\", false));\n\n/// assert!(!sys::flag_default(\"FOOBAR\", true));\n\n///\n\n/// // Enabled variables will always be `true` despite default\n\n/// sys::set_var(\"FOOBAR\", \"1\");\n\n/// assert!(sys::flag_default(\"FOOBAR\", false));\n\n/// assert!(sys::flag_default(\"FOOBAR\", true));\n\n/// ```\n\npub fn flag_default<K: AsRef<OsStr>>(key: K, default: bool) -> bool {\n\n !matches!(env::var(key).unwrap_or_else(|_| default.to_string()).to_lowercase().as_str(), \"false\" | \"0\")\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 50, "score": 93789.34470305979 }, { "content": "/// Returns the full path to the directory of the current running executable.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let dir = sys::exe().unwrap().dir().unwrap();\n\n/// assert_eq!(exec::dir().unwrap(), dir);\n\n/// ```\n\npub fn dir() -> FuResult<PathBuf> {\n\n Ok(sys::exe()?.dir()?)\n\n}\n\n\n", "file_path": "src/sys/exec.rs", "rank": 51, "score": 92914.7471180436 }, { "content": "/// Convert the given value in bytes to increments of TiB\n\npub fn to_tib(value: u64) -> f64 {\n\n value as f64 / TEBIBYTE as f64\n\n}\n\n\n\n// Unit tests\n\n// -------------------------------------------------------------------------------------------------\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::prelude::*;\n\n\n\n #[test]\n\n fn test_to_human() {\n\n assert_eq!(unit::bytes::to_human(10), \"10 bytes\");\n\n assert_eq!(unit::bytes::to_human(1024), \"1 KiB\");\n\n assert_eq!(unit::bytes::to_human(5024), \"4.91 KiB\");\n\n assert_eq!(unit::bytes::to_human(3 * unit::TEBIBYTE), \"3 TiB\");\n\n assert_eq!(unit::bytes::to_human(3 * unit::GIBIBYTE + 500), \"3 GiB\");\n\n assert_eq!(unit::bytes::to_human(3 * unit::GIBIBYTE + 500 * unit::MEBIBYTE), \"3.49 GiB\");\n\n assert_eq!(unit::bytes::to_human(3 * unit::MEBIBYTE + 50000), \"3.05 MiB\");\n\n assert_eq!(unit::bytes::to_human(3195728), \"3.05 MiB\");\n", "file_path": "src/unit/bytes.rs", "rank": 52, "score": 92339.98992838155 }, { "content": "/// Convert the given value in bytes to increments of GiB\n\npub fn to_gib(value: u64) -> f64 {\n\n value as f64 / GIBIBYTE as f64\n\n}\n\n\n", "file_path": "src/unit/bytes.rs", "rank": 53, "score": 92339.98992838155 }, { "content": "/// Convert the given value in bytes to increments of KiB\n\npub fn to_kib(value: u64) -> f64 {\n\n value as f64 / KIBIBYTE as f64\n\n}\n\n\n", "file_path": "src/unit/bytes.rs", "rank": 54, "score": 92339.98992838155 }, { "content": "/// Convert the given value in bytes to increments of MiB\n\npub fn to_mib(value: u64) -> f64 {\n\n value as f64 / MEBIBYTE as f64\n\n}\n\n\n", "file_path": "src/unit/bytes.rs", "rank": 55, "score": 92339.98992838155 }, { "content": "/// Returns the full path to the current user's home directory.\n\n///\n\n/// Alternate implementation as the Rust std::env::home_dir implementation which has be deprecated\n\n/// https://doc.rust-lang.org/std/env/fn.home_dir.html\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::home_dir().is_ok());\n\n/// ```\n\npub fn 
home_dir() -> FuResult<PathBuf> {\n\n let home = sys::var(\"HOME\")?;\n\n let dir = PathBuf::from(home);\n\n Ok(dir)\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 56, "score": 91060.23718362577 }, { "content": "/// Returns the full path to the current user's config directory.\n\n/// Where user-specific configurations should be written (analogous to /etc).\n\n/// Defaults to $HOME/.config.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::config_dir().is_ok());\n\n/// ```\n\npub fn config_dir() -> FuResult<PathBuf> {\n\n Ok(match sys::var(\"XDG_CONFIG_HOME\") {\n\n Ok(x) => PathBuf::from(x),\n\n Err(_) => home_dir()?.mash(\".config\"),\n\n })\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 57, "score": 91057.68217426643 }, { "content": "/// Returns the full path to the current user's data directory.\n\n/// Where user-specific data files should be written (analogous to /usr/share).\n\n/// Defaults to $HOME/.local/share\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::data_dir().is_ok());\n\n/// ```\n\npub fn data_dir() -> FuResult<PathBuf> {\n\n Ok(match sys::var(\"XDG_DATA_HOME\") {\n\n Ok(x) => PathBuf::from(x),\n\n Err(_) => home_dir()?.mash(\".local/share\"),\n\n })\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 58, "score": 91057.58381831637 }, { "content": "/// Returns the full path to the current user's cache directory.\n\n/// Where user-specific non-essential (cached) data should be written (analogous to /var/cache).\n\n/// Defaults to $HOME/.cache.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::cache_dir().is_ok());\n\n/// ```\n\npub fn cache_dir() -> FuResult<PathBuf> {\n\n Ok(match sys::var(\"XDG_CACHE_HOME\") {\n\n Ok(x) => PathBuf::from(x),\n\n Err(_) => home_dir()?.mash(\".cache\"),\n\n })\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 59, "score": 91057.55250658814 }, { "content": "/// Returns the full filesystem path of the current running executable.\n\n/// Wraps std::env::current_exec\n\n///\n\n/// ### Platform-specific behavior\n\n///\n\n/// If the executable was invoked through a symbolic link, some platforms will\n\n/// return the path of the symbolic link and other platforms will return the\n\n/// path of the symbolic link’s target.\n\n///\n\n/// # Errors\n\n///\n\n/// Acquiring the path of the current executable is a platform-specific operation\n\n/// that can fail for a good number of reasons. 
Some errors can include, but not\n\n/// be limited to, filesystem operations failing or general syscall failures.\n\n///\n\n/// On Linux systems, if this is compiled as `foo`:\n\n///\n\n/// ```bash\n\n/// $ rustc foo.rs\n\n/// $ ./foo\n\n/// Ok(\"/home/alex/foo\")\n\n/// ```\n\n///\n\n/// And you make a hard link of the program:\n\n///\n\n/// ```bash\n\n/// $ ln foo bar\n\n/// ```\n\n///\n\n/// When you run it, you won’t get the path of the original executable, you’ll\n\n/// get the path of the hard link:\n\n///\n\n/// ```bash\n\n/// $ ./bar\n\n/// Ok(\"/home/alex/bar\")\n\n/// ```\n\n///\n\n/// This sort of behavior has been known to [lead to privilege escalation] when\n\n/// used incorrectly.\n\n///\n\n/// [lead to privilege escalation]: https://securityvulns.com/Wdocument183.html\n\n///\n\n/// ### Examples\n\n/// ```rust\n\n/// use fungus::prelude::*;\n\n///\n\n/// println!(\"current executable path: {:?}\", sys::exe().unwrap());\n\n/// ```\n\npub fn exe() -> io::Result<PathBuf> {\n\n env::current_exe()\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 60, "score": 90345.10593090221 }, { "content": "/// Set the group ID for the current user.\n\n///\n\n/// ### Examples\n\n/// ```ignore\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::setgid(user::getgid()).is_ok());\n\n/// ```\n\npub fn setgid(gid: u32) -> FuResult<()> {\n\n match unsafe { libc::setgid(gid) } {\n\n 0 => Ok(()),\n\n _ => Err(io::Error::last_os_error().into()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 61, "score": 90344.66226451869 }, { "content": "/// Set the user ID for the current user.\n\n///\n\n/// ### Examples\n\n/// ```ignore\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::setuid(user::getuid()).is_ok());\n\n/// ```\n\npub fn setuid(uid: u32) -> FuResult<()> {\n\n match unsafe { libc::setuid(uid) } {\n\n 0 => Ok(()),\n\n _ => Err(io::Error::last_os_error().into()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 62, "score": 90344.66226451869 }, { "content": "/// Set the group effective ID for the current user.\n\n///\n\n/// ### Examples\n\n/// ```ignore\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::setegid(user::getegid()).is_ok());\n\n/// ```\n\npub fn setegid(egid: u32) -> FuResult<()> {\n\n match unsafe { libc::setegid(egid) } {\n\n 0 => Ok(()),\n\n _ => Err(io::Error::last_os_error().into()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 63, "score": 90344.62035777941 }, { "content": "/// Set the user effective ID for the current user.\n\n///\n\n/// ### Examples\n\n/// ```ignore\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::seteuid(user::geteuid()).is_ok());\n\n/// ```\n\npub fn seteuid(euid: u32) -> FuResult<()> {\n\n match unsafe { libc::seteuid(euid) } {\n\n 0 => Ok(()),\n\n _ => Err(io::Error::last_os_error().into()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 64, "score": 90344.62035777941 }, { "content": "/// Returns the current working directory as a [`PathBuf`].\n\n/// Wraps std::env::current_dir\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an [`Err`] if the current working directory value is invalid.\n\n/// Possible cases:\n\n///\n\n/// * Current directory does not exist.\n\n/// * There are insufficient permissions to access the current directory.\n\n///\n\n/// ### Examples\n\n/// ```rust\n\n/// use fungus::prelude::*;\n\n///\n\n/// println!(\"current working directory: {:?}\", sys::cwd().unwrap());\n\n/// ```\n\npub fn cwd() -> io::Result<PathBuf> {\n\n env::current_dir()\n\n}\n\n\n", 
"file_path": "src/sys/env.rs", "rank": 65, "score": 90343.86356052199 }, { "content": "#[test]\n\nfn test_use_syntax() {\n\n let home = user::home_dir().unwrap();\n\n assert_eq!(PathBuf::from(&home), sys::abs(\"~\").unwrap());\n\n}\n", "file_path": "tests/integration.rs", "rank": 66, "score": 88126.714307349 }, { "content": "/// Returns the current user's path directories.\n\n/// List of directories seperated by :\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::path_dirs().is_ok());\n\n/// ```\n\npub fn path_dirs() -> FuResult<Vec<PathBuf>> {\n\n sys::parse_paths(sys::var(\"PATH\")?)\n\n}\n\n\n\n// User functions\n\n// -------------------------------------------------------------------------------------------------\n\n\n\n/// User provides options for a specific user.\n\n#[derive(Debug, Clone, Default)]\n\npub struct User {\n\n pub uid: u32, // user id\n\n pub gid: u32, // user group id\n\n pub name: String, // user name\n\n pub home: PathBuf, // user home\n\n pub shell: PathBuf, // user shell\n\n pub ruid: u32, // real user id behind sudo\n\n pub rgid: u32, // real user group id behind sudo\n\n pub realname: String, // real user name behind sudo\n\n pub realhome: PathBuf, // real user home behind sudo\n\n pub realshell: PathBuf, // real user shell behind sudo\n", "file_path": "src/sys/user.rs", "rank": 67, "score": 86766.5726875982 }, { "content": "/// Returns the current user's config directories.\n\n/// List of directories seperated by : (analogous to PATH).\n\n/// Defaults to /etc/xdg\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::config_dirs().is_ok());\n\n/// ```\n\npub fn config_dirs() -> FuResult<Vec<PathBuf>> {\n\n Ok(match sys::var(\"XDG_CONFIG_DIRS\") {\n\n Ok(x) => sys::parse_paths(x)?,\n\n Err(_) => vec![PathBuf::from(\"/etc/xdg\")],\n\n })\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 68, "score": 86766.37848666421 }, { "content": "/// Returns the current user's data directories.\n\n/// List of directories seperated by : (analogous to PATH).\n\n/// Defaults to /usr/local/share:/usr/share.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::data_dirs().is_ok());\n\n/// ```\n\npub fn data_dirs() -> FuResult<Vec<PathBuf>> {\n\n Ok(match sys::var(\"XDG_DATA_DIRS\") {\n\n Ok(x) => sys::parse_paths(x)?,\n\n Err(_) => vec![PathBuf::from(\"/usr/local/share:/usr/share\")],\n\n })\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 69, "score": 86766.27293345453 }, { "content": "/// Lookup a user by user id\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert!(user::lookup(user::getuid()).is_ok());\n\n/// ```\n\npub fn lookup(uid: u32) -> FuResult<User> {\n\n // Get the libc::passwd by user id\n\n let mut buf = vec![0; 2048];\n\n let mut res = ptr::null_mut::<libc::passwd>();\n\n let mut passwd = unsafe { mem::zeroed::<libc::passwd>() };\n\n unsafe {\n\n libc::getpwuid_r(uid, &mut passwd, buf.as_mut_ptr(), buf.len(), &mut res);\n\n }\n\n if res.is_null() || res != &mut passwd {\n\n return Err(UserError::does_not_exist_by_id(uid).into());\n\n }\n\n\n\n // Convert libc::passwd object into a User object\n\n //----------------------------------------------------------------------------------------------\n\n let gid = passwd.pw_gid;\n\n\n\n // User name for the lookedup user. We always want this and it should always exist.\n\n let username = unsafe { sys::libc::to_string(passwd.pw_name)? 
};\n\n\n\n // Will almost always be a single 'x' as the passwd is in the shadow database\n", "file_path": "src/sys/user.rs", "rank": 70, "score": 86221.41578234176 }, { "content": "/// Returns true if the new mode is revoking permissions as compared to the old mode as pertains\n\n/// directory read/execute permissions. This is useful when recursively modifying file permissions.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::revoking_mode(0o0777, 0o0777), false);\n\n/// ```\n\npub fn revoking_mode(old: u32, new: u32) -> bool {\n\n old & 0o0500 > new & 0o0500 || old & 0o0050 > new & 0o0050 || old & 0o0005 > new & 0o0005\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 71, "score": 82476.63143107799 }, { "content": "/// Returns true if the given path exists and is a symlink. Handles path expansion\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"path_doc_is_symlink\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let file1 = tmpdir.mash(\"file1\");\n\n/// let link1 = tmpdir.mash(\"link1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::touch(&file1).is_ok());\n\n/// assert!(sys::symlink(&link1, &file1).is_ok());\n\n/// assert_eq!(sys::is_symlink(link1), true);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn is_symlink<T: AsRef<Path>>(path: T) -> bool {\n\n match path.as_ref().abs() {\n\n Ok(abs) => readlink(abs).is_ok(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 72, "score": 80667.83649618283 }, { "content": "/// Returns true if the given path exists and is readonly. Handles path expansion\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"path_doc_is_readonly\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// let file1 = tmpdir.mash(\"file1\");\n\n/// assert!(sys::touch_p(&file1, 0o644).is_ok());\n\n/// assert_eq!(file1.is_readonly(), false);\n\n/// assert!(sys::chmod_p(&file1).unwrap().readonly().chmod().is_ok());\n\n/// assert_eq!(file1.mode().unwrap(), 0o100444);\n\n/// assert_eq!(sys::is_readonly(&file1), true);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn is_readonly<T: AsRef<Path>>(path: T) -> bool {\n\n match metadata(path) {\n\n Ok(x) => x.permissions().readonly(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 73, "score": 80666.97809084367 }, { "content": "/// Returns true if the given path exists and is an executable. 
Handles path expansion\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"path_doc_is_exec\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// let file1 = tmpdir.mash(\"file1\");\n\n/// assert!(sys::touch_p(&file1, 0o644).is_ok());\n\n/// assert_eq!(sys::is_exec(&file1), false);\n\n/// assert!(sys::chmod_p(&file1).unwrap().add_x().chmod().is_ok());\n\n/// assert_eq!(file1.mode().unwrap(), 0o100755);\n\n/// assert_eq!(file1.is_exec(), true);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn is_exec<T: AsRef<Path>>(path: T) -> bool {\n\n match metadata(path) {\n\n Ok(x) => x.permissions().mode() & 0o111 != 0,\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 74, "score": 80666.97809084367 }, { "content": "/// Assert that the elements of the given iterables are equal and `Panics` when when not.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use fungus::core::*;\n\n///\n\n/// assert_iter_eq(vec![1, 2, 3].into_iter(), vec![1, 2, 3].into_iter());\n\n/// ```\n\npub fn assert_iter_eq<T, U>(x: T, y: U)\n\nwhere\n\n T: IntoIterator,\n\n U: IntoIterator,\n\n T::Item: fmt::Debug+PartialEq<U::Item>,\n\n U::Item: fmt::Debug,\n\n{\n\n let mut x = x.into_iter();\n\n let mut y = y.into_iter();\n\n loop {\n\n match (x.next(), y.next()) {\n\n // Match done\n\n (None, None) => return,\n\n\n\n // Match items\n\n (a, b) => {\n\n let equal = match (&a, &b) {\n\n // Compare the two items\n\n (&Some(ref a), &Some(ref b)) => a == b,\n\n\n\n // Different lengths\n\n _ => false,\n\n };\n\n assert!(equal, \"Iterators not equal {:?} != {:?}\", a, b);\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/core/iter.rs", "rank": 75, "score": 80665.38840206066 }, { "content": "/// Returns true if the given path exists. Handles path expansion.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::exists(\"/etc\"), true);\n\n/// ```\n\npub fn exists<T: AsRef<Path>>(path: T) -> bool {\n\n metadata(path).is_ok()\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 76, "score": 80665.34756992894 }, { "content": "/// Returns true if the given path exists and is a directory. Handles path expansion.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::is_dir(\"/etc\"), true);\n\n/// ```\n\npub fn is_dir<T: AsRef<Path>>(path: T) -> bool {\n\n match metadata(path) {\n\n Ok(x) => x.is_dir(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 77, "score": 80665.30777159406 }, { "content": "/// Returns true if the given path exists and is a file. 
Handles path expansion\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::is_file(\"/etc/hosts\"), true);\n\n/// ```\n\npub fn is_file<T: AsRef<Path>>(path: T) -> bool {\n\n match metadata(path) {\n\n Ok(x) => x.is_file(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 78, "score": 80665.26896828596 }, { "content": "/// Returns the real IDs for the given user.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(user::getrids(user::getuid(), user::getgid()), (user::getuid(), user::getgid()));\n\n/// ```\n\npub fn getrids(uid: u32, gid: u32) -> (u32, u32) {\n\n match uid {\n\n 0 => match (sys::var(\"SUDO_UID\"), sys::var(\"SUDO_GID\")) {\n\n (Ok(u), Ok(g)) => match (u.parse::<u32>(), g.parse::<u32>()) {\n\n (Ok(u), Ok(g)) => (u, g),\n\n _ => (uid, gid),\n\n },\n\n _ => (uid, gid),\n\n },\n\n _ => (uid, gid),\n\n }\n\n}\n\n\n", "file_path": "src/sys/user.rs", "rank": 79, "score": 80665.23112314931 }, { "content": "/// Returns true if the given path exists and is a symlinked directory. Handles path\n\n/// expansion\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"path_doc_is_symlink_dir\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let dir1 = tmpdir.mash(\"dir1\");\n\n/// let link1 = tmpdir.mash(\"link1\");\n\n/// assert!(sys::mkdir(&dir1).is_ok());\n\n/// assert!(sys::symlink(&link1, &dir1).is_ok());\n\n/// assert_eq!(sys::is_symlink_dir(link1), true);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn is_symlink_dir<T: AsRef<Path>>(path: T) -> bool {\n\n match path.as_ref().abs() {\n\n Ok(abs) => match readlink(&abs) {\n\n Ok(target) => match target.abs_from(&abs) {\n\n Ok(x) => x.is_dir(),\n\n Err(_) => false,\n\n },\n\n Err(_) => false,\n\n },\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 80, "score": 79060.10723764767 }, { "content": "/// Returns true if the given path exists and is a symlinked file. Handles path\n\n/// expansion\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"path_doc_is_symlink_file\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let file1 = tmpdir.mash(\"file1\");\n\n/// let link1 = tmpdir.mash(\"link1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::touch(&file1).is_ok());\n\n/// assert!(sys::symlink(&link1, &file1).is_ok());\n\n/// assert_eq!(sys::is_symlink_file(link1), true);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn is_symlink_file<T: AsRef<Path>>(path: T) -> bool {\n\n match path.as_ref().abs() {\n\n Ok(abs) => match readlink(&abs) {\n\n Ok(target) => match target.abs_from(&abs) {\n\n Ok(x) => x.is_file(),\n\n Err(_) => false,\n\n },\n\n Err(_) => false,\n\n },\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 81, "score": 79059.94775035496 }, { "content": "/// Removes the given empty directory or file. Handles path expansion. 
Does\n\n/// not follow symbolic links but rather removes the links themselves.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_remove\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::remove(&tmpdir).is_ok());\n\n/// assert_eq!(tmpdir.exists(), false);\n\n/// ```\n\npub fn remove<T: AsRef<Path>>(path: T) -> FuResult<()> {\n\n let path = path.as_ref().abs()?;\n\n if let Ok(meta) = fs::metadata(&path) {\n\n if meta.is_file() {\n\n fs::remove_file(path)?;\n\n } else if meta.is_dir() {\n\n fs::remove_dir(path)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 82, "score": 79059.3222135462 }, { "content": "/// Removes the given directory after removing all of its contents. Handles path expansion. Does\n\n/// not follow symbolic links but rather removes the links themselves.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_remove_all\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// assert_eq!(tmpdir.exists(), false);\n\n/// ```\n\npub fn remove_all<T: AsRef<Path>>(path: T) -> FuResult<()> {\n\n let path = path.as_ref().abs()?;\n\n if path.exists() {\n\n fs::remove_dir_all(path)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 83, "score": 79059.3222135462 }, { "content": "/// Changes the current working directory to the specified path.\n\n/// Provides basic path expansion\n\n///\n\n/// Returns an [`Err`] if the operation fails.\n\n///\n\n/// ### Examples\n\n/// ```rust,ignore\n\n/// use fungus::prelude::*;\n\n///\n\n/// sys::set_cwd(\"~/\").unwrap();\n\n/// println!(\"current working directory: {:?}\", sys::cwd().unwrap());\n\n/// ```\n\npub fn set_cwd<P: AsRef<Path>>(path: P) -> FuResult<()> {\n\n let abs = path.as_ref().abs()?;\n\n Ok(env::set_current_dir(abs)?)\n\n}\n\n\n", "file_path": "src/sys/env.rs", "rank": 84, "score": 77554.34389284112 }, { "content": "/// Returns true if the given `path` is a gzipped file\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"gzip_is_gzipped_doc\");\n\n/// let gzipped = tmpdir.mash(\"../../alpine-base.tgz\");\n\n/// assert_eq!(gzip::is_gzipped(gzipped).unwrap(), true);\n\n/// ```\n\npub fn is_gzipped<T: AsRef<Path>>(path: T) -> FuResult<bool> {\n\n let path = path.as_ref().abs()?;\n\n\n\n // Read the first 2 bytes of the file\n\n let mut f = File::open(&path)?;\n\n let mut buffer = [0; 2];\n\n f.read_exact(&mut buffer)?;\n\n\n\n // Test against the gzip header signature 0x1f8b\n\n if buffer == [0x1f, 0x8b] || buffer == [0x8b, 0x1f] {\n\n return Ok(true);\n\n }\n\n Ok(false)\n\n}\n\n\n\n// Unit tests\n\n// -------------------------------------------------------------------------------------------------\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::prelude::*;\n", "file_path": "src/enc/gzip.rs", "rank": 85, "score": 75926.01010370604 }, { "content": "/// Create [`Chmod`] options providing path expansion, globbing, recursion and error\n\n/// tracing while setting the `mode`. This function provides more control over options\n\n/// than the `chmod` function. 
Changes are not invoked until the `chmod` method is called.\n\n/// Symbolic links will have the target mode changed.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_chmod_p\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let file1 = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::touch(&file1).is_ok());\n\n/// assert!(sys::chmod_p(&file1).unwrap().mode(0o644).chmod().is_ok());\n\n/// assert_eq!(file1.mode().unwrap(), 0o100644);\n\n/// assert!(sys::chmod_p(&file1).unwrap().mode(0o555).chmod().is_ok());\n\n/// assert_eq!(file1.mode().unwrap(), 0o100555);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn chmod_p<T: AsRef<Path>>(path: T) -> FuResult<Chmod> {\n\n let path = path.as_ref().abs()?;\n\n let mode = match path.mode() {\n\n Ok(x) => x,\n\n _ => 0o644,\n\n };\n\n Ok(Chmod { path, mode, dirs: false, files: false, recursive: true })\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 86, "score": 75925.51585590983 }, { "content": "/// Returns the user ID of the owner of this file. Handles path expansion.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::uid(\"/etc\").unwrap(), 0);\n\n/// ```\n\npub fn uid<T: AsRef<Path>>(path: T) -> FuResult<u32> {\n\n Ok(metadata(path)?.uid())\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 87, "score": 75922.79212772562 }, { "content": "/// Returns the group ID of the owner of this file. Handles path expansion.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// assert_eq!(sys::gid(\"/etc\").unwrap(), 0);\n\n/// ```\n\npub fn gid<T: AsRef<Path>>(path: T) -> FuResult<u32> {\n\n Ok(metadata(path)?.gid())\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 88, "score": 75922.79212772562 }, { "content": "/// Returns the absolute path for the given link target. 
Handles path expansion\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"path_doc_readlink\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let file1 = tmpdir.mash(\"file1\");\n\n/// let link1 = tmpdir.mash(\"link1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::touch(&file1).is_ok());\n\n/// assert!(sys::symlink(&link1, &file1).is_ok());\n\n/// assert_eq!(sys::readlink(link1).unwrap(), file1);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn readlink<T: AsRef<Path>>(path: T) -> FuResult<PathBuf> {\n\n let abs = path.as_ref().abs()?;\n\n let abs = fs::read_link(abs)?;\n\n Ok(abs)\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 89, "score": 74519.69289087798 }, { "content": "/// Return the path in an absolute clean form\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let home = user::home_dir().unwrap();\n\n/// assert_eq!(PathBuf::from(&home), sys::abs(\"~\").unwrap());\n\n/// ```\n\npub fn abs<T: AsRef<Path>>(path: T) -> FuResult<PathBuf> {\n\n let path = path.as_ref();\n\n\n\n // Check for empty string\n\n if path.empty() {\n\n return Err(PathError::Empty.into());\n\n }\n\n\n\n // Expand home directory\n\n let mut path_buf = path.expand()?;\n\n\n\n // Trim protocol prefix if needed\n\n path_buf = path_buf.trim_protocol();\n\n\n\n // Clean the resulting path\n\n path_buf = path_buf.clean()?;\n\n\n\n // Expand relative directories if needed\n\n if !path_buf.is_absolute() {\n\n let mut curr = sys::cwd()?;\n", "file_path": "src/sys/path.rs", "rank": 90, "score": 74518.16995278977 }, { "content": "/// Create an empty file similar to the linux touch command. Handles path expansion.\n\n/// Uses default file creation permissions 0o666 - umask usually ends up being 0o644.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_touch\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::touch(&tmpfile).is_ok());\n\n/// assert_eq!(tmpfile.exists(), true);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn touch<T: AsRef<Path>>(path: T) -> FuResult<PathBuf> {\n\n let path = path.as_ref().abs()?;\n\n if !path.exists() {\n\n File::create(&path)?;\n\n }\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 91, "score": 74517.94773274621 }, { "content": "/// Expand all environment variables in the path as well as the home directory.\n\n///\n\n/// WARNING: Does not expand partials e.g. \"/foo${BAR}ing/blah\" only complete components\n\n/// e.g. 
\"/foo/${BAR}/blah\"\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let home = user::home_dir().unwrap();\n\n/// assert_eq!(PathBuf::from(&home).mash(\"foo\"), PathBuf::from(\"~/foo\").expand().unwrap());\n\n/// ```\n\npub fn expand<T: AsRef<Path>>(path: T) -> FuResult<PathBuf> {\n\n let mut path = path.as_ref().to_path_buf();\n\n let pathstr = path.to_string()?;\n\n\n\n // Expand home directory\n\n match pathstr.matches('~').count() {\n\n // Only home expansion at the begining of the path is allowed\n\n cnt if cnt > 1 => return Err(PathError::multiple_home_symbols(path).into()),\n\n\n\n // Invalid home expansion requested\n\n cnt if cnt == 1 && !path.has_prefix(\"~/\") && pathstr != \"~\" => {\n\n return Err(PathError::invalid_expansion(path).into());\n\n },\n\n\n\n // Single tilda only\n\n cnt if cnt == 1 && pathstr == \"~\" => {\n\n path = user::home_dir()?;\n\n },\n\n\n\n // Replace prefix with home directory\n", "file_path": "src/sys/path.rs", "rank": 92, "score": 74517.06227363316 }, { "content": "/// Creates the given directory and any parent directories needed, handling path expansion and\n\n/// returning an absolute path created.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_mkdir\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert_eq!(tmpdir.exists(), true);\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn mkdir<T: AsRef<Path>>(path: T) -> FuResult<PathBuf> {\n\n let path = path.as_ref().abs()?;\n\n if !path.exists() {\n\n fs::create_dir_all(&path)?;\n\n }\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 93, "score": 74516.68740751047 }, { "content": "/// Returns the contents of the `path` as a `Vec<u8>`.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_readbytes\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// let tmpfile = tmpdir.mash(\"file1\");\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// assert!(sys::write(&tmpfile, \"this is a test\").is_ok());\n\n/// assert_eq!(str::from_utf8(&sys::readbytes(&tmpfile).unwrap()).unwrap(), \"this is a test\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn readbytes<T: AsRef<Path>>(path: T) -> FuResult<Vec<u8>> {\n\n let path = path.as_ref().abs()?;\n\n match std::fs::read(path) {\n\n Ok(data) => Ok(data),\n\n Err(err) => Err(err.into()),\n\n }\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 94, "score": 73041.30646875518 }, { "content": "/// Returns the Metadata object for the `Path` if it exists else an error. 
Handles path\n\n/// expansion.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let meta = sys::metadata(Path::new(\"/etc\")).unwrap();\n\n/// assert_eq!(meta.is_dir(), true);\n\n/// ```\n\npub fn metadata<T: AsRef<Path>>(path: T) -> FuResult<fs::Metadata> {\n\n let abs = path.as_ref().abs()?;\n\n let meta = fs::metadata(abs)?;\n\n Ok(meta)\n\n}\n\n\n", "file_path": "src/sys/path.rs", "rank": 95, "score": 73039.48449561239 }, { "content": "/// Computes and returns the digest of the given `path`.\n\n///\n\n/// ### Examples\n\n/// ```\n\n/// use fungus::prelude::*;\n\n///\n\n/// let tmpdir = PathBuf::from(\"tests/temp\").abs().unwrap().mash(\"file_doc_digest\");\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// assert!(sys::mkdir(&tmpdir).is_ok());\n\n/// let file1 = tmpdir.mash(\"file1\");\n\n/// let file2 = tmpdir.mash(\"file2\");\n\n/// assert!(sys::write(&file1, \"this is a test\").is_ok());\n\n/// assert!(sys::copyfile(&file1, &file2).is_ok());\n\n/// assert_iter_eq(sys::digest(&file1).unwrap(), sys::digest(&file2).unwrap());\n\n/// assert!(sys::remove_all(&tmpdir).is_ok());\n\n/// ```\n\npub fn digest<T: AsRef<Path>>(path: T) -> FuResult<Vec<u8>> {\n\n Ok(Blake2b::digest(&readbytes(path)?).into_iter().collect())\n\n}\n\n\n", "file_path": "src/sys/file.rs", "rank": 96, "score": 73039.12466916244 } ]
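The fungus `sys` and `user` helpers quoted in the context items above each ship their own doc-test, and they compose naturally. Below is a minimal sketch of that composition, assuming the same `fungus::prelude::*` re-exports the quoted doc-tests rely on (`user::home_dir`, `sys::abs`, `sys::mkdir`, `sys::write`, `sys::copyfile`, `sys::digest`, `sys::remove_all`, plus the `abs`/`mash` path extensions and `assert_iter_eq`); the scratch-directory name is arbitrary.

```rust
use fungus::prelude::*;

fn main() {
    // "~" expands to the same directory user::home_dir() resolves from $HOME.
    let home = user::home_dir().unwrap();
    assert_eq!(PathBuf::from(&home), sys::abs("~").unwrap());

    // Build an absolute scratch directory path and create it.
    let tmpdir = PathBuf::from("tests/temp").abs().unwrap().mash("prelude_sketch");
    assert!(sys::remove_all(&tmpdir).is_ok());
    assert!(sys::mkdir(&tmpdir).is_ok());

    // Write a file, copy it, and confirm the two Blake2b digests match.
    let file1 = tmpdir.mash("file1");
    let file2 = tmpdir.mash("file2");
    assert!(sys::write(&file1, "this is a test").is_ok());
    assert!(sys::copyfile(&file1, &file2).is_ok());
    assert_iter_eq(sys::digest(&file1).unwrap(), sys::digest(&file2).unwrap());

    // Clean up the scratch directory.
    assert!(sys::remove_all(&tmpdir).is_ok());
}
```

Most of these helpers return `FuResult`, so the `assert!(... .is_ok())` pattern from the quoted doc-tests doubles as error handling in the sketch.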
Rust
cranelift/native/src/lib.rs
yuyang-ok/wasmtime
67c0d55fbb1f86af0595815e3a9c7f39593f3bd0
#![deny( missing_docs, trivial_numeric_casts, unused_extern_crates, )] /* riscv64gc backend have to use is_riscv_feature_detected which is unstable. */ #![feature(stdsimd)] #![warn(unused_import_braces)] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))] #![cfg_attr( feature = "cargo-clippy", warn( clippy::float_arithmetic, clippy::mut_mut, clippy::nonminimal_bool, clippy::map_unwrap_or, clippy::clippy::print_stdout, clippy::unicode_not_nfc, clippy::use_self ) )] use cranelift_codegen::isa; use target_lexicon::Triple; pub fn builder() -> Result<isa::Builder, &'static str> { builder_with_options(true) } pub fn builder_with_options(infer_native_flags: bool) -> Result<isa::Builder, &'static str> { let mut isa_builder = isa::lookup(Triple::host()).map_err(|err| match err { isa::LookupError::SupportDisabled => "support for architecture disabled at compile time", isa::LookupError::Unsupported => "unsupported architecture", })?; #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] { use cranelift_codegen::settings::Configurable; if !std::is_x86_feature_detected!("sse2") { return Err("x86 support requires SSE2"); } if !infer_native_flags { return Ok(isa_builder); } isa_builder.set("has_sse3", "false").unwrap(); isa_builder.set("has_ssse3", "false").unwrap(); isa_builder.set("has_sse41", "false").unwrap(); isa_builder.set("has_sse42", "false").unwrap(); if std::is_x86_feature_detected!("sse3") { isa_builder.enable("has_sse3").unwrap(); } if std::is_x86_feature_detected!("ssse3") { isa_builder.enable("has_ssse3").unwrap(); } if std::is_x86_feature_detected!("sse4.1") { isa_builder.enable("has_sse41").unwrap(); } if std::is_x86_feature_detected!("sse4.2") { isa_builder.enable("has_sse42").unwrap(); } if std::is_x86_feature_detected!("popcnt") { isa_builder.enable("has_popcnt").unwrap(); } if std::is_x86_feature_detected!("avx") { isa_builder.enable("has_avx").unwrap(); } if std::is_x86_feature_detected!("avx2") { isa_builder.enable("has_avx2").unwrap(); } if std::is_x86_feature_detected!("bmi1") { isa_builder.enable("has_bmi1").unwrap(); } if std::is_x86_feature_detected!("bmi2") { isa_builder.enable("has_bmi2").unwrap(); } if std::is_x86_feature_detected!("avx512bitalg") { isa_builder.enable("has_avx512bitalg").unwrap(); } if std::is_x86_feature_detected!("avx512dq") { isa_builder.enable("has_avx512dq").unwrap(); } if std::is_x86_feature_detected!("avx512f") { isa_builder.enable("has_avx512f").unwrap(); } if std::is_x86_feature_detected!("avx512vl") { isa_builder.enable("has_avx512vl").unwrap(); } if std::is_x86_feature_detected!("avx512vbmi") { isa_builder.enable("has_avx512vbmi").unwrap(); } if std::is_x86_feature_detected!("lzcnt") { isa_builder.enable("has_lzcnt").unwrap(); } } #[cfg(target_arch = "aarch64")] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } if std::arch::is_aarch64_feature_detected!("lse") { isa_builder.enable("has_lse").unwrap(); } } #[cfg(target_arch = "riscv64")] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } if std::arch::is_riscv_feature_detected!("m") { isa_builder.enable("has_extension_m").unwrap(); } if std::arch::is_riscv_feature_detected!("a") { isa_builder.enable("has_extension_a").unwrap(); } if std::arch::is_riscv_feature_detected!("f") { isa_builder.enable("has_extension_f").unwrap(); } if std::arch::is_riscv_feature_detected!("d") { 
isa_builder.enable("has_extension_d").unwrap(); } if std::arch::is_riscv_feature_detected!("v") { isa_builder.enable("has_extension_v").unwrap(); } if std::arch::is_riscv_feature_detected!("zba") { isa_builder.enable("has_extendion_zba").unwrap(); } if std::arch::is_riscv_feature_detected!("zbb") { isa_builder.enable("has_extendion_zbb").unwrap(); } if std::arch::is_riscv_feature_detected!("zbc") { isa_builder.enable("has_extendion_zbc").unwrap(); } if std::arch::is_riscv_feature_detected!("zbs") { isa_builder.enable("has_extendion_zbs").unwrap(); } if std::arch::is_riscv_feature_detected!("zbkb") { isa_builder.enable("has_extendion_zbkb").unwrap(); } } #[cfg(all(target_arch = "s390x", target_os = "linux"))] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } let v = unsafe { libc::getauxval(libc::AT_HWCAP) }; const HWCAP_S390X_VXRS_EXT2: libc::c_ulong = 32768; if (v & HWCAP_S390X_VXRS_EXT2) != 0 { isa_builder.enable("has_vxrs_ext2").unwrap(); isa_builder.enable("has_mie2").unwrap(); } } drop(&mut isa_builder); drop(infer_native_flags); Ok(isa_builder) } #[cfg(test)] mod tests { use super::builder; use cranelift_codegen::isa::CallConv; use cranelift_codegen::settings; #[test] fn test() { if let Ok(isa_builder) = builder() { let flag_builder = settings::builder(); let isa = isa_builder .finish(settings::Flags::new(flag_builder)) .unwrap(); if cfg!(all(target_os = "macos", target_arch = "aarch64")) { assert_eq!(isa.default_call_conv(), CallConv::AppleAarch64); } else if cfg!(any(unix, target_os = "nebulet")) { assert_eq!(isa.default_call_conv(), CallConv::SystemV); } else if cfg!(windows) { assert_eq!(isa.default_call_conv(), CallConv::WindowsFastcall); } if cfg!(target_pointer_width = "64") { assert_eq!(isa.pointer_bits(), 64); } else if cfg!(target_pointer_width = "32") { assert_eq!(isa.pointer_bits(), 32); } else if cfg!(target_pointer_width = "16") { assert_eq!(isa.pointer_bits(), 16); } } } } pub const VERSION: &str = env!("CARGO_PKG_VERSION");
#![deny( missing_docs, trivial_numeric_casts, unused_extern_crates, )] /* riscv64gc backend have to use is_riscv_feature_detected which is unstable. */ #![feature(stdsimd)] #![warn(unused_import_braces)] #![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))] #![cfg_attr( feature = "cargo-clippy", warn( clippy::float_arithmetic, clippy::mut_mut, clippy::nonminimal_bool, clippy::map_unwrap_or, clippy::clippy::print_stdout, clippy::unicode_not_nfc, clippy::use_self ) )] use cranelift_codegen::isa; use target_lexicon::Triple; pub fn builder() -> Result<isa::Builder, &'static str> { builder_with_options(true) } pub fn builder_with_options(infer_native_flags: bool) -> Result<isa::Builder, &'static str> { let mut isa_builder = isa::lookup(Triple::host()).map_err(|err| match err { isa::LookupError::SupportDisabled => "support for architecture disabled at compile time", isa::LookupError::Unsupported => "unsupported architecture", })?; #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] { use cranelift_codegen::settings::Configurable; if !std::is_x86_feature_detected!("sse2") { return Err("x86 support requires SSE2"); } if !infer_native_flags { return Ok(isa_builder); } isa_builder.set("has_sse3", "false").unwrap(); isa_builder.set("has_ssse3", "false").unwrap(); isa_builder.set("has_sse41", "false").unwrap(); isa_builder.set("has_sse42", "false").unwrap(); if std::is_x86_feature_detected!("sse3") { isa_builder.enable("has_sse3").unwrap(); } if std::is_x86_feature_detected!("ssse3") { isa_builder.enable("has_ssse3").unwrap(); } if std::is_x86_feature_detected!("sse4.1") { isa_builder.enable("has_sse41").unwrap(); } if std::is_x86_feature_detected!("sse4.2") { isa_builder.enable("has_sse42").unwrap(); } if std::is_x86_feature_detected!("popcnt") { isa_builder.enable("has_popcnt").unwrap(); } if std::is_x86_feature_detected!("avx") { isa_builder.enable("has_avx").unwrap(); } if std::is_x86_feature_detected!("avx2") { isa_builder.enable("has_avx2").unwrap(); } if std::is_x86_feature_detected!("bmi1") { isa_builder.enable("has_bmi1").unwrap(); } if std::is_x86_feature_detected!("bmi2") { isa_builder.enable("has_bmi2").unwrap(); } if std::is_x86_feature_detected!("avx512bitalg") { isa_builder.enable("has_avx512bitalg").unwrap(); } if std::is_x86_feature_detected!("avx512dq") {
if !infer_native_flags { return Ok(isa_builder); } let v = unsafe { libc::getauxval(libc::AT_HWCAP) }; const HWCAP_S390X_VXRS_EXT2: libc::c_ulong = 32768; if (v & HWCAP_S390X_VXRS_EXT2) != 0 { isa_builder.enable("has_vxrs_ext2").unwrap(); isa_builder.enable("has_mie2").unwrap(); } } drop(&mut isa_builder); drop(infer_native_flags); Ok(isa_builder) } #[cfg(test)] mod tests { use super::builder; use cranelift_codegen::isa::CallConv; use cranelift_codegen::settings; #[test] fn test() { if let Ok(isa_builder) = builder() { let flag_builder = settings::builder(); let isa = isa_builder .finish(settings::Flags::new(flag_builder)) .unwrap(); if cfg!(all(target_os = "macos", target_arch = "aarch64")) { assert_eq!(isa.default_call_conv(), CallConv::AppleAarch64); } else if cfg!(any(unix, target_os = "nebulet")) { assert_eq!(isa.default_call_conv(), CallConv::SystemV); } else if cfg!(windows) { assert_eq!(isa.default_call_conv(), CallConv::WindowsFastcall); } if cfg!(target_pointer_width = "64") { assert_eq!(isa.pointer_bits(), 64); } else if cfg!(target_pointer_width = "32") { assert_eq!(isa.pointer_bits(), 32); } else if cfg!(target_pointer_width = "16") { assert_eq!(isa.pointer_bits(), 16); } } } } pub const VERSION: &str = env!("CARGO_PKG_VERSION");
isa_builder.enable("has_avx512dq").unwrap(); } if std::is_x86_feature_detected!("avx512f") { isa_builder.enable("has_avx512f").unwrap(); } if std::is_x86_feature_detected!("avx512vl") { isa_builder.enable("has_avx512vl").unwrap(); } if std::is_x86_feature_detected!("avx512vbmi") { isa_builder.enable("has_avx512vbmi").unwrap(); } if std::is_x86_feature_detected!("lzcnt") { isa_builder.enable("has_lzcnt").unwrap(); } } #[cfg(target_arch = "aarch64")] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } if std::arch::is_aarch64_feature_detected!("lse") { isa_builder.enable("has_lse").unwrap(); } } #[cfg(target_arch = "riscv64")] { use cranelift_codegen::settings::Configurable; if !infer_native_flags { return Ok(isa_builder); } if std::arch::is_riscv_feature_detected!("m") { isa_builder.enable("has_extension_m").unwrap(); } if std::arch::is_riscv_feature_detected!("a") { isa_builder.enable("has_extension_a").unwrap(); } if std::arch::is_riscv_feature_detected!("f") { isa_builder.enable("has_extension_f").unwrap(); } if std::arch::is_riscv_feature_detected!("d") { isa_builder.enable("has_extension_d").unwrap(); } if std::arch::is_riscv_feature_detected!("v") { isa_builder.enable("has_extension_v").unwrap(); } if std::arch::is_riscv_feature_detected!("zba") { isa_builder.enable("has_extendion_zba").unwrap(); } if std::arch::is_riscv_feature_detected!("zbb") { isa_builder.enable("has_extendion_zbb").unwrap(); } if std::arch::is_riscv_feature_detected!("zbc") { isa_builder.enable("has_extendion_zbc").unwrap(); } if std::arch::is_riscv_feature_detected!("zbs") { isa_builder.enable("has_extendion_zbs").unwrap(); } if std::arch::is_riscv_feature_detected!("zbkb") { isa_builder.enable("has_extendion_zbkb").unwrap(); } } #[cfg(all(target_arch = "s390x", target_os = "linux"))] { use cranelift_codegen::settings::Configurable;
random
[ { "content": "// Configure the test suite environment.\n\n// Test programs use these environment variables to determine what behavior\n\n// is expected: different errnos are expected on windows, mac, and other unixes,\n\n// and other filesystem operations are supported or not.\n\npub fn test_suite_environment() -> &'static [(&'static str, &'static str)] {\n\n #[cfg(windows)]\n\n {\n\n &[\n\n (\"ERRNO_MODE_WINDOWS\", \"1\"),\n\n // Windows does not support dangling links or symlinks in the filesystem.\n\n (\"NO_DANGLING_FILESYSTEM\", \"1\"),\n\n // Windows does not support fd_allocate.\n\n (\"NO_FD_ALLOCATE\", \"1\"),\n\n // Windows does not support renaming a directory to an empty directory -\n\n // empty directory must be deleted.\n\n (\"NO_RENAME_DIR_TO_EMPTY_DIR\", \"1\"),\n\n ]\n\n }\n\n #[cfg(all(unix, not(target_os = \"macos\")))]\n\n {\n\n &[(\"ERRNO_MODE_UNIX\", \"1\")]\n\n }\n\n #[cfg(target_os = \"macos\")]\n\n {\n\n &[\n\n (\"ERRNO_MODE_MACOS\", \"1\"),\n\n // MacOS does not support fd_allocate\n\n (\"NO_FD_ALLOCATE\", \"1\"),\n\n ]\n\n }\n\n}\n", "file_path": "crates/test-programs/tests/wasm_tests/runtime/mod.rs", "rank": 2, "score": 353893.6110536005 }, { "content": "fn const_for_type<'f, T: InstBuilder<'f>>(mut builder: T, ty: ir::Type) -> &'static str {\n\n if ty == F32 {\n\n builder.f32const(0.0);\n\n \"f32const\"\n\n } else if ty == F64 {\n\n builder.f64const(0.0);\n\n \"f64const\"\n\n } else if ty.is_bool() {\n\n builder.bconst(ty, false);\n\n \"bconst\"\n\n } else if ty.is_ref() {\n\n builder.null(ty);\n\n \"null\"\n\n } else if ty.is_vector() {\n\n let zero_data = vec![0; ty.bytes() as usize].into();\n\n let zero_handle = builder.data_flow_graph_mut().constants.insert(zero_data);\n\n builder.vconst(ty, zero_handle);\n\n \"vconst\"\n\n } else {\n\n // Default to an integer type and possibly create verifier error\n\n builder.iconst(ty, 0);\n\n \"iconst\"\n\n }\n\n}\n\n\n", "file_path": "cranelift/src/bugpoint.rs", "rank": 3, "score": 333014.0207234334 }, { "content": "pub fn builder() -> Box<dyn CompilerBuilder> {\n\n let mut flags = settings::builder();\n\n\n\n // There are two possible traps for division, and this way\n\n // we get the proper one if code traps.\n\n flags\n\n .enable(\"avoid_div_traps\")\n\n .expect(\"should be valid flag\");\n\n\n\n // We don't use probestack as a stack limit mechanism\n\n flags\n\n .set(\"enable_probestack\", \"false\")\n\n .expect(\"should be valid flag\");\n\n\n\n Box::new(Builder {\n\n flags,\n\n isa_flags: cranelift_native::builder().expect(\"host machine is not a supported target\"),\n\n linkopts: LinkOptions::default(),\n\n })\n\n}\n", "file_path": "crates/cranelift/src/builder.rs", "rank": 4, "score": 314942.1372196077 }, { "content": "/// Emit Display and FromStr implementations for enum settings.\n\nfn gen_to_and_from_str(name: &str, values: &[&'static str], fmt: &mut Formatter) {\n\n fmtln!(fmt, \"impl fmt::Display for {} {{\", name);\n\n fmt.indent(|fmt| {\n\n fmtln!(\n\n fmt,\n\n \"fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\"\n\n );\n\n fmt.indent(|fmt| {\n\n fmtln!(fmt, \"f.write_str(match *self {\");\n\n fmt.indent(|fmt| {\n\n for v in values.iter() {\n\n fmtln!(fmt, \"Self::{} => \\\"{}\\\",\", camel_case(v), v);\n\n }\n\n });\n\n fmtln!(fmt, \"})\");\n\n });\n\n fmtln!(fmt, \"}\");\n\n });\n\n fmtln!(fmt, \"}\");\n\n\n", "file_path": "cranelift/codegen/meta/src/gen_settings.rs", "rank": 5, "score": 309281.49704126024 }, { "content": "/// Look for a directive in a comment string.\n\n/// The directive is of 
the form \"foo:\" and should follow the leading `;` in the comment:\n\n///\n\n/// ; dominates: block3 block4\n\n///\n\n/// Return the comment text following the directive.\n\npub fn match_directive<'a>(comment: &'a str, directive: &str) -> Option<&'a str> {\n\n assert!(\n\n directive.ends_with(':'),\n\n \"Directive must include trailing colon\"\n\n );\n\n let text = comment.trim_start_matches(';').trim_start();\n\n if text.starts_with(directive) {\n\n Some(text[directive.len()..].trim())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "cranelift/filetests/src/match_directive.rs", "rank": 6, "score": 304171.17029993015 }, { "content": "/// Main entry point for `clif-util test`.\n\n///\n\n/// Take a list of filenames which can be either `.clif` files or directories.\n\n///\n\n/// Files are interpreted as test cases and executed immediately.\n\n///\n\n/// Directories are scanned recursively for test cases ending in `.clif`. These test cases are\n\n/// executed on background threads.\n\n///\n\npub fn run(verbose: bool, report_times: bool, files: &[String]) -> anyhow::Result<time::Duration> {\n\n let mut runner = TestRunner::new(verbose, report_times);\n\n\n\n for path in files.iter().map(Path::new) {\n\n if path.is_file() {\n\n runner.push_test(path);\n\n } else {\n\n runner.push_dir(path);\n\n }\n\n }\n\n\n\n runner.start_threads();\n\n runner.run()\n\n}\n\n\n", "file_path": "cranelift/filetests/src/lib.rs", "rank": 7, "score": 297360.9117184072 }, { "content": "fn is_matching_assert_invalid_error_message(expected: &str, actual: &str) -> bool {\n\n actual.contains(expected)\n\n // `elem.wast` and `proposals/bulk-memory-operations/elem.wast` disagree\n\n // on the expected error message for the same error.\n\n || (expected.contains(\"out of bounds\") && actual.contains(\"does not fit\"))\n\n // slight difference in error messages\n\n || (expected.contains(\"unknown elem segment\") && actual.contains(\"unknown element segment\"))\n\n // The same test here is asserted to have one error message in\n\n // `memory.wast` and a different error message in\n\n // `memory64/memory.wast`, so we equate these two error messages to get\n\n // the memory64 tests to pass.\n\n || (expected.contains(\"memory size must be at most 65536 pages\") && actual.contains(\"invalid u32 number\"))\n\n}\n\n\n", "file_path": "crates/wast/src/wast.rs", "rank": 8, "score": 295028.87518180674 }, { "content": "/// Give the name of a RealReg.\n\npub fn realreg_name(reg: RealReg) -> &'static str {\n\n let preg = PReg::from(reg);\n\n match preg.class() {\n\n RegClass::Int => match preg.hw_enc() as u8 {\n\n ENC_RAX => \"%rax\",\n\n ENC_RBX => \"%rbx\",\n\n ENC_RCX => \"%rcx\",\n\n ENC_RDX => \"%rdx\",\n\n ENC_RSI => \"%rsi\",\n\n ENC_RDI => \"%rdi\",\n\n ENC_RBP => \"%rbp\",\n\n ENC_RSP => \"%rsp\",\n\n ENC_R8 => \"%r8\",\n\n ENC_R9 => \"%r9\",\n\n ENC_R10 => \"%r10\",\n\n ENC_R11 => \"%r11\",\n\n ENC_R12 => \"%r12\",\n\n ENC_R13 => \"%r13\",\n\n ENC_R14 => \"%r14\",\n\n ENC_R15 => \"%r15\",\n", "file_path": "cranelift/codegen/src/isa/x64/inst/regs.rs", "rank": 9, "score": 294422.71247886575 }, { "content": "fn feature_found(path: &Path, name: &str) -> bool {\n\n path.iter().any(|part| match part.to_str() {\n\n Some(s) => s.contains(name),\n\n None => false,\n\n })\n\n}\n\n\n", "file_path": "tests/all/wast.rs", "rank": 10, "score": 291462.05848777015 }, { "content": "/// Look for a supported ISA with the given `name`.\n\n/// Return a builder that can create a corresponding `TargetIsa`.\n\npub fn lookup_by_name(name: &str) -> 
Result<Builder, LookupError> {\n\n use alloc::str::FromStr;\n\n lookup(triple!(name))\n\n}\n\n\n\n/// Describes reason for target lookup failure\n\n#[derive(PartialEq, Eq, Copy, Clone, Debug)]\n\npub enum LookupError {\n\n /// Support for this target was disabled in the current build.\n\n SupportDisabled,\n\n\n\n /// Support for this target has not yet been implemented.\n\n Unsupported,\n\n}\n\n\n\n// This is manually implementing Error and Display instead of using thiserror to reduce the amount\n\n// of dependencies used by Cranelift.\n\nimpl std::error::Error for LookupError {}\n\n\n\nimpl fmt::Display for LookupError {\n", "file_path": "cranelift/codegen/src/isa/mod.rs", "rank": 11, "score": 291433.0648175381 }, { "content": "fn entity_desc(ty: &EntityType) -> &'static str {\n\n match ty {\n\n EntityType::Global(_) => \"global\",\n\n EntityType::Table(_) => \"table\",\n\n EntityType::Memory(_) => \"memory\",\n\n EntityType::Function(_) => \"func\",\n\n EntityType::Tag(_) => \"tag\",\n\n }\n\n}\n", "file_path": "crates/wasmtime/src/types/matching.rs", "rank": 12, "score": 281554.2738586987 }, { "content": "/// Ignore tests that aren't supported yet.\n\nfn ignore(testsuite: &str, testname: &str, strategy: &str) -> bool {\n\n match strategy {\n\n \"Cranelift\" => match (testsuite, testname) {\n\n // No simd support yet for s390x.\n\n (\"simd\", _) if platform_is_s390x() => return true,\n\n _ if platform_is_s390x() && testname.starts_with(\"simd\") => return true,\n\n _ => {}\n\n },\n\n _ => panic!(\"unrecognized strategy\"),\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "build.rs", "rank": 13, "score": 275950.4991869328 }, { "content": "/// Small helper to initialize an OperandBuilder with the right kind, for a given name and doc.\n\nfn new(format_field_name: &'static str, rust_type: &'static str, doc: &'static str) -> OperandKind {\n\n OperandKind::new(\n\n format_field_name,\n\n rust_type,\n\n OperandKindFields::EntityRef,\n\n doc,\n\n )\n\n}\n\n\n\npub(crate) struct EntityRefs {\n\n /// A reference to a basic block in the same function.\n\n /// This is primarliy used in control flow instructions.\n\n pub(crate) block: OperandKind,\n\n\n\n /// A reference to a stack slot declared in the function preamble.\n\n pub(crate) stack_slot: OperandKind,\n\n\n\n /// A reference to a global value.\n\n pub(crate) global_value: OperandKind,\n\n\n", "file_path": "cranelift/codegen/meta/src/shared/entities.rs", "rank": 14, "score": 273117.9754820333 }, { "content": "/// Preserve instructions with used result values.\n\npub fn any_inst_results_used(inst: Inst, live: &[bool], dfg: &DataFlowGraph) -> bool {\n\n dfg.inst_results(inst).iter().any(|v| live[v.index()])\n\n}\n\n\n", "file_path": "cranelift/codegen/src/inst_predicates.rs", "rank": 15, "score": 263411.79138453054 }, { "content": "/// Compile the given function down to VCode with allocated registers, ready\n\n/// for binary emission.\n\npub fn compile<B: LowerBackend + TargetIsa>(\n\n f: &Function,\n\n b: &B,\n\n abi: Box<dyn ABICallee<I = B::MInst>>,\n\n machine_env: &MachineEnv,\n\n emit_info: <B::MInst as MachInstEmit>::Info,\n\n) -> CodegenResult<(VCode<B::MInst>, regalloc2::Output)> {\n\n // Compute lowered block order.\n\n let block_order = BlockLoweringOrder::new(f);\n\n // Build the lowering context.\n\n let lower = Lower::new(f, abi, emit_info, block_order)?;\n\n // Lower the IR.\n\n let vcode = {\n\n let _tt = timing::vcode_lower();\n\n lower.lower(b)?\n\n };\n\n\n\n log::trace!(\"vcode from lowering: \\n{:?}\", vcode);\n\n\n\n // 
Perform register allocation.\n", "file_path": "cranelift/codegen/src/machinst/compile.rs", "rank": 16, "score": 260561.07244004466 }, { "content": "/// This function is required to be called before any WebAssembly is entered.\n\n/// This will configure global state such as signal handlers to prepare the\n\n/// process to receive wasm traps.\n\n///\n\n/// This function must not only be called globally once before entering\n\n/// WebAssembly but it must also be called once-per-thread that enters\n\n/// WebAssembly. Currently in wasmtime's integration this function is called on\n\n/// creation of a `Engine`.\n\n///\n\n/// The `is_wasm_pc` argument is used when a trap happens to determine if a\n\n/// program counter is the pc of an actual wasm trap or not. This is then used\n\n/// to disambiguate faults that happen due to wasm and faults that happen due to\n\n/// bugs in Rust or elsewhere.\n\npub fn init_traps(is_wasm_pc: fn(usize) -> bool) {\n\n static INIT: Once = Once::new();\n\n INIT.call_once(|| unsafe {\n\n IS_WASM_PC = is_wasm_pc;\n\n sys::platform_init();\n\n });\n\n}\n\n\n\n/// Raises a user-defined trap immediately.\n\n///\n\n/// This function performs as-if a wasm trap was just executed, only the trap\n\n/// has a dynamic payload associated with it which is user-provided. This trap\n\n/// payload is then returned from `catch_traps` below.\n\n///\n\n/// # Safety\n\n///\n\n/// Only safe to call when wasm code is on the stack, aka `catch_traps` must\n\n/// have been previously called. Additionally no Rust destructors can be on the\n\n/// stack. They will be skipped and not executed.\n\npub unsafe fn raise_user_trap(data: Error) -> ! {\n", "file_path": "crates/runtime/src/traphandlers.rs", "rank": 17, "score": 256567.28673198534 }, { "content": "/// Parse a 64-bit signed number.\n\nfn parse_i64(s: &str) -> Result<i64, &'static str> {\n\n let negative = s.starts_with('-');\n\n let s2 = if negative || s.starts_with('+') {\n\n &s[1..]\n\n } else {\n\n s\n\n };\n\n\n\n let mut value = parse_u64(s2)?;\n\n\n\n // We support the range-and-a-half from -2^63 .. 
2^64-1.\n\n if negative {\n\n value = value.wrapping_neg();\n\n // Don't allow large negative values to wrap around and become positive.\n\n if value as i64 > 0 {\n\n return Err(\"Negative number too small\");\n\n }\n\n }\n\n Ok(value as i64)\n\n}\n", "file_path": "cranelift/codegen/src/ir/immediates.rs", "rank": 18, "score": 254229.92617318188 }, { "content": "/// Parse a 64-bit unsigned number.\n\nfn parse_u64(s: &str) -> Result<u64, &'static str> {\n\n let mut value: u64 = 0;\n\n let mut digits = 0;\n\n\n\n if s.starts_with(\"-0x\") {\n\n return Err(\"Invalid character in hexadecimal number\");\n\n } else if s.starts_with(\"0x\") {\n\n // Hexadecimal.\n\n for ch in s[2..].chars() {\n\n match ch.to_digit(16) {\n\n Some(digit) => {\n\n digits += 1;\n\n if digits > 16 {\n\n return Err(\"Too many hexadecimal digits\");\n\n }\n\n // This can't overflow given the digit limit.\n\n value = (value << 4) | u64::from(digit);\n\n }\n\n None => {\n\n // Allow embedded underscores, but fail on anything else.\n", "file_path": "cranelift/codegen/src/ir/immediates.rs", "rank": 19, "score": 254229.9261731819 }, { "content": "fn parse_bool_value(value: &str) -> SetResult<bool> {\n\n match value {\n\n \"true\" | \"on\" | \"yes\" | \"1\" => Ok(true),\n\n \"false\" | \"off\" | \"no\" | \"0\" => Ok(false),\n\n _ => Err(SetError::BadValue(\"bool\".to_string())),\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/settings.rs", "rank": 20, "score": 252718.11580061546 }, { "content": "/// Generate an `enum` immediate in ISLE.\n\nfn gen_isle_enum(name: &str, mut variants: Vec<&str>, fmt: &mut Formatter) {\n\n variants.sort();\n\n let prefix = format!(\";;;; Enumerated Immediate: {} \", name);\n\n fmtln!(fmt, \"{:;<80}\", prefix);\n\n fmt.empty_line();\n\n fmtln!(fmt, \"(type {} extern\", name);\n\n fmt.indent(|fmt| {\n\n fmt.line(\"(enum\");\n\n fmt.indent(|fmt| {\n\n for variant in variants {\n\n fmtln!(fmt, \"{}\", variant);\n\n }\n\n });\n\n fmt.line(\")\");\n\n });\n\n fmt.line(\")\");\n\n fmt.empty_line();\n\n}\n\n\n", "file_path": "cranelift/codegen/meta/src/gen_inst.rs", "rank": 21, "score": 250668.2098891096 }, { "content": "/// Pre-parse a supposed entity name by splitting it into two parts: A head of lowercase ASCII\n\n/// letters and numeric tail.\n\npub fn split_entity_name(name: &str) -> Option<(&str, u32)> {\n\n let (head, tail) = name.split_at(name.len() - trailing_digits(name));\n\n if tail.len() > 1 && tail.starts_with('0') {\n\n None\n\n } else {\n\n tail.parse().ok().map(|n| (head, n))\n\n }\n\n}\n\n\n\n/// Lexical analysis.\n\n///\n\n/// A `Lexer` reads text from a `&str` and provides a sequence of tokens.\n\n///\n\n/// Also keep track of a line number for error reporting.\n\n///\n\npub struct Lexer<'a> {\n\n // Complete source being processed.\n\n source: &'a str,\n\n\n\n // Iterator into `source`.\n", "file_path": "cranelift/reader/src/lexer.rs", "rank": 22, "score": 249611.47958690784 }, { "content": "pub fn run_pass(filename: &str) {\n\n assert!(build(filename).is_ok());\n\n}\n\n\n", "file_path": "cranelift/isle/isle/tests/run_tests.rs", "rank": 23, "score": 246331.69778953376 }, { "content": "pub fn run_fail(filename: &str) {\n\n assert!(build(filename).is_err());\n\n}\n\n\n", "file_path": "cranelift/isle/isle/tests/run_tests.rs", "rank": 24, "score": 246331.69778953376 }, { "content": "/// Perform DCE on `func`.\n\npub fn do_dce(func: &mut Function, domtree: &mut DominatorTree) {\n\n let _tt = timing::dce();\n\n debug_assert!(domtree.is_valid());\n\n\n\n let mut live = vec![false; 
func.dfg.num_values()];\n\n for &block in domtree.cfg_postorder() {\n\n let mut pos = FuncCursor::new(func).at_bottom(block);\n\n while let Some(inst) = pos.prev_inst() {\n\n {\n\n if has_side_effect(pos.func, inst)\n\n || any_inst_results_used(inst, &live, &pos.func.dfg)\n\n {\n\n for arg in pos.func.dfg.inst_args(inst) {\n\n let v = pos.func.dfg.resolve_aliases(*arg);\n\n live[v.index()] = true;\n\n }\n\n continue;\n\n }\n\n }\n\n pos.remove_inst();\n\n }\n\n }\n\n}\n", "file_path": "cranelift/codegen/src/dce.rs", "rank": 25, "score": 244545.87176680123 }, { "content": "fn run_test(name: &str, stack_overflow: bool) {\n\n let me = env::current_exe().unwrap();\n\n let mut cmd = Command::new(me);\n\n cmd.env(VAR_NAME, name);\n\n let output = cmd.output().expect(\"failed to spawn subprocess\");\n\n let stdout = String::from_utf8_lossy(&output.stdout);\n\n let stderr = String::from_utf8_lossy(&output.stderr);\n\n let mut desc = format!(\"got status: {}\", output.status);\n\n\n\n if !stdout.trim().is_empty() {\n\n desc.push_str(\"\\nstdout: ----\\n\");\n\n desc.push_str(\" \");\n\n desc.push_str(&stdout.replace(\"\\n\", \"\\n \"));\n\n }\n\n\n\n if !stderr.trim().is_empty() {\n\n desc.push_str(\"\\nstderr: ----\\n\");\n\n desc.push_str(\" \");\n\n desc.push_str(&stderr.replace(\"\\n\", \"\\n \"));\n\n }\n", "file_path": "tests/host_segfault.rs", "rank": 26, "score": 244447.7302960938 }, { "content": "// Check if the spec repository directory contains any files.\n\nfn is_spec_repository_empty(destination: &str) -> bool {\n\n PathBuf::from(destination)\n\n .read_dir()\n\n .map(|mut i| i.next().is_none())\n\n .unwrap_or(true)\n\n}\n\n\n", "file_path": "crates/fuzzing/wasm-spec-interpreter/build.rs", "rank": 27, "score": 243590.39078556828 }, { "content": "/// Performs a major version bump increment on the semver version `version`.\n\n///\n\n/// This function will perform a semver-major-version bump on the `version`\n\n/// specified. This is used to calculate the next version of a crate in this\n\n/// repository since we're currently making major version bumps for all our\n\n/// releases. 
This may end up getting tweaked as we stabilize crates and start\n\n/// doing more minor/patch releases, but for now this should do the trick.\n\nfn bump(version: &str, patch_bump: bool) -> String {\n\n let mut iter = version.split('.').map(|s| s.parse::<u32>().unwrap());\n\n let major = iter.next().expect(\"major version\");\n\n let minor = iter.next().expect(\"minor version\");\n\n let patch = iter.next().expect(\"patch version\");\n\n\n\n if patch_bump {\n\n return format!(\"{}.{}.{}\", major, minor, patch + 1);\n\n }\n\n if major != 0 {\n\n format!(\"{}.0.0\", major + 1)\n\n } else if minor != 0 {\n\n format!(\"0.{}.0\", minor + 1)\n\n } else {\n\n format!(\"0.0.{}\", patch + 1)\n\n }\n\n}\n\n\n", "file_path": "scripts/publish.rs", "rank": 28, "score": 242573.10370248335 }, { "content": "pub fn get_caller_save_x_gpr() -> [bool; 32] {\n\n let mut x: [bool; 32] = [false; 32];\n\n for (i, v) in get_callee_save_x_gpr().iter().enumerate() {\n\n if i == 0 || i == 3 || i == 4 {\n\n continue;\n\n }\n\n x[i] = !v;\n\n }\n\n x\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/riscv64/abi.rs", "rank": 29, "score": 242547.66055964967 }, { "content": "pub fn get_caller_save_f_gpr() -> [bool; 32] {\n\n let mut x: [bool; 32] = [false; 32];\n\n for (i, v) in get_callee_save_f_gpr().iter().enumerate() {\n\n x[i] = !v;\n\n }\n\n x\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/riscv64/abi.rs", "rank": 30, "score": 242547.66055964964 }, { "content": "pub fn run_link(isle_filename: &str) {\n\n let tempdir = tempfile::tempdir().unwrap();\n\n let code = build(isle_filename).unwrap();\n\n\n\n let isle_filename_base = std::path::Path::new(isle_filename)\n\n .file_stem()\n\n .unwrap()\n\n .to_str()\n\n .unwrap()\n\n .to_string();\n\n let isle_generated_code = tempdir\n\n .path()\n\n .to_path_buf()\n\n .join(isle_filename_base.clone() + \".rs\");\n\n std::fs::write(isle_generated_code, code).unwrap();\n\n\n\n let rust_filename = isle_filename.replace(\".isle\", \"\").to_string() + \"_main.rs\";\n\n let rust_filename_base = std::path::Path::new(&rust_filename).file_name().unwrap();\n\n let rust_driver = tempdir.path().to_path_buf().join(&rust_filename_base);\n\n println!(\"copying {} to {:?}\", rust_filename, rust_driver);\n", "file_path": "cranelift/isle/isle/tests/run_tests.rs", "rank": 31, "score": 242507.53726082243 }, { "content": "fn emit_tests(out: &mut String, dir_name: &str, runner_func: &str) {\n\n for test_file in std::fs::read_dir(dir_name).unwrap() {\n\n let test_file = test_file.unwrap().file_name().into_string().unwrap();\n\n if !test_file.ends_with(\".isle\") {\n\n continue;\n\n }\n\n let test_file_base = test_file.replace(\".isle\", \"\");\n\n\n\n writeln!(out, \"#[test]\").unwrap();\n\n writeln!(out, \"fn test_{}_{}() {{\", runner_func, test_file_base).unwrap();\n\n writeln!(out, \" {}(\\\"{}/{}\\\");\", runner_func, dir_name, test_file).unwrap();\n\n writeln!(out, \"}}\").unwrap();\n\n }\n\n}\n", "file_path": "cranelift/isle/isle/build.rs", "rank": 32, "score": 242075.74115643487 }, { "content": "pub fn compile_cranelift(\n\n wasm: &[u8],\n\n target: Option<Triple>,\n\n output: impl AsRef<Path>,\n\n) -> Result<()> {\n\n let mut config = Config::new();\n\n config.debug_info(true);\n\n if let Some(target) = target {\n\n config.target(&target.to_string())?;\n\n }\n\n let engine = Engine::new(&config)?;\n\n let module = Module::new(&engine, wasm)?;\n\n let bytes = module.serialize()?;\n\n\n\n let mut file = File::create(output).context(\"failed to create object file\")?;\n\n 
file.write_all(&bytes)\n\n .context(\"failed to write object file\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/all/debug/obj.rs", "rank": 33, "score": 242044.84502422766 }, { "content": "/// Demangles a single function name into a user-readable form.\n\n///\n\n/// Currently supported: Rust/C/C++ function names.\n\npub fn demangle_function_name(writer: &mut impl std::fmt::Write, name: &str) -> std::fmt::Result {\n\n if let Ok(demangled) = rustc_demangle::try_demangle(name) {\n\n write!(writer, \"{}\", demangled)\n\n } else if let Ok(demangled) = cpp_demangle::Symbol::new(name) {\n\n write!(writer, \"{}\", demangled)\n\n } else {\n\n write!(writer, \"{}\", name)\n\n }\n\n}\n\n\n", "file_path": "crates/jit/src/demangling.rs", "rank": 34, "score": 240420.4221279936 }, { "content": "fn init_file_per_thread_logger(prefix: &'static str) {\n\n file_per_thread_logger::initialize(prefix);\n\n\n\n // Extending behavior of default spawner:\n\n // https://docs.rs/rayon/1.1.0/rayon/struct.ThreadPoolBuilder.html#method.spawn_handler\n\n // Source code says DefaultSpawner is implementation detail and\n\n // shouldn't be used directly.\n\n rayon::ThreadPoolBuilder::new()\n\n .spawn_handler(move |thread| {\n\n let mut b = std::thread::Builder::new();\n\n if let Some(name) = thread.name() {\n\n b = b.name(name.to_owned());\n\n }\n\n if let Some(stack_size) = thread.stack_size() {\n\n b = b.stack_size(stack_size);\n\n }\n\n b.spawn(move || {\n\n file_per_thread_logger::initialize(prefix);\n\n thread.run()\n\n })?;\n", "file_path": "crates/cli-flags/src/lib.rs", "rank": 35, "score": 240161.07095057843 }, { "content": "/// A primitive hash function for matching opcodes.\n\npub fn simple_hash(s: &str) -> usize {\n\n let mut h: u32 = 5381;\n\n for c in s.chars() {\n\n h = (h ^ c as u32).wrapping_add(h.rotate_right(6));\n\n }\n\n h as usize\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::simple_hash;\n\n\n\n #[test]\n\n fn basic() {\n\n assert_eq!(simple_hash(\"Hello\"), 0x2fa70c01);\n\n assert_eq!(simple_hash(\"world\"), 0x5b0c31d5);\n\n }\n\n}\n", "file_path": "cranelift/codegen/shared/src/constant_hash.rs", "rank": 36, "score": 239377.3600985083 }, { "content": "/// Convert the string `s` to CamelCase.\n\npub fn camel_case(s: &str) -> String {\n\n let mut output_chars = String::with_capacity(s.len());\n\n\n\n let mut capitalize = true;\n\n for curr_char in s.chars() {\n\n if curr_char == '_' {\n\n capitalize = true;\n\n } else {\n\n if capitalize {\n\n output_chars.extend(curr_char.to_uppercase());\n\n } else {\n\n output_chars.push(curr_char);\n\n }\n\n capitalize = false;\n\n }\n\n }\n\n\n\n output_chars\n\n}\n\n\n", "file_path": "cranelift/codegen/meta/src/cdsl/mod.rs", "rank": 37, "score": 239370.62295225845 }, { "content": "fn translate_icmp(cc: IntCC, builder: &mut FunctionBuilder, state: &mut FuncTranslationState) {\n\n let (arg0, arg1) = state.pop2();\n\n let val = builder.ins().icmp(cc, arg0, arg1);\n\n state.push1(builder.ins().bint(I32, val));\n\n}\n\n\n", "file_path": "cranelift/wasm/src/code_translator.rs", "rank": 38, "score": 239359.7835117089 }, { "content": "fn translate_fcmp(cc: FloatCC, builder: &mut FunctionBuilder, state: &mut FuncTranslationState) {\n\n let (arg0, arg1) = state.pop2();\n\n let val = builder.ins().fcmp(cc, arg0, arg1);\n\n state.push1(builder.ins().bint(I32, val));\n\n}\n\n\n", "file_path": "cranelift/wasm/src/code_translator.rs", "rank": 39, "score": 239359.7835117089 }, { "content": "/// Perform the NaN canonicalization pass.\n\npub fn 
do_nan_canonicalization(func: &mut Function) {\n\n let _tt = timing::canonicalize_nans();\n\n let mut pos = FuncCursor::new(func);\n\n while let Some(_block) = pos.next_block() {\n\n while let Some(inst) = pos.next_inst() {\n\n if is_fp_arith(&mut pos, inst) {\n\n add_nan_canon_seq(&mut pos, inst);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/nan_canonicalization.rs", "rank": 40, "score": 239200.06579254856 }, { "content": "fn benchmark_name<'a>(strategy: &InstanceAllocationStrategy) -> &'static str {\n\n match strategy {\n\n InstanceAllocationStrategy::OnDemand => \"default\",\n\n InstanceAllocationStrategy::Pooling { .. } => \"pooling\",\n\n }\n\n}\n\n\n", "file_path": "benches/instantiation.rs", "rank": 41, "score": 238442.72657274437 }, { "content": "/// Perform simple GVN on `func`.\n\n///\n\npub fn do_simple_gvn(func: &mut Function, domtree: &mut DominatorTree) {\n\n let _tt = timing::gvn();\n\n debug_assert!(domtree.is_valid());\n\n\n\n // Visit blocks in a reverse post-order.\n\n //\n\n // The RefCell here is a bit ugly since the HashKeys in the ScopedHashMap\n\n // need a reference to the function.\n\n let pos = RefCell::new(FuncCursor::new(func));\n\n\n\n let mut visible_values: ScopedHashMap<HashKey, Inst> = ScopedHashMap::new();\n\n let mut scope_stack: Vec<Inst> = Vec::new();\n\n\n\n for &block in domtree.cfg_postorder().iter().rev() {\n\n {\n\n // Pop any scopes that we just exited.\n\n let layout = &pos.borrow().func.layout;\n\n loop {\n\n if let Some(current) = scope_stack.last() {\n\n if domtree.dominates(*current, block, layout) {\n", "file_path": "cranelift/codegen/src/simple_gvn.rs", "rank": 42, "score": 237964.49348766851 }, { "content": "/// Finishes compilation of the `translation` specified, producing the final\n\n/// compilation artifact and auxiliary information.\n\n///\n\n/// This function will consume the final results of compiling a wasm module\n\n/// and finish the ELF image in-progress as part of `obj` by appending any\n\n/// compiler-agnostic sections.\n\n///\n\n/// The auxiliary `CompiledModuleInfo` structure returned here has also been\n\n/// serialized into the object returned, but if the caller will quickly\n\n/// turn-around and invoke `CompiledModule::from_artifacts` after this then the\n\n/// information can be passed to that method to avoid extra deserialization.\n\n/// This is done to avoid a serialize-then-deserialize for API calls like\n\n/// `Module::new` where the compiled module is immediately going to be used.\n\n///\n\n/// The `MmapVec` returned here contains the compiled image and resides in\n\n/// mmap'd memory for easily switching permissions to executable afterwards.\n\npub fn finish_compile(\n\n translation: ModuleTranslation<'_>,\n\n mut obj: Object,\n\n funcs: PrimaryMap<DefinedFuncIndex, FunctionInfo>,\n\n trampolines: Vec<Trampoline>,\n\n tunables: &Tunables,\n\n) -> Result<(MmapVec, CompiledModuleInfo)> {\n\n let ModuleTranslation {\n\n mut module,\n\n debuginfo,\n\n has_unparsed_debuginfo,\n\n data,\n\n data_align,\n\n passive_data,\n\n ..\n\n } = translation;\n\n\n\n // Place all data from the wasm module into a section which will the\n\n // source of the data later at runtime.\n\n let data_id = obj.add_section(\n", "file_path": "crates/jit/src/instantiate.rs", "rank": 43, "score": 236880.8671176568 }, { "content": "fn desc(ty: &InterfaceType) -> &'static str {\n\n match ty {\n\n InterfaceType::U8 => \"u8\",\n\n InterfaceType::S8 => \"s8\",\n\n InterfaceType::U16 => \"u16\",\n\n InterfaceType::S16 => 
\"s16\",\n\n InterfaceType::U32 => \"u32\",\n\n InterfaceType::S32 => \"s32\",\n\n InterfaceType::U64 => \"u64\",\n\n InterfaceType::S64 => \"s64\",\n\n InterfaceType::Float32 => \"f32\",\n\n InterfaceType::Float64 => \"f64\",\n\n InterfaceType::Unit => \"unit\",\n\n InterfaceType::Bool => \"bool\",\n\n InterfaceType::Char => \"char\",\n\n InterfaceType::String => \"string\",\n\n InterfaceType::List(_) => \"list\",\n\n InterfaceType::Tuple(_) => \"tuple\",\n\n InterfaceType::Option(_) => \"option\",\n\n InterfaceType::Expected(_) => \"expected\",\n\n\n\n InterfaceType::Record(_) => \"record\",\n\n InterfaceType::Variant(_) => \"variant\",\n\n InterfaceType::Flags(_) => \"flags\",\n\n InterfaceType::Enum(_) => \"enum\",\n\n InterfaceType::Union(_) => \"union\",\n\n }\n\n}\n", "file_path": "crates/wasmtime/src/component/func/typed.rs", "rank": 44, "score": 236748.4466460082 }, { "content": "/// Determine whether this opcode behaves as a memory fence, i.e.,\n\n/// prohibits any moving of memory accesses across it.\n\npub fn has_memory_fence_semantics(op: Opcode) -> bool {\n\n match op {\n\n Opcode::AtomicRmw\n\n | Opcode::AtomicCas\n\n | Opcode::AtomicLoad\n\n | Opcode::AtomicStore\n\n | Opcode::Fence => true,\n\n Opcode::Call | Opcode::CallIndirect => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n/// Visit all successors of a block with a given visitor closure.\n\npub(crate) fn visit_block_succs<F: FnMut(Inst, Block)>(f: &Function, block: Block, mut visit: F) {\n\n for inst in f.layout.block_likely_branches(block) {\n\n if f.dfg[inst].opcode().is_branch() {\n\n visit_branch_targets(f, inst, &mut visit);\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/inst_predicates.rs", "rank": 45, "score": 235584.2792296062 }, { "content": "#[cfg(unix)]\n\nfn is_stack_overflow(status: &ExitStatus, stderr: &str) -> bool {\n\n use std::os::unix::prelude::*;\n\n\n\n // The main thread might overflow or it might be from a fiber stack (SIGSEGV/SIGBUS)\n\n stderr.contains(\"thread 'main' has overflowed its stack\")\n\n || match status.signal() {\n\n Some(libc::SIGSEGV) | Some(libc::SIGBUS) => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "tests/host_segfault.rs", "rank": 46, "score": 234217.15350822292 }, { "content": "#[cfg(windows)]\n\nfn is_stack_overflow(status: &ExitStatus, _stderr: &str) -> bool {\n\n match status.code().map(|s| s as u32) {\n\n Some(0xc00000fd) => true,\n\n _ => false,\n\n }\n\n}\n", "file_path": "tests/host_segfault.rs", "rank": 47, "score": 234217.15350822292 }, { "content": "#[cfg_attr(not(feature = \"component-model\"), allow(dead_code))]\n\npub fn entity_ty(\n\n expected: &EntityType,\n\n expected_types: &ModuleTypes,\n\n actual: &EntityType,\n\n actual_types: &ModuleTypes,\n\n) -> Result<()> {\n\n match expected {\n\n EntityType::Memory(expected) => match actual {\n\n EntityType::Memory(actual) => memory_ty(expected, actual, None),\n\n _ => bail!(\"expected memory found {}\", entity_desc(actual)),\n\n },\n\n EntityType::Global(expected) => match actual {\n\n EntityType::Global(actual) => global_ty(expected, actual),\n\n _ => bail!(\"expected global found {}\", entity_desc(actual)),\n\n },\n\n EntityType::Table(expected) => match actual {\n\n EntityType::Table(actual) => table_ty(expected, actual, None),\n\n _ => bail!(\"expected table found {}\", entity_desc(actual)),\n\n },\n\n EntityType::Function(expected) => match actual {\n", "file_path": "crates/wasmtime/src/types/matching.rs", "rank": 48, "score": 232051.5862936346 }, { "content": "/// Determines whether this 
condcode interprets inputs as signed or\n\n/// unsigned. See the documentation for the `icmp` instruction in\n\n/// cranelift-codegen/meta/src/shared/instructions.rs for further insights\n\n/// into this.\n\npub fn condcode_is_signed(cc: IntCC) -> bool {\n\n match cc {\n\n IntCC::Equal => false,\n\n IntCC::NotEqual => false,\n\n IntCC::SignedGreaterThanOrEqual => true,\n\n IntCC::SignedGreaterThan => true,\n\n IntCC::SignedLessThanOrEqual => true,\n\n IntCC::SignedLessThan => true,\n\n IntCC::UnsignedGreaterThanOrEqual => false,\n\n IntCC::UnsignedGreaterThan => false,\n\n IntCC::UnsignedLessThanOrEqual => false,\n\n IntCC::UnsignedLessThan => false,\n\n IntCC::Overflow => true,\n\n IntCC::NotOverflow => true,\n\n }\n\n}\n\n\n\n//============================================================================\n\n// Lowering: main entry point for lowering a instruction\n\n\n", "file_path": "cranelift/codegen/src/isa/s390x/lower.rs", "rank": 49, "score": 231966.8221750504 }, { "content": "#[inline(always)]\n\npub fn is_type_signed(ty: Type) -> bool {\n\n assert!(ty.is_int());\n\n ty == I8 || ty == I16 || ty == I32 || ty == I64 || ty == I128\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub enum CsrAddress {\n\n Fcsr = 0x3,\n\n Vstart = 0x8,\n\n Vxsat = 0x9,\n\n Vxrm = 0xa,\n\n Vcsr = 0xf,\n\n Vl = 0xc20,\n\n Vtype = 0xc21,\n\n Vlenb = 0xc22,\n\n}\n\n\n\nimpl std::fmt::Debug for CsrAddress {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result {\n\n write!(f, \"0x{:x}\", self.as_u32())\n", "file_path": "cranelift/codegen/src/isa/riscv64/inst/args.rs", "rank": 50, "score": 231966.8221750504 }, { "content": "#[inline(never)]\n\npub fn do_remove_constant_phis(func: &mut Function, domtree: &mut DominatorTree) {\n\n let _tt = timing::remove_constant_phis();\n\n debug_assert!(domtree.is_valid());\n\n\n\n // Get the blocks, in reverse postorder\n\n let blocks_reverse_postorder = domtree\n\n .cfg_postorder()\n\n .into_iter()\n\n .rev()\n\n .collect::<Vec<_>>();\n\n\n\n // Phase 1 of 3: for each block, make a summary containing all relevant\n\n // info. 
The solver will iterate over the summaries, rather than having\n\n // to inspect each instruction in each block.\n\n let mut summaries = FxHashMap::<Block, BlockSummary>::default();\n\n\n\n for &&b in &blocks_reverse_postorder {\n\n let formals = func.dfg.block_params(b);\n\n let mut summary = BlockSummary::new(SmallVec::from(formals));\n\n\n", "file_path": "cranelift/codegen/src/remove_constant_phis.rs", "rank": 51, "score": 231964.50879558484 }, { "content": "/// Parse a float using the same format as `format_float` above.\n\n///\n\n/// The encoding parameters are:\n\n///\n\n/// w - exponent field width in bits\n\n/// t - trailing significand field width in bits\n\n///\n\nfn parse_float(s: &str, w: u8, t: u8) -> Result<u64, &'static str> {\n\n debug_assert!(w > 0 && w <= 16, \"Invalid exponent range\");\n\n debug_assert!(1 + w + t <= 64, \"Too large IEEE format for u64\");\n\n debug_assert!((t + w + 1).is_power_of_two(), \"Unexpected IEEE format size\");\n\n\n\n let (sign_bit, s2) = if s.starts_with('-') {\n\n (1u64 << (t + w), &s[1..])\n\n } else if s.starts_with('+') {\n\n (0, &s[1..])\n\n } else {\n\n (0, s)\n\n };\n\n\n\n if !s2.starts_with(\"0x\") {\n\n let max_e_bits = ((1u64 << w) - 1) << t;\n\n let quiet_bit = 1u64 << (t - 1);\n\n\n\n // The only decimal encoding allowed is 0.\n\n if s2 == \"0.0\" {\n\n return Ok(sign_bit);\n", "file_path": "cranelift/codegen/src/ir/immediates.rs", "rank": 52, "score": 230165.69462705607 }, { "content": "/// Return the execution target string expected by OpenVINO from the\n\n/// `ExecutionTarget` enum provided by wasi-nn.\n\nfn map_execution_target_to_string(target: ExecutionTarget) -> &'static str {\n\n match target {\n\n ExecutionTarget::Cpu => \"CPU\",\n\n ExecutionTarget::Gpu => \"GPU\",\n\n ExecutionTarget::Tpu => unimplemented!(\"OpenVINO does not support TPU execution targets\"),\n\n }\n\n}\n\n\n", "file_path": "crates/wasi-nn/src/openvino.rs", "rank": 53, "score": 229703.13797651662 }, { "content": "/// Fold operations on constants.\n\n///\n\n/// It's important to note that this will not remove unused constants. It's\n\n/// assumed that the DCE pass will take care of them.\n\npub fn fold_constants(func: &mut ir::Function) {\n\n let mut pos = FuncCursor::new(func);\n\n\n\n while let Some(_block) = pos.next_block() {\n\n while let Some(inst) = pos.next_inst() {\n\n use self::ir::InstructionData::*;\n\n match pos.func.dfg[inst] {\n\n Binary { opcode, args } => {\n\n fold_binary(&mut pos.func.dfg, inst, opcode, args);\n\n }\n\n Unary { opcode, arg } => {\n\n fold_unary(&mut pos.func.dfg, inst, opcode, arg);\n\n }\n\n Branch { opcode, .. } => {\n\n fold_branch(&mut pos, inst, opcode);\n\n }\n\n _ => {}\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/preopt/src/constant_folding.rs", "rank": 54, "score": 229262.09257283737 }, { "content": "#[inline(always)]\n\npub fn is_int_and_type_signed(ty: Type) -> bool {\n\n ty.is_int() && is_type_signed(ty)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/riscv64/inst/args.rs", "rank": 55, "score": 228539.8498299841 }, { "content": "/// Returns the Wasmtime-specific section name for dwarf debugging sections.\n\n///\n\n/// These sections, if configured in Wasmtime, will contain the original raw\n\n/// dwarf debugging information found in the wasm file, unmodified. 
These tables\n\n/// are then consulted later to convert wasm program counters to original wasm\n\n/// source filenames/line numbers with `addr2line`.\n\nfn wasm_section_name(id: gimli::SectionId) -> &'static str {\n\n use gimli::SectionId::*;\n\n match id {\n\n DebugAbbrev => \".debug_abbrev.wasm\",\n\n DebugAddr => \".debug_addr.wasm\",\n\n DebugAranges => \".debug_aranges.wasm\",\n\n DebugFrame => \".debug_frame.wasm\",\n\n EhFrame => \".eh_frame.wasm\",\n\n EhFrameHdr => \".eh_frame_hdr.wasm\",\n\n DebugInfo => \".debug_info.wasm\",\n\n DebugLine => \".debug_line.wasm\",\n\n DebugLineStr => \".debug_line_str.wasm\",\n\n DebugLoc => \".debug_loc.wasm\",\n\n DebugLocLists => \".debug_loc_lists.wasm\",\n\n DebugMacinfo => \".debug_macinfo.wasm\",\n\n DebugMacro => \".debug_macro.wasm\",\n\n DebugPubNames => \".debug_pub_names.wasm\",\n\n DebugPubTypes => \".debug_pub_types.wasm\",\n\n DebugRanges => \".debug_ranges.wasm\",\n\n DebugRngLists => \".debug_rng_lists.wasm\",\n\n DebugStr => \".debug_str.wasm\",\n\n DebugStrOffsets => \".debug_str_offsets.wasm\",\n\n DebugTypes => \".debug_types.wasm\",\n\n DebugCuIndex => \".debug_cu_index.wasm\",\n\n DebugTuIndex => \".debug_tu_index.wasm\",\n\n }\n\n}\n", "file_path": "crates/jit/src/instantiate.rs", "rank": 56, "score": 226945.03759355564 }, { "content": "fn measure_execution_time(c: &mut Criterion) {\n\n host_to_wasm(c);\n\n wasm_to_host(c);\n\n}\n\n\n", "file_path": "benches/call.rs", "rank": 57, "score": 226636.62713404602 }, { "content": "pub fn passed_by_reference(ty: &witx::Type) -> bool {\n\n match ty {\n\n witx::Type::Pointer(_) | witx::Type::ConstPointer(_) | witx::Type::List(_) => true,\n\n witx::Type::Record(r) => r.bitflags_repr().is_none(),\n\n witx::Type::Variant(v) => !v.is_enum(),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "crates/wiggle/generate/src/module_trait.rs", "rank": 58, "score": 225841.3907481002 }, { "content": "/// Attempts to extract the corresponding function index from a symbol possibly produced by\n\n/// `func_symbol_name`.\n\npub fn try_parse_func_name(name: &str) -> Option<FuncIndex> {\n\n let n = name.strip_prefix(FUNCTION_PREFIX)?.parse().ok()?;\n\n Some(FuncIndex::new(n))\n\n}\n\n\n", "file_path": "crates/environ/src/obj.rs", "rank": 59, "score": 222378.84620620077 }, { "content": "/// Attempts to extract the corresponding signature index from a symbol\n\n/// possibly produced by `trampoline_symbol_name`.\n\npub fn try_parse_trampoline_name(name: &str) -> Option<SignatureIndex> {\n\n let n = name.strip_prefix(TRAMPOLINE_PREFIX)?.parse().ok()?;\n\n Some(SignatureIndex::new(n))\n\n}\n", "file_path": "crates/environ/src/obj.rs", "rank": 60, "score": 222378.84620620077 }, { "content": "fn parse_wasm_features(features: &str) -> Result<WasmFeatures> {\n\n let features = features.trim();\n\n\n\n let mut all = None;\n\n let mut values: HashMap<_, _> = SUPPORTED_WASM_FEATURES\n\n .iter()\n\n .map(|(name, _)| (name.to_string(), None))\n\n .collect();\n\n\n\n if features == \"all\" {\n\n all = Some(true);\n\n } else if features == \"-all\" {\n\n all = Some(false);\n\n } else {\n\n for feature in features.split(',') {\n\n let feature = feature.trim();\n\n\n\n if feature.is_empty() {\n\n continue;\n\n }\n", "file_path": "crates/cli-flags/src/lib.rs", "rank": 61, "score": 221753.7180066581 }, { "content": "fn call_ser(file: &str, pretty: bool) -> Result<(), String> {\n\n let ret_of_parse = parse_functions(file);\n\n match ret_of_parse {\n\n Ok(funcs) => {\n\n let ser_str = if pretty {\n\n 
serde_json::to_string_pretty(&funcs).unwrap()\n\n } else {\n\n serde_json::to_string(&funcs).unwrap()\n\n };\n\n println!(\"{}\", ser_str);\n\n Ok(())\n\n }\n\n Err(_pe) => Err(\"There was a parsing error\".to_string()),\n\n }\n\n}\n\n\n", "file_path": "cranelift/serde/src/clif-json.rs", "rank": 62, "score": 221292.56768173608 }, { "content": "/// Look for an ISA for the given `triple`.\n\n/// Return a builder that can create a corresponding `TargetIsa`.\n\npub fn lookup(triple: Triple) -> Result<Builder, LookupError> {\n\n match triple.architecture {\n\n Architecture::X86_64 => {\n\n isa_builder!(x64, (feature = \"x86\"), triple)\n\n }\n\n Architecture::Aarch64 { .. } => isa_builder!(aarch64, (feature = \"arm64\"), triple),\n\n Architecture::S390x { .. } => isa_builder!(s390x, (feature = \"s390x\"), triple),\n\n Architecture::Riscv64 { .. } => isa_builder!(riscv64, (feature = \"riscv64gc\"), triple),\n\n _ => Err(LookupError::Unsupported),\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/mod.rs", "rank": 63, "score": 220591.64862018096 }, { "content": "/// Does the given instruction have any side-effect that would preclude it from being removed when\n\n/// its value is unused?\n\npub fn has_side_effect(func: &Function, inst: Inst) -> bool {\n\n let data = &func.dfg[inst];\n\n let opcode = data.opcode();\n\n trivially_has_side_effects(opcode) || is_load_with_defined_trapping(opcode, data)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/inst_predicates.rs", "rank": 64, "score": 220411.23932049348 }, { "content": "/// Parse the entire `text` into a list of functions.\n\n///\n\n/// Any test commands or target declarations are ignored.\n\npub fn parse_functions(text: &str) -> ParseResult<Vec<Function>> {\n\n let _tt = timing::parse_text();\n\n parse_test(text, ParseOptions::default())\n\n .map(|file| file.functions.into_iter().map(|(func, _)| func).collect())\n\n}\n\n\n\n/// Options for configuring the parsing of filetests.\n\npub struct ParseOptions<'a> {\n\n /// Compiler passes to run on the parsed functions.\n\n pub passes: Option<&'a [String]>,\n\n /// Target ISA for compiling the parsed functions, e.g. 
\"x86_64 skylake\".\n\n pub target: Option<&'a str>,\n\n /// Default calling convention used when none is specified for a parsed function.\n\n pub default_calling_convention: CallConv,\n\n /// Default for unwind-info setting (enabled or disabled).\n\n pub unwind_info: bool,\n\n}\n\n\n\nimpl Default for ParseOptions<'_> {\n\n fn default() -> Self {\n\n Self {\n\n passes: None,\n\n target: None,\n\n default_calling_convention: CallConv::Fast,\n\n unwind_info: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/reader/src/parser.rs", "rank": 65, "score": 220377.360841967 }, { "content": "fn gen_arguments_method(formats: &[&InstructionFormat], fmt: &mut Formatter, is_mut: bool) {\n\n let (method, mut_, rslice, as_slice) = if is_mut {\n\n (\n\n \"arguments_mut\",\n\n \"mut \",\n\n \"core::slice::from_mut\",\n\n \"as_mut_slice\",\n\n )\n\n } else {\n\n (\"arguments\", \"\", \"core::slice::from_ref\", \"as_slice\")\n\n };\n\n\n\n fmtln!(\n\n fmt,\n\n \"pub fn {}<'a>(&'a {}self, pool: &'a {}ir::ValueListPool) -> &{}[Value] {{\",\n\n method,\n\n mut_,\n\n mut_,\n\n mut_\n\n );\n", "file_path": "cranelift/codegen/meta/src/gen_inst.rs", "rank": 66, "score": 219888.62626481074 }, { "content": "pub fn wasi_file_is_stdin(f: &dyn WasiFile) -> bool {\n\n f.as_any().is::<crate::stdio::Stdin>()\n\n}\n", "file_path": "crates/wasi-common/tokio/src/sched/windows.rs", "rank": 67, "score": 219163.27118131128 }, { "content": "/// Returns true/false based on whether the instruction is a floating-point\n\n/// arithmetic operation. This ignores operations like `fneg`, `fabs`, or\n\n/// `fcopysign` that only operate on the sign bit of a floating point value.\n\nfn is_fp_arith(pos: &mut FuncCursor, inst: Inst) -> bool {\n\n match pos.func.dfg[inst] {\n\n InstructionData::Unary { opcode, .. } => {\n\n opcode == Opcode::Ceil\n\n || opcode == Opcode::Floor\n\n || opcode == Opcode::Nearest\n\n || opcode == Opcode::Sqrt\n\n || opcode == Opcode::Trunc\n\n }\n\n InstructionData::Binary { opcode, .. } => {\n\n opcode == Opcode::Fadd\n\n || opcode == Opcode::Fdiv\n\n || opcode == Opcode::Fmax\n\n || opcode == Opcode::Fmin\n\n || opcode == Opcode::Fmul\n\n || opcode == Opcode::Fsub\n\n }\n\n InstructionData::Ternary { opcode, .. 
} => opcode == Opcode::Fma,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/nan_canonicalization.rs", "rank": 68, "score": 217716.5090509898 }, { "content": "fn measure_execution_time(c: &mut Criterion) {\n\n // Baseline performance: a single measurment covers both initializing\n\n // thread local resources and executing the first call.\n\n //\n\n // The other two bench functions should sum to this duration.\n\n c.bench_function(\"lazy initialization at call\", move |b| {\n\n let (engine, module) = test_setup();\n\n b.iter_custom(move |iters| {\n\n (0..iters)\n\n .into_iter()\n\n .map(|_| lazy_thread_instantiate(engine.clone(), module.clone()))\n\n .sum()\n\n })\n\n });\n\n\n\n // Using Engine::tls_eager_initialize: measure how long eager\n\n // initialization takes on a new thread.\n\n c.bench_function(\"eager initialization\", move |b| {\n\n let (engine, module) = test_setup();\n\n b.iter_custom(move |iters| {\n", "file_path": "benches/thread_eager_init.rs", "rank": 69, "score": 217206.85204454575 }, { "content": "pub fn compile_expression<R>(\n\n expr: &Expression<R>,\n\n encoding: gimli::Encoding,\n\n frame_base: Option<&CompiledExpression>,\n\n) -> Result<Option<CompiledExpression>, Error>\n\nwhere\n\n R: Reader,\n\n{\n\n // Bail when `frame_base` is complicated.\n\n if let Some(expr) = frame_base {\n\n if expr.parts.iter().any(|p| match p {\n\n CompiledExpressionPart::Jump { .. } => true,\n\n _ => false,\n\n }) {\n\n return Ok(None);\n\n }\n\n }\n\n\n\n // jump_targets key is offset in buf starting from the end\n\n // (see also `unread_bytes` below)\n", "file_path": "crates/cranelift/src/debug/transform/expression.rs", "rank": 70, "score": 217163.7314632099 }, { "content": "/// Does the given instruction have any side-effect as per [has_side_effect], or else is a load,\n\n/// but not the get_pinned_reg opcode?\n\npub fn has_lowering_side_effect(func: &Function, inst: Inst) -> bool {\n\n let op = func.dfg[inst].opcode();\n\n op != Opcode::GetPinnedReg && (has_side_effect(func, inst) || op.can_load())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/inst_predicates.rs", "rank": 71, "score": 216984.26697542716 }, { "content": "/// Return an instance implementing the \"spectest\" interface used in the\n\n/// spec testsuite.\n\npub fn link_spectest<T>(linker: &mut Linker<T>, store: &mut Store<T>) -> Result<()> {\n\n linker.func_wrap(\"spectest\", \"print\", || {})?;\n\n linker.func_wrap(\"spectest\", \"print_i32\", |val: i32| println!(\"{}: i32\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_i64\", |val: i64| println!(\"{}: i64\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_f32\", |val: f32| println!(\"{}: f32\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_f64\", |val: f64| println!(\"{}: f64\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_i32_f32\", |i: i32, f: f32| {\n\n println!(\"{}: i32\", i);\n\n println!(\"{}: f32\", f);\n\n })?;\n\n linker.func_wrap(\"spectest\", \"print_f64_f64\", |f1: f64, f2: f64| {\n\n println!(\"{}: f64\", f1);\n\n println!(\"{}: f64\", f2);\n\n })?;\n\n\n\n let ty = GlobalType::new(ValType::I32, Mutability::Const);\n\n let g = Global::new(&mut *store, ty, Val::I32(666))?;\n\n linker.define(\"spectest\", \"global_i32\", g)?;\n\n\n\n let ty = GlobalType::new(ValType::I64, Mutability::Const);\n", "file_path": "crates/wast/src/spectest.rs", "rank": 72, "score": 216486.71757982654 }, { "content": "pub fn run(options: &Options) -> Result<()> {\n\n crate::handle_debug_flag(options.debug);\n\n let parsed = 
parse_sets_and_triple(&options.settings, &options.target)?;\n\n for path in &options.files {\n\n let name = String::from(path.as_os_str().to_string_lossy());\n\n handle_module(options, path, &name, parsed.as_fisa())?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift/src/compile.rs", "rank": 73, "score": 216350.38293657784 }, { "content": "// Each of the tests included from `wast_testsuite_tests` will call this\n\n// function which actually executes the `wast` test suite given the `strategy`\n\n// to compile it.\n\nfn run_wast(wast: &str, strategy: Strategy, pooling: bool) -> anyhow::Result<()> {\n\n match strategy {\n\n Strategy::Cranelift => {}\n\n _ => unimplemented!(),\n\n }\n\n let wast = Path::new(wast);\n\n\n\n let simd = feature_found(wast, \"simd\");\n\n let memory64 = feature_found(wast, \"memory64\");\n\n let multi_memory = feature_found(wast, \"multi-memory\");\n\n let threads = feature_found(wast, \"threads\");\n\n\n\n let mut cfg = Config::new();\n\n cfg.wasm_simd(simd)\n\n .wasm_multi_memory(multi_memory)\n\n .wasm_threads(threads)\n\n .wasm_memory64(memory64)\n\n .cranelift_debug_verifier(true);\n\n\n\n #[cfg(feature = \"component-model\")]\n", "file_path": "tests/all/wast.rs", "rank": 74, "score": 215640.75803916698 }, { "content": "/// Run filecheck on `text`, using directives extracted from `context`.\n\npub fn run_filecheck(text: &str, context: &Context) -> anyhow::Result<()> {\n\n let checker = build_filechecker(context)?;\n\n if checker\n\n .check(text, NO_VARIABLES)\n\n .context(\"filecheck failed\")?\n\n {\n\n Ok(())\n\n } else {\n\n // Filecheck mismatch. Emit an explanation as output.\n\n let (_, explain) = checker\n\n .explain(text, NO_VARIABLES)\n\n .context(\"filecheck explain failed\")?;\n\n anyhow::bail!(\n\n \"filecheck failed for function on line {}:\\n{}{}\",\n\n context.details.location.line_number,\n\n checker,\n\n explain\n\n );\n\n }\n\n}\n\n\n", "file_path": "cranelift/filetests/src/subtest.rs", "rank": 75, "score": 215538.60813670536 }, { "content": "/// The main pre-opt pass.\n\npub fn do_preopt(func: &mut Function, cfg: &mut ControlFlowGraph, isa: &dyn TargetIsa) {\n\n let _tt = timing::preopt();\n\n\n\n let mut pos = FuncCursor::new(func);\n\n let native_word_width = isa.pointer_bytes() as u32;\n\n let mut optimizer = simplify::peephole_optimizer(isa);\n\n\n\n while let Some(block) = pos.next_block() {\n\n while let Some(inst) = pos.next_inst() {\n\n simplify::apply_all(&mut optimizer, &mut pos, inst, native_word_width);\n\n\n\n // Try to transform divide-by-constant into simpler operations.\n\n if let Some(divrem_info) = get_div_info(inst, &pos.func.dfg) {\n\n do_divrem_transformation(&divrem_info, &mut pos, inst);\n\n continue;\n\n }\n\n\n\n branch_order(&mut pos, cfg, block, inst);\n\n }\n\n }\n\n}\n", "file_path": "cranelift/codegen/src/simple_preopt.rs", "rank": 76, "score": 214365.67521600064 }, { "content": "/// Generates all the Rust source files used in Cranelift from the meta-language.\n\npub fn generate(isas: &[isa::Isa], out_dir: &str, isle_dir: &str) -> Result<(), error::Error> {\n\n // Create all the definitions:\n\n // - common definitions.\n\n let mut shared_defs = shared::define();\n\n\n\n gen_settings::generate(\n\n &shared_defs.settings,\n\n gen_settings::ParentGroup::None,\n\n \"settings.rs\",\n\n &out_dir,\n\n )?;\n\n gen_types::generate(\"types.rs\", &out_dir)?;\n\n\n\n // - per ISA definitions.\n\n let target_isas = isa::define(isas, &mut shared_defs);\n\n\n\n // At this point, all definitions are done.\n\n let 
all_formats = shared_defs.verify_instruction_formats();\n\n\n\n // Generate all the code.\n", "file_path": "cranelift/codegen/meta/src/lib.rs", "rank": 77, "score": 213854.62603087828 }, { "content": "/// Get a function reference for the probestack function in `func`.\n\n///\n\n/// If there is an existing reference, use it, otherwise make a new one.\n\npub fn get_probestack_funcref(func: &mut Function) -> Option<FuncRef> {\n\n find_funcref(LibCall::Probestack, func)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/ir/libcall.rs", "rank": 78, "score": 213554.14496582144 }, { "content": "pub fn wasi_file_is_stdin(f: &dyn WasiFile) -> bool {\n\n f.as_any().is::<crate::stdio::Stdin>()\n\n}\n\n\n", "file_path": "crates/wasi-common/cap-std-sync/src/sched/windows.rs", "rank": 79, "score": 213136.96799156285 }, { "content": "fn validate_symbol(name: &str) -> ModuleResult<()> {\n\n // null bytes are not allowed in symbol names and will cause the `object`\n\n // crate to panic. Let's return a clean error instead.\n\n if name.contains(\"\\0\") {\n\n return Err(ModuleError::Backend(anyhow::anyhow!(\n\n \"Symbol {:?} has a null byte, which is disallowed\",\n\n name\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl Module for ObjectModule {\n\n fn isa(&self) -> &dyn TargetIsa {\n\n &*self.isa\n\n }\n\n\n\n fn declarations(&self) -> &ModuleDeclarations {\n\n &self.declarations\n\n }\n", "file_path": "cranelift/object/src/backend.rs", "rank": 80, "score": 213066.5618584519 }, { "content": "// Assumption: path inside cache directory.\n\n// Then, we don't have to use sound OS-specific exclusive file access.\n\n// Note: there's no need to remove temporary file here - cleanup task will do it later.\n\nfn fs_write_atomic(path: &Path, reason: &str, contents: &[u8]) -> bool {\n\n let lock_path = path.with_extension(format!(\"wip-atomic-write-{}\", reason));\n\n fs::OpenOptions::new()\n\n .create_new(true) // atomic file creation (assumption: no one will open it without this flag)\n\n .write(true)\n\n .open(&lock_path)\n\n .and_then(|mut file| file.write_all(contents))\n\n // file should go out of scope and be closed at this point\n\n .and_then(|()| fs::rename(&lock_path, &path)) // atomic file rename\n\n .map_err(|err| {\n\n warn!(\n\n \"Failed to write file with rename, lock path: {}, target path: {}, err: {}\",\n\n lock_path.display(),\n\n path.display(),\n\n err\n\n )\n\n })\n\n .is_ok()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "crates/cache/src/lib.rs", "rank": 81, "score": 212979.96604089093 }, { "content": "/// Create a new `isa::Builder`.\n\npub fn isa_builder(triple: Triple) -> IsaBuilder {\n\n match triple.architecture {\n\n Architecture::Riscv64(..) 
=> {}\n\n _ => unreachable!(),\n\n }\n\n IsaBuilder {\n\n triple,\n\n setup: riscv_settings::builder(),\n\n constructor: |triple, shared_flags, builder| {\n\n let isa_flags = riscv_settings::Flags::new(&shared_flags, builder);\n\n let backend = Riscv64Backend::new_with_flags(triple, shared_flags, isa_flags);\n\n Ok(Box::new(backend))\n\n },\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::cursor::{Cursor, FuncCursor};\n", "file_path": "cranelift/codegen/src/isa/riscv64/mod.rs", "rank": 82, "score": 212272.21975152683 }, { "content": "/// Create a new `isa::Builder`.\n\npub fn isa_builder(triple: Triple) -> IsaBuilder {\n\n assert!(triple.architecture == Architecture::Aarch64(Aarch64Architecture::Aarch64));\n\n IsaBuilder {\n\n triple,\n\n setup: aarch64_settings::builder(),\n\n constructor: |triple, shared_flags, builder| {\n\n let isa_flags = aarch64_settings::Flags::new(&shared_flags, builder);\n\n let backend = AArch64Backend::new_with_flags(triple, shared_flags, isa_flags);\n\n Ok(Box::new(backend))\n\n },\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::cursor::{Cursor, FuncCursor};\n\n use crate::ir::types::*;\n\n use crate::ir::{AbiParam, ExternalName, Function, InstBuilder, JumpTableData, Signature};\n\n use crate::isa::CallConv;\n", "file_path": "cranelift/codegen/src/isa/aarch64/mod.rs", "rank": 83, "score": 212272.21975152683 }, { "content": "/// Create a new `isa::Builder`.\n\npub fn isa_builder(triple: Triple) -> IsaBuilder {\n\n assert!(triple.architecture == Architecture::S390x);\n\n IsaBuilder {\n\n triple,\n\n setup: s390x_settings::builder(),\n\n constructor: |triple, shared_flags, builder| {\n\n let isa_flags = s390x_settings::Flags::new(&shared_flags, builder);\n\n let backend = S390xBackend::new_with_flags(triple, shared_flags, isa_flags);\n\n Ok(Box::new(backend))\n\n },\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::cursor::{Cursor, FuncCursor};\n\n use crate::ir::types::*;\n\n use crate::ir::{AbiParam, ExternalName, Function, InstBuilder, Signature};\n\n use crate::isa::CallConv;\n", "file_path": "cranelift/codegen/src/isa/s390x/mod.rs", "rank": 84, "score": 212272.21975152683 }, { "content": "fn match_bool(\n\n expected: bool,\n\n actual: bool,\n\n desc: &str,\n\n if_true: &str,\n\n if_false: &str,\n\n) -> Result<()> {\n\n if expected == actual {\n\n return Ok(());\n\n }\n\n bail!(\n\n \"{} types incompatible: expected {} {0}, found {} {0}\",\n\n desc,\n\n if expected { if_true } else { if_false },\n\n if actual { if_true } else { if_false },\n\n )\n\n}\n\n\n", "file_path": "crates/wasmtime/src/types/matching.rs", "rank": 85, "score": 212239.00089567472 }, { "content": "/// Generate an ISA from an architecture string (e.g. 
\"x86_64\").\n\npub fn isa_from_arch(arch: &str) -> Result<isa::Isa, String> {\n\n isa::Isa::from_arch(arch).ok_or_else(|| format!(\"no supported isa found for arch `{}`\", arch))\n\n}\n\n\n", "file_path": "cranelift/codegen/meta/src/lib.rs", "rank": 86, "score": 212118.94069435584 }, { "content": "pub fn get_dwarfdump(obj: &str, section: DwarfDumpSection) -> Result<String> {\n\n let dwarfdump = env::var(\"DWARFDUMP\").unwrap_or(\"llvm-dwarfdump\".to_string());\n\n let section_flag = match section {\n\n DwarfDumpSection::DebugInfo => \"-debug-info\",\n\n DwarfDumpSection::DebugLine => \"-debug-line\",\n\n };\n\n let output = Command::new(&dwarfdump)\n\n .args(&[section_flag, obj])\n\n .output()\n\n .expect(\"success\");\n\n if !output.status.success() {\n\n bail!(\n\n \"failed to execute {}: {}\",\n\n dwarfdump,\n\n String::from_utf8_lossy(&output.stderr),\n\n );\n\n }\n\n Ok(String::from_utf8_lossy(&output.stdout).to_string())\n\n}\n", "file_path": "tests/all/debug/dump.rs", "rank": 87, "score": 212105.2062043688 }, { "content": "/// Pretty-print a Cranelift error.\n\npub fn pretty_error(func: &ir::Function, err: CodegenError) -> String {\n\n if let CodegenError::Verifier(e) = err {\n\n pretty_verifier_error(func, None, e)\n\n } else {\n\n err.to_string()\n\n }\n\n}\n", "file_path": "cranelift/codegen/src/print_errors.rs", "rank": 88, "score": 209080.21481961047 }, { "content": "/// Parse a CLIF comment `text` as a heap command.\n\n///\n\n/// Return:\n\n/// - `Ok(None)` if the comment is not intended to be a `HeapCommand` (i.e. does not start with `heap`\n\n/// - `Ok(Some(heap))` if the comment is intended as a `HeapCommand` and can be parsed to one\n\n/// - `Err` otherwise.\n\npub fn parse_heap_command<'a>(text: &str) -> ParseResult<Option<HeapCommand>> {\n\n let _tt = timing::parse_text();\n\n // We remove leading spaces and semi-colons for convenience here instead of at the call sites\n\n // since this function will be attempting to parse a HeapCommand from a CLIF comment.\n\n let trimmed_text = text.trim_start_matches(|c| c == ' ' || c == ';');\n\n let mut parser = Parser::new(trimmed_text);\n\n match parser.token() {\n\n Some(Token::Identifier(\"heap\")) => parser.parse_heap_command().map(|c| Some(c)),\n\n Some(_) | None => Ok(None),\n\n }\n\n}\n\n\n\npub struct Parser<'a> {\n\n lex: Lexer<'a>,\n\n\n\n lex_error: Option<LexError>,\n\n\n\n /// Current lookahead token.\n\n lookahead: Option<Token<'a>>,\n\n\n", "file_path": "cranelift/reader/src/parser.rs", "rank": 89, "score": 208865.16010736584 }, { "content": "pub fn commit(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // Memory needs to be committed, so don't use the `region` crate\n\n if unsafe { VirtualAlloc(addr as _, len, MEM_COMMIT, PAGE_READWRITE).is_null() } {\n\n bail!(\"failed to commit memory as read/write\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 90, "score": 208709.8660494486 }, { "content": "pub fn decommit(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n if unsafe { VirtualFree(addr as _, len, MEM_DECOMMIT) } == 0 {\n\n bail!(\n\n \"failed to decommit memory pages: {}\",\n\n std::io::Error::last_os_error()\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 91, "score": 208709.8660494486 }, { "content": "/// Prints:\n\n/// ; ^~~~~~\n\nfn print_arrow(w: &mut dyn Write, entity: &str) 
-> fmt::Result {\n\n write!(w, \";\")?;\n\n\n\n let indent = entity.len() - entity.trim_start().len();\n\n if indent != 0 {\n\n write!(w, \"{1:0$}^\", indent - 1, \"\")?;\n\n }\n\n\n\n for _ in 0..entity.trim().len() - 1 {\n\n write!(w, \"~\")?;\n\n }\n\n\n\n writeln!(w)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/print_errors.rs", "rank": 92, "score": 208415.22826951696 }, { "content": "pub fn prepare_workspace(exe_name: &str) -> anyhow::Result<TempDir> {\n\n let prefix = format!(\"wasi_common_{}\", exe_name);\n\n let tempdir = Builder::new().prefix(&prefix).tempdir()?;\n\n Ok(tempdir)\n\n}\n\n\n", "file_path": "crates/test-programs/tests/wasm_tests/utils.rs", "rank": 93, "score": 207673.3270436657 }, { "content": "/// A helper to extract all the `Type` listings of each variable in `params`\n\n/// for only parameters the return true for `is_wasm`, typically paired with\n\n/// `is_wasm_return` or `is_wasm_parameter`.\n\npub fn wasm_param_types(params: &[ir::AbiParam], is_wasm: impl Fn(usize) -> bool) -> Vec<Type> {\n\n let mut ret = Vec::with_capacity(params.len());\n\n for (i, param) in params.iter().enumerate() {\n\n if is_wasm(i) {\n\n ret.push(param.value_type);\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "cranelift/wasm/src/code_translator.rs", "rank": 94, "score": 206662.8521221783 }, { "content": "/// Perform a simple legalization by expansion of the function, without\n\n/// platform-specific transforms.\n\npub fn simple_legalize(func: &mut ir::Function, cfg: &mut ControlFlowGraph, isa: &dyn TargetIsa) {\n\n let mut pos = FuncCursor::new(func);\n\n let func_begin = pos.position();\n\n pos.set_position(func_begin);\n\n while let Some(_block) = pos.next_block() {\n\n let mut prev_pos = pos.position();\n\n while let Some(inst) = pos.next_inst() {\n\n match pos.func.dfg[inst] {\n\n // control flow\n\n InstructionData::BranchIcmp {\n\n opcode: ir::Opcode::BrIcmp,\n\n cond,\n\n destination,\n\n ref args,\n\n } => {\n\n let a = args.get(0, &pos.func.dfg.value_lists).unwrap();\n\n let b = args.get(1, &pos.func.dfg.value_lists).unwrap();\n\n let block_args = args.as_slice(&pos.func.dfg.value_lists)[2..].to_vec();\n\n\n\n let old_block = pos.func.layout.pp_block(inst);\n", "file_path": "cranelift/codegen/src/legalizer/mod.rs", "rank": 95, "score": 205929.2078333756 }, { "content": "#[inline]\n\npub fn next_field<T: ComponentType>(offset: &mut usize) -> usize {\n\n *offset = align_to(*offset, T::align());\n\n let result = *offset;\n\n *offset += T::size();\n\n result\n\n}\n\n\n\n/// Verify that the given wasm type is a tuple with the expected fields in the right order.\n", "file_path": "crates/wasmtime/src/component/func/typed.rs", "rank": 96, "score": 205621.91165740252 }, { "content": "/// Prints:\n\n/// ; error: [ERROR BODY]\n\nfn print_error(w: &mut dyn Write, err: VerifierError) -> fmt::Result {\n\n writeln!(w, \"; error: {}\", err.to_string())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/print_errors.rs", "rank": 97, "score": 205378.08772989182 }, { "content": "fn smoke_test_gc_impl(use_epochs: bool) -> anyhow::Result<()> {\n\n let (mut store, module) = ref_types_module(\n\n use_epochs,\n\n r#\"\n\n (module\n\n (import \"\" \"\" (func $do_gc))\n\n (func $recursive (export \"func\") (param i32 externref) (result externref)\n\n local.get 0\n\n i32.eqz\n\n if (result externref)\n\n call $do_gc\n\n local.get 1\n\n else\n\n local.get 0\n\n i32.const 1\n\n i32.sub\n\n local.get 1\n\n call $recursive\n\n end\n\n )\n", "file_path": "tests/all/gc.rs", "rank": 98, "score": 
204513.4217206689 }, { "content": "fn translate_linkage(linkage: Linkage) -> (SymbolScope, bool) {\n\n let scope = match linkage {\n\n Linkage::Import => SymbolScope::Unknown,\n\n Linkage::Local => SymbolScope::Compilation,\n\n Linkage::Hidden => SymbolScope::Linkage,\n\n Linkage::Export | Linkage::Preemptible => SymbolScope::Dynamic,\n\n };\n\n // TODO: this matches rustc_codegen_cranelift, but may be wrong.\n\n let weak = linkage == Linkage::Preemptible;\n\n (scope, weak)\n\n}\n\n\n\n/// This is the output of `ObjectModule`'s\n\n/// [`finish`](../struct.ObjectModule.html#method.finish) function.\n\n/// It contains the generated `Object` and other information produced during\n\n/// compilation.\n\npub struct ObjectProduct {\n\n /// Object artifact with all functions and data from the module defined.\n\n pub object: Object<'static>,\n\n /// Symbol IDs for functions (both declared and defined).\n", "file_path": "cranelift/object/src/backend.rs", "rank": 99, "score": 204487.52266870317 } ]
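The context items above include scripts/publish.rs's bump() helper, whose doc comment spells out the release rule: with patch_bump set only the patch component is incremented, otherwise the left-most non-zero semver component is bumped and everything to its right is reset to zero. A minimal stand-alone sketch of that rule, written here purely for illustration (the function name and the test are assumptions, not part of the indexed repository):

/// Assumed behaviour, mirroring the bump() doc comment above: bump the patch
/// component when `patch_bump` is set, otherwise bump the left-most non-zero
/// component and zero everything to its right.
fn bump_version(version: &str, patch_bump: bool) -> String {
    let mut parts = version.split('.').map(|s| s.parse::<u32>().unwrap());
    let major = parts.next().expect("major version");
    let minor = parts.next().expect("minor version");
    let patch = parts.next().expect("patch version");

    if patch_bump {
        format!("{}.{}.{}", major, minor, patch + 1)
    } else if major != 0 {
        format!("{}.0.0", major + 1)
    } else if minor != 0 {
        format!("0.{}.0", minor + 1)
    } else {
        format!("0.0.{}", patch + 1)
    }
}

#[test]
fn bump_version_examples() {
    assert_eq!(bump_version("1.2.3", false), "2.0.0"); // major release
    assert_eq!(bump_version("0.30.1", false), "0.31.0"); // pre-1.0 crate
    assert_eq!(bump_version("1.2.3", true), "1.2.4"); // patch release
}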
Rust
x86_64/src/smbios.rs
Gnurou/crosvm
307168a1eb35dda5b71cdef1d534882c893ef686
use std::mem; use std::result; use std::slice; use std::fs::OpenOptions; use std::io::prelude::*; use std::path::{Path, PathBuf}; use data_model::DataInit; use remain::sorted; use thiserror::Error; use vm_memory::{GuestAddress, GuestMemory}; #[sorted] #[derive(Error, Debug)] pub enum Error { #[error("The SMBIOS table has too little address space to be stored")] AddressOverflow, #[error("Failure while zeroing out the memory for the SMBIOS table")] Clear, #[error("Failure to verify host SMBIOS entry checksum")] InvalidChecksum, #[error("Failure to read host SMBIOS data")] InvalidInput, #[error("Failure while reading SMBIOS data file")] IoFailed, #[error("There was too little guest memory to store the SMBIOS table")] NotEnoughMemory, #[error("Failure to write additional data to memory")] WriteData, #[error("Failure to write SMBIOS entrypoint structure")] WriteSmbiosEp, } pub type Result<T> = result::Result<T, Error>; const SMBIOS_START: u64 = 0xf0000; const SM2_MAGIC_IDENT: &[u8; 4usize] = b"_SM_"; const SM3_MAGIC_IDENT: &[u8; 5usize] = b"_SM3_"; const BIOS_INFORMATION: u8 = 0; const SYSTEM_INFORMATION: u8 = 1; const END_OF_TABLE: u8 = 127; const PCI_SUPPORTED: u64 = 1 << 7; const IS_VIRTUAL_MACHINE: u8 = 1 << 4; fn compute_checksum<T: Copy>(v: &T) -> u8 { let v_slice = unsafe { slice::from_raw_parts(v as *const T as *const u8, mem::size_of::<T>()) }; let mut checksum: u8 = 0; for i in v_slice.iter() { checksum = checksum.wrapping_add(*i); } (!checksum).wrapping_add(1) } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios23Intermediate { pub signature: [u8; 5usize], pub checksum: u8, pub length: u16, pub address: u32, pub count: u16, pub revision: u8, } unsafe impl data_model::DataInit for Smbios23Intermediate {} impl Clone for Smbios23Intermediate { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios23Entrypoint { pub signature: [u8; 4usize], pub checksum: u8, pub length: u8, pub majorver: u8, pub minorver: u8, pub max_size: u16, pub revision: u8, pub reserved: [u8; 5usize], pub dmi: Smbios23Intermediate, } unsafe impl data_model::DataInit for Smbios23Entrypoint {} impl Clone for Smbios23Entrypoint { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios30Entrypoint { pub signature: [u8; 5usize], pub checksum: u8, pub length: u8, pub majorver: u8, pub minorver: u8, pub docrev: u8, pub revision: u8, pub reserved: u8, pub max_size: u32, pub physptr: u64, } unsafe impl data_model::DataInit for Smbios30Entrypoint {} impl Clone for Smbios30Entrypoint { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct SmbiosBiosInfo { pub typ: u8, pub length: u8, pub handle: u16, pub vendor: u8, pub version: u8, pub start_addr: u16, pub release_date: u8, pub rom_size: u8, pub characteristics: u64, pub characteristics_ext1: u8, pub characteristics_ext2: u8, } impl Clone for SmbiosBiosInfo { fn clone(&self) -> Self { *self } } unsafe impl data_model::DataInit for SmbiosBiosInfo {} #[repr(packed)] #[derive(Default, Copy)] pub struct SmbiosSysInfo { pub typ: u8, pub length: u8, pub handle: u16, pub manufacturer: u8, pub product_name: u8, pub version: u8, pub serial_number: u8, pub uuid: [u8; 16usize], pub wake_up_type: u8, pub sku: u8, pub family: u8, } impl Clone for SmbiosSysInfo { fn clone(&self) -> Self { *self } } unsafe impl data_model::DataInit for SmbiosSysInfo {} fn write_and_incr<T: DataInit>( mem: &GuestMemory, val: T, mut curptr: GuestAddress, ) -> Result<GuestAddress> { 
mem.write_obj_at_addr(val, curptr) .map_err(|_| Error::WriteData)?; curptr = curptr .checked_add(mem::size_of::<T>() as u64) .ok_or(Error::NotEnoughMemory)?; Ok(curptr) } fn write_string(mem: &GuestMemory, val: &str, mut curptr: GuestAddress) -> Result<GuestAddress> { for c in val.as_bytes().iter() { curptr = write_and_incr(mem, *c, curptr)?; } curptr = write_and_incr(mem, 0_u8, curptr)?; Ok(curptr) } fn setup_smbios_from_file(mem: &GuestMemory, path: &Path) -> Result<()> { let mut sme_path = PathBuf::from(path); sme_path.push("smbios_entry_point"); let mut sme = Vec::new(); OpenOptions::new() .read(true) .open(&sme_path) .map_err(|_| Error::IoFailed)? .read_to_end(&mut sme) .map_err(|_| Error::IoFailed)?; let mut dmi_path = PathBuf::from(path); dmi_path.push("DMI"); let mut dmi = Vec::new(); OpenOptions::new() .read(true) .open(&dmi_path) .map_err(|_| Error::IoFailed)? .read_to_end(&mut dmi) .map_err(|_| Error::IoFailed)?; if sme.len() == mem::size_of::<Smbios30Entrypoint>() && sme.starts_with(SM3_MAGIC_IDENT) { let mut smbios_ep = Smbios30Entrypoint::default(); smbios_ep.as_mut_slice().copy_from_slice(&sme); let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios30Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; mem.write_at_addr(&dmi, physptr) .map_err(|_| Error::NotEnoughMemory)?; smbios_ep.physptr = physptr.offset(); smbios_ep.checksum = 0; smbios_ep.checksum = compute_checksum(&smbios_ep); mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::NotEnoughMemory)?; return Ok(()); } if sme.len() == mem::size_of::<Smbios23Entrypoint>() && sme.starts_with(SM2_MAGIC_IDENT) { let mut smbios_ep = Smbios23Entrypoint::default(); smbios_ep.as_mut_slice().copy_from_slice(&sme); let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios23Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; mem.write_at_addr(&dmi, physptr) .map_err(|_| Error::NotEnoughMemory)?; smbios_ep.dmi.address = physptr.offset() as u32; smbios_ep.dmi.checksum = 0; smbios_ep.dmi.checksum = compute_checksum(&smbios_ep.dmi); smbios_ep.checksum = 0; smbios_ep.checksum = compute_checksum(&smbios_ep); mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::WriteSmbiosEp)?; return Ok(()); } Err(Error::InvalidInput) } pub fn setup_smbios(mem: &GuestMemory, dmi_path: Option<PathBuf>) -> Result<()> { if let Some(dmi_path) = dmi_path { return setup_smbios_from_file(mem, &dmi_path); } let physptr = GuestAddress(SMBIOS_START) .checked_add(mem::size_of::<Smbios30Entrypoint>() as u64) .ok_or(Error::NotEnoughMemory)?; let mut curptr = physptr; let mut handle = 0; { handle += 1; let smbios_biosinfo = SmbiosBiosInfo { typ: BIOS_INFORMATION, length: mem::size_of::<SmbiosBiosInfo>() as u8, handle, vendor: 1, version: 2, characteristics: PCI_SUPPORTED, characteristics_ext2: IS_VIRTUAL_MACHINE, ..Default::default() }; curptr = write_and_incr(mem, smbios_biosinfo, curptr)?; curptr = write_string(mem, "crosvm", curptr)?; curptr = write_string(mem, "0", curptr)?; curptr = write_and_incr(mem, 0_u8, curptr)?; } { handle += 1; let smbios_sysinfo = SmbiosSysInfo { typ: SYSTEM_INFORMATION, length: mem::size_of::<SmbiosSysInfo>() as u8, handle, manufacturer: 1, product_name: 2, ..Default::default() }; curptr = write_and_incr(mem, smbios_sysinfo, curptr)?; curptr = write_string(mem, "ChromiumOS", curptr)?; curptr = write_string(mem, "crosvm", curptr)?; curptr = write_and_incr(mem, 0u8, curptr)?; } { handle += 1; let smbios_sysinfo = SmbiosSysInfo { typ: 
END_OF_TABLE, length: mem::size_of::<SmbiosSysInfo>() as u8, handle, ..Default::default() }; curptr = write_and_incr(mem, smbios_sysinfo, curptr)?; curptr = write_and_incr(mem, 0_u8, curptr)?; } { let mut smbios_ep = Smbios30Entrypoint::default(); smbios_ep.signature = *SM3_MAGIC_IDENT; smbios_ep.length = mem::size_of::<Smbios30Entrypoint>() as u8; smbios_ep.majorver = 0x03; smbios_ep.minorver = 0x02; smbios_ep.docrev = 0x00; smbios_ep.revision = 0x01; smbios_ep.max_size = curptr.offset_from(physptr) as u32; smbios_ep.physptr = physptr.offset(); smbios_ep.checksum = compute_checksum(&smbios_ep); mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START)) .map_err(|_| Error::WriteSmbiosEp)?; } Ok(()) } #[cfg(test)] mod tests { use super::*; #[test] fn struct_size() { assert_eq!( mem::size_of::<Smbios23Entrypoint>(), 0x1fusize, concat!("Size of: ", stringify!(Smbios23Entrypoint)) ); assert_eq!( mem::size_of::<Smbios30Entrypoint>(), 0x18usize, concat!("Size of: ", stringify!(Smbios30Entrypoint)) ); assert_eq!( mem::size_of::<SmbiosBiosInfo>(), 0x14usize, concat!("Size of: ", stringify!(SmbiosBiosInfo)) ); assert_eq!( mem::size_of::<SmbiosSysInfo>(), 0x1busize, concat!("Size of: ", stringify!(SmbiosSysInfo)) ); } #[test] fn entrypoint_checksum() { let mem = GuestMemory::new(&[(GuestAddress(SMBIOS_START), 4096)]).unwrap(); setup_smbios(&mem, None).unwrap(); let smbios_ep: Smbios30Entrypoint = mem.read_obj_from_addr(GuestAddress(SMBIOS_START)).unwrap(); assert_eq!(compute_checksum(&smbios_ep), 0); } }
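The file_code above builds its SMBIOS 3.0 entry point around the usual firmware rule that every byte of the structure, including the checksum byte itself, must sum to zero modulo 256; compute_checksum() returns the two's complement of the wrapping byte sum to make that hold. A small self-contained sketch of that byte-level rule, kept independent of the crosvm types (the function name and test buffer are illustrative assumptions):

/// Return the byte that makes `bytes` sum to 0 (mod 256), i.e. the two's
/// complement of the wrapping byte sum -- the same rule compute_checksum()
/// applies to a packed entry-point struct.
fn entrypoint_checksum_byte(bytes: &[u8]) -> u8 {
    let sum = bytes.iter().fold(0u8, |acc, b| acc.wrapping_add(*b));
    (!sum).wrapping_add(1)
}

#[test]
fn patched_buffer_sums_to_zero() {
    // Treat byte 5 as the checksum slot, matching the 3.0 entry point layout
    // (5-byte signature followed by the checksum field).
    let mut buf = *b"_SM3_\0illustrative-bytes";
    buf[5] = 0;
    buf[5] = entrypoint_checksum_byte(&buf);
    let total = buf.iter().fold(0u8, |acc, b| acc.wrapping_add(*b));
    assert_eq!(total, 0);
}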
use std::mem; use std::result; use std::slice; use std::fs::OpenOptions; use std::io::prelude::*; use std::path::{Path, PathBuf}; use data_model::DataInit; use remain::sorted; use thiserror::Error; use vm_memory::{GuestAddress, GuestMemory}; #[sorted] #[derive(Error, Debug)] pub enum Error { #[error("The SMBIOS table has too little address space to be stored")] AddressOverflow, #[error("Failure while zeroing out the memory for the SMBIOS table")] Clear, #[error("Failure to verify host SMBIOS entry checksum")] InvalidChecksum, #[error("Failure to read host SMBIOS data")] InvalidInput, #[error("Failure while reading SMBIOS data file")] IoFailed, #[error("There was too little guest memory to store the SMBIOS table")] NotEnoughMemory, #[error("Failure to write additional data to memory")] WriteData, #[error("Failure to write SMBIOS entrypoint structure")] WriteSmbiosEp, } pub type Result<T> = result::Result<T, Error>; const SMBIOS_START: u64 = 0xf0000; const SM2_MAGIC_IDENT: &[u8; 4usize] = b"_SM_"; const SM3_MAGIC_IDENT: &[u8; 5usize] = b"_SM3_"; const BIOS_INFORMATION: u8 = 0; const SYSTEM_INFORMATION: u8 = 1; const END_OF_TABLE: u8 = 127; const PCI_SUPPORTED: u64 = 1 << 7; const IS_VIRTUAL_MACHINE: u8 = 1 << 4; fn compute_checksum<T: Copy>(v: &T) -> u8 { let v_slice = unsafe { slice::from_raw_parts(v as *const T as *const u8, mem::size_of::<T>()) }; let mut checksum: u8 = 0; for i in v_slice.iter() { checksum = checksum.wrapping_add(*i); } (!checksum).wrapping_add(1) } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios23Intermediate { pub signature: [u8; 5usize], pub checksum: u8, pub length: u16, pub address: u32, pub count: u16, pub revision: u8, } unsafe impl data_model::DataInit for Smbios23Intermediate {} impl Clone for Smbios23Intermediate { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios23Entrypoint { pub signature: [u8; 4usize], pub checksum: u8, pub length: u8, pub majorver: u8, pub minorver: u8, pub max_size: u16, pub revision: u8, pub reserved: [u8; 5usize], pub dmi: Smbios23Intermediate, } unsafe impl data_model::DataInit for Smbios23Entrypoint {} impl Clone for Smbios23Entrypoint { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct Smbios30Entrypoint { pub signature: [u8; 5usize], pub checksum: u8, pub length: u8, pub majorver: u8, pub minorver: u8, pub docrev: u8, pub revision: u8, pub reserved: u8, pub max_size: u32, pub physptr: u64, } unsafe impl data_model::DataInit for Smbios30Entrypoint {} impl Clone for Smbios30Entrypoint { fn clone(&self) -> Self { *self } } #[repr(packed)] #[derive(Default, Copy)] pub struct SmbiosBiosInfo { pub typ: u8, pub length: u8, pub handle: u16, pub vendor: u8, pub version: u8, pub start_addr: u16, pub release_date: u8, pub rom_size: u8, pub characteristics: u64, pub characteristics_ext1: u8, pub characteristics_ext2: u8, } impl Clone for SmbiosBiosInfo { fn clone(&self) -> Self { *self } } unsafe impl data_model::DataInit for SmbiosBiosInfo {} #[repr(packed)] #[derive(Default, Copy)] pub struct SmbiosSysInfo { pub typ: u8, pub length: u8, pub handle: u16, pub manufacturer: u8, pub product_name: u8, pub version: u8, pub serial_number: u8, pub uuid: [u8; 16usize], pub wake_up_type: u8, pub sku: u8, pub family: u8, } impl Clone for SmbiosSysInfo { fn clone(&self) -> Self { *self } } unsafe impl data_model::DataInit for SmbiosSysInfo {} fn write_and_incr<T: DataInit>( mem: &GuestMemory, val: T, mut curptr: GuestAddress, ) -> Result<GuestAddress> { 
mem.write_obj_at_addr(val, curpt
fn write_string(mem: &GuestMemory, val: &str, mut curptr: GuestAddress) -> Result<GuestAddress> {
    // Write the string bytes followed by a NUL terminator, advancing the cursor.
    for c in val.as_bytes().iter() {
        curptr = write_and_incr(mem, *c, curptr)?;
    }
    curptr = write_and_incr(mem, 0_u8, curptr)?;
    Ok(curptr)
}

fn setup_smbios_from_file(mem: &GuestMemory, path: &Path) -> Result<()> {
    // Read the provided SMBIOS entry point and DMI table files.
    let mut sme_path = PathBuf::from(path);
    sme_path.push("smbios_entry_point");
    let mut sme = Vec::new();
    OpenOptions::new()
        .read(true)
        .open(&sme_path)
        .map_err(|_| Error::IoFailed)?
        .read_to_end(&mut sme)
        .map_err(|_| Error::IoFailed)?;

    let mut dmi_path = PathBuf::from(path);
    dmi_path.push("DMI");
    let mut dmi = Vec::new();
    OpenOptions::new()
        .read(true)
        .open(&dmi_path)
        .map_err(|_| Error::IoFailed)?
        .read_to_end(&mut dmi)
        .map_err(|_| Error::IoFailed)?;

    // Host provided an SMBIOS 3.0 entry point.
    if sme.len() == mem::size_of::<Smbios30Entrypoint>() && sme.starts_with(SM3_MAGIC_IDENT) {
        let mut smbios_ep = Smbios30Entrypoint::default();
        smbios_ep.as_mut_slice().copy_from_slice(&sme);

        let physptr = GuestAddress(SMBIOS_START)
            .checked_add(mem::size_of::<Smbios30Entrypoint>() as u64)
            .ok_or(Error::NotEnoughMemory)?;

        mem.write_at_addr(&dmi, physptr)
            .map_err(|_| Error::NotEnoughMemory)?;

        smbios_ep.physptr = physptr.offset();
        smbios_ep.checksum = 0;
        smbios_ep.checksum = compute_checksum(&smbios_ep);
        mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START))
            .map_err(|_| Error::NotEnoughMemory)?;
        return Ok(());
    }

    // Host provided an SMBIOS 2.3 entry point.
    if sme.len() == mem::size_of::<Smbios23Entrypoint>() && sme.starts_with(SM2_MAGIC_IDENT) {
        let mut smbios_ep = Smbios23Entrypoint::default();
        smbios_ep.as_mut_slice().copy_from_slice(&sme);

        let physptr = GuestAddress(SMBIOS_START)
            .checked_add(mem::size_of::<Smbios23Entrypoint>() as u64)
            .ok_or(Error::NotEnoughMemory)?;

        mem.write_at_addr(&dmi, physptr)
            .map_err(|_| Error::NotEnoughMemory)?;

        smbios_ep.dmi.address = physptr.offset() as u32;
        smbios_ep.dmi.checksum = 0;
        smbios_ep.dmi.checksum = compute_checksum(&smbios_ep.dmi);
        smbios_ep.checksum = 0;
        smbios_ep.checksum = compute_checksum(&smbios_ep);
        mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START))
            .map_err(|_| Error::WriteSmbiosEp)?;
        return Ok(());
    }

    Err(Error::InvalidInput)
}

pub fn setup_smbios(mem: &GuestMemory, dmi_path: Option<PathBuf>) -> Result<()> {
    // If a host DMI path was supplied, pass the host tables through.
    if let Some(dmi_path) = dmi_path {
        return setup_smbios_from_file(mem, &dmi_path);
    }

    // Otherwise generate a minimal default table set.
    let physptr = GuestAddress(SMBIOS_START)
        .checked_add(mem::size_of::<Smbios30Entrypoint>() as u64)
        .ok_or(Error::NotEnoughMemory)?;
    let mut curptr = physptr;
    let mut handle = 0;

    {
        // BIOS information structure.
        handle += 1;
        let smbios_biosinfo = SmbiosBiosInfo {
            typ: BIOS_INFORMATION,
            length: mem::size_of::<SmbiosBiosInfo>() as u8,
            handle,
            vendor: 1,
            version: 2,
            characteristics: PCI_SUPPORTED,
            characteristics_ext2: IS_VIRTUAL_MACHINE,
            ..Default::default()
        };
        curptr = write_and_incr(mem, smbios_biosinfo, curptr)?;
        curptr = write_string(mem, "crosvm", curptr)?;
        curptr = write_string(mem, "0", curptr)?;
        curptr = write_and_incr(mem, 0_u8, curptr)?;
    }

    {
        // System information structure.
        handle += 1;
        let smbios_sysinfo = SmbiosSysInfo {
            typ: SYSTEM_INFORMATION,
            length: mem::size_of::<SmbiosSysInfo>() as u8,
            handle,
            manufacturer: 1,
            product_name: 2,
            ..Default::default()
        };
        curptr = write_and_incr(mem, smbios_sysinfo, curptr)?;
        curptr = write_string(mem, "ChromiumOS", curptr)?;
        curptr = write_string(mem, "crosvm", curptr)?;
        curptr = write_and_incr(mem, 0u8, curptr)?;
    }

    {
        // End-of-table marker.
        handle += 1;
        let smbios_sysinfo = SmbiosSysInfo {
            typ: END_OF_TABLE,
            length: mem::size_of::<SmbiosSysInfo>() as u8,
            handle,
            ..Default::default()
        };
        curptr = write_and_incr(mem, smbios_sysinfo, curptr)?;
        curptr = write_and_incr(mem, 0_u8, curptr)?;
    }

    {
        // Write the SMBIOS 3.0 entry point describing the tables written above.
        let mut smbios_ep = Smbios30Entrypoint::default();
        smbios_ep.signature = *SM3_MAGIC_IDENT;
        smbios_ep.length = mem::size_of::<Smbios30Entrypoint>() as u8;
        smbios_ep.majorver = 0x03;
        smbios_ep.minorver = 0x02;
        smbios_ep.docrev = 0x00;
        smbios_ep.revision = 0x01;
        smbios_ep.max_size = curptr.offset_from(physptr) as u32;
        smbios_ep.physptr = physptr.offset();
        smbios_ep.checksum = compute_checksum(&smbios_ep);
        mem.write_obj_at_addr(smbios_ep, GuestAddress(SMBIOS_START))
            .map_err(|_| Error::WriteSmbiosEp)?;
    }

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn struct_size() {
        assert_eq!(
            mem::size_of::<Smbios23Entrypoint>(),
            0x1fusize,
            concat!("Size of: ", stringify!(Smbios23Entrypoint))
        );
        assert_eq!(
            mem::size_of::<Smbios30Entrypoint>(),
            0x18usize,
            concat!("Size of: ", stringify!(Smbios30Entrypoint))
        );
        assert_eq!(
            mem::size_of::<SmbiosBiosInfo>(),
            0x14usize,
            concat!("Size of: ", stringify!(SmbiosBiosInfo))
        );
        assert_eq!(
            mem::size_of::<SmbiosSysInfo>(),
            0x1busize,
            concat!("Size of: ", stringify!(SmbiosSysInfo))
        );
    }

    #[test]
    fn entrypoint_checksum() {
        let mem = GuestMemory::new(&[(GuestAddress(SMBIOS_START), 4096)]).unwrap();

        setup_smbios(&mem, None).unwrap();

        let smbios_ep: Smbios30Entrypoint =
            mem.read_obj_from_addr(GuestAddress(SMBIOS_START)).unwrap();

        assert_eq!(compute_checksum(&smbios_ep), 0);
    }
}
r)
        .map_err(|_| Error::WriteData)?;
    curptr = curptr
        .checked_add(mem::size_of::<T>() as u64)
        .ok_or(Error::NotEnoughMemory)?;
    Ok(curptr)
}
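For reference, a minimal sketch of how the `setup_smbios` path above is exercised. It simply mirrors the `entrypoint_checksum` unit test in this module (the 4096-byte region size is the test's; `SMBIOS_START`, `setup_smbios`, `Smbios30Entrypoint`, and `compute_checksum` are the items already defined here), not crosvm's real VM-setup call site:

// Sketch only: back a guest memory region at SMBIOS_START and let setup_smbios
// generate the default BIOS/system/end-of-table structures.
let mem = GuestMemory::new(&[(GuestAddress(SMBIOS_START), 4096)]).expect("guest memory");
setup_smbios(&mem, None).expect("failed to write SMBIOS tables");

// The stored checksum is chosen so the wrapping byte-sum of the entry point is zero.
let ep: Smbios30Entrypoint = mem.read_obj_from_addr(GuestAddress(SMBIOS_START)).unwrap();
assert_eq!(compute_checksum(&ep), 0);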
function_block-function_prefixed
[ { "content": "/// Write a protective MBR for a disk of the given total size (in bytes).\n\n///\n\n/// This should be written at the start of the disk, before the GPT header. It is one `SECTOR_SIZE`\n\n/// long.\n\npub fn write_protective_mbr(file: &mut impl Write, disk_size: u64) -> Result<(), Error> {\n\n // Bootstrap code\n\n file.write_all(&[0; 446]).map_err(Error::WritingData)?;\n\n\n\n // Partition status\n\n file.write_all(&[0x00]).map_err(Error::WritingData)?;\n\n // Begin CHS\n\n file.write_all(&[0; 3]).map_err(Error::WritingData)?;\n\n // Partition type\n\n file.write_all(&[0xEE]).map_err(Error::WritingData)?;\n\n // End CHS\n\n file.write_all(&[0; 3]).map_err(Error::WritingData)?;\n\n let first_lba: u32 = 1;\n\n file.write_all(&first_lba.to_le_bytes())\n\n .map_err(Error::WritingData)?;\n\n let number_of_sectors: u32 = (disk_size / SECTOR_SIZE)\n\n .try_into()\n\n .map_err(Error::InvalidDiskSize)?;\n\n file.write_all(&number_of_sectors.to_le_bytes())\n\n .map_err(Error::WritingData)?;\n", "file_path": "disk/src/gpt.rs", "rank": 1, "score": 521745.24963153544 }, { "content": "/// Write a UUID in the mixed-endian format which GPT uses for GUIDs.\n\nfn write_guid(out: &mut impl Write, guid: Uuid) -> Result<(), io::Error> {\n\n let guid_fields = guid.as_fields();\n\n out.write_all(&guid_fields.0.to_le_bytes())?;\n\n out.write_all(&guid_fields.1.to_le_bytes())?;\n\n out.write_all(&guid_fields.2.to_le_bytes())?;\n\n out.write_all(guid_fields.3)?;\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn protective_mbr_size() {\n\n let mut buffer = vec![];\n\n write_protective_mbr(&mut buffer, 1000 * SECTOR_SIZE).unwrap();\n\n\n\n assert_eq!(buffer.len(), SECTOR_SIZE as usize);\n", "file_path": "disk/src/gpt.rs", "rank": 2, "score": 413875.9163077629 }, { "content": "/// Constructor for a conventional segment GDT (or LDT) entry. 
Derived from the kernel's segment.h.\n\npub fn gdt_entry(flags: u16, base: u32, limit: u32) -> u64 {\n\n (((base as u64) & 0xff000000u64) << (56 - 24))\n\n | (((flags as u64) & 0x0000f0ffu64) << 40)\n\n | (((limit as u64) & 0x000f0000u64) << (48 - 16))\n\n | (((base as u64) & 0x00ffffffu64) << 16)\n\n | ((limit as u64) & 0x0000ffffu64)\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 3, "score": 409874.8949948887 }, { "content": "fn write_idt_value(val: u64, guest_mem: &GuestMemory) -> Result<()> {\n\n let boot_idt_addr = GuestAddress(BOOT_IDT_OFFSET);\n\n guest_mem\n\n .write_obj_at_addr(val, boot_idt_addr)\n\n .map_err(|_| Error::WriteIDTFailure)\n\n}\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 4, "score": 408756.79636944074 }, { "content": "/// Returns a Vec of the valid memory addresses.\n\n/// These should be used to configure the GuestMemory structure for the platfrom.\n\npub fn arch_memory_regions(size: u64) -> Vec<(GuestAddress, u64)> {\n\n vec![(GuestAddress(AARCH64_PHYS_MEM_START), size)]\n\n}\n\n\n", "file_path": "aarch64/src/lib.rs", "rank": 6, "score": 398935.9602885841 }, { "content": "/// Copy virtio device configuration data from a subslice of `src` to a subslice of `dst`.\n\n/// Unlike std::slice::copy_from_slice(), this function copies as much as possible within\n\n/// the common subset of the two slices, truncating the requested range instead of\n\n/// panicking if the slices do not match in size.\n\n///\n\n/// `dst_offset` and `src_offset` specify the starting indexes of the `dst` and `src`\n\n/// slices, respectively; if either index is out of bounds, this function is a no-op\n\n/// rather than panicking. This makes it safe to call with arbitrary user-controlled\n\n/// inputs.\n\npub fn copy_config(dst: &mut [u8], dst_offset: u64, src: &[u8], src_offset: u64) {\n\n if let Ok(dst_offset) = usize::try_from(dst_offset) {\n\n if let Ok(src_offset) = usize::try_from(src_offset) {\n\n if let Some(dst_slice) = dst.get_mut(dst_offset..) {\n\n if let Some(src_slice) = src.get(src_offset..) 
{\n\n let len = cmp::min(dst_slice.len(), src_slice.len());\n\n let dst_subslice = &mut dst_slice[0..len];\n\n let src_subslice = &src_slice[0..len];\n\n dst_subslice.copy_from_slice(src_subslice);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "devices/src/virtio/mod.rs", "rank": 7, "score": 395054.8406662588 }, { "content": "fn write_gdt_table(table: &[u64], guest_mem: &GuestMemory) -> Result<()> {\n\n let boot_gdt_addr = GuestAddress(BOOT_GDT_OFFSET);\n\n for (index, entry) in table.iter().enumerate() {\n\n let addr = guest_mem\n\n .checked_offset(boot_gdt_addr, (index * mem::size_of::<u64>()) as u64)\n\n .ok_or(Error::WriteGDTFailure)?;\n\n guest_mem\n\n .write_obj_at_addr(*entry, addr)\n\n .map_err(|_| Error::WriteGDTFailure)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 8, "score": 394924.2260476564 }, { "content": "// # Examples\n\n//\n\n// # fn test_memory_offsets() {\n\n// # let start_addr1 = GuestAddress(0x100)\n\n// # let start_addr2 = GuestAddress(0x1100);\n\n// # let mem = GuestMemory::new(&vec![(start_addr1, 0x1000),(start_addr2, 0x1000)])?;\n\n// # assert_eq!(memory_offset(&mem, GuestAddress(0x1100), 0x1000).unwrap(),0x1000);\n\n// #}\n\nfn memory_offset(mem: &GuestMemory, guest_addr: GuestAddress, len: u64) -> UdmabufResult<u64> {\n\n mem.do_in_region(guest_addr, move |mapping, map_offset, memfd_offset| {\n\n let map_offset = map_offset as u64;\n\n if map_offset\n\n .checked_add(len)\n\n .map_or(true, |a| a > mapping.size() as u64)\n\n {\n\n return Err(GuestMemoryError::InvalidGuestAddress(guest_addr));\n\n }\n\n\n\n Ok(memfd_offset + map_offset)\n\n })\n\n .map_err(UdmabufError::InvalidOffset)\n\n}\n\n\n\n/// A convenience wrapper for the Linux kernel's udmabuf driver.\n\n///\n\n/// udmabuf is a kernel driver that turns memfd pages into dmabufs. 
It can be used for\n\n/// zero-copy buffer sharing between the guest and host when guest memory is backed by\n\n/// memfd pages.\n", "file_path": "devices/src/virtio/gpu/udmabuf.rs", "rank": 9, "score": 392014.6915175972 }, { "content": "/// Add an e820 region to the e820 map.\n\n/// Returns Ok(()) if successful, or an error if there is no space left in the map.\n\nfn add_e820_entry(params: &mut boot_params, addr: u64, size: u64, mem_type: u32) -> Result<()> {\n\n if params.e820_entries >= params.e820_table.len() as u8 {\n\n return Err(Error::E820Configuration);\n\n }\n\n\n\n params.e820_table[params.e820_entries as usize].addr = addr;\n\n params.e820_table[params.e820_entries as usize].size = size;\n\n params.e820_table[params.e820_entries as usize].type_ = mem_type;\n\n params.e820_entries += 1;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "x86_64/src/lib.rs", "rank": 10, "score": 384741.76241642 }, { "content": "// Reads the next u16 from the file.\n\nfn read_u16_from_file(mut f: &File) -> Result<u16> {\n\n let mut value = [0u8; 2];\n\n (&mut f)\n\n .read_exact(&mut value)\n\n .map_err(Error::ReadingHeader)?;\n\n Ok(u16::from_be_bytes(value))\n\n}\n\n\n", "file_path": "disk/src/qcow/mod.rs", "rank": 11, "score": 377444.9311190079 }, { "content": "// Reads the next u32 from the file.\n\nfn read_u32_from_file(mut f: &File) -> Result<u32> {\n\n let mut value = [0u8; 4];\n\n (&mut f)\n\n .read_exact(&mut value)\n\n .map_err(Error::ReadingHeader)?;\n\n Ok(u32::from_be_bytes(value))\n\n}\n\n\n", "file_path": "disk/src/qcow/mod.rs", "rank": 12, "score": 377345.2922637147 }, { "content": "// Reads the next u64 from the file.\n\nfn read_u64_from_file(mut f: &File) -> Result<u64> {\n\n let mut value = [0u8; 8];\n\n (&mut f)\n\n .read_exact(&mut value)\n\n .map_err(Error::ReadingHeader)?;\n\n Ok(u64::from_be_bytes(value))\n\n}\n\n\n\nimpl QcowHeader {\n\n /// Creates a QcowHeader from a reference to a file.\n\n pub fn new(f: &mut File) -> Result<QcowHeader> {\n\n f.seek(SeekFrom::Start(0)).map_err(Error::ReadingHeader)?;\n\n\n\n let magic = read_u32_from_file(f)?;\n\n if magic != QCOW_MAGIC {\n\n return Err(Error::InvalidMagic);\n\n }\n\n\n\n let mut header = QcowHeader {\n\n magic,\n", "file_path": "disk/src/qcow/mod.rs", "rank": 13, "score": 377327.87314516644 }, { "content": "/// Automatically build the hypervisor Segment struct for set_sregs from the kernel bit fields.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `entry` - The gdt entry.\n\n/// * `table_index` - Index of the entry in the gdt table.\n\npub fn segment_from_gdt(entry: u64, table_index: u8) -> Segment {\n\n Segment {\n\n base: get_base(entry),\n\n limit: get_limit(entry),\n\n selector: (table_index * 8) as u16,\n\n type_: get_type(entry),\n\n present: get_p(entry),\n\n dpl: get_dpl(entry),\n\n db: get_db(entry),\n\n s: get_s(entry),\n\n l: get_l(entry),\n\n g: get_g(entry),\n\n avl: get_avl(entry),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 14, "score": 376958.07060027856 }, { "content": "fn generate_checksum(data: &[u8]) -> u8 {\n\n (255 - data.iter().fold(0u8, |acc, x| acc.wrapping_add(*x))).wrapping_add(1)\n\n}\n", "file_path": "acpi_tables/src/lib.rs", "rank": 15, "score": 373375.4526189976 }, { "content": "fn create_pkg_length(data: &[u8], include_self: bool) -> Vec<u8> {\n\n let mut result = Vec::new();\n\n\n\n /* PkgLength is inclusive and includes the length bytes */\n\n let length_length = if data.len() < (2usize.pow(6) - 1) {\n\n 1\n\n } else if data.len() < 
(2usize.pow(12) - 2) {\n\n 2\n\n } else if data.len() < (2usize.pow(20) - 3) {\n\n 3\n\n } else {\n\n 4\n\n };\n\n\n\n let length = data.len() + if include_self { length_length } else { 0 };\n\n\n\n match length_length {\n\n 1 => result.push(length as u8),\n\n 2 => {\n\n result.push((1u8 << 6) | (length & 0xf) as u8);\n", "file_path": "acpi_tables/src/aml.rs", "rank": 16, "score": 365613.509782375 }, { "content": "fn read_varint32(data: &[u8]) -> (u32, usize) {\n\n let mut value: u32 = 0;\n\n let mut shift: u32 = 0;\n\n for (i, &b) in data.iter().enumerate() {\n\n if b < 0x80 {\n\n return match (b as u32).checked_shl(shift) {\n\n None => (0, 0),\n\n Some(b) => (value | b, i + 1),\n\n };\n\n }\n\n match ((b as u32) & 0x7F).checked_shl(shift) {\n\n None => return (0, 0),\n\n Some(b) => value |= b,\n\n }\n\n shift += 7;\n\n }\n\n (0, 0)\n\n}\n\n\n\nimpl crosvm_vcpu {\n", "file_path": "crosvm_plugin/src/lib.rs", "rank": 17, "score": 361905.912963879 }, { "content": "fn get_type(entry: u64) -> u8 {\n\n ((entry & 0x00000F0000000000) >> 40) as u8\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 18, "score": 352017.43940316595 }, { "content": "fn setup_page_tables(mem: &GuestMemory, sregs: &mut Sregs) -> Result<()> {\n\n // Puts PML4 right after zero page but aligned to 4k.\n\n let boot_pml4_addr = GuestAddress(0x9000);\n\n let boot_pdpte_addr = GuestAddress(0xa000);\n\n let boot_pde_addr = GuestAddress(0xb000);\n\n\n\n // Entry covering VA [0..512GB)\n\n mem.write_obj_at_addr(boot_pdpte_addr.offset() as u64 | 0x03, boot_pml4_addr)\n\n .map_err(|_| Error::WritePML4Address)?;\n\n\n\n // Entry covering VA [0..1GB)\n\n mem.write_obj_at_addr(boot_pde_addr.offset() as u64 | 0x03, boot_pdpte_addr)\n\n .map_err(|_| Error::WritePDPTEAddress)?;\n\n\n\n // 512 2MB entries together covering VA [0..1GB). Note we are assuming\n\n // CPU supports 2MB pages (/proc/cpuinfo has 'pse'). All modern CPUs do.\n\n for i in 0..512 {\n\n mem.write_obj_at_addr((i << 21) + 0x83u64, boot_pde_addr.unchecked_add(i * 8))\n\n .map_err(|_| Error::WritePDEAddress)?;\n\n }\n\n sregs.cr3 = boot_pml4_addr.offset() as u64;\n\n sregs.cr4 |= X86_CR4_PAE;\n\n sregs.cr0 |= X86_CR0_PG;\n\n sregs.efer |= EFER_LMA; // Long mode is active. Must be auto-enabled with CR0_PG.\n\n Ok(())\n\n}\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 19, "score": 343182.56196360366 }, { "content": "fn write_output(output: &mut dyn io::Write, data: &[u8]) -> io::Result<()> {\n\n output.write_all(data)?;\n\n output.flush()\n\n}\n\n\n", "file_path": "devices/src/virtio/console.rs", "rank": 21, "score": 336449.07199405413 }, { "content": "/// Round `val` up to the next multiple of 2**`align_log`.\n\nfn align_to_power_of_2(val: u64, align_log: u8) -> u64 {\n\n let align = 1 << align_log;\n\n ((val + (align - 1)) / align) * align\n\n}\n\n\n\nimpl PartitionInfo {\n\n fn aligned_size(&self) -> u64 {\n\n align_to_power_of_2(self.size, PARTITION_SIZE_SHIFT)\n\n }\n\n}\n\n\n\n/// The type of partition.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub enum ImagePartitionType {\n\n LinuxFilesystem,\n\n EfiSystemPartition,\n\n}\n\n\n\nimpl ImagePartitionType {\n\n fn guid(self) -> Uuid {\n\n match self {\n\n Self::LinuxFilesystem => LINUX_FILESYSTEM_GUID,\n\n Self::EfiSystemPartition => EFI_SYSTEM_PARTITION_GUID,\n\n }\n\n }\n\n}\n\n\n", "file_path": "disk/src/composite.rs", "rank": 22, "score": 328954.26379647106 }, { "content": "/// Read raw bytes from stdin.\n\n///\n\n/// This will block depending on the underlying mode of stdin. 
This will ignore the usual lock\n\n/// around stdin that the stdlib usually uses. If other code is using stdin, it is undefined who\n\n/// will get the underlying bytes.\n\npub fn read_raw_stdin(out: &mut [u8]) -> Result<usize> {\n\n // Safe because reading from stdin shouldn't have any safety implications.\n\n unsafe { read_raw(STDIN_FILENO, out) }\n\n}\n\n\n\n/// Trait for file descriptors that are TTYs, according to `isatty(3)`.\n\n///\n\n/// This is marked unsafe because the implementation must promise that the returned RawFd is a valid\n\n/// fd and that the lifetime of the returned fd is at least that of the trait object.\n\npub unsafe trait Terminal {\n\n /// Gets the file descriptor of the TTY.\n\n fn tty_fd(&self) -> RawFd;\n\n\n\n /// Set this terminal's mode to canonical mode (`ICANON | ECHO | ISIG`).\n\n fn set_canon_mode(&self) -> Result<()> {\n\n modify_mode(self.tty_fd(), |t| t.c_lflag |= ICANON | ECHO | ISIG)\n\n }\n\n\n\n /// Set this terminal's mode to raw mode (`!(ICANON | ECHO | ISIG)`).\n\n fn set_raw_mode(&self) -> Result<()> {\n", "file_path": "common/sys_util/src/terminal.rs", "rank": 23, "score": 326085.7153428153 }, { "content": "fn encode_vfd_recv(writer: &mut Writer, vfd_id: u32, data: &[u8], vfd_ids: &[u32]) -> WlResult<()> {\n\n let ctrl_vfd_recv = CtrlVfdRecv {\n\n hdr: CtrlHeader {\n\n type_: Le32::from(VIRTIO_WL_CMD_VFD_RECV),\n\n flags: Le32::from(0),\n\n },\n\n id: Le32::from(vfd_id),\n\n vfd_count: Le32::from(vfd_ids.len() as u32),\n\n };\n\n writer\n\n .write_obj(ctrl_vfd_recv)\n\n .map_err(WlError::WriteResponse)?;\n\n\n\n for &recv_vfd_id in vfd_ids.iter() {\n\n writer\n\n .write_obj(Le32::from(recv_vfd_id))\n\n .map_err(WlError::WriteResponse)?;\n\n }\n\n\n\n writer.write_all(data).map_err(WlError::WriteResponse)\n\n}\n\n\n", "file_path": "devices/src/virtio/wl.rs", "rank": 24, "score": 320000.0405685254 }, { "content": "// return the translated address and the size of the page it resides in.\n\nfn phys_addr(mem: &GuestMemory, vaddr: u64, sregs: &Sregs) -> Result<(u64, u64)> {\n\n const CR0_PG_MASK: u64 = 1 << 31;\n\n const CR4_PAE_MASK: u64 = 1 << 5;\n\n const CR4_LA57_MASK: u64 = 1 << 12;\n\n const MSR_EFER_LMA: u64 = 1 << 10;\n\n // bits 12 through 51 are the address in a PTE.\n\n const PTE_ADDR_MASK: u64 = ((1 << 52) - 1) & !0x0fff;\n\n const PAGE_PRESENT: u64 = 0x1;\n\n const PAGE_PSE_MASK: u64 = 0x1 << 7;\n\n\n\n const PAGE_SIZE_4K: u64 = 4 * 1024;\n\n const PAGE_SIZE_2M: u64 = 2 * 1024 * 1024;\n\n const PAGE_SIZE_1G: u64 = 1024 * 1024 * 1024;\n\n\n\n fn next_pte(mem: &GuestMemory, curr_table_addr: u64, vaddr: u64, level: usize) -> Result<u64> {\n\n let ent: u64 = mem\n\n .read_obj_from_addr(GuestAddress(\n\n (curr_table_addr & PTE_ADDR_MASK) + page_table_offset(vaddr, level),\n\n ))\n\n .map_err(|_| Error::TranslatingVirtAddr)?;\n", "file_path": "x86_64/src/lib.rs", "rank": 25, "score": 319584.23228592734 }, { "content": "/// Returns a Vec of the valid memory addresses.\n\n/// These should be used to configure the GuestMemory structure for the platform.\n\n/// For x86_64 all addresses are valid from the start of the kernel except a\n\n/// carve out at the end of 32bit address space.\n\nfn arch_memory_regions(size: u64, bios_size: Option<u64>) -> Vec<(GuestAddress, u64)> {\n\n let mem_end = GuestAddress(size);\n\n let first_addr_past_32bits = GuestAddress(FIRST_ADDR_PAST_32BITS);\n\n let end_32bit_gap_start = GuestAddress(END_ADDR_BEFORE_32BITS);\n\n\n\n let mut regions = Vec::new();\n\n if mem_end <= end_32bit_gap_start {\n\n 
regions.push((GuestAddress(0), size));\n\n if let Some(bios_size) = bios_size {\n\n regions.push((bios_start(bios_size), bios_size));\n\n }\n\n } else {\n\n regions.push((GuestAddress(0), end_32bit_gap_start.offset()));\n\n if let Some(bios_size) = bios_size {\n\n regions.push((bios_start(bios_size), bios_size));\n\n }\n\n regions.push((\n\n first_addr_past_32bits,\n\n mem_end.offset_from(end_32bit_gap_start),\n\n ));\n", "file_path": "x86_64/src/lib.rs", "rank": 26, "score": 318998.68493543315 }, { "content": "fn read_u64<T: Read>(readable: &mut T) -> u64 {\n\n let mut buf = [0u8; size_of::<u64>()];\n\n readable.read_exact(&mut buf[..]).unwrap();\n\n u64::from_le_bytes(buf)\n\n}\n", "file_path": "crosvm-fuzz/qcow_fuzzer.rs", "rank": 27, "score": 318626.0838798027 }, { "content": "fn read_u64<T: Read>(readable: &mut T) -> u64 {\n\n let mut buf = [0u8; size_of::<u64>()];\n\n readable.read_exact(&mut buf[..]).unwrap();\n\n u64::from_le_bytes(buf)\n\n}\n", "file_path": "crosvm-fuzz/block_fuzzer.rs", "rank": 28, "score": 318626.0838798027 }, { "content": "/// Inspect the image file type and create an appropriate disk file to match it.\n\npub fn create_disk_file(raw_image: File, mut max_nesting_depth: u32) -> Result<Box<dyn DiskFile>> {\n\n if max_nesting_depth == 0 {\n\n return Err(Error::MaxNestingDepthExceeded);\n\n }\n\n max_nesting_depth -= 1;\n\n\n\n let image_type = detect_image_type(&raw_image)?;\n\n Ok(match image_type {\n\n ImageType::Raw => Box::new(raw_image) as Box<dyn DiskFile>,\n\n ImageType::Qcow2 => {\n\n Box::new(QcowFile::from(raw_image, max_nesting_depth).map_err(Error::QcowError)?)\n\n as Box<dyn DiskFile>\n\n }\n\n #[cfg(feature = \"composite-disk\")]\n\n ImageType::CompositeDisk => {\n\n // Valid composite disk header present\n\n Box::new(\n\n CompositeDiskFile::from_file(raw_image, max_nesting_depth)\n\n .map_err(Error::CreateCompositeDisk)?,\n\n ) as Box<dyn DiskFile>\n", "file_path": "disk/src/disk.rs", "rank": 29, "score": 316178.0997864396 }, { "content": "fn get_word(data: &[u8]) -> Option<u16> {\n\n if data.len() != 2 {\n\n return None;\n\n }\n\n\n\n let value: [u8; 2] = [data[0], data[1]];\n\n Some(u16::from_le_bytes(value))\n\n}\n\n\n\nconst PCIE_RP_DID: u16 = 0x3420;\n\npub struct PcieRootPort {\n\n pcie_cap_reg_idx: Option<usize>,\n\n msix_config: Option<Arc<Mutex<MsixConfig>>>,\n\n slot_control: u16,\n\n slot_status: u16,\n\n secondary_number: u8,\n\n downstream_device: Option<(PciAddress, Option<HostHotPlugKey>)>,\n\n removed_downstream: Option<PciAddress>,\n\n}\n\n\n", "file_path": "devices/src/pci/pcie.rs", "rank": 30, "score": 314413.0884524755 }, { "content": "// Advance the internal cursor of the slices.\n\n// This is same with a nightly API `IoSliceMut::advance_slices` but for `&mut [u8]`.\n\nfn advance_slices_mut(bufs: &mut &mut [&mut [u8]], mut count: usize) {\n\n use std::mem::replace;\n\n\n\n let mut idx = 0;\n\n for b in bufs.iter() {\n\n if count < b.len() {\n\n break;\n\n }\n\n count -= b.len();\n\n idx += 1;\n\n }\n\n *bufs = &mut replace(bufs, &mut [])[idx..];\n\n if !bufs.is_empty() {\n\n let slice = replace(&mut bufs[0], &mut []);\n\n let (_, remaining) = slice.split_at_mut(count);\n\n bufs[0] = remaining;\n\n }\n\n}\n\n\n\npub(super) trait EndpointExt<R: Req>: Endpoint<R> {\n", "file_path": "third_party/vmm_vhost/src/connection.rs", "rank": 31, "score": 312997.9614064792 }, { "content": "/// Writes the available data from the reader into the given output queue.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `reader` - The Reader with the data we 
want to write.\n\n/// * `output` - The output sink we are going to write the data to.\n\npub fn process_transmit_request(mut reader: Reader, output: &mut dyn io::Write) -> io::Result<u32> {\n\n let len = reader.available_bytes();\n\n let mut data = vec![0u8; len];\n\n reader.read_exact(&mut data)?;\n\n write_output(output, &data)?;\n\n Ok(0)\n\n}\n\n\n\nimpl Worker {\n\n fn run(&mut self) {\n\n #[derive(PollToken)]\n\n enum Token {\n\n ReceiveQueueAvailable,\n\n TransmitQueueAvailable,\n\n InputAvailable,\n\n InterruptResample,\n\n Kill,\n\n }\n\n\n\n let wait_ctx: WaitContext<Token> = match WaitContext::build_with(&[\n", "file_path": "devices/src/virtio/console.rs", "rank": 32, "score": 312993.30121548165 }, { "content": "// Advance the internal cursor of the slices.\n\n// This is same with a nightly API `IoSlice::advance_slices` but for `&[u8]`.\n\nfn advance_slices(bufs: &mut &mut [&[u8]], mut count: usize) {\n\n use std::mem::replace;\n\n\n\n let mut idx = 0;\n\n for b in bufs.iter() {\n\n if count < b.len() {\n\n break;\n\n }\n\n count -= b.len();\n\n idx += 1;\n\n }\n\n *bufs = &mut replace(bufs, &mut [])[idx..];\n\n if !bufs.is_empty() {\n\n bufs[0] = &bufs[0][count..];\n\n }\n\n}\n\n\n", "file_path": "third_party/vmm_vhost/src/connection.rs", "rank": 33, "score": 312324.2263884204 }, { "content": "/// Detect the type of an image file by checking for a valid header of the supported formats.\n\npub fn detect_image_type(file: &File) -> Result<ImageType> {\n\n let mut f = file;\n\n let disk_size = f.get_len().map_err(Error::SeekingFile)?;\n\n let orig_seek = f.seek(SeekFrom::Current(0)).map_err(Error::SeekingFile)?;\n\n f.seek(SeekFrom::Start(0)).map_err(Error::SeekingFile)?;\n\n\n\n info!(\"disk size {}, \", disk_size);\n\n log_host_fs_type(f)?;\n\n // Try to read the disk in a nicely-aligned block size unless the whole file is smaller.\n\n const MAGIC_BLOCK_SIZE: usize = 4096;\n\n #[repr(align(4096))]\n\n struct BlockAlignedBuffer {\n\n data: [u8; MAGIC_BLOCK_SIZE],\n\n }\n\n let mut magic = BlockAlignedBuffer {\n\n data: [0u8; MAGIC_BLOCK_SIZE],\n\n };\n\n let magic_read_len = if disk_size > MAGIC_BLOCK_SIZE as u64 {\n\n MAGIC_BLOCK_SIZE\n\n } else {\n", "file_path": "disk/src/disk.rs", "rank": 34, "score": 309770.52479532984 }, { "content": "fn get_p(entry: u64) -> u8 {\n\n ((entry & 0x0000800000000000) >> 47) as u8\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 35, "score": 307756.22908516054 }, { "content": "fn get_l(entry: u64) -> u8 {\n\n ((entry & 0x0020000000000000) >> 53) as u8\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 36, "score": 307756.22908516054 }, { "content": "fn get_s(entry: u64) -> u8 {\n\n ((entry & 0x0000100000000000) >> 44) as u8\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 37, "score": 307756.22908516054 }, { "content": "fn get_g(entry: u64) -> u8 {\n\n ((entry & 0x0080000000000000) >> 55) as u8\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 38, "score": 307756.22908516054 }, { "content": "#[derive(Copy, Clone)]\n\nenum AddressSpaceType {\n\n Memory,\n\n IO,\n\n BusNumber,\n\n}\n\n\n\n/// AddressSpaceCachable represent cache types for AddressSpace object\n\n#[derive(Copy, Clone)]\n\npub enum AddressSpaceCachable {\n\n NotCacheable,\n\n Cacheable,\n\n WriteCombining,\n\n PreFetchable,\n\n}\n\n\n\n/// AddressSpace structure with type, resouce range and flags to\n\n/// construct Memory/IO/BusNumber objects\n\npub struct AddressSpace<T> {\n\n type_: AddressSpaceType,\n\n min: T,\n", "file_path": "acpi_tables/src/aml.rs", 
"rank": 39, "score": 305714.0944328971 }, { "content": "/// get host cpu max physical address bits\n\npub fn phy_max_address_bits() -> u32 {\n\n let mut phys_bits: u32 = 36;\n\n\n\n let highest_ext_function = unsafe { __cpuid(0x80000000) };\n\n if highest_ext_function.eax >= 0x80000008 {\n\n let addr_size = unsafe { __cpuid(0x80000008) };\n\n phys_bits = addr_size.eax & 0xff;\n\n }\n\n\n\n phys_bits\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use hypervisor::CpuIdEntry;\n\n\n\n #[test]\n\n fn feature_and_vendor_name() {\n\n let mut cpuid = hypervisor::CpuId::new(2);\n", "file_path": "x86_64/src/cpuid.rs", "rank": 40, "score": 304558.46647387964 }, { "content": "fn get_limit(entry: u64) -> u32 {\n\n ((((entry) & 0x000F000000000000) >> 32) | ((entry) & 0x000000000000FFFF)) as u32\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 41, "score": 301662.7881852556 }, { "content": "fn get_avl(entry: u64) -> u8 {\n\n ((entry & 0x0010000000000000) >> 52) as u8\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 42, "score": 301658.1255834085 }, { "content": "fn get_db(entry: u64) -> u8 {\n\n ((entry & 0x0040000000000000) >> 54) as u8\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 43, "score": 301658.1255834085 }, { "content": "fn get_dpl(entry: u64) -> u8 {\n\n ((entry & 0x0000600000000000) >> 45) as u8\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 44, "score": 301658.1255834085 }, { "content": "fn next_offset(offset: GuestAddress, len: u64) -> Option<GuestAddress> {\n\n // Enforce 64-byte allocation alignment.\n\n match len % 64 {\n\n 0 => offset.checked_add(len),\n\n x => offset.checked_add(len.checked_add(64 - x)?),\n\n }\n\n}\n\n\n", "file_path": "x86_64/src/acpi.rs", "rank": 46, "score": 300008.1345482827 }, { "content": "fn configure_segments_and_sregs(mem: &GuestMemory, sregs: &mut Sregs) -> Result<()> {\n\n let gdt_table: [u64; BOOT_GDT_MAX as usize] = [\n\n gdt::gdt_entry(0, 0, 0), // NULL\n\n gdt::gdt_entry(0xa09b, 0, 0xfffff), // CODE\n\n gdt::gdt_entry(0xc093, 0, 0xfffff), // DATA\n\n gdt::gdt_entry(0x808b, 0, 0xfffff), // TSS\n\n ];\n\n\n\n let code_seg = gdt::segment_from_gdt(gdt_table[1], 1);\n\n let data_seg = gdt::segment_from_gdt(gdt_table[2], 2);\n\n let tss_seg = gdt::segment_from_gdt(gdt_table[3], 3);\n\n\n\n // Write segments\n\n write_gdt_table(&gdt_table[..], mem)?;\n\n sregs.gdt.base = BOOT_GDT_OFFSET as u64;\n\n sregs.gdt.limit = mem::size_of_val(&gdt_table) as u16 - 1;\n\n\n\n write_idt_value(0, mem)?;\n\n sregs.idt.base = BOOT_IDT_OFFSET as u64;\n\n sregs.idt.limit = mem::size_of::<u64>() as u16 - 1;\n", "file_path": "x86_64/src/regs.rs", "rank": 47, "score": 299291.8950887102 }, { "content": "/// Obtain file system type of the file system that the file is served from.\n\npub fn get_filesystem_type(file: &File) -> Result<i64> {\n\n let mut statfs_buf = MaybeUninit::<libc::statfs>::uninit();\n\n // Safe because we just got the memory space with exact required amount and\n\n // passing that on.\n\n syscall!(unsafe { fstatfs(file.as_raw_fd(), statfs_buf.as_mut_ptr()) })?;\n\n // Safe because the kernel guarantees the struct is initialized.\n\n let statfs_buf = unsafe { statfs_buf.assume_init() };\n\n Ok(statfs_buf.f_type as i64)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn simple_test() {\n\n let file = File::open(\"/dev/null\").unwrap();\n\n let _fstype = get_filesystem_type(&file).unwrap();\n\n }\n\n}\n", "file_path": "common/sys_util/src/get_filesystem_type.rs", "rank": 48, "score": 
297087.11787912354 }, { "content": "fn adjust_count(count: u32) -> u32 {\n\n // As per spec 0 means max.\n\n if count == 0 {\n\n MAX_TIMER_FREQ\n\n } else {\n\n count\n\n }\n\n}\n\n\n", "file_path": "devices/src/pit.rs", "rank": 49, "score": 294613.8357564941 }, { "content": "fn create_memory_node(fdt: &mut FdtWriter, guest_mem: &GuestMemory) -> Result<()> {\n\n let mem_size = guest_mem.memory_size();\n\n let mem_reg_prop = [AARCH64_PHYS_MEM_START, mem_size];\n\n\n\n let memory_node = fdt.begin_node(\"memory\")?;\n\n fdt.property_string(\"device_type\", \"memory\")?;\n\n fdt.property_array_u64(\"reg\", &mem_reg_prop)?;\n\n fdt.end_node(memory_node)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "aarch64/src/fdt.rs", "rank": 50, "score": 292799.01491149113 }, { "content": "pub fn gdb_thread(mut gdbstub: GdbStub, port: u32) {\n\n let addr = format!(\"0.0.0.0:{}\", port);\n\n let listener = match TcpListener::bind(addr.clone()) {\n\n Ok(s) => s,\n\n Err(e) => {\n\n error!(\"Failed to create a TCP listener: {}\", e);\n\n return;\n\n }\n\n };\n\n info!(\"Waiting for a GDB connection on {:?}...\", addr);\n\n\n\n let (stream, addr) = match listener.accept() {\n\n Ok(v) => v,\n\n Err(e) => {\n\n error!(\"Failed to accept a connection from GDB: {}\", e);\n\n return;\n\n }\n\n };\n\n info!(\"GDB connected from {}\", addr);\n\n\n", "file_path": "src/gdb.rs", "rank": 51, "score": 291622.6782149816 }, { "content": "/// The x86 reset vector for i386+ and x86_64 puts the processor into an \"unreal mode\" where it\n\n/// can access the last 1 MB of the 32-bit address space in 16-bit mode, and starts the instruction\n\n/// pointer at the effective physical address 0xFFFFFFF0.\n\nfn bios_start(bios_size: u64) -> GuestAddress {\n\n GuestAddress(FIRST_ADDR_PAST_32BITS - bios_size)\n\n}\n\n\n", "file_path": "x86_64/src/lib.rs", "rank": 52, "score": 290556.1833233051 }, { "content": "/// Fills `output` completely with random bytes from the specified `source`.\n\npub fn rand_bytes(mut output: &mut [u8], source: Source) -> Result<()> {\n\n if output.is_empty() {\n\n return Ok(());\n\n }\n\n\n\n loop {\n\n // Safe because output is mutable and the writes are limited by output.len().\n\n let bytes = handle_eintr_errno!(unsafe {\n\n libc::getrandom(\n\n output.as_mut_ptr() as *mut c_void,\n\n output.len(),\n\n source.to_getrandom_flags(),\n\n )\n\n });\n\n\n\n if bytes < 0 {\n\n return errno_result();\n\n }\n\n if bytes as usize == output.len() {\n\n return Ok(());\n\n }\n\n\n\n // Wait for more entropy and try again for the remaining bytes.\n\n sleep(POLL_INTERVAL);\n\n output = &mut output[bytes as usize..];\n\n }\n\n}\n\n\n", "file_path": "common/sys_util/src/rand.rs", "rank": 53, "score": 289338.92150041345 }, { "content": "/// Configures the segment registers and system page tables for a given CPU.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `mem` - The memory that will be passed to the guest.\n\n/// * `vcpu` - The VCPU to configure registers on.\n\npub fn setup_sregs(mem: &GuestMemory, vcpu: &dyn VcpuX86_64) -> Result<()> {\n\n let mut sregs = vcpu.get_sregs().map_err(Error::GetSRegsIoctlFailed)?;\n\n\n\n configure_segments_and_sregs(mem, &mut sregs)?;\n\n setup_page_tables(mem, &mut sregs)?; // TODO(dgreid) - Can this be done once per system instead?\n\n\n\n vcpu.set_sregs(&sregs).map_err(Error::SetSRegsIoctlFailed)?;\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use vm_memory::{GuestAddress, GuestMemory};\n\n\n\n fn create_guest_mem() -> GuestMemory {\n\n 
GuestMemory::new(&[(GuestAddress(0), 0x10000)]).unwrap()\n\n }\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 54, "score": 288256.9375060223 }, { "content": "/// Given `data` containing a full set of descriptors as provided by the Linux kernel\n\n/// usbdevfs `descriptors` file, parse the descriptors into a tree data structure.\n\npub fn parse_usbfs_descriptors(data: &[u8]) -> Result<DeviceDescriptorTree> {\n\n let mut offset = 0;\n\n\n\n // Find the next descriptor of type T and return it and its offset.\n\n // Any other descriptors encountered while searching for the expected type are skipped.\n\n // The `offset` parameter will be advanced to point to the next byte after the returned\n\n // descriptor.\n\n fn next_descriptor<T: Descriptor + DataInit>(\n\n data: &[u8],\n\n offset: &mut usize,\n\n ) -> Result<(T, usize)> {\n\n let desc_type = T::descriptor_type() as u8;\n\n loop {\n\n let hdr = DescriptorHeader::from_slice(\n\n data.get(*offset..*offset + size_of::<DescriptorHeader>())\n\n .ok_or(Error::DescriptorParse)?,\n\n )\n\n .ok_or(Error::DescriptorParse)?;\n\n if hdr.bDescriptorType == desc_type {\n\n if usize::from(hdr.bLength) < size_of::<DescriptorHeader>() + size_of::<T>() {\n", "file_path": "usb_util/src/descriptor.rs", "rank": 55, "score": 285678.83611605916 }, { "content": "/// Converts frame rate to VIRTIO_SND_PCM_RATE_* enum\n\npub fn virtio_frame_rate(frame_rate: u32) -> Result<u8> {\n\n Ok(match frame_rate {\n\n 5512u32 => VIRTIO_SND_PCM_RATE_5512,\n\n 8000u32 => VIRTIO_SND_PCM_RATE_8000,\n\n 11025u32 => VIRTIO_SND_PCM_RATE_11025,\n\n 16000u32 => VIRTIO_SND_PCM_RATE_16000,\n\n 22050u32 => VIRTIO_SND_PCM_RATE_22050,\n\n 32000u32 => VIRTIO_SND_PCM_RATE_32000,\n\n 44100u32 => VIRTIO_SND_PCM_RATE_44100,\n\n 48000u32 => VIRTIO_SND_PCM_RATE_48000,\n\n 64000u32 => VIRTIO_SND_PCM_RATE_64000,\n\n 88200u32 => VIRTIO_SND_PCM_RATE_88200,\n\n 96000u32 => VIRTIO_SND_PCM_RATE_96000,\n\n 176400u32 => VIRTIO_SND_PCM_RATE_176400,\n\n 192000u32 => VIRTIO_SND_PCM_RATE_192000,\n\n 384000u32 => VIRTIO_SND_PCM_RATE_384000,\n\n _ => {\n\n return Err(Error::UnsupportedFrameRate(frame_rate));\n\n }\n\n })\n\n}\n\n\n", "file_path": "devices/src/virtio/snd/common.rs", "rank": 56, "score": 285628.7343148628 }, { "content": "fn log_host_fs_type(file: &File) -> Result<()> {\n\n let fstype = get_filesystem_type(file).map_err(Error::HostFsType)?;\n\n info!(\"Disk image file is hosted on file system type {:x}\", fstype);\n\n Ok(())\n\n}\n\n\n", "file_path": "disk/src/disk.rs", "rank": 57, "score": 284778.7460893523 }, { "content": "fn create_resv_memory_node(fdt: &mut FdtWriter, resv_size: Option<u64>) -> Result<Option<u32>> {\n\n if let Some(resv_size) = resv_size {\n\n let resv_memory_node = fdt.begin_node(\"reserved-memory\")?;\n\n fdt.property_u32(\"#address-cells\", 0x2)?;\n\n fdt.property_u32(\"#size-cells\", 0x2)?;\n\n fdt.property_null(\"ranges\")?;\n\n\n\n let restricted_dma_pool = fdt.begin_node(\"restricted_dma_reserved\")?;\n\n fdt.property_u32(\"phandle\", PHANDLE_RESTRICTED_DMA_POOL)?;\n\n fdt.property_string(\"compatible\", \"restricted-dma-pool\")?;\n\n fdt.property_u64(\"size\", resv_size)?;\n\n fdt.property_u64(\"alignment\", base::pagesize() as u64)?;\n\n fdt.end_node(restricted_dma_pool)?;\n\n\n\n fdt.end_node(resv_memory_node)?;\n\n Ok(Some(PHANDLE_RESTRICTED_DMA_POOL))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "aarch64/src/fdt.rs", "rank": 58, "score": 283486.63845778344 }, { "content": "pub fn vmm_va_to_gpa(maps: &[MappingInfo], vmm_va: u64) -> 
VhostResult<GuestAddress> {\n\n for map in maps {\n\n if vmm_va >= map.vmm_addr && vmm_va < map.vmm_addr + map.size {\n\n return Ok(GuestAddress(vmm_va - map.vmm_addr + map.guest_phys));\n\n }\n\n }\n\n Err(VhostError::InvalidMessage)\n\n}\n\n\n", "file_path": "devices/src/virtio/vhost/user/device/handler.rs", "rank": 59, "score": 282053.67722100724 }, { "content": "/// Converts VIRTIO_SND_PCM_RATE_* enum to frame rate\n\npub fn from_virtio_frame_rate(virtio_frame_rate: u8) -> Result<u32> {\n\n Ok(match virtio_frame_rate {\n\n VIRTIO_SND_PCM_RATE_5512 => 5512u32,\n\n VIRTIO_SND_PCM_RATE_8000 => 8000u32,\n\n VIRTIO_SND_PCM_RATE_11025 => 11025u32,\n\n VIRTIO_SND_PCM_RATE_16000 => 16000u32,\n\n VIRTIO_SND_PCM_RATE_22050 => 22050u32,\n\n VIRTIO_SND_PCM_RATE_32000 => 32000u32,\n\n VIRTIO_SND_PCM_RATE_44100 => 44100u32,\n\n VIRTIO_SND_PCM_RATE_48000 => 48000u32,\n\n VIRTIO_SND_PCM_RATE_64000 => 64000u32,\n\n VIRTIO_SND_PCM_RATE_88200 => 88200u32,\n\n VIRTIO_SND_PCM_RATE_96000 => 96000u32,\n\n VIRTIO_SND_PCM_RATE_176400 => 176400u32,\n\n VIRTIO_SND_PCM_RATE_192000 => 192000u32,\n\n VIRTIO_SND_PCM_RATE_384000 => 384000u32,\n\n _ => {\n\n return Err(Error::UnsupportedVirtioFrameRate(virtio_frame_rate));\n\n }\n\n })\n\n}\n\n\n", "file_path": "devices/src/virtio/snd/common.rs", "rank": 60, "score": 281461.6914534597 }, { "content": "/// Get the max number of open files allowed by the environment.\n\npub fn get_max_open_files() -> Result<u64> {\n\n let mut buf = mem::MaybeUninit::<libc::rlimit64>::zeroed();\n\n\n\n // Safe because this will only modify `buf` and we check the return value.\n\n let res = unsafe { libc::prlimit64(0, libc::RLIMIT_NOFILE, ptr::null(), buf.as_mut_ptr()) };\n\n if res == 0 {\n\n // Safe because the kernel guarantees that the struct is fully initialized.\n\n let limit = unsafe { buf.assume_init() };\n\n Ok(limit.rlim_max)\n\n } else {\n\n errno_result()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use libc::EBADF;\n\n use std::io::Write;\n\n\n\n use super::*;\n", "file_path": "common/sys_util/src/lib.rs", "rank": 61, "score": 279779.62392488733 }, { "content": "// Returns the value of the highest bit in a 64-bit value. 
Equivalent to\n\n// 1 << HighBitSet(x)\n\nfn get_power_of_two(data: u64) -> u64 {\n\n 1 << (64 - data.leading_zeros() - 1)\n\n}\n\n\n", "file_path": "x86_64/src/regs.rs", "rank": 62, "score": 275470.6809746884 }, { "content": "/// Returns a string representation of the given virtio device type number.\n\npub fn type_to_str(type_: u32) -> Option<&'static str> {\n\n Some(match type_ {\n\n TYPE_NET => \"net\",\n\n TYPE_BLOCK => \"block\",\n\n TYPE_CONSOLE => \"console\",\n\n TYPE_RNG => \"rng\",\n\n TYPE_BALLOON => \"balloon\",\n\n TYPE_RPMSG => \"rpmsg\",\n\n TYPE_SCSI => \"scsi\",\n\n TYPE_9P => \"9p\",\n\n TYPE_RPROC_SERIAL => \"rproc-serial\",\n\n TYPE_CAIF => \"caif\",\n\n TYPE_INPUT => \"input\",\n\n TYPE_GPU => \"gpu\",\n\n TYPE_VSOCK => \"vsock\",\n\n TYPE_CRYPTO => \"crypto\",\n\n TYPE_IOMMU => \"iommu\",\n\n TYPE_SOUND => \"snd\",\n\n TYPE_FS => \"fs\",\n\n TYPE_PMEM => \"pmem\",\n\n TYPE_WL => \"wl\",\n\n TYPE_TPM => \"tpm\",\n\n TYPE_VIDEO_DEC => \"video-decoder\",\n\n TYPE_VIDEO_ENC => \"video-encoder\",\n\n _ => return None,\n\n })\n\n}\n\n\n", "file_path": "devices/src/virtio/mod.rs", "rank": 63, "score": 275009.5930453395 }, { "content": "/// Returns the set of reserved base features common to all virtio devices.\n\npub fn base_features(protected_vm: ProtectionType) -> u64 {\n\n let mut features: u64 = 1 << VIRTIO_F_VERSION_1 | 1 << VIRTIO_RING_F_EVENT_IDX;\n\n\n\n if protected_vm == ProtectionType::Protected {\n\n features |= 1 << VIRTIO_F_ACCESS_PLATFORM;\n\n }\n\n\n\n features\n\n}\n", "file_path": "devices/src/virtio/mod.rs", "rank": 65, "score": 271912.5923751684 }, { "content": "/// Safe wrapper for `libc::lseek64()`\n\nfn lseek(file: &mut File, offset: i64, whence: i32) -> Result<Option<u64>> {\n\n // This is safe because we pass a known-good file descriptor.\n\n let res = unsafe { lseek64(file.as_raw_fd(), offset, whence) };\n\n\n\n if res < 0 {\n\n // Convert ENXIO into None; pass any other error as-is.\n\n let err = Error::last_os_error();\n\n if let Some(errno) = Error::raw_os_error(&err) {\n\n if errno == ENXIO {\n\n return Ok(None);\n\n }\n\n }\n\n Err(err)\n\n } else {\n\n Ok(Some(res as u64))\n\n }\n\n}\n\n\n\nimpl SeekHole for File {\n\n fn seek_hole(&mut self, offset: u64) -> Result<Option<u64>> {\n", "file_path": "common/sys_util/src/seek_hole.rs", "rank": 66, "score": 271593.8304904385 }, { "content": "fn check_content(server: &mut Server, content: &[u8], fid: u32) {\n\n for offset in 0..content.len() {\n\n let tread = Tread {\n\n fid,\n\n offset: offset as u64,\n\n count: DEFAULT_BUFFER_SIZE,\n\n };\n\n\n\n let rread = server.read(&tread).expect(\"failed to read file\");\n\n assert_eq!(content[offset..], rread.data[..]);\n\n }\n\n}\n\n\n", "file_path": "common/p9/src/server/tests.rs", "rank": 67, "score": 271493.0883049788 }, { "content": "/// Creates a flattened device tree containing all of the parameters used\n\n/// by Android.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `fdt` - The DTB to modify. 
The top-most node should be open.\n\n/// * `android-fstab` - A text file of Android fstab entries to add to the DTB\n\npub fn create_android_fdt(fdt: &mut FdtWriter, fstab: File) -> Result<()> {\n\n let vecs = BufReader::new(fstab)\n\n .lines()\n\n .map(|l| parse_fstab_line(&l.map_err(Error::FdtIoError)?))\n\n .collect::<Result<Vec<Vec<String>>>>()?;\n\n let firmware_node = fdt.begin_node(\"firmware\")?;\n\n let android_node = fdt.begin_node(\"android\")?;\n\n fdt.property_string(\"compatible\", \"android,firmware\")?;\n\n\n\n let (dtprop, fstab): (_, Vec<_>) = vecs.into_iter().partition(|x| x[0] == \"#dt-vendor\");\n\n let vendor_node = fdt.begin_node(\"vendor\")?;\n\n for vec in dtprop {\n\n let content = std::fs::read_to_string(&vec[2]).map_err(Error::FdtIoError)?;\n\n fdt.property_string(&vec[1], &content)?;\n\n }\n\n fdt.end_node(vendor_node)?;\n\n let fstab_node = fdt.begin_node(\"fstab\")?;\n\n fdt.property_string(\"compatible\", \"android,fstab\")?;\n\n for vec in fstab {\n\n let partition = &vec[1][1..];\n", "file_path": "arch/src/android.rs", "rank": 68, "score": 271007.9892720342 }, { "content": "/// Gets the appropriate virtio-snd error to return to the driver from a VioSError.\n\npub fn vios_error_to_status_code(e: VioSError) -> u32 {\n\n match e {\n\n VioSError::ServerIOError(_) => VIRTIO_SND_S_IO_ERR,\n\n _ => VIRTIO_SND_S_NOT_SUPP,\n\n }\n\n}\n\n\n", "file_path": "devices/src/virtio/snd/vios_backend/streams.rs", "rank": 69, "score": 270704.22082826373 }, { "content": "pub trait TapT: FileReadWriteVolatile + Read + Write + AsRawDescriptor + Send + Sized {\n\n /// Create a new tap interface. Set the `vnet_hdr` flag to true to allow offloading on this tap,\n\n /// which will add an extra 12 byte virtio net header to incoming frames. Offloading cannot\n\n /// be used if `vnet_hdr` is false.\n\n /// set 'multi_vq' to ture, if tap have multi virt queue pairs\n\n fn new(vnet_hdr: bool, multi_vq: bool) -> Result<Self>;\n\n\n\n /// Change the origin tap into multiqueue taps, this means create other taps based on the\n\n /// origin tap.\n\n fn into_mq_taps(self, vq_pairs: u16) -> Result<Vec<Self>>;\n\n\n\n /// Get the host-side IP address for the tap interface.\n\n fn ip_addr(&self) -> Result<net::Ipv4Addr>;\n\n\n\n /// Set the host-side IP address for the tap interface.\n\n fn set_ip_addr(&self, ip_addr: net::Ipv4Addr) -> Result<()>;\n\n\n\n /// Get the netmask for the tap interface's subnet.\n\n fn netmask(&self) -> Result<net::Ipv4Addr>;\n\n\n", "file_path": "net_util/src/lib.rs", "rank": 70, "score": 267741.8830372744 }, { "content": "/// Replaces the optional `File` to echo log messages to.\n\n///\n\n/// The default behavior is to not echo to a file. 
Passing `None` to this function restores that\n\n/// behavior.\n\n///\n\n/// Does nothing if syslog was never initialized.\n\n///\n\n/// # Arguments\n\n/// * `file` - `Some(file)` to echo to `file`, `None` to disable echoing to the file previously passed to `echo_file`.\n\npub fn echo_file(file: Option<File>) {\n\n let mut state = lock!();\n\n state.file = file;\n\n}\n\n\n", "file_path": "common/sys_util/src/syslog.rs", "rank": 71, "score": 267174.13507342717 }, { "content": "fn create_integer(v: usize, bytes: &mut Vec<u8>) {\n\n if v <= u8::max_value().into() {\n\n (v as u8).to_aml_bytes(bytes);\n\n } else if v <= u16::max_value().into() {\n\n (v as u16).to_aml_bytes(bytes);\n\n } else if v <= u32::max_value() as usize {\n\n (v as u32).to_aml_bytes(bytes);\n\n } else {\n\n (v as u64).to_aml_bytes(bytes);\n\n }\n\n}\n\n\n\npub type Usize = usize;\n\n\n\nimpl Aml for Usize {\n\n fn to_aml_bytes(&self, bytes: &mut Vec<u8>) {\n\n create_integer(*self, bytes);\n\n }\n\n}\n\n\n", "file_path": "acpi_tables/src/aml.rs", "rank": 72, "score": 264764.1732665332 }, { "content": "fn convert_copy<R, W>(reader: &mut R, writer: &mut W, offset: u64, size: u64) -> Result<()>\n\nwhere\n\n R: Read + Seek,\n\n W: Write + Seek,\n\n{\n\n const CHUNK_SIZE: usize = 65536;\n\n let mut buf = [0; CHUNK_SIZE];\n\n let mut read_count = 0;\n\n reader\n\n .seek(SeekFrom::Start(offset))\n\n .map_err(Error::SeekingFile)?;\n\n writer\n\n .seek(SeekFrom::Start(offset))\n\n .map_err(Error::SeekingFile)?;\n\n loop {\n\n let this_count = min(CHUNK_SIZE as u64, size - read_count) as usize;\n\n let nread = reader\n\n .read(&mut buf[..this_count])\n\n .map_err(Error::ReadingData)?;\n\n writer.write(&buf[..nread]).map_err(Error::WritingData)?;\n\n read_count += nread as u64;\n\n if nread == 0 || read_count == size {\n\n break;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "disk/src/disk.rs", "rank": 73, "score": 264265.3357307093 }, { "content": "pub fn create_guest_memory(\n\n contexts: &[VhostUserMemoryRegion],\n\n files: Vec<File>,\n\n) -> VhostResult<(GuestMemory, Vec<MappingInfo>)> {\n\n let mut regions = Vec::with_capacity(files.len());\n\n for (region, file) in contexts.iter().zip(files.into_iter()) {\n\n let region = MemoryRegion::new_from_shm(\n\n region.memory_size,\n\n GuestAddress(region.guest_phys_addr),\n\n region.mmap_offset,\n\n Arc::new(SharedMemory::from_safe_descriptor(SafeDescriptor::from(file)).unwrap()),\n\n )\n\n .map_err(|e| {\n\n error!(\"failed to create a memory region: {}\", e);\n\n VhostError::InvalidOperation\n\n })?;\n\n regions.push(region);\n\n }\n\n let guest_mem = GuestMemory::from_regions(regions).map_err(|e| {\n\n error!(\"failed to create guest memory: {}\", e);\n", "file_path": "devices/src/virtio/vhost/user/device/handler.rs", "rank": 74, "score": 263729.7903481858 }, { "content": "/// When ready to receive a command, the `MAGIC_LINE` is written to `input`.\n\n/// The received command is executed via /bin/sh/ and it's stdout is written\n\n/// back to `output`, terminated by `MAGIC_LINE`.\n\nfn listen(input: Box<dyn io::Read>, mut output: Box<dyn io::Write>) -> io::Result<()> {\n\n let mut reader = io::BufReader::new(input);\n\n loop {\n\n writeln!(&mut output, \"{}\", MAGIC_LINE).unwrap();\n\n\n\n let mut command = String::new();\n\n reader.read_line(&mut command)?;\n\n if command.trim() == \"exit\" {\n\n break;\n\n }\n\n\n\n println!(\"-> {:?}\", command);\n\n let result = Command::new(\"/bin/sh\")\n\n .args(&[\"-c\", &command])\n\n .stderr(Stdio::inherit())\n\n .output()\n\n 
.unwrap();\n\n\n\n output.write(&result.stdout)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "integration_tests/guest_under_test/delegate.rs", "rank": 75, "score": 262873.1581362186 }, { "content": "fn create_serial_node(fdt: &mut FdtWriter, addr: u64, irq: u32) -> Result<()> {\n\n let serial_reg_prop = [addr, AARCH64_SERIAL_SIZE];\n\n let irq = [GIC_FDT_IRQ_TYPE_SPI, irq, IRQ_TYPE_EDGE_RISING];\n\n\n\n let serial_node = fdt.begin_node(&format!(\"U6_16550A@{:x}\", addr))?;\n\n fdt.property_string(\"compatible\", \"ns16550a\")?;\n\n fdt.property_array_u64(\"reg\", &serial_reg_prop)?;\n\n fdt.property_u32(\"clock-frequency\", AARCH64_SERIAL_SPEED)?;\n\n fdt.property_array_u32(\"interrupts\", &irq)?;\n\n fdt.end_node(serial_node)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "aarch64/src/fdt.rs", "rank": 76, "score": 261296.31887042773 }, { "content": "fn create_facp_table(sci_irq: u16, pm_iobase: u32) -> SDT {\n\n let mut facp = SDT::new(\n\n *b\"FACP\",\n\n FADT_LEN,\n\n FADT_REVISION,\n\n *b\"CROSVM\",\n\n *b\"CROSVMDT\",\n\n OEM_REVISION,\n\n );\n\n\n\n let fadt_flags: u32 = FADT_POWER_BUTTON | FADT_SLEEP_BUTTON; // mask POWER and SLEEP BUTTON\n\n facp.write(FADT_FIELD_FLAGS, fadt_flags);\n\n\n\n // SCI Interrupt\n\n facp.write(FADT_FIELD_SCI_INTERRUPT, sci_irq);\n\n\n\n // PM1A Event Block Address\n\n facp.write(FADT_FIELD_PM1A_EVENT_BLK_ADDR, pm_iobase);\n\n\n\n // PM1A Control Block Address\n", "file_path": "x86_64/src/acpi.rs", "rank": 77, "score": 261276.01232638152 }, { "content": "fn parse_bus_id_addr(v: &str) -> ModifyUsbResult<(u8, u8, u16, u16)> {\n\n debug!(\"parse_bus_id_addr: {}\", v);\n\n let mut ids = v.split(':');\n\n match (ids.next(), ids.next(), ids.next(), ids.next()) {\n\n (Some(bus_id), Some(addr), Some(vid), Some(pid)) => {\n\n let bus_id = bus_id\n\n .parse::<u8>()\n\n .map_err(|e| ModifyUsbError::ArgParseInt(\"bus_id\", bus_id.to_owned(), e))?;\n\n let addr = addr\n\n .parse::<u8>()\n\n .map_err(|e| ModifyUsbError::ArgParseInt(\"addr\", addr.to_owned(), e))?;\n\n let vid = u16::from_str_radix(vid, 16)\n\n .map_err(|e| ModifyUsbError::ArgParseInt(\"vid\", vid.to_owned(), e))?;\n\n let pid = u16::from_str_radix(pid, 16)\n\n .map_err(|e| ModifyUsbError::ArgParseInt(\"pid\", pid.to_owned(), e))?;\n\n Ok((bus_id, addr, vid, pid))\n\n }\n\n _ => Err(ModifyUsbError::ArgParse(\n\n \"BUS_ID_ADDR_BUS_NUM_DEV_NUM\",\n\n v.to_owned(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 78, "score": 260655.63185947342 }, { "content": "fn reply_error<W: Writer>(e: io::Error, unique: u64, mut w: W) -> Result<usize> {\n\n let header = OutHeader {\n\n len: size_of::<OutHeader>() as u32,\n\n error: -e.raw_os_error().unwrap_or(libc::EIO),\n\n unique,\n\n };\n\n\n\n w.write_all(header.as_slice())\n\n .map_err(Error::EncodeMessage)?;\n\n w.flush().map_err(Error::FlushMessage)?;\n\n\n\n Ok(header.len as usize)\n\n}\n\n\n", "file_path": "fuse/src/server.rs", "rank": 79, "score": 258895.12928886813 }, { "content": "fn convert_reader<R>(reader: &mut R, dst_file: File, dst_type: ImageType) -> Result<()>\n\nwhere\n\n R: Read + Seek + SeekHole,\n\n{\n\n let src_size = reader.seek(SeekFrom::End(0)).map_err(Error::SeekingFile)?;\n\n reader\n\n .seek(SeekFrom::Start(0))\n\n .map_err(Error::SeekingFile)?;\n\n\n\n // Ensure the destination file is empty before writing to it.\n\n dst_file.set_len(0).map_err(Error::SettingFileSize)?;\n\n\n\n match dst_type {\n\n ImageType::Qcow2 => {\n\n let mut dst_writer = QcowFile::new(dst_file, src_size).map_err(Error::QcowError)?;\n\n 
convert_reader_writer(reader, &mut dst_writer, src_size)\n\n }\n\n ImageType::Raw => {\n\n let mut dst_writer = dst_file;\n\n // Set the length of the destination file to convert it into a sparse file\n\n // of the desired size.\n\n dst_writer\n\n .set_len(src_size)\n\n .map_err(Error::SettingFileSize)?;\n\n convert_reader_writer(reader, &mut dst_writer, src_size)\n\n }\n\n _ => Err(Error::ConversionNotSupported),\n\n }\n\n}\n\n\n", "file_path": "disk/src/disk.rs", "rank": 80, "score": 257269.75867395592 }, { "content": "/// Test-only function used to create a pipe that is full. The pipe is created, has its size set to\n\n/// the minimum and then has that much data written to it. Use `new_pipe_full` to test handling of\n\n/// blocking `write` calls in unit tests.\n\npub fn new_pipe_full() -> Result<(File, File)> {\n\n use std::io::Write;\n\n\n\n let (rx, mut tx) = pipe(true)?;\n\n // The smallest allowed size of a pipe is the system page size on linux.\n\n let page_size = set_pipe_size(tx.as_raw_fd(), round_up_to_page_size(1))?;\n\n\n\n // Fill the pipe with page_size zeros so the next write call will block.\n\n let buf = vec![0u8; page_size];\n\n tx.write_all(&buf)?;\n\n\n\n Ok((rx, tx))\n\n}\n\n\n\n/// Used to attempt to clean up a named pipe after it is no longer used.\n\npub struct UnlinkUnixDatagram(pub UnixDatagram);\n\nimpl AsRef<UnixDatagram> for UnlinkUnixDatagram {\n\n fn as_ref(&self) -> &UnixDatagram {\n\n &self.0\n\n }\n", "file_path": "common/sys_util/src/lib.rs", "rank": 81, "score": 255921.5940955686 }, { "content": "// Converts the input bytes to an output string in the format \"0x01,0x02,0x03...\".\n\nfn format_as_hex(data: &[u8]) -> String {\n\n let mut out = String::new();\n\n for (i, d) in data.iter().enumerate() {\n\n out.push_str(&format!(\"0x{:02x}\", d));\n\n if i < data.len() - 1 {\n\n out.push(',')\n\n }\n\n }\n\n out\n\n}\n\n\n", "file_path": "tests/plugins.rs", "rank": 82, "score": 255887.00203449896 }, { "content": "#[inline]\n\nfn usize_to_u64(val: usize) -> u64 {\n\n val.try_into().expect(\"`usize` doesn't fit inside a `u64`\")\n\n}\n\n\n\npub struct PendingOperation {\n\n waker_token: Option<WakerToken>,\n\n ex: Weak<RawExecutor>,\n\n submitted: bool,\n\n}\n\n\n\nimpl Future for PendingOperation {\n\n type Output = Result<u32>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n let token = self\n\n .waker_token\n\n .as_ref()\n\n .expect(\"PendingOperation polled after returning Poll::Ready\");\n\n if let Some(ex) = self.ex.upgrade() {\n\n if let Some(result) = ex.get_result(token, cx) {\n", "file_path": "common/cros_async/src/uring_executor.rs", "rank": 83, "score": 254654.71765221417 }, { "content": "// Update the status register and if any interrupts need to fire, raise them.\n\nfn update_sr(regs: &mut Ac97BusMasterRegs, func: Ac97Function, val: u16) {\n\n let int_mask = match func {\n\n Ac97Function::Input => GS_PIINT,\n\n Ac97Function::Output => GS_POINT,\n\n Ac97Function::Microphone => GS_MINT,\n\n };\n\n\n\n let mut interrupt_high = false;\n\n\n\n {\n\n let func_regs = regs.func_regs_mut(func);\n\n let old_sr = func_regs.sr;\n\n func_regs.sr = val;\n\n if (old_sr ^ val) & SR_INT_MASK != 0 {\n\n if (val & SR_LVBCI) != 0 && (func_regs.cr & CR_LVBIE) != 0 {\n\n interrupt_high = true;\n\n }\n\n if (val & SR_BCIS) != 0 && (func_regs.cr & CR_IOCE) != 0 {\n\n interrupt_high = true;\n\n }\n", "file_path": "devices/src/pci/ac97_bus_master.rs", "rank": 84, "score": 252682.34075157135 }, { "content": "fn compute_checksum<T: 
Copy>(v: &T) -> u8 {\n\n // Safe because we are only reading the bytes within the size of the `T` reference `v`.\n\n let v_slice = unsafe { slice::from_raw_parts(v as *const T as *const u8, mem::size_of::<T>()) };\n\n let mut checksum: u8 = 0;\n\n for i in v_slice {\n\n checksum = checksum.wrapping_add(*i);\n\n }\n\n checksum\n\n}\n\n\n", "file_path": "x86_64/src/mptable.rs", "rank": 85, "score": 251920.7373570687 }, { "content": "/// Initialize the syslog connection and internal variables.\n\n///\n\n/// This should only be called once per process before any other threads have been spawned or any\n\n/// signal handlers have been registered. Every call made after the first will have no effect\n\n/// besides return `Ok` or `Err` appropriately.\n\npub fn init() -> Result<(), Error> {\n\n let mut err = Error::Poisoned;\n\n STATE_ONCE.call_once(|| match State::new() {\n\n // Safe because STATE mutation is guarded by `Once`.\n\n Ok(state) => unsafe { STATE = new_mutex_ptr(state) },\n\n Err(e) => err = e,\n\n });\n\n\n\n if unsafe { STATE.is_null() } {\n\n Err(err)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "common/sys_util/src/syslog.rs", "rank": 86, "score": 246317.73783783213 }, { "content": "pub fn tframe_decode(bytes: &[u8]) {\n\n let mut cursor = Cursor::new(bytes);\n\n\n\n while Tframe::decode(&mut cursor).is_ok() {}\n\n}\n", "file_path": "common/p9/src/fuzzing.rs", "rank": 87, "score": 246277.82260494187 }, { "content": "fn write<P: AsRef<Path>>(server: &mut Server, dir: P, name: &str, fid: u32, flags: u32) {\n\n let file_path = dir.as_ref().join(name);\n\n let file_len = if file_path.exists() {\n\n fs::symlink_metadata(&file_path)\n\n .expect(\"unable to get metadata for file\")\n\n .len() as usize\n\n } else {\n\n 0usize\n\n };\n\n let mut new_content = Vec::new();\n\n File::open(\"/dev/urandom\")\n\n .and_then(|f| f.take(200).read_to_end(&mut new_content))\n\n .expect(\"failed to read from /dev/urandom\");\n\n\n\n let twrite = Twrite {\n\n fid,\n\n offset: 0,\n\n data: Data(new_content),\n\n };\n\n\n", "file_path": "common/p9/src/server/tests.rs", "rank": 88, "score": 245335.1681609283 }, { "content": "fn get_base(entry: u64) -> u64 {\n\n (((entry) & 0xFF00000000000000) >> 32)\n\n | (((entry) & 0x000000FF00000000) >> 16)\n\n | (((entry) & 0x00000000FFFF0000) >> 16)\n\n}\n\n\n", "file_path": "x86_64/src/gdt.rs", "rank": 89, "score": 245133.95310477715 }, { "content": "/// Construct a bmRequestType value for a control request.\n\npub fn control_request_type(\n\n type_: ControlRequestType,\n\n dir: ControlRequestDataPhaseTransferDirection,\n\n recipient: ControlRequestRecipient,\n\n) -> u8 {\n\n ((type_ as u8) << CONTROL_REQUEST_TYPE_OFFSET)\n\n | ((dir as u8) << DATA_PHASE_DIRECTION_OFFSET)\n\n | (recipient as u8)\n\n}\n\n\n\n#[cfg(test)]\n\n#[allow(clippy::unusual_byte_groupings)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn control_request_types() {\n\n assert_eq!(\n\n control_request_type(\n\n ControlRequestType::Standard,\n", "file_path": "usb_util/src/types.rs", "rank": 90, "score": 244598.34837086868 }, { "content": "#[derive(Copy, Clone, Eq, PartialEq)]\n\nstruct MemSlot(u32);\n\n\n\nimpl Ord for MemSlot {\n\n fn cmp(&self, other: &MemSlot) -> Ordering {\n\n // Notice the order is inverted so the lowest magnitude slot has the highest priority in a\n\n // max-heap.\n\n other.0.cmp(&self.0)\n\n }\n\n}\n\n\n\nimpl PartialOrd for MemSlot {\n\n fn partial_cmp(&self, other: &MemSlot) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\n/// A wrapper 
around creating and using a VM.\n\npub struct Vm {\n\n vm: File,\n\n guest_mem: GuestMemory,\n", "file_path": "kvm/src/lib.rs", "rank": 91, "score": 243105.06866881403 }, { "content": "#[sorted]\n\n#[derive(Error, Debug)]\n\nenum GuestMemoryError {\n\n // Failure getting the address of the audio buffer.\n\n #[error(\"Failed to get the address of the audio buffer: {0}.\")]\n\n ReadingGuestBufferAddress(vm_memory::GuestMemoryError),\n\n}\n\n\n\nimpl From<GuestMemoryError> for AudioError {\n\n fn from(err: GuestMemoryError) -> Self {\n\n AudioError::ReadingGuestError(err)\n\n }\n\n}\n\n\n", "file_path": "devices/src/pci/ac97_bus_master.rs", "rank": 92, "score": 241934.26381364767 }, { "content": "fn append_mtrr_entries(vpu: &dyn VcpuX86_64, pci_start: u64, entries: &mut Vec<Register>) {\n\n // Get VAR MTRR num from MSR_MTRRcap\n\n let mut msrs = vec![Register {\n\n id: crate::msr_index::MSR_MTRRcap,\n\n ..Default::default()\n\n }];\n\n if vpu.get_msrs(&mut msrs).is_err() {\n\n warn!(\"get msrs fail, guest with pass through device may be very slow\");\n\n return;\n\n }\n\n let var_num = msrs[0].value & VAR_MTRR_NUM_MASK;\n\n\n\n // Set pci_start .. 4G as UC\n\n // all others are set to default WB\n\n let pci_len = (1 << 32) - pci_start;\n\n let vecs = get_mtrr_pairs(pci_start, pci_len);\n\n if vecs.len() as u64 > var_num {\n\n warn!(\n\n \"mtrr fail for pci mmio, please check pci_start addr,\n\n guest with pass through device may be very slow\"\n", "file_path": "x86_64/src/regs.rs", "rank": 93, "score": 241371.70535135927 }, { "content": "/// A trait similar to `Read` and `Write`, but uses volatile memory as buffers.\n\npub trait FileReadWriteVolatile {\n\n /// Read bytes from this file into the given slice, returning the number of bytes read on\n\n /// success.\n\n fn read_volatile(&mut self, slice: VolatileSlice) -> Result<usize>;\n\n\n\n /// Like `read_volatile`, except it reads to a slice of buffers. Data is copied to fill each\n\n /// buffer in order, with the final buffer written to possibly being only partially filled. This\n\n /// method must behave as a single call to `read_volatile` with the buffers concatenated would.\n\n /// The default implementation calls `read_volatile` with either the first nonempty buffer\n\n /// provided, or returns `Ok(0)` if none exists.\n\n fn read_vectored_volatile(&mut self, bufs: &[VolatileSlice]) -> Result<usize> {\n\n bufs.iter()\n\n .find(|b| b.size() > 0)\n\n .map(|&b| self.read_volatile(b))\n\n .unwrap_or(Ok(0))\n\n }\n\n\n\n /// Reads bytes from this into the given slice until all bytes in the slice are written, or an\n\n /// error is returned.\n\n fn read_exact_volatile(&mut self, mut slice: VolatileSlice) -> Result<()> {\n", "file_path": "common/sys_util/src/file_traits.rs", "rank": 94, "score": 241356.3019971415 }, { "content": "/// A trait similar to the unix `ReadExt` and `WriteExt` traits, but for volatile memory.\n\npub trait FileReadWriteAtVolatile {\n\n /// Reads bytes from this file at `offset` into the given slice, returning the number of bytes\n\n /// read on success.\n\n fn read_at_volatile(&mut self, slice: VolatileSlice, offset: u64) -> Result<usize>;\n\n\n\n /// Like `read_at_volatile`, except it reads to a slice of buffers. Data is copied to fill each\n\n /// buffer in order, with the final buffer written to possibly being only partially filled. This\n\n /// method must behave as a single call to `read_at_volatile` with the buffers concatenated\n\n /// would. 
The default implementation calls `read_at_volatile` with either the first nonempty\n\n /// buffer provided, or returns `Ok(0)` if none exists.\n\n fn read_vectored_at_volatile(&mut self, bufs: &[VolatileSlice], offset: u64) -> Result<usize> {\n\n if let Some(&slice) = bufs.first() {\n\n self.read_at_volatile(slice, offset)\n\n } else {\n\n Ok(0)\n\n }\n\n }\n\n\n\n /// Reads bytes from this file at `offset` into the given slice until all bytes in the slice are\n\n /// read, or an error is returned.\n", "file_path": "common/sys_util/src/file_traits.rs", "rank": 95, "score": 241349.53415106464 }, { "content": "/// Spawns a pipe pair where the first pipe is the read end and the second pipe is the write end.\n\n///\n\n/// If `close_on_exec` is true, the `O_CLOEXEC` flag will be set during pipe creation.\n\npub fn pipe(close_on_exec: bool) -> Result<(File, File)> {\n\n let flags = if close_on_exec { O_CLOEXEC } else { 0 };\n\n let mut pipe_fds = [-1; 2];\n\n // Safe because pipe2 will only write 2 element array of i32 to the given pointer, and we check\n\n // for error.\n\n let ret = unsafe { pipe2(&mut pipe_fds[0], flags) };\n\n if ret == -1 {\n\n errno_result()\n\n } else {\n\n // Safe because both fds must be valid for pipe2 to have returned sucessfully and we have\n\n // exclusive ownership of them.\n\n Ok(unsafe {\n\n (\n\n File::from_raw_fd(pipe_fds[0]),\n\n File::from_raw_fd(pipe_fds[1]),\n\n )\n\n })\n\n }\n\n}\n\n\n", "file_path": "common/sys_util/src/lib.rs", "rank": 96, "score": 239801.53411610285 }, { "content": "fn max_refcount_clusters(refcount_order: u32, cluster_size: u32, num_clusters: u32) -> u64 {\n\n // Use u64 as the product of the u32 inputs can overflow.\n\n let refcount_bytes = (0x01 << refcount_order as u64) / 8;\n\n let for_data = div_round_up_u64(num_clusters as u64 * refcount_bytes, cluster_size as u64);\n\n let for_refcounts = div_round_up_u64(for_data * refcount_bytes, cluster_size as u64);\n\n for_data + for_refcounts\n\n}\n\n\n\n/// Represents a qcow2 file. This is a sparse file format maintained by the qemu project.\n\n/// Full documentation of the format can be found in the qemu repository.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use std::io::{Read, Seek, SeekFrom};\n\n/// # use disk::QcowFile;\n\n/// # fn test(file: std::fs::File) -> std::io::Result<()> {\n\n/// let mut q = QcowFile::from(file, disk::MAX_NESTING_DEPTH).expect(\"Can't open qcow file\");\n\n/// let mut buf = [0u8; 12];\n\n/// q.seek(SeekFrom::Start(10 as u64))?;\n", "file_path": "disk/src/qcow/mod.rs", "rank": 97, "score": 239352.27149023674 }, { "content": "fn make_elf_bin(elf_bytes: &[u8]) -> File {\n\n let mut elf_bin = tempfile::tempfile().expect(\"failed to create tempfile\");\n\n elf_bin\n\n .write_all(elf_bytes)\n\n .expect(\"failed to write elf to tempfile\");\n\n elf_bin\n\n}\n\n\n\nfuzz_target!(|bytes| {\n\n let mut kimage = make_elf_bin(bytes);\n\n let mem = GuestMemory::new(&[(GuestAddress(0), MEM_SIZE)]).unwrap();\n\n let _ = kernel_loader::load_kernel(&mem, GuestAddress(0), &mut kimage);\n\n});\n", "file_path": "crosvm-fuzz/zimage_fuzzer.rs", "rank": 98, "score": 239092.22998002975 }, { "content": "type Handle = u64;\n\n\n", "file_path": "devices/src/virtio/fs/passthrough.rs", "rank": 99, "score": 238648.551603224 } ]
Rust
sulis_core/src/util.rs
ThyWoof/sulis
e89eda94a1a72228224e1926d307aa4c9228bdcb
mod point; pub use self::point::{Offset, Point, Rect, Scale}; pub mod size; pub use self::size::Size; use std::cmp::Ordering; use std::f32; use std::fmt; use std::fs; use std::io::{Error, ErrorKind}; use std::ops::*; use std::panic; use std::path::PathBuf; use std::time::Duration; use backtrace::Backtrace; use log::LevelFilter; use flexi_logger::{opt_format, Duplicate, FileSpec, Logger, LogSpecBuilder, LoggerHandle}; use rand::{self, distributions::uniform::{SampleUniform}, seq::SliceRandom, Rng}; use rand_pcg::Pcg64Mcg; use crate::config::{self, Config}; use crate::resource::write_to_file; const MAX_ULPS: i32 = 100; const MAX_DIFF: f32 = 2.0 * std::f32::EPSILON; pub fn approx_eq_slice(a: &[f32], b: &[f32]) -> bool { if a.len() != b.len() { return false ; } for (a, b) in a.iter().zip(b.iter()) { if !approx_eq(*a, *b) { return false; } } true } pub fn approx_eq(a: f32, b: f32) -> bool { if (a - b).abs() <= MAX_DIFF { return true; } if a.signum() != b.signum() { return false; } let a_int = a.to_bits() as i32; let b_int = b.to_bits() as i32; i32::abs(a_int - b_int) <= MAX_ULPS } #[derive(Clone)] pub struct ReproducibleRandom { seed: u128, gen: Pcg64Mcg, } impl ReproducibleRandom { pub fn new(seed: Option<u128>) -> ReproducibleRandom { let seed = match seed { Some(s) => s, None => rand::thread_rng().gen::<u64>() as u128, }; ReproducibleRandom { seed, gen: Pcg64Mcg::new(seed), } } pub fn gen<T: SampleUniform + PartialOrd>(&mut self, min: T, max: T) -> T { self.gen.gen_range(min..max) } pub fn shuffle<T>(&mut self, values: &mut [T]) { values.shuffle(&mut self.gen); } pub fn seed(&self) -> u128 { self.seed } } impl std::fmt::Debug for ReproducibleRandom { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let state = serde_json::to_string(&self.gen).map_err(|_| std::fmt::Error)?; write!(f, "Random: {}", state) } } pub fn shuffle<T>(values: &mut [T]) { values.shuffle(&mut rand::thread_rng()); } pub fn gen_rand<T: SampleUniform + PartialOrd>(min: T, max: T) -> T { rand::thread_rng().gen_range(min..max) } fn active_resources_file_path() -> PathBuf { let mut path = config::USER_DIR.clone(); path.push("active_resources.yml"); path } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct ActiveResources { pub campaign: Option<String>, pub mods: Vec<String>, } impl ActiveResources { pub fn read() -> ActiveResources { let path = active_resources_file_path(); let data = match fs::read_to_string(path) { Ok(data) => data, Err(_) => { info!("active_resources file not found"); return ActiveResources::default(); } }; let active_resources: ActiveResources = match serde_yaml::from_str(&data) { Ok(val) => val, Err(e) => { warn!("Error reading active resources file"); warn!("{}", e); return ActiveResources::default(); } }; active_resources } pub fn write(&self) { let file = active_resources_file_path(); match write_to_file(file, self) { Ok(()) => (), Err(e) => { warn!("Error writing active resources file"); warn!("{}", e); } } } pub fn directories(&self) -> Vec<String> { let mut dirs = vec![Config::resources_config().directory]; if let Some(ref dir) = self.campaign { dirs.push(dir.to_string()); } for mod_dir in self.mods.iter() { dirs.push(mod_dir.to_string()); } dirs } } impl Default for ActiveResources { fn default() -> Self { ActiveResources { campaign: None, mods: Vec::new(), } } } #[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] #[serde(deny_unknown_fields, untagged)] pub enum ExtInt { Int(u32), Infinity, } impl Ord for ExtInt { fn cmp(&self, other: &ExtInt) -> Ordering { 
match self { ExtInt::Int(val) => match other { ExtInt::Int(other) => val.cmp(other), ExtInt::Infinity => Ordering::Less, }, ExtInt::Infinity => match other { ExtInt::Int(_) => Ordering::Greater, ExtInt::Infinity => Ordering::Equal, }, } } } impl PartialOrd for ExtInt { fn partial_cmp(&self, other: &ExtInt) -> Option<Ordering> { Some(self.cmp(other)) } } impl ExtInt { pub fn max(a: ExtInt, b: ExtInt) -> ExtInt { if a > b { a } else { b } } pub fn min(a: ExtInt, b: ExtInt) -> ExtInt { if a > b { b } else { a } } pub fn divide(self, other: ExtInt) -> f32 { match self { ExtInt::Int(amount) => match other { ExtInt::Int(other_amount) => amount as f32 / other_amount as f32, ExtInt::Infinity => 0.0, }, ExtInt::Infinity => match other { ExtInt::Int(_) => 0.0, ExtInt::Infinity => 1.0, }, } } pub fn is_zero(self) -> bool { match self { ExtInt::Int(amount) => amount == 0, ExtInt::Infinity => false, } } pub fn is_infinite(self) -> bool { match self { ExtInt::Int(_) => false, ExtInt::Infinity => true, } } pub fn to_f32(self) -> f32 { match self { ExtInt::Int(amount) => amount as f32, ExtInt::Infinity => 1e12, } } pub fn less_than(self, other: u32) -> bool { match self { ExtInt::Int(amount) => amount < other, ExtInt::Infinity => false, } } pub fn greater_than(self, other: u32) -> bool { match self { ExtInt::Int(amount) => amount > other, ExtInt::Infinity => true, } } } impl fmt::Display for ExtInt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ExtInt::Int(amount) => write!(f, "{}", amount), ExtInt::Infinity => write!(f, "infinity"), } } } impl Mul<u32> for ExtInt { type Output = ExtInt; fn mul(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => ExtInt::Int(amount * other), ExtInt::Infinity => ExtInt::Infinity, } } } impl Add<ExtInt> for ExtInt { type Output = ExtInt; fn add(self, other: ExtInt) -> ExtInt { match self { ExtInt::Int(amount) => match other { ExtInt::Int(other_amount) => ExtInt::Int(amount + other_amount), ExtInt::Infinity => ExtInt::Infinity, }, ExtInt::Infinity => ExtInt::Infinity, } } } impl Add<u32> for ExtInt { type Output = ExtInt; fn add(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => ExtInt::Int(amount + other), ExtInt::Infinity => ExtInt::Infinity, } } } impl Sub<u32> for ExtInt { type Output = ExtInt; fn sub(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => { if other > amount { ExtInt::Int(0) } else { ExtInt::Int(amount - other) } } ExtInt::Infinity => ExtInt::Infinity, } } } pub fn invalid_data_error<T>(str: &str) -> Result<T, Error> { Err(Error::new(ErrorKind::InvalidData, str)) } pub fn unable_to_create_error<T>(kind: &str, id: &str) -> Result<T, Error> { Err(Error::new( ErrorKind::InvalidData, format!("Unable to create {} '{}'", kind, id), )) } pub fn get_elapsed_millis(elapsed: Duration) -> u32 { (elapsed.as_secs() as u32) * 1_000 + elapsed.subsec_millis() } pub fn format_elapsed_secs(elapsed: Duration) -> String { let secs = elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 * 1e-9; format!("{:.6}", secs) } pub fn error_and_exit(error: &str) { error!("{}", error); error!("Exiting..."); ::std::process::exit(1) } #[must_use] pub fn setup_logger() -> LoggerHandle { let mut path = config::USER_DIR.clone(); path.push("log"); let log_dir = path; let log_config = Config::logging_config(); let mut log_builder = LogSpecBuilder::new(); log_builder.default(log_config.log_level); let dup = match log_config.stderr_log_level { LevelFilter::Error => Duplicate::Error, LevelFilter::Warn => Duplicate::Warn, 
LevelFilter::Info => Duplicate::Info, LevelFilter::Debug => Duplicate::Debug, LevelFilter::Trace => Duplicate::Trace, LevelFilter::Off => Duplicate::None, }; let logger = Logger::with(log_builder.finalize()) .log_to_file( FileSpec::default() .directory(log_dir) .use_timestamp(log_config.use_timestamps) ) .print_message() .duplicate_to_stderr(dup) .o_append(log_config.append) .format(opt_format); let handle = logger.start().unwrap_or_else(|e| { eprintln!("{}", e); eprintln!("There was a fatal error initializing logging to 'log/'"); eprintln!("Exiting..."); ::std::process::exit(1); }); panic::set_hook(Box::new(|p| { if let Some(s) = p.payload().downcast_ref::<String>() { error!("Thread main panic with: '{}'", s); } else if let Some(s) = p.payload().downcast_ref::<&str>() { error!("Thread main panic with: '{}'", s); } else { error!("Thread main panic"); } warn!("at {:?}", p.location()); let bt = Backtrace::new(); warn!("{:?}", bt); })); create_user_dirs(); handle } fn create_user_dirs() { let res = Config::resources_config(); let mut campaign_dir = config::USER_DIR.clone(); campaign_dir.push(&res.campaigns_directory); config::create_dir_and_warn(&campaign_dir); let mut mods_dir = config::USER_DIR.clone(); mods_dir.push(&res.mods_directory); config::create_dir_and_warn(&mods_dir); }
mod point; pub use self::point::{Offset, Point, Rect, Scale}; pub mod size; pub use self::size::Size; use std::cmp::Ordering; use std::f32; use std::fmt; use std::fs; use std::io::{Error, ErrorKind}; use std::ops::*; use std::panic; use std::path::PathBuf; use std::time::Duration; use backtrace::Backtrace; use log::LevelFilter; use flexi_logger::{opt_format, Duplicate, FileSpec, Logger, LogSpecBuilder, LoggerHandle}; use rand::{self, distributions::uniform::{SampleUniform}, seq::SliceRandom, Rng}; use rand_pcg::Pcg64Mcg; use crate::config::{self, Config}; use crate::resource::write_to_file; const MAX_ULPS: i32 = 100; const MAX_DIFF: f32 = 2.0 * std::f32::EPSILON; pub fn approx_eq_slice(a: &[f32], b: &[f32]) -> bool { if a.len() != b.len() { return false ; } for (a, b) in a.iter().zip(b.iter()) { if !approx_eq(*a, *b) { return false; } } true } pub fn approx_eq(a: f32, b: f32) -> bool { if (a - b).abs() <= MAX_DIFF { return true; } if a.signum() != b.signum() { return false; } let a_int = a.to_bits() as i32; let b_int = b.to_bits() as i32; i32::abs(a_int - b_int) <= MAX_ULPS } #[derive(Clone)] pub struct ReproducibleRandom { seed: u128, gen: Pcg64Mcg, } impl ReproducibleRandom { pub fn new(seed: Option<u128>) -> ReproducibleRandom { let seed = match seed { Some(s) => s, None => rand::thread_rng().gen::<u64>() as u128, }; ReproducibleRandom { seed, gen: Pcg64Mcg::new(seed), } } pub fn gen<T: SampleUniform + PartialOrd>(&mut self, min: T, max: T) -> T { self.gen.gen_range(min..max) } pub fn shuffle<T>(&mut self, values: &mut [T]) { values.shuffle(&mut self.gen); } pub fn seed(&self) -> u128 { self.seed } } impl std::fmt::Debug for ReproducibleRandom { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let state = serde_json::to_string(&self.gen).map_err(|_| std::fmt::Error)?; write!(f, "Random: {}", state) } } pub fn shuffle<T>(values: &mut [T]) { values.shuffle(&mut rand::thread_rng()); } pub fn gen_rand<T: SampleUniform + PartialOrd>(min: T, max: T) -> T { rand::thread_rng().gen_range(min..max) } fn active_resources_file_path() -> PathBuf { let mut path = config::USER_DIR.clone(); path.push("active_resources.yml"); path } #[derive(Deserialize, Serialize, Debug, Clone)] pub struct ActiveResources { pub campaign: Option<String>, pub mods: Vec<String>, } impl ActiveResources { pub fn read() -> ActiveResources { let path = active_resources_file_path(); let data = match fs::read_to_string(path) { Ok(data) => data, Err(_) => { info!("active_resources file not found"); return ActiveResources::default(); } }; let active_resources: ActiveResources = match serde_yaml::from_str(&data) { Ok(val) => val, Err(e) => { warn!("Error reading active resources file"); warn!("{}", e); return ActiveResources::default(); } }; active_resources } pub fn write(&self) { let file = active_resources_file_path(); match write_to_file(file, self) { Ok(()) => (), Err(e) => { warn!("Error writing active resources file"); warn!("{}", e); } } } pub fn directories(&self) -> Vec<String> { let mut dirs = vec![Config::resources_config().directory]; if let Some(ref dir) = self.campaign { dirs.push(dir.to_string()); } for mod_dir in self.mods.iter() { dirs.push(mod_dir.to_string()); } dirs } } impl Default for ActiveResources { fn default() -> Self { ActiveResources { campaign: None, mods: Vec::new(), } } } #[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] #[serde(deny_unknown_fields, untagged)] pub enum ExtInt { Int(u32), Infinity, } impl Ord for ExtInt { fn cmp(&self, other: &ExtInt) -> Ordering { 
match self { ExtInt::Int(val) => match other { ExtInt::Int(other) => val.cmp(other), ExtInt::Infinity => Ordering::Less, }, ExtInt::Infinity => match other { ExtInt::Int(_) => Ordering::Greater, ExtInt::Infinity => Ordering::Equal, }, } } } impl PartialOrd for ExtInt { fn partial_cmp(&self, other: &ExtInt) -> Option<Ordering> { Some(self.cmp(other)) } } impl ExtInt { pub fn max(a: ExtInt, b: ExtInt) -> ExtInt { if a > b { a } else { b } } pub fn min(a: ExtInt, b: ExtInt) -> ExtInt { if a > b { b } else { a } } pub fn divide(self, other: ExtInt) -> f32 { match self { ExtInt::Int(amoun
pub fn is_zero(self) -> bool { match self { ExtInt::Int(amount) => amount == 0, ExtInt::Infinity => false, } } pub fn is_infinite(self) -> bool { match self { ExtInt::Int(_) => false, ExtInt::Infinity => true, } } pub fn to_f32(self) -> f32 { match self { ExtInt::Int(amount) => amount as f32, ExtInt::Infinity => 1e12, } } pub fn less_than(self, other: u32) -> bool { match self { ExtInt::Int(amount) => amount < other, ExtInt::Infinity => false, } } pub fn greater_than(self, other: u32) -> bool { match self { ExtInt::Int(amount) => amount > other, ExtInt::Infinity => true, } } } impl fmt::Display for ExtInt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ExtInt::Int(amount) => write!(f, "{}", amount), ExtInt::Infinity => write!(f, "infinity"), } } } impl Mul<u32> for ExtInt { type Output = ExtInt; fn mul(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => ExtInt::Int(amount * other), ExtInt::Infinity => ExtInt::Infinity, } } } impl Add<ExtInt> for ExtInt { type Output = ExtInt; fn add(self, other: ExtInt) -> ExtInt { match self { ExtInt::Int(amount) => match other { ExtInt::Int(other_amount) => ExtInt::Int(amount + other_amount), ExtInt::Infinity => ExtInt::Infinity, }, ExtInt::Infinity => ExtInt::Infinity, } } } impl Add<u32> for ExtInt { type Output = ExtInt; fn add(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => ExtInt::Int(amount + other), ExtInt::Infinity => ExtInt::Infinity, } } } impl Sub<u32> for ExtInt { type Output = ExtInt; fn sub(self, other: u32) -> ExtInt { match self { ExtInt::Int(amount) => { if other > amount { ExtInt::Int(0) } else { ExtInt::Int(amount - other) } } ExtInt::Infinity => ExtInt::Infinity, } } } pub fn invalid_data_error<T>(str: &str) -> Result<T, Error> { Err(Error::new(ErrorKind::InvalidData, str)) } pub fn unable_to_create_error<T>(kind: &str, id: &str) -> Result<T, Error> { Err(Error::new( ErrorKind::InvalidData, format!("Unable to create {} '{}'", kind, id), )) } pub fn get_elapsed_millis(elapsed: Duration) -> u32 { (elapsed.as_secs() as u32) * 1_000 + elapsed.subsec_millis() } pub fn format_elapsed_secs(elapsed: Duration) -> String { let secs = elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 * 1e-9; format!("{:.6}", secs) } pub fn error_and_exit(error: &str) { error!("{}", error); error!("Exiting..."); ::std::process::exit(1) } #[must_use] pub fn setup_logger() -> LoggerHandle { let mut path = config::USER_DIR.clone(); path.push("log"); let log_dir = path; let log_config = Config::logging_config(); let mut log_builder = LogSpecBuilder::new(); log_builder.default(log_config.log_level); let dup = match log_config.stderr_log_level { LevelFilter::Error => Duplicate::Error, LevelFilter::Warn => Duplicate::Warn, LevelFilter::Info => Duplicate::Info, LevelFilter::Debug => Duplicate::Debug, LevelFilter::Trace => Duplicate::Trace, LevelFilter::Off => Duplicate::None, }; let logger = Logger::with(log_builder.finalize()) .log_to_file( FileSpec::default() .directory(log_dir) .use_timestamp(log_config.use_timestamps) ) .print_message() .duplicate_to_stderr(dup) .o_append(log_config.append) .format(opt_format); let handle = logger.start().unwrap_or_else(|e| { eprintln!("{}", e); eprintln!("There was a fatal error initializing logging to 'log/'"); eprintln!("Exiting..."); ::std::process::exit(1); }); panic::set_hook(Box::new(|p| { if let Some(s) = p.payload().downcast_ref::<String>() { error!("Thread main panic with: '{}'", s); } else if let Some(s) = p.payload().downcast_ref::<&str>() { error!("Thread main 
panic with: '{}'", s); } else { error!("Thread main panic"); } warn!("at {:?}", p.location()); let bt = Backtrace::new(); warn!("{:?}", bt); })); create_user_dirs(); handle } fn create_user_dirs() { let res = Config::resources_config(); let mut campaign_dir = config::USER_DIR.clone(); campaign_dir.push(&res.campaigns_directory); config::create_dir_and_warn(&campaign_dir); let mut mods_dir = config::USER_DIR.clone(); mods_dir.push(&res.mods_directory); config::create_dir_and_warn(&mods_dir); }
t) => match other { ExtInt::Int(other_amount) => amount as f32 / other_amount as f32, ExtInt::Infinity => 0.0, }, ExtInt::Infinity => match other { ExtInt::Int(_) => 0.0, ExtInt::Infinity => 1.0, }, } }
function_block-function_prefixed
[]
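The `ExtInt` type defined in the sulis_core/src/util.rs record above pairs a finite `u32` with an `Infinity` variant, orders `Infinity` above every finite value, and saturates subtraction at zero. A minimal usage sketch of that behavior follows; it is an illustration only, assuming the `sulis_core` crate shown above is available as a dependency and exposes its `util` module publicly (as other sulis crates appear to expect). The assertions merely restate what the `Ord`, `Add`, `Sub`, and `divide` impls in the record already do.

use sulis_core::util::ExtInt;

fn main() {
    // Infinity compares greater than any finite value (see the Ord impl above).
    assert!(ExtInt::Infinity > ExtInt::Int(u32::MAX));

    // Subtraction of a u32 saturates at zero instead of underflowing.
    assert_eq!(ExtInt::Int(3) - 5u32, ExtInt::Int(0));

    // Adding a u32 to Infinity leaves it infinite.
    assert_eq!(ExtInt::Infinity + 10u32, ExtInt::Infinity);

    // divide() defines infinity / infinity as 1.0.
    assert!((ExtInt::Infinity.divide(ExtInt::Infinity) - 1.0).abs() < f32::EPSILON);
}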
Rust
utils/global-state-update-gen/src/auction_utils.rs
rafal-ch/casper-node
10ed44340c42dbfd861eefa921144ef6d759410b
use std::collections::{BTreeMap, BTreeSet}; use casper_engine_test_support::internal::LmdbWasmTestBuilder; use casper_execution_engine::shared::stored_value::StoredValue; use casper_types::{ system::auction::{ Bid, SeigniorageRecipient, SeigniorageRecipientsSnapshot, SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY, }, AsymmetricType, EraId, Key, ProtocolVersion, PublicKey, U512, }; use crate::utils::ValidatorsDiff; pub fn read_snapshot(builder: &LmdbWasmTestBuilder) -> (Key, SeigniorageRecipientsSnapshot) { let protocol_data = builder .get_engine_state() .get_protocol_data(ProtocolVersion::from_parts(1, 0, 0)) .unwrap() .expect("should have protocol data"); let auction_contract_hash = protocol_data.auction(); let validators_key = builder .get_contract(auction_contract_hash) .expect("auction should exist") .named_keys()[SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY]; let stored_value = builder .query(None, validators_key, &[]) .expect("should query"); let cl_value = stored_value .as_cl_value() .cloned() .expect("should be cl value"); (validators_key, cl_value.into_t().expect("should convert")) } pub fn gen_snapshot( validators: Vec<(String, String)>, starting_era_id: EraId, count: u64, ) -> SeigniorageRecipientsSnapshot { let mut new_snapshot = BTreeMap::new(); let mut era_validators = BTreeMap::new(); for (pub_key_str, bonded_amount_str) in &validators { let validator_pub_key = PublicKey::from_hex(pub_key_str.as_bytes()).unwrap(); let bonded_amount = U512::from_dec_str(bonded_amount_str).unwrap(); let seigniorage_recipient = SeigniorageRecipient::new(bonded_amount, Default::default(), Default::default()); let _ = era_validators.insert(validator_pub_key, seigniorage_recipient); } for era_id in starting_era_id.iter(count) { let _ = new_snapshot.insert(era_id, era_validators.clone()); } new_snapshot } pub fn find_large_bids( builder: &mut LmdbWasmTestBuilder, new_snapshot: &SeigniorageRecipientsSnapshot, ) -> BTreeSet<PublicKey> { let min_bid = new_snapshot .values() .next() .unwrap() .values() .map(SeigniorageRecipient::stake) .min() .unwrap(); builder .get_bids() .into_iter() .filter(|(_pkey, bid)| bid.staked_amount() >= min_bid) .map(|(pkey, _bid)| pkey) .collect() } pub fn generate_entries_removing_bids( builder: &mut LmdbWasmTestBuilder, validators_diff: &ValidatorsDiff, new_snapshot: &SeigniorageRecipientsSnapshot, ) -> BTreeMap<Key, StoredValue> { let large_bids = find_large_bids(builder, new_snapshot); let to_unbid = validators_diff.removed.union(&large_bids); validators_diff .added .iter() .map(|pkey| { let amount = *new_snapshot .values() .next() .unwrap() .get(pkey) .unwrap() .stake(); let account_hash = pkey.to_account_hash(); let account = builder.get_account(account_hash).unwrap(); ( Key::Bid(account_hash), Bid::unlocked( pkey.clone(), account.main_purse(), amount, Default::default(), ) .into(), ) }) .chain(to_unbid.into_iter().map(|pkey| { let account_hash = pkey.to_account_hash(); let account = builder.get_account(account_hash).unwrap(); ( Key::Bid(account_hash), Bid::empty(pkey.clone(), account.main_purse()).into(), ) })) .collect() } pub fn generate_entries_removing_withdraws( builder: &mut LmdbWasmTestBuilder, validators_diff: &ValidatorsDiff, ) -> BTreeMap<Key, StoredValue> { let withdraws = builder.get_withdraws(); let withdraw_keys: BTreeSet<_> = withdraws.keys().collect(); validators_diff .removed .iter() .map(PublicKey::to_account_hash) .filter(|acc| withdraw_keys.contains(&acc)) .map(|acc| (Key::Withdraw(acc), StoredValue::Withdraw(vec![]))) .collect() }
use std::collections::{BTreeMap, BTreeSet}; use casper_engine_test_support::internal::LmdbWasmTestBuilder; use casper_execution_engine::shared::stored_value::StoredValue; use casper_types::{ system::auction::{ Bid, SeigniorageRecipient, SeigniorageRecipientsSnapshot, SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY, }, AsymmetricType, EraId, Key, ProtocolVersion, PublicKey, U512, }; use crate::utils::ValidatorsDiff; pub fn read_snapshot(builder: &LmdbWasmTestBuilder) -> (Key, SeigniorageRecipientsSnapshot) { let protocol_data = builder .get_engine_state() .get_protocol_data(ProtocolVersion::from_parts(1, 0, 0)) .unwrap() .expect("should have protocol data"); let auction_contract
pkey| { let amount = *new_snapshot .values() .next() .unwrap() .get(pkey) .unwrap() .stake(); let account_hash = pkey.to_account_hash(); let account = builder.get_account(account_hash).unwrap(); ( Key::Bid(account_hash), Bid::unlocked( pkey.clone(), account.main_purse(), amount, Default::default(), ) .into(), ) }) .chain(to_unbid.into_iter().map(|pkey| { let account_hash = pkey.to_account_hash(); let account = builder.get_account(account_hash).unwrap(); ( Key::Bid(account_hash), Bid::empty(pkey.clone(), account.main_purse()).into(), ) })) .collect() } pub fn generate_entries_removing_withdraws( builder: &mut LmdbWasmTestBuilder, validators_diff: &ValidatorsDiff, ) -> BTreeMap<Key, StoredValue> { let withdraws = builder.get_withdraws(); let withdraw_keys: BTreeSet<_> = withdraws.keys().collect(); validators_diff .removed .iter() .map(PublicKey::to_account_hash) .filter(|acc| withdraw_keys.contains(&acc)) .map(|acc| (Key::Withdraw(acc), StoredValue::Withdraw(vec![]))) .collect() }
_hash = protocol_data.auction(); let validators_key = builder .get_contract(auction_contract_hash) .expect("auction should exist") .named_keys()[SEIGNIORAGE_RECIPIENTS_SNAPSHOT_KEY]; let stored_value = builder .query(None, validators_key, &[]) .expect("should query"); let cl_value = stored_value .as_cl_value() .cloned() .expect("should be cl value"); (validators_key, cl_value.into_t().expect("should convert")) } pub fn gen_snapshot( validators: Vec<(String, String)>, starting_era_id: EraId, count: u64, ) -> SeigniorageRecipientsSnapshot { let mut new_snapshot = BTreeMap::new(); let mut era_validators = BTreeMap::new(); for (pub_key_str, bonded_amount_str) in &validators { let validator_pub_key = PublicKey::from_hex(pub_key_str.as_bytes()).unwrap(); let bonded_amount = U512::from_dec_str(bonded_amount_str).unwrap(); let seigniorage_recipient = SeigniorageRecipient::new(bonded_amount, Default::default(), Default::default()); let _ = era_validators.insert(validator_pub_key, seigniorage_recipient); } for era_id in starting_era_id.iter(count) { let _ = new_snapshot.insert(era_id, era_validators.clone()); } new_snapshot } pub fn find_large_bids( builder: &mut LmdbWasmTestBuilder, new_snapshot: &SeigniorageRecipientsSnapshot, ) -> BTreeSet<PublicKey> { let min_bid = new_snapshot .values() .next() .unwrap() .values() .map(SeigniorageRecipient::stake) .min() .unwrap(); builder .get_bids() .into_iter() .filter(|(_pkey, bid)| bid.staked_amount() >= min_bid) .map(|(pkey, _bid)| pkey) .collect() } pub fn generate_entries_removing_bids( builder: &mut LmdbWasmTestBuilder, validators_diff: &ValidatorsDiff, new_snapshot: &SeigniorageRecipientsSnapshot, ) -> BTreeMap<Key, StoredValue> { let large_bids = find_large_bids(builder, new_snapshot); let to_unbid = validators_diff.removed.union(&large_bids); validators_diff .added .iter() .map(|
random
[ { "content": "fn withdraw_bid(public_key: PublicKey, unbond_amount: U512) -> U512 {\n\n let contract_hash = system::get_auction();\n\n let args = runtime_args! {\n\n auction::ARG_AMOUNT => unbond_amount,\n\n auction::ARG_PUBLIC_KEY => public_key,\n\n };\n\n runtime::call_contract(contract_hash, auction::METHOD_WITHDRAW_BID, args)\n\n}\n\n\n\n// Withdraw bid contract.\n\n//\n\n// Accepts a public key to be removed, and an amount to withdraw (of type `U512`).\n\n// Saves the withdrawn funds in the account's context to keep track of the funds.\n\n#[no_mangle]\n\npub extern \"C\" fn call() {\n\n let public_key = runtime::get_named_arg(ARG_PUBLIC_KEY);\n\n let amount = runtime::get_named_arg(ARG_AMOUNT);\n\n withdraw_bid(public_key, amount);\n\n}\n", "file_path": "smart_contracts/contracts/client/withdraw-bid/src/main.rs", "rank": 1, "score": 319590.80667317164 }, { "content": "fn get_validator_bid(builder: &mut InMemoryWasmTestBuilder, validator: PublicKey) -> Option<Bid> {\n\n let mut bids: Bids = builder.get_bids();\n\n bids.remove(&validator)\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/test/system_contracts/auction/distribute.rs", "rank": 2, "score": 287907.0872942329 }, { "content": "fn withdraw_bid(contract_hash: ContractHash, public_key: PublicKey, unbond_amount: U512) -> U512 {\n\n let args = runtime_args! {\n\n auction::ARG_AMOUNT => unbond_amount,\n\n auction::ARG_PUBLIC_KEY => public_key,\n\n };\n\n runtime::call_contract(contract_hash, auction::METHOD_WITHDRAW_BID, args)\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn call() {\n\n let amount: U512 = runtime::get_named_arg(ARG_AMOUNT);\n\n let public_key: PublicKey = runtime::get_named_arg(ARG_PUBLIC_KEY);\n\n // unbond attempt for more than is staked should fail\n\n let contract_hash = system::get_auction();\n\n add_bid(contract_hash, public_key.clone(), amount);\n\n withdraw_bid(contract_hash, public_key, amount + 1);\n\n}\n", "file_path": "smart_contracts/contracts/test/ee-598-regression/src/main.rs", "rank": 3, "score": 280935.61968813045 }, { "content": "fn add_bid(public_key: PublicKey, bond_amount: U512, delegation_rate: DelegationRate) {\n\n let contract_hash = system::get_auction();\n\n let args = runtime_args! 
{\n\n auction::ARG_PUBLIC_KEY => public_key,\n\n auction::ARG_AMOUNT => bond_amount,\n\n auction::ARG_DELEGATION_RATE => delegation_rate,\n\n };\n\n runtime::call_contract::<U512>(contract_hash, auction::METHOD_ADD_BID, args);\n\n}\n\n\n\n// Bidding contract.\n\n//\n\n// Accepts a public key, amount and a delegation rate.\n\n// Issues an add bid request to the auction contract.\n\n#[no_mangle]\n\npub extern \"C\" fn call() {\n\n let public_key = runtime::get_named_arg(ARG_PUBLIC_KEY);\n\n let bond_amount = runtime::get_named_arg(ARG_AMOUNT);\n\n let delegation_rate = runtime::get_named_arg(ARG_DELEGATION_RATE);\n\n\n\n add_bid(public_key, bond_amount, delegation_rate);\n\n}\n", "file_path": "smart_contracts/contracts/client/add-bid/src/main.rs", "rank": 4, "score": 275943.609367966 }, { "content": "/// Generates an Ed25519 keypair using the operating system's cryptographically secure random number\n\n/// generator.\n\npub fn generate_ed25519_keypair() -> (SecretKey, PublicKey) {\n\n let secret_key = SecretKey::generate_ed25519().unwrap();\n\n let public_key = PublicKey::from(&secret_key);\n\n (secret_key, public_key)\n\n}\n\n\n", "file_path": "node/src/crypto/asymmetric_key.rs", "rank": 5, "score": 265406.21438356023 }, { "content": "fn call_bond(auction: ContractHash, public_key: PublicKey, bond_amount: U512) {\n\n let args = runtime_args! {\n\n auction::ARG_PUBLIC_KEY => public_key,\n\n auction::ARG_DELEGATION_RATE => DELEGATION_RATE,\n\n auction::ARG_AMOUNT => bond_amount,\n\n };\n\n\n\n let _amount: U512 = runtime::call_contract(auction, METHOD_ADD_BID, args);\n\n}\n\n\n", "file_path": "smart_contracts/contracts/test/auction-bidding/src/main.rs", "rank": 6, "score": 263220.4714018576 }, { "content": "/// Stores the given [`Key`] under `name` in the current context's named keys.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn put_key(name: &str, key: Key) {\n\n let (name_ptr, name_size, _bytes) = contract_api::to_ptr(name);\n\n let (key_ptr, key_size, _bytes2) = contract_api::to_ptr(key);\n\n unsafe { ext_ffi::casper_put_key(name_ptr, name_size, key_ptr, key_size) };\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 7, "score": 261496.86708105798 }, { "content": "fn add_bid(contract_hash: ContractHash, public_key: PublicKey, bond_amount: U512) {\n\n let runtime_args = runtime_args! 
{\n\n auction::ARG_PUBLIC_KEY => public_key,\n\n auction::ARG_DELEGATION_RATE => DELEGATION_RATE,\n\n auction::ARG_AMOUNT => bond_amount,\n\n };\n\n runtime::call_contract::<U512>(contract_hash, auction::METHOD_ADD_BID, runtime_args);\n\n}\n\n\n", "file_path": "smart_contracts/contracts/test/ee-598-regression/src/main.rs", "rank": 8, "score": 260234.37806753826 }, { "content": "/// Migrates data from that specified in the old config file to that specified in the new one.\n\npub fn migrate_data(\n\n _old_config: WithDir<OldConfig>,\n\n new_config: WithDir<Config>,\n\n) -> Result<(), Error> {\n\n let (new_root, new_config) = new_config.into_parts();\n\n let new_protocol_version = Chainspec::from_path(&new_root)\n\n .map_err(Error::LoadChainspec)?\n\n .protocol_config\n\n .version;\n\n let secret_key = new_config\n\n .consensus\n\n .secret_key_path\n\n .load(&new_root)\n\n .map_err(Error::LoadSecretKey)?;\n\n\n\n // Get this by actually migrating the global state data.\n\n let state_hash = Blake2bHash::default();\n\n\n\n if state_hash != Blake2bHash::default() {\n\n write_post_migration_info(state_hash, new_protocol_version, &secret_key, info_path())?;\n", "file_path": "node/src/data_migration.rs", "rank": 9, "score": 257402.9104108903 }, { "content": "pub fn account_1_initial_amount() -> U512 {\n\n ACCOUNT_1_INITIAL_AMOUNT.into()\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/profiling/mod.rs", "rank": 10, "score": 255682.2568895321 }, { "content": "pub fn u512_arb() -> impl Strategy<Value = U512> {\n\n vec(any::<u8>(), 0..64).prop_map(|b| U512::from_little_endian(b.as_slice()))\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 11, "score": 253184.97730494518 }, { "content": "pub fn key_arb() -> impl Strategy<Value = Key> {\n\n prop_oneof![\n\n account_hash_arb().prop_map(Key::Account),\n\n u8_slice_32().prop_map(Key::Hash),\n\n uref_arb().prop_map(Key::URef),\n\n transfer_addr_arb().prop_map(Key::Transfer),\n\n deploy_hash_arb().prop_map(Key::DeployInfo),\n\n era_id_arb().prop_map(Key::EraInfo),\n\n uref_arb().prop_map(|uref| Key::Balance(uref.addr())),\n\n account_hash_arb().prop_map(Key::Bid),\n\n account_hash_arb().prop_map(Key::Withdraw),\n\n u8_slice_32().prop_map(Key::Dictionary),\n\n ]\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 12, "score": 252875.48116845905 }, { "content": "pub fn colliding_key_arb() -> impl Strategy<Value = Key> {\n\n prop_oneof![\n\n u2_slice_32().prop_map(|bytes| Key::Account(AccountHash::new(bytes))),\n\n u2_slice_32().prop_map(Key::Hash),\n\n u2_slice_32().prop_map(|bytes| Key::URef(URef::new(bytes, AccessRights::NONE))),\n\n u2_slice_32().prop_map(|bytes| Key::Transfer(TransferAddr::new(bytes))),\n\n u2_slice_32().prop_map(Key::Dictionary),\n\n ]\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 13, "score": 249696.17393345665 }, { "content": "/// Turns `key` into a `([u8; 32], AccessRights)` tuple.\n\n/// Returns None if `key` is not `Key::URef` as it wouldn't have `AccessRights`\n\n/// associated with it. 
Helper function for creating `named_keys` associating\n\n/// addresses and corresponding `AccessRights`.\n\npub fn key_to_tuple(key: Key) -> Option<([u8; 32], AccessRights)> {\n\n match key {\n\n Key::URef(uref) => Some((uref.addr(), uref.access_rights())),\n\n Key::Account(_) => None,\n\n Key::Hash(_) => None,\n\n Key::Transfer(_) => None,\n\n Key::DeployInfo(_) => None,\n\n Key::EraInfo(_) => None,\n\n Key::Balance(_) => None,\n\n Key::Bid(_) => None,\n\n Key::Withdraw(_) => None,\n\n Key::Dictionary(_) => None,\n\n }\n\n}\n\n\n", "file_path": "execution_engine/src/core/runtime/mod.rs", "rank": 14, "score": 247829.0268984445 }, { "content": "/// Returns the balance in motes of a purse.\n\npub fn get_balance() -> Option<U512> {\n\n get_purse_balance(account::get_main_purse())\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/system.rs", "rank": 15, "score": 246442.93949747778 }, { "content": "/// Creates an arbitrary [`PublicKey`]\n\npub fn public_key_arb() -> impl Strategy<Value = PublicKey> {\n\n prop_oneof![\n\n Just(PublicKey::System),\n\n collection::vec(<u8>::arbitrary(), SecretKey::ED25519_LENGTH).prop_map(|bytes| {\n\n let byte_array: [u8; SecretKey::ED25519_LENGTH] = bytes.try_into().unwrap();\n\n let secret_key = SecretKey::ed25519_from_bytes(byte_array).unwrap();\n\n PublicKey::from(&secret_key)\n\n }),\n\n collection::vec(<u8>::arbitrary(), SecretKey::SECP256K1_LENGTH).prop_map(|bytes| {\n\n let bytes_array: [u8; SecretKey::SECP256K1_LENGTH] = bytes.try_into().unwrap();\n\n let secret_key = SecretKey::secp256k1_from_bytes(bytes_array).unwrap();\n\n PublicKey::from(&secret_key)\n\n })\n\n ]\n\n}\n", "file_path": "types/src/crypto/asymmetric_key/gens.rs", "rank": 16, "score": 245717.48542875293 }, { "content": "fn delegate(delegator: PublicKey, validator: PublicKey, amount: U512) {\n\n let contract_hash = system::get_auction();\n\n let args = runtime_args! {\n\n auction::ARG_DELEGATOR => delegator,\n\n auction::ARG_VALIDATOR => validator,\n\n auction::ARG_AMOUNT => amount,\n\n };\n\n runtime::call_contract::<U512>(contract_hash, auction::METHOD_DELEGATE, args);\n\n}\n\n\n\n// Delegate contract.\n\n//\n\n// Accepts a delegator's public key, validator's public key, amount and a delegation rate.\n\n// Issues an delegation request to the auction contract.\n\n#[no_mangle]\n\npub extern \"C\" fn call() {\n\n let delegator = runtime::get_named_arg(ARG_DELEGATOR);\n\n let validator = runtime::get_named_arg(ARG_VALIDATOR);\n\n let amount = runtime::get_named_arg(ARG_AMOUNT);\n\n\n\n delegate(delegator, validator, amount);\n\n}\n", "file_path": "smart_contracts/contracts/client/delegate/src/main.rs", "rank": 17, "score": 243497.3674140906 }, { "content": "fn undelegate(delegator: PublicKey, validator: PublicKey, amount: U512) {\n\n let contract_hash = system::get_auction();\n\n let args = runtime_args! 
{\n\n auction::ARG_DELEGATOR => delegator,\n\n auction::ARG_VALIDATOR => validator,\n\n auction::ARG_AMOUNT => amount,\n\n };\n\n let _amount: U512 = runtime::call_contract(contract_hash, auction::METHOD_UNDELEGATE, args);\n\n}\n\n\n\n// Undelegate contract.\n\n//\n\n// Accepts a delegator's public key, validator's public key to be undelegated, and an amount\n\n// to withdraw (of type `U512`).\n\n#[no_mangle]\n\npub extern \"C\" fn call() {\n\n let delegator = runtime::get_named_arg(ARG_DELEGATOR);\n\n let validator = runtime::get_named_arg(ARG_VALIDATOR);\n\n let amount = runtime::get_named_arg(ARG_AMOUNT);\n\n undelegate(delegator, validator, amount);\n\n}\n", "file_path": "smart_contracts/contracts/client/undelegate/src/main.rs", "rank": 18, "score": 243497.3674140906 }, { "content": "fn activate_bid(public_key: PublicKey) {\n\n let contract_hash = system::get_auction();\n\n let args = runtime_args! {\n\n auction::ARG_VALIDATOR_PUBLIC_KEY => public_key,\n\n };\n\n runtime::call_contract::<()>(contract_hash, auction::METHOD_ACTIVATE_BID, args);\n\n}\n\n\n\n// Accepts a public key. Issues an activate-bid bid to the auction contract.\n\n#[no_mangle]\n\npub extern \"C\" fn call() {\n\n let public_key: PublicKey = runtime::get_named_arg(ARG_VALIDATOR_PUBLIC_KEY);\n\n activate_bid(public_key);\n\n}\n", "file_path": "smart_contracts/contracts/client/activate-bid/src/main.rs", "rank": 19, "score": 243348.60479735344 }, { "content": "/// Groups a collection of keys by their address and accumulates access rights\n\n/// per key.\n\npub fn extract_access_rights_from_keys<I: IntoIterator<Item = Key>>(\n\n input: I,\n\n) -> HashMap<Address, HashSet<AccessRights>> {\n\n input\n\n .into_iter()\n\n .map(key_to_tuple)\n\n .flatten()\n\n .group_by(|(key, _)| *key)\n\n .into_iter()\n\n .map(|(key, group)| {\n\n (\n\n key,\n\n group.map(|(_, x)| x).collect::<HashSet<AccessRights>>(),\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "execution_engine/src/core/runtime/mod.rs", "rank": 20, "score": 240867.83281538193 }, { "content": "/// Returns the requested named [`Key`] from the current context.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn get_key(name: &str) -> Option<Key> {\n\n let (name_ptr, name_size, _bytes) = contract_api::to_ptr(name);\n\n let mut key_bytes = vec![0u8; Key::max_serialized_length()];\n\n let mut total_bytes: usize = 0;\n\n let ret = unsafe {\n\n ext_ffi::casper_get_key(\n\n name_ptr,\n\n name_size,\n\n key_bytes.as_mut_ptr(),\n\n key_bytes.len(),\n\n &mut total_bytes as *mut usize,\n\n )\n\n };\n\n match api_error::result_from(ret) {\n\n Ok(_) => {}\n\n Err(ApiError::MissingKey) => return None,\n\n Err(e) => revert(e),\n\n }\n\n key_bytes.truncate(total_bytes);\n\n let key: Key = bytesrepr::deserialize(key_bytes).unwrap_or_revert();\n\n Some(key)\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 21, "score": 240866.05714577477 }, { "content": "pub fn standard_payment(amount: U512) {\n\n const METHOD_GET_PAYMENT_PURSE: &str = \"get_payment_purse\";\n\n\n\n let main_purse = account::get_main_purse();\n\n\n\n let handle_payment_pointer = system::get_handle_payment();\n\n\n\n let payment_purse: URef = runtime::call_contract(\n\n handle_payment_pointer,\n\n METHOD_GET_PAYMENT_PURSE,\n\n RuntimeArgs::default(),\n\n );\n\n\n\n system::transfer_from_purse_to_purse(main_purse, payment_purse, 
amount, None).unwrap_or_revert()\n\n}\n\n\n", "file_path": "smart_contracts/contracts/test/get-call-stack-recursive-subcall/src/lib.rs", "rank": 22, "score": 234761.83533558412 }, { "content": "fn undelegate() -> U512 {\n\n let auction = system::get_auction();\n\n let amount: U512 = runtime::get_named_arg(ARG_AMOUNT);\n\n let delegator: PublicKey = runtime::get_named_arg(ARG_DELEGATOR);\n\n let validator: PublicKey = runtime::get_named_arg(ARG_VALIDATOR);\n\n\n\n let args = runtime_args! {\n\n ARG_AMOUNT => amount,\n\n ARG_VALIDATOR => validator,\n\n ARG_DELEGATOR => delegator,\n\n };\n\n\n\n runtime::call_contract(auction, METHOD_UNDELEGATE, args)\n\n}\n\n\n", "file_path": "smart_contracts/contracts/test/auction-bids/src/main.rs", "rank": 23, "score": 233349.92641655862 }, { "content": "fn delegate() -> U512 {\n\n let auction = system::get_auction();\n\n let delegator: PublicKey = runtime::get_named_arg(ARG_DELEGATOR);\n\n let validator: PublicKey = runtime::get_named_arg(ARG_VALIDATOR);\n\n let amount: U512 = runtime::get_named_arg(ARG_AMOUNT);\n\n let args = runtime_args! {\n\n ARG_DELEGATOR => delegator,\n\n ARG_VALIDATOR => validator,\n\n ARG_AMOUNT => amount,\n\n };\n\n\n\n runtime::call_contract(auction, METHOD_DELEGATE, args)\n\n}\n\n\n", "file_path": "smart_contracts/contracts/test/auction-bids/src/main.rs", "rank": 24, "score": 233349.92641655862 }, { "content": "fn get_handle_payment_payment_purse_balance(builder: &InMemoryWasmTestBuilder) -> U512 {\n\n let purse = get_payment_purse_by_name(builder, handle_payment::PAYMENT_PURSE_KEY)\n\n .expect(\"should find handle payment payment purse\");\n\n builder.get_purse_balance(purse)\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/test/system_contracts/handle_payment/finalize_payment.rs", "rank": 25, "score": 232761.55113990756 }, { "content": "fn read_value<T: CLTyped + FromBytes>(builder: &mut InMemoryWasmTestBuilder, key: Key) -> T {\n\n CLValue::try_from(builder.query(None, key, &[]).expect(\"should have value\"))\n\n .expect(\"should have CLValue\")\n\n .into_t()\n\n .expect(\"should convert successfully\")\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/test/contract_api/account/named_keys.rs", "rank": 26, "score": 231817.27403763018 }, { "content": "/// Returns the named keys of the current context.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn list_named_keys() -> NamedKeys {\n\n let (total_keys, result_size) = {\n\n let mut total_keys = MaybeUninit::uninit();\n\n let mut result_size = 0;\n\n let ret = unsafe {\n\n ext_ffi::casper_load_named_keys(total_keys.as_mut_ptr(), &mut result_size as *mut usize)\n\n };\n\n api_error::result_from(ret).unwrap_or_revert();\n\n let total_keys = unsafe { total_keys.assume_init() };\n\n (total_keys, result_size)\n\n };\n\n if total_keys == 0 {\n\n return NamedKeys::new();\n\n }\n\n let bytes = read_host_buffer(result_size).unwrap_or_revert();\n\n bytesrepr::deserialize(bytes).unwrap_or_revert()\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 27, "score": 230248.2090231806 }, { "content": "#[no_mangle]\n\npub fn delegate() {\n\n let account_hash: AccountHash = runtime::get_named_arg(ARG_TARGET);\n\n let transfer_amount: U512 = runtime::get_named_arg(ARG_AMOUNT);\n\n system::transfer_to_account(account_hash, transfer_amount, None).unwrap_or_revert();\n\n}\n", "file_path": 
"smart_contracts/contracts/client/transfer-to-account-u512/src/lib.rs", "rank": 28, "score": 229129.88060229202 }, { "content": "/// Updates the [`Weight`] of the given [`AccountHash`] in the account's associated keys.\n\npub fn update_associated_key(\n\n account_hash: AccountHash,\n\n weight: Weight,\n\n) -> Result<(), UpdateKeyFailure> {\n\n let (account_hash_ptr, account_hash_size, _bytes) = to_ptr(account_hash);\n\n // Cast of u8 (weight) into i32 is assumed to be always safe\n\n let result = unsafe {\n\n ext_ffi::casper_update_associated_key(\n\n account_hash_ptr,\n\n account_hash_size,\n\n weight.value().into(),\n\n )\n\n };\n\n if result == 0 {\n\n Ok(())\n\n } else {\n\n Err(UpdateKeyFailure::try_from(result).unwrap_or_revert())\n\n }\n\n}\n", "file_path": "smart_contracts/contract/src/contract_api/account.rs", "rank": 29, "score": 228898.17530679668 }, { "content": "/// Creates a [`LmdbWasmTestBuilder`] from a named fixture directory.\n\n///\n\n/// As part of this process a new temporary directory will be created to store LMDB files from given\n\n/// fixture, and a builder will be created using it.\n\n///\n\n/// This function returns a triple of the builder, a [`LmdbFixtureState`] which contains serialized\n\n/// genesis request for given fixture, and a temporary directory which has to be kept in scope.\n\npub fn builder_from_global_state_fixture(\n\n fixture_name: &str,\n\n) -> (LmdbWasmTestBuilder, LmdbFixtureState, TempDir) {\n\n let source = path_to_lmdb_fixtures().join(fixture_name);\n\n let to = tempfile::tempdir().expect(\"should create temp dir\");\n\n fs_extra::copy_items(&[source], &to, &dir::CopyOptions::default())\n\n .expect(\"should copy global state fixture\");\n\n\n\n let path_to_state = to.path().join(fixture_name).join(STATE_JSON_FILE);\n\n let lmdb_fixture_state: LmdbFixtureState =\n\n serde_json::from_reader(File::open(&path_to_state).unwrap()).unwrap();\n\n let path_to_gs = to.path().join(fixture_name);\n\n (\n\n LmdbWasmTestBuilder::open(\n\n &path_to_gs,\n\n EngineConfig::default(),\n\n lmdb_fixture_state.post_state_hash,\n\n ),\n\n lmdb_fixture_state,\n\n to,\n\n )\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/lmdb_fixture.rs", "rank": 30, "score": 225885.8469919617 }, { "content": "/// Returns the `CLType` describing a \"named key\" on the system, i.e. 
a `(String, Key)`.\n\npub fn named_key_type() -> CLType {\n\n CLType::Tuple2([Box::new(CLType::String), Box::new(CLType::Key)])\n\n}\n\n\n\nimpl CLType {\n\n pub(crate) fn append_bytes(&self, stream: &mut Vec<u8>) -> Result<(), bytesrepr::Error> {\n\n match self {\n\n CLType::Bool => stream.push(CL_TYPE_TAG_BOOL),\n\n CLType::I32 => stream.push(CL_TYPE_TAG_I32),\n\n CLType::I64 => stream.push(CL_TYPE_TAG_I64),\n\n CLType::U8 => stream.push(CL_TYPE_TAG_U8),\n\n CLType::U32 => stream.push(CL_TYPE_TAG_U32),\n\n CLType::U64 => stream.push(CL_TYPE_TAG_U64),\n\n CLType::U128 => stream.push(CL_TYPE_TAG_U128),\n\n CLType::U256 => stream.push(CL_TYPE_TAG_U256),\n\n CLType::U512 => stream.push(CL_TYPE_TAG_U512),\n\n CLType::Unit => stream.push(CL_TYPE_TAG_UNIT),\n\n CLType::String => stream.push(CL_TYPE_TAG_STRING),\n\n CLType::Key => stream.push(CL_TYPE_TAG_KEY),\n\n CLType::URef => stream.push(CL_TYPE_TAG_UREF),\n", "file_path": "types/src/cl_type.rs", "rank": 32, "score": 224978.7233314488 }, { "content": "fn get_handle_payment_refund_purse(builder: &InMemoryWasmTestBuilder) -> Option<Key> {\n\n let handle_payment_contract = builder.get_handle_payment_contract();\n\n handle_payment_contract\n\n .named_keys()\n\n .get(handle_payment::REFUND_PURSE_KEY)\n\n .cloned()\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/test/system_contracts/handle_payment/finalize_payment.rs", "rank": 33, "score": 224752.32713202666 }, { "content": "/// A helper function to change NamedKeys into a Vec<NamedKey>\n\npub fn vectorize(keys: &NamedKeys) -> Vec<NamedKey> {\n\n let named_keys = keys\n\n .iter()\n\n .map(|(name, key)| NamedKey {\n\n name: name.clone(),\n\n key: key.to_formatted_string(),\n\n })\n\n .collect();\n\n named_keys\n\n}\n", "file_path": "node/src/types/json_compatibility.rs", "rank": 35, "score": 222086.449401387 }, { "content": "/// Returns the hash of `data`.\n\npub fn hash<T: AsRef<[u8]>>(data: T) -> Digest {\n\n let mut result = [0; Digest::LENGTH];\n\n\n\n let mut hasher = VarBlake2b::new(Digest::LENGTH).expect(\"should create hasher\");\n\n hasher.update(data);\n\n hasher.finalize_variable(|slice| {\n\n result.copy_from_slice(slice);\n\n });\n\n Digest(result)\n\n}\n\n\n\nimpl From<Digest> for Blake2bHash {\n\n fn from(digest: Digest) -> Self {\n\n let digest_bytes = digest.to_array();\n\n Blake2bHash::from(digest_bytes)\n\n }\n\n}\n\n\n\nimpl From<Blake2bHash> for Digest {\n\n fn from(blake2bhash: Blake2bHash) -> Self {\n", "file_path": "node/src/crypto/hash.rs", "rank": 36, "score": 221384.69017710816 }, { "content": "#[doc(hidden)]\n\npub fn get_purse_balance(purse: URef) -> Option<U512> {\n\n let (purse_ptr, purse_size, _bytes) = contract_api::to_ptr(purse);\n\n\n\n let value_size = {\n\n let mut output_size = MaybeUninit::uninit();\n\n let ret =\n\n unsafe { ext_ffi::casper_get_balance(purse_ptr, purse_size, output_size.as_mut_ptr()) };\n\n match api_error::result_from(ret) {\n\n Ok(_) => unsafe { output_size.assume_init() },\n\n Err(ApiError::InvalidPurse) => return None,\n\n Err(error) => runtime::revert(error),\n\n }\n\n };\n\n let value_bytes = runtime::read_host_buffer(value_size).unwrap_or_revert();\n\n let value: U512 = bytesrepr::deserialize(value_bytes).unwrap_or_revert();\n\n Some(value)\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/system.rs", "rank": 37, "score": 220363.79898692202 }, { "content": "#[allow(clippy::implicit_hasher)]\n\npub fn get_account(transforms: &AdditiveMap<Key, Transform>, account: &Key) -> Option<Account> {\n\n 
transforms.get(account).and_then(|transform| {\n\n if let Transform::Write(StoredValue::Account(account)) = transform {\n\n Some(account.to_owned())\n\n } else {\n\n None\n\n }\n\n })\n\n}\n", "file_path": "execution_engine_testing/test_support/src/internal/utils.rs", "rank": 38, "score": 219182.17688851902 }, { "content": "pub fn protocol_version_arb() -> impl Strategy<Value = ProtocolVersion> {\n\n sem_ver_arb().prop_map(ProtocolVersion::new)\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 39, "score": 217967.85589309072 }, { "content": "/// An entity which persists `ProtocolData` values at their protocol versions.\n\npub trait ProtocolDataStore: Store<ProtocolVersion, ProtocolData> {}\n", "file_path": "execution_engine/src/storage/protocol_data_store/mod.rs", "rank": 40, "score": 216866.7578381325 }, { "content": "/// Signs the given message using the given key pair.\n\npub fn sign<T: AsRef<[u8]>>(\n\n message: T,\n\n secret_key: &SecretKey,\n\n public_key: &PublicKey,\n\n) -> Signature {\n\n match (secret_key, public_key) {\n\n (SecretKey::System, PublicKey::System) => {\n\n panic!(\"cannot create signature with system keys\",)\n\n }\n\n (SecretKey::Ed25519(secret_key), PublicKey::Ed25519(public_key)) => {\n\n let expanded_secret_key = ExpandedSecretKey::from(secret_key);\n\n let signature = expanded_secret_key.sign(message.as_ref(), public_key);\n\n Signature::Ed25519(signature)\n\n }\n\n (SecretKey::Secp256k1(secret_key), PublicKey::Secp256k1(_public_key)) => {\n\n let signer = secret_key;\n\n let signature: Secp256k1Signature = signer\n\n .try_sign(message.as_ref())\n\n .expect(\"should create signature\");\n\n Signature::Secp256k1(signature)\n\n }\n\n _ => panic!(\"secret and public key types must match\"),\n\n }\n\n}\n\n\n", "file_path": "node/src/crypto/asymmetric_key.rs", "rank": 41, "score": 213867.8119566865 }, { "content": "/// Verifies the signature of the given message against the given public key.\n\npub fn verify<T: AsRef<[u8]>>(\n\n message: T,\n\n signature: &Signature,\n\n public_key: &PublicKey,\n\n) -> Result<()> {\n\n match (signature, public_key) {\n\n (Signature::System, _) => Err(Error::AsymmetricKey(String::from(\n\n \"signatures based on the system key cannot be verified\",\n\n ))),\n\n (Signature::Ed25519(signature), PublicKey::Ed25519(public_key)) => public_key\n\n .verify_strict(message.as_ref(), signature)\n\n .map_err(|_| Error::AsymmetricKey(String::from(\"failed to verify Ed25519 signature\"))),\n\n (Signature::Secp256k1(signature), PublicKey::Secp256k1(public_key)) => {\n\n let verifier: &Secp256k1PublicKey = public_key;\n\n verifier\n\n .verify(message.as_ref(), signature)\n\n .map_err(|error| {\n\n Error::AsymmetricKey(format!(\"failed to verify secp256k1 signature: {}\", error))\n\n })\n\n }\n", "file_path": "node/src/crypto/asymmetric_key.rs", "rank": 42, "score": 213861.39503400444 }, { "content": "/// Removes the [`Key`] stored under `name` in the current context's named keys.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn remove_key(name: &str) {\n\n let (name_ptr, name_size, _bytes) = contract_api::to_ptr(name);\n\n unsafe { ext_ffi::casper_remove_key(name_ptr, name_size) }\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 43, "score": 212329.53860280378 }, { "content": "pub fn contract_version_key_arb() -> impl Strategy<Value = ContractVersionKey> 
{\n\n (1..32u32, 1..1000u32)\n\n .prop_map(|(major, contract_ver)| ContractVersionKey::new(major, contract_ver))\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 44, "score": 211960.6220031212 }, { "content": "/// Returns an account value paired with its key\n\npub fn mocked_account(account_hash: AccountHash) -> Vec<(Key, StoredValue)> {\n\n let purse = URef::new([0u8; 32], AccessRights::READ_ADD_WRITE);\n\n let account = Account::create(account_hash, NamedKeys::new(), purse);\n\n vec![(Key::Account(account_hash), StoredValue::Account(account))]\n\n}\n", "file_path": "execution_engine/src/shared/test_utils.rs", "rank": 45, "score": 211804.06524165967 }, { "content": "fn bootstrap(data_dir: &Path, accounts: Vec<AccountHash>, amount: U512) -> LmdbWasmTestBuilder {\n\n let exec_request = ExecuteRequestBuilder::standard(\n\n *DEFAULT_ACCOUNT_ADDR,\n\n CONTRACT_CREATE_ACCOUNTS,\n\n runtime_args! { ARG_ACCOUNTS => accounts, ARG_SEED_AMOUNT => amount },\n\n )\n\n .build();\n\n\n\n let engine_config = EngineConfig::default();\n\n\n\n let mut builder = LmdbWasmTestBuilder::new_with_config(data_dir, engine_config);\n\n\n\n builder\n\n .run_genesis(&DEFAULT_RUN_GENESIS_REQUEST)\n\n .exec(exec_request)\n\n .expect_success()\n\n .commit();\n\n\n\n builder\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/benches/transfer_bench.rs", "rank": 46, "score": 211715.30795629928 }, { "content": "#[doc(hidden)]\n\npub fn blake2b<T: AsRef<[u8]>>(data: T) -> [u8; BLAKE2B_DIGEST_LENGTH] {\n\n let mut result = [0; BLAKE2B_DIGEST_LENGTH];\n\n // NOTE: Assumed safe as `BLAKE2B_DIGEST_LENGTH` is a valid value for a hasher\n\n let mut hasher = VarBlake2b::new(BLAKE2B_DIGEST_LENGTH).expect(\"should create hasher\");\n\n\n\n hasher.update(data);\n\n hasher.finalize_variable(|slice| {\n\n result.copy_from_slice(slice);\n\n });\n\n result\n\n}\n\n\n\nimpl TryFrom<&[u8]> for AccountHash {\n\n type Error = TryFromSliceForAccountHashError;\n\n\n\n fn try_from(bytes: &[u8]) -> Result<Self, TryFromSliceForAccountHashError> {\n\n AccountHashBytes::try_from(bytes)\n\n .map(AccountHash::new)\n\n .map_err(|_| TryFromSliceForAccountHashError(()))\n\n }\n", "file_path": "types/src/account.rs", "rank": 47, "score": 210913.01705808312 }, { "content": "/// Saves a private key to a file.\n\npub fn save_private_key<P: AsRef<Path>>(key: &PKeyRef<Private>, dest: P) -> anyhow::Result<()> {\n\n let pem = key\n\n .private_key_to_pem_pkcs8()\n\n .context(\"converting private key to PEM\")?;\n\n\n\n write_file(dest, pem).with_context(|| \"failed to write private key\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "node/src/tls.rs", "rank": 48, "score": 210597.67788416398 }, { "content": "pub fn trie_arb() -> impl Strategy<Value = Trie<Key, StoredValue>> {\n\n prop_oneof![\n\n (key_arb(), stored_value_arb()).prop_map(|(key, value)| Trie::Leaf { key, value }),\n\n trie_pointer_block_arb().prop_map(|pointer_block| Trie::Node {\n\n pointer_block: Box::new(pointer_block)\n\n }),\n\n (vec(any::<u8>(), 0..32), trie_pointer_arb()).prop_map(|(affix, pointer)| {\n\n Trie::Extension {\n\n affix: affix.into(),\n\n pointer,\n\n }\n\n })\n\n ]\n\n}\n", "file_path": "execution_engine/src/storage/trie/gens.rs", "rank": 49, "score": 209447.9076185595 }, { "content": "/// Returns `true` if `name` exists in the current context's named keys.\n\n///\n\n/// The current context is either the caller's account or a stored contract depending on whether the\n\n/// currently-executing module is a direct call or a sub-call respectively.\n\npub fn has_key(name: &str) 
-> bool {\n\n let (name_ptr, name_size, _bytes) = contract_api::to_ptr(name);\n\n let result = unsafe { ext_ffi::casper_has_key(name_ptr, name_size) };\n\n result == 0\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/runtime.rs", "rank": 50, "score": 207249.82978925688 }, { "content": "fn transfer(builder: &mut InMemoryWasmTestBuilder, account_hash: AccountHash, amount: U512) {\n\n let exec_request = {\n\n ExecuteRequestBuilder::standard(\n\n *DEFAULT_ACCOUNT_ADDR,\n\n CONTRACT_TRANSFER_PURSE_TO_ACCOUNT,\n\n runtime_args! {\n\n \"target\" => account_hash,\n\n \"amount\" => amount,\n\n },\n\n )\n\n .build()\n\n };\n\n\n\n builder.exec(exec_request).expect_success().commit();\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/test/system_contracts/handle_payment/refund_purse.rs", "rank": 51, "score": 205518.0921668996 }, { "content": "pub fn named_keys_arb(depth: usize) -> impl Strategy<Value = NamedKeys> {\n\n btree_map(\"\\\\PC*\", key_arb(), depth)\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 52, "score": 204966.31496069313 }, { "content": "pub fn data_dir_arg() -> Arg<'static, 'static> {\n\n Arg::with_name(DATA_DIR_ARG_NAME)\n\n .short(DATA_DIR_ARG_SHORT)\n\n .long(DATA_DIR_ARG_LONG)\n\n .value_name(DATA_DIR_ARG_VALUE_NAME)\n\n .help(DATA_DIR_ARG_HELP)\n\n .takes_value(true)\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/profiling/mod.rs", "rank": 53, "score": 201399.9165220684 }, { "content": "/// Generates a self-signed (key, certificate) pair suitable for TLS and signing.\n\n///\n\n/// The common name of the certificate will be \"casper-node\".\n\npub fn generate_node_cert() -> SslResult<(X509, PKey<Private>)> {\n\n let private_key = generate_private_key()?;\n\n let cert = generate_cert(&private_key, \"casper-node\")?;\n\n\n\n Ok((cert, private_key))\n\n}\n\n\n\n/// Creates a TLS acceptor for a server.\n\n///\n\n/// The acceptor will restrict TLS parameters to secure one defined in this crate that are\n\n/// compatible with connectors built with `create_tls_connector`.\n\n///\n\n/// Incoming certificates must still be validated using `validate_cert`.\n\npub(crate) fn create_tls_acceptor(\n\n cert: &X509Ref,\n\n private_key: &PKeyRef<Private>,\n\n) -> SslResult<SslAcceptor> {\n\n let mut builder = SslAcceptor::mozilla_modern_v5(SslMethod::tls_server())?;\n\n set_context_options(&mut builder, cert, private_key)?;\n\n\n", "file_path": "node/src/tls.rs", "rank": 54, "score": 199821.20353415064 }, { "content": "pub fn get_bids<P>(provider: &mut P) -> Result<Bids, Error>\n\nwhere\n\n P: StorageProvider + RuntimeProvider + ?Sized,\n\n{\n\n let bids_keys = provider.get_keys(&KeyTag::Bid)?;\n\n\n\n let mut ret = BTreeMap::new();\n\n\n\n for key in bids_keys {\n\n let account_hash = match key {\n\n Key::Bid(account_ash) => account_ash,\n\n _ => return Err(Error::InvalidKeyVariant),\n\n };\n\n let bid = match provider.read_bid(&account_hash)? 
{\n\n Some(bid) => bid,\n\n None => return Err(Error::ValidatorNotFound),\n\n };\n\n ret.insert(bid.validator_public_key().clone(), bid);\n\n }\n\n\n\n Ok(ret)\n\n}\n\n\n", "file_path": "types/src/system/auction/detail.rs", "rank": 55, "score": 199526.10665747366 }, { "content": "/// Removes the given [`AccountHash`] from the account's associated keys.\n\npub fn remove_associated_key(account_hash: AccountHash) -> Result<(), RemoveKeyFailure> {\n\n let (account_hash_ptr, account_hash_size, _bytes) = to_ptr(account_hash);\n\n let result =\n\n unsafe { ext_ffi::casper_remove_associated_key(account_hash_ptr, account_hash_size) };\n\n if result == 0 {\n\n Ok(())\n\n } else {\n\n Err(RemoveKeyFailure::try_from(result).unwrap_or_revert())\n\n }\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/account.rs", "rank": 56, "score": 199217.94331157018 }, { "content": "pub fn data_dir(arg_matches: &ArgMatches) -> PathBuf {\n\n match arg_matches.value_of(DATA_DIR_ARG_NAME) {\n\n Some(dir) => PathBuf::from_str(dir).expect(\"Expected a valid unicode path\"),\n\n None => env::current_dir().expect(\"Expected to be able to access current working dir\"),\n\n }\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/profiling/mod.rs", "rank": 57, "score": 195798.1612794208 }, { "content": "fn run_command(builder: &mut InMemoryWasmTestBuilder, command: &str) {\n\n let exec_request = ExecuteRequestBuilder::standard(\n\n *DEFAULT_ACCOUNT_ADDR,\n\n CONTRACT_NAMED_KEYS,\n\n runtime_args! { ARG_COMMAND => command },\n\n )\n\n .build();\n\n builder\n\n .exec(exec_request)\n\n .commit()\n\n .expect_success()\n\n .finish();\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/test/contract_api/account/named_keys.rs", "rank": 58, "score": 195447.32305423002 }, { "content": "pub fn set_bids<P>(provider: &mut P, validators: Bids) -> Result<(), Error>\n\nwhere\n\n P: StorageProvider + RuntimeProvider + ?Sized,\n\n{\n\n for (_, bid) in validators.into_iter() {\n\n let account_hash = AccountHash::from(bid.validator_public_key());\n\n provider.write_bid(account_hash, bid)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "types/src/system/auction/detail.rs", "rank": 59, "score": 194348.9267567206 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn transfer(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n amount: &str,\n\n target_account: &str,\n\n transfer_id: &str,\n\n deploy_params: DeployStrParams<'_>,\n\n payment_params: PaymentStrParams<'_>,\n\n) -> Result<JsonRpc> {\n\n let amount = U512::from_dec_str(amount)\n\n .map_err(|err| Error::FailedToParseUint(\"amount\", UIntParseError::FromDecStr(err)))?;\n\n let source_purse = None;\n\n let target = parsing::get_transfer_target(target_account)?;\n\n let transfer_id = parsing::transfer_id(transfer_id)?;\n\n\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).transfer(\n\n amount,\n\n source_purse,\n\n target,\n\n transfer_id,\n\n deploy_params.try_into()?,\n\n payment_params.try_into()?,\n\n )\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 60, "score": 193421.08804158474 }, { "content": "fn get_counter_key() -> Key {\n\n let name = ARG_CONTRACT_HASH_NAME;\n\n let arg = {\n\n let mut arg_size: usize = 0;\n\n let ret = unsafe {\n\n ext_ffi::casper_get_named_arg_size(\n\n name.as_bytes().as_ptr(),\n\n name.len(),\n\n &mut arg_size as *mut usize,\n\n )\n\n };\n\n match api_error::result_from(ret) {\n\n Ok(_) => {\n\n if arg_size == 0 {\n\n None\n\n } else {\n\n Some(arg_size)\n\n }\n\n }\n\n 
Err(ApiError::MissingArgument) => None,\n", "file_path": "smart_contracts/contracts/client/counter-define/src/main.rs", "rank": 61, "score": 191618.93525160634 }, { "content": "/// Transfers `amount` of motes from the default purse of the account to `target`\n\n/// account. If `target` does not exist it will be created.\n\npub fn transfer_to_account(target: AccountHash, amount: U512, id: Option<u64>) -> TransferResult {\n\n let (target_ptr, target_size, _bytes1) = contract_api::to_ptr(target);\n\n let (amount_ptr, amount_size, _bytes2) = contract_api::to_ptr(amount);\n\n let (id_ptr, id_size, _bytes3) = contract_api::to_ptr(id);\n\n let mut maybe_result_value = MaybeUninit::uninit();\n\n\n\n let return_code = unsafe {\n\n ext_ffi::casper_transfer_to_account(\n\n target_ptr,\n\n target_size,\n\n amount_ptr,\n\n amount_size,\n\n id_ptr,\n\n id_size,\n\n maybe_result_value.as_mut_ptr(),\n\n )\n\n };\n\n\n\n // Propagate error (if any)\n\n api_error::result_from(return_code)?;\n\n\n\n // Return appropriate result if transfer was successful\n\n let transferred_to_value = unsafe { maybe_result_value.assume_init() };\n\n TransferredTo::result_from(transferred_to_value)\n\n}\n\n\n\n/// Transfers `amount` of motes from `source` purse to `target` account. If `target` does not exist\n\n/// it will be created.\n", "file_path": "smart_contracts/contract/src/contract_api/system.rs", "rank": 62, "score": 191413.8645631238 }, { "content": "fn lmdb_roundtrip_succeeds(inputs: BTreeMap<ProtocolVersion, ProtocolData>) -> bool {\n\n let tmp_dir = tempfile::tempdir().unwrap();\n\n let env = LmdbEnvironment::new(\n\n &tmp_dir.path().to_path_buf(),\n\n DEFAULT_TEST_MAX_DB_SIZE,\n\n DEFAULT_TEST_MAX_READERS,\n\n )\n\n .unwrap();\n\n let store = LmdbProtocolDataStore::new(&env, None, DatabaseFlags::empty()).unwrap();\n\n\n\n let ret = store_tests::roundtrip_succeeds(&env, &store, inputs).unwrap();\n\n tmp_dir.close().unwrap();\n\n ret\n\n}\n\n\n\nproptest! {\n\n #[test]\n\n fn prop_in_memory_roundtrip_succeeds(\n\n m in collection::btree_map(gens_ext::protocol_version_arb(), gens::protocol_data_arb(), get_range())\n\n ) {\n", "file_path": "execution_engine/src/storage/protocol_data_store/tests/proptests.rs", "rank": 63, "score": 190676.6823441689 }, { "content": "fn in_memory_roundtrip_succeeds(inputs: BTreeMap<ProtocolVersion, ProtocolData>) -> bool {\n\n let env = InMemoryEnvironment::new();\n\n let store = InMemoryProtocolDataStore::new(&env, None);\n\n\n\n store_tests::roundtrip_succeeds(&env, &store, inputs).unwrap()\n\n}\n\n\n", "file_path": "execution_engine/src/storage/protocol_data_store/tests/proptests.rs", "rank": 64, "score": 190676.6823441689 }, { "content": "/// Creates a transfer `Deploy` and outputs it to a file or stdout.\n\n///\n\n/// As a file, the transfer `Deploy` can subsequently be signed by other parties using\n\n/// [`sign_deploy_file()`](fn.sign_deploy_file.html) and then sent to the network for execution\n\n/// using [`send_deploy_file()`](fn.send_deploy_file.html).\n\n///\n\n/// * `maybe_output_path` specifies the output file, or if empty, will print it to `stdout`.\n\n/// * `amount` is a string to be parsed as a `U512` specifying the amount to be transferred.\n\n/// * `target_account` is the account `PublicKey` into which the funds will be transferred,\n\n/// formatted as a hex-encoded string. 
The account's main purse will receive the funds.\n\n/// * `transfer_id` is a string to be parsed as a `u64` representing a user-defined identifier which\n\n/// will be permanently associated with the transfer.\n\n/// * `deploy_params` contains deploy-related options for this `Deploy`. See\n\n/// [`DeployStrParams`](struct.DeployStrParams.html) for more details.\n\n/// * `payment_params` contains payment-related options for this `Deploy`. See\n\n/// [`PaymentStrParams`](struct.PaymentStrParams.html) for more details.\n\n/// * If `force` is true, and a file exists at `maybe_output_path`, it will be overwritten. If\n\n/// `force` is false and a file exists at `maybe_output_path`,\n\n/// [`Error::FileAlreadyExists`](enum.Error.html#variant.FileAlreadyExists) is returned and a file\n\n/// will not be written.\n\npub fn make_transfer(\n\n maybe_output_path: &str,\n\n amount: &str,\n\n target_account: &str,\n\n transfer_id: &str,\n\n deploy_params: DeployStrParams<'_>,\n\n payment_params: PaymentStrParams<'_>,\n\n force: bool,\n\n) -> Result<()> {\n\n let amount = U512::from_dec_str(amount)\n\n .map_err(|err| Error::FailedToParseUint(\"amount\", UIntParseError::FromDecStr(err)))?;\n\n let source_purse = None;\n\n let target = parsing::get_transfer_target(target_account)?;\n\n let transfer_id = parsing::transfer_id(transfer_id)?;\n\n\n\n let output = if maybe_output_path.is_empty() {\n\n OutputKind::Stdout\n\n } else {\n\n OutputKind::file(maybe_output_path, force)\n\n };\n", "file_path": "client/lib/lib.rs", "rank": 65, "score": 190594.7213645972 }, { "content": "/// Retrieves a stored value from the network.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `state_root_hash` must be a hex-encoded, 32-byte hash digest.\n\n/// * `key` must be a formatted [`PublicKey`](https://docs.rs/casper-node/latest/casper-node/crypto/asymmetric_key/enum.PublicKey.html)\n\n/// or [`Key`](https://docs.rs/casper-types/latest/casper-types/enum.PublicKey.html). 
This will\n\n/// take one of the following forms:\n\n/// ```text\n\n/// 01c9e33693951aaac23c49bee44ad6f863eedcd38c084a3a8f11237716a3df9c2c # PublicKey\n\n/// account-hash-0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20 # Key::Account\n\n/// hash-0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20 # Key::Hash\n\n/// uref-0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20-007 # Key::URef\n\n/// transfer-0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20 # Key::Transfer\n\n/// deploy-0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20 # Key::DeployInfo\n\n/// ```\n\n/// * `path` is comprised of components starting from the `key`, separated by `/`s.\n\npub fn get_item(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n state_root_hash: &str,\n\n key: &str,\n\n path: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).get_item(state_root_hash, key, path)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 66, "score": 190592.1395753659 }, { "content": "/// Creates a `Deploy` and outputs it to a file or stdout.\n\n///\n\n/// As a file, the `Deploy` can subsequently be signed by other parties using\n\n/// [`sign_deploy_file()`](fn.sign_deploy_file.html) and then sent to the network for execution\n\n/// using [`send_deploy_file()`](fn.send_deploy_file.html).\n\n///\n\n/// * `maybe_output_path` specifies the output file, or if empty, will print it to `stdout`.\n\n/// * `deploy_params` contains deploy-related options for this `Deploy`. See\n\n/// [`DeployStrParams`](struct.DeployStrParams.html) for more details.\n\n/// * `session_params` contains session-related options for this `Deploy`. See\n\n/// [`SessionStrParams`](struct.SessionStrParams.html) for more details.\n\n/// * `payment_params` contains payment-related options for this `Deploy`. See\n\n/// [`PaymentStrParams`](struct.PaymentStrParams.html) for more details.\n\n/// * If `force` is true, and a file exists at `maybe_output_path`, it will be overwritten. If\n\n/// `force` is false and a file exists at `maybe_output_path`,\n\n/// [`Error::FileAlreadyExists`](enum.Error.html#variant.FileAlreadyExists) is returned and a file\n\n/// will not be written.\n\npub fn make_deploy(\n\n maybe_output_path: &str,\n\n deploy_params: DeployStrParams<'_>,\n\n session_params: SessionStrParams<'_>,\n\n payment_params: PaymentStrParams<'_>,\n\n force: bool,\n\n) -> Result<()> {\n\n let output = if maybe_output_path.is_empty() {\n\n OutputKind::Stdout\n\n } else {\n\n OutputKind::file(maybe_output_path, force)\n\n };\n\n\n\n Deploy::with_payment_and_session(\n\n deploy_params.try_into()?,\n\n payment_params.try_into()?,\n\n session_params.try_into()?,\n\n )?\n\n .write_deploy(output.get()?)?;\n\n\n\n output.commit()\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 67, "score": 190589.88534878596 }, { "content": "/// Retrieves a `Deploy` from the network.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. 
hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `deploy_hash` must be a hex-encoded, 32-byte hash digest.\n\npub fn get_deploy(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n deploy_hash: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).get_deploy(deploy_hash)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 68, "score": 190585.8910978402 }, { "content": "/// Retrieves a purse's balance from the network.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `state_root_hash` must be a hex-encoded, 32-byte hash digest.\n\n/// * `purse` is a URef, formatted as e.g.\n\n/// ```text\n\n/// uref-0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20-007\n\n/// ```\n\npub fn get_balance(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n state_root_hash: &str,\n\n purse: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).get_balance(state_root_hash, purse)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 69, "score": 190585.71869968117 }, { "content": "/// Retrieves a stored value from the network.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. 
When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `state_root_hash` must be a hex-encoded, 32-byte hash digest.\n\n/// * `dictionary_str_params` contains options to query a dictionary item.\n\npub fn get_dictionary(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n state_root_hash: &str,\n\n dictionary_str_params: DictionaryItemStrParams<'_>,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level)\n\n .get_dictionary_item(state_root_hash, dictionary_str_params)\n\n}\n\n\n\n/// Container for `Deploy` construction options.\n\n#[derive(Default, Debug)]\n\npub struct DeployStrParams<'a> {\n\n /// Path to secret key file.\n\n pub secret_key: &'a str,\n\n /// RFC3339-like formatted timestamp. e.g. `2018-02-16T00:31:37Z`.\n\n ///\n\n /// If `timestamp` is empty, the current time will be used. Note that timestamp is UTC, not\n\n /// local.\n", "file_path": "client/lib/lib.rs", "rank": 70, "score": 190585.7002492707 }, { "content": "/// Retrieves a `Block` from the network.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `maybe_block_id` must be a hex-encoded, 32-byte hash digest or a `u64` representing the\n\n/// `Block` height or empty. If empty, the latest `Block` will be retrieved.\n\npub fn get_block(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n maybe_block_id: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).get_block(maybe_block_id)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 71, "score": 190585.68193416187 }, { "content": "/// Creates a `Deploy` and sends it to the network for execution.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `deploy_params` contains deploy-related options for this `Deploy`. 
See\n\n/// [`DeployStrParams`](struct.DeployStrParams.html) for more details.\n\n/// * `session_params` contains session-related options for this `Deploy`. See\n\n/// [`SessionStrParams`](struct.SessionStrParams.html) for more details.\n\n/// * `payment_params` contains payment-related options for this `Deploy`. See\n\n/// [`PaymentStrParams`](struct.PaymentStrParams.html) for more details.\n\npub fn put_deploy(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n deploy_params: DeployStrParams<'_>,\n\n session_params: SessionStrParams<'_>,\n\n payment_params: PaymentStrParams<'_>,\n\n) -> Result<JsonRpc> {\n\n let deploy = Deploy::with_payment_and_session(\n\n deploy_params.try_into()?,\n\n payment_params.try_into()?,\n\n session_params.try_into()?,\n\n )?;\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).put_deploy(deploy)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 72, "score": 190585.1302763658 }, { "content": "#[cfg(test)]\n\npub fn keys<'a, 'b, K, V, T, S>(\n\n correlation_id: CorrelationId,\n\n txn: &'b T,\n\n store: &'a S,\n\n root: &Blake2bHash,\n\n) -> KeysIterator<'a, 'b, K, V, T, S>\n\nwhere\n\n K: ToBytes + FromBytes + Clone + Eq + std::fmt::Debug,\n\n V: ToBytes + FromBytes + Clone + Eq + std::fmt::Debug,\n\n T: Readable<Handle = S::Handle>,\n\n S: TrieStore<K, V>,\n\n S::Error: From<T::Error>,\n\n{\n\n keys_with_prefix(correlation_id, txn, store, root, &[])\n\n}\n\n\n", "file_path": "execution_engine/src/storage/trie_store/operations/mod.rs", "rank": 73, "score": 189849.4638513106 }, { "content": "/// Retrieves an Account from the network.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `public_key` the public key associated with the `Account`\n\n/// * `maybe_block_id` must be a hex-encoded, 32-byte hash digest or a `u64` representing the\n\n/// `Block` height or empty. If empty, the latest `Block` will be retrieved.\n\npub fn get_account_info(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n public_key: &str,\n\n maybe_block_id: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level)\n\n .get_account_info(public_key, maybe_block_id)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 74, "score": 187885.5376647519 }, { "content": "/// Retrieves the bids and validators as of the most recently added `Block`.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. 
Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `maybe_block_id` must be a hex-encoded, 32-byte hash digest or a `u64` representing the\n\n/// `Block` height or empty. If empty, era information from the latest block will be returned if\n\n/// available.\n\npub fn get_auction_info(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n maybe_block_id: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).get_auction_info(maybe_block_id)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 75, "score": 187884.16887826376 }, { "content": "/// Migrates values from the old config file to the new one, modifying the new config file on-disk.\n\n///\n\n/// This should be executed after a new version is available, but before the casper-node has been\n\n/// run in validator mode using the new version.\n\npub fn migrate_config(\n\n _old_config: WithDir<OldConfig>,\n\n _new_config: WithDir<Config>,\n\n) -> Result<(), Error> {\n\n Ok(())\n\n}\n", "file_path": "node/src/config_migration.rs", "rank": 76, "score": 187881.42916908045 }, { "content": "/// Reads a previously-saved `Deploy` from a file, cryptographically signs it, and outputs it to a\n\n/// file or stdout.\n\n///\n\n/// * `input_path` specifies the path to the previously-saved `Deploy` file.\n\n/// * `secret_key` specifies the path to the secret key with which to sign the `Deploy`.\n\n/// * `maybe_output_path` specifies the output file, or if empty, will print it to `stdout`.\n\n/// * If `force` is true, and a file exists at `maybe_output_path`, it will be overwritten. If\n\n/// `force` is false and a file exists at `maybe_output_path`,\n\n/// [`Error::FileAlreadyExists`](enum.Error.html#variant.FileAlreadyExists) is returned and a file\n\n/// will not be written.\n\npub fn sign_deploy_file(\n\n input_path: &str,\n\n secret_key: &str,\n\n maybe_output_path: &str,\n\n force: bool,\n\n) -> Result<()> {\n\n let secret_key = parsing::secret_key(secret_key)?;\n\n\n\n let input = fs::read(input_path).map_err(|error| Error::IoError {\n\n context: format!(\"unable to read deploy file at '{}'\", input_path),\n\n error,\n\n })?;\n\n\n\n let output = if maybe_output_path.is_empty() {\n\n OutputKind::Stdout\n\n } else {\n\n OutputKind::file(maybe_output_path, force)\n\n };\n\n\n\n Deploy::sign_and_write_deploy(Cursor::new(input), secret_key, output.get()?)?;\n\n\n\n output.commit()\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 77, "score": 187881.3977081735 }, { "content": "/// Reads a previously-saved `Deploy` from a file and sends it to the network for execution.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. 
Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `input_path` specifies the path to the previously-saved `Deploy` file.\n\npub fn send_deploy_file(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n input_path: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).send_deploy_file(input_path)\n\n}\n\n\n\n/// Transfers funds between purses.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `amount` is a string to be parsed as a `U512` specifying the amount to be transferred.\n\n/// * `target_account` is the account `PublicKey` into which the funds will be transferred,\n\n/// formatted as a hex-encoded string. The account's main purse will receive the funds.\n\n/// * `transfer_id` is a string to be parsed as a `u64` representing a user-defined identifier which\n\n/// will be permanently associated with the transfer.\n\n/// * `deploy_params` contains deploy-related options for this `Deploy`. See\n\n/// [`DeployStrParams`](struct.DeployStrParams.html) for more details.\n\n/// * `payment_params` contains payment-related options for this `Deploy`. See\n\n/// [`PaymentStrParams`](struct.PaymentStrParams.html) for more details.\n", "file_path": "client/lib/lib.rs", "rank": 78, "score": 187880.98958444846 }, { "content": "/// Retrieves all `Transfer` items for a `Block` from the network.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. 
When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `maybe_block_id` must be a hex-encoded, 32-byte hash digest or a `u64` representing the\n\n/// `Block` height or empty. If empty, the latest `Block` transfers will be retrieved.\n\npub fn get_block_transfers(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n maybe_block_id: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).get_block_transfers(maybe_block_id)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 79, "score": 187880.82348559744 }, { "content": "/// Setup UNIX signal hooks for current application.\n\npub fn setup_signal_hooks() {\n\n for signal in TERM_SIGNALS {\n\n flag::register_usize(\n\n *signal,\n\n Arc::clone(&*TERMINATION_REQUESTED),\n\n *signal as usize,\n\n )\n\n .unwrap_or_else(|error| panic!(\"failed to register signal {}: {}\", signal, error));\n\n }\n\n let _ = flag::register(SIGUSR1, Arc::clone(&*QUEUE_DUMP_REQUESTED));\n\n}\n\n\n\n/// Constructs a new `NodeRng`.\n", "file_path": "node/src/lib.rs", "rank": 80, "score": 187875.90086879127 }, { "content": "/// Adds the given [`AccountHash`] with associated [`Weight`] to the account's associated keys.\n\npub fn add_associated_key(account_hash: AccountHash, weight: Weight) -> Result<(), AddKeyFailure> {\n\n let (account_hash_ptr, account_hash_size, _bytes) = to_ptr(account_hash);\n\n // Cast of u8 (weight) into i32 is assumed to be always safe\n\n let result = unsafe {\n\n ext_ffi::casper_add_associated_key(\n\n account_hash_ptr,\n\n account_hash_size,\n\n weight.value().into(),\n\n )\n\n };\n\n if result == 0 {\n\n Ok(())\n\n } else {\n\n Err(AddKeyFailure::try_from(result).unwrap_or_revert())\n\n }\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/account.rs", "rank": 81, "score": 187700.77247026959 }, { "content": "fn get_uref(key: Key) -> URef {\n\n match key {\n\n Key::URef(uref) => uref,\n\n _ => panic!(\"Key {:?} is not an URef\", key),\n\n }\n\n}\n\n\n", "file_path": "execution_engine_testing/tests/src/test/regression/ee_441.rs", "rank": 82, "score": 187667.52663624723 }, { "content": "/// Returns the iterator over the keys in the subtrie matching `prefix`.\n\n///\n\n/// The root should be the apex of the trie.\n\npub fn keys_with_prefix<'a, 'b, K, V, T, S>(\n\n _correlation_id: CorrelationId,\n\n txn: &'b T,\n\n store: &'a S,\n\n root: &Blake2bHash,\n\n prefix: &[u8],\n\n) -> KeysIterator<'a, 'b, K, V, T, S>\n\nwhere\n\n K: ToBytes + FromBytes + Clone + Eq + std::fmt::Debug,\n\n V: ToBytes + FromBytes + Clone + Eq + std::fmt::Debug,\n\n T: Readable<Handle = S::Handle>,\n\n S: TrieStore<K, V>,\n\n S::Error: From<T::Error>,\n\n{\n\n let (visited, init_state): (Vec<VisitedTrieNode<K, V>>, _) = match store.get(txn, root) {\n\n Ok(None) => (vec![], KeysIteratorState::Ok),\n\n Err(e) => (vec![], KeysIteratorState::ReturnError(e)),\n\n Ok(Some(current_root)) => (\n\n vec![VisitedTrieNode {\n\n trie: current_root,\n", "file_path": "execution_engine/src/storage/trie_store/operations/mod.rs", "rank": 83, "score": 187003.13812859866 }, { "content": "/// Retrieves a state root hash at a given `Block`.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. 
If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. `\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `maybe_block_id` must be a hex-encoded, 32-byte hash digest or a `u64` representing the\n\n/// `Block` height or empty. If empty, the latest `Block` will be used.\n\npub fn get_state_root_hash(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n maybe_block_id: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level).get_state_root_hash(maybe_block_id)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 84, "score": 185302.98310565835 }, { "content": "/// Given a root hash, find any try keys that are descendant from it that are:\n\n/// 1. referenced but not present in the database\n\n/// 2. referenced and present but whose values' hashes do not equal their keys (ie, corrupted)\n\n// TODO: We only need to check one trie key at a time\n\npub fn missing_trie_keys<K, V, T, S, E>(\n\n _correlation_id: CorrelationId,\n\n txn: &T,\n\n store: &S,\n\n mut trie_keys_to_visit: Vec<Blake2bHash>,\n\n) -> Result<Vec<Blake2bHash>, E>\n\nwhere\n\n K: ToBytes + FromBytes + Eq + std::fmt::Debug,\n\n V: ToBytes + FromBytes + std::fmt::Debug,\n\n T: Readable<Handle = S::Handle>,\n\n S: TrieStore<K, V>,\n\n S::Error: From<T::Error>,\n\n E: From<S::Error> + From<bytesrepr::Error>,\n\n{\n\n let mut missing_descendants = Vec::new();\n\n let mut visited = HashSet::new();\n\n while let Some(trie_key) = trie_keys_to_visit.pop() {\n\n if !visited.insert(trie_key) {\n\n continue;\n\n }\n", "file_path": "execution_engine/src/storage/trie_store/operations/mod.rs", "rank": 85, "score": 184886.3406778435 }, { "content": "#[ignore]\n\n#[test]\n\nfn get_balance_using_public_key_should_work() {\n\n let mut builder = InMemoryWasmTestBuilder::default();\n\n builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST);\n\n\n\n let transfer_request = ExecuteRequestBuilder::transfer(\n\n *DEFAULT_ACCOUNT_ADDR,\n\n runtime_args! {\n\n TRANSFER_ARG_TARGET => *ALICE_ADDR,\n\n TRANSFER_ARG_AMOUNT => *TRANSFER_AMOUNT_1,\n\n TRANSFER_ARG_ID => <Option<u64>>::None,\n\n },\n\n )\n\n .build();\n\n\n\n builder.exec(transfer_request).commit().expect_success();\n\n\n\n let alice_account = builder\n\n .get_account(*ALICE_ADDR)\n\n .expect(\"should have Alice's account\");\n\n\n", "file_path": "execution_engine_testing/tests/src/test/get_balance.rs", "rank": 86, "score": 183925.76903075213 }, { "content": "/// Retrieves era information from the network.\n\n///\n\n/// * `maybe_rpc_id` is the JSON-RPC identifier, applied to the request and returned in the\n\n/// response. If it can be parsed as an `i64` it will be used as a JSON integer. If empty, a\n\n/// random `i64` will be assigned. Otherwise the provided string will be used verbatim.\n\n/// * `node_address` is the hostname or IP and port of the node on which the HTTP service is\n\n/// running, e.g. 
`\"http://127.0.0.1:7777\"`.\n\n/// * When `verbosity_level` is `1`, the JSON-RPC request will be printed to `stdout` with long\n\n/// string fields (e.g. hex-formatted raw Wasm bytes) shortened to a string indicating the char\n\n/// count of the field. When `verbosity_level` is greater than `1`, the request will be printed\n\n/// to `stdout` with no abbreviation of long fields. When `verbosity_level` is `0`, the request\n\n/// will not be printed to `stdout`.\n\n/// * `maybe_block_id` must be a hex-encoded, 32-byte hash digest or a `u64` representing the\n\n/// `Block` height or empty. If empty, era information from the latest block will be returned if\n\n/// available.\n\npub fn get_era_info_by_switch_block(\n\n maybe_rpc_id: &str,\n\n node_address: &str,\n\n verbosity_level: u64,\n\n maybe_block_id: &str,\n\n) -> Result<JsonRpc> {\n\n RpcCall::new(maybe_rpc_id, node_address, verbosity_level)\n\n .get_era_info_by_switch_block(maybe_block_id)\n\n}\n\n\n", "file_path": "client/lib/lib.rs", "rank": 87, "score": 182840.8727576707 }, { "content": "#[doc(hidden)]\n\npub fn initialize_with_logger(\n\n logger: Box<dyn Log>,\n\n settings: Settings,\n\n) -> Result<(), SetLoggerError> {\n\n if settings.max_level() == LevelFilter::Off && !settings.enable_metrics() {\n\n // No logging required\n\n return Ok(());\n\n }\n\n\n\n log::set_boxed_logger(logger)?;\n\n log::set_max_level(settings.max_level());\n\n Ok(())\n\n}\n\n\n\n/// Logs a message using the given format and properties.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `log_level` - log level of the message to be logged\n\n/// * `message_format` - a message template to apply over properties by key\n\n/// * `properties` - a collection of machine readable key / value properties which will be logged\n", "file_path": "execution_engine/src/shared/logging/mod.rs", "rank": 88, "score": 182835.96792938147 }, { "content": "pub fn instance_and_memory(\n\n parity_module: Module,\n\n protocol_version: ProtocolVersion,\n\n wasm_config: &WasmConfig,\n\n) -> Result<(ModuleRef, MemoryRef), Error> {\n\n let module = wasmi::Module::from_parity_wasm_module(parity_module)?;\n\n let resolver = create_module_resolver(protocol_version, wasm_config)?;\n\n let mut imports = ImportsBuilder::new();\n\n imports.push_resolver(\"env\", &resolver);\n\n let not_started_module = ModuleInstance::new(&module, &imports)?;\n\n if not_started_module.has_start() {\n\n return Err(Error::UnsupportedWasmStart);\n\n }\n\n let instance = not_started_module.not_started_instance().clone();\n\n let memory = resolver.memory_ref()?;\n\n Ok((instance, memory))\n\n}\n\n\n", "file_path": "execution_engine/src/core/runtime/mod.rs", "rank": 89, "score": 182835.96792938147 }, { "content": "#[no_mangle]\n\npub fn delegate() {\n\n let account_hash: AccountHash = runtime::get_named_arg(ARG_TARGET);\n\n\n\n let amount: U512 = runtime::get_named_arg(ARG_AMOUNT);\n\n\n\n // Maybe we will decide to allow multiple funds up until some maximum value.\n\n let already_funded = runtime::get_key(&account_hash.to_formatted_string()).is_some();\n\n\n\n if already_funded {\n\n runtime::revert(ApiError::User(CustomError::AlreadyFunded as u16));\n\n } else {\n\n system::transfer_to_account(account_hash, amount, None).unwrap_or_revert();\n\n // Transfer successful; Store the fact of funding in the local state.\n\n runtime::put_key(\n\n &account_hash.to_formatted_string(),\n\n Key::URef(storage::new_uref(())),\n\n )\n\n }\n\n}\n", "file_path": "smart_contracts/contracts/explorer/faucet/src/lib.rs", "rank": 90, "score": 
182835.96792938147 }, { "content": "#[inline]\n\npub fn log_details(\n\n _log_level: Level,\n\n _message_format: String,\n\n _properties: BTreeMap<&str, String>,\n\n) {\n\n // TODO: Metrics story https://casperlabs.atlassian.net/browse/NDRS-120\n\n}\n\n\n", "file_path": "execution_engine/src/shared/logging/mod.rs", "rank": 91, "score": 182835.96792938147 }, { "content": "pub fn delegate() {\n\n // Empty key name is invalid\n\n assert!(storage::new_dictionary(\"\").is_err());\n\n // Assert that we don't have this key yet\n\n assert!(!runtime::has_key(MALICIOUS_KEY_NAME));\n\n // Create and put a new dictionary in named keys\n\n storage::new_dictionary(MALICIOUS_KEY_NAME).unwrap();\n\n // Can't do it twice\n\n assert!(storage::new_dictionary(MALICIOUS_KEY_NAME).is_err());\n\n\n\n let mut entry_points = EntryPoints::new();\n\n entry_points.add_entry_point(EntryPoint::new(\n\n MODIFY_WRITE_ENTRYPOINT,\n\n Vec::new(),\n\n CLType::Unit,\n\n EntryPointAccess::Public,\n\n EntryPointType::Contract,\n\n ));\n\n entry_points.add_entry_point(EntryPoint::new(\n\n SHARE_RO_ENTRYPOINT,\n", "file_path": "smart_contracts/contracts/test/dictionary/src/lib.rs", "rank": 92, "score": 182835.96792938147 }, { "content": "pub fn era_id_arb() -> impl Strategy<Value = EraId> {\n\n any::<u64>().prop_map(EraId::from)\n\n}\n\n\n", "file_path": "types/src/gens.rs", "rank": 93, "score": 182267.5063777922 }, { "content": "/// Creates a module resolver for given protocol version.\n\n///\n\n/// * `protocol_version` Version of the protocol. Can't be lower than 1.\n\npub fn create_module_resolver(\n\n protocol_version: ProtocolVersion,\n\n wasm_config: &WasmConfig,\n\n) -> Result<impl ModuleImportResolver + MemoryResolver, ResolverError> {\n\n // TODO: revisit how protocol_version check here is meant to combine with upgrade\n\n if protocol_version >= ProtocolVersion::V1_0_0 {\n\n return Ok(v1_resolver::RuntimeModuleImportResolver::new(\n\n wasm_config.max_memory,\n\n ));\n\n }\n\n Err(ResolverError::UnknownProtocolVersion(protocol_version))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use casper_types::ProtocolVersion;\n\n\n\n use super::*;\n\n use crate::shared::wasm_config::WasmConfig;\n\n\n", "file_path": "execution_engine/src/core/resolvers/mod.rs", "rank": 94, "score": 180492.1949239663 }, { "content": "/// Create a new contract stored under a Key::Hash at version 1. You may upgrade this contract in\n\n/// the future; if you want a contract that is locked (i.e. 
cannot be upgraded) call\n\n/// `new_locked_contract` instead.\n\n/// if `named_keys` are provided, will apply them\n\n/// if `hash_name` is provided, puts contract hash in current context's named keys under `hash_name`\n\n/// if `uref_name` is provided, puts access_uref in current context's named keys under `uref_name`\n\npub fn new_contract(\n\n entry_points: EntryPoints,\n\n named_keys: Option<NamedKeys>,\n\n hash_name: Option<String>,\n\n uref_name: Option<String>,\n\n) -> (ContractHash, ContractVersion) {\n\n create_contract(entry_points, named_keys, hash_name, uref_name, false)\n\n}\n\n\n", "file_path": "smart_contracts/contract/src/contract_api/storage.rs", "rank": 95, "score": 180491.44678135973 }, { "content": "/// Executes mote transfer to supplied account hash.\n\n/// Transfers the requested amount.\n\npub fn delegate() {\n\n let account_hash: AccountHash = runtime::get_named_arg(ARG_TARGET);\n\n let transfer_amount: u64 = runtime::get_named_arg(ARG_AMOUNT);\n\n let u512_motes = U512::from(transfer_amount);\n\n system::transfer_to_account(account_hash, u512_motes, None).unwrap_or_revert();\n\n}\n", "file_path": "smart_contracts/contracts/client/transfer-to-account/src/lib.rs", "rank": 96, "score": 180484.37273217936 }, { "content": "#[doc(hidden)]\n\npub fn record_transfer(\n\n maybe_to: Option<AccountHash>,\n\n source: URef,\n\n target: URef,\n\n amount: U512,\n\n id: Option<u64>,\n\n) -> Result<(), ApiError> {\n\n let (maybe_to_ptr, maybe_to_size, _bytes1) = contract_api::to_ptr(maybe_to);\n\n let (source_ptr, source_size, _bytes2) = contract_api::to_ptr(source);\n\n let (target_ptr, target_size, _bytes3) = contract_api::to_ptr(target);\n\n let (amount_ptr, amount_size, _bytes4) = contract_api::to_ptr(amount);\n\n let (id_ptr, id_size, _bytes5) = contract_api::to_ptr(id);\n\n let result = unsafe {\n\n ext_ffi::casper_record_transfer(\n\n maybe_to_ptr,\n\n maybe_to_size,\n\n source_ptr,\n\n source_size,\n\n target_ptr,\n\n target_size,\n", "file_path": "smart_contracts/contract/src/contract_api/system.rs", "rank": 97, "score": 180484.37273217936 }, { "content": "pub fn create() {\n\n common::write_file(ARGS.root_path().join(FILENAME), CONTENTS);\n\n}\n", "file_path": "execution_engine_testing/cargo_casper/src/travis_yml.rs", "rank": 98, "score": 180484.37273217936 }, { "content": "/// Creates a new fixture with a name.\n\n///\n\n/// This process is currently manual. The process to do this is to check out a release branch, call\n\n/// this function to generate (i.e. 
`generate_fixture(\"release_1_3_0\")`) and persist it in version\n\n/// control.\n\npub fn generate_fixture(\n\n name: &str,\n\n genesis_request: RunGenesisRequest,\n\n post_genesis_setup: impl FnOnce(&mut LmdbWasmTestBuilder),\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n let lmdb_fixtures_root = path_to_lmdb_fixtures();\n\n let fixture_root = lmdb_fixtures_root.join(name);\n\n\n\n let engine_config = EngineConfig::default();\n\n let mut builder = LmdbWasmTestBuilder::new_with_config(&fixture_root, engine_config);\n\n\n\n builder.run_genesis(&genesis_request);\n\n\n\n // You can customize the fixture post genesis with a callable.\n\n post_genesis_setup(&mut builder);\n\n\n\n let post_state_hash = builder.get_post_state_hash();\n\n\n\n let state = LmdbFixtureState {\n\n genesis_request: serde_json::to_value(genesis_request)?,\n\n post_state_hash,\n\n };\n\n let serialized_state = serde_json::to_string_pretty(&state)?;\n\n let mut f = File::create(&fixture_root.join(STATE_JSON_FILE))?;\n\n f.write_all(serialized_state.as_bytes())?;\n\n Ok(())\n\n}\n", "file_path": "execution_engine_testing/tests/src/lmdb_fixture.rs", "rank": 99, "score": 180484.37273217936 } ]
Rust
tests/do_nestest.rs
Lokathor/terbium
382ce827aeda58e0a66449a1cfafc7da48f9e65b
#![allow(unused)] use std::{fs::File, io::prelude::*, path::Path}; use terbium::*; #[test] fn run_through_nestest() { main(); } fn main() { let rom_path = Path::new("roms").join("nestest.nes"); let mut file = File::open(rom_path).expect("couldn't open ROM file"); let mut contents = Vec::new(); file .read_to_end(&mut contents) .expect("couldn't read file content"); drop(file); let log_path = Path::new("roms").join("nestest.log"); let mut file = File::open(log_path).expect("couldn't open log file."); let mut official_bytes = Vec::new(); file .read_to_end(&mut official_bytes) .expect("couldn't read log contents"); drop(file); let official_log = String::from_utf8_lossy(&official_bytes); let mut system = Terbium::default(); system.opt_cart = Cartridge::from_ines_bytes(&contents) .map_err(|e| println!("Cart Load Error: {}", e)) .ok(); let mut cpu_cycles = 0_usize; let mut display = Bitmap::new(256, 240); system.cpu_reset(); system.cpu.pc = 0xC000; system.cpu.p = ProgramStatus { flags: 0x24 }; system.cpu.s = 0xFD; system.cpu.cycle_deficit = 0; system.ppu.x_pos = 0; system.ppu.y_pos = 0; cpu_cycles = 6; for (n, line) in official_log.lines().enumerate() { let n = n + 1; let log_pc = u16::from_str_radix(&line[0..4], 16).unwrap(); let log_instruction = &line[6..14]; let log_operation = &line[16..19]; let log_a = u8::from_str_radix(&line[50..52], 16).unwrap(); let log_x = u8::from_str_radix(&line[55..57], 16).unwrap(); let log_y = u8::from_str_radix(&line[60..62], 16).unwrap(); let log_p = u8::from_str_radix(&line[65..67], 16).unwrap(); let log_s = u8::from_str_radix(&line[71..73], 16).unwrap(); let log_ppu_x = u16::from_str_radix(&line[78..81].trim(), 10).unwrap(); let log_ppu_y = u16::from_str_radix(&line[82..85].trim(), 10).unwrap(); let log_cyc = usize::from_str_radix(&line[90..].trim(), 10).unwrap(); if log_pc != system.cpu.pc { println!( "PC error, line {} expected state:\n{}\n> expected PC:{:04X}, have {:04X}", n, &line[48..], log_pc, system.cpu.pc ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; let instruction = match Opcode(system.debug_cpu_read(system.cpu.pc)) .addressing_mode() .extra_bytes_needed() { 0 => format!("{:02X} ", system.debug_cpu_read(system.cpu.pc)), 1 => format!( "{:02X} {:02X} ", system.debug_cpu_read(system.cpu.pc), system.debug_cpu_read(system.cpu.pc + 1) ), 2 => format!( "{:02X} {:02X} {:02X}", system.debug_cpu_read(system.cpu.pc), system.debug_cpu_read(system.cpu.pc + 1), system.debug_cpu_read(system.cpu.pc + 2) ), _ => unreachable!(), }; if log_instruction != instruction { panic!( "Instruction error, line {}, expected '{}', got '{}'", n, log_instruction, instruction ) }; if log_a != system.cpu.a { println!( "A error, line {} expected state:\n{}\n> expected A:{:02X}, have {:02X}", n, &line[48..], log_a, system.cpu.a ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_x != system.cpu.x { println!( "X error, line {} expected state:\n{}\n> expected X:{:02X}, have {:02X}", n, &line[48..], log_x, system.cpu.x ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_y != system.cpu.y { panic!( "A error, line {}, expected {:02X}, got {:02X}", n, log_y, system.cpu.y ) }; if log_p != system.cpu.p.flags { println!( "P error, line {} expected state:\n{}\n> expected P:{:?}, have {:?}", n, &line[48..], ProgramStatus { flags: log_p }, 
system.cpu.p ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_s != system.cpu.s { panic!( "S error, line {}, expected {:02X}, got {:02X}", n, log_s, system.cpu.s ) }; while system.cpu.cycle_deficit != 0 { cpu_cycles += usize::from(system.clock_system(&mut display)); } if log_ppu_x != system.ppu.x_pos { println!( "ppu.x_pos error, line {} expected state:\n{}\n> expected {}, have {}", n, &line[48..], log_ppu_x, system.ppu.x_pos ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; if log_ppu_y != system.ppu.y_pos { println!( "ppu.y_pos error, line {} expected state:\n{}\n> expected {}, have {}", n, &line[48..], log_ppu_y, system.ppu.y_pos ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; while system.cpu.cycle_deficit == 0 { cpu_cycles += usize::from(system.clock_system(&mut display)); } if log_cyc != cpu_cycles { println!( "CYC error, line {} expected state:\n{}\n> expected {}, have {}", n, &line[48..], log_cyc, cpu_cycles ); println!("previous operation was:"); println!("{}", &official_log.lines().nth(n - 2).unwrap()[16..48]); std::process::exit(1); }; } println!("success!"); }
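The test above pulls every register and counter out of a nestest.log line by fixed column offsets. As an aside, those offsets can be mirrored by a small standalone helper; the sketch below only restates the slice ranges already used in the test (PC at 0..4, A/X/Y/P/S at 50..52, 55..57, 60..62, 65..67 and 71..73), and the LogRegisters name is purely illustrative, not part of terbium.

// Hypothetical helper mirroring the fixed nestest.log column offsets used by
// the test above; type and field names are illustrative only.
struct LogRegisters {
    pc: u16,
    a: u8,
    x: u8,
    y: u8,
    p: u8,
    s: u8,
}

fn log_registers(line: &str) -> LogRegisters {
    LogRegisters {
        pc: u16::from_str_radix(&line[0..4], 16).unwrap(),
        a: u8::from_str_radix(&line[50..52], 16).unwrap(),
        x: u8::from_str_radix(&line[55..57], 16).unwrap(),
        y: u8::from_str_radix(&line[60..62], 16).unwrap(),
        p: u8::from_str_radix(&line[65..67], 16).unwrap(),
        s: u8::from_str_radix(&line[71..73], 16).unwrap(),
    }
}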
Rust
digest/src/mac.rs
tesaguri/RustCrypto-traits
e5b99207535ed36964ddfc454d1ea46fb1425a07
use crate::{FixedOutput, FixedOutputReset, Update}; use crypto_common::{InvalidLength, Key, KeyInit, KeySizeUser, Output, OutputSizeUser, Reset}; #[cfg(feature = "rand_core")] use crate::rand_core::{CryptoRng, RngCore}; use core::fmt; use generic_array::typenum::Unsigned; use subtle::{Choice, ConstantTimeEq}; #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub trait MacMarker {} #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub trait Mac: KeySizeUser + OutputSizeUser + Sized { fn new(key: &Key<Self>) -> Self; #[cfg(feature = "rand_core")] #[cfg_attr(docsrs, doc(cfg(feature = "rand_core")))] fn generate_key(rng: impl CryptoRng + RngCore) -> Key<Self>; fn new_from_slice(key: &[u8]) -> Result<Self, InvalidLength>; fn update(&mut self, data: &[u8]); fn finalize(self) -> CtOutput<Self>; fn finalize_reset(&mut self) -> CtOutput<Self> where Self: FixedOutputReset; fn reset(&mut self) where Self: Reset; fn verify(self, tag: &Output<Self>) -> Result<(), MacError>; fn verify_slice(self, tag: &[u8]) -> Result<(), MacError>; fn verify_truncated_left(self, tag: &[u8]) -> Result<(), MacError>; fn verify_truncated_right(self, tag: &[u8]) -> Result<(), MacError>; } impl<T: KeyInit + Update + FixedOutput + MacMarker> Mac for T { #[inline(always)] fn new(key: &Key<Self>) -> Self { KeyInit::new(key) } #[inline(always)] fn new_from_slice(key: &[u8]) -> Result<Self, InvalidLength> { KeyInit::new_from_slice(key) } #[inline] fn update(&mut self, data: &[u8]) { Update::update(self, data); } #[inline] fn finalize(self) -> CtOutput<Self> { CtOutput::new(self.finalize_fixed()) } #[inline(always)] fn finalize_reset(&mut self) -> CtOutput<Self> where Self: FixedOutputReset, { CtOutput::new(self.finalize_fixed_reset()) } #[inline] fn reset(&mut self) where Self: Reset, { Reset::reset(self) } #[inline] fn verify(self, tag: &Output<Self>) -> Result<(), MacError> { if self.finalize() == tag.into() { Ok(()) } else { Err(MacError) } } #[inline] fn verify_slice(self, tag: &[u8]) -> Result<(), MacError> { let n = tag.len(); if n != Self::OutputSize::USIZE { return Err(MacError); } let choice = self.finalize_fixed().ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } fn verify_truncated_left(self, tag: &[u8]) -> Result<(), MacError> { let n = tag.len(); if n == 0 || n > Self::OutputSize::USIZE { return Err(MacError); } let choice = self.finalize_fixed()[..n].ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } fn verify_truncated_right(self, tag: &[u8]) -> Result<(), MacError> { let n = tag.len(); if n == 0 || n > Self::OutputSize::USIZE { return Err(MacError); } let m = Self::OutputSize::USIZE - n; let choice = self.finalize_fixed()[m..].ct_eq(tag); if choice.unwrap_u8() == 1 { Ok(()) } else { Err(MacError) } } #[cfg(feature = "rand_core")] #[cfg_attr(docsrs, doc(cfg(feature = "rand_core")))] #[inline] fn generate_key(rng: impl CryptoRng + RngCore) -> Key<Self> { <T as KeyInit>::generate_key(rng) } } #[derive(Clone)] #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub struct CtOutput<T: OutputSizeUser> { bytes: Output<T>, } impl<T: OutputSizeUser> CtOutput<T> { #[inline(always)] pub fn new(bytes: Output<T>) -> Self { Self { bytes } } #[inline(always)] pub fn into_bytes(self) -> Output<T> { self.bytes } } impl<T: OutputSizeUser> From<Output<T>> for CtOutput<T> { #[inline(always)] fn from(bytes: Output<T>) -> Self { Self { bytes } } } impl<'a, T: OutputSizeUser> From<&'a Output<T>> for CtOutput<T> { #[inline(always)] fn from(bytes: &'a Output<T>) -> Self { bytes.clone().into() } } 
impl<T: OutputSizeUser> ConstantTimeEq for CtOutput<T> { #[inline(always)] fn ct_eq(&self, other: &Self) -> Choice { self.bytes.ct_eq(&other.bytes) } } impl<T: OutputSizeUser> PartialEq for CtOutput<T> { #[inline(always)] fn eq(&self, x: &CtOutput<T>) -> bool { self.ct_eq(x).unwrap_u8() == 1 } } impl<T: OutputSizeUser> Eq for CtOutput<T> {} #[derive(Default, Debug, Copy, Clone, Eq, PartialEq)] #[cfg_attr(docsrs, doc(cfg(feature = "mac")))] pub struct MacError; impl fmt::Display for MacError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("MAC tag mismatch") } } #[cfg(feature = "std")] impl std::error::Error for MacError {}
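For orientation, a minimal sketch of how the blanket Mac implementation above is normally consumed. It assumes the items defined in this module are in scope, and `M` stands in for any concrete type implementing KeyInit + Update + FixedOutput + MacMarker (for example an HMAC type from another crate); nothing below names a real implementation.

// Sketch only: generic over any type covered by the blanket Mac impl above.
fn tag_then_verify<M: Mac + Clone>(key: &[u8], msg: &[u8]) -> Result<(), MacError> {
    // Key setup; panics here only if the slice length does not match M's key size.
    let mac = M::new_from_slice(key).expect("key length must match M::KeySize");

    // Produce a tag over the message.
    let mut signer = mac.clone();
    signer.update(msg);
    let tag = signer.finalize().into_bytes();

    // Recompute and check; the comparison is constant-time via CtOutput's PartialEq.
    let mut verifier = mac;
    verifier.update(msg);
    verifier.verify(&tag)
}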
[ { "content": "/// Trait for hash functions with fixed-size output able to reset themselves.\n\npub trait FixedOutputReset: FixedOutput + Reset {\n\n /// Write result into provided array and reset the hasher state.\n\n fn finalize_into_reset(&mut self, out: &mut Output<Self>);\n\n\n\n /// Retrieve result and reset the hasher state.\n\n #[inline]\n\n fn finalize_fixed_reset(&mut self) -> Output<Self> {\n\n let mut out = Default::default();\n\n self.finalize_into_reset(&mut out);\n\n out\n\n }\n\n}\n\n\n", "file_path": "digest/src/lib.rs", "rank": 1, "score": 286327.21798870363 }, { "content": "/// Trait for hash functions with fixed-size output.\n\npub trait FixedOutput: Update + OutputSizeUser + Sized {\n\n /// Consume value and write result into provided array.\n\n fn finalize_into(self, out: &mut Output<Self>);\n\n\n\n /// Retrieve result and consume the hasher instance.\n\n #[inline]\n\n fn finalize_fixed(self) -> Output<Self> {\n\n let mut out = Default::default();\n\n self.finalize_into(&mut out);\n\n out\n\n }\n\n}\n\n\n", "file_path": "digest/src/lib.rs", "rank": 2, "score": 262365.28680164216 }, { "content": "/// Convert an arbitrary byte sequence into a field element.\n\n///\n\n/// <https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-11#section-5.3>\n\n///\n\n/// # Errors\n\n/// See implementors of [`ExpandMsg`] for errors:\n\n/// - [`ExpandMsgXmd`]\n\n/// - [`ExpandMsgXof`]\n\n///\n\n/// `len_in_bytes = T::Length * out.len()`\n\n///\n\n/// [`ExpandMsgXmd`]: crate::hash2field::ExpandMsgXmd\n\n/// [`ExpandMsgXof`]: crate::hash2field::ExpandMsgXof\n\npub fn hash_to_field<'a, E, T>(data: &[&[u8]], domain: &'a [u8], out: &mut [T]) -> Result<()>\n\nwhere\n\n E: ExpandMsg<'a>,\n\n T: FromOkm + Default,\n\n{\n\n let len_in_bytes = T::Length::to_usize() * out.len();\n\n let mut tmp = GenericArray::<u8, <T as FromOkm>::Length>::default();\n\n let mut expander = E::expand_message(data, domain, len_in_bytes)?;\n\n for o in out.iter_mut() {\n\n expander.fill_bytes(&mut tmp);\n\n *o = T::from_okm(&tmp);\n\n }\n\n Ok(())\n\n}\n", "file_path": "elliptic-curve/src/hash2field.rs", "rank": 3, "score": 252207.3046944584 }, { "content": "/// Fixed-output resettable digest test via the `Digest` trait\n\npub fn fixed_reset_test<D>(input: &[u8], output: &[u8]) -> Option<&'static str>\n\nwhere\n\n D: FixedOutputReset + Debug + Clone + Default + Update + HashMarker,\n\n{\n\n let mut hasher = D::new();\n\n // Test that it works when accepting the message all at once\n\n hasher.update(input);\n\n let mut hasher2 = hasher.clone();\n\n if hasher.finalize()[..] != output[..] {\n\n return Some(\"whole message\");\n\n }\n\n\n\n // Test if reset works correctly\n\n hasher2.reset();\n\n hasher2.update(input);\n\n if hasher2.finalize_reset()[..] != output[..] 
{\n\n return Some(\"whole message after reset\");\n\n }\n\n\n\n // Test that it works when accepting the message in chunks\n", "file_path": "digest/src/dev/fixed.rs", "rank": 4, "score": 248093.1301765894 }, { "content": "/// Variable-output resettable digest test\n\npub fn variable_reset_test<D>(input: &[u8], output: &[u8]) -> Option<&'static str>\n\nwhere\n\n D: VariableOutputReset + Debug + Clone,\n\n{\n\n let mut hasher = D::new(output.len()).unwrap();\n\n let mut buf = [0u8; 128];\n\n let buf = &mut buf[..output.len()];\n\n // Test that it works when accepting the message all at once\n\n hasher.update(input);\n\n let mut hasher2 = hasher.clone();\n\n hasher.finalize_variable(buf).unwrap();\n\n if buf != output {\n\n return Some(\"whole message\");\n\n }\n\n buf.iter_mut().for_each(|b| *b = 0);\n\n\n\n // Test if reset works correctly\n\n hasher2.reset();\n\n hasher2.update(input);\n\n hasher2.finalize_variable_reset(buf).unwrap();\n", "file_path": "digest/src/dev/variable.rs", "rank": 5, "score": 248088.68017274042 }, { "content": "/// Resettable XOF test\n\npub fn xof_reset_test<D>(input: &[u8], output: &[u8]) -> Option<&'static str>\n\nwhere\n\n D: ExtendableOutputReset + Default + Debug + Clone,\n\n{\n\n let mut hasher = D::default();\n\n let mut buf = [0u8; 1024];\n\n let buf = &mut buf[..output.len()];\n\n // Test that it works when accepting the message all at once\n\n hasher.update(input);\n\n let mut hasher2 = hasher.clone();\n\n hasher.finalize_xof_into(buf);\n\n if buf != output {\n\n return Some(\"whole message\");\n\n }\n\n buf.iter_mut().for_each(|b| *b = 0);\n\n\n\n // Test if reset works correctly\n\n hasher2.reset();\n\n hasher2.update(input);\n\n hasher2.finalize_xof_reset_into(buf);\n", "file_path": "digest/src/dev/xof.rs", "rank": 6, "score": 248083.95911520213 }, { "content": "/// Trait impl'd by concrete types that represent an encapsulated key. This is intended to be, in\n\n/// essence, a bag of bytes.\n\npub trait EncappedKey: AsRef<[u8]> + Debug + Sized {\n\n /// The size of the shared secret that this KEM produces\n\n type NSecret: ArrayLength<u8>;\n\n\n\n /// Represents the identity key of an encapsulator. This is used in authenticated\n\n /// decapsulation.\n\n type SenderPublicKey;\n\n\n\n /// The public key of a decapsulator. 
This is used in encapsulation.\n\n type RecipientPublicKey;\n\n}\n\n\n", "file_path": "kem/src/kem.rs", "rank": 7, "score": 240506.93197084698 }, { "content": "/// Trait for hash functions with variable-size output able to reset themselves.\n\npub trait VariableOutputReset: VariableOutput + Reset {\n\n /// Write result into the output buffer and reset the hasher state.\n\n ///\n\n /// Returns `Err(InvalidOutputSize)` if `out` size is not equal to\n\n /// `self.output_size()`.\n\n fn finalize_variable_reset(&mut self, out: &mut [u8]) -> Result<(), InvalidBufferSize>;\n\n\n\n /// Retrieve result into a boxed slice and reset the hasher state.\n\n ///\n\n /// `Box<[u8]>` is used instead of `Vec<u8>` to save stack space, since\n\n /// they have size of 2 and 3 words respectively.\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn finalize_boxed_reset(&mut self) -> Box<[u8]> {\n\n let n = self.output_size();\n\n let mut buf = vec![0u8; n].into_boxed_slice();\n\n self.finalize_variable_reset(&mut buf)\n\n .expect(\"buf length is equal to output_size\");\n\n buf\n\n }\n", "file_path": "digest/src/lib.rs", "rank": 8, "score": 234168.9564651273 }, { "content": "/// Trait for hash functions with extendable-output (XOF) able to reset themselves.\n\npub trait ExtendableOutputReset: ExtendableOutput + Reset {\n\n /// Retrieve XOF reader and reset hasher instance state.\n\n fn finalize_xof_reset(&mut self) -> Self::Reader;\n\n\n\n /// Finalize XOF, write result into `out`, and reset the hasher state.\n\n fn finalize_xof_reset_into(&mut self, out: &mut [u8]) {\n\n self.finalize_xof_reset().read(out);\n\n }\n\n\n\n /// Retrieve result into a boxed slice of the specified size and reset\n\n /// the hasher state.\n\n ///\n\n /// `Box<[u8]>` is used instead of `Vec<u8>` to save stack space, since\n\n /// they have size of 2 and 3 words respectively.\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn finalize_boxed_reset(&mut self, output_size: usize) -> Box<[u8]> {\n\n let mut buf = vec![0u8; output_size].into_boxed_slice();\n\n self.finalize_xof_reset().read(&mut buf);\n\n buf\n\n }\n\n}\n\n\n", "file_path": "digest/src/lib.rs", "rank": 9, "score": 234164.36373052714 }, { "content": "/// Trait for hash functions with variable-size output.\n\npub trait VariableOutput: Sized + Update {\n\n /// Maximum size of output hash.\n\n const MAX_OUTPUT_SIZE: usize;\n\n\n\n /// Create new hasher instance with the given output size.\n\n ///\n\n /// It will return `Err(InvalidOutputSize)` in case if hasher can not return\n\n /// hash of the specified output size.\n\n fn new(output_size: usize) -> Result<Self, InvalidOutputSize>;\n\n\n\n /// Get output size of the hasher instance provided to the `new` method\n\n fn output_size(&self) -> usize;\n\n\n\n /// Write result into the output buffer.\n\n ///\n\n /// Returns `Err(InvalidOutputSize)` if `out` size is not equal to\n\n /// `self.output_size()`.\n\n fn finalize_variable(self, out: &mut [u8]) -> Result<(), InvalidBufferSize>;\n\n\n\n /// Compute hash of `data` and write it to `output`.\n", "file_path": "digest/src/lib.rs", "rank": 10, "score": 232458.31934995376 }, { "content": "/// Trait for hash functions with extendable-output (XOF).\n\npub trait ExtendableOutput: Sized + Update {\n\n /// Reader\n\n type Reader: XofReader;\n\n\n\n /// Retrieve XOF reader and consume hasher instance.\n\n fn finalize_xof(self) -> Self::Reader;\n\n\n\n /// Finalize XOF and write result into `out`.\n\n fn 
finalize_xof_into(self, out: &mut [u8]) {\n\n self.finalize_xof().read(out);\n\n }\n\n\n\n /// Compute hash of `data` and write it into `output`.\n\n fn digest_xof(input: impl AsRef<[u8]>, output: &mut [u8])\n\n where\n\n Self: Default,\n\n {\n\n let mut hasher = Self::default();\n\n hasher.update(input.as_ref());\n\n hasher.finalize_xof().read(output);\n", "file_path": "digest/src/lib.rs", "rank": 11, "score": 232453.63662953177 }, { "content": "/// Types which can be initialized from key.\n\npub trait KeyInit: KeySizeUser + Sized {\n\n /// Create new value from fixed size key.\n\n fn new(key: &Key<Self>) -> Self;\n\n\n\n /// Create new value from variable size key.\n\n fn new_from_slice(key: &[u8]) -> Result<Self, InvalidLength> {\n\n if key.len() != Self::KeySize::to_usize() {\n\n Err(InvalidLength)\n\n } else {\n\n Ok(Self::new(Key::<Self>::from_slice(key)))\n\n }\n\n }\n\n\n\n /// Generate random key using the provided [`CryptoRng`].\n\n #[cfg(feature = \"rand_core\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"rand_core\")))]\n\n #[inline]\n\n fn generate_key(mut rng: impl CryptoRng + RngCore) -> Key<Self> {\n\n let mut key = Key::<Self>::default();\n\n rng.fill_bytes(&mut key);\n\n key\n\n }\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 12, "score": 230583.72134511676 }, { "content": "/// Variable-output resettable digest test\n\npub fn fixed_test<D>(input: &[u8], output: &[u8]) -> Option<&'static str>\n\nwhere\n\n D: FixedOutput + Default + Debug + Clone,\n\n{\n\n let mut hasher = D::default();\n\n // Test that it works when accepting the message all at once\n\n hasher.update(input);\n\n if hasher.finalize_fixed()[..] != output[..] {\n\n return Some(\"whole message\");\n\n }\n\n\n\n // Test that it works when accepting the message in chunks\n\n for n in 1..core::cmp::min(17, input.len()) {\n\n let mut hasher = D::default();\n\n for chunk in input.chunks(n) {\n\n hasher.update(chunk);\n\n }\n\n if hasher.finalize_fixed()[..] != output[..] {\n\n return Some(\"message in chunks\");\n\n }\n\n }\n\n None\n\n}\n", "file_path": "digest/src/dev/fixed.rs", "rank": 13, "score": 218649.53106935954 }, { "content": "/// Variable-output resettable digest test\n\npub fn variable_test<D>(input: &[u8], output: &[u8]) -> Option<&'static str>\n\nwhere\n\n D: VariableOutput + Debug + Clone,\n\n{\n\n let mut hasher = D::new(output.len()).unwrap();\n\n let mut buf = [0u8; 128];\n\n let buf = &mut buf[..output.len()];\n\n // Test that it works when accepting the message all at once\n\n hasher.update(input);\n\n hasher.finalize_variable(buf).unwrap();\n\n if buf != output {\n\n return Some(\"whole message\");\n\n }\n\n buf.iter_mut().for_each(|b| *b = 0);\n\n\n\n // Test that it works when accepting the message in chunks\n\n for n in 1..core::cmp::min(17, input.len()) {\n\n let mut hasher = D::new(output.len()).unwrap();\n\n for chunk in input.chunks(n) {\n\n hasher.update(chunk);\n\n }\n\n hasher.finalize_variable(buf).unwrap();\n\n if buf != output {\n\n return Some(\"message in chunks\");\n\n }\n\n buf.iter_mut().for_each(|b| *b = 0);\n\n }\n\n None\n\n}\n", "file_path": "digest/src/dev/variable.rs", "rank": 14, "score": 218649.53106935957 }, { "content": "/// Core trait for hash functions with variable output size.\n\n///\n\n/// Maximum output size is equal to [`OutputSizeUser::OutputSize`].\n\n/// Users are expected to truncate result returned by the\n\n/// [`finalize_variable_core`] to `output_size` passed to the [`new`] method\n\n/// during construction. 
Truncation side is defined by the [`TRUNC_SIDE`]\n\n/// associated constant.\n\n///\n\n/// [`finalize_variable_core`]: VariableOutputCore::finalize_variable_core\n\n/// [`new`]: VariableOutputCore::new\n\n/// [`TRUNC_SIDE`]: VariableOutputCore::TRUNC_SIDE\n\npub trait VariableOutputCore: UpdateCore + OutputSizeUser + BufferKindUser + Sized\n\nwhere\n\n Self::BlockSize: IsLess<U256>,\n\n Le<Self::BlockSize, U256>: NonZero,\n\n{\n\n /// Side which should be used in a truncated result.\n\n const TRUNC_SIDE: TruncSide;\n\n\n\n /// Initialize hasher state for given output size.\n\n ///\n\n /// Returns [`InvalidOutputSize`] if `output_size` is not valid for\n\n /// the algorithm, e.g. if it's bigger than the [`OutputSize`]\n\n /// associated type.\n\n ///\n\n /// [`OutputSize`]: OutputSizeUser::OutputSize\n\n fn new(output_size: usize) -> Result<Self, InvalidOutputSize>;\n\n\n\n /// Finalize hasher and write full hashing result into the `out` buffer.\n\n ///\n\n /// The result must be truncated to `output_size` used during hasher\n", "file_path": "digest/src/core_api.rs", "rank": 15, "score": 214802.0258079274 }, { "content": "/// The [`Isogeny`] methods to map to another curve.\n\npub trait Isogeny: Field + AddAssign + Mul<Output = Self> {\n\n /// The maximum number of coefficients\n\n type Degree: ArrayLength<Self>;\n\n /// The isogeny coefficients\n\n const COEFFICIENTS: IsogenyCoefficients<Self>;\n\n\n\n /// Map from the isogeny points to the main curve\n\n fn isogeny(x: Self, y: Self) -> (Self, Self) {\n\n let mut xs = GenericArray::<Self, Self::Degree>::default();\n\n xs[0] = Self::one();\n\n xs[1] = x;\n\n xs[2] = x.square();\n\n for i in 3..Self::Degree::to_usize() {\n\n xs[i] = xs[i - 1] * x;\n\n }\n\n let x_num = Self::compute_iso(&xs, Self::COEFFICIENTS.xnum);\n\n let x_den = Self::compute_iso(&xs, Self::COEFFICIENTS.xden)\n\n .invert()\n\n .unwrap();\n\n let y_num = Self::compute_iso(&xs, Self::COEFFICIENTS.ynum) * y;\n", "file_path": "elliptic-curve/src/hash2curve/isogeny.rs", "rank": 16, "score": 212950.93891764703 }, { "content": "/// Adds hashing arbitrary byte sequences to a valid group element\n\npub trait GroupDigest: ProjectiveArithmetic<ProjectivePoint = Self::Output> {\n\n /// The field element representation for a group value with multiple elements\n\n type FieldElement: FromOkm + MapToCurve<Output = Self::Output> + Default + Copy;\n\n /// The resulting group element\n\n type Output: CofactorGroup<Subgroup = Self::Output>;\n\n\n\n /// Computes the hash to curve routine.\n\n ///\n\n /// From <https://www.ietf.org/archive/id/draft-irtf-cfrg-hash-to-curve-13.html>:\n\n ///\n\n /// > Uniform encoding from byte strings to points in G.\n\n /// > That is, the distribution of its output is statistically close\n\n /// > to uniform in G.\n\n /// > This function is suitable for most applications requiring a random\n\n /// > oracle returning points in G assuming a cryptographically secure\n\n /// > hash function is used.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ## Using a fixed size hash function\n", "file_path": "elliptic-curve/src/hash2curve/group_digest.rs", "rank": 17, "score": 211893.56709929823 }, { "content": "/// Core trait for hash functions with fixed output size.\n\npub trait FixedOutputCore: UpdateCore + BufferKindUser + OutputSizeUser\n\nwhere\n\n Self::BlockSize: IsLess<U256>,\n\n Le<Self::BlockSize, U256>: NonZero,\n\n{\n\n /// Finalize state using remaining data stored in the provided block buffer,\n\n /// write result into provided array and leave `self` in a dirty 
state.\n\n fn finalize_fixed_core(&mut self, buffer: &mut Buffer<Self>, out: &mut Output<Self>);\n\n}\n\n\n", "file_path": "digest/src/core_api.rs", "rank": 18, "score": 209073.62091962903 }, { "content": "/// Trait impl'd by concrete types that represent digital signatures.\n\n///\n\n/// Signature types *must* (as mandated by the `AsRef<[u8]>` bound) be a thin\n\n/// wrapper around the \"bag-of-bytes\" serialized form of a signature which can\n\n/// be directly parsed from or written to the \"wire\".\n\n///\n\n/// Inspiration for this approach comes from the Ed25519 signature system,\n\n/// which adopted it based on the observation that past signature systems\n\n/// were not prescriptive about how signatures should be represented\n\n/// on-the-wire, and that lead to a proliferation of different wire formats and\n\n/// confusion about which ones should be used.\n\n///\n\n/// The [`Signature`] trait aims to provide similar simplicity by minimizing\n\n/// the number of steps involved to obtain a serializable signature and\n\n/// ideally ensuring there is one signature type for any given signature system\n\n/// shared by all \"provider\" crates.\n\n///\n\n/// For signature systems which require a more advanced internal representation\n\n/// (e.g. involving decoded scalars or decompressed elliptic curve points) it's\n\n/// recommended that \"provider\" libraries maintain their own internal signature\n\n/// type and use `From` bounds to provide automatic conversions.\n\npub trait Signature: AsRef<[u8]> + Debug + Sized {\n\n /// Parse a signature from its byte representation\n\n fn from_bytes(bytes: &[u8]) -> Result<Self, Error>;\n\n\n\n /// Borrow a byte slice representing the serialized form of this signature\n\n fn as_bytes(&self) -> &[u8] {\n\n self.as_ref()\n\n }\n\n}\n\n\n\n/// Marker trait for `Signature` types computable as `𝐒(𝐇(𝒎))`\n\n/// i.e. ones which prehash a message to be signed as `𝐇(𝒎)`\n\n///\n\n/// Where:\n\n///\n\n/// - `𝐒`: signature algorithm\n\n/// - `𝐇`: hash (a.k.a. 
digest) function\n\n/// - `𝒎`: message\n\n///\n\n/// This approach is relatively common in signature schemes based on the\n\n/// [Fiat-Shamir heuristic].\n\n///\n\n/// For signature types that implement this trait, when the `derive-preview`\n\n/// Cargo feature is enabled a custom derive for [`Signer`] is available for any\n\n/// types that impl [`DigestSigner`], and likewise for deriving [`Verifier`] for\n\n/// types which impl [`DigestVerifier`].\n\n///\n\n/// [Fiat-Shamir heuristic]: https://en.wikipedia.org/wiki/Fiat%E2%80%93Shamir_heuristic\n", "file_path": "signature/src/signature.rs", "rank": 19, "score": 207830.38010667582 }, { "content": "/// Trait for types which can be created from key.\n\npub trait FromKey: Sized {\n\n /// Key size in bytes.\n\n type KeySize: ArrayLength<u8>;\n\n\n\n /// Create new value from fixed size key.\n\n fn new(key: &GenericArray<u8, Self::KeySize>) -> Self;\n\n\n\n /// Create new value from variable size key.\n\n fn new_from_slice(key: &[u8]) -> Result<Self, InvalidLength> {\n\n if key.len() != Self::KeySize::to_usize() {\n\n Err(InvalidLength)\n\n } else {\n\n Ok(Self::new(GenericArray::from_slice(key)))\n\n }\n\n }\n\n\n\n /// Generate a random key using the provided [`CryptoRng`].\n\n #[cfg(feature = \"rand_core\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"rand_core\")))]\n\n #[inline]\n\n fn generate_key(mut rng: impl CryptoRng + RngCore) -> GenericArray<u8, Self::KeySize> {\n\n let mut key = GenericArray::<u8, Self::KeySize>::default();\n\n rng.fill_bytes(&mut key);\n\n key\n\n }\n\n}\n", "file_path": "cipher/src/lib.rs", "rank": 20, "score": 204758.4746677705 }, { "content": "/// Trait for types which can be created from key and nonce.\n\npub trait FromKeyNonce: Sized {\n\n /// Key size in bytes.\n\n type KeySize: ArrayLength<u8>;\n\n\n\n /// Nonce size in bytes.\n\n type NonceSize: ArrayLength<u8>;\n\n\n\n /// Create new value from fixed length key and nonce.\n\n fn new(\n\n key: &GenericArray<u8, Self::KeySize>,\n\n nonce: &GenericArray<u8, Self::NonceSize>,\n\n ) -> Self;\n\n\n\n /// Create new value from variable length key and nonce.\n\n #[inline]\n\n fn new_from_slices(key: &[u8], nonce: &[u8]) -> Result<Self, InvalidLength> {\n\n let kl = Self::KeySize::to_usize();\n\n let nl = Self::NonceSize::to_usize();\n\n if key.len() != kl || nonce.len() != nl {\n\n Err(InvalidLength)\n", "file_path": "cipher/src/lib.rs", "rank": 21, "score": 200638.06796756134 }, { "content": "/// Types which return data with the given size.\n\npub trait OutputSizeUser {\n\n /// Size of the output in bytes.\n\n type OutputSize: ArrayLength<u8> + 'static;\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 22, "score": 200633.93323987833 }, { "content": "/// Types which use key for initialization.\n\n///\n\n/// Generally it's used indirectly via [`KeyInit`] or [`KeyIvInit`].\n\npub trait KeySizeUser {\n\n /// Key size in bytes.\n\n type KeySize: ArrayLength<u8> + 'static;\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 23, "score": 200340.49948162335 }, { "content": "/// Resettable types.\n\npub trait Reset {\n\n /// Reset state to its initial value.\n\n fn reset(&mut self);\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 24, "score": 198624.3648349392 }, { "content": "/// Convinience wrapper trait covering functionality of cryptographic hash\n\n/// functions with fixed output size.\n\n///\n\n/// This trait wraps [`Update`], [`FixedOutput`], [`Default`], and\n\n/// [`HashMarker`] traits and provides additional convenience 
methods.\n\npub trait Digest: OutputSizeUser {\n\n /// Create new hasher instance.\n\n fn new() -> Self;\n\n\n\n /// Create new hasher instance which has processed the provided data.\n\n fn new_with_prefix(data: impl AsRef<[u8]>) -> Self;\n\n\n\n /// Process data, updating the internal state.\n\n fn update(&mut self, data: impl AsRef<[u8]>);\n\n\n\n /// Process input data in a chained manner.\n\n fn chain_update(self, data: impl AsRef<[u8]>) -> Self;\n\n\n\n /// Retrieve result and consume hasher instance.\n\n fn finalize(self) -> Output<Self>;\n\n\n\n /// Write result into provided array and consume the hasher instance.\n\n fn finalize_into(self, out: &mut Output<Self>);\n\n\n\n /// Retrieve result and reset hasher instance.\n", "file_path": "digest/src/digest.rs", "rank": 25, "score": 197028.71451455602 }, { "content": "/// Types which can be initialized from key and initialization vector (nonce).\n\npub trait KeyIvInit: KeySizeUser + IvSizeUser + Sized {\n\n /// Create new value from fixed length key and nonce.\n\n fn new(key: &Key<Self>, iv: &Iv<Self>) -> Self;\n\n\n\n /// Create new value from variable length key and nonce.\n\n #[inline]\n\n fn new_from_slices(key: &[u8], iv: &[u8]) -> Result<Self, InvalidLength> {\n\n let key_len = Self::KeySize::USIZE;\n\n let iv_len = Self::IvSize::USIZE;\n\n if key.len() != key_len || iv.len() != iv_len {\n\n Err(InvalidLength)\n\n } else {\n\n Ok(Self::new(\n\n Key::<Self>::from_slice(key),\n\n Iv::<Self>::from_slice(iv),\n\n ))\n\n }\n\n }\n\n\n\n /// Generate random key using the provided [`CryptoRng`].\n", "file_path": "crypto-common/src/lib.rs", "rank": 26, "score": 196133.20519377684 }, { "content": "/// Types which consume data in blocks.\n\npub trait UpdateCore: BlockSizeUser {\n\n /// Update state using the provided data blocks.\n\n fn update_blocks(&mut self, blocks: &[Block<Self>]);\n\n}\n\n\n", "file_path": "digest/src/core_api.rs", "rank": 27, "score": 189931.0043520546 }, { "content": "/// In-place encryption/decryption byte buffers.\n\n///\n\n/// This trait defines the set of methods needed to support in-place operations\n\n/// on a `Vec`-like data type.\n\npub trait Buffer: AsRef<[u8]> + AsMut<[u8]> {\n\n /// Get the length of the buffer\n\n fn len(&self) -> usize {\n\n self.as_ref().len()\n\n }\n\n\n\n /// Is the buffer empty?\n\n fn is_empty(&self) -> bool {\n\n self.as_ref().is_empty()\n\n }\n\n\n\n /// Extend this buffer from the given slice\n\n fn extend_from_slice(&mut self, other: &[u8]) -> Result<()>;\n\n\n\n /// Truncate this buffer to the given size\n\n fn truncate(&mut self, len: usize);\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\nimpl Buffer for Vec<u8> {\n", "file_path": "aead/src/lib.rs", "rank": 28, "score": 189636.5847253651 }, { "content": "/// Core trait for hash functions with extendable (XOF) output size.\n\npub trait ExtendableOutputCore: UpdateCore + BufferKindUser\n\nwhere\n\n Self::BlockSize: IsLess<U256>,\n\n Le<Self::BlockSize, U256>: NonZero,\n\n{\n\n /// XOF reader core state.\n\n type ReaderCore: XofReaderCore;\n\n\n\n /// Retrieve XOF reader using remaining data stored in the block buffer\n\n /// and leave hasher in a dirty state.\n\n fn finalize_xof_core(&mut self, buffer: &mut Buffer<Self>) -> Self::ReaderCore;\n\n}\n\n\n", "file_path": "digest/src/core_api.rs", "rank": 30, "score": 177063.67422151833 }, { "content": "/// Types which consume data with byte granularity.\n\npub trait Update {\n\n /// Update state using the provided data.\n\n fn update(&mut self, data: &[u8]);\n\n\n\n /// Digest input 
data in a chained manner.\n\n fn chain(mut self, data: impl AsRef<[u8]>) -> Self\n\n where\n\n Self: Sized,\n\n {\n\n self.update(data.as_ref());\n\n self\n\n }\n\n}\n\n\n", "file_path": "digest/src/lib.rs", "rank": 31, "score": 171708.36734256783 }, { "content": "/// Types which use initialization vector (nonce) for initialization.\n\n///\n\n/// Generally it's used indirectly via [`KeyIvInit`] or [`InnerIvInit`].\n\npub trait IvSizeUser {\n\n /// Initialization vector size in bytes.\n\n type IvSize: ArrayLength<u8> + 'static;\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 32, "score": 162287.01107048974 }, { "content": "/// Types which process data in blocks.\n\npub trait BlockSizeUser {\n\n /// Size of the block in bytes.\n\n type BlockSize: ArrayLength<u8> + 'static;\n\n}\n\n\n\nimpl<T: BlockSizeUser> BlockSizeUser for &T {\n\n type BlockSize = T::BlockSize;\n\n}\n\n\n\nimpl<T: BlockSizeUser> BlockSizeUser for &mut T {\n\n type BlockSize = T::BlockSize;\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 33, "score": 162282.62840794816 }, { "content": "/// Validate that the given [`EncodedPoint`] represents the encoded public key\n\n/// value of the given secret.\n\n///\n\n/// Curve implementations which also impl [`ProjectiveArithmetic`] will receive\n\n/// a blanket default impl of this trait.\n\npub trait ValidatePublicKey\n\nwhere\n\n Self: Curve,\n\n FieldSize<Self>: ModulusSize,\n\n{\n\n /// Validate that the given [`EncodedPoint`] is a valid public key for the\n\n /// provided secret value.\n\n #[allow(unused_variables)]\n\n fn validate_public_key(\n\n secret_key: &SecretKey<Self>,\n\n public_key: &EncodedPoint<Self>,\n\n ) -> Result<()> {\n\n // Provide a default \"always succeeds\" implementation.\n\n // This is the intended default for curve implementations which\n\n // do not provide an arithmetic implementation, since they have no\n\n // way to verify this.\n\n //\n\n // Implementations with an arithmetic impl will receive a blanket impl\n\n // of this trait.\n\n Ok(())\n", "file_path": "elliptic-curve/src/sec1.rs", "rank": 34, "score": 161937.4041038938 }, { "content": "/// Instantiate a [`UniversalHash`] algorithm.\n\npub trait NewUniversalHash: Sized {\n\n /// Size of the key for the universal hash function.\n\n type KeySize: ArrayLength<u8>;\n\n\n\n /// Instantiate a universal hash function with the given key.\n\n fn new(key: &Key<Self>) -> Self;\n\n}\n\n\n", "file_path": "universal-hash/src/lib.rs", "rank": 35, "score": 155877.26485167493 }, { "content": "/// Represents the functionality of a key encapsulator. For unauthenticated encapsulation, `Self`\n\n/// can be an empty struct. For authenticated encapsulation, `Self` is a private key.\n\npub trait Encapsulator<EK: EncappedKey> {\n\n /// Attempt to encapsulate a fresh shared secret with the given recipient. The resulting shared\n\n /// secret is bound to the identity encoded in `Self` (i.e., authenticated wrt `Self`). If\n\n /// `Self` is empty, then this is equivalent to unauthenticated encapsulation. 
Returns the\n\n /// shared secret and encapsulated key on success, or an error if something went wrong.\n\n fn try_encap<R: CryptoRng + RngCore>(\n\n &self,\n\n csprng: &mut R,\n\n recip_pubkey: &EK::RecipientPublicKey,\n\n ) -> Result<(EK, GenericArray<u8, EK::NSecret>), Error>;\n\n}\n\n\n", "file_path": "kem/src/kem.rs", "rank": 36, "score": 155205.3273910582 }, { "content": "/// Represents the functionality of a key decapsulator, where `Self` is a cryptographic key\n\npub trait Decapsulator<EK: EncappedKey> {\n\n /// Attempt to decapsulate the given encapsulated key. Returns the shared secret on success, or\n\n /// an error if something went wrong.\n\n fn try_decap(&self, encapped_key: &EK) -> Result<GenericArray<u8, EK::NSecret>, Error>;\n\n}\n\n\n", "file_path": "kem/src/kem.rs", "rank": 37, "score": 155205.03767533627 }, { "content": "/// Represents the functionality of a authenticated-key decapsulator, where `Self` is a\n\n/// cryptographic key\n\npub trait AuthDecapsulator<EK: EncappedKey> {\n\n /// Attempt to decapsulate the given encapsulated key. The resulting shared secret is bound to\n\n /// the provided sender identity, thus providing authenticity. Returns the shared secret\n\n /// success, or an error if something went wrong.\n\n fn try_auth_decap(\n\n &self,\n\n encapped_key: &EK,\n\n sender_pubkey: &EK::SenderPublicKey,\n\n ) -> Result<GenericArray<u8, EK::NSecret>, Error>;\n\n}\n", "file_path": "kem/src/kem.rs", "rank": 38, "score": 152427.1450779858 }, { "content": "/// Core reader trait for extendable-output function (XOF) result.\n\npub trait XofReaderCore: BlockSizeUser {\n\n /// Read next XOF block.\n\n fn read_block(&mut self) -> Block<Self>;\n\n}\n\n\n", "file_path": "digest/src/core_api.rs", "rank": 39, "score": 150709.26889120205 }, { "content": "/// Types which use [`BlockBuffer`] functionality.\n\npub trait BufferKindUser: BlockSizeUser {\n\n /// Block buffer kind over which type operates.\n\n type BufferKind: BufferKind;\n\n}\n\n\n", "file_path": "digest/src/core_api.rs", "rank": 40, "score": 150700.18290687978 }, { "content": "/// Types which can be initialized from another type (usually block ciphers).\n\n///\n\n/// Usually used for initializing types from block ciphers.\n\npub trait InnerInit: InnerUser + Sized {\n\n /// Initialize value from the `inner`.\n\n fn inner_init(inner: Self::Inner) -> Self;\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 41, "score": 150113.08015633206 }, { "content": "/// Types which can be initialized from another type and additional initialization\n\n/// vector/nonce.\n\n///\n\n/// Usually used for initializing types from block ciphers.\n\npub trait InnerIvInit: InnerUser + IvSizeUser + Sized {\n\n /// Initialize value using `inner` and `iv` array.\n\n fn inner_iv_init(inner: Self::Inner, iv: &Iv<Self>) -> Self;\n\n\n\n /// Initialize value using `inner` and `iv` slice.\n\n fn inner_iv_slice_init(inner: Self::Inner, iv: &[u8]) -> Result<Self, InvalidLength> {\n\n if iv.len() != Self::IvSize::to_usize() {\n\n Err(InvalidLength)\n\n } else {\n\n Ok(Self::inner_iv_init(inner, Iv::<Self>::from_slice(iv)))\n\n }\n\n }\n\n\n\n /// Generate random IV using the provided [`CryptoRng`].\n\n #[cfg(feature = \"rand_core\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"rand_core\")))]\n\n #[inline]\n\n fn generate_iv(mut rng: impl CryptoRng + RngCore) -> Iv<Self> {\n\n let mut iv = Iv::<Self>::default();\n\n rng.fill_bytes(&mut iv);\n", "file_path": "crypto-common/src/lib.rs", "rank": 42, "score": 149067.5267352791 }, { 
"content": "/// Decompact an elliptic curve point from an x-coordinate.\n\n///\n\n/// Decompaction relies on properties of specially-generated keys but provides\n\n/// a more compact representation than standard point compression.\n\npub trait DecompactPoint<C: Curve>: Sized {\n\n /// Attempt to decompact an elliptic curve point\n\n fn decompact(x: &FieldBytes<C>) -> CtOption<Self>;\n\n}\n\n\n", "file_path": "elliptic-curve/src/point.rs", "rank": 43, "score": 147399.56044408865 }, { "content": "/// Decompress an elliptic curve point.\n\n///\n\n/// Point decompression recovers an original curve point from its x-coordinate\n\n/// and a boolean flag indicating whether or not the y-coordinate is odd.\n\npub trait DecompressPoint<C: Curve>: Sized {\n\n /// Attempt to decompress an elliptic curve point.\n\n fn decompress(x: &FieldBytes<C>, y_is_odd: Choice) -> CtOption<Self>;\n\n}\n\n\n", "file_path": "elliptic-curve/src/point.rs", "rank": 44, "score": 147395.3697768667 }, { "content": "#[derive(Debug)]\n\nstruct DummySignature(GenericArray<u8, <Sha256 as OutputSizeUser>::OutputSize>);\n\n\n\nimpl Signature for DummySignature {\n\n fn from_bytes(bytes: &[u8]) -> Result<Self, Error> {\n\n Ok(DummySignature(GenericArray::clone_from_slice(\n\n bytes.as_ref(),\n\n )))\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for DummySignature {\n\n fn as_ref(&self) -> &[u8] {\n\n self.0.as_ref()\n\n }\n\n}\n\n\n\nimpl PrehashSignature for DummySignature {\n\n type Digest = Sha256;\n\n}\n\n\n\n/// Dummy signer which just returns the message digest as a `DummySignature`\n", "file_path": "signature/tests/signature_derive.rs", "rank": 45, "score": 141666.42257611893 }, { "content": "/// Modular reduction.\n\npub trait Reduce<UInt: Integer + ArrayEncoding>: Sized {\n\n /// Perform a modular reduction, returning a field element.\n\n fn from_uint_reduced(n: UInt) -> Self;\n\n\n\n /// Interpret the given byte array as a big endian integer and perform\n\n /// a modular reduction.\n\n fn from_be_bytes_reduced(bytes: ByteArray<UInt>) -> Self {\n\n Self::from_uint_reduced(UInt::from_be_byte_array(bytes))\n\n }\n\n\n\n /// Interpret the given byte array as a little endian integer and perform a\n\n /// modular reduction.\n\n fn from_le_bytes_reduced(bytes: ByteArray<UInt>) -> Self {\n\n Self::from_uint_reduced(UInt::from_le_byte_array(bytes))\n\n }\n\n\n\n /// Interpret a digest as a big endian integer and perform a modular\n\n /// reduction.\n\n #[cfg(feature = \"digest\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"digest\")))]\n", "file_path": "elliptic-curve/src/ops.rs", "rank": 46, "score": 140068.95234666232 }, { "content": "/// Feed ~1 MiB of pseudorandom data to an updatable state.\n\npub fn feed_rand_16mib<D: crate::Update>(d: &mut D) {\n\n let buf = &mut [0u8; 1024];\n\n let mut rng = rng::RNG;\n\n let n = 16 * (1 << 20) / buf.len();\n\n for _ in 0..n {\n\n rng.fill(buf);\n\n d.update(buf);\n\n // additional byte, so size of feeded data\n\n // will not be multiple of block size\n\n d.update(&[42]);\n\n }\n\n}\n", "file_path": "digest/src/dev.rs", "rank": 47, "score": 139896.8693332627 }, { "content": "/// Trait for a block mode, used to obtain the current state in the form of an IV\n\n/// that can initialize a BlockMode later and resume the original operation.\n\n///\n\n/// The IV value SHOULD be used for resuming operations only and MUST NOT be\n\n/// exposed to attackers. Failing to comply with this requirement breaks\n\n/// unpredictability and opens attack venues (see e.g. [1], sec. 
3.6.2).\n\n///\n\n/// [1]: https://www.cs.umd.edu/~jkatz/imc.html\n\npub trait BlockModeIvState<C: BlockCipher>: FromKeyNonce {\n\n /// Returns the IV needed to process the following block. This value MUST\n\n /// NOT be exposed to attackers.\n\n fn iv_state(&self) -> GenericArray<u8, Self::NonceSize>;\n\n}\n", "file_path": "cipher/src/mode.rs", "rank": 48, "score": 139842.44055520944 }, { "content": "/// Trait for password verification.\n\n///\n\n/// Automatically impl'd for any type that impls [`PasswordHasher`].\n\n///\n\n/// This trait is object safe and can be used to implement abstractions over\n\n/// multiple password hashing algorithms. One such abstraction is provided by\n\n/// the [`PasswordHash::verify_password`] method.\n\npub trait PasswordVerifier {\n\n /// Compute this password hashing function against the provided password\n\n /// using the parameters from the provided password hash and see if the\n\n /// computed output matches.\n\n fn verify_password(&self, password: &[u8], hash: &PasswordHash<'_>) -> Result<()>;\n\n}\n\n\n\nimpl<T: PasswordHasher> PasswordVerifier for T {\n\n fn verify_password(&self, password: &[u8], hash: &PasswordHash<'_>) -> Result<()> {\n\n if let (Some(salt), Some(expected_output)) = (&hash.salt, &hash.hash) {\n\n let computed_hash = self.hash_password_customized(\n\n password,\n\n Some(hash.algorithm),\n\n hash.version,\n\n T::Params::try_from(hash)?,\n\n *salt,\n\n )?;\n\n\n\n if let Some(computed_output) = &computed_hash.hash {\n\n // See notes on `Output` about the use of a constant-time comparison\n\n if expected_output == computed_output {\n\n return Ok(());\n\n }\n\n }\n\n }\n\n\n\n Err(Error::Password)\n\n }\n\n}\n\n\n", "file_path": "password-hash/src/traits.rs", "rank": 49, "score": 135692.96586722796 }, { "content": "/// Trait for password hashing functions.\n\npub trait PasswordHasher {\n\n /// Algorithm-specific parameters.\n\n type Params: Clone\n\n + Debug\n\n + Default\n\n + for<'a> TryFrom<&'a PasswordHash<'a>, Error = Error>\n\n + TryInto<ParamsString, Error = Error>;\n\n\n\n /// Compute a [`PasswordHash`] from the provided password using an\n\n /// explicit set of customized algorithm parameters as opposed to the\n\n /// defaults.\n\n ///\n\n /// When in doubt, use [`PasswordHasher::hash_password`] instead.\n\n fn hash_password_customized<'a>(\n\n &self,\n\n password: &[u8],\n\n algorithm: Option<Ident<'a>>,\n\n version: Option<Decimal>,\n\n params: Self::Params,\n\n salt: impl Into<Salt<'a>>,\n", "file_path": "password-hash/src/traits.rs", "rank": 50, "score": 135684.47675748734 }, { "content": "/// Trait for password hashing algorithms which support the legacy\n\n/// [Modular Crypt Format (MCF)][MCF].\n\n///\n\n/// [MCF]: https://passlib.readthedocs.io/en/stable/modular_crypt_format.html\n\npub trait McfHasher {\n\n /// Upgrade an MCF hash to a PHC hash. 
MCF follow this rough format:\n\n ///\n\n /// ```text\n\n /// $<id>$<content>\n\n /// ```\n\n ///\n\n /// MCF hashes are otherwise largely unstructured and parsed according to\n\n /// algorithm-specific rules so hashers must parse a raw string themselves.\n\n fn upgrade_mcf_hash<'a>(&self, hash: &'a str) -> Result<PasswordHash<'a>>;\n\n\n\n /// Verify a password hash in MCF format against the provided password.\n\n fn verify_mcf_hash(&self, password: &[u8], mcf_hash: &str) -> Result<()>\n\n where\n\n Self: PasswordVerifier,\n\n {\n\n self.verify_password(password, &self.upgrade_mcf_hash(mcf_hash)?)\n\n }\n\n}\n", "file_path": "password-hash/src/traits.rs", "rank": 51, "score": 135683.74969688756 }, { "content": "/// Modular reduction to a non-zero output.\n\n///\n\n/// This trait is primarily intended for use by curve implementations such\n\n/// as the `k256` and `p256` crates.\n\n///\n\n/// End users should use the [`Reduce`] impl on\n\n/// [`NonZeroScalar`][`crate::NonZeroScalar`] instead.\n\npub trait ReduceNonZero<UInt: Integer + ArrayEncoding>: Sized {\n\n /// Perform a modular reduction, returning a field element.\n\n fn from_uint_reduced_nonzero(n: UInt) -> Self;\n\n}\n", "file_path": "elliptic-curve/src/ops.rs", "rank": 52, "score": 135575.6044786743 }, { "content": "#[derive(Debug)]\n\nstruct X3DhEncappedKey([u8; P256_POINT_SIZE]);\n\nimpl EncappedKey for X3DhEncappedKey {\n\n type NSecret = U32;\n\n type SenderPublicKey = X3DhSenderPublicKey;\n\n type RecipientPublicKey = X3DhPubkeyBundle;\n\n}\n\nimpl AsRef<[u8]> for X3DhEncappedKey {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "kem/tests/x3dh.rs", "rank": 53, "score": 134505.38330906664 }, { "content": "// A simple wrapper around the keypair generation function\n\nfn gen_keypair<R: RngCore + CryptoRng>(csprng: &mut R) -> (X25519PrivateKey, X25519PublicKey) {\n\n let (sk, pk) = X25519HkdfSha256::gen_keypair(csprng);\n\n let wrapped_pk = X25519PublicKey(pk);\n\n\n\n (sk, wrapped_pk)\n\n}\n\n\n", "file_path": "kem/tests/hpke.rs", "rank": 54, "score": 128434.20314290741 }, { "content": "/// Trait for reader types which are used to extract extendable output\n\n/// from a XOF (extendable-output function) result.\n\npub trait XofReader {\n\n /// Read output into the `buffer`. Can be called an unlimited number of times.\n\n fn read(&mut self, buffer: &mut [u8]);\n\n\n\n /// Read output into a boxed slice of the specified size.\n\n ///\n\n /// Can be called an unlimited number of times in combination with `read`.\n\n ///\n\n /// `Box<[u8]>` is used instead of `Vec<u8>` to save stack space, since\n\n /// they have size of 2 and 3 words respectively.\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn read_boxed(&mut self, n: usize) -> Box<[u8]> {\n\n let mut buf = vec![0u8; n].into_boxed_slice();\n\n self.read(&mut buf);\n\n buf\n\n }\n\n}\n\n\n", "file_path": "digest/src/lib.rs", "rank": 55, "score": 127871.15578099927 }, { "content": "/// Authenticated Encryption with Associated Data (AEAD) algorithm core trait.\n\n///\n\n/// Defines nonce, tag, and overhead sizes that are consumed by various other\n\n/// `Aead*` traits.\n\npub trait AeadCore {\n\n /// The length of a nonce.\n\n type NonceSize: ArrayLength<u8>;\n\n\n\n /// The maximum length of the nonce.\n\n type TagSize: ArrayLength<u8>;\n\n\n\n /// The upper bound amount of additional space required to support a\n\n /// ciphertext vs. 
a plaintext.\n\n type CiphertextOverhead: ArrayLength<u8> + Unsigned;\n\n}\n\n\n\n/// Authenticated Encryption with Associated Data (AEAD) algorithm.\n\n///\n\n/// This trait is intended for use with stateless AEAD algorithms. The\n\n/// [`AeadMut`] trait provides a stateful interface.\n", "file_path": "aead/src/lib.rs", "rank": 56, "score": 127870.79656424171 }, { "content": "/// Modification of the [`Digest`] trait suitable for trait objects.\n\npub trait DynDigest {\n\n /// Digest input data.\n\n ///\n\n /// This method can be called repeatedly for use with streaming messages.\n\n fn update(&mut self, data: &[u8]);\n\n\n\n /// Retrieve result and reset hasher instance\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n fn finalize_reset(&mut self) -> Box<[u8]> {\n\n let mut result = vec![0; self.output_size()];\n\n self.finalize_into_reset(&mut result).unwrap();\n\n result.into_boxed_slice()\n\n }\n\n\n\n /// Retrieve result and consume boxed hasher instance\n\n #[cfg(feature = \"alloc\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\n #[allow(clippy::boxed_local)]\n\n fn finalize(mut self: Box<Self>) -> Box<[u8]> {\n", "file_path": "digest/src/digest.rs", "rank": 57, "score": 127858.22863093509 }, { "content": "/// Trait which marks a type as being a block cipher.\n\npub trait BlockCipher {\n\n /// Size of the block in bytes\n\n type BlockSize: ArrayLength<u8>;\n\n\n\n /// Number of blocks which can be processed in parallel by\n\n /// cipher implementation\n\n type ParBlocks: ArrayLength<Block<Self>>;\n\n}\n\n\n", "file_path": "cipher/src/block.rs", "rank": 58, "score": 127857.52635756176 }, { "content": "/// Synchronous stream cipher core trait.\n\npub trait StreamCipher {\n\n /// Apply keystream to the data.\n\n ///\n\n /// It will XOR generated keystream with the data, which can be both\n\n /// encryption and decryption.\n\n ///\n\n /// # Panics\n\n /// If end of the keystream will be reached with the given data length,\n\n /// method will panic without modifying the provided `data`.\n\n #[inline]\n\n fn apply_keystream(&mut self, data: &mut [u8]) {\n\n self.try_apply_keystream(data).unwrap();\n\n }\n\n\n\n /// Apply keystream to the data, but return an error if end of a keystream\n\n /// will be reached.\n\n ///\n\n /// If end of the keystream will be achieved with the given data length,\n\n /// method will return `Err(LoopError)` without modifying provided `data`.\n\n fn try_apply_keystream(&mut self, data: &mut [u8]) -> Result<(), LoopError>;\n\n}\n\n\n", "file_path": "cipher/src/stream.rs", "rank": 59, "score": 127857.52635756176 }, { "content": "/// The trait for helping to convert to a field element.\n\npub trait FromOkm {\n\n /// The number of bytes needed to convert to a field element.\n\n type Length: ArrayLength<u8>;\n\n\n\n /// Convert a byte sequence into a field element.\n\n fn from_okm(data: &GenericArray<u8, Self::Length>) -> Self;\n\n}\n\n\n", "file_path": "elliptic-curve/src/hash2field.rs", "rank": 60, "score": 127857.52635756176 }, { "content": "/// Marker trait for cryptographic hash functions.\n\npub trait HashMarker {}\n\n\n", "file_path": "digest/src/digest.rs", "rank": 61, "score": 127857.52635756176 }, { "content": "/// Trait for types which can be initialized from a block cipher.\n\npub trait FromBlockCipher {\n\n /// Block cipher used for initialization.\n\n type BlockCipher: BlockCipher;\n\n\n\n /// Initialize instance from block cipher.\n\n fn from_block_cipher(cipher: Self::BlockCipher) -> 
Self;\n\n}\n\n\n", "file_path": "cipher/src/block.rs", "rank": 62, "score": 127857.48009629372 }, { "content": "/// Trait for types which implement a block cipher [mode of operation][1].\n\n///\n\n/// [1]: https://en.wikipedia.org/wiki/Block_cipher_mode_of_operation\n\npub trait BlockMode {\n\n /// Size of the block in bytes\n\n type BlockSize: ArrayLength<u8>;\n\n}\n\n\n", "file_path": "cipher/src/mode.rs", "rank": 63, "score": 127857.06288763299 }, { "content": "#[rustfmt::skip]\n\npub trait SeekNum:\n\n Sized\n\n + TryInto<u8> + TryFrom<u8> + TryInto<i8> + TryFrom<i8>\n\n + TryInto<u16> + TryFrom<u16> + TryInto<i16> + TryFrom<i16>\n\n + TryInto<u32> + TryFrom<u32> + TryInto<i32> + TryFrom<i32>\n\n + TryInto<u64> + TryFrom<u64> + TryInto<i64> + TryFrom<i64>\n\n + TryInto<u128> + TryFrom<u128> + TryInto<i128> + TryFrom<i128>\n\n + TryInto<usize> + TryFrom<usize> + TryInto<isize> + TryFrom<isize>\n\n{\n\n /// Try to get position for block number `block`, byte position inside\n\n /// block `byte`, and block size `bs`.\n\n fn from_block_byte<T: SeekNum>(block: T, byte: u8, bs: u8) -> Result<Self, OverflowError>;\n\n\n\n /// Try to get block number and bytes position for given block size `bs`.\n\n #[allow(clippy::wrong_self_convention)]\n\n fn to_block_byte<T: SeekNum>(self, bs: u8) -> Result<(T, u8), OverflowError>;\n\n}\n\n\n\nmacro_rules! impl_seek_num {\n\n {$($t:ty )*} => {\n", "file_path": "cipher/src/stream.rs", "rank": 64, "score": 127852.80415306274 }, { "content": "/// Perform an inversion on a field element (i.e. base field element or scalar)\n\npub trait Invert {\n\n /// Field element type\n\n type Output;\n\n\n\n /// Invert a field element.\n\n fn invert(&self) -> CtOption<Self::Output>;\n\n}\n\n\n\n#[cfg(feature = \"arithmetic\")]\n\nimpl<F: ff::Field> Invert for F {\n\n type Output = F;\n\n\n\n fn invert(&self) -> CtOption<F> {\n\n ff::Field::invert(self)\n\n }\n\n}\n\n\n\n/// Linear combination.\n\n///\n\n/// This trait enables crates to provide an optimized implementation of\n\n/// linear combinations (e.g. Shamir's Trick), or otherwise provides a default\n\n/// non-optimized implementation.\n\n// TODO(tarcieri): replace this with a trait from the `group` crate? 
(see zkcrypto/group#25)\n", "file_path": "elliptic-curve/src/ops.rs", "rank": 65, "score": 127852.80415306274 }, { "content": "/// Instantiate either a stateless [`Aead`] or stateful [`AeadMut`] algorithm.\n\npub trait NewAead {\n\n /// The size of the key array required by this algorithm.\n\n type KeySize: ArrayLength<u8>;\n\n\n\n /// Create a new AEAD instance with the given key.\n\n fn new(key: &Key<Self>) -> Self;\n\n\n\n /// Create new AEAD instance from key given as a byte slice..\n\n ///\n\n /// Default implementation will accept only keys with length equal to `KeySize`.\n\n fn new_from_slice(key: &[u8]) -> Result<Self>\n\n where\n\n Self: Sized,\n\n {\n\n if key.len() != Self::KeySize::to_usize() {\n\n Err(Error)\n\n } else {\n\n Ok(Self::new(GenericArray::from_slice(key)))\n\n }\n\n }\n", "file_path": "aead/src/lib.rs", "rank": 66, "score": 127852.80415306274 }, { "content": "/// Is this scalar greater than n / 2?\n\n///\n\n/// # Returns\n\n///\n\n/// - For scalars 0 through n / 2: `Choice::from(0)`\n\n/// - For scalars (n / 2) + 1 through n - 1: `Choice::from(1)`\n\npub trait IsHigh {\n\n /// Is this scalar greater than or equal to n / 2?\n\n fn is_high(&self) -> Choice;\n\n}\n", "file_path": "elliptic-curve/src/scalar.rs", "rank": 67, "score": 127852.80415306274 }, { "content": "/// Trait for seekable stream ciphers.\n\n///\n\n/// Methods of this trait are generic over the [`SeekNum`] trait, which is\n\n/// implemented for primitive numeric types, i.e.: `i/u8`, `i/u16`, `i/u32`,\n\n/// `i/u64`, `i/u128`, and `i/usize`.\n\npub trait StreamCipherSeek {\n\n /// Try to get current keystream position\n\n ///\n\n /// Returns [`LoopError`] if position can not be represented by type `T`\n\n fn try_current_pos<T: SeekNum>(&self) -> Result<T, OverflowError>;\n\n\n\n /// Try to seek to the given position\n\n ///\n\n /// Returns [`LoopError`] if provided position value is bigger than\n\n /// keystream length.\n\n fn try_seek<T: SeekNum>(&mut self, pos: T) -> Result<(), LoopError>;\n\n\n\n /// Get current keystream position\n\n ///\n\n /// # Panics\n\n /// If position can not be represented by type `T`\n\n fn current_pos<T: SeekNum>(&self) -> T {\n\n self.try_current_pos().unwrap()\n\n }\n\n\n\n /// Seek to the given position\n\n ///\n\n /// # Panics\n\n /// If provided position value is bigger than keystream length\n\n fn seek<T: SeekNum>(&mut self, pos: T) {\n\n self.try_seek(pos).unwrap()\n\n }\n\n}\n\n\n", "file_path": "cipher/src/stream.rs", "rank": 68, "score": 125820.80197822303 }, { "content": "/// Trait which stores algorithm name constant, used in `Debug` implementations.\n\npub trait AlgorithmName {\n\n /// Write algorithm name into `f`.\n\n fn write_alg_name(f: &mut fmt::Formatter<'_>) -> fmt::Result;\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 69, "score": 125820.3523707598 }, { "content": "/// Types which use another type for initialization.\n\n///\n\n/// Generally it's used indirectly via [`InnerInit`] or [`InnerIvInit`].\n\npub trait InnerUser {\n\n /// Inner type.\n\n type Inner;\n\n}\n\n\n", "file_path": "crypto-common/src/lib.rs", "rank": 70, "score": 125816.35596910473 }, { "content": "/// Trait for determining the parity of the field\n\npub trait Sgn0 {\n\n /// Return the parity of the field\n\n /// 1 == negative\n\n /// 0 == non-negative\n\n fn sgn0(&self) -> Choice;\n\n}\n\n\n", "file_path": "elliptic-curve/src/hash2curve/osswu.rs", "rank": 71, "score": 125816.04727527237 }, { "content": "/// Asynchronous stream cipher core trait.\n\npub trait 
AsyncStreamCipher {\n\n /// Encrypt data in place.\n\n fn encrypt(&mut self, data: &mut [u8]);\n\n\n\n /// Decrypt data in place.\n\n fn decrypt(&mut self, data: &mut [u8]);\n\n}\n\n\n\nimpl<C: StreamCipher> StreamCipher for &mut C {\n\n #[inline]\n\n fn apply_keystream(&mut self, data: &mut [u8]) {\n\n C::apply_keystream(self, data);\n\n }\n\n\n\n #[inline]\n\n fn try_apply_keystream(&mut self, data: &mut [u8]) -> Result<(), LoopError> {\n\n C::try_apply_keystream(self, data)\n\n }\n\n}\n\n\n\n/// Trait implemented for numeric types which can be used with the\n\n/// [`StreamCipherSeek`] trait.\n\n///\n\n/// This trait is implemented for primitive numeric types, i.e. `i/u8`,\n\n/// `u16`, `u32`, `u64`, `u128`, `usize`, and `i32`. It is not intended\n\n/// to be implemented in third-party crates.\n", "file_path": "cipher/src/stream.rs", "rank": 72, "score": 125816.00008948946 }, { "content": "/// Trait for types which can be initialized from a block cipher and nonce.\n\npub trait FromBlockCipherNonce {\n\n /// Block cipher used for initialization.\n\n type BlockCipher: BlockCipher;\n\n /// Nonce size in bytes.\n\n type NonceSize: ArrayLength<u8>;\n\n\n\n /// Initialize instance from block cipher and nonce.\n\n fn from_block_cipher_nonce(\n\n cipher: Self::BlockCipher,\n\n nonce: &GenericArray<u8, Self::NonceSize>,\n\n ) -> Self;\n\n}\n\n\n\nimpl<T> FromKeyNonce for T\n\nwhere\n\n T: FromBlockCipherNonce,\n\n T::BlockCipher: FromKey,\n\n{\n\n type KeySize = <T::BlockCipher as FromKey>::KeySize;\n\n type NonceSize = T::NonceSize;\n", "file_path": "cipher/src/block.rs", "rank": 73, "score": 125815.9084645607 }, { "content": " pub trait Sealed {}\n\n}\n\n\n\nimpl<T> sealed::Sealed for CoreWrapper<T>\n\nwhere\n\n T: BufferKindUser,\n\n T::BlockSize: IsLess<U256>,\n\n Le<T::BlockSize, U256>: NonZero,\n\n{\n\n}\n\n\n\nimpl<T> CoreProxy for CoreWrapper<T>\n\nwhere\n\n T: BufferKindUser,\n\n T::BlockSize: IsLess<U256>,\n\n Le<T::BlockSize, U256>: NonZero,\n\n{\n\n type Core = T;\n\n}\n", "file_path": "digest/src/core_api/wrapper.rs", "rank": 74, "score": 125811.27788499044 }, { "content": "/// Point compression settings.\n\npub trait PointCompression {\n\n /// Should point compression be applied by default?\n\n const COMPRESS_POINTS: bool;\n\n}\n\n\n", "file_path": "elliptic-curve/src/point.rs", "rank": 75, "score": 125811.27788499044 }, { "content": "/// Point compaction settings.\n\npub trait PointCompaction {\n\n /// Should point compaction be applied by default?\n\n const COMPACT_POINTS: bool;\n\n}\n", "file_path": "elliptic-curve/src/point.rs", "rank": 76, "score": 125811.27788499044 }, { "content": "/// Low-level Elliptic Curve Diffie-Hellman (ECDH) function.\n\n///\n\n/// Whenever possible, we recommend using the high-level ECDH ephemeral API\n\n/// provided by [`EphemeralSecret`].\n\n///\n\n/// However, if you are implementing a protocol which requires a static scalar\n\n/// value as part of an ECDH exchange, this API can be used to compute a\n\n/// [`SharedSecret`] from that value.\n\n///\n\n/// Note that this API operates on the low-level [`NonZeroScalar`] and\n\n/// [`AffinePoint`] types. 
If you are attempting to use the higher-level\n\n/// [`SecretKey`][`crate::SecretKey`] and [`PublicKey`] types, you will\n\n/// need to use the following conversions:\n\n///\n\n/// ```ignore\n\n/// let shared_secret = elliptic_curve::ecdh::diffie_hellman(\n\n/// secret_key.to_nonzero_scalar(),\n\n/// public_key.as_affine()\n\n/// );\n\n/// ```\n\npub fn diffie_hellman<C>(\n\n secret_key: impl Borrow<NonZeroScalar<C>>,\n\n public_key: impl Borrow<AffinePoint<C>>,\n\n) -> SharedSecret<C>\n\nwhere\n\n C: Curve + ProjectiveArithmetic,\n\n{\n\n let public_point = ProjectivePoint::<C>::from(*public_key.borrow());\n\n let secret_point = (public_point * secret_key.borrow().as_ref()).to_affine();\n\n SharedSecret::new(secret_point)\n\n}\n\n\n\n/// Ephemeral Diffie-Hellman Secret.\n\n///\n\n/// These are ephemeral \"secret key\" values which are deliberately designed\n\n/// to avoid being persisted.\n\n///\n\n/// To perform an ephemeral Diffie-Hellman exchange, do the following:\n\n///\n\n/// - Have each participant generate an [`EphemeralSecret`] value\n", "file_path": "elliptic-curve/src/ecdh.rs", "rank": 77, "score": 125075.97040032597 }, { "content": "/// Low-level STREAM implementation.\n\n///\n\n/// This trait provides a particular \"flavor\" of STREAM, as there are\n\n/// different ways the specifics of the construction can be implemented.\n\n///\n\n/// Deliberately immutable and stateless to permit parallel operation.\n\npub trait StreamPrimitive<A>\n\nwhere\n\n A: AeadInPlace,\n\n A::NonceSize: Sub<Self::NonceOverhead>,\n\n NonceSize<A, Self>: ArrayLength<u8>,\n\n{\n\n /// Number of bytes this STREAM primitive requires from the nonce.\n\n type NonceOverhead: ArrayLength<u8>;\n\n\n\n /// Type used as the STREAM counter.\n\n type Counter: AddAssign + Copy + Default + Eq;\n\n\n\n /// Value to use when incrementing the STREAM counter (i.e. 
one)\n\n const COUNTER_INCR: Self::Counter;\n\n\n\n /// Maximum value of the STREAM counter.\n\n const COUNTER_MAX: Self::Counter;\n\n\n\n /// Encrypt an AEAD message in-place at the given position in the STREAM.\n\n fn encrypt_in_place(\n", "file_path": "aead/src/stream.rs", "rank": 78, "score": 124242.87537045681 }, { "content": "/// Are all of the given bytes allowed in a [`Value`]?\n\nfn assert_valid_value(input: &str) -> Result<()> {\n\n for c in input.chars() {\n\n if !is_char_valid(c) {\n\n return Err(Error::ParamValueInvalid(InvalidValue::InvalidChar(c)));\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "password-hash/src/value.rs", "rank": 79, "score": 124181.69003629092 }, { "content": "/// Trait for converting field elements into a point\n\n/// via a mapping method like Simplified Shallue-van de Woestijne-Ulas\n\n/// or Elligator\n\npub trait MapToCurve {\n\n /// The output point\n\n type Output;\n\n\n\n /// Map a field element into a point\n\n fn map_to_curve(&self) -> Self::Output;\n\n}\n", "file_path": "elliptic-curve/src/hash2curve/map2curve.rs", "rank": 80, "score": 123876.78185928047 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"arithmetic\")))]\n\npub trait PrimeCurveArithmetic:\n\n PrimeCurve + ProjectiveArithmetic<ProjectivePoint = Self::CurveGroup>\n\n{\n\n /// Prime order elliptic curve group.\n\n type CurveGroup: group::prime::PrimeCurve<Affine = <Self as AffineArithmetic>::AffinePoint>;\n\n}\n\n\n\n/// Elliptic curve with projective arithmetic implementation.\n", "file_path": "elliptic-curve/src/arithmetic.rs", "rank": 81, "score": 123872.52312471022 }, { "content": "/// Expander that, call `read` until enough bytes have been consumed.\n\npub trait Expander {\n\n /// Fill the array with the expanded bytes\n\n fn fill_bytes(&mut self, okm: &mut [u8]);\n\n}\n\n\n\n/// The domain separation tag\n\n///\n\n/// Implements [section 5.4.3 of `draft-irtf-cfrg-hash-to-curve-13`][dst].\n\n///\n\n/// [dst]: https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-13#section-5.4.3\n\npub(crate) enum Domain<'a, L>\n\nwhere\n\n L: ArrayLength<u8> + IsLess<U256>,\n\n{\n\n /// > 255\n\n Hashed(GenericArray<u8, L>),\n\n /// <= 255\n\n Array(&'a [u8]),\n\n}\n\n\n", "file_path": "elliptic-curve/src/hash2field/expand_msg.rs", "rank": 82, "score": 123872.52312471022 }, { "content": "#[cfg(feature = \"digest-preview\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"digest-preview\")))]\n\npub trait PrehashSignature: Signature {\n\n /// Preferred `Digest` algorithm to use when computing this signature type.\n\n type Digest: digest::Digest;\n\n}\n", "file_path": "signature/src/signature.rs", "rank": 83, "score": 122197.27224555463 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\npub trait Aead: AeadCore {\n\n /// Encrypt the given plaintext payload, and return the resulting\n\n /// ciphertext as a vector of bytes.\n\n ///\n\n /// The [`Payload`] type can be used to provide Additional Associated Data\n\n /// (AAD) along with the message: this is an optional bytestring which is\n\n /// not encrypted, but *is* authenticated along with the message. 
Failure\n\n /// to pass the same AAD that was used during encryption will cause\n\n /// decryption to fail, which is useful if you would like to \"bind\" the\n\n /// ciphertext to some other identifier, like a digital signature key\n\n /// or other identifier.\n\n ///\n\n /// If you don't care about AAD and just want to encrypt a plaintext\n\n /// message, `&[u8]` will automatically be coerced into a `Payload`:\n\n ///\n\n /// ```nobuild\n\n /// let plaintext = b\"Top secret message, handle with care\";\n\n /// let ciphertext = cipher.encrypt(nonce, plaintext);\n\n /// ```\n\n ///\n", "file_path": "aead/src/lib.rs", "rank": 84, "score": 122197.27224555463 }, { "content": "/// Trait for deserializing a value from a SEC1 encoded curve point.\n\n///\n\n/// This is intended for use with the `AffinePoint` type for a given elliptic curve.\n\npub trait FromEncodedPoint<C>\n\nwhere\n\n Self: Sized,\n\n C: Curve,\n\n FieldSize<C>: ModulusSize,\n\n{\n\n /// Deserialize the type this trait is impl'd on from an [`EncodedPoint`].\n\n fn from_encoded_point(point: &EncodedPoint<C>) -> CtOption<Self>;\n\n}\n\n\n", "file_path": "elliptic-curve/src/sec1.rs", "rank": 85, "score": 120267.02010210234 }, { "content": "/// Trait for serializing a value to a SEC1 encoded curve point.\n\n///\n\n/// This is intended for use with the `AffinePoint` type for a given elliptic curve.\n\npub trait ToEncodedPoint<C>\n\nwhere\n\n C: Curve,\n\n FieldSize<C>: ModulusSize,\n\n{\n\n /// Serialize this value as a SEC1 [`EncodedPoint`], optionally applying\n\n /// point compression.\n\n fn to_encoded_point(&self, compress: bool) -> EncodedPoint<C>;\n\n}\n\n\n", "file_path": "elliptic-curve/src/sec1.rs", "rank": 86, "score": 120267.02010210234 }, { "content": "/// In-place stateless AEAD trait.\n\n///\n\n/// This trait is both object safe and has no dependencies on `alloc` or `std`.\n\npub trait AeadInPlace: AeadCore {\n\n /// Encrypt the given buffer containing a plaintext message in-place.\n\n ///\n\n /// The buffer must have sufficient capacity to store the ciphertext\n\n /// message, which will always be larger than the original plaintext.\n\n /// The exact size needed is cipher-dependent, but generally includes\n\n /// the size of an authentication tag.\n\n ///\n\n /// Returns an error if the buffer has insufficient capacity to store the\n\n /// resulting ciphertext message.\n\n fn encrypt_in_place(\n\n &self,\n\n nonce: &Nonce<Self>,\n\n associated_data: &[u8],\n\n buffer: &mut dyn Buffer,\n\n ) -> Result<()> {\n\n let tag = self.encrypt_in_place_detached(nonce, associated_data, buffer.as_mut())?;\n\n buffer.extend_from_slice(tag.as_slice())?;\n\n Ok(())\n\n }\n", "file_path": "aead/src/lib.rs", "rank": 87, "score": 120263.8214072178 }, { "content": "#[async_trait]\n\npub trait AsyncSigner<S>\n\nwhere\n\n Self: Send + Sync,\n\n S: Signature + Send + 'static,\n\n{\n\n /// Attempt to sign the given message, returning a digital signature on\n\n /// success, or an error if something went wrong.\n\n ///\n\n /// The main intended use case for signing errors is when communicating\n\n /// with external signers, e.g. 
cloud KMS, HSMs, or other hardware tokens.\n\n async fn sign_async(&self, msg: &[u8]) -> Result<S, Error>;\n\n}\n\n\n\n#[async_trait]\n\nimpl<S, T> AsyncSigner<S> for T\n\nwhere\n\n S: Signature + Send + 'static,\n\n T: signature::Signer<S> + Send + Sync,\n\n{\n\n async fn sign_async(&self, msg: &[u8]) -> Result<S, Error> {\n\n self.try_sign(msg)\n\n }\n\n}\n\n\n\n/// Asynchronously sign the given prehashed message [`Digest`] using `Self`.\n\n///\n\n/// This trait is an async equivalent of the [`signature::DigestSigner`] trait.\n", "file_path": "signature/async/src/lib.rs", "rank": 88, "score": 120263.38413378398 }, { "content": "/// Marker trait for elliptic curves with prime order.\n\npub trait PrimeCurve: Curve {}\n\n\n\n/// Size of field elements of this elliptic curve.\n\npub type FieldSize<C> = <<C as Curve>::UInt as bigint::ArrayEncoding>::ByteSize;\n\n\n\n/// Byte representation of a base/scalar field element of a given curve.\n\npub type FieldBytes<C> = GenericArray<u8, FieldSize<C>>;\n\n\n\n/// Affine point type for a given curve with a [`ProjectiveArithmetic`]\n\n/// implementation.\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"arithmetic\")))]\n\n#[cfg(feature = \"arithmetic\")]\n\npub type AffinePoint<C> = <C as AffineArithmetic>::AffinePoint;\n\n\n\n/// Projective point type for a given curve with a [`ProjectiveArithmetic`]\n\n/// implementation.\n\n#[cfg(feature = \"arithmetic\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"arithmetic\")))]\n\npub type ProjectivePoint<C> = <C as ProjectiveArithmetic>::ProjectivePoint;\n\n\n\n/// Associate an [`ObjectIdentifier`][`pkcs8::ObjectIdentifier`] (OID) with an\n\n/// elliptic curve algorithm implementation.\n\n///\n\n/// This is used as as the `parameters` of an `AlgorithmIdentifier` as\n\n/// described in RFC 5280 Section 4.1.1.2:\n\n/// <https://tools.ietf.org/html/rfc5280#section-4.1.1.2>\n", "file_path": "elliptic-curve/src/lib.rs", "rank": 89, "score": 120263.19342850539 }, { "content": "/// The [`UniversalHash`] trait defines a generic interface for universal hash\n\n/// functions.\n\npub trait UniversalHash: Clone {\n\n /// Size of the inputs to and outputs from the universal hash function\n\n type BlockSize: ArrayLength<u8>;\n\n\n\n /// Input a block into the universal hash function\n\n fn update(&mut self, block: &Block<Self>);\n\n\n\n /// Input data into the universal hash function. 
If the length of the\n\n /// data is not a multiple of the block size, the remaining data is\n\n /// padded with zeroes up to the `BlockSize`.\n\n ///\n\n /// This approach is frequently used by AEAD modes which use\n\n /// Message Authentication Codes (MACs) based on universal hashing.\n\n fn update_padded(&mut self, data: &[u8]) {\n\n let mut chunks = data.chunks_exact(Self::BlockSize::to_usize());\n\n\n\n for chunk in &mut chunks {\n\n self.update(GenericArray::from_slice(chunk));\n\n }\n\n\n", "file_path": "universal-hash/src/lib.rs", "rank": 90, "score": 120263.05992783658 }, { "content": "#[cfg(feature = \"voprf\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"voprf\")))]\n\npub trait VoprfParameters: Curve {\n\n /// The `ID` parameter which identifies a particular elliptic curve\n\n /// as defined in [section 4 of `draft-irtf-cfrg-voprf-08`][voprf].\n\n ///\n\n /// [voprf]: https://www.ietf.org/archive/id/draft-irtf-cfrg-voprf-08.html#section-4\n\n const ID: u16;\n\n\n\n /// The `Hash` parameter which assigns a particular hash function to this\n\n /// ciphersuite as defined in [section 4 of `draft-irtf-cfrg-voprf-08`][voprf].\n\n ///\n\n /// [voprf]: https://www.ietf.org/archive/id/draft-irtf-cfrg-voprf-08.html#section-4\n\n type Hash: digest::Digest;\n\n}\n", "file_path": "elliptic-curve/src/lib.rs", "rank": 91, "score": 120258.51748527441 }, { "content": "#[cfg(feature = \"alloc\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"alloc\")))]\n\npub trait AeadMut: AeadCore {\n\n /// Encrypt the given plaintext slice, and return the resulting ciphertext\n\n /// as a vector of bytes.\n\n ///\n\n /// See notes on [`Aead::encrypt()`] about allowable message payloads and\n\n /// Associated Additional Data (AAD).\n\n fn encrypt<'msg, 'aad>(\n\n &mut self,\n\n nonce: &Nonce<Self>,\n\n plaintext: impl Into<Payload<'msg, 'aad>>,\n\n ) -> Result<Vec<u8>>;\n\n\n\n /// Decrypt the given ciphertext slice, and return the resulting plaintext\n\n /// as a vector of bytes.\n\n ///\n\n /// See notes on [`Aead::encrypt()`] and [`Aead::decrypt()`] about allowable\n\n /// message payloads and Associated Additional Data (AAD).\n\n fn decrypt<'msg, 'aad>(\n\n &mut self,\n\n nonce: &Nonce<Self>,\n", "file_path": "aead/src/lib.rs", "rank": 92, "score": 120258.51748527441 }, { "content": "#[cfg(feature = \"pkcs8\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"pkcs8\")))]\n\npub trait AlgorithmParameters: Curve {\n\n /// Object Identifier (OID) for this curve\n\n const OID: pkcs8::ObjectIdentifier;\n\n\n\n /// Get the [`pkcs8::AlgorithmIdentifier`] for this curve\n\n fn algorithm_identifier() -> pkcs8::AlgorithmIdentifier<'static> {\n\n pkcs8::AlgorithmIdentifier {\n\n oid: ALGORITHM_OID,\n\n parameters: Some((&Self::OID).into()),\n\n }\n\n }\n\n}\n\n\n\n/// Elliptic curve parameters used by VOPRF.\n", "file_path": "elliptic-curve/src/lib.rs", "rank": 93, "score": 120258.51748527441 }, { "content": "#[cfg(feature = \"arithmetic\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"arithmetic\")))]\n\npub trait ScalarArithmetic: Curve {\n\n /// Scalar field type.\n\n ///\n\n /// Note: the following bounds are provided by [`ff::Field`]:\n\n /// - `'static`\n\n /// - [`Copy`]\n\n /// - [`Clone`]\n\n /// - [`ConditionallySelectable`]\n\n /// - [`ConstantTimeEq`]\n\n /// - [`Debug`]\n\n /// - [`Default`]\n\n /// - [`Send`]\n\n /// - [`Sync`]\n\n type Scalar: DefaultIsZeroes\n\n + From<ScalarCore<Self>>\n\n + Into<FieldBytes<Self>>\n\n + Into<Self::UInt>\n\n + IsHigh\n\n + ff::Field\n\n + ff::PrimeField<Repr = 
FieldBytes<Self>>;\n\n}\n", "file_path": "elliptic-curve/src/arithmetic.rs", "rank": 94, "score": 120258.51748527441 }, { "content": "/// Decrypt-only functionality for block ciphers.\n\npub trait BlockDecrypt: BlockCipher {\n\n /// Decrypt block in-place\n\n fn decrypt_block(&self, block: &mut Block<Self>);\n\n\n\n /// Decrypt several blocks in parallel using instruction level parallelism\n\n /// if possible.\n\n ///\n\n /// If `ParBlocks` equals to 1 it's equivalent to `decrypt_block`.\n\n #[inline]\n\n fn decrypt_par_blocks(&self, blocks: &mut ParBlocks<Self>) {\n\n for block in blocks.iter_mut() {\n\n self.decrypt_block(block);\n\n }\n\n }\n\n\n\n /// Decrypt a slice of blocks, leveraging parallelism when available.\n\n #[inline]\n\n fn decrypt_blocks(&self, mut blocks: &mut [Block<Self>]) {\n\n let pb = Self::ParBlocks::to_usize();\n\n\n", "file_path": "cipher/src/block.rs", "rank": 95, "score": 120258.51748527441 }, { "content": "/// Encrypt-only functionality for block ciphers.\n\npub trait BlockEncrypt: BlockCipher {\n\n /// Encrypt block in-place\n\n fn encrypt_block(&self, block: &mut Block<Self>);\n\n\n\n /// Encrypt several blocks in parallel using instruction level parallelism\n\n /// if possible.\n\n ///\n\n /// If `ParBlocks` equals to 1 it's equivalent to `encrypt_block`.\n\n #[inline]\n\n fn encrypt_par_blocks(&self, blocks: &mut ParBlocks<Self>) {\n\n for block in blocks.iter_mut() {\n\n self.encrypt_block(block);\n\n }\n\n }\n\n\n\n /// Encrypt a slice of blocks, leveraging parallelism when available.\n\n #[inline]\n\n fn encrypt_blocks(&self, mut blocks: &mut [Block<Self>]) {\n\n let pb = Self::ParBlocks::to_usize();\n\n\n", "file_path": "cipher/src/block.rs", "rank": 96, "score": 120258.51748527441 }, { "content": "#[cfg(feature = \"arithmetic\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"arithmetic\")))]\n\npub trait LinearCombination: Group {\n\n /// Calculates `x * k + y * l`.\n\n fn lincomb(x: &Self, k: &Self::Scalar, y: &Self, l: &Self::Scalar) -> Self {\n\n (*x * k) + (*y * l)\n\n }\n\n}\n\n\n", "file_path": "elliptic-curve/src/ops.rs", "rank": 97, "score": 120258.51748527441 }, { "content": "#[cfg_attr(docsrs, doc(cfg(feature = \"jwk\")))]\n\npub trait JwkParameters: Curve {\n\n /// The `crv` parameter which identifies a particular elliptic curve\n\n /// as defined in RFC 7518 Section 6.2.1.1:\n\n /// <https://tools.ietf.org/html/rfc7518#section-6.2.1.1>\n\n ///\n\n /// Curve values are registered in the IANA \"JSON Web Key Elliptic Curve\"\n\n /// registry defined in RFC 7518 Section 7.6:\n\n /// <https://tools.ietf.org/html/rfc7518#section-7.6>\n\n const CRV: &'static str;\n\n}\n\n\n\n/// JSON Web Key (JWK) with a `kty` of `\"EC\"` (elliptic curve).\n\n///\n\n/// Specified in [RFC 7518 Section 6: Cryptographic Algorithms for Keys][1].\n\n///\n\n/// This type can represent either a public/private keypair, or just a\n\n/// public key, depending on whether or not the `d` parameter is present.\n\n///\n\n/// [1]: https://tools.ietf.org/html/rfc7518#section-6\n\n// TODO(tarcieri): eagerly decode or validate `x`, `y`, and `d` as Base64\n", "file_path": "elliptic-curve/src/jwk.rs", "rank": 98, "score": 120258.51748527441 }, { "content": "/// Verify the provided message bytestring using `Self` (e.g. 
a public key)\n\npub trait Verifier<S: Signature> {\n\n /// Use `Self` to verify that the provided signature for a given message\n\n /// bytestring is authentic.\n\n ///\n\n /// Returns `Error` if it is inauthentic, or otherwise returns `()`.\n\n fn verify(&self, msg: &[u8], signature: &S) -> Result<(), Error>;\n\n}\n\n\n\n/// Verify the provided signature for the given prehashed message [`Digest`]\n\n/// is authentic.\n\n///\n\n/// ## Notes\n\n///\n\n/// This trait is primarily intended for signature algorithms based on the\n\n/// [Fiat-Shamir heuristic], a method for converting an interactive\n\n/// challenge/response-based proof-of-knowledge protocol into an offline\n\n/// digital signature through the use of a random oracle, i.e. a digest\n\n/// function.\n\n///\n\n/// The security of such protocols critically rests upon the inability of\n\n/// an attacker to solve for the output of the random oracle, as generally\n\n/// otherwise such signature algorithms are a system of linear equations and\n\n/// therefore doing so would allow the attacker to trivially forge signatures.\n\n///\n\n/// To prevent misuse which would potentially allow this to be possible, this\n\n/// API accepts a [`Digest`] instance, rather than a raw digest value.\n\n///\n\n/// [Fiat-Shamir heuristic]: https://en.wikipedia.org/wiki/Fiat%E2%80%93Shamir_heuristic\n", "file_path": "signature/src/verifier.rs", "rank": 99, "score": 119095.3349902051 } ]
Rust
rafx-api/src/queue.rs
DavidVonDerau/rafx
5d42caed4bd7fcb5d32e3e26021669cf60071abd
#[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] use crate::empty::RafxQueueEmpty; #[cfg(feature = "rafx-metal")] use crate::metal::RafxQueueMetal; #[cfg(feature = "rafx-vulkan")] use crate::vulkan::RafxQueueVulkan; use crate::{ RafxCommandBuffer, RafxCommandPool, RafxCommandPoolDef, RafxDeviceContext, RafxFence, RafxPresentSuccessResult, RafxQueueType, RafxResult, RafxSemaphore, RafxSwapchain, }; #[derive(Clone, Debug)] pub enum RafxQueue { #[cfg(feature = "rafx-vulkan")] Vk(RafxQueueVulkan), #[cfg(feature = "rafx-metal")] Metal(RafxQueueMetal), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] Empty(RafxQueueEmpty), } impl RafxQueue { pub fn device_context(&self) -> RafxDeviceContext { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => RafxDeviceContext::Vk(inner.device_context().clone()), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => RafxDeviceContext::Metal(inner.device_context().clone()), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => RafxDeviceContext::Empty(inner.device_context().clone()), } } pub fn queue_id(&self) -> u32 { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.queue_id(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.queue_id(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.queue_id(), } } pub fn queue_type(&self) -> RafxQueueType { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.queue_type(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.queue_type(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.queue_type(), } } pub fn create_command_pool( &self, command_pool_def: &RafxCommandPoolDef, ) -> RafxResult<RafxCommandPool> { Ok(match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { RafxCommandPool::Vk(inner.create_command_pool(command_pool_def)?) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { RafxCommandPool::Metal(inner.create_command_pool(command_pool_def)?) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { RafxCommandPool::Empty(inner.create_command_pool(command_pool_def)?) 
} }) } pub fn submit( &self, command_buffers: &[&RafxCommandBuffer], wait_semaphores: &[&RafxSemaphore], signal_semaphores: &[&RafxSemaphore], signal_fence: Option<&RafxFence>, ) -> RafxResult<()> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.vk_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.vk_fence().unwrap()), ) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.metal_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.metal_fence().unwrap()), ) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.empty_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.empty_fence().unwrap()), ) } } } pub fn present( &self, swapchain: &RafxSwapchain, wait_semaphores: &[&RafxSemaphore], image_index: u32, ) -> RafxResult<RafxPresentSuccessResult> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); inner.present( swapchain.vk_swapchain().unwrap(), &wait_semaphores, image_index, ) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); inner.present( swapchain.metal_swapchain().unwrap(), &wait_semaphores, image_index, ) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); inner.present( swapchain.empty_swapchain().unwrap(), &wait_semaphores, image_index, ) } } } pub fn wait_for_queue_idle(&self) -> RafxResult<()> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.wait_for_queue_idle(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.wait_for_queue_idle(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.wait_for_queue_idle(), } } #[cfg(feature = "rafx-vulkan")] pub fn vk_queue(&self) -> Option<&RafxQueueVulkan> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => Some(inner), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(_inner) => None, #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(_inner) => None, } } #[cfg(feature = 
"rafx-metal")] pub fn metal_queue(&self) -> Option<&RafxQueueMetal> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(_inner) => None, #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => Some(inner), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => None, } } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] pub fn empty_queue(&self) -> Option<&RafxQueueEmpty> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(_inner) => None, #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => None, #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => Some(inner), } } }
#[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] use crate::empty::RafxQueueEmpty; #[cfg(feature = "rafx-metal")] use crate::metal::RafxQueueMetal; #[cfg(feature = "rafx-vulkan")] use crate::vulkan::RafxQueueVulkan; use crate::{ RafxCommandBuffer, RafxCommandPool, RafxCommandPoolDef, RafxDeviceContext, RafxFence, RafxPresentSuccessResult, RafxQueueType, RafxResult, RafxSemaphore, RafxSwapchain, }; #[derive(Clone, Debug)] pub enum RafxQueue { #[cfg(feature = "rafx-vulkan")] Vk(RafxQueueVulkan), #[cfg(feature = "rafx-metal")] Metal(RafxQueueMetal), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] Empty(RafxQueueEmpty), } impl RafxQueue { pub fn device_context(&self) -> RafxDeviceContext { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => RafxDeviceContext::Vk(inner.device_context().clone()), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => RafxDeviceContext::Metal(inner.device_context().clone()), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => RafxDeviceContext::Empty(inner.device_context().clone()), } } pub fn queue_id(&self) -> u32 { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.queue_id(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.queue_id(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.queue_id(), } } pub fn queue_type(
afx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.queue_type(), } } pub fn create_command_pool( &self, command_pool_def: &RafxCommandPoolDef, ) -> RafxResult<RafxCommandPool> { Ok(match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { RafxCommandPool::Vk(inner.create_command_pool(command_pool_def)?) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { RafxCommandPool::Metal(inner.create_command_pool(command_pool_def)?) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { RafxCommandPool::Empty(inner.create_command_pool(command_pool_def)?) } }) } pub fn submit( &self, command_buffers: &[&RafxCommandBuffer], wait_semaphores: &[&RafxSemaphore], signal_semaphores: &[&RafxSemaphore], signal_fence: Option<&RafxFence>, ) -> RafxResult<()> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.vk_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.vk_fence().unwrap()), ) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.metal_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.metal_fence().unwrap()), ) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { let command_buffers: Vec<_> = command_buffers .iter() .map(|x| x.empty_command_buffer().unwrap()) .collect(); let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); let signal_semaphores: Vec<_> = signal_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); inner.submit( &command_buffers, &wait_semaphores, &signal_semaphores, signal_fence.map(|x| x.empty_fence().unwrap()), ) } } } pub fn present( &self, swapchain: &RafxSwapchain, wait_semaphores: &[&RafxSemaphore], image_index: u32, ) -> RafxResult<RafxPresentSuccessResult> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.vk_semaphore().unwrap()) .collect(); inner.present( swapchain.vk_swapchain().unwrap(), &wait_semaphores, image_index, ) } #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.metal_semaphore().unwrap()) .collect(); inner.present( swapchain.metal_swapchain().unwrap(), &wait_semaphores, image_index, ) } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => { let wait_semaphores: Vec<_> = wait_semaphores .iter() .map(|x| x.empty_semaphore().unwrap()) .collect(); inner.present( swapchain.empty_swapchain().unwrap(), &wait_semaphores, image_index, ) } } } pub fn wait_for_queue_idle(&self) -> RafxResult<()> { match self { #[cfg(feature = 
"rafx-vulkan")] RafxQueue::Vk(inner) => inner.wait_for_queue_idle(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.wait_for_queue_idle(), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => inner.wait_for_queue_idle(), } } #[cfg(feature = "rafx-vulkan")] pub fn vk_queue(&self) -> Option<&RafxQueueVulkan> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => Some(inner), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(_inner) => None, #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(_inner) => None, } } #[cfg(feature = "rafx-metal")] pub fn metal_queue(&self) -> Option<&RafxQueueMetal> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(_inner) => None, #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => Some(inner), #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => None, } } #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] pub fn empty_queue(&self) -> Option<&RafxQueueEmpty> { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(_inner) => None, #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => None, #[cfg(any( feature = "rafx-empty", not(any(feature = "rafx-metal", feature = "rafx-vulkan")) ))] RafxQueue::Empty(inner) => Some(inner), } } }
&self) -> RafxQueueType { match self { #[cfg(feature = "rafx-vulkan")] RafxQueue::Vk(inner) => inner.queue_type(), #[cfg(feature = "rafx-metal")] RafxQueue::Metal(inner) => inner.queue_type(), #[cfg(any( feature = "r
function_block-random_span
[ { "content": "pub fn draw_skybox(\n\n resource_context: &ResourceContext,\n\n skybox_material: &ResourceArc<MaterialPassResource>,\n\n skybox_texture: &ResourceArc<ImageViewResource>,\n\n main_view: &RenderView,\n\n render_target_meta: &GraphicsPipelineRenderTargetMeta,\n\n command_buffer: &RafxCommandBuffer,\n\n render_phase_index: RenderPhaseIndex,\n\n) -> RafxResult<()> {\n\n // Get the pipeline\n\n let pipeline = resource_context\n\n .graphics_pipeline_cache()\n\n .get_or_create_graphics_pipeline(\n\n render_phase_index,\n\n &skybox_material,\n\n render_target_meta,\n\n &EMPTY_VERTEX_LAYOUT,\n\n )?;\n\n\n\n // Set up a descriptor set pointing at the image so we can sample from it\n", "file_path": "demo/src/features/skybox.rs", "rank": 0, "score": 206352.948271546 }, { "content": "pub fn round_size_up_to_alignment_u32(\n\n size: u32,\n\n required_alignment: u32,\n\n) -> u32 {\n\n assert!(required_alignment > 0);\n\n ((size + required_alignment - 1) / required_alignment) * required_alignment\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 1, "score": 200690.11452488246 }, { "content": "/// Creates a right-handed perspective projection matrix with [0,1] depth range.\n\npub fn perspective_rh(\n\n fov_y_radians: f32,\n\n aspect_ratio: f32,\n\n z_near: f32,\n\n z_far: f32,\n\n) -> glam::Mat4 {\n\n debug_assert!(z_near > 0.0 && z_far > 0.0);\n\n let (sin_fov, cos_fov) = (0.5 * fov_y_radians).sin_cos();\n\n let h = cos_fov / sin_fov;\n\n let w = h / aspect_ratio;\n\n let r = z_far / (z_near - z_far);\n\n glam::Mat4::from_cols(\n\n glam::Vec4::new(w, 0.0, 0.0, 0.0),\n\n glam::Vec4::new(0.0, h, 0.0, 0.0),\n\n glam::Vec4::new(0.0, 0.0, r, -1.0),\n\n glam::Vec4::new(0.0, 0.0, r * z_near, 0.0),\n\n )\n\n}\n\n\n", "file_path": "demo/src/features/mesh/shadow_map_resource.rs", "rank": 2, "score": 197249.75492244842 }, { "content": "pub fn vertex_buffer_adjusted_buffer_index(binding: u32) -> NSUInteger {\n\n // Argument buffers will be 0-4\n\n // vertex buffers will be 30 - n\n\n (30 - binding) as _\n\n}\n\n\n\npub(crate) fn resource_type_mtl_data_type(resource_type: RafxResourceType) -> Option<MTLDataType> {\n\n if resource_type.intersects(\n\n RafxResourceType::UNIFORM_BUFFER\n\n | RafxResourceType::BUFFER\n\n | RafxResourceType::BUFFER_READ_WRITE,\n\n ) {\n\n Some(MTLDataType::Pointer)\n\n } else if resource_type\n\n .intersects(RafxResourceType::TEXTURE | RafxResourceType::TEXTURE_READ_WRITE)\n\n {\n\n Some(MTLDataType::Texture)\n\n } else if resource_type.intersects(RafxResourceType::SAMPLER) {\n\n Some(MTLDataType::Sampler)\n\n } else {\n", "file_path": "rafx-api/src/backends/metal/internal/util.rs", "rank": 3, "score": 188435.1381396733 }, { "content": "pub fn default_daemon() -> distill::daemon::AssetDaemon {\n\n use crate::assets::*;\n\n\n\n distill::daemon::AssetDaemon::default()\n\n .with_importer(\"sampler\", SamplerImporter)\n\n .with_importer(\"material\", MaterialImporter)\n\n .with_importer(\"materialinstance\", MaterialInstanceImporter)\n\n .with_importer(\"compute\", ComputePipelineImporter)\n\n .with_importer(\"cookedshaderpackage\", ShaderImporterCooked)\n\n .with_importer(\"png\", ImageImporter)\n\n .with_importer(\"jpg\", ImageImporter)\n\n .with_importer(\"jpeg\", ImageImporter)\n\n .with_importer(\"tga\", ImageImporter)\n\n .with_importer(\"bmp\", ImageImporter)\n\n .with_importer(\"basis\", BasisImageImporter)\n\n}\n", "file_path": "rafx-assets/src/distill_impl/mod.rs", "rank": 4, "score": 175414.54310829 }, { "content": "pub fn create_mesh_extract_job() -> 
Box<dyn ExtractJob> {\n\n Box::new(MeshExtractJob {})\n\n}\n\n\n\n//\n\n// This is boiler-platish\n\n//\n\npub struct MeshRenderNode {\n\n pub mesh: Option<Handle<MeshAsset>>,\n\n pub transform: glam::Mat4,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct MeshRenderNodeHandle(pub DropSlabKey<MeshRenderNode>);\n\n\n\nimpl MeshRenderNodeHandle {\n\n pub fn as_raw_generic_handle(&self) -> GenericRenderNodeHandle {\n\n GenericRenderNodeHandle::new(\n\n <MeshRenderFeature as RenderFeature>::feature_index(),\n\n self.0.index(),\n", "file_path": "demo/src/features/mesh/mod.rs", "rank": 5, "score": 172499.07298896453 }, { "content": "pub fn create_imgui_extract_job() -> Box<dyn ExtractJob> {\n\n Box::new(ImGuiExtractJobImpl::new())\n\n}\n\n\n\n/// Per-pass \"global\" data\n\npub type ImGuiUniformBufferObject = shaders::imgui_vert::ArgsUniform;\n\n\n\nlazy_static::lazy_static! {\n\n pub static ref IMGUI_VERTEX_LAYOUT : VertexDataSetLayout = {\n\n use rafx::api::RafxFormat;\n\n\n\n let vertex = imgui::DrawVert {\n\n pos: Default::default(),\n\n col: Default::default(),\n\n uv: Default::default()\n\n };\n\n\n\n VertexDataLayout::build_vertex_layout(&vertex, |builder, vertex| {\n\n builder.add_member(&vertex.pos, \"POSITION\", RafxFormat::R32G32_SFLOAT);\n\n builder.add_member(&vertex.uv, \"TEXCOORD\", RafxFormat::R32G32_SFLOAT);\n", "file_path": "demo/src/features/imgui/mod.rs", "rank": 6, "score": 172499.07298896453 }, { "content": "pub fn create_text_extract_job() -> Box<dyn ExtractJob> {\n\n Box::new(TextExtractJob::new())\n\n}\n\n\n\npub type TextUniformBufferObject = shaders::text_vert::PerViewUboUniform;\n\n\n\n/// Vertex format for vertices sent to the GPU\n\n#[derive(Clone, Debug, Copy, Default)]\n\n#[repr(C)]\n\npub struct TextVertex {\n\n pub position: [f32; 3],\n\n pub uv: [f32; 2],\n\n pub color: [f32; 4],\n\n}\n\n\n\nlazy_static::lazy_static! {\n\n pub static ref TEXT_VERTEX_LAYOUT : VertexDataSetLayout = {\n\n use rafx::api::RafxFormat;\n\n\n\n VertexDataLayout::build_vertex_layout(&TextVertex::default(), |builder, vertex| {\n", "file_path": "demo/src/features/text/mod.rs", "rank": 7, "score": 172499.07298896453 }, { "content": "pub fn create_sprite_extract_job() -> Box<dyn ExtractJob> {\n\n Box::new(SpriteExtractJob::new())\n\n}\n\n\n\n//\n\n// This is boiler-platish\n\n//\n\npub struct SpriteRenderNode {\n\n pub position: glam::Vec3,\n\n pub alpha: f32,\n\n pub image: Handle<ImageAsset>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct SpriteRenderNodeHandle(pub DropSlabKey<SpriteRenderNode>);\n\n\n\nimpl SpriteRenderNodeHandle {\n\n pub fn as_raw_generic_handle(&self) -> GenericRenderNodeHandle {\n\n GenericRenderNodeHandle::new(\n\n <SpriteRenderFeature as RenderFeature>::feature_index(),\n", "file_path": "demo/src/features/sprite/mod.rs", "rank": 8, "score": 172499.07298896453 }, { "content": "pub fn create_debug3d_extract_job() -> Box<dyn ExtractJob> {\n\n Box::new(Debug3dExtractJob::new())\n\n}\n\n\n\npub type Debug3dUniformBufferObject = shaders::debug_vert::PerFrameUboUniform;\n\n\n\n/// Vertex format for vertices sent to the GPU\n\n#[derive(Clone, Debug, Copy, Default)]\n\n#[repr(C)]\n\npub struct Debug3dVertex {\n\n pub pos: [f32; 3],\n\n pub color: [f32; 4],\n\n}\n\n\n\nlazy_static::lazy_static! 
{\n\n pub static ref DEBUG_VERTEX_LAYOUT : VertexDataSetLayout = {\n\n use rafx::api::RafxFormat;\n\n\n\n VertexDataLayout::build_vertex_layout(&Debug3dVertex::default(), |builder, vertex| {\n\n builder.add_member(&vertex.pos, \"POSITION\", RafxFormat::R32G32B32_SFLOAT);\n", "file_path": "demo/src/features/debug3d/mod.rs", "rank": 9, "score": 172499.07298896453 }, { "content": "pub fn create_demo_extract_job() -> Box<dyn ExtractJob> {\n\n Box::new(DemoExtractJob::default())\n\n}\n\n\n\n//\n\n// This is boiler-platish\n\n//\n\npub struct DemoRenderNode {\n\n pub position: glam::Vec3,\n\n pub alpha: f32,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct DemoRenderNodeHandle(pub DropSlabKey<DemoRenderNode>);\n\n\n\nimpl DemoRenderNodeHandle {\n\n pub fn as_raw_generic_handle(&self) -> GenericRenderNodeHandle {\n\n GenericRenderNodeHandle::new(\n\n <DemoRenderFeature as RenderFeature>::feature_index(),\n\n self.0.index(),\n", "file_path": "rafx/examples/nodes_api_design/demo_feature/mod.rs", "rank": 10, "score": 165562.20645269865 }, { "content": "pub fn init_sdl2_imgui_manager(window: &Window) -> Sdl2ImguiManager {\n\n let imgui_context = init_imgui(&window);\n\n Sdl2ImguiManager::new(imgui_context, window)\n\n}\n", "file_path": "demo/src/features/imgui/sdl2_imgui_manager.rs", "rank": 11, "score": 165562.20645269865 }, { "content": "pub fn logging_init() {\n\n #[cfg(not(debug_assertions))]\n\n let log_level = log::LevelFilter::Info;\n\n #[cfg(debug_assertions)]\n\n let log_level = log::LevelFilter::Debug;\n\n\n\n // Setup logging\n\n env_logger::Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .filter_module(\n\n \"rafx_assets::resources::descriptor_sets\",\n\n log::LevelFilter::Info,\n\n )\n\n .filter_module(\"rafx_nodes\", log::LevelFilter::Info)\n\n .filter_module(\"rafx_visibility\", log::LevelFilter::Info)\n\n .filter_module(\"rafx_assets::graph\", log::LevelFilter::Trace)\n\n .filter_module(\"rafx_framework::resources\", log::LevelFilter::Debug)\n\n .filter_module(\"rafx_framework::graph::graph_plan\", log::LevelFilter::Info)\n\n .filter_module(\"mio\", log::LevelFilter::Debug)\n\n // .filter_module(\n", "file_path": "demo/src/main.rs", "rank": 12, "score": 158566.70360010932 }, { "content": "pub fn rendering_init(\n\n resources: &mut Resources,\n\n sdl2_window: &sdl2::video::Window,\n\n asset_source: AssetSource,\n\n) -> RafxResult<()> {\n\n resources.insert(SpriteRenderNodeSet::default());\n\n resources.insert(MeshRenderNodeSet::default());\n\n resources.insert(StaticVisibilityNodeSet::default());\n\n resources.insert(DynamicVisibilityNodeSet::default());\n\n resources.insert(DebugDraw3DResource::new());\n\n resources.insert(TextResource::new());\n\n resources.insert(ViewportsResource::default());\n\n\n\n let rafx_api = rafx::api::RafxApi::new(sdl2_window, &Default::default())?;\n\n\n\n let mut renderer_builder = RendererBuilder::default();\n\n renderer_builder = renderer_builder\n\n .add_plugin(Box::new(FontAssetTypeRendererPlugin))\n\n .add_plugin(Box::new(GltfAssetTypeRendererPlugin))\n\n .add_plugin(Box::new(Debug3DRendererPlugin))\n", "file_path": "demo/src/init.rs", "rank": 13, "score": 158566.70360010932 }, { "content": "pub fn run(args: &DemoArgs) -> RafxResult<()> {\n\n #[cfg(feature = \"profile-with-tracy\")]\n\n profiling::tracy_client::set_thread_name(\"Main Thread\");\n\n #[cfg(feature = \"profile-with-optick\")]\n\n profiling::optick::register_thread(\"Main Thread\");\n\n\n\n let mut resources = Resources::default();\n\n 
resources.insert(TimeState::new());\n\n resources.insert(RenderOptions::default());\n\n resources.insert(DebugUiState::default());\n\n resources.insert(SceneManager::default());\n\n\n\n let asset_source = if let Some(packfile) = &args.packfile {\n\n AssetSource::Packfile(packfile.to_path_buf())\n\n } else {\n\n AssetSource::Daemon {\n\n external_daemon: args.external_daemon,\n\n daemon_args: args.daemon_args.clone().into(),\n\n }\n\n };\n", "file_path": "demo/src/lib.rs", "rank": 14, "score": 158479.61182245787 }, { "content": "// Texture must be in COPY_SRC state\n\n// After this call, it will be in COPY_DST state\n\n// Vulkan requires this on a graphics queue. Metal allows this on any queue.\n\npub fn generate_mipmaps(\n\n command_buffer: &RafxCommandBuffer,\n\n _texture: &RafxTexture,\n\n) -> RafxResult<()> {\n\n match command_buffer {\n\n #[cfg(feature = \"rafx-vulkan\")]\n\n RafxCommandBuffer::Vk(inner) => generate_mipmaps_vk(inner, _texture),\n\n #[cfg(feature = \"rafx-metal\")]\n\n RafxCommandBuffer::Metal(inner) => generate_mipmaps_metal(inner, _texture),\n\n #[cfg(any(\n\n feature = \"rafx-empty\",\n\n not(any(feature = \"rafx-metal\", feature = \"rafx-vulkan\"))\n\n ))]\n\n RafxCommandBuffer::Empty(_) => unimplemented!(),\n\n }\n\n}\n\n\n\n// This custom path for metal can be removed after I implement cmd_blit\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 15, "score": 154249.41003658413 }, { "content": "// This function is a little more complex to use than enqueue_load_images but can support cubemaps\n\n// We create a layer for each layer_image_assignment, and copy from the decoded_image\n\n// at the index matching the assignment\n\npub fn enqueue_load_image(\n\n device_context: &RafxDeviceContext,\n\n upload: &mut RafxTransferUpload,\n\n image_data: &GpuImageData,\n\n params: ImageUploadParams,\n\n) -> Result<RafxTexture, RafxUploadError> {\n\n // All images must have identical mip level count, sizes, etc.\n\n #[cfg(debug_assertions)]\n\n image_data.verify_state();\n\n\n\n //\n\n // Determine the total amount of data we need to upload and verify there is enough space\n\n //\n\n let bytes_required = image_data.total_size(IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT as u64);\n\n\n\n let has_space_available = upload.has_space_available(\n\n bytes_required as usize,\n\n IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT as usize,\n\n 1,\n\n );\n", "file_path": "rafx-assets/src/image_upload.rs", "rank": 16, "score": 152234.38829124972 }, { "content": "pub fn enqueue_load_buffer(\n\n device_context: &RafxDeviceContext,\n\n upload: &mut RafxTransferUpload,\n\n // transfer_queue_family_index: u32,\n\n // dst_queue_family_index: u32,\n\n data: &[u8],\n\n) -> Result<RafxBuffer, RafxUploadError> {\n\n // Arbitrary, not sure if there is any requirement\n\n const REQUIRED_ALIGNMENT: usize = 16;\n\n\n\n // Push data into the staging buffer\n\n let offset = upload.push(data, REQUIRED_ALIGNMENT)?;\n\n let size = data.len() as u64;\n\n\n\n // Allocate a GPU buffer\n\n let dst_buffer = device_context.create_buffer(&RafxBufferDef {\n\n size,\n\n memory_usage: RafxMemoryUsage::GpuOnly,\n\n queue_type: upload.dst_queue().queue_type(),\n\n resource_type: RafxResourceType::VERTEX_BUFFER | RafxResourceType::INDEX_BUFFER,\n", "file_path": "rafx-assets/src/buffer_upload.rs", "rank": 17, "score": 152223.91388044605 }, { "content": "pub fn round_size_up_to_alignment_u64(\n\n size: u64,\n\n required_alignment: u64,\n\n) -> u64 {\n\n assert!(required_alignment > 0);\n\n ((size + required_alignment - 1) / 
required_alignment) * required_alignment\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 18, "score": 152223.91388044605 }, { "content": "pub fn load_image_blocking(\n\n device_context: &RafxDeviceContext,\n\n transfer_queue: &RafxQueue,\n\n dst_queue: &RafxQueue,\n\n upload_buffer_max_size: u64,\n\n image_data: &GpuImageData,\n\n params: ImageUploadParams,\n\n) -> Result<RafxTexture, RafxUploadError> {\n\n let total_size = image_data.total_size(IMAGE_UPLOAD_REQUIRED_SUBRESOURCE_ALIGNMENT);\n\n if upload_buffer_max_size < total_size {\n\n Err(RafxUploadError::BufferFull)?;\n\n }\n\n\n\n let mut upload = RafxTransferUpload::new(\n\n device_context,\n\n transfer_queue,\n\n dst_queue,\n\n upload_buffer_max_size,\n\n )?;\n\n\n\n let texture = enqueue_load_image(device_context, &mut upload, image_data, params)?;\n\n\n\n upload.block_until_upload_complete()?;\n\n\n\n Ok(texture)\n\n}\n", "file_path": "rafx-assets/src/image_upload.rs", "rank": 19, "score": 152223.91388044605 }, { "content": "pub fn rendering_destroy(resources: &mut Resources) -> RafxResult<()> {\n\n // Destroy these first\n\n {\n\n {\n\n let swapchain_helper = resources.remove::<RafxSwapchainHelper>().unwrap();\n\n let mut asset_manager = resources.get_mut::<AssetManager>().unwrap();\n\n let game_renderer = resources.get::<Renderer>().unwrap();\n\n SwapchainHandler::destroy_swapchain(\n\n swapchain_helper,\n\n &mut *asset_manager,\n\n &*game_renderer,\n\n )?;\n\n }\n\n\n\n resources.remove::<Renderer>();\n\n resources.remove::<SpriteRenderNodeSet>();\n\n resources.remove::<MeshRenderNodeSet>();\n\n resources.remove::<StaticVisibilityNodeSet>();\n\n resources.remove::<DynamicVisibilityNodeSet>();\n\n resources.remove::<DebugDraw3DResource>();\n", "file_path": "demo/src/init.rs", "rank": 20, "score": 151608.777235755 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Demo\", 900, 600)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "demo/src/init.rs", "rank": 21, "score": 148549.0375318339 }, { "content": "pub fn create_font_texture_with_ranges(\n\n font_data: &[u8],\n\n character_ranges_to_include: &[(u32, u32)],\n\n size: f32,\n\n margin: u32,\n\n) -> Option<FontTextureWithMeta> {\n\n // let character_ranges_to_include = vec![\n\n // (32, 128),\n\n // //(0x4e00, 0x5FCC)\n\n // ];\n\n\n\n let mut characters_to_include = vec![];\n\n\n\n //\n\n // Iterate codepoints in the font and find the characters within the given ranges\n\n //\n\n let face = ttf_parser::Face::from_slice(font_data, 0).unwrap();\n\n\n\n for subtable in face.character_mapping_subtables() {\n\n subtable.codepoints(|codepoint| {\n", "file_path": "demo/src/assets/font/font_cooking.rs", "rank": 22, "score": 148412.9272783139 }, { "content": "pub fn do_find_supported_format(\n\n instance: &ash::Instance,\n\n physical_device: vk::PhysicalDevice,\n\n candidates: &[RafxFormat],\n\n image_tiling: vk::ImageTiling,\n\n features: vk::FormatFeatureFlags,\n\n) -> Option<RafxFormat> {\n\n for &candidate in candidates {\n\n let props = unsafe {\n\n instance.get_physical_device_format_properties(physical_device, candidate.into())\n\n 
};\n\n\n\n let is_supported = match image_tiling {\n\n vk::ImageTiling::LINEAR => (props.linear_tiling_features & features) == features,\n\n vk::ImageTiling::OPTIMAL => (props.optimal_tiling_features & features) == features,\n\n _ => unimplemented!(),\n\n };\n\n\n\n if is_supported {\n\n return Some(candidate);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "rafx-api/src/backends/vulkan/device_context.rs", "rank": 23, "score": 148412.9272783139 }, { "content": "pub fn parse_shader_source_recursive(\n\n file_to_process: &FileToProcess,\n\n declarations: &mut Vec<DeclarationText>,\n\n included_files: &mut FnvHashSet<PathBuf>,\n\n) -> Result<(), String> {\n\n log::trace!(\"parse_shader_source_recursive {:?}\", file_to_process);\n\n let resolved_include = super::include_impl(\n\n &file_to_process.path,\n\n file_to_process.include_type,\n\n &file_to_process.requested_from,\n\n file_to_process.include_depth,\n\n )?;\n\n\n\n if included_files.contains(&resolved_include.resolved_path) {\n\n return Ok(());\n\n }\n\n\n\n included_files.insert(resolved_include.resolved_path.clone());\n\n\n\n let code: Vec<char> = resolved_include.content.chars().collect();\n", "file_path": "rafx-shader-processor/src/parse_source.rs", "rank": 24, "score": 148412.9272783139 }, { "content": "pub fn blend_state_to_create_info(\n\n blend_state: &RafxBlendState,\n\n color_attachment_count: usize,\n\n) -> RafxBlendStateVkCreateInfo {\n\n let mut blend_attachments_states = vec![];\n\n\n\n blend_state.verify(color_attachment_count);\n\n\n\n if let Some(first_attachment) = blend_state.render_target_blend_states.first() {\n\n for attachment_index in 0..color_attachment_count {\n\n let attachment_state = if blend_state\n\n .render_target_mask\n\n .intersects(RafxBlendStateTargets::from_bits(1 << attachment_index).unwrap())\n\n {\n\n if blend_state.independent_blend {\n\n blend_state_render_target_to_create_info(\n\n &blend_state.render_target_blend_states[attachment_index],\n\n )\n\n } else {\n\n blend_state_render_target_to_create_info(first_attachment)\n", "file_path": "rafx-api/src/backends/vulkan/internal/util.rs", "rank": 25, "score": 146618.14080689274 }, { "content": "pub fn resource_type_to_descriptor_type(\n\n resource_type: RafxResourceType\n\n) -> Option<vk::DescriptorType> {\n\n match resource_type {\n\n RafxResourceType::SAMPLER => Some(vk::DescriptorType::SAMPLER),\n\n RafxResourceType::TEXTURE => Some(vk::DescriptorType::SAMPLED_IMAGE),\n\n RafxResourceType::UNIFORM_BUFFER => Some(vk::DescriptorType::UNIFORM_BUFFER),\n\n RafxResourceType::TEXTURE_READ_WRITE => Some(vk::DescriptorType::STORAGE_IMAGE),\n\n RafxResourceType::BUFFER => Some(vk::DescriptorType::STORAGE_BUFFER),\n\n RafxResourceType::BUFFER_READ_WRITE => Some(vk::DescriptorType::STORAGE_BUFFER),\n\n RafxResourceType::INPUT_ATTACHMENT => Some(vk::DescriptorType::INPUT_ATTACHMENT),\n\n RafxResourceType::TEXEL_BUFFER => Some(vk::DescriptorType::UNIFORM_TEXEL_BUFFER),\n\n RafxResourceType::TEXEL_BUFFER_READ_WRITE => Some(vk::DescriptorType::STORAGE_TEXEL_BUFFER),\n\n RafxResourceType::COMBINED_IMAGE_SAMPLER => {\n\n Some(vk::DescriptorType::COMBINED_IMAGE_SAMPLER)\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "rafx-api/src/backends/vulkan/internal/util.rs", "rank": 26, "score": 146618.14080689274 }, { "content": "/// The max number of mip levels an image can have given its size\n\npub fn mip_level_max_count_for_image_size(\n\n width: u32,\n\n height: u32,\n\n) -> u32 {\n\n let max_dimension = std::cmp::max(width, height);\n\n (max_dimension as 
f32).log2().floor() as u32 + 1\n\n}\n\n\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 27, "score": 146618.14080689274 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/asset_triangle/asset_triangle.rs", "rank": 28, "score": 142712.55477356366 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/framework_triangle/framework_triangle.rs", "rank": 29, "score": 142712.55477356366 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", WINDOW_WIDTH, WINDOW_HEIGHT)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n\n\n", "file_path": "rafx/examples/api_triangle/api_triangle.rs", "rank": 30, "score": 142712.55477356366 }, { "content": "pub fn color_render_target_binding_mtl_store_op(\n\n color_binding: &RafxColorRenderTargetBinding\n\n) -> MTLStoreAction {\n\n let resolve = color_binding.resolve_target.is_some()\n\n && color_binding.resolve_store_op == RafxStoreOp::Store;\n\n if color_binding.store_op == RafxStoreOp::Store {\n\n if resolve {\n\n MTLStoreAction::StoreAndMultisampleResolve\n\n } else {\n\n MTLStoreAction::Store\n\n }\n\n } else {\n\n if resolve {\n\n MTLStoreAction::MultisampleResolve\n\n } else {\n\n MTLStoreAction::DontCare\n\n }\n\n }\n\n}\n", "file_path": "rafx-api/src/backends/metal/internal/util.rs", "rank": 31, "score": 141627.97860968942 }, { "content": "pub fn create_uninitialized_write_set_for_layout(\n\n layout: &DescriptorSetLayout\n\n) -> DescriptorSetWriteSet {\n\n let mut write_set = DescriptorSetWriteSet::default();\n\n for binding in &layout.bindings {\n\n let key = DescriptorSetElementKey {\n\n dst_binding: binding.resource.binding as u32,\n\n };\n\n\n\n let mut element_write = DescriptorSetElementWrite {\n\n has_immutable_sampler: binding.immutable_samplers.is_some(),\n\n descriptor_type: binding.resource.resource_type,\n\n image_info: Default::default(),\n\n buffer_info: Default::default(),\n\n };\n\n\n\n let what_to_bind = super::what_to_bind(&element_write);\n\n\n\n if what_to_bind.bind_images || what_to_bind.bind_samplers {\n\n 
element_write.image_info.resize(\n", "file_path": "rafx-framework/src/resources/descriptor_sets/descriptor_write_set.rs", "rank": 32, "score": 141627.97860968942 }, { "content": "pub fn sdl2_init() -> Sdl2Systems {\n\n // Setup SDL\n\n let context = sdl2::init().expect(\"Failed to initialize sdl2\");\n\n let video_subsystem = context\n\n .video()\n\n .expect(\"Failed to create sdl video subsystem\");\n\n\n\n // Create the window\n\n let window = video_subsystem\n\n .window(\"Rafx Example\", 800, 600)\n\n .position_centered()\n\n .allow_highdpi()\n\n .resizable()\n\n .build()\n\n .expect(\"Failed to create window\");\n\n\n\n Sdl2Systems {\n\n context,\n\n video_subsystem,\n\n window,\n\n }\n\n}\n", "file_path": "rafx/examples/nodes_api_design/nodes_api_design.rs", "rank": 33, "score": 139191.17773013219 }, { "content": "fn vk_version_to_string(version: u32) -> String {\n\n format!(\n\n \"{}.{}.{}\",\n\n vk::version_major(version),\n\n vk::version_minor(version),\n\n vk::version_patch(version)\n\n )\n\n}\n\n\n", "file_path": "rafx-api/src/backends/vulkan/device_context.rs", "rank": 34, "score": 135412.5410466305 }, { "content": "fn find_supported_feature_set(\n\n device: &metal_rs::DeviceRef,\n\n feature_sets: &[MTLFeatureSet],\n\n) -> Option<MTLFeatureSet> {\n\n for &feature_set in feature_sets {\n\n if device.supports_feature_set(feature_set) {\n\n return Some(feature_set);\n\n }\n\n }\n\n\n\n return None;\n\n}\n\n\n", "file_path": "rafx-api/src/backends/metal/internal/features.rs", "rank": 35, "score": 134830.41826803854 }, { "content": "//\n\n// Static functions\n\n//\n\npub fn handle_load_result<AssetT: Clone>(\n\n load_op: AssetLoadOp,\n\n loaded_asset: RafxResult<AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n result_tx: Sender<AssetT>,\n\n) {\n\n match loaded_asset {\n\n Ok(loaded_asset) => {\n\n asset_lookup.set_uncommitted(load_op.load_handle(), loaded_asset.clone());\n\n result_tx.send(loaded_asset).unwrap();\n\n load_op.complete()\n\n }\n\n Err(err) => {\n\n load_op.error(err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 36, "score": 132886.3972775248 }, { "content": "pub fn init_distill_daemon(connect_string: String) -> AssetResource {\n\n let rpc_loader = RpcIO::new(connect_string).unwrap();\n\n let loader = Loader::new(Box::new(rpc_loader));\n\n let resolver = Box::new(DefaultIndirectionResolver);\n\n AssetResource::new(loader, resolver)\n\n}\n\n\n", "file_path": "rafx-renderer/src/daemon.rs", "rank": 37, "score": 130694.39607210821 }, { "content": "pub fn handle_free_requests<AssetDataT, AssetT>(\n\n load_queues: &mut LoadQueues<AssetDataT, AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n) {\n\n for request in load_queues.take_commit_requests() {\n\n asset_lookup.commit(request.load_handle);\n\n }\n\n}\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 38, "score": 129741.2375817564 }, { "content": "pub fn handle_commit_requests<AssetDataT, AssetT>(\n\n load_queues: &mut LoadQueues<AssetDataT, AssetT>,\n\n asset_lookup: &mut AssetLookup<AssetT>,\n\n) {\n\n for request in load_queues.take_commit_requests() {\n\n log::trace!(\n\n \"commit asset {:?} {}\",\n\n request.load_handle,\n\n core::any::type_name::<AssetDataT>()\n\n );\n\n asset_lookup.commit(request.load_handle);\n\n }\n\n}\n\n\n", "file_path": "rafx-assets/src/assets/asset_type_handler.rs", "rank": 39, "score": 129741.2375817564 }, { "content": "pub fn slice_size_in_bytes<T>(slice: &[T]) -> usize {\n\n let range = 
slice.as_ptr_range();\n\n (range.end as *const u8 as usize) - (range.start as *const u8 as usize)\n\n}\n\n\n\npub unsafe fn force_to_static_lifetime<T>(value: &T) -> &'static T {\n\n std::mem::transmute(value)\n\n}\n\n\n\npub unsafe fn force_to_static_lifetime_mut<T>(value: &mut T) -> &'static mut T {\n\n std::mem::transmute(value)\n\n}\n", "file_path": "rafx-base/src/memory.rs", "rank": 40, "score": 129170.04409670064 }, { "content": "fn add_light_debug_draw(\n\n resources: &Resources,\n\n world: &World,\n\n) {\n\n let mut debug_draw = resources.get_mut::<DebugDraw3DResource>().unwrap();\n\n\n\n let mut query = <Read<DirectionalLightComponent>>::query();\n\n for light in query.iter(world) {\n\n let light_from = light.direction * -10.0;\n\n let light_to = glam::Vec3::zero();\n\n\n\n debug_draw.add_line(light_from, light_to, light.color);\n\n }\n\n\n\n let mut query = <(Read<PositionComponent>, Read<PointLightComponent>)>::query();\n\n for (position, light) in query.iter(world) {\n\n debug_draw.add_sphere(position.position, 0.25, light.color, 12);\n\n }\n\n\n\n let mut query = <(Read<PositionComponent>, Read<SpotLightComponent>)>::query();\n", "file_path": "demo/src/scenes/mod.rs", "rank": 41, "score": 128614.96982845494 }, { "content": "enum ShadowMapVisibility {\n\n Single(RenderViewVisibility),\n\n Cube(ArrayVec<[RenderViewVisibility; 6]>),\n\n}\n\n\n\n#[derive(Default)]\n\npub struct ShadowMapResource {\n\n // These are populated by recalculate_shadow_map_views()\n\n pub(super) shadow_map_lookup: FnvHashMap<LightId, usize>,\n\n pub(super) shadow_map_render_views: Vec<ShadowMapRenderView>,\n\n\n\n // Populated by set_shadow_map_image_resources, during construction of the render graph\n\n pub(super) image_usage_ids: Vec<RenderGraphImageUsageId>,\n\n\n\n // Populated by set_shadow_map_image_views, after the render graph is constructed and image\n\n // resources are allocated\n\n pub(super) shadow_map_image_views: Vec<ResourceArc<ImageViewResource>>,\n\n}\n\n\n\nimpl ShadowMapResource {\n", "file_path": "demo/src/features/mesh/shadow_map_resource.rs", "rank": 42, "score": 128304.59717767165 }, { "content": "pub fn any_as_bytes<T: Copy>(data: &T) -> &[u8] {\n\n let ptr: *const T = data;\n\n let ptr = ptr as *const u8;\n\n let slice: &[u8] = unsafe { std::slice::from_raw_parts(ptr, std::mem::size_of::<T>()) };\n\n\n\n slice\n\n}\n\n\n", "file_path": "rafx-base/src/memory.rs", "rank": 43, "score": 128185.79903565171 }, { "content": "pub trait RenderFeature {\n\n fn set_feature_index(index: RenderFeatureIndex);\n\n fn feature_index() -> RenderFeatureIndex;\n\n\n\n fn feature_debug_name() -> &'static str;\n\n}\n\n\n", "file_path": "rafx-framework/src/nodes/registry.rs", "rank": 44, "score": 127455.83314178765 }, { "content": "/// Implement to customize how PoolAllocator resets and destroys pools\n\npub trait PooledResourceImpl {\n\n fn reset(&mut self) -> RafxResult<()>;\n\n}\n\n\n", "file_path": "rafx-framework/src/resources/pool.rs", "rank": 45, "score": 125897.06204235397 }, { "content": "pub fn what_to_bind(element_write: &DescriptorSetElementWrite) -> WhatToBind {\n\n let mut what = WhatToBind::default();\n\n\n\n // See https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkWriteDescriptorSet.html\n\n match element_write.descriptor_type {\n\n RafxResourceType::SAMPLER => {\n\n what.bind_samplers = !element_write.has_immutable_sampler;\n\n }\n\n RafxResourceType::COMBINED_IMAGE_SAMPLER => {\n\n what.bind_samplers = !element_write.has_immutable_sampler;\n\n what.bind_images = 
true;\n\n }\n\n RafxResourceType::TEXTURE => {\n\n what.bind_images = true;\n\n }\n\n RafxResourceType::UNIFORM_BUFFER => {\n\n what.bind_buffers = true;\n\n }\n\n RafxResourceType::BUFFER => {\n\n what.bind_buffers = true;\n\n }\n\n _ => unimplemented!(),\n\n }\n\n\n\n what\n\n}\n", "file_path": "rafx-framework/src/resources/descriptor_sets/mod.rs", "rank": 46, "score": 125887.02734512697 }, { "content": "fn pixel_format_capabilities(\n\n feature_set: MTLFeatureSet,\n\n pixel_format: MTLPixelFormat,\n\n) -> metal_rs::PixelFormatCapabilities {\n\n use metal_rs::PixelFormatCapabilities;\n\n match pixel_format {\n\n MTLPixelFormat::Invalid => PixelFormatCapabilities::empty(),\n\n MTLPixelFormat::A8Unorm => feature_set.a8_unorm_capabilities(),\n\n MTLPixelFormat::R8Unorm => feature_set.r8_unorm_capabilities(),\n\n MTLPixelFormat::R8Unorm_sRGB => feature_set.r8_unorm_srgb_capabilities(),\n\n MTLPixelFormat::R8Snorm => feature_set.r8_snorm_capabilities(),\n\n MTLPixelFormat::R8Uint => feature_set.r8_uint_capabilities(),\n\n MTLPixelFormat::R8Sint => feature_set.r8_sint_capabilities(),\n\n MTLPixelFormat::R16Unorm => feature_set.r16_unorm_capabilities(),\n\n MTLPixelFormat::R16Snorm => feature_set.r16_snorm_capabilities(),\n\n MTLPixelFormat::R16Uint => feature_set.r16_uint_capabilities(),\n\n MTLPixelFormat::R16Sint => feature_set.r16_sint_capabilities(),\n\n MTLPixelFormat::R16Float => feature_set.r16_float_capabilities(),\n\n MTLPixelFormat::RG8Unorm => feature_set.rg8_unorm_capabilities(),\n\n MTLPixelFormat::RG8Unorm_sRGB => feature_set.rg8_unorm_srgb_capabilities(),\n", "file_path": "rafx-api/src/backends/metal/internal/features.rs", "rank": 47, "score": 124144.18702135273 }, { "content": "fn find_supported_family(\n\n device: &metal_rs::DeviceRef,\n\n gpu_families: &[MTLGPUFamily],\n\n) -> Option<MTLGPUFamily> {\n\n for &family in gpu_families {\n\n if device.supports_family(family) {\n\n return Some(family);\n\n }\n\n }\n\n\n\n return None;\n\n}\n\n\n", "file_path": "rafx-api/src/backends/metal/internal/features.rs", "rank": 48, "score": 124144.18702135273 }, { "content": "pub trait FeatureCommandWriter {\n\n fn on_begin_execute_graph(\n\n &self,\n\n _write_context: &mut RenderJobBeginExecuteGraphContext,\n\n ) -> RafxResult<()> {\n\n Ok(())\n\n }\n\n fn apply_setup(\n\n &self,\n\n _write_context: &mut RenderJobWriteContext,\n\n _view: &RenderView,\n\n _render_phase_index: RenderPhaseIndex,\n\n ) -> RafxResult<()> {\n\n Ok(())\n\n }\n\n fn render_element(\n\n &self,\n\n write_context: &mut RenderJobWriteContext,\n\n view: &RenderView,\n\n render_phase_index: RenderPhaseIndex,\n", "file_path": "rafx-framework/src/nodes/jobs/write.rs", "rank": 49, "score": 123494.50179166124 }, { "content": "fn create_render_view_visibility(\n\n static_visibility_node_set: &mut StaticVisibilityNodeSet,\n\n dynamic_visibility_node_set: &mut DynamicVisibilityNodeSet,\n\n render_view: &RenderView,\n\n) -> RenderViewVisibility {\n\n let static_visibility = static_visibility_node_set.calculate_static_visibility(&render_view);\n\n let dynamic_visibility = dynamic_visibility_node_set.calculate_dynamic_visibility(&render_view);\n\n\n\n log::trace!(\n\n \"shadow view static node count: {}\",\n\n static_visibility.handles.len()\n\n );\n\n\n\n log::trace!(\n\n \"shadow view dynamic node count: {}\",\n\n dynamic_visibility.handles.len()\n\n );\n\n\n\n RenderViewVisibility {\n\n render_view: render_view.clone(),\n\n static_visibility,\n\n dynamic_visibility,\n\n }\n\n}\n\n\n", "file_path": 
"demo/src/features/mesh/shadow_map_resource.rs", "rank": 50, "score": 122285.21814395458 }, { "content": "#[profiling::function]\n\nfn calculate_shadow_map_views(\n\n render_view_set: &RenderViewSet,\n\n extract_resources: &ExtractResources,\n\n) -> (FnvHashMap<LightId, usize>, Vec<ShadowMapRenderView>) {\n\n let world_fetch = extract_resources.fetch::<World>();\n\n let world = &*world_fetch;\n\n\n\n let mut shadow_map_render_views = Vec::default();\n\n let mut shadow_map_lookup = FnvHashMap::default();\n\n\n\n let shadow_map_phase_mask = RenderPhaseMaskBuilder::default()\n\n .add_render_phase::<ShadowMapRenderPhase>()\n\n .build();\n\n\n\n //TODO: The look-at calls in this fn will fail if the light is pointed straight down\n\n\n\n const SHADOW_MAP_RESOLUTION: u32 = 1024;\n\n\n\n let mut query = <(Entity, Read<SpotLightComponent>, Read<PositionComponent>)>::query();\n\n for (entity, light, position) in query.iter(world) {\n", "file_path": "demo/src/features/mesh/shadow_map_resource.rs", "rank": 51, "score": 122285.21814395458 }, { "content": "pub fn init_distill_packfile(pack_file: &std::path::Path) -> AssetResource {\n\n let packfile = std::fs::File::open(pack_file).unwrap();\n\n let packfile_loader = PackfileReader::new(packfile).unwrap();\n\n let loader = Loader::new(Box::new(packfile_loader));\n\n let resolver = Box::new(DefaultIndirectionResolver);\n\n AssetResource::new(loader, resolver)\n\n}\n", "file_path": "rafx-renderer/src/daemon.rs", "rank": 52, "score": 121400.85036702723 }, { "content": "pub fn parse_glsl(file_path: &Path) -> Result<ShaderText, String> {\n\n let first_file = FileToProcess {\n\n path: file_path.to_path_buf(),\n\n include_type: IncludeType::Relative,\n\n requested_from: PathBuf::new(),\n\n include_depth: 0,\n\n };\n\n\n\n let mut included_files = FnvHashSet::<PathBuf>::default();\n\n included_files.insert(file_path.to_path_buf());\n\n let mut declarations = Vec::default();\n\n\n\n let content = std::fs::read_to_string(file_path)\n\n .map_err(|e| format!(\"Could not read file {:?}: {:?}\", file_path, e))?;\n\n let code: Vec<char> = content.chars().collect();\n\n parse_shader_source_text(&first_file, &mut declarations, &mut included_files, &code)?;\n\n\n\n Ok(ShaderText { declarations })\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/parse_source.rs", "rank": 53, "score": 119857.05326523901 }, { "content": "pub fn run(args: &ShaderProcessorArgs) -> Result<(), Box<dyn Error>> {\n\n log::trace!(\"Shader processor args: {:#?}\", args);\n\n\n\n if let Some(glsl_file) = &args.glsl_file {\n\n //\n\n // Handle a single file given via --glsl_file. 
In this mode, the output files are explicit\n\n //\n\n log::info!(\"Processing file {:?}\", glsl_file);\n\n\n\n //\n\n // Try to determine what kind of shader this is from the file name\n\n //\n\n let shader_kind = shader_kind_from_args(args)\n\n .or_else(|| deduce_default_shader_kind_from_path(glsl_file))\n\n .unwrap_or(shaderc::ShaderKind::InferFromSource);\n\n\n\n //\n\n // Process this shader and write to output files\n\n //\n\n process_glsl_shader(\n", "file_path": "rafx-shader-processor/src/lib.rs", "rank": 54, "score": 118997.92836526736 }, { "content": "pub fn create_font_texture_with_characters<'a, IterT: Iterator<Item = &'a char>>(\n\n font: &fontdue::Font,\n\n characters: IterT,\n\n size: f32,\n\n margin: u32,\n\n) -> Option<FontTextureWithMeta> {\n\n let mut rasterized_data = FnvHashMap::default();\n\n let mut rects_to_place = rectangle_pack::GroupedRectsToPlace::<char, ()>::new();\n\n\n\n for &c in characters {\n\n let (metrics, data) = font.rasterize(c, size);\n\n rects_to_place.push_rect(\n\n c,\n\n None,\n\n rectangle_pack::RectToInsert::new(\n\n metrics.width as u32 + (margin * 2),\n\n metrics.height as u32 + (margin * 2),\n\n 1,\n\n ),\n\n );\n", "file_path": "demo/src/assets/font/font_cooking.rs", "rank": 55, "score": 117454.13126347914 }, { "content": "// Used to dynamic dispatch into a storage, supports checked downcasting\n\npub trait DynAssetStorage: Downcast + Send {\n\n fn update_asset(\n\n &mut self,\n\n loader_info: &dyn LoaderInfoProvider,\n\n data: &[u8],\n\n load_handle: LoadHandle,\n\n load_op: AssetLoadOp,\n\n version: u32,\n\n ) -> Result<(), Box<dyn Error + Send + 'static>>;\n\n fn commit_asset_version(\n\n &mut self,\n\n handle: LoadHandle,\n\n version: u32,\n\n );\n\n fn free(\n\n &mut self,\n\n handle: LoadHandle,\n\n version: u32,\n\n );\n\n\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 56, "score": 113142.13572833239 }, { "content": "fn resolve_load_handle<T: AssetHandle>(\n\n handle: &T,\n\n indirection_table: &IndirectionTable,\n\n) -> Option<LoadHandle> {\n\n if handle.load_handle().is_indirect() {\n\n indirection_table.resolve(handle.load_handle())\n\n } else {\n\n Some(handle.load_handle())\n\n }\n\n}\n\n\n\nimpl<AssetT: TypeUuid + Send> Storage<AssetT> {\n\n fn new(\n\n sender: Sender<RefOp>,\n\n loader: Box<dyn DynAssetLoader<AssetT>>,\n\n indirection_table: IndirectionTable,\n\n ) -> Self {\n\n Self {\n\n refop_sender: sender,\n\n assets: HashMap::new(),\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 57, "score": 113067.6102016313 }, { "content": "// Implements loading logic (i.e. turning bytes into an asset. 
The asset may contain runtime-only\n\n// data and may be created asynchronously\n\npub trait DynAssetLoader<AssetT>: Send\n\nwhere\n\n AssetT: TypeUuid + 'static + Send,\n\n{\n\n fn update_asset(\n\n &mut self,\n\n refop_sender: &Sender<RefOp>,\n\n loader_info: &dyn LoaderInfoProvider,\n\n data: &[u8],\n\n load_handle: LoadHandle,\n\n load_op: AssetLoadOp,\n\n version: u32,\n\n ) -> Result<UpdateAssetResult<AssetT>, Box<dyn Error + Send + 'static>>;\n\n\n\n fn commit_asset_version(\n\n &mut self,\n\n handle: LoadHandle,\n\n version: u32,\n\n );\n\n\n\n fn free(\n\n &mut self,\n\n handle: LoadHandle,\n\n );\n\n}\n\n\n", "file_path": "rafx-assets/src/distill_impl/asset_storage.rs", "rank": 58, "score": 111469.5977134571 }, { "content": "#[profiling::function]\n\nfn init_imgui(window: &Window) -> imgui::Context {\n\n use imgui::Context;\n\n\n\n let mut imgui = Context::create();\n\n {\n\n // Fix incorrect colors with sRGB framebuffer\n\n fn imgui_gamma_to_linear(col: [f32; 4]) -> [f32; 4] {\n\n let x = col[0].powf(2.2);\n\n let y = col[1].powf(2.2);\n\n let z = col[2].powf(2.2);\n\n let w = 1.0 - (1.0 - col[3]).powf(2.2);\n\n [x, y, z, w]\n\n }\n\n\n\n let style = imgui.style_mut();\n\n for col in 0..style.colors.len() {\n\n style.colors[col] = imgui_gamma_to_linear(style.colors[col]);\n\n }\n\n }\n\n\n", "file_path": "demo/src/features/imgui/sdl2_imgui_manager.rs", "rank": 59, "score": 108586.0665591545 }, { "content": "fn run() -> RafxResult<()> {\n\n //\n\n // Init SDL2 (winit and anything that uses raw-window-handle works too!)\n\n //\n\n let sdl2_systems = sdl2_init();\n\n\n\n //\n\n // Create the api\n\n //\n\n let mut api = RafxApi::new(&sdl2_systems.window, &Default::default())?;\n\n\n\n // Wrap all of this so that it gets dropped before we drop the API object. This ensures a nice\n\n // clean shutdown.\n\n {\n\n // A cloneable device handle, these are lightweight and can be passed across threads\n\n let device_context = api.device_context();\n\n\n\n //\n\n // Create a swapchain\n\n //\n", "file_path": "rafx/examples/api_triangle/api_triangle.rs", "rank": 60, "score": 104776.24471672544 }, { "content": "fn run() -> RafxResult<()> {\n\n //\n\n // Init SDL2 (winit and anything that uses raw-window-handle works too!)\n\n //\n\n let sdl2_systems = sdl2_init();\n\n\n\n //\n\n // Create the api\n\n //\n\n let mut api = RafxApi::new(&sdl2_systems.window, &Default::default())?;\n\n\n\n // Wrap all of this so that it gets dropped before we drop the API object. This ensures a nice\n\n // clean shutdown.\n\n {\n\n // A cloneable device handle, these are lightweight and can be passed across threads\n\n let device_context = api.device_context();\n\n\n\n //\n\n // Create a swapchain\n\n //\n", "file_path": "rafx/examples/framework_triangle/framework_triangle.rs", "rank": 61, "score": 104776.24471672544 }, { "content": "fn run() -> RafxResult<()> {\n\n //\n\n // For this example, we'll run the `distill` daemon in-process. This is the most convenient\n\n // method during development. 
(You could also build a packfile ahead of time and run from that)\n\n //\n\n let db_dir = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"))\n\n .join(\"examples/asset_triangle/.assets_db\");\n\n let asset_dir =\n\n std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\")).join(\"examples/asset_triangle/assets\");\n\n let connect_string = \"127.0.0.1:9999\";\n\n\n\n // Daemon will runs in a background thread for the life of the process\n\n std::thread::spawn(move || {\n\n rafx::assets::distill_impl::default_daemon()\n\n .with_db_path(db_dir)\n\n .with_address(connect_string.parse().unwrap())\n\n .with_asset_dirs(vec![asset_dir])\n\n .run();\n\n });\n\n\n", "file_path": "rafx/examples/asset_triangle/asset_triangle.rs", "rank": 62, "score": 104776.24471672544 }, { "content": "enum TimeOp {\n\n SetPaused(bool, SimulationTimePauseReason),\n\n ResetSimulationTime,\n\n}\n\n\n\n// For now just wrap the input helper that skulpin provides\n\npub struct TimeResource {\n\n pub time_state: TimeState,\n\n pub simulation_time: TimeContext,\n\n pub log_fps_event: PeriodicEvent,\n\n pub simulation_pause_flags: u8, // No flags set means simulation is not paused\n\n pending_time_ops: Vec<TimeOp>,\n\n}\n\n\n\nimpl TimeResource {\n\n /// Create a new TimeState. Default is not allowed because the current time affects the object\n\n #[allow(clippy::new_without_default)]\n\n pub fn new() -> Self {\n\n TimeResource {\n\n time_state: TimeState::new(),\n", "file_path": "demo/src/time.rs", "rank": 63, "score": 86653.5589512719 }, { "content": "#[derive(Debug)]\n\nenum StructOrBinding {\n\n Struct(usize),\n\n Binding(usize),\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 64, "score": 84395.21587005325 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum MemoryLayout {\n\n Std140,\n\n Std430,\n\n C,\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 65, "score": 84395.12610703174 }, { "content": "fn main() {\n\n logging_init();\n\n\n\n let args = demo::DemoArgs::from_args();\n\n\n\n demo::run(&args).unwrap();\n\n}\n", "file_path": "demo/src/main.rs", "rank": 66, "score": 83659.17971578722 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum StructBindingType {\n\n Uniform,\n\n Buffer,\n\n PushConstant,\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 67, "score": 83334.2619780461 }, { "content": "#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\nenum GltfObjectId {\n\n Name(String),\n\n Index(usize),\n\n}\n\n\n", "file_path": "demo/src/assets/gltf/importer.rs", "rank": 68, "score": 83328.36515795362 }, { "content": "enum RenderThreadMessage {\n\n Render(RenderFrameJob, RafxPresentableFrame),\n\n Finish,\n\n}\n\n\n\npub struct RenderThread {\n\n join_handle: Option<JoinHandle<()>>,\n\n job_tx: Sender<RenderThreadMessage>,\n\n\n\n result_rx: Receiver<RenderFrameJobResult>,\n\n expecting_result: bool,\n\n\n\n render_resources: Arc<Mutex<RenderResources>>,\n\n}\n\n\n\nimpl RenderThread {\n\n pub fn render_resources(&self) -> &Arc<Mutex<RenderResources>> {\n\n &self.render_resources\n\n }\n\n\n", "file_path": "rafx-renderer/src/render_thread.rs", "rank": 69, "score": 83328.36515795362 }, { "content": "fn recursive_modify_user_type<F: Fn(&mut UserType) -> bool>(\n\n user_types: &mut FnvHashMap<String, UserType>,\n\n type_name: &str,\n\n f: &F,\n\n) {\n\n let user_type = user_types.get_mut(type_name);\n\n let recurse = if let Some(user_type) = user_type {\n\n (f)(user_type)\n\n } else {\n\n // for now skip types we 
don't recognize\n\n return;\n\n };\n\n\n\n if recurse {\n\n if let Some(fields) = user_types.get(type_name).map(|x| x.fields.clone()) {\n\n for field in &*fields {\n\n recursive_modify_user_type(user_types, &field.type_name, f);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 70, "score": 83119.89522849832 }, { "content": "fn process_input(\n\n resources: &Resources,\n\n event_pump: &mut sdl2::EventPump,\n\n) -> bool {\n\n #[cfg(feature = \"use-imgui\")]\n\n let imgui_manager = resources\n\n .get::<crate::features::imgui::Sdl2ImguiManager>()\n\n .unwrap();\n\n let mut scene_manager = resources.get_mut::<SceneManager>().unwrap();\n\n for event in event_pump.poll_iter() {\n\n #[cfg(feature = \"use-imgui\")]\n\n let ignore_event = {\n\n imgui_manager.handle_event(&event);\n\n imgui_manager.ignore_event(&event)\n\n };\n\n\n\n #[cfg(not(feature = \"use-imgui\"))]\n\n let ignore_event = false;\n\n\n\n if !ignore_event {\n", "file_path": "demo/src/lib.rs", "rank": 71, "score": 82457.32758662092 }, { "content": "#[derive(Debug)]\n\nenum MetalPipelineState {\n\n Graphics(metal_rs::RenderPipelineState),\n\n Compute(metal_rs::ComputePipelineState),\n\n}\n\n\n\n// for metal_rs::RenderPipelineState\n\nunsafe impl Send for MetalPipelineState {}\n\nunsafe impl Sync for MetalPipelineState {}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct PipelineComputeEncoderInfo {\n\n pub compute_threads_per_group: [u32; 3],\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) struct PipelineRenderEncoderInfo {\n\n // This is all set on the render encoder, so cache it now so we can set it later\n\n pub(crate) mtl_cull_mode: metal_rs::MTLCullMode,\n\n pub(crate) mtl_triangle_fill_mode: metal_rs::MTLTriangleFillMode,\n\n pub(crate) mtl_front_facing_winding: metal_rs::MTLWinding,\n", "file_path": "rafx-api/src/backends/metal/pipeline.rs", "rank": 72, "score": 82317.1120789379 }, { "content": "fn create_scene(\n\n scene: Scene,\n\n world: &mut World,\n\n resources: &Resources,\n\n) -> Box<dyn TestScene> {\n\n match scene {\n\n Scene::Shadows => Box::new(ShadowsScene::new(world, resources)),\n\n Scene::Sprite => Box::new(SpriteScene::new(world, resources)),\n\n }\n\n}\n\n\n", "file_path": "demo/src/scenes/mod.rs", "rank": 73, "score": 81308.14645773328 }, { "content": "enum RenderGraphNodeVisitNodeCallback {\n\n Renderpass(Box<RenderGraphNodeVisitRenderpassNodeCallback>),\n\n Compute(Box<RenderGraphNodeVisitComputeNodeCallback>),\n\n}\n\n\n", "file_path": "rafx-framework/src/graph/mod.rs", "rank": 74, "score": 80397.18124478906 }, { "content": "fn rust_structs(\n\n rust_code: &mut Vec<String>,\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n) -> Result<Vec<GenerateStructResult>, String> {\n\n let mut structs = Vec::default();\n\n for (type_name, user_type) in user_types {\n\n if user_type.export_uniform_layout {\n\n let s = generate_struct(\n\n &builtin_types,\n\n &user_types,\n\n type_name,\n\n user_type,\n\n MemoryLayout::Std140,\n\n )?;\n\n rust_code.push(s.generate_struct_code());\n\n rust_code.push(s.generate_struct_default_code());\n\n structs.push(s);\n\n }\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 75, "score": 80208.24812417405 }, { "content": "fn main() {\n\n env_logger::Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .filter_level(LevelFilter::Debug)\n\n .init();\n\n\n\n run().unwrap();\n\n}\n\n\n", "file_path": "rafx/examples/api_triangle/api_triangle.rs", "rank": 76, 
"score": 80208.24812417405 }, { "content": "fn main() {\n\n env_logger::Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .filter_level(LevelFilter::Info)\n\n .init();\n\n\n\n run().unwrap();\n\n}\n\n\n", "file_path": "rafx/examples/framework_triangle/framework_triangle.rs", "rank": 77, "score": 80208.24812417405 }, { "content": "fn main() {\n\n env_logger::Builder::from_default_env()\n\n .default_format_timestamp_nanos(true)\n\n .filter_level(LevelFilter::Info)\n\n .init();\n\n\n\n run().unwrap();\n\n}\n\n\n", "file_path": "rafx/examples/asset_triangle/asset_triangle.rs", "rank": 78, "score": 80208.24812417405 }, { "content": "fn add_point_light(\n\n _resources: &Resources,\n\n world: &mut World,\n\n position: glam::Vec3,\n\n light_component: PointLightComponent,\n\n) {\n\n let position_component = PositionComponent { position };\n\n\n\n world.extend(vec![(position_component, light_component)]);\n\n}\n\n\n", "file_path": "demo/src/scenes/mod.rs", "rank": 79, "score": 80208.24812417405 }, { "content": "fn verify_layout(\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n type_name: &str,\n\n block: &spirv_reflect::types::ReflectBlockVariable,\n\n layout: MemoryLayout,\n\n) -> Result<(), String> {\n\n //println!(\"{:?}\", block);\n\n if !type_name.is_empty() {\n\n // println!(\n\n // \"check type {}\",\n\n // block.type_description.as_ref().unwrap().type_name\n\n // );\n\n\n\n let array_sizes: Vec<usize> = block.array.dims.iter().map(|x| *x as usize).collect();\n\n\n\n let size = determine_size(\n\n builtin_types,\n\n user_types,\n\n type_name,\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 80, "score": 80208.24812417405 }, { "content": "fn add_spot_light(\n\n _resources: &Resources,\n\n world: &mut World,\n\n position: glam::Vec3,\n\n light_component: SpotLightComponent,\n\n) {\n\n let position_component = PositionComponent { position };\n\n\n\n world.extend(vec![(position_component, light_component)]);\n\n}\n\n\n", "file_path": "demo/src/scenes/mod.rs", "rank": 81, "score": 80208.24812417405 }, { "content": "fn determine_alignment(\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n query_type: &str,\n\n array_sizes: &[usize],\n\n layout: MemoryLayout,\n\n) -> Result<usize, String> {\n\n match layout {\n\n MemoryLayout::Std140 => {\n\n determine_alignment_std140(builtin_types, user_types, query_type, array_sizes)\n\n }\n\n MemoryLayout::Std430 => {\n\n determine_alignment_std430(builtin_types, user_types, query_type, array_sizes)\n\n }\n\n MemoryLayout::C => {\n\n determine_alignment_c(builtin_types, user_types, query_type, array_sizes)\n\n }\n\n }\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 82, "score": 80208.24812417405 }, { "content": "fn align_offset(\n\n offset: usize,\n\n alignment: usize,\n\n) -> usize {\n\n (offset + alignment - 1) / alignment * alignment\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 83, "score": 80208.24812417405 }, { "content": "fn rust_tests(\n\n rust_code: &mut Vec<String>,\n\n structs: &[GenerateStructResult],\n\n) {\n\n if !structs.is_empty() {\n\n rust_code.push(\"#[cfg(test)]\\nmod test {\\n use super::*;\\n\".to_string());\n\n for s in structs {\n\n rust_code.push(s.generate_struct_test_code());\n\n }\n\n rust_code.push(\"}\\n\".to_string());\n\n }\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 84, "score": 
80208.24812417405 }, { "content": "fn determine_alignment_c(\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n query_type: &str,\n\n _array_sizes: &[usize],\n\n) -> Result<usize, String> {\n\n if let Some(builtin_type) = builtin_types.get(query_type) {\n\n //Ok(next_power_of_2(builtin_type.size))\n\n Ok(builtin_type.align)\n\n } else if let Some(user_type) = user_types.get(query_type) {\n\n let mut alignment = 1;\n\n for f in &*user_type.fields {\n\n let field_alignment =\n\n determine_alignment_c(builtin_types, user_types, &f.type_name, &f.array_sizes)?;\n\n alignment = alignment.max(field_alignment);\n\n }\n\n\n\n Ok(alignment)\n\n } else {\n\n return Err(format!(\"Could not find type {}. Is this a built in type that needs to be added to create_builtin_type_lookup()?\", query_type));\n\n }\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 85, "score": 80208.24812417405 }, { "content": "fn determine_size(\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n query_type: &str,\n\n array_sizes: &[usize],\n\n mut offset: usize,\n\n logging_offset: usize,\n\n logging_name: &str,\n\n layout: MemoryLayout,\n\n) -> Result<usize, String> {\n\n // We only need to know how many elements we have\n\n let element_count = element_count(array_sizes);\n\n\n\n // Align this type (may be a struct, built-in, etc.\n\n // Caller should probably already align\n\n let alignment =\n\n determine_alignment(builtin_types, user_types, query_type, array_sizes, layout)?;\n\n assert_eq!(offset % alignment, 0);\n\n //offset = align_offset(offset, alignment);\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 86, "score": 80208.24812417405 }, { "content": "fn generate_struct(\n\n builtin_types: &FnvHashMap<String, TypeAlignmentInfo>,\n\n user_types: &FnvHashMap<String, UserType>,\n\n type_name: &str,\n\n user_type: &UserType,\n\n layout: MemoryLayout,\n\n) -> Result<GenerateStructResult, String> {\n\n //println!(\"Generate struct {}\", type_name);\n\n\n\n let mut members = Vec::default();\n\n\n\n let mut pad_var_count = 0;\n\n\n\n let struct_name = get_rust_type_name_non_array(builtin_types, user_types, &type_name, layout)?;\n\n\n\n let mut gpu_offset = 0;\n\n let mut rust_offset = 0;\n\n for f in &*user_type.fields {\n\n //\n\n // Determine the alignment and size of this type using GPU layout\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 87, "score": 80208.24812417405 }, { "content": "fn wrap_in_array(\n\n inner: &str,\n\n array_sizes: &[usize],\n\n) -> String {\n\n let mut wrapped = inner.to_string();\n\n for array_size in array_sizes.iter().rev() {\n\n wrapped = format!(\"[{}; {}]\", wrapped, array_size);\n\n }\n\n\n\n wrapped\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 88, "score": 80208.24812417405 }, { "content": "fn add_directional_light(\n\n _resources: &Resources,\n\n world: &mut World,\n\n light_component: DirectionalLightComponent,\n\n) {\n\n world.extend(vec![(light_component,)]);\n\n}\n\n\n", "file_path": "demo/src/scenes/mod.rs", "rank": 89, "score": 80208.24812417405 }, { "content": "#[profiling::function]\n\nfn update_main_view(\n\n time_state: &TimeState,\n\n viewports_resource: &mut ViewportsResource,\n\n) {\n\n let main_camera_render_phase_mask = RenderPhaseMaskBuilder::default()\n\n .add_render_phase::<OpaqueRenderPhase>()\n\n .add_render_phase::<TransparentRenderPhase>()\n\n 
.add_render_phase::<UiRenderPhase>()\n\n .build();\n\n\n\n const CAMERA_XY_DISTANCE: f32 = 12.0;\n\n const CAMERA_Z: f32 = 6.0;\n\n const CAMERA_ROTATE_SPEED: f32 = -0.10;\n\n const CAMERA_LOOP_OFFSET: f32 = -0.3;\n\n let loop_time = time_state.total_time().as_secs_f32();\n\n let eye = glam::Vec3::new(\n\n CAMERA_XY_DISTANCE * f32::cos(CAMERA_ROTATE_SPEED * loop_time + CAMERA_LOOP_OFFSET),\n\n CAMERA_XY_DISTANCE * f32::sin(CAMERA_ROTATE_SPEED * loop_time + CAMERA_LOOP_OFFSET),\n\n CAMERA_Z,\n\n );\n", "file_path": "demo/src/scenes/mod.rs", "rank": 90, "score": 80208.24812417405 }, { "content": "fn format_member(\n\n name: &str,\n\n ty: &str,\n\n offset: usize,\n\n size: usize,\n\n) -> String {\n\n let mut str = format!(\" pub {}: {}, \", name, ty);\n\n let whitespace = 40_usize.saturating_sub(str.len());\n\n str += \" \".repeat(whitespace).as_str();\n\n str += &format!(\"// +{} (size: {})\\n\", offset, size);\n\n str\n\n}\n\n\n", "file_path": "rafx-shader-processor/src/codegen.rs", "rank": 91, "score": 80208.24812417405 }, { "content": "//\n\n// All scenes implement this and new()\n\n//\n\npub trait TestScene {\n\n fn update(\n\n &mut self,\n\n world: &mut World,\n\n resources: &Resources,\n\n );\n\n}\n\n\n\npub struct SceneManager {\n\n current_scene_index: usize,\n\n current_scene: Option<Box<dyn TestScene>>,\n\n next_scene: Option<usize>,\n\n}\n\n\n\nimpl Default for SceneManager {\n\n fn default() -> Self {\n\n SceneManager {\n\n current_scene: None,\n\n current_scene_index: 0,\n\n next_scene: Some(0),\n", "file_path": "demo/src/scenes/mod.rs", "rank": 92, "score": 79572.73211892968 }, { "content": "#[cfg(feature = \"rafx-vulkan\")]\n\nfn do_generate_mipmaps_vk(\n\n command_buffer: &RafxCommandBufferVulkan,\n\n texture: &RafxTexture,\n\n layer: u32,\n\n mip_level_count: u32,\n\n) -> RafxResult<()> {\n\n log::debug!(\"Generating mipmaps\");\n\n\n\n let texture_def = texture.texture_def();\n\n let vk_texture = texture.vk_texture().unwrap();\n\n\n\n // Walk through each mip level n:\n\n // - put level n+1 into write mode\n\n // - blit from n to n+1\n\n // - put level n+1 into read mode\n\n for dst_level in 1..mip_level_count {\n\n log::trace!(\"Generating mipmap level {}\", dst_level);\n\n let src_level = dst_level - 1;\n\n\n\n //\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 93, "score": 79160.45066366349 }, { "content": "#[cfg(feature = \"rafx-metal\")]\n\nfn generate_mipmaps_metal(\n\n command_buffer: &RafxCommandBufferMetal,\n\n texture: &RafxTexture,\n\n) -> RafxResult<()> {\n\n log::trace!(\"Generating mipmaps\");\n\n command_buffer.end_current_encoders(false)?;\n\n let blit_encoder = command_buffer\n\n .metal_command_buffer()\n\n .unwrap()\n\n .new_blit_command_encoder();\n\n blit_encoder.generate_mipmaps(texture.metal_texture().unwrap().metal_texture());\n\n blit_encoder.end_encoding();\n\n\n\n return Ok(());\n\n}\n\n\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 94, "score": 79160.45066366349 }, { "content": "#[cfg(feature = \"rafx-vulkan\")]\n\nfn generate_mipmaps_vk(\n\n command_buffer: &RafxCommandBufferVulkan,\n\n texture: &RafxTexture,\n\n) -> RafxResult<()> {\n\n let mip_level_count = texture.texture_def().mip_count;\n\n\n\n for layer in 0..texture.texture_def().array_length {\n\n do_generate_mipmaps_vk(command_buffer, texture, layer, mip_level_count)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rafx-api/src/extra/mipmaps.rs", "rank": 95, "score": 79160.45066366349 }, { "content": "fn extract_images_to_import(\n\n doc: &gltf::Document,\n\n 
_buffers: &[GltfBufferData],\n\n images: &[GltfImageData],\n\n image_color_space_assignments: &FnvHashMap<usize, ImageAssetColorSpace>,\n\n) -> Vec<ImageToImport> {\n\n let mut images_to_import = Vec::with_capacity(images.len());\n\n for image in doc.images() {\n\n let image_data = &images[image.index()];\n\n\n\n // Convert it to standard RGBA format\n\n use gltf::image::Format;\n\n use image::buffer::ConvertBuffer;\n\n let converted_image: image::RgbaImage = match image_data.format {\n\n Format::R8 => image::ImageBuffer::<image::Luma<u8>, Vec<u8>>::from_vec(\n\n image_data.width,\n\n image_data.height,\n\n image_data.pixels.clone(),\n\n )\n\n .unwrap()\n", "file_path": "demo/src/assets/gltf/importer.rs", "rank": 96, "score": 79154.5288877344 }, { "content": "fn extract_materials_to_import(\n\n doc: &gltf::Document,\n\n _buffers: &[GltfBufferData],\n\n _images: &[GltfImageData],\n\n image_index_to_handle: &[Handle<ImageAsset>],\n\n) -> Vec<MaterialToImport> {\n\n let mut materials_to_import = Vec::with_capacity(doc.materials().len());\n\n\n\n for material in doc.materials() {\n\n /*\n\n let mut material_data = GltfMaterialData {\n\n base_color_factor: [f32; 4], // default: 1,1,1,1\n\n emissive_factor: [f32; 3],\n\n metallic_factor: f32, //default: 1,\n\n roughness_factor: f32, // default: 1,\n\n normal_texture_scale: f32, // default: 1\n\n occlusion_texture_strength: f32, // default 1\n\n alpha_cutoff: f32, // default 0.5\n\n }\n\n\n", "file_path": "demo/src/assets/gltf/importer.rs", "rank": 97, "score": 79154.5288877344 }, { "content": "//TODO: This feels kind of dumb..\n\nfn convert_to_u16_indices(\n\n read_indices: gltf::mesh::util::ReadIndices\n\n) -> Result<Vec<u16>, std::num::TryFromIntError> {\n\n let indices_u32: Vec<u32> = read_indices.into_u32().collect();\n\n let mut indices_u16: Vec<u16> = Vec::with_capacity(indices_u32.len());\n\n for index in indices_u32 {\n\n indices_u16.push(index.try_into()?);\n\n }\n\n\n\n Ok(indices_u16)\n\n}\n\n\n", "file_path": "demo/src/assets/gltf/importer.rs", "rank": 98, "score": 79154.5288877344 }, { "content": "fn extract_meshes_to_import(\n\n op: &mut ImportOp,\n\n state: &mut GltfImporterStateUnstable,\n\n doc: &gltf::Document,\n\n buffers: &[GltfBufferData],\n\n material_instance_index_to_handle: &[Handle<MaterialInstanceAsset>],\n\n) -> distill::importer::Result<(Vec<MeshToImport>, Vec<BufferToImport>)> {\n\n let mut meshes_to_import = Vec::with_capacity(doc.meshes().len());\n\n let mut buffers_to_import = Vec::with_capacity(doc.meshes().len() * 2);\n\n\n\n for mesh in doc.meshes() {\n\n let mut all_vertices = PushBuffer::new(16384);\n\n let mut all_indices = PushBuffer::new(16384);\n\n\n\n let mut mesh_parts: Vec<MeshPartAssetData> = Vec::with_capacity(mesh.primitives().len());\n\n\n\n //\n\n // Iterate all mesh parts, building a single vertex and index buffer. Each MeshPart will\n\n // hold offsets/lengths to their sections in the vertex/index buffers\n\n //\n", "file_path": "demo/src/assets/gltf/importer.rs", "rank": 99, "score": 79154.5288877344 } ]
Rust
elasticsearch/examples/index_questions_answers/main.rs
yaanhyy/elasticsearch-rs
740c3ebd41b391f954e9cf008209b39d89a75231
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ #[macro_use] extern crate serde_json; use clap::{App, Arg}; #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] use elasticsearch::cert::CertificateValidation; use elasticsearch::{ auth::Credentials, http::transport::{SingleNodeConnectionPool, TransportBuilder}, indices::{ IndicesCreateParts, IndicesDeleteParts, IndicesExistsParts, IndicesPutSettingsParts, }, BulkOperation, BulkParts, Elasticsearch, Error, DEFAULT_ADDRESS, }; use serde_json::Value; use sysinfo::SystemExt; use url::Url; mod stack_overflow; use http::StatusCode; use stack_overflow::*; use std::time::Instant; static POSTS_INDEX: &'static str = "posts"; #[tokio::main] pub async fn main() -> Result<(), Box<dyn std::error::Error>> { let matches = App::new("index_questions_answers") .about( "indexes Stack Overflow questions and answers into Elasticsearch with the Rust client", ) .arg( Arg::with_name("path") .short("p") .long("path") .value_name("PATH") .help("The path to the Posts.xml file containing questions and answers. 
Can be obtained from https://archive.org/download/stackexchange/stackoverflow.com-Posts.7z (large file)") .required(true) .takes_value(true), ) .arg( Arg::with_name("limit") .short("l") .long("limit") .value_name("LIMIT") .help("The number of questions and answers from Posts.xml to index") .required(false) .takes_value(true), ) .arg( Arg::with_name("size") .short("s") .long("size") .value_name("SIZE") .help("The number of documents in each bulk request") .required(false) .takes_value(true), ) .arg( Arg::with_name("delete") .short("d") .long("delete") .help("Whether to delete the index before indexing") .required(false) .takes_value(false), ) .get_matches(); let path = matches.value_of("path").expect("missing 'path' argument"); let limit = match matches.value_of("limit") { Some(l) => Some(l.parse::<usize>()?), _ => None, }; let size = match matches.value_of("size") { Some(l) => l.parse::<usize>()?, _ => 1000, }; let delete = matches.is_present("delete"); let client = create_client()?; create_index_if_not_exists(&client, delete).await?; set_refresh_interval(&client, json!("-1")).await?; let mut posts_iter = PostsIter::new(path); let mut total = 0; let mut posts = Vec::with_capacity(size); let now = Instant::now(); while let Some(post) = posts_iter.next() { total += 1; posts.push(post); if total % size == 0 { index_posts(&client, &posts).await?; let duration = now.elapsed(); let secs = duration.as_secs_f64(); let taken = if secs >= 60f64 { format!("{}m", secs / 60f64) } else { format!("{:?}", duration) }; println!("Indexed total {} posts in {}", total, taken); posts.clear(); } if let Some(l) = limit { if total >= l { break; } } } if !posts.is_empty() { index_posts(&client, &posts).await?; posts.clear(); } set_refresh_interval(&client, json!(null)).await?; Ok(()) } async fn set_refresh_interval(client: &Elasticsearch, interval: Value) -> Result<(), Error> { let response = client .indices() .put_settings(IndicesPutSettingsParts::Index(&[POSTS_INDEX])) .body(json!({ "index" : { "refresh_interval" : interval } })) .send() .await?; if !response.status_code().is_success() { println!("Failed to update refresh interval"); } Ok(()) } async fn index_posts(client: &Elasticsearch, posts: &[Post]) -> Result<(), Error> { let body: Vec<BulkOperation<_>> = posts .iter() .map(|p| { let id = p.id().to_string(); BulkOperation::index(p).id(&id).routing(&id).into() }) .collect(); let response = client .bulk(BulkParts::Index(POSTS_INDEX)) .body(body) .send() .await?; let json: Value = response.json().await?; if json["errors"].as_bool().unwrap() { let failed: Vec<&Value> = json["items"] .as_array() .unwrap() .iter() .filter(|v| !v["error"].is_null()) .collect(); println!("Errors whilst indexing. 
Failures: {}", failed.len()); } Ok(()) } async fn create_index_if_not_exists(client: &Elasticsearch, delete: bool) -> Result<(), Error> { let exists = client .indices() .exists(IndicesExistsParts::Index(&[POSTS_INDEX])) .send() .await?; if exists.status_code().is_success() && delete { let delete = client .indices() .delete(IndicesDeleteParts::Index(&[POSTS_INDEX])) .send() .await?; if !delete.status_code().is_success() { println!("Problem deleting index: {}", delete.text().await?); } } if exists.status_code() == StatusCode::NOT_FOUND || delete { let response = client .indices() .create(IndicesCreateParts::Index(POSTS_INDEX)) .body(json!( { "mappings": { "properties": { "type": { "type": "keyword" }, "id": { "type": "integer" }, "parent_id": { "relations": { "question": "answer" }, "type": "join" }, "creation_date": { "type": "date" }, "score": { "type": "integer" }, "body": { "analyzer": "html", "search_analyzer": "expand", "type": "text" }, "owner_user_id": { "type": "integer" }, "owner_display_name": { "type": "keyword" }, "last_editor_user_id": { "type": "integer" }, "last_edit_date": { "type": "date" }, "last_activity_date": { "type": "date" }, "comment_count": { "type": "integer" }, "title": { "analyzer": "expand", "norms": false, "fields": { "raw": { "type": "keyword" } }, "type": "text" }, "title_suggest": { "type": "completion" }, "accepted_answer_id": { "type": "integer" }, "view_count": { "type": "integer" }, "last_editor_display_name": { "type": "keyword" }, "tags": { "type": "keyword" }, "answer_count": { "type": "integer" }, "favorite_count": { "type": "integer" }, "community_owned_date": { "type": "date" } }, "_routing": { "required": true }, "_source": { "excludes": ["title_suggest"] } }, "settings": { "index.number_of_shards": 3, "index.number_of_replicas": 0, "analysis": { "analyzer": { "html": { "char_filter": ["html_strip", "programming_language"], "filter": ["lowercase", "stop"], "tokenizer": "standard", "type": "custom" }, "expand": { "char_filter": ["programming_language"], "filter": ["lowercase", "stop"], "tokenizer": "standard", "type": "custom" } }, "char_filter": { "programming_language": { "mappings": [ "c# => csharp", "C# => csharp", "f# => fsharp", "F# => fsharp", "m# => msharp", "M# => msharp", "j# => jsharp", "J# => jsharp", "s# => ssharp", "S# => ssharp", "a# => asharp", "A# => asharp", "k# => ksharp", "K# => ksharp", "t# => tsharp", "T# => tsharp", "g++ => gplusplus", "G++ => gplusplus", "m++ => mplusplus", "M++ => mplusplus", "c++ => cplusplus", "C++ => cplusplus", "s++ => splusplus", "S++ => splusplus", "a++ => aplusplus", "A++ => aplusplus", "d++ => dplusplus", "D++ => dplusplus" ], "type": "mapping" } } } } } )) .send() .await?; if !response.status_code().is_success() { println!("Error while creating index"); } } Ok(()) } fn create_client() -> Result<Elasticsearch, Error> { fn cluster_addr() -> String { match std::env::var("ELASTICSEARCH_URL") { Ok(server) => server, Err(_) => DEFAULT_ADDRESS.into(), } } fn running_proxy() -> bool { let system = sysinfo::System::new(); !system.get_process_by_name("Fiddler").is_empty() } let mut url = Url::parse(cluster_addr().as_ref()).unwrap(); let credentials = if url.scheme() == "https" { let username = if !url.username().is_empty() { let u = url.username().to_string(); url.set_username("").unwrap(); u } else { std::env::var("ES_USERNAME").unwrap_or_else(|_| "elastic".into()) }; let password = match url.password() { Some(p) => { let pass = p.to_string(); url.set_password(None).unwrap(); pass } None => 
std::env::var("ES_PASSWORD").unwrap_or_else(|_| "changeme".into()), }; Some(Credentials::Basic(username, password)) } else { None }; let conn_pool = SingleNodeConnectionPool::new(url); let mut builder = TransportBuilder::new(conn_pool); builder = match credentials { Some(c) => { builder = builder.auth(c); #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] { builder = builder.cert_validation(CertificateValidation::None); } builder } None => builder, }; if running_proxy() { let proxy_url = Url::parse("http://localhost:8888").unwrap(); builder = builder.proxy(proxy_url, None, None); } let transport = builder.build()?; Ok(Elasticsearch::new(transport)) }
/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch B.V. licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ #[macro_use] extern crate serde_json; use clap::{App, Arg}; #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] use elasticsearch::cert::CertificateValidation; use elasticsearch::{ auth::Credentials, http::transport::{SingleNodeConnectionPool, TransportBuilder}, indices::{ IndicesCreateParts, IndicesDeleteParts, IndicesExistsParts, IndicesPutSettingsParts, }, BulkOperation, BulkParts, Elasticsearch, Error, DEFAULT_ADDRESS, }; use serde_json::Value; use sysinfo::SystemExt; use url::Url; mod stack_overflow; use http::StatusCode; use stack_overflow::*; use std::time::Instant; static POSTS_INDEX: &'static str = "posts"; #[tokio::main] pub async fn main() -> Result<(), Box<dyn std::error::Error>> { let matches = App::new("index_questions_answers") .about( "indexes Stack Overflow questions and answers into Elasticsearch with the Rust client", ) .arg( Arg::with_name("path") .short("p") .long("path") .value_name("PATH") .help("The path to the Posts.xml file containing questions and answers. 
Can be obtained from https://archive.org/download/stackexchange/stackoverflow.com-Posts.7z (large file)") .required(true) .takes_value(true), ) .arg( Arg::with_name("limit") .short("l") .long("limit") .value_name("LIMIT") .help("The number of questions and answers from Posts.xml to index") .required(false) .takes_value(true), ) .arg( Arg::with_name("size") .short("s") .long("size") .value_name("SIZE") .help("The number of documents in each bulk request") .required(false) .takes_value(true), ) .arg( Arg::with_name("delete") .short("d") .long("delete") .help("Whether to delete the index before indexing") .required(false) .takes_value(false), ) .get_matches(); let path = matches.value_of("path").expect("missing 'path' argument"); let limit = match matches.value_of("limit") { Some(l) => Some(l.parse::<usize>()?), _ => None, }; let size = match matches.value_of("size") { Some(l) => l.parse::<usize>()?, _ => 1000, }; let delete = matches.is_present("delete"); let client = create_client()?; create_index_if_not_exists(&client, delete).await?; set_refresh_interval(&client, json!("-1")).await?; let mut posts_iter = PostsIter::new(path); let mut total = 0; let mut posts = Vec::with_capacity(size); let now = Instant::now(); while let Some(post) = posts_iter.next() { total += 1; posts.push(post); if total % size == 0 { index_posts(&client, &posts).await?; let duration = now.elapsed(); let secs = duration.as_secs_f64(); let taken = if secs >= 60f64 { format!("{}m", secs / 60f64) } else { format!("{:?}", duration) }; println!("Indexed total {} posts in {}", total, taken); posts.clear(); } if let Some(l) = limit { if total >= l { break; } } } if !posts.is_empty() { index_posts(&client, &posts).await?; posts.clear(); } set_refresh_interval(&client, json!(null)).await?; Ok(()) } async fn set_refresh_interval(client: &Elasticsearch, interval: Value) -> Result<(), Error> { let response = client .indices() .put_settings(IndicesPutSettingsParts::Index(&[POSTS_INDEX])) .body(json!({ "index" : { "refresh_interval" : interval } })) .send() .await?; if !response.status_code().is_success() { println!("Failed to update refresh interval"); } Ok(()) } async fn index_posts(client: &Elasticsearch, posts: &[Post]) -> Result<(), Error> { let body: Vec<BulkOperation<_>> = posts .iter() .map(|p| { let id = p.id().to_string(); BulkOperation::index(p).id(&id).routing(&id).into() }) .collect(); let response = client .bulk(BulkParts::Index(POSTS_INDEX)) .body(body) .send() .await?; let json: Value = response.json().await?; if json["errors"].as_bool().unwrap() { let failed: Vec<&Value> = json["items"] .as_array() .unwrap() .iter() .filter(|v| !v["error"].is_null()) .collect(); println!("Errors whilst indexing. 
Failures: {}", failed.len()); } Ok(()) } async fn create_index_if_not_exists(client: &Elasticsearch, delete: bool) -> Result<(), Error> { let exists = client .indices() .exists(IndicesExistsParts::Index(&[POSTS_INDEX])) .send() .await?; if exists.status_code().is_success() && delete { let delete = client .indices() .delete(IndicesDeleteParts::Index(&[POSTS_INDEX])) .send() .await?; if !delete.status_code().is_success() { println!("Problem deleting index: {}", delete.text().await?); } } if exists.status_code() == StatusCode::NOT_FOUND || delete { let response = client .indices() .create(IndicesCreateParts::Index(POSTS_INDEX)) .body(json!( { "mappings": { "properties": { "type": { "type": "keyword" }, "id": { "type": "integer" }, "parent_id": { "relations": { "question": "answer" }, "type": "join" }, "creation_date": { "type": "date" }, "score": { "type": "integer" }, "body": { "analyzer": "html", "search_analyzer": "expand", "type": "text" }, "owner_user_id": { "type": "integer" }, "owner_display_name": { "type": "keyword" }, "last_editor_user_id": { "type": "integer" }, "last_edit_date": { "type": "date" }, "last_activity_date": { "type": "date" }, "comment_count": { "type": "integer" }, "title": { "analyzer": "expand", "norms": false, "fields": { "raw": { "type": "keyword" } }, "type": "text" }, "title_suggest": { "type": "completion" }, "accepted_answer_id": { "type": "integer" }, "view_count": { "type": "integer" }, "last_editor_display_name": { "type": "keyword" }, "tags": { "type": "keyword" }, "answer_count": { "type": "integer" }, "favorite_count": { "type": "integer" }, "community_owned_date": { "type": "date" } }, "_routing": { "required": true }, "_source": { "excludes": ["title_suggest"] } }, "settings": { "index.number_of_shards": 3, "index.number_of_replicas": 0, "analysis": { "analyzer": { "html": { "char_filter": ["html_strip", "programming_language"], "filter": ["lowercase", "stop"], "tokenizer": "standard", "type": "custom" }, "expand": { "char_filter": ["programming_language"], "filter": ["lowercase", "stop"], "tokenizer": "standard", "type": "custom" } }, "char_filter": { "programming_language": { "mappings": [ "c# => csharp", "C# => csharp", "f# => fsharp", "F# => fsharp", "m# => msharp", "M# => msharp", "j# => jsharp", "J# => jsharp", "s# => ssharp", "S# => ssharp", "a# => asharp", "A# => asharp", "k# => ksharp", "K# => ksharp", "t# => tsharp", "T# => tsharp", "g++ => gplusplus", "G++ => gplusplus", "m++ => mplusplus", "M++ => mplusplus", "c++ => cplusplus", "C++ => cplusplus", "s++ => splusplus", "S++ => splusplus", "a++ => aplusplus", "A++ => aplusplus", "d++ => dplusplus", "D++ => dplusplus" ], "type": "mapping" } } } } } )) .send() .await?; if !response.status_code().is_success() { println!("Error while creating index"); } } Ok(()) } fn create_client() -> Result<Elasticsearch, Error> { fn cluster_addr() -> String { match s
fn running_proxy() -> bool { let system = sysinfo::System::new(); !system.get_process_by_name("Fiddler").is_empty() } let mut url = Url::parse(cluster_addr().as_ref()).unwrap(); let credentials = if url.scheme() == "https" { let username = if !url.username().is_empty() { let u = url.username().to_string(); url.set_username("").unwrap(); u } else { std::env::var("ES_USERNAME").unwrap_or_else(|_| "elastic".into()) }; let password = match url.password() { Some(p) => { let pass = p.to_string(); url.set_password(None).unwrap(); pass } None => std::env::var("ES_PASSWORD").unwrap_or_else(|_| "changeme".into()), }; Some(Credentials::Basic(username, password)) } else { None }; let conn_pool = SingleNodeConnectionPool::new(url); let mut builder = TransportBuilder::new(conn_pool); builder = match credentials { Some(c) => { builder = builder.auth(c); #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] { builder = builder.cert_validation(CertificateValidation::None); } builder } None => builder, }; if running_proxy() { let proxy_url = Url::parse("http://localhost:8888").unwrap(); builder = builder.proxy(proxy_url, None, None); } let transport = builder.build()?; Ok(Elasticsearch::new(transport)) }
td::env::var("ELASTICSEARCH_URL") { Ok(server) => server, Err(_) => DEFAULT_ADDRESS.into(), } }
function_block-function_prefixed
[ { "content": "fn create_client() -> Result<Elasticsearch, Error> {\n\n fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n }\n\n\n\n /// Determines if Fiddler.exe proxy process is running\n\n fn running_proxy() -> bool {\n\n let system = sysinfo::System::new();\n\n !system.get_process_by_name(\"Fiddler\").is_empty()\n\n }\n\n\n\n let mut url = Url::parse(cluster_addr().as_ref()).unwrap();\n\n\n\n // if the url is https and specifies a username and password, remove from the url and set credentials\n\n let credentials = if url.scheme() == \"https\" {\n\n let username = if !url.username().is_empty() {\n\n let u = url.username().to_string();\n", "file_path": "elasticsearch/examples/search_questions_answers/main.rs", "rank": 1, "score": 440552.45734545315 }, { "content": "fn create_client() -> Result<Elasticsearch, Error> {\n\n fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n }\n\n\n\n /// Determines if Fiddler.exe proxy process is running\n\n fn running_proxy() -> bool {\n\n let system = sysinfo::System::new();\n\n !system.get_process_by_name(\"Fiddler\").is_empty()\n\n }\n\n\n\n let mut url = Url::parse(cluster_addr().as_ref()).unwrap();\n\n\n\n // if the url is https and specifies a username and password, remove from the url and set credentials\n\n let credentials = if url.scheme() == \"https\" {\n\n let username = if !url.username().is_empty() {\n\n let u = url.username().to_string();\n", "file_path": "elasticsearch/examples/search_questions/main.rs", "rank": 2, "score": 406736.6636643114 }, { "content": "/// Writes a mod.rs file in each generated directory\n\nfn write_mod_files(generated_dir: &PathBuf, toplevel: bool) -> Result<(), failure::Error> {\n\n if !generated_dir.exists() {\n\n fs::create_dir(generated_dir)?;\n\n }\n\n\n\n let paths = fs::read_dir(generated_dir)?;\n\n let mut mods = vec![];\n\n for path in paths {\n\n if let Ok(entry) = path {\n\n let path = entry.path();\n\n let name = path.file_stem().unwrap().to_string_lossy();\n\n\n\n if name != \"mod\" {\n\n mods.push(format!(\n\n \"pub mod {};\",\n\n path.file_stem().unwrap().to_string_lossy()\n\n ));\n\n }\n\n\n\n if path.is_dir() && !(toplevel && name == \"common\") {\n", "file_path": "yaml_test_runner/src/generator.rs", "rank": 3, "score": 370700.3928970016 }, { "content": "fn create_client() -> Result<Elasticsearch, Error> {\n\n fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n }\n\n\n\n /// Determines if Fiddler.exe proxy process is running\n\n fn running_proxy() -> bool {\n\n let system = sysinfo::System::new();\n\n !system.get_process_by_name(\"Fiddler\").is_empty()\n\n }\n\n\n\n let mut url = Url::parse(cluster_addr().as_ref()).unwrap();\n\n\n\n // if the url is https and specifies a username and password, remove from the url and set credentials\n\n let credentials = if url.scheme() == \"https\" {\n\n let username = if !url.username().is_empty() {\n\n let u = url.username().to_string();\n", "file_path": "elasticsearch/examples/cat_indices.rs", "rank": 4, "score": 370437.83815405075 }, { "content": "/// Gets the Ty syntax token for a TypeKind\n\n/// TODO: This function is serving too many purposes. 
Refactor it\n\nfn typekind_to_ty(name: &str, kind: &TypeKind, required: bool, fn_arg: bool) -> syn::Ty {\n\n let mut v = String::new();\n\n if !required {\n\n v.push_str(\"Option<\");\n\n }\n\n\n\n let str_type = \"&'b str\";\n\n match kind {\n\n TypeKind::Unknown(_) => v.push_str(str_type),\n\n TypeKind::List => {\n\n v.push_str(\"&'b [\");\n\n v.push_str(str_type);\n\n v.push_str(\"]\");\n\n }\n\n TypeKind::Enum => match name {\n\n // opened https://github.com/elastic/elasticsearch/issues/53212\n\n // to discuss whether this really should be a collection\n\n \"expand_wildcards\" => {\n\n // Expand wildcards should\n\n v.push_str(\"&'b [\");\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 5, "score": 350426.97606717556 }, { "content": "/// Checks whether there are any Errs in the collection, and accumulates them into one\n\n/// error message if there are.\n\npub fn ok_or_accumulate<T>(results: &[Result<T, failure::Error>]) -> Result<(), failure::Error> {\n\n let errs = results\n\n .iter()\n\n .filter_map(|r| r.as_ref().err())\n\n .collect::<Vec<_>>();\n\n if errs.is_empty() {\n\n Ok(())\n\n } else {\n\n let mut msgs = errs.iter().map(|e| e.to_string()).collect::<Vec<_>>();\n\n msgs.sort();\n\n msgs.dedup_by(|a, b| a == b);\n\n Err(failure::err_msg(msgs.join(\", \")))\n\n }\n\n}\n\n\n", "file_path": "yaml_test_runner/src/step/mod.rs", "rank": 6, "score": 344835.90238133405 }, { "content": "pub fn download_specs(branch: &str, download_dir: &PathBuf) -> Result<(), failure::Error> {\n\n let url = format!(\n\n \"https://api.github.com/repos/elastic/elasticsearch/tarball/{}\",\n\n branch\n\n );\n\n\n\n let mut headers = HeaderMap::new();\n\n headers.append(\n\n USER_AGENT,\n\n HeaderValue::from_str(&format!(\"elasticsearch-rs/{}\", env!(\"CARGO_PKG_NAME\")))?,\n\n );\n\n let client = reqwest::ClientBuilder::new()\n\n .default_headers(headers)\n\n .build()\n\n .unwrap();\n\n\n\n let response = client.get(&url).send()?;\n\n let tar = GzDecoder::new(response);\n\n let mut archive = Archive::new(tar);\n\n\n", "file_path": "api_generator/src/rest_spec/mod.rs", "rank": 7, "score": 334000.5246676881 }, { "content": "fn test_file_path(relative_path: &Path) -> Result<PathBuf, failure::Error> {\n\n let mut relative = relative_path.to_path_buf();\n\n relative.set_extension(\"\");\n\n // directories and files will form the module names so ensure they're valid module names\n\n let clean: String = relative\n\n .to_string_lossy()\n\n .replace(\".\", \"_\")\n\n .replace(\"-\", \"_\");\n\n\n\n relative = PathBuf::from(clean);\n\n\n\n let file_name = relative.file_name().unwrap().to_string_lossy().into_owned();\n\n // modules can't start with a number so prefix with underscore\n\n if file_name.starts_with(char::is_numeric) {\n\n relative.set_file_name(format!(\"_{}\", file_name));\n\n }\n\n\n\n Ok(relative)\n\n}\n\n\n", "file_path": "yaml_test_runner/src/generator.rs", "rank": 8, "score": 333157.9620120891 }, { "content": "/// Reads Api from a directory of REST Api specs\n\npub fn read_api(branch: &str, download_dir: &PathBuf) -> Result<Api, failure::Error> {\n\n let paths = fs::read_dir(download_dir)?;\n\n let mut namespaces = BTreeMap::new();\n\n let mut enums: HashSet<ApiEnum> = HashSet::new();\n\n let mut common_params = BTreeMap::new();\n\n let root_key = \"root\";\n\n\n\n for path in paths {\n\n let path = path?.path();\n\n let name = path.file_name().map(|path| path.to_str());\n\n let display = path.to_string_lossy().into_owned();\n\n\n\n if name\n\n .unwrap()\n\n .map(|name| 
name.ends_with(\".json\") && !name.starts_with('_'))\n\n .unwrap_or(true)\n\n {\n\n let mut file = File::open(&path)?;\n\n let (name, api_endpoint) = endpoint_from_file(display, &mut file)?;\n\n\n", "file_path": "api_generator/src/generator/mod.rs", "rank": 9, "score": 330503.7239726186 }, { "content": "/// deserializes Common from a file\n\nfn common_params_from_file<R>(name: String, reader: &mut R) -> Result<Common, failure::Error>\n\nwhere\n\n R: Read,\n\n{\n\n let common: Common = serde_json::from_reader(reader).map_err(|e| super::error::ParseError {\n\n message: format!(\"Failed to parse {} because: {}\", name, e),\n\n })?;\n\n\n\n Ok(common)\n\n}\n\n\n\n/// Asserts that the expected generated AST matches the actual generated AST\n", "file_path": "api_generator/src/generator/mod.rs", "rank": 10, "score": 327762.53959692275 }, { "content": "/// Generates the source code for a namespaced client\n\npub fn generate(api: &Api, docs_dir: &PathBuf) -> Result<Vec<(String, String)>, failure::Error> {\n\n let mut output = Vec::new();\n\n\n\n for (namespace, namespace_methods) in &api.namespaces {\n\n let mut tokens = Tokens::new();\n\n tokens.append(use_declarations());\n\n\n\n let namespace_pascal_case = namespace.to_pascal_case();\n\n let namespace_replaced_pascal_case = namespace.replace(\"_\", \" \").to_pascal_case();\n\n let namespace_client_name = ident(&namespace_pascal_case);\n\n let name_for_docs = match namespace_replaced_pascal_case.as_ref() {\n\n \"Ccr\" => \"Cross Cluster Replication\",\n\n \"Ilm\" => \"Index Lifecycle Management\",\n\n \"Slm\" => \"Snapshot Lifecycle Management\",\n\n \"Ml\" => \"Machine Learning\",\n\n \"Xpack\" => \"X-Pack\",\n\n name => name,\n\n };\n\n\n\n let namespace_doc = doc(format!(\"Namespace client for {} APIs\", &name_for_docs));\n", "file_path": "api_generator/src/generator/code_gen/namespace_clients.rs", "rank": 11, "score": 322920.3440988895 }, { "content": "pub fn create_for_url(url: &str) -> Elasticsearch {\n\n let builder = create_builder(url);\n\n create(builder)\n\n}\n\n\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 12, "score": 313057.7605275478 }, { "content": "pub fn create(mut builder: TransportBuilder) -> Elasticsearch {\n\n if running_proxy() {\n\n let proxy_url = Url::parse(\"http://localhost:8888\").unwrap();\n\n builder = builder.proxy(proxy_url, None, None);\n\n }\n\n\n\n let transport = builder.build().unwrap();\n\n Elasticsearch::new(transport)\n\n}\n\n\n\n/// index some documents into a posts index. If the posts index already exists, do nothing.\n\n///\n\n/// As an async fn, this can end up running multiple times concurrently, and indexing documents\n\n/// several times. 
In this instance, this is fine.\n\n///\n\n/// TODO: This is a temporary measure until https://github.com/elastic/elasticsearch-rs/issues/19 is implemented.\n\npub async fn index_documents(client: &Elasticsearch) -> Result<Response, Error> {\n\n let index = \"posts\";\n\n let exists_response = client\n\n .indices()\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 13, "score": 301389.1568755609 }, { "content": "/// Downloads the yaml tests if not already downloaded\n\npub fn download_test_suites(branch: &str, download_dir: &PathBuf) -> Result<(), failure::Error> {\n\n let mut last_downloaded_version = download_dir.clone();\n\n last_downloaded_version.push(\"last_downloaded_version\");\n\n if last_downloaded_version.exists() {\n\n let version = fs::read_to_string(&last_downloaded_version)\n\n .expect(\"Unable to read last_downloaded_version of yaml tests\");\n\n if version == branch {\n\n info!(\"Already downloaded yaml tests from {}\", branch);\n\n return Ok(());\n\n }\n\n }\n\n\n\n info!(\"Downloading yaml tests from {}\", branch);\n\n let url = format!(\n\n \"https://api.github.com/repos/elastic/elasticsearch/tarball/{}\",\n\n branch\n\n );\n\n let mut headers = HeaderMap::new();\n\n headers.append(\n\n USER_AGENT,\n", "file_path": "yaml_test_runner/src/github.rs", "rank": 14, "score": 298880.5591327841 }, { "content": "fn main() -> Result<(), failure::Error> {\n\n simple_logger::SimpleLogger::new()\n\n .with_level(LevelFilter::Info)\n\n .init()\n\n .unwrap();\n\n\n\n let matches = App::new(env!(\"CARGO_PKG_NAME\"))\n\n .about(env!(\"CARGO_PKG_DESCRIPTION\"))\n\n .arg(Arg::with_name(\"url\")\n\n .short(\"u\")\n\n .long(\"url\")\n\n .value_name(\"ELASTICSEARCH_URL\")\n\n .help(\"The url of a running Elasticsearch cluster. Used to determine the version, test suite and branch to use to compile tests\")\n\n .required(true)\n\n .takes_value(true))\n\n .get_matches();\n\n\n\n let url = matches.value_of(\"url\").expect(\"missing 'url' argument\");\n\n let (branch, suite, version) = match branch_suite_and_version_from_elasticsearch(url) {\n\n Ok(v) => v,\n", "file_path": "yaml_test_runner/src/main.rs", "rank": 15, "score": 298011.9731763266 }, { "content": "/// Generates the source code for the methods on the root of Elasticsearch\n\npub fn generate(api: &Api, docs_dir: &PathBuf) -> Result<String, failure::Error> {\n\n let mut tokens = Tokens::new();\n\n tokens.append(use_declarations());\n\n\n\n // AST for builder structs and methods\n\n let (builders, methods): (Vec<Tokens>, Vec<Tokens>) = api\n\n .root\n\n .iter()\n\n .map(|(name, endpoint)| {\n\n let builder_name = name.to_pascal_case();\n\n RequestBuilder::new(\n\n docs_dir,\n\n \"Elasticsearch\",\n\n name,\n\n &builder_name,\n\n &api.common_params,\n\n endpoint,\n\n true,\n\n )\n\n .build()\n", "file_path": "api_generator/src/generator/code_gen/root.rs", "rank": 16, "score": 290172.32201613404 }, { "content": "/// Gets the address to the Elasticsearch instance from environment variables\n\n/// and assumes an instance running locally on the default port otherwise\n\npub fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n}\n\n\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 17, "score": 287860.59471916675 }, { "content": "pub fn create_builder(addr: &str) -> TransportBuilder {\n\n let url = Url::parse(addr).unwrap();\n\n let conn_pool = SingleNodeConnectionPool::new(url.clone());\n\n let mut builder = 
TransportBuilder::new(conn_pool);\n\n // assume if we're running with HTTPS then authentication is also enabled and disable\n\n // certificate validation - we'll change this for tests that need to.\n\n if url.scheme() == \"https\" {\n\n builder = builder.auth(Credentials::Basic(\"elastic\".into(), \"changeme\".into()));\n\n\n\n #[cfg(any(feature = \"native-tls\", feature = \"rustls-tls\"))]\n\n {\n\n builder = builder.cert_validation(CertificateValidation::None);\n\n }\n\n }\n\n\n\n builder\n\n}\n\n\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 18, "score": 282966.1147609899 }, { "content": "pub fn generate(api: &Api) -> Result<String, failure::Error> {\n\n let mut tokens = quote!(\n\n use serde::{Serialize, Deserialize};\n\n );\n\n for e in &api.enums {\n\n generate_param(&mut tokens, &e);\n\n }\n\n\n\n let generated = tokens.to_string();\n\n Ok(generated)\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/params.rs", "rank": 19, "score": 281823.3060551377 }, { "content": "pub fn create_default() -> Elasticsearch {\n\n create_for_url(cluster_addr().as_str())\n\n}\n\n\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 20, "score": 278823.9301261081 }, { "content": "/// Wraps the URL string to replace master or current in URL path with the\n\n/// major.minor version of the api_generator.\n\nfn documentation_url_string<'de, D>(deserializer: D) -> Result<String, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n Ok(DocumentationUrlString::replace_version_in_url(s))\n\n}\n\n\n\n/// A Documentation URL string\n\n#[derive(Debug, Deserialize, PartialEq, Clone)]\n\npub struct DocumentationUrlString(\n\n #[serde(deserialize_with = \"documentation_url_string\")] pub String,\n\n);\n\n\n\nimpl DocumentationUrlString {\n\n fn from_url(s: String) -> Self {\n\n let s = Self::replace_version_in_url(s);\n\n Self(s)\n\n }\n\n\n", "file_path": "api_generator/src/generator/mod.rs", "rank": 21, "score": 276170.5282890881 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. 
See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\nuse chrono::{prelude::*, DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 22, "score": 268168.10737827135 }, { "content": "use serde_json::Value;\n\nuse std::{collections::BTreeMap, fs::File, io::Read};\n\nuse xml::{reader::XmlEvent, EventReader};\n\n\n\n/// A Stack Overflow post\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(tag = \"type\")]\n\npub enum Post {\n\n Question(Question),\n\n Answer(Answer),\n\n}\n\n\n\nimpl Post {\n\n pub fn id(&self) -> i32 {\n\n match self {\n\n Post::Question(q) => q.id,\n\n Post::Answer(a) => a.id,\n\n }\n\n }\n\n}\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 23, "score": 268070.4156658982 }, { "content": "\n\n/// A Stack Overflow question\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Question {\n\n pub id: i32,\n\n pub parent_id: Value,\n\n pub creation_date: DateTime<Utc>,\n\n pub score: i32,\n\n pub body: String,\n\n pub owner_user_id: Option<i32>,\n\n pub owner_display_name: Option<String>,\n\n pub last_editor_user_id: Option<i32>,\n\n pub last_edit_date: Option<DateTime<Utc>>,\n\n pub last_activity_date: Option<DateTime<Utc>>,\n\n pub comment_count: i32,\n\n pub tags: Vec<String>,\n\n pub title: String,\n\n pub title_suggest: Option<Value>,\n\n pub accepted_answer_id: Option<i32>,\n\n pub view_count: i32,\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 24, "score": 268055.74316315266 }, { "content": " pub last_editor_display_name: Option<String>,\n\n pub answer_count: i32,\n\n pub favorite_count: i32,\n\n pub community_owned_date: Option<DateTime<Utc>>,\n\n}\n\n\n\nimpl From<Question> for Post {\n\n fn from(q: Question) -> Self {\n\n Post::Question(q)\n\n }\n\n}\n\n\n\n/// A Stack Overflow answer\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Answer {\n\n pub id: i32,\n\n pub parent_id: Value,\n\n pub creation_date: DateTime<Utc>,\n\n pub score: i32,\n\n pub body: String,\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 25, "score": 268055.51773972966 }, { "content": "\n\n let id = a[\"Id\"].parse::<i32>().unwrap();\n\n let post_type_id = a[\"PostTypeId\"].parse::<i32>().unwrap();\n\n let score = a[\"Score\"].parse::<i32>().unwrap();\n\n let body = a[\"Body\"].clone();\n\n let creation_date = parse_datetime_utc(a[\"CreationDate\"].as_str());\n\n let comment_count = a[\"CommentCount\"].parse::<i32>().unwrap();\n\n let owner_user_id = if a.contains_key(\"OwnerUserId\") {\n\n a[\"OwnerUserId\"].parse::<i32>().ok()\n\n } else {\n\n None\n\n };\n\n\n\n let owner_display_name = a.get(\"OwnerDisplayName\").map(|s| s.clone());\n\n let last_editor_user_id = if a.contains_key(\"LastEditorUserId\") {\n\n a[\"LastEditorUserId\"].parse::<i32>().ok()\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 26, "score": 268054.9868789925 }, { "content": " let last_edit_date = if a.contains_key(\"LastEditDate\") {\n\n Some(parse_datetime_utc(a[\"LastEditDate\"].as_str()))\n\n } else {\n\n None\n\n };\n\n\n\n let last_activity_date = if a.contains_key(\"LastActivityDate\") {\n\n Some(parse_datetime_utc(a[\"LastActivityDate\"].as_str()))\n\n } else {\n\n None\n\n };\n\n\n\n let post: 
Post = if post_type_id == 1 {\n\n let title = a[\"Title\"].clone();\n\n let title_suggest = {\n\n let weight = if score < 0 { 0 } else { score };\n\n json!({\n\n \"input\": [title],\n\n \"weight\": weight\n\n })\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 27, "score": 268047.96681332856 }, { "content": " pub owner_user_id: Option<i32>,\n\n pub owner_display_name: Option<String>,\n\n pub last_editor_user_id: Option<i32>,\n\n pub last_edit_date: Option<DateTime<Utc>>,\n\n pub last_activity_date: Option<DateTime<Utc>>,\n\n pub comment_count: i32,\n\n}\n\n\n\nimpl From<Answer> for Post {\n\n fn from(a: Answer) -> Self {\n\n Post::Answer(a)\n\n }\n\n}\n\n\n\npub struct PostsIter {\n\n reader: EventReader<File>,\n\n finished: bool,\n\n}\n\n\n\nimpl PostsIter {\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 28, "score": 268041.3513122803 }, { "content": " .map(|s| s.to_string())\n\n .collect()\n\n })\n\n .unwrap_or_else(|| vec![]),\n\n title,\n\n title_suggest: Some(title_suggest),\n\n accepted_answer_id: None,\n\n view_count: a[\"ViewCount\"].parse::<i32>().unwrap(),\n\n last_editor_display_name: a\n\n .get(\"LastEditorDisplayName\")\n\n .map(|s| s.clone()),\n\n answer_count: a[\"AnswerCount\"].parse::<i32>().unwrap(),\n\n favorite_count: a\n\n .get(\"FavoriteCount\")\n\n .map(|s| s.parse::<i32>().unwrap())\n\n .unwrap_or_else(|| 0),\n\n community_owned_date: a\n\n .get(\"CommunityOwnedDate\")\n\n .map(|s| parse_datetime_utc(s)),\n\n }\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 29, "score": 268039.96366174426 }, { "content": " fn next(&mut self) -> Option<Post> {\n\n if self.finished {\n\n return None;\n\n }\n\n\n\n fn parse_datetime_utc<S: AsRef<str>>(s: S) -> DateTime<Utc> {\n\n Utc.datetime_from_str(s.as_ref(), \"%Y-%m-%dT%H:%M:%S.%f\")\n\n .unwrap()\n\n }\n\n\n\n let post = match self.reader.next() {\n\n Ok(e) => match e {\n\n XmlEvent::StartElement {\n\n name, attributes, ..\n\n } => match name.local_name.as_ref() {\n\n \"row\" => {\n\n let mut a = BTreeMap::new();\n\n for attribute in attributes {\n\n a.insert(attribute.name.local_name, attribute.value);\n\n }\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 30, "score": 268039.85641843383 }, { "content": " };\n\n\n\n Question {\n\n id,\n\n parent_id: json!(\"question\"),\n\n creation_date,\n\n score,\n\n body,\n\n owner_user_id,\n\n owner_display_name,\n\n last_editor_user_id,\n\n last_edit_date,\n\n last_activity_date,\n\n comment_count,\n\n tags: a\n\n .get(\"Tags\")\n\n .map(|t| {\n\n t.replace(\">\", \"\")\n\n .split('<')\n\n .filter(|s| !s.is_empty())\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 31, "score": 268036.5255271256 }, { "content": " fn open_xml(path: &str) -> File {\n\n let mut xml = File::open(path).unwrap();\n\n // skip the BOM as the xml library doesn't handle this.\n\n let mut bom = [0; 3];\n\n xml.read_exact(&mut bom).unwrap();\n\n xml\n\n }\n\n\n\n pub fn new(path: &str) -> Self {\n\n let xml = Self::open_xml(path);\n\n Self {\n\n reader: EventReader::new(xml),\n\n finished: false,\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for PostsIter {\n\n type Item = Post;\n\n\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 32, "score": 268035.8416648805 }, { "content": " .into()\n\n } else {\n\n Answer {\n\n id,\n\n body,\n\n comment_count,\n\n score,\n\n 
creation_date,\n\n last_activity_date,\n\n last_edit_date,\n\n last_editor_user_id,\n\n owner_display_name,\n\n parent_id: json!({\n\n \"parent\": a[\"ParentId\"].clone(),\n\n \"name\": \"answer\"\n\n }),\n\n owner_user_id,\n\n }\n\n .into()\n\n };\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 33, "score": 268025.9489828184 }, { "content": "\n\n Some(post)\n\n }\n\n _ => self.next(),\n\n },\n\n XmlEvent::EndDocument => {\n\n self.finished = true;\n\n None\n\n }\n\n _ => self.next(),\n\n },\n\n Err(e) => {\n\n self.finished = true;\n\n println!(\"{:?}\", e);\n\n None\n\n }\n\n };\n\n\n\n post\n\n }\n\n}\n", "file_path": "elasticsearch/examples/index_questions_answers/stack_overflow.rs", "rank": 34, "score": 268015.9098077657 }, { "content": "/// Gets the client to use in tests\n\npub fn get() -> &'static Elasticsearch {\n\n GLOBAL_CLIENT.deref()\n\n}\n\n\n\n/// Reads the response from Elasticsearch, returning the method, status code, text response,\n\n/// and the response parsed from json or yaml\n\npub async fn read_response(\n\n response: Response,\n\n) -> Result<(Method, StatusCode, String, Value), failure::Error> {\n\n let is_json = response.content_type().starts_with(\"application/json\");\n\n let is_yaml = response.content_type().starts_with(\"application/yaml\");\n\n let method = response.method();\n\n let status_code = response.status_code();\n\n let text = response.text().await?;\n\n let json = if is_json && !text.is_empty() {\n\n serde_json::from_str::<Value>(text.as_ref())?\n\n } else if is_yaml && !text.is_empty() {\n\n serde_yaml::from_str::<Value>(text.as_ref())?\n\n } else {\n\n Value::Null\n", "file_path": "yaml_test_runner/tests/common/client.rs", "rank": 35, "score": 267616.4063688578 }, { "content": "/// Deserializes the headers map where the map values may be a string or a sequence of strings\n\nfn header_map<'de, D>(deserializer: D) -> Result<BTreeMap<String, Vec<String>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n #[derive(Deserialize)]\n\n struct Wrapper(#[serde(deserialize_with = \"crate::string_or_seq_string\")] Vec<String>);\n\n\n\n let v: BTreeMap<String, Wrapper> = BTreeMap::deserialize(deserializer)?;\n\n Ok(v.into_iter().map(|(k, Wrapper(v))| (k, v)).collect())\n\n}\n\n\n\nimpl Error {\n\n /// The cause of the exception\n\n pub fn caused_by(&self) -> Option<&Cause> {\n\n self.caused_by.as_deref()\n\n }\n\n\n\n /// The root causes for the exception\n\n pub fn root_cause(&self) -> &Vec<Cause> {\n\n &self.root_cause\n", "file_path": "elasticsearch/src/http/response.rs", "rank": 36, "score": 266121.6733398622 }, { "content": "pub fn take_while<F>(i: &[u8], f: F) -> (&[u8], &str)\n\nwhere\n\n F: Fn(u8) -> bool,\n\n{\n\n let mut ctr = 0;\n\n\n\n for c in i {\n\n if f(*c) {\n\n ctr += 1;\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n (&i[ctr..], str::from_utf8(&i[0..ctr]).unwrap())\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 37, "score": 259455.63064434327 }, { "content": "/// AST for a simple path variable.\n\nfn path_none(path_ident: &str) -> syn::Path {\n\n path(path_ident, vec![], vec![])\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 38, "score": 257803.21878210013 }, { "content": "pub fn split_on_pascal_case(s: &str) -> String {\n\n s.chars()\n\n .enumerate()\n\n .flat_map(|(i, c)| {\n\n if i != 0 && c.is_uppercase() {\n\n Some(' ')\n\n } else {\n\n None\n\n }\n\n .into_iter()\n\n .chain(std::iter::once(c))\n\n })\n\n .collect()\n\n}\n", 
"file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 39, "score": 255604.1517052942 }, { "content": "pub fn json_string_from_yaml(yaml: &Yaml) -> String {\n\n let mut s = String::new();\n\n {\n\n let mut emitter = YamlEmitter::new(&mut s);\n\n emitter.dump(yaml).unwrap();\n\n }\n\n\n\n let value: serde_json::Value = serde_yaml::from_str(&s).unwrap();\n\n\n\n let mut json = value.to_string();\n\n json = replace_set(json);\n\n json = replace_i64(json);\n\n json\n\n}\n", "file_path": "yaml_test_runner/src/step/mod.rs", "rank": 40, "score": 255040.3434193426 }, { "content": "pub fn parse_steps(api: &Api, steps: &[Yaml]) -> Result<Vec<Step>, failure::Error> {\n\n let mut parsed_steps: Vec<Step> = Vec::new();\n\n for step in steps {\n\n let hash = step\n\n .as_hash()\n\n .ok_or_else(|| failure::err_msg(format!(\"expected hash but found {:?}\", step)))?;\n\n\n\n let (key, value) = {\n\n let (k, yaml) = hash.iter().next().unwrap();\n\n let key = k.as_str().ok_or_else(|| {\n\n failure::err_msg(format!(\"expected string key but found {:?}\", k))\n\n })?;\n\n\n\n (key, yaml)\n\n };\n\n\n\n match key {\n\n \"skip\" => {\n\n let skip = Skip::try_parse(value)?;\n\n parsed_steps.push(skip.into());\n", "file_path": "yaml_test_runner/src/step/mod.rs", "rank": 41, "score": 247411.01236673765 }, { "content": "/// use declarations common across builders\n\npub fn use_declarations() -> Tokens {\n\n quote!(\n\n #![allow(unused_imports)]\n\n\n\n use crate::{\n\n client::Elasticsearch,\n\n params::*,\n\n error::Error,\n\n http::{\n\n headers::{HeaderName, HeaderMap, HeaderValue, CONTENT_TYPE, ACCEPT},\n\n Method,\n\n request::{Body, NdBody, JsonBody, PARTS_ENCODED},\n\n response::Response,\n\n transport::Transport,\n\n },\n\n };\n\n use std::{\n\n borrow::Cow,\n\n time::Duration\n\n };\n\n use percent_encoding::percent_encode;\n\n use serde::Serialize;\n\n )\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 42, "score": 245454.60812438832 }, { "content": "fn main() {\n\n async_std::task::block_on(async move {\n\n println!(\"res:{:?}\", \"ok\");\n\n });\n\n}", "file_path": "elasticsearch/examples/async_test/main.rs", "rank": 43, "score": 243051.452539959 }, { "content": "/// Replaces a \"set\" step value with a variable\n\npub fn replace_set<S: AsRef<str>>(s: S) -> String {\n\n let mut s = SET_QUOTED_DELIMITED_REGEX\n\n .replace_all(s.as_ref(), \"$1\")\n\n .into_owned();\n\n\n\n s = SET_DELIMITED_REGEX\n\n .replace_all(s.as_ref(), \"$1\")\n\n .into_owned();\n\n\n\n SET_REGEX.replace_all(s.as_ref(), \"$1\").into_owned()\n\n}\n\n\n", "file_path": "yaml_test_runner/src/regex.rs", "rank": 44, "score": 242162.675846992 }, { "content": "pub fn create_default_builder() -> TransportBuilder {\n\n create_builder(cluster_addr().as_str())\n\n}\n\n\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 45, "score": 239960.26830357194 }, { "content": "/// Ensures that the name generated is one that is valid for Rust\n\npub fn valid_name(s: &str) -> &str {\n\n match s {\n\n \"type\" => \"ty\",\n\n s => s,\n\n }\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 66, "score": 227298.51857460823 }, { "content": "/// AST for a path variable.\n\nfn path(path: &str, lifetimes: Vec<syn::Lifetime>, types: Vec<syn::Ty>) -> syn::Path {\n\n path_segments(vec![(path, lifetimes, types)])\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 67, "score": 225015.20268277935 }, { "content": "/// Ensure all deserialized paths have a 
leading `/`\n\nfn rooted_path_string<'de, D>(deserializer: D) -> Result<String, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n if !s.starts_with('/') {\n\n Ok(format!(\"/{}\", s))\n\n } else {\n\n Ok(s)\n\n }\n\n}\n\n\n\nimpl fmt::Display for PathString {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl PathString {\n", "file_path": "api_generator/src/generator/code_gen/url/url_builder.rs", "rank": 68, "score": 224291.12360722147 }, { "content": "/// Writes the input to the specified file, preceded by a header comment indicating generated code\n\npub fn write_file(\n\n input: String,\n\n docs: Option<&PathBuf>,\n\n dir: &PathBuf,\n\n file_name: &str,\n\n tracker: &mut GeneratedFiles,\n\n) -> Result<(), failure::Error> {\n\n let mut path = dir.clone();\n\n path.push(PathBuf::from_slash(file_name));\n\n\n\n let mut file = File::create(&path)?;\n\n file.write_all(\n\n b\"/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \\\"License\\\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n", "file_path": "api_generator/src/generator/output.rs", "rank": 69, "score": 224026.4604198684 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. 
See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\nuse chrono::{DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "elasticsearch/examples/search_questions_answers/stack_overflow.rs", "rank": 70, "score": 221412.13030196037 }, { "content": "use serde_json::Value;\n\n\n\n/// A Stack Overflow post\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(tag = \"type\")]\n\npub enum Post {\n\n Question(Question),\n\n Answer(Answer),\n\n}\n\n\n\n/// A Stack Overflow question\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Question {\n\n pub id: i32,\n\n pub parent_id: Value,\n\n pub creation_date: DateTime<Utc>,\n\n pub score: i32,\n\n pub body: String,\n\n pub owner_user_id: Option<i32>,\n\n pub owner_display_name: Option<String>,\n", "file_path": "elasticsearch/examples/search_questions_answers/stack_overflow.rs", "rank": 71, "score": 221314.3951198334 }, { "content": "\n\n/// A Stack Overflow answer\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Answer {\n\n pub id: i32,\n\n pub parent_id: Value,\n\n pub creation_date: DateTime<Utc>,\n\n pub score: i32,\n\n pub body: String,\n\n pub owner_user_id: Option<i32>,\n\n pub owner_display_name: Option<String>,\n\n pub last_editor_user_id: Option<i32>,\n\n pub last_edit_date: Option<DateTime<Utc>>,\n\n pub last_activity_date: Option<DateTime<Utc>>,\n\n pub comment_count: i32,\n\n}\n\n\n\nimpl From<Answer> for Post {\n\n fn from(a: Answer) -> Self {\n\n Post::Answer(a)\n\n }\n\n}\n", "file_path": "elasticsearch/examples/search_questions_answers/stack_overflow.rs", "rank": 72, "score": 221291.88897892126 }, { "content": " pub last_editor_user_id: Option<i32>,\n\n pub last_edit_date: Option<DateTime<Utc>>,\n\n pub last_activity_date: Option<DateTime<Utc>>,\n\n pub comment_count: i32,\n\n pub tags: Vec<String>,\n\n pub title: String,\n\n pub title_suggest: Option<Value>,\n\n pub accepted_answer_id: Option<i32>,\n\n pub view_count: i32,\n\n pub last_editor_display_name: Option<String>,\n\n pub answer_count: i32,\n\n pub favorite_count: i32,\n\n pub community_owned_date: Option<DateTime<Utc>>,\n\n}\n\n\n\nimpl From<Question> for Post {\n\n fn from(q: Question) -> Self {\n\n Post::Question(q)\n\n }\n\n}\n", "file_path": "elasticsearch/examples/search_questions_answers/stack_overflow.rs", "rank": 73, "score": 221289.2952154789 }, { "content": "pub fn base_64_encode_credentials(user: &str, password: &str) -> String {\n\n let mut value = Vec::new();\n\n {\n\n let mut encoder = Base64Encoder::new(&mut value, base64::STANDARD);\n\n write!(encoder, \"{}:\", user).unwrap();\n\n write!(encoder, \"{}\", password).unwrap();\n\n };\n\n String::from_utf8(value).unwrap()\n\n}\n", "file_path": "yaml_test_runner/tests/common/transform.rs", "rank": 74, "score": 220066.7272379817 }, { "content": "/// AST for a path type with lifetimes and type parameters.\n\npub fn ty_path(ty: &str, lifetimes: Vec<syn::Lifetime>, types: Vec<syn::Ty>) -> syn::Ty {\n\n syn::Ty::Path(None, path(ty, lifetimes, types))\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 75, "score": 218034.47762086955 }, { "content": "/// Checks if Fiddler proxy process is running\n\nfn running_proxy() -> bool {\n\n let system = System::new_with_specifics(RefreshKind::new().with_processes());\n\n !system.get_process_by_name(\"Fiddler\").is_empty()\n\n}\n\n\n", "file_path": "elasticsearch/tests/common/client.rs", "rank": 76, "score": 
216954.35054208897 }, { "content": "fn expected_error_message() -> String {\n\n if cfg!(windows) {\n\n \"terminated in a root certificate which is not trusted by the trust provider\".to_string()\n\n } else {\n\n let os = os_type::current_platform();\n\n match os.os_type {\n\n OSType::OSX => \"The certificate was not trusted\".to_string(),\n\n _ => \"unable to get local issuer certificate\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n/// Default certificate validation with a self signed certificate\n\n#[tokio::test]\n\n#[cfg(feature = \"native-tls\")]\n\nasync fn default_certificate_validation() -> Result<(), failure::Error> {\n\n let builder = client::create_default_builder().cert_validation(CertificateValidation::Default);\n\n let client = client::create(builder);\n\n let result = client.ping().send().await;\n\n\n", "file_path": "elasticsearch/tests/cert.rs", "rank": 77, "score": 216829.77154020497 }, { "content": "fn string_or_struct<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: Deserialize<'de> + FromStr<Err = Void>,\n\n D: Deserializer<'de>,\n\n{\n\n struct StringOrStruct<T>(PhantomData<fn() -> T>);\n\n\n\n impl<'de, T> Visitor<'de> for StringOrStruct<T>\n\n where\n\n T: Deserialize<'de> + FromStr<Err = Void>,\n\n {\n\n type Value = T;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"string or map\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<T, E>\n\n where\n\n E: serde::de::Error,\n", "file_path": "api_generator/src/generator/mod.rs", "rank": 78, "score": 213068.9099847957 }, { "content": "pub fn http<F, Fut>(func: F) -> Server\n\nwhere\n\n F: Fn(http::Request<hyper::Body>) -> Fut + Clone + Send + 'static,\n\n Fut: Future<Output = http::Response<hyper::Body>> + Send + 'static,\n\n{\n\n //Spawn new runtime in thread to prevent reactor execution context conflict\n\n thread::spawn(move || {\n\n let mut rt = runtime::Builder::new()\n\n .basic_scheduler()\n\n .enable_all()\n\n .build()\n\n .expect(\"new rt\");\n\n let srv = rt.block_on(async move {\n\n hyper::Server::bind(&([127, 0, 0, 1], 0).into()).serve(hyper::service::make_service_fn(\n\n move |_| {\n\n let func = func.clone();\n\n async move {\n\n Ok::<_, Infallible>(hyper::service::service_fn(move |req| {\n\n let fut = func(req);\n\n async move { Ok::<_, Infallible>(fut.await) }\n", "file_path": "elasticsearch/tests/common/server.rs", "rank": 79, "score": 212337.62966567493 }, { "content": "/// Replaces all integers in a string to suffix with i64, to ensure that numbers\n\n/// larger than i32 will be handled correctly when passed to json! 
macro\n\npub fn replace_i64<S: AsRef<str>>(s: S) -> String {\n\n INT_REGEX\n\n .replace_all(s.as_ref(), |c: &Captures| match &c[2].parse::<i64>() {\n\n Ok(i) if *i > i32::max_value() as i64 => format!(\"{}{}i64{}\", &c[1], &c[2], &c[3]),\n\n _ => c[0].to_string(),\n\n })\n\n .into_owned()\n\n}\n", "file_path": "yaml_test_runner/src/regex.rs", "rank": 80, "score": 202068.96972504968 }, { "content": "/// cleans up a regex as specified in YAML to one that will work with the regex crate.\n\npub fn clean_regex<S: AsRef<str>>(s: S) -> String {\n\n s.as_ref()\n\n .trim()\n\n .trim_matches('/')\n\n .replace(\"\\\\/\", \"/\")\n\n .replace(\"\\\\:\", \":\")\n\n .replace(\"\\\\#\", \"#\")\n\n .replace(\"\\\\%\", \"%\")\n\n .replace(\"\\\\'\", \"'\")\n\n .replace(\"\\\\`\", \"`\")\n\n}\n\n\n", "file_path": "yaml_test_runner/src/regex.rs", "rank": 81, "score": 202064.21742241314 }, { "content": "fn write_spec_file(\n\n download_dir: &PathBuf,\n\n mut entry: Entry<GzDecoder<Response>>,\n\n) -> Result<(), failure::Error> {\n\n let path = entry.path()?;\n\n let mut dir = download_dir.clone();\n\n dir.push(path.file_name().unwrap());\n\n let mut file = File::create(&dir)?;\n\n io::copy(&mut entry, &mut file)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "api_generator/src/rest_spec/mod.rs", "rank": 82, "score": 201564.67838274082 }, { "content": "/// Body of an API call.\n\n///\n\n/// Some Elasticsearch APIs accept a body as part of the API call. Most APIs\n\n/// expect JSON, however, there are some APIs that expect newline-delimited JSON (NDJSON).\n\n/// The [Body] trait allows modelling different API body implementations.\n\npub trait Body {\n\n /// An existing immutable buffer that can be used to avoid\n\n /// having to write to another buffer that will then be written to the request stream.\n\n ///\n\n /// If this method returns `Some`, the bytes must be the same as\n\n /// those that would be written by [Body::write].\n\n fn bytes(&self) -> Option<Bytes> {\n\n None\n\n }\n\n\n\n /// Write to a buffer that will be written to the request stream\n\n fn write(&self, bytes: &mut BytesMut) -> Result<(), Error>;\n\n}\n\n\n\nimpl<'a, B: ?Sized> Body for &'a B\n\nwhere\n\n B: Body,\n\n{\n\n fn bytes(&self) -> Option<Bytes> {\n\n (**self).bytes()\n", "file_path": "elasticsearch/src/http/request.rs", "rank": 83, "score": 200678.58613313572 }, { "content": "/// AST for a path type with a `'b` lifetime.\n\npub fn ty_b(ty: &str) -> syn::Ty {\n\n ty_path(ty, vec![lifetime_b()], vec![])\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 84, "score": 197070.0519744493 }, { "content": "/// AST for a simple path type.\n\npub fn ty(ty: &str) -> syn::Ty {\n\n ty_path(ty, vec![], vec![])\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 85, "score": 197070.0519744493 }, { "content": "fn cluster_addr() -> String {\n\n match std::env::var(\"ELASTICSEARCH_URL\") {\n\n Ok(server) => server,\n\n Err(_) => DEFAULT_ADDRESS.into(),\n\n }\n\n}\n\n\n", "file_path": "yaml_test_runner/tests/common/client.rs", "rank": 86, "score": 196858.04852566283 }, { "content": "fn main() {\n\n match version_meta().unwrap().channel {\n\n Channel::Stable => {\n\n println!(\"cargo:rustc-cfg=RUSTC_IS_STABLE\");\n\n }\n\n Channel::Beta => {\n\n println!(\"cargo:rustc-cfg=RUSTC_IS_BETA\");\n\n }\n\n Channel::Nightly => {\n\n println!(\"cargo:rustc-cfg=RUSTC_IS_NIGHTLY\");\n\n }\n\n Channel::Dev => {\n\n println!(\"cargo:rustc-cfg=RUSTC_IS_DEV\");\n\n }\n\n }\n\n}\n", "file_path": 
"elasticsearch/build.rs", "rank": 87, "score": 195037.08842305577 }, { "content": "/// Generics with no parameters.\n\npub fn generics_none() -> syn::Generics {\n\n generics(vec![], vec![])\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 88, "score": 194638.09135754072 }, { "content": "fn branch_suite_and_version_from_elasticsearch(\n\n url: &str,\n\n) -> Result<(String, TestSuite, semver::Version), failure::Error> {\n\n let client = reqwest::ClientBuilder::new()\n\n .danger_accept_invalid_certs(true)\n\n .build()?;\n\n\n\n let mut response = client.get(url).send()?;\n\n let json: Value = response.json()?;\n\n let branch = json[\"version\"][\"build_hash\"].as_str().unwrap().to_string();\n\n let suite = match json[\"version\"][\"build_flavor\"].as_str().unwrap() {\n\n \"oss\" => TestSuite::Oss,\n\n _ => TestSuite::XPack,\n\n };\n\n\n\n // any prerelease part needs to be trimmed because the semver crate only allows\n\n // a version with a prerelease to match against predicates, if at least one predicate\n\n // has a prerelease. See\n\n // https://github.com/steveklabnik/semver/blob/afa5fc853cb4d6d2b1329579e5528f86f3b550f9/src/version_req.rs#L319-L331\n\n let version = json[\"version\"][\"number\"]\n\n .as_str()\n\n .unwrap()\n\n .trim_end_matches(|c: char| c.is_alphabetic() || c == '-');\n\n\n\n Ok((branch, suite, semver::Version::parse(version)?))\n\n}\n", "file_path": "yaml_test_runner/src/main.rs", "rank": 89, "score": 190468.35414957762 }, { "content": "#[cfg(test)]\n\npub fn ast_eq<T: ToTokens>(expected: Tokens, actual: T) {\n\n assert_eq!(expected, quote!(#actual));\n\n}\n", "file_path": "api_generator/src/generator/mod.rs", "rank": 90, "score": 190103.09500189166 }, { "content": "/// AST for a path variable.\n\nfn path_segments(paths: Vec<(&str, Vec<syn::Lifetime>, Vec<syn::Ty>)>) -> syn::Path {\n\n syn::Path {\n\n global: false,\n\n segments: paths\n\n .into_iter()\n\n .map(|(path, lifetimes, types)| syn::PathSegment {\n\n ident: syn::Ident::new(valid_name(path)),\n\n parameters: syn::PathParameters::AngleBracketed(syn::AngleBracketedParameterData {\n\n lifetimes,\n\n types,\n\n bindings: vec![],\n\n }),\n\n })\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "api_generator/src/generator/code_gen/mod.rs", "rank": 91, "score": 181656.66027560917 }, { "content": "/// Generates all client source code from the REST API spec\n\npub fn generate(\n\n branch: &str,\n\n download_dir: &PathBuf,\n\n generated_dir: &PathBuf,\n\n) -> Result<(), failure::Error> {\n\n // read the Api from file\n\n let api = read_api(branch, download_dir)?;\n\n\n\n let docs_dir = {\n\n let d = download_dir.clone();\n\n d.parent().unwrap().join(\"docs\")\n\n };\n\n\n\n // generated file tracking lists\n\n let mut tracker = GeneratedFiles::default();\n\n\n\n // generate param enums\n\n let mut sections = HashMap::new();\n\n sections.insert(\"spec-params\", code_gen::params::generate(&api)?);\n\n merge_file(\n", "file_path": "api_generator/src/generator/mod.rs", "rank": 92, "score": 181338.20179624535 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. 
licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\nuse chrono::{DateTime, Utc};\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "elasticsearch/examples/search_questions/stack_overflow.rs", "rank": 93, "score": 179669.16154315043 }, { "content": "/*\n\n * Licensed to Elasticsearch B.V. under one or more contributor\n\n * license agreements. See the NOTICE file distributed with\n\n * this work for additional information regarding copyright\n\n * ownership. Elasticsearch B.V. licenses this file to you under\n\n * the Apache License, Version 2.0 (the \"License\"); you may\n\n * not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n *\thttp://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing,\n\n * software distributed under the License is distributed on an\n\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n\n * KIND, either express or implied. See the License for the\n\n * specific language governing permissions and limitations\n\n * under the License.\n\n */\n\n#[macro_use]\n\nextern crate serde_json;\n", "file_path": "elasticsearch/examples/search_questions_answers/main.rs", "rank": 94, "score": 179582.5466187671 }, { "content": "use serde_json::Value;\n\n\n\n/// A Stack Overflow post\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(tag = \"type\")]\n\npub enum Post {\n\n Question(Question),\n\n Answer(Answer),\n\n}\n\n\n\n/// A Stack Overflow question\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Question {\n\n pub id: i32,\n\n pub parent_id: Value,\n\n pub creation_date: DateTime<Utc>,\n\n pub score: i32,\n\n pub body: String,\n\n pub owner_user_id: Option<i32>,\n\n pub owner_display_name: Option<String>,\n", "file_path": "elasticsearch/examples/search_questions/stack_overflow.rs", "rank": 95, "score": 179571.42636102348 }, { "content": "\n\n/// A Stack Overflow answer\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\npub struct Answer {\n\n pub id: i32,\n\n pub parent_id: Value,\n\n pub creation_date: DateTime<Utc>,\n\n pub score: i32,\n\n pub body: String,\n\n pub owner_user_id: Option<i32>,\n\n pub owner_display_name: Option<String>,\n\n pub last_editor_user_id: Option<i32>,\n\n pub last_edit_date: Option<DateTime<Utc>>,\n\n pub last_activity_date: Option<DateTime<Utc>>,\n\n pub comment_count: i32,\n\n}\n\n\n\nimpl From<Answer> for Post {\n\n fn from(a: Answer) -> Self {\n\n Post::Answer(a)\n\n }\n\n}\n", "file_path": "elasticsearch/examples/search_questions/stack_overflow.rs", "rank": 96, "score": 179548.92022011132 }, { "content": " pub last_editor_user_id: Option<i32>,\n\n pub last_edit_date: Option<DateTime<Utc>>,\n\n pub last_activity_date: Option<DateTime<Utc>>,\n\n pub comment_count: i32,\n\n pub tags: Vec<String>,\n\n pub title: String,\n\n pub title_suggest: Option<Value>,\n\n pub accepted_answer_id: Option<i32>,\n\n pub view_count: 
i32,\n\n pub last_editor_display_name: Option<String>,\n\n pub answer_count: i32,\n\n pub favorite_count: i32,\n\n pub community_owned_date: Option<DateTime<Utc>>,\n\n}\n\n\n\nimpl From<Question> for Post {\n\n fn from(q: Question) -> Self {\n\n Post::Question(q)\n\n }\n\n}\n", "file_path": "elasticsearch/examples/search_questions/stack_overflow.rs", "rank": 97, "score": 179546.32645666896 }, { "content": "\n\n#[cfg(any(feature = \"native-tls\", feature = \"rustls-tls\"))]\n\nuse elasticsearch::cert::CertificateValidation;\n\nuse elasticsearch::{\n\n auth::Credentials,\n\n http::transport::{SingleNodeConnectionPool, TransportBuilder},\n\n Elasticsearch, Error, SearchParts, DEFAULT_ADDRESS,\n\n};\n\nuse serde_json::Value;\n\nuse std::env;\n\nuse sysinfo::SystemExt;\n\nuse url::Url;\n\nmod stack_overflow;\n\nuse stack_overflow::*;\n\nuse textwrap::fill;\n\n\n\nstatic POSTS_INDEX: &'static str = \"posts\";\n\n\n\n#[tokio::main]\n\npub async fn main() -> Result<(), Box<dyn std::error::Error>> {\n", "file_path": "elasticsearch/examples/search_questions_answers/main.rs", "rank": 98, "score": 179479.63584577225 }, { "content": " let client = create_client()?;\n\n let mut response = client\n\n .search(SearchParts::Index(&[POSTS_INDEX]))\n\n .body(query)\n\n .pretty(true)\n\n .send()\n\n .await?;\n\n\n\n // turn the response into an Error if status code is unsuccessful\n\n response = response.error_for_status_code()?;\n\n\n\n let json: Value = response.json().await?;\n\n let posts: Vec<Post> = json[\"hits\"][\"hits\"]\n\n .as_array()\n\n .unwrap()\n\n .iter()\n\n .map(|h| serde_json::from_value(h[\"_source\"].clone()).unwrap())\n\n .collect();\n\n\n\n for post in posts {\n", "file_path": "elasticsearch/examples/search_questions_answers/main.rs", "rank": 99, "score": 179479.22790583826 } ]
Rust
src/lib.rs
Lantern-chat/mime_db
a197b8e9e608872b8a042a254d4e86fa6cf21ce0
use unicase::UniCase; #[derive(Debug, Clone, Copy)] pub struct MimeEntry { compressible: bool, extensions: &'static [&'static str], } #[derive(Debug, Clone, Copy)] pub struct ExtEntry { types: &'static [&'static str], } include!(concat!(env!("OUT_DIR"), "/mime_db.rs")); pub fn lookup_ext(ext: &str) -> Option<&ExtEntry> { EXT_TO_MIME.get(&UniCase::new(ext)) } pub fn lookup_mime(mime: &str) -> Option<&MimeEntry> { MIME_TO_EXT.get(&UniCase::new(mime)) } #[inline] pub fn lookup_mime_from_ext(ext: &str) -> Option<&MimeEntry> { let entry = lookup_ext(ext)?; if entry.types.is_empty() { return None; } lookup_mime(entry.types[0]) } pub fn from_prefix(bytes: &[u8]) -> Option<(&str, Option<&MimeEntry>)> { static MAGIC_BYTES: &[(usize, &[u8], &str)] = &[ (0, b"\x89PNG\r\n\x1a\n", "image/png"), (0, &[0xff, 0xd8, 0xff], "image/jpeg"), (0, &[0xCF, 0x84, 0x01], "image/jpeg"), (0, b"GIF89a", "image/gif"), (0, b"GIF87a", "image/gif"), (0, b"MM\x00*", "image/tiff"), (0, b"II*\x00", "image/tiff"), (0, b"DDS ", "image/vnd.ms-dds"), (0, b"BM", "image/bmp"), (0, &[0, 0, 1, 0], "image/x-icon"), (0, b"#?RADIANCE", "image/vnd.radiance"), (0, b"P1", "image/x-portable-anymap"), (0, b"P2", "image/x-portable-anymap"), (0, b"P3", "image/x-portable-anymap"), (0, b"P4", "image/x-portable-anymap"), (0, b"P5", "image/x-portable-anymap"), (0, b"P6", "image/x-portable-anymap"), (0, b"P7", "image/x-portable-anymap"), (0, b"farbfeld", "image/x-farbfeld"), (0, b"\0\0\0 ftypavif", "image/avif"), (0, &[0x76, 0x2f, 0x31, 0x01], "image/aces"), (0, &[0x38, 0x42, 0x50, 0x53], "image/vnd.adobe.photoshop"), (0, &[0x25, 0x50, 0x44, 0x46, 0x2D], "application/pdf"), (0, &[0x4F, 0x67, 0x67, 0x53], "audio/ogg"), (0, &[0xFF, 0xFB], "audio/mp3"), (0, &[0xFF, 0xF3], "audio/mp3"), (0, &[0xFF, 0xF2], "audio/mp3"), (0, &[0x49, 0x44, 0x33], "audio/mp3"), (0, &[0x66, 0x4C, 0x61, 0x43], "audio/x-flac"), ( 0, &[ 0x00, 0x00, 0x00, 0x0C, 0x4A, 0x58, 0x4C, 0x20, 0x0D, 0x0A, 0x87, 0x0A, ], "image/jxl", ), (0, &[0x4D, 0x54, 0x68, 0x64], "audio/midi"), ( 0, &[0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1], "application/msword", ), (0, &[0x1F, 0x8B], "application/gzip"), ( 257, &[0x75, 0x73, 0x74, 0x61, 0x72, 0x00, 0x30, 0x30], "application/tar", ), ( 257, &[0x75, 0x73, 0x74, 0x61, 0x72, 0x20, 0x20, 0x00], "application/tar", ), ( 0, &[0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C], "application/x-7z-compressed", ), (0, &[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00], "application/x-xz"), (0, &[0x46, 0x4C, 0x49, 0x46], "image/flif"), (0, &[0x1A, 0x45, 0xDF, 0xA3], "video/x-matroska"), (0, &[0x47], "video/mpeg"), (4, &[0x66, 0x74, 0x79, 0x70, 0x69, 0x73, 0x6F, 0x6D], "video/mp4"), (0, &[0x78, 0x01], "application/z-lib"), (0, &[0x78, 0x5E], "application/z-lib"), (0, &[0x78, 0x9C], "application/z-lib"), (0, &[0x78, 0xDA], "application/z-lib"), (0, &[0x78, 0x20], "application/z-lib"), (0, &[0x78, 0x7D], "application/z-lib"), (0, &[0x78, 0xBB], "application/z-lib"), (0, &[0x78, 0xF9], "application/z-lib"), ( 0, &[0x42, 0x4C, 0x45, 0x4E, 0x44, 0x45, 0x52], "application/x-blend", ), (0, &[0x46, 0x4C, 0x56], "video/x-flv"), (0, &[0x4D, 0x53, 0x43, 0x46], "application/vnd.ms-cab-compressed"), ( 0, &[ 0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C, ], "video/x-ms-wmv", ), ( 0, &[ 0x53, 0x49, 0x4D, 0x50, 0x4C, 0x45, 0x20, 0x20, 0x3D, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x54, ], "image/fits", ), ]; const RIFFS: &[(&[u8], &str)] = &[ (&[0x57, 0x45, 0x42, 0x50], 
"image/webp"), (&[0x57, 0x41, 0x56, 0x45], "audio/wav"), (&[0x41, 0x56, 0x49, 0x20], "video/x-msvideo"), (&[0x43, 0x44, 0x44, 0x41], "audio/cda"), ]; for (offset, prefix, mime) in MAGIC_BYTES { if bytes.len() > *offset && bytes[*offset..].starts_with(prefix) { return Some((*mime, lookup_mime(mime))); } if bytes.starts_with(b"RIFF") && bytes.len() >= 12 { let bytes = &bytes[4..]; for (prefix, mime) in RIFFS { if bytes.starts_with(prefix) { return Some((*mime, lookup_mime(mime))); } } } } None }
use unicase::UniCase; #[derive(Debug, Clone, Copy)] pub struct MimeEntry { compressible: bool, extensions: &'static [&'static str], } #[derive(Debug, Clone, Copy)] pub struct ExtEntry { types: &'static [&'static str], } include!(concat!(env!("OUT_DIR"), "/mime_db.rs")); pub fn lookup_ext(ext: &str) -> Option<&ExtEntry> { EXT_TO_MIME.get(&UniCase::new(ext)) } pub fn lookup_mime(mime: &str) -> Option<&MimeEntry> { MIME_TO_EXT.get(&UniCase::new(mime)) } #[inline] pub fn lookup_mime_from_ext(ext: &str) -> Option<&MimeEntry> { let entry = lookup_ext(ext)?; if entry.types.is_empty() { return None; } lookup_mime(entry.types[0]) } pub fn from_prefix(bytes: &[u8]) -> Option<(&str, Option<&MimeEntry>)> { static MAGIC_BYTES: &[(usize, &[u8], &str)] = &[ (0, b"\x89PNG\r\n\x1a\n", "image/png"), (0, &[0xff, 0xd8, 0xff], "image/jpeg"), (0, &[0xCF, 0x84, 0x01], "image/jpeg"), (0, b"GIF89a", "image/gif"), (0, b"GIF87a", "image/gif"), (0, b"MM\x00*", "image/tiff"), (0, b"II*\x00", "image/tiff"), (0, b"DDS ", "image/vnd.ms-dds"), (0, b"BM", "image/bmp"), (0, &[0, 0, 1, 0], "image/x-icon"), (0, b"#?RADIANCE", "image/vnd.radiance"), (0, b"P1", "image/x-portable-anymap"), (0, b"P2", "image/x-portable-anymap"), (0, b"P3", "image/x-portable-anymap"), (0, b"P4", "image/x-portable-anymap"), (0, b"P5", "image/x-portable-anymap"), (0, b"P6", "image/x-portable-anymap"), (0, b"P7", "image/x-portable-anymap"), (0, b"farbfeld", "image/x-farbfeld"), (0, b"\0\0\0 ftypavif", "image/avif"), (0, &[0x76, 0x2f, 0x31, 0x01], "image/aces"), (0, &[0x38, 0x42, 0x50, 0x53], "image/vnd.adobe.photoshop"), (0, &[0x25, 0x50, 0x44, 0x46, 0x2D], "application/pdf"), (0, &[0x4F, 0x67, 0x67, 0x53], "audio/ogg"), (0, &[0xFF, 0xFB], "audio/mp3"), (0, &[0xFF, 0xF3], "audio/mp3"), (0, &[0xFF, 0xF2], "audio/mp3"), (0, &[0x49, 0x44, 0x33], "audio/mp3"), (0, &[0x66, 0x4C, 0x61, 0x43], "audio/x-flac"), ( 0, &[ 0x00, 0x00, 0x00, 0x0C, 0x4A, 0x58, 0x4C, 0x20, 0x0D, 0x0A, 0x87, 0x0A, ], "image/jxl", ), (0, &[0x4D, 0x54, 0x68, 0x64], "audio/midi"), ( 0, &[0xD0, 0xCF, 0x11, 0xE0, 0xA1, 0xB1, 0x1A, 0xE1], "application/msword", ), (0, &[0x1F, 0x8B], "application/gzip"), ( 257, &[0x75, 0x73, 0x74, 0x61, 0x72, 0x00, 0x30, 0x30], "application/tar", ), ( 257, &[0x75, 0x73, 0x74, 0x61, 0x72, 0x20, 0x20, 0x00], "application/tar", ), ( 0, &[0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C], "application/x-7z-compressed", ), (0, &[0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00], "application/x-xz"), (0, &[0x46, 0x4C, 0x49, 0x46], "image/flif"), (0, &[0x1A, 0x45, 0xDF, 0xA3], "video/x-matroska"), (0, &[0x47], "video/mpeg"), (4, &[0x66, 0x74, 0x79, 0x70, 0x69, 0x73, 0x6F, 0x6D], "video/mp4"), (0, &[0x78, 0x01], "application/z-lib"), (0, &[0x78, 0x5E], "application/z-lib"), (
0, &[ 0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C, ], "video/x-ms-wmv", ), ( 0, &[ 0x53, 0x49, 0x4D, 0x50, 0x4C, 0x45, 0x20, 0x20, 0x3D, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x54, ], "image/fits", ), ]; const RIFFS: &[(&[u8], &str)] = &[ (&[0x57, 0x45, 0x42, 0x50], "image/webp"), (&[0x57, 0x41, 0x56, 0x45], "audio/wav"), (&[0x41, 0x56, 0x49, 0x20], "video/x-msvideo"), (&[0x43, 0x44, 0x44, 0x41], "audio/cda"), ]; for (offset, prefix, mime) in MAGIC_BYTES { if bytes.len() > *offset && bytes[*offset..].starts_with(prefix) { return Some((*mime, lookup_mime(mime))); } if bytes.starts_with(b"RIFF") && bytes.len() >= 12 { let bytes = &bytes[4..]; for (prefix, mime) in RIFFS { if bytes.starts_with(prefix) { return Some((*mime, lookup_mime(mime))); } } } } None }
0, &[0x78, 0x9C], "application/z-lib"), (0, &[0x78, 0xDA], "application/z-lib"), (0, &[0x78, 0x20], "application/z-lib"), (0, &[0x78, 0x7D], "application/z-lib"), (0, &[0x78, 0xBB], "application/z-lib"), (0, &[0x78, 0xF9], "application/z-lib"), ( 0, &[0x42, 0x4C, 0x45, 0x4E, 0x44, 0x45, 0x52], "application/x-blend", ), (0, &[0x46, 0x4C, 0x56], "video/x-flv"), (0, &[0x4D, 0x53, 0x43, 0x46], "application/vnd.ms-cab-compressed"), (
random
[ { "content": "#[derive(Debug, serde::Deserialize)]\n\nstruct MimeEntry {\n\n #[serde(default)]\n\n pub compressible: bool,\n\n\n\n #[serde(default)]\n\n pub extensions: Vec<String>,\n\n\n\n #[serde(default)]\n\n pub source: Source,\n\n}\n\n\n", "file_path": "build.rs", "rank": 4, "score": 39328.213446605354 }, { "content": "fn main() -> io::Result<()> {\n\n let mut db: HashMap<String, MimeEntry> = serde_json::from_reader(File::open(\"./extra.json\")?).unwrap();\n\n let db2: HashMap<String, MimeEntry> = serde_json::from_reader(File::open(\"./mime-db/db.json\")?).unwrap();\n\n\n\n db.extend(db2); // apply mime-db second to overwrite any old extras\n\n\n\n let path = Path::new(&env::var(\"OUT_DIR\").unwrap()).join(\"mime_db.rs\");\n\n let mut file = BufWriter::new(File::create(&path)?);\n\n\n\n let mut mime_to_ext_map = phf_codegen::Map::new();\n\n let mut ext_to_mime_map = phf_codegen::Map::new();\n\n\n\n let mut ext_map: HashMap<&str, HashMap<&str, Source>> = HashMap::new();\n\n\n\n for (mime, entry) in db.iter() {\n\n let mut buf = format!(\n\n \"MimeEntry {{ compressible: {}, extensions: &[\",\n\n entry.compressible\n\n );\n\n\n", "file_path": "build.rs", "rank": 5, "score": 18711.24873379459 }, { "content": " }\n\n\n\n buf += \"]}\";\n\n\n\n ext_to_mime_map.entry(UniCase::new(*ext), &buf);\n\n }\n\n\n\n write!(\n\n &mut file,\n\n \"static MIME_TO_EXT: phf::Map<UniCase<&'static str>, MimeEntry> = \\n{};\\n\",\n\n mime_to_ext_map.build()\n\n )?;\n\n\n\n write!(\n\n &mut file,\n\n \"static EXT_TO_MIME: phf::Map<UniCase<&'static str>, ExtEntry> = \\n{};\\n\",\n\n ext_to_mime_map.build()\n\n )?;\n\n\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 7, "score": 4.411126242839403 }, { "content": "use std::collections::HashMap;\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::io::{self, BufWriter, Write};\n\nuse std::path::Path;\n\n\n\nuse unicase::UniCase;\n\n\n\n#[derive(serde::Deserialize, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n", "file_path": "build.rs", "rank": 8, "score": 3.8365224440464507 }, { "content": "#[derive(serde::Deserialize, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\nenum Source {\n\n #[serde(rename = \"iana\")]\n\n IANA = 0,\n\n #[serde(rename = \"apache\")]\n\n Apache = 1,\n\n #[serde(rename = \"nginx\")]\n\n Nginx = 2,\n\n\n\n None = 3,\n\n}\n\n\n\nimpl Default for Source {\n\n fn default() -> Self {\n\n Source::None\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 9, "score": 2.8534184084971463 }, { "content": " for ext in &entry.extensions {\n\n buf += &format!(\"\\\"{ext}\\\", \");\n\n\n\n ext_map.entry(ext).or_default().insert(mime, entry.source);\n\n }\n\n\n\n buf += \"]}\";\n\n\n\n mime_to_ext_map.entry(UniCase::new(mime), &buf);\n\n }\n\n\n\n for (ext, mapping_set) in ext_map.iter() {\n\n let mut mappings = mapping_set.iter().collect::<Vec<_>>();\n\n\n\n mappings.sort_by_key(|(_, source)| *source);\n\n\n\n let mut buf = \"ExtEntry { types: &[\".to_owned();\n\n\n\n for (mime, _) in mappings.iter() {\n\n buf += &format!(\"\\\"{mime}\\\", \");\n", "file_path": "build.rs", "rank": 10, "score": 1.704260141593859 } ]
Rust
src/connectivity/overnet/overnetstack/src/main.rs
mehulagg/fuchsia
3f56175ee594da6b287d5fb19f2f0eccea2897f0
#![deny(missing_docs)] mod mdns; use failure::{Error, ResultExt}; use fidl_fuchsia_overnet::{ MeshControllerRequest, MeshControllerRequestStream, OvernetListPeersResponder, OvernetRequest, OvernetRequestStream, ServiceConsumerListPeersResponder, ServiceConsumerRequest, ServiceConsumerRequestStream, ServicePublisherRequest, ServicePublisherRequestStream, }; use fuchsia_async as fasync; use fuchsia_component::server::ServiceFs; use fuchsia_zircon as zx; use futures::future::{abortable, AbortHandle}; use futures::prelude::*; use overnet_core::{LinkId, Node, NodeId, NodeOptions, NodeRuntime, RouterTime, SendHandle}; use std::cell::RefCell; use std::collections::HashMap; use std::net::{SocketAddr, SocketAddrV6}; use std::ops::Deref; use std::rc::Rc; use zx::AsHandleRef; #[derive(Clone, Copy, Debug)] enum AppLinkId { Udp(SocketAddrV6), } #[derive(PartialEq, PartialOrd, Eq, Ord, Clone, Copy, Debug)] struct Time(fasync::Time); impl RouterTime for Time { type Duration = zx::Duration; fn now() -> Self { Time(fasync::Time::now()) } fn after(time: Self, duration: zx::Duration) -> Self { Self(time.0 + duration) } } struct AppRuntime; impl NodeRuntime for AppRuntime { type Time = Time; type LinkId = AppLinkId; const IMPLEMENTATION: fidl_fuchsia_overnet_protocol::Implementation = fidl_fuchsia_overnet_protocol::Implementation::OvernetStack; fn handle_type(handle: &zx::Handle) -> Result<SendHandle, Error> { match handle.basic_info()?.object_type { zx::ObjectType::CHANNEL => Ok(SendHandle::Channel), _ => failure::bail!("Handle type not proxyable {:?}", handle.basic_info()?.object_type), } } fn spawn_local<F>(&mut self, future: F) where F: Future<Output = ()> + 'static, { fasync::spawn_local(future) } fn at(&mut self, t: Self::Time, f: impl FnOnce() + 'static) { fasync::spawn_local(at(t.0, f)) } fn router_link_id(&self, id: AppLinkId) -> LinkId<overnet_core::PhysLinkId<AppLinkId>> { with_app_mut(|app| match id { AppLinkId::Udp(addr) => { app.udp_link_ids.get(&addr).copied().unwrap_or(LinkId::invalid()) } }) } fn send_on_link(&mut self, id: Self::LinkId, packet: &mut [u8]) -> Result<(), Error> { match id { AppLinkId::Udp(addr) => { println!("UDP_SEND to:{} len:{}", addr, packet.len()); let sock = with_app_mut(|app| -> Result<_, Error> { Ok(app .udp_socket .as_ref() .ok_or_else(|| failure::format_err!("no udp socket"))? 
.sock .clone()) })?; let sock = sock.deref().as_ref(); if let Err(e) = sock.send_to(packet, addr) { if e.kind() == std::io::ErrorKind::BrokenPipe { log::warn!("BrokenPipe on UDP socket: let's make a new one"); with_app_mut(|app| { app.udp_socket.take(); app.udp_socket = Some(UdpSocketHolder::new(app.node_id)?); Ok(()) }) } else { Err(e.into()) } } else { Ok(()) } } } } } struct UdpSocketHolder { sock: Rc<fasync::net::UdpSocket>, abort_publisher: AbortHandle, } impl UdpSocketHolder { fn new(node_id: NodeId) -> Result<Self, Error> { let sock = std::net::UdpSocket::bind("[::]:0").context("Creating UDP socket")?; let publisher = mdns::publish(node_id, sock.local_addr().context("Getting UDP local address")?.port()); let sock = Rc::new(fasync::net::UdpSocket::from_socket(sock)?); let (publisher, abort_publisher) = abortable(publisher); fasync::spawn_local(async move { let _ = publisher.await; }); Ok(Self { sock, abort_publisher }) } } impl Drop for UdpSocketHolder { fn drop(&mut self) { self.abort_publisher.abort(); } } struct App { node_id: NodeId, node: Node<AppRuntime>, udp_link_ids: HashMap<SocketAddrV6, LinkId<overnet_core::PhysLinkId<AppLinkId>>>, udp_socket: Option<UdpSocketHolder>, } thread_local! { static APP: RefCell<App> = RefCell::new(App::new()); } fn with_app_mut<R>(f: impl FnOnce(&mut App) -> R) -> R { APP.with(|rcapp| f(&mut rcapp.borrow_mut())) } async fn at(when: fasync::Time, f: impl FnOnce()) { fasync::Timer::new(when).await; f(); } impl App { fn new() -> App { let node = Node::new( AppRuntime, NodeOptions::new() .set_quic_server_key_file(Box::new("/pkg/data/cert.key".to_string())) .set_quic_server_cert_file(Box::new("/pkg/data/cert.crt".to_string())), ) .unwrap(); App { node_id: node.id(), node, udp_link_ids: HashMap::new(), udp_socket: None } } } fn normalize_addr(addr: SocketAddr) -> SocketAddrV6 { match addr { SocketAddr::V6(a) => a, SocketAddr::V4(a) => SocketAddrV6::new(a.ip().to_ipv6_mapped(), a.port(), 0, 0), } } async fn read_udp_inner() -> Result<(), Error> { let mut buf: [u8; 1500] = [0; 1500]; loop { let sock = with_app_mut(|app| -> Result<_, Error> { Ok(app .udp_socket .as_ref() .ok_or_else(|| failure::format_err!("No udp socket to read from"))? 
.sock .clone()) })?; let (length, sender) = sock.recv_from(&mut buf).await?; println!("UDP_RECV from:{} len:{}", sender, length); let sender = normalize_addr(sender); with_app_mut(|app| -> Result<(), Error> { if let Some(link_id) = app.udp_link_ids.get(&sender) { app.node.queue_recv(*link_id, &mut buf[..length]); } else { log::warn!("No link for received packet {:?}", sender); } Ok(()) })?; } } async fn read_udp() { if let Err(e) = read_udp_inner().await { log::warn!("UDP read loop failed: {:?}", e); } } fn register_udp(addr: SocketAddr, node_id: NodeId) -> Result<(), Error> { with_app_mut(|app| { app.node.mention_node(node_id); let addr = normalize_addr(addr); if app.udp_link_ids.get(&addr).is_none() { let rtr_id = app.node.new_link(node_id, AppLinkId::Udp(addr))?; println!("register peer: {} node_id={:?} rtr_id={:?}", addr, node_id, rtr_id); app.udp_link_ids.insert(addr, rtr_id); } Ok(()) }) } trait ListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error>; } impl ListPeersResponder for ServiceConsumerListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error> { self.send(peers) } } impl ListPeersResponder for OvernetListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error> { self.send(peers) } } async fn run_list_peers_inner(responder: impl ListPeersResponder) -> Result<(), Error> { let mut peers = with_app_mut(|app| app.node.clone().list_peers()).await?; responder.respond(&mut peers.iter_mut())?; Ok(()) } async fn run_list_peers(responder: impl ListPeersResponder) { if let Err(e) = run_list_peers_inner(responder).await { log::warn!("List peers gets error: {:?}", e); } } async fn run_service_publisher_server( mut stream: ServicePublisherRequestStream, ) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { ServicePublisherRequest::PublishService { service_name, provider, .. } => { app.node.register_service(service_name, provider) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e) } } Ok(()) } async fn run_service_consumer_server( mut stream: ServiceConsumerRequestStream, ) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { ServiceConsumerRequest::ListPeers { responder, .. } => { fasync::spawn_local(run_list_peers(responder)); Ok(()) } ServiceConsumerRequest::ConnectToService { node, service_name, chan, .. } => { app.node.connect_to_service(node.id.into(), &service_name, chan) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } async fn run_mesh_controller_server(mut stream: MeshControllerRequestStream) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { MeshControllerRequest::AttachSocketLink { socket, options, .. } => { app.node.attach_socket_link(socket, options) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } async fn run_legacy_overnet_server(mut stream: OvernetRequestStream) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? 
{ let result = with_app_mut(|app| match request { OvernetRequest::PublishService { service_name, provider, .. } => { app.node.register_service(service_name, provider) } OvernetRequest::ListPeers { responder, .. } => { fasync::spawn_local(run_list_peers(responder)); Ok(()) } OvernetRequest::ConnectToService { node, service_name, chan, .. } => { app.node.connect_to_service(node.id.into(), &service_name, chan) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } enum IncomingService { ServiceConsumer(ServiceConsumerRequestStream), ServicePublisher(ServicePublisherRequestStream), MeshController(MeshControllerRequestStream), LegacyOvernet(OvernetRequestStream), } #[fasync::run_singlethreaded] async fn main() -> Result<(), Error> { fuchsia_syslog::init_with_tags(&["overnet"]).context("initialize logging")?; let mut fs = ServiceFs::new_local(); let mut svc_dir = fs.dir("svc"); svc_dir.add_fidl_service(IncomingService::ServiceConsumer); svc_dir.add_fidl_service(IncomingService::ServicePublisher); svc_dir.add_fidl_service(IncomingService::MeshController); svc_dir.add_fidl_service(IncomingService::LegacyOvernet); fs.take_and_serve_directory_handle()?; with_app_mut(|app| -> Result<(), Error> { app.udp_socket = Some(UdpSocketHolder::new(app.node.id())?); fasync::spawn_local(mdns::subscribe()); fasync::spawn_local(read_udp()); Ok(()) }) .context("Initializing UDP & MDNS")?; const MAX_CONCURRENT: usize = 10_000; fs.for_each_concurrent(MAX_CONCURRENT, |svcreq| match svcreq { IncomingService::MeshController(stream) => { run_mesh_controller_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::ServicePublisher(stream) => { run_service_publisher_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::ServiceConsumer(stream) => { run_service_consumer_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::LegacyOvernet(stream) => { run_legacy_overnet_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } }) .await; Ok(()) }
#![deny(missing_docs)] mod mdns; use failure::{Error, ResultExt}; use fidl_fuchsia_overnet::{ MeshControllerRequest, MeshControllerRequestStream, OvernetListPeersResponder, OvernetRequest, OvernetRequestStream, ServiceConsumerListPeersResponder, ServiceConsumerRequest, ServiceConsumerRequestStream, ServicePublisherRequest, ServicePublisherRequestStream, }; use fuchsia_async as fasync; use fuchsia_component::server::ServiceFs; use fuchsia_zircon as zx; use futures::future::{abortable, AbortHandle}; use futures::prelude::*; use overnet_core::{LinkId, Node, NodeId, NodeOptions, NodeRuntime, RouterTime, SendHandle}; use std::cell::RefCell; use std::collections::HashMap; use std::net::{SocketAddr, SocketAddrV6}; use std::ops::Deref; use std::rc::Rc; use zx::AsHandleRef; #[derive(Clone, Copy, Debug)] enum AppLinkId { Udp(SocketAddrV6), } #[derive(PartialEq, PartialOrd, Eq, Ord, Clone, Copy, Debug)] struct Time(fasync::Time); impl RouterTime for Time { type Duration = zx::Duration; fn now() -> Self { Time(fasync::Time::now()) } fn after(time: Self, duration: zx::Duration) -> Self { Self(time.0 + duration) } } struct AppRuntime; impl NodeRuntime for AppRuntime { type Time = Time; type LinkId = AppLinkId; const IMPLEMENTATION: fidl_fuchsia_overnet_protocol::Implementation = fidl_fuchsia_overnet_protocol::Implementation::OvernetStack; fn handle_type(handle: &zx::Handle) -> Result<SendHandle, Error> { match handle.basic_info()?.object_type { zx::ObjectType::CHANNEL => Ok(SendHandle::Channel), _ => failure::bail!("Handle type not proxyable {:?}", handle.basic_info()?.object_type), } } fn spawn_local<F>(&mut self, future: F) where F: Future<Output = ()> + 'static, { fasync::spawn_local(future) } fn at(&mut self, t: Self::Time, f: impl FnOnce() + 'static) { fasync::spawn_local(at(t.0, f)) } fn router_link_id(&self, id: A
fn send_on_link(&mut self, id: Self::LinkId, packet: &mut [u8]) -> Result<(), Error> { match id { AppLinkId::Udp(addr) => { println!("UDP_SEND to:{} len:{}", addr, packet.len()); let sock = with_app_mut(|app| -> Result<_, Error> { Ok(app .udp_socket .as_ref() .ok_or_else(|| failure::format_err!("no udp socket"))? .sock .clone()) })?; let sock = sock.deref().as_ref(); if let Err(e) = sock.send_to(packet, addr) { if e.kind() == std::io::ErrorKind::BrokenPipe { log::warn!("BrokenPipe on UDP socket: let's make a new one"); with_app_mut(|app| { app.udp_socket.take(); app.udp_socket = Some(UdpSocketHolder::new(app.node_id)?); Ok(()) }) } else { Err(e.into()) } } else { Ok(()) } } } } } struct UdpSocketHolder { sock: Rc<fasync::net::UdpSocket>, abort_publisher: AbortHandle, } impl UdpSocketHolder { fn new(node_id: NodeId) -> Result<Self, Error> { let sock = std::net::UdpSocket::bind("[::]:0").context("Creating UDP socket")?; let publisher = mdns::publish(node_id, sock.local_addr().context("Getting UDP local address")?.port()); let sock = Rc::new(fasync::net::UdpSocket::from_socket(sock)?); let (publisher, abort_publisher) = abortable(publisher); fasync::spawn_local(async move { let _ = publisher.await; }); Ok(Self { sock, abort_publisher }) } } impl Drop for UdpSocketHolder { fn drop(&mut self) { self.abort_publisher.abort(); } } struct App { node_id: NodeId, node: Node<AppRuntime>, udp_link_ids: HashMap<SocketAddrV6, LinkId<overnet_core::PhysLinkId<AppLinkId>>>, udp_socket: Option<UdpSocketHolder>, } thread_local! { static APP: RefCell<App> = RefCell::new(App::new()); } fn with_app_mut<R>(f: impl FnOnce(&mut App) -> R) -> R { APP.with(|rcapp| f(&mut rcapp.borrow_mut())) } async fn at(when: fasync::Time, f: impl FnOnce()) { fasync::Timer::new(when).await; f(); } impl App { fn new() -> App { let node = Node::new( AppRuntime, NodeOptions::new() .set_quic_server_key_file(Box::new("/pkg/data/cert.key".to_string())) .set_quic_server_cert_file(Box::new("/pkg/data/cert.crt".to_string())), ) .unwrap(); App { node_id: node.id(), node, udp_link_ids: HashMap::new(), udp_socket: None } } } fn normalize_addr(addr: SocketAddr) -> SocketAddrV6 { match addr { SocketAddr::V6(a) => a, SocketAddr::V4(a) => SocketAddrV6::new(a.ip().to_ipv6_mapped(), a.port(), 0, 0), } } async fn read_udp_inner() -> Result<(), Error> { let mut buf: [u8; 1500] = [0; 1500]; loop { let sock = with_app_mut(|app| -> Result<_, Error> { Ok(app .udp_socket .as_ref() .ok_or_else(|| failure::format_err!("No udp socket to read from"))? 
.sock .clone()) })?; let (length, sender) = sock.recv_from(&mut buf).await?; println!("UDP_RECV from:{} len:{}", sender, length); let sender = normalize_addr(sender); with_app_mut(|app| -> Result<(), Error> { if let Some(link_id) = app.udp_link_ids.get(&sender) { app.node.queue_recv(*link_id, &mut buf[..length]); } else { log::warn!("No link for received packet {:?}", sender); } Ok(()) })?; } } async fn read_udp() { if let Err(e) = read_udp_inner().await { log::warn!("UDP read loop failed: {:?}", e); } } fn register_udp(addr: SocketAddr, node_id: NodeId) -> Result<(), Error> { with_app_mut(|app| { app.node.mention_node(node_id); let addr = normalize_addr(addr); if app.udp_link_ids.get(&addr).is_none() { let rtr_id = app.node.new_link(node_id, AppLinkId::Udp(addr))?; println!("register peer: {} node_id={:?} rtr_id={:?}", addr, node_id, rtr_id); app.udp_link_ids.insert(addr, rtr_id); } Ok(()) }) } trait ListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error>; } impl ListPeersResponder for ServiceConsumerListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error> { self.send(peers) } } impl ListPeersResponder for OvernetListPeersResponder { fn respond( self, peers: &mut dyn ExactSizeIterator<Item = &mut fidl_fuchsia_overnet::Peer>, ) -> Result<(), fidl::Error> { self.send(peers) } } async fn run_list_peers_inner(responder: impl ListPeersResponder) -> Result<(), Error> { let mut peers = with_app_mut(|app| app.node.clone().list_peers()).await?; responder.respond(&mut peers.iter_mut())?; Ok(()) } async fn run_list_peers(responder: impl ListPeersResponder) { if let Err(e) = run_list_peers_inner(responder).await { log::warn!("List peers gets error: {:?}", e); } } async fn run_service_publisher_server( mut stream: ServicePublisherRequestStream, ) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { ServicePublisherRequest::PublishService { service_name, provider, .. } => { app.node.register_service(service_name, provider) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e) } } Ok(()) } async fn run_service_consumer_server( mut stream: ServiceConsumerRequestStream, ) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { ServiceConsumerRequest::ListPeers { responder, .. } => { fasync::spawn_local(run_list_peers(responder)); Ok(()) } ServiceConsumerRequest::ConnectToService { node, service_name, chan, .. } => { app.node.connect_to_service(node.id.into(), &service_name, chan) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } async fn run_mesh_controller_server(mut stream: MeshControllerRequestStream) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? { let result = with_app_mut(|app| match request { MeshControllerRequest::AttachSocketLink { socket, options, .. } => { app.node.attach_socket_link(socket, options) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } async fn run_legacy_overnet_server(mut stream: OvernetRequestStream) -> Result<(), Error> { while let Some(request) = stream.try_next().await.context("error running overnet server")? 
{ let result = with_app_mut(|app| match request { OvernetRequest::PublishService { service_name, provider, .. } => { app.node.register_service(service_name, provider) } OvernetRequest::ListPeers { responder, .. } => { fasync::spawn_local(run_list_peers(responder)); Ok(()) } OvernetRequest::ConnectToService { node, service_name, chan, .. } => { app.node.connect_to_service(node.id.into(), &service_name, chan) } }); if let Err(e) = result { log::warn!("Error servicing request: {:?}", e); } } Ok(()) } enum IncomingService { ServiceConsumer(ServiceConsumerRequestStream), ServicePublisher(ServicePublisherRequestStream), MeshController(MeshControllerRequestStream), LegacyOvernet(OvernetRequestStream), } #[fasync::run_singlethreaded] async fn main() -> Result<(), Error> { fuchsia_syslog::init_with_tags(&["overnet"]).context("initialize logging")?; let mut fs = ServiceFs::new_local(); let mut svc_dir = fs.dir("svc"); svc_dir.add_fidl_service(IncomingService::ServiceConsumer); svc_dir.add_fidl_service(IncomingService::ServicePublisher); svc_dir.add_fidl_service(IncomingService::MeshController); svc_dir.add_fidl_service(IncomingService::LegacyOvernet); fs.take_and_serve_directory_handle()?; with_app_mut(|app| -> Result<(), Error> { app.udp_socket = Some(UdpSocketHolder::new(app.node.id())?); fasync::spawn_local(mdns::subscribe()); fasync::spawn_local(read_udp()); Ok(()) }) .context("Initializing UDP & MDNS")?; const MAX_CONCURRENT: usize = 10_000; fs.for_each_concurrent(MAX_CONCURRENT, |svcreq| match svcreq { IncomingService::MeshController(stream) => { run_mesh_controller_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::ServicePublisher(stream) => { run_service_publisher_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::ServiceConsumer(stream) => { run_service_consumer_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } IncomingService::LegacyOvernet(stream) => { run_legacy_overnet_server(stream).unwrap_or_else(|e| log::trace!("{:?}", e)).boxed() } }) .await; Ok(()) }
ppLinkId) -> LinkId<overnet_core::PhysLinkId<AppLinkId>> { with_app_mut(|app| match id { AppLinkId::Udp(addr) => { app.udp_link_ids.get(&addr).copied().unwrap_or(LinkId::invalid()) } }) }
function_block-function_prefixed
[]
Rust
src/fmtstr.rs
MikaelSmith/strfmt
4ad7c2240203df3eec954fb00a31e87d74812339
use std::fmt::Write; use std::string::String; use types::*; use formatter::Formatter; fn write_char(f: &mut Formatter, c: char, n: usize) { for _ in 0..n { f.write_char(c).unwrap(); } } #[test] fn test_write_char() { let mut s = String::new(); s.write_str("h ").unwrap(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_char(&mut f, 'f', 3); } assert!(s == "h fff"); } fn write_from<I>(fmt: &mut Formatter, f: I, n: usize) -> usize where I: Iterator<Item = char> { if n == 0 { return 0; } let mut n_written: usize = 0; for c in f { fmt.write_char(c).unwrap(); n_written += 1; if n_written == n { return n_written; } } n_written } #[test] fn test_write_from() { let mut s = String::new(); s.write_str("h ").unwrap(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "fff".chars(), 5); } assert!(s == "h fff"); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "xxxx".chars(), 2); } assert!(s == "h fffxx"); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "333".chars(), 3); } assert!(s == "h fffxx333"); s.clear(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write!(f, "hello").unwrap(); } assert!(s == "hello"); } impl<'a, 'b> Formatter<'a, 'b> { pub fn str(&mut self, s: &str) -> Result<()> { if !(self.ty() == None || self.ty() == Some('s')) { let mut msg = String::new(); write!(msg, "Unknown format code {:?} for object of type 'str'", self.ty()).unwrap(); return Err(FmtError::TypeError(msg)); } else if self.alternate() { return Err(FmtError::TypeError("Alternate form (#) not allowed in string \ format specifier".to_string())); } else if self.thousands() { return Err(FmtError::TypeError("Cannot specify ',' with 's'".to_string())); } else if self.sign().is_unspecified() { return Err(FmtError::TypeError("Sign not allowed in string format specifier" .to_string())); } self.str_unchecked(s) } pub fn str_unchecked(&mut self, s: &str) -> Result<()> { let fill = self.fill(); let width = self.width(); let precision = self.precision(); let len = match precision { Some(p) => if p < s.len() { p } else { s.len() }, None => s.len(), }; let mut chars = s.chars(); let mut pad: usize = 0; if let Some(mut width) = width { if width > len { let align = self.align(); match align { Alignment::Left => pad = width - len, Alignment::Center => { width -= len; pad = width / 2; write_char(self, fill, pad); pad += width % 2; } Alignment::Right => { write_char(self, fill, width - len); } Alignment::Equal => return Err(FmtError::Invalid( "sign aware zero padding and Align '=' not yet supported".to_string())), } } } write_from(self, &mut chars, len); write_char(self, fill, pad); Ok(()) } } pub fn strfmt_map<F>(fmtstr: &str, f: &F) -> Result<String> where F: Fn(Formatter) -> Result<()> { let mut out = String::with_capacity(fmtstr.len() * 2); let mut bytes_read: usize = 0; let mut opening_brace: usize = 0; let mut closing_brace: bool = false; let mut reading_fmt = false; let mut remaining = fmtstr; for c in fmtstr.chars() { bytes_read += c.len_utf8(); if c == '{' { if reading_fmt && opening_brace == bytes_read - 2 { out.push(c); reading_fmt = false; } else if !reading_fmt { reading_fmt = true; opening_brace = bytes_read - 1; } else { out.clear(); out.write_str("extra { found").unwrap(); return Err(FmtError::Invalid(out)); } } else if c == '}' { if !reading_fmt && !closing_brace { closing_brace = true; } else if closing_brace { out.push(c); closing_brace = false; } else { let (_, r) = remaining.split_at(opening_brace); let 
(fmt_pattern, r) = r.split_at(bytes_read - opening_brace); remaining = r; let (_, fmt_pattern) = fmt_pattern.split_at(1); let (fmt_pattern, _) = fmt_pattern.split_at(fmt_pattern.len() - 1); let fmt = try!(Formatter::from_str(fmt_pattern, &mut out)); try!(f(fmt)); reading_fmt = false; bytes_read = 0; } } else if closing_brace { return Err(FmtError::Invalid("Single '}' encountered in format string".to_string())); } else if !reading_fmt { out.push(c) } } if closing_brace { return Err(FmtError::Invalid("Single '}' encountered in format string".to_string())); } else if reading_fmt { return Err(FmtError::Invalid("Expected '}' before end of string".to_string())); } out.shrink_to_fit(); Ok(out) }
use std::fmt::Write; use std::string::String; use types::*; use formatter::Formatter; fn write_char(f: &mut Formatter, c: char, n: usize) { for _ in 0..n { f.write_char(c).unwrap(); } } #[test] fn test_write_char() { let mut s = String::new(); s.write_str("h ").unwrap(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_char(&mut f, 'f', 3); } assert!(s == "h fff"); } fn write_from<I>(fmt: &mut Formatter, f: I, n: usize) -> usize where I: Iterator<Item = char> { if n == 0 { return 0; } let mut n_written: usize = 0; for c in f { fmt.write_char(c).unwrap(); n_written += 1; if n_written == n { return n_written; } } n_written } #[test] fn test_write_from() { let mut s = String::new(); s.write_str("h ").unwrap(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "fff".chars(), 5); } assert!(s == "h fff"); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "xxxx".chars(), 2); } assert!(s == "h fffxx"); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write_from(&mut f, "333".chars(), 3); } assert!(s == "h fffxx333"); s.clear(); { let mut f = Formatter::from_str("{}", &mut s).unwrap(); write!(f, "hello").unwrap(); } assert!(s == "hello"); } impl<'a, 'b> Formatter<'a, 'b> {
pub fn str_unchecked(&mut self, s: &str) -> Result<()> { let fill = self.fill(); let width = self.width(); let precision = self.precision(); let len = match precision { Some(p) => if p < s.len() { p } else { s.len() }, None => s.len(), }; let mut chars = s.chars(); let mut pad: usize = 0; if let Some(mut width) = width { if width > len { let align = self.align(); match align { Alignment::Left => pad = width - len, Alignment::Center => { width -= len; pad = width / 2; write_char(self, fill, pad); pad += width % 2; } Alignment::Right => { write_char(self, fill, width - len); } Alignment::Equal => return Err(FmtError::Invalid( "sign aware zero padding and Align '=' not yet supported".to_string())), } } } write_from(self, &mut chars, len); write_char(self, fill, pad); Ok(()) } } pub fn strfmt_map<F>(fmtstr: &str, f: &F) -> Result<String> where F: Fn(Formatter) -> Result<()> { let mut out = String::with_capacity(fmtstr.len() * 2); let mut bytes_read: usize = 0; let mut opening_brace: usize = 0; let mut closing_brace: bool = false; let mut reading_fmt = false; let mut remaining = fmtstr; for c in fmtstr.chars() { bytes_read += c.len_utf8(); if c == '{' { if reading_fmt && opening_brace == bytes_read - 2 { out.push(c); reading_fmt = false; } else if !reading_fmt { reading_fmt = true; opening_brace = bytes_read - 1; } else { out.clear(); out.write_str("extra { found").unwrap(); return Err(FmtError::Invalid(out)); } } else if c == '}' { if !reading_fmt && !closing_brace { closing_brace = true; } else if closing_brace { out.push(c); closing_brace = false; } else { let (_, r) = remaining.split_at(opening_brace); let (fmt_pattern, r) = r.split_at(bytes_read - opening_brace); remaining = r; let (_, fmt_pattern) = fmt_pattern.split_at(1); let (fmt_pattern, _) = fmt_pattern.split_at(fmt_pattern.len() - 1); let fmt = try!(Formatter::from_str(fmt_pattern, &mut out)); try!(f(fmt)); reading_fmt = false; bytes_read = 0; } } else if closing_brace { return Err(FmtError::Invalid("Single '}' encountered in format string".to_string())); } else if !reading_fmt { out.push(c) } } if closing_brace { return Err(FmtError::Invalid("Single '}' encountered in format string".to_string())); } else if reading_fmt { return Err(FmtError::Invalid("Expected '}' before end of string".to_string())); } out.shrink_to_fit(); Ok(out) }
pub fn str(&mut self, s: &str) -> Result<()> { if !(self.ty() == None || self.ty() == Some('s')) { let mut msg = String::new(); write!(msg, "Unknown format code {:?} for object of type 'str'", self.ty()).unwrap(); return Err(FmtError::TypeError(msg)); } else if self.alternate() { return Err(FmtError::TypeError("Alternate form (#) not allowed in string \ format specifier".to_string())); } else if self.thousands() { return Err(FmtError::TypeError("Cannot specify ',' with 's'".to_string())); } else if self.sign().is_unspecified() { return Err(FmtError::TypeError("Sign not allowed in string format specifier" .to_string())); } self.str_unchecked(s) }
function_block-full_function
[ { "content": "fn is_type_element(c: char) -> bool {\n\n match c {\n\n 'b' |\n\n 'o' |\n\n 'x' |\n\n 'X' |\n\n 'e' |\n\n 'E' |\n\n 'f' |\n\n 'F' |\n\n '%' |\n\n 's' |\n\n '?' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/formatter.rs", "rank": 1, "score": 116951.74280806896 }, { "content": "fn is_alignment_token(c: char) -> bool {\n\n match c {\n\n '=' | '<' | '^' | '>' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/formatter.rs", "rank": 3, "score": 96361.55174882221 }, { "content": "fn is_sign_element(c: char) -> bool {\n\n match c {\n\n ' ' | '-' | '+' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/formatter.rs", "rank": 4, "score": 96361.55174882221 }, { "content": "// get an integer from pos, returning the number of bytes\n\n// consumed and the integer\n\nfn get_integer(s: &[u8], pos: usize) -> (usize, Option<i64>) {\n\n let (_, rest) = s.split_at(pos);\n\n let mut consumed: usize = 0;\n\n for b in rest {\n\n match *b as char {\n\n '0'...'9' => {}\n\n _ => break,\n\n };\n\n consumed += 1;\n\n }\n\n if consumed == 0 {\n\n (0, None)\n\n } else {\n\n let (intstr, _) = rest.split_at(consumed);\n\n let val = unsafe {\n\n // I think I can be reasonably sure that 0-9 chars are utf8 :)\n\n match str::from_utf8_unchecked(intstr).parse::<i64>() {\n\n Ok(v) => Some(v),\n\n Err(_) => None,\n\n }\n\n };\n\n (consumed, val)\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug)]\n", "file_path": "src/formatter.rs", "rank": 7, "score": 63249.900023725146 }, { "content": "#[test]\n\nfn test_trait() {\n\n let mut vars: HashMap<String, String> = HashMap::new();\n\n vars.insert(\"x\".to_string(), \"X\".to_string());\n\n\n\n assert_eq!(\"hi {x}\".format(&vars).unwrap(), \"hi X\");\n\n assert_eq!(\"hi {x}\".to_string().format(&vars).unwrap(), \"hi X\");\n\n}\n", "file_path": "src/tests/test_trait.rs", "rank": 8, "score": 58573.39885277506 }, { "content": "#[test]\n\nfn test_values() {\n\n let mut vars: HashMap<String, String> = HashMap::new();\n\n let too_long = \"toooloooong\".to_string();\n\n vars.insert(\"x\".to_string(), \"X\".to_string());\n\n vars.insert(\"long\".to_string(), too_long.clone()); // len=10\n\n vars.insert(\"hi\".to_string(), \"hi\".to_string());\n\n\n\n // format, expected, error\n\n // error codes: 0 == no error, 1 == Invalid, 2 == KeyError\n\n let values: Vec<(&str, &str, u8)> = vec![\n\n // simple positioning\n\n (\"{x}\", \"X\", 0),\n\n (\"{x:}\", \"X\", 0),\n\n (\"{x:3}\", \" X\", 0),\n\n (\"{x:>3}\", \" X\", 0),\n\n (\"{x:<3}\", \"X \", 0),\n\n (\"{x:^3}\", \" X \", 0),\n\n (\"{x:^4}\", \" X \", 0),\n\n\n\n // extra text\n", "file_path": "src/tests/strfmt.rs", "rank": 9, "score": 56944.465638539565 }, { "content": "#[test]\n\nfn test_error() {\n\n // just make sure this compiles mostly\n\n let err = FmtError::Invalid(\"fmt error\".to_string());\n\n let v = err.to_string();\n\n println!(\"{}\", v);\n\n}\n", "file_path": "src/tests/mod.rs", "rank": 10, "score": 56944.465638539565 }, { "content": "/// test using integers directly into format (uses Display)\n\nfn test_ints_basic() {\n\n let mut vars: HashMap<String, u64> = HashMap::new();\n\n vars.insert(\"x\".to_string(), 6);\n\n vars.insert(\"long\".to_string(), 100000); // len=10\n\n vars.insert(\"hi\".to_string(), 42);\n\n\n\n // format, expected, error\n\n // error codes: 0 == no error, 1 == Invalid, 2 == KeyError\n\n let values: Vec<(&str, &str, u8)> = vec![\n\n // simple positioning\n\n (\"{x}\", \"6\", 0),\n\n (\"{long}\", \"100000\", 0),\n\n (\" the answer is {hi}, haven't you read 
anything?\",\n\n \" the answer is 42, haven't you read anything?\", 0),\n\n ];\n\n\n\n run_tests(&values, &vars, &strfmt);\n\n}\n\n\n", "file_path": "src/tests/strfmt.rs", "rank": 11, "score": 55088.413049748786 }, { "content": "#[test]\n\nfn test_key_u32() {\n\n let mut vars: HashMap<u32, String> = HashMap::new();\n\n vars.insert(0, \"X\".to_string());\n\n\n\n assert_eq!(\"hi {0}\".format(&vars).unwrap(), \"hi X\");\n\n assert_eq!(\"hi {0}\".to_string().format(&vars).unwrap(), \"hi X\");\n\n assert_eq!(\n\n \"hi {1}\".format(&vars),\n\n Err(FmtError::KeyError(\"Invalid key: 1\".into()))\n\n );\n\n assert_eq!(\n\n \"hi {you}\".format(&vars),\n\n Err(FmtError::KeyError(\"Invalid key: you\".into()))\n\n );\n\n}\n\n\n", "file_path": "src/tests/key.rs", "rank": 12, "score": 55085.716199500974 }, { "content": "#[test]\n\nfn test_fmt_from_str() {\n\n let s = String::new();\n\n {\n\n let mut s = s.clone();\n\n let f = Formatter::from_str(\"x:<.3\", &mut s).unwrap();\n\n // defaults\n\n assert_eq!(f.fill(), ' ');\n\n assert_eq!(f.sign(), Sign::Unspecified);\n\n assert_eq!(f.alternate(), false);\n\n assert_eq!(f.width(), None);\n\n assert_eq!(f.thousands(), false);\n\n assert_eq!(f.ty(), None);\n\n\n\n // specified\n\n assert_eq!(f.key, \"x\");\n\n assert_eq!(f.precision().unwrap(), 3);\n\n assert_eq!(f.align(), Alignment::Left);\n\n }\n\n assert!(Formatter::from_str(\"x:^.3\", &mut s.clone()).is_ok());\n\n assert!(Formatter::from_str(\"xxx: <88.3\", &mut s.clone()).is_ok());\n\n assert!(Formatter::from_str(\"xxx: <88.3\", &mut s.clone()).is_err());\n\n assert!(Formatter::from_str(\"xxx:a34\", &mut s.clone()).is_err());\n\n}\n", "file_path": "src/tests/fmt.rs", "rank": 13, "score": 55085.716199500974 }, { "content": "#[test]\n\nfn test_key_enum() {\n\n let mut vars: HashMap<Key, String> = HashMap::new();\n\n vars.insert(Key::Zero, \"X\".to_string());\n\n\n\n assert_eq!(\"hi {Zero}\".format(&vars).unwrap(), \"hi X\");\n\n assert_eq!(\"hi {Zero}\".to_string().format(&vars).unwrap(), \"hi X\");\n\n assert_eq!(\n\n \"hi {One}\".format(&vars),\n\n Err(FmtError::KeyError(\"Invalid key: One\".into()))\n\n );\n\n assert_eq!(\n\n \"hi {you}\".format(&vars),\n\n Err(FmtError::KeyError(\"Invalid key: you\".into()))\n\n );\n\n}\n", "file_path": "src/tests/key.rs", "rank": 14, "score": 55085.716199500974 }, { "content": "#[test]\n\nfn test_key_i32() {\n\n let mut vars: HashMap<i32, String> = HashMap::new();\n\n vars.insert(-1, \"X\".to_string());\n\n\n\n assert_eq!(\"hi {-1}\".format(&vars).unwrap(), \"hi X\");\n\n assert_eq!(\"hi {-1}\".to_string().format(&vars).unwrap(), \"hi X\");\n\n assert_eq!(\n\n \"hi {1}\".format(&vars),\n\n Err(FmtError::KeyError(\"Invalid key: 1\".into()))\n\n );\n\n assert_eq!(\n\n \"hi {you}\".format(&vars),\n\n Err(FmtError::KeyError(\"Invalid key: you\".into()))\n\n );\n\n}\n\n\n", "file_path": "src/tests/key.rs", "rank": 15, "score": 55085.716199500974 }, { "content": "#[test]\n\nfn test_ignore_missing() {\n\n let mut vars: HashMap<String, String> = HashMap::new();\n\n vars.insert(\"x\".to_string(), \"X\".to_string());\n\n let values: Vec<(&str, &str, u8)> = vec![\n\n // simple positioning\n\n (\"{y}\", \"{y}\", 0),\n\n (\"{y} {x}\", \"{y} X\", 0),\n\n (\"{x} {longish:<32.3} {x} is nice\", \"X {longish:<32.3} X is nice\", 0),\n\n ];\n\n let f = |mut fmt: Formatter| {\n\n match vars.get(fmt.key) {\n\n Some(v) => fmt.str(v),\n\n None => fmt.skip(),\n\n }\n\n };\n\n\n\n let strfmt_ignore = |fmtstr: &str, vars: &HashMap<String, String>| -> Result<String> {\n\n strfmt_map(fmtstr, 
&f)\n\n };\n\n run_tests(&values, &vars, &strfmt_ignore);\n", "file_path": "src/tests/strfmt.rs", "rank": 16, "score": 55085.716199500974 }, { "content": "fn run_tests<T: fmt::Display>(values: &Vec<(&str, &str, u8)>,\n\n vars: &HashMap<String, T>,\n\n call: &Fn(&str, &HashMap<String, T>)\n\n -> Result<String>) {\n\n for &(fmtstr, expected, expect_err) in values.iter() {\n\n let result = call(fmtstr, vars);\n\n let mut failure = match expect_err {\n\n 0 => result.is_err(),\n\n 1 => !matches!(result, Err(FmtError::Invalid(_))),\n\n 2 => !matches!(result, Err(FmtError::KeyError(_))),\n\n 3 => !matches!(result, Err(FmtError::TypeError(_))),\n\n c@_ => panic!(\"error code {} DNE\", c),\n\n };\n\n let result = match result {\n\n Err(e) => e.to_string(),\n\n Ok(s) => s,\n\n };\n\n if !failure && expect_err == 0 {\n\n failure = !(expected == result);\n\n }\n", "file_path": "src/tests/strfmt.rs", "rank": 17, "score": 40913.99094554776 }, { "content": "fn parse_like_python(rest: &str) -> Result<FmtPy> {\n\n // The rest of this was pretty much strait up copied from python's format parser\n\n // All credit goes to python source file: formatter_unicode.c\n\n //\n\n\n\n let mut format = FmtPy {\n\n fill: ' ',\n\n align: '>',\n\n alternate: false,\n\n sign: '\\0',\n\n width: -1,\n\n thousands: false,\n\n precision: -1,\n\n ty: '\\0',\n\n };\n\n let mut chars = rest.chars();\n\n let fake_fill = match chars.next() {\n\n Some(c) => c,\n\n None => return Ok(format),\n\n };\n", "file_path": "src/formatter.rs", "rank": 18, "score": 39554.60846454082 }, { "content": "#[test]\n\nfn it_works() {\n\n let mut vars = HashMap::new();\n\n vars.insert(\"name\".to_string(), \"bob\".to_string());\n\n vars.insert(\"job\".to_string(), \"python developer\".to_string());\n\n\n\n let fmt = \"hi, my name is {name} and I am a {job}!\".to_string();\n\n assert!(strfmt(&fmt, &vars).unwrap() == \"hi, my name is bob and I am a python developer!\");\n\n\n\n\n\n let mut vars: HashMap<String, f64> = HashMap::new();\n\n vars.insert(\"x\".to_string(), 42.4242);\n\n vars.insert(\"y\".to_string(), -100.11111);\n\n vars.insert(\"z\".to_string(), 0.);\n\n\n\n let f = |mut fmt: Formatter| {\n\n fmt.f64(*vars.get(fmt.key).unwrap())\n\n };\n\n assert_eq!(strfmt_map(\"{x:<7.2}\", f).unwrap(), \"42.42 \");\n\n assert_eq!(strfmt_map(\"{y:+.2E}\", f).unwrap(), \"-1.00E2\");\n\n assert_eq!(strfmt_map(\"{z:+.2E}\", f).unwrap(), \"+0.00E0\");\n\n}\n", "file_path": "example/src/lib.rs", "rank": 19, "score": 31447.468075752127 }, { "content": "\n\nuse std::collections::HashMap;\n\nuse super::super::*;\n\n\n\n#[test]\n", "file_path": "src/tests/test_trait.rs", "rank": 20, "score": 27527.625067879933 }, { "content": "\n\nimpl fmt::Display for FmtError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n FmtError::Invalid(ref s) => write!(f, \"Invalid({})\", s),\n\n FmtError::KeyError(ref s) => write!(f, \"KeyError({})\", s),\n\n FmtError::TypeError(ref s) => write!(f, \"TypeError({})\", s),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for FmtError {\n\n fn description(&self) -> &str {\n\n match *self {\n\n FmtError::Invalid(_) => \"invalid format string\",\n\n FmtError::KeyError(_) => \"invalid key\",\n\n FmtError::TypeError(_) => \"error during type resolution\",\n\n }\n\n }\n\n\n", "file_path": "src/types.rs", "rank": 21, "score": 25514.92835272645 }, { "content": " fn cause(&self) -> Option<&error::Error> {\n\n None\n\n }\n\n}\n\n\n\n// enum Type {\n\n// // integer types\n\n// Bin,\n\n// Char,\n\n// Decimal,\n\n// 
Octal,\n\n// Hex,\n\n// HexUpper,\n\n\n\n// // both\n\n// Number,\n\n\n\n// // Floating point types\n\n// Exponent,\n\n// ExponentUpper,\n", "file_path": "src/types.rs", "rank": 22, "score": 25514.51989708409 }, { "content": "use std::fmt;\n\nuse std::error;\n\nuse std::string::String;\n\nuse std::result;\n\n\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Alignment {\n\n Left,\n\n Center,\n\n Right, // default\n\n Equal,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Sign {\n\n Unspecified,\n\n Plus,\n\n Minus,\n\n Space,\n", "file_path": "src/types.rs", "rank": 23, "score": 25513.322470897663 }, { "content": "}\n\n\n\nimpl Sign {\n\n pub fn is_unspecified(&self) -> bool {\n\n match *self {\n\n Sign::Unspecified => false,\n\n _ => true,\n\n }\n\n }\n\n}\n\n\n\npub type Result<T> = result::Result<T, FmtError>;\n\n\n\n/// LOC-error\n\n#[derive(Debug, PartialEq)]\n\npub enum FmtError {\n\n Invalid(String), // format string is structued incorrectly\n\n KeyError(String), // key error in formatting string\n\n TypeError(String), // invalid type used\n\n}\n", "file_path": "src/types.rs", "rank": 24, "score": 25513.21258220881 }, { "content": "// Fixed,\n\n// General,\n\n// GeneralUppercase,\n\n// Percengage,\n\n\n\n// // other types\n\n// None,\n\n// String,\n\n// Debug,\n\n// }\n\n\n", "file_path": "src/types.rs", "rank": 25, "score": 25510.495832521636 }, { "content": "fn new_key_error(key: &str) -> FmtError {\n\n let mut msg = String::new();\n\n write!(msg, \"Invalid key: {}\", key).unwrap();\n\n FmtError::KeyError(msg)\n\n}\n", "file_path": "src/lib.rs", "rank": 26, "score": 24573.246041044782 }, { "content": "use std::str;\n\nuse std::fmt;\n\nuse std::fmt::Write;\n\nuse std::iter::Iterator;\n\nuse std::string::String;\n\n\n\nuse types::*;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Formatter<'a, 'b> {\n\n pub key: &'a str,\n\n fill: char,\n\n align: Alignment, // default Right\n\n sign: Sign,\n\n alternate: bool,\n\n width: Option<usize>,\n\n thousands: bool,\n\n precision: Option<usize>,\n\n ty: Option<char>,\n\n buff: &'b mut String,\n\n pattern: &'a str,\n\n}\n\n\n", "file_path": "src/formatter.rs", "rank": 27, "score": 23979.902989267986 }, { "content": "\n\n _ => {\n\n let mut msg = String::new();\n\n write!(msg, \"Invalid comma type: {}\", format.ty).unwrap();\n\n return Err(FmtError::Invalid(msg));\n\n }\n\n }\n\n }\n\n Ok(format)\n\n}\n\n\n\nimpl<'a, 'b> Formatter<'a, 'b> {\n\n /// create Formatter from format string\n\n pub fn from_str(s: &'a str, buff: &'b mut String) -> Result<Formatter<'a, 'b>> {\n\n let mut found_colon = false;\n\n let mut chars = s.chars();\n\n let mut c = match chars.next() {\n\n Some(':') | None => {\n\n return Err(FmtError::Invalid(\"must specify identifier\".to_string()))\n\n }\n", "file_path": "src/formatter.rs", "rank": 28, "score": 23979.35748388123 }, { "content": " // from now on all format characters MUST be valid\n\n // ASCII characters (fill and identifier were the\n\n // only ones that weren't.\n\n // Therefore we can use bytes for the rest\n\n let rest = rest.as_bytes();\n\n let mut align_specified = false;\n\n let mut fill_specified = false;\n\n\n\n let end: usize = rest.len();\n\n let mut pos: usize = 0;\n\n\n\n // If the second char is an alignment token,\n\n // then fake_fill as fill\n\n if end - pos >= 1 + fake_fill.len_utf8() &&\n\n is_alignment_token(rest[pos + fake_fill.len_utf8()] as char) {\n\n format.align = rest[pos + fake_fill.len_utf8()] as char;\n\n format.fill = fake_fill;\n\n fill_specified = true;\n\n 
align_specified = true;\n\n pos += 1 + fake_fill.len_utf8();\n", "file_path": "src/formatter.rs", "rank": 29, "score": 23977.208242729226 }, { "content": " self.align.clone()\n\n }\n\n\n\n /// width getter\n\n pub fn width(&self) -> Option<usize> {\n\n self.width\n\n }\n\n\n\n /// thousands getter\n\n pub fn thousands(&self) -> bool {\n\n self.thousands\n\n }\n\n\n\n /// precision getter\n\n pub fn precision(&self) -> Option<usize> {\n\n self.precision\n\n }\n\n\n\n /// set precision to None, used for formatting int, float, etc\n\n pub fn set_precision(&mut self, precision: Option<usize>) {\n", "file_path": "src/formatter.rs", "rank": 30, "score": 23976.420651431858 }, { "content": " })\n\n }\n\n\n\n /// call this to re-write the original format string verbatum\n\n /// back to the output\n\n pub fn skip(mut self) -> Result<()> {\n\n self.buff.push('{');\n\n self.write_str(self.pattern).unwrap();\n\n self.buff.push('}');\n\n Ok(())\n\n }\n\n\n\n\n\n /// fill getter\n\n pub fn fill(&self) -> char {\n\n self.fill\n\n }\n\n\n\n /// align getter\n\n pub fn align(&self) -> Alignment {\n", "file_path": "src/formatter.rs", "rank": 31, "score": 23975.566353011305 }, { "content": " } else {\n\n // Not having a precision after a dot is an error.\n\n if consumed == 0 {\n\n return Err(FmtError::Invalid(\"Format specifier missing precision\".to_string()));\n\n }\n\n }\n\n pos += consumed;\n\n\n\n }\n\n\n\n // Finally, parse the type field.\n\n if end - pos > 1 {\n\n // More than one char remain, invalid format specifier.\n\n return Err(FmtError::Invalid(\"Invalid format specifier\".to_string()));\n\n }\n\n\n\n if end - pos == 1 {\n\n format.ty = rest[pos] as char;\n\n if !is_type_element(format.ty) {\n\n let mut msg = String::new();\n", "file_path": "src/formatter.rs", "rank": 32, "score": 23975.486599693017 }, { "content": " write!(msg, \"Invalid type specifier: {:?}\", format.ty).unwrap();\n\n return Err(FmtError::TypeError(msg));\n\n }\n\n // pos+=1;\n\n }\n\n\n\n // Do as much validating as we can, just by looking at the format\n\n // specifier. Do not take into account what type of formatting\n\n // we're doing (int, float, string).\n\n if format.thousands {\n\n match format.ty {\n\n 'd' |\n\n 'e' |\n\n 'f' |\n\n 'g' |\n\n 'E' |\n\n 'G' |\n\n '%' |\n\n 'F' |\n\n '\\0' => {} /* These are allowed. 
See PEP 378.*/\n", "file_path": "src/formatter.rs", "rank": 33, "score": 23975.286198291516 }, { "content": " _ => false,\n\n }\n\n }\n\n }\n\n\n\n /// UNSTABLE: in the future, this may return true if all validty\n\n /// checks for a float return true\n\n /// return true if ty is valid for formatting floats\n\n pub fn is_float_type(&self) -> bool {\n\n match self.ty {\n\n None => true,\n\n Some(c) => match c {\n\n 'f' | 'e' | 'E' => true,\n\n _ => false,\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\nimpl<'a, 'b> fmt::Write for Formatter<'a, 'b> {\n\n fn write_str(&mut self, s: &str) -> fmt::Result {\n\n self.buff.write_str(s)\n\n }\n\n}\n", "file_path": "src/formatter.rs", "rank": 34, "score": 23975.12881180554 }, { "content": " Some(c) => c,\n\n };\n\n let mut consumed = 0;\n\n // find the identifier\n\n loop {\n\n consumed += c.len_utf8();\n\n if c == ':' {\n\n found_colon = true;\n\n break;\n\n }\n\n c = match chars.next() {\n\n Some(c) => c,\n\n None => {\n\n break;\n\n }\n\n };\n\n }\n\n let (identifier, rest) = s.split_at(consumed);\n\n let identifier = if found_colon {\n\n let (i, _) = identifier.split_at(identifier.len() - 1); // get rid of ':'\n", "file_path": "src/formatter.rs", "rank": 35, "score": 23974.08886124846 }, { "content": " /// alternate getter\n\n pub fn alternate(&self) -> bool {\n\n self.alternate\n\n }\n\n\n\n // sign_aware_zero_pad // Not supported\n\n\n\n /// type getter\n\n pub fn ty(&self) -> Option<char> {\n\n self.ty\n\n }\n\n\n\n /// UNSTABLE: in the future, this may return true if all validty\n\n /// checks for a float return true\n\n /// return true if ty is valid for formatting integers\n\n pub fn is_int_type(&self) -> bool {\n\n match self.ty {\n\n None => true,\n\n Some(c) => match c {\n\n 'b' | 'o' | 'x' | 'X' => true,\n", "file_path": "src/formatter.rs", "rank": 36, "score": 23973.71157722999 }, { "content": " } else if end - pos >= 1 && is_alignment_token(fake_fill) {\n\n format.align = fake_fill;\n\n pos += fake_fill.len_utf8();\n\n }\n\n\n\n // Parse the various sign options\n\n if end - pos >= 1 && is_sign_element(rest[pos] as char) {\n\n format.sign = rest[pos] as char;\n\n pos += 1;\n\n }\n\n\n\n // If the next character is #, we're in alternate mode. 
This only\n\n // applies to integers.\n\n if end - pos >= 1 && rest[pos] as char == '#' {\n\n format.alternate = true;\n\n pos += 1;\n\n }\n\n\n\n // The special case for 0-padding (backwards compat)\n\n if !fill_specified && end - pos >= 1 && rest[pos] == '0' as u8 {\n", "file_path": "src/formatter.rs", "rank": 37, "score": 23973.09878842503 }, { "content": " '-' => Sign::Minus,\n\n ' ' => Sign::Space,\n\n _ => unreachable!(),\n\n },\n\n alternate: format.alternate,\n\n width: match format.width {\n\n -1 => None,\n\n _ => Some(format.width as usize),\n\n },\n\n thousands: format.thousands,\n\n precision: match format.precision {\n\n -1 => None,\n\n _ => Some(format.precision as usize),\n\n },\n\n ty: match format.ty {\n\n '\\0' => None,\n\n _ => Some(format.ty),\n\n },\n\n buff: buff,\n\n pattern: s,\n", "file_path": "src/formatter.rs", "rank": 38, "score": 23973.09457936097 }, { "content": " self.precision = precision;\n\n }\n\n\n\n /// sign getter\n\n pub fn sign(&self) -> Sign {\n\n self.sign.clone()\n\n }\n\n\n\n /// sign plus getter\n\n /// here because it is in fmt::Formatter\n\n pub fn sign_plus(&self) -> bool {\n\n self.sign == Sign::Plus\n\n }\n\n\n\n /// sign minus getter\n\n /// here because it is in fmt::Formatter\n\n pub fn sign_minus(&self) -> bool {\n\n self.sign == Sign::Minus\n\n }\n\n\n", "file_path": "src/formatter.rs", "rank": 39, "score": 23973.06075682235 }, { "content": " if end - pos > 0 && rest[pos] as char == ',' {\n\n format.thousands = true;\n\n pos += 1;\n\n }\n\n\n\n // Parse field precision\n\n if end - pos > 0 && rest[pos] as char == '.' {\n\n pos += 1;\n\n\n\n let (consumed, val) = get_integer(rest, pos);\n\n if consumed != 0 {\n\n match val {\n\n None => {\n\n return Err(FmtError::Invalid(\"overflow error when parsing precision\"\n\n .to_string()))\n\n }\n\n Some(v) => {\n\n format.precision = v;\n\n }\n\n }\n", "file_path": "src/formatter.rs", "rank": 40, "score": 23973.04420718411 }, { "content": " i\n\n } else {\n\n identifier\n\n };\n\n\n\n let format = try!(parse_like_python(rest));\n\n\n\n Ok(Formatter {\n\n key: identifier,\n\n fill: format.fill,\n\n align: match format.align {\n\n '<' => Alignment::Left,\n\n '^' => Alignment::Center,\n\n '>' => Alignment::Right,\n\n '=' => Alignment::Equal,\n\n _ => unreachable!(),\n\n },\n\n sign: match format.sign {\n\n '\\0' => Sign::Unspecified,\n\n '+' => Sign::Plus,\n", "file_path": "src/formatter.rs", "rank": 41, "score": 23972.35447205343 }, { "content": " format.fill = '0';\n\n if !align_specified {\n\n format.align = '=';\n\n }\n\n pos += 1;\n\n }\n\n\n\n // check to make sure that val is good\n\n let (consumed, val) = get_integer(rest, pos);\n\n pos += consumed;\n\n if consumed != 0 {\n\n match val {\n\n None => return Err(FmtError::Invalid(\"overflow error when parsing width\".to_string())),\n\n Some(v) => {\n\n format.width = v;\n\n }\n\n }\n\n }\n\n\n\n // Comma signifies add thousands separators\n", "file_path": "src/formatter.rs", "rank": 42, "score": 23970.802597605136 }, { "content": "use super::super::formatter::Formatter;\n\nuse super::super::types::*;\n\n\n\n\n\n#[test]\n", "file_path": "src/tests/fmt.rs", "rank": 43, "score": 22146.499964618943 }, { "content": " )*)\n\n}\n\n\n\ntest_uint!(test_u8 u8, test_u16 u16, test_u32 u32, test_u64 u64, test_usize usize);\n\ntest_int!(test_i8 i8, test_i16 i16, test_i32 i32, test_i64 i64, test_isize isize);\n\n\n\n// #[bench]\n\n// fn bench_strfmt(b: &mut Bencher) {\n\n// let mut vars: HashMap<String, String> = HashMap::new();\n\n// let too_long = 
\"toooloooong\".to_string();\n\n// vars.insert(\"x\".to_string(), \"X\".to_string());\n\n// let fmtstr = \"short: {x:*^10.3} long: {long:%<14.9}\";\n\n// b.iter(|| strfmt(fmtstr, &vars));\n\n// }\n\n\n", "file_path": "src/tests/strfmt.rs", "rank": 44, "score": 22145.37569160524 }, { "content": "//! Test keys other than String.\n\n\n\nuse super::super::*;\n\nuse std::collections::HashMap;\n\nuse std::str::FromStr;\n\n\n\n#[test]\n", "file_path": "src/tests/key.rs", "rank": 45, "score": 22144.413488320348 }, { "content": "mod fmt;\n\nmod key;\n\nmod strfmt;\n\nmod test_trait;\n\n\n\nuse super::FmtError;\n\n\n\n#[test]\n", "file_path": "src/tests/mod.rs", "rank": 46, "score": 22143.60233549907 }, { "content": "\n\n // invalid\n\n (\"{x:.2}\", \"\", 3),\n\n (\"{x:s}\", \"\", 3),\n\n\n\n // TODO\n\n (\"{x:+010}\", \"+000000042\", 1),\n\n ];\n\n let f = |mut fmt: Formatter| {\n\n match vars.get(fmt.key) {\n\n Some(v) => fmt.$t(*v),\n\n None => panic!(),\n\n }\n\n };\n\n\n\n let strfmt_uint = |fmtstr: &str, vars: &HashMap<String, $t>| -> Result<String> {\n\n strfmt_map(fmtstr, &f)\n\n };\n\n run_tests(&values, &vars, &strfmt_uint);\n\n }\n", "file_path": "src/tests/strfmt.rs", "rank": 47, "score": 22143.56947656791 }, { "content": " (\"{z:+.2E}\", \"+0.00E0\", 0),\n\n\n\n // invalid\n\n (\"{x:s}\", \"\", 3),\n\n (\"{x:#}\", \"\", 3),\n\n\n\n // TODO\n\n (\"{x:+010.2}\", \"+0042.4242\", 1),\n\n ];\n\n let f = |mut fmt: Formatter| {\n\n match vars.get(fmt.key) {\n\n Some(v) => fmt.$t(*v),\n\n None => panic!(),\n\n }\n\n };\n\n\n\n let strfmt_float = |fmtstr: &str, vars: &HashMap<String, $t>| -> Result<String> {\n\n strfmt_map(fmtstr, &f)\n\n };\n\n run_tests(&values, &vars, &strfmt_float);\n", "file_path": "src/tests/strfmt.rs", "rank": 48, "score": 22143.42012334955 }, { "content": " (\"{x:+x}\", \"+2a\", 0),\n\n (\"{x:#x}\", \"0x2a\", 0),\n\n (\"{x:#X}\", \"0x2A\", 0),\n\n (\"{x:b}\", \"101010\", 0),\n\n (\"{x:#b}\", \"0b101010\", 0),\n\n (\"{x:o}\", \"52\", 0),\n\n (\"{x:#o}\", \"0o52\", 0),\n\n\n\n (\"{x:+}\", \"+42\", 0),\n\n (\"{y:-}\", \"0\", 0),\n\n (\"{y:+}\", \"+0\", 0),\n\n\n\n // invalid\n\n (\"{x:.2}\", \"\", 3),\n\n (\"{x:s}\", \"\", 3),\n\n\n\n // TODO\n\n (\"{x:+010}\", \"+000000042\", 1),\n\n ];\n\n let f = |mut fmt: Formatter| {\n", "file_path": "src/tests/strfmt.rs", "rank": 49, "score": 22143.32820392898 }, { "content": " }\n\n )*)\n\n}\n\n\n\ntest_float!(test_f32 f32, test_f64 f64);\n\n\n\nmacro_rules! test_uint {\n\n ($($name:ident $t:ident),*) => ($(\n\n #[test]\n\n fn $name() {\n\n let mut vars: HashMap<String, $t> = HashMap::new();\n\n vars.insert(\"x\".to_string(), 42);\n\n vars.insert(\"y\".to_string(), 0);\n\n let values: Vec<(&str, &str, u8)> = vec![\n\n (\"{x}\", \"42\", 0),\n\n (\"{x:<7}\", \"42 \", 0),\n\n (\"{x:>7}\", \" 42\", 0),\n\n (\"{x:^7}\", \" 42 \", 0),\n\n (\"{x:x}\", \"2a\", 0),\n\n (\"{x:X}\", \"2A\", 0),\n", "file_path": "src/tests/strfmt.rs", "rank": 50, "score": 22143.070134335707 }, { "content": " match vars.get(fmt.key) {\n\n Some(v) => fmt.$t(*v),\n\n None => panic!(),\n\n }\n\n };\n\n\n\n let strfmt_int = |fmtstr: &str, vars: &HashMap<String, $t>| -> Result<String> {\n\n strfmt_map(fmtstr, &f)\n\n };\n\n run_tests(&values, &vars, &strfmt_int);\n\n }\n\n )*)\n\n}\n\n\n\nmacro_rules! 
test_int {\n\n ($($name:ident $t:ident),*) => ($(\n\n #[test]\n\n fn $name() {\n\n let mut vars: HashMap<String, $t> = HashMap::new();\n\n vars.insert(\"x\".to_string(), 42);\n", "file_path": "src/tests/strfmt.rs", "rank": 51, "score": 22142.389068883247 }, { "content": "}\n\n\n\nmacro_rules! test_float {\n\n ($($name:ident $t:ident),*) => ($(\n\n #[test]\n\n fn $name() {\n\n let mut vars: HashMap<String, $t> = HashMap::new();\n\n vars.insert(\"x\".to_string(), 42.4242);\n\n vars.insert(\"y\".to_string(), -100.11111);\n\n vars.insert(\"z\".to_string(), 0.);\n\n let values: Vec<(&str, &str, u8)> = vec![\n\n // simple valid\n\n (\"{x}\", \"42.4242\", 0),\n\n (\"{x:.2}\", \"42.42\", 0),\n\n (\"{x:<7.2}\", \"42.42 \", 0),\n\n (\"{x:.2e}\", \"4.24e1\", 0),\n\n (\"{x:.2E}\", \"4.24E1\", 0),\n\n (\"{x:+}\", \"+42.4242\", 0),\n\n (\"{y:.2E}\", \"-1.00E2\", 0),\n\n (\"{y:+.2E}\", \"-1.00E2\", 0),\n", "file_path": "src/tests/strfmt.rs", "rank": 52, "score": 22142.197899915216 }, { "content": "\n\n if failure {\n\n println!(\"FAIL:\");\n\n println!(\" input: {:?}\", fmtstr);\n\n println!(\" output: {:?}\", result);\n\n if expect_err != 0 {\n\n let expected = match expect_err {\n\n 1 => \"FmtError::Invalid\",\n\n 2 => \"FmtError::KeyError\",\n\n 3 => \"FmtError::TypeError\",\n\n _ => unreachable!()\n\n };\n\n println!(\" expected: {}\", expected)\n\n } else {\n\n println!(\" expected: {:?}\", expected);\n\n }\n\n assert!(false);\n\n }\n\n }\n\n\n\n}\n\n\n", "file_path": "src/tests/strfmt.rs", "rank": 53, "score": 22141.86286122422 }, { "content": " (\"hi }\", \"\", 1),\n\n (\"w { ho\", \"\", 1),\n\n\n\n // invalid keys\n\n (\"{what}\", \"{}\", 2),\n\n (\"{who}\", \"{}\", 2),\n\n (\"{x} {where}\", \"{}\", 2),\n\n\n\n // invalid types\n\n (\"{x:<<<}\", \"\", 3),\n\n (\"{x:*}\", \"\", 3),\n\n (\"{x::}\", \"\", 3),\n\n (\"{x:#}\", \"\", 3),\n\n (\"{x:<4n}\", \"\", 3),\n\n (\"{x:<4d}\", \"\", 3),\n\n (\"{x:,}\", \"\", 3),\n\n (\"{x:<-10}\", \"\", 3),\n\n ];\n\n\n\n run_tests(&values, &vars, &strfmt);\n\n}\n\n\n\n#[test]\n", "file_path": "src/tests/strfmt.rs", "rank": 54, "score": 22141.56840090664 }, { "content": "#![allow(unused_variables)]\n\n\n\nuse std::fmt;\n\nuse std::collections::HashMap;\n\nuse super::super::*;\n\n\n\nmacro_rules! 
matches {\n\n ($e:expr, $p:pat) => {\n\n match $e { $p => true, _ => false }\n\n }\n\n}\n\n\n", "file_path": "src/tests/strfmt.rs", "rank": 55, "score": 22141.560314220897 }, { "content": " vars.insert(\"y\".to_string(), -100);\n\n vars.insert(\"z\".to_string(), 0);\n\n let values: Vec<(&str, &str, u8)> = vec![\n\n // simple valid\n\n (\"{x}\", \"42\", 0),\n\n (\"{x:<7}\", \"42 \", 0),\n\n (\"{x:X}\", \"2A\", 0),\n\n (\"{x:#x}\", \"0x2a\", 0),\n\n (\"{x:#X}\", \"0x2A\", 0),\n\n (\"{x:b}\", \"101010\", 0),\n\n (\"{x:#b}\", \"0b101010\", 0),\n\n (\"{x:o}\", \"52\", 0),\n\n (\"{x:#o}\", \"0o52\", 0),\n\n\n\n (\"{x:+}\", \"+42\", 0),\n\n (\"{y}\", \"-100\", 0),\n\n (\"{y:+}\", \"-100\", 0),\n\n (\"{z}\", \"0\", 0),\n\n (\"{z:-}\", \"0\", 0),\n\n (\"{z:+}\", \"+0\", 0),\n", "file_path": "src/tests/strfmt.rs", "rank": 56, "score": 22138.656966655908 }, { "content": " (\" {x}yz\", \" Xyz\", 0),\n\n (\" hi {x:^4}-you rock\", \" hi X -you rock\", 0),\n\n\n\n // fill confusion\n\n (\"{x:10}\", \" X\", 0),\n\n (\"{long:.3}\", \"too\", 0),\n\n (\"{long:<5.3}\", \"too \", 0),\n\n (\"{long:5.3}\", \" too\", 0),\n\n (\"{long:5.7}\", \"toooloo\", 0),\n\n (\"{long:<5.7}\", \"toooloo\", 0),\n\n (\"{long:^5.7}\", \"toooloo\", 0),\n\n (\"{long:<}\", &too_long, 0),\n\n (\"{long:<<}\", &too_long, 0),\n\n (\"{long:<<5}\", &too_long, 0),\n\n\n\n // valid types\n\n (\"{x:<4s}\", \"X \", 0),\n\n\n\n // escape\n\n (\"{{}}\", \"{}\", 0),\n", "file_path": "src/tests/strfmt.rs", "rank": 57, "score": 22138.656966655908 }, { "content": " (\"{{long}}\", \"{long}\", 0),\n\n (\"{{{x}}}\", \"{X}\", 0),\n\n\n\n // fun\n\n (\"{x:<>}\", \"X\", 0),\n\n (\"{x:<>3}\", \"<<X\", 0),\n\n (\"{{}}\", \"{}\", 0),\n\n (\"{{{x}}}\", \"{X}\", 0),\n\n (\"{{{x}{{{{{{\", \"{X{{{\", 0),\n\n (\"{x}}}}}\", \"X}}\", 0),\n\n\n\n // invalid fmt\n\n (\"{}\", \"\", 1),\n\n (\"{:3}\", \"\", 1),\n\n (\"{xxx: <88.3}\", \"\", 1),\n\n\n\n // invalid escape\n\n (\"}\", \"\", 1),\n\n (\"{{}}}\", \"\", 1),\n\n (\"hi } there\", \"\", 1),\n", "file_path": "src/tests/strfmt.rs", "rank": 58, "score": 22138.656966655908 }, { "content": "/// The format struct as it is defined in the python source\n\nstruct FmtPy {\n\n pub fill: char,\n\n pub align: char,\n\n pub alternate: bool,\n\n pub sign: char,\n\n pub width: i64,\n\n pub thousands: bool,\n\n pub precision: i64,\n\n pub ty: char,\n\n}\n\n\n", "file_path": "src/formatter.rs", "rank": 59, "score": 21412.410035751866 }, { "content": "#[derive(PartialEq, Eq, Hash)]\n\nenum Key {\n\n Zero,\n\n One,\n\n Two,\n\n}\n\n\n\nimpl FromStr for Key {\n\n type Err = FmtError;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n match s {\n\n \"Zero\" => Ok(Key::Zero),\n\n \"One\" => Ok(Key::One),\n\n \"Two\" => Ok(Key::Two),\n\n _ => Err(FmtError::KeyError(s.to_string())),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tests/key.rs", "rank": 60, "score": 20957.23270932686 }, { "content": "/// Rust-style format a string given a `HashMap` of the variables.\n\npub fn strfmt<K, T: fmt::Display>(fmtstr: &str, vars: &HashMap<K, T>) -> Result<String>\n\nwhere\n\n K: Hash + Eq + FromStr,\n\n{\n\n let formatter = |mut fmt: Formatter| {\n\n let k: K = match fmt.key.parse() {\n\n Ok(k) => k,\n\n Err(_) => {\n\n return Err(new_key_error(fmt.key));\n\n }\n\n };\n\n let v = match vars.get(&k) {\n\n Some(v) => v,\n\n None => {\n\n return Err(new_key_error(fmt.key));\n\n }\n\n };\n\n fmt.str(v.to_string().as_str())\n\n };\n\n strfmt_map(fmtstr, &formatter)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 61, "score": 16962.442332055343 }, { 
"content": "## **BETA**: Formatting numeric types\n\n> This feature is in Beta and may change. I expect it to be fairly stable\n\n> at this point but would appreciate feedback on development.\n\n>\n\n> In addition, \"signed 0 padding\" (i.e. +000042) is not yet supported\n\n> for numeric types\n\n\n\nUsing `strfmt_map` it is also possible to format integers and floats:\n\n```\n\nlet mut vars: HashMap<String, f64> = HashMap::new();\n\nvars.insert(\"x\".to_string(), 42.4242);\n\nvars.insert(\"y\".to_string(), -100.11111);\n\nvars.insert(\"z\".to_string(), 0.);\n\n\n\nlet f = |mut fmt: Formatter| {\n\n fmt.f64(*vars.get(fmt.key).unwrap())\n\n};\n\nassert_eq!(strfmt_map(\"{x:<7.2}\", f).unwrap(), \"42.42 \");\n\nassert_eq!(strfmt_map(\"{y:+.2E}\", f).unwrap(), \"-1.00E2\");\n\nassert_eq!(strfmt_map(\"{z:+.2E}\", f).unwrap(), \"+0.00E0\");\n\n```\n\n\n\n# Status and Goals\n\n\n\n**strfmt** aims to support all of the formatting options defined in\n\n[`std::fmt`](https://doc.rust-lang.org/std/fmt/). Currently it officially only supports \n\nthe format options for strings (beta support for i64 and f64)\n\n\n\nSee the [syntax](https://doc.rust-lang.org/std/fmt/#syntax) for how to create a formatted string\n\n\n", "file_path": "README.md", "rank": 64, "score": 8.540047161497727 }, { "content": "# strfmt: rust library for formatting dynamic strings\n\n\n\n> Note: this library is fairly stable and tested, but new features are in the early stages of development and feedback (positive or negative)\n\n> would be much appreciated. If you use this library and liked it or decided not to use it, \n\n> please ping me at [@vitiral](https://twitter.com/vitiral) on twitter or vitiral@gmail.com via email to tell me about your\n\n> experience. I would particularily like to see the code where it is being used. Thankyou!\n\n\n\nThis library is for rust developers who want to bring rust-like\n\nformatting to non-static strings. \n\n\n\n## Basic use of formatting Display types\n\n```\n\nextern crate strfmt;\n\nuse strfmt::strfmt;\n\nuse std::collections::HashMap;\n\n\n\n#[test]\n\nfn it_works() {\n\n let mut vars = HashMap::new();\n\n vars.insert(\"name\".to_string(), \"bob\");\n\n vars.insert(\"job\".to_string(), \"python developer\");\n\n\n\n let fmt = \"hi, my name is {name} and I am a {job}!\".to_string();\n\n assert_eq!(strfmt(&fmt, &vars).unwrap(), \"hi, my name is bob and I am a python developer!\")\n\n}\n\n```\n\n\n\nIn addition to the `strfmt` function, this library has the `Format` trait which adds the\n\n`format` method to `str` and `String` types.\n\n\n\n```\n\nassert_eq!(\"hi, my name is {name}\".format(&vars), \"hi, my name is bob\")\n\n```\n\n\n\nYou can use this library any time you have dynamic strings you want to format, such as\n\nif you are providing your users a ui or are reading configuration files.\n\n\n\nstrfmt does not support empty identifiers (i.e. `{}` or `{:<10}`. Integer identifiers\n\nwill be read as str keys to the hashmap (i.e. `{1:<10}` will have key == \"1\")\n\n\n", "file_path": "README.md", "rank": 65, "score": 7.749901600910148 }, { "content": "\n\nmacro_rules! 
fmtfloat {\n\n ($($t:ident)*) => ($(\n\n impl<'a, 'b> Formatter<'a, 'b> {\n\n pub fn $t(&mut self, x: $t) -> Result<()> {\n\n let ty = match self.ty() {\n\n None => 'f',\n\n Some(c) => c,\n\n };\n\n\n\n if !self.is_float_type() {\n\n let mut msg = String::new();\n\n write!(msg, \"Unknown format code {:?} for type\", ty).unwrap();\n\n return Err(FmtError::TypeError(msg));\n\n }\n\n\n\n if self.alternate() {\n\n return Err(FmtError::TypeError(\"Alternate form (#) not allowed for floats\".to_string()));\n\n }\n\n\n", "file_path": "src/fmtnum.rs", "rank": 66, "score": 7.334738086074809 }, { "content": "macro_rules! fmtint {\n\n ($($t:ident)*) => ($(\n\n #[allow(unused_comparisons)]\n\n impl<'a, 'b> Formatter<'a, 'b> {\n\n pub fn $t(&mut self, x: $t) -> Result<()> {\n\n let ty = match self.ty() {\n\n None => ' ',\n\n Some(c) => c,\n\n };\n\n\n\n if !self.is_int_type() {\n\n let mut msg = String::new();\n\n write!(msg, \"Unknown format code {:?} for type\", ty).unwrap();\n\n return Err(FmtError::TypeError(msg));\n\n }\n\n\n\n if self.precision() != None {\n\n return Err(FmtError::TypeError(\"precision not allowed for integers\".to_string()));\n\n }\n\n\n", "file_path": "src/fmtnum.rs", "rank": 67, "score": 7.045913633697491 }, { "content": "extern crate strfmt;\n\nuse strfmt::{strfmt, strfmt_map, Formatter};\n\nuse std::collections::HashMap;\n\n\n\n#[test]\n", "file_path": "example/src/lib.rs", "rank": 68, "score": 7.025259155902361 }, { "content": " let mut msg = String::new();\n\n write!(msg, \"alternate ('#') cannot be used with type {:?}\", ty).unwrap();\n\n return Err(FmtError::Invalid(msg));\n\n }\n\n }\n\n }\n\n\n\n match ty {\n\n ' ' => write!(s, \"{}\", x).unwrap(),\n\n 'b' => write!(s, \"{:b}\", x).unwrap(),\n\n 'o' => write!(s, \"{:o}\", x).unwrap(),\n\n 'x' => write!(s, \"{:x}\", x).unwrap(),\n\n 'X' => write!(s, \"{:X}\", x).unwrap(),\n\n _ => unreachable!(),\n\n }\n\n\n\n self.str_unchecked(s.as_str())\n\n }\n\n })*)\n\n}\n", "file_path": "src/fmtnum.rs", "rank": 70, "score": 6.360817590190329 }, { "content": "//! 
strfmt crate\n\n\n\nuse std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::fmt::Write;\n\nuse std::hash::Hash;\n\nuse std::str::FromStr;\n\nuse std::string::String;\n\n\n\nmod fmtstr;\n\nmod formatter;\n\n#[cfg(test)]\n\nmod tests;\n\nmod types;\n\n\n\n#[macro_use]\n\nmod fmtnum;\n\n\n\npub use fmtstr::strfmt_map;\n\npub use formatter::Formatter;\n\npub use types::{Alignment, FmtError, Result, Sign};\n\n\n\n// u128 & i128 unstable (see https://github.com/rust-lang/rust/issues/35118)\n\nfmtint!(u8 i8 u16 i16 u32 i32 u64 i64 usize isize);\n\nfmtfloat!(f32 f64);\n\n\n\n/// Rust-style format a string given a `HashMap` of the variables.\n", "file_path": "src/lib.rs", "rank": 71, "score": 6.11771131551275 }, { "content": "### Current Status (in order of priority)\n\n- [ ]: get strfmt_map out of Beta and create Format.format_map method\n\n- [ ]: handle sign aware zero padding for numeric types\n\n- [x]: format any Display type\n\n- [x]: stabalize `strfmt_map` and add `format_map` to the `Format` trait.\n\n- [x]: add `f64` method to `Formatter` allowing those using `strfmt_map` to format\n\n- f64s according to the spec\n\n- [x]: add `i64` method to `Formatter` allowing those using `strfmt_map` to format\n\n- i64s according to the spec\n\n- [ ]: add `format_f64(&self, HashMap<String, f64>` method to `Format` allowing users\n\n- to easily format a hashmap of i64 values\n\n- [ ]: add `format_i64(&self, HashMap<String, i64>` method to `Format` allowing users\n\n- to easily format a hashmap of i64 values\n\n- [ ]: look for a rust library has \"unbounded float\" (like python) and add that to the formatter\n\n- [ ]: look for a rust library has \"unbounded integer\" (like python) and add that to the formatter\n\n- [ ]: Implement `vec` method to `Formatter` allowing those usin `strfmt_map` to format\n\n- types of `Vec<Display>` in a way that uses precision and width\n\n- (precision will limit the number of elements displayed, width the width of each element)\n\n- [ ]: special suppport to format HashMap<String, String> for improved speed\n\n- [ ]: special suppport to format HashMap<String, &str> for improved speed\n\n- [ ]: special suppport to format HashMap<&str, &str> for improved speed\n\n \n\n \n\n### HELP\n\nAdding functionality should be fairly easy, the main piece of work is checking and handling\n\nthe flags correctly and creating comprehensive tests. 
Hopefully I will be creating the `f64`\n\nmethod soon to show how it can be done, but I could really use all the help I can get on\n\nmaking this libray complete.\n", "file_path": "README.md", "rank": 73, "score": 4.641649111975338 }, { "content": " if self.thousands() {\n\n return Err(FmtError::Invalid(\"thousands specifier not yet supported\".to_string()));\n\n }\n\n\n\n if self.fill() == '0' && self.align() == Alignment::Right {\n\n return Err(FmtError::Invalid(\"sign aware 0 padding not yet supported\".to_string()));\n\n }\n\n\n\n let mut s = String::new();\n\n\n\n if x >= 0 && self.sign_plus() {\n\n self.write_str(\"+\").unwrap();\n\n }\n\n\n\n if self.alternate() {\n\n match ty {\n\n 'b' => self.write_str(\"0b\").unwrap(),\n\n 'o' => self.write_str(\"0o\").unwrap(),\n\n 'x' | 'X' => self.write_str(\"0x\").unwrap(),\n\n _ => {\n", "file_path": "src/fmtnum.rs", "rank": 75, "score": 4.033409650137196 }, { "content": " if self.thousands() {\n\n return Err(FmtError::Invalid(\"thousands specifier not yet supported\".to_string()));\n\n }\n\n\n\n if self.fill() == '0' && self.align() == Alignment::Right {\n\n return Err(FmtError::Invalid(\"sign aware 0 padding not yet supported\".to_string()));\n\n }\n\n\n\n let mut s = String::new();\n\n\n\n if x >= (0 as $t) && self.sign_plus() {\n\n self.write_str(\"+\").unwrap();\n\n }\n\n\n\n match self.precision() {\n\n None => {\n\n match ty {\n\n 'f' => write!(s, \"{}\", x).unwrap(),\n\n 'e' => write!(s, \"{:e}\", x).unwrap(),\n\n 'E' => write!(s, \"{:E}\", x).unwrap(),\n", "file_path": "src/fmtnum.rs", "rank": 76, "score": 4.033409650137196 }, { "content": " _ => unreachable!(),\n\n }\n\n }\n\n Some(p) => {\n\n match ty {\n\n 'f' => write!(s, \"{:.*}\", p, x).unwrap(),\n\n 'e' => write!(s, \"{:.*e}\", p, x).unwrap(),\n\n 'E' => write!(s, \"{:.*E}\", p, x).unwrap(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n }\n\n\n\n let prev_prec = self.precision();\n\n self.set_precision(None);\n\n let out = self.str_unchecked(s.as_str());\n\n self.set_precision(prev_prec);\n\n out\n\n }\n\n })*)\n\n}\n", "file_path": "src/fmtnum.rs", "rank": 78, "score": 2.912923143338922 } ]
Rust
src/eval.rs
edre/nokamute
ace46abe0cb2a4056e0e97c4377deb2746bcbae0
use crate::board::*;

pub struct DumbEvaluator;

impl minimax::Evaluator for DumbEvaluator {
    type G = Rules;
    fn evaluate(&self, _: &Board) -> minimax::Evaluation {
        0
    }
}

pub struct BasicEvaluator {
    queen_factor: i32,
    movable_bug_factor: i32,
    unplayed_bug_factor: i32,
}

impl Default for BasicEvaluator {
    fn default() -> Self {
        Self { queen_factor: 40, movable_bug_factor: 2, unplayed_bug_factor: 1 }
    }
}

impl minimax::Evaluator for BasicEvaluator {
    type G = Rules;
    fn evaluate(&self, board: &Board) -> minimax::Evaluation {
        let queens_surrounded = board.queens_surrounded();
        let immovable = board.find_cut_vertexes();

        fn value(bug: Bug) -> i32 {
            match bug {
                Bug::Queen => 10,
                Bug::Ant => 7,
                Bug::Beetle => 6,
                Bug::Grasshopper => 4,
                Bug::Spider => 3,
                Bug::Mosquito => 0,
                Bug::Ladybug => 5,
                Bug::Pillbug => 4,
            }
        }

        let mut score = queens_surrounded[1 - board.to_move() as usize] as i32
            - queens_surrounded[board.to_move() as usize] as i32;
        score *= self.queen_factor;

        let remaining = board.get_remaining();
        let opp_remaining = board.get_opponent_remaining();
        for bug in Bug::iter_all() {
            score += (remaining[bug as usize] as i32 - opp_remaining[bug as usize] as i32)
                * self.unplayed_bug_factor;
        }

        for (id, node) in (0..).zip(board.nodes.iter()) {
            if let Some(ref tile) = node.tile {
                let mut bug_score = value(tile.bug);
                let pillbug_near_its_queen = tile.bug == Bug::Pillbug
                    && node.adj.iter().any(|&adj| {
                        board
                            .get(adj)
                            .map(|tile2| tile2.bug == Bug::Queen && tile2.color == tile.color)
                            .unwrap_or(false)
                    });
                if pillbug_near_its_queen {
                    bug_score += (self.queen_factor / 2)
                        * node.adj.iter().filter(|&&adj| board.get(adj).is_none()).count() as i32;
                } else if tile.underneath.is_none() && immovable.get(id) {
                    continue;
                }
                if tile.bug == Bug::Mosquito {
                    if tile.underneath.is_some() {
                        bug_score = value(Bug::Beetle);
                    } else {
                        bug_score = node
                            .adj
                            .iter()
                            .map(|&id| board.get(id).map(|tile| value(tile.bug) % 9).unwrap_or(0))
                            .max()
                            .unwrap_or(0);
                    }
                }
                if tile.bug.crawler() {
                    if board.slidable_adjacent(id, id).next().is_none() {
                        continue;
                    }
                }
                bug_score *= self.movable_bug_factor;
                if tile.color != board.to_move() {
                    bug_score = -bug_score;
                }
                score += bug_score;
            }
        }
        score as minimax::Evaluation
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_minimax() {
        use minimax::{Move, Negamax, Strategy};

        let mut board = Board::default();
        crate::Move::Place((0, 0), Bug::Queen).apply(&mut board);
        crate::Move::Place((1, 0), Bug::Spider).apply(&mut board);
        crate::Move::Place((-1, 1), Bug::Ant).apply(&mut board);
        crate::Move::Place((0, 1), Bug::Ant).apply(&mut board);
        crate::Move::Place((1, 2), Bug::Grasshopper).apply(&mut board);
        crate::Move::Place((1, 1), Bug::Queen).apply(&mut board);
        crate::Move::Place((2, 2), Bug::Beetle).apply(&mut board);
        crate::Move::Pass.apply(&mut board);
        for depth in 0..2 {
            let mut strategy = Negamax::new(DumbEvaluator {}, depth);
            let m = strategy.choose_move(&mut board);
            assert_eq!(Some(crate::Move::Movement((-1, 1), (2, 1))), m);
            let mut strategy = Negamax::new(BasicEvaluator::default(), depth);
            let m = strategy.choose_move(&mut board);
            assert_eq!(Some(crate::Move::Movement((-1, 1), (2, 1))), m);
        }

        let mut board = Board::default();
        crate::Move::Place((0, 0), Bug::Queen).apply(&mut board);
        crate::Move::Place((1, 0), Bug::Queen).apply(&mut board);
        crate::Move::Place((1, 1), Bug::Spider).apply(&mut board);
        crate::Move::Place((0, 1), Bug::Grasshopper).apply(&mut board);
        crate::Move::Place((-1, 0), Bug::Beetle).apply(&mut board);
        crate::Move::Pass.apply(&mut board);
        for depth in 0..3 {
            let mut strategy = Negamax::new(BasicEvaluator::default(), depth);
            let m = strategy.choose_move(&mut board);
            assert_eq!(Some(crate::Move::Movement((0, 0), (0, -1))), m);
        }
    }
}
use crate::board::*;

pub struct DumbEvaluator;

impl minimax::Evaluator for DumbEvaluator {
    type G = Rules;
    fn evaluate(&self, _: &Board) -> minimax::Evaluation {
        0
    }
}

pub struct BasicEvaluator {
    queen_factor: i32,
    movable_bug_factor: i32,
    unplayed_bug_factor: i32,
}

impl Default for BasicEvaluator {
    fn default() -> Self {
        Self { queen_factor: 40, movable_bug_factor: 2, unplayed_bug_factor: 1 }
    }
}

impl minimax::Evaluator for BasicEvaluator {
    type G = Rules;
    fn evaluate(&self, board: &Board) -> minimax::Evaluation {
        let queens_surrounded = board.queens_surrounded();
        let immovable = board.find_cut_vertexes();

        fn value(bug: Bug) -> i32 {
            match bug {
                Bug::Queen => 10,
                Bug::Ant => 7,
                Bug::Beetle => 6,
                Bug::Grasshopper => 4,
                Bug::Spider => 3,
                Bug::Mosquito => 0,
                Bug::Ladybug => 5,
                Bug::Pillbug => 4,
            }
        }

        let mut score = queens_surrounded[1 - board.to_move() as usize] as i32
            - queens_surrounded[board.to_move() as usize] as i32;
        score *= self.queen_factor;

        let remaining = board.get_remaining();
        let opp_remaining = board.get_opponent_remaining();
        for bug in Bug::iter_all() {
            score += (remaining[bug as usize] as i32 - opp_remaining[bug as usize] as i32)
                * self.unplayed_bug_factor;
        }

        for (id, node) in (0..).zip(board.nodes.iter()) {
            if let Some(ref tile) = node.tile {
                let mut bug_score = value(tile.bug);
                let pillbug_near_its_queen = tile.bug == Bug::Pillbug
                    && node.adj.iter().any(|&adj| {
                        board
                            .get(adj)
                            .map(|tile2| tile2.bug == Bug::Queen && tile2.color == tile.color)
                            .unwrap_or(false)
                    });
                if pillbug_near_its_queen {
                    bug_score += (self.queen_factor / 2)
                        * node.adj.iter().filter(|&&adj| board.get(adj).is_none()).count() as i32;
                } else if tile.underneath.is_none() && immovable.get(id) {
                    if board.slidable_adjacent(id, id).next().is_none() {
                        continue;
                    }
                }
                bug_score *= self.movable_bug_factor;
                if tile.color != board.to_move() {
                    bug_score = -bug_score;
                }
                score += bug_score;
            }
        }
        score as minimax::Evaluation
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_minimax() {
        use minimax::{Move, Negamax, Strategy};

        let mut board = Board::default();
        crate::Move::Place((0, 0), Bug::Queen).apply(&mut board);
        crate::Move::Place((1, 0), Bug::Spider).apply(&mut board);
        crate::Move::Place((-1, 1), Bug::Ant).apply(&mut board);
        crate::Move::Place((0, 1), Bug::Ant).apply(&mut board);
        crate::Move::Place((1, 2), Bug::Grasshopper).apply(&mut board);
        crate::Move::Place((1, 1), Bug::Queen).apply(&mut board);
        crate::Move::Place((2, 2), Bug::Beetle).apply(&mut board);
        crate::Move::Pass.apply(&mut board);
        for depth in 0..2 {
            let mut strategy = Negamax::new(DumbEvaluator {}, depth);
            let m = strategy.choose_move(&mut board);
            assert_eq!(Some(crate::Move::Movement((-1, 1), (2, 1))), m);
            let mut strategy = Negamax::new(BasicEvaluator::default(), depth);
            let m = strategy.choose_move(&mut board);
            assert_eq!(Some(crate::Move::Movement((-1, 1), (2, 1))), m);
        }

        let mut board = Board::default();
        crate::Move::Place((0, 0), Bug::Queen).apply(&mut board);
        crate::Move::Place((1, 0), Bug::Queen).apply(&mut board);
        crate::Move::Place((1, 1), Bug::Spider).apply(&mut board);
        crate::Move::Place((0, 1), Bug::Grasshopper).apply(&mut board);
        crate::Move::Place((-1, 0), Bug::Beetle).apply(&mut board);
        crate::Move::Pass.apply(&mut board);
        for depth in 0..3 {
            let mut strategy = Negamax::new(BasicEvaluator::default(), depth);
            let m = strategy.choose_move(&mut board);
            assert_eq!(Some(crate::Move::Movement((0, 0), (0, -1))), m);
        }
    }
}
                    continue;
                }
                if tile.bug == Bug::Mosquito {
                    if tile.underneath.is_some() {
                        bug_score = value(Bug::Beetle);
                    } else {
                        bug_score = node
                            .adj
                            .iter()
                            .map(|&id| board.get(id).map(|tile| value(tile.bug) % 9).unwrap_or(0))
                            .max()
                            .unwrap_or(0);
                    }
                }
                if tile.bug.crawler() {
random
[ { "content": "fn perft_recurse(b: &mut Board, depth: usize) -> u64 {\n\n if depth == 0 {\n\n return 1;\n\n }\n\n if Rules::get_winner(b).is_some() {\n\n // Apparently perft rules only count positions at the target depth.\n\n return 0;\n\n }\n\n let mut moves = Vec::new();\n\n Rules::generate_moves(b, &mut moves);\n\n if depth == 1 {\n\n moves.len() as u64\n\n } else if depth < 4 {\n\n // Serial exploration of leafy nodes, to avoid excessive cloning.\n\n let mut count = 0;\n\n for m in moves.iter() {\n\n m.apply(b);\n\n count += perft_recurse(b, depth - 1);\n\n m.undo(b);\n\n }\n", "file_path": "src/perft.rs", "rank": 0, "score": 89304.01108291354 }, { "content": "// Ids for tiles that are currently under other pieces.\n\ntype UnderId = u8;\n\n\n", "file_path": "src/board.rs", "rank": 1, "score": 81529.22498120315 }, { "content": "fn input_bug(options: &[Bug]) -> Option<Bug> {\n\n for bug in Bug::iter_all() {\n\n if options.contains(&bug) {\n\n print!(\"{}:{}, \", bug.codepoint(), bug.name());\n\n }\n\n }\n\n println!();\n\n\n\n let line = read_line(\"Which bug? \");\n\n let bug = Bug::from_char(line.chars().next().unwrap_or('?'));\n\n if bug.is_none() {\n\n println!(\"Unrecognized bug.\");\n\n }\n\n bug\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 2, "score": 74382.413597166 }, { "content": "pub fn terminal_game_interface() {\n\n let mut board = Board::default();\n\n let mut history = Vec::<crate::Move>::new();\n\n let mut strategy = IterativeSearch::new(\n\n crate::BasicEvaluator::default(),\n\n IterativeOptions::new().with_table_byte_size(32_000_000).with_null_window_search(true),\n\n );\n\n let mut prev_pv = Vec::new();\n\n let mut prev_pv_board = Board::default();\n\n loop {\n\n if let Some(winner) = Rules::get_winner(&board) {\n\n board.println();\n\n if winner == minimax::Winner::Draw {\n\n println!(\"Game over. 
Draw.\");\n\n } else {\n\n println!(\"Game over.\");\n\n }\n\n break;\n\n }\n\n // Precompute possible moves.\n", "file_path": "src/cli.rs", "rank": 3, "score": 69308.77318078352 }, { "content": "fn empty_board_depth(depth: usize) {\n\n let mut board = Board::default();\n\n let options = IterativeOptions::new().with_table_byte_size(16000).with_null_window_search(true);\n\n let mut strategy = IterativeSearch::new(nokamute::BasicEvaluator::default(), options);\n\n strategy.set_max_depth(depth);\n\n let m = strategy.choose_move(&mut board);\n\n assert!(m.is_some());\n\n}\n\n\n", "file_path": "benches/minimax.rs", "rank": 4, "score": 66521.55458655173 }, { "content": "fn full_board_depth(depth: usize) {\n\n let mut board = Board::default();\n\n // From some game I found online, subbed out some expansion pieces.\n\n nokamute::Move::Place((4, 0), Bug::Queen).apply(&mut board);\n\n nokamute::Move::Place((1, 1), Bug::Ant).apply(&mut board);\n\n nokamute::Move::Place((5, 0), Bug::Ant).apply(&mut board);\n\n nokamute::Move::Place((3, 1), Bug::Ladybug).apply(&mut board);\n\n nokamute::Move::Place((0, 1), Bug::Ant).apply(&mut board);\n\n nokamute::Move::Place((6, 1), Bug::Mosquito).apply(&mut board);\n\n nokamute::Move::Place((4, 1), Bug::Pillbug).apply(&mut board);\n\n nokamute::Move::Place((7, 1), Bug::Beetle).apply(&mut board);\n\n nokamute::Move::Place((0, 2), Bug::Spider).apply(&mut board);\n\n nokamute::Move::Place((-1, 2), Bug::Ant).apply(&mut board);\n\n nokamute::Move::Place((4, 3), Bug::Spider).apply(&mut board);\n\n nokamute::Move::Place((2, 2), Bug::Pillbug).apply(&mut board);\n\n nokamute::Move::Place((4, 4), Bug::Beetle).apply(&mut board);\n\n nokamute::Move::Place((3, 2), Bug::Queen).apply(&mut board);\n\n nokamute::Move::Place((3, 2), Bug::Beetle).apply(&mut board);\n\n nokamute::Move::Place((0, 3), Bug::Spider).apply(&mut board);\n\n nokamute::Move::Pass.apply(&mut board);\n\n nokamute::Move::Place((5, 5), Bug::Ant).apply(&mut board);\n\n nokamute::Move::Pass.apply(&mut board);\n\n let options = IterativeOptions::new().with_table_byte_size(16000).with_null_window_search(true);\n\n let mut strategy = IterativeSearch::new(nokamute::BasicEvaluator::default(), options);\n\n strategy.set_max_depth(depth);\n\n let m = strategy.choose_move(&mut board);\n\n assert!(m.is_some());\n\n}\n\n\n", "file_path": "benches/minimax.rs", "rank": 5, "score": 66521.55458655173 }, { "content": "fn bug_name(color: Color, bug: Bug, number: u8) -> String {\n\n let mut name = match color {\n\n // Standard seems to be white goes first.\n\n Color::Black => \"b\",\n\n Color::White => \"w\",\n\n }\n\n .to_string();\n\n name.push(bug.name().chars().next().unwrap().to_ascii_uppercase());\n\n if bug == Bug::Ant || bug == Bug::Spider || bug == Bug::Grasshopper || bug == Bug::Beetle {\n\n name.push((number + 48) as u8 as char);\n\n }\n\n name\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum UhpError {\n\n IoError(std::io::Error),\n\n UnknownPiece(String),\n\n InvalidGameString(String),\n\n InvalidMove(String),\n", "file_path": "src/uhp_util.rs", "rank": 6, "score": 61766.43030362227 }, { "content": "pub fn play_game(game_type: &str, name1: &str, name2: &str) {\n\n let player1 = get_player(name1);\n\n let player2 = get_player(name2);\n\n match face_off(game_type, player1, player2) {\n\n None => println!(\"Game over: draw.\"),\n\n Some(name) => println!(\"Game over: {} won.\", name),\n\n }\n\n}\n\n\n", "file_path": "src/player.rs", "rank": 7, "score": 58319.243114937184 }, { "content": "pub fn perft_debug(engine_cmd: 
&[String], game_string: &str, depth: usize) {\n\n let game_string = standard_games(game_string);\n\n let mut engine = UhpClient::new(engine_cmd).unwrap();\n\n engine.new_game(game_string).unwrap();\n\n let mut board = UhpBoard::from_game_string(game_string).unwrap().into_inner();\n\n // Generate random positions at the given depth, and compare output.\n\n let mut rng = rand::thread_rng();\n\n let mut moves = Vec::new();\n\n for iter in 0.. {\n\n if iter % 10000 == 0 {\n\n println!(\"iter {}\", iter);\n\n }\n\n let mut stack = Vec::new();\n\n for _ in 0..depth {\n\n moves.clear();\n\n Rules::generate_moves(&board, &mut moves);\n\n let m = moves[rng.gen_range(0, moves.len())];\n\n m.apply(&mut board);\n\n engine.apply(m).unwrap();\n\n stack.push(m);\n", "file_path": "src/perft.rs", "rank": 8, "score": 56415.53594428898 }, { "content": "fn adjacent(loc: Loc) -> [Loc; 6] {\n\n let (x, y) = loc;\n\n // In clockwise order\n\n [(x - 1, y - 1), (x, y - 1), (x + 1, y), (x + 1, y + 1), (x, y + 1), (x - 1, y)]\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]\n\npub enum Bug {\n\n Queen = 0,\n\n Grasshopper = 1,\n\n Spider = 2,\n\n Ant = 3,\n\n Beetle = 4,\n\n Mosquito = 5,\n\n Ladybug = 6,\n\n Pillbug = 7,\n\n}\n\n\n\nimpl Bug {\n\n pub fn codepoint(&self) -> char {\n", "file_path": "src/board.rs", "rank": 9, "score": 56252.20118008871 }, { "content": "pub fn perft_single_thread(game_string: &str) {\n\n let game_string = standard_games(game_string);\n\n println!(\"{}\", game_string);\n\n let mut b = UhpBoard::from_game_string(game_string).unwrap().into_inner();\n\n if game_string.contains(';') {\n\n b.println();\n\n }\n\n minimax::perft::<Rules>(&mut b, 20);\n\n}\n\n\n", "file_path": "src/perft.rs", "rank": 10, "score": 55260.17297323969 }, { "content": "pub fn perft_multi_thread(game_string: &str) {\n\n let game_string = standard_games(game_string);\n\n println!(\"{}\", game_string);\n\n let mut b = UhpBoard::from_game_string(game_string).unwrap().into_inner();\n\n b.println();\n\n println!(\"depth\\tcount\\ttime\\tkn/s\");\n\n for depth in 0.. {\n\n let start = Instant::now();\n\n let count = perft_recurse(&mut b, depth);\n\n let dur = start.elapsed();\n\n let rate = count as f64 / dur.as_secs_f64();\n\n println!(\"{}\\t{}\\t{:?}\\t{}\", depth, count, dur, rate as usize / 1000);\n\n }\n\n}\n\n\n", "file_path": "src/perft.rs", "rank": 11, "score": 55260.17297323969 }, { "content": "fn input_loc(board: &Board, prompt: &str, options: &[Loc]) -> Option<Loc> {\n\n board.println_highlights(options);\n\n let line = read_line(prompt);\n\n let index = if let Ok(num) = line.parse::<usize>() {\n\n num\n\n } else {\n\n println!(\"Invalid. Enter a number.\");\n\n return None;\n\n };\n\n if index >= options.len() {\n\n println!(\"Not one of the options.\");\n\n return None;\n\n }\n\n Some(options[index])\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 12, "score": 52198.616681771455 }, { "content": "fn input_movement(board: &Board, moves: &[crate::Move]) -> Option<crate::Move> {\n\n let mut starts = moves\n\n .iter()\n\n .filter_map(|m| if let crate::Move::Movement(start, _) = m { Some(*start) } else { None })\n\n .collect::<Vec<_>>();\n\n starts.sort();\n\n starts.dedup();\n\n if starts.is_empty() {\n\n println!(\"No movements available.\");\n\n return None;\n\n }\n\n let start = input_loc(board, \"Move which bug? 
\", &starts)?;\n\n\n\n let mut ends = moves\n\n .iter()\n\n .filter_map(|m| {\n\n if let crate::Move::Movement(start2, end) = m {\n\n if start == *start2 {\n\n Some(*end)\n\n } else {\n", "file_path": "src/cli.rs", "rank": 13, "score": 52198.616681771455 }, { "content": "fn input_placement(board: &Board, moves: &[crate::Move]) -> Option<crate::Move> {\n\n let mut places = moves\n\n .iter()\n\n .filter_map(|m| if let crate::Move::Place(place, _) = m { Some(*place) } else { None })\n\n .collect::<Vec<_>>();\n\n places.sort();\n\n places.dedup();\n\n if places.is_empty() {\n\n println!(\"No placements available.\");\n\n return None;\n\n }\n\n let place = input_loc(board, \"Place new bug where? \", &places)?;\n\n\n\n let bugs = moves\n\n .iter()\n\n .filter_map(|m| if let crate::Move::Place(_, bug) = m { Some(*bug) } else { None })\n\n .collect::<Vec<_>>();\n\n let bug = input_bug(&bugs)?;\n\n\n\n Some(crate::Move::Place(place, bug))\n", "file_path": "src/cli.rs", "rank": 14, "score": 52198.616681771455 }, { "content": "struct NokamutePlayer {\n\n board: Board,\n\n strategy: IterativeSearch<BasicEvaluator>,\n\n}\n\n\n\nimpl NokamutePlayer {\n\n fn new() -> Self {\n\n let opts =\n\n IterativeOptions::new().with_table_byte_size(32_000_000).with_null_window_search(true);\n\n let mut strategy = IterativeSearch::new(BasicEvaluator::default(), opts);\n\n strategy.set_timeout(Duration::from_secs(5));\n\n NokamutePlayer { board: Board::default(), strategy }\n\n }\n\n}\n\n\n\nimpl Player for NokamutePlayer {\n\n fn name(&self) -> String {\n\n \"nokamute\".to_owned()\n\n }\n\n\n", "file_path": "src/player.rs", "rank": 15, "score": 41862.86885220623 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().skip(1).collect::<Vec<_>>();\n\n match args.get(0).unwrap_or(&\"help\".to_owned()).as_ref() {\n\n \"cli\" => {\n\n terminal_game_interface();\n\n }\n\n \"uhp\" => {\n\n UhpServer::new().serve();\n\n }\n\n \"play\" => {\n\n let game_type = args.get(1).map(|s| s.as_ref()).unwrap_or(\"Base+MLP\");\n\n let player1 = args.get(2).map(|s| s.as_ref()).unwrap_or(\"human\");\n\n let player2 = args.get(3).map(|s| s.as_ref()).unwrap_or(\"ai\");\n\n play_game(game_type, player1, player2);\n\n }\n\n \"perft\" => {\n\n // For engine performance comparisons.\n\n let game_type = args.get(1).map(|s| s.as_ref()).unwrap_or(\"Base\");\n\n perft_single_thread(game_type);\n\n }\n", "file_path": "src/main.rs", "rank": 16, "score": 41544.06185721818 }, { "content": "fn help() {\n\n println!(\n\n r#\"nokamute hive engine {}\n\n\n\ncommands:\n\n cli: Interactive interface to a board\n\n uhp: Run as a Universal Hive Protocol engine\n\n play [game_type] [player1] [player2]:\n\n Play a game, with each player being \"human\", \"ai|nokamute\",\n\n or a path to a UHP engine\n\n perft [game_state]:\n\n Count the number of board states at each depth\n\n perft-cheating [game_state]:\n\n Perft, but with multiple threads to get the answer sooner\n\n perft-debug game_state depth engine_command\n\n Find discrepancies between nokamute and another UHP engine\n\n from the specified starting position at the specified depth\"#,\n\n env!(\"CARGO_PKG_VERSION\")\n\n );\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 17, "score": 41544.06185721818 }, { "content": "fn face_off(\n\n game_type: &str, mut player1: Box<dyn Player>, mut player2: Box<dyn Player>,\n\n) -> Option<String> {\n\n let mut b = Board::new_from_game_type(game_type).unwrap();\n\n player1.new_game(game_type);\n\n player2.new_game(game_type);\n\n let mut players = 
[player1, player2];\n\n let mut p = 0;\n\n loop {\n\n b.println();\n\n println!(\"{} ({:?}) to move\", players[p].name(), b.to_move());\n\n let m = players[p].generate_move();\n\n let mut moves = Vec::new();\n\n Rules::generate_moves(&b, &mut moves);\n\n if !moves.contains(&m) {\n\n println!(\"{} played an illegal move.\", players[p].name());\n\n return Some(players[1 - p].name());\n\n }\n\n m.apply(&mut b);\n\n if let Some(winner) = Rules::get_winner(&b) {\n", "file_path": "src/player.rs", "rank": 18, "score": 41544.06185721818 }, { "content": "fn main() {\n\n // TODO: cmd line selection, for perf record\n\n println!(\"empty board 5: {}\", easybench::bench(|| empty_board_depth(5)));\n\n println!(\"full board 3: {}\", easybench::bench(|| full_board_depth(2)));\n\n}\n", "file_path": "benches/minimax.rs", "rank": 19, "score": 41544.06185721818 }, { "content": "#[test]\n\nfn test_perft() {\n\n let mut b = Board::new_from_game_type(\"Base\").unwrap();\n\n let move_counts = minimax::perft::<Rules>(&mut b, 4);\n\n assert_eq!(move_counts, vec![1, 4, 96, 1440, 21600]);\n\n\n\n b = Board::new_from_game_type(\"Base+MLP\").unwrap();\n\n let move_counts = minimax::perft::<Rules>(&mut b, 4);\n\n assert_eq!(move_counts, vec![1, 7, 294, 6678, 151686]);\n\n}\n", "file_path": "src/perft.rs", "rank": 20, "score": 39935.53648019928 }, { "content": "fn dump_difference(\n\n board: &mut Board, iter: u64, nokamute_moves: &[crate::Move], engine_moves: &[crate::Move],\n\n) {\n\n println!(\n\n \"iteration {} found discrepancy: {} vs {} moves\",\n\n iter,\n\n nokamute_moves.len(),\n\n engine_moves.len()\n\n );\n\n println!(\"position:\");\n\n board.println();\n\n let mut common = Vec::new();\n\n let mut nokamute_only = Vec::new();\n\n let mut engine_only = Vec::new();\n\n for &m in nokamute_moves.iter() {\n\n if engine_moves.contains(&m) {\n\n common.push(m);\n\n } else {\n\n nokamute_only.push(m);\n\n }\n", "file_path": "src/perft.rs", "rank": 21, "score": 39935.53648019928 }, { "content": "fn read_line(prompt: &str) -> String {\n\n print!(\"{}\", prompt);\n\n io::stdout().flush().unwrap();\n\n io::stdin().lock().lines().next().unwrap().unwrap()\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 22, "score": 28977.053373949966 }, { "content": "fn standard_games(game_string: &str) -> &str {\n\n match game_string {\n\n\t\"beetle_gate\" => \"Base+MLP;InProgress;White[5];wB1;bB1 -wB1;wQ wB1-;bQ /bB1;wB2 wQ-;bQ /wB1;wB2 wQ;bB1 bQ\",\n\n\t_ => game_string,\n\n }\n\n}\n\n\n", "file_path": "src/perft.rs", "rank": 23, "score": 27991.96879706419 }, { "content": "fn parse_hhmmss(time: &str) -> Option<Duration> {\n\n let mut toks = time.split(':');\n\n let hours = toks.next().unwrap_or(\"\").parse::<u64>().ok()?;\n\n let minutes = toks.next().unwrap_or(\"\").parse::<u64>().ok()?;\n\n let seconds = toks.next().unwrap_or(\"\").parse::<u64>().ok()?;\n\n Some(Duration::from_secs(hours * 3600 + minutes * 60 + seconds))\n\n}\n", "file_path": "src/uhp_server.rs", "rank": 24, "score": 25859.945140444182 }, { "content": "fn get_player(name: &str) -> Box<dyn Player> {\n\n match name {\n\n \"nokamute\" => Box::new(NokamutePlayer::new()),\n\n \"ai\" => Box::new(NokamutePlayer::new()),\n\n \"human\" => Box::new(CliPlayer::new()),\n\n // Try to launch this as a UHP server\n\n _ => Box::new(UhpPlayer::new(name).unwrap()),\n\n }\n\n}\n\n\n", "file_path": "src/player.rs", "rank": 25, "score": 24881.851601777977 }, { "content": " board.move_history.pop();\n\n match *self {\n\n Move::Place(loc, bug) => {\n\n let id = board.id(loc);\n\n 
board.remove(id);\n\n board.mut_remaining()[bug as usize] += 1;\n\n }\n\n Move::Movement(start, end) => {\n\n let start_id = board.alloc(start);\n\n let end_id = board.alloc(end);\n\n let tile = board.remove(end_id);\n\n board.insert(start_id, tile.bug, tile.color);\n\n }\n\n Move::Pass => {}\n\n }\n\n }\n\n}\n\n\n\n// Useful utility.\n\npub(crate) struct NodeSet {\n", "file_path": "src/board.rs", "rank": 26, "score": 23225.832193077713 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]\n\npub enum Move {\n\n Place(Loc, Bug),\n\n Movement(Loc, Loc),\n\n Pass,\n\n}\n\n\n\nimpl Default for Move {\n\n fn default() -> Move {\n\n Move::Pass\n\n }\n\n}\n\n\n\nimpl minimax::Move for Move {\n\n type G = Rules;\n\n fn apply(&self, board: &mut Board) {\n\n let dest = match *self {\n\n Move::Place(loc, bug) => {\n\n let id = board.alloc(loc);\n\n board.insert(id, bug, board.to_move());\n", "file_path": "src/board.rs", "rank": 27, "score": 23222.272546248845 }, { "content": " table: [u32; 8],\n\n}\n\n\n\nimpl NodeSet {\n\n fn new() -> NodeSet {\n\n NodeSet { table: [0; 8] }\n\n }\n\n\n\n fn set(&mut self, id: Id) {\n\n self.table[id as usize & 0x7] |= 1 << (id as u32 >> 3);\n\n }\n\n\n\n pub(crate) fn get(&self, id: Id) -> bool {\n\n (self.table[id as usize & 0x7] >> (id as u32 >> 3)) & 1 != 0\n\n }\n\n}\n\n\n\nimpl Board {\n\n fn generate_placements(&self, moves: &mut Vec<Move>) {\n\n // Use empty spaces that have no opposite colored tiles adjacent.\n", "file_path": "src/board.rs", "rank": 28, "score": 23222.140217411077 }, { "content": " self.queens[self.move_num as usize & 1] = UNASSIGNED;\n\n }\n\n tile\n\n }\n\n\n\n fn height(&self, id: Id) -> u32 {\n\n let mut height = 0;\n\n let mut tile: Option<&Tile> = self.nodes[id as usize].tile.as_ref();\n\n while let Some(t) = tile {\n\n height += 1;\n\n tile = t.underneath.map(|uid| self.underworld[uid as usize].as_ref().unwrap());\n\n }\n\n height\n\n }\n\n\n\n pub(crate) fn adjacent(&self, id: Id) -> &[Id; 6] {\n\n &self.nodes[id as usize].adj\n\n }\n\n\n\n pub(crate) fn get_remaining(&self) -> &[u8; 8] {\n", "file_path": "src/board.rs", "rank": 29, "score": 23220.350137583973 }, { "content": " if let Some(exts) = toks.next() {\n\n for ext in exts.chars() {\n\n match ext {\n\n 'M' => starting[Bug::Mosquito as usize] = 1,\n\n 'L' => starting[Bug::Ladybug as usize] = 1,\n\n 'P' => starting[Bug::Pillbug as usize] = 1,\n\n _ => return None,\n\n }\n\n }\n\n }\n\n Some(Board::new(starting))\n\n }\n\n}\n\n\n\nimpl Default for Board {\n\n fn default() -> Self {\n\n Self::new_expansions()\n\n }\n\n}\n\n\n", "file_path": "src/board.rs", "rank": 30, "score": 23218.816865605702 }, { "content": " None\n\n };\n\n let tile = Tile { bug, color, underneath };\n\n self.nodes[id as usize].tile = Some(tile);\n\n self.zobrist_hash ^= self.zobrist(id, bug, color, self.height(id));\n\n\n\n if bug == Bug::Queen {\n\n self.queens[self.move_num as usize & 1] = id;\n\n }\n\n }\n\n\n\n // Asserts that there is something there.\n\n fn remove(&mut self, id: Id) -> Tile {\n\n let height = self.height(id);\n\n let mut tile = self.nodes[id as usize].tile.take().unwrap();\n\n if let Some(stack) = tile.underneath.take() {\n\n self.nodes[id as usize].tile = self.remove_underworld(stack);\n\n }\n\n self.zobrist_hash ^= self.zobrist(id, tile.bug, tile.color, height);\n\n if tile.bug == Bug::Queen {\n", "file_path": "src/board.rs", "rank": 31, "score": 23218.794329520133 }, { "content": "extern crate fnv;\n\nextern crate minimax;\n\nextern crate 
termcolor;\n\n\n\nuse fnv::FnvHashMap;\n\nuse std::cmp::{max, min};\n\nuse std::collections::hash_map::DefaultHasher;\n\nuse std::convert::TryInto;\n\nuse std::default::Default;\n\nuse std::hash::Hasher;\n\nuse std::io::Write;\n\nuse termcolor::WriteColor;\n\n\n\n// Board representation: Adjacency-list graph with grid backup.\n\n// Dynamically allocate used and empty adjacent hexes with indexes.\n\n// Compact adjacency list for each node. Generate new nodes when expanding.\n\n\n\n// Hex coordinates. Grid connections plus one of the diagonals. First bug is at (0,0).\n\npub type Loc = (i8, i8);\n\n\n\n// Persistent id of a location.\n\npub(crate) type Id = u8;\n\n\n\n// Special value for nodes not adjacent to occupied tiles that haven't been\n\n// allocated their own node yet.\n\npub(crate) const UNASSIGNED: Id = 0;\n\n\n\n// Ids for tiles that are currently under other pieces.\n", "file_path": "src/board.rs", "rank": 32, "score": 23217.787134432612 }, { "content": " board.mut_remaining()[bug as usize] -= 1;\n\n id\n\n }\n\n Move::Movement(start, end) => {\n\n let start_id = board.alloc(start);\n\n let end_id = board.alloc(end);\n\n let tile = board.remove(start_id);\n\n board.insert(end_id, tile.bug, tile.color);\n\n end_id\n\n }\n\n Move::Pass => UNASSIGNED,\n\n };\n\n board.move_num += 1;\n\n board.zobrist_history.push(board.zobrist_hash);\n\n board.move_history.push(dest);\n\n }\n\n\n\n fn undo(&self, board: &mut Board) {\n\n board.move_num -= 1;\n\n board.zobrist_history.pop();\n", "file_path": "src/board.rs", "rank": 33, "score": 23217.658493097108 }, { "content": " fn insert_underworld(&mut self, tile: Tile) -> UnderId {\n\n for i in 0..self.underworld.len() {\n\n if self.underworld[i].is_none() {\n\n self.underworld[i] = Some(tile);\n\n return i as UnderId;\n\n }\n\n }\n\n unreachable!(\"underworld overflowed\");\n\n }\n\n\n\n fn remove_underworld(&mut self, id: UnderId) -> Option<Tile> {\n\n self.underworld[id as usize].take()\n\n }\n\n\n\n fn insert(&mut self, id: Id, bug: Bug, color: Color) {\n\n let underneath = if let Some(prev) = self.nodes[id as usize].tile.take() {\n\n Some(self.insert_underworld(prev))\n\n } else {\n\n // Potentially newly occupied node. 
Ensure all surrounding nodes get allocated.\n\n self.alloc_surrounding(id);\n", "file_path": "src/board.rs", "rank": 34, "score": 23217.493673188827 }, { "content": " }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const ORIGIN: Id = 1;\n\n\n\n impl Board {\n\n fn insert_loc(&mut self, loc: Loc, bug: Bug, color: Color) {\n\n let id = self.alloc(loc);\n\n self.insert(id, bug, color);\n\n }\n\n\n\n fn remove_loc(&mut self, loc: Loc) -> Tile {\n\n let id = self.alloc(loc);\n\n self.remove(id)\n\n }\n", "file_path": "src/board.rs", "rank": 35, "score": 23217.24468785049 }, { "content": " let mut dedup = false;\n\n for (id, node) in (0..).zip(self.nodes.iter()).skip(1) {\n\n if let Some(tile) = &node.tile {\n\n if tile.color != self.to_move() {\n\n continue;\n\n }\n\n if tile.underneath.is_some() {\n\n self.generate_stack_walking(id, moves);\n\n // Don't let mosquito on stack use pillbug ability.\n\n // Although the rules don't seem to specify either way.\n\n continue;\n\n }\n\n // Check for throw ability before movability, as pinned pillbugs can still throw.\n\n let pillbug_powers = tile.bug == Bug::Pillbug\n\n || (tile.bug == Bug::Mosquito\n\n && node.adj.iter().any(|&adj| {\n\n self.get(adj).map(|tile| tile.bug == Bug::Pillbug).unwrap_or(false)\n\n }));\n\n // However pillbugs just thrown cannot throw.\n\n if pillbug_powers && stunned != Some(&id) {\n", "file_path": "src/board.rs", "rank": 36, "score": 23217.022294520884 }, { "content": " self.nodes.push(node);\n\n self.zobrist_table.extend(&Board::hash_loc(loc));\n\n new_id\n\n }\n\n\n\n // For tiles getting placed, ensure all tiles around them are allocated.\n\n // This ensures empty tiles know all tiles that surround them, even if\n\n // they don't touch each other for placement.\n\n fn alloc_surrounding(&mut self, id: Id) {\n\n for (i, &loc) in (0..6).zip(adjacent(self.loc(id)).iter()) {\n\n if self.adjacent(id)[i] == UNASSIGNED {\n\n self.alloc(loc);\n\n }\n\n }\n\n }\n\n\n\n pub(crate) fn get(&self, id: Id) -> Option<&Tile> {\n\n self.nodes[id as usize].tile.as_ref()\n\n }\n\n\n", "file_path": "src/board.rs", "rank": 37, "score": 23216.74639973108 }, { "content": " &self.remaining[self.move_num as usize & 1]\n\n }\n\n\n\n pub(crate) fn get_opponent_remaining(&self) -> &[u8; 8] {\n\n &self.remaining[!self.move_num as usize & 1]\n\n }\n\n\n\n fn mut_remaining(&mut self) -> &mut [u8; 8] {\n\n &mut self.remaining[self.move_num as usize & 1]\n\n }\n\n\n\n pub(crate) fn get_available_bugs(&self) -> [(Bug, u8); 8] {\n\n let remaining = self.get_remaining();\n\n [\n\n (Bug::Queen, remaining[Bug::Queen as usize]),\n\n (Bug::Grasshopper, remaining[Bug::Grasshopper as usize]),\n\n (Bug::Spider, remaining[Bug::Spider as usize]),\n\n (Bug::Ant, remaining[Bug::Ant as usize]),\n\n (Bug::Beetle, remaining[Bug::Beetle as usize]),\n\n (Bug::Mosquito, remaining[Bug::Mosquito as usize]),\n", "file_path": "src/board.rs", "rank": 38, "score": 23216.342659258928 }, { "content": " visited: NodeSet,\n\n immovable: NodeSet,\n\n // Visitation number in DFS traversal.\n\n num: [u8; 256],\n\n // Lowest-numbered node reachable using DFS edges and then at most\n\n // one back edge.\n\n low: [u8; 256],\n\n visit_num: u8,\n\n }\n\n let mut state = State {\n\n board: self,\n\n visited: NodeSet::new(),\n\n immovable: NodeSet::new(),\n\n num: [0; 256],\n\n low: [0; 256],\n\n visit_num: 1,\n\n };\n\n fn dfs(state: &mut State, id: Id, parent: Id) {\n\n state.visited.set(id);\n\n state.num[id as usize] = state.visit_num;\n", "file_path": 
"src/board.rs", "rank": 39, "score": 23216.15463107113 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum Color {\n\n Black = 1,\n\n White = 0,\n\n}\n\n\n\n// A tile on the board.\n\n#[derive(Clone, Copy)]\n\npub(crate) struct Tile {\n\n pub(crate) bug: Bug,\n\n pub(crate) color: Color,\n\n pub(crate) underneath: Option<UnderId>,\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub(crate) struct Node {\n\n // Adjacency list.\n\n pub(crate) adj: [Id; 6],\n\n pub(crate) tile: Option<Tile>,\n\n}\n", "file_path": "src/board.rs", "rank": 40, "score": 23215.639337140245 }, { "content": " Bug::Pillbug => self.generate_walk1(id, moves),\n\n }\n\n }\n\n }\n\n\n\n if dedup {\n\n // Mosquitos and pillbugs can create duplicate moves, so sort and dedup.\n\n moves.sort_unstable();\n\n moves.dedup();\n\n }\n\n }\n\n}\n\n\n\npub struct Rules;\n\n\n\nimpl minimax::Game for Rules {\n\n type S = Board;\n\n type M = Move;\n\n\n\n fn generate_moves(board: &Board, moves: &mut Vec<Move>) {\n", "file_path": "src/board.rs", "rank": 41, "score": 23215.591623992983 }, { "content": "\n\n fn new(remaining: [u8; 8]) -> Self {\n\n // Pre-allocate dummy unassigned Id to unused location.\n\n let fake_loc = (i8::MAX, i8::MAX);\n\n let mut loc_to_id = FnvHashMap::default();\n\n loc_to_id.insert(fake_loc, 0);\n\n let mut board = Board {\n\n nodes: vec![Node { adj: [UNASSIGNED; 6], tile: None }],\n\n underworld: [None; 8],\n\n id_to_loc: vec![fake_loc],\n\n loc_to_id,\n\n remaining: [remaining; 2],\n\n queens: [UNASSIGNED; 2],\n\n move_num: 0,\n\n zobrist_table: vec![0, 0],\n\n zobrist_hash: 0,\n\n zobrist_history: Vec::new(),\n\n move_history: Vec::new(),\n\n };\n\n // Pre-allocate starting moves.\n", "file_path": "src/board.rs", "rank": 42, "score": 23215.576930689393 }, { "content": " board.alloc((0, 0));\n\n board.alloc((1, 0));\n\n board\n\n }\n\n\n\n pub fn new_core_set() -> Self {\n\n Self::new([1, 3, 2, 3, 2, 0, 0, 0])\n\n }\n\n\n\n pub fn new_expansions() -> Self {\n\n Self::new([1, 3, 2, 3, 2, 1, 1, 1])\n\n }\n\n\n\n // New board from UHP GameTypeString, e.g. \"Base+MLP\"\n\n pub fn new_from_game_type(game_type: &str) -> Option<Self> {\n\n let mut starting = [1, 3, 2, 3, 2, 0, 0, 0];\n\n let mut toks = game_type.split('+');\n\n if toks.next()? 
!= \"Base\" {\n\n return None;\n\n }\n", "file_path": "src/board.rs", "rank": 43, "score": 23215.279837038757 }, { "content": "\n\n#[derive(Clone)]\n\npub struct Board {\n\n // Indexed by Id.\n\n pub(crate) nodes: Vec<Node>,\n\n // Tiles that are under other tiles.\n\n underworld: [Option<Tile>; 8],\n\n id_to_loc: Vec<Loc>,\n\n loc_to_id: FnvHashMap<Loc, Id>,\n\n remaining: [[u8; 8]; 2],\n\n queens: [Id; 2],\n\n pub(crate) move_num: u16,\n\n // Dynamically allocate zobrist values for the ids we have by hashing\n\n // their locations.\n\n zobrist_table: Vec<u64>,\n\n zobrist_hash: u64,\n\n zobrist_history: Vec<u64>,\n\n // History of move destinations.\n\n move_history: Vec<Id>,\n\n}\n", "file_path": "src/board.rs", "rank": 44, "score": 23214.886255128014 }, { "content": " fn generate_mosquito(&self, id: Id, moves: &mut Vec<Move>) {\n\n let mut targets = [false; 8];\n\n for &adj in self.adjacent(id) {\n\n if let Some(tile) = self.get(adj) {\n\n targets[tile.bug as usize] = true;\n\n }\n\n }\n\n\n\n if targets[Bug::Ant as usize] {\n\n self.generate_walk_all(id, moves);\n\n } else {\n\n // Avoid adding strictly duplicative moves to the ant.\n\n if targets[Bug::Queen as usize]\n\n || targets[Bug::Beetle as usize]\n\n || targets[Bug::Pillbug as usize]\n\n {\n\n self.generate_walk1(id, moves);\n\n }\n\n if targets[Bug::Spider as usize] {\n\n self.generate_walk3(id, moves);\n", "file_path": "src/board.rs", "rank": 45, "score": 23214.661964606174 }, { "content": " board.generate_ladybug(start, &mut moves);\n\n board.assert_movements(\n\n &moves,\n\n (2, 3),\n\n &[(-1, 0), (1, 0), (2, 0), (-1, 1), (1, 1), (3, 1), (0, 2), (3, 2), (1, 3), (3, 3)],\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_generate_throws() {\n\n let mut board = Board::default();\n\n board.fill_board(&[(1, 1), (0, 0), (0, 0), (2, 2), (2, 2), (0, 1), (1, 2)], Bug::Pillbug);\n\n // ..💊...\n\n //..💊💊..\n\n // ..💊💊.\n\n let mut moves = Vec::new();\n\n let immovable = NodeSet::new();\n\n let start = board.id((1, 1));\n\n board.generate_throws(&immovable, start, &mut moves);\n\n assert_eq!(4, moves.len());\n", "file_path": "src/board.rs", "rank": 46, "score": 23214.520266236308 }, { "content": " (Bug::Ladybug, remaining[Bug::Ladybug as usize]),\n\n (Bug::Pillbug, remaining[Bug::Pillbug as usize]),\n\n ]\n\n }\n\n\n\n fn queen_required(&self) -> bool {\n\n self.move_num > 5 && self.get_remaining()[0] > 0\n\n }\n\n\n\n pub(crate) fn queens_surrounded(&self) -> [usize; 2] {\n\n let mut out = [0; 2];\n\n for (i, entry) in out.iter_mut().enumerate() {\n\n *entry = self\n\n .adjacent(self.queens[i])\n\n .iter()\n\n .filter(|adj| self.get(**adj).is_some())\n\n .count();\n\n }\n\n out\n\n }\n", "file_path": "src/board.rs", "rank": 47, "score": 23214.30782791654 }, { "content": " }\n\n if parent == UNASSIGNED && children > 1 {\n\n state.immovable.set(id);\n\n }\n\n }\n\n\n\n let start: Id = (0..).zip(self.nodes.iter()).find(|(_, x)| x.tile.is_some()).unwrap().0;\n\n dfs(&mut state, start, UNASSIGNED);\n\n state.immovable\n\n }\n\n\n\n // For a position on the outside (whether occupied or not), find all\n\n // adjacent locations still connected to the hive that are slidable.\n\n // A slidable position has 2 empty slots next to an occupied slot.\n\n // For all 2^6 possibilities, there can be 0, 2, or 4 slidable neighbors.\n\n pub(crate) fn slidable_adjacent(&self, origin: Id, id: Id) -> impl Iterator<Item = Id> + '_ {\n\n let neighbors = self.adjacent(id);\n\n // Each bit is whether neighbor is occupied.\n\n let mut occupied = 0;\n\n for neighbor in 
neighbors.iter().rev() {\n", "file_path": "src/board.rs", "rank": 48, "score": 23214.307152902584 }, { "content": "\n\nimpl minimax::Zobrist for Board {\n\n fn zobrist_hash(&self) -> u64 {\n\n self.zobrist_hash\n\n }\n\n}\n\n\n\nimpl Board {\n\n pub fn to_move(&self) -> Color {\n\n if self.move_num % 2 == 0 {\n\n Color::White\n\n } else {\n\n Color::Black\n\n }\n\n }\n\n\n\n pub fn loc(&self, id: Id) -> Loc {\n\n self.id_to_loc[id as usize]\n\n }\n\n\n", "file_path": "src/board.rs", "rank": 49, "score": 23214.261820528038 }, { "content": " for (id, node) in (0..).zip(self.nodes.iter()).skip(1) {\n\n if node.tile.is_some() {\n\n continue;\n\n }\n\n let mut num_buddies = 0;\n\n let mut num_enemies = 0;\n\n for adj in node.adj.iter() {\n\n if let Some(tile) = self.get(*adj) {\n\n if tile.color == self.to_move() {\n\n num_buddies += 1;\n\n } else {\n\n num_enemies += 1;\n\n }\n\n }\n\n }\n\n if num_buddies > 0 && num_enemies == 0 {\n\n for (bug, num_left) in self.get_available_bugs().iter() {\n\n if self.queen_required() && *bug != Bug::Queen {\n\n continue;\n\n }\n", "file_path": "src/board.rs", "rank": 50, "score": 23213.744743439296 }, { "content": " }\n\n }\n\n if targets[Bug::Grasshopper as usize] {\n\n self.generate_jumps(id, moves);\n\n }\n\n if targets[Bug::Beetle as usize] {\n\n self.generate_stack_walking(id, moves);\n\n }\n\n if targets[Bug::Ladybug as usize] {\n\n self.generate_ladybug(id, moves);\n\n }\n\n }\n\n\n\n fn generate_movements(&self, moves: &mut Vec<Move>) {\n\n let mut immovable = self.find_cut_vertexes();\n\n let stunned = self.move_history.last();\n\n if let Some(moved) = stunned {\n\n // Can't move pieces that were moved on the opponent's turn.\n\n immovable.set(*moved);\n\n }\n", "file_path": "src/board.rs", "rank": 51, "score": 23213.616920898392 }, { "content": "impl Board {\n\n fn bounding_box(&self) -> (i8, i8, i8, i8) {\n\n if self.nodes.iter().all(|node| node.tile.is_none()) {\n\n return (0, 1, 0, 1);\n\n }\n\n let mut minx = i8::MAX;\n\n let mut maxx = i8::MIN;\n\n let mut miny = i8::MAX;\n\n let mut maxy = i8::MIN;\n\n for (id, loc) in (0..).zip(self.id_to_loc.iter()) {\n\n if self.get(id).is_some() {\n\n minx = min(minx, loc.0);\n\n maxx = max(maxx, loc.0);\n\n miny = min(miny, loc.1);\n\n maxy = max(maxy, loc.1);\n\n }\n\n }\n\n (minx, maxx - minx + 1, miny, maxy - miny + 1)\n\n }\n\n\n", "file_path": "src/board.rs", "rank": 52, "score": 23213.3033142986 }, { "content": " [hash1, hash2]\n\n }\n\n\n\n // Allocate a new node, and link it to its neighbors.\n\n fn alloc(&mut self, loc: Loc) -> Id {\n\n if let Some(id) = self.loc_to_id.get(&loc) {\n\n return *id;\n\n }\n\n let new_id: Id = self.nodes.len().try_into().unwrap();\n\n self.loc_to_id.insert(loc, new_id);\n\n self.id_to_loc.push(loc);\n\n let mut node = Node { tile: None, adj: [UNASSIGNED; 6] };\n\n // Link existing adjacent nodes in both directions.\n\n for (i, adj) in (0..6).zip(adjacent(loc).iter()) {\n\n if let Some(id) = self.loc_to_id.get(adj) {\n\n node.adj[i] = *id;\n\n debug_assert_eq!(self.nodes[*id as usize].adj[(i + 3) % 6], UNASSIGNED);\n\n self.nodes[*id as usize].adj[(i + 3) % 6] = new_id;\n\n }\n\n }\n", "file_path": "src/board.rs", "rank": 53, "score": 23213.134974027023 }, { "content": " self.generate_throws(&immovable, id, moves);\n\n dedup = true;\n\n }\n\n if immovable.get(id) {\n\n continue;\n\n }\n\n match tile.bug {\n\n Bug::Queen => self.generate_walk1(id, moves),\n\n Bug::Grasshopper => self.generate_jumps(id, moves),\n\n Bug::Spider => self.generate_walk3(id, moves),\n\n 
Bug::Ant => self.generate_walk_all(id, moves),\n\n Bug::Beetle => {\n\n self.generate_walk1(id, moves);\n\n self.generate_stack_walking(id, moves);\n\n }\n\n Bug::Mosquito => {\n\n self.generate_mosquito(id, moves);\n\n dedup = true;\n\n }\n\n Bug::Ladybug => self.generate_ladybug(id, moves),\n", "file_path": "src/board.rs", "rank": 54, "score": 23212.86602423417 }, { "content": " }\n\n }\n\n }\n\n }\n\n\n\n for s3 in 0..self.nodes.len() as Id {\n\n if step3.get(s3) {\n\n moves.push(Move::Movement(self.loc(id), self.loc(s3)));\n\n }\n\n }\n\n }\n\n\n\n fn generate_throws(&self, immovable: &NodeSet, id: Id, moves: &mut Vec<Move>) {\n\n let mut starts = [UNASSIGNED; 6];\n\n let mut num_starts = 0;\n\n let mut ends = [UNASSIGNED; 6];\n\n let mut num_ends = 0;\n\n let mut buf = [UNASSIGNED; 6];\n\n for adj in self.slidable_adjacent_beetle(&mut buf, UNASSIGNED, id) {\n\n match self.height(adj) {\n", "file_path": "src/board.rs", "rank": 55, "score": 23212.808259754183 }, { "content": " board.assert_movements(&moves, (0, 0), &[(0, 1), (1, 2)]);\n\n }\n\n\n\n #[test]\n\n fn test_winner() {\n\n use minimax::{Game, Move};\n\n\n\n // Draw by stalemate\n\n let mut board = Board::default();\n\n let x1 = (-1, -1);\n\n let x2 = (-1, 0);\n\n let y1 = (1, 1);\n\n let y2 = (1, 0);\n\n crate::Move::Place((0, 0), Bug::Spider).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n crate::Move::Place(x1, Bug::Queen).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n // Create the position the first time.\n\n crate::Move::Place(y1, Bug::Queen).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n", "file_path": "src/board.rs", "rank": 56, "score": 23212.50877706909 }, { "content": " fn test_gen_placement() {\n\n let mut board = Board::default();\n\n for i in 1..8 {\n\n board.remaining[0][i] = 0;\n\n board.remaining[1][i] = 0;\n\n }\n\n board.insert(1, Bug::Queen, Color::White);\n\n board.insert(2, Bug::Queen, Color::Black);\n\n let mut moves = Vec::new();\n\n board.generate_placements(&mut moves);\n\n board.assert_placements(\n\n &moves,\n\n &[((-1, -1), Bug::Queen), ((-1, 0), Bug::Queen), ((0, 1), Bug::Queen)],\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_cut_vertex() {\n\n let mut board = Board::default();\n\n //..🐝🐝🐝🐝\n", "file_path": "src/board.rs", "rank": 57, "score": 23211.828962134685 }, { "content": " state.low[id as usize] = state.visit_num;\n\n state.visit_num += 1;\n\n let mut children = 0;\n\n for &adj in state.board.adjacent(id) {\n\n if state.board.get(adj).is_none() {\n\n continue;\n\n }\n\n if adj == parent {\n\n continue;\n\n }\n\n if state.visited.get(adj) {\n\n state.low[id as usize] = min(state.low[id as usize], state.num[adj as usize]);\n\n } else {\n\n dfs(state, adj, id);\n\n state.low[id as usize] = min(state.low[id as usize], state.low[adj as usize]);\n\n if state.low[adj as usize] >= state.num[id as usize] && parent != UNASSIGNED {\n\n state.immovable.set(id);\n\n }\n\n children += 1;\n\n }\n", "file_path": "src/board.rs", "rank": 58, "score": 23211.778449758287 }, { "content": " pub fn id(&self, loc: Loc) -> Id {\n\n *self.loc_to_id.get(&loc).unwrap()\n\n }\n\n\n\n fn zobrist(&self, id: Id, bug: Bug, color: Color, height: u32) -> u64 {\n\n let hash = self.zobrist_table[(id as usize) << 1 | (color as usize)];\n\n // I don't really want to multiply the table by another factor of 7*8, so\n\n // just realign the existing random bits.\n\n // Also include the color to move hash.\n\n hash.rotate_left((height << 3) | bug as u32) ^ 
0xa6c11b626b105b7c\n\n }\n\n\n\n // Hash the loc to produce 128 bits of zobrist lookup table.\n\n fn hash_loc(loc: Loc) -> [u64; 2] {\n\n let mut hasher = DefaultHasher::new();\n\n hasher.write_u8(loc.0 as u8);\n\n hasher.write_u8(loc.1 as u8);\n\n let hash1 = hasher.finish();\n\n hasher.write_u64(0x73399349585d196e);\n\n let hash2 = hasher.finish();\n", "file_path": "src/board.rs", "rank": 59, "score": 23211.610437029252 }, { "content": " // Unlike what the original rules say (where a beetle on the hive is\n\n // unrestricted), climbing bugs need to slide into/out of the higher of\n\n // source or dest heights.\n\n // https://www.boardgamegeek.com/thread/332467\n\n fn slidable_adjacent_beetle<'a>(\n\n &self, out: &'a mut [Id; 6], orig: Id, id: Id,\n\n ) -> impl Iterator<Item = Id> + 'a {\n\n let mut self_height = self.height(id);\n\n if orig == id {\n\n self_height -= 1;\n\n }\n\n let mut heights = [0; 6];\n\n let neighbors = self.adjacent(id);\n\n for i in 0..6 {\n\n heights[i] = self.height(neighbors[i]);\n\n }\n\n\n\n let mut n = 0;\n\n for i in 0..6 {\n\n let barrier = max(self_height, heights[i]);\n", "file_path": "src/board.rs", "rank": 60, "score": 23211.581260657797 }, { "content": " if barrier == 0 {\n\n // Walking at height zero uses regular sliding rules.\n\n continue;\n\n }\n\n if heights[(i + 1) % 6] > barrier && heights[(i + 5) % 6] > barrier {\n\n // Piles on both sides are too high and we cannot pass through.\n\n continue;\n\n }\n\n out[n] = neighbors[i];\n\n n += 1;\n\n }\n\n\n\n out.iter().take(n).copied()\n\n }\n\n\n\n // From any bug on top of a stack.\n\n fn generate_stack_walking(&self, id: Id, moves: &mut Vec<Move>) {\n\n let mut buf = [UNASSIGNED; 6];\n\n for adj in self.slidable_adjacent_beetle(&mut buf, id, id) {\n\n moves.push(Move::Movement(self.loc(id), self.loc(adj)));\n", "file_path": "src/board.rs", "rank": 61, "score": 23211.053367729364 }, { "content": " //...🕷..🕷.\n\n // ...🕷🕷🕷\n\n board.fill_board(\n\n &[(-1, -1), (0, 0), (2, 0), (0, 1), (3, 1), (1, 2), (2, 2), (3, 2)],\n\n Bug::Spider,\n\n );\n\n let mut moves = Vec::new();\n\n let start = board.id((-1, -1));\n\n board.generate_walk3(start, &mut moves);\n\n board.assert_movements(&moves, (-1, -1), &[(0, 2), (1, -1), (1, 1), (2, 1)]);\n\n\n\n // ..🕷.🕷..\n\n //..🕷🕷.🕷.\n\n // ..🕷🕷🕷\n\n board.remove_loc((-1, -1));\n\n board.insert_loc((1, 1), Bug::Spider, Color::Black);\n\n moves.clear();\n\n let start = board.id((1, 1));\n\n board.generate_walk3(start, &mut moves);\n\n board.assert_movements(&moves, (1, 1), &[(-1, -1), (0, -1), (1, -1), (2, -1)]);\n", "file_path": "src/board.rs", "rank": 62, "score": 23210.63594178281 }, { "content": " Bug::Ladybug => \"ladybug\",\n\n Bug::Pillbug => \"pillbug\",\n\n }\n\n }\n\n\n\n pub fn iter_all() -> impl Iterator<Item = Self> {\n\n [\n\n Bug::Queen,\n\n Bug::Grasshopper,\n\n Bug::Spider,\n\n Bug::Ant,\n\n Bug::Beetle,\n\n Bug::Mosquito,\n\n Bug::Ladybug,\n\n Bug::Pillbug,\n\n ]\n\n .iter()\n\n .copied()\n\n }\n\n\n", "file_path": "src/board.rs", "rank": 63, "score": 23210.559800064977 }, { "content": " (1, 0),\n\n (1, 1),\n\n (1, 2),\n\n (2, 0),\n\n (2, 1),\n\n (2, 2),\n\n ],\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_generate_ladybug() {\n\n let mut board = Board::default();\n\n board.fill_board(&[(2, 3), (0, 0), (0, 1), (2, 1), (1, 2), (2, 2)], Bug::Ladybug);\n\n //...🐞...\n\n // ..🐞.🐞..\n\n //...🐞🐞..\n\n // ...🐞..\n\n let mut moves = Vec::new();\n\n let start = board.id((2, 3));\n", "file_path": "src/board.rs", "rank": 64, "score": 23210.472917670875 }, { "content": 
" if *num_left > 0 {\n\n moves.push(Move::Place(self.loc(id), *bug));\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Linear algorithm to find all cut vertexes.\n\n // Algorithm explanation: https://web.archive.org/web/20180830110222/https://www.eecs.wsu.edu/~holder/courses/CptS223/spr08/slides/graphapps.pdf\n\n // Example code: https://cp-algorithms.com/graph/cutpoints.html\n\n //\n\n // TODO: cache movability for each tile, and somehow iteratively update it\n\n // Need to persist the DFS tree from an arbitrary root.\n\n // Adding a tile just adds a leaf to one of its neighbors\n\n // Removing a tile means recomputing a path to the root for any children of the removed node.\n\n // Hmm, maybe not. DFS iteration order is important.\n\n pub(crate) fn find_cut_vertexes(&self) -> NodeSet {\n\n struct State<'a> {\n\n board: &'a Board,\n", "file_path": "src/board.rs", "rank": 65, "score": 23210.444681816476 }, { "content": " }\n\n\n\n #[test]\n\n fn test_generate_walk_all() {\n\n let mut board = Board::default();\n\n //...🐜....\n\n // ...🐜...\n\n //...🐜.🐜.\n\n // ...🐜🐜\n\n board.fill_board(&[(-1, -1), (0, 0), (0, 1), (2, 1), (1, 2), (2, 2)], Bug::Ant);\n\n let mut moves = Vec::new();\n\n let start = board.id((-1, -1));\n\n board.generate_walk_all(start, &mut moves);\n\n board.assert_movements(\n\n &moves,\n\n (-1, -1),\n\n &[\n\n (0, -1),\n\n (-1, 0),\n\n (1, 0),\n", "file_path": "src/board.rs", "rank": 66, "score": 23210.08843891602 }, { "content": "\n\n fn fill_board(&mut self, locs: &[Loc], bug: Bug) {\n\n for &loc in locs {\n\n let id = self.alloc(loc);\n\n self.insert(id, bug, Color::Black);\n\n }\n\n }\n\n\n\n fn assert_placements(&self, moves: &[Move], expected: &[(Loc, Bug)]) {\n\n let mut actual_pairs = Vec::new();\n\n for &m in moves.iter() {\n\n if let Move::Place(actual_id, actual_bug) = m {\n\n actual_pairs.push((actual_id, actual_bug));\n\n }\n\n }\n\n actual_pairs.sort();\n\n let mut expected_pairs = Vec::new();\n\n expected_pairs.extend(expected);\n\n expected_pairs.sort();\n\n assert_eq!(actual_pairs, expected_pairs);\n", "file_path": "src/board.rs", "rank": 67, "score": 23209.653362921516 }, { "content": " board.alloc((-1, -1)),\n\n board.alloc((0, -1)),\n\n board.alloc((1, 1)),\n\n board.alloc((0, 1))\n\n ],\n\n board.slidable_adjacent(x, x).collect::<Vec<Id>>()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_generate_jumps() {\n\n let mut board = Board::default();\n\n //...🦗🦗🦗.\n\n // ..🦗...\n\n //......\n\n // .🦗..\n\n board.fill_board(&[(0, 0), (0, 1), (0, 3), (1, 0), (2, 0)], Bug::Grasshopper);\n\n let mut moves = Vec::new();\n\n board.generate_jumps(ORIGIN, &mut moves);\n\n board.assert_movements(&moves, (0, 0), &[(0, 2), (3, 0)]);\n", "file_path": "src/board.rs", "rank": 68, "score": 23209.546376602902 }, { "content": " board.assert_movements(&moves[..2], (1, 2), &[(1, 0), (2, 1)]);\n\n board.assert_movements(&moves[2..], (0, 1), &[(1, 0), (2, 1)]);\n\n\n\n // Create a level-2 gate to prevent one piece from being thrown.\n\n board.remove_loc((0, 0));\n\n board.insert_loc((0, 1), Bug::Pillbug, Color::Black);\n\n moves.clear();\n\n board.generate_throws(&immovable, start, &mut moves);\n\n assert_eq!(2, moves.len());\n\n board.assert_movements(&moves, (0, 0), &[(1, 0), (2, 1)]);\n\n\n\n // Create a level-2 gate to prevent one destination to being thrown to.\n\n board.insert_loc((1, 0), Bug::Pillbug, Color::Black);\n\n board.insert_loc((1, 0), Bug::Pillbug, Color::Black);\n\n board.remove_loc((0, 1));\n\n board.remove_loc((0, 1));\n\n board.remove_loc((1, 2));\n\n moves = Vec::new();\n\n 
board.generate_throws(&immovable, start, &mut moves);\n\n assert_eq!(2, moves.len());\n", "file_path": "src/board.rs", "rank": 69, "score": 23209.440132619322 }, { "content": " pub fn from_char(c: char) -> Option<Bug> {\n\n match c.to_ascii_lowercase() {\n\n 'q' => Some(Bug::Queen),\n\n 'g' => Some(Bug::Grasshopper),\n\n 's' => Some(Bug::Spider),\n\n 'a' => Some(Bug::Ant),\n\n 'b' => Some(Bug::Beetle),\n\n 'm' => Some(Bug::Mosquito),\n\n 'l' => Some(Bug::Ladybug),\n\n 'p' => Some(Bug::Pillbug),\n\n _ => None,\n\n }\n\n }\n\n\n\n // Whether this bug can only move (itself) by crawling.\n\n pub(crate) fn crawler(&self) -> bool {\n\n matches!(*self, Bug::Ant | Bug::Queen | Bug::Spider | Bug::Pillbug)\n\n }\n\n}\n\n\n", "file_path": "src/board.rs", "rank": 70, "score": 23209.039736178504 }, { "content": " if board.get_remaining()[Bug::Queen as usize] == 0 {\n\n // For movable pieces, generate all legal moves.\n\n board.generate_movements(moves);\n\n }\n\n\n\n // Find placeable positions.\n\n board.generate_placements(moves);\n\n }\n\n\n\n if moves.is_empty() {\n\n moves.push(Move::Pass);\n\n }\n\n }\n\n\n\n fn get_winner(board: &Board) -> Option<minimax::Winner> {\n\n let queens_surrounded = board.queens_surrounded();\n\n let n = board.move_num as usize;\n\n if n > 10 {\n\n // Check for position repeat stalemate.\n\n // More than 32 moves ago, we're not going to bother looking.\n", "file_path": "src/board.rs", "rank": 71, "score": 23209.00249011075 }, { "content": " fn generate_ladybug(&self, id: Id, moves: &mut Vec<Move>) {\n\n let mut buf1 = [UNASSIGNED; 6];\n\n let mut buf2 = [UNASSIGNED; 6];\n\n let mut step2 = NodeSet::new();\n\n for s1 in self.slidable_adjacent_beetle(&mut buf1, id, id) {\n\n if self.get(s1).is_some() {\n\n for s2 in self.slidable_adjacent_beetle(&mut buf2, id, s1) {\n\n if self.get(s2).is_some() {\n\n step2.set(s2);\n\n }\n\n }\n\n }\n\n }\n\n\n\n let mut step3 = NodeSet::new();\n\n for s2 in 0..self.nodes.len() as Id {\n\n if step2.get(s2) && s2 != id {\n\n for s3 in self.slidable_adjacent_beetle(&mut buf1, id, s2) {\n\n if self.get(s3).is_none() {\n\n step3.set(s3);\n", "file_path": "src/board.rs", "rank": 72, "score": 23208.971003758365 }, { "content": " crate::Move::Movement(x1, x2).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n crate::Move::Movement(y1, y2).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n crate::Move::Movement(x2, x1).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n // Recreate position for the second time.\n\n crate::Move::Movement(y2, y1).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n crate::Move::Movement(x1, x2).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n crate::Move::Movement(y1, y2).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n crate::Move::Movement(x2, x1).apply(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n // Recreate position for the third time.\n\n crate::Move::Movement(y2, y1).apply(&mut board);\n\n assert_eq!(Some(minimax::Winner::Draw), Rules::get_winner(&board));\n\n // Undo reverts zobrist and history.\n\n crate::Move::Movement(y2, y1).undo(&mut board);\n\n assert_eq!(None, Rules::get_winner(&board));\n\n }\n\n}\n", "file_path": "src/board.rs", "rank": 73, "score": 23208.749903867134 }, { "content": " moves.push(Move::Movement(self.loc(id), self.loc(adj)));\n\n }\n\n }\n\n\n\n fn generate_walk3(&self, orig: Id, moves: &mut Vec<Move>) {\n\n fn dfs(id: Id, orig: Id, 
board: &Board, path: &mut Vec<Id>, moves: &mut Vec<Move>) {\n\n if path.contains(&id) {\n\n return;\n\n }\n\n if path.len() == 3 {\n\n moves.push(Move::Movement(board.loc(orig), board.loc(id)));\n\n return;\n\n }\n\n path.push(id);\n\n for adj in board.slidable_adjacent(orig, id) {\n\n dfs(adj, orig, board, path, moves);\n\n }\n\n path.pop();\n\n }\n\n let mut path = Vec::with_capacity(3);\n", "file_path": "src/board.rs", "rank": 74, "score": 23208.65696435123 }, { "content": " // Line 3\n\n assert!(!is_cut_loc((1, 2)));\n\n assert!(!is_cut_loc((2, 2)));\n\n }\n\n\n\n #[test]\n\n fn test_slidable() {\n\n let mut board = Board::default();\n\n let x = board.alloc((0, 0));\n\n // One neighbor.\n\n board.insert_loc((0, 0), Bug::Queen, Color::Black);\n\n board.insert_loc((1, 0), Bug::Queen, Color::Black);\n\n assert_eq!(\n\n vec![board.alloc((0, -1)), board.alloc((1, 1))],\n\n board.slidable_adjacent(x, x).collect::<Vec<Id>>()\n\n );\n\n // Two adjacent neighbors.\n\n board.insert_loc((1, 1), Bug::Queen, Color::Black);\n\n assert_eq!(\n\n vec![board.alloc((0, -1)), board.alloc((0, 1))],\n", "file_path": "src/board.rs", "rank": 75, "score": 23208.6138550563 }, { "content": " board.slidable_adjacent(x, x).collect::<Vec<Id>>()\n\n );\n\n // Four adjacent neighbors.\n\n board.insert_loc((0, 1), Bug::Queen, Color::Black);\n\n board.insert_loc((-1, 0), Bug::Queen, Color::Black);\n\n assert_eq!(\n\n vec![board.alloc((-1, -1)), board.alloc((0, -1))],\n\n board.slidable_adjacent(x, x).collect::<Vec<Id>>()\n\n );\n\n // Five adjacent neighbors.\n\n board.insert_loc((-1, -1), Bug::Queen, Color::Black);\n\n assert_eq!(Vec::<Id>::new(), board.slidable_adjacent(x, x).collect::<Vec<Id>>());\n\n // 2 separated groups of neighbors.\n\n board.remove_loc((0, 1));\n\n assert_eq!(Vec::<Id>::new(), board.slidable_adjacent(x, x).collect::<Vec<Id>>());\n\n // 2 opposite single neighbors\n\n board.remove_loc((1, 1));\n\n board.remove_loc((-1, -1));\n\n assert_eq!(\n\n vec![\n", "file_path": "src/board.rs", "rank": 76, "score": 23208.258854859698 }, { "content": " dfs(orig, orig, self, &mut path, moves);\n\n }\n\n\n\n fn generate_walk_all(&self, orig: Id, moves: &mut Vec<Move>) {\n\n let mut visited = NodeSet::new();\n\n let mut queue = vec![orig];\n\n while let Some(node) = queue.pop() {\n\n if visited.get(node) {\n\n continue;\n\n }\n\n visited.set(node);\n\n if node != orig {\n\n moves.push(Move::Movement(self.loc(orig), self.loc(node)));\n\n }\n\n for adj in self.slidable_adjacent(orig, node) {\n\n queue.push(adj);\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/board.rs", "rank": 77, "score": 23207.968557267974 }, { "content": " }\n\n }\n\n\n\n // Jumping over contiguous linear lines of tiles.\n\n fn generate_jumps(&self, id: Id, moves: &mut Vec<Move>) {\n\n for dir in 0..6 {\n\n let mut jump = id;\n\n let mut dist = 0;\n\n while self.get(jump).is_some() {\n\n jump = self.adjacent(jump)[dir];\n\n dist += 1;\n\n }\n\n if dist > 1 {\n\n moves.push(Move::Movement(self.loc(id), self.loc(jump)));\n\n }\n\n }\n\n }\n\n\n\n fn generate_walk1(&self, id: Id, moves: &mut Vec<Move>) {\n\n for adj in self.slidable_adjacent(id, id) {\n", "file_path": "src/board.rs", "rank": 78, "score": 23207.956759903733 }, { "content": " (2, 0),\n\n (-1, 1),\n\n (3, 1),\n\n (0, 2),\n\n (3, 2),\n\n (1, 3),\n\n (2, 3),\n\n (3, 3),\n\n ],\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_generate_mosquito() {\n\n let mut board = Board::default();\n\n board.fill_board(&[(0, 0), (1, 1)], Bug::Mosquito);\n\n let mut moves = Vec::new();\n\n 
board.generate_mosquito(ORIGIN, &mut moves);\n\n // Mosquito on mosquito can't move at all.\n\n board.assert_movements(&moves, (0, 0), &[]);\n", "file_path": "src/board.rs", "rank": 79, "score": 23207.74560092733 }, { "content": " match *self {\n\n Bug::Queen => '\\u{1f41d}', // HONEYBEE\n\n Bug::Grasshopper => '\\u{1f997}', // CRICKET\n\n Bug::Spider => '\\u{1f577}', // SPIDER\n\n Bug::Ant => '\\u{1f41c}', // ANT\n\n Bug::Beetle => '\\u{1fab2}', // BEETLE\n\n Bug::Mosquito => '\\u{1f99f}', // MOSQUITO\n\n Bug::Ladybug => '\\u{1f41e}', // LADY BEETLE\n\n Bug::Pillbug => '\\u{1f48a}', // PILL, either that or MICROBE\n\n }\n\n }\n\n\n\n pub fn name(&self) -> &'static str {\n\n match *self {\n\n Bug::Queen => \"queen\",\n\n Bug::Grasshopper => \"grasshopper\",\n\n Bug::Spider => \"spider\",\n\n Bug::Ant => \"ant\",\n\n Bug::Beetle => \"beetle\",\n\n Bug::Mosquito => \"mosquito\",\n", "file_path": "src/board.rs", "rank": 80, "score": 23207.65672952101 }, { "content": "\n\n //..🦟🦗.\n\n // .🐜🪲.\n\n board.insert_loc((0, 1), Bug::Ant, Color::Black);\n\n board.insert_loc((1, 1), Bug::Beetle, Color::Black);\n\n board.insert_loc((1, 0), Bug::Grasshopper, Color::Black);\n\n moves.clear();\n\n // Dedup happens in generate_movements.\n\n board.move_num += 1;\n\n board.generate_movements(&mut moves);\n\n board.assert_movements(\n\n &moves,\n\n (0, 0),\n\n &[\n\n (-1, 0),\n\n (-1, 1),\n\n (0, -1),\n\n (0, 1),\n\n (0, 2),\n\n (1, -1),\n", "file_path": "src/board.rs", "rank": 81, "score": 23207.486102684765 }, { "content": " if board.move_num < 2 {\n\n // Special case for the first 2 moves:\n\n for (bug, num_left) in board.get_available_bugs().iter() {\n\n if *bug == Bug::Queen {\n\n // To reduce draws, implement tournament rule where\n\n // you can't place your queen first.\n\n continue;\n\n }\n\n if *num_left > 0 {\n\n if board.move_num == 0 {\n\n moves.push(Move::Place((board.move_num as i8, 0), *bug));\n\n } else {\n\n for &loc in adjacent((0, 0)).iter() {\n\n moves.push(Move::Place(loc, *bug));\n\n }\n\n }\n\n }\n\n }\n\n } else {\n\n // Once queen has been placed, pieces may move.\n", "file_path": "src/board.rs", "rank": 82, "score": 23207.461421653574 }, { "content": " // ...🐝.🐝🐝\n\n //....🐝🐝\n\n board.fill_board(\n\n &[(0, 0), (0, 1), (1, 0), (2, 1), (1, 2), (2, 2), (-1, 0), (-2, 0), (3, 1)],\n\n Bug::Queen,\n\n );\n\n let cuts = board.find_cut_vertexes();\n\n let is_cut_loc = |loc: Loc| {\n\n let id = board.id(loc);\n\n cuts.get(id)\n\n };\n\n // Line 1\n\n assert!(is_cut_loc((-1, 0)));\n\n assert!(!is_cut_loc((-2, 0)));\n\n assert!(!is_cut_loc((0, 0)));\n\n assert!(!is_cut_loc((1, 0)));\n\n // Line 2\n\n assert!(!is_cut_loc((0, 1)));\n\n assert!(is_cut_loc((2, 1)));\n\n assert!(!is_cut_loc((3, 1)));\n", "file_path": "src/board.rs", "rank": 83, "score": 23207.222794928904 }, { "content": " (1, 1),\n\n ],\n\n Bug::Beetle,\n\n );\n\n // Stack heights:\n\n // 2 3\n\n // 0 (2) 2\n\n // 2 3\n\n // Can't move left (down) or right (up) because of blocking stacks.\n\n // Can move onto all 4 blocking stacks.\n\n let mut moves = Vec::new();\n\n board.generate_stack_walking(ORIGIN, &mut moves);\n\n board.assert_movements(&moves, (0, 0), &[(-1, -1), (0, -1), (0, 1), (1, 1)]);\n\n }\n\n\n\n #[test]\n\n fn test_generate_walk3() {\n\n let mut board = Board::default();\n\n //...🕷.....\n\n // ...🕷.🕷..\n", "file_path": "src/board.rs", "rank": 84, "score": 23207.156765100248 }, { "content": " if let Some(tile) = self.get(id) {\n\n if tile.color == Color::White {\n\n // Invert terminal background color for white 
pieces.\n\n buf.set_color(\n\n termcolor::ColorSpec::new().set_bg(Some(termcolor::Color::White)),\n\n )?;\n\n }\n\n write!(buf, \"{}\", tile.bug.codepoint())?;\n\n if tile.color == Color::White {\n\n // Reset coloring.\n\n buf.reset()?;\n\n }\n\n } else {\n\n // Empty cell. Full width period.\n\n buf.write_all(\"\\u{ff0e}\".as_bytes())?;\n\n }\n\n }\n\n\n\n // Stagger rows the other way to make the space look rectangular.\n\n for _ in 0..(y - starty + 1) / 2 {\n", "file_path": "src/board.rs", "rank": 85, "score": 23206.323672358412 }, { "content": " occupied <<= 1;\n\n // Since the origin bug is moving, we can't crawl around it.\n\n if self.get(*neighbor).is_some() && *neighbor != origin {\n\n occupied |= 1;\n\n }\n\n }\n\n // Wrap around in each direction\n\n occupied |= occupied << 6 | occupied << 12;\n\n let slidable = (!occupied & (occupied << 1 ^ occupied >> 1)) >> 6;\n\n\n\n neighbors.iter().enumerate().filter_map(move |(i, &id)| {\n\n if (slidable >> i) & 1 != 0 {\n\n Some(id)\n\n } else {\n\n None\n\n }\n\n })\n\n }\n\n\n\n // Find all walkable tiles where either the source or the dest is on the hive.\n", "file_path": "src/board.rs", "rank": 86, "score": 23206.276043870785 }, { "content": " }\n\n\n\n #[test]\n\n fn test_generate_beetle() {\n\n let mut board = Board::default();\n\n board.fill_board(\n\n &[\n\n (0, 0),\n\n (0, 0),\n\n (-1, -1),\n\n (-1, -1),\n\n (0, 1),\n\n (0, 1),\n\n (0, -1),\n\n (0, -1),\n\n (0, -1),\n\n (1, 0),\n\n (1, 0),\n\n (1, 1),\n\n (1, 1),\n", "file_path": "src/board.rs", "rank": 87, "score": 23205.19232480755 }, { "content": " pub fn fancy_fmt(\n\n &self, buf: &mut termcolor::Buffer, highlights: &[Loc],\n\n ) -> std::io::Result<()> {\n\n let (startx, dx, starty, dy) = self.bounding_box();\n\n for y in starty - 1..starty + dy + 1 {\n\n // Print prefix to get staggered hex rows\n\n let buflen = dy + starty - y;\n\n if buflen % 2 == 1 {\n\n buf.write_all(b\" \")?;\n\n }\n\n for _ in 0..buflen / 2 {\n\n buf.write_all(\"\\u{ff0e}\".as_bytes())?;\n\n }\n\n\n\n for x in startx - 1..startx + dx + 1 {\n\n let id = *self.loc_to_id.get(&(x, y)).unwrap_or(&UNASSIGNED);\n\n if let Some(index) = highlights.iter().position(|&loc| loc == (x, y)) {\n\n write!(buf, \"{: >2}\", index)?;\n\n continue;\n\n }\n", "file_path": "src/board.rs", "rank": 88, "score": 23205.178099382527 }, { "content": " // 4 is the most recently this move could have occurred.\n\n let start = if n < 32 { !n & 1 } else { n - 32 };\n\n let recent_past = &board.zobrist_history[start..n - 4];\n\n let position_repeat_count =\n\n recent_past.iter().step_by(2).filter(|&&hash| hash == board.zobrist_hash).count();\n\n if position_repeat_count == 2 {\n\n // Draw by stalemate.\n\n return Some(minimax::Winner::Draw);\n\n }\n\n }\n\n\n\n if queens_surrounded == [6, 6] {\n\n // Draw by simultaneous queen surrounding.\n\n Some(minimax::Winner::Draw)\n\n } else if queens_surrounded[board.to_move() as usize] == 6 {\n\n Some(minimax::Winner::PlayerJustMoved)\n\n } else if queens_surrounded[1 - board.to_move() as usize] == 6 {\n\n Some(minimax::Winner::PlayerToMove)\n\n } else {\n\n None\n", "file_path": "src/board.rs", "rank": 89, "score": 23204.68590319473 }, { "content": " buf.write_all(\"\\u{ff0e}\".as_bytes())?;\n\n }\n\n\n\n buf.write_all(b\"\\n\")?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn println(&self) {\n\n self.println_highlights(&[]);\n\n }\n\n\n\n pub(crate) fn println_highlights(&self, highlights: &[Loc]) {\n\n let writer = termcolor::BufferWriter::stdout(termcolor::ColorChoice::Auto);\n\n let mut 
buffer = writer.buffer();\n\n self.fancy_fmt(&mut buffer, highlights).unwrap();\n\n writer.print(&buffer).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/board.rs", "rank": 90, "score": 23204.63632934202 }, { "content": " 0 => {\n\n ends[num_ends] = adj;\n\n num_ends += 1;\n\n }\n\n 1 => {\n\n if !immovable.get(adj) {\n\n starts[num_starts] = adj;\n\n num_starts += 1;\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n for &start in starts[..num_starts].iter() {\n\n for &end in ends[..num_ends].iter() {\n\n moves.push(Move::Movement(self.loc(start), self.loc(end)));\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/board.rs", "rank": 91, "score": 23201.851121545715 }, { "content": " }\n\n\n\n fn assert_movements(&self, moves: &[Move], start: Loc, ends: &[Loc]) {\n\n let mut actual_ends = Vec::new();\n\n for &m in moves.iter() {\n\n if let Move::Movement(actual_start, actual_end) = m {\n\n if actual_start == start {\n\n actual_ends.push(actual_end);\n\n }\n\n }\n\n }\n\n actual_ends.sort();\n\n let mut expected_ends = Vec::new();\n\n expected_ends.extend(ends);\n\n expected_ends.sort();\n\n assert_eq!(actual_ends, expected_ends);\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/board.rs", "rank": 92, "score": 23201.65705258602 }, { "content": " GameNotStarted,\n\n UnrecognizedCommand(String),\n\n TooManyUndos,\n\n}\n\n\n\nimpl From<std::io::Error> for UhpError {\n\n fn from(error: std::io::Error) -> Self {\n\n UhpError::IoError(error)\n\n }\n\n}\n\n\n\npub(crate) type Result<T> = std::result::Result<T, UhpError>;\n\n\n\nimpl UhpBoard {\n\n pub(crate) fn new(game_type: &str) -> Self {\n\n // Generate names of all pieces.\n\n let mut name_to_id = HashMap::new();\n\n let board = Board::new_from_game_type(game_type).unwrap();\n\n for &color in &[Color::Black, Color::White] {\n\n for (bug, num_bugs) in board.get_available_bugs().iter() {\n", "file_path": "src/uhp_util.rs", "rank": 98, "score": 18.654954770926224 }, { "content": " crate::Move::Place(self.board.loc(end), bug)\n\n } else {\n\n crate::Move::Movement(self.board.loc(start), self.board.loc(end))\n\n })\n\n }\n\n\n\n fn next_bug_num(&self, bug: Bug) -> u8 {\n\n [1, 3, 2, 3, 2, 1, 1, 1][bug as usize] - self.board.get_remaining()[bug as usize] + 1\n\n }\n\n\n\n pub(crate) fn to_move_string(&self, m: crate::Move) -> String {\n\n let mut move_string = match m {\n\n crate::Move::Place(_, bug) => {\n\n bug_name(self.board.to_move(), bug, self.next_bug_num(bug))\n\n }\n\n crate::Move::Movement(start, _) => {\n\n let id = self.board.id(start);\n\n self.id_to_name_stack.get(&id).unwrap().last().unwrap().clone()\n\n }\n\n crate::Move::Pass => return \"pass\".to_owned(),\n", "file_path": "src/uhp_util.rs", "rank": 99, "score": 18.43264112575911 } ]
Rust
src/canary_update.rs
The-Emperor10/oofbot
e20a399eafbe9a6c7449680108690013a7a2c1ac
use crate::logger::get_guild_members;
use crate::{permissions::SqlHandler, LogResult};
use serenity::framework::standard::macros::check;
use serenity::{
    framework::standard::{
        macros::{command, group},
        *,
    },
    model::prelude::*,
    prelude::*,
    utils::MessageBuilder,
};
use sqlite::*;
use std::{
    ops::Deref,
    sync::Arc,
    time::{Duration, Instant},
};
use tokio::sync::Mutex;

pub fn do_framework(framework: &mut StandardFramework) {
    framework.group_add(&CANARYUPDATECOMMANDS_GROUP);
}

#[check]
#[name = "Manage"]
#[check_in_help(true)]
#[display_in_help(true)]
async fn manage_check(ctx: &Context, msg: &Message) -> CheckResult {
    if msg.author.id == 453344368913547265 {
        return true.into();
    } else if let Ok(member) = msg.member(&ctx).await {
        if let Ok(permissions) = member.permissions(&ctx.cache).await {
            return (permissions.administrator() || permissions.manage_guild()).into();
        }
    }
    false.into()
}

#[group]
#[commands(setupdatechannel, getupdatechannel, unsetupdatechannel)]
#[description = "Commands related to canary updates"]
struct CanaryUpdateCommands;

#[command]
#[checks(Manage)]
#[description = "Sets the channel for updates"]
#[only_in(guilds)]
async fn setupdatechannel(ctx: &Context, msg: &Message) -> CommandResult {
    if let Some(guild_id) = msg.guild_id {
        let clock = ctx.data.read().await;
        let canary = clock.get::<CanaryUpdateHandler>().unwrap();
        let lock = canary.lock().await;
        let res = lock.set_update_channel(&guild_id, &msg.channel_id).await;
        if res.is_ok() {
            msg.channel_id
                .say(
                    &ctx,
                    "Successfully set this channel to the canary update notification channel",
                )
                .await
                .log_err();
        } else {
            msg.channel_id.say(&ctx, "Sql bad").await.log_err();
            res.log_err();
        }
    } else {
        msg.channel_id
            .say(&ctx, "Well how tf did this happen")
            .await
            .log_err();
    }
    Ok(())
}

#[command]
#[description = "Gets the channel for updates"]
#[only_in(guilds)]
async fn getupdatechannel(ctx: &Context, msg: &Message) -> CommandResult {
    if let Some(guild_id) = msg.guild_id {
        let clock = ctx.data.read().await;
        let canary = clock.get::<CanaryUpdateHandler>().unwrap();
        let lock = canary.lock().await;
        let res = lock.get_update_channel(&guild_id).await;
        if let Some(id) = res {
            msg.channel_id
                .say(&ctx, MessageBuilder::new().channel(id))
                .await
                .log_err();
        } else {
            msg.channel_id.say(&ctx, "None").await.log_err();
        }
    } else {
        msg.channel_id
            .say(&ctx, "Well how tf did this happen")
            .await
            .log_err();
    }
    Ok(())
}

#[command]
#[checks(Manage)]
#[description = "Unsets the channel for updates"]
#[only_in(guilds)]
async fn unsetupdatechannel(ctx: &Context, msg: &Message) -> CommandResult {
    if let Some(guild_id) = msg.guild_id {
        let clock = ctx.data.read().await;
        let canary = clock.get::<CanaryUpdateHandler>().unwrap();
        let lock = canary.lock().await;
        let res = lock.unset_update_channel(&guild_id).await;
        if res.is_ok() {
            msg.channel_id
                .say(&ctx, "Unset canary update channel")
                .await
                .log_err();
        } else {
            msg.channel_id.say(&ctx, "Sql bad").await.log_err();
            res.log_err();
        }
    } else {
        msg.channel_id
            .say(&ctx, "Well how tf did this happen")
            .await
            .log_err();
    }
    Ok(())
}

impl TypeMapKey for CanaryUpdateHandler {
    type Value = Arc<Mutex<CanaryUpdateHandler>>;
}

pub struct CanaryUpdateHandler {
    possible_canary_updates: Arc<Mutex<Vec<CanaryUpdate>>>,
    sql_handler: Arc<SqlHandler>,
}

impl CanaryUpdateHandler {
    pub fn new(sql_handler: Arc<SqlHandler>) -> Self {
        let possible_canary_updates: Arc<Mutex<Vec<CanaryUpdate>>> = Default::default();
        Self {
            possible_canary_updates,
            sql_handler,
        }
    }

    pub async fn spawn_thread(&mut self) {
        let pcu = self.possible_canary_updates.clone();
        tokio::spawn(async move {
            loop {
                let mut lock = pcu.lock().await;
                let data: &mut Vec<CanaryUpdate> = &mut *lock;
                let mut drops: Vec<usize> = Vec::<usize>::with_capacity(data.len() / 2);
                for (i, update) in data.iter().enumerate() {
                    let t: Instant = update.time;
                    if t.elapsed() > Duration::from_secs(20) {
                        drops.push(i);
                    }
                }
                for i in drops {
                    data.remove(i);
                }
                drop(lock);
                std::thread::sleep(Duration::from_secs(2));
            }
        });
    }

    pub async fn add_canary_update(&mut self, user_id: &UserId) {
        if self.contains(user_id).await {
            return;
        }
        let mut lock = self.possible_canary_updates.lock().await;
        let data: &mut Vec<CanaryUpdate> = &mut *lock;
        data.push(CanaryUpdate {
            user_id: *user_id,
            time: Instant::now(),
        });
    }

    pub async fn remove_canary_update(&mut self, user_id: &UserId) -> bool {
        let mut lock = self.possible_canary_updates.lock().await;
        let data: &mut Vec<CanaryUpdate> = &mut *lock;
        for i in 0..data.len() {
            if data[i].user_id == *user_id {
                data.remove(i);
                return true;
            }
        }
        false
    }

    pub async fn contains(&self, user_id: &UserId) -> bool {
        let lock = self.possible_canary_updates.lock().await;
        let data: &Vec<CanaryUpdate> = &*lock;
        for update in data {
            if update.user_id == *user_id {
                return true;
            }
        }
        false
    }

    pub async fn set_update_channel(
        &self,
        guild_id: &GuildId,
        channel_id: &ChannelId,
    ) -> Result<()> {
        self.sql_handler
            .sql_connection
            .lock()
            .await
            .execute(format!(
                "REPLACE INTO canary VALUES ({}, {})",
                guild_id, channel_id
            ))?;
        Ok(())
    }

    pub async fn get_update_channel(&self, guild_id: &GuildId) -> Option<ChannelId> {
        let sql = self.sql_handler.sql_connection.lock().await;
        let mut cursor = sql
            .prepare(format!(
                "SELECT channel_id FROM canary WHERE guild_id = {}",
                guild_id
            ))
            .unwrap()
            .cursor();
        if let Some(row) = cursor.next().unwrap() {
            return Some(ChannelId(u64::from_ne_bytes(
                row[0].as_integer().unwrap().to_ne_bytes(),
            )));
        }
        None
    }

    pub async fn unset_update_channel(&self, guild_id: &GuildId) -> CommandResult {
        self.sql_handler
            .sql_connection
            .lock()
            .await
            .execute(format!("DELETE FROM canary WHERE guild_id = {}", guild_id))?;
        Ok(())
    }
}

pub async fn do_update(ctx: &Context, data: &PresenceUpdateEvent) {
    let lk = ctx.data.read().await;
    let canary = lk.get::<CanaryUpdateHandler>().unwrap();
    let id = data.guild_id.unwrap();
    let mut lock = canary.lock().await;
    if lock.remove_canary_update(&data.presence.user_id).await {
        for guild in ctx.cache.guilds().await {
            let members;
            if let Some(guild) = id.to_guild_cached(&ctx).await {
                members = guild.members(&ctx, None, None).await.unwrap();
            } else if let Ok(guild) = id.to_partial_guild(&ctx).await {
                members = guild.members(&ctx, None, None).await.unwrap();
            } else {
                log_timestamp!("ERROR", format!("Failed to find guild {}", id));
                return;
            }
            let members = match get_guild_members(&ctx, id).await {
                Some(m) => m,
                None => {
                    log_timestamp!("ERROR", format!("Failed to find guild {}", id));
                    return;
                }
            };
            if members
                .iter()
                .find(|m| m.user.id == data.presence.user_id)
                .is_some()
            {
                if let Some(x) = lock.deref().get_update_channel(&id).await {
                    if data.presence.user_id == 453344368913547265 {
                        x.say(
                            &ctx,
                            MessageBuilder::new()
                                .push("Possible segmentation fault detected for ")
                                .user(data.presence.user_id),
                        )
                        .await
                        .log_err();
                    } else {
                        x.say(
                            &ctx,
                            MessageBuilder::new()
                                .push("Possible canary update detected for ")
                                .user(data.presence.user_id),
                        )
                        .await
                        .log_err();
                    }
                }
            }
        }
    }
}

pub struct CanaryUpdate {
    user_id: UserId,
    time: Instant,
}
use crate::logger::get_guild_members;
use crate::{permissions::SqlHandler, LogResult};
use serenity::framework::standard::macros::check;
use serenity::{
    framework::standard::{
        macros::{command, group},
        *,
    },
    model::prelude::*,
    prelude::*,
    utils::MessageBuilder,
};
use sqlite::*;
use std::{
    ops::Deref,
    sync::Arc,
    time::{Duration, Instant},
};
use tokio::sync::Mutex;

pub fn do_framework(framework: &mut StandardFramework) {
    framework.group_add(&CANARYUPDATECOMMANDS_GROUP);
}

#[check]
#[name = "Manage"]
#[check_in_help(true)]
#[display_in_help(true)]
async fn manage_check(ctx: &Context, msg: &Message) -> CheckResult {
    if msg.author.id == 453344368913547265 {
        return true.into();
    } else if let Ok(member) = msg.member(&ctx).await {
        if let Ok(permissions) = member.permissions(&ctx.cache).await {
            return (permissions.administrator() || permissions.manage_guild()).into();
        }
    }
    false.into()
}

#[group]
#[commands(setupdatechannel, getupdatechannel, unsetupdatechannel)]
#[description = "Commands related to canary updates"]
struct CanaryUpdateCommands;

#[command]
#[checks(Manage)]
#[description = "Sets the channel for updates"]
#[only_in(guilds)]
async fn setupdatechannel(ctx: &Context, msg: &Message) -> CommandResult {
    if let Some(guild_id) = msg.guild_id {
        let clock = ctx.data.read().await;
        let canary = clock.get::<CanaryUpdateHandler>().unwrap();
        let lock = canary.lock().await;
        let res = lock.set_update_channel(&guild_id, &msg.channel_id).await;
        if res.is_ok() {
            msg.channel_id
                .say(
                    &ctx,
                    "Successfully set this channel to the canary update notification channel",
                )
                .await
                .log_err();
        } else {
            msg.channel_id.say(&ctx, "Sql bad").await.log_err();
            res.log_err();
        }
    } else {
        msg.channel_id
            .say(&ctx, "Well how tf did this happen")
            .await
            .log_err();
    }
    Ok(())
}

#[command]
#[description = "Gets the channel for updates"]
#[only_in(guilds)]
#[command]
#[checks(Manage)]
#[description = "Unsets the channel for updates"]
#[only_in(guilds)]
async fn unsetupdatechannel(ctx: &Context, msg: &Message) -> CommandResult {
    if let Some(guild_id) = msg.guild_id {
        let clock = ctx.data.read().await;
        let canary = clock.get::<CanaryUpdateHandler>().unwrap();
        let lock = canary.lock().await;
        let res = lock.unset_update_channel(&guild_id).await;
        if res.is_ok() {
            msg.channel_id
                .say(&ctx, "Unset canary update channel")
                .await
                .log_err();
        } else {
            msg.channel_id.say(&ctx, "Sql bad").await.log_err();
            res.log_err();
        }
    } else {
        msg.channel_id
            .say(&ctx, "Well how tf did this happen")
            .await
            .log_err();
    }
    Ok(())
}

impl TypeMapKey for CanaryUpdateHandler {
    type Value = Arc<Mutex<CanaryUpdateHandler>>;
}

pub struct CanaryUpdateHandler {
    possible_canary_updates: Arc<Mutex<Vec<CanaryUpdate>>>,
    sql_handler: Arc<SqlHandler>,
}

impl CanaryUpdateHandler {
    pub fn new(sql_handler: Arc<SqlHandler>) -> Self {
        let possible_canary_updates: Arc<Mutex<Vec<CanaryUpdate>>> = Default::default();
        Self {
            possible_canary_updates,
            sql_handler,
        }
    }

    pub async fn spawn_thread(&mut self) {
        let pcu = self.possible_canary_updates.clone();
        tokio::spawn(async move {
            loop {
                let mut lock = pcu.lock().await;
                let data: &mut Vec<CanaryUpdate> = &mut *lock;
                let mut drops: Vec<usize> = Vec::<usize>::with_capacity(data.len() / 2);
                for (i, update) in data.iter().enumerate() {
                    let t: Instant = update.time;
                    if t.elapsed() > Duration::from_secs(20) {
                        drops.push(i);
                    }
                }
                for i in drops {
                    data.remove(i);
                }
                drop(lock);
                std::thread::sleep(Duration::from_secs(2));
            }
        });
    }

    pub async fn add_canary_update(&mut self, user_id: &UserId) {
        if self.contains(user_id).await {
            return;
        }
        let mut lock = self.possible_canary_updates.lock().await;
        let data: &mut Vec<CanaryUpdate> = &mut *lock;
        data.push(CanaryUpdate {
            user_id: *user_id,
            time: Instant::now(),
        });
    }

    pub async fn remove_canary_update(&mut self, user_id: &UserId) -> bool {
        let mut lock = self.possible_canary_updates.lock().await;
        let data: &mut Vec<CanaryUpdate> = &mut *lock;
        for i in 0..data.len() {
            if data[i].user_id == *user_id {
                data.remove(i);
                return true;
            }
        }
        false
    }

    pub async fn contains(&self, user_id: &UserId) -> bool {
        let lock = self.possible_canary_updates.lock().await;
        let data: &Vec<CanaryUpdate> = &*lock;
        for update in data {
            if update.user_id == *user_id {
                return true;
            }
        }
        false
    }

    pub async fn set_update_channel(
        &self,
        guild_id: &GuildId,
        channel_id: &ChannelId,
    ) -> Result<()> {
        self.sql_handler
            .sql_connection
            .lock()
            .await
            .execute(format!(
                "REPLACE INTO canary VALUES ({}, {})",
                guild_id, channel_id
            ))?;
        Ok(())
    }

    pub async fn get_update_channel(&self, guild_id: &GuildId) -> Option<ChannelId> {
        let sql = self.sql_handler.sql_connection.lock().await;
        let mut cursor = sql
            .prepare(format!(
                "SELECT channel_id FROM canary WHERE guild_id = {}",
                guild_id
            ))
            .unwrap()
            .cursor();
        if let Some(row) = cursor.next().unwrap() {
            return Some(ChannelId(u64::from_ne_bytes(
                row[0].as_integer().unwrap().to_ne_bytes(),
            )));
        }
        None
    }

    pub async fn unset_update_channel(&self, guild_id: &GuildId) -> CommandResult {
        self.sql_handler
            .sql_connection
            .lock()
            .await
            .execute(format!("DELETE FROM canary WHERE guild_id = {}", guild_id))?;
        Ok(())
    }
}

pub async fn do_update(ctx: &Context, data: &PresenceUpdateEvent) {
    let lk = ctx.data.read().await;
    let canary = lk.get::<CanaryUpdateHandler>().unwrap();
    let id = data.guild_id.unwrap();
    let mut lock = canary.lock().await;
    if lock.remove_canary_update(&data.presence.user_id).await {
        for guild in ctx.cache.guilds().await {
            let members;
            if let Some(guild) = id.to_guild_cached(&ctx).await {
                members = guild.members(&ctx, None, None).await.unwrap();
            } else if let Ok(guild) = id.to_partial_guild(&ctx).await {
                members = guild.members(&ctx, None, None).await.unwrap();
            } else {
                log_timestamp!("ERROR", format!("Failed to find guild {}", id));
                return;
            }
            let members = match get_guild_members(&ctx, id).await {
                Some(m) => m,
                None => {
                    log_timestamp!("ERROR", format!("Failed to find guild {}", id));
                    return;
                }
            };
            if members
                .iter()
                .find(|m| m.user.id == data.presence.user_id)
                .is_some()
            {
                if let Some(x) = lock.deref().get_update_channel(&id).await {
                    if data.presence.user_id == 453344368913547265 {
                        x.say(
                            &ctx,
                            MessageBuilder::new()
                                .push("Possible segmentation fault detected for ")
                                .user(data.presence.user_id),
                        )
                        .await
                        .log_err();
                    } else {
                        x.say(
                            &ctx,
                            MessageBuilder::new()
                                .push("Possible canary update detected for ")
                                .user(data.presence.user_id),
                        )
                        .await
                        .log_err();
                    }
                }
            }
        }
    }
}

pub struct CanaryUpdate {
    user_id: UserId,
    time: Instant,
}
async fn getupdatechannel(ctx: &Context, msg: &Message) -> CommandResult {
    if let Some(guild_id) = msg.guild_id {
        let clock = ctx.data.read().await;
        let canary = clock.get::<CanaryUpdateHandler>().unwrap();
        let lock = canary.lock().await;
        let res = lock.get_update_channel(&guild_id).await;
        if let Some(id) = res {
            msg.channel_id
                .say(&ctx, MessageBuilder::new().channel(id))
                .await
                .log_err();
        } else {
            msg.channel_id.say(&ctx, "None").await.log_err();
        }
    } else {
        msg.channel_id
            .say(&ctx, "Well how tf did this happen")
            .await
            .log_err();
    }
    Ok(())
}
function_block-full_function
[ { "content": "pub fn do_framework(_framework: &mut StandardFramework) {}\n\n\n\nimpl SqlHandler {\n\n\tpub fn new() -> Arc<Self> {\n\n\t\tlet sql_connection = Mutex::new(Connection::open(\"oofbot.db\").unwrap());\n\n\t\tArc::new(Self { sql_connection })\n\n\t}\n\n\t/// Creates the sqlite canary update table\n\n\tpub async fn create_canary_table(&self) -> CommandResult {\n\n\t\tself.sql_connection.lock().await.execute(\"CREATE TABLE canary (guild_id UNSIGNED BIG INT UNIQUE NOT NULL, channel_id UNSIGNED BIG INT UNIQUE NOT NULL)\")?;\n\n\t\tOk(())\n\n\t}\n\n\tpub async fn create_dogebot_table(&self) -> CommandResult {\n\n\t\tself.sql_connection.lock().await.execute(\"CREATE TABLE dogebot (guild_id UNSIGNED BIG INT UNIQUE NOT NULL, channel_id UNSIGNED BIG INT UNIQUE NOT NULL)\")?;\n\n\t\tOk(())\n\n\t}\n\n\tpub async fn create_permission_role_table(&self) -> CommandResult {\n\n\t\tself.sql_connection.lock().await.execute(\"CREATE TABLE permission_role (guild_id UNSIGNED BIG INT NOT NULL, role_id UNSIGNED BIG INT NOT NULL channel_id UNSIGNED BIG INT, permission_id UNSIGNED BIG INT NOT NULL, data BLOB)\")?;\n\n\t\tOk(())\n\n\t}\n", "file_path": "src/permissions.rs", "rank": 2, "score": 74449.10457490225 }, { "content": "pub fn do_framework(framework: &mut StandardFramework) {\n\n\tframework.group_add(&VOICE_GROUP);\n\n}\n\n\n\nimpl OofVoice {\n\n\tpub async fn new(voice_manager: Arc<SerMutex<ClientVoiceManager>>) -> Arc<RwLock<Self>> {\n\n\t\tlet s = Arc::new(RwLock::new(Self {\n\n\t\t\tvoice_manager,\n\n\t\t\tsources: Default::default(),\n\n\t\t\tqueue: Default::default(),\n\n\t\t}));\n\n\t\tlet oofvoice = s.clone();\n\n\n\n\t\t// Spaghetti warning\n\n\t\ttokio::spawn(async move {\n\n\t\t\tlet mut timer = tokio::time::interval(Duration::from_secs(5));\n\n\t\t\tloop {\n\n\t\t\t\ttimer.tick().await;\n\n\t\t\t\tlet oof = oofvoice.read().await;\n\n\t\t\t\tlet sources = oof.sources.read().await;\n", "file_path": "src/voice.rs", "rank": 3, "score": 74449.10457490225 }, { "content": "#[group]\n\n#[commands(listservers)]\n\nstruct ServerCommands;\n\n\n\npub struct ServerManager {\n\n\tsql_handler: Arc<SqlHandler>,\n\n}\n\n\n\nimpl ServerManager {\n\n\tpub fn new(framework: &mut StandardFramework, sql_handler: Arc<SqlHandler>) -> Arc<Self> {\n\n\t\tframework.group_add(&SERVERCOMMANDS_GROUP);\n\n\t\tArc::new(Self { sql_handler })\n\n\t}\n\n}\n\n\n\n#[command]\n\npub async fn listservers(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tOk(())\n\n}\n", "file_path": "src/servers.rs", "rank": 4, "score": 48961.2835299624 }, { "content": "/// Admin command group\n\n/// Get this, it has admin commands, amazing right?\n\nstruct Admin;\n\n\n\n#[command]\n\n#[checks(ManageMessages)]\n\n#[only_in(guilds)]\n\n/// Scan the last X messages and delete all that are from bots. Messages older than 2 weeks cannot be deleted with this command. 
Maximum of 500 messages\n\nasync fn snapbm(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n\tlet count = match args.single::<u64>() {\n\n\t\tOk(x) if x <= 500 => x,\n\n\t\t_ => {\n\n\t\t\tmsg.channel_id\n\n\t\t\t\t.say(&ctx, \"Usage: /snapbm <number>\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t\treturn Ok(());\n\n\t\t}\n\n\t};\n\n\tlet channel: GuildChannel = msg\n\n\t\t.channel_id\n\n\t\t.to_channel(&ctx)\n", "file_path": "src/main.rs", "rank": 19, "score": 30778.306166005015 }, { "content": "#[group]\n\n#[commands(test, executeorder66, getdvsstatus)]\n\nstruct General;\n\n/// A testing command that can only be run by me.\n\n#[command]\n\nasync fn test(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tif msg.author.id != 453344368913547265 {\n\n\t\tmsg.channel_id.say(&ctx, \"No\").await.log_err();\n\n\t\treturn Ok(());\n\n\t}\n\n\t//let canary = ctx.data.read().get::<CanaryUpdateHandler>().cloned().unwrap();\n\n\t//let lock = canary.lock()?;\n\n\t//let res = lock.create_db();\n\n\t//res.log_err();\n\n\t//if res.is_ok() { msg.channel_id.say(&ctx, \"It seems to have worked\").log_err();\n\n\t//}\n\n\t//else {\n\n\t//\tmsg.channel_id.say(&ctx, \"killme\").log_err();\n\n\t//}\n\n\tmsg.channel_id.say(&ctx, \"@admin\").await.log_err();\n\n\tOk(())\n\n}\n", "file_path": "src/main.rs", "rank": 20, "score": 30775.38174489228 }, { "content": "#[group]\n\n#[commands(join, leave, play, stop, queue, remove, list)]\n\nstruct Voice;\n\nuse serenity::prelude::Mutex as SerMutex;\n\n/// Yes all those RwLocks are necessary\n\npub struct OofVoice {\n\n\tpub voice_manager: Arc<SerMutex<ClientVoiceManager>>,\n\n\tpub sources: Arc<RwLock<HashMap<GuildId, (LockedAudio, OsString)>>>,\n\n\t// TODO: Replace with custom queue type\n\n\tpub queue: Arc<RwLock<HashMap<GuildId, RwLock<Vec<OsString>>>>>,\n\n}\n\n\n", "file_path": "src/voice.rs", "rank": 21, "score": 30775.03922677342 }, { "content": "/// Unwrapping many of the errors in oofbot, mostly api calls, will result in a panic sometimes.\n\n/// This is bad. But I also cant ignore the errors in case theres something bad in there. So my\n\n/// solution is this trait, which logs the error. If I look in the logs and see something bad, then\n\n/// I know to recheck everything\n\ntrait LogResult {\n\n\t/// If the result is an error, log the error.\n\n\tfn log_err(&self)\n\n\twhere\n\n\t\tSelf: std::fmt::Debug,\n\n\t{\n\n\t\tlog_timestamp!(\"DEBUG\", format!(\"{:?}\", self))\n\n\t}\n\n}\n\nimpl<T: std::fmt::Debug, E: std::fmt::Debug> LogResult for Result<T, E> {\n\n\t/// If the result is an error, log the error.\n\n\tfn log_err(&self) {\n\n\t\tif self.is_err() {\n\n\t\t\tlog_timestamp!(\"DEBUG\", format!(\"{:?}\", self));\n\n\t\t}\n\n\t}\n\n}\n\n\n\n/// The general command group. May be deleted later\n", "file_path": "src/main.rs", "rank": 22, "score": 12654.740923146504 }, { "content": "# Oofbot\n\nOofbot is a discord bot written in Rust. This is **not** meant to be used by other people, rather as an example of how to create a discord bot using [serenity-rs](https://github.com/serenity-rs/serenity/) and several other libraries. 
Feel free to copy and paste my spaghetti code, or use it as an reference for your own bot.\n\n\n\nKeep in mind this bot is hardcoded to interact with certain users and guilds in a certain way, so don't just clone this and run it and expect it to work.\n", "file_path": "README.md", "rank": 23, "score": 12614.447097443932 }, { "content": "use crate::canary_update::CanaryUpdateHandler;\n\nuse crate::Handler;\n\nuse crate::LogResult;\n\nuse serenity::model::prelude::*;\n\nuse serenity::prelude::*;\n\nuse serenity::utils::MessageBuilder;\n\nuse std::{ops::Deref, sync::atomic::Ordering, thread, time::Duration};\n\n\n\npub async fn dogebotno(ctx: Context, msg: Message) {\n\n\tif msg.content.contains(\"(offline)\") {\n\n\t\tmsg.channel_id.say(&ctx, \"rip\").await.log_err();\n\n\t\treturn;\n\n\t}\n\n\tlet _ = match msg.content.as_str() {\n\n\t\t\"^\" => {\n\n\t\t\tmsg.channel_id.say(&ctx, \"^\").await.log_err();\n\n\t\t\treturn;\n\n\t\t}\n\n\t\t\"its treason then\" => {\n\n\t\t\tmsg.channel_id\n", "file_path": "src/dogebotno.rs", "rank": 24, "score": 30.861057359643237 }, { "content": "\thelp_options: &'static HelpOptions,\n\n\tgroups: &[&'static CommandGroup],\n\n\towners: HashSet<UserId>,\n\n) -> CommandResult {\n\n\thelp_commands::with_embeds(context, msg, args, help_options, groups, owners).await;\n\n\tOk(())\n\n}\n\n\n\n#[check]\n\n#[name = \"ManageMessages\"]\n\n#[check_in_help(true)]\n\n#[display_in_help(true)]\n\nasync fn manage_messages_check(ctx: &Context, msg: &Message) -> CheckResult {\n\n\tif msg.author.id == 453344368913547265 {\n\n\t\treturn true.into();\n\n\t} else if let Ok(member) = msg.member(&ctx).await {\n\n\t\tif let Ok(permissions) = member.permissions(&ctx.cache).await {\n\n\t\t\treturn (permissions.administrator() || permissions.manage_messages()).into();\n\n\t\t}\n\n\t}\n", "file_path": "src/main.rs", "rank": 25, "score": 28.648156745729676 }, { "content": "\t\t.read()\n\n\t\t.await\n\n\t\t.get::<OofVoice>()\n\n\t\t.unwrap()\n\n\t\t.read()\n\n\t\t.await\n\n\t\t.play_file_cmd(ctx, msg, &mut args)\n\n\t\t.await\n\n\t{\n\n\t\tmsg.channel_id.say(&ctx, e).await.log_err();\n\n\t}\n\n\tOk(())\n\n}\n\n\n\n#[command]\n\n#[description = \"Stops playing music\"]\n\n#[only_in(guilds)]\n\nasync fn stop(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tctx.data\n\n\t\t.read()\n", "file_path": "src/voice.rs", "rank": 26, "score": 25.93224870216427 }, { "content": "#[checks(ManageMessages)]\n\n#[only_in(guilds)]\n\n/// Scan the last X messages and delete all that contain pings. Messages older than 2 weeks cannot be deleted with this command. 
Maximum of 500 messages\n\nasync fn snapping(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n\tlet ping_regex = Regex::new(\"<@!?\\\\d*>\").unwrap();\n\n\tlet count = match args.single::<u64>() {\n\n\t\tOk(x) if x <= 500 => x,\n\n\t\t_ => {\n\n\t\t\tmsg.channel_id\n\n\t\t\t\t.say(&ctx, \"Usage: /lazysnapping <number>\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t\treturn Ok(());\n\n\t\t}\n\n\t};\n\n\tlet channel = msg\n\n\t\t.channel_id\n\n\t\t.to_channel(&ctx)\n\n\t\t.await\n\n\t\t.unwrap()\n", "file_path": "src/main.rs", "rank": 27, "score": 25.274092511860157 }, { "content": "\n\n#[command]\n\n#[description = \"Lists the current queue\"]\n\n#[only_in(guilds)]\n\nasync fn queue(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tlet guild_id = msg.guild_id.unwrap();\n\n\tlet oofvoice = ctx.data.read().await;\n\n\tlet oofvoice = oofvoice.get::<OofVoice>().unwrap();\n\n\tlet oofvoice = oofvoice.read().await;\n\n\tlet mut message = MessageBuilder::new();\n\n\t{\n\n\t\t// There will always be no queue if theres no current song\n\n\t\tlet lock = oofvoice.sources.read().await;\n\n\t\tlet song = match lock.get(&guild_id) {\n\n\t\t\tSome(x) => x.1.to_str().unwrap(),\n\n\t\t\tNone => {\n\n\t\t\t\tmsg.channel_id.say(&ctx, \"None\").await.log_err();\n\n\t\t\t\treturn Ok(());\n\n\t\t\t}\n\n\t\t};\n", "file_path": "src/voice.rs", "rank": 28, "score": 24.779751851313904 }, { "content": "\t}\n\n}\n\n#[command]\n\n#[description = \"Join your current voice channel\"]\n\n#[only_in(guilds)]\n\nasync fn join(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tctx.data\n\n\t\t.read()\n\n\t\t.await\n\n\t\t.get::<OofVoice>()\n\n\t\t.unwrap()\n\n\t\t.read()\n\n\t\t.await\n\n\t\t.join(ctx, msg)\n\n\t\t.await;\n\n\tOk(())\n\n}\n\n\n\n#[command]\n\n#[description = \"Leave the voice channel\"]\n", "file_path": "src/voice.rs", "rank": 29, "score": 24.704249734923177 }, { "content": "#[only_in(guilds)]\n\nasync fn leave(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tctx.data\n\n\t\t.read()\n\n\t\t.await\n\n\t\t.get::<OofVoice>()\n\n\t\t.unwrap()\n\n\t\t.read()\n\n\t\t.await\n\n\t\t.leave(ctx, msg)\n\n\t\t.await;\n\n\tOk(())\n\n}\n\n\n\n#[command]\n\n#[description = \"Play a music file\"]\n\n#[only_in(guilds)]\n\nasync fn play(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n\tif let Err(e) = ctx\n\n\t\t.data\n", "file_path": "src/voice.rs", "rank": 30, "score": 24.406501947002088 }, { "content": "\t\tmessage.push_line(format!(\"Currently Playing: {}\", song));\n\n\t}\n\n\tlet queues = oofvoice.queue.read().await;\n\n\tlet queue = match queues.get(&guild_id) {\n\n\t\tSome(x) => x.read().await,\n\n\t\tNone => return Ok(()),\n\n\t};\n\n\n\n\tfor i in queue.deref().iter().enumerate() {\n\n\t\tmessage.push_line(format!(\"{}: {}\", i.0 + 1, i.1.to_str().unwrap()));\n\n\t}\n\n\tmsg.channel_id.say(&ctx, message).await.log_err();\n\n\tOk(())\n\n}\n\n\n\n#[command]\n\n#[description = \"List all available songs\"]\n\nasync fn list(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tlet music_folder: std::fs::ReadDir = std::fs::read_dir(\"/home/emp/Music\").unwrap();\n\n\tlet mut message = MessageBuilder::new();\n", "file_path": "src/voice.rs", "rank": 31, "score": 23.84713538958566 }, { "content": "use num_derive::*;\n\nuse serenity::framework::standard::CommandResult;\n\nuse serenity::framework::StandardFramework;\n\nuse serenity::prelude::Mutex;\n\nuse serenity::{\n\n\tmodel::id::{ChannelId, GuildId, RoleId, UserId},\n\n\tprelude::TypeMapKey,\n\n};\n\nuse 
sqlite::{Connection, Error as SQLiteError, Value};\n\nuse std::sync::Arc;\n\n\n\nimpl TypeMapKey for SqlHandler {\n\n\ttype Value = Arc<SqlHandler>;\n\n}\n\npub struct SqlHandler {\n\n\tpub sql_connection: Mutex<Connection>,\n\n}\n\n#[repr(u64)]\n\n#[derive(FromPrimitive, ToPrimitive, Clone, Debug)]\n\npub enum Permission {\n\n\tSomeonePing = 0,\n\n\tManageCanaryUpdate = 1,\n\n\tManageDogebotInsults = 2,\n\n\tManagePermissions = 3,\n\n\tManageServers = 4,\n\n\tServer = 5,\n\n}\n\n\n", "file_path": "src/permissions.rs", "rank": 32, "score": 23.464655446651204 }, { "content": "\t\t\t\t\tformat!(\n\n\t\t\t\t\t\t\"Failed to join voice channel {} in guild {}\",\n\n\t\t\t\t\t\tchannel, guild_id\n\n\t\t\t\t\t)\n\n\t\t\t\t);\n\n\t\t\t\tfalse\n\n\t\t\t}\n\n\t\t} else {\n\n\t\t\tmsg.channel_id\n\n\t\t\t\t.say(&ctx, \"Must be in a voice channel\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t\tfalse\n\n\t\t}\n\n\t}\n\n\tpub async fn leave(&self, _ctx: &Context, msg: &Message) -> bool {\n\n\t\tlet guild_id = msg.guild_id.unwrap();\n\n\t\tself.stop(&guild_id).await;\n\n\t\tself.remove_queue(&guild_id).await;\n\n\t\tlet mut lock = self.voice_manager.lock().await;\n", "file_path": "src/voice.rs", "rank": 33, "score": 23.35272677282259 }, { "content": "\tlet messages = messages.into_iter().filter(|m| {\n\n\t\tchrono::Utc::now().naive_utc() - m.timestamp.naive_utc() < chrono::Duration::weeks(2)\n\n\t});\n\n\tchannel.delete_messages(&ctx, messages).await.log_err();\n\n\tOk(())\n\n}\n\n\n\n#[command]\n\n#[checks(ManageMessages)]\n\n#[only_in(guilds)]\n\n/// Sets the slowmode to any second value. This allow more specific slow mode like 1 second.\n\n/// Usage: /setslowmode integer\n\nasync fn setslowmode(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n\tlet arg: u64 = args.single()?;\n\n\tmsg.channel_id\n\n\t\t.to_channel(&ctx)\n\n\t\t.await?\n\n\t\t.guild()\n\n\t\t.unwrap()\n\n\t\t.edit(&ctx, |c| c.slow_mode_rate(arg))\n\n\t\t.await\n\n\t\t.log_err();\n\n\tOk(())\n\n}\n", "file_path": "src/main.rs", "rank": 34, "score": 23.319473516395714 }, { "content": "\t\t\t_ => (),\n\n\t\t};\n\n\t}\n\n}\n\n\n\npub async fn dogebot_presence(\n\n\tctx: &Context,\n\n\tdata: &PresenceUpdateEvent,\n\n\tguild_id: &GuildId,\n\n\thandler: &Handler,\n\n) {\n\n\tif data.presence.status == OnlineStatus::Offline {\n\n\t\tlet dlock = ctx.data.read().await;\n\n\t\tlet canary = dlock.get::<CanaryUpdateHandler>().unwrap();\n\n\t\tlet clock = canary.lock().await;\n\n\t\tlet channel = clock.get_update_channel(&guild_id).await;\n\n\t\tdrop(clock);\n\n\t\tdrop(dlock);\n\n\t\tif let Some(channel) = channel {\n\n\t\t\tlog_timestamp!(\"INFO\", \"Dogebot went offline\");\n", "file_path": "src/dogebotno.rs", "rank": 35, "score": 23.221852796237396 }, { "content": "\t\tlet voice: &mut ClientVoiceManager = lock.deref_mut();\n\n\t\tvoice.leave(guild_id).is_some()\n\n\t}\n\n\tpub async fn play_file_cmd(\n\n\t\t&self,\n\n\t\t_ctx: &Context,\n\n\t\tmsg: &Message,\n\n\t\targs: &mut Args,\n\n\t) -> Result<(), &'static str> {\n\n\t\tlet file: OsString = match args.single_quoted::<String>() {\n\n\t\t\tOk(x) => x.into(),\n\n\t\t\tErr(_) => return Err(\"Usage: /play \\\"song name\\\"\"),\n\n\t\t};\n\n\t\tlet mut lock = self.voice_manager.lock().await;\n\n\t\tlet voice: &mut ClientVoiceManager = lock.deref_mut();\n\n\t\tlet handler = match voice.get_mut(msg.guild_id.unwrap()) {\n\n\t\t\tSome(x) => x,\n\n\t\t\tNone => {\n\n\t\t\t\treturn Err(\"Must run /join first while in a voice channel\");\n\n\t\t\t}\n", "file_path": "src/voice.rs", 
"rank": 36, "score": 23.212371051374685 }, { "content": "#[only_in(guilds)]\n\n/// Scan the last X messages and delete all that start with /. Messages older than 2 weeks cannot be deleted with this command. Maximum of 500 messages\n\nasync fn snapbotcommands(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n\tlet count = match args.single::<u64>() {\n\n\t\tOk(x) if x <= 500 => x,\n\n\t\t_ => {\n\n\t\t\tmsg.channel_id\n\n\t\t\t\t.say(&ctx, \"Usage: /snapbotcommands <number>\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t\treturn Ok(());\n\n\t\t}\n\n\t};\n\n\tlet channel = msg\n\n\t\t.channel_id\n\n\t\t.to_channel(&ctx)\n\n\t\t.await\n\n\t\t.unwrap()\n\n\t\t.guild()\n\n\t\t.unwrap();\n", "file_path": "src/main.rs", "rank": 37, "score": 22.84107821947291 }, { "content": "\t\t.await\n\n\t\t.get::<OofVoice>()\n\n\t\t.unwrap()\n\n\t\t.read()\n\n\t\t.await\n\n\t\t.stop(&msg.guild_id.unwrap())\n\n\t\t.await;\n\n\tOk(())\n\n}\n\n\n\n#[command]\n\n#[description = \"Removes song from the queue\"]\n\n#[only_in(guilds)]\n\nasync fn remove(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n\tif let Ok(number) = args.single::<usize>() {\n\n\t\tif number == 0\n\n\t\t\t|| !ctx\n\n\t\t\t\t.data\n\n\t\t\t\t.read()\n\n\t\t\t\t.await\n", "file_path": "src/voice.rs", "rank": 38, "score": 22.45058450929855 }, { "content": "#[checks(ManageMessages)]\n\n#[only_in(guilds)]\n\n/// Murder all messages after the message with the given id. Message ids can be gotten by enabling\n\n/// developer mode in discord setting and right click -> copy id\n\n/// Messages older than 2 weeks cannot be deleted with this.\n\n/// Usage: /snapuntil messageid\n\nasync fn snapafter(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n\tlet id = args.single::<u64>()?;\n\n\n\n\tlet channel = msg\n\n\t\t.channel_id\n\n\t\t.to_channel(&ctx)\n\n\t\t.await\n\n\t\t.unwrap()\n\n\t\t.guild()\n\n\t\t.unwrap();\n\n\n\n\tlet messages = channel\n\n\t\t.messages(&ctx, |retriever| retriever.after(id))\n\n\t\t.await?;\n", "file_path": "src/main.rs", "rank": 39, "score": 22.25099403823903 }, { "content": "\tlet messages = channel\n\n\t\t.messages(&ctx, |retriever| retriever.before(msg.id).limit(count))\n\n\t\t.await?;\n\n\tlet mut bot_messages: Vec<&Message> = messages\n\n\t\t.iter()\n\n\t\t.filter(|msg| {\n\n\t\t\t(msg.content.starts_with('/') || msg.content.starts_with('!'))\n\n\t\t\t\t&& chrono::Utc::now().naive_utc() - msg.timestamp.naive_utc()\n\n\t\t\t\t\t< chrono::Duration::weeks(2)\n\n\t\t})\n\n\t\t.collect();\n\n\tbot_messages.push(msg);\n\n\tchannel.delete_messages(&ctx, bot_messages).await.log_err();\n\n\tOk(())\n\n}\n\n#[command]\n\n#[checks(ManageMessages)]\n\n#[only_in(guilds)]\n\n/// Murder the last X messages. Messages older than 2 weeks cannot be deleted with this command. 
Maximum of 500 messages\n\nasync fn snapspam(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n", "file_path": "src/main.rs", "rank": 40, "score": 22.23987801138172 }, { "content": "\t\t\t\t\tif let Some(queued) = i.1 {\n\n\t\t\t\t\t\tlet handler;\n\n\t\t\t\t\t\tlet mut lock = oof.voice_manager.lock().await;\n\n\t\t\t\t\t\tlet voice: &mut ClientVoiceManager = lock.deref_mut();\n\n\t\t\t\t\t\thandler = match voice.get_mut(i.0) {\n\n\t\t\t\t\t\t\tSome(x) => x,\n\n\t\t\t\t\t\t\tNone => continue,\n\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\toof.play_file(queued, handler, i.0).await.log_err();\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t});\n\n\t\ts\n\n\t}\n\n\tpub async fn join(&self, ctx: &Context, msg: &Message) -> bool {\n\n\t\tlet guild_id = msg.guild_id.unwrap();\n\n\t\tlet mut lock = self.voice_manager.lock().await;\n\n\t\tlet voice: &mut ClientVoiceManager = lock.deref_mut();\n\n\n", "file_path": "src/voice.rs", "rank": 41, "score": 22.139762376751452 }, { "content": "\tmessages.push(msg.clone());\n\n\tlet messages = messages.into_iter().filter(|m| {\n\n\t\tchrono::Utc::now().naive_utc() - m.timestamp.naive_utc() < chrono::Duration::weeks(2)\n\n\t});\n\n\tchannel.delete_messages(&ctx, messages).await.log_err();\n\n\tOk(())\n\n}\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\n#[checks(DVS)]\n\n/// Gets the status of the DVS minecraft server\n\nasync fn getdvsstatus(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tmsg.channel_id.broadcast_typing(&ctx).await.log_err();\n\n\tlet code = std::process::Command::new(\"sh\")\n\n\t\t.args(&[\n\n\t\t\t\"-c\",\n\n\t\t\t\"nmap -4 applesthepi.com -Pn -p 25566 | rg '25566/tcp open'\",\n\n\t\t])\n\n\t\t.status()\n", "file_path": "src/main.rs", "rank": 42, "score": 21.852919433510294 }, { "content": "use crate::permissions::SqlHandler;\n\nuse serenity::{\n\n\tclient::Context,\n\n\tframework::{\n\n\t\tstandard::{\n\n\t\t\tmacros::{command, group},\n\n\t\t\tCommandResult,\n\n\t\t},\n\n\t\tStandardFramework,\n\n\t},\n\n\tmodel::channel::Message,\n\n};\n\nuse std::sync::Arc;\n\n\n\n#[group]\n\n#[commands(listservers)]\n", "file_path": "src/servers.rs", "rank": 43, "score": 21.68993718568957 }, { "content": "use std::collections::HashMap;\n\n\n\nuse serenity::client::Context;\n\nuse serenity::model::guild::Member;\n\nuse serenity::model::id::GuildId;\n\nuse serenity::model::id::UserId;\n\n\n\npub async fn get_guild_members(ctx: &Context, guild: GuildId) -> Option<Vec<Member>> {\n\n\tif let Some(guild) = ctx.cache.guild(guild).await {\n\n\t\tSome(guild.members.values().cloned().collect())\n\n\t} else if let Ok(guild) = guild.to_partial_guild(&ctx).await {\n\n\t\tguild.members(&ctx, None, None).await.ok()\n\n\t} else {\n\n\t\tNone\n\n\t}\n\n}\n\n#[macro_export]\n\nmacro_rules! 
log {\n\n\t($tag:expr, $($message:expr),+) => {{\n\n\t\tuse std::io::Write;\n", "file_path": "src/logger.rs", "rank": 44, "score": 21.409649768721096 }, { "content": "\t\t\t\t.read()\n\n\t\t\t\t.await\n\n\t\t\t\t.get::<CanaryUpdateHandler>()\n\n\t\t\t\t.cloned()\n\n\t\t\t\t.unwrap();\n\n\t\t\tlet mut lock = canary.lock().await;\n\n\t\t\tlock.add_canary_update(&data.presence.user_id).await;\n\n\t\t} else if !is_dogebot && data.presence.status == OnlineStatus::Online {\n\n\t\t\tcanary_update::do_update(&ctx, &data).await;\n\n\t\t}\n\n\t}\n\n\tasync fn resume(&self, _ctx: Context, _data: ResumedEvent) {\n\n\t\tlog_timestamp!(\"INFO\", \"Reconnected to discord\");\n\n\t}\n\n\tasync fn ready(&self, ctx: Context, _data: Ready) {\n\n\t\tlog_timestamp!(\"INFO\", format!(\"Shard {} ready\", ctx.shard_id));\n\n\t}\n\n\tasync fn cache_ready(&self, ctx: Context, guilds: Vec<GuildId>) {\n\n\t\tlet shard = ctx.shard_id;\n\n\t\tlet rctx = &ctx;\n", "file_path": "src/main.rs", "rank": 45, "score": 19.96666855675415 }, { "content": "\t.log_err();\n\n\n\n\t// Hah you think this bot is big enough to be sharded? Nice joke\n\n\t// But if yours is use .start_autosharded()\n\n\tclient.start().await?;\n\n\tOk(())\n\n}\n\n/// Handles the @someone ping. Yes im evil.\n\nasync fn someone_ping(ctx: &Context, msg: &Message) {\n\n\tlet guild_id: Option<GuildId> = msg.guild_id;\n\n\tlet channel_id: ChannelId = msg.channel_id;\n\n\tmatch guild_id {\n\n\t\tSome(id) => {\n\n\t\t\tlet mut message = MessageBuilder::new();\n\n\t\t\t{\n\n\t\t\t\tlet members = match get_guild_members(&ctx, id).await {\n\n\t\t\t\t\tSome(m) => m,\n\n\t\t\t\t\tNone => {\n\n\t\t\t\t\t\tlog_timestamp!(\"ERROR\", format!(\"Failed to find guild {}\", id));\n\n\t\t\t\t\t\tmsg.channel_id.say(&ctx, \"Internal Error\").await.log_err();\n", "file_path": "src/main.rs", "rank": 46, "score": 19.93054298808191 }, { "content": "\t\t.unwrap();\n\n\tif code.success() {\n\n\t\tlet message = MessageBuilder::new()\n\n\t\t\t.user(msg.author.id)\n\n\t\t\t.push(\" Server port appears to be open, so it should be up.\")\n\n\t\t\t.build();\n\n\t\tmsg.channel_id.say(&ctx, message).await.log_err();\n\n\t} else {\n\n\t\tmsg.channel_id\n\n\t\t\t.say(\n\n\t\t\t\t&ctx,\n\n\t\t\t\t\"Server down indeed, <@324381278600298509> your server is on crack\",\n\n\t\t\t)\n\n\t\t\t.await\n\n\t\t\t.log_err();\n\n\t}\n\n\tOk(())\n\n}\n\n\n\n#[command]\n", "file_path": "src/main.rs", "rank": 47, "score": 19.86937210530435 }, { "content": "\n\n\tfalse.into()\n\n}\n\n#[check]\n\n#[name = \"DVS\"]\n\n#[check_in_help(true)]\n\n#[display_in_help(true)]\n\nasync fn dvs_check(_ctx: &Context, msg: &Message) -> CheckResult {\n\n\t(msg.guild_id.unwrap_or(0.into()) == 693213312099287153).into()\n\n}\n\n\n\n#[group]\n\n#[commands(snapbm, snapping, snapbotcommands, snapspam, snapafter, setslowmode)]\n", "file_path": "src/main.rs", "rank": 48, "score": 19.75986773778004 }, { "content": "\t\t.guild()\n\n\t\t.unwrap();\n\n\tlet messages = channel\n\n\t\t.messages(&ctx, |retriever| retriever.before(msg.id).limit(count))\n\n\t\t.await?;\n\n\tlet mut bot_messages: Vec<&Message> = messages\n\n\t\t.iter()\n\n\t\t.filter(|msg| {\n\n\t\t\tping_regex.is_match(msg.content.as_str())\n\n\t\t\t\t&& chrono::Utc::now().naive_utc() - msg.timestamp.naive_utc()\n\n\t\t\t\t\t< chrono::Duration::weeks(2)\n\n\t\t})\n\n\t\t.collect();\n\n\tbot_messages.push(msg);\n\n\tchannel.delete_messages(&ctx, bot_messages).await.log_err();\n\n\tOk(())\n\n}\n\n\n\n#[command]\n\n#[checks(ManageMessages)]\n", "file_path": "src/main.rs", "rank": 
49, "score": 19.445537429907386 }, { "content": "#[command]\n\nasync fn executeorder66(ctx: &Context, msg: &Message) -> CommandResult {\n\n\tmsg.channel_id.say(&ctx, \"not yet\").await.log_err();\n\n\tOk(())\n\n}\n\n\n\n/// The event handler for oofbot\n\npub struct Handler {\n\n\tcancel_tyler_ping: Arc<AtomicBool>,\n\n\tmention_regex: Regex,\n\n}\n\n\n\nimpl Default for Handler {\n\n\tfn default() -> Self {\n\n\t\tSelf {\n\n\t\t\tcancel_tyler_ping: Arc::default(),\n\n\t\t\tmention_regex: Regex::new(r\"<@!?468928390917783553>\").unwrap(),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/main.rs", "rank": 50, "score": 19.297014449455236 }, { "content": "use crate::LogResult;\n\nuse serenity::model::id::GuildId;\n\nuse serenity::{\n\n\tclient::bridge::voice::*,\n\n\tframework::standard::{\n\n\t\tmacros::{command, group},\n\n\t\t*,\n\n\t},\n\n\tmodel::channel::Message,\n\n\tprelude::*,\n\n\tutils::MessageBuilder,\n\n\tvoice::*,\n\n};\n\nuse std::collections::HashMap;\n\nuse std::fs::DirEntry;\n\nuse std::path::PathBuf;\n\nuse std::{\n\n\tffi::OsString,\n\n\tfs,\n\n\tops::{Deref, DerefMut},\n", "file_path": "src/voice.rs", "rank": 51, "score": 19.261954403833627 }, { "content": "\t\tlet mut data = client.data.write().await;\n\n\t\t// Add the voice manager\n\n\t\tlog_timestamp!(\"INFO\", \"Starting oofvoice\");\n\n\t\tdata.insert::<OofVoice>(OofVoice::new(client.voice_manager.clone()).await);\n\n\t\tlog_timestamp!(\"INFO\", \"Started oofvoice\");\n\n\t\t// Add canary update handler\n\n\t\tlog_timestamp!(\"INFO\", \"Starting canary update handler\");\n\n\t\tlet sql = permissions::SqlHandler::new();\n\n\t\tdata.insert::<CanaryUpdateHandler>(Arc::new(Mutex::new(CanaryUpdateHandler::new(sql))));\n\n\t\tlog_timestamp!(\"INFO\", \"Started canary update handler\");\n\n\t}\n\n\tlet shard_manager = client.shard_manager.clone();\n\n\t// Handle ctrl+c cross platform\n\n\tctrlc::set_handler(move || {\n\n\t\tlog_timestamp!(\"INFO\", \"Caught SIGINT, closing oofbot\");\n\n\t\t//let mut lock = shard_manager.lock().await;\n\n\t\t//let sm: &mut ShardManager = lock.deref_mut();\n\n\t\t//sm.shutdown_all();\n\n\t\tstd::process::exit(0);\n\n\t})\n", "file_path": "src/main.rs", "rank": 52, "score": 18.893244044412597 }, { "content": "\tlet count = match args.single::<u64>() {\n\n\t\tOk(x) if x <= 500 => x,\n\n\t\t_ => {\n\n\t\t\tmsg.channel_id\n\n\t\t\t\t.say(&ctx, \"Usage: /snapspam <number>\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t\treturn Ok(());\n\n\t\t}\n\n\t};\n\n\tlet channel = msg\n\n\t\t.channel_id\n\n\t\t.to_channel(&ctx)\n\n\t\t.await\n\n\t\t.unwrap()\n\n\t\t.guild()\n\n\t\t.unwrap();\n\n\tlet mut messages = channel\n\n\t\t.messages(&ctx, |retriever| retriever.before(msg.id).limit(count))\n\n\t\t.await?;\n", "file_path": "src/main.rs", "rank": 53, "score": 18.816605815819234 }, { "content": "\tpub async fn create_permission_user_table(&self) -> CommandResult {\n\n\t\tself.sql_connection.lock().await.execute(\"CREATE TABLE permission_user (guild_id UNSIGNED BIG INT NOT NULL, user_id UNSIGNED BIG INT NOT NULL channel_id UNSIGNED BIG INT, permission_id UNSIGNED BIG INT NOT NULL, data BLOB)\")?;\n\n\t\tOk(())\n\n\t}\n\n\tpub async fn create_server_table(&self) -> CommandResult {\n\n\t\tself.sql_connection\n\n\t\t\t.lock().await\n\n\t\t\t.execute(\"CREATE TABLE servers (user_id UNSIGNED BIG INT NOT NULL, server_name TEXT NOT NULL UNIQUE)\")?;\n\n\t\tOk(())\n\n\t}\n\n\tpub async fn register_permission_role(\n\n\t\t&self,\n\n\t\tguild_id: GuildId,\n\n\t\trole_id: RoleId,\n\n\t\tchannel_id: 
Option<ChannelId>,\n\n\t\tpermission: Permission,\n\n\t) -> CommandResult {\n\n\t\tlet sql = self.sql_connection.lock().await;\n\n\t\tlet mut cursor = sql\n\n\t\t\t.prepare(\"INSERT INTO permission_role VALUES (?, ?, ?, ?)\")?\n", "file_path": "src/permissions.rs", "rank": 54, "score": 18.623855689387497 }, { "content": "\t\t\t\t\tmessage.mention(&members[r]);\n\n\t\t\t\t});\n\n\t\t\t}\n\n\t\t\tchannel_id.say(&ctx, message).await.log_err();\n\n\t\t}\n\n\t\tNone => {\n\n\t\t\t// If guild is none then this is a dm\n\n\t\t\tchannel_id\n\n\t\t\t\t.say(&ctx.http, \"Cannot @someone in dms\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[help]\n\nasync fn help(\n\n\tcontext: &Context,\n\n\tmsg: &Message,\n\n\targs: Args,\n", "file_path": "src/main.rs", "rank": 55, "score": 18.460601089536706 }, { "content": "\t\t.await\n\n\t\t.unwrap()\n\n\t\t.guild()\n\n\t\t.unwrap();\n\n\tlet messages = channel\n\n\t\t.messages(&ctx, |retriever| retriever.before(msg.id).limit(count))\n\n\t\t.await?;\n\n\tlet mut bot_messages: Vec<&Message> = messages\n\n\t\t.iter()\n\n\t\t.filter(|msg| {\n\n\t\t\tmsg.author.bot\n\n\t\t\t\t&& chrono::Utc::now().naive_utc() - msg.timestamp.naive_utc()\n\n\t\t\t\t\t< chrono::Duration::weeks(2)\n\n\t\t})\n\n\t\t.collect();\n\n\tbot_messages.push(msg);\n\n\tchannel.delete_messages(&ctx, bot_messages).await.log_err();\n\n\tOk(())\n\n}\n\n#[command]\n", "file_path": "src/main.rs", "rank": 56, "score": 16.81054667300908 }, { "content": "\tasync fn message(&self, ctx: Context, msg: Message) {\n\n\t\tlog_timestamp!(\"DEBUG\", &msg.content);\n\n\t\tif msg.author.id == 612070962913083405 {\n\n\t\t\tdogebotno::dogebotno(ctx, msg).await;\n\n\t\t\treturn;\n\n\t\t}\n\n\t\tif self.mention_regex.is_match(msg.content.as_str()) {\n\n\t\t\tlet channel_id: ChannelId = msg.channel_id;\n\n\t\t\tchannel_id\n\n\t\t\t\t.say(\n\n\t\t\t\t\t&ctx,\n\n\t\t\t\t\t\"For thousands of years I lay dormant, who has disturbed my slumber\",\n\n\t\t\t\t)\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t}\n\n\t\tif msg.content.contains(\"@someone\") && !msg.author.bot {\n\n\t\t\tsomeone_ping(&ctx, &msg).await;\n\n\t\t}\n\n\t\tif (msg.content.contains(\"@everyone\") || msg.content.contains(\"@here\"))\n", "file_path": "src/main.rs", "rank": 57, "score": 16.654903605508782 }, { "content": "#![deny(unused_must_use)]\n\n#![type_length_limit = \"1340885\"]\n\nextern crate serenity;\n\n\n\nextern crate ctrlc;\n\n#[macro_use]\n\npub mod logger;\n\npub mod canary_update;\n\npub mod dogebotno;\n\npub mod permissions;\n\npub mod servers;\n\npub mod voice;\n\nuse canary_update::*;\n\nuse futures::{Stream, StreamExt};\n\nuse lazy_static::*;\n\nuse logger::get_guild_members;\n\nuse rand::Rng;\n\nuse regex::Regex;\n\nuse serenity::async_trait;\n\nuse serenity::client::bridge::gateway::GatewayIntents;\n", "file_path": "src/main.rs", "rank": 58, "score": 16.592362757163883 }, { "content": "\t\t\t&& msg.author.id.0 != 468928390917783553\n\n\t\t{\n\n\t\t\tmsg.channel_id\n\n\t\t\t\t.say(&ctx, \"https://yeet.kikoho.xyz/files/ping.gif\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t}\n\n\t\tif msg.author.id == 266345279513427971\n\n\t\t\t&& msg.content.contains(\"https://www.twitch.tv/corporal_q\")\n\n\t\t{\n\n\t\t\tmsg.channel_id.say(&ctx, \"sotp spamming\").await.log_err();\n\n\t\t}\n\n\t}\n\n}\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n\tlog_timestamp!(\"INFO\", \"Starting oofbot\");\n\n\tlog_timestamp!(\"INFO\", \"Getting client secret from file\");\n\n\n\n\tlet mut 
framework = StandardFramework::new()\n", "file_path": "src/main.rs", "rank": 59, "score": 16.12856440879354 }, { "content": "\t\t\t\t\t\t\t\"DEBUG\",\n\n\t\t\t\t\t\t\t\"Returning from dogebot thread before pinging tyler\"\n\n\t\t\t\t\t\t);\n\n\t\t\t\t\t\treturn ctp.store(false, Ordering::SeqCst);\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tctp.store(false, Ordering::SeqCst);\n\n\t\t\t\tlet msg = MessageBuilder::new().user(355803584228622346).build();\n\n\t\t\t\tchannel.say(&ctx, msg).await.log_err()\n\n\t\t\t});\n\n\t\t} else {\n\n\t\t\tlog_timestamp!(\"WARN\", \"No canary update channel set for DVS\");\n\n\t\t}\n\n\t} else if data.presence.status == OnlineStatus::Online {\n\n\t\tlog_timestamp!(\"INFO\", \"Dogebot back\");\n\n\t\thandler\n\n\t\t\t.cancel_tyler_ping\n\n\t\t\t.deref()\n\n\t\t\t.store(true, Ordering::SeqCst);\n\n\t}\n\n}\n", "file_path": "src/dogebotno.rs", "rank": 60, "score": 16.044938877826432 }, { "content": "\t\t\t\t.get::<OofVoice>()\n\n\t\t\t\t.unwrap()\n\n\t\t\t\t.read()\n\n\t\t\t\t.await\n\n\t\t\t\t.remove_from_queue(&msg.guild_id.unwrap(), number - 1)\n\n\t\t\t\t.await\n\n\t\t{\n\n\t\t\tmsg.channel_id\n\n\t\t\t\t.say(&ctx, \"Invalid queue number\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t}\n\n\t} else {\n\n\t\tmsg.channel_id\n\n\t\t\t.say(&ctx, \"Syntax: /remove <number in queue>\")\n\n\t\t\t.await\n\n\t\t\t.log_err();\n\n\t}\n\n\tOk(())\n\n}\n", "file_path": "src/voice.rs", "rank": 61, "score": 16.0351339201317 }, { "content": "use serenity::model::channel::GuildChannel;\n\nuse serenity::{\n\n\tclient::bridge::gateway::ShardManager,\n\n\tframework::standard::{macros::*, *},\n\n\tmodel::{\n\n\t\tchannel::Message,\n\n\t\tevent::{PresenceUpdateEvent, ResumedEvent},\n\n\t\tgateway::Ready,\n\n\t\tid::{ChannelId, GuildId, UserId},\n\n\t\tuser::OnlineStatus,\n\n\t},\n\n\tprelude::*,\n\n\tutils::MessageBuilder,\n\n\tClient,\n\n};\n\nuse std::{\n\n\tcollections::HashSet,\n\n\tops::DerefMut,\n\n\tsync::{atomic::AtomicBool, Arc},\n\n};\n\nuse tokio::{stream, sync::Mutex};\n\nuse voice::OofVoice;\n\n\n\n/// Unwrapping many of the errors in oofbot, mostly api calls, will result in a panic sometimes.\n\n/// This is bad. But I also cant ignore the errors in case theres something bad in there. So my\n\n/// solution is this trait, which logs the error. 
If I look in the logs and see something bad, then\n\n/// I know to recheck everything\n", "file_path": "src/main.rs", "rank": 62, "score": 16.031966112164852 }, { "content": "\t\t};\n\n\t\tlet guild_id = msg.guild_id.unwrap();\n\n\t\tif self.sources.read().await.contains_key(&guild_id) {\n\n\t\t\tlet mut queue = self.queue.write().await;\n\n\t\t\tlet guild = queue.deref_mut().get_mut(&guild_id);\n\n\t\t\tif let Some(q) = guild {\n\n\t\t\t\tq.write().await.push(file);\n\n\t\t\t} else {\n\n\t\t\t\tqueue.deref_mut().insert(guild_id, RwLock::new(vec![file]));\n\n\t\t\t}\n\n\t\t\treturn Ok(());\n\n\t\t}\n\n\t\tself.play_file(file, handler, msg.guild_id.unwrap()).await?;\n\n\t\tOk(())\n\n\t}\n\n\tpub async fn play_file(\n\n\t\t&self,\n\n\t\tfile: std::ffi::OsString,\n\n\t\thandler: &mut Handler,\n\n\t\tguild_id: GuildId,\n", "file_path": "src/voice.rs", "rank": 63, "score": 15.935566493248675 }, { "content": "\t\t});\n\n\t\tif let Some(f) = v {\n\n\t\t\tlet dir: DirEntry = f.unwrap();\n\n\t\t\tlet audio: Box<dyn AudioSource> = match ffmpeg(dir.path().as_os_str()).await {\n\n\t\t\t\tOk(a) => a,\n\n\t\t\t\tErr(e) => {\n\n\t\t\t\t\tlog_timestamp!(\"VOICE/ERROR\", format!(\"Failed to open ffmpeg file {}\", e));\n\n\t\t\t\t\treturn Err(\"FFmpeg died ping Emp\");\n\n\t\t\t\t}\n\n\t\t\t};\n\n\t\t\tlet audio: LockedAudio = handler.play_returning(audio);\n\n\t\t\tself.sources.write().await.insert(guild_id, (audio, file));\n\n\t\t\tOk(())\n\n\t\t} else {\n\n\t\t\tErr(\"Invalid song name\")\n\n\t\t}\n\n\t}\n\n\tpub async fn stop(&self, guild_id: &GuildId) {\n\n\t\tself.sources.write().await.remove(guild_id);\n\n\t\tif let Some(x) = self.voice_manager.lock().await.get_mut(guild_id) {\n", "file_path": "src/voice.rs", "rank": 64, "score": 15.793554129760906 }, { "content": "\t\t\t\tValue::Integer(i64::from_ne_bytes((permission as u64).to_ne_bytes())),\n\n\t\t\t])?,\n\n\t\t};\n\n\t\tif let Some(row) = cursor.next()? {\n\n\t\t\treturn Ok(Some(match row[row.len()].as_binary() {\n\n\t\t\t\tSome(x) => Vec::from(x),\n\n\t\t\t\tNone => return Ok(None),\n\n\t\t\t}));\n\n\t\t}\n\n\t\tOk(None)\n\n\t}\n\n\n\n\tpub async fn register_permission_user(\n\n\t\t&self,\n\n\t\tguild_id: GuildId,\n\n\t\tuser_id: UserId,\n\n\t\tchannel_id: Option<ChannelId>,\n\n\t\tpermission: Permission,\n\n\t) -> CommandResult {\n\n\t\tlet sql = self.sql_connection.lock().await;\n", "file_path": "src/permissions.rs", "rank": 65, "score": 14.900370227651266 }, { "content": "\n\n#[async_trait]\n\nimpl EventHandler for Handler {\n\n\tasync fn presence_update(&self, ctx: Context, data: PresenceUpdateEvent) {\n\n\t\t// oofbot only handles guild presence updates\n\n\t\tif data.guild_id.is_none() {\n\n\t\t\treturn;\n\n\t\t}\n\n\t\t// Dogebot is oofbots greatest enemy. 
We got some checks in here just for him.\n\n\t\tlet is_dogebot = data.presence.user_id == 612070962913083405;\n\n\t\t// Should never be none because we check that up there\n\n\t\tlet guild_id = data.guild_id.unwrap();\n\n\t\t// Checks if dogebot is offline in this guild (the main development guild for dogebot and\n\n\t\t// oofbot)\n\n\t\tif is_dogebot && guild_id.0 == 561874457283657728 {\n\n\t\t\tdogebotno::dogebot_presence(&ctx, &data, &guild_id, self).await;\n\n\t\t} else if !is_dogebot && data.presence.status == OnlineStatus::Offline {\n\n\t\t\t// Inside joke, memeing on how tiny discord canary updates are and how often we get them\n\n\t\t\tlet canary = ctx\n\n\t\t\t\t.data\n", "file_path": "src/main.rs", "rank": 66, "score": 14.466986206712674 }, { "content": "\tfor i in music_folder {\n\n\t\tlet i: DirEntry = i.unwrap();\n\n\t\tif i.file_type().unwrap().is_file() {\n\n\t\t\tmessage.push_line(i.path().file_stem().unwrap().to_str().unwrap());\n\n\t\t}\n\n\t}\n\n\tmsg.channel_id.say(&ctx, message).await.log_err();\n\n\tOk(())\n\n}\n", "file_path": "src/voice.rs", "rank": 67, "score": 14.042705329465889 }, { "content": "\t\t.configure(|c| c.prefix(\"/\"))\n\n\t\t.group(&GENERAL_GROUP)\n\n\t\t.group(&ADMIN_GROUP)\n\n\t\t.help(&HELP);\n\n\tvoice::do_framework(&mut framework);\n\n\tpermissions::do_framework(&mut framework);\n\n\tcanary_update::do_framework(&mut framework);\n\n\n\n\tlet secret = std::fs::read_to_string(\"client_secret\")\n\n\t\t.expect(\"Client secret needs to be in a file called client_secret\");\n\n\tlet mut client = Client::builder(secret)\n\n\t\t.add_intent(GatewayIntents::all())\n\n\t\t.framework(framework)\n\n\t\t.event_handler(Handler::default())\n\n\t\t.await\n\n\t\t.expect(\"Failed to create client\");\n\n\n\n\t// Voice initialization\n\n\t{\n\n\t\t// Lock the clients data\n", "file_path": "src/main.rs", "rank": 68, "score": 12.912395570742099 }, { "content": "\t\tlet guild = msg.guild(&ctx).await.unwrap();\n\n\n\n\t\tlet channel_id = guild\n\n\t\t\t.voice_states\n\n\t\t\t.get(&msg.author.id)\n\n\t\t\t.and_then(|voice_state| voice_state.channel_id);\n\n\t\tif let Some(channel) = channel_id {\n\n\t\t\tif voice.join(guild_id, channel).is_some() {\n\n\t\t\t\tlog_timestamp!(\n\n\t\t\t\t\t\"VOICE/INFO\",\n\n\t\t\t\t\tformat!(\"Joined voice channel {} in guild {}\", channel, guild_id)\n\n\t\t\t\t);\n\n\t\t\t\ttrue\n\n\t\t\t} else {\n\n\t\t\t\tmsg.channel_id\n\n\t\t\t\t\t.say(&ctx, \"Failed to join the channel\")\n\n\t\t\t\t\t.await\n\n\t\t\t\t\t.log_err();\n\n\t\t\t\tlog_timestamp!(\n\n\t\t\t\t\t\"VOICE/ERROR\",\n", "file_path": "src/voice.rs", "rank": 69, "score": 12.517200888862092 }, { "content": "\t\tlet file = std::fs::OpenOptions::new().append(true).read(false).create(true).truncate(false).open(\"oofbot.log\");\n\n\t\t$(\n\n\t\t\tlet message = format!(\"[{}] {}\", $tag.to_string(), $message.to_string());\n\n\t\t\tif let Ok(mut f) = file {\n\n\t\t\t\twriteln!(f, \"{}\", message).unwrap();\n\n\t\t\t}\n\n\t\t\tprintln!(\"{}\", message);\n\n\t\t)+\n\n\t}}\n\n}\n\n#[macro_export]\n\nmacro_rules! 
log_timestamp {\n\n\t($tag:expr, $($message:expr),+) => {{\n\n\t\tuse std::io::Write;\n\n\t\tuse chrono::prelude::*;\n\n\t\tlet time: DateTime<Local> = Local::now();\n\n\t\tlet file = std::fs::OpenOptions::new().append(true).read(false).create(true).truncate(false).open(\"oofbot.log\");\n\n\t\t$(\n\n\t\t\tlet msg: String = format!(\"[{hh:02}:{mm:02}:{ss:02}][{tag}] {message}\",\n\n\t\t\t\ttag=$tag,\n", "file_path": "src/logger.rs", "rank": 70, "score": 12.383501873467662 }, { "content": "\t\t\t\t.say(&ctx, \"Mace went out of the windu\")\n\n\t\t\t\t.await\n\n\t\t\t\t.log_err();\n\n\t\t\treturn;\n\n\t\t}\n\n\t\t_ => (),\n\n\t};\n\n\tfor i in msg.attachments {\n\n\t\tlet _ = match i.filename.as_str() {\n\n\t\t\t\"trollface.jpg\" => {\n\n\t\t\t\tmsg.channel_id\n\n\t\t\t\t\t.say(&ctx, \"I see no problems here <@612070962913083405>\")\n\n\t\t\t\t\t.await\n\n\t\t\t\t\t.log_err();\n\n\t\t\t\treturn;\n\n\t\t\t}\n\n\t\t\t\"creeper.jpg\" => {\n\n\t\t\t\tmsg.channel_id.say(&ctx, \"its fine its fine, it doesnt bother me, it doesnt bother me, IT BOTHERS ME, IT BOTHERS ME A LOT\\n||THAT THAT ONES NOT GREEN||\").await.log_err();\n\n\t\t\t\treturn;\n\n\t\t\t}\n", "file_path": "src/dogebotno.rs", "rank": 71, "score": 12.361673470133816 }, { "content": "\tpub async fn check_permission_user(\n\n\t\t&self,\n\n\t\tguild_id: GuildId,\n\n\t\tuser_id: UserId,\n\n\t\tchannel_id: Option<ChannelId>,\n\n\t\tpermission: Permission,\n\n\t) -> Result<bool, SQLiteError> {\n\n\t\tlet sql = self.sql_connection.lock().await;\n\n\t\tlet mut cursor = sql\n\n\t\t\t.prepare(\"SELECT role_id FROM permission_role WHERE guild_id = ?, role_id = ?, channel_id = ?, permission = ?\")?\n\n\t\t\t.cursor();\n\n\t\tmatch channel_id {\n\n\t\t\tSome(channel_id) => cursor.bind(&[\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(guild_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(user_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(channel_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes((permission as u64).to_ne_bytes())),\n\n\t\t\t])?,\n\n\t\t\tNone => cursor.bind(&[\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(guild_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(user_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Null,\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes((permission as u64).to_ne_bytes())),\n\n\t\t\t])?,\n\n\t\t};\n\n\t\tOk(cursor.next()?.is_some())\n\n\t}\n\n}\n", "file_path": "src/permissions.rs", "rank": 72, "score": 11.042908592257145 }, { "content": "\t\t\tlet ctp = handler.cancel_tyler_ping.clone();\n\n\t\t\tlet ctx = ctx.http.clone();\n\n\t\t\ttokio::spawn(async move {\n\n\t\t\t\tlog_timestamp!(\"DEBUG\", \"Dogebot thread started\");\n\n\t\t\t\tthread::sleep(Duration::from_secs(5));\n\n\n\n\t\t\t\tlet mut v = ctp.load(Ordering::SeqCst);\n\n\t\t\t\tif v {\n\n\t\t\t\t\tlog_timestamp!(\"DEBUG\", \"Returning from dogebot thread before message\");\n\n\t\t\t\t\treturn ctp.store(false, Ordering::SeqCst);\n\n\t\t\t\t}\n\n\n\n\t\t\t\tlog_timestamp!(\"INFO\", \"Dogebot went completely offline\");\n\n\t\t\t\tchannel.say(&ctx, \"dogebot is offline. 
Everyone press F to pay respects, and press <:thisisfine:667895278535311381> for another ~~bug~~ feature!\").await.log_err();\n\n\n\n\t\t\t\tfor _ in 0..58 {\n\n\t\t\t\t\tthread::sleep(Duration::from_secs(5));\n\n\t\t\t\t\tv = ctp.load(Ordering::SeqCst);\n\n\t\t\t\t\tif v {\n\n\t\t\t\t\t\tlog_timestamp!(\n", "file_path": "src/dogebotno.rs", "rank": 73, "score": 10.814113976667223 }, { "content": "\t\t\t\tmessage=$message,\n\n\t\t\t\thh=time.hour(),\n\n\t\t\t\tmm=time.minute(),\n\n\t\t\t\tss=time.second()\n\n\t\t\t);\n\n\t\t\tif let Ok(mut f) = file {\n\n\t\t\t\twriteln!(f, \"{}\", msg).unwrap();\n\n\t\t\t}\n\n\t\t\tprintln!(\"{}\", msg);\n\n\t\t)+\n\n\t}};\n\n}\n", "file_path": "src/logger.rs", "rank": 74, "score": 9.892463360772275 }, { "content": "\t\t\tx.stop()\n\n\t\t}\n\n\t}\n\n\n\n\tpub async fn remove_queue(&self, guild_id: &GuildId) {\n\n\t\tself.queue.write().await.remove(guild_id);\n\n\t}\n\n\n\n\tpub async fn remove_from_queue(&self, guild_id: &GuildId, index: usize) -> bool {\n\n\t\tmatch self.queue.read().await.get(guild_id) {\n\n\t\t\tSome(x) => {\n\n\t\t\t\tif x.read().await.len() > index {\n\n\t\t\t\t\tx.write().await.remove(index);\n\n\t\t\t\t\ttrue\n\n\t\t\t\t} else {\n\n\t\t\t\t\tfalse\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tNone => false,\n\n\t\t}\n", "file_path": "src/voice.rs", "rank": 75, "score": 8.800940420478122 }, { "content": "\t\t\t\tlet mut next: Vec<(GuildId, Option<OsString>)> =\n\n\t\t\t\t\tVec::with_capacity(sources.deref().len());\n\n\t\t\t\tfor i in sources.deref() {\n\n\t\t\t\t\tif (i.1).0.lock().await.finished {\n\n\t\t\t\t\t\tnext.push((*i.0, None));\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tdrop(sources);\n\n\t\t\t\tlet queue = oof.queue.read().await;\n\n\n\n\t\t\t\tfor mut i in &mut next {\n\n\t\t\t\t\tif let Some(vec) = queue.get(&i.0) {\n\n\t\t\t\t\t\tlet mut vec = vec.write().await;\n\n\t\t\t\t\t\tif vec.len() > 0 {\n\n\t\t\t\t\t\t\ti.1 = Some(vec.remove(0));\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tdrop(queue);\n\n\t\t\t\tfor i in next {\n", "file_path": "src/voice.rs", "rank": 76, "score": 8.491144760891585 }, { "content": "\t\tguild_id: GuildId,\n\n\t\trole_id: RoleId,\n\n\t\tchannel_id: Option<ChannelId>,\n\n\t\tpermission: Permission,\n\n\t) -> Result<Option<Vec<u8>>, SQLiteError> {\n\n\t\tlet sql = self.sql_connection.lock().await;\n\n\t\tlet mut cursor = sql\n\n\t\t\t.prepare(\"SELECT role_id FROM permission_role WHERE guild_id = ?, role_id = ?, channel_id = ?, permission = ?\")?\n\n\t\t\t.cursor();\n\n\t\tmatch channel_id {\n\n\t\t\tSome(channel_id) => cursor.bind(&[\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(guild_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(role_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(channel_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes((permission as u64).to_ne_bytes())),\n\n\t\t\t])?,\n\n\t\t\tNone => cursor.bind(&[\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(guild_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(role_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Null,\n", "file_path": "src/permissions.rs", "rank": 77, "score": 7.6224651610273435 }, { "content": "\t\t\t.cursor();\n\n\t\tif let Some(channel_id) = channel_id {\n\n\t\t\tcursor.bind(&[\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(guild_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(role_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(channel_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes((permission as 
u64).to_ne_bytes())),\n\n\t\t\t])?;\n\n\t\t} else {\n\n\t\t\tcursor.bind(&[\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(guild_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(role_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Null,\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes((permission as u64).to_ne_bytes())),\n\n\t\t\t])?;\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n\n\tpub async fn check_permission_role(\n\n\t\t&self,\n", "file_path": "src/permissions.rs", "rank": 78, "score": 7.264380395412458 }, { "content": "\t\t\t\t\t\treturn;\n\n\t\t\t\t\t}\n\n\t\t\t\t};\n\n\n\n\t\t\t\tlet mut rng = rand::thread_rng();\n\n\n\n\t\t\t\tmessage.mention(&msg.author);\n\n\t\t\t\tmessage.push(\" has pinged: \");\n\n\n\n\t\t\t\tlet someones = msg.content.split(\"@someone\");\n\n\t\t\t\tlet c = someones.count();\n\n\t\t\t\tif c > 1 {\n\n\t\t\t\t\tlet r = rng.gen_range(0, members.len());\n\n\t\t\t\t\tmessage.mention(&members[r]);\n\n\t\t\t\t}\n\n\n\n\t\t\t\t// Randomly select the @someones\n\n\t\t\t\tmsg.content.split(\"@someone\").skip(2).for_each(|_| {\n\n\t\t\t\t\tmessage.push(\", \");\n\n\t\t\t\t\tlet r = rng.gen_range(0, members.len());\n", "file_path": "src/main.rs", "rank": 79, "score": 7.193889000447399 }, { "content": "\t\t// Get all the guilds that this shard is connected to\n\n\t\t// Not that this bot will ever be big enough for me to bother sharding it\n\n\t\tlet guild_info: Vec<_> = stream::iter(guilds)\n\n\t\t\t.filter_map(|guild_id| async move {\n\n\t\t\t\tif guild_id.shard_id(&rctx).await == rctx.shard_id {\n\n\t\t\t\t\tSome((\n\n\t\t\t\t\t\tguild_id,\n\n\t\t\t\t\t\tguild_id.to_guild_cached(&rctx).await.unwrap().name.clone(),\n\n\t\t\t\t\t))\n\n\t\t\t\t} else {\n\n\t\t\t\t\tNone\n\n\t\t\t\t}\n\n\t\t\t})\n\n\t\t\t.collect()\n\n\t\t\t.await;\n\n\t\tlog_timestamp!(\n\n\t\t\t\"INFO\",\n\n\t\t\tformat!(\"Shard {} connected to guilds\\n{:#?}\", shard, guild_info)\n\n\t\t);\n\n\t}\n", "file_path": "src/main.rs", "rank": 80, "score": 6.7971439938971265 }, { "content": "\tsync::Arc,\n\n\tthread,\n\n\ttime::Duration,\n\n};\n\n\n\nimpl TypeMapKey for OofVoice {\n\n\ttype Value = Arc<RwLock<OofVoice>>;\n\n}\n\n#[group]\n\n#[commands(join, leave, play, stop, queue, remove, list)]\n", "file_path": "src/voice.rs", "rank": 81, "score": 6.522421972654968 }, { "content": "\t\tlet mut cursor = sql\n\n\t\t\t.prepare(\"INSERT INTO permission_role VALUES (?, ?, ?, ?)\")?\n\n\t\t\t.cursor();\n\n\t\tif let Some(channel_id) = channel_id {\n\n\t\t\tcursor.bind(&[\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(guild_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(user_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(channel_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes((permission as u64).to_ne_bytes())),\n\n\t\t\t])?;\n\n\t\t} else {\n\n\t\t\tcursor.bind(&[\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(guild_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes(user_id.0.to_ne_bytes())),\n\n\t\t\t\tValue::Null,\n\n\t\t\t\tValue::Integer(i64::from_ne_bytes((permission as u64).to_ne_bytes())),\n\n\t\t\t])?;\n\n\t\t}\n\n\t\tOk(())\n\n\t}\n", "file_path": "src/permissions.rs", "rank": 82, "score": 5.859011513827509 }, { "content": "\t) -> Result<(), &'static str> {\n\n\t\tlet mut dir = match fs::read_dir(\"/home/emp/Music\") {\n\n\t\t\tOk(x) => x,\n\n\t\t\tErr(e) => {\n\n\t\t\t\tlog_timestamp!(\"VOICE/ERROR\", format!(\"Failed to open music dir {}\", e));\n\n\t\t\t\treturn Err(\"Don't delay, ping @Emp today!\");\n\n\t\t\t}\n\n\t\t};\n\n\t\tlet v: 
Option<std::io::Result<DirEntry>> = dir.find(|x| match x {\n\n\t\t\tOk(x) => {\n\n\t\t\t\tlet path: PathBuf = x.path();\n\n\t\t\t\tif !path.is_dir() && path.file_stem().unwrap_or_default() == file {\n\n\t\t\t\t\treturn true;\n\n\t\t\t\t}\n\n\t\t\t\tfalse\n\n\t\t\t}\n\n\t\t\tErr(e) => {\n\n\t\t\t\tlog_timestamp!(\"VOICE/ERROR\", format!(\"Failed to open music file {}\", e));\n\n\t\t\t\tfalse\n\n\t\t\t}\n", "file_path": "src/voice.rs", "rank": 83, "score": 4.978990160060412 } ]
Rust
src/main.rs
andersk/prime-summer
060f1167a56cab1fff687e52a13af5815d2508ad
use primesieve_sys::{ primesieve_free, primesieve_free_iterator, primesieve_generate_primes, primesieve_init, primesieve_iterator, primesieve_next_prime, primesieve_prev_prime, primesieve_skipto, UINT64_PRIMES, }; use rug::ops::Pow; use rug::Integer; use std::collections::VecDeque; use std::env; use std::error::Error; use std::mem; use std::slice; #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] struct Query { x: u64, i: usize, sign: bool, w: u64, } fn phi(x: u64, primes: &[u64], sign: bool, w: u64, y: u64, queries: &mut Vec<Query>) -> Integer { if x >= y || primes.is_empty() { let mut o: Integer = Integer::from(x) * (x + 1) * (2 * x + 1) / 6 * w * w; if sign { o = -o; } for (i, &p) in primes.iter().enumerate() { o += phi(x / p, &primes[..i], !sign, w * p, y, queries); } o } else { queries.push(Query { x, i: primes.len(), sign, w, }); Integer::new() } } fn sum_primes_squared(n: u64) -> Integer { if n == 0 { return Integer::new(); } let cbrt_n: u64 = Integer::from(n).root(3).to_u64().unwrap(); let sqrt_n: u64 = Integer::from(n).sqrt().to_u64().unwrap(); let small_primes = unsafe { let mut small_primes_size = 0; let small_primes_buf = primesieve_generate_primes(2, cbrt_n, &mut small_primes_size, UINT64_PRIMES); let small_primes: Box<[u64]> = slice::from_raw_parts(small_primes_buf as *const u64, small_primes_size).into(); primesieve_free(small_primes_buf); small_primes }; let y = sqrt_n * 3; let mut queries = Vec::new(); let mut ret = phi(n, &small_primes, false, 1, y, &mut queries); queries.sort_by_key(|query| (query.x, query.i)); let mut queries = queries.into_iter(); if let Some(mut query) = queries.next() { let base = 1 << mem::size_of_val(&small_primes.len()) as u32 * 8 - small_primes.len().leading_zeros(); let mut accumulator = Vec::new(); accumulator.resize_with(base + small_primes.len() + 1, Integer::new); let mut queue = VecDeque::new(); queue.resize((small_primes.last().unwrap_or(&0) + 1) as usize, !0); for (i, &p) in small_primes.iter().enumerate() { queue[p as usize] = i; } let mut x = 0; 'outer: loop { let i = match queue.pop_front() { Some(i) if i != !0 => { let mut k = i; let mut j = small_primes[k] as usize - 1; while j < queue.len() && queue[j] != !0 { if queue[j] > k { mem::swap(&mut k, &mut queue[j]); } j += small_primes[k] as usize; } if j >= queue.len() { queue.resize(j + 1, !0); } queue[j] = k; i } _ => small_primes.len(), }; let x2 = Integer::from(x).pow(2); let mut node = base + i; while node != 0 { accumulator[node] += &x2; node >>= 1; } while query.x == x { let mut node = base + query.i; let mut reply = accumulator[node].clone(); while node != 0 { if node & 1 == 0 { reply += &accumulator[node + 1]; } node >>= 1; } if query.sign { reply = -reply; } ret += reply * query.w * query.w; if let Some(query1) = queries.next() { query = query1; } else { break 'outer; } } x += 1; } } ret -= 1; for &p in &*small_primes { ret += p * p; } let mut pi: primesieve_iterator; let mut qi: primesieve_iterator; unsafe { pi = mem::zeroed(); primesieve_init(&mut pi); primesieve_skipto(&mut pi, sqrt_n + 1, cbrt_n); qi = mem::zeroed(); primesieve_init(&mut qi); primesieve_skipto(&mut qi, sqrt_n, n / cbrt_n); } let mut p; let mut q = unsafe { primesieve_next_prime(&mut qi) }; let mut s = Integer::new(); while { p = unsafe { primesieve_prev_prime(&mut pi) }; p > cbrt_n } { let p2 = p * p; s += p2; while p * q <= n { s += Integer::from(q).pow(2); q = unsafe { primesieve_next_prime(&mut qi) }; } ret -= &s * p2; } unsafe { primesieve_free_iterator(&mut pi); primesieve_free_iterator(&mut qi); 
drop(pi); } ret } fn main() -> Result<(), Box<dyn Error>> { if let [_, n] = &*env::args().collect::<Vec<_>>() { let n = n.parse()?; println!( "Sum of squares of primes ≤ {} is {}", n, sum_primes_squared(n) ); } else { Err("Usage: prime-summer N")?; } Ok(()) } #[test] fn test_small() { assert_eq!(sum_primes_squared(0), 0); assert_eq!(sum_primes_squared(1), 0); let mut s = 0; for n in 2..10001 { let mut i = 2; loop { if i * i > n { s += n * n; break; } if n % i == 0 { break; } i += 1; } assert_eq!(sum_primes_squared(n), s); } } #[test] fn test_powers_of_10() { assert_eq!(sum_primes_squared(2), "4".parse::<Integer>().unwrap()); assert_eq!(sum_primes_squared(29), "2397".parse::<Integer>().unwrap()); assert_eq!( sum_primes_squared(541), "8384727".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(7919), "19053119163".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(104729), "34099597499091".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(1299709), "53251529659694763".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(15485863), "76304519151822049179".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(179424673), "103158861357874372432083".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(2038074743), "133759354162117403400944283".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(22801763489), "168072405102068540986037048787".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(252097800623), "206076219788796447007218742841043" .parse::<Integer>() .unwrap() ); assert_eq!( sum_primes_squared(2760727302517), "247754953701579144582110673365391267" .parse::<Integer>() .unwrap() ); }
use primesieve_sys::{ primesieve_free, primesieve_free_iterator, primesieve_generate_primes, primesieve_init, primesieve_iterator, primesieve_next_prime, primesieve_prev_prime, primesieve_skipto, UINT64_PRIMES, }; use rug::ops::Pow; use rug::Integer; use std::collections::VecDeque; use std::env; use std::error::Error; use std::mem; use std::slice; #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] struct Query { x: u64, i: usize, sign: bool, w: u64, } fn phi(x: u64, primes: &[u64], sign: bool, w: u64, y: u64, queries: &mut Vec<Query>) -> Integer { if x >= y || primes.is_empty() { let mut o: Integer = Integer::from(x) * (x + 1) * (2 * x + 1) / 6 * w * w; if sign { o = -o; } for (i, &p) in primes.iter().enumerate() { o += phi(x / p, &primes[..i], !sign, w * p, y, queries); } o } else { queries.push(Query { x, i: primes.len(), sign, w, }); Integer::new() } } fn sum_primes_squared(n: u64) -> Integer { if n == 0 { return Integer::new(); } let cbrt_n: u64 = Integer::from(n).root(3).to_u64().unwrap(); let sqrt_n: u64 = Integer::from(n).sqrt().to_u64().unwrap(); let small_primes = unsafe { let mut small_primes_size = 0; let small_primes_buf = primesieve_generate_primes(2, cbrt_n, &mut small_primes_size, UINT64_PRIMES); let small_primes: Box<[u64]> = slice::from_raw_parts(small_primes_buf as *const u64, small_primes_size).into(); primesieve_free(small_primes_buf); small_primes }; let y = sqrt_n * 3; let mut queries = Vec::new(); let mut ret = phi(n, &small_primes, false, 1, y, &mut queries); queries.sort_by_key(|query| (query.x, query.i)); let mut queries = queries.into_iter(); if let Some(mut query) = queries.next() { let base = 1 << mem::size_of_val(&small_primes.len()) as u32 * 8 - small_primes.len().leading_zeros(); let mut accumulator = Vec::new(); accumulator.resize_with(base + small_primes.len() + 1, Integer::new); let mut queue = VecDeque::new(); queue.resize((small_primes.last().unwrap_or(&0) + 1) as usize, !0); for (i, &p) in small_primes.iter().enumerate() { queue[p as usize] = i; } let mut x = 0; 'outer: loop { let i = match queue.pop_front() { Some(i) if i != !0 => { let mut k = i; let mut j = small_primes[k] as usize - 1; while j < queue.len() && queue[j] != !0 { if queue[j] > k { mem::swap(&mut k, &mut queue[j]); } j += small_primes[k] as usize; } if j >= queue.len() { queue.resize(j + 1, !0); } queue[j] = k; i } _ => small_primes.len(), }; let x2 = Integer::from(x).pow(2); let mut node = base + i; while node != 0 { accumulator[node] += &x2; node >>= 1; } while query.x == x { let mut node = base + query.i; let mut reply = accumulator[node].clone(); while node != 0 { if node & 1 == 0 { reply += &accumulator[node + 1]; } node >>= 1; } if query.sign { reply = -reply; } ret += reply * query.w * query.w; if let Some(query1) = queries.next() { query = query1; } else { break 'outer; } } x += 1; } } ret -= 1; for &p in &*small_primes { ret += p * p; } let mut pi: primesieve_iterator; let mut qi: primesieve_iterator; unsafe { pi = mem::zeroed(); primesieve_init(&mut pi); primesieve_skipto(&mut pi, sqrt_n + 1, cbrt_n); qi = mem::zeroed(); primesieve_init(&mut qi); primesieve_skipto(&mut qi, sqrt_n, n / cbrt_n); } let mut p; let mut q = unsafe { primesieve_next_prime(&mut qi) }; let mut s = Integer::new(); while { p = unsafe { primesieve_prev_prime(&mut pi) }; p > cbrt_n } { let p2 = p * p; s += p2; while p * q <= n { s += Integer::from(q).pow(2); q = unsafe { primesieve_next_prime(&mut qi) }; } ret -= &s * p2; } unsafe { primesieve_free_iterator(&mut pi); primesieve_free_iterator(&mut qi); 
drop(pi); } ret } fn main() -> Result<(), Box<dyn Error>> { if let [_, n] = &*env::args().collect::<Vec<_>>() { let n = n.parse()?; println!( "Sum of squares of primes ≤ {} is {}", n, sum_primes_squared(n) ); } else { Err("Usage: prime-summer N")?; } Ok(()) } #[test] fn test_small() { assert_eq!(sum_primes_squared(0), 0); assert_eq!(sum_primes_squared(1), 0); let mut s = 0; for n in 2..10001 { let mut i = 2; loop { if i * i > n { s += n * n; break; } if n % i == 0 { break; } i += 1; } assert_eq!(sum_primes_squared(n), s); } } #[test] fn test_powers_of_10() { assert_eq!(sum_primes_squared(2), "4".parse::<Integer>().unwrap()); assert_eq!(sum_primes_squared(29), "2397".parse::<Integer>().unwrap()); assert_eq!( sum_primes_squared(541), "8384727".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(7919), "19053119163".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(104729), "34099597499091".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(1299709), "53251529659694763".parse::<Integer>().unwrap() ); assert_
eq!( sum_primes_squared(15485863), "76304519151822049179".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(179424673), "103158861357874372432083".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(2038074743), "133759354162117403400944283".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(22801763489), "168072405102068540986037048787".parse::<Integer>().unwrap() ); assert_eq!( sum_primes_squared(252097800623), "206076219788796447007218742841043" .parse::<Integer>() .unwrap() ); assert_eq!( sum_primes_squared(2760727302517), "247754953701579144582110673365391267" .parse::<Integer>() .unwrap() ); }
function_block-function_prefixed
[ { "content": "# prime-summer\n\n\n\nThe easiest way to try this is via Docker:\n\n\n\n```console\n\n$ docker run --rm anderskaseorg/prime-summer prime-summer 100\n\nSum of squares of primes ≤ 100 is 65796\n\n$ docker run --rm anderskaseorg/prime-summer prime-summer 10000000000000\n\nSum of squares of primes ≤ 10000000000000 is 11262617785640702236670513970349205634\n\n```\n\n\n\nIf you want to build it yourself, you’ll need Rust, GCC, and\n\n[libprimesieve](https://github.com/kimwalisch/primesieve).\n", "file_path": "README.md", "rank": 6, "score": 7410.685832626554 } ]
Rust
src/vmm/src/memory_snapshot.rs
HQ01/firecracker
a08be39fb621b17494e0a964ad5d434c65a8e737
#![cfg(target_arch = "x86_64")] use std::fmt::{Display, Formatter}; use std::fs::File; use std::io::SeekFrom; use versionize::{VersionMap, Versionize, VersionizeResult}; use versionize_derive::Versionize; use vm_memory::{ Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError, GuestMemoryMmap, GuestMemoryRegion, GuestRegionMmap, MemoryRegionAddress, MmapRegion, }; use crate::DirtyBitmap; #[derive(Debug, PartialEq, Versionize)] pub struct GuestMemoryRegionState { pub base_address: u64, pub size: usize, pub offset: u64, } #[derive(Debug, Default, PartialEq, Versionize)] pub struct GuestMemoryState { pub regions: Vec<GuestMemoryRegionState>, } pub trait SnapshotMemory where Self: Sized, { fn describe(&self) -> GuestMemoryState; fn dump<T: std::io::Write>(&self, writer: &mut T) -> std::result::Result<(), Error>; fn dump_dirty<T: std::io::Write + std::io::Seek>( &self, writer: &mut T, dirty_bitmap: &DirtyBitmap, ) -> std::result::Result<(), Error>; fn restore( file: &File, state: &GuestMemoryState, track_dirty_pages: bool, ) -> std::result::Result<Self, Error>; } #[derive(Debug)] pub enum Error { FileHandle(std::io::Error), CreateMemory(vm_memory::Error), CreateRegion(vm_memory::mmap::MmapRegionError), WriteMemory(GuestMemoryError), } impl Display for Error { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { use self::Error::*; match self { FileHandle(err) => write!(f, "Cannot access file: {:?}", err), CreateMemory(err) => write!(f, "Cannot create memory: {:?}", err), CreateRegion(err) => write!(f, "Cannot create memory region: {:?}", err), WriteMemory(err) => write!(f, "Cannot dump memory: {:?}", err), } } } impl SnapshotMemory for GuestMemoryMmap { fn describe(&self) -> GuestMemoryState { let mut guest_memory_state = GuestMemoryState::default(); let mut offset = 0; let _: std::result::Result<(), ()> = self.with_regions_mut(|_, region| { guest_memory_state.regions.push(GuestMemoryRegionState { base_address: region.start_addr().0, size: region.len() as usize, offset, }); offset += region.len(); Ok(()) }); guest_memory_state } fn dump<T: std::io::Write>(&self, writer: &mut T) -> std::result::Result<(), Error> { self.with_regions_mut(|_, region| { region.write_all_to(MemoryRegionAddress(0), writer, region.len() as usize) }) .map_err(Error::WriteMemory) } fn dump_dirty<T: std::io::Write + std::io::Seek>( &self, writer: &mut T, dirty_bitmap: &DirtyBitmap, ) -> std::result::Result<(), Error> { let page_size = sysconf::page::pagesize(); let mut writer_offset = 0; self.with_regions_mut(|slot, region| { let kvm_bitmap = dirty_bitmap.get(&slot).unwrap(); let firecracker_bitmap = region.dirty_bitmap().unwrap(); let mut write_size = 0; let mut dirty_batch_start: u64 = 0; for (i, v) in kvm_bitmap.iter().enumerate() { for j in 0..64 { let is_kvm_page_dirty = ((v >> j) & 1u64) != 0u64; let page_offset = ((i * 64) + j) * page_size; let is_firecracker_page_dirty = firecracker_bitmap.is_addr_set(page_offset); if is_kvm_page_dirty || is_firecracker_page_dirty { if write_size == 0 { writer .seek(SeekFrom::Start(writer_offset + page_offset as u64)) .unwrap(); dirty_batch_start = page_offset as u64; } write_size += page_size; } else if write_size > 0 { region.write_all_to( MemoryRegionAddress(dirty_batch_start), writer, write_size, )?; write_size = 0; } } } if write_size > 0 { region.write_all_to(MemoryRegionAddress(dirty_batch_start), writer, write_size)?; } writer_offset += region.len(); firecracker_bitmap.reset(); Ok(()) }) .map_err(Error::WriteMemory) } fn restore( file: &File, state: 
&GuestMemoryState, track_dirty_pages: bool, ) -> std::result::Result<Self, Error> { let mut mmap_regions = Vec::new(); for region in state.regions.iter() { let mmap_region = MmapRegion::build( Some(FileOffset::new( file.try_clone().map_err(Error::FileHandle)?, region.offset, )), region.size, libc::PROT_READ | libc::PROT_WRITE, libc::MAP_NORESERVE | libc::MAP_PRIVATE, ) .map(|r| { let mut region = GuestRegionMmap::new(r, GuestAddress(region.base_address))?; if track_dirty_pages { region.enable_dirty_page_tracking(); } Ok(region) }) .map_err(Error::CreateRegion)? .map_err(Error::CreateMemory)?; mmap_regions.push(mmap_region); } Ok(Self::from_regions(mmap_regions).map_err(Error::CreateMemory)?) } } #[cfg(test)] mod tests { use std::collections::HashMap; use super::*; use std::io::{Read, Seek}; use utils::tempfile::TempFile; use vm_memory::GuestAddress; #[test] fn test_describe_state() { let page_size: usize = sysconf::page::pagesize(); let mem_regions = [ (GuestAddress(0), page_size), (GuestAddress(page_size as u64 * 2), page_size), ]; let guest_memory = GuestMemoryMmap::from_ranges(&mem_regions[..]).unwrap(); let expected_memory_state = GuestMemoryState { regions: vec![ GuestMemoryRegionState { base_address: 0, size: page_size, offset: 0, }, GuestMemoryRegionState { base_address: page_size as u64 * 2, size: page_size, offset: page_size as u64, }, ], }; let actual_memory_state = guest_memory.describe(); assert_eq!(expected_memory_state, actual_memory_state); let mem_regions = [ (GuestAddress(0), page_size * 3), (GuestAddress(page_size as u64 * 4), page_size * 3), ]; let guest_memory = GuestMemoryMmap::from_ranges(&mem_regions[..]).unwrap(); let expected_memory_state = GuestMemoryState { regions: vec![ GuestMemoryRegionState { base_address: 0, size: page_size * 3, offset: 0, }, GuestMemoryRegionState { base_address: page_size as u64 * 4, size: page_size * 3, offset: page_size as u64 * 3, }, ], }; let actual_memory_state = guest_memory.describe(); assert_eq!(expected_memory_state, actual_memory_state); } #[test] fn test_restore_memory() { let page_size: usize = sysconf::page::pagesize(); let mem_regions = [ (GuestAddress(0), page_size * 2), (GuestAddress(page_size as u64 * 3), page_size * 2), ]; let guest_memory = GuestMemoryMmap::from_ranges_with_tracking(&mem_regions[..]).unwrap(); let _res: std::result::Result<(), Error> = guest_memory.with_regions(|_, r| { assert!(!r.dirty_bitmap().unwrap().is_bit_set(0)); assert!(!r.dirty_bitmap().unwrap().is_bit_set(1)); Ok(()) }); let first_region = vec![1u8; page_size * 2]; guest_memory .write(&first_region[..], GuestAddress(0)) .unwrap(); let second_region = vec![2u8; page_size * 2]; guest_memory .write(&second_region[..], GuestAddress(page_size as u64 * 3)) .unwrap(); let memory_state = guest_memory.describe(); { let memory_file = TempFile::new().unwrap(); guest_memory.dump(&mut memory_file.as_file()).unwrap(); let restored_guest_memory = GuestMemoryMmap::restore(&memory_file.as_file(), &memory_state, false).unwrap(); let mut actual_region = vec![0u8; page_size * 2]; restored_guest_memory .read(&mut actual_region.as_mut_slice(), GuestAddress(0)) .unwrap(); assert_eq!(first_region, actual_region); restored_guest_memory .read( &mut actual_region.as_mut_slice(), GuestAddress(page_size as u64 * 3), ) .unwrap(); assert_eq!(second_region, actual_region); } { let mut dirty_bitmap: DirtyBitmap = HashMap::new(); dirty_bitmap.insert(0, vec![0b01; 1]); dirty_bitmap.insert(1, vec![0b10; 1]); let file = TempFile::new().unwrap(); guest_memory .dump_dirty(&mut 
file.as_file(), &dirty_bitmap) .unwrap(); let restored_guest_memory = GuestMemoryMmap::restore(&file.as_file(), &memory_state, false).unwrap(); let mut actual_region = vec![0u8; page_size * 2]; restored_guest_memory .read(&mut actual_region.as_mut_slice(), GuestAddress(0)) .unwrap(); assert_eq!(first_region, actual_region); restored_guest_memory .read( &mut actual_region.as_mut_slice(), GuestAddress(page_size as u64 * 3), ) .unwrap(); assert_eq!(second_region, actual_region); let file = TempFile::new().unwrap(); let mut reader = file.as_file(); let zeros = vec![0u8; page_size]; let ones = vec![1u8; page_size]; let twos = vec![2u8; page_size]; guest_memory .write(&twos[..], GuestAddress(page_size as u64)) .unwrap(); guest_memory.dump_dirty(&mut reader, &dirty_bitmap).unwrap(); let mut diff_file_content = Vec::new(); let expected_first_region = [ ones.as_slice(), twos.as_slice(), zeros.as_slice(), twos.as_slice(), ] .concat(); reader.seek(SeekFrom::Start(0)).unwrap(); reader.read_to_end(&mut diff_file_content).unwrap(); assert_eq!(expected_first_region, diff_file_content); } } }
#![cfg(target_arch = "x86_64")] use std::fmt::{Display, Formatter}; use std::fs::File; use std::io::SeekFrom; use versionize::{VersionMap, Versionize, VersionizeResult}; use versionize_derive::Versionize; use vm_memory::{ Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError, GuestMemoryMmap, GuestMemoryRegion, GuestRegionMmap, MemoryRegionAddress, MmapRegion, }; use crate::DirtyBitmap; #[derive(Debug, PartialEq, Versionize)] pub struct GuestMemoryRegionState { pub base_address: u64, pub size: usize, pub offset: u64, } #[derive(Debug, Default, PartialEq, Versionize)] pub struct GuestMemoryState { pub regions: Vec<GuestMemoryRegionState>, } pub trait SnapshotMemory where Self: Sized, { fn describe(&self) -> GuestMemoryState; fn dump<T: std::io::Write>(&self, writer: &mut T) -> std::result::Result<(), Error>; fn dump_dirty<T: std::io::Write + std::io::Seek>( &self, writer: &mut T, dirty_bitmap: &DirtyBitmap, ) -> std::result::Result<(), Error>; fn restore( file: &File, state: &GuestMemoryState, track_dirty_pages: bool, ) -> std::result::Result<Self, Error>; } #[derive(Debug)] pub enum Error { FileHandle(std::io::Error), CreateMemory(vm_memory::Error), CreateRegion(vm_memory::mmap::MmapRegionError), WriteMemory(GuestMemoryError), } impl Display for Error { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { use self::Error::*; match self { FileHandle(err) => write!(f, "Cannot access file: {:?}", err), CreateMemory(err) => write!(f, "Cannot create memory: {:?}", err), CreateRegion(err) => write!(f, "Cannot create memory region: {:?}", err), WriteMemory(err) => write!(f, "Cannot dump memory: {:?}", err), } } } impl SnapshotMemory for GuestMemoryMmap { fn describe(&self) -> GuestMemoryState { let mut guest_memory_state = GuestMemoryState::default(); let mut offset = 0; let _: std::result::Result<(), ()> = self.with_regions_mut(|_, region| { guest_memory_state.regions.push(GuestMemoryRegionState { base_address: region.start_addr().0, size: region.len() as usize, offset, }); offset += region.len(); Ok(()) }); guest_memory_state } fn dump<T: std::io::Write>(&self, writer: &mut T) -> std::result::Result<(), Error> { self.with_regions_mut(|_, region| { region.write_all_to(MemoryRegionAddress(0), writer, region.len() as usize) }) .map_err(Error::WriteMemory) } fn dump_dirty<T: std::io::Write + std::io::Seek>( &self, writer: &mut T, dirty_bitmap: &DirtyBitmap, ) -> std::result::Result<(), Error> { let page_size = sysconf::page::pagesize(); let mut writer_offset = 0; self.with_regions_mut(|slot, region| { let kvm_bitmap = dirty_bitmap.get(&slot).unwrap(); let firecracker_bitmap = region.dirty_bitmap().unwrap(); let mut write_size = 0; let mut dirty_batch_start: u64 = 0; for (i, v) in kvm_bitmap.iter().enumerate() { for j in 0..64 { let is_kvm_page_dirty = ((v >> j) & 1u64) != 0u64; let page_offset = ((i * 64) + j) * page_size; let is_firecracker_page_dirty = firecracker_bitmap.is_addr_set(page_offset); if is_kvm_page_dirty || is_firecracker_page_dirty { if write_size == 0 { writer .seek(SeekFrom::Start(writer_offset + page_offset as u64)) .unwrap(); dirty_batch_start = page_offset as u64; } write_size += page_size; } else if write_size > 0 { region.write_all_to( MemoryRegionAddress(dirty_batch_start), writer, write_size, )?; write_size = 0; } } } if write_size > 0 { region.write_all_to(MemoryRegionAddress(dirty_batch_start), writer, write_size)?; } writer_offset += region.len(); firecracker_bitmap.reset(); Ok(()) }) .map_err(Error::WriteMemory) } fn restore( file: &File, state: 
&GuestMemoryState, track_dirty_pages: bool, ) -> std::result::Result<Self, Error> { let mut mmap_regions = Vec::new(); for region in state.regions.iter() { let mmap_region = MmapRegion::build( Some(FileOffset::new( file.try_clone().map_err(Error::FileHandle)?, region.offset, )), region.size, libc::PROT_READ | libc::PROT_WRITE, libc::MAP_NORESERVE | libc::MAP_PRIVATE, ) .map(|r| { let mut region = GuestRegionMmap::new(r, GuestAddress(region.base_address))?; if track_dirty_pages { region.enable_dirty_page_tracking(); } Ok(region) }) .map_err(Error::CreateRegion)? .map_err(Error::CreateMemory)?; mmap_regions.push(mmap_region); } Ok(Self::from_regions(mmap_regions).map_err(Error::CreateMemory)?) } } #[cfg(test)] mod tests { use std::collections::HashMap; use super::*; use std::io::{Read, Seek}; use utils::tempfile::TempFile; use vm_memory::GuestAddress; #[test] fn test_describe_state() { let page_size: usize = sysconf::page::pagesize(); let mem_regions = [ (GuestAddress(0), page_size), (GuestAddress(page_size as u64 * 2), page_size), ]; let guest_memory = GuestMemoryMmap::from_ranges(&mem_regions[..]).unwrap(); let expected_memory_state = GuestMemoryState { region
#[test] fn test_restore_memory() { let page_size: usize = sysconf::page::pagesize(); let mem_regions = [ (GuestAddress(0), page_size * 2), (GuestAddress(page_size as u64 * 3), page_size * 2), ]; let guest_memory = GuestMemoryMmap::from_ranges_with_tracking(&mem_regions[..]).unwrap(); let _res: std::result::Result<(), Error> = guest_memory.with_regions(|_, r| { assert!(!r.dirty_bitmap().unwrap().is_bit_set(0)); assert!(!r.dirty_bitmap().unwrap().is_bit_set(1)); Ok(()) }); let first_region = vec![1u8; page_size * 2]; guest_memory .write(&first_region[..], GuestAddress(0)) .unwrap(); let second_region = vec![2u8; page_size * 2]; guest_memory .write(&second_region[..], GuestAddress(page_size as u64 * 3)) .unwrap(); let memory_state = guest_memory.describe(); { let memory_file = TempFile::new().unwrap(); guest_memory.dump(&mut memory_file.as_file()).unwrap(); let restored_guest_memory = GuestMemoryMmap::restore(&memory_file.as_file(), &memory_state, false).unwrap(); let mut actual_region = vec![0u8; page_size * 2]; restored_guest_memory .read(&mut actual_region.as_mut_slice(), GuestAddress(0)) .unwrap(); assert_eq!(first_region, actual_region); restored_guest_memory .read( &mut actual_region.as_mut_slice(), GuestAddress(page_size as u64 * 3), ) .unwrap(); assert_eq!(second_region, actual_region); } { let mut dirty_bitmap: DirtyBitmap = HashMap::new(); dirty_bitmap.insert(0, vec![0b01; 1]); dirty_bitmap.insert(1, vec![0b10; 1]); let file = TempFile::new().unwrap(); guest_memory .dump_dirty(&mut file.as_file(), &dirty_bitmap) .unwrap(); let restored_guest_memory = GuestMemoryMmap::restore(&file.as_file(), &memory_state, false).unwrap(); let mut actual_region = vec![0u8; page_size * 2]; restored_guest_memory .read(&mut actual_region.as_mut_slice(), GuestAddress(0)) .unwrap(); assert_eq!(first_region, actual_region); restored_guest_memory .read( &mut actual_region.as_mut_slice(), GuestAddress(page_size as u64 * 3), ) .unwrap(); assert_eq!(second_region, actual_region); let file = TempFile::new().unwrap(); let mut reader = file.as_file(); let zeros = vec![0u8; page_size]; let ones = vec![1u8; page_size]; let twos = vec![2u8; page_size]; guest_memory .write(&twos[..], GuestAddress(page_size as u64)) .unwrap(); guest_memory.dump_dirty(&mut reader, &dirty_bitmap).unwrap(); let mut diff_file_content = Vec::new(); let expected_first_region = [ ones.as_slice(), twos.as_slice(), zeros.as_slice(), twos.as_slice(), ] .concat(); reader.seek(SeekFrom::Start(0)).unwrap(); reader.read_to_end(&mut diff_file_content).unwrap(); assert_eq!(expected_first_region, diff_file_content); } } }
s: vec![ GuestMemoryRegionState { base_address: 0, size: page_size, offset: 0, }, GuestMemoryRegionState { base_address: page_size as u64 * 2, size: page_size, offset: page_size as u64, }, ], }; let actual_memory_state = guest_memory.describe(); assert_eq!(expected_memory_state, actual_memory_state); let mem_regions = [ (GuestAddress(0), page_size * 3), (GuestAddress(page_size as u64 * 4), page_size * 3), ]; let guest_memory = GuestMemoryMmap::from_ranges(&mem_regions[..]).unwrap(); let expected_memory_state = GuestMemoryState { regions: vec![ GuestMemoryRegionState { base_address: 0, size: page_size * 3, offset: 0, }, GuestMemoryRegionState { base_address: page_size as u64 * 4, size: page_size * 3, offset: page_size as u64 * 3, }, ], }; let actual_memory_state = guest_memory.describe(); assert_eq!(expected_memory_state, actual_memory_state); }
function_block-function_prefixed
[ { "content": "/// Returns a Vec of the valid memory addresses.\n\n/// These should be used to configure the GuestMemoryMmap structure for the platform.\n\n/// For x86_64 all addresses are valid from the start of the kernel except a\n\n/// carve out at the end of 32bit address space.\n\npub fn arch_memory_regions(size: usize) -> Vec<(GuestAddress, usize)> {\n\n // It's safe to cast MMIO_MEM_START to usize because it fits in a u32 variable\n\n // (It points to an address in the 32 bit space).\n\n match size.checked_sub(MMIO_MEM_START as usize) {\n\n // case1: guest memory fits before the gap\n\n None | Some(0) => vec![(GuestAddress(0), size)],\n\n // case2: guest memory extends beyond the gap\n\n Some(remaining) => vec![\n\n (GuestAddress(0), MMIO_MEM_START as usize),\n\n (GuestAddress(FIRST_ADDR_PAST_32BITS), remaining),\n\n ],\n\n }\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/mod.rs", "rank": 0, "score": 431937.0756771088 }, { "content": "/// Returns a Vec of the valid memory addresses for aarch64.\n\n/// See [`layout`](layout) module for a drawing of the specific memory model for this platform.\n\npub fn arch_memory_regions(size: usize) -> Vec<(GuestAddress, usize)> {\n\n let dram_size = min(size as u64, layout::DRAM_MEM_MAX_SIZE) as usize;\n\n vec![(GuestAddress(layout::DRAM_MEM_START), dram_size)]\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/mod.rs", "rank": 1, "score": 431932.0772936976 }, { "content": "/// Returns the memory address where the initrd could be loaded.\n\npub fn initrd_load_addr(guest_mem: &GuestMemoryMmap, initrd_size: usize) -> super::Result<u64> {\n\n let round_to_pagesize = |size| (size + (super::PAGE_SIZE - 1)) & !(super::PAGE_SIZE - 1);\n\n match GuestAddress(get_fdt_addr(&guest_mem)).checked_sub(round_to_pagesize(initrd_size) as u64)\n\n {\n\n Some(offset) => {\n\n if guest_mem.address_in_range(offset) {\n\n Ok(offset.raw_value())\n\n } else {\n\n Err(Error::InitrdAddress)\n\n }\n\n }\n\n None => Err(Error::InitrdAddress),\n\n }\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/mod.rs", "rank": 2, "score": 395381.3982776188 }, { "content": "/// Returns the memory address where the initrd could be loaded.\n\npub fn initrd_load_addr(guest_mem: &GuestMemoryMmap, initrd_size: usize) -> super::Result<u64> {\n\n let first_region = guest_mem\n\n .find_region(GuestAddress::new(0))\n\n .ok_or(Error::InitrdAddress)?;\n\n // It's safe to cast to usize because the size of a region can't be greater than usize.\n\n let lowmem_size = first_region.len() as usize;\n\n\n\n if lowmem_size < initrd_size {\n\n return Err(Error::InitrdAddress);\n\n }\n\n\n\n let align_to_pagesize = |address| address & !(super::PAGE_SIZE - 1);\n\n Ok(align_to_pagesize(lowmem_size - initrd_size) as u64)\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/mod.rs", "rank": 3, "score": 395381.3982776188 }, { "content": "#[inline]\n\npub fn bench_restore_v1(mut snapshot_mem: &[u8], snapshot_len: usize, vm: VersionMap, crc: bool) {\n\n if crc {\n\n Snapshot::load::<&[u8], Test>(&mut snapshot_mem, snapshot_len, vm).unwrap();\n\n } else {\n\n Snapshot::unchecked_load::<&[u8], Test>(&mut snapshot_mem, vm).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/snapshot/benches/main.rs", "rank": 4, "score": 327826.22655410576 }, { "content": "pub fn create_socket() -> File {\n\n // This is safe since we check the return value.\n\n let socket = unsafe {\n\n libc::socket(\n\n libc::AF_PACKET,\n\n libc::SOCK_RAW,\n\n libc::ETH_P_ALL.to_be() as i32,\n\n )\n\n };\n\n if socket < 0 {\n\n panic!(\"Unable to create tap 
socket\");\n\n }\n\n\n\n // This is safe; nothing else will use or hold onto the raw socket fd.\n\n unsafe { File::from_raw_fd(socket) }\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 5, "score": 320076.04415837955 }, { "content": "pub fn default_guest_memory() -> GuestMemoryMmap {\n\n GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap()\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 6, "score": 318800.71636774595 }, { "content": "pub fn restore_stdin() {\n\n let stdin = io::stdin();\n\n stdin.lock().set_canon_mode().unwrap();\n\n}\n\n\n", "file_path": "src/vmm/tests/test_utils/mod.rs", "rank": 7, "score": 316286.03637434606 }, { "content": "pub fn default_mem() -> GuestMemoryMmap {\n\n GuestMemoryMmap::from_ranges(&[(GuestAddress(0), 0x10000)]).unwrap()\n\n}\n\n\n", "file_path": "src/devices/src/virtio/test_utils.rs", "rank": 8, "score": 314516.6959425146 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\nfn verify_create_snapshot(is_diff: bool) -> (TempFile, TempFile) {\n\n let snapshot_file = TempFile::new().unwrap();\n\n let memory_file = TempFile::new().unwrap();\n\n\n\n let pid = unsafe { libc::fork() };\n\n match pid {\n\n 0 => {\n\n set_panic_hook();\n\n\n\n let (vmm, _) = create_vmm(Some(NOISY_KERNEL_IMAGE), is_diff);\n\n\n\n // Be sure that the microVM is running.\n\n thread::sleep(Duration::from_millis(200));\n\n\n\n // Pause microVM.\n\n vmm.lock().unwrap().pause_vm().unwrap();\n\n\n\n // Create snapshot.\n\n let snapshot_type = match is_diff {\n\n true => SnapshotType::Diff,\n", "file_path": "src/vmm/tests/integration_tests.rs", "rank": 9, "score": 287469.3388602382 }, { "content": "#[cfg(target_arch = \"aarch64\")]\n\npub fn load_kernel<F>(\n\n guest_mem: &GuestMemoryMmap,\n\n kernel_image: &mut F,\n\n start_address: u64,\n\n) -> Result<GuestAddress>\n\nwhere\n\n F: Read + Seek,\n\n{\n\n /* Kernel boot protocol is specified in the kernel docs\n\n Documentation/arm/Booting and Documentation/arm64/booting.txt.\n\n\n\n ======aarch64 kernel header========\n\n u32 code0;\t\t\t/* Executable code */\n\n u32 code1;\t\t\t/* Executable code */\n\n u64 text_offset;\t\t/* Image load offset, little endian */\n\n u64 image_size;\t\t/* Effective Image size, little endian */\n\n u64 flags;\t\t\t/* kernel flags, little endian */\n\n u64 res2\t= 0;\t\t/* reserved */\n\n u64 res3\t= 0;\t\t/* reserved */\n\n u64 res4\t= 0;\t\t/* reserved */\n", "file_path": "src/kernel/src/loader/mod.rs", "rank": 10, "score": 286976.85847384785 }, { "content": "/// Returns the memory address where the kernel could be loaded.\n\npub fn get_kernel_start() -> u64 {\n\n layout::DRAM_MEM_START\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/mod.rs", "rank": 11, "score": 281992.65403535456 }, { "content": "/// Returns the memory address where the kernel could be loaded.\n\npub fn get_kernel_start() -> u64 {\n\n layout::HIMEM_START\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/mod.rs", "rank": 12, "score": 281992.65403535456 }, { "content": "pub fn get_element_from_queue(net: &Net, idx: usize) -> result::Result<u64, DeviceError> {\n\n if idx > net.queue_evts.len() {\n\n return Err(DeviceError::QueueError(QueueError::DescIndexOutOfBounds(\n\n idx as u16,\n\n )));\n\n }\n\n Ok(net.queue_evts[idx].as_raw_fd() as u64)\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 13, "score": 274254.9267806793 }, { "content": "// Check that the used queue event has been generated `count` times.\n\npub fn 
check_used_queue_signal(net: &Net, count: u64) {\n\n // Leave at least one event here so that reading it later won't block.\n\n net.interrupt_evt.write(1).unwrap();\n\n assert_eq!(net.interrupt_evt.read().unwrap(), count + 1);\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) fn inject_tap_tx_frame(net: &Net, len: usize) -> Vec<u8> {\n\n assert!(len >= vnet_hdr_len());\n\n let tap_traffic_simulator = TapTrafficSimulator::new(if_index(&net.tap));\n\n let mut frame = utils::rand::rand_alphanumerics(len - vnet_hdr_len())\n\n .as_bytes()\n\n .to_vec();\n\n tap_traffic_simulator.push_tx_packet(&frame);\n\n frame.splice(0..0, vec![b'\\0'; vnet_hdr_len()]);\n\n\n\n frame\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 14, "score": 273109.31961922575 }, { "content": "/// The default filter containing the white listed syscall rules required by `Firecracker` to\n\n/// function.\n\n/// Any non-trivial modification to this allow list needs a proper comment to specify its source\n\n/// or why the sycall/condition is needed.\n\npub fn default_filter() -> Result<SeccompFilter, Error> {\n\n Ok(SeccompFilter::new(\n\n vec![\n\n // Called by the api thread to receive data on socket\n\n allow_syscall_if(\n\n libc::SYS_accept4,\n\n or![and![Cond::new(\n\n 3,\n\n ArgLen::DWORD,\n\n Eq,\n\n libc::SOCK_CLOEXEC as u64\n\n )?],],\n\n ),\n\n // Called for expanding the heap\n\n allow_syscall(libc::SYS_brk),\n\n // Used for metrics, via the helpers in utils/src/time.rs\n\n allow_syscall_if(\n\n libc::SYS_clock_gettime,\n\n or![and![Cond::new(\n\n 0,\n", "file_path": "src/vmm/src/default_syscalls/filters.rs", "rank": 15, "score": 267349.1214546774 }, { "content": "pub fn write_element_in_queue(net: &Net, idx: usize, val: u64) -> result::Result<(), DeviceError> {\n\n if idx > net.queue_evts.len() {\n\n return Err(DeviceError::QueueError(QueueError::DescIndexOutOfBounds(\n\n idx as u16,\n\n )));\n\n }\n\n net.queue_evts[idx].write(val).unwrap();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 16, "score": 266600.00836941414 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let mut snapshot_mem = vec![0u8; 1024 * 1024 * 128];\n\n let mut vm = VersionMap::new();\n\n\n\n vm.new_version()\n\n .set_type_version(Test::type_id(), 2)\n\n .new_version()\n\n .set_type_version(Test::type_id(), 3)\n\n .new_version()\n\n .set_type_version(Test::type_id(), 4)\n\n .set_type_version(Dummy::type_id(), 2);\n\n\n\n let mut slice = &mut snapshot_mem.as_mut_slice();\n\n save(&mut slice, vm.clone());\n\n let snapshot_len = slice.as_ptr() as usize - snapshot_mem.as_slice().as_ptr() as usize;\n\n println!(\"Snapshot length: {} bytes\", snapshot_len);\n\n\n\n c.bench_function(\"Serialize in vspace=4\", |b| {\n\n b.iter(|| {\n\n save(\n", "file_path": "src/snapshot/benches/version_map.rs", "rank": 17, "score": 266196.9482294609 }, { "content": "pub fn set_panic_hook() {\n\n panic::set_hook(Box::new(move |_| {\n\n restore_stdin();\n\n unsafe {\n\n libc::exit(VMM_ERR_EXIT);\n\n }\n\n }));\n\n}\n", "file_path": "src/vmm/tests/test_utils/mod.rs", "rank": 18, "score": 265760.7155140924 }, { "content": "pub fn validate_vendor_id() -> Result<(), Error> {\n\n let vendor_id = get_vendor_id().map_err(Error::InternalError)?;\n\n if &vendor_id != VENDOR_ID_INTEL {\n\n return Err(Error::InvalidVendor);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/cpuid/src/template/intel/mod.rs", "rank": 19, "score": 264304.96933668345 }, { "content": "#[inline]\n\npub fn bench_snapshot_v1<W: 
std::io::Write>(mut snapshot_mem: &mut W, vm: VersionMap, crc: bool) {\n\n let state = Test {\n\n dummy: vec![\n\n Dummy {\n\n dummy: 123,\n\n string: \"xxx\".to_owned()\n\n };\n\n 100\n\n ],\n\n field0: 0,\n\n field1: 1,\n\n field2: 2,\n\n field3: \"test\".to_owned(),\n\n field4: vec![4; 1024 * 10],\n\n field_x: 0,\n\n };\n\n\n\n let mut snapshot = Snapshot::new(vm.clone(), 4);\n\n if crc {\n\n snapshot.save(&mut snapshot_mem, &state).unwrap();\n\n } else {\n\n snapshot\n\n .save_without_crc(&mut snapshot_mem, &state)\n\n .unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/snapshot/benches/main.rs", "rank": 20, "score": 264301.81219928036 }, { "content": "/// Creates GuestMemory of `mem_size_mib` MiB in size.\n\npub fn create_guest_memory(\n\n mem_size_mib: usize,\n\n track_dirty_pages: bool,\n\n) -> std::result::Result<GuestMemoryMmap, StartMicrovmError> {\n\n let mem_size = mem_size_mib << 20;\n\n let arch_mem_regions = arch::arch_memory_regions(mem_size);\n\n\n\n if !track_dirty_pages {\n\n Ok(GuestMemoryMmap::from_ranges(&arch_mem_regions)\n\n .map_err(StartMicrovmError::GuestMemoryMmap)?)\n\n } else {\n\n Ok(\n\n GuestMemoryMmap::from_ranges_with_tracking(&arch_mem_regions)\n\n .map_err(StartMicrovmError::GuestMemoryMmap)?,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/vmm/src/builder.rs", "rank": 21, "score": 263544.6474341619 }, { "content": "pub fn invoke_handler_for_queue_event(b: &mut Balloon, queue_index: usize) {\n\n assert!(queue_index < NUM_QUEUES);\n\n // Trigger the queue event.\n\n b.queue_evts[queue_index].write(1).unwrap();\n\n // Handle event.\n\n b.process(\n\n &EpollEvent::new(EventSet::IN, b.queue_evts[queue_index].as_raw_fd() as u64),\n\n &mut EventManager::new().unwrap(),\n\n );\n\n // Validate the queue operation finished successfully.\n\n assert_eq!(b.interrupt_evt.read().unwrap(), 1);\n\n}\n\n\n", "file_path": "src/devices/src/virtio/balloon/test_utils.rs", "rank": 22, "score": 263353.6793978339 }, { "content": "// Auxiliary function to get the address where the device tree blob is loaded.\n\nfn get_fdt_addr(mem: &GuestMemoryMmap) -> u64 {\n\n // If the memory allocated is smaller than the size allocated for the FDT,\n\n // we return the start of the DRAM so that\n\n // we allow the code to try and load the FDT.\n\n\n\n if let Some(addr) = mem.last_addr().checked_sub(layout::FDT_MAX_SIZE as u64 - 1) {\n\n if mem.address_in_range(addr) {\n\n return addr.raw_value();\n\n }\n\n }\n\n\n\n layout::DRAM_MEM_START\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_regions_lt_1024gb() {\n", "file_path": "src/arch/src/aarch64/mod.rs", "rank": 23, "score": 260735.25854270515 }, { "content": "pub fn set_queue(blk: &mut Block, idx: usize, q: Queue) {\n\n blk.queues[idx] = q;\n\n}\n\n\n", "file_path": "src/devices/src/virtio/block/test_utils.rs", "rank": 24, "score": 258642.19988476415 }, { "content": "fn create_ioctl_seccomp_rule() -> Result<Vec<SeccompRule>, Error> {\n\n let mut rule = or![\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_RUN)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_DIRTY_LOG)?],\n\n // Triggered on shutdown, to restore the initial terminal settings,\n\n // only when Firecracker was launched from a shell.\n\n and![Cond::new(1, ArgLen::DWORD, Eq, TCGETS)?],\n\n // Triggered on shutdown, to restore the initial terminal settings,\n\n // only when Firecracker was launched from a shell.\n\n and![Cond::new(1, ArgLen::DWORD, Eq, TCSETS)?],\n\n // Triggered on shutdown, to restore the initial terminal settings.\n\n 
and![Cond::new(1, ArgLen::DWORD, Eq, TIOCGWINSZ)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, FIONBIO)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, TUNSETIFF)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, TUNSETOFFLOAD)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, TUNSETVNETHDRSZ)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_MP_STATE)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_SET_MP_STATE)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_VCPU_EVENTS)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_SET_VCPU_EVENTS)?],\n", "file_path": "src/vmm/src/default_syscalls/mod.rs", "rank": 25, "score": 255151.16020914697 }, { "content": "fn create_arch_specific_ioctl_conditions() -> Result<Vec<SeccompRule>, Error> {\n\n #[cfg(target_arch = \"x86_64\")]\n\n use arch_specific_constants::*;\n\n\n\n #[cfg(target_arch = \"x86_64\")]\n\n return Ok(or![\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_LAPIC)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_SREGS)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_SET_CPUID2)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_SET_LAPIC)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_SET_MSRS)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_SET_REGS)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_SET_SREGS)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_IRQCHIP)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_CLOCK)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_PIT2)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_REGS)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_MSRS)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_CPUID2)?],\n\n and![Cond::new(1, ArgLen::DWORD, Eq, KVM_GET_DEBUGREGS)?],\n", "file_path": "src/vmm/src/default_syscalls/mod.rs", "rank": 26, "score": 250669.8074067989 }, { "content": "/// Offers mutable access to a sequence of bytes which stands for different values packed\n\n/// together using network byte ordering.\n\npub trait NetworkBytesMut: NetworkBytes + DerefMut<Target = [u8]> {\n\n /// Writes the given `u16` value at the specified `offset` using network byte ordering.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This method will panic if `offset` is invalid.\n\n #[inline]\n\n fn htons_unchecked(&mut self, offset: usize, value: u16) {\n\n byte_order::write_be_u16(&mut self[offset..], value)\n\n }\n\n\n\n /// Writes the given `u32` value at the specified `offset` using network byte ordering.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This method will panic if `offset` is invalid.\n\n #[inline]\n\n fn htonl_unchecked(&mut self, offset: usize, value: u32) {\n\n byte_order::write_be_u32(&mut self[offset..], value)\n\n }\n", "file_path": "src/dumbo/src/pdu/bytes.rs", "rank": 27, "score": 249407.84819651605 }, { "content": "pub fn invoke_handler_for_queue_event(b: &mut Block) {\n\n // Trigger the queue event.\n\n b.queue_evts[0].write(1).unwrap();\n\n // Handle event.\n\n b.process(\n\n &EpollEvent::new(EventSet::IN, b.queue_evts[0].as_raw_fd() as u64),\n\n &mut EventManager::new().unwrap(),\n\n );\n\n // Validate the queue operation finished successfully.\n\n assert_eq!(b.interrupt_evt.read().unwrap(), 1);\n\n}\n\n\n", "file_path": "src/devices/src/virtio/block/test_utils.rs", "rank": 28, "score": 248056.24655784148 }, { "content": "// Parse a magic_id and return the format version.\n\nfn get_format_version(magic_id: u64) -> Result<u16, Error> {\n\n let magic_arch = magic_id & BASE_MAGIC_ID_MASK;\n\n if magic_arch == BASE_MAGIC_ID {\n\n return Ok((magic_id & !BASE_MAGIC_ID_MASK) as u16);\n\n }\n\n 
Err(Error::InvalidMagic(magic_id))\n\n}\n\n\n", "file_path": "src/snapshot/src/lib.rs", "rank": 29, "score": 247223.62453939917 }, { "content": "/// Returns a timestamp in nanoseconds from a monotonic clock.\n\n///\n\n/// Uses `_rdstc` on `x86_64` and [`get_time`](fn.get_time.html) on other architectures.\n\npub fn timestamp_cycles() -> u64 {\n\n #[cfg(target_arch = \"x86_64\")]\n\n // Safe because there's nothing that can go wrong with this call.\n\n unsafe {\n\n std::arch::x86_64::_rdtsc() as u64\n\n }\n\n #[cfg(not(target_arch = \"x86_64\"))]\n\n {\n\n get_time_ns(ClockType::Monotonic)\n\n }\n\n}\n\n\n", "file_path": "src/utils/src/time.rs", "rank": 30, "score": 245000.94899525406 }, { "content": "/// Create a default Block instance to be used in tests.\n\npub fn default_block() -> Block {\n\n // Create backing file.\n\n let f = TempFile::new().unwrap();\n\n f.as_file().set_len(0x1000).unwrap();\n\n\n\n default_block_with_path(f.as_path().to_str().unwrap().to_string())\n\n}\n\n\n", "file_path": "src/devices/src/virtio/block/test_utils.rs", "rank": 31, "score": 241106.99446029816 }, { "content": "pub fn default_net() -> Net {\n\n let next_tap = NEXT_INDEX.fetch_add(1, Ordering::SeqCst);\n\n let tap_dev_name = format!(\"net-device{}\", next_tap);\n\n\n\n let guest_mac = default_guest_mac();\n\n\n\n let net = Net::new_with_tap(\n\n format!(\"net-device{}\", next_tap),\n\n tap_dev_name,\n\n Some(&guest_mac),\n\n RateLimiter::default(),\n\n RateLimiter::default(),\n\n true,\n\n )\n\n .unwrap();\n\n enable(&net.tap);\n\n\n\n net\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 32, "score": 241082.84053960437 }, { "content": "pub fn rate_limiter(blk: &mut Block) -> &RateLimiter {\n\n &blk.rate_limiter\n\n}\n", "file_path": "src/devices/src/virtio/block/test_utils.rs", "rank": 33, "score": 238042.84435439628 }, { "content": "#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n\npub fn filter_cpuid(kvm_cpuid: &mut CpuId, vm_spec: &VmSpec) -> Result<(), Error> {\n\n let maybe_cpuid_transformer: Option<&dyn CpuidTransformer> = match vm_spec.cpu_vendor_id() {\n\n VENDOR_ID_INTEL => Some(&intel::IntelCpuidTransformer {}),\n\n VENDOR_ID_AMD => Some(&amd::AmdCpuidTransformer {}),\n\n _ => None,\n\n };\n\n\n\n if let Some(cpuid_transformer) = maybe_cpuid_transformer {\n\n cpuid_transformer.process_cpuid(kvm_cpuid, &vm_spec)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/cpuid/src/lib.rs", "rank": 34, "score": 237242.36438813753 }, { "content": "#[inline]\n\npub fn seq_at_or_after(a: Wrapping<u32>, b: Wrapping<u32>) -> bool {\n\n (a - b).0 < MAX_WINDOW_SIZE\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use micro_http::{Request, Response, StatusCode, Version};\n\n\n\n // In tcp tests, some of the functions require a callback parameter. 
Since we do not care,\n\n // for the purpose of those tests, what that callback does, we need to provide a dummy one.\n\n pub fn mock_callback(_request: Request) -> Response {\n\n Response::new(Version::Http11, StatusCode::OK)\n\n }\n\n\n\n #[test]\n\n fn test_rst_config() {\n\n let mut buf = [0u8; 100];\n\n\n\n let seq = 1234;\n", "file_path": "src/dumbo/src/tcp/mod.rs", "rank": 35, "score": 236452.59106881393 }, { "content": "#[inline]\n\npub fn seq_after(a: Wrapping<u32>, b: Wrapping<u32>) -> bool {\n\n a != b && (a - b).0 < MAX_WINDOW_SIZE\n\n}\n\n\n\n/// Returns true if `a` comes after, or is at `b` in the sequence number space, relative to\n\n/// the maximum possible window size.\n\n///\n\n/// Please note this is not a connex binary relation; in other words, given two sequence numbers,\n\n/// it's sometimes possible that `seq_at_or_after(a, b) || seq_at_or_after(b, a) == false`. This\n\n/// is why `seq_after(a, b)` can't be defined as simply `!seq_at_or_after(b, a)`.\n", "file_path": "src/dumbo/src/tcp/mod.rs", "rank": 36, "score": 236452.59106881393 }, { "content": "#[derive(Clone, Debug, Default, Versionize)]\n\nstruct Test {\n\n a: Vec<Dummy>,\n\n #[version(start = 1)]\n\n b: u64,\n\n #[version(start = 2)]\n\n c: u64,\n\n #[version(start = 3)]\n\n d: u32,\n\n #[version(start = 4)]\n\n e: Vec<u64>,\n\n}\n\n\n", "file_path": "src/snapshot/benches/version_map.rs", "rank": 37, "score": 235003.76075631555 }, { "content": "#[inline]\n\nfn restore(mut snapshot_mem: &[u8], vm: VersionMap) {\n\n Snapshot::unchecked_load::<&[u8], Test>(&mut snapshot_mem, vm).unwrap();\n\n}\n\n\n", "file_path": "src/snapshot/benches/version_map.rs", "rank": 38, "score": 233806.4803421408 }, { "content": "fn create_memory_node(fdt: &mut Vec<u8>, guest_mem: &GuestMemoryMmap) -> Result<()> {\n\n let mem_size = guest_mem.last_addr().raw_value() - super::layout::DRAM_MEM_START + 1;\n\n // See https://github.com/torvalds/linux/blob/master/Documentation/devicetree/booting-without-of.txt#L960\n\n // for an explanation of this.\n\n let mem_reg_prop = generate_prop64(&[super::layout::DRAM_MEM_START as u64, mem_size as u64]);\n\n\n\n append_begin_node(fdt, \"memory\")?;\n\n append_property_string(fdt, \"device_type\", \"memory\")?;\n\n append_property(fdt, \"reg\", &mem_reg_prop)?;\n\n append_end_node(fdt)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/fdt.rs", "rank": 39, "score": 233500.5185991726 }, { "content": "#[inline]\n\npub fn test_speculative_tpa(buf: &[u8], addr: Ipv4Addr) -> bool {\n\n // The unchecked methods are safe because we actually check the buffer length beforehand.\n\n if buf.len() >= ethernet::PAYLOAD_OFFSET + ETH_IPV4_FRAME_LEN {\n\n let bytes = &buf[ethernet::PAYLOAD_OFFSET..];\n\n if EthIPv4ArpFrame::from_bytes_unchecked(bytes).tpa() == addr {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fmt;\n\n\n\n impl<'a, T: NetworkBytes> fmt::Debug for EthIPv4ArpFrame<'a, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"(EthIPv4ArpFrame frame)\")\n\n }\n", "file_path": "src/dumbo/src/pdu/arp.rs", "rank": 40, "score": 232718.96592753014 }, { "content": "pub fn default_guest_mac() -> MacAddr {\n\n MacAddr::parse_str(\"11:22:33:44:55:66\").unwrap()\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 41, "score": 232653.0332778719 }, { "content": "fn set_klapic_reg(klapic: &mut kvm_lapic_state, reg_offset: usize, value: u32) {\n\n let range = reg_offset..reg_offset + 4;\n\n 
let reg = klapic.regs.get_mut(range).expect(\"set_klapic_reg range\");\n\n byte_order::write_le_i32(&mut reg[..], value as i32)\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/interrupts.rs", "rank": 43, "score": 231654.6204693891 }, { "content": "// Returns handles to virtio queues creation/activation and manipulation.\n\npub fn virtqueues(mem: &GuestMemoryMmap) -> (VirtQueue, VirtQueue) {\n\n let rxq = VirtQueue::new(GuestAddress(0), mem, 16);\n\n let txq = VirtQueue::new(GuestAddress(0x1000), mem, 16);\n\n assert!(rxq.end().0 < txq.start().0);\n\n\n\n (rxq, txq)\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 44, "score": 231087.05626488698 }, { "content": "#[allow(clippy::len_without_is_empty)]\n\npub trait ByteBuffer: Index<usize, Output = u8> {\n\n /// Returns the length of the buffer.\n\n fn len(&self) -> usize;\n\n\n\n /// Reads `buf.len()` bytes from `buf` into the inner buffer, starting at `offset`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `offset + buf.len()` < `self.len()`.\n\n fn read_to_slice(&self, offset: usize, buf: &mut [u8]);\n\n}\n\n\n\nimpl ByteBuffer for [u8] {\n\n #[inline]\n\n fn len(&self) -> usize {\n\n self.len()\n\n }\n\n\n\n #[inline]\n\n fn read_to_slice(&self, offset: usize, buf: &mut [u8]) {\n", "file_path": "src/dumbo/src/lib.rs", "rank": 45, "score": 229293.30911457975 }, { "content": "#[inline]\n\npub fn test_speculative_dst_addr(buf: &[u8], addr: Ipv4Addr) -> bool {\n\n // The unchecked methods are safe because we actually check the buffer length beforehand.\n\n if buf.len() >= ethernet::PAYLOAD_OFFSET + OPTIONS_OFFSET {\n\n let bytes = &buf[ethernet::PAYLOAD_OFFSET..];\n\n if IPv4Packet::from_bytes_unchecked(bytes).destination_address() == addr {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::fmt;\n\n\n\n use crate::MacAddr;\n\n\n\n use super::*;\n\n\n\n const MAX_HEADER_LEN: usize = 60;\n", "file_path": "src/dumbo/src/pdu/ipv4.rs", "rank": 46, "score": 229157.98109254515 }, { "content": "pub fn set_mac(net: &mut Net, mac: MacAddr) {\n\n net.guest_mac = Some(mac);\n\n net.config_space.guest_mac.copy_from_slice(mac.get_bytes());\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 47, "score": 229036.2118246032 }, { "content": "pub fn set_request(queue: &VirtQueue, idx: usize, addr: u64, len: u32, flags: u16) {\n\n // Set the index of the next request.\n\n queue.avail.idx.set((idx + 1) as u16);\n\n // Set the current descriptor table entry index.\n\n queue.avail.ring[idx].set(idx as u16);\n\n // Set the current descriptor table entry.\n\n queue.dtable[idx].set(addr, len, flags, 1);\n\n}\n\n\n", "file_path": "src/devices/src/virtio/balloon/test_utils.rs", "rank": 48, "score": 229006.048984255 }, { "content": "/// Sets up the cpuid entries for a given VCPU following a T2 template.\n\npub fn set_cpuid_entries(kvm_cpuid: &mut CpuId, vm_spec: &VmSpec) -> Result<(), Error> {\n\n validate_vendor_id()?;\n\n T2CpuidTransformer {}.process_cpuid(kvm_cpuid, vm_spec)\n\n}\n", "file_path": "src/cpuid/src/template/intel/t2.rs", "rank": 49, "score": 228220.07743245212 }, { "content": "/// Sets up the cpuid entries for a given VCPU following a C3 template.\n\npub fn set_cpuid_entries(kvm_cpuid: &mut CpuId, vm_spec: &VmSpec) -> Result<(), Error> {\n\n validate_vendor_id()?;\n\n C3CpuidTransformer {}.process_cpuid(kvm_cpuid, vm_spec)\n\n}\n", "file_path": "src/cpuid/src/template/intel/c3.rs", "rank": 50, "score": 228220.07743245212 }, { "content": "/// Trait that 
helps in upcasting an object to Any\n\npub trait AsAny {\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn as_mut_any(&mut self) -> &mut dyn Any;\n\n}\n\nimpl<T: Any> AsAny for T {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn as_mut_any(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n}\n", "file_path": "src/devices/src/virtio/mod.rs", "rank": 51, "score": 226818.2966346424 }, { "content": "pub fn parse_patch_vm_state(body: &Body) -> Result<ParsedRequest, Error> {\n\n let vm = serde_json::from_slice::<Vm>(body.raw()).map_err(Error::SerdeJson)?;\n\n\n\n match vm.state {\n\n VmState::Paused => Ok(ParsedRequest::new_sync(VmmAction::Pause)),\n\n VmState::Resumed => Ok(ParsedRequest::new_sync(VmmAction::Resume)),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[cfg(target_arch = \"x86_64\")]\n\n use crate::parsed_request::tests::vmm_action_from_request;\n\n\n\n #[test]\n\n #[cfg(target_arch = \"x86_64\")]\n\n fn test_parse_put_snapshot() {\n\n use std::path::PathBuf;\n\n use vmm::vmm_config::snapshot::SnapshotType;\n", "file_path": "src/api_server/src/request/snapshot.rs", "rank": 52, "score": 226648.06174279883 }, { "content": "// Following are the auxiliary function for creating the different nodes that we append to our FDT.\n\nfn create_cpu_nodes(fdt: &mut Vec<u8>, vcpu_mpidr: &[u64]) -> Result<()> {\n\n // See https://github.com/torvalds/linux/blob/master/Documentation/devicetree/bindings/arm/cpus.yaml.\n\n append_begin_node(fdt, \"cpus\")?;\n\n // As per documentation, on ARM v8 64-bit systems value should be set to 2.\n\n append_property_u32(fdt, \"#address-cells\", 0x02)?;\n\n append_property_u32(fdt, \"#size-cells\", 0x0)?;\n\n\n\n let num_cpus = vcpu_mpidr.len();\n\n for (cpu_index, mpidr) in vcpu_mpidr.iter().enumerate() {\n\n let cpu_name = format!(\"cpu@{:x}\", cpu_index);\n\n append_begin_node(fdt, &cpu_name)?;\n\n append_property_string(fdt, \"device_type\", \"cpu\")?;\n\n append_property_string(fdt, \"compatible\", \"arm,arm-v8\")?;\n\n if num_cpus > 1 {\n\n // If the microVM has more than 1 vcpu we need to enable the power\n\n // state coordination interface (PSCI) which will decide for us which\n\n // vcpu is running or halted.\n\n append_property_string(fdt, \"enable-method\", \"psci\")?;\n\n }\n\n // Set the field to first 24 bits of the MPIDR - Multiprocessor Affinity Register.\n\n // See http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0488c/BABHBJCI.html.\n\n append_property_u64(fdt, \"reg\", mpidr & 0x7F_FFFF)?;\n\n append_end_node(fdt)?;\n\n }\n\n append_end_node(fdt)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/arch/src/aarch64/fdt.rs", "rank": 53, "score": 225725.23013311025 }, { "content": "pub fn set_rate_limiter(blk: &mut Block, rl: RateLimiter) {\n\n blk.rate_limiter = rl;\n\n}\n\n\n", "file_path": "src/devices/src/virtio/block/test_utils.rs", "rank": 54, "score": 225634.60863212153 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let mut snapshot_mem = vec![0u8; 1024 * 1024 * 128];\n\n let mut vm = VersionMap::new();\n\n\n\n vm.new_version()\n\n .set_type_version(Test::type_id(), 2)\n\n .new_version()\n\n .set_type_version(Test::type_id(), 3)\n\n .new_version()\n\n .set_type_version(Test::type_id(), 4);\n\n\n\n let mut slice = &mut snapshot_mem.as_mut_slice();\n\n bench_snapshot_v1(&mut slice, vm.clone(), false);\n\n let mut snapshot_len = slice.as_ptr() as usize - snapshot_mem.as_slice().as_ptr() as usize;\n\n\n\n println!(\"Snapshot length: {} bytes\", snapshot_len);\n\n\n\n 
c.bench_function(\"Serialize to v4\", |b| {\n\n b.iter(|| {\n\n bench_snapshot_v1(\n", "file_path": "src/snapshot/benches/main.rs", "rank": 55, "score": 225142.86003300542 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\nfn verify_load_snapshot(snapshot_file: TempFile, memory_file: TempFile) {\n\n use vm_memory::GuestMemoryMmap;\n\n use vmm::memory_snapshot::SnapshotMemory;\n\n\n\n let pid = unsafe { libc::fork() };\n\n match pid {\n\n 0 => {\n\n set_panic_hook();\n\n let mut event_manager = EventManager::new().unwrap();\n\n let empty_seccomp_filter = get_seccomp_filter(SeccompLevel::None).unwrap();\n\n\n\n // Deserialize microVM state.\n\n let snapshot_file_metadata = snapshot_file.as_file().metadata().unwrap();\n\n let snapshot_len = snapshot_file_metadata.len() as usize;\n\n snapshot_file.as_file().seek(SeekFrom::Start(0)).unwrap();\n\n let microvm_state: MicrovmState = Snapshot::load(\n\n &mut snapshot_file.as_file(),\n\n snapshot_len,\n\n VERSION_MAP.clone(),\n\n )\n", "file_path": "src/vmm/tests/integration_tests.rs", "rank": 56, "score": 222182.01267654487 }, { "content": "/// Creates a Microvm snapshot.\n\npub fn create_snapshot(\n\n vmm: &mut Vmm,\n\n params: &CreateSnapshotParams,\n\n version_map: VersionMap,\n\n) -> std::result::Result<(), CreateSnapshotError> {\n\n let microvm_state = vmm\n\n .save_state()\n\n .map_err(CreateSnapshotError::MicrovmState)?;\n\n\n\n snapshot_memory_to_file(vmm, &params.mem_file_path, &params.snapshot_type)?;\n\n\n\n snapshot_state_to_file(\n\n &microvm_state,\n\n &params.snapshot_path,\n\n &params.version,\n\n version_map,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/vmm/src/persist.rs", "rank": 57, "score": 220540.61233704613 }, { "content": "/// Generic trait that provides methods for transforming the cpuid\n\npub trait CpuidTransformer {\n\n /// Trait main function. It processes the cpuid and makes the desired transformations.\n\n /// The default logic can be overwritten if needed. 
For example see `AmdCpuidTransformer`.\n\n fn process_cpuid(&self, cpuid: &mut CpuId, vm_spec: &VmSpec) -> Result<(), Error> {\n\n self.process_entries(cpuid, vm_spec)\n\n }\n\n\n\n /// Iterates through all the cpuid entries and calls the associated transformer for each one.\n\n fn process_entries(&self, cpuid: &mut CpuId, vm_spec: &VmSpec) -> Result<(), Error> {\n\n for entry in cpuid.as_mut_slice().iter_mut() {\n\n let maybe_transformer_fn = self.entry_transformer_fn(entry);\n\n\n\n if let Some(transformer_fn) = maybe_transformer_fn {\n\n transformer_fn(entry, vm_spec)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "src/cpuid/src/transformer/mod.rs", "rank": 58, "score": 218490.13841968472 }, { "content": "/// Specifies whether a particular MSR should be included in vcpu serialization.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `index` - The index of the MSR that is checked whether it's needed for serialization.\n\npub fn msr_should_serialize(index: u32) -> bool {\n\n // Blacklisted MSRs not exported by Linux: IA32_FEATURE_CONTROL and IA32_MCG_CTL\n\n if index == MSR_IA32_FEATURE_CONTROL || index == MSR_IA32_MCG_CTL {\n\n return false;\n\n };\n\n WHITELISTED_MSR_RANGES\n\n .iter()\n\n .any(|range| range.contains(index))\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/msr.rs", "rank": 59, "score": 218233.1427994304 }, { "content": "/// Create a default Block instance using file at the specified path to be used in tests.\n\npub fn default_block_with_path(path: String) -> Block {\n\n // Rate limiting is enabled but with a high operation rate (10 million ops/s).\n\n let rate_limiter = RateLimiter::new(0, 0, 0, 100_000, 0, 10).unwrap();\n\n\n\n let id = \"test\".to_string();\n\n // The default block device is read-write and non-root.\n\n Block::new(id, None, path, false, false, rate_limiter).unwrap()\n\n}\n\n\n", "file_path": "src/devices/src/virtio/block/test_utils.rs", "rank": 60, "score": 218208.53710385563 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\npub fn setup_interrupt_controller(vm: &mut Vm) -> std::result::Result<(), StartMicrovmError> {\n\n vm.setup_irqchip()\n\n .map_err(Error::Vm)\n\n .map_err(StartMicrovmError::Internal)\n\n}\n\n\n\n/// Sets up the irqchip for a aarch64 microVM.\n", "file_path": "src/vmm/src/builder.rs", "rank": 61, "score": 217675.1239098738 }, { "content": "// Assigns \"guest virtio driver\" activated queues to the net device.\n\npub fn assign_queues(net: &mut Net, rxq: Queue, txq: Queue) {\n\n net.queues.clear();\n\n net.queues.push(rxq);\n\n net.queues.push(txq);\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use crate::check_metric_after_block;\n\n use crate::virtio::net::device::vnet_hdr_len;\n\n use crate::virtio::net::test_utils::{\n\n assign_queues, check_used_queue_signal, default_net, inject_tap_tx_frame, NetEvent,\n\n NetQueue, ReadTapMock,\n\n };\n\n use crate::virtio::test_utils::{VirtQueue, VirtqDesc};\n\n use crate::virtio::{\n\n Net, VirtioDevice, MAX_BUFFER_SIZE, RX_INDEX, TX_INDEX, VIRTQ_DESC_F_NEXT,\n\n VIRTQ_DESC_F_WRITE,\n\n };\n\n use logger::{IncMetric, METRICS};\n", "file_path": "src/devices/src/virtio/net/test_utils.rs", "rank": 62, "score": 216662.08732753329 }, { "content": "/// Configures the system and should be called once per vm before starting vcpu threads.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `guest_mem` - The memory to be used by the guest.\n\n/// * `cmdline_addr` - Address in `guest_mem` where the kernel command line was loaded.\n\n/// * `cmdline_size` - Size of the kernel command line in bytes including 
the null terminator.\n\n/// * `initrd` - Information about where the ramdisk image was loaded in the `guest_mem`.\n\n/// * `num_cpus` - Number of virtual CPUs the guest will have.\n\npub fn configure_system(\n\n guest_mem: &GuestMemoryMmap,\n\n cmdline_addr: GuestAddress,\n\n cmdline_size: usize,\n\n initrd: &Option<InitrdConfig>,\n\n num_cpus: u8,\n\n) -> super::Result<()> {\n\n const KERNEL_BOOT_FLAG_MAGIC: u16 = 0xaa55;\n\n const KERNEL_HDR_MAGIC: u32 = 0x5372_6448;\n\n const KERNEL_LOADER_OTHER: u8 = 0xff;\n\n const KERNEL_MIN_ALIGNMENT_BYTES: u32 = 0x0100_0000; // Must be non-zero.\n\n let first_addr_past_32bits = GuestAddress(FIRST_ADDR_PAST_32BITS);\n\n let end_32bit_gap_start = GuestAddress(MMIO_MEM_START);\n\n\n\n let himem_start = GuestAddress(layout::HIMEM_START);\n\n\n\n // Note that this puts the mptable at the last 1k of Linux's 640k base RAM\n\n mptable::setup_mptable(guest_mem, num_cpus).map_err(Error::MpTableSetup)?;\n\n\n\n let mut params: BootParamsWrapper = BootParamsWrapper(boot_params::default());\n", "file_path": "src/arch/src/x86_64/mod.rs", "rank": 63, "score": 216281.4539259841 }, { "content": "/// Writes the command line string to the given memory slice.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `guest_mem` - A u8 slice that will be partially overwritten by the command line.\n\n/// * `guest_addr` - The address in `guest_mem` at which to load the command line.\n\n/// * `cmdline` - The kernel command line as CString.\n\npub fn load_cmdline(\n\n guest_mem: &GuestMemoryMmap,\n\n guest_addr: GuestAddress,\n\n cmdline: &CString,\n\n) -> std::result::Result<(), CmdlineError> {\n\n let raw_cmdline = cmdline.as_bytes_with_nul();\n\n if raw_cmdline.len() <= 1 {\n\n return Ok(());\n\n }\n\n\n\n let cmdline_last_addr = guest_addr\n\n .checked_add(raw_cmdline.len() as u64 - 1)\n\n .ok_or(CmdlineError::CommandLineOverflow)?; // Extra for null termination.\n\n\n\n if cmdline_last_addr > guest_mem.last_addr() {\n\n return Err(CmdlineError::CommandLineOverflow);\n\n }\n\n\n\n guest_mem\n\n .write_slice(raw_cmdline, guest_addr)\n", "file_path": "src/kernel/src/loader/mod.rs", "rank": 64, "score": 216267.2569622505 }, { "content": "/// Any channel that handles vsock packet traffic: sending and receiving packets. Since we're\n\n/// implementing the device model here, our responsibility is to always process the sending of\n\n/// packets (i.e. the TX queue). So, any locally generated data, addressed to the driver (e.g.\n\n/// a connection response or RST), will have to be queued, until we get to processing the RX queue.\n\n///\n\n/// Note: `recv_pkt()` and `send_pkt()` are named analogous to `Read::read()` and `Write::write()`,\n\n/// respectively. 
I.e.\n\n/// - `recv_pkt(&mut pkt)` will read data from the channel, and place it into `pkt`; and\n\n/// - `send_pkt(&pkt)` will fetch data from `pkt`, and place it into the channel.\n\npub trait VsockChannel {\n\n /// Read/receive an incoming packet from the channel.\n\n fn recv_pkt(&mut self, pkt: &mut VsockPacket) -> Result<()>;\n\n\n\n /// Write/send a packet through the channel.\n\n fn send_pkt(&mut self, pkt: &VsockPacket) -> Result<()>;\n\n\n\n /// Checks whether there is pending incoming data inside the channel, meaning that a subsequent\n\n /// call to `recv_pkt()` won't fail.\n\n fn has_pending_rx(&self) -> bool;\n\n}\n\n\n", "file_path": "src/devices/src/virtio/vsock/mod.rs", "rank": 65, "score": 214662.5298930818 }, { "content": "pub fn update_metric_with_elapsed_time(metric: &SharedStoreMetric, start_time_us: u64) -> u64 {\n\n let delta_us = utils::time::get_time_us(utils::time::ClockType::Monotonic) - start_time_us;\n\n metric.store(delta_us as usize);\n\n delta_us\n\n}\n", "file_path": "src/logger/src/lib.rs", "rank": 66, "score": 213334.9772757084 }, { "content": "/// Finds the first occurence of `sequence` in the `bytes` slice.\n\n///\n\n/// Returns the starting position of the `sequence` in `bytes` or `None` if the\n\n/// `sequence` is not found.\n\npub fn find(bytes: &[u8], sequence: &[u8]) -> Option<usize> {\n\n bytes\n\n .windows(sequence.len())\n\n .position(|window| window == sequence)\n\n}\n\n\n\n/// Wrapper over HTTP URIs.\n\n///\n\n/// The `Uri` can not be used directly and it is only accessible from an HTTP Request.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Uri {\n\n string: String,\n\n}\n\n\n\nimpl Uri {\n\n fn new(slice: &str) -> Self {\n\n Self {\n\n string: String::from(slice),\n\n }\n\n }\n", "file_path": "src/micro_http/src/request.rs", "rank": 67, "score": 212380.65441820404 }, { "content": "/// Returns a timestamp in nanoseconds based on the provided clock type.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `clock_type` - Identifier of the Linux Kernel clock on which to act.\n\npub fn get_time_ns(clock_type: ClockType) -> u64 {\n\n let mut time_struct = libc::timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n };\n\n // Safe because the parameters are valid.\n\n unsafe { libc::clock_gettime(clock_type.into(), &mut time_struct) };\n\n seconds_to_nanoseconds(time_struct.tv_sec).expect(\"Time conversion overflow\") as u64\n\n + (time_struct.tv_nsec as u64)\n\n}\n\n\n", "file_path": "src/utils/src/time.rs", "rank": 68, "score": 211767.2293681478 }, { "content": "/// Returns a timestamp in microseconds based on the provided clock type.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `clock_type` - Identifier of the Linux Kernel clock on which to act.\n\npub fn get_time_us(clock_type: ClockType) -> u64 {\n\n get_time_ns(clock_type) / 1000\n\n}\n\n\n", "file_path": "src/utils/src/time.rs", "rank": 69, "score": 211767.2293681478 }, { "content": "#[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n\npub fn get_vendor_id() -> Result<[u8; 12], Error> {\n\n match get_cpuid(0, 0) {\n\n Ok(vendor_entry) => {\n\n let bytes: [u8; 12] = unsafe {\n\n std::mem::transmute([vendor_entry.ebx, vendor_entry.edx, vendor_entry.ecx])\n\n };\n\n Ok(bytes)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use crate::common::*;\n\n\n\n #[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n\n pub fn get_topoext_fn() -> u32 {\n\n let vendor_id = get_vendor_id();\n\n assert!(vendor_id.is_ok());\n", "file_path": "src/cpuid/src/common.rs", "rank": 
70, "score": 211750.18550491286 }, { "content": "/// Replaces the `cpuid` entries corresponding to `function` with the entries from the host's cpuid.\n\npub fn use_host_cpuid_function(\n\n cpuid: &mut CpuId,\n\n function: u32,\n\n use_count: bool,\n\n) -> Result<(), Error> {\n\n // copy all the CpuId entries, except for the ones with the provided function\n\n cpuid.retain(|entry| entry.function != function);\n\n\n\n // add all the host leaves with the provided function\n\n let mut count: u32 = 0;\n\n while let Ok(entry) = get_cpuid(function, count) {\n\n if count > 0 && !use_count {\n\n break;\n\n }\n\n\n\n cpuid\n\n .push(kvm_cpuid_entry2 {\n\n function,\n\n index: count,\n\n flags: 0,\n", "file_path": "src/cpuid/src/transformer/common.rs", "rank": 71, "score": 209103.85239653673 }, { "content": "/// Checks if an IPv4 address is RFC 3927 compliant.\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::net::Ipv4Addr;\n\n/// use utils::net::ipv4addr::is_link_local_valid;\n\n///\n\n/// is_link_local_valid(Ipv4Addr::new(169, 254, 1, 1));\n\n///\n\npub fn is_link_local_valid(ipv4_addr: Ipv4Addr) -> bool {\n\n match ipv4_addr.octets() {\n\n [169, 254, 0, _] => false,\n\n [169, 254, 255, _] => false,\n\n [169, 254, _, _] => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::net::ipv4addr::is_link_local_valid;\n\n use std::net::Ipv4Addr;\n\n\n\n #[test]\n\n fn test_is_link_local_valid() {\n\n // Outside link-local IPv4 address range (169.254.0.0/16 - 169.254.255.255/16).\n\n let mut ipv4_addr = Ipv4Addr::new(1, 1, 1, 1);\n\n assert!(!is_link_local_valid(ipv4_addr));\n\n\n", "file_path": "src/utils/src/net/ipv4addr.rs", "rank": 72, "score": 208925.00509276515 }, { "content": "pub fn parse_get_mmds() -> Result<ParsedRequest, Error> {\n\n Ok(ParsedRequest::GetMMDS)\n\n}\n\n\n", "file_path": "src/api_server/src/request/mmds.rs", "rank": 73, "score": 208721.5339707091 }, { "content": "/// Generates a `GenericError` for each request method.\n\npub fn method_to_error(method: Method) -> Result<ParsedRequest, Error> {\n\n match method {\n\n Method::Get => Err(Error::Generic(\n\n StatusCode::BadRequest,\n\n \"GET request cannot have a body.\".to_string(),\n\n )),\n\n Method::Put => Err(Error::Generic(\n\n StatusCode::BadRequest,\n\n \"Empty PUT request.\".to_string(),\n\n )),\n\n Method::Patch => Err(Error::Generic(\n\n StatusCode::BadRequest,\n\n \"Empty PATCH request.\".to_string(),\n\n )),\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n // A generic error, with a given status code and message to be turned into a fault message.\n", "file_path": "src/api_server/src/parsed_request.rs", "rank": 74, "score": 208608.5685822961 }, { "content": "/// Checks that the instance id only contains alphanumeric chars and hyphens\n\n/// and that the size is between 1 and 64 characters.\n\npub fn validate_instance_id(input: &str) -> Result<(), Error> {\n\n if input.len() > MAX_INSTANCE_ID_LEN || input.len() < MIN_INSTANCE_ID_LEN {\n\n return Err(Error::InvalidLen(\n\n input.len(),\n\n MIN_INSTANCE_ID_LEN,\n\n MAX_INSTANCE_ID_LEN,\n\n ));\n\n }\n\n for (i, c) in input.chars().enumerate() {\n\n if !(c == '-' || c.is_alphanumeric()) {\n\n return Err(Error::InvalidChar(c, i));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/utils/src/validators.rs", "rank": 75, "score": 208497.29772712203 }, { "content": "/// Create a GIC device.\n\n///\n\n/// It will try to create by default a GICv3 device. 
If that fails it will try\n\n/// to fall-back to a GICv2 device.\n\npub fn create_gic(vm: &VmFd, vcpu_count: u64) -> Result<Box<dyn GICDevice>> {\n\n GICv3::new(vm, vcpu_count).or_else(|_| GICv2::new(vm, vcpu_count))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use kvm_ioctls::Kvm;\n\n\n\n #[test]\n\n fn test_create_gic() {\n\n let kvm = Kvm::new().unwrap();\n\n let vm = kvm.create_vm().unwrap();\n\n assert!(create_gic(&vm, 1).is_ok());\n\n }\n\n}\n", "file_path": "src/arch/src/aarch64/gic.rs", "rank": 76, "score": 207589.86956552035 }, { "content": "pub fn check_request_completion(queue: &VirtQueue, idx: usize) {\n\n // Check that the next used will be idx + 1.\n\n assert_eq!(queue.used.idx.get(), (idx + 1) as u16);\n\n // Check that the current used is idx.\n\n assert_eq!(queue.used.ring[idx].get().id, idx as u32);\n\n // The length of the completed request is 0.\n\n assert_eq!(queue.used.ring[idx].get().len, 0);\n\n}\n", "file_path": "src/devices/src/virtio/balloon/test_utils.rs", "rank": 77, "score": 206752.17923454897 }, { "content": "// Serde does not allow specifying a default value for a field\n\n// that is not required. The workaround is to specify a function\n\n// that returns the value.\n\nfn default_allow_mmds_requests() -> bool {\n\n false\n\n}\n\n\n\n/// The data fed into a network iface update request. Currently, only the RX and TX rate limiters\n\n/// can be updated.\n\n#[derive(Debug, Deserialize, PartialEq, Clone)]\n\n#[serde(deny_unknown_fields)]\n\npub struct NetworkInterfaceUpdateConfig {\n\n /// The net iface ID, as provided by the user at iface creation time.\n\n pub iface_id: String,\n\n /// New RX rate limiter config. Only provided data will be updated. I.e. if any optional data\n\n /// is missing, it will not be nullified, but left unchanged.\n\n pub rx_rate_limiter: Option<RateLimiterConfig>,\n\n /// New TX rate limiter config. Only provided data will be updated. I.e. if any optional data\n\n /// is missing, it will not be nullified, but left unchanged.\n\n pub tx_rate_limiter: Option<RateLimiterConfig>,\n\n}\n\n\n\nmacro_rules! 
get_bucket_update {\n", "file_path": "src/vmm/src/vmm_config/net.rs", "rank": 78, "score": 206341.55423644063 }, { "content": "/// Read the MPIDR - Multiprocessor Affinity Register.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu` - Structure for the VCPU that holds the VCPU's fd.\n\npub fn read_mpidr(vcpu: &VcpuFd) -> Result<u64> {\n\n vcpu.get_one_reg(MPIDR_EL1).map_err(Error::GetSysRegister)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::aarch64::{arch_memory_regions, layout};\n\n use kvm_ioctls::Kvm;\n\n\n\n #[test]\n\n fn test_setup_regs() {\n\n let kvm = Kvm::new().unwrap();\n\n let vm = kvm.create_vm().unwrap();\n\n let vcpu = vm.create_vcpu(0).unwrap();\n\n let regions = arch_memory_regions(layout::FDT_MAX_SIZE + 0x1000);\n\n let mem = GuestMemoryMmap::from_ranges(&regions).expect(\"Cannot initialize memory\");\n\n\n\n let res = setup_boot_regs(&vcpu, 0, 0x0, &mem);\n\n assert!(res.is_err());\n", "file_path": "src/arch/src/aarch64/regs.rs", "rank": 79, "score": 205482.08072169754 }, { "content": "/// Represents an immutable view into a sequence of bytes which stands for different values packed\n\n/// together using network byte ordering.\n\npub trait NetworkBytes: Deref<Target = [u8]> {\n\n /// Reads an `u16` value from the specified offset, converting it to host byte ordering.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This method will panic if `offset` is invalid.\n\n #[inline]\n\n fn ntohs_unchecked(&self, offset: usize) -> u16 {\n\n // The unwrap() can fail when the offset is invalid, or there aren't enough bytes (2 in this\n\n // case) left until the end of the slice. The caller must ensure this doesn't happen (hence\n\n // the `unchecked` suffix).\n\n byte_order::read_be_u16(&self[offset..])\n\n }\n\n\n\n /// Reads an `u32` value from the specified offset, converting it to host byte ordering.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This method will panic if `offset` is invalid.\n\n #[inline]\n", "file_path": "src/dumbo/src/pdu/bytes.rs", "rank": 80, "score": 204060.0050445641 }, { "content": "pub fn parse_get_machine_config() -> Result<ParsedRequest, Error> {\n\n METRICS.get_api_requests.machine_cfg_count.inc();\n\n Ok(ParsedRequest::new_sync(VmmAction::GetVmConfiguration))\n\n}\n\n\n", "file_path": "src/api_server/src/request/machine_configuration.rs", "rank": 81, "score": 203222.18397552695 }, { "content": "pub fn parse_get_instance_info() -> Result<ParsedRequest, Error> {\n\n METRICS.get_api_requests.instance_info_count.inc();\n\n Ok(ParsedRequest::GetInstanceInfo)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_get_instance_info_request() {\n\n match parse_get_instance_info() {\n\n Ok(ParsedRequest::GetInstanceInfo) => {}\n\n _ => panic!(\"Test failed.\"),\n\n }\n\n }\n\n}\n", "file_path": "src/api_server/src/request/instance_info.rs", "rank": 82, "score": 203222.18397552695 }, { "content": "/// Patch provided JSON document (given as `serde_json::Value`) in-place with JSON Merge Patch\n\n/// [RFC 7396](https://tools.ietf.org/html/rfc7396).\n\npub fn json_patch(target: &mut Value, patch: &Value) {\n\n if patch.is_object() {\n\n if !target.is_object() {\n\n // Replace target with a serde_json object so we can recursively copy patch values.\n\n *target = Value::Object(Map::new());\n\n }\n\n\n\n // This is safe since we make sure patch and target are objects beforehand.\n\n let doc = target.as_object_mut().unwrap();\n\n for (key, value) in patch.as_object().unwrap() {\n\n if value.is_null() {\n\n // If the 
value in the patch is null we remove the entry.\n\n doc.remove(key.as_str());\n\n } else {\n\n // Recursive call to update target document.\n\n // If `key` is not in the target document (it's a new field defined in `patch`)\n\n // insert a null placeholder and pass it as the new target\n\n // so we can insert new values recursively.\n\n json_patch(doc.entry(key.as_str()).or_insert(Value::Null), value);\n\n }\n\n }\n\n } else {\n\n *target = patch.clone();\n\n }\n\n}\n\n\n", "file_path": "src/mmds/src/lib.rs", "rank": 83, "score": 203104.29188738583 }, { "content": "/// Type representing a state handler of a `StateMachine<T>` machine. Each state handler\n\n/// is a function from `T` that handles a specific state of `T`.\n\ntype StateFn<T> = fn(&mut T) -> StateMachine<T>;\n\n\n\nimpl<T> StateMachine<T> {\n\n /// Creates a new state wrapper.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// `function` - the state handler for this state.\n\n ///\n\n pub fn new(function: Option<StateFn<T>>) -> StateMachine<T> {\n\n StateMachine { function }\n\n }\n\n\n\n /// Creates a new state wrapper that has further possible transitions.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// `function` - the state handler for this state.\n\n ///\n\n pub fn next(function: StateFn<T>) -> StateMachine<T> {\n", "file_path": "src/utils/src/sm.rs", "rank": 84, "score": 202938.56557028936 }, { "content": "fn create_vmm(_kernel_image: Option<&str>, is_diff: bool) -> (Arc<Mutex<Vmm>>, EventManager) {\n\n let mut event_manager = EventManager::new().unwrap();\n\n let empty_seccomp_filter = get_seccomp_filter(SeccompLevel::None).unwrap();\n\n\n\n let boot_source_cfg = MockBootSourceConfig::new().with_default_boot_args();\n\n #[cfg(target_arch = \"aarch64\")]\n\n let boot_source_cfg: BootSourceConfig = boot_source_cfg.into();\n\n #[cfg(target_arch = \"x86_64\")]\n\n let boot_source_cfg: BootSourceConfig = match _kernel_image {\n\n Some(kernel) => boot_source_cfg.with_kernel(kernel).into(),\n\n None => boot_source_cfg.into(),\n\n };\n\n let mock_vm_res = MockVmResources::new().with_boot_source(boot_source_cfg);\n\n let resources: VmResources = if is_diff {\n\n mock_vm_res\n\n .with_vm_config(MockVmConfig::new().with_dirty_page_tracking().into())\n\n .into()\n\n } else {\n\n mock_vm_res.into()\n\n };\n\n\n\n (\n\n build_microvm_for_boot(&resources, &mut event_manager, &empty_seccomp_filter).unwrap(),\n\n event_manager,\n\n )\n\n}\n\n\n", "file_path": "src/vmm/tests/integration_tests.rs", "rank": 85, "score": 202653.61252573982 }, { "content": "fn valid_char(c: char) -> bool {\n\n match c {\n\n ' '..='~' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/kernel/src/cmdline/mod.rs", "rank": 86, "score": 200477.23184875757 }, { "content": "#[test]\n\nfn test_invalid_format_version() {\n\n #[rustfmt::skip]\n\n let invalid_format_snap: &[u8] = &[\n\n // This blob consists of the following: magic_id (8 bytes),\n\n 0xAA, 0xAA,\n\n #[cfg(target_arch = \"aarch64\")]\n\n 0xAA,\n\n #[cfg(target_arch = \"aarch64\")]\n\n 0xAA,\n\n #[cfg(target_arch = \"x86_64\")]\n\n 0x64,\n\n #[cfg(target_arch = \"x86_64\")]\n\n 0x86,\n\n 0x84, 0x19, 0x10, 0x07,\n\n // target version (2 bytes) +\n\n 0x01, 0x00,\n\n // `a` field +\n\n 0x10, 0x00, 0x00, 0x00,\n\n // `b` field: Option variant type (1 byte) + inner enum variant type (4 bytes)\n\n // + inner enum value (4 bytes).\n", "file_path": "src/snapshot/tests/test.rs", "rank": 87, "score": 200372.3658244703 }, { "content": "#[test]\n\nfn test_invalid_data_version() {\n\n #[rustfmt::skip]\n\n let 
invalid_data_version_snap: &[u8] = &[\n\n // This blob consists of the following: magic_id (8 bytes),\n\n 0x01, 0x00,\n\n #[cfg(target_arch = \"aarch64\")]\n\n 0xAA,\n\n #[cfg(target_arch = \"aarch64\")]\n\n 0xAA,\n\n #[cfg(target_arch = \"x86_64\")]\n\n 0x64,\n\n #[cfg(target_arch = \"x86_64\")]\n\n 0x86,\n\n 0x84, 0x19, 0x10, 0x07,\n\n // target version (2 bytes) +\n\n 0xAA, 0xAA,\n\n // `a` field +\n\n 0x10, 0x00, 0x00, 0x00,\n\n // `b` field: Option variant type (1 byte) + inner enum variant type (4 bytes)\n\n // + inner enum value (4 bytes).\n", "file_path": "src/snapshot/tests/test.rs", "rank": 88, "score": 200372.3658244703 }, { "content": "fn frame_bytes_from_buf_mut(buf: &mut [u8]) -> Result<&mut [u8]> {\n\n if buf.len() < vnet_hdr_len() {\n\n Err(Error::VnetHeaderMissing)\n\n } else {\n\n Ok(&mut buf[vnet_hdr_len()..])\n\n }\n\n}\n\n\n", "file_path": "src/devices/src/virtio/net/device.rs", "rank": 89, "score": 200320.18401068565 }, { "content": "fn write_gdt_table(table: &[u64], guest_mem: &GuestMemoryMmap) -> Result<()> {\n\n let boot_gdt_addr = GuestAddress(BOOT_GDT_OFFSET);\n\n for (index, entry) in table.iter().enumerate() {\n\n let addr = guest_mem\n\n .checked_offset(boot_gdt_addr, index * mem::size_of::<u64>())\n\n .ok_or(Error::WriteGDT)?;\n\n guest_mem\n\n .write_obj(*entry, addr)\n\n .map_err(|_| Error::WriteGDT)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/regs.rs", "rank": 90, "score": 197425.77976206923 }, { "content": "fn write_idt_value(val: u64, guest_mem: &GuestMemoryMmap) -> Result<()> {\n\n let boot_idt_addr = GuestAddress(BOOT_IDT_OFFSET);\n\n guest_mem\n\n .write_obj(val, boot_idt_addr)\n\n .map_err(|_| Error::WriteIDT)\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/regs.rs", "rank": 91, "score": 197425.77976206923 }, { "content": "/// A passive, event-driven object, that needs to be notified whenever an epoll-able event occurs.\n\n/// An event-polling control loop will use `as_raw_fd()` and `get_polled_evset()` to query\n\n/// the listener for the file descriptor and the set of events it's interested in. 
When such an\n\n/// event occurs, the control loop will route the event to the listener via `notify()`.\n\npub trait VsockEpollListener: AsRawFd {\n\n /// Get the set of events for which the listener wants to be notified.\n\n fn get_polled_evset(&self) -> EventSet;\n\n\n\n /// Notify the listener that one ore more events have occurred.\n\n fn notify(&mut self, evset: EventSet);\n\n}\n\n\n", "file_path": "src/devices/src/virtio/vsock/mod.rs", "rank": 92, "score": 197320.00031116972 }, { "content": "// This function is supposed to do id validation for requests.\n\npub fn checked_id(id: &str) -> Result<&str, Error> {\n\n // todo: are there any checks we want to do on id's?\n\n // not allow them to be empty strings maybe?\n\n // check: ensure string is not empty\n\n if id.is_empty() {\n\n return Err(Error::EmptyID);\n\n }\n\n // check: ensure string is alphanumeric\n\n if !id.chars().all(|c| c == '_' || c.is_alphanumeric()) {\n\n return Err(Error::InvalidID);\n\n }\n\n Ok(id)\n\n}\n\n\n\n#[cfg(test)]\n\npub(crate) mod tests {\n\n use super::*;\n\n\n\n use std::io::{Cursor, Write};\n\n use std::os::unix::net::UnixStream;\n", "file_path": "src/api_server/src/parsed_request.rs", "rank": 93, "score": 196996.05156445637 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\n#[test]\n\nfn test_create_and_load_snapshot() {\n\n // Create diff snapshot.\n\n let (snapshot_file, memory_file) = verify_create_snapshot(true);\n\n // Create a new microVm from snapshot. This only tests code-level logic; it verifies\n\n // that a microVM can be built with no errors from given snapshot.\n\n // It does _not_ verify that the guest is actually restored properly. We're using\n\n // python integration tests for that.\n\n verify_load_snapshot(snapshot_file, memory_file);\n\n\n\n // Create full snapshot.\n\n let (snapshot_file, memory_file) = verify_create_snapshot(false);\n\n // Create a new microVm from snapshot. This only tests code-level logic; it verifies\n\n // that a microVM can be built with no errors from given snapshot.\n\n // It does _not_ verify that the guest is actually restored properly. We're using\n\n // python integration tests for that.\n\n verify_load_snapshot(snapshot_file, memory_file);\n\n}\n", "file_path": "src/vmm/tests/integration_tests.rs", "rank": 94, "score": 196960.97660544794 }, { "content": "#[test]\n\nfn test_dirty_bitmap_error() {\n\n // Error case: dirty tracking disabled.\n\n let pid = unsafe { libc::fork() };\n\n match pid {\n\n 0 => {\n\n set_panic_hook();\n\n\n\n let (vmm, mut event_manager) = default_vmm(None);\n\n\n\n // The vmm will start with dirty page tracking = OFF.\n\n // With dirty tracking disabled, the underlying KVM_GET_DIRTY_LOG ioctl will fail\n\n // with errno 2 (ENOENT) because KVM can't find any guest memory regions with dirty\n\n // page tracking enabled.\n\n assert_eq!(\n\n format!(\"{:?}\", vmm.lock().unwrap().get_dirty_bitmap().err()),\n\n \"Some(DirtyBitmap(Error(2)))\"\n\n );\n\n\n\n let _ = event_manager.run_with_timeout(500).unwrap();\n\n\n", "file_path": "src/vmm/tests/integration_tests.rs", "rank": 95, "score": 196925.78402029743 }, { "content": "// It's called writeln_special because we have to use this rather convoluted way of writing\n\n// to special cgroup files, to avoid getting errors. 
It would be nice to know why that happens :-s\n\nfn writeln_special<T, V>(file_path: &T, value: V) -> Result<()>\n\nwhere\n\n T: AsRef<Path>,\n\n V: ::std::fmt::Display,\n\n{\n\n fs::write(file_path, format!(\"{}\\n\", value))\n\n .map_err(|e| Error::Write(PathBuf::from(file_path.as_ref()), e))\n\n}\n\n\n", "file_path": "src/jailer/src/cgroup.rs", "rank": 96, "score": 195023.1937270166 }, { "content": "/// Generate a BPF program based on a seccomp level value.\n\npub fn get_seccomp_filter(seccomp_level: SeccompLevel) -> Result<BpfProgram, SeccompError> {\n\n match seccomp_level {\n\n SeccompLevel::None => Ok(vec![]),\n\n SeccompLevel::Basic => default_filter()\n\n .and_then(|filter| Ok(filter.allow_all()))\n\n .and_then(|filter| filter.try_into())\n\n .map_err(SeccompError::SeccompFilter),\n\n SeccompLevel::Advanced => default_filter()\n\n .and_then(|filter| filter.try_into())\n\n .map_err(SeccompError::SeccompFilter),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::get_seccomp_filter;\n\n use seccomp::SeccompLevel;\n\n\n\n #[test]\n\n fn test_get_seccomp_filter() {\n\n assert!(get_seccomp_filter(SeccompLevel::None).is_ok());\n\n assert!(get_seccomp_filter(SeccompLevel::Basic).is_ok());\n\n assert!(get_seccomp_filter(SeccompLevel::Advanced).is_ok());\n\n }\n\n}\n", "file_path": "src/vmm/src/default_syscalls/filters.rs", "rank": 97, "score": 194338.00042722948 }, { "content": "/// Configure base registers for a given CPU.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu` - Structure for the VCPU that holds the VCPU's fd.\n\n/// * `boot_ip` - Starting instruction pointer.\n\npub fn setup_regs(vcpu: &VcpuFd, boot_ip: u64) -> Result<()> {\n\n let regs: kvm_regs = kvm_regs {\n\n rflags: 0x0000_0000_0000_0002u64,\n\n rip: boot_ip,\n\n // Frame pointer. It gets a snapshot of the stack pointer (rsp) so that when adjustments are\n\n // made to rsp (i.e. reserving space for local variables or pushing values on to the stack),\n\n // local variables and function parameters are still accessible from a constant offset from rbp.\n\n rsp: super::layout::BOOT_STACK_POINTER as u64,\n\n // Starting stack pointer.\n\n rbp: super::layout::BOOT_STACK_POINTER as u64,\n\n // Must point to zero page address per Linux ABI. This is x86_64 specific.\n\n rsi: super::layout::ZERO_PAGE_START as u64,\n\n ..Default::default()\n\n };\n\n\n\n vcpu.set_regs(&regs).map_err(Error::SetBaseRegisters)\n\n}\n\n\n", "file_path": "src/arch/src/x86_64/regs.rs", "rank": 98, "score": 194267.09088844084 } ]
Rust
liblumen_alloc/src/erts/term/arch/repr.rs
bitwalker/lumen
7d286b93d1a839aa7de5fed7020bafc1bc39f300
use core::fmt::{self, Debug, Display}; use core::hash::Hash; use alloc::sync::Arc; use std::backtrace::Backtrace; use crate::erts::term::prelude::*; use super::Tag; pub trait Repr: Sized + Copy + Debug + Display + PartialEq<Self> + Eq + PartialOrd<Self> + Ord + Hash + Send { type Word: Clone + Copy + PartialEq + Eq + Debug + fmt::Binary; fn as_usize(&self) -> usize; fn word_to_usize(word: Self::Word) -> usize; fn value(&self) -> Self::Word; fn type_of(&self) -> Tag<Self::Word>; fn encode_immediate(value: Self::Word, tag: Self::Word) -> Self; fn encode_header(value: Self::Word, tag: Self::Word) -> Self; fn encode_list(value: *const Cons) -> Self; fn encode_box<U>(value: *const U) -> Self where U: ?Sized; fn encode_literal<U>(value: *const U) -> Self where U: ?Sized; unsafe fn decode_box(self) -> *mut Self; unsafe fn decode_list(self) -> Boxed<Cons>; unsafe fn decode_smallint(self) -> SmallInteger; unsafe fn decode_immediate(self) -> Self::Word; unsafe fn decode_atom(self) -> Atom; unsafe fn decode_pid(self) -> Pid; unsafe fn decode_port(self) -> Port; unsafe fn decode_header_value(&self) -> Self::Word; fn decode_header( &self, tag: Tag<Self::Word>, literal: Option<bool>, ) -> Result<TypedTerm, TermDecodingError> where Self: Encoded, { let ptr = Boxed::new(self as *const _ as *mut u64).ok_or_else(|| { TermDecodingError::NoneValue { backtrace: Arc::new(Backtrace::capture()), } })?; match tag { Tag::Tuple => { let tuple = unsafe { Tuple::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::Tuple(tuple)) } Tag::Closure => { let closure = unsafe { Closure::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::Closure(closure)) } Tag::HeapBinary => { let bin = unsafe { HeapBin::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::HeapBinary(bin)) } #[cfg(not(target_arch = "x86_64"))] Tag::Float => Ok(TypedTerm::Float(ptr.cast::<Float>())), Tag::BigInteger => Ok(TypedTerm::BigInteger(ptr.cast::<BigInteger>())), Tag::Reference => Ok(TypedTerm::Reference(ptr.cast::<Reference>())), Tag::ResourceReference => Ok(TypedTerm::ResourceReference(ptr.cast::<Resource>())), Tag::ProcBin => match literal { Some(false) => Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())), Some(true) => Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())), None => { let offset = BinaryLiteral::flags_offset(); debug_assert_eq!(offset, ProcBin::inner_offset()); let flags_ptr = unsafe { (self as *const _ as *const u8).offset(offset as isize) as *const BinaryFlags }; let flags = unsafe { *flags_ptr }; if flags.is_literal() { Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())) } else { Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())) } } }, Tag::SubBinary => Ok(TypedTerm::SubBinary(ptr.cast::<SubBinary>())), Tag::MatchContext => Ok(TypedTerm::MatchContext(ptr.cast::<MatchContext>())), Tag::ExternalPid => Ok(TypedTerm::ExternalPid(ptr.cast::<ExternalPid>())), Tag::ExternalPort => Ok(TypedTerm::ExternalPort(ptr.cast::<ExternalPort>())), Tag::ExternalReference => Ok(TypedTerm::ExternalReference( ptr.cast::<ExternalReference>(), )), Tag::Map => Ok(TypedTerm::Map(ptr.cast::<Map>())), Tag::None => Err(TermDecodingError::NoneValue { backtrace: Arc::new(Backtrace::capture()), }), _ => Err(TermDecodingError::InvalidTag { backtrace: Arc::new(Backtrace::capture()), }), } } #[inline] unsafe fn decode_header_unchecked( &self, tag: Tag<Self::Word>, literal: Option<bool>, ) -> TypedTerm where Self: Encoded, { match self.decode_header(tag.clone(), literal) { Ok(term) => term, Err(_) => panic!("invalid type tag: {:?}", tag), } 
} }
use core::fmt::{self, Debug, Display}; use core::hash::Hash; use alloc::sync::Arc; use std::backtrace::Backtrace; use crate::erts::term::prelude::*; use super::Tag; pub trait Repr: Sized + Copy + Debug + Display + PartialEq<Self> + Eq + PartialOrd<Self> + Ord + Hash + Send { type Word: Clone + Copy + PartialEq + Eq + Debug + fmt::Binary; fn as_usize(&self) -> usize; fn word_to_usize(word: Self::Word) -> usize; fn value(&self) -> Self::Word; fn type_of(&self) -> Tag<Self::Word>; fn encode_immediate(value: Self::Word, tag: Self::Word) -> Self; fn encode_header(value: Self::Word, tag: Self::Word) -> Self; fn encode_list(value: *const Cons) -> Self; fn encode_box<U>(value: *const U) -> Self where U: ?Sized; fn encode_literal<U>(value: *const U) -> Self where U: ?Sized; unsafe fn decode_box(self) -> *mut Self; unsafe fn decode_list(self) -> Boxed<Cons>; unsafe fn decode_smallint(self) -> SmallInteger; unsafe fn decode_immediate(self) -> Self::Word; unsafe fn decode_atom(self) -> Atom; unsafe fn decode_pid(self) -> Pid; unsafe fn decode_port(self) -> Port; unsafe fn decode_header_value(&self) -> Self::Word; fn decode_header( &self, tag: Tag<Self::Word>, literal: Option<bool>, ) -> Result<TypedTerm, TermDecodingError> where Self: Encoded, { let ptr = Boxed::new(self as *const _ as *mut u64).ok_or_else(|| { TermDecodingError::NoneValue { backtrace: Arc::new(Backtrace::capture()), } })?; match tag { Tag::Tuple => { let tuple = unsafe { Tuple::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::Tuple(tuple)) } Tag::Closure => { let closure = unsafe { Closure::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::Closure(closure)) } Tag::HeapBinary => { let bin = unsafe { HeapBin::from_raw_term(ptr.cast::<Self>().as_ptr()) }; Ok(TypedTerm::HeapBinary(bin)) } #[cfg(not(target_arch = "x86_64"))] Tag::Float => Ok(TypedTerm::Float(ptr.cast::<Float>())), Tag::BigInteger => Ok(TypedTerm::BigInteger(ptr.cast::<BigInteger>())), Tag::Reference => Ok(TypedTerm::Reference(ptr.cast::<Reference>())), Tag::ResourceReference => Ok(TypedTerm::ResourceReference(ptr.cast::<Resource>())), Tag::ProcBin => match literal { Some(false)
se { Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())) } } }, Tag::SubBinary => Ok(TypedTerm::SubBinary(ptr.cast::<SubBinary>())), Tag::MatchContext => Ok(TypedTerm::MatchContext(ptr.cast::<MatchContext>())), Tag::ExternalPid => Ok(TypedTerm::ExternalPid(ptr.cast::<ExternalPid>())), Tag::ExternalPort => Ok(TypedTerm::ExternalPort(ptr.cast::<ExternalPort>())), Tag::ExternalReference => Ok(TypedTerm::ExternalReference( ptr.cast::<ExternalReference>(), )), Tag::Map => Ok(TypedTerm::Map(ptr.cast::<Map>())), Tag::None => Err(TermDecodingError::NoneValue { backtrace: Arc::new(Backtrace::capture()), }), _ => Err(TermDecodingError::InvalidTag { backtrace: Arc::new(Backtrace::capture()), }), } } #[inline] unsafe fn decode_header_unchecked( &self, tag: Tag<Self::Word>, literal: Option<bool>, ) -> TypedTerm where Self: Encoded, { match self.decode_header(tag.clone(), literal) { Ok(term) => term, Err(_) => panic!("invalid type tag: {:?}", tag), } } }
=> Ok(TypedTerm::ProcBin(ptr.cast::<ProcBin>())), Some(true) => Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())), None => { let offset = BinaryLiteral::flags_offset(); debug_assert_eq!(offset, ProcBin::inner_offset()); let flags_ptr = unsafe { (self as *const _ as *const u8).offset(offset as isize) as *const BinaryFlags }; let flags = unsafe { *flags_ptr }; if flags.is_literal() { Ok(TypedTerm::BinaryLiteral(ptr.cast::<BinaryLiteral>())) } el
random
[ { "content": "#[inline]\n\npub fn in_area<T, U>(ptr: *const T, start: *const U, end: *const U) -> bool\n\nwhere\n\n T: ?Sized,\n\n U: ?Sized,\n\n{\n\n // If any pointers are null, the only sensible answer is false\n\n if ptr.is_null() || start.is_null() || end.is_null() {\n\n false\n\n } else {\n\n let start = start as *const () as usize;\n\n let end = end as *const () as usize;\n\n debug_assert!(start <= end);\n\n\n\n let ptr = ptr as *const () as usize;\n\n start <= ptr && ptr < end\n\n }\n\n}\n\n\n\n/// Returns true if `ptr` is in the memory region between `start` and `end`,\n\n/// specifically if `ptr` falls in the range including `start` _and_ `end`\n\n///\n\n/// NOTE: If any of the given pointers are null, then false will be returned\n", "file_path": "liblumen_core/src/util/pointer.rs", "rank": 0, "score": 488016.83982705546 }, { "content": "#[inline]\n\npub fn in_area_inclusive<T, U>(ptr: *const T, start: *const U, end: *const U) -> bool\n\nwhere\n\n T: ?Sized,\n\n U: ?Sized,\n\n{\n\n // If any pointers are null, the only sensible answer is false\n\n if ptr.is_null() || start.is_null() || end.is_null() {\n\n false\n\n } else {\n\n let start = start as *const () as usize;\n\n let end = end as *const () as usize;\n\n debug_assert!(start <= end);\n\n\n\n let ptr = ptr as *const () as usize;\n\n start <= ptr && ptr <= end\n\n }\n\n}\n\n\n\n/// Performs a byte-by-byte comparison of the values pointed to by `src` and `dst`,\n\n/// returning `Ok` if they are equal, and `Err(index)` if they are not equal, where\n", "file_path": "liblumen_core/src/util/pointer.rs", "rank": 1, "score": 485440.06171057303 }, { "content": "/// This trait defines the common API for low-level term representations, i.e. `Term`.\n\n/// It contains all common functions for working directly with encoded terms where the\n\n/// implementation of those functions may depend on platform/architecture details.\n\n///\n\n/// Since terms may provide greater or fewer immediate types based on platform restrictions,\n\n/// it is necessary for each representation to define these common functions in order to prevent\n\n/// tying higher-level code to low-level details such as the specific bit-width of a\n\n/// machine word.\n\n///\n\n/// NOTE: This trait requires that implementations implement `Copy` because higher-level code\n\n/// currently depends on those semantics. Some functions, such as `decode`, take a reference\n\n/// to `self` to prevent copying the original term in cases where the location in memory is\n\n/// important. However, several functions of this trait do not, and it is assumed that those\n\n/// functions are not dependent on a specific memory address. If that constraint is violated\n\n/// then you may end up with a partial term which leads to out of bounds memory addresses, or\n\n/// other undefined behavior.\n\npub trait Encoded: Repr + Copy {\n\n /// Decodes `Self` into a `TypedTerm`, unless the encoded value is\n\n /// invalid or malformed.\n\n ///\n\n /// NOTE: Implementations should attempt to catch all possible decoding errors\n\n /// to make this as safe as possible. 
The only exception to this rule should\n\n /// be the case of decoding a pointer which can not be validated unless it\n\n /// is dereferenced.\n\n fn decode(&self) -> Result<TypedTerm, TermDecodingError>;\n\n\n\n /// Returns `true` if the encoded value represents `NONE`\n\n fn is_none(self) -> bool;\n\n /// Returns `true` if the encoded value represents a pointer to a term\n\n fn is_boxed(self) -> bool;\n\n /// Returns `true` if the encoded value is the header of a non-immediate term\n\n fn is_header(self) -> bool;\n\n /// Returns `true` if the encoded value is an immediate value\n\n fn is_immediate(self) -> bool;\n\n /// Returns `true` if the encoded value represents a pointer to a literal value\n\n fn is_literal(self) -> bool;\n", "file_path": "liblumen_alloc/src/erts/term/encoding.rs", "rank": 2, "score": 447196.98778119043 }, { "content": "/// The default implementation of this trait simply delegates to `Eq`, override\n\n/// the `exact_eq` or `exact_ne` methods to extend that behavior.\n\npub trait ExactEq: PartialEq<Self> {\n\n fn exact_eq(&self, other: &Self) -> bool {\n\n self.eq(other)\n\n }\n\n\n\n fn exact_ne(&self, other: &Self) -> bool {\n\n !self.exact_eq(other)\n\n }\n\n}\n", "file_path": "liblumen_core/src/cmp/exact_eq.rs", "rank": 3, "score": 435495.126296467 }, { "content": "#[inline]\n\npub fn ensure_aligned<T>(ptr: *mut T, align: usize) -> (*mut T, usize) {\n\n let ptr = ptr as *mut u8;\n\n let offset = ptr.align_offset(align);\n\n assert_ne!(offset, usize::max_value());\n\n\n\n let aligned = unsafe { ptr.add(offset) as *mut T };\n\n (aligned, offset)\n\n}\n\n\n\n// Returns the effective alignment of `ptr`, i.e. the largest power\n\n// of two that is a divisor of `ptr`\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 4, "score": 432131.9898998606 }, { "content": "#[inline(always)]\n\npub fn align_up_to<T>(ptr: *mut T, align: usize) -> *mut T {\n\n self::round_up_to_alignment(ptr as usize, align) as *mut T\n\n}\n\n\n\n// Aligns the given pointer down to the given alignment.\n\n// The resulting pointer is either less than or equal to the given pointer.\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 5, "score": 429526.67256724427 }, { "content": "#[inline(always)]\n\npub fn align_down_to<T>(ptr: *mut T, align: usize) -> *mut T {\n\n assert!(align.is_power_of_two());\n\n (ptr as usize & !(align - 1)) as *mut T\n\n}\n\n\n\n// Aligns the given pointer up to the next nearest byte which is a multiple of `base`\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 6, "score": 429526.67256724427 }, { "content": "#[inline(always)]\n\npub fn align_up_to_multiple_of<T>(ptr: *mut T, base: usize) -> *mut T {\n\n self::round_up_to_multiple_of(ptr as usize, base) as *mut T\n\n}\n\n\n\n// Returns true if `ptr` is aligned to `align`\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 7, "score": 426369.02319687046 }, { "content": "#[inline(always)]\n\npub fn effective_alignment<T>(ptr: *const T) -> usize {\n\n 1usize << (ptr as usize).trailing_zeros()\n\n}\n\n\n\n/// Given a reference to an object, formats the reference\n\n/// as a hexadecimal memory address\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 8, "score": 426181.7261420201 }, { "content": "#[inline]\n\npub fn heap(size: usize) -> AllocResult<*mut Term> {\n\n PROC_ALLOC.alloc(size)\n\n}\n\n\n\n/// Reallocate a process heap, in place\n\n///\n\n/// If reallocating and trying to grow the heap, if the allocation cannot be done\n\n/// in place, then `Err(CannotReallocInPlace)` 
will be returned\n\n#[inline]\n\npub unsafe fn realloc(\n\n heap: *mut Term,\n\n size: usize,\n\n new_size: usize,\n\n) -> Result<*mut Term, CannotReallocInPlace> {\n\n PROC_ALLOC.realloc_in_place(heap, size, new_size)\n\n}\n\n\n\n/// Deallocate a heap previously allocated via `heap`\n\n#[inline]\n\npub unsafe fn free(heap: *mut Term, size: usize) {\n\n PROC_ALLOC.dealloc(heap, size)\n\n}\n\n\n\n/// Calculates the next largest heap size equal to or greater than `size`\n", "file_path": "liblumen_alloc/src/erts/process/alloc.rs", "rank": 9, "score": 422560.21508064365 }, { "content": "pub fn number_atom_reference_function_port_pid_or_tuple(\n\n arc_process: Arc<Process>,\n\n) -> BoxedStrategy<Term> {\n\n prop_oneof![\n\n is_number(arc_process.clone()),\n\n atom(),\n\n is_reference(arc_process.clone()),\n\n // TODO ports\n\n is_pid(arc_process.clone()),\n\n tuple(arc_process)\n\n ]\n\n .boxed()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy/term.rs", "rank": 10, "score": 414923.22376391385 }, { "content": "/// This is a marker trait for boxed terms which are stored as literals\n\npub trait Literal<T: Repr>: Boxable<T> {}\n\n\n", "file_path": "liblumen_alloc/src/erts/term/encoding.rs", "rank": 11, "score": 413873.2832098102 }, { "content": "pub trait Pattern<'a>: Debug + Clone {\n\n type Output;\n\n fn try_match(&self, input: &'a Term) -> Result<'a, Self::Output>;\n\n\n\n fn unmatched(&self, input: &'a Term) -> Unmatch<'a>\n\n where\n\n Self: 'static,\n\n {\n\n Unmatch {\n\n input,\n\n pattern: Box::new(self.clone()),\n\n cause: None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Unmatch<'a> {\n\n pub input: &'a Term,\n\n pub pattern: Box<dyn Debug>,\n", "file_path": "liblumen_beam/src/serialization/etf/pattern.rs", "rank": 12, "score": 413505.1816000249 }, { "content": "#[inline(always)]\n\npub fn is_aligned_at<T>(ptr: *mut T, align: usize) -> bool {\n\n (ptr as usize) % align == 0\n\n}\n\n\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 13, "score": 410565.0361538138 }, { "content": "/// This trait represents cloning, like `Clone`, but specifically\n\n/// in the context of terms which need to be cloned into the heap\n\n/// of a specific process, rather than using the global allocator.\n\n///\n\n/// In particular this is used for persistent data structures like\n\n/// `HashMap` which use clone-on-write behavior internally for mutable\n\n/// operations, e.g. `insert`. 
Rather than using `Clone` which would not\n\n/// do the right thing, we instead implement this trait, and ensure that\n\n/// those operations are provided a mutable reference to the current process\n\n/// so that the clone is into the process heap, rather than the global heap\n\n///\n\n/// NOTE: You can implement both `CloneInProcess` and `Clone` for a type,\n\n/// just be aware that any uses of `Clone` will allocate on the global heap\n\npub trait CloneToProcess: Debug {\n\n /// Returns boxed copy of this value, performing any heap allocations\n\n /// using the process heap of `process`, possibly using heap fragments if\n\n /// there is not enough space for the cloned value\n\n fn clone_to_process(&self, process: &Process) -> Term {\n\n let mut heap = process.acquire_heap();\n\n match self.clone_to_heap(&mut heap) {\n\n Ok(term) => term,\n\n Err(_) => {\n\n drop(heap);\n\n let (term, mut frag) = self.clone_to_fragment().unwrap();\n\n process.attach_fragment(unsafe { frag.as_mut() });\n\n term\n\n }\n\n }\n\n }\n\n\n\n /// Returns boxed copy of this value, performing any heap allocations\n\n /// using the given heap. If cloning requires allocation that exceeds\n\n /// the amount of memory available, this returns `Err(Alloc)`, otherwise\n", "file_path": "liblumen_alloc/src/borrow/clone_to_process.rs", "rank": 14, "score": 395935.88651087973 }, { "content": "#[inline(always)]\n\npub fn distance_absolute<T: Sized>(a: *const T, b: *const T) -> usize {\n\n unsafe { a.offset_from(b).abs() as usize }\n\n}\n\n\n\n/// Returns true if `ptr` is in the memory region between `start` and `end`,\n\n/// specifically if `ptr` falls in the range including `start` but excluding `end`\n\n///\n\n/// NOTE: If any of the given pointers are null, then false will be returned\n", "file_path": "liblumen_core/src/util/pointer.rs", "rank": 15, "score": 390013.2278140102 }, { "content": "/// This is a marker trait for terms which can be boxed\n\npub trait Boxable<T: Repr> {}\n\n\n", "file_path": "liblumen_alloc/src/erts/term/encoding.rs", "rank": 16, "score": 375556.3271998303 }, { "content": "#[inline]\n\npub fn to_word_size(bytes: usize) -> usize {\n\n use core::mem;\n\n use liblumen_core::alloc::utils::round_up_to_multiple_of;\n\n\n\n round_up_to_multiple_of(bytes, mem::size_of::<usize>()) / mem::size_of::<usize>()\n\n}\n\n\n", "file_path": "liblumen_alloc/src/erts.rs", "rank": 17, "score": 374451.67554775 }, { "content": "// Returns true if `ptr` fulfills minimum alignment requirements for its type\n\npub fn is_aligned<T>(ptr: *mut T) -> bool {\n\n use crate::sys::sysconf::MIN_ALIGN;\n\n use core::cmp;\n\n\n\n let raw = ptr as usize;\n\n let align = cmp::max(mem::align_of::<T>(), MIN_ALIGN);\n\n raw % align == 0\n\n}\n\n\n\n/// Ensures `ptr` is aligned at the desired alignment, and returns\n\n/// the amount of padding in bytes that was needed to do so\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 18, "score": 372234.7448850394 }, { "content": "#[allow(unused)]\n\n#[inline]\n\npub fn to_arch32_word_size(bytes: usize) -> usize {\n\n use liblumen_core::alloc::utils::round_up_to_multiple_of;\n\n\n\n round_up_to_multiple_of(bytes, 4) / 4\n\n}\n", "file_path": "liblumen_alloc/src/erts.rs", "rank": 19, "score": 370899.95266905264 }, { "content": "#[allow(unused)]\n\n#[inline]\n\npub fn to_arch64_word_size(bytes: usize) -> usize {\n\n use liblumen_core::alloc::utils::round_up_to_multiple_of;\n\n\n\n round_up_to_multiple_of(bytes, 8) / 8\n\n}\n\n\n", "file_path": "liblumen_alloc/src/erts.rs", "rank": 20, 
"score": 370899.95266905264 }, { "content": "#[inline]\n\npub fn next_factor_of_word(n: usize) -> usize {\n\n let base = n / mem::size_of::<usize>();\n\n let rem = n % mem::size_of::<usize>();\n\n if rem == 0 {\n\n base\n\n } else {\n\n base + 1\n\n }\n\n}\n", "file_path": "liblumen_core/src/alloc/size_classes.rs", "rank": 21, "score": 367433.11243278347 }, { "content": "#[inline]\n\npub fn next_heap_size(size: usize) -> usize {\n\n ProcessHeapAlloc::next_heap_size(size)\n\n}\n", "file_path": "liblumen_alloc/src/erts/process/alloc.rs", "rank": 22, "score": 365173.7060220282 }, { "content": "pub fn number_atom_reference_function_port_or_pid(\n\n arc_process: Arc<Process>,\n\n) -> BoxedStrategy<Term> {\n\n prop_oneof![\n\n is_number(arc_process.clone()),\n\n atom(),\n\n is_reference(arc_process.clone()),\n\n is_function(arc_process.clone()),\n\n // TODO ports\n\n is_pid(arc_process)\n\n ]\n\n .boxed()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy/term.rs", "rank": 23, "score": 362543.7935143077 }, { "content": "pub fn function_port_pid_tuple_map_list_or_bitstring(\n\n arc_process: Arc<Process>,\n\n) -> BoxedStrategy<Term> {\n\n prop_oneof![\n\n is_function(arc_process.clone()),\n\n // TODO `Port` and `ExternalPort`\n\n is_pid(arc_process.clone()),\n\n tuple(arc_process.clone()),\n\n map(arc_process.clone()),\n\n is_bitstring(arc_process.clone()),\n\n ]\n\n .boxed()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy/term.rs", "rank": 24, "score": 359424.0210537487 }, { "content": "pub fn number_atom_reference_function_port_or_local_pid(\n\n arc_process: Arc<Process>,\n\n) -> BoxedStrategy<Term> {\n\n prop_oneof![\n\n is_number(arc_process.clone()),\n\n atom(),\n\n is_reference(arc_process.clone()),\n\n is_function(arc_process),\n\n // TODO ports\n\n pid::local()\n\n ]\n\n .boxed()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy/term.rs", "rank": 25, "score": 359079.68349934067 }, { "content": "#[inline(always)]\n\npub fn round_down_to_alignment(size: usize, align: usize) -> usize {\n\n assert!(align.is_power_of_two());\n\n // This trick works by masking the low bits in `size`\n\n // up to (but not including) `align`, the result is\n\n // the value of `size` rounded down to the next nearest\n\n // number which is aligned to `align`\n\n //\n\n // EXAMPLE: given `size = 1048` and `align = 1024`, the result\n\n // is `1024`, which can be seen with the following 16bit representation:\n\n //\n\n // size: 0000010000011000\n\n // align - 1: 0000001111111111\n\n // !align - 1: 1111110000000000\n\n // size & !(align - 1): 0000010000000000\n\n size & !(align - 1)\n\n}\n\n\n\n// Shifts the given pointer up to the next nearest aligned byte\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 27, "score": 347431.49089569197 }, { "content": "#[inline(always)]\n\npub fn round_up_to_alignment(size: usize, align: usize) -> usize {\n\n assert!(align.is_power_of_two());\n\n self::round_up_to_multiple_of(size, align)\n\n}\n\n\n\n// Rounds down `size` to a multiple of `align`, which must be a power of two\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 28, "score": 347431.49089569197 }, { "content": "#[inline]\n\npub fn round_up_to_multiple_of(size: usize, base: usize) -> usize {\n\n let rem = size % base;\n\n if rem == 0 {\n\n size\n\n } else {\n\n size + base - rem\n\n }\n\n}\n\n\n\n// Rounds up `size` to a multiple of `align`, which must be a power of two\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 29, "score": 347431.49089569197 }, { 
"content": "/// This trait provides functionality for obtaining a pointer to an\n\n/// unsized type from a raw term. For example, the `Tuple` type consists\n\n/// of an arbitrary number of `Term` elements, and as such it is a\n\n/// dynamically sized type (i.e. `?Sized`). Raw pointers to dynamically\n\n/// sized types cannot be constructed without a size, so the job of this\n\n/// trait is to provide functions to determine a size automatically from\n\n/// such terms and construct a pointer, given only a pointer to a term\n\n/// header.\n\n///\n\n/// Implementors of this trait are dynamically-sized types (DSTs), and\n\n/// as such, the rules around how you can use them are far more restrictive\n\n/// than a typical statically sized struct. Notably, you can only ever\n\n/// construct a reference to one, you can't create a raw pointer to one or\n\n/// construct an instance of one directly.\n\n///\n\n/// Clearly this seems like a chicken and egg problem since it is mostly\n\n/// meaningless to construct references to things you can't create in the\n\n/// first place. So how do values to DSTs get constructed? There are a few\n\n/// key principles:\n\n///\n\n/// First, consider slices; the only way to make sense of a slice as a concept\n\n/// is to know the position where the slice starts, the size of its elements\n\n/// and the length of the slice. Rust gives us tools to construct these given\n\n/// a pointer with a sized element type, and the length of the slice. This is\n\n/// part of the solution, but it doesn't answer the question of how we deal with\n\n/// dynamically sized _structs_.\n\n///\n\n/// The second piece of the puzzle is structural equivalence. Consider our `Tuple`\n\n/// type, it is a struct consisting of a header containing the arity, and then some\n\n/// arbitrary number of elements. If we expressed it's type as `Tuple<[Term]>`, where\n\n/// the `elements` field is given the type `[Term]`; then a value of `Tuple<[Term]>`\n\n/// is structurally equivalent to a value of `[Tuple<Term>; 1]`. Put another way, since\n\n/// our variable-length field occurs at the end of the struct, we're really saying\n\n/// that the layout of `[Term; 1]` is the same as `Term`, which is intuitively obvious.\n\n///\n\n/// The final piece of the puzzle is given by another Rust feature: unsizing coercions.\n\n/// When Rust sees a cast from a sized type to an unsized type, it performs an unsizing\n\n/// coercion.\n\n///\n\n/// For our purposes, the coercion here is from `[T; N]` to `CustomType<T>`, which\n\n/// is allowed when `CustomType<T>` only has a single, non-PhantomData field involving `T`.\n\n///\n\n/// So given a pointer to a `Tuple`, if we construct a slice of `[Term; N]` and cast it to\n\n/// a pointer to `Tuple`, Rust performs the coercion by first filling in the fields of `Tuple`\n\n/// from the pointed-to memory, then coercing the `[Term; N]` to `[Term]` using the address of\n\n/// the unsized field plus the size `N` to construct the fat pointer required for the `[Term]`\n\n/// value.\n\n///\n\n/// To be clear: the pointer we use to construct the `[T; N]` slice that we coerce, is a\n\n/// pointer to memory that contains the sized fields of `CustomType<T>` _followed by_ memory that\n\n/// contains the actual `[T; N]` value. Rust is essentially casting the pointer given by\n\n/// adding the offset of the unsized field to the base pointer we provided, plus the size\n\n/// `N` to coerce the sized type to the type of the unsized field. 
The `N` provided is\n\n/// _not_ the total size of the struct in units of `T`, it is always the number of elements\n\n/// contained in the unsized field.\n\n///\n\n/// # Caveats\n\n///\n\n/// - This only works for types that follow Rusts' unsized coercion rules\n\n/// - It is necessary to know the size of the variable-length region, which is generally true\n\n/// for the types we are using this on, thanks to storing the arity in words of all non-immediate\n\n/// types; but it has to be in terms of the element size. For example, `HeapBin` has a slice\n\n/// of bytes, not `Term`, and the arity of the `HeapBin` is the size in words including extra\n\n/// fields, so if we used that arity value, we'd get completely incorrect results. In the case of\n\n/// `HeapBin`, we actually store the binary data size in the `flags` field, so we are able to use\n\n/// that to obtain the `N` for our `[u8; N]` slice. Just be aware that similar steps will be\n\n/// necessary for types that have non-word-sized elements.\n\n///\n\n/// - [DST Coercion RFC](https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md)\n\n/// - [Unsize Trait](http://doc.rust-lang.org/1.38.0/std/marker/trait.Unsize.html)\n\n/// - [Coercion - Nomicon](http://doc.rust-lang.org/1.38.0/nomicon/coercions.html)\n\npub trait UnsizedBoxable<T: Repr>: Boxable<T> + DynamicHeader {\n\n // The type of element contained in the dynamically sized\n\n // area of this type. By default this is specified as `()`,\n\n // with the assumption that elements are word-sized. For\n\n // non-word-sized elements, this is incorrect, e.g. for binary\n\n // data, as found in `HeapBin`\n\n // type Element: Sized;\n\n\n\n /// Given a pointer, this function dereferences the original term header,\n\n /// and uses its arity value to construct a fat pointer to the real term\n\n /// type.\n\n ///\n\n /// The implementation for this function is auto-implemented by default,\n\n /// but should be overridden if the number of elements in the dynamically\n\n /// sized portion of the type are not inferred by the arity produced from\n\n /// the header.\n\n unsafe fn from_raw_term(ptr: *mut T) -> Boxed<Self>;\n\n}\n\n\n\n/// Boxable terms require a header term to be built during\n", "file_path": "liblumen_alloc/src/erts/term/encoding.rs", "rank": 30, "score": 342171.1281778834 }, { "content": "pub fn pid_to_self_or_process(pid: Pid, process_arc: &Arc<Process>) -> Option<Arc<Process>> {\n\n if process_arc.pid() == pid {\n\n Some(process_arc.clone())\n\n } else {\n\n pid_to_process(&pid)\n\n }\n\n}\n\n\n", "file_path": "lumen_runtime/src/registry.rs", "rank": 31, "score": 339233.58176546055 }, { "content": "#[inline]\n\npub fn distance<T: Sized>(a: *const T, b: *const T) -> isize {\n\n let offset_bytes = unsafe { (a as *const u8).offset_from(b as *const u8) };\n\n offset_bytes / mem::size_of::<T>() as isize\n\n}\n\n\n\n/// Returns the absolute distance in units of size `T` between `a` and `b`\n\n///\n\n/// Regardless of the order of the arguments, the value will always be non-negative\n", "file_path": "liblumen_core/src/util/pointer.rs", "rank": 32, "score": 337186.00241675053 }, { "content": "#[inline]\n\npub fn default_heap() -> AllocResult<(*mut Term, usize)> {\n\n let size = default_heap_size();\n\n PROC_ALLOC.alloc(size).map(|ptr| (ptr, size))\n\n}\n\n\n", "file_path": "liblumen_alloc/src/erts/process/alloc.rs", "rank": 33, "score": 330749.8624352752 }, { "content": "pub fn send(\n\n destination: Term,\n\n message: Term,\n\n options: Options,\n\n process: 
&Process,\n\n) -> InternalResult<Sent> {\n\n match destination.decode()? {\n\n TypedTerm::Atom(destination_atom) => {\n\n send_to_name(destination_atom, message, options, process)\n\n }\n\n TypedTerm::Tuple(tuple_box) => {\n\n if tuple_box.len() == 2 {\n\n let name = tuple_box[0];\n\n let name_atom: Atom = name.try_into().with_context(|| format!(\"registered_name ({}) in {{registered_name, node}} ({}) destination is not an atom\", name, destination))?;\n\n\n\n let node = tuple_box[1];\n\n let node_atom: Atom = node.try_into().with_context(|| {\n\n format!(\n\n \"node ({}) in {{registered_name, node}} ({}) destination is not an atom\",\n\n node, destination\n", "file_path": "lumen_runtime/src/send.rs", "rank": 34, "score": 329958.27964691335 }, { "content": "/// A marker trait for index types\n\npub trait TupleIndex: Into<usize> {}\n", "file_path": "liblumen_alloc/src/erts/term/index.rs", "rank": 35, "score": 328077.3240747333 }, { "content": "pub fn atom() -> Atom {\n\n ARC_NODE.name()\n\n}\n\n\n", "file_path": "lumen_runtime/src/distribution/nodes/node.rs", "rank": 36, "score": 327710.0341471998 }, { "content": "pub fn export_closure(process: &Process, module: Atom, function: Atom, arity: u8) -> Term {\n\n let code = |arc_process: &Arc<Process>| {\n\n arc_process.wait();\n\n\n\n Ok(())\n\n };\n\n\n\n process\n\n .export_closure(module, function, arity, Some(code))\n\n .unwrap()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy/term.rs", "rank": 37, "score": 326556.50158614997 }, { "content": "pub fn atom() -> BoxedStrategy<Atom> {\n\n any::<String>()\n\n .prop_filter(\"Reserved for existing/safe atom tests\", |s| {\n\n !s.starts_with(NON_EXISTENT_ATOM_PREFIX)\n\n })\n\n .prop_map(|s| Atom::try_from_str(&s).unwrap())\n\n .boxed()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy.rs", "rank": 38, "score": 319454.0177043361 }, { "content": "/// This trait represents the bare minimum functionality needed\n\n/// to interact with an implementation by a mutator\n\npub trait Histogram: fmt::Display {\n\n /// Add a sample to the histogram.\n\n ///\n\n /// Fails if the sample is out of range of the histogram.\n\n fn add(&mut self, x: u64) -> Result<(), ()>;\n\n}\n\n\n\n/// This macro creates a `Histogram` type in the same module which\n\n/// will use `$LEN` buckets internally for sample data\n\n#[macro_export]\n\nmacro_rules! 
define_histogram {\n\n ($LEN:expr) => {\n\n /// A histogram with a number of bins known at compile time.\n\n #[derive(Clone)]\n\n pub struct Histogram {\n\n /// The ranges defining the bins of the histogram.\n\n range: [u64; $LEN + 1],\n\n /// The bins of the histogram.\n\n bin: [u64; $LEN],\n\n /// Online statistics like mean, variance, etc.\n", "file_path": "liblumen_alloc/src/stats/histogram.rs", "rank": 39, "score": 318900.74964995286 }, { "content": "pub fn dead_atom() -> Atom {\n\n Atom::try_from_str(DEAD_ATOM_NAME).unwrap()\n\n}\n\n\n", "file_path": "lumen_runtime/src/distribution/nodes/node.rs", "rank": 40, "score": 313219.124788246 }, { "content": "#[native_implemented_function(atom_to_binary/2)]\n\npub fn native(process: &Process, atom: Term, encoding: Term) -> exception::Result<Term> {\n\n let atom_atom = term_try_into_atom!(atom)?;\n\n let _: Encoding = encoding.try_into()?;\n\n let binary = process.binary_from_str(atom_atom.name())?;\n\n\n\n Ok(binary)\n\n}\n", "file_path": "lumen_runtime/src/otp/erlang/atom_to_binary_2.rs", "rank": 41, "score": 312342.42764109385 }, { "content": "#[test]\n\nfn without_atom_pid_or_tuple_destination_errors_badarg() {\n\n run!(\n\n |arc_process| {\n\n (\n\n Just(arc_process.clone()),\n\n strategy::term::is_not_destination(arc_process.clone()),\n\n strategy::term(arc_process),\n\n )\n\n },\n\n |(arc_process, destination, message)| {\n\n prop_assert_badarg!(\n\n native(&arc_process, destination, message),\n\n format!(\n\n \"destination ({}) is not registered_name (atom), {{registered_name, node}}, or pid\",\n\n destination\n\n )\n\n );\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n", "file_path": "lumen_runtime/src/otp/erlang/send_2/test.rs", "rank": 42, "score": 310093.22451856604 }, { "content": "pub fn with_binary_without_atom_encoding_errors_badarg(\n\n source_file: &'static str,\n\n native: fn(Term, Term) -> exception::Result<Term>,\n\n) {\n\n run(\n\n source_file,\n\n |arc_process| {\n\n (\n\n strategy::term::is_binary(arc_process.clone()),\n\n strategy::term::is_not_atom(arc_process),\n\n )\n\n },\n\n |(binary, encoding)| {\n\n prop_assert_badarg!(\n\n native(binary, encoding),\n\n format!(\"invalid encoding name value: `{}` is not an atom\", encoding)\n\n );\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/proptest.rs", "rank": 43, "score": 308848.8382695798 }, { "content": "#[inline]\n\npub fn estimate_cost(moved_live_words: usize, resize_moved_words: usize) -> usize {\n\n let reds = (moved_live_words / 10) + (resize_moved_words / 100);\n\n if reds < 1 {\n\n 1\n\n } else {\n\n reds\n\n }\n\n}\n", "file_path": "liblumen_alloc/src/erts/process/gc.rs", "rank": 44, "score": 308767.70234020345 }, { "content": "pub fn module() -> Atom {\n\n Atom::try_from_str(\"Elixir.Lumen.Web.Element\").unwrap()\n\n}\n\n\n\n// Private\n\n\n", "file_path": "lumen_web/src/element.rs", "rank": 45, "score": 307798.19406542944 }, { "content": "/// Returns the default heap size for a process heap\n\npub fn default_heap_size() -> usize {\n\n ProcessHeapAlloc::HEAP_SIZES[ProcessHeapAlloc::MIN_HEAP_SIZE_INDEX]\n\n}\n\n\n\n/// Allocate a new process heap of the given size\n", "file_path": "liblumen_alloc/src/erts/process/alloc.rs", "rank": 46, "score": 307671.6090696202 }, { "content": "pub fn decode_tagged<'a>(safe: bool, bytes: &'a [u8]) -> InternalResult<(Atom, &'a [u8])> {\n\n let (tag, after_tag_bytes) = Tag::decode(bytes)?;\n\n\n\n match tag {\n\n Tag::Atom => decode_atom(safe, after_tag_bytes),\n\n Tag::AtomCacheReference => 
unimplemented!(\"{:?}\", tag),\n\n Tag::AtomUTF8 => atom_utf8::decode_atom(safe, after_tag_bytes),\n\n Tag::SmallAtomUTF8 => small_atom_utf8::decode_atom(safe, after_tag_bytes),\n\n _ => Err(DecodeError::UnexpectedTag { tag, backtrace: Backtrace::capture() }).context(\"An atom tag (ATOM_EXT, ATOM_CACHE_REF, ATOM_UTF8_EXT, or SMALL_ATOM_UTF8_EXT) is expected\").map_err(|error| error.into()),\n\n }\n\n}\n\n\n", "file_path": "lumen_runtime/src/distribution/external_term_format/atom.rs", "rank": 47, "score": 306763.3312578895 }, { "content": "pub fn pid_to_process(pid: &Pid) -> Option<Arc<Process>> {\n\n RW_LOCK_WEAK_PROCESS_CONTROL_BLOCK_BY_PID\n\n .read()\n\n .get(pid)\n\n .and_then(|weak_process| weak_process.clone().upgrade())\n\n}\n\n\n", "file_path": "lumen_runtime/src/registry.rs", "rank": 48, "score": 306113.79337137507 }, { "content": "pub fn with_binary_with_atom_without_name_encoding_errors_badarg(\n\n source_file: &'static str,\n\n native: fn(Term, Term) -> exception::Result<Term>,\n\n) {\n\n run(\n\n source_file,\n\n |arc_process| {\n\n (\n\n strategy::term::is_binary(arc_process.clone()),\n\n strategy::term::atom::is_not_encoding(),\n\n )\n\n },\n\n |(binary, encoding)| {\n\n let encoding_atom: Atom = encoding.try_into().unwrap();\n\n\n\n prop_assert_badarg!(\n\n native(binary, encoding),\n\n format!(\"invalid atom encoding name: '{0}' is not one of the supported values (latin1, unicode, or utf8)\", encoding_atom.name())\n\n );\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/proptest.rs", "rank": 49, "score": 306016.1488955806 }, { "content": "pub fn module() -> Atom {\n\n Atom::try_from_str(\"Elixir.Lumen.Web.WebSocket\").unwrap()\n\n}\n", "file_path": "lumen_web/src/web_socket.rs", "rank": 50, "score": 305393.78900074784 }, { "content": "pub fn module() -> Atom {\n\n Atom::try_from_str(\"test\").unwrap()\n\n}\n", "file_path": "lumen_runtime/src/test/loop.rs", "rank": 51, "score": 305393.7890007478 }, { "content": "pub fn module() -> Atom {\n\n Atom::try_from_str(\"erlang\").unwrap()\n\n}\n\n\n\n// Private\n\n\n", "file_path": "lumen_runtime/src/otp/erlang.rs", "rank": 52, "score": 305393.7890007478 }, { "content": "pub fn function() -> Atom {\n\n Atom::try_from_str(\"loop\").unwrap()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/loop.rs", "rank": 53, "score": 305393.7890007478 }, { "content": "#[inline(always)]\n\npub fn good_alloc_size<T>() -> usize {\n\n // TODO: Need to factor in allocator min alignment\n\n self::round_up_to_multiple_of(mem::size_of::<T>(), mem::align_of::<T>())\n\n}\n\n\n\n/// Like regular division, but rounds up\n", "file_path": "liblumen_core/src/alloc/utils.rs", "rank": 54, "score": 303547.26218301116 }, { "content": "pub fn module() -> Atom {\n\n super::module()\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/apply_2.rs", "rank": 55, "score": 303053.03285380366 }, { "content": "pub fn module() -> Atom {\n\n Atom::try_from_str(\"Elixir.Chain\").unwrap()\n\n}\n", "file_path": "examples/spawn-chain/src/elixir/chain.rs", "rank": 56, "score": 303053.03285380366 }, { "content": "pub fn function() -> Atom {\n\n Atom::try_from_str(\"apply\").unwrap()\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/apply_2.rs", "rank": 57, "score": 303053.03285380366 }, { "content": "pub fn external() -> BoxedStrategy<Atom> {\n\n any::<String>()\n\n .prop_map(|s| Atom::try_from_str(&format!(\"{}@external\", s)).unwrap())\n\n .boxed()\n\n}\n", "file_path": "lumen_runtime/src/test/strategy/node/atom.rs", "rank": 58, "score": 
302506.3852810614 }, { "content": "pub fn atom_to_arc_node(atom: &Atom) -> Option<Arc<Node>> {\n\n RW_LOCK_ARC_NODE_BY_NAME\n\n .read()\n\n .get(atom)\n\n .map(|ref_arc_node| ref_arc_node.clone())\n\n}\n\n\n", "file_path": "lumen_runtime/src/distribution/nodes.rs", "rank": 59, "score": 301952.49986181187 }, { "content": "fn push_tag(byte_vec: &mut Vec<u8>, tag: Tag) {\n\n byte_vec.push(tag.into());\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/term_to_binary.rs", "rank": 60, "score": 301231.90283705026 }, { "content": "pub fn strategy() -> BoxedStrategy<usize> {\n\n RANGE_INCLUSIVE.boxed()\n\n}\n", "file_path": "lumen_runtime/src/test/strategy/size_range.rs", "rank": 61, "score": 300635.03418423736 }, { "content": "pub fn module_atom() -> BoxedStrategy<Atom> {\n\n strategy::atom()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy/term/function.rs", "rank": 62, "score": 300572.18700516777 }, { "content": "pub fn number() -> BoxedStrategy<usize> {\n\n (0..=Pid::NUMBER_MAX).boxed()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy/term/pid.rs", "rank": 63, "score": 300438.67964813067 }, { "content": "pub fn serial() -> BoxedStrategy<usize> {\n\n (0..=Pid::SERIAL_MAX).boxed()\n\n}\n", "file_path": "lumen_runtime/src/test/strategy/term/pid.rs", "rank": 64, "score": 300438.67964813067 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_or_pid_returns_second() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::tuple(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_or_pid(arc_process.clone()),\n\n )\n\n },\n\n |(first, second)| {\n\n prop_assert_eq!(native(first, second), second);\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/min_2/test/with_tuple_first.rs", "rank": 65, "score": 300387.9645277567 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_or_pid_returns_false() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::tuple(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_or_pid(arc_process.clone()),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), false.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_less_than_2/test/with_tuple_left.rs", "rank": 66, "score": 300387.9645277566 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_or_pid_returns_first() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::tuple(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_or_pid(arc_process.clone()),\n\n )\n\n },\n\n |(first, second)| {\n\n prop_assert_eq!(native(first, second), first);\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/max_2/test/with_tuple_first.rs", "rank": 67, "score": 300387.9645277566 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_or_pid_returns_true() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::tuple(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_or_pid(arc_process.clone()),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), true.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_greater_than_2/test/with_tuple_left.rs", "rank": 68, "score": 300387.9645277566 }, { "content": "#[test]\n\nfn without_atom_pid_or_tuple_destination_errors_badarg() {\n\n run!(\n\n |arc_process| {\n\n (\n\n Just(arc_process.clone()),\n\n 
strategy::term::is_not_destination(arc_process.clone()),\n\n strategy::term(arc_process.clone()),\n\n valid_options(arc_process),\n\n )\n\n },\n\n |(arc_process, destination, message, options)| {\n\n prop_assert_badarg!(\n\n native(&arc_process, destination, message, options),\n\n format!(\"destination ({}) is not registered_name (atom), {{registered_name, node}}, or pid\", destination)\n\n );\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/send_3/test/with_proper_list_options.rs", "rank": 69, "score": 300193.33235656057 }, { "content": "pub fn is_not_encoding() -> BoxedStrategy<Term> {\n\n super::atom()\n\n .prop_filter(\"Encoding must not be latin1, unicode, utf8\", |term| {\n\n let atom: Atom = (*term).try_into().unwrap();\n\n\n\n match atom.name() {\n\n \"latin1\" | \"unicode\" | \"utf8\" => false,\n\n _ => true,\n\n }\n\n })\n\n .boxed()\n\n}\n", "file_path": "lumen_runtime/src/test/strategy/term/atom.rs", "rank": 70, "score": 300132.02463564917 }, { "content": "fn verify_tuple_root(tuple_root: Term, tuple_ptr: *mut Term) {\n\n assert!(tuple_root.is_boxed());\n\n let new_tuple_ptr: *mut Term = tuple_root.dyn_cast();\n\n assert_ne!(new_tuple_ptr, tuple_ptr as *mut Term);\n\n let new_tuple_term = unsafe { *new_tuple_ptr };\n\n assert!(!new_tuple_term.is_boxed());\n\n // Assert that we can still access data that should be live\n\n let new_tuple = unsafe { Tuple::from_raw_term(new_tuple_ptr) };\n\n assert_eq!(new_tuple.len(), 2);\n\n // First, the atom\n\n let ok = atom!(\"ok\");\n\n assert_eq!(Ok(ok), new_tuple.get_element(0));\n\n // Then to validate the greeting, we need to follow the boxed term, unwrap it, and validate it\n\n let greeting_element = new_tuple.get_element(1);\n\n assert!(greeting_element.is_ok());\n\n let greeting_box = greeting_element.unwrap();\n\n assert!(greeting_box.is_boxed());\n\n let greeting_ptr: *mut Term = greeting_box.dyn_cast();\n\n let greeting_term = unsafe { *greeting_ptr };\n\n assert!(greeting_term.is_heapbin());\n\n let greeting_str = unsafe { HeapBin::from_raw_term(greeting_ptr) };\n\n assert_eq!(\"goodbye!\", greeting_str.as_str());\n\n}\n", "file_path": "liblumen_alloc/src/erts/process/gc/tests/collector.rs", "rank": 71, "score": 299442.6735745693 }, { "content": "#[native_implemented_function(is_record/3)]\n\npub fn native(term: Term, record_tag: Term, size: Term) -> exception::Result<Term> {\n\n is_record(term, record_tag, Some(size))\n\n}\n", "file_path": "lumen_runtime/src/otp/erlang/is_record_3.rs", "rank": 72, "score": 299196.5326330192 }, { "content": "/// A trait to represent a handle to an allocator which can provide\n\n/// immutable and mutable references to the underlying allocator.\n\n///\n\n/// See `Global` and `Handle` for the two types of allocator references\n\n/// provided by this crate\n\npub trait AllocRef<'a>: Clone + Alloc + Sync {\n\n type Alloc: ?Sized + Alloc + Sync;\n\n\n\n fn alloc_ref(&self) -> &Self::Alloc;\n\n fn alloc_mut(&mut self) -> &mut Self::Alloc;\n\n}\n\n\n\n/// A zero-sized type for global allocators which only have a single instance\n\n#[derive(PartialEq, Eq)]\n\npub struct Global<A: 'static>(PhantomData<&'static A>);\n\nimpl<A: StaticAlloc> Global<A> {\n\n #[inline]\n\n pub fn new() -> Self {\n\n Self(PhantomData)\n\n }\n\n}\n\nimpl<A: StaticAlloc> Clone for Global<A> {\n\n #[inline]\n\n fn clone(&self) -> Self {\n\n Self(PhantomData)\n", "file_path": "liblumen_core/src/alloc/alloc_ref.rs", "rank": 73, "score": 298830.08242083713 }, { "content": "#[test]\n\nfn 
with_number_atom_reference_function_port_pid_or_tuple_returns_true() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::map(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_pid_or_tuple(arc_process),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), true.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_greater_than_2/test/with_map_left.rs", "rank": 74, "score": 297241.8135934399 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_or_pid_returns_false() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::tuple(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_or_pid(arc_process.clone()),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), false.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_equal_or_less_than_2/test/with_tuple_left.rs", "rank": 75, "score": 297241.8135934399 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_or_tuple_returns_false() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::map(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_pid_or_tuple(arc_process),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), false.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_less_than_2/test/with_map_left.rs", "rank": 76, "score": 297241.8135934399 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_or_pid_returns_true() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::tuple(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_or_pid(arc_process.clone()),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), true.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_greater_than_or_equal_2/test/with_tuple_left.rs", "rank": 77, "score": 297241.8135934399 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_or_tuple_second_returns_first() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::map(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_pid_or_tuple(arc_process),\n\n )\n\n },\n\n |(first, second)| {\n\n prop_assert_eq!(native(first, second), first);\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/max_2/test/with_map_first.rs", "rank": 78, "score": 294169.21200534905 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_or_tuple_returns_false() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::map(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_pid_or_tuple(arc_process),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), false.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_equal_or_less_than_2/test/with_map_left.rs", "rank": 79, "score": 294169.21200534905 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_or_tuple_second_returns_second() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::map(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_pid_or_tuple(arc_process),\n\n )\n\n },\n\n |(first, second)| {\n\n prop_assert_eq!(native(first, second), second);\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": 
"lumen_runtime/src/otp/erlang/min_2/test/with_map_first.rs", "rank": 80, "score": 294169.21200534905 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_or_tuple_returns_true() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::map(arc_process.clone()),\n\n strategy::term::number_atom_reference_function_port_pid_or_tuple(\n\n arc_process.clone(),\n\n ),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), true.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_greater_than_or_equal_2/test/with_map_left.rs", "rank": 81, "score": 294169.21200534905 }, { "content": "pub fn atom_bytes_to_term_bytes((atom, bytes): (Atom, &[u8])) -> (Term, &[u8]) {\n\n let term: Term = atom.encode().unwrap();\n\n\n\n (term, bytes)\n\n}\n\n\n", "file_path": "lumen_runtime/src/distribution/external_term_format/atom.rs", "rank": 82, "score": 292920.98422549467 }, { "content": "/// Represents a type which can map allocation sizes to size class sizes\n\npub trait SizeClassIndex {\n\n /// Given a SizeClass returned by `size_class_for`, this returns the\n\n /// position of the size class in the index\n\n fn index_for(&self, size_class: SizeClass) -> usize;\n\n\n\n /// Maps a requested allocation size to the nearest size class size,\n\n /// if a size class is available to fill the request, otherwise returns None\n\n fn size_class_for(&self, request_size: usize) -> Option<SizeClass>;\n\n\n\n /// Same as size_class for, but optimized when the request size is known to be valid\n\n unsafe fn size_class_for_unchecked(&self, request_size: usize) -> SizeClass;\n\n}\n\n\n\n/// Calculates the next nearest factor of the target word size fits `n`\n", "file_path": "liblumen_core/src/alloc/size_classes.rs", "rank": 83, "score": 292651.9451537826 }, { "content": "pub fn try_atom_to_arc_node(atom: &Atom) -> Result<Arc<Node>, NodeNotFound> {\n\n match atom_to_arc_node(atom) {\n\n Some(arc_node) => Ok(arc_node),\n\n None => Err(NodeNotFound::Name {\n\n name: atom.clone(),\n\n backtrace: Backtrace::capture(),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "lumen_runtime/src/distribution/nodes.rs", "rank": 84, "score": 292450.9776425792 }, { "content": "pub fn atom() -> BoxedStrategy<Term> {\n\n super::atom()\n\n .prop_map(|atom| atom.encode().unwrap())\n\n .boxed()\n\n}\n\n\n", "file_path": "lumen_runtime/src/test/strategy/term.rs", "rank": 85, "score": 291843.2298383289 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_returns_true() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n \"Right must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |right| {\n\n right.is_number()\n\n || right.is_atom()\n\n || right.is_reference()\n\n || right.is_boxed_function()\n\n || right.is_port()\n\n || right.is_pid()\n\n || right.is_boxed_tuple()\n\n || right.is_list()\n\n }),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), true.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_greater_than_2/test/with_subbinary_left.rs", "rank": 86, "score": 291167.6104549249 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_returns_false() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n 
\"Right must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |right| {\n\n right.is_number()\n\n || right.is_atom()\n\n || right.is_reference()\n\n || right.is_boxed_function()\n\n || right.is_port()\n\n || right.is_pid()\n\n || right.is_boxed_tuple()\n\n || right.is_list()\n\n }),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), false.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_less_than_2/test/with_subbinary_left.rs", "rank": 87, "score": 291167.6104549249 }, { "content": "pub fn init(minimum_heap_size: usize) -> AllocResult<Process> {\n\n let init = Atom::try_from_str(\"init\").unwrap();\n\n let module_function_arity = Arc::new(ModuleFunctionArity {\n\n module: init,\n\n function: init,\n\n arity: 0,\n\n });\n\n\n\n let heap_size = process::alloc::next_heap_size(minimum_heap_size);\n\n let heap = process::alloc::heap(heap_size)?;\n\n\n\n let process = Process::new(\n\n Default::default(),\n\n None,\n\n Arc::clone(&module_function_arity),\n\n heap,\n\n heap_size,\n\n );\n\n\n\n let frame = Frame::new(module_function_arity, code::init);\n\n process.push_frame(frame);\n\n\n\n Ok(process)\n\n}\n\n\n", "file_path": "lumen_runtime/src/process.rs", "rank": 88, "score": 290727.08223058295 }, { "content": "pub fn unregister(name: &Atom) -> bool {\n\n match RW_LOCK_REGISTERED_BY_NAME.write().remove(name) {\n\n Some(Registered::Process(weak_process)) => match weak_process.upgrade() {\n\n Some(arc_process) => {\n\n let mut writable_registerd_name = arc_process.registered_name.write();\n\n *writable_registerd_name = None;\n\n\n\n true\n\n }\n\n None => false,\n\n },\n\n None => false,\n\n }\n\n}\n\n\n\n#[cfg_attr(test, derive(Debug))]\n\npub enum Registered {\n\n Process(Weak<Process>),\n\n}\n\n\n", "file_path": "lumen_runtime/src/registry.rs", "rank": 89, "score": 290299.90454079164 }, { "content": "#[test]\n\nfn without_atom_pid_or_tuple_destination_errors_badarg() {\n\n run!(\n\n |arc_process| {\n\n (\n\n Just(arc_process.clone()),\n\n milliseconds(),\n\n strategy::term::is_not_send_after_destination(arc_process.clone()),\n\n strategy::term(arc_process.clone()),\n\n abs_value(arc_process.clone()),\n\n )\n\n },\n\n |(arc_process, milliseconds, destination, message, abs_value)| {\n\n let time = arc_process.integer(milliseconds).unwrap();\n\n let options = options(abs_value, &arc_process);\n\n\n\n prop_assert_is_not_boolean!(\n\n native(arc_process.clone(), time, destination, message, options),\n\n \"abs value\",\n\n abs_value\n\n );\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/send_after_4/test/with_proper_list_options/with_list_options/with_abs/with_atom/without_boolean/with_small_integer_time.rs", "rank": 90, "score": 288915.0323039761 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_second_returns_second() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n \"Second must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |second| {\n\n second.is_number()\n\n || second.is_atom()\n\n || second.is_reference()\n\n || second.is_boxed_function()\n\n || second.is_port()\n\n || second.is_pid()\n\n || second.is_boxed_tuple()\n\n || second.is_list()\n\n }),\n\n )\n\n },\n\n |(first, second)| {\n\n prop_assert_eq!(native(first, second), second);\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", 
"file_path": "lumen_runtime/src/otp/erlang/min_2/test/with_subbinary_first.rs", "rank": 91, "score": 288234.57610406267 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_returns_first() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n \"second must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |second| {\n\n second.is_number()\n\n || second.is_atom()\n\n || second.is_reference()\n\n || second.is_boxed_function()\n\n || second.is_port()\n\n || second.is_pid()\n\n || second.is_boxed_tuple()\n\n || second.is_list()\n\n }),\n\n )\n\n },\n\n |(first, second)| {\n\n prop_assert_eq!(native(first, second), first.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/max_2/test/with_heap_binary_first.rs", "rank": 92, "score": 288234.57610406267 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_returns_true() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n \"Right must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |right| {\n\n right.is_number()\n\n || right.is_atom()\n\n || right.is_reference()\n\n || right.is_boxed_function()\n\n || right.is_port()\n\n || right.is_pid()\n\n || right.is_boxed_tuple()\n\n || right.is_list()\n\n }),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), true.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_greater_than_2/test/with_heap_binary_left.rs", "rank": 93, "score": 288234.57610406267 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_returns_false() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone())\n\n .prop_filter(\n\n \"Right must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |right| {\n\n right.is_number() ||\n\n right.is_atom() ||\n\n right.is_reference() ||\n\n right.is_boxed_function() ||\n\n right.is_port() ||\n\n right.is_pid() ||\n\n right.is_boxed_tuple() ||\n\n right.is_list()\n\n }),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), false.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_equal_or_less_than_2/test/with_subbinary_left.rs", "rank": 94, "score": 288234.57610406267 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_second_returns_first() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n \"Second must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |second| {\n\n second.is_number()\n\n || second.is_atom()\n\n || second.is_reference()\n\n || second.is_boxed_function()\n\n || second.is_port()\n\n || second.is_pid()\n\n || second.is_boxed_tuple()\n\n || second.is_list()\n\n }),\n\n )\n\n },\n\n |(first, second)| {\n\n prop_assert_eq!(native(first, second), first);\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/max_2/test/with_subbinary_first.rs", "rank": 95, "score": 288234.5761040626 }, { "content": "#[test]\n\nfn 
with_number_atom_reference_function_port_pid_tuple_map_or_list_returns_second() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n \"second must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |second| {\n\n second.is_number()\n\n || second.is_atom()\n\n || second.is_reference()\n\n || second.is_boxed_function()\n\n || second.is_port()\n\n || second.is_pid()\n\n || second.is_boxed_tuple()\n\n || second.is_list()\n\n }),\n\n )\n\n },\n\n |(first, second)| {\n\n prop_assert_eq!(native(first, second), second);\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/min_2/test/with_heap_binary_first.rs", "rank": 96, "score": 288234.57610406267 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_returns_true() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n \"Right must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |right| {\n\n right.is_number()\n\n || right.is_atom()\n\n || right.is_reference()\n\n || right.is_boxed_function()\n\n || right.is_port()\n\n || right.is_pid()\n\n || right.is_boxed_tuple()\n\n || right.is_list()\n\n }),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), true.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_greater_than_or_equal_2/test/with_subbinary_left.rs", "rank": 97, "score": 288234.57610406267 }, { "content": "#[test]\n\nfn with_number_atom_reference_function_port_pid_tuple_map_or_list_returns_false() {\n\n run!(\n\n |arc_process| {\n\n (\n\n strategy::term::binary::heap(arc_process.clone()),\n\n strategy::term(arc_process.clone()).prop_filter(\n\n \"Right must be number, atom, reference, function, port, pid, tuple, map, or list\",\n\n |right| {\n\n right.is_number()\n\n || right.is_atom()\n\n || right.is_reference()\n\n || right.is_boxed_function()\n\n || right.is_port()\n\n || right.is_pid()\n\n || right.is_boxed_tuple()\n\n || right.is_list()\n\n }),\n\n )\n\n },\n\n |(left, right)| {\n\n prop_assert_eq!(native(left, right), false.into());\n\n\n\n Ok(())\n\n },\n\n );\n\n}\n\n\n", "file_path": "lumen_runtime/src/otp/erlang/is_less_than_2/test/with_heap_binary_left.rs", "rank": 98, "score": 288234.57610406267 }, { "content": "#[native_implemented_function(tuple_size/1)]\n\npub fn native(process: &Process, tuple: Term) -> exception::Result<Term> {\n\n let tuple = term_try_into_tuple!(tuple)?;\n\n let size = process.integer(tuple.len())?;\n\n\n\n Ok(size)\n\n}\n", "file_path": "lumen_runtime/src/otp/erlang/tuple_size_1.rs", "rank": 99, "score": 287576.316382623 } ]
Rust
wayland-commons/src/map.rs
atouchet/wayland-rs
de9eac07cb9d295333a33ee45dae4342341bc26e
use crate::{Interface, MessageGroup, NoMessage};

use std::cmp::Ordering;

pub const SERVER_ID_LIMIT: u32 = 0xFF00_0000;

pub trait ObjectMetadata: Clone {
    fn child(&self) -> Self;
}

impl ObjectMetadata for () {
    fn child(&self) {}
}

#[derive(Clone)]
pub struct Object<Meta: ObjectMetadata> {
    pub interface: &'static str,
    pub version: u32,
    pub requests: &'static [crate::wire::MessageDesc],
    pub events: &'static [crate::wire::MessageDesc],
    pub meta: Meta,
    pub childs_from_events: fn(u16, u32, &Meta) -> Option<Object<Meta>>,
    pub childs_from_requests: fn(u16, u32, &Meta) -> Option<Object<Meta>>,
}

impl<Meta: ObjectMetadata + std::fmt::Debug> std::fmt::Debug for Object<Meta> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Object")
            .field("interface", &self.interface)
            .field("version", &self.version)
            .field("requests", &self.requests)
            .field("events", &self.events)
            .field("meta", &self.meta)
            .finish()
    }
}

impl<Meta: ObjectMetadata> Object<Meta> {
    pub fn from_interface<I: Interface>(version: u32, meta: Meta) -> Object<Meta> {
        Object {
            interface: I::NAME,
            version,
            requests: I::Request::MESSAGES,
            events: I::Event::MESSAGES,
            meta,
            childs_from_events: childs_from::<I::Event, Meta>,
            childs_from_requests: childs_from::<I::Request, Meta>,
        }
    }

    pub fn event_child(&self, opcode: u16) -> Option<Object<Meta>> {
        (self.childs_from_events)(opcode, self.version, &self.meta)
    }

    pub fn request_child(&self, opcode: u16) -> Option<Object<Meta>> {
        (self.childs_from_requests)(opcode, self.version, &self.meta)
    }

    pub fn is_interface<I: Interface>(&self) -> bool {
        self.interface == I::NAME
    }

    pub fn placeholder(meta: Meta) -> Object<Meta> {
        Object {
            interface: "",
            version: 0,
            requests: &[],
            events: &[],
            meta,
            childs_from_events: childs_from::<NoMessage, Meta>,
            childs_from_requests: childs_from::<NoMessage, Meta>,
        }
    }
}

fn childs_from<M: MessageGroup, Meta: ObjectMetadata>(
    opcode: u16,
    version: u32,
    meta: &Meta,
) -> Option<Object<Meta>> {
    M::child(opcode, version, meta)
}

#[derive(Default, Debug)]
pub struct ObjectMap<Meta: ObjectMetadata> {
    client_objects: Vec<Option<Object<Meta>>>,
    server_objects: Vec<Option<Object<Meta>>>,
}

impl<Meta: ObjectMetadata> ObjectMap<Meta> {
    pub fn new() -> ObjectMap<Meta> {
        ObjectMap { client_objects: Vec::new(), server_objects: Vec::new() }
    }

    pub fn find(&self, id: u32) -> Option<Object<Meta>> {
        if id == 0 {
            None
        } else if id >= SERVER_ID_LIMIT {
            self.server_objects.get((id - SERVER_ID_LIMIT) as usize).and_then(Clone::clone)
        } else {
            self.client_objects.get((id - 1) as usize).and_then(Clone::clone)
        }
    }

    pub fn remove(&mut self, id: u32) {
        if id == 0 {
        } else if id >= SERVER_ID_LIMIT {
            if let Some(place) = self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize) {
                *place = None;
            }
        } else if let Some(place) = self.client_objects.get_mut((id - 1) as usize) {
            *place = None;
        }
    }

    #[allow(clippy::result_unit_err)]
    pub fn insert_at(&mut self, id: u32, object: Object<Meta>) -> Result<(), ()> {
        if id == 0 {
            Err(())
        } else if id >= SERVER_ID_LIMIT {
            insert_in_at(&mut self.server_objects, (id - SERVER_ID_LIMIT) as usize, object)
        } else {
            insert_in_at(&mut self.client_objects, (id - 1) as usize, object)
        }
    }

    pub fn client_insert_new(&mut self, object: Object<Meta>) -> u32 {
        insert_in(&mut self.client_objects, object) + 1
    }

    pub fn server_insert_new(&mut self, object: Object<Meta>) -> u32 {
        insert_in(&mut self.server_objects, object) + SERVER_ID_LIMIT
    }

    #[allow(clippy::result_unit_err)]
    pub fn with<T, F: FnOnce(&mut Object<Meta>) -> T>(&mut self, id: u32, f: F) -> Result<T, ()> {
        if id == 0 {
            Err(())
        } else if id >= SERVER_ID_LIMIT {
            if let Some(&mut Some(ref mut obj)) =
                self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize)
            {
                Ok(f(obj))
            } else {
                Err(())
            }
        } else if let Some(&mut Some(ref mut obj)) =
            self.client_objects.get_mut((id - 1) as usize)
        {
            Ok(f(obj))
        } else {
            Err(())
        }
    }

    pub fn with_all<F: FnMut(u32, &mut Object<Meta>)>(&mut self, mut f: F) {
        for (id, place) in self.client_objects.iter_mut().enumerate() {
            if let Some(ref mut obj) = *place {
                f(id as u32 + 1, obj);
            }
        }
        for (id, place) in self.server_objects.iter_mut().enumerate() {
            if let Some(ref mut obj) = *place {
                f(id as u32 + SERVER_ID_LIMIT, obj);
            }
        }
    }
}

fn insert_in<Meta: ObjectMetadata>(
    store: &mut Vec<Option<Object<Meta>>>,
    object: Object<Meta>,
) -> u32 {
    match store.iter().position(Option::is_none) {
        Some(id) => {
            store[id] = Some(object);
            id as u32
        }
        None => {
            store.push(Some(object));
            (store.len() - 1) as u32
        }
    }
}

fn insert_in_at<Meta: ObjectMetadata>(
    store: &mut Vec<Option<Object<Meta>>>,
    id: usize,
    object: Object<Meta>,
) -> Result<(), ()> {
    match id.cmp(&store.len()) {
        Ordering::Greater => Err(()),
        Ordering::Equal => {
            store.push(Some(object));
            Ok(())
        }
        Ordering::Less => {
            let previous = &mut store[id];
            if !previous.is_none() {
                return Err(());
            }
            *previous = Some(object);
            Ok(())
        }
    }
}
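Below, purely as an illustrative sketch (not part of the record data above), is a minimal Rust example of how this ObjectMap partitions its ID space between client- and server-created objects. It assumes the items are publicly reachable as wayland_commons::map and uses the unit type () as placeholder metadata for demonstration only.

use wayland_commons::map::{Object, ObjectMap, SERVER_ID_LIMIT};

fn main() {
    let mut map: ObjectMap<()> = ObjectMap::new();

    // Client-created objects are allocated IDs starting at 1.
    let client_id = map.client_insert_new(Object::placeholder(()));
    assert_eq!(client_id, 1);

    // Server-created objects live in a separate range starting at SERVER_ID_LIMIT.
    let server_id = map.server_insert_new(Object::placeholder(()));
    assert_eq!(server_id, SERVER_ID_LIMIT);

    // Lookup and removal go through the same API for both ranges.
    assert!(map.find(client_id).is_some());
    map.remove(client_id);
    assert!(map.find(client_id).is_none());
}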
use crate::{Interface, MessageGroup, NoMessage};

use std::cmp::Ordering;

pub const SERVER_ID_LIMIT: u32 = 0xFF00_0000;

pub trait ObjectMetadata: Clone {
    fn child(&self) -> Self;
}

impl ObjectMetadata for () {
    fn child(&self) {}
}

#[derive(Clone)]
pub struct Object<Meta: ObjectMetadata> {
    pub interface: &'static str,
    pub version: u32,
    pub requests: &'static [crate::wire::MessageDesc],
    pub events: &'static [crate::wire::MessageDesc],
    pub meta: Meta,
    pub childs_from_events: fn(u16, u32, &Meta) -> Option<Object<Meta>>,
    pub childs_from_requests: fn(u16, u32, &Meta) -> Option<Object<Meta>>,
}

impl<Meta: ObjectMetadata + std::fmt::Debug> std::fmt::Debug for Object<Meta> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Object")
            .field("interface", &self.interface)
            .field("version", &self.version)
            .field("requests", &self.requests)
            .field("events", &self.events)
            .field("meta", &self.meta)
            .finish()
    }
}

impl<Meta: ObjectMetadata> Object<Meta> {
    pub fn from_interface<I: Interface>(version: u32, meta: Meta) -> Object<Meta> {
        Object {
            interface: I::NAME,
            version,
            requests: I::Request::MESSAGES,
            events: I::Event::MESSAGES,
            meta,
            childs_from_events: childs_from::<I::Event, Meta>,
            childs_from_requests: childs_from::<I::Request, Meta>,
        }
    }

    pub fn event_child(&self, opcode: u16) -> Option<Object<Meta>> {
        (self.childs_from_events)(opcode, self.version, &self.meta)
    }

    pub fn request_child(&self, opcode: u16) -> Option<Object<Meta>> {
        (self.childs_from_requests)(opcode, self.version, &self.meta)
    }

    pub fn is_interface<I: Interface>(&self) -> bool {
        self.interface == I::NAME
    }

    pub fn placeholder(meta: Meta) -> Object<Meta> {
        Object {
            interface: "",
            version: 0,
            requests: &[],
            events: &[],
            meta,
            childs_from_events: childs_from::<NoMessage, Meta>,
            childs_from_requests: childs_from::<NoMessage, Meta>,
        }
    }
}

fn childs_from<M: MessageGroup, Meta: ObjectMetadata>(
    opcode: u16,
    version: u32,
    meta: &Meta,
) -> Option<Object<Meta>> {
    M::child(opcode, version, meta)
}

#[derive(Default, Debug)]
pub struct ObjectMap<Meta: ObjectMetadata> {
    client_objects: Vec<Option<Object<Meta>>>,
    server_objects: Vec<Option<Object<Meta>>>,
}

impl<Meta: ObjectMetadata> ObjectMap<Meta> {
    pub fn new() -> ObjectMap<Meta> {
        ObjectMap { client_objects: Vec::new(), server_objects: Vec::new() }
    }

    pub fn find(&self, id: u32) -> Option<Object<Meta>> {
        if id == 0 {
            None
        } else if id >= SERVER_ID_LIMIT {
            self.server_objects.get((id - SERVER_ID_LIMIT) as usize).and_then(Clone::clone)
        } else {
            self.client_objects.get((id - 1) as usize).and_then(Clone::clone)
        }
    }

    pub fn remove(&mut self, id: u32) {
        if id == 0 {
        } else if id >= SERVER_ID_LIMIT {
            if let Some(place) = self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize) {
                *
    #[allow(clippy::result_unit_err)]
    pub fn insert_at(&mut self, id: u32, object: Object<Meta>) -> Result<(), ()> {
        if id == 0 {
            Err(())
        } else if id >= SERVER_ID_LIMIT {
            insert_in_at(&mut self.server_objects, (id - SERVER_ID_LIMIT) as usize, object)
        } else {
            insert_in_at(&mut self.client_objects, (id - 1) as usize, object)
        }
    }

    pub fn client_insert_new(&mut self, object: Object<Meta>) -> u32 {
        insert_in(&mut self.client_objects, object) + 1
    }

    pub fn server_insert_new(&mut self, object: Object<Meta>) -> u32 {
        insert_in(&mut self.server_objects, object) + SERVER_ID_LIMIT
    }

    #[allow(clippy::result_unit_err)]
    pub fn with<T, F: FnOnce(&mut Object<Meta>) -> T>(&mut self, id: u32, f: F) -> Result<T, ()> {
        if id == 0 {
            Err(())
        } else if id >= SERVER_ID_LIMIT {
            if let Some(&mut Some(ref mut obj)) =
                self.server_objects.get_mut((id - SERVER_ID_LIMIT) as usize)
            {
                Ok(f(obj))
            } else {
                Err(())
            }
        } else if let Some(&mut Some(ref mut obj)) =
            self.client_objects.get_mut((id - 1) as usize)
        {
            Ok(f(obj))
        } else {
            Err(())
        }
    }

    pub fn with_all<F: FnMut(u32, &mut Object<Meta>)>(&mut self, mut f: F) {
        for (id, place) in self.client_objects.iter_mut().enumerate() {
            if let Some(ref mut obj) = *place {
                f(id as u32 + 1, obj);
            }
        }
        for (id, place) in self.server_objects.iter_mut().enumerate() {
            if let Some(ref mut obj) = *place {
                f(id as u32 + SERVER_ID_LIMIT, obj);
            }
        }
    }
}

fn insert_in<Meta: ObjectMetadata>(
    store: &mut Vec<Option<Object<Meta>>>,
    object: Object<Meta>,
) -> u32 {
    match store.iter().position(Option::is_none) {
        Some(id) => {
            store[id] = Some(object);
            id as u32
        }
        None => {
            store.push(Some(object));
            (store.len() - 1) as u32
        }
    }
}

fn insert_in_at<Meta: ObjectMetadata>(
    store: &mut Vec<Option<Object<Meta>>>,
    id: usize,
    object: Object<Meta>,
) -> Result<(), ()> {
    match id.cmp(&store.len()) {
        Ordering::Greater => Err(()),
        Ordering::Equal => {
            store.push(Some(object));
            Ok(())
        }
        Ordering::Less => {
            let previous = &mut store[id];
            if !previous.is_none() {
                return Err(());
            }
            *previous = Some(object);
            Ok(())
        }
    }
}
place = None;
            }
        } else if let Some(place) = self.client_objects.get_mut((id - 1) as usize) {
            *place = None;
        }
    }
function_block-function_prefix_line
[ { "content": "fn display_req_child(opcode: u16, _: u32, meta: &ObjectMeta) -> Option<Object<ObjectMeta>> {\n\n match opcode {\n\n // sync\n\n 0 => Some(Object::from_interface::<crate::protocol::wl_callback::WlCallback>(\n\n 1,\n\n meta.child(),\n\n )),\n\n // registry\n\n 1 => Some(Object {\n\n interface: \"wl_registry\",\n\n version: 1,\n\n requests: REGISTRY_REQUESTS,\n\n events: REGISTRY_EVENTS,\n\n meta: meta.child(),\n\n childs_from_events: no_child,\n\n childs_from_requests: no_child,\n\n }),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "wayland-server/src/rust_imp/clients.rs", "rank": 0, "score": 352595.9622058826 }, { "content": "/// Print the dispatched message to stderr in a following format:\n\n///\n\n/// [timestamp] <- interface@id.msg_name(args)\n\npub fn print_dispatched_message(interface: &str, id: u32, msg_name: &str, args: &[Argument]) {\n\n // Add timestamp to output.\n\n print_timestamp();\n\n\n\n eprint!(\" <- {}@{}.{}\", interface, id, msg_name);\n\n\n\n print_args(args);\n\n\n\n // Add a new line.\n\n eprintln!();\n\n}\n\n\n", "file_path": "wayland-commons/src/debug.rs", "rank": 1, "score": 349471.4791195452 }, { "content": "fn no_child(_: u16, _: u32, _: &ObjectMeta) -> Option<Object<ObjectMeta>> {\n\n None\n\n}\n\n\n", "file_path": "wayland-server/src/rust_imp/clients.rs", "rank": 2, "score": 305921.2530413059 }, { "content": "/// The description of a wayland interface\n\n///\n\n/// Implementations of this trait are supposed to be\n\n/// generated using the `wayland-scanner` crate.\n\npub trait Interface: 'static {\n\n /// Set of requests associated to this interface\n\n ///\n\n /// Requests are messages from the client to the server\n\n type Request: MessageGroup + 'static;\n\n /// Set of events associated to this interface\n\n ///\n\n /// Events are messages from the server to the client\n\n type Event: MessageGroup + 'static;\n\n /// Name of this interface\n\n const NAME: &'static str;\n\n /// Maximum supported version of this interface\n\n ///\n\n /// This is the maximum version supported by the protocol specification currently\n\n /// used by this library, and should not be used as-is in your code, as a version\n\n /// change can subtly change the behavior of some objects.\n\n ///\n\n /// Server are supposed to be able to handle all versions from 1 to the one they\n\n /// advertise through the registry, and clients can choose any version among the\n\n /// ones the server supports.\n", "file_path": "wayland-commons/src/lib.rs", "rank": 4, "score": 254001.5451750012 }, { "content": "pub fn is_keyword(txt: &str) -> bool {\n\n match txt {\n\n \"abstract\" | \"alignof\" | \"as\" | \"become\" | \"box\" | \"break\" | \"const\" | \"continue\"\n\n | \"crate\" | \"do\" | \"else\" | \"enum\" | \"extern\" | \"false\" | \"final\" | \"fn\" | \"for\" | \"if\"\n\n | \"impl\" | \"in\" | \"let\" | \"loop\" | \"macro\" | \"match\" | \"mod\" | \"move\" | \"mut\"\n\n | \"offsetof\" | \"override\" | \"priv\" | \"proc\" | \"pub\" | \"pure\" | \"ref\" | \"return\"\n\n | \"Self\" | \"self\" | \"sizeof\" | \"static\" | \"struct\" | \"super\" | \"trait\" | \"true\"\n\n | \"type\" | \"typeof\" | \"unsafe\" | \"unsized\" | \"use\" | \"virtual\" | \"where\" | \"while\"\n\n | \"yield\" | \"__handler\" | \"__object\" => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "wayland-scanner/src/util.rs", "rank": 5, "score": 251531.18383743102 }, { "content": "pub fn is_camel_keyword(txt: &str) -> bool {\n\n match txt {\n\n \"Self\" => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": 
"wayland-scanner/src/util.rs", "rank": 6, "score": 248036.16022545611 }, { "content": "fn send_global_msg(reg: &(u32, ClientInner), global_id: u32, interface: CString, version: u32) {\n\n if let Some(ref mut clientconn) = *reg.1.data.lock().unwrap() {\n\n let _ = clientconn.write_message(&Message {\n\n sender_id: reg.0,\n\n opcode: 0,\n\n args: smallvec![\n\n Argument::Uint(global_id),\n\n Argument::Str(Box::new(interface)),\n\n Argument::Uint(version),\n\n ],\n\n });\n\n }\n\n}\n\n\n", "file_path": "wayland-server/src/rust_imp/globals.rs", "rank": 7, "score": 242146.04191055917 }, { "content": "#[allow(clippy::transmute_ptr_to_ptr)]\n\nfn with_dispatch_meta<T, FB, F>(mut fb: FB, data: DispatchData, f: F) -> T\n\nwhere\n\n FB: FnMut(RawEvent, Main<AnonymousObject>, DispatchData),\n\n F: FnOnce() -> T,\n\n{\n\n // We erase the lifetime of the callback to be able to store it in the tls,\n\n // it's safe as it'll only last until the end of this function call anyway\n\n let fb = unsafe { std::mem::transmute(&mut fb as &mut dyn FnMut(_, _, _)) };\n\n let data = unsafe { std::mem::transmute(data) };\n\n DISPATCH_METADATA.set(&RefCell::new((fb, data)), || f())\n\n}\n\n\n\npub(crate) struct EventQueueInner {\n\n wlevq: *mut wl_event_queue,\n\n inner: Arc<super::DisplayInner>,\n\n}\n\n\n\nimpl EventQueueInner {\n\n pub(crate) fn new(inner: Arc<DisplayInner>, wlevq: *mut wl_event_queue) -> EventQueueInner {\n\n EventQueueInner { wlevq, inner }\n", "file_path": "wayland-client/src/native_lib/event_queue.rs", "rank": 8, "score": 234080.91552160878 }, { "content": "/// Checks if the wayland-egl lib is available and can be used\n\n///\n\n/// Trying to create an `WlEglSurface` while this function returns\n\n/// `false` will result in a panic.\n\npub fn is_available() -> bool {\n\n is_lib_available()\n\n}\n\n\n\nunsafe impl Send for WlEglSurface {}\n\nunsafe impl Sync for WlEglSurface {}\n\n\n\n/// EGL surface\n\n///\n\n/// This object is a simple wrapper around a `WlSurface` to add the EGL\n\n/// capabilities. 
Just use the `ptr` method once this object is created\n\n/// to get the window pointer your OpenGL library is needing to initialize the\n\n/// EGL context (you'll most likely need the display ptr as well, that you can\n\n/// get via the `ptr` method of the `Proxy` trait on the `WlDisplay` object).\n\n#[derive(Debug)]\n\npub struct WlEglSurface {\n\n ptr: *mut wl_egl_window,\n\n}\n\n\n\nimpl WlEglSurface {\n", "file_path": "wayland-egl/src/lib.rs", "rank": 10, "score": 200418.4582037795 }, { "content": "pub fn roundtrip_with_ddata<CD: 'static, SD: 'static>(\n\n client: &mut TestClient,\n\n server: &mut TestServer,\n\n client_ddata: &mut CD,\n\n server_ddata: &mut SD,\n\n) -> io::Result<()> {\n\n // send to the server\n\n let done = Rc::new(Cell::new(false));\n\n let done2 = done.clone();\n\n client.display_proxy.sync().quick_assign(move |_, _, _| done2.set(true));\n\n while !done.get() {\n\n match client.display.flush() {\n\n Ok(_) => {}\n\n Err(e) => {\n\n if e.kind() != ::std::io::ErrorKind::BrokenPipe {\n\n return Err(e);\n\n }\n\n }\n\n }\n\n ::std::thread::sleep(::std::time::Duration::from_millis(100));\n", "file_path": "tests/helpers/mod.rs", "rank": 11, "score": 199619.83200761984 }, { "content": "#[cfg(all(feature = \"server\", feature = \"dlopen\"))]\n\npub fn is_lib_available() -> bool {\n\n WAYLAND_SERVER_OPTION.is_some()\n\n}\n\n\n\n#[cfg(feature = \"server\")]\n\npub mod signal {\n\n #![allow(clippy::cast_ptr_alignment, clippy::missing_safety_doc)]\n\n #[cfg(feature = \"dlopen\")]\n\n use super::WAYLAND_SERVER_HANDLE as WSH;\n\n #[cfg(not(feature = \"dlopen\"))]\n\n use super::{wl_list_init, wl_list_insert};\n\n use super::{wl_listener, wl_notify_func_t, wl_signal};\n\n use crate::common::wl_list;\n\n use std::os::raw::c_void;\n\n use std::ptr;\n\n\n\n macro_rules! 
container_of(\n\n ($ptr: expr, $container: ident, $field: ident) => {\n\n ($ptr as *mut u8).offset(-(memoffset::offset_of!($container, $field) as isize)) as *mut $container\n\n }\n", "file_path": "wayland-sys/src/server.rs", "rank": 14, "score": 197550.80834363058 }, { "content": "#[cfg(feature = \"dlopen\")]\n\npub fn is_lib_available() -> bool {\n\n WAYLAND_EGL_OPTION.is_some()\n\n}\n", "file_path": "wayland-sys/src/egl.rs", "rank": 15, "score": 197550.80834363058 }, { "content": "#[cfg(feature = \"dlopen\")]\n\npub fn is_lib_available() -> bool {\n\n WAYLAND_CURSOR_OPTION.is_some()\n\n}\n", "file_path": "wayland-sys/src/cursor.rs", "rank": 16, "score": 197550.80834363058 }, { "content": "#[cfg(all(feature = \"client\", feature = \"dlopen\"))]\n\npub fn is_lib_available() -> bool {\n\n WAYLAND_CLIENT_OPTION.is_some()\n\n}\n", "file_path": "wayland-sys/src/client.rs", "rank": 17, "score": 197550.80834363058 }, { "content": "pub fn snake_to_camel(input: &str) -> String {\n\n let result = input\n\n .split('_')\n\n .flat_map(|s| {\n\n let mut first = true;\n\n s.chars().map(move |c| {\n\n if first {\n\n first = false;\n\n c.to_ascii_uppercase()\n\n } else {\n\n c\n\n }\n\n })\n\n })\n\n .collect::<String>();\n\n\n\n if is_camel_keyword(&result) {\n\n format!(\"_{}\", &result)\n\n } else {\n\n result\n\n }\n\n}\n\n\n", "file_path": "wayland-scanner/src/util.rs", "rank": 18, "score": 183439.86125227262 }, { "content": "pub fn dotted_to_relname(input: &str) -> TokenStream {\n\n let mut it = input.split('.');\n\n match (it.next(), it.next()) {\n\n (Some(module), Some(name)) => {\n\n let module = Ident::new(module, Span::call_site());\n\n let ident = Ident::new(&snake_to_camel(name), Span::call_site());\n\n quote::quote!(super::#module::#ident)\n\n }\n\n (Some(name), None) => {\n\n Ident::new(&snake_to_camel(name), Span::call_site()).into_token_stream()\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "wayland-scanner/src/util.rs", "rank": 19, "score": 180935.2600962629 }, { "content": "pub fn roundtrip(client: &mut TestClient, server: &mut TestServer) -> io::Result<()> {\n\n roundtrip_with_ddata(client, server, &mut (), &mut ())\n\n}\n\n\n", "file_path": "tests/helpers/mod.rs", "rank": 20, "score": 177005.47500340122 }, { "content": "pub fn null_terminated_byte_string_literal(string: &str) -> Literal {\n\n let mut val = Vec::with_capacity(string.len() + 1);\n\n val.extend_from_slice(string.as_bytes());\n\n val.push(0);\n\n\n\n Literal::byte_string(&val)\n\n}\n", "file_path": "wayland-scanner/src/util.rs", "rank": 21, "score": 176233.60854212483 }, { "content": "fn parse_interface<R: Read>(reader: &mut EventReader<R>, attrs: Vec<OwnedAttribute>) -> Interface {\n\n let mut interface = Interface::new();\n\n for attr in attrs {\n\n match &attr.name.local_name[..] {\n\n \"name\" => interface.name = attr.value,\n\n \"version\" => interface.version = attr.value.parse().unwrap(),\n\n _ => {}\n\n }\n\n }\n\n\n\n loop {\n\n match reader.next() {\n\n Ok(XmlEvent::StartElement { name, attributes, .. }) => match &name.local_name[..] 
{\n\n \"description\" => {\n\n interface.description = Some(parse_description(reader, attributes))\n\n }\n\n \"request\" => interface.requests.push(parse_request(reader, attributes)),\n\n \"event\" => interface.events.push(parse_event(reader, attributes)),\n\n \"enum\" => interface.enums.push(parse_enum(reader, attributes)),\n\n _ => panic!(\"Unexpected tocken: `{}`\", name.local_name),\n\n },\n\n Ok(XmlEvent::EndElement { ref name }) if name.local_name == \"interface\" => break,\n\n _ => {}\n\n }\n\n }\n\n\n\n interface\n\n}\n\n\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 22, "score": 175914.7722525886 }, { "content": "/// Print the send message to stderr in a following format:\n\n///\n\n/// [timestamp] -> interface@id.msg_name(args)\n\n///\n\n/// If `is_alive` is `false` the `[ZOMBIE]` is added after `id`.\n\npub fn print_send_message(\n\n interface: &str,\n\n id: u32,\n\n is_alive: bool,\n\n msg_name: &str,\n\n args: &[Argument],\n\n) {\n\n // Add timestamp to output.\n\n print_timestamp();\n\n\n\n eprint!(\" -> {}@{}{}.{}\", interface, id, if is_alive { \"\" } else { \"[ZOMBIE]\" }, msg_name);\n\n\n\n print_args(args);\n\n\n\n // Add a new line.\n\n eprintln!();\n\n}\n\n\n", "file_path": "wayland-commons/src/debug.rs", "rank": 23, "score": 172379.03591086928 }, { "content": "/// A group of messages\n\n///\n\n/// This represents a group of message that can be serialized on the protocol wire.\n\n/// Typically the set of events or requests of a single interface.\n\n///\n\n/// Implementations of this trait are supposed to be\n\n/// generated using the `wayland-scanner` crate.\n\npub trait MessageGroup: Sized {\n\n /// Wire representation of this MessageGroup\n\n const MESSAGES: &'static [wire::MessageDesc];\n\n /// The wrapper type for ObjectMap allowing the mapping of Object and\n\n /// NewId arguments to the object map during parsing.\n\n type Map;\n\n /// The opcode of this message\n\n fn opcode(&self) -> u16;\n\n /// Whether this message is a destructor\n\n ///\n\n /// If it is, once send or receive the associated object cannot be used any more.\n\n fn is_destructor(&self) -> bool;\n\n /// The minimal object version for which this message exists\n\n fn since(&self) -> u32;\n\n /// Retrieve the child `Object` associated with this message if any\n\n fn child<Meta: self::map::ObjectMetadata>(\n\n opcode: u16,\n\n version: u32,\n\n meta: &Meta,\n\n ) -> Option<crate::map::Object<Meta>>;\n", "file_path": "wayland-commons/src/lib.rs", "rank": 24, "score": 167740.71892532264 }, { "content": "fn parse_request<R: Read>(reader: &mut EventReader<R>, attrs: Vec<OwnedAttribute>) -> Message {\n\n let mut request = Message::new();\n\n for attr in attrs {\n\n match &attr.name.local_name[..] {\n\n \"name\" => request.name = attr.value,\n\n \"type\" => request.typ = Some(parse_type(&attr.value)),\n\n \"since\" => request.since = attr.value.parse().unwrap(),\n\n _ => {}\n\n }\n\n }\n\n\n\n loop {\n\n match reader.next() {\n\n Ok(XmlEvent::StartElement { name, attributes, .. }) => match &name.local_name[..] 
{\n\n \"description\" => request.description = Some(parse_description(reader, attributes)),\n\n \"arg\" => request.args.push(parse_arg(reader, attributes)),\n\n _ => panic!(\"Unexpected tocken: `{}`\", name.local_name),\n\n },\n\n Ok(XmlEvent::EndElement { ref name }) if name.local_name == \"request\" => break,\n\n _ => {}\n\n }\n\n }\n\n\n\n request\n\n}\n\n\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 25, "score": 164296.94075690812 }, { "content": "fn gen_messages(interface: &Interface, messages: &[Message], which: &str) -> TokenStream {\n\n if messages.is_empty() {\n\n return TokenStream::new();\n\n }\n\n\n\n let types_arrays = messages.iter().filter_map(|msg| {\n\n if msg.all_null() {\n\n None\n\n } else {\n\n let array_ident = Ident::new(\n\n &format!(\"{}_{}_{}_types\", interface.name, which, msg.name),\n\n Span::call_site(),\n\n );\n\n let array_len = Literal::usize_unsuffixed(msg.args.len());\n\n let array_values = msg.args.iter().map(|arg| match (arg.typ, &arg.interface) {\n\n (Type::Object, &Some(ref inter)) | (Type::NewId, &Some(ref inter)) => {\n\n let module = Ident::new(inter, Span::call_site());\n\n let interface_ident =\n\n Ident::new(&format!(\"{}_interface\", inter), Span::call_site());\n\n quote!(unsafe { &super::#module::#interface_ident as *const wl_interface })\n", "file_path": "wayland-scanner/src/c_interface_gen.rs", "rank": 26, "score": 160363.25811498196 }, { "content": "fn interface_c_addon(low_name: &str) -> TokenStream {\n\n let iface_name = Ident::new(&format!(\"{}_interface\", low_name), Span::call_site());\n\n quote! {\n\n fn c_interface() -> *const wl_interface {\n\n unsafe { &#iface_name }\n\n }\n\n }\n\n}\n", "file_path": "wayland-scanner/src/c_code_gen.rs", "rank": 27, "score": 156757.34163621895 }, { "content": "/// A trait for implementation of the global advertisement\n\n///\n\n/// It is automatically implemented for `FnMut(Main<I>, DispatchData)` closures,\n\n/// in which case the `error` messages are ignored.\n\npub trait GlobalImplementor<I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>> {\n\n /// A new global of given interface has been instantiated and you can assign\n\n /// a filter to it.\n\n fn new_global(&mut self, global: Main<I>, data: DispatchData);\n\n /// A global was advertised but its version was lower than the minimal version\n\n /// you requested.\n\n ///\n\n /// The advertised version is provided as argument.\n\n fn error(&mut self, _version: u32, _data: DispatchData) {}\n\n}\n\n\n\nimpl<F, I: Interface> GlobalImplementor<I> for F\n\nwhere\n\n I: Interface + AsRef<Proxy<I>> + From<Proxy<I>>,\n\n F: FnMut(Main<I>, DispatchData),\n\n{\n\n fn new_global(&mut self, global: Main<I>, data: DispatchData) {\n\n (*self)(global, data)\n\n }\n\n}\n", "file_path": "wayland-client/src/globals.rs", "rank": 28, "score": 154237.910114226 }, { "content": "#[test]\n\nfn server_id_reuse() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerSeat, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n let srv_dd = Rc::new(RefCell::new(None));\n\n let srv_dd2 = srv_dd.clone();\n\n server.display.create_global::<ServerDDMgr, _>(\n\n 3,\n\n ways::Filter::new(move |(resource, _): (ways::Main<ServerDDMgr>, u32), _, _| {\n\n let srv_dd3 = srv_dd2.clone();\n\n resource.quick_assign(move |_, req, _| {\n\n if let SDDMReq::GetDataDevice { id: ddevice, .. 
} = req {\n\n *srv_dd3.borrow_mut() = Some(ddevice);\n\n }\n\n });\n\n }),\n\n );\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let manager = wayc::GlobalManager::new(&client.display_proxy);\n\n\n", "file_path": "tests/server_created_object.rs", "rank": 29, "score": 147147.89063000667 }, { "content": "/// Generate the code for a protocol from/to IO streams with aditionnal destructor events\n\n///\n\n/// Same as `generate_code_streams`, but allows you to additionnaly specify some events\n\n/// (in the format `(\"interface_name\", \"event_name\")`) as being destructor, as this\n\n/// information is not encoded in the protocol files but instead written in the documentation\n\n/// of the protocol.\n\npub fn generate_code_streams_with_destructor_events<P1: Read, P2: Write>(\n\n protocol: P1,\n\n target: &mut P2,\n\n side: Side,\n\n events: &[(&str, &str)],\n\n) {\n\n let mut protocol = parse::parse_stream(protocol);\n\n\n\n for interface in &mut protocol.interfaces {\n\n for event in &mut interface.events {\n\n if events.contains(&(&interface.name, &event.name)) {\n\n event.typ = Some(crate::protocol::Type::Destructor);\n\n }\n\n }\n\n }\n\n\n\n let output = match side {\n\n Side::Client => c_code_gen::generate_protocol_client(protocol),\n\n Side::Server => c_code_gen::generate_protocol_server(protocol),\n\n };\n\n\n\n write!(target, \"{}\", output).unwrap();\n\n}\n", "file_path": "wayland-scanner/src/lib.rs", "rank": 30, "score": 146471.71614023013 }, { "content": "fn parse_protocol<R: Read>(mut reader: EventReader<R>) -> Protocol {\n\n let mut protocol = extract_from!(\n\n reader => XmlEvent::StartElement { name, attributes, .. } => {\n\n assert!(name.local_name == \"protocol\", \"Missing protocol toplevel tag\");\n\n assert!(attributes[0].name.local_name == \"name\", \"Protocol must have a name\");\n\n Protocol::new(attributes[0].value.clone())\n\n }\n\n );\n\n\n\n loop {\n\n match reader.next() {\n\n Ok(XmlEvent::StartElement { name, attributes, .. }) => {\n\n match &name.local_name[..] 
{\n\n \"copyright\" => {\n\n // parse the copyright\n\n let copyright = match reader.next() {\n\n Ok(XmlEvent::Characters(copyright))\n\n | Ok(XmlEvent::CData(copyright)) => copyright,\n\n e => panic!(\"Ill-formed protocol file: {:?}\", e),\n\n };\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 31, "score": 143315.70748516842 }, { "content": "/// Generate the code for a protocol from/to IO streams\n\n///\n\n/// Like `generate_code`, but takes IO Streams directly rather than filenames\n\n///\n\n/// Args:\n\n///\n\n/// - `protocol`: an object `Read`-able containing the XML protocol file\n\n/// - `target`: a `Write`-able object to which the generated code will be outputted to\n\n/// - `side`: the side (client or server) to generate code for.\n\npub fn generate_code_streams<P1: Read, P2: Write>(protocol: P1, target: &mut P2, side: Side) {\n\n generate_code_streams_with_destructor_events(protocol, target, side, &[])\n\n}\n\n\n", "file_path": "wayland-scanner/src/lib.rs", "rank": 32, "score": 143097.62478102322 }, { "content": "pub fn wl_fixed_to_int(f: wl_fixed_t) -> i32 {\n\n f / 256\n\n}\n\n\n", "file_path": "wayland-sys/src/common.rs", "rank": 33, "score": 142575.1621303629 }, { "content": "pub fn wl_fixed_to_double(f: wl_fixed_t) -> f64 {\n\n f64::from(f) / 256.\n\n}\n\n\n", "file_path": "wayland-sys/src/common.rs", "rank": 34, "score": 142575.1621303629 }, { "content": "fn parse_event<R: Read>(reader: &mut EventReader<R>, attrs: Vec<OwnedAttribute>) -> Message {\n\n let mut event = Message::new();\n\n for attr in attrs {\n\n match &attr.name.local_name[..] {\n\n \"name\" => event.name = attr.value,\n\n \"type\" => event.typ = Some(parse_type(&attr.value)),\n\n \"since\" => event.since = attr.value.parse().unwrap(),\n\n _ => {}\n\n }\n\n }\n\n\n\n loop {\n\n match reader.next() {\n\n Ok(XmlEvent::StartElement { name, attributes, .. }) => match &name.local_name[..] 
{\n\n \"description\" => event.description = Some(parse_description(reader, attributes)),\n\n \"arg\" => event.args.push(parse_arg(reader, attributes)),\n\n _ => panic!(\"Unexpected tocken: `{}`\", name.local_name),\n\n },\n\n Ok(XmlEvent::EndElement { ref name }) if name.local_name == \"event\" => break,\n\n _ => {}\n\n }\n\n }\n\n\n\n event\n\n}\n\n\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 35, "score": 142343.6580058403 }, { "content": "/// Generate the code for a protocol with aditionnal destructor events\n\n///\n\n/// Same as `generate_code`, but allows you to additionnaly specify some events\n\n/// (in the format `(\"interface_name\", \"event_name\")`) as being destructor, as this\n\n/// information is not encoded in the protocol files but instead written in the\n\n/// protocol documentation.\n\npub fn generate_code_with_destructor_events<P1: AsRef<Path>, P2: AsRef<Path>>(\n\n prot: P1,\n\n target: P2,\n\n side: Side,\n\n events: &[(&str, &str)],\n\n) {\n\n let mut protocol = load_xml(prot);\n\n\n\n for interface in &mut protocol.interfaces {\n\n for event in &mut interface.events {\n\n if events.contains(&(&interface.name, &event.name)) {\n\n event.typ = Some(crate::protocol::Type::Destructor);\n\n }\n\n }\n\n }\n\n\n\n {\n\n let mut out =\n\n OpenOptions::new().write(true).truncate(true).create(true).open(&target).unwrap();\n\n\n", "file_path": "wayland-scanner/src/lib.rs", "rank": 36, "score": 140504.86984647697 }, { "content": "struct Inner<E, F: ?Sized> {\n\n pending: RefCell<VecDeque<E>>,\n\n cb: RefCell<F>,\n\n}\n\n\n", "file_path": "wayland-commons/src/filter.rs", "rank": 37, "score": 136205.4770679603 }, { "content": "type VersionedProtocol<'a> = (&'a str, &'a [(&'a str, &'a [(&'a str, &'a str)])]);\n\n// ^ ^ ^ ^ ^ ^\n\n// | | | | | |\n\n// Name | | | | Name of event to specify as\n\n// Versions | | | destructor\n\n// Version | |\n\n// | Interface the event is belongs to\n\n// |\n\n// Events to specify as destructors\n\n\n\nstatic STABLE_PROTOCOLS: &[StableProtocol] =\n\n &[(\"presentation-time\", &[]), (\"viewporter\", &[]), (\"xdg-shell\", &[])];\n\n\n\nstatic STAGING_PROTOCOLS: &[VersionedProtocol] = &[(\"xdg-activation\", &[(\"v1\", &[])])];\n\n\n\nstatic UNSTABLE_PROTOCOLS: &[VersionedProtocol] = &[\n\n (\"fullscreen-shell\", &[(\"v1\", &[])]),\n\n (\"idle-inhibit\", &[(\"v1\", &[])]),\n\n (\"input-method\", &[(\"v1\", &[])]),\n\n (\"input-timestamps\", &[(\"v1\", &[])]),\n", "file_path": "wayland-protocols/build.rs", "rank": 38, "score": 133768.98573859775 }, { "content": "type BoxedHandler<I> = Box<dyn Fn(<I as Interface>::Request, Main<I>, DispatchData<'_>)>;\n", "file_path": "wayland-server/src/native_lib/resource.rs", "rank": 39, "score": 132483.65559247378 }, { "content": "fn parse_entry<R: Read>(reader: &mut EventReader<R>, attrs: Vec<OwnedAttribute>) -> Entry {\n\n let mut entry = Entry::new();\n\n for attr in attrs {\n\n match &attr.name.local_name[..] 
{\n\n \"name\" => entry.name = attr.value,\n\n \"value\" => {\n\n entry.value = if attr.value.starts_with(\"0x\") {\n\n u32::from_str_radix(&attr.value[2..], 16).unwrap()\n\n } else {\n\n attr.value.parse().unwrap()\n\n };\n\n }\n\n \"since\" => entry.since = attr.value.parse().unwrap(),\n\n \"summary\" => {\n\n entry.summary = Some(attr.value.split_whitespace().collect::<Vec<_>>().join(\" \"))\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 40, "score": 130700.8956044577 }, { "content": "fn parse_enum<R: Read>(reader: &mut EventReader<R>, attrs: Vec<OwnedAttribute>) -> Enum {\n\n let mut enu = Enum::new();\n\n for attr in attrs {\n\n match &attr.name.local_name[..] {\n\n \"name\" => enu.name = attr.value,\n\n \"since\" => enu.since = attr.value.parse().unwrap(),\n\n \"bitfield\" => {\n\n if &attr.value[..] == \"true\" {\n\n enu.bitfield = true\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n loop {\n\n match reader.next() {\n\n Ok(XmlEvent::StartElement { name, attributes, .. }) => match &name.local_name[..] {\n\n \"description\" => enu.description = Some(parse_description(reader, attributes)),\n\n \"entry\" => enu.entries.push(parse_entry(reader, attributes)),\n\n _ => panic!(\"Unexpected tocken: `{}`\", name.local_name),\n\n },\n\n Ok(XmlEvent::EndElement { ref name }) if name.local_name == \"enum\" => break,\n\n _ => {}\n\n }\n\n }\n\n\n\n enu\n\n}\n\n\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 41, "score": 130700.8956044577 }, { "content": "fn parse_arg<R: Read>(reader: &mut EventReader<R>, attrs: Vec<OwnedAttribute>) -> Arg {\n\n let mut arg = Arg::new();\n\n for attr in attrs {\n\n match &attr.name.local_name[..] {\n\n \"name\" => arg.name = attr.value,\n\n \"type\" => arg.typ = parse_type(&attr.value),\n\n \"summary\" => {\n\n arg.summary = Some(attr.value.split_whitespace().collect::<Vec<_>>().join(\" \"))\n\n }\n\n \"interface\" => arg.interface = Some(attr.value),\n\n \"allow-null\" => {\n\n if attr.value == \"true\" {\n\n arg.allow_null = true\n\n }\n\n }\n\n \"enum\" => arg.enum_ = Some(attr.value),\n\n _ => {}\n\n }\n\n }\n\n\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 42, "score": 130700.8956044577 }, { "content": "type BoxedCallback<I> = Box<dyn Fn(<I as Interface>::Event, Main<I>, crate::DispatchData<'_>)>;\n\n\n", "file_path": "wayland-client/src/native_lib/proxy.rs", "rank": 43, "score": 129272.90857677467 }, { "content": "fn message_to_rawevent(msg: Message, proxy: &ProxyInner, map: &mut super::ProxyMap) -> RawEvent {\n\n let Message { opcode, args, .. 
} = msg;\n\n\n\n let args = args\n\n .into_iter()\n\n .map(|a| match a {\n\n Argument::Int(i) => crate::Argument::Int(i),\n\n Argument::Uint(u) => crate::Argument::Uint(u),\n\n Argument::Array(v) => {\n\n crate::Argument::Array(if v.is_empty() { None } else { Some(*v) })\n\n }\n\n Argument::Fixed(f) => crate::Argument::Float((f as f32) / 256.),\n\n Argument::Fd(f) => crate::Argument::Fd(f),\n\n Argument::Str(cs) => crate::Argument::Str({\n\n let bytes = cs.into_bytes();\n\n if bytes.is_empty() {\n\n None\n\n } else {\n\n Some(\n\n String::from_utf8(bytes)\n", "file_path": "wayland-client/src/rust_imp/queues.rs", "rank": 44, "score": 128519.19666957116 }, { "content": "fn with_dispatch_data<T, F>(data: crate::DispatchData, f: F) -> T\n\nwhere\n\n F: FnOnce() -> T,\n\n{\n\n // We erase the lifetime of the callback to be able to store it in the tls,\n\n // it's safe as it'll only last until the end of this function call anyway\n\n let data = unsafe { std::mem::transmute(data) };\n\n DISPATCH_DATA.set(&RefCell::new(data), || f())\n\n}\n", "file_path": "wayland-server/src/native_lib/mod.rs", "rank": 45, "score": 128100.57367403449 }, { "content": "pub fn method_prototype<'a>(\n\n iname: &Ident,\n\n msg: &'a Message,\n\n side: Side,\n\n) -> (TokenStream, Option<&'a Arg>) {\n\n let mut it = msg.args.iter().filter(|arg| arg.typ == Type::NewId);\n\n let mut newid = it.next();\n\n assert!(\n\n newid.is_none() || it.next().is_none(),\n\n \"Request {}.{} returns more than one new_id\",\n\n iname,\n\n msg.name\n\n );\n\n\n\n // Serverside we don't deal with NewId arguments and treat them as objects.\n\n if side == Side::Server {\n\n newid = None;\n\n }\n\n\n\n let fn_name = Ident::new(\n", "file_path": "wayland-scanner/src/common_gen.rs", "rank": 46, "score": 127819.57612431391 }, { "content": "type GlobalFilter = Rc<RefCell<dyn FnMut(ClientInner) -> bool>>;\n\n\n\npub(crate) struct GlobalInner<I: Interface> {\n\n _i: ::std::marker::PhantomData<*const I>,\n\n destroyed_marker: Rc<Cell<bool>>,\n\n id: u32,\n\n registries: Rc<RefCell<Vec<(u32, ClientInner)>>>,\n\n filter: Option<GlobalFilter>,\n\n}\n\n\n\nimpl<I: Interface> GlobalInner<I> {\n\n pub fn destroy(self) {\n\n self.destroyed_marker.set(true);\n\n send_destroyed_global(&self.registries.borrow(), self.id, self.filter.as_deref());\n\n }\n\n}\n\n\n", "file_path": "wayland-server/src/rust_imp/globals.rs", "rank": 47, "score": 124441.22957915769 }, { "content": "fn parse_type(txt: &str) -> Type {\n\n match txt {\n\n \"int\" => Type::Int,\n\n \"uint\" => Type::Uint,\n\n \"fixed\" => Type::Fixed,\n\n \"string\" => Type::String,\n\n \"object\" => Type::Object,\n\n \"new_id\" => Type::NewId,\n\n \"array\" => Type::Array,\n\n \"fd\" => Type::Fd,\n\n \"destructor\" => Type::Destructor,\n\n e => panic!(\"Unexpected type: {}\", e),\n\n }\n\n}\n\n\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 48, "score": 124015.26525203115 }, { "content": "type GlobalImplementation = dyn Fn(u32, u32, ClientInner, DispatchData) -> Result<(), ()>;\n\n\n", "file_path": "wayland-server/src/rust_imp/globals.rs", "rank": 49, "score": 123715.68640549025 }, { "content": "type BoxedDest = Box<dyn FnMut(Arc<UserDataMap>, DispatchData<'_>) + 'static>;\n\n\n\npub(crate) struct ClientInternal {\n\n alive: AtomicBool,\n\n user_data_map: Arc<UserDataMap>,\n\n destructors: ThreadGuard<RefCell<Vec<BoxedDest>>>,\n\n safe_thread: std::thread::ThreadId,\n\n}\n\n\n\nimpl ClientInternal {\n\n fn new() -> ClientInternal {\n\n ClientInternal {\n\n alive: AtomicBool::new(true),\n\n 
user_data_map: Arc::new(UserDataMap::new()),\n\n destructors: ThreadGuard::new(RefCell::new(Vec::new())),\n\n safe_thread: std::thread::current().id(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "wayland-server/src/native_lib/client.rs", "rank": 50, "score": 118861.11675486155 }, { "content": "pub fn wl_fixed_from_double(d: f64) -> wl_fixed_t {\n\n (d * 256.) as i32\n\n}\n\n\n", "file_path": "wayland-sys/src/common.rs", "rank": 51, "score": 117180.22184354744 }, { "content": "pub fn wl_fixed_from_int(i: i32) -> wl_fixed_t {\n\n i * 256\n\n}\n\n\n\n// must be the appropriate size\n\n// can contain i32, u32 and pointers\n\n#[repr(C)]\n\npub union wl_argument {\n\n pub i: i32,\n\n pub u: u32,\n\n pub f: wl_fixed_t,\n\n pub s: *const c_char,\n\n pub o: *const c_void,\n\n pub n: u32,\n\n pub a: *const wl_array,\n\n pub h: RawFd,\n\n}\n\n\n\npub type wl_dispatcher_func_t = unsafe extern \"C\" fn(\n\n *const c_void,\n\n *mut c_void,\n\n u32,\n\n *const wl_message,\n\n *const wl_argument,\n\n) -> c_int;\n\npub type wl_log_func_t = unsafe extern \"C\" fn(*const c_char, ...);\n", "file_path": "wayland-sys/src/common.rs", "rank": 52, "score": 117180.22184354744 }, { "content": "fn run_codegen_test(generated_file_path: &Path, expected_output: &str) {\n\n match Command::new(\"rustfmt\")\n\n .arg(\"--config-path\")\n\n .arg(env!(\"CARGO_MANIFEST_DIR\"))\n\n .arg(generated_file_path)\n\n .status()\n\n {\n\n Ok(status) if status.success() => {\n\n let mut file = File::open(generated_file_path).unwrap();\n\n let mut actual_output = String::new();\n\n file.read_to_string(&mut actual_output).unwrap();\n\n\n\n let changeset = Changeset::new(expected_output, &actual_output, \"\\n\");\n\n if changeset.distance != 0 {\n\n print_diff(&changeset.diffs);\n\n panic!(\"Scanner output does not match expected output: d = {}\", changeset.distance);\n\n }\n\n }\n\n _ => {\n\n println!(\"Skipped test because rustfmt is not available!\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/scanner.rs", "rank": 53, "score": 116425.53114574737 }, { "content": "#[test]\n\nfn multi_versions() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerCompositor, _>(4, ways::Filter::new(|_: (_, _), _, _| {}));\n\n server.display.create_global::<ServerCompositor, _>(3, ways::Filter::new(|_: (_, _), _, _| {}));\n\n server.display.create_global::<ServerCompositor, _>(2, ways::Filter::new(|_: (_, _), _, _| {}));\n\n server.display.create_global::<ServerCompositor, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let manager = wayc::GlobalManager::new(&client.display_proxy);\n\n\n\n roundtrip(&mut client, &mut server).unwrap();\n\n let globals = manager.list();\n\n assert!(globals.len() == 4);\n\n let mut seen = [false; 4];\n\n for &(_, ref interface, version) in &globals {\n\n assert!(interface == \"wl_compositor\");\n\n seen[version as usize - 1] = true;\n\n }\n\n assert_eq!(seen, [true, true, true, true]);\n\n}\n\n\n", "file_path": "tests/globals.rs", "rank": 54, "score": 111331.24290045246 }, { "content": "pub fn parse_stream<S: Read>(stream: S) -> Protocol {\n\n let mut reader =\n\n EventReader::new_with_config(stream, ParserConfig::new().trim_whitespace(true));\n\n reader.next().expect(\"Could not read from event reader\");\n\n parse_protocol(reader)\n\n}\n\n\n", "file_path": "wayland-scanner/src/parse.rs", "rank": 55, "score": 111300.8934821692 }, { "content": "#[test]\n\nfn wrong_global_id() {\n\n use 
wayc::protocol::wl_compositor::WlCompositor;\n\n\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerCompositor, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let registry = client.display_proxy.get_registry();\n\n\n\n // instantiate a global with wrong id, this should kill the client\n\n\n\n registry.bind::<WlCompositor>(1, 3);\n\n\n\n assert!(roundtrip(&mut client, &mut server).is_err());\n\n}\n\n\n", "file_path": "tests/globals.rs", "rank": 56, "score": 109138.5359079694 }, { "content": "#[test]\n\nfn wrong_global_version() {\n\n use wayc::protocol::wl_compositor::WlCompositor;\n\n\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerCompositor, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let registry = client.display_proxy.get_registry();\n\n\n\n // instantiate a global with wrong version, this should kill the client\n\n\n\n registry.bind::<WlCompositor>(2, 1);\n\n assert!(roundtrip(&mut client, &mut server).is_err());\n\n}\n\n\n", "file_path": "tests/globals.rs", "rank": 57, "score": 109132.3242867427 }, { "content": "#[test]\n\nfn resource_destructor_request() {\n\n let destructor_called = Arc::new(Mutex::new(false));\n\n let destructor_called_global = destructor_called.clone();\n\n\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerOutput, _>(\n\n 3,\n\n ways::Filter::new(move |(newo, _): (ways::Main<ServerOutput>, _), _, _| {\n\n let destructor_called_resource = destructor_called_global.clone();\n\n newo.quick_assign(|_, _, _| {});\n\n newo.assign_destructor(ways::Filter::new(move |_: ways::Resource<_>, _, _| {\n\n *destructor_called_resource.lock().unwrap() = true;\n\n }));\n\n }),\n\n );\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let manager = wayc::GlobalManager::new(&client.display_proxy);\n\n\n\n roundtrip(&mut client, &mut server).unwrap();\n", "file_path": "tests/destructors.rs", "rank": 58, "score": 109132.3242867427 }, { "content": "#[test]\n\nfn invalid_global_version() {\n\n use wayc::protocol::wl_compositor::WlCompositor;\n\n\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerCompositor, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let registry = client.display_proxy.get_registry();\n\n\n\n // instantiate a global with version 0, which is invalid this should kill the client\n\n\n\n registry.bind::<WlCompositor>(0, 1);\n\n\n\n assert!(roundtrip(&mut client, &mut server).is_err());\n\n}\n\n\n", "file_path": "tests/globals.rs", "rank": 59, "score": 109132.3242867427 }, { "content": "/// Duplicate a `RawFd` and set the CLOEXEC flag on the copy\n\npub fn dup_fd_cloexec(fd: RawFd) -> NixResult<RawFd> {\n\n use nix::fcntl;\n\n match fcntl::fcntl(fd, fcntl::FcntlArg::F_DUPFD_CLOEXEC(0)) {\n\n Ok(newfd) => Ok(newfd),\n\n Err(NixError::EINVAL) => {\n\n // F_DUPFD_CLOEXEC is not recognized, kernel too old, fallback\n\n // to setting CLOEXEC manually\n\n let newfd = fcntl::fcntl(fd, fcntl::FcntlArg::F_DUPFD(0))?;\n\n\n\n let flags = fcntl::fcntl(newfd, fcntl::FcntlArg::F_GETFD);\n\n let result = flags\n\n .map(|f| fcntl::FdFlag::from_bits(f).unwrap() | fcntl::FdFlag::FD_CLOEXEC)\n\n .and_then(|f| fcntl::fcntl(newfd, fcntl::FcntlArg::F_SETFD(f)));\n\n match result {\n\n Ok(_) => {\n\n // setting the O_CLOEXEC worked\n\n Ok(newfd)\n\n 
}\n\n Err(e) => {\n\n // something went wrong in F_GETFD or F_SETFD\n", "file_path": "wayland-commons/src/wire.rs", "rank": 60, "score": 108919.69348291593 }, { "content": "#[test]\n\nfn client_wrong_opcode() {\n\n let mut server = TestServer::new();\n\n\n\n let mut socket: PathBuf = env::var_os(\"XDG_RUNTIME_DIR\").unwrap().into();\n\n socket.push(&server.socket_name);\n\n let socket = UnixStream::connect(socket).unwrap();\n\n\n\n let mut socket = BufferedSocket::new(unsafe { Socket::from_raw_fd(socket.into_raw_fd()) });\n\n socket\n\n .write_message(&Message {\n\n sender_id: 1, // wl_display\n\n opcode: 42, // inexistant\n\n args: smallvec![],\n\n })\n\n .unwrap();\n\n socket.flush().unwrap();\n\n\n\n server.answer();\n\n\n\n // server should have killed us due to the error, but it might send us that error first\n\n let err = socket.fill_incoming_buffers().and_then(|_| socket.fill_incoming_buffers());\n\n assert_eq!(err, Err(nix::Error::EPIPE));\n\n}\n\n\n", "file_path": "tests/protocol_errors.rs", "rank": 61, "score": 107063.07839789054 }, { "content": "#[test]\n\nfn display_to_new_thread() {\n\n let socket_name = \"wayland-client-display-to-new-thread\";\n\n\n\n let kill_switch = Arc::new(Mutex::new(false));\n\n let server_kill_switch = kill_switch.clone();\n\n\n\n let server_startup_info = Arc::new((Mutex::new(false), Condvar::new()));\n\n let server_startup_info_clone = server_startup_info.clone();\n\n\n\n let server_thread = thread::spawn(move || {\n\n let mut display = ways::Display::new();\n\n let socket = display.add_socket(Some(socket_name));\n\n\n\n // Make sure to release the lock.\n\n {\n\n let (lock, cvar) = &*server_startup_info_clone;\n\n let mut started = lock.lock().unwrap();\n\n *started = true;\n\n // Notify the client that we're ready.\n\n cvar.notify_one();\n", "file_path": "tests/client_multithread.rs", "rank": 62, "score": 107056.77579862282 }, { "content": "#[test]\n\nfn client_wrong_id() {\n\n let mut server = TestServer::new();\n\n\n\n let mut socket: PathBuf = env::var_os(\"XDG_RUNTIME_DIR\").unwrap().into();\n\n socket.push(&server.socket_name);\n\n let socket = UnixStream::connect(socket).unwrap();\n\n\n\n let mut socket = BufferedSocket::new(unsafe { Socket::from_raw_fd(socket.into_raw_fd()) });\n\n socket\n\n .write_message(&Message {\n\n sender_id: 1, // wl_display\n\n opcode: 1, // wl_registry\n\n args: smallvec![\n\n Argument::NewId(3), // should be 2\n\n ],\n\n })\n\n .unwrap();\n\n socket.flush().unwrap();\n\n\n\n server.answer();\n\n\n\n // server should have killed us due to the error, but it might send us that error first\n\n let err = socket.fill_incoming_buffers().and_then(|_| socket.fill_incoming_buffers());\n\n assert_eq!(err, Err(nix::Error::EPIPE));\n\n}\n\n\n", "file_path": "tests/protocol_errors.rs", "rank": 63, "score": 107044.52400020925 }, { "content": "/// Print timestamp in seconds.microseconds format.\n\nfn print_timestamp() {\n\n if let Ok(timestamp) = SystemTime::now().duration_since(UNIX_EPOCH) {\n\n let sc = timestamp.as_secs();\n\n let ms = timestamp.subsec_micros();\n\n eprint!(\"[{}.{:06}]\", sc, ms);\n\n }\n\n}\n", "file_path": "wayland-commons/src/debug.rs", "rank": 64, "score": 107044.52400020925 }, { "content": "#[test]\n\nfn constructor_dead() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerSeat, _>(5, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let manager = wayc::GlobalManager::new(&client.display_proxy);\n\n\n\n 
roundtrip(&mut client, &mut server).unwrap();\n\n\n\n let seat = manager.instantiate_exact::<wl_seat::WlSeat>(5).unwrap();\n\n\n\n seat.release();\n\n assert!(!seat.get_pointer().as_ref().is_alive());\n\n}\n\n\n", "file_path": "tests/client_bad_requests.rs", "rank": 65, "score": 107038.46076155883 }, { "content": "#[test]\n\n#[should_panic]\n\nfn wrong_version_create_global() {\n\n let mut server = TestServer::new();\n\n server\n\n .display\n\n .create_global::<ServerCompositor, _>(42, ways::Filter::new(|_: (_, _), _, _| {}));\n\n}\n\n\n", "file_path": "tests/globals.rs", "rank": 66, "score": 107038.46076155883 }, { "content": "#[test]\n\nfn data_offer() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerSeat, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n server.display.create_global::<ServerDDMgr, _>(\n\n 3,\n\n ways::Filter::new(move |(resource, version): (ways::Main<ServerDDMgr>, u32), _, _| {\n\n assert!(version == 3);\n\n resource.quick_assign(|_, request, _| match request {\n\n SDDMReq::GetDataDevice { id: ddevice, .. } => {\n\n // create a data offer and send it\n\n let offer = ddevice\n\n .as_ref()\n\n .client()\n\n .unwrap()\n\n .create_resource::<ServerDO>(ddevice.as_ref().version())\n\n .unwrap();\n\n // this must be the first server-side ID\n\n assert_eq!(offer.as_ref().id(), 0xFF000000);\n\n ddevice.data_offer(&offer);\n\n }\n", "file_path": "tests/server_created_object.rs", "rank": 67, "score": 106917.29082002587 }, { "content": "#[test]\n\nfn dead_object_argument() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerOutput, _>(\n\n 3,\n\n ways::Filter::new(|(output, _): (ways::Main<ServerOutput>, u32), _, mut ddata| {\n\n let opt = ddata.get::<Option<ServerOutput>>().unwrap();\n\n output.quick_assign(|_, _, _| {});\n\n *opt = Some((*output).clone());\n\n }),\n\n );\n\n\n\n // Send a wl_surface.enter() as soon as the surface is created.\n\n server.display.create_global::<ServerCompositor, _>(\n\n 1,\n\n ways::Filter::new(|(comp, _): (ways::Main<ServerCompositor>, u32), _, _| {\n\n comp.quick_assign(|_, req, mut ddata| {\n\n if let ways::protocol::wl_compositor::Request::CreateSurface { id } = req {\n\n id.quick_assign(|_, _, _| {});\n\n let output = ddata.get::<Option<ServerOutput>>().unwrap().as_ref().unwrap();\n\n assert!(output.as_ref().is_alive());\n", "file_path": "tests/client_proxies.rs", "rank": 68, "score": 106917.29082002587 }, { "content": "fn messagegroup_c_addon(\n\n name: &Ident,\n\n parent_iface: &Ident,\n\n side: Side,\n\n receiver: bool,\n\n messages: &[Message],\n\n) -> TokenStream {\n\n let from_raw_c_body = if receiver {\n\n let match_arms = messages\n\n .iter()\n\n .enumerate()\n\n .map(|(i, msg)| {\n\n let pattern = Literal::u16_unsuffixed(i as u16);\n\n let msg_name = Ident::new(&snake_to_camel(&msg.name), Span::call_site());\n\n let msg_name_qualified = quote!(#name::#msg_name);\n\n let (args_binding, result) = if msg.args.is_empty() {\n\n (None, msg_name_qualified)\n\n } else {\n\n let len = Literal::usize_unsuffixed(msg.args.len());\n\n\n", "file_path": "wayland-scanner/src/c_code_gen.rs", "rank": 69, "score": 105072.93491085347 }, { "content": "#[test]\n\n#[cfg(feature = \"client_native\")]\n\nfn display_from_external_on_new_thread() {\n\n let socket_name = \"wayland-client-display-to-new-thread-external\";\n\n\n\n let kill_switch = Arc::new(Mutex::new(false));\n\n let server_kill_switch = kill_switch.clone();\n\n\n\n let server_startup_info = Arc::new((Mutex::new(false), 
Condvar::new()));\n\n let server_startup_info_clone = server_startup_info.clone();\n\n\n\n let server_thread = thread::spawn(move || {\n\n let mut display = ways::Display::new();\n\n let socket = display.add_socket(Some(socket_name));\n\n\n\n // Make sure to release the lock.\n\n {\n\n let (lock, cvar) = &*server_startup_info_clone;\n\n let mut started = lock.lock().unwrap();\n\n *started = true;\n\n // Notify the client that we're ready.\n\n cvar.notify_one();\n", "file_path": "tests/client_multithread.rs", "rank": 70, "score": 105060.18707315101 }, { "content": "#[test]\n\nfn server_created_race() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerSeat, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let server_do = Rc::new(RefCell::new(None));\n\n let server_do_2 = server_do.clone();\n\n server.display.create_global::<ServerDDMgr, _>(\n\n 3,\n\n ways::Filter::new(move |(resource, _): (ways::Main<ServerDDMgr>, u32), _, _| {\n\n let server_do_3 = server_do_2.clone();\n\n resource.quick_assign(move |_, request, _| match request {\n\n SDDMReq::GetDataDevice { id: ddevice, .. } => {\n\n // create a data offer and send it\n\n let offer = ddevice\n\n .as_ref()\n\n .client()\n\n .unwrap()\n\n .create_resource::<ServerDO>(ddevice.as_ref().version())\n\n .unwrap();\n\n offer.quick_assign(|_, _, _| {});\n", "file_path": "tests/server_created_object.rs", "rank": 71, "score": 104923.95635997866 }, { "content": "#[cfg(not(feature = \"client_native\"))]\n\n#[test]\n\nfn creation_destruction_race() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerSeat, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let server_dd = Rc::new(RefCell::new(Vec::new()));\n\n let server_dd_2 = server_dd.clone();\n\n server.display.create_global::<ServerDDMgr, _>(\n\n 3,\n\n ways::Filter::new(move |(resource, _): (ways::Main<ServerDDMgr>, u32), _, _| {\n\n let server_dd_3 = server_dd_2.clone();\n\n resource.quick_assign(move |_, request, _| match request {\n\n SDDMReq::GetDataDevice { id: ddevice, .. } => {\n\n ddevice.quick_assign(|_, _, _| {});\n\n server_dd_3.borrow_mut().push(ddevice);\n\n }\n\n _ => unimplemented!(),\n\n });\n\n }),\n\n );\n\n\n", "file_path": "tests/server_created_object.rs", "rank": 72, "score": 104923.95635997866 }, { "content": "fn insert_compositor(server: &mut TestServer) -> Arc<Mutex<Option<Option<ServerBuffer>>>> {\n\n use ways::protocol::{wl_compositor, wl_surface};\n\n\n\n let buffer_found = Arc::new(Mutex::new(None));\n\n let buffer_found2 = buffer_found.clone();\n\n\n\n ways::request_enum!(Reqs |\n\n Compositor => wl_compositor::WlCompositor,\n\n Surface => wl_surface::WlSurface\n\n );\n\n\n\n let filter = ways::Filter::new(move |req, filter, _| match req {\n\n Reqs::Compositor {\n\n request: wl_compositor::Request::CreateSurface { id: surface }, ..\n\n } => {\n\n surface.assign(filter.clone());\n\n }\n\n Reqs::Surface { request: wl_surface::Request::Attach { buffer, x, y }, .. 
} => {\n\n assert!(x == 0);\n\n assert!(y == 0);\n", "file_path": "tests/attach_to_surface.rs", "rank": 73, "score": 104092.58935736357 }, { "content": "#[test]\n\n#[should_panic]\n\nfn send_constructor_wrong_type() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerSeat, _>(5, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let manager = wayc::GlobalManager::new(&client.display_proxy);\n\n\n\n roundtrip(&mut client, &mut server).unwrap();\n\n\n\n let seat = manager.instantiate_exact::<wl_seat::WlSeat>(5).unwrap();\n\n\n\n let _ = seat.as_ref().send::<wl_keyboard::WlKeyboard>(wl_seat::Request::GetPointer {}, None);\n\n}\n", "file_path": "tests/client_bad_requests.rs", "rank": 74, "score": 103137.15757677497 }, { "content": "fn send_new_global(\n\n registries: &[(u32, ClientInner)],\n\n global_id: u32,\n\n interface: &str,\n\n version: u32,\n\n filter: Option<&RefCell<dyn FnMut(ClientInner) -> bool>>,\n\n) {\n\n let iface = CString::new(interface.as_bytes().to_owned()).unwrap();\n\n if let Some(filter) = filter {\n\n let mut filter = filter.borrow_mut();\n\n for reg in registries {\n\n if !(&mut *filter)(reg.1.clone()) {\n\n continue;\n\n }\n\n send_global_msg(reg, global_id, iface.clone(), version)\n\n }\n\n } else {\n\n for reg in registries {\n\n send_global_msg(reg, global_id, iface.clone(), version)\n\n }\n\n }\n\n}\n\n\n", "file_path": "wayland-server/src/rust_imp/globals.rs", "rank": 75, "score": 101334.03915034687 }, { "content": "#[test]\n\nfn creation_destruction_queue_dispatch_race() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerSeat, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let server_dd = Rc::new(RefCell::new(Vec::new()));\n\n let server_dd_2 = server_dd.clone();\n\n server.display.create_global::<ServerDDMgr, _>(\n\n 3,\n\n ways::Filter::new(move |(resource, _): (ways::Main<ServerDDMgr>, u32), _, _| {\n\n let server_dd_3 = server_dd_2.clone();\n\n resource.quick_assign(move |_, request, _| match request {\n\n SDDMReq::GetDataDevice { id: ddevice, .. 
} => {\n\n server_dd_3.borrow_mut().push(ddevice);\n\n }\n\n _ => unimplemented!(),\n\n });\n\n }),\n\n );\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n", "file_path": "tests/server_created_object.rs", "rank": 76, "score": 101203.88173319062 }, { "content": "/// Print arguments with opening/closing bracket.\n\nfn print_args(args: &[Argument]) {\n\n let num_args = args.len();\n\n\n\n eprint!(\"(\");\n\n\n\n if num_args > 0 {\n\n // Explicitly handle first argument to handle one arg functions nicely.\n\n eprint!(\"{}\", args[0]);\n\n\n\n // Handle the rest.\n\n for arg in args.iter().take(num_args).skip(1) {\n\n eprint!(\", {}\", arg);\n\n }\n\n }\n\n\n\n eprint!(\")\")\n\n}\n\n\n", "file_path": "wayland-commons/src/debug.rs", "rank": 77, "score": 100295.39697033915 }, { "content": "type FdData = (RawFd, Option<Box<dyn FnMut(crate::DispatchData<'_>)>>);\n\n\n\n#[derive(Copy, Clone)]\n\npub(crate) struct Token(usize);\n\n\n\npub(crate) struct FdManager {\n\n epoll_fd: RawFd,\n\n callbacks: RefCell<Vec<Option<FdData>>>,\n\n}\n\n\n\nimpl FdManager {\n\n pub(crate) fn new() -> nix::Result<FdManager> {\n\n let fd = epoll_create1(EpollCreateFlags::EPOLL_CLOEXEC)?;\n\n\n\n Ok(FdManager { epoll_fd: fd, callbacks: RefCell::new(Vec::new()) })\n\n }\n\n\n\n pub(crate) fn register<F: FnMut(DispatchData<'_>) + 'static>(\n\n &self,\n\n fd: RawFd,\n", "file_path": "wayland-server/src/rust_imp/event_loop_glue.rs", "rank": 78, "score": 96319.18331714587 }, { "content": "struct ProxyUserData<I: Interface + From<Proxy<I>> + AsRef<Proxy<I>>> {\n\n internal: Arc<ProxyInternal>,\n\n implem: RefCell<Option<BoxedCallback<I>>>,\n\n}\n\n\n\nimpl<I: Interface + From<Proxy<I>> + AsRef<Proxy<I>>> ProxyUserData<I> {\n\n fn new(user_data: UserData) -> ProxyUserData<I> {\n\n ProxyUserData {\n\n internal: Arc::new(ProxyInternal::new(user_data)),\n\n implem: RefCell::new(None),\n\n }\n\n }\n\n}\n\n\n\nunsafe extern \"C\" fn proxy_dispatcher<I: Interface>(\n\n _implem: *const c_void,\n\n proxy: *mut c_void,\n\n opcode: u32,\n\n _msg: *const wl_message,\n\n args: *const wl_argument,\n", "file_path": "wayland-client/src/native_lib/proxy.rs", "rank": 79, "score": 94372.20911153735 }, { "content": "fn message_signature(msg: &Message) -> Vec<u8> {\n\n let mut res = Vec::new();\n\n\n\n if msg.since > 1 {\n\n res.extend_from_slice(msg.since.to_string().as_bytes());\n\n }\n\n\n\n for arg in &msg.args {\n\n if arg.typ.nullable() && arg.allow_null {\n\n res.push(b'?');\n\n }\n\n match arg.typ {\n\n Type::NewId => {\n\n if arg.interface.is_none() {\n\n res.extend_from_slice(b\"su\");\n\n }\n\n res.push(b'n');\n\n }\n\n Type::Uint => res.push(b'u'),\n\n Type::Fixed => res.push(b'f'),\n", "file_path": "wayland-scanner/src/c_interface_gen.rs", "rank": 80, "score": 92816.36527202356 }, { "content": "/// Generate the code for a protocol\n\n///\n\n/// See this crate toplevel documentation for details.\n\n///\n\n/// Args:\n\n///\n\n/// - `protocol`: a path to the XML file describing the protocol, absolute or relative to\n\n/// the build script using this function.\n\n/// - `target`: the path of the file to store the code in.\n\n/// - `side`: the side (client or server) to generate code for.\n\npub fn generate_code<P1: AsRef<Path>, P2: AsRef<Path>>(prot: P1, target: P2, side: Side) {\n\n generate_code_with_destructor_events(prot, target, side, &[]);\n\n}\n\n\n", "file_path": "wayland-scanner/src/lib.rs", "rank": 81, "score": 91950.782756951 }, { "content": "#[rustfmt::skip]\n\ntype StableProtocol<'a> = (&'a str, &'a [(&'a 
str, &'a str)]);\n", "file_path": "wayland-protocols/build.rs", "rank": 82, "score": 90355.96787353035 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n list: Vec<(u32, String, u32)>,\n\n}\n\n\n\n/// An utility to manage global objects\n\n///\n\n/// This utility provides an implemenation for the registry\n\n/// that track the list of globals for you, as well as utilities\n\n/// to bind them.\n\n#[derive(Clone, Debug)]\n\npub struct GlobalManager {\n\n inner: Arc<Mutex<Inner>>,\n\n registry: Main<wl_registry::WlRegistry>,\n\n}\n\n\n\n/// An error that occurred trying to bind a global\n\n#[derive(Debug, PartialEq)]\n\npub enum GlobalError {\n\n /// The requested global was missing\n\n Missing,\n", "file_path": "wayland-client/src/globals.rs", "rank": 83, "score": 77120.16323893408 }, { "content": "struct Privileged;\n\n\n", "file_path": "tests/server_global_filter.rs", "rank": 84, "score": 77115.05185800928 }, { "content": "struct FdStore {\n\n fds: Vec<RawFd>,\n\n}\n\n\n\nimpl FdStore {\n\n fn new() -> FdStore {\n\n FdStore { fds: Vec::new() }\n\n }\n\n fn push(&mut self, fd: RawFd) {\n\n self.fds.push(fd);\n\n }\n\n fn clear(&mut self) {\n\n self.fds.clear();\n\n }\n\n}\n\n\n\nimpl Drop for FdStore {\n\n fn drop(&mut self) {\n\n use nix::unistd::close;\n\n for fd in self.fds.drain(..) {\n", "file_path": "wayland-commons/src/wire.rs", "rank": 85, "score": 76058.07769065595 }, { "content": "struct DisplayDispatcher {\n\n global_mgr: Rc<RefCell<GlobalManager>>,\n\n}\n\n\n\nimpl super::Dispatcher for DisplayDispatcher {\n\n fn dispatch(\n\n &mut self,\n\n msg: Message,\n\n _resource: ResourceInner,\n\n map: &mut super::ResourceMap,\n\n _data: crate::DispatchData,\n\n ) -> Dispatched {\n\n use crate::protocol::wl_callback;\n\n\n\n if WAYLAND_DEBUG.load(Ordering::Relaxed) {\n\n debug::print_dispatched_message(\n\n \"wl_display\",\n\n 1,\n\n DISPLAY_REQUESTS[msg.opcode as usize].name,\n\n &msg.args,\n", "file_path": "wayland-server/src/rust_imp/clients.rs", "rank": 86, "score": 74088.71500978697 }, { "content": "struct ClientImplementation {\n\n inner: ClientInner,\n\n map: Arc<Mutex<ObjectMap<ObjectMeta>>>,\n\n}\n\n\n\nimpl ClientImplementation {\n\n fn process_messages(&self, mut data: crate::DispatchData) {\n\n loop {\n\n // we must process the messages one by one, because message parsing depends\n\n // on the contents of the object map, which each message can change...\n\n let ret = if let Some(ref mut data) = *self.inner.data.lock().unwrap() {\n\n data.read_request()\n\n } else {\n\n // client is now dead, abort\n\n return;\n\n };\n\n\n\n let msg = match ret {\n\n Ok(None) | Err(Error::Nix(::nix::Error::EAGAIN)) => {\n\n // Nothing more to read.\n", "file_path": "wayland-server/src/rust_imp/clients.rs", "rank": 87, "score": 74088.71500978697 }, { "content": "struct RegistryDispatcher {\n\n global_mgr: Rc<RefCell<GlobalManager>>,\n\n}\n\n\n\nimpl super::Dispatcher for RegistryDispatcher {\n\n fn dispatch(\n\n &mut self,\n\n msg: Message,\n\n resource: ResourceInner,\n\n map: &mut super::ResourceMap,\n\n data: crate::DispatchData,\n\n ) -> Dispatched {\n\n if WAYLAND_DEBUG.load(Ordering::Relaxed) {\n\n debug::print_dispatched_message(\n\n \"wl_registry\",\n\n resource.id,\n\n REGISTRY_REQUESTS[msg.opcode as usize].name,\n\n &msg.args,\n\n );\n\n }\n", "file_path": "wayland-server/src/rust_imp/clients.rs", "rank": 88, "score": 74088.71500978697 }, { "content": "// WlDisplay needs its own dispatcher, as it can be dispatched from multiple threads\n\nstruct DisplayDispatcher {\n\n map: 
Arc<Mutex<ObjectMap<ObjectMeta>>>,\n\n last_error: Arc<Mutex<Option<CxError>>>,\n\n}\n\n\n\nimpl super::Dispatcher for DisplayDispatcher {\n\n fn dispatch(\n\n &mut self,\n\n msg: Message,\n\n proxy: ProxyInner,\n\n map: &mut ProxyMap,\n\n _data: crate::DispatchData,\n\n ) -> Dispatched {\n\n if WAYLAND_DEBUG.load(Ordering::Relaxed) {\n\n debug::print_dispatched_message(\n\n proxy.object.interface,\n\n proxy.id,\n\n proxy.object.events[msg.opcode as usize].name,\n\n &msg.args,\n\n );\n", "file_path": "wayland-client/src/rust_imp/display.rs", "rank": 89, "score": 74088.71500978697 }, { "content": "struct GlobalData {\n\n version: u32,\n\n interface: &'static str,\n\n destroyed: Rc<Cell<bool>>,\n\n implem: Box<GlobalImplementation>,\n\n filter: Option<GlobalFilter>,\n\n}\n\n\n\npub(crate) struct GlobalManager {\n\n registries: Rc<RefCell<Vec<(u32, ClientInner)>>>,\n\n globals: Vec<GlobalData>,\n\n}\n\n\n\nimpl GlobalManager {\n\n pub(crate) fn new() -> GlobalManager {\n\n GlobalManager { registries: Rc::new(RefCell::new(Vec::new())), globals: Vec::new() }\n\n }\n\n\n\n pub(crate) fn add_global<I, F1, F2>(\n\n &mut self,\n", "file_path": "wayland-server/src/rust_imp/globals.rs", "rank": 90, "score": 74088.71500978697 }, { "content": "#[derive(Debug)]\n\nstruct Buffer<T: Copy> {\n\n storage: Vec<T>,\n\n occupied: usize,\n\n offset: usize,\n\n}\n\n\n\nimpl<T: Copy + Default> Buffer<T> {\n\n fn new(size: usize) -> Buffer<T> {\n\n Buffer { storage: vec![T::default(); size], occupied: 0, offset: 0 }\n\n }\n\n\n\n /// Check if this buffer has content to read\n\n fn has_content(&self) -> bool {\n\n self.occupied > self.offset\n\n }\n\n\n\n /// Advance the internal counter of occupied space\n\n fn advance(&mut self, bytes: usize) {\n\n self.occupied += bytes;\n\n }\n", "file_path": "wayland-commons/src/socket.rs", "rank": 91, "score": 70698.05175408146 }, { "content": "struct WaylandListener(UnixListener);\n\n\n\nimpl WaylandListener {\n\n fn eprint_error(&self, error: io::Error) {\n\n if let Ok(addr) = self.0.local_addr() {\n\n if let Some(path) = addr.as_pathname() {\n\n eprintln!(\n\n \"[wayland-server] Error accepting connection on listening socket {} : {}\",\n\n path.display(),\n\n error\n\n );\n\n return;\n\n }\n\n }\n\n eprintln!(\n\n \"[wayland-server] Error accepting connection on listening socket <unnamed> : {}\",\n\n error\n\n );\n\n }\n\n}\n", "file_path": "wayland-server/src/rust_imp/display.rs", "rank": 92, "score": 69957.69660147832 }, { "content": "type DynInner<E> = Inner<E, dyn FnMut(E, &Filter<E>, DispatchData<'_>)>;\n\n\n\n/// An event filter\n\n///\n\n/// Can be used in wayland-client and wayland-server to aggregate\n\n/// messages from different objects into the same closure.\n\n///\n\n/// You need to provide it a closure of type `FnMut(E, &Filter<E>)`,\n\n/// which will be called any time a message is sent to the filter\n\n/// via the `send(..)` method. Your closure also receives a handle\n\n/// to the filter as argument, so that you can use it from within\n\n/// the callback (to assign new wayland objects to this filter for\n\n/// example).\n\n///\n\n/// The `Filter` can be cloned, and all clones send messages to the\n\n/// same closure. 
However it is not threadsafe.\n\npub struct Filter<E> {\n\n inner: Rc<DynInner<E>>,\n\n}\n\n\n", "file_path": "wayland-commons/src/filter.rs", "rank": 93, "score": 67703.24975732758 }, { "content": "#[test]\n\nfn skel() {\n\n // Server setup\n\n //\n\n let mut server = TestServer::new();\n\n\n\n // Client setup\n\n //\n\n let mut client = TestClient::new(&server.socket_name);\n\n\n\n // Some message passing\n\n //\n\n roundtrip(&mut client, &mut server);\n\n\n\n // Final asserts\n\n //\n\n assert!(true);\n\n}\n", "file_path": "tests/skel.rs", "rank": 94, "score": 67079.90770243213 }, { "content": "#[test]\n\nfn simple_global() {\n\n let mut server = TestServer::new();\n\n server.display.create_global::<ServerCompositor, _>(1, ways::Filter::new(|_: (_, _), _, _| {}));\n\n\n\n let mut client = TestClient::new(&server.socket_name);\n\n let manager = wayc::GlobalManager::new(&client.display_proxy);\n\n\n\n roundtrip(&mut client, &mut server).unwrap();\n\n let globals = manager.list();\n\n assert!(globals.len() == 1);\n\n assert_eq!(globals[0], (1, \"wl_compositor\".into(), 1));\n\n}\n\n\n", "file_path": "tests/globals.rs", "rank": 95, "score": 65935.93376249337 }, { "content": " match *self {\n\n Event::_Self { .. } => 2,\n\n }\n\n }\n\n fn child<Meta: ObjectMetadata>(\n\n opcode: u16,\n\n version: u32,\n\n meta: &Meta,\n\n ) -> Option<Object<Meta>> {\n\n match opcode {\n\n _ => None,\n\n }\n\n }\n\n fn from_raw(msg: Message, map: &mut Self::Map) -> Result<Self, ()> {\n\n panic!(\"Event::from_raw can not be used Server-side.\")\n\n }\n\n fn into_raw(self, sender_id: u32) -> Message {\n\n match self {\n\n Event::_Self {\n\n _self,\n", "file_path": "tests/scanner_assets/server_code.rs", "rank": 99, "score": 54.84256371799768 } ]
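Each context item in the list above is a JSON object carrying a content string together with file_path, rank, and score fields. Below is a minimal, hypothetical sketch of how one such item could be deserialized in Rust; the struct name ContextItem and the use of the serde / serde_json crates (with the derive feature) are illustrative assumptions, not part of the dataset itself.

// Sketch only: field names taken from the keys visible in the dump above.
// Assumes dependencies: serde (with the "derive" feature) and serde_json.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ContextItem {
    content: String,
    file_path: String,
    rank: u32,
    score: f64,
}

fn main() {
    // A tiny hand-written example item, not taken from the dataset.
    let raw = r#"{ "content": "fn demo() {}", "file_path": "src/lib.rs", "rank": 1, "score": 42.0 }"#;
    let item: ContextItem = serde_json::from_str(raw).expect("valid context item JSON");
    println!("{} (rank {}, score {})", item.file_path, item.rank, item.score);
}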
Rust
mem6/src/fetch_mod.rs
bestia-dev/mem6_game
9bd5ccab9f66fc884dd7ed2ea774f35520e124eb
use crate::*;
use unwrap::unwrap;
use wasm_bindgen_futures::spawn_local;
use dodrio::VdomWeak;

pub fn async_fetch_game_config_and_update(rrc: &mut RootRenderingComponent, vdom: VdomWeak) {
    let url_config = format!(
        "{}/content/{}/game_config.json",
        rrc.web_data.href, rrc.game_data.game_name
    );
    spawn_local({
        let vdom_on_next_tick = vdom.clone();
        async move {
            let respbody = websysmod::fetch_response(url_config).await;
            let json = unwrap!(serde_json::from_str(respbody.as_str()));
            unwrap!(
                vdom_on_next_tick
                    .with_component({
                        move |root| {
                            let rrc = root.unwrap_mut::<RootRenderingComponent>();
                            rrc.game_data.game_config = json;
                        }
                    })
                    .await
            );
        }
    });
}

pub fn fetch_games_metadata_and_update(href: &str, vdom: VdomWeak) {
    let url_config = format!("{}/content/gamesmetadata.json", href);
    spawn_local({
        let vdom_on_next_tick = vdom.clone();
        async move {
            let respbody = websysmod::fetch_response(url_config).await;
            let v: game_data_mod::GamesMetadata = unwrap!(serde_json::from_str(&respbody));
            unwrap!(
                vdom_on_next_tick
                    .with_component({
                        move |root| {
                            let rrc = root.unwrap_mut::<RootRenderingComponent>();
                            rrc.game_data.content_folders.clear();
                            for x in &v.vec_game_metadata {
                                rrc.game_data.content_folders.push(x.folder.clone());
                            }
                            rrc.game_data.games_metadata = Some(v);
                        }
                    })
                    .await
            );
        }
    });
}

pub fn fetch_videos_and_update(href: &str, vdom: VdomWeak) {
    let url = format!("{}/content/videos.json", href);
    spawn_local({
        let vdom_on_next_tick = vdom.clone();
        async move {
            let respbody = websysmod::fetch_response(url).await;
            let vid_json: game_data_mod::Videos = unwrap!(serde_json::from_str(&respbody));
            unwrap!(
                vdom_on_next_tick
                    .with_component({
                        move |root| {
                            let rrc = root.unwrap_mut::<RootRenderingComponent>();
                            rrc.game_data.videos = vid_json.videos;
                        }
                    })
                    .await
            );
        }
    });
}

pub fn fetch_audio_and_update(href: &str, vdom: VdomWeak) {
    let url = format!("{}/content/audio.json", href);
    spawn_local({
        let vdom_on_next_tick = vdom.clone();
        async move {
            let respbody = websysmod::fetch_response(url).await;
            let aud_json: game_data_mod::Audio = unwrap!(serde_json::from_str(&respbody));
            unwrap!(
                vdom_on_next_tick
                    .with_component({
                        move |root| {
                            let rrc = root.unwrap_mut::<RootRenderingComponent>();
                            rrc.game_data.audio = aud_json.audio;
                        }
                    })
                    .await
            );
        }
    });
}

#[allow(clippy::needless_pass_by_value)]
pub fn fetch_all_img_for_cache_request(rrc: &mut RootRenderingComponent) {
    let (start_index, end_index) = rrc.game_data.grid_start_end_index();
    for i in start_index..end_index {
        #[allow(clippy::indexing_slicing)]
        let x = &rrc.game_data.card_grid_data[i];
        let url_img = format!(
            "content/{}/img/{}",
            rrc.game_data.game_name,
            unwrap!(unwrap!(rrc.game_data.game_config.as_ref())
                .img_filename
                .get(x.card_number))
        );
        spawn_local(websysmod::fetch_only(url_img));
    }
}
use crate::*;
use unwrap::unwrap;
use wasm_bindgen_futures::spawn_local;
use dodrio::VdomWeak;

pub fn async_fetch_game_config_and_update(rrc: &mut RootRenderingComponent, vdom: VdomWeak) {
    let url_config = format!(
        "{}/content/{}/game_config.json",
        rrc.web_data.href, rrc.game_data.game_name
    );
    spawn_local({
        let vdom_on_next_tick = vdom.clone();
        async move {
            let respbody = websysmod::fetch_response(url_config).await;
            let json = unwrap!(serde_json::from_str(respbody.as_str()));
            unwrap!(
                vdom_on_next_tick
                    .with_component({
                        move |root| {
                            let rrc = root.unwrap_mut::<RootRenderingComponent>();
                            rrc.game_data.game_config = json;
                        }
                    })
                    .await
            );
        }
    });
}
pub fn fetch_videos_and_update(href: &str, vdom: VdomWeak) {
    let url = format!("{}/content/videos.json", href);
    spawn_local({
        let vdom_on_next_tick = vdom.clone();
        async move {
            let respbody = websysmod::fetch_response(url).await;
            let vid_json: game_data_mod::Videos = unwrap!(serde_json::from_str(&respbody));
            unwrap!(
                vdom_on_next_tick
                    .with_component({
                        move |root| {
                            let rrc = root.unwrap_mut::<RootRenderingComponent>();
                            rrc.game_data.videos = vid_json.videos;
                        }
                    })
                    .await
            );
        }
    });
}

pub fn fetch_audio_and_update(href: &str, vdom: VdomWeak) {
    let url = format!("{}/content/audio.json", href);
    spawn_local({
        let vdom_on_next_tick = vdom.clone();
        async move {
            let respbody = websysmod::fetch_response(url).await;
            let aud_json: game_data_mod::Audio = unwrap!(serde_json::from_str(&respbody));
            unwrap!(
                vdom_on_next_tick
                    .with_component({
                        move |root| {
                            let rrc = root.unwrap_mut::<RootRenderingComponent>();
                            rrc.game_data.audio = aud_json.audio;
                        }
                    })
                    .await
            );
        }
    });
}

#[allow(clippy::needless_pass_by_value)]
pub fn fetch_all_img_for_cache_request(rrc: &mut RootRenderingComponent) {
    let (start_index, end_index) = rrc.game_data.grid_start_end_index();
    for i in start_index..end_index {
        #[allow(clippy::indexing_slicing)]
        let x = &rrc.game_data.card_grid_data[i];
        let url_img = format!(
            "content/{}/img/{}",
            rrc.game_data.game_name,
            unwrap!(unwrap!(rrc.game_data.game_config.as_ref())
                .img_filename
                .get(x.card_number))
        );
        spawn_local(websysmod::fetch_only(url_img));
    }
}
pub fn fetch_games_metadata_and_update(href: &str, vdom: VdomWeak) {
    let url_config = format!("{}/content/gamesmetadata.json", href);
    spawn_local({
        let vdom_on_next_tick = vdom.clone();
        async move {
            let respbody = websysmod::fetch_response(url_config).await;
            let v: game_data_mod::GamesMetadata = unwrap!(serde_json::from_str(&respbody));
            unwrap!(
                vdom_on_next_tick
                    .with_component({
                        move |root| {
                            let rrc = root.unwrap_mut::<RootRenderingComponent>();
                            rrc.game_data.content_folders.clear();
                            for x in &v.vec_game_metadata {
                                rrc.game_data.content_folders.push(x.folder.clone());
                            }
                            rrc.game_data.games_metadata = Some(v);
                        }
                    })
                    .await
            );
        }
    });
}
function_block-full_function
[ { "content": "/// on click\n\npub fn on_click_take_turn(rrc: &mut RootRenderingComponent, vdom: &VdomWeak) {\n\n // websysmod::debug_write(&format!(\"on_click_take_turn {}\", \"\"));\n\n\n\n let msg_id = ackmsgmod::prepare_for_ack_msg_waiting(rrc, vdom);\n\n\n\n let msg = websocketmod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.to_string(),\n\n msg_data: gamedatamod::WsMessageGameData::MsgTakeTurn { msg_id },\n\n };\n\n ackmsgmod::send_msg_and_write_in_queue(rrc, &msg, msg_id);\n\n\n\n // Here I wait for on_MsgAck from\n\n // every player before call update_take_turn(rrc);\n\n}\n\n\n", "file_path": "mem6/src/statustaketurnmod.rs", "rank": 1, "score": 311087.7360537584 }, { "content": "/// the arrow to the right\n\npub fn game_type_right_onclick(rrc: &mut RootRenderingComponent, vdom: VdomWeak) {\n\n let gmd = &unwrap!(rrc.game_data.games_metadata.as_ref()).vec_game_metadata;\n\n let mut last_name = unwrap!(gmd.last()).name.to_string();\n\n for x in gmd {\n\n if rrc.game_data.game_name.as_str() == last_name.as_str() {\n\n rrc.game_data.game_name = x.name.to_string();\n\n vdom.schedule_render();\n\n break;\n\n }\n\n last_name = x.name.to_string();\n\n }\n\n fetchmod::async_fetch_game_config_and_update(rrc, vdom);\n\n}\n\n\n", "file_path": "mem6/src/htmltemplateimplmod.rs", "rank": 2, "score": 307267.0714216896 }, { "content": "/// left arrow button\n\npub fn game_type_left_onclick(rrc: &mut RootRenderingComponent, vdom: VdomWeak) {\n\n let gmd = &unwrap!(rrc.game_data.games_metadata.as_ref()).vec_game_metadata;\n\n let mut last_name = unwrap!(gmd.first()).name.to_string();\n\n for x in gmd.iter().rev() {\n\n if rrc.game_data.game_name.as_str() == last_name.as_str() {\n\n rrc.game_data.game_name = x.name.to_string();\n\n vdom.schedule_render();\n\n break;\n\n }\n\n last_name = x.name.to_string();\n\n }\n\n fetchmod::async_fetch_game_config_and_update(rrc, vdom);\n\n}\n\n\n", "file_path": "mem6/src/htmltemplateimplmod.rs", "rank": 3, "score": 307267.0714216895 }, { "content": "/// on click\n\npub fn on_click_take_turn(rrc: &mut RootRenderingComponent, vdom: VdomWeak) {\n\n // websysmod::debug_write(&format!(\"on_click_take_turn {}\", \"\"));\n\n\n\n let msg_id = ack_msg_mod::prepare_for_ack_msg_waiting(rrc, vdom.clone());\n\n\n\n let msg = websocket_boiler_mod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.to_string(),\n\n msg_data: game_data_mod::WsMessageGameData::MsgTakeTurn { msg_id },\n\n };\n\n ack_msg_mod::send_msg_and_write_in_queue(rrc, &msg, msg_id);\n\n\n\n // Here I wait for on_MsgAck from\n\n // every player before call update_take_turn(rrc);\n\n}\n\n\n", "file_path": "mem6/src/status_take_turn_mod.rs", "rank": 4, "score": 300098.47052969714 }, { "content": "/// left arrow button\n\npub fn game_type_left_onclick(rrc: &mut RootRenderingComponent, vdom: VdomWeak) {\n\n let gmd = &unwrap!(rrc.game_data.games_metadata.as_ref()).vec_game_metadata;\n\n let mut last_name = unwrap!(gmd.first()).name.to_string();\n\n for x in gmd.iter().rev() {\n\n if rrc.game_data.game_name.as_str() == last_name.as_str() {\n\n rrc.game_data.game_name = x.name.to_string();\n\n vdom.schedule_render();\n\n break;\n\n }\n\n last_name = x.name.to_string();\n\n }\n\n fetch_mod::async_fetch_game_config_and_update(rrc, vdom);\n\n}\n\n\n", "file_path": "mem6/src/html_template_impl_mod.rs", "rank": 5, "score": 296731.17619999225 }, { "content": "/// 
the arrow to the right\n\npub fn game_type_right_onclick(rrc: &mut RootRenderingComponent, vdom: VdomWeak) {\n\n let gmd = &unwrap!(rrc.game_data.games_metadata.as_ref()).vec_game_metadata;\n\n let mut last_name = unwrap!(gmd.last()).name.to_string();\n\n for x in gmd {\n\n if rrc.game_data.game_name.as_str() == last_name.as_str() {\n\n rrc.game_data.game_name = x.name.to_string();\n\n vdom.schedule_render();\n\n break;\n\n }\n\n last_name = x.name.to_string();\n\n }\n\n fetch_mod::async_fetch_game_config_and_update(rrc, vdom);\n\n}\n\n\n", "file_path": "mem6/src/html_template_impl_mod.rs", "rank": 6, "score": 296731.17619999225 }, { "content": "/// prepare for ack msg waiting - return random msg_id\n\npub fn prepare_for_ack_msg_waiting(rrc: &mut RootRenderingComponent, vdom: VdomWeak) -> usize {\n\n let msg_id = websysmod::get_random(1, 0xFFFF_FFFF);\n\n rrc.game_data.game_status = GameStatus::StatusWaitingAckMsg;\n\n vdom.schedule_render();\n\n // return\n\n msg_id\n\n}\n\n\n", "file_path": "mem6/src/ack_msg_mod.rs", "rank": 7, "score": 289768.5341253508 }, { "content": "pub fn match_msg_and_call_function( vdom: VdomWeak,rrc:&mut RootRenderingComponent,msg: websocket_boiler_mod::WsMessageForReceivers) {\n\n match msg.msg_data {\n\n WsMessageGameData::MsgJoin {\n\n my_nickname,\n\n } => {\n\n status_joined_mod::on_msg_joined(rrc, msg.msg_sender_ws_uid, my_nickname);\n\n vdom.schedule_render();\n\n }\n\n WsMessageGameData::MsgStartGame {\n\n \n\n card_grid_data,\n\n game_config,\n\n players,\n\n game_name,\n\n player_turn,\n\n } => {\n\n status_game_data_init_mod::on_msg_start_game(\n\n rrc,\n\n &card_grid_data,\n\n &game_config,\n", "file_path": "mem6/src/websocket_spec_mod.rs", "rank": 8, "score": 260199.15039443702 }, { "content": "#[allow(clippy::indexing_slicing)]\n\npub fn on_click_img_status1st(root: &mut dyn dodrio::RootRender, vdom: VdomWeak, event: &Event) {\n\n // websysmod::debug_write(\"img click\");\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // If the event's target is our image...\n\n let img = match event\n\n .target()\n\n .and_then(|t| t.dyn_into::<HtmlImageElement>().ok())\n\n {\n\n None => return,\n\n // ?? Don't understand what this does. The original was written for Input element.\n\n Some(input) => input,\n\n };\n\n // id attribute of image html element is prefixed with img ex. \"img12\"\n\n let this_click_card_index = unwrap!((img.id()[3..]).parse::<usize>());\n\n // click is useful only on facedown cards\n\n if rrc.game_data.card_grid_data[this_click_card_index]\n\n .status\n\n .as_ref()\n\n == CardStatusCardFace::Down.as_ref()\n\n {\n", "file_path": "mem6/src/status_1st_card_mod.rs", "rank": 9, "score": 252732.74319835484 }, { "content": "#[allow(clippy::indexing_slicing)]\n\npub fn on_click_img_status2nd(root: &mut dyn dodrio::RootRender, vdom: VdomWeak, event: &Event) {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // If the event's target is our image...\n\n let img = match event\n\n .target()\n\n .and_then(|t| t.dyn_into::<HtmlImageElement>().ok())\n\n {\n\n None => return,\n\n // ?? Don't understand what this does. The original was written for Input element.\n\n Some(input) => input,\n\n };\n\n // id attribute of image html element is prefixed with img ex. 
\"img12\"\n\n let this_click_card_index = unwrap!(img.id()[3..].parse::<usize>());\n\n // click is useful only on facedown cards\n\n if rrc.game_data.card_grid_data[this_click_card_index]\n\n .status\n\n .as_ref()\n\n == CardStatusCardFace::Down.as_ref()\n\n {\n\n status_2nd_card_mod::on_click_2nd_card(rrc, vdom.clone(), this_click_card_index);\n", "file_path": "mem6/src/status_2nd_card_mod.rs", "rank": 10, "score": 252732.7431983549 }, { "content": "/// flip back any not permanent card\n\npub fn flip_back(rrc: &mut RootRenderingComponent) {\n\n for x in &mut rrc.game_data.card_grid_data {\n\n if let CardStatusCardFace::UpTemporary = x.status {\n\n x.status = CardStatusCardFace::Down;\n\n }\n\n }\n\n rrc.game_data.card_index_of_1st_click = 0;\n\n rrc.game_data.card_index_of_2nd_click = 0;\n\n}\n\n\n", "file_path": "mem6/src/status1stcardmod.rs", "rank": 11, "score": 249555.9728609551 }, { "content": "/// on msg game over\n\npub fn on_msg_game_over(rrc: &mut RootRenderingComponent) {\n\n // The game is over.\n\n rrc.game_data.game_status = GameStatus::StatusGameOver;\n\n}\n\n\n", "file_path": "mem6/src/statusgameovermod.rs", "rank": 12, "score": 249555.9728609551 }, { "content": "/// group_id is the ws_uid of the first player\n\npub fn on_load_joined(rrc: &mut RootRenderingComponent) {\n\n rrc.game_data.game_status = GameStatus::StatusJoined;\n\n websysmod::debug_write(&format!(\n\n \"StatusJoined send {}\",\n\n rrc.web_data.msg_receivers_json\n\n ));\n\n\n\n rrc.web_data\n\n .send_ws_msg(&websocketmod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.to_string(),\n\n msg_data: gamedatamod::WsMessageGameData::MsgJoin {\n\n my_nickname: rrc.game_data.my_nickname.clone(),\n\n },\n\n });\n\n}\n\n\n", "file_path": "mem6/src/statusjoinedmod.rs", "rank": 13, "score": 249555.9728609551 }, { "content": "/// on msg play again\n\npub fn on_msg_play_again(rrc: &mut RootRenderingComponent) {\n\n // The first players can choose Play again and send to others.\n\n rrc.game_data.game_status = GameStatus::StatusJoined;\n\n rrc.game_data.reset_for_play_again();\n\n htmltemplateimplmod::open_new_local_page(\"#p04\");\n\n}\n", "file_path": "mem6/src/statusgameovermod.rs", "rank": 14, "score": 249555.9728609551 }, { "content": "/// update game data\n\npub fn update_on_take_turn(rrc: &mut RootRenderingComponent) {\n\n rrc.game_data.player_turn = if rrc.game_data.player_turn < rrc.game_data.players.len() {\n\n unwrap!(rrc.game_data.player_turn.checked_add(1))\n\n } else {\n\n 1\n\n };\n\n\n\n rrc.game_data.game_status = GameStatus::Status1stCard;\n\n}\n\n\n\n// there is no special div render, because it jumps to StatusBefore1stClick\n", "file_path": "mem6/src/statustaketurnmod.rs", "rank": 15, "score": 245676.78147494872 }, { "content": "/// update game data\n\npub fn update_on_1st_card(rrc: &mut RootRenderingComponent) {\n\n //websysmod::debug_write(\"update_on_1st_card\");\n\n // flip the card up\n\n rrc.game_data.get_1st_card_mut().status = CardStatusCardFace::UpTemporary;\n\n rrc.game_data.game_status = GameStatus::Status2ndCard;\n\n}\n\n\n", "file_path": "mem6/src/status1stcardmod.rs", "rank": 16, "score": 245676.78147494872 }, { "content": "/// prepares the game data\n\npub fn on_click_start_game(rrc: &mut RootRenderingComponent) {\n\n rrc.game_data.prepare_random_data();\n\n rrc.game_data.game_status = GameStatus::Status1stCard;\n\n // random start player_turn. 
So is not always the first player to start\n\n // gen_range is lower inclusive, upper exclusive\n\n rrc.game_data.player_turn =\n\n websysmod::get_random(1, unwrap!(rrc.game_data.players.len().checked_add(1)));\n\n\n\n rrc.web_data\n\n .send_ws_msg(&websocketmod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.to_string(),\n\n msg_data: gamedatamod::WsMessageGameData::MsgStartGame {\n\n players: unwrap!(serde_json::to_string(&rrc.game_data.players)),\n\n card_grid_data: unwrap!(serde_json::to_string(&rrc.game_data.card_grid_data)),\n\n game_config: unwrap!(serde_json::to_string(&rrc.game_data.game_config)),\n\n game_name: rrc.game_data.game_name.to_string(),\n\n player_turn: rrc.game_data.player_turn,\n\n },\n\n });\n\n}\n\n\n\n/// on game data init\n", "file_path": "mem6/src/statusgamedatainitmod.rs", "rank": 17, "score": 245676.78147494872 }, { "content": "#[allow(clippy::indexing_slicing)]\n\npub fn is_all_permanently(rrc: &mut RootRenderingComponent) -> bool {\n\n let mut is_all_permanently = true;\n\n // the zero element is exceptional, but the iterator uses it\n\n rrc.game_data.card_grid_data[0].status = CardStatusCardFace::UpPermanently;\n\n\n\n for x in &rrc.game_data.card_grid_data {\n\n match x.status {\n\n CardStatusCardFace::UpPermanently => {}\n\n CardStatusCardFace::Down | CardStatusCardFace::UpTemporary => {\n\n is_all_permanently = false;\n\n break;\n\n }\n\n }\n\n }\n\n // return\n\n is_all_permanently\n\n}\n\n\n", "file_path": "mem6/src/status2ndcardmod.rs", "rank": 18, "score": 243924.09890374678 }, { "content": "/// save nickname from html input elements to local storage and rrc\n\npub fn save_nickname_to_local_storage(rrc: &mut RootRenderingComponent) {\n\n let nickname = websysmod::get_input_element_value_string_by_id(\"input_nickname\");\n\n websysmod::save_to_local_storage(\"nickname\", &nickname);\n\n websysmod::debug_write(&format!(\"save nickname to local storage: {}\", &nickname));\n\n\n\n rrc.game_data.my_nickname = nickname.clone();\n\n // change it also in players, if the player exists\n\n if rrc.game_data.my_player_number < rrc.game_data.players.len() {\n\n rrc.game_data.my_player_mut().nickname = nickname;\n\n }\n\n}\n\n\n", "file_path": "mem6/src/storagemod.rs", "rank": 19, "score": 241990.43616123 }, { "content": "/// group_id is the ws_uid of the first player\n\npub fn on_load_joined(rrc: &mut RootRenderingComponent) {\n\n rrc.game_data.game_status = GameStatus::StatusJoined;\n\n websysmod::debug_write(&format!(\n\n \"StatusJoined send {}\",\n\n rrc.web_data.msg_receivers_json\n\n ));\n\n\n\n rrc.web_data\n\n .send_ws_msg_from_web_data(&websocket_boiler_mod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.to_string(),\n\n msg_data: game_data_mod::WsMessageGameData::MsgJoin {\n\n my_nickname: rrc.game_data.my_nickname.clone(),\n\n },\n\n });\n\n}\n\n\n", "file_path": "mem6/src/status_joined_mod.rs", "rank": 20, "score": 241986.34481286918 }, { "content": "/// on msg play again\n\npub fn on_msg_play_again(rrc: &mut RootRenderingComponent) {\n\n // The first players can choose Play again and send to others.\n\n rrc.game_data.game_status = GameStatus::StatusJoined;\n\n rrc.game_data.reset_for_play_again();\n\n html_template_impl_mod::open_new_local_page(\"#p04\");\n\n}\n", "file_path": "mem6/src/status_game_over_mod.rs", "rank": 21, "score": 241986.34481286918 }, { "content": 
"#[allow(clippy::needless_pass_by_value)]\n\npub fn fetch_all_img_for_cache_request(rrc: &mut RootRenderingComponent) {\n\n let (start_index, end_index) = rrc.game_data.grid_start_end_index();\n\n for i in start_index..end_index {\n\n #[allow(clippy::indexing_slicing)]\n\n // index i is calculated to be inside 0..card_grid_data.len()\n\n let x = &rrc.game_data.card_grid_data[i];\n\n\n\n let url_img = format!(\n\n \"content/{}/img/{}\",\n\n rrc.game_data.game_name,\n\n unwrap!(unwrap!(rrc.game_data.game_config.as_ref())\n\n .img_filename\n\n .get(x.card_number))\n\n );\n\n // websysmod::debug_write(&url_img);\n\n // this is async, so I don't care how much it takes\n\n // maybe there could be a problem with too much parallel requests\n\n // from the same browser.\n\n spawn_local(websysmod::fetch_only(url_img));\n\n }\n\n}\n", "file_path": "mem6/src/fetchmod.rs", "rank": 22, "score": 241986.34481286918 }, { "content": "/// on msg game over\n\npub fn on_msg_game_over(rrc: &mut RootRenderingComponent) {\n\n // The game is over.\n\n rrc.game_data.game_status = GameStatus::StatusGameOver;\n\n}\n\n\n", "file_path": "mem6/src/status_game_over_mod.rs", "rank": 23, "score": 241986.34481286915 }, { "content": "/// save nickname from html input elements to local storage and rrc\n\npub fn save_nickname_to_local_storage(rrc: &mut RootRenderingComponent) {\n\n let nickname = websysmod::get_input_element_value_string_by_id(\"input_nickname\");\n\n websysmod::save_to_local_storage(\"nickname\", &nickname);\n\n websysmod::debug_write(&format!(\"save nickname to local storage: {}\", &nickname));\n\n\n\n rrc.game_data.my_nickname = nickname.clone();\n\n // change it also in players, if the player exists\n\n if rrc.game_data.my_player_number < rrc.game_data.players.len() {\n\n rrc.game_data.my_player_mut().nickname = nickname;\n\n }\n\n}\n\n\n", "file_path": "mem6/src/storage_mod.rs", "rank": 24, "score": 238475.30472160992 }, { "content": "/// save group_id from html input elements to local storage and rrc\n\npub fn save_group_id_to_local_storage(rrc: &mut RootRenderingComponent) {\n\n let group_id_string = websysmod::get_input_element_value_string_by_id(\"input_group_id\");\n\n save_group_id_string_to_local_storage(rrc, &group_id_string);\n\n}\n\n\n", "file_path": "mem6/src/storagemod.rs", "rank": 25, "score": 238475.24902315086 }, { "content": "/// flip back any not permanent card\n\npub fn flip_back(rrc: &mut RootRenderingComponent) {\n\n for x in &mut rrc.game_data.card_grid_data {\n\n if let CardStatusCardFace::UpTemporary = x.status {\n\n x.status = CardStatusCardFace::Down;\n\n }\n\n }\n\n rrc.game_data.card_index_of_1st_click = 0;\n\n rrc.game_data.card_index_of_2nd_click = 0;\n\n}\n\n\n", "file_path": "mem6/src/status_1st_card_mod.rs", "rank": 26, "score": 238471.2133732491 }, { "content": "/// save group_id from html input elements to local storage and rrc\n\npub fn save_group_id_to_local_storage(rrc: &mut RootRenderingComponent) {\n\n let group_id_string = websysmod::get_input_element_value_string_by_id(\"input_group_id\");\n\n save_group_id_string_to_local_storage(rrc, &group_id_string);\n\n}\n\n\n", "file_path": "mem6/src/storage_mod.rs", "rank": 28, "score": 235123.2214316453 }, { "content": "/// update game data\n\npub fn update_on_1st_card(rrc: &mut RootRenderingComponent) {\n\n //websysmod::debug_write(\"update_on_1st_card\");\n\n // flip the card up\n\n rrc.game_data.get_1st_card_mut().status = CardStatusCardFace::UpTemporary;\n\n rrc.game_data.game_status = 
GameStatus::Status2ndCard;\n\n}\n\n\n", "file_path": "mem6/src/status_1st_card_mod.rs", "rank": 29, "score": 235119.18578174355 }, { "content": "/// update game data\n\npub fn update_on_take_turn(rrc: &mut RootRenderingComponent) {\n\n rrc.game_data.player_turn = if rrc.game_data.player_turn < rrc.game_data.players.len() {\n\n unwrap!(rrc.game_data.player_turn.checked_add(1))\n\n } else {\n\n 1\n\n };\n\n\n\n rrc.game_data.game_status = GameStatus::Status1stCard;\n\n}\n\n\n\n// there is no special div render, because it jumps to StatusBefore1stClick\n", "file_path": "mem6/src/status_take_turn_mod.rs", "rank": 30, "score": 235119.18578174355 }, { "content": "#[allow(clippy::indexing_slicing)]\n\npub fn is_all_permanently(rrc: &mut RootRenderingComponent) -> bool {\n\n let mut is_all_permanently = true;\n\n // the zero element is exceptional, but the iterator uses it\n\n rrc.game_data.card_grid_data[0].status = CardStatusCardFace::UpPermanently;\n\n\n\n for x in &rrc.game_data.card_grid_data {\n\n match x.status {\n\n CardStatusCardFace::UpPermanently => {}\n\n CardStatusCardFace::Down | CardStatusCardFace::UpTemporary => {\n\n is_all_permanently = false;\n\n break;\n\n }\n\n }\n\n }\n\n // return\n\n is_all_permanently\n\n}\n\n\n", "file_path": "mem6/src/status_2nd_card_mod.rs", "rank": 31, "score": 232839.33941604078 }, { "content": "/// load group_id from local storage\n\npub fn load_group_id_string(rrc: &mut RootRenderingComponent) -> String {\n\n let group_id_string = websysmod::load_string_from_local_storage(\"group_id\", \"\");\n\n set_group_id(rrc, &group_id_string);\n\n // return\n\n group_id_string\n\n}\n\n\n", "file_path": "mem6/src/storagemod.rs", "rank": 32, "score": 232839.33941604078 }, { "content": "/// prepares the game data\n\npub fn on_click_start_game(rrc: &mut RootRenderingComponent) {\n\n rrc.game_data.prepare_random_data();\n\n rrc.game_data.game_status = GameStatus::Status1stCard;\n\n // random start player_turn. 
So is not always the first player to start\n\n // gen_range is lower inclusive, upper exclusive\n\n rrc.game_data.player_turn =\n\n websysmod::get_random(1, unwrap!(rrc.game_data.players.len().checked_add(1)));\n\n\n\n rrc.web_data\n\n .send_ws_msg_from_web_data(&websocket_boiler_mod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.to_string(),\n\n msg_data: game_data_mod::WsMessageGameData::MsgStartGame {\n\n players: unwrap!(serde_json::to_string(&rrc.game_data.players)),\n\n card_grid_data: unwrap!(serde_json::to_string(&rrc.game_data.card_grid_data)),\n\n game_config: unwrap!(serde_json::to_string(&rrc.game_data.game_config)),\n\n game_name: rrc.game_data.game_name.to_string(),\n\n player_turn: rrc.game_data.player_turn,\n\n },\n\n });\n\n}\n\n\n\n/// on game data init\n", "file_path": "mem6/src/status_game_data_init_mod.rs", "rank": 33, "score": 231919.1672896994 }, { "content": "/// load group_id from local storage\n\npub fn load_group_id_string(rrc: &mut RootRenderingComponent) -> String {\n\n let group_id_string = websysmod::load_string_from_local_storage(\"group_id\", \"\");\n\n set_group_id(rrc, &group_id_string);\n\n // return\n\n group_id_string\n\n}\n\n\n", "file_path": "mem6/src/storage_mod.rs", "rank": 34, "score": 229487.31182453522 }, { "content": "// region: nickname\n\n/// save on every key stroke\n\npub fn nickname_onkeyup(rrc: &mut RootRenderingComponent, event: Event) {\n\n // websysmod::debug_write(\"on key up\");\n\n let keyboard_event = unwrap!(event.dyn_into::<KeyboardEvent>());\n\n // websysmod::debug_write(&keyboard_event.key());\n\n if keyboard_event.key() == \"Enter\" {\n\n // open page start group\n\n html_template_impl_mod::open_new_local_page(\"#p02\");\n\n } else {\n\n save_nickname_to_local_storage(rrc);\n\n }\n\n // vdom.schedule_render();\n\n}\n\n\n", "file_path": "mem6/src/storage_mod.rs", "rank": 35, "score": 227844.24090331158 }, { "content": "// region: group_id\n\n/// group id key stroke\n\npub fn group_id_onkeyup(rrc: &mut RootRenderingComponent, event: Event) {\n\n // websysmod::debug_write(\"on key up\");\n\n let keyboard_event = unwrap!(event.dyn_into::<KeyboardEvent>());\n\n // websysmod::debug_write(&keyboard_event.key());\n\n if keyboard_event.key() == \"Enter\" {\n\n // open page start group\n\n html_template_impl_mod::open_new_local_page(\"#p04\");\n\n } else {\n\n save_group_id_to_local_storage(rrc);\n\n }\n\n}\n\n\n", "file_path": "mem6/src/storage_mod.rs", "rank": 36, "score": 224492.21331180603 }, { "content": "/// there are 3 places that must be managed (plus the local_storage)\n\npub fn set_group_id(rrc: &mut RootRenderingComponent, group_id_string: &str) {\n\n rrc.game_data.group_id = group_id_string.parse::<usize>().unwrap_or(0);\n\n // change it also in players[0]\n\n #[allow(clippy::indexing_slicing)]\n\n // cannot panic because player[0] must exist\n\n {\n\n rrc.game_data.players[0].ws_uid = rrc.game_data.group_id;\n\n }\n\n // on any change in players the msg_receivers_json must be constructed\n\n rrc.web_data.msg_receivers_json = rrc.game_data.prepare_json_msg_receivers();\n\n}\n\n// endregion: group_id\n", "file_path": "mem6/src/storagemod.rs", "rank": 37, "score": 221292.19481976185 }, { "content": "#[allow(clippy::integer_arithmetic)] // points +1 is not going to overflow ever\n\npub fn update_click_2nd_card_point(rrc: &mut RootRenderingComponent, is_point: bool) {\n\n if is_point {\n\n rrc.game_data.game_status = GameStatus::StatusDrink;\n\n // 
give points\n\n rrc.game_data.player_turn_now_mut().points += 1;\n\n\n\n if rrc.game_data.is_my_turn() {\n\n // drink\n\n htmltemplateimplmod::open_new_local_page(\"#p06\");\n\n } else {\n\n // do not drink\n\n htmltemplateimplmod::open_new_local_page(\"#p07\");\n\n }\n\n }\n\n}\n\n\n\n/// msg player click\n", "file_path": "mem6/src/status2ndcardmod.rs", "rank": 38, "score": 221292.19481976185 }, { "content": "// region: nickname\n\n/// save on every key stroke\n\npub fn nickname_onkeyup(rrc: &mut RootRenderingComponent, event: web_sys::Event) {\n\n // websysmod::debug_write(\"on key up\");\n\n let keyboard_event = unwrap!(event.dyn_into::<web_sys::KeyboardEvent>());\n\n // websysmod::debug_write(&keyboard_event.key());\n\n if keyboard_event.key() == \"Enter\" {\n\n // open page start group\n\n htmltemplateimplmod::open_new_local_page(\"#p02\");\n\n } else {\n\n save_nickname_to_local_storage(rrc);\n\n }\n\n // vdom.schedule_render();\n\n}\n\n\n", "file_path": "mem6/src/storagemod.rs", "rank": 39, "score": 220031.6721424364 }, { "content": "#[allow(clippy::integer_arithmetic)] // points +1 is not going to overflow ever\n\npub fn update_click_2nd_card_flip_permanently(rrc: &mut RootRenderingComponent, is_point: bool) {\n\n if is_point {\n\n // the two cards matches. make them permanent FaceUp\n\n let x1 = rrc.game_data.card_index_of_1st_click;\n\n let x2 = rrc.game_data.card_index_of_2nd_click;\n\n unwrap!(rrc.game_data.card_grid_data.get_mut(x1)).status =\n\n CardStatusCardFace::UpPermanently;\n\n unwrap!(rrc.game_data.card_grid_data.get_mut(x2)).status =\n\n CardStatusCardFace::UpPermanently;\n\n }\n\n}\n\n\n", "file_path": "mem6/src/status2ndcardmod.rs", "rank": 40, "score": 218234.07462656504 }, { "content": "/// msg response with ws_uid, just to check.\n\npub fn on_response_ws_uid(rrc: &mut RootRenderingComponent, msg_receiver_ws_uid: usize) {\n\n if rrc.web_data.my_ws_uid != msg_receiver_ws_uid {\n\n rrc.web_data.error_text = \"my_ws_uid is incorrect!\".to_string();\n\n }\n\n}\n\n\n", "file_path": "mem6/src/websocketmod.rs", "rank": 41, "score": 218234.07462656504 }, { "content": "/// there are 3 places that must be managed (plus the local_storage)\n\npub fn set_group_id(rrc: &mut RootRenderingComponent, group_id_string: &str) {\n\n rrc.game_data.group_id = group_id_string.parse::<usize>().unwrap_or(0);\n\n // change it also in players[0]\n\n #[allow(clippy::indexing_slicing)]\n\n // cannot panic because player[0] must exist\n\n {\n\n rrc.game_data.players[0].ws_uid = rrc.game_data.group_id;\n\n }\n\n // on any change in players the msg_receivers_json must be constructed\n\n rrc.web_data.msg_receivers_json = rrc.game_data.prepare_json_msg_receivers();\n\n}\n\n// endregion: group_id\n", "file_path": "mem6/src/storage_mod.rs", "rank": 42, "score": 218234.07462656504 }, { "content": "// region: group_id\n\n/// group id key stroke\n\npub fn group_id_onkeyup(rrc: &mut RootRenderingComponent, event: web_sys::Event) {\n\n // websysmod::debug_write(\"on key up\");\n\n let keyboard_event = unwrap!(event.dyn_into::<web_sys::KeyboardEvent>());\n\n // websysmod::debug_write(&keyboard_event.key());\n\n if keyboard_event.key() == \"Enter\" {\n\n // open page start group\n\n htmltemplateimplmod::open_new_local_page(\"#p04\");\n\n } else {\n\n save_group_id_to_local_storage(rrc);\n\n }\n\n}\n\n\n", "file_path": "mem6/src/storagemod.rs", "rank": 43, "score": 216831.65365039225 }, { "content": "/// msg joined\n\npub fn on_msg_joined(rrc: &mut RootRenderingComponent, his_ws_uid: usize, his_nickname: 
String) {\n\n // websysmod::debug_write(&format!(\"on_msg_joined {}\",his_ws_uid));\n\n if rrc.game_data.my_player_number == 1 {\n\n // push if not exists\n\n let mut ws_uid_exists = false;\n\n for x in &rrc.game_data.players {\n\n if x.ws_uid == his_ws_uid {\n\n ws_uid_exists = true;\n\n break;\n\n }\n\n }\n\n if !ws_uid_exists {\n\n rrc.game_data.players.push(Player {\n\n ws_uid: his_ws_uid,\n\n nickname: his_nickname,\n\n points: 0,\n\n });\n\n rrc.web_data.msg_receivers_json = rrc.game_data.prepare_json_msg_receivers();\n\n }\n\n }\n\n}\n", "file_path": "mem6/src/statusjoinedmod.rs", "rank": 44, "score": 212824.21854960872 }, { "content": "#[allow(clippy::integer_arithmetic)] // points +1 is not going to overflow ever\n\npub fn update_click_2nd_card_point(rrc: &mut RootRenderingComponent, is_point: bool) {\n\n if is_point {\n\n rrc.game_data.game_status = GameStatus::StatusDrink;\n\n // give points\n\n rrc.game_data.player_turn_now_mut().points += 1;\n\n\n\n if rrc.game_data.is_my_turn() {\n\n // drink\n\n html_template_impl_mod::open_new_local_page(\"#p06\");\n\n } else {\n\n // do not drink\n\n html_template_impl_mod::open_new_local_page(\"#p07\");\n\n }\n\n }\n\n}\n\n\n\n/// msg player click\n", "file_path": "mem6/src/status_2nd_card_mod.rs", "rank": 45, "score": 212507.37899568555 }, { "content": "#[allow(clippy::integer_arithmetic)] // points +1 is not going to overflow ever\n\npub fn update_click_2nd_card_flip_permanently(rrc: &mut RootRenderingComponent, is_point: bool) {\n\n if is_point {\n\n // the two cards matches. make them permanent FaceUp\n\n let x1 = rrc.game_data.card_index_of_1st_click;\n\n let x2 = rrc.game_data.card_index_of_2nd_click;\n\n unwrap!(rrc.game_data.card_grid_data.get_mut(x1)).status =\n\n CardStatusCardFace::UpPermanently;\n\n unwrap!(rrc.game_data.card_grid_data.get_mut(x2)).status =\n\n CardStatusCardFace::UpPermanently;\n\n }\n\n}\n\n\n", "file_path": "mem6/src/status_2nd_card_mod.rs", "rank": 46, "score": 209822.6072763149 }, { "content": "/// play again\n\npub fn div_game_over<'a>(rrc: &RootRenderingComponent, cx: &mut RenderContext<'a>) -> Node<'a> {\n\n // game over\n\n // only the leader of the group player can choose Play again?\n\n // other players are already joined to the group\n\n let template_name = if rrc.game_data.my_player_number == 1 {\n\n \"play_again\"\n\n } else {\n\n \"game_over\"\n\n };\n\n let html_template = rrc.web_data.get_sub_template(template_name);\n\n unwrap!(rrc.render_template(cx, &html_template, htmltemplatemod::HtmlOrSvg::Html))\n\n}\n\n\n", "file_path": "mem6/src/statusgameovermod.rs", "rank": 47, "score": 209117.989259866 }, { "content": "#[allow(clippy::integer_arithmetic)]\n\n#[allow(clippy::indexing_slicing)]\n\npub fn set_html_template_and_sub_templates(rrc: &mut RootRenderingComponent, resp_body_text: &str) {\n\n // only the html inside the <body> </body>\n\n let mut tm = between_body_tag(&resp_body_text);\n\n // parse and save sub_templates <template name=\"xxx\"></template>\n\n rrc.web_data.html_sub_templates.clear();\n\n loop {\n\n let mut exist_template = false;\n\n\n\n let pos1 = tm.find(\"<template \");\n\n let del2 = \"</template>\";\n\n let pos2 = tm.find(del2);\n\n if let Some(pos_start) = pos1 {\n\n if let Some(pos_end) = pos2 {\n\n exist_template = true;\n\n // drain - extract a substring and remove it from the original\n\n let sub1: String = tm.drain(pos_start..pos_end + del2.len()).collect();\n\n\n\n let del3 = \"name=\\\"\";\n\n let pos_name_start = unwrap!(sub1.find(del3));\n\n let sub2 = 
&sub1[pos_name_start + del3.len()..];\n", "file_path": "mem6/src/html_template_impl_mod.rs", "rank": 48, "score": 207247.18824357062 }, { "content": "/// msg joined\n\npub fn on_msg_joined(rrc: &mut RootRenderingComponent, his_ws_uid: usize, his_nickname: String) {\n\n // websysmod::debug_write(&format!(\"on_msg_joined {}\",his_ws_uid));\n\n if rrc.game_data.my_player_number == 1 {\n\n // push if not exists\n\n let mut ws_uid_exists = false;\n\n for x in &rrc.game_data.players {\n\n if x.ws_uid == his_ws_uid {\n\n ws_uid_exists = true;\n\n break;\n\n }\n\n }\n\n if !ws_uid_exists {\n\n rrc.game_data.players.push(Player {\n\n ws_uid: his_ws_uid,\n\n nickname: his_nickname,\n\n points: 0,\n\n });\n\n rrc.web_data.msg_receivers_json = rrc.game_data.prepare_json_msg_receivers();\n\n }\n\n }\n\n}\n", "file_path": "mem6/src/status_joined_mod.rs", "rank": 49, "score": 206840.6428975231 }, { "content": "/// render\n\npub fn div_on_1st_card<'a>(rrc: &RootRenderingComponent, cx: &mut RenderContext<'a>) -> Node<'a> {\n\n let template_name = if rrc.game_data.is_my_turn() {\n\n \"action_1st_2nd_turn\"\n\n } else {\n\n \"action_1st_2nd_not_turn\"\n\n };\n\n let html_template = rrc.web_data.get_sub_template(template_name);\n\n unwrap!(rrc.render_template(cx, &html_template, htmltemplatemod::HtmlOrSvg::Html))\n\n}\n\n\n\n/// on click for image in status 1s\n", "file_path": "mem6/src/status1stcardmod.rs", "rank": 50, "score": 205917.97076782183 }, { "content": "/// play again\n\npub fn div_game_over<'a>(rrc: &RootRenderingComponent, cx: &mut RenderContext<'a>) -> Node<'a> {\n\n // game over\n\n // only the leader of the group player can choose Play again?\n\n // other players are already joined to the group\n\n let template_name = if rrc.game_data.my_player_number == 1 {\n\n \"play_again\"\n\n } else {\n\n \"game_over\"\n\n };\n\n let html_template = rrc.web_data.get_sub_template(template_name);\n\n unwrap!(rrc.render_template(cx, &html_template, htmltemplatemod::HtmlOrSvg::Html))\n\n}\n\n\n", "file_path": "mem6/src/status_game_over_mod.rs", "rank": 51, "score": 202859.85057462502 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\npub fn on_msg_ack_take_turn(rrc: &mut RootRenderingComponent, player_ws_uid: usize, msg_id: usize) {\n\n if ackmsgmod::remove_ack_msg_from_queue(rrc, player_ws_uid, msg_id) {\n\n update_on_take_turn(rrc);\n\n }\n\n // TODO: timer if after 3 seconds the ack is not received resend the msg\n\n // do this 3 times and then hard error\n\n}\n\n\n", "file_path": "mem6/src/statustaketurnmod.rs", "rank": 52, "score": 201354.63100616174 }, { "content": "/// on msg\n\npub fn on_msg_take_turn(rrc: &mut RootRenderingComponent, msg_sender_ws_uid: usize, msg_id: usize) {\n\n ackmsgmod::send_ack(\n\n rrc,\n\n msg_sender_ws_uid,\n\n msg_id,\n\n gamedatamod::MsgAckKind::MsgTakeTurn,\n\n );\n\n update_on_take_turn(rrc);\n\n}\n\n\n\n/// on msg ack\n", "file_path": "mem6/src/statustaketurnmod.rs", "rank": 53, "score": 201354.63100616174 }, { "content": "/// render\n\npub fn div_on_1st_card<'a>(rrc: &RootRenderingComponent, cx: &mut RenderContext<'a>) -> Node<'a> {\n\n let template_name = if rrc.game_data.is_my_turn() {\n\n \"action_1st_2nd_turn\"\n\n } else {\n\n \"action_1st_2nd_not_turn\"\n\n };\n\n let html_template = rrc.web_data.get_sub_template(template_name);\n\n unwrap!(rrc.render_template(cx, &html_template, htmltemplatemod::HtmlOrSvg::Html))\n\n}\n\n\n\n/// on click for image in status 1s\n", "file_path": "mem6/src/status_1st_card_mod.rs", "rank": 54, "score": 197133.15494374553 }, 
{ "content": "/// send all data to resync game_data\n\npub fn send_msg_for_resync(rrc: &RootRenderingComponent) {\n\n websysmod::debug_write(\"send_msg_for_resync MsgAllGameData\");\n\n rrc.web_data\n\n .send_ws_msg(&websocketmod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n /// only the players that resync\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.clone(),\n\n msg_data: gamedatamod::WsMessageGameData::MsgAllGameData {\n\n /// json of vector of players with nicknames and order data\n\n players: unwrap!(serde_json::to_string(&rrc.game_data.players)),\n\n /// vector of cards status\n\n card_grid_data: unwrap!(serde_json::to_string(&rrc.game_data.card_grid_data)),\n\n card_index_of_1st_click: rrc.game_data.card_index_of_1st_click,\n\n card_index_of_2nd_click: rrc.game_data.card_index_of_2nd_click,\n\n /// whose turn is now: player 1,2,3,...\n\n player_turn: rrc.game_data.player_turn,\n\n /// game status, strum Display converts into String\n\n game_status: format!(\"{}\", rrc.game_data.game_status),\n\n },\n\n });\n\n}\n\n\n\n/// after reconnect receive all the data from other player\n", "file_path": "mem6/src/statusreconnectmod.rs", "rank": 55, "score": 196881.02942729407 }, { "content": "/// play sound mp3. The audio element is on the html page\n\n/// so when it closes also the sound stops.\n\npub fn play_sound_for_drink(rrc: &RootRenderingComponent) {\n\n // randomly choose a link from rrc.audio\n\n let num = websysmod::get_random(0, rrc.game_data.audio.len());\n\n // prepare the audio element with src filename of mp3\n\n #[allow(clippy::indexing_slicing)]\n\n // indexing cannot panic if the random num is created from 0..len()\n\n let src_mp3 = format!(\"audio/{}\", rrc.game_data.audio[num]);\n\n let audio_element = websysmod::get_element_by_id(\"audio\");\n\n let audio_element = unwrap!(audio_element.dyn_into::<web_sys::HtmlAudioElement>());\n\n audio_element.set_src(&src_mp3);\n\n let _x = unwrap!(audio_element.play());\n\n}\n", "file_path": "mem6/src/statusdrinkmod.rs", "rank": 56, "score": 196881.02942729407 }, { "content": "/// is it a point or not\n\npub fn is_point(rrc: &RootRenderingComponent) -> bool {\n\n rrc.game_data.get_1st_card().card_number == rrc.game_data.get_2nd_card().card_number\n\n}\n\n\n", "file_path": "mem6/src/status2ndcardmod.rs", "rank": 57, "score": 195471.08533816627 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\npub fn on_msg_ack_take_turn(rrc: &mut RootRenderingComponent, player_ws_uid: usize, msg_id: usize) {\n\n if ack_msg_mod::remove_ack_msg_from_queue(rrc, player_ws_uid, msg_id) {\n\n update_on_take_turn(rrc);\n\n }\n\n // TODO: timer if after 3 seconds the ack is not received resend the msg\n\n // do this 3 times and then hard error\n\n}\n\n\n", "file_path": "mem6/src/status_take_turn_mod.rs", "rank": 58, "score": 193930.75377671488 }, { "content": "/// on msg\n\npub fn on_msg_take_turn(rrc: &mut RootRenderingComponent, msg_sender_ws_uid: usize, msg_id: usize) {\n\n ack_msg_mod::send_ack(\n\n rrc,\n\n msg_sender_ws_uid,\n\n msg_id,\n\n game_data_mod::MsgAckKind::MsgTakeTurn,\n\n );\n\n update_on_take_turn(rrc);\n\n}\n\n\n\n/// on msg ack\n", "file_path": "mem6/src/status_take_turn_mod.rs", "rank": 59, "score": 193930.7537767149 }, { "content": "/// if there is already a nickname don't blink\n\npub fn blink_or_not_nickname(rrc: &RootRenderingComponent) -> String {\n\n if rrc.game_data.my_nickname.is_empty() {\n\n \"blink\".to_owned()\n\n } else {\n\n \"\".to_owned()\n\n }\n\n}\n\n\n\n// endregion: 
nickname\n\n\n", "file_path": "mem6/src/storagemod.rs", "rank": 60, "score": 192222.24299562466 }, { "content": "/// async fetch for videos.json and update rrc\n\npub fn fetch_videos_and_update(href: &str, vdom: VdomWeak) {\n\n let url = format!(\"{}/content/videos.json\", href);\n\n spawn_local(async move {\n\n let respbody = websysmod::fetch_response(url).await;\n\n let vid_json: gamedatamod::Videos = unwrap!(serde_json::from_str(&respbody));\n\n unwrap!(\n\n vdom.with_component({\n\n move |root| {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // fill the vector\n\n rrc.game_data.videos = vid_json.videos;\n\n }\n\n })\n\n .await\n\n );\n\n });\n\n}\n\n\n", "file_path": "mem6/src/fetchmod.rs", "rank": 61, "score": 190950.30956351475 }, { "content": "/// async fetch for audio.json and update rrc\n\npub fn fetch_audio_and_update(href: &str, vdom: VdomWeak) {\n\n let url = format!(\"{}/content/audio.json\", href);\n\n spawn_local(async move {\n\n let respbody = websysmod::fetch_response(url).await;\n\n let aud_json: gamedatamod::Audio = unwrap!(serde_json::from_str(&respbody));\n\n unwrap!(\n\n vdom.with_component({\n\n move |root| {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // fill the vector\n\n rrc.game_data.audio = aud_json.audio;\n\n }\n\n })\n\n .await\n\n );\n\n });\n\n}\n\n\n\n/// fetch all imgs for the cache\n", "file_path": "mem6/src/fetchmod.rs", "rank": 62, "score": 190950.30956351475 }, { "content": "/// play sound mp3. The audio element is on the html page\n\n/// so when it closes also the sound stops.\n\npub fn play_sound_for_drink(rrc: &RootRenderingComponent) {\n\n // randomly choose a link from rrc.audio\n\n let num = websysmod::get_random(0, rrc.game_data.audio.len());\n\n // prepare the audio element with src filename of mp3\n\n #[allow(clippy::indexing_slicing)]\n\n // indexing cannot panic if the random num is created from 0..len()\n\n let src_mp3 = format!(\"audio/{}\", rrc.game_data.audio[num]);\n\n let audio_element = websysmod::get_audio_element_by_id(\"audio\");\n\n audio_element.set_src(&src_mp3);\n\n let _x = unwrap!(audio_element.play());\n\n}\n", "file_path": "mem6/src/status_drink_mod.rs", "rank": 63, "score": 190857.5938408716 }, { "content": "/// send all data to resync game_data\n\npub fn send_msg_for_resync(rrc: &RootRenderingComponent) {\n\n websysmod::debug_write(\"send_msg_for_resync MsgAllGameData\");\n\n rrc.web_data\n\n .send_ws_msg_from_web_data(&websocket_boiler_mod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n /// only the players that resync\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.clone(),\n\n msg_data: game_data_mod::WsMessageGameData::MsgAllGameData {\n\n /// json of vector of players with nicknames and order data\n\n players: unwrap!(serde_json::to_string(&rrc.game_data.players)),\n\n /// vector of cards status\n\n card_grid_data: unwrap!(serde_json::to_string(&rrc.game_data.card_grid_data)),\n\n card_index_of_1st_click: rrc.game_data.card_index_of_1st_click,\n\n card_index_of_2nd_click: rrc.game_data.card_index_of_2nd_click,\n\n /// whose turn is now: player 1,2,3,...\n\n player_turn: rrc.game_data.player_turn,\n\n /// game status, strum Display converts into String\n\n game_status: format!(\"{}\", rrc.game_data.game_status),\n\n },\n\n });\n\n}\n\n\n\n/// after reconnect receive all the data from other player\n", "file_path": "mem6/src/status_reconnect_mod.rs", "rank": 64, "score": 190857.5938408716 }, { "content": "/// calculate max with and height for a 
grid in pixels\n\npub fn max_grid_size(rrc: &RootRenderingComponent) -> Size2d {\n\n // if the game_config is None, then return full screen\n\n if rrc.game_data.game_config.is_none() {\n\n Size2d {\n\n hor: usize_window_inner_width_but_max_600(),\n\n ver: usize_window_inner_height(),\n\n }\n\n } else {\n\n // grid_container width and height\n\n let mut max_grid_width = grid_width();\n\n let mut max_grid_height = grid_height();\n\n /*\n\n // websysmod::debug_write(&format!(\n\n \"inner_width {} inner_height {}\",\n\n max_grid_width, max_grid_height\n\n ));\n\n */\n\n // default if not chosen\n\n let mut card_width = 115;\n\n let mut card_height = 115;\n", "file_path": "mem6/src/divgridcontainermod.rs", "rank": 65, "score": 189135.42561271205 }, { "content": "/// if there is already a nickname don't blink\n\npub fn blink_or_not_nickname(rrc: &RootRenderingComponent) -> String {\n\n if rrc.game_data.my_nickname.is_empty() {\n\n \"blink\".to_owned()\n\n } else {\n\n \"\".to_owned()\n\n }\n\n}\n\n\n\n// endregion: nickname\n\n\n", "file_path": "mem6/src/storage_mod.rs", "rank": 66, "score": 189135.42561271205 }, { "content": "/// if there is already a group_id don't blink\n\npub fn blink_or_not_group_id(rrc: &RootRenderingComponent) -> String {\n\n if rrc.game_data.group_id == 0 {\n\n \"blink\".to_owned()\n\n } else {\n\n \"\".to_owned()\n\n }\n\n}\n\n\n", "file_path": "mem6/src/htmltemplateimplmod.rs", "rank": 67, "score": 189135.42561271205 }, { "content": "/// async fetch for gamesmetadata.json and update rrc\n\npub fn fetch_games_metadata_and_update(href: &str, vdom: VdomWeak) {\n\n let url_config = format!(\"{}/content/gamesmetadata.json\", href);\n\n spawn_local(async move {\n\n // websysmod::debug_write(format!(\"respbody: {}\", respbody).as_str());\n\n let respbody = websysmod::fetch_response(url_config).await;\n\n let v: gamedatamod::GamesMetadata = unwrap!(serde_json::from_str(&respbody));\n\n unwrap!(\n\n vdom.with_component({\n\n move |root| {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // fill the vector\n\n rrc.game_data.content_folders.clear();\n\n for x in &v.vec_game_metadata {\n\n rrc.game_data.content_folders.push(x.folder.clone());\n\n }\n\n rrc.game_data.games_metadata = Some(v);\n\n }\n\n })\n\n .await\n\n );\n\n });\n\n}\n\n\n", "file_path": "mem6/src/fetchmod.rs", "rank": 69, "score": 188830.42601090734 }, { "content": "/// setup all ws events\n\npub fn setup_all_ws_events(ws: &WebSocket, vdom: VdomWeak) {\n\n // WebSocket on receive message callback\n\n setup_ws_msg_recv(ws, vdom.clone());\n\n\n\n // WebSocket on error message callback\n\n setup_ws_onerror(ws, vdom.clone());\n\n\n\n // WebSocket on close message callback\n\n setup_ws_onclose(ws, vdom);\n\n}\n\n\n", "file_path": "mem6/src/websocketmod.rs", "rank": 71, "score": 188817.65549640596 }, { "content": "#[allow(clippy::as_conversions)]\n\npub fn setup_ws_onerror(ws: &WebSocket, vdom: VdomWeak) {\n\n let onerror_callback = Closure::wrap(Box::new(move |e: ErrorEvent| {\n\n let err_text = format!(\"error event {:?}\", e);\n\n // websysmod::debug_write(&err_text);\n\n {\n\n spawn_local({\n\n let vdom = vdom.clone();\n\n async move {\n\n let _result = vdom\n\n .with_component({\n\n let vdom = vdom.clone();\n\n move |root| {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n rrc.web_data.error_text = err_text;\n\n vdom.schedule_render();\n\n }\n\n })\n\n .await;\n\n }\n\n });\n\n }\n\n }) as Box<dyn FnMut(ErrorEvent)>);\n\n 
ws.set_onerror(Some(onerror_callback.as_ref().unchecked_ref()));\n\n onerror_callback.forget();\n\n}\n\n\n\n/// on close WebSocket connection\n", "file_path": "mem6/src/websocketmod.rs", "rank": 72, "score": 188817.65549640596 }, { "content": "#[allow(clippy::as_conversions)]\n\npub fn setup_ws_onclose(ws: &WebSocket, vdom: VdomWeak) {\n\n let onclose_callback = Closure::wrap(Box::new(move |e: ErrorEvent| {\n\n let err_text = format!(\"ws_onclose {:?}\", e);\n\n websysmod::debug_write(&format!(\"onclose_callback {}\", &err_text));\n\n {\n\n spawn_local({\n\n let vdom = vdom.clone();\n\n async move {\n\n let _result = vdom\n\n .with_component({\n\n let vdom = vdom.clone();\n\n move |root| {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // I want to show a reconnect button to the user\n\n rrc.web_data.is_reconnect = true;\n\n vdom.schedule_render();\n\n }\n\n })\n\n .await;\n\n }\n\n });\n\n }\n\n }) as Box<dyn FnMut(ErrorEvent)>);\n\n ws.set_onclose(Some(onclose_callback.as_ref().unchecked_ref()));\n\n onclose_callback.forget();\n\n}\n", "file_path": "mem6/src/websocketmod.rs", "rank": 73, "score": 188817.65549640596 }, { "content": "#[allow(clippy::unneeded_field_pattern)]\n\n#[allow(clippy::too_many_lines)] // I know is long\n\npub fn setup_ws_msg_recv(ws: &WebSocket, vdom: VdomWeak) {\n\n let msg_recv_handler = Box::new(move |msg: JsValue| {\n\n let data: JsValue = unwrap!(Reflect::get(&msg, &\"data\".into()));\n\n let data = unwrap!(data.as_string());\n\n\n\n // don't log ping pong there are too much\n\n //if !data.to_string().contains(\"MsgPong\") {\n\n // websysmod::debug_write(&data);\n\n //}\n\n\n\n // we can receive 2 types of msgs:\n\n // 1. from the server WsMessageFromServer\n\n // 2. from other players WsMessage\n\n if let Ok(msg) = serde_json::from_str::<WsMessageFromServer>(&data) {\n\n //msg from ws server\n\n spawn_local({\n\n let vdom = vdom.clone();\n\n async move {\n\n let _result = vdom\n\n .with_component({\n", "file_path": "mem6/src/websocketmod.rs", "rank": 75, "score": 186787.98710915487 }, { "content": "/// is it a point or not\n\npub fn is_point(rrc: &RootRenderingComponent) -> bool {\n\n rrc.game_data.get_1st_card().card_number == rrc.game_data.get_2nd_card().card_number\n\n}\n\n\n", "file_path": "mem6/src/status_2nd_card_mod.rs", "rank": 76, "score": 186198.80740920216 }, { "content": "/// if there is already a group_id don't blink\n\npub fn blink_or_not_group_id(rrc: &RootRenderingComponent) -> String {\n\n if rrc.game_data.group_id == 0 {\n\n \"blink\".to_owned()\n\n } else {\n\n \"\".to_owned()\n\n }\n\n}\n\n\n", "file_path": "mem6/src/html_template_impl_mod.rs", "rank": 77, "score": 180734.35264036263 }, { "content": "/// calculate max with and height for a grid in pixels\n\npub fn max_grid_size(rrc: &RootRenderingComponent) -> Size2d {\n\n // if the game_config is None, then return full screen\n\n if rrc.game_data.game_config.is_none() {\n\n Size2d {\n\n hor: usize_window_inner_width_but_max_600(),\n\n ver: usize_window_inner_height(),\n\n }\n\n } else {\n\n // grid_container width and height\n\n let mut max_grid_width = grid_width();\n\n let mut max_grid_height = grid_height();\n\n /*\n\n // websysmod::debug_write(&format!(\n\n \"inner_width {} inner_height {}\",\n\n max_grid_width, max_grid_height\n\n ));\n\n */\n\n // default if not chosen\n\n let mut card_width = 115;\n\n let mut card_height = 115;\n", "file_path": "mem6/src/div_grid_container_mod.rs", "rank": 78, "score": 180734.35264036263 }, { "content": "/// play sound mp3\n\npub fn 
play_sound(rrc: &RootRenderingComponent, this_click_card_index: usize) {\n\n if rrc.game_data.sounds_and_labels == true {\n\n // prepare the audio element with src filename of mp3\n\n let src_mp3 = format!(\n\n \"content/{}/sound/{}\",\n\n rrc.game_data.game_name,\n\n unwrap!(unwrap!(rrc.game_data.game_config.as_ref())\n\n .sound_filename\n\n .get(\n\n unwrap!(\n\n rrc.game_data.card_grid_data.get(this_click_card_index),\n\n \"error this_click_card_index\"\n\n )\n\n .card_number\n\n ))\n\n );\n\n websysmod::play_sound(&src_mp3);\n\n }\n\n}\n\n\n", "file_path": "mem6/src/divgridcontainermod.rs", "rank": 79, "score": 179301.21500932582 }, { "content": "/// play sound mp3\n\npub fn play_sound(rrc: &RootRenderingComponent, this_click_card_index: usize) {\n\n if rrc.game_data.sounds_and_labels == true {\n\n // prepare the audio element with src filename of mp3\n\n let src_mp3 = format!(\n\n \"content/{}/sound/{}\",\n\n rrc.game_data.game_name,\n\n unwrap!(unwrap!(rrc.game_data.game_config.as_ref())\n\n .sound_filename\n\n .get(\n\n unwrap!(\n\n rrc.game_data.card_grid_data.get(this_click_card_index),\n\n \"error this_click_card_index\"\n\n )\n\n .card_number\n\n ))\n\n );\n\n websysmod::play_sound(&src_mp3);\n\n }\n\n}\n\n\n", "file_path": "mem6/src/div_grid_container_mod.rs", "rank": 80, "score": 171654.0469879817 }, { "content": "/// render unpredicted\n\nfn div_unpredicted<'a>(rrc: &RootRenderingComponent, cx: &mut RenderContext<'a>) -> Node<'a> {\n\n // unpredictable situation\n\n let html_template = r#\"<h2>\n\n gamestatus: <!--t=game_status--> one, player<!--t=my_player_number--> Nick\n\n </h2>\"#;\n\n // return\n\n unwrap!(rrc.render_template(cx, html_template, htmltemplatemod::HtmlOrSvg::Html))\n\n}\n", "file_path": "mem6/src/divplayeractionsmod.rs", "rank": 81, "score": 171013.70416388847 }, { "content": "/// render unpredicted\n\nfn div_unpredicted<'a>(rrc: &RootRenderingComponent, cx: &mut RenderContext<'a>) -> Node<'a> {\n\n // unpredictable situation\n\n let html_template = r#\"<h2>\n\n gamestatus: <!--t=game_status--> one, player<!--t=my_player_number--> Nick\n\n </h2>\"#;\n\n // return\n\n unwrap!(rrc.render_template(cx, html_template, htmltemplatemod::HtmlOrSvg::Html))\n\n}\n", "file_path": "mem6/src/div_player_actions_mod.rs", "rank": 82, "score": 163278.5171683653 }, { "content": "/// async fetch for gameconfig.json and update rrc\n\npub fn async_fetch_game_config_and_update(\n\n rrc: &mut RootRenderingComponent,\n\n vdom: VdomWeak,\n\n) {\n\n let url_config = format!(\n\n \"{}/content/{}/game_config.json\",\n\n rrc.web_data.href, rrc.game_data.game_name\n\n );\n\n spawn_local(async move {\n\n let respbody = websysmod::fetch_response(url_config).await;\n\n let json = unwrap!(serde_json::from_str(respbody.as_str()));\n\n // websysmod::debug_write(format!(\"respbody: {}\", respbody).as_str());\n\n unwrap!(\n\n vdom.with_component({\n\n move |root| {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n rrc.game_data.game_config = json;\n\n }\n\n })\n\n .await\n\n );\n\n });\n\n}\n\n\n", "file_path": "mem6/src/fetchmod.rs", "rank": 83, "score": 151223.25553386786 }, { "content": "/// render reconnect\n\npub fn div_reconnect<'a>(_rrc: &RootRenderingComponent, bump: &'a Bump) -> Node<'a> {\n\n dodrio !(bump,\n\n <div>\n\n <h4>\n\n {vec![text(bumpalo::format!(in bump,\n\n \"Click on Resync if there are problems with receiving msgs over the network:{}\", \"\")\n\n .into_bump_str(),)]}\n\n </h4>\n\n <div class=\"div_clickable\" onclick={\n\n move |root, vdom, _event| {\n\n 
let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // the old ws and closures are now a memory leak, but small\n\n let href = rrc.web_data.href.clone();\n\n // usize is Copy(), so I don't need clone()\n\n let my_ws_uid = rrc.web_data.my_ws_uid;\n\n websysmod::debug_write(&format!(\n\n \"href {} my_ws_uid {}\",\n\n href,\n\n my_ws_uid,\n\n ));\n", "file_path": "mem6/src/statusreconnectmod.rs", "rank": 84, "score": 150449.81211664603 }, { "content": "/// render reconnect\n\npub fn div_reconnect<'a>(_rrc: &RootRenderingComponent, bump: &'a Bump) -> Node<'a> {\n\n dodrio !(bump,\n\n <div>\n\n <h4>\n\n {vec![text(bumpalo::format!(in bump,\n\n \"Click on Resync if there are problems with receiving msgs over the network:{}\", \"\")\n\n .into_bump_str(),)]}\n\n </h4>\n\n <div class=\"div_clickable\" onclick={\n\n move |root, vdom, _event| {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // the old ws and closures are now a memory leak, but small\n\n let href = rrc.web_data.href.clone();\n\n // usize is Copy(), so I don't need clone()\n\n let my_ws_uid = rrc.web_data.my_ws_uid;\n\n websysmod::debug_write(&format!(\n\n \"href {} my_ws_uid {}\",\n\n href,\n\n my_ws_uid,\n\n ));\n", "file_path": "mem6/src/status_reconnect_mod.rs", "rank": 85, "score": 144985.35734780645 }, { "content": "/// get en empty div node\n\npub fn empty_div<'a>(cx: &mut RenderContext<'a>) -> Node<'a> {\n\n let bump = cx.bump;\n\n ElementBuilder::new(bump, \"div\").finish()\n\n}\n\n\n", "file_path": "dodrio_templating/src/htmltemplatemod.rs", "rank": 86, "score": 144729.9811819485 }, { "content": "/// send ack\n\npub fn send_ack(\n\n rrc: &mut RootRenderingComponent,\n\n msg_sender_ws_uid: usize,\n\n msg_id: usize,\n\n msg_ack_kind: gamedatamod::MsgAckKind,\n\n) {\n\n // websysmod::debug_write(&format!(\"send_ack players: {:?}\", rrc.game_data.players));\n\n // send back the ACK msg to the sender\n\n rrc.web_data\n\n .send_ws_msg(&websocketmod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: unwrap!(serde_json::to_string(&vec![msg_sender_ws_uid])),\n\n msg_data: gamedatamod::WsMessageGameData::MsgAck {\n\n msg_id,\n\n msg_ack_kind,\n\n },\n\n });\n\n}\n", "file_path": "mem6/src/ackmsgmod.rs", "rank": 87, "score": 122713.73899124522 }, { "content": "/// on second click\n\n/// The on click event passed by JavaScript executes all the logic\n\n/// and changes only the fields of the Card Grid struct.\n\n/// That struct is the only permanent data storage for later render the virtual dom.\n\npub fn on_click_2nd_card(\n\n rrc: &mut RootRenderingComponent,\n\n vdom: &VdomWeak,\n\n this_click_card_index: usize,\n\n) {\n\n rrc.game_data.card_index_of_2nd_click = this_click_card_index;\n\n // flip the card up\n\n rrc.game_data.get_2nd_card_mut().status = CardStatusCardFace::UpTemporary;\n\n divgridcontainermod::play_sound(rrc, this_click_card_index);\n\n // 2 possible outcomes: 1) Next Player 2) end game/play again\n\n // that changes: game status,CardStatusCardFace, points or/and player_turn\n\n // if the cards match, player get one point, but it is the next player turn.\n\n let is_point = is_point(rrc);\n\n if is_point {\n\n update_click_2nd_card_flip_permanently(rrc, is_point);\n\n }\n\n let msg_id = ackmsgmod::prepare_for_ack_msg_waiting(rrc, vdom);\n\n let msg = websocketmod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.to_string(),\n\n msg_data: 
gamedatamod::WsMessageGameData::MsgClick2ndCard {\n\n card_index_of_2nd_click: rrc.game_data.card_index_of_2nd_click,\n\n is_point,\n\n msg_id,\n\n },\n\n };\n\n ackmsgmod::send_msg_and_write_in_queue(rrc, &msg, msg_id);\n\n}\n\n\n\n/// is all card permanently on\n", "file_path": "mem6/src/status2ndcardmod.rs", "rank": 88, "score": 120735.9784901784 }, { "content": "/// on click\n\npub fn on_click_1st_card(\n\n rrc: &mut RootRenderingComponent,\n\n vdom: &VdomWeak,\n\n this_click_card_index: usize,\n\n) {\n\n // websysmod::debug_write(\"on_click_1st_card\");\n\n flip_back(rrc);\n\n // change card status and game status\n\n rrc.game_data.card_index_of_1st_click = this_click_card_index;\n\n\n\n let msg_id = ackmsgmod::prepare_for_ack_msg_waiting(rrc, vdom);\n\n let msg = websocketmod::WsMessageForReceivers {\n\n msg_sender_ws_uid: rrc.web_data.my_ws_uid,\n\n msg_receivers_json: rrc.web_data.msg_receivers_json.to_string(),\n\n msg_data: gamedatamod::WsMessageGameData::MsgClick1stCard {\n\n card_index_of_1st_click: this_click_card_index,\n\n msg_id,\n\n },\n\n };\n\n ackmsgmod::send_msg_and_write_in_queue(rrc, &msg, msg_id);\n\n // websysmod::debug_write(&format!(\"send_msg_and_write_in_queue: {}\", msg_id));\n\n divgridcontainermod::play_sound(rrc, this_click_card_index);\n\n // after ack for this message call on_msg_click_1st_card(rrc, this_click_card_index);\n\n}\n\n\n", "file_path": "mem6/src/status1stcardmod.rs", "rank": 89, "score": 120735.9784901784 }, { "content": "#[allow(clippy::indexing_slicing)]\n\npub fn on_click_img_status1st(\n\n root: &mut dyn dodrio::RootRender,\n\n vdom: &VdomWeak,\n\n event: &web_sys::Event,\n\n) {\n\n // websysmod::debug_write(\"img click\");\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // If the event's target is our image...\n\n let img = match event\n\n .target()\n\n .and_then(|t| t.dyn_into::<web_sys::HtmlImageElement>().ok())\n\n {\n\n None => return,\n\n // ?? Don't understand what this does. The original was written for Input element.\n\n Some(input) => input,\n\n };\n\n // id attribute of image html element is prefixed with img ex. \"img12\"\n\n let this_click_card_index = unwrap!((img.id()[3..]).parse::<usize>());\n\n // click is useful only on facedown cards\n\n if rrc.game_data.card_grid_data[this_click_card_index]\n", "file_path": "mem6/src/status1stcardmod.rs", "rank": 90, "score": 120735.9784901784 }, { "content": "#[allow(clippy::indexing_slicing)]\n\npub fn on_click_img_status2nd(\n\n root: &mut dyn dodrio::RootRender,\n\n vdom: &VdomWeak,\n\n event: &web_sys::Event,\n\n) {\n\n let rrc = root.unwrap_mut::<RootRenderingComponent>();\n\n // If the event's target is our image...\n\n let img = match event\n\n .target()\n\n .and_then(|t| t.dyn_into::<web_sys::HtmlImageElement>().ok())\n\n {\n\n None => return,\n\n // ?? Don't understand what this does. The original was written for Input element.\n\n Some(input) => input,\n\n };\n\n // id attribute of image html element is prefixed with img ex. 
\"img12\"\n\n let this_click_card_index = unwrap!(img.id()[3..].parse::<usize>());\n\n // click is useful only on facedown cards\n\n if rrc.game_data.card_grid_data[this_click_card_index]\n\n .status\n", "file_path": "mem6/src/status2ndcardmod.rs", "rank": 91, "score": 120735.9784901784 }, { "content": "#[allow(clippy::indexing_slicing)]\n\npub fn on_msg_start_game(\n\n rrc: &mut RootRenderingComponent,\n\n card_grid_data: &str,\n\n game_config: &str,\n\n players: &str,\n\n game_name: &str,\n\n player_turn: usize,\n\n) {\n\n // websysmod::debug_write(&format!(\"on_msg_start_game {}\", players));\n\n rrc.game_data.game_status = GameStatus::Status1stCard;\n\n rrc.game_data.player_turn = player_turn;\n\n rrc.game_data.game_name = game_name.to_string();\n\n\n\n rrc.game_data.game_config = unwrap!(\n\n serde_json::from_str(game_config),\n\n \"error serde_json::from_str(game_config)\"\n\n );\n\n\n\n rrc.game_data.card_grid_data = unwrap!(\n\n serde_json::from_str(card_grid_data),\n", "file_path": "mem6/src/statusgamedatainitmod.rs", "rank": 92, "score": 120735.9784901784 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\npub fn on_msg_all_game_data(\n\n rrc: &mut RootRenderingComponent,\n\n players: String,\n\n card_grid_data: String,\n\n card_index_of_1st_click: usize,\n\n card_index_of_2nd_click: usize,\n\n // whose turn is now: player 1,2,3,...\n\n player_turn: usize,\n\n game_status: String,\n\n) {\n\n websysmod::debug_write(\"on_msg_all_game_data\");\n\n //strum EnumString adds the from_str function\n\n use std::str::FromStr;\n\n let game_status = GameStatus::from_str(&game_status).unwrap();\n\n\n\n // only the first message is processed\n\n // if rrc.game_data.web_data.is_reconnect {\n\n rrc.web_data.is_reconnect = false;\n\n rrc.game_data.players = unwrap!(serde_json::from_str(&players));\n\n rrc.game_data.card_grid_data = unwrap!(serde_json::from_str(&card_grid_data));\n\n rrc.game_data.card_index_of_1st_click = card_index_of_1st_click;\n\n rrc.game_data.card_index_of_2nd_click = card_index_of_2nd_click;\n\n rrc.game_data.player_turn = player_turn;\n\n rrc.game_data.game_status = game_status;\n\n rrc.web_data.msgs_waiting_ack.retain(|_x| false);\n\n // }\n\n}\n", "file_path": "mem6/src/statusreconnectmod.rs", "rank": 93, "score": 120735.9784901784 }, { "content": "/// hide big img\n\npub fn hide_big_img() {\n\n let img_element = websysmod::get_element_by_id(\"big_img\");\n\n let img_html_element = unwrap!(img_element.dyn_into::<web_sys::HtmlImageElement>());\n\n let _x = img_html_element.style().set_property(\"display\", \"none\");\n\n}\n\n\n", "file_path": "mem6/src/htmltemplateimplmod.rs", "rank": 94, "score": 120735.9784901784 }, { "content": "/// on msg\n\npub fn on_msg_drink_end(\n\n _rrc: &mut RootRenderingComponent,\n\n _msg_sender_ws_uid: usize,\n\n _vdom: &VdomWeak,\n\n) {\n\n htmltemplateimplmod::open_new_local_page(\"#p11\");\n\n}\n\n\n", "file_path": "mem6/src/statusdrinkmod.rs", "rank": 95, "score": 120735.9784901784 }, { "content": "/// on msg\n\npub fn on_msg_click_1st_card(\n\n rrc: &mut RootRenderingComponent,\n\n vdom: &VdomWeak,\n\n msg_sender_ws_uid: usize,\n\n card_index_of_1st_click: usize,\n\n msg_id: usize,\n\n) {\n\n flip_back(rrc);\n\n ackmsgmod::send_ack(\n\n rrc,\n\n msg_sender_ws_uid,\n\n msg_id,\n\n gamedatamod::MsgAckKind::MsgClick1stCard,\n\n );\n\n // it can happen that 2 smartphones send the msg click1st simultaneously.\n\n // This is a conflict.\n\n // Only one Player can be the judge and I chosen the Player 1 to resolve it.\n\n if 
rrc.game_data.my_player_number == 1 && GameStatus::Status1stCard != rrc.game_data.game_status\n\n {\n\n websysmod::debug_write(\"CONFLICT on_msg_click_1st_card\");\n", "file_path": "mem6/src/status1stcardmod.rs", "rank": 96, "score": 118867.78014565645 }, { "content": "/// msg player click\n\npub fn on_msg_click_2nd_card(\n\n rrc: &mut RootRenderingComponent,\n\n msg_sender_ws_uid: usize,\n\n card_index_of_2nd_click: usize,\n\n is_point: bool,\n\n msg_id: usize,\n\n) {\n\n ackmsgmod::send_ack(\n\n rrc,\n\n msg_sender_ws_uid,\n\n msg_id,\n\n gamedatamod::MsgAckKind::MsgClick2ndCard,\n\n );\n\n rrc.game_data.card_index_of_2nd_click = card_index_of_2nd_click;\n\n update_click_2nd_card_flip_permanently(rrc, is_point);\n\n update_click_2nd_card_point(rrc, is_point);\n\n}\n\n\n", "file_path": "mem6/src/status2ndcardmod.rs", "rank": 97, "score": 118867.78014565645 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\npub fn remove_ack_msg_from_queue(\n\n rrc: &mut RootRenderingComponent,\n\n player_ws_uid: usize,\n\n msg_id: usize,\n\n) -> bool {\n\n // remove the waiting msg from the queue\n\n // I use the opposite method \"retain\" because there is not a method \"remove\"\n\n rrc.web_data\n\n .msgs_waiting_ack\n\n .retain(|x| !(x.player_ws_uid == player_ws_uid && x.msg_id == msg_id));\n\n\n\n // if there is no more items with this msg_id, then proceed\n\n let mut has_msg_id = false;\n\n for x in &rrc.web_data.msgs_waiting_ack {\n\n if x.msg_id == msg_id {\n\n has_msg_id = true;\n\n break;\n\n }\n\n }\n\n // return\n\n !has_msg_id\n\n}\n\n\n", "file_path": "mem6/src/ackmsgmod.rs", "rank": 98, "score": 118867.78014565645 }, { "content": "/// prepare for ack msg waiting - return random msg_id\n\npub fn prepare_for_ack_msg_waiting(\n\n rrc: &mut RootRenderingComponent,\n\n vdom: &VdomWeak,\n\n) -> usize {\n\n let msg_id = websysmod::get_random(1, 0xFFFF_FFFF);\n\n rrc.game_data.game_status = GameStatus::StatusWaitingAckMsg;\n\n vdom.schedule_render();\n\n // return\n\n msg_id\n\n}\n\n\n", "file_path": "mem6/src/ackmsgmod.rs", "rank": 99, "score": 118867.78014565645 } ]
Rust
src/network/tests.rs
ambaxter/expert-rs
5d5070f4c8842a0b4f53c6ebc277ea0444fedb58
use std::hash::{Hash, Hasher};
use std::fmt;
use std::fmt::Debug;
use traits::Fact;
use ordered_float::NotNaN;
use runtime::memory::SymbolId;
use num::Float;

#[derive(Clone, Hash, Eq, PartialEq)]
pub enum CLimits<T: Hash + Eq + Ord + Clone> {
    S(T),
    D(T, T)
}

#[derive(Clone)]
pub enum OrdData<T: Fact>{
    I8(fn(&T) -> &i8, CLimits<i8>),
    I16(fn(&T) -> &i16, CLimits<i16>),
    I32(fn(&T) -> &i32, CLimits<i32>),
    I64(fn(&T) -> &i64, CLimits<i64>),
    U8(fn(&T) -> &u8, CLimits<u8>),
    U16(fn(&T) -> &u16, CLimits<u16>),
    U32(fn(&T) -> &u32, CLimits<u32>),
    U64(fn(&T) -> &u64, CLimits<u64>),
    ISIZE(fn(&T) -> &isize, CLimits<isize>),
    USIZE(fn(&T) -> &usize, CLimits<usize>),
}

impl<T: Fact> OrdData<T> {
    fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) {
        ord.hash(state);
        accessor.hash(state);
        limits.hash(state);
    }
}

impl<T: Fact> Hash for OrdData<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        use self::OrdData::*;
        match self {
            &I8(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); },
            &I16(accessor, ref limits) => { Self::hash_self(1, accessor as usize, limits, state); },
            &I32(accessor, ref limits) => { Self::hash_self(2, accessor as usize, limits, state); },
            &I64(accessor, ref limits) => { Self::hash_self(3, accessor as usize, limits, state); },
            &U8(accessor, ref limits) => { Self::hash_self(4, accessor as usize, limits, state); },
            &U16(accessor, ref limits) => { Self::hash_self(5, accessor as usize, limits, state); },
            &U32(accessor, ref limits) => { Self::hash_self(6, accessor as usize, limits, state); },
            &U64(accessor, ref limits) => { Self::hash_self(7, accessor as usize, limits, state); },
            &ISIZE(accessor, ref limits) => { Self::hash_self(8, accessor as usize, limits, state); },
            &USIZE(accessor, ref limits) => { Self::hash_self(9, accessor as usize, limits, state); }
        }
    }
}

impl<T: Fact> PartialEq for OrdData<T> {
    fn eq(&self, other: &Self) -> bool {
        use self::OrdData::*;
        match (self, other) {
            (&I8(accessor1, ref limits1), &I8(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&I16(accessor1, ref limits1), &I16(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&I32(accessor1, ref limits1), &I32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&I64(accessor1, ref limits1), &I64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&U8(accessor1, ref limits1), &U8(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&U16(accessor1, ref limits1), &U16(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&U32(accessor1, ref limits1), &U32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&U64(accessor1, ref limits1), &U64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&ISIZE(accessor1, ref limits1), &ISIZE(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&USIZE(accessor1, ref limits1), &USIZE(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            _ => false
        }
    }
}

impl<T: Fact> Eq for OrdData<T> {}

#[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)]
pub enum OrdTest {
    Ne, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe
}

#[derive(Copy, Clone, Eq, PartialEq)]
pub enum FLimits<T: Float> {
    S(NotNaN<T>),
    D(NotNaN<T>, NotNaN<T>)
}

impl<T: Float> Hash for FLimits<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        use self::FLimits::*;
        match self {
            &S(ref to) => to.hash(state),
            &D(ref from, ref to) => {
                from.hash(state);
                to.hash(state);
            },
        }
    }
}

#[derive(Clone)]
pub enum FlData<T: Fact>{
    F32(fn(&T) -> &f32, FLimits<f32>),
    F64(fn(&T) -> &f64, FLimits<f64>),
}

impl<T: Fact> FlData<T> {
    fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) {
        ord.hash(state);
        accessor.hash(state);
        limits.hash(state);
    }
}

impl<T: Fact> Hash for FlData<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        use self::FlData::*;
        match self {
            &F32(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); },
            &F64(accessor, ref limits) => { Self::hash_self(1, accessor as usize, limits, state); },
        }
    }
}

impl<T: Fact> PartialEq for FlData<T> {
    fn eq(&self, other: &Self) -> bool {
        use self::FlData::*;
        match (self, other) {
            (&F32(accessor1, ref limits1), &F32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            (&F64(accessor1, ref limits1), &F64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            _ => false
        }
    }
}

impl<T: Fact> Eq for FlData<T> {}

#[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)]
pub enum FlTest {
    ApproxEq, ApproxNe, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe
}

#[derive(Clone)]
pub enum StrData<T: Fact> {
    REF(fn(&T) -> &str, CLimits<SymbolId>),
}

impl<T: Fact> StrData<T> {
    fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) {
        ord.hash(state);
        accessor.hash(state);
        limits.hash(state);
    }
}

impl<T: Fact> Hash for StrData<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        use self::StrData::*;
        match self {
            &REF(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); },
        }
    }
}

impl<T: Fact> PartialEq for StrData<T> {
    fn eq(&self, other: &Self) -> bool {
        use self::StrData::*;
        match (self, other) {
            (&REF(accessor1, ref limits1), &REF(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 },
            _ => false
        }
    }
}

impl<T: Fact> Eq for StrData<T> {}

#[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)]
pub enum StrTest {
    Ne, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe, Contains, StartsWith, EndsWith
}

#[derive(Hash, Eq, PartialEq)]
pub enum AlphaTest<T: Fact> {
    HashEq,
    Ord(OrdData<T>, OrdTest),
    Fl(FlData<T>, FlTest),
    Str(StrData<T>, StrTest),
}

impl<T: Fact> AlphaTest<T> {
    pub fn is_hash_eq(&self) -> bool {
        use self::AlphaTest::*;
        match self {
            &HashEq => true,
            _ => false
        }
    }
}

impl<T: Fact> Debug for AlphaTest<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::AlphaTest::*;
        write!(f, "Test{{")?;
        match self {
            &HashEq => { write!(f, "HashEq")? },
            &Ord(ref data, ref test) => { write!(f, "Ord")? },
            &Fl(ref data, ref test) => { write!(f, "Fl")? },
            &Str(ref data, ref test) => { write!(f, "Str")? }
        }
        write!(f, "}}")
    }
}
use std::hash::{Hash, Hasher}; use std::fmt; use std::fmt::Debug; use traits::Fact; use ordered_float::NotNaN; use runtime::memory::SymbolId; use num::Float; #[derive(Clone, Hash, Eq, PartialEq)] pub enum CLimits<T: Hash + Eq + Ord + Clone> { S(T), D(T, T) } #[derive(Clone)] pub enum OrdData<T: Fact>{ I8(fn(&T) -> &i8, CLimits<i8>), I16(fn(&T) -> &i16, CLimits<i16>), I32(fn(&T) -> &i32, CLimits<i32>), I64(fn(&T) -> &i64, CLimits<i64>), U8(fn(&T) -> &u8, CLimits<u8>), U16(fn(&T) -> &u16, CLimits<u16>), U32(fn(&T) -> &u32, CLimits<u32>), U64(fn(&T) -> &u64, CLimits<u64>), ISIZE(fn(&T) -> &isize, CLimits<isize>), USIZE(fn(&T) -> &usize, CLimits<usize>), } impl<T: Fact> OrdData<T> { fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for OrdData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::OrdData::*; match self { &I8(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); }, &I16(accessor, ref limits) => { Self::hash_self(1, accessor as usize,
&F64(accessor, ref limits) => { Self::hash_self(1, accessor as usize, limits, state); }, } } } impl<T: Fact> PartialEq for FlData<T> { fn eq(&self, other: &Self) -> bool { use self::FlData::*; match (self, other) { (&F32(accessor1, ref limits1), &F32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&F64(accessor1, ref limits1), &F64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for FlData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum FlTest { ApproxEq, ApproxNe, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe } #[derive(Clone)] pub enum StrData<T: Fact> { REF(fn(&T) -> &str, CLimits<SymbolId>), } impl<T: Fact> StrData<T> { fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for StrData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::StrData::*; match self { &REF(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); }, } } } impl<T: Fact> PartialEq for StrData<T> { fn eq(&self, other: &Self) -> bool { use self::StrData::*; match (self, other) { (&REF(accessor1, ref limits1), &REF(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for StrData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum StrTest { Ne, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe, Contains, StartsWith, EndsWith } #[derive(Hash, Eq, PartialEq)] pub enum AlphaTest<T: Fact> { HashEq, Ord(OrdData<T>, OrdTest), Fl(FlData<T>, FlTest), Str(StrData<T>, StrTest), } impl<T: Fact> AlphaTest<T> { pub fn is_hash_eq(&self) -> bool { use self::AlphaTest::*; match self { &HashEq => true, _ => false } } } impl<T: Fact> Debug for AlphaTest<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use self::AlphaTest::*; write!(f, "Test{{")?; match self { &HashEq => { write!(f, "HashEq")? }, &Ord(ref data, ref test) => { write!(f, "Ord")? }, &Fl(ref data, ref test) => { write!(f, "Fl")? }, &Str(ref data, ref test) => { write!(f, "Str")? } } write!(f, "}}") } }
limits, state); }, &I32(accessor, ref limits) => { Self::hash_self(2, accessor as usize, limits, state); }, &I64(accessor, ref limits) => { Self::hash_self(3, accessor as usize, limits, state); }, &U8(accessor, ref limits) => { Self::hash_self(4, accessor as usize, limits, state); }, &U16(accessor, ref limits) => { Self::hash_self(5, accessor as usize, limits, state); }, &U32(accessor, ref limits) => { Self::hash_self(6, accessor as usize, limits, state); }, &U64(accessor, ref limits) => { Self::hash_self(7, accessor as usize, limits, state); }, &ISIZE(accessor, ref limits) => { Self::hash_self(8, accessor as usize, limits, state); }, &USIZE(accessor, ref limits) => { Self::hash_self(9, accessor as usize, limits, state); } } } } impl<T: Fact> PartialEq for OrdData<T> { fn eq(&self, other: &Self) -> bool { use self::OrdData::*; match (self, other) { (&I8(accessor1, ref limits1), &I8(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I16(accessor1, ref limits1), &I16(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I32(accessor1, ref limits1), &I32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&I64(accessor1, ref limits1), &I64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U8(accessor1, ref limits1), &U8(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U16(accessor1, ref limits1), &U16(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U32(accessor1, ref limits1), &U32(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&U64(accessor1, ref limits1), &U64(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&ISIZE(accessor1, ref limits1), &ISIZE(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, (&USIZE(accessor1, ref limits1), &USIZE(accessor2, ref limits2)) => { (accessor1 as usize) == (accessor2 as usize) && limits1 == limits2 }, _ => false } } } impl<T: Fact> Eq for OrdData<T> {} #[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)] pub enum OrdTest { Ne, Lt, Le, Gt, Ge, GtLt, GeLt, GtLe, GeLe } #[derive(Copy, Clone, Eq, PartialEq)] pub enum FLimits<T: Float> { S(NotNaN<T>), D(NotNaN<T>, NotNaN<T>) } impl<T: Float> Hash for FLimits<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::FLimits::*; match self { &S(ref to) => to.hash(state), &D(ref from, ref to) => { from.hash(state); to.hash(state); }, } } } #[derive(Clone)] pub enum FlData<T: Fact>{ F32(fn(&T) -> &f32, FLimits<f32>), F64(fn(&T) -> &f64, FLimits<f64>), } impl<T: Fact> FlData<T> { fn hash_self<H: Hasher, L: Hash>(ord: usize, accessor: usize, limits: &L, state: &mut H) { ord.hash(state); accessor.hash(state); limits.hash(state); } } impl<T: Fact> Hash for FlData<T> { fn hash<H: Hasher>(&self, state: &mut H) { use self::FlData::*; match self { &F32(accessor, ref limits) => { Self::hash_self(0, accessor as usize, limits, state); },
random
[ { "content": "pub trait Fact: Introspect + Eq + Hash\n\n where Self: std::marker::Sized {\n\n type HashEq: Hash + Eq + Clone + Debug;\n\n fn create_hash_eq(conditions: &Vec<StatementConditions>, cache: &StringCache) -> Self::HashEq;\n\n fn new_from_fields(fields: &[FieldValue], cache: &StringCache) -> Self;\n\n fn getter(field: &str) -> Option<Getters<Self>>;\n\n fn exhaustive_hash(&self) -> Box<Iterator<Item=Self::HashEq>>;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 0, "score": 191868.8723706114 }, { "content": "pub trait Fact: 'static + Eq + Hash + Any\n\n where Self: std::marker::Sized {\n\n\n\n type HashEq: Hash + Eq + Clone + Debug;\n\n\n\n fn getter(field: &str) -> Option<Getter<Self>>;\n\n fn exhaustive_hash(&self) -> Box<Iterator<Item=Self::HashEq>>;\n\n fn create_hash_eq(conditions: &[AlphaNode<Self>]) -> Self::HashEq;\n\n}\n\n\n", "file_path": "src/shared/fact.rs", "rank": 1, "score": 191537.14119723847 }, { "content": "pub trait ReteIntrospection : Eq + Hash {\n\n type HashEq: Hash + Eq + Clone + Debug;\n\n\n\n fn static_type_id() -> TypeId;\n\n fn create_hash_eq(conditions: &Vec<StatementCondition>, string_interner: &StringCache) -> Self::HashEq;\n\n fn getter(field: &str) -> Option<fn(&Self) -> &u64>;\n\n fn type_id(&self) -> TypeId;\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 2, "score": 148342.75875198067 }, { "content": "pub fn dyn<S: AsRef<str>>(limit: S) -> SDynLimit<S> {\n\n SDynLimit{limit}\n\n}\n\n\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 3, "score": 138285.76126588735 }, { "content": "pub fn eq<'a, S: AsRef<str>, T: IntoEqTest<S>>(field: S, to: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(to.into_eq_test(field, EqTest::Eq))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 4, "score": 131019.29931441094 }, { "content": "pub fn product<I, F>(mut iterators: Vec<I>, mut cb: F)\n\n where I: Iterator + Clone,\n\n F: FnMut(&[I::Item])\n\n{\n\n inner(&mut Vec::with_capacity(iterators.len()),\n\n &iterators.clone(), &mut iterators, &mut cb)\n\n}\n\n\n", "file_path": "src/iter.rs", "rank": 5, "score": 129414.38091070032 }, { "content": "pub fn eq<S: AsRef<str>, T: IntoEqTest<S>>(field: S, to: T) -> VecNodes<S> {\n\n VecNodes::Test(to.into_eq_test(field, EqTest::Eq))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 6, "score": 118361.4258476207 }, { "content": "pub trait RefField : FactField {\n\n fn resolve<C: BetaContext>(context: &C, sym: SymbolId) -> &Self;\n\n}\n\n\n", "file_path": "src/shared/fact.rs", "rank": 7, "score": 112125.44474795085 }, { "content": "pub fn lt<'a, S: AsRef<str>, T: IntoOrdTest<S>>(field: S, to: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(to.into_ord_test(field, OrdTest::Lt))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 8, "score": 110493.67065717433 }, { "content": "pub fn ge<'a, S: AsRef<str>, T: IntoOrdTest<S>>(field: S, to: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(to.into_ord_test(field, OrdTest::Ge))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 9, "score": 110493.67065717433 }, { "content": "pub fn le<'a, S: AsRef<str>, T: IntoOrdTest<S>>(field: S, to: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(to.into_ord_test(field, OrdTest::Le))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 10, "score": 110493.67065717433 }, { "content": "pub fn ne<'a, S: AsRef<str>, T: IntoEqTest<S>>(field: S, to: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(to.into_eq_test(field, EqTest::Ne))\n\n}\n\n\n", "file_path": 
"src/shared/compiler/as_ref.rs", "rank": 11, "score": 110493.67065717434 }, { "content": "pub fn gt<'a, S: AsRef<str>, T: IntoOrdTest<S>>(field: S, to: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(to.into_ord_test(field, OrdTest::Gt))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 12, "score": 110493.67065717433 }, { "content": "pub fn not<S: AsRef<str>>(node: RefNodes<S>) -> RefNodes<S> {\n\n use self::RefNodes::*;\n\n match node {\n\n Test(mut t) => {\n\n t.apply_not();\n\n Test(t)\n\n },\n\n Any(t) => NotAny(t),\n\n NotAny(t) => Any(t),\n\n All(t) => NotAll(t),\n\n NotAll(t) => All(t)\n\n }\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 13, "score": 108986.3023321671 }, { "content": "pub trait IsHashEq {\n\n fn is_hash_eq(&self) -> bool;\n\n}\n\n\n", "file_path": "src/shared/nodes/alpha.rs", "rank": 14, "score": 108444.63290830512 }, { "content": "pub fn all<'a, S: AsRef<str>>(nodes: &'a[RefNodes<'a, S>]) -> RefNodes<'a, S> {\n\n RefNodes::All(nodes)\n\n}\n\n\n\nimpl<'a, S: AsRef<str>, T: Fact> Stage1Compile<T> for RefNodes<'a, S> {\n\n fn stage1_compile(&self, cache: &mut StringCache) -> Result<Stage1Node<T>, CompileError> {\n\n use self::RefNodes::*;\n\n match *self {\n\n Test(ref t) => Ok(Stage1Node::Test(t.compile(cache)?)),\n\n Any(ref v) => Ok(Stage1Node::Any(Stage1Compile::stage1_compile_slice(v, cache)?)),\n\n NotAny(ref v) => Ok(Stage1Node::NotAny(Stage1Compile::stage1_compile_slice(v, cache)?)),\n\n All(ref v) => Ok(Stage1Node::All(Stage1Compile::stage1_compile_slice(v, cache)?)),\n\n NotAll(ref v) => Ok(Stage1Node::NotAny(Stage1Compile::stage1_compile_slice(v, cache)?)),\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 15, "score": 104053.71147778649 }, { "content": "pub fn any<'a, S: AsRef<str>>(nodes: &'a[RefNodes<'a, S>]) -> RefNodes<'a, S> {\n\n RefNodes::Any(nodes)\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 16, "score": 104053.71147778649 }, { "content": "pub fn ne<S: AsRef<str>, T: IntoEqTest<S>>(field: S, to: T) -> VecNodes<S> {\n\n VecNodes::Test(to.into_eq_test(field, EqTest::Ne))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 17, "score": 96835.62382599781 }, { "content": "pub fn ge<S: AsRef<str>, T: IntoOrdTest<S>>(field: S, to: T) -> VecNodes<S> {\n\n VecNodes::Test(to.into_ord_test(field, OrdTest::Ge))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 18, "score": 96835.62382599781 }, { "content": "pub fn lt<S: AsRef<str>, T: IntoOrdTest<S>>(field: S, to: T) -> VecNodes<S> {\n\n VecNodes::Test(to.into_ord_test(field, OrdTest::Lt))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 19, "score": 96835.62382599781 }, { "content": "pub fn gt<S: AsRef<str>, T: IntoOrdTest<S>>(field: S, to: T) -> VecNodes<S> {\n\n VecNodes::Test(to.into_ord_test(field, OrdTest::Gt))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 20, "score": 96835.62382599781 }, { "content": "pub fn le<S: AsRef<str>, T: IntoOrdTest<S>>(field: S, to: T) -> VecNodes<S> {\n\n VecNodes::Test(to.into_ord_test(field, OrdTest::Le))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 21, "score": 96835.62382599781 }, { "content": "pub fn gelt<'a, S: AsRef<str>, T>(field: S, from: T, to: T) -> RefNodes<'a, S>\n\n where (T, T): IntoBtwnTest<S>{\n\n RefNodes::Test((from, to).into_btwn_test(field, BetweenTest::GeLt))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 22, "score": 94739.72270161699 }, { 
"content": "pub fn gtle<'a, S: AsRef<str>, T>(field: S, from: T, to: T) -> RefNodes<'a, S>\n\n where (T, T): IntoBtwnTest<S>{\n\n RefNodes::Test((from, to).into_btwn_test(field, BetweenTest::GtLe))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 23, "score": 94739.72270161699 }, { "content": "pub fn gele<'a, S: AsRef<str>, T>(field: S, from: T, to: T) -> RefNodes<'a, S>\n\n where (T, T): IntoBtwnTest<S>{\n\n RefNodes::Test((from, to).into_btwn_test(field, BetweenTest::GeLe))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 24, "score": 94739.72270161699 }, { "content": "pub fn gtlt<'a, S: AsRef<str>, T>(field: S, from: T, to: T) -> RefNodes<'a, S>\n\n where (T, T): IntoBtwnTest<S>{\n\n RefNodes::Test((from, to).into_btwn_test(field, BetweenTest::GtLt))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 25, "score": 94739.72270161699 }, { "content": "pub trait Stage1Compile<T: Fact> {\n\n\n\n fn stage1_compile(&self, cache: &mut StringCache) -> Result<Stage1Node<T>, CompileError>;\n\n\n\n fn stage1_compile_slice(t: &[Self], cache: &mut StringCache) -> Result<Vec<Stage1Node<T>>, CompileError>\n\n where Self: marker::Sized {\n\n t.iter().map(|c| c.stage1_compile(cache)).collect()\n\n }\n\n}\n\n\n\n\n\n#[derive(Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug, EnumIndex)]\n\npub enum ProvidesNode<S, G> {\n\n Var(S),\n\n Field(S, G)\n\n}\n\n\n\nimpl<S, G> ProvidesNode<S, G> {\n\n pub fn is_variable(&self) -> bool {\n\n use self::ProvidesNode::*;\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 26, "score": 93110.57304642335 }, { "content": "pub trait IntoEqTest<S: AsRef<str>> {\n\n fn into_eq_test(self, field: S, test: EqTest) -> TestRepr<S>;\n\n}\n\n\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 27, "score": 92933.84648544656 }, { "content": "pub trait IntoOrdTest<S: AsRef<str>> {\n\n fn into_ord_test(self, field: S, test: OrdTest) -> TestRepr<S>;\n\n}\n\n\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 28, "score": 92933.84648544656 }, { "content": "pub trait FactField {\n\n fn get_field_type() -> FactFieldType;\n\n}\n\n\n", "file_path": "src/shared/fact.rs", "rank": 29, "score": 91762.78999158507 }, { "content": "pub fn var<S: AsRef<str>>(s: S) -> ProvidesNode<S, S> {\n\n ProvidesNode::Var(s)\n\n}\n\n\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 30, "score": 91645.524035117 }, { "content": "pub fn ends_with<'a, S: AsRef<str>, T: IntoStrTest<S>>(field: S, val: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(val.into_str_test(field, StrArrayTest::EndsWith))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 31, "score": 89787.94868518843 }, { "content": "pub fn contains<'a, S: AsRef<str>, T: IntoStrTest<S>>(field: S, val: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(val.into_str_test(field, StrArrayTest::Contains))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 32, "score": 89787.94868518843 }, { "content": "pub fn starts_with<'a, S: AsRef<str>, T: IntoStrTest<S>>(field: S, val: T) -> RefNodes<'a, S> {\n\n RefNodes::Test(val.into_str_test(field, StrArrayTest::StartsWith))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 33, "score": 89787.94868518843 }, { "content": "pub fn not<S: AsRef<str>>(node: VecNodes<S>) -> VecNodes<S> {\n\n use self::VecNodes::*;\n\n match node {\n\n Test(mut t) => {\n\n t.apply_not();\n\n Test(t)\n\n },\n\n Any(t) => NotAny(t),\n\n NotAny(t) => Any(t),\n\n All(t) => NotAll(t),\n\n NotAll(t) => All(t)\n\n }\n\n}\n\n\n", 
"file_path": "src/shared/compiler/as_vec.rs", "rank": 34, "score": 89619.27408571722 }, { "content": "pub fn field<S: AsRef<str>>(s: S, g: S) -> ProvidesNode<S, S> {\n\n ProvidesNode::Field(s, g)\n\n}", "file_path": "src/shared/compiler/prelude.rs", "rank": 35, "score": 86899.39145742734 }, { "content": "fn inner<I, F>(cur: &mut Vec<I::Item>,\n\n orig: &[I], iters: &mut [I], cb: &mut F)\n\n where I: Iterator + Clone,\n\n F: FnMut(&[I::Item])\n\n{\n\n if let Some((front, rest)) = iters.split_first_mut() {\n\n for elt in &mut *front {\n\n cur.push(elt);\n\n inner(cur, &orig[1..], rest, cb);\n\n cur.pop();\n\n }\n\n if !cur.is_empty() {\n\n *front = orig[0].clone();\n\n }\n\n } else {\n\n cb(cur.as_slice())\n\n }\n\n}\n\n\n\n\n", "file_path": "src/iter.rs", "rank": 36, "score": 86580.10429597896 }, { "content": "pub fn any<S: AsRef<str>>(nodes: Vec<VecNodes<S>>) -> VecNodes<S> {\n\n VecNodes::Any(nodes)\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 37, "score": 86281.53652125318 }, { "content": "pub fn all<S: AsRef<str>>(nodes: Vec<VecNodes<S>>) -> VecNodes<S> {\n\n VecNodes::All(nodes)\n\n}\n\n\n\n\n\nimpl<S: AsRef<str>, T: Fact> Stage1Compile<T> for VecNodes<S> {\n\n fn stage1_compile(&self, cache: &mut StringCache) -> Result<Stage1Node<T>, CompileError> {\n\n use self::VecNodes::*;\n\n match *self {\n\n Test(ref t) => Ok(Stage1Node::Test(t.compile(cache)?)),\n\n Any(ref v) => Ok(Stage1Node::Any(Stage1Compile::stage1_compile_slice(v, cache)?)),\n\n NotAny(ref v) => Ok(Stage1Node::NotAny(Stage1Compile::stage1_compile_slice(v, cache)?)),\n\n All(ref v) => Ok(Stage1Node::All(Stage1Compile::stage1_compile_slice(v, cache)?)),\n\n NotAll(ref v) => Ok(Stage1Node::NotAny(Stage1Compile::stage1_compile_slice(v, cache)?)),\n\n }\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 38, "score": 86281.53652125318 }, { "content": "pub trait CastField : FactField {\n\n fn resolve<C: BetaContext>(context: &C, sym: SymbolId) -> Self;\n\n}\n\n\n\nmacro_rules! 
impl_ref_field {\n\n ($($id:ty => $getter:ident => $field_type:ident),+) => {\n\n $(\n\n impl FactField for $id {\n\n fn get_field_type() -> FactFieldType {\n\n FactFieldType::$field_type\n\n }\n\n }\n\n\n\n impl RefField for $id {\n\n #[inline]\n\n fn resolve<C: BetaContext>(context: &C, sym: SymbolId) -> &Self {\n\n context.$getter(sym)\n\n }\n\n }\n\n )*\n", "file_path": "src/shared/fact.rs", "rank": 39, "score": 85084.74817599653 }, { "content": "pub fn gelt<S: AsRef<str>, T>(field: S, from: T, to: T) -> VecNodes<S>\n\n where (T, T): IntoBtwnTest<S>{\n\n VecNodes::Test((from, to).into_btwn_test(field, BetweenTest::GeLt))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 40, "score": 80989.93425925335 }, { "content": "pub fn gele<S: AsRef<str>, T>(field: S, from: T, to: T) -> VecNodes<S>\n\n where (T, T): IntoBtwnTest<S>{\n\n VecNodes::Test((from, to).into_btwn_test(field, BetweenTest::GeLe))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 41, "score": 80989.93425925335 }, { "content": "pub fn gtle<S: AsRef<str>, T>(field: S, from: T, to: T) -> VecNodes<S>\n\n where (T, T): IntoBtwnTest<S>{\n\n VecNodes::Test((from, to).into_btwn_test(field, BetweenTest::GtLe))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 42, "score": 80989.93425925335 }, { "content": "pub fn gtlt<S: AsRef<str>, T>(field: S, from: T, to: T) -> VecNodes<S>\n\n where (T, T): IntoBtwnTest<S>{\n\n VecNodes::Test((from, to).into_btwn_test(field, BetweenTest::GtLt))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 43, "score": 80989.93425925335 }, { "content": " pub trait GetFieldType {\n\n fn get_field_type(&self) -> FactFieldType;\n\n }\n\n\n\n impl<T: Fact> GetFieldType for Getter<T> {\n\n fn get_field_type(&self) -> FactFieldType {\n\n use self::Getter::*;\n\n match self {\n\n $(\n\n $t(_) => FactFieldType::$t,\n\n )*\n\n }\n\n }\n\n }\n\n };\n\n}\n\n\n\ngetter_derive!(\n\n BOOL,\n\n I8, I16, I32, I64, I128,\n", "file_path": "src/shared/fact.rs", "rank": 44, "score": 79551.5746532102 }, { "content": "pub fn contains<S: AsRef<str>, T: IntoStrTest<S>>(field: S, val: T) -> VecNodes<S> {\n\n VecNodes::Test(val.into_str_test(field, StrArrayTest::Contains))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 45, "score": 76190.64056712312 }, { "content": "pub fn ends_with<S: AsRef<str>, T: IntoStrTest<S>>(field: S, val: T) -> VecNodes<S> {\n\n VecNodes::Test(val.into_str_test(field, StrArrayTest::EndsWith))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 46, "score": 76190.64056712312 }, { "content": "pub fn starts_with<S: AsRef<str>, T: IntoStrTest<S>>(field: S, val: T) -> VecNodes<S> {\n\n VecNodes::Test(val.into_str_test(field, StrArrayTest::StartsWith))\n\n}\n\n\n", "file_path": "src/shared/compiler/as_vec.rs", "rank": 47, "score": 76190.64056712312 }, { "content": "pub trait AString: AsRef<str> {}\n\n\n\nimpl<'a> AString for &'a str {}\n\nimpl AString for String {}\n\nimpl<'a> AString for Cow<'a, str> {}\n\n\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 48, "score": 72963.14606971301 }, { "content": "pub trait IntoBtwnTest<S: AsRef<str>> {\n\n fn into_btwn_test(self, field: S, test: BetweenTest) -> TestRepr<S>;\n\n}\n\n\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 49, "score": 67463.174131409 }, { "content": "pub trait IntoStrTest<S: AsRef<str>> {\n\n fn into_str_test(self, field: S, test: StrArrayTest) -> TestRepr<S>;\n\n}\n\n\n\n// Single values\n\n\n\n// Eq testing\n\nmacro_rules! 
into_eq_tests {\n\n ($($id:ty => [$sub:ident, $test:ident]),+) => {\n\n $(\n\n impl<S: AsRef<str>> IntoEqTest<S> for $id {\n\n fn into_eq_test(self, field: S, test: EqTest) -> TestRepr<S> {\n\n TestRepr::$sub(field, $test::Eq(Truth::Is, test, SLimit::St(self)))\n\n }\n\n }\n\n )*\n\n };\n\n}\n\n\n\ninto_eq_tests!(\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 50, "score": 67463.174131409 }, { "content": "pub trait AlphaTestField<T: FactField + ?Sized > {\n\n fn alpha_test_field<C: AlphaContext>(&self, value: &T, context: &C) -> bool;\n\n}\n\n\n\n\n\n#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]\n\npub enum BoolTest {\n\n Eq(Truth, EqTest, bool)\n\n}\n\n\n\nimpl IsHashEq for BoolTest {\n\n fn is_hash_eq(&self) -> bool {\n\n use self::BoolTest::*;\n\n match self {\n\n Eq(..) => true\n\n }\n\n }\n\n}\n\n\n\nimpl AlphaTestField<bool> for BoolTest {\n", "file_path": "src/shared/nodes/alpha.rs", "rank": 51, "score": 64286.665668199756 }, { "content": "pub trait BetaTestField<T: FactField + ?Sized > {\n\n fn beta_test_field<C: BetaContext>(&self, value: &T, context: &C) -> bool;\n\n}\n\n\n\n#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug)]\n\npub enum BoolTest<S> {\n\n Eq(Truth, EqTest, SLimit<bool, S>)\n\n}\n\n\n\nimpl<S> StringIntern for BoolTest<S>\n\n where S: AsRef<str> {\n\n type Output = BoolTest<SymbolId>;\n\n\n\n fn string_intern(&self, cache: &mut StringCache) -> Self::Output {\n\n use self::BoolTest::*;\n\n match *self {\n\n Eq(truth, test, ref limit) => Eq(truth, test, limit.map_dynamic(|s| cache.get_or_intern(s.as_ref())))\n\n }\n\n }\n\n}\n", "file_path": "src/shared/nodes/beta.rs", "rank": 52, "score": 64286.665668199756 }, { "content": "fn main() {\n\n let iter = vec![0..3, 1..2, 0..5, 0..3];\n\n\n\n product(iter, |elems| println!(\"{:?}\", elems));\n\n}\n\n*/", "file_path": "src/iter.rs", "rank": 53, "score": 62242.286218866444 }, { "content": "#[derive(Clone, Eq, PartialEq, Debug)]\n\nenum StatementGroup {\n\n All(StatementGroupId, Vec<StatementGroupEntry>),\n\n Any(StatementGroupId, Vec<StatementGroupEntry>),\n\n ForAll(StatementGroupId, StatementId, Vec<StatementGroupEntry>),\n\n}\n\n\n\nimpl StatementGroup {\n\n fn all(parent: StatementGroupId) -> StatementGroup {\n\n StatementGroup::All(parent, Vec::new())\n\n }\n\n\n\n fn any(parent: StatementGroupId) -> StatementGroup {\n\n StatementGroup::Any(parent, Vec::new())\n\n }\n\n\n\n fn for_all(parent: StatementGroupId, statement: StatementId) -> StatementGroup {\n\n StatementGroup::ForAll(parent, statement, Vec::new())\n\n }\n\n\n\n fn parent(&self) -> StatementGroupId {\n", "file_path": "src/shared/runtimes/array/builder.rs", "rank": 54, "score": 58560.02228591427 }, { "content": "#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug)]\n\nenum ConditionGroupChild {\n\n Condition(ConditionId),\n\n Group(ConditionGroupId),\n\n}\n\n\n", "file_path": "src/shared/runtimes/array/builder.rs", "rank": 55, "score": 57426.870511827015 }, { "content": "#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]\n\nenum StatementGroupEntry {\n\n Statement(StatementId),\n\n Exists(StatementId),\n\n Absent(StatementId),\n\n Child(StatementGroupId),\n\n}\n\n\n", "file_path": "src/shared/runtimes/array/builder.rs", "rank": 56, "score": 57423.49673367339 }, { "content": "pub trait Introspect {\n\n fn static_type_id() -> TypeId;\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Hash, Eq, Ord, PartialOrd, PartialEq)]\n\npub enum FieldValue {\n\n BOOL(SymbolId, bool),\n\n I8(SymbolId, i8),\n\n I16(SymbolId, 
i16),\n\n I32(SymbolId, i32),\n\n I64(SymbolId, i64),\n\n U8(SymbolId, u8),\n\n U16(SymbolId, u16),\n\n U32(SymbolId, u32),\n\n U64(SymbolId, u64),\n\n ISIZE(SymbolId, isize),\n\n USIZE(SymbolId, usize),\n\n F32(SymbolId, NotNaN<f32>),\n\n F64(SymbolId, NotNaN<f64>),\n\n STR(SymbolId, SymbolId),\n", "file_path": "src/traits.rs", "rank": 57, "score": 55037.007224914385 }, { "content": "pub trait NetworkBuilder {\n\n fn get_id_generator(&mut self) -> &mut BuilderIdGen;\n\n fn get_conditions<I: Fact>(&mut self) -> &mut HashMap<I::HashEq, HashMap<AlphaTest<I>, ConditionDesc>>;\n\n fn get_string_cache(&mut self) -> &mut StringCache;\n\n\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 58, "score": 53612.47797977677 }, { "content": "pub trait RuleBuilder {\n\n fn get_for_condition_collapse<I: Fact>(&mut self, hash_eq: I::HashEq) -> (&mut StringCache, &mut BuilderIdGen, &mut HashMap<AlphaTest<I>, ConditionDesc>);\n\n fn get_id_generator(&mut self) -> &mut BuilderIdGen;\n\n fn get_conditions<I: Fact>(&mut self) -> &mut HashMap<I::HashEq, HashMap<AlphaTest<I>, ConditionDesc>>;\n\n fn get_statement_ids(&mut self) -> &mut HashSet<StatementId>;\n\n fn get_string_cache(&mut self) -> &mut StringCache;\n\n}\n\n\n\n\n", "file_path": "src/traits.rs", "rank": 59, "score": 53612.47797977677 }, { "content": "/// Updates a test's configuration to apply a not\n\npub trait ApplyNot {\n\n fn apply_not(&mut self);\n\n}\n\n\n\n// Don't try to make this Truth<T> again. This ends up making the Repl -> Node function massive\n\n#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug)]\n\npub enum Truth {\n\n Not,\n\n Is\n\n}\n\n\n\nimpl Truth {\n\n pub fn is_not(self) -> bool {\n\n use self::Truth::*;\n\n match self {\n\n Not => true,\n\n Is => false\n\n }\n\n }\n\n}\n", "file_path": "src/shared/nodes/tests.rs", "rank": 60, "score": 52310.889752773655 }, { "content": "pub trait IsAlpha {\n\n fn is_alpha(&self) -> bool;\n\n}\n\n\n", "file_path": "src/shared/nodes/beta.rs", "rank": 61, "score": 52310.889752773655 }, { "content": "pub trait AlphaContext {\n\n fn get_string_cache(&self) -> &StringCache;\n\n}\n\n\n", "file_path": "src/shared/context.rs", "rank": 62, "score": 52310.889752773655 }, { "content": "pub trait BetaContext {\n\n fn get_bool(&self, sym: SymbolId) -> &bool;\n\n fn get_i8(&self, sym: SymbolId) -> i8;\n\n fn get_i16(&self, sym: SymbolId) -> i16;\n\n fn get_i32(&self, sym: SymbolId) -> i32;\n\n fn get_i64(&self, sym: SymbolId) -> i64;\n\n fn get_i128(&self, sym: SymbolId) -> i128;\n\n fn get_u8(&self, sym: SymbolId) -> u8;\n\n fn get_u16(&self, sym: SymbolId) -> u16;\n\n fn get_u32(&self, sym: SymbolId) -> u32;\n\n fn get_u64(&self, sym: SymbolId) -> u64;\n\n fn get_u128(&self, sym: SymbolId) -> u128;\n\n fn get_f32(&self, sym: SymbolId) -> NotNaN<f32>;\n\n fn get_f64(&self, sym: SymbolId) -> NotNaN<f64>;\n\n fn get_d128(&self, sym: SymbolId) -> OrdVar<d128>;\n\n fn get_str(&self, sym: SymbolId) -> &str;\n\n fn get_time(&self, sym: SymbolId) -> &NaiveTime;\n\n fn get_date(&self, sym: SymbolId) -> &Date<Utc>;\n\n fn get_datetime(&self, sym: SymbolId) -> &DateTime<Utc>;\n\n fn get_string_cache(&self) -> &StringCache;\n\n}", "file_path": "src/shared/context.rs", "rank": 63, "score": 52310.889752773655 }, { "content": "pub trait IsStatic {\n\n fn is_static(&self) -> bool;\n\n}\n\n\n", "file_path": "src/shared/nodes/beta.rs", "rank": 64, "score": 52310.889752773655 }, { "content": "pub trait AlphaMemoryId {}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, Hash, Ord, PartialOrd, PartialEq)]\n\npub enum 
MemoryId {\n\n HashEq(HashEqId),\n\n Alpha(AlphaId),\n\n Beta(BetaId)\n\n}\n\n\n\nimpl Into<MemoryId> for HashEqId {\n\n fn into(self) -> MemoryId {\n\n MemoryId::HashEq(self)\n\n }\n\n}\n\n\n\nimpl AlphaMemoryId for HashEqId {}\n\n\n\nimpl Into<MemoryId> for AlphaId {\n\n fn into(self) -> MemoryId {\n\n MemoryId::Alpha(self)\n", "file_path": "src/runtime/memory.rs", "rank": 65, "score": 51116.9856497866 }, { "content": "pub trait StringIntern {\n\n type Output;\n\n\n\n fn string_intern(&self, cache: &mut StringCache) -> Self::Output;\n\n}\n\n\n", "file_path": "src/shared/nodes/beta.rs", "rank": 66, "score": 51116.9856497866 }, { "content": "pub trait KnowledgeBase {\n\n\n\n}\n\n\n", "file_path": "src/shared/compiler/builder.rs", "rank": 67, "score": 51116.9856497866 }, { "content": "pub trait RuleBuilder {\n\n type CB: ConsequenceBuilder;\n\n\n\n fn salience(self, salience: i32) -> Self;\n\n fn no_loop(self, no_loop: bool) -> Self;\n\n fn when<T: 'static + Fact, N: Stage1Compile<T>>(self, nodes: &[N]) -> Result<Self, CompileError>\n\n where Self: std::marker::Sized;\n\n fn provides_when<T: 'static + Fact, S: AsRef<str>, N: Stage1Compile<T>>(self, provides: &[ProvidesNode<S, S>], nodes: &[N]) -> Result<Self, CompileError>\n\n where Self: std::marker::Sized;\n\n fn when_exists<T: 'static + Fact, N: Stage1Compile<T>>(self, nodes: &[N]) -> Result<Self, CompileError>\n\n where Self: std::marker::Sized;\n\n fn when_absent<T: 'static + Fact, N: Stage1Compile<T>>(self, nodes: &[N]) -> Result<Self, CompileError>\n\n where Self: std::marker::Sized;\n\n fn when_for_all<T:'static + Fact, N: Stage1Compile<T>>(self, node: &[N]) -> Result<Self, CompileError>\n\n where Self: std::marker::Sized;\n\n fn provides_when_for_all<T:'static + Fact, S: AsRef<str>, N: Stage1Compile<T>>(self, provides: &[ProvidesNode<S, S>], nodes: &[N]) -> Result<Self, CompileError>\n\n where Self: std::marker::Sized;\n\n fn all_group(self) -> Self;\n\n fn any_group(self) -> Self;\n\n fn end_group(self) -> Result<Self, CompileError> where Self: std::marker::Sized;\n\n fn then(self) -> Result<Self::CB, CompileError>;\n\n}\n\n\n", "file_path": "src/shared/compiler/builder.rs", "rank": 68, "score": 51116.9856497866 }, { "content": "pub trait ConsequenceBuilder {\n\n type BB: BaseBuilder;\n\n fn end(self) -> Result<Self::BB, CompileError>;\n\n}\n", "file_path": "src/shared/compiler/builder.rs", "rank": 69, "score": 51116.9856497866 }, { "content": "pub trait CollectRequired {\n\n fn collect_required(&self, symbols: &mut HashMap<SymbolId, HashSet<FactFieldType>>);\n\n}\n\n\n\n#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug)]\n\npub enum SLimit<T, S> {\n\n St(T),\n\n Dyn(S),\n\n}\n\n\n\nimpl<T> SLimit<T, SymbolId>\n\n where T: RefField {\n\n\n\n pub fn test_field_ref<C: BetaContext, E: STest<T> >(&self, test: &E, value: &T, context: &C) -> bool {\n\n use self::SLimit::*;\n\n match *self {\n\n St(ref to) => test.test(value, to),\n\n Dyn(ref s_to) => test.test(value, T::resolve(context, *s_to))\n\n }\n\n }\n", "file_path": "src/shared/nodes/beta.rs", "rank": 70, "score": 51116.9856497866 }, { "content": "pub trait BaseBuilder {\n\n type RB: RuleBuilder;\n\n type KB: KnowledgeBase;\n\n\n\n fn rule<S: AsRef<str>>(self, name: S) -> Self::RB;\n\n fn rule_with_agenda<S: AsRef<str>, A: AsRef<str>>(mut self, name: S, agenda_group: A) -> Self::RB;\n\n fn end(self) -> Self::KB;\n\n}\n\n\n", "file_path": "src/shared/compiler/builder.rs", "rank": 71, "score": 51116.9856497866 }, { "content": "#[derive(Clone, Eq, PartialEq, 
Debug)]\n\nstruct StatementData<T: Fact> {\n\n statement_provides: StatementProvides<T>,\n\n statement_requires: HashMap<SymbolId, HashSet<FactFieldType>>,\n\n condition_groups: HashMap<ConditionGroupId, ConditionGroupType>\n\n}\n\n\n\nimpl<T: Fact> StatementDetails for StatementData<T> {\n\n fn provides_var(&self) -> Option<SymbolId> {\n\n self.statement_provides.var\n\n }\n\n\n\n fn provides_fields<'a>(&'a self) -> Box<Iterator<Item = (SymbolId, FactFieldType)> + 'a> {\n\n Box::new(\n\n self.statement_provides.fields.iter()\n\n .map(|(key, val)| (*key, val.get_field_type()))\n\n )\n\n }\n\n\n\n fn requires_fields(&self) -> &HashMap<SymbolId, HashSet<FactFieldType>> {\n\n &self.statement_requires\n\n }\n\n}\n\n\n\n// TODO: After we build up the groupings & requirements, cascade down the groupings to ensure that we're not screwing anything up\n\n\n", "file_path": "src/shared/runtimes/array/builder.rs", "rank": 72, "score": 50310.69590724486 }, { "content": "#[derive(Clone, Eq, PartialEq, Debug)]\n\nstruct StatementProvides<T: Fact> {\n\n var: Option<SymbolId>,\n\n fields: HashMap<SymbolId, Getter<T>>\n\n}\n\n\n\nimpl<T: Fact> Default for StatementProvides<T> {\n\n fn default() -> Self {\n\n StatementProvides{var: None, fields: Default::default()}\n\n }\n\n}\n\n\n", "file_path": "src/shared/runtimes/array/builder.rs", "rank": 73, "score": 50310.69590724486 }, { "content": "struct BetaGraph<T: Fact> {\n\n rule_rel: HashMap<ConditionGroupChild, Vec<RuleId>>,\n\n statement_root: HashMap<StatementId, ConditionGroupChild>,\n\n parent_child_rel: BiMap<ConditionGroupId, Vec<ConditionGroupChild>>,\n\n child_group_rel: HashMap<ConditionGroupChild, ConditionGroupId>,\n\n test_nodes: BiMap<ConditionId, BetaNode<T>>\n\n}\n\n\n\nimpl<T: Fact> Default for BetaGraph<T> {\n\n fn default() -> Self {\n\n BetaGraph {\n\n rule_rel: Default::default(),\n\n statement_root: Default::default(),\n\n parent_child_rel: Default::default(),\n\n child_group_rel: Default::default(),\n\n test_nodes: Default::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/shared/runtimes/array/builder.rs", "rank": 74, "score": 50302.5915001935 }, { "content": "pub trait TryInto<T> {\n\n fn try_into(self) -> Result<T>;\n\n}\n\n\n\n\n\nmacro_rules! 
into_values {\n\n ($($id:ty => $sub:ident),+) => {\n\n $(\n\n impl IntoIntern<StatementValues> for $id {\n\n fn into_intern(self, _: &mut StringCache) -> StatementValues {\n\n StatementValues::$sub(ValueHolder::S(self))\n\n }\n\n }\n\n\n\n impl IntoIntern<StatementValues> for ($id, $id) {\n\n fn into_intern(self, _: &mut StringCache) -> StatementValues {\n\n StatementValues::$sub(ValueHolder::D(self.0, self.1))\n\n }\n\n }\n\n )*\n", "file_path": "src/builders/statement.rs", "rank": 75, "score": 50067.21550392917 }, { "content": "pub trait IntoIntern<T> {\n\n fn into_intern(self, cache: &mut StringCache) -> T;\n\n}\n\n\n", "file_path": "src/builders/statement.rs", "rank": 76, "score": 50067.21550392917 }, { "content": "pub trait StatementDetails {\n\n fn provides_var(&self) -> Option<SymbolId>;\n\n fn provides_fields<'a>(&'a self) -> Box<Iterator<Item = (SymbolId, FactFieldType)> + 'a>;\n\n fn requires_fields(&self) -> &HashMap<SymbolId, HashSet<FactFieldType>>;\n\n}\n\n\n", "file_path": "src/shared/runtimes/array/builder.rs", "rank": 77, "score": 50017.932988833585 }, { "content": "pub trait NetworkBuilder: Any {\n\n\n\n}\n\n\n\n//impl<T: StdAny> NetworkBuilder for T {}\n\n\n\nimpl UncheckedAnyExt for NetworkBuilder {\n\n #[inline]\n\n unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T {\n\n &*(self as *const Self as *const T)\n\n }\n\n\n\n #[inline]\n\n unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T {\n\n &mut *(self as *mut Self as *mut T)\n\n }\n\n\n\n #[inline]\n\n unsafe fn downcast_unchecked<T: 'static>(self: Box<Self>) -> Box<T> {\n\n Box::from_raw(Box::into_raw(self) as *mut T)\n\n }\n\n}\n\n\n", "file_path": "src/shared/runtimes/array/builder.rs", "rank": 78, "score": 47774.258739989105 }, { "content": "pub trait MapAll<T, U> {\n\n type Output;\n\n\n\n fn map_all<F>(&self, func: F) -> Self::Output\n\n where F: FnMut(&T) -> U;\n\n}\n\n\n", "file_path": "src/shared/nodes/beta.rs", "rank": 79, "score": 45922.44949775869 }, { "content": "pub trait DrainWhere<T, F>\n\n where F: FnMut(&T) -> bool {\n\n fn drain_where(&mut self, f: F) -> Self;\n\n}\n\n\n\nimpl<T, F> DrainWhere<T, F> for Vec<T>\n\n where F: FnMut(&T) -> bool {\n\n fn drain_where(&mut self, mut f: F) -> Self {\n\n let mut i = 0;\n\n let mut v = Vec::new();\n\n while i != self.len() {\n\n if f(&mut self[i]) {\n\n v.push(self.remove(i));\n\n } else {\n\n i += 1;\n\n }\n\n }\n\n v\n\n }\n\n}\n", "file_path": "src/shared/compiler/prelude.rs", "rank": 80, "score": 45922.44949775869 }, { "content": "/// Compare a value against a single parameter\n\npub trait STest<T: ?Sized>{\n\n fn test(&self, val: &T, to: &T) -> bool;\n\n}\n\n\n\nimpl<'a, F, T: ? Sized> STest<T> for (Truth, &'a F)\n\n where F: STest<T> {\n\n fn test(&self, val: &T, to: &T) -> bool {\n\n self.0.is_not() ^ self.1.test(val, to)\n\n }\n\n}\n\n\n", "file_path": "src/shared/nodes/tests.rs", "rank": 81, "score": 44907.376716888226 }, { "content": "/// Compare a value against two parameters\n\npub trait DTest<T: ?Sized>{\n\n fn test(&self, val: &T, from: &T, to: &T) -> bool;\n\n}\n\n\n\nimpl<'a, F, T: ? 
Sized> DTest<T> for (Truth, &'a F)\n\n where F: DTest<T> {\n\n fn test(&self, val: &T, from: &T, to: &T) -> bool {\n\n self.0.is_not() ^ self.1.test(val, from, to)\n\n }\n\n}\n\n\n\n\n\n/// Single value ordinal test\n\n#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug)]\n\npub enum OrdTest {\n\n /// val < to\n\n Lt,\n\n /// val <= to\n\n Le,\n\n /// val > to\n", "file_path": "src/shared/nodes/tests.rs", "rank": 82, "score": 44907.376716888226 }, { "content": "pub trait MapStatic<T, U> {\n\n type Output;\n\n\n\n fn map_static<F>(&self, func: F) -> Self::Output\n\n where F: FnMut(&T) -> U;\n\n}\n\n\n", "file_path": "src/shared/nodes/beta.rs", "rank": 83, "score": 44907.376716888226 }, { "content": "pub trait MapDynamic<T, U> {\n\n type Output;\n\n\n\n fn map_dynamic<F>(&self, func: F) -> Self::Output\n\n where F: FnMut(&T) -> U;\n\n}\n\n\n", "file_path": "src/shared/nodes/beta.rs", "rank": 84, "score": 44907.376716888226 }, { "content": "\n\nimpl<T: Fact> Getter<T> {\n\n fn hash_self<H: Hasher>(ord: usize, getter: usize, state: &mut H) {\n\n ord.hash(state);\n\n getter.hash(state);\n\n }\n\n}\n\n\n\nimpl<T: Fact> Debug for Getter<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use self::Getter::*;\n\n write!(f, \"Getter(\")?;\n\n match *self {\n\n BOOL(accessor) => write!(f, \"BOOL({:#x})\", accessor as usize)?,\n\n I8(accessor) => write!(f, \"I8({:#x})\", accessor as usize)?,\n\n I16(accessor) => write!(f, \"I16({:#x})\", accessor as usize)?,\n\n I32(accessor) => write!(f, \"I32({:#x})\", accessor as usize)?,\n\n I64(accessor) => write!(f, \"I64({:#x})\", accessor as usize)?,\n\n I128(accessor) => write!(f, \"I128({:#x})\", accessor as usize)?,\n\n U8(accessor) => write!(f, \"U8({:#x})\", accessor as usize)?,\n", "file_path": "src/shared/fact.rs", "rank": 85, "score": 34122.412266480795 }, { "content": " fn clone(&self) -> Self {\n\n use self::Getter::*;\n\n match *self {\n\n $(\n\n $t(getter) => $t(getter),\n\n )*\n\n }\n\n }\n\n }\n\n\n\n impl <T:Fact> Hash for Getter<T> {\n\n fn hash < H: Hasher > ( & self, state: & mut H) {\n\n use self::Getter::*;\n\n match *self {\n\n $ ( $ t(getter) => Self::hash_self(self.enum_index(), getter as usize, state),\n\n )*\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/shared/fact.rs", "rank": 86, "score": 34110.61265207376 }, { "content": " pub fn is_number(self) -> bool {\n\n use self::FactFieldType::*;\n\n match self {\n\n I8 | I16 | I32 | I64 | I128\n\n | U8 | U16 | U32 | U64 | U128\n\n | F32 | F64 | D128 => true,\n\n _ => false\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/shared/fact.rs", "rank": 87, "score": 34110.43292450325 }, { "content": "}\n\n\n\nimpl_ref_field!(\n\n bool => get_bool => BOOL,\n\n str => get_str => STR,\n\n NaiveTime => get_time => TIME,\n\n Date<Utc> => get_date => DATE,\n\n DateTime<Utc> => get_datetime => DATETIME\n\n);\n\n\n\nimpl_cast_field!(\n\n i8 => get_i8 => I8,\n\n i16 => get_i16 => I16,\n\n i32 => get_i32 => I32,\n\n i64 => get_i64 => I64,\n\n i128 => get_i128 => I128,\n\n u8 => get_u8 => U8,\n\n u16 => get_u16 => U16,\n\n u32 => get_u32 => U32,\n\n u64 => get_u64 => U64,\n", "file_path": "src/shared/fact.rs", "rank": 88, "score": 34104.06560028147 }, { "content": "pub enum Getter<T: Fact> {\n\n BOOL(fn(&T) -> &bool),\n\n I8(fn(&T) -> &i8),\n\n I16(fn(&T) -> &i16),\n\n I32(fn(&T) -> &i32),\n\n I64(fn(&T) -> &i64),\n\n I128(fn(&T) -> &i128),\n\n U8(fn(&T) -> &u8),\n\n U16(fn(&T) -> &u16),\n\n U32(fn(&T) -> &u32),\n\n U64(fn(&T) -> &u64),\n\n U128(fn(&T) -> &u128),\n\n F32(fn(&T) -> 
&NotNaN<f32>),\n\n F64(fn(&T) -> &NotNaN<f64>),\n\n D128(fn(&T) -> &OrdVar<d128>),\n\n STR(fn(&T) -> &str),\n\n TIME(fn(&T) -> &NaiveTime),\n\n DATE(fn(&T) -> &Date<Utc>),\n\n DATETIME(fn(&T) -> &DateTime<Utc>),\n\n}\n", "file_path": "src/shared/fact.rs", "rank": 89, "score": 34103.625322958425 }, { "content": " impl<T:Fact> PartialEq for Getter<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n use self::Getter::*;\n\n match (self, other) {\n\n $( (&$t(getter1), &$t(getter2)) => {\n\n (getter1 as usize) == (getter2 as usize)\n\n },)*\n\n _ => false\n\n }\n\n }\n\n }\n\n\n\n impl<T: Fact> Eq for Getter<T> {}\n\n\n\n impl<T:Fact> Ord for Getter<T> {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n use self::Getter::*;\n\n match(self, other) {\n\n $( (&$t(getter1), &$t(getter2)) => {\n\n (getter1 as usize).cmp(&(getter2 as usize))\n", "file_path": "src/shared/fact.rs", "rank": 90, "score": 34096.8690959874 }, { "content": " U8, U16, U32, U64, U128,\n\n F32, F64, D128,\n\n STR ,\n\n TIME, DATE, DATETIME\n\n );\n\n\n\n\n\nimpl FactFieldType {\n\n\n\n pub fn is_compatible(self, other: Self) -> bool {\n\n self.is_number_compatible(other)\n\n }\n\n\n\n pub fn is_number_compatible(self, other: Self) -> bool {\n\n match (self.is_number(), other.is_number()) {\n\n (true, true) => true,\n\n _ => false\n\n }\n\n }\n\n\n", "file_path": "src/shared/fact.rs", "rank": 91, "score": 34096.752174194175 }, { "content": " U16(accessor) => write!(f, \"U16({:#x})\", accessor as usize)?,\n\n U32(accessor) => write!(f, \"U32({:#x})\", accessor as usize)?,\n\n U64(accessor) => write!(f, \"U64({:#x})\", accessor as usize)?,\n\n U128(accessor) => write!(f, \"U128({:#x})\", accessor as usize)?,\n\n F32(accessor) => write!(f, \"F32({:#x})\", accessor as usize)?,\n\n F64(accessor) => write!(f, \"F64({:#x})\", accessor as usize)?,\n\n D128(accessor) => write!(f, \"D128({:#x})\", accessor as usize)?,\n\n STR(accessor) => write!(f, \"STR({:#x})\", accessor as usize)?,\n\n TIME(accessor) => write!(f, \"TIME({:#x})\", accessor as usize)?,\n\n DATE(accessor) => write!(f, \"DATE({:#x})\", accessor as usize)?,\n\n DATETIME(accessor) => write!(f, \"DATETIME({:#x})\", accessor as usize)?,\n\n }\n\n write!(f, \")\")\n\n }\n\n}\n\n\n\nmacro_rules! 
getter_derive {\n\n ($($t:ident),+ ) => {\n\n\n\n impl<T: Fact> Clone for Getter<T> {\n", "file_path": "src/shared/fact.rs", "rank": 92, "score": 34095.273685456275 }, { "content": " },)*\n\n _ => self.enum_index().cmp(&other.enum_index())\n\n }\n\n }\n\n }\n\n\n\n impl<T:Fact> PartialOrd for Getter<T> {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n }\n\n\n\n #[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug)]\n\n pub enum FactFieldType {\n\n $(\n\n $t,\n\n )*\n\n }\n\n\n", "file_path": "src/shared/fact.rs", "rank": 93, "score": 34095.13214224706 }, { "content": "use std::hash::Hash;\n\nuse std;\n\nuse std::fmt;\n\nuse std::fmt::Debug;\n\nuse ord_subset::OrdVar;\n\nuse decimal::d128;\n\nuse chrono::{NaiveTime, Date, DateTime, Duration, Utc};\n\nuse ::runtime::memory::SymbolId;\n\nuse ordered_float::NotNaN;\n\nuse super::context::BetaContext;\n\nuse runtime::memory::StringCache;\n\nuse shared::nodes::alpha::HashEqField;\n\nuse shared::nodes::alpha::AlphaNode;\n\nuse std::hash::Hasher;\n\nuse std::cmp::Ordering;\n\nuse enum_index;\n\nuse enum_index::EnumIndex;\n\nuse std::any::Any;\n\n\n\n#[derive(Copy, EnumIndex)]\n", "file_path": "src/shared/fact.rs", "rank": 94, "score": 34090.07240963873 }, { "content": " u128 => get_u128 => U128,\n\n NotNaN<f32> => get_f32 => F32,\n\n NotNaN<f64> => get_f64 => F64,\n\n OrdVar<d128> => get_d128 => D128\n\n);\n\n\n\nimpl FactField for SymbolId {\n\n fn get_field_type() -> FactFieldType {\n\n FactFieldType::STR\n\n }\n\n}", "file_path": "src/shared/fact.rs", "rank": 95, "score": 34079.09308529765 }, { "content": " };\n\n}\n\n\n\nmacro_rules! impl_cast_field {\n\n ($($id:ty => $getter:ident => $field_type:ident),+) => {\n\n $(\n\n impl FactField for $id {\n\n fn get_field_type() -> FactFieldType {\n\n FactFieldType::$field_type\n\n }\n\n }\n\n\n\n impl CastField for $id {\n\n #[inline]\n\n fn resolve<C: BetaContext>(context: &C, sym: SymbolId) -> Self {\n\n context.$getter(sym)\n\n }\n\n }\n\n )*\n\n };\n", "file_path": "src/shared/fact.rs", "rank": 96, "score": 34078.86841280111 }, { "content": " fn getter(field: &str) -> Option<Getter<Self>> {\n\n match field {\n\n \"d\" => Some(Getter::U64(Dummy::get_d)),\n\n _ => unimplemented!()\n\n }\n\n }\n\n\n\n fn exhaustive_hash(&self) -> Box<Iterator<Item=<Self as Fact>::HashEq>> {\n\n unimplemented!()\n\n }\n\n\n\n fn create_hash_eq(conditions: &Vec<HashEqField>, cache: &StringCache) -> Self::HashEq {\n\n unimplemented!()\n\n }\n\n }\n\n\n\n #[test]\n\n pub fn as_ref_test() {\n\n let mut cache = StringCache::new();\n\n let nodes: Stage1Node<Dummy> = all(\n\n &[not(any(&[eq(\"d\", 6u64)])), all(&[le(\"d\", 64u64), le(\"d\", dyn(\"ab\"))])]\n\n ).stage1_compile(&mut cache).unwrap()\n\n .clean();\n\n println!(\"{:?}\", nodes);\n\n }\n\n}", "file_path": "src/shared/compiler/as_ref.rs", "rank": 97, "score": 32175.399447211406 }, { "content": "use super::prelude::*;\n\npub use super::prelude::dyn;\n\nuse super::super::nodes::tests::{ApplyNot, EqTest, OrdTest, BetweenTest, StrArrayTest};\n\nuse super::super::nodes::beta::TestRepr;\n\nuse runtime::memory::StringCache;\n\nuse errors::CompileError;\n\nuse shared::fact::Fact;\n\n\n\n#[derive(Clone, Hash, Eq, PartialEq, Debug)]\n\npub enum RefNodes<'a, S: 'a + AsRef<str>> {\n\n Test(TestRepr<S>),\n\n Any(&'a [RefNodes<'a, S>]),\n\n NotAny(&'a [RefNodes<'a, S>]),\n\n All(&'a [RefNodes<'a, S>]),\n\n NotAll(&'a [RefNodes<'a, S>])\n\n}\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 98, "score": 
32172.3763753517 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use shared::fact::{Fact, Getter};\n\n use super::*;\n\n use shared::nodes::alpha::HashEqField;\n\n\n\n #[derive(Clone, Hash, Eq, PartialEq, Debug)]\n\n struct Dummy {\n\n d: u64\n\n }\n\n\n\n impl Dummy {\n\n fn get_d(&self) -> &u64 {\n\n &self.d\n\n }\n\n }\n\n\n\n impl Fact for Dummy {\n\n type HashEq = ();\n\n\n", "file_path": "src/shared/compiler/as_ref.rs", "rank": 99, "score": 32171.450438897813 } ]
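The rule-engine sources gathered in the row above (`OrdData`, `FlData`, `StrData`, `Getter`) all make their field-accessor function pointers hashable and comparable by casting the `fn(&T) -> &U` pointer to `usize` and working with the address. A minimal, self-contained sketch of that pattern follows; the `Item` and `FieldGetter` names are placeholders invented for illustration, not types from the dump.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Placeholder fact type standing in for the `Fact` implementors above.
struct Item {
    count: u64,
}

impl Item {
    fn get_count(&self) -> &u64 {
        &self.count
    }
}

// Wrapper over a field accessor, mirroring how `Getter<T>` / `OrdData<T>`
// store `fn(&T) -> &U` pointers in the dumped sources.
#[derive(Clone, Copy)]
struct FieldGetter(fn(&Item) -> &u64);

impl Hash for FieldGetter {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Function pointers have no derived Hash, so hash the address instead.
        (self.0 as usize).hash(state);
    }
}

impl PartialEq for FieldGetter {
    fn eq(&self, other: &Self) -> bool {
        // Two getters are considered equal when they point at the same function.
        (self.0 as usize) == (other.0 as usize)
    }
}

impl Eq for FieldGetter {}

fn main() {
    let a = FieldGetter(Item::get_count);
    let b = FieldGetter(Item::get_count);
    assert!(a == b);

    let mut hasher = DefaultHasher::new();
    a.hash(&mut hasher);
    println!("getter address hash: {:#x}", hasher.finish());
}

This is the same trick used in the `PartialEq` and `Hash` impls dumped above: the pointer address stands in for identity, which is cheap but means two distinct functions with identical behavior never compare equal.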
Rust
src/can1/mir1_arb.rs
crawford/efm32gg11b820
390142de0a68b55a142bb16d31634cebf2289209
#[doc = "Reader of register MIR1_ARB"] pub type R = crate::R<u32, super::MIR1_ARB>; #[doc = "Writer for register MIR1_ARB"] pub type W = crate::W<u32, super::MIR1_ARB>; #[doc = "Register MIR1_ARB `reset()`'s with value 0"] impl crate::ResetValue for super::MIR1_ARB { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `ID`"] pub type ID_R = crate::R<u32, u32>; #[doc = "Write proxy for field `ID`"] pub struct ID_W<'a> { w: &'a mut W, } impl<'a> ID_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0x1fff_ffff) | ((value as u32) & 0x1fff_ffff); self.w } } #[doc = "Reader of field `DIR`"] pub type DIR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `DIR`"] pub struct DIR_W<'a> { w: &'a mut W, } impl<'a> DIR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "Reader of field `XTD`"] pub type XTD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `XTD`"] pub struct XTD_W<'a> { w: &'a mut W, } impl<'a> XTD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "Reader of field `MSGVAL`"] pub type MSGVAL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `MSGVAL`"] pub struct MSGVAL_W<'a> { w: &'a mut W, } impl<'a> MSGVAL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:28 - Message Identifier"] #[inline(always)] pub fn id(&self) -> ID_R { ID_R::new((self.bits & 0x1fff_ffff) as u32) } #[doc = "Bit 29 - Message Direction"] #[inline(always)] pub fn dir(&self) -> DIR_R { DIR_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - Extended Identifier"] #[inline(always)] pub fn xtd(&self) -> XTD_R { XTD_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Message Valid"] #[inline(always)] pub fn msgval(&self) -> MSGVAL_R { MSGVAL_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:28 - Message Identifier"] #[inline(always)] pub fn id(&mut self) -> ID_W { ID_W { w: self } } #[doc = "Bit 29 - Message Direction"] #[inline(always)] pub fn dir(&mut self) -> DIR_W { DIR_W { w: self } } #[doc = "Bit 30 - Extended Identifier"] #[inline(always)] pub fn xtd(&mut self) -> XTD_W { XTD_W { w: self } } #[doc = "Bit 31 - Message Valid"] #[inline(always)] pub fn msgval(&mut self) -> MSGVAL_W { MSGVAL_W { w: self } } }
#[doc = "Reader of register MIR1_ARB"] pub type R = crate::R<u32, super::MIR1_ARB>; #[doc = "Writer for register MIR1_ARB"] pub type W = crate::W<u32, super::MIR1_ARB>; #[doc = "Register MIR1_ARB `reset()`'s with value 0"] impl crate::ResetValue for super::MIR1_ARB { type Type = u32; #[inline(always)] fn rese
self.w } } #[doc = "Reader of field `MSGVAL`"] pub type MSGVAL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `MSGVAL`"] pub struct MSGVAL_W<'a> { w: &'a mut W, } impl<'a> MSGVAL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:28 - Message Identifier"] #[inline(always)] pub fn id(&self) -> ID_R { ID_R::new((self.bits & 0x1fff_ffff) as u32) } #[doc = "Bit 29 - Message Direction"] #[inline(always)] pub fn dir(&self) -> DIR_R { DIR_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - Extended Identifier"] #[inline(always)] pub fn xtd(&self) -> XTD_R { XTD_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - Message Valid"] #[inline(always)] pub fn msgval(&self) -> MSGVAL_R { MSGVAL_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:28 - Message Identifier"] #[inline(always)] pub fn id(&mut self) -> ID_W { ID_W { w: self } } #[doc = "Bit 29 - Message Direction"] #[inline(always)] pub fn dir(&mut self) -> DIR_W { DIR_W { w: self } } #[doc = "Bit 30 - Extended Identifier"] #[inline(always)] pub fn xtd(&mut self) -> XTD_W { XTD_W { w: self } } #[doc = "Bit 31 - Message Valid"] #[inline(always)] pub fn msgval(&mut self) -> MSGVAL_W { MSGVAL_W { w: self } } }
t_value() -> Self::Type { 0 } } #[doc = "Reader of field `ID`"] pub type ID_R = crate::R<u32, u32>; #[doc = "Write proxy for field `ID`"] pub struct ID_W<'a> { w: &'a mut W, } impl<'a> ID_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0x1fff_ffff) | ((value as u32) & 0x1fff_ffff); self.w } } #[doc = "Reader of field `DIR`"] pub type DIR_R = crate::R<bool, bool>; #[doc = "Write proxy for field `DIR`"] pub struct DIR_W<'a> { w: &'a mut W, } impl<'a> DIR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "Reader of field `XTD`"] pub type XTD_R = crate::R<bool, bool>; #[doc = "Write proxy for field `XTD`"] pub struct XTD_W<'a> { w: &'a mut W, } impl<'a> XTD_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 189570.65675386717 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\")).unwrap().write_all(include_bytes!(\"device.x\")).unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 71216.98879392265 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 2, "score": 66192.8417247691 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 3, "score": 66186.28024295985 }, { "content": "#[doc = \"Reader of register RST\"]\n\npub type R = crate::R<u32, super::RST>;\n\n#[doc = \"Writer for register RST\"]\n\npub type W = crate::W<u32, super::RST>;\n\n#[doc = \"Register RST `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RST {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\nimpl R {}\n\nimpl W {}\n", "file_path": "src/rmu/rst.rs", "rank": 4, "score": 77.741899884598 }, { "content": "#[doc = \"Reader of register PPUPATD2\"]\n\npub type R = crate::R<u32, super::PPUPATD2>;\n\n#[doc = \"Writer for register PPUPATD2\"]\n\npub type W = crate::W<u32, super::PPUPATD2>;\n\n#[doc = \"Register PPUPATD2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PPUPATD2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\nimpl R {}\n\nimpl W {}\n", "file_path": "src/smu/ppupatd2.rs", "rank": 5, "score": 77.741899884598 }, { "content": "#[doc = \"Reader of register KEY2\"]\n\npub type R = crate::R<u32, super::KEY2>;\n\n#[doc = \"Writer for register KEY2\"]\n\npub type W = crate::W<u32, super::KEY2>;\n\n#[doc = \"Register KEY2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::KEY2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the 
field\"]\n", "file_path": "src/trng0/key2.rs", "rank": 6, "score": 77.18042636459565 }, { "content": "#[doc = \"Reader of register REMAPADDR\"]\n\npub type R = crate::R<u32, super::REMAPADDR>;\n\n#[doc = \"Writer for register REMAPADDR\"]\n\npub type W = crate::W<u32, super::REMAPADDR>;\n\n#[doc = \"Register REMAPADDR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::REMAPADDR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/qspi0/remapaddr.rs", "rank": 7, "score": 77.18042636459563 }, { "content": "#[doc = \"Reader of register INDIRECTREADXFERNUMBYTES\"]\n\npub type R = crate::R<u32, super::INDIRECTREADXFERNUMBYTES>;\n\n#[doc = \"Writer for register INDIRECTREADXFERNUMBYTES\"]\n\npub type W = crate::W<u32, super::INDIRECTREADXFERNUMBYTES>;\n\n#[doc = \"Register INDIRECTREADXFERNUMBYTES `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INDIRECTREADXFERNUMBYTES {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/qspi0/indirectreadxfernumbytes.rs", "rank": 8, "score": 77.18042636459565 }, { "content": "#[doc = \"Reader of register KEY3\"]\n\npub type R = crate::R<u32, super::KEY3>;\n\n#[doc = \"Writer for register KEY3\"]\n\npub type W = crate::W<u32, super::KEY3>;\n\n#[doc = \"Register KEY3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::KEY3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/trng0/key3.rs", "rank": 9, "score": 77.18042636459563 }, { "content": "#[doc = \"Reader of register KEY1\"]\n\npub type R = crate::R<u32, super::KEY1>;\n\n#[doc = \"Writer for register KEY1\"]\n\npub type W = crate::W<u32, super::KEY1>;\n\n#[doc = \"Register KEY1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::KEY1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/trng0/key1.rs", "rank": 10, "score": 77.18042636459563 }, { "content": "#[doc = \"Reader of register KEY0\"]\n\npub type R = crate::R<u32, super::KEY0>;\n\n#[doc = \"Writer for register KEY0\"]\n\npub type W = crate::W<u32, super::KEY0>;\n\n#[doc = \"Register KEY0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::KEY0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, 
u32>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/trng0/key0.rs", "rank": 11, "score": 77.18042636459565 }, { "content": "#[doc = \"Reader of register TESTDATA\"]\n\npub type R = crate::R<u32, super::TESTDATA>;\n\n#[doc = \"Writer for register TESTDATA\"]\n\npub type W = crate::W<u32, super::TESTDATA>;\n\n#[doc = \"Register TESTDATA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TESTDATA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/trng0/testdata.rs", "rank": 12, "score": 77.18042636459563 }, { "content": "#[doc = \"Reader of register INDIRECTWRITEXFERNUMBYTES\"]\n\npub type R = crate::R<u32, super::INDIRECTWRITEXFERNUMBYTES>;\n\n#[doc = \"Writer for register INDIRECTWRITEXFERNUMBYTES\"]\n\npub type W = crate::W<u32, super::INDIRECTWRITEXFERNUMBYTES>;\n\n#[doc = \"Register INDIRECTWRITEXFERNUMBYTES `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INDIRECTWRITEXFERNUMBYTES {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/qspi0/indirectwritexfernumbytes.rs", "rank": 13, "score": 77.18042636459563 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rtcc/ien.rs", "rank": 14, "score": 76.43484149707584 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer6/ien.rs", "rank": 15, "score": 76.43484149707585 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type 
{\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer4/ien.rs", "rank": 16, "score": 76.43484149707585 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer2/ien.rs", "rank": 17, "score": 76.43484149707584 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer1/ien.rs", "rank": 18, "score": 76.43484149707584 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/wtimer1/ien.rs", "rank": 19, "score": 76.43484149707584 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer3/ien.rs", "rank": 20, "score": 76.43484149707585 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct 
OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/wtimer3/ien.rs", "rank": 21, "score": 76.43484149707585 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/wtimer0/ien.rs", "rank": 22, "score": 76.43484149707585 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/wtimer2/ien.rs", "rank": 23, "score": 76.43484149707584 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rtc/ien.rs", "rank": 24, "score": 76.43484149707585 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer5/ien.rs", "rank": 25, "score": 76.43484149707584 }, { "content": "#[doc = \"Reader of register IEN\"]\n\npub type R = crate::R<u32, super::IEN>;\n\n#[doc = \"Writer for register IEN\"]\n\npub type W = crate::W<u32, super::IEN>;\n\n#[doc = \"Register IEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `OF`\"]\n\npub type OF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer0/ien.rs", "rank": 26, "score": 
76.43484149707584 }, { "content": "#[doc = \"Reader of register INITWAITVAL\"]\n\npub type R = crate::R<u32, super::INITWAITVAL>;\n\n#[doc = \"Writer for register INITWAITVAL\"]\n\npub type W = crate::W<u32, super::INITWAITVAL>;\n\n#[doc = \"Register INITWAITVAL `reset()`'s with value 0xff\"]\n\nimpl crate::ResetValue for super::INITWAITVAL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0xff\n\n }\n\n}\n\n#[doc = \"Reader of field `VALUE`\"]\n\npub type VALUE_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `VALUE`\"]\n\npub struct VALUE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VALUE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/trng0/initwaitval.rs", "rank": 27, "score": 75.90953850676314 }, { "content": "#[doc = \"Reader of register FRAMESRXED128\"]\n\npub type R = crate::R<u32, super::FRAMESRXED128>;\n\n#[doc = \"Writer for register FRAMESRXED128\"]\n\npub type W = crate::W<u32, super::FRAMESRXED128>;\n\n#[doc = \"Register FRAMESRXED128 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESRXED128 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framesrxed128.rs", "rank": 28, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register FRAMESRXEDOK\"]\n\npub type R = crate::R<u32, super::FRAMESRXEDOK>;\n\n#[doc = \"Writer for register FRAMESRXEDOK\"]\n\npub type W = crate::W<u32, super::FRAMESRXEDOK>;\n\n#[doc = \"Register FRAMESRXEDOK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESRXEDOK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framesrxedok.rs", "rank": 29, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register TSUSECCMP\"]\n\npub type R = crate::R<u32, super::TSUSECCMP>;\n\n#[doc = \"Writer for register TSUSECCMP\"]\n\npub type W = crate::W<u32, super::TSUSECCMP>;\n\n#[doc = \"Register TSUSECCMP `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TSUSECCMP {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPVAL`\"]\n\npub type COMPVAL_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COMPVAL`\"]\n\npub struct COMPVAL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPVAL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/tsuseccmp.rs", "rank": 30, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register DATA3\"]\n\npub type R = crate::R<u32, super::DATA3>;\n\n#[doc = \"Writer for register DATA3\"]\n\npub type W = crate::W<u32, super::DATA3>;\n\n#[doc = \"Register DATA3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA3`\"]\n\npub type DATA3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy 
for field `DATA3`\"]\n\npub struct DATA3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data3.rs", "rank": 31, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register BROADCASTTXED\"]\n\npub type R = crate::R<u32, super::BROADCASTTXED>;\n\n#[doc = \"Writer for register BROADCASTTXED\"]\n\npub type W = crate::W<u32, super::BROADCASTTXED>;\n\n#[doc = \"Register BROADCASTTXED `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::BROADCASTTXED {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/broadcasttxed.rs", "rank": 32, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register QDATA1\"]\n\npub type R = crate::R<u32, super::QDATA1>;\n\n#[doc = \"Writer for register QDATA1\"]\n\npub type W = crate::W<u32, super::QDATA1>;\n\n#[doc = \"Register QDATA1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::QDATA1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `QDATA1`\"]\n\npub type QDATA1_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `QDATA1`\"]\n\npub struct QDATA1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> QDATA1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/qdata1.rs", "rank": 33, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register MULTICASTTXED\"]\n\npub type R = crate::R<u32, super::MULTICASTTXED>;\n\n#[doc = \"Writer for register MULTICASTTXED\"]\n\npub type W = crate::W<u32, super::MULTICASTTXED>;\n\n#[doc = \"Register MULTICASTTXED `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::MULTICASTTXED {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/multicasttxed.rs", "rank": 34, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register SPECADDR3BOTTOM\"]\n\npub type R = crate::R<u32, super::SPECADDR3BOTTOM>;\n\n#[doc = \"Writer for register SPECADDR3BOTTOM\"]\n\npub type W = crate::W<u32, super::SPECADDR3BOTTOM>;\n\n#[doc = \"Register SPECADDR3BOTTOM `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SPECADDR3BOTTOM {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDR`\"]\n\npub type ADDR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/specaddr3bottom.rs", "rank": 35, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register TSUTIMERADJUST\"]\n\npub type R = crate::R<u32, super::TSUTIMERADJUST>;\n\n#[doc = \"Writer for register TSUTIMERADJUST\"]\n\npub type W = crate::W<u32, super::TSUTIMERADJUST>;\n\n#[doc = \"Register TSUTIMERADJUST `reset()`'s with value 
0\"]\n\nimpl crate::ResetValue for super::TSUTIMERADJUST {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `INCREMENTVAL`\"]\n\npub type INCREMENTVAL_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `INCREMENTVAL`\"]\n\npub struct INCREMENTVAL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> INCREMENTVAL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/tsutimeradjust.rs", "rank": 36, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register CALCNT\"]\n\npub type R = crate::R<u32, super::CALCNT>;\n\n#[doc = \"Writer for register CALCNT\"]\n\npub type W = crate::W<u32, super::CALCNT>;\n\n#[doc = \"Register CALCNT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CALCNT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CALCNT`\"]\n\npub type CALCNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `CALCNT`\"]\n\npub struct CALCNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CALCNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/cmu/calcnt.rs", "rank": 37, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register HASHBOTTOM\"]\n\npub type R = crate::R<u32, super::HASHBOTTOM>;\n\n#[doc = \"Writer for register HASHBOTTOM\"]\n\npub type W = crate::W<u32, super::HASHBOTTOM>;\n\n#[doc = \"Register HASHBOTTOM `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::HASHBOTTOM {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDR`\"]\n\npub type ADDR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/hashbottom.rs", "rank": 38, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register TFTPIXEL0\"]\n\npub type R = crate::R<u32, super::TFTPIXEL0>;\n\n#[doc = \"Writer for register TFTPIXEL0\"]\n\npub type W = crate::W<u32, super::TFTPIXEL0>;\n\n#[doc = \"Register TFTPIXEL0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TFTPIXEL0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ebi/tftpixel0.rs", "rank": 39, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register DATA2\"]\n\npub type R = crate::R<u32, super::DATA2>;\n\n#[doc = \"Writer for register DATA2\"]\n\npub type W = crate::W<u32, super::DATA2>;\n\n#[doc = \"Register DATA2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA2`\"]\n\npub type DATA2_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DATA2`\"]\n\npub struct DATA2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data2.rs", "rank": 40, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register SPECADDR2BOTTOM\"]\n\npub type R = crate::R<u32, 
super::SPECADDR2BOTTOM>;\n\n#[doc = \"Writer for register SPECADDR2BOTTOM\"]\n\npub type W = crate::W<u32, super::SPECADDR2BOTTOM>;\n\n#[doc = \"Register SPECADDR2BOTTOM `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SPECADDR2BOTTOM {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDR`\"]\n\npub type ADDR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/specaddr2bottom.rs", "rank": 41, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register FRAMESRXED65\"]\n\npub type R = crate::R<u32, super::FRAMESRXED65>;\n\n#[doc = \"Writer for register FRAMESRXED65\"]\n\npub type W = crate::W<u32, super::FRAMESRXED65>;\n\n#[doc = \"Register FRAMESRXED65 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESRXED65 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framesrxed65.rs", "rank": 42, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register FRAMESTXED512\"]\n\npub type R = crate::R<u32, super::FRAMESTXED512>;\n\n#[doc = \"Writer for register FRAMESTXED512\"]\n\npub type W = crate::W<u32, super::FRAMESTXED512>;\n\n#[doc = \"Register FRAMESTXED512 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESTXED512 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framestxed512.rs", "rank": 43, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register DATA1\"]\n\npub type R = crate::R<u32, super::DATA1>;\n\n#[doc = \"Writer for register DATA1\"]\n\npub type W = crate::W<u32, super::DATA1>;\n\n#[doc = \"Register DATA1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA1`\"]\n\npub type DATA1_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DATA1`\"]\n\npub struct DATA1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data1.rs", "rank": 44, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register DDATA0BIG\"]\n\npub type R = crate::R<u32, super::DDATA0BIG>;\n\n#[doc = \"Writer for register DDATA0BIG\"]\n\npub type W = crate::W<u32, super::DDATA0BIG>;\n\n#[doc = \"Register DDATA0BIG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DDATA0BIG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DDATA0BIG`\"]\n\npub type DDATA0BIG_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DDATA0BIG`\"]\n\npub struct DDATA0BIG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> 
DDATA0BIG_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/ddata0big.rs", "rank": 45, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register FRAMESRXED512\"]\n\npub type R = crate::R<u32, super::FRAMESRXED512>;\n\n#[doc = \"Writer for register FRAMESRXED512\"]\n\npub type W = crate::W<u32, super::FRAMESRXED512>;\n\n#[doc = \"Register FRAMESRXED512 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESRXED512 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framesrxed512.rs", "rank": 46, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register INDIRECTREADXFERWATERMARK\"]\n\npub type R = crate::R<u32, super::INDIRECTREADXFERWATERMARK>;\n\n#[doc = \"Writer for register INDIRECTREADXFERWATERMARK\"]\n\npub type W = crate::W<u32, super::INDIRECTREADXFERWATERMARK>;\n\n#[doc = \"Register INDIRECTREADXFERWATERMARK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INDIRECTREADXFERWATERMARK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `LEVEL`\"]\n\npub type LEVEL_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `LEVEL`\"]\n\npub struct LEVEL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LEVEL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/qspi0/indirectreadxferwatermark.rs", "rank": 47, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register BOOTTOCTRL\"]\n\npub type R = crate::R<u32, super::BOOTTOCTRL>;\n\n#[doc = \"Writer for register BOOTTOCTRL\"]\n\npub type W = crate::W<u32, super::BOOTTOCTRL>;\n\n#[doc = \"Register BOOTTOCTRL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::BOOTTOCTRL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BOOTDATTOCNT`\"]\n\npub type BOOTDATTOCNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `BOOTDATTOCNT`\"]\n\npub struct BOOTDATTOCNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BOOTDATTOCNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/sdio/boottoctrl.rs", "rank": 48, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register FRAMESRXED64\"]\n\npub type R = crate::R<u32, super::FRAMESRXED64>;\n\n#[doc = \"Writer for register FRAMESRXED64\"]\n\npub type W = crate::W<u32, super::FRAMESRXED64>;\n\n#[doc = \"Register FRAMESRXED64 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESRXED64 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framesrxed64.rs", "rank": 49, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register CNT\"]\n\npub type R = crate::R<u32, super::CNT>;\n\n#[doc = \"Writer for register CNT\"]\n\npub type W = crate::W<u32, super::CNT>;\n\n#[doc = \"Register CNT `reset()`'s with value 
0\"]\n\nimpl crate::ResetValue for super::CNT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CNT`\"]\n\npub type CNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `CNT`\"]\n\npub struct CNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/timer6/cnt.rs", "rank": 50, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register RXRESOURCEERRS\"]\n\npub type R = crate::R<u32, super::RXRESOURCEERRS>;\n\n#[doc = \"Writer for register RXRESOURCEERRS\"]\n\npub type W = crate::W<u32, super::RXRESOURCEERRS>;\n\n#[doc = \"Register RXRESOURCEERRS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RXRESOURCEERRS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/rxresourceerrs.rs", "rank": 51, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register FRAMESTXED64\"]\n\npub type R = crate::R<u32, super::FRAMESTXED64>;\n\n#[doc = \"Writer for register FRAMESTXED64\"]\n\npub type W = crate::W<u32, super::FRAMESTXED64>;\n\n#[doc = \"Register FRAMESTXED64 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESTXED64 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framestxed64.rs", "rank": 52, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register OCTETSTXEDBOTTOM\"]\n\npub type R = crate::R<u32, super::OCTETSTXEDBOTTOM>;\n\n#[doc = \"Writer for register OCTETSTXEDBOTTOM\"]\n\npub type W = crate::W<u32, super::OCTETSTXEDBOTTOM>;\n\n#[doc = \"Register OCTETSTXEDBOTTOM `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OCTETSTXEDBOTTOM {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/octetstxedbottom.rs", "rank": 53, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register SEGD0L\"]\n\npub type R = crate::R<u32, super::SEGD0L>;\n\n#[doc = \"Writer for register SEGD0L\"]\n\npub type W = crate::W<u32, super::SEGD0L>;\n\n#[doc = \"Register SEGD0L `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEGD0L {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SEGD0L`\"]\n\npub type SEGD0L_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SEGD0L`\"]\n\npub struct SEGD0L_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEGD0L_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lcd/segd0l.rs", "rank": 54, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of 
register TFTDD\"]\n\npub type R = crate::R<u32, super::TFTDD>;\n\n#[doc = \"Writer for register TFTDD\"]\n\npub type W = crate::W<u32, super::TFTDD>;\n\n#[doc = \"Register TFTDD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TFTDD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ebi/tftdd.rs", "rank": 55, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register TXLPITIME\"]\n\npub type R = crate::R<u32, super::TXLPITIME>;\n\n#[doc = \"Writer for register TXLPITIME\"]\n\npub type W = crate::W<u32, super::TXLPITIME>;\n\n#[doc = \"Register TXLPITIME `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TXLPITIME {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `LPITIME`\"]\n\npub type LPITIME_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `LPITIME`\"]\n\npub struct LPITIME_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LPITIME_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/txlpitime.rs", "rank": 56, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register FRAMESTXED1024\"]\n\npub type R = crate::R<u32, super::FRAMESTXED1024>;\n\n#[doc = \"Writer for register FRAMESTXED1024\"]\n\npub type W = crate::W<u32, super::FRAMESTXED1024>;\n\n#[doc = \"Register FRAMESTXED1024 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESTXED1024 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framestxed1024.rs", "rank": 57, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register TXQPTR\"]\n\npub type R = crate::R<u32, super::TXQPTR>;\n\n#[doc = \"Writer for register TXQPTR\"]\n\npub type W = crate::W<u32, super::TXQPTR>;\n\n#[doc = \"Register TXQPTR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TXQPTR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DMATXQPTR`\"]\n\npub type DMATXQPTR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DMATXQPTR`\"]\n\npub struct DMATXQPTR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DMATXQPTR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/txqptr.rs", "rank": 58, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register DEFERREDFRAMES\"]\n\npub type R = crate::R<u32, super::DEFERREDFRAMES>;\n\n#[doc = \"Writer for register DEFERREDFRAMES\"]\n\npub type W = crate::W<u32, super::DEFERREDFRAMES>;\n\n#[doc = \"Register DEFERREDFRAMES `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DEFERREDFRAMES {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut 
W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/deferredframes.rs", "rank": 59, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register HASHTOP\"]\n\npub type R = crate::R<u32, super::HASHTOP>;\n\n#[doc = \"Writer for register HASHTOP\"]\n\npub type W = crate::W<u32, super::HASHTOP>;\n\n#[doc = \"Register HASHTOP `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::HASHTOP {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDR`\"]\n\npub type ADDR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/hashtop.rs", "rank": 60, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register DATA0\"]\n\npub type R = crate::R<u32, super::DATA0>;\n\n#[doc = \"Writer for register DATA0\"]\n\npub type W = crate::W<u32, super::DATA0>;\n\n#[doc = \"Register DATA0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA0`\"]\n\npub type DATA0_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DATA0`\"]\n\npub struct DATA0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data0.rs", "rank": 61, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register FRAMESRXED1519\"]\n\npub type R = crate::R<u32, super::FRAMESRXED1519>;\n\n#[doc = \"Writer for register FRAMESRXED1519\"]\n\npub type W = crate::W<u32, super::FRAMESRXED1519>;\n\n#[doc = \"Register FRAMESRXED1519 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESRXED1519 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framesrxed1519.rs", "rank": 62, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register INDIRECTREADXFERSTART\"]\n\npub type R = crate::R<u32, super::INDIRECTREADXFERSTART>;\n\n#[doc = \"Writer for register INDIRECTREADXFERSTART\"]\n\npub type W = crate::W<u32, super::INDIRECTREADXFERSTART>;\n\n#[doc = \"Register INDIRECTREADXFERSTART `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INDIRECTREADXFERSTART {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDR`\"]\n\npub type ADDR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/qspi0/indirectreadxferstart.rs", "rank": 63, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register SCANMASK1\"]\n\npub type R = crate::R<u32, super::SCANMASK1>;\n\n#[doc = \"Writer for register SCANMASK1\"]\n\npub type W = crate::W<u32, super::SCANMASK1>;\n\n#[doc = \"Register SCANMASK1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SCANMASK1 {\n\n type Type = u32;\n\n 
#[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SCANINPUTEN`\"]\n\npub type SCANINPUTEN_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SCANINPUTEN`\"]\n\npub struct SCANINPUTEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCANINPUTEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/csen/scanmask1.rs", "rank": 64, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register TFTPIXEL1\"]\n\npub type R = crate::R<u32, super::TFTPIXEL1>;\n\n#[doc = \"Writer for register TFTPIXEL1\"]\n\npub type W = crate::W<u32, super::TFTPIXEL1>;\n\n#[doc = \"Register TFTPIXEL1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TFTPIXEL1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ebi/tftpixel1.rs", "rank": 65, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register INDIRECTWRITEXFERSTART\"]\n\npub type R = crate::R<u32, super::INDIRECTWRITEXFERSTART>;\n\n#[doc = \"Writer for register INDIRECTWRITEXFERSTART\"]\n\npub type W = crate::W<u32, super::INDIRECTWRITEXFERSTART>;\n\n#[doc = \"Register INDIRECTWRITEXFERSTART `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::INDIRECTWRITEXFERSTART {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDR`\"]\n\npub type ADDR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/qspi0/indirectwritexferstart.rs", "rank": 66, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register TSUTIMERNSEC\"]\n\npub type R = crate::R<u32, super::TSUTIMERNSEC>;\n\n#[doc = \"Writer for register TSUTIMERNSEC\"]\n\npub type W = crate::W<u32, super::TSUTIMERNSEC>;\n\n#[doc = \"Register TSUTIMERNSEC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TSUTIMERNSEC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TIMER`\"]\n\npub type TIMER_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `TIMER`\"]\n\npub struct TIMER_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TIMER_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/tsutimernsec.rs", "rank": 67, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register KEY\"]\n\npub type R = crate::R<u32, super::KEY>;\n\n#[doc = \"Writer for register KEY\"]\n\npub type W = crate::W<u32, super::KEY>;\n\n#[doc = \"Register KEY `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::KEY {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `KEY`\"]\n\npub type KEY_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `KEY`\"]\n\npub struct KEY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> KEY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/key.rs", "rank": 68, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register SEGD7L\"]\n\npub type R = crate::R<u32, 
super::SEGD7L>;\n\n#[doc = \"Writer for register SEGD7L\"]\n\npub type W = crate::W<u32, super::SEGD7L>;\n\n#[doc = \"Register SEGD7L `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SEGD7L {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SEGD7L`\"]\n\npub type SEGD7L_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SEGD7L`\"]\n\npub struct SEGD7L_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEGD7L_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/lcd/segd7l.rs", "rank": 69, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register SCANMASK\"]\n\npub type R = crate::R<u32, super::SCANMASK>;\n\n#[doc = \"Writer for register SCANMASK\"]\n\npub type W = crate::W<u32, super::SCANMASK>;\n\n#[doc = \"Register SCANMASK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SCANMASK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SCANINPUTEN`\"]\n\npub type SCANINPUTEN_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SCANINPUTEN`\"]\n\npub struct SCANINPUTEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCANINPUTEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/adc1/scanmask.rs", "rank": 70, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register DATA\"]\n\npub type R = crate::R<u32, super::DATA>;\n\n#[doc = \"Writer for register DATA\"]\n\npub type W = crate::W<u32, super::DATA>;\n\n#[doc = \"Register DATA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/csen/data.rs", "rank": 71, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register DDATA3\"]\n\npub type R = crate::R<u32, super::DDATA3>;\n\n#[doc = \"Writer for register DDATA3\"]\n\npub type W = crate::W<u32, super::DDATA3>;\n\n#[doc = \"Register DDATA3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DDATA3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DDATA3`\"]\n\npub type DDATA3_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DDATA3`\"]\n\npub struct DDATA3_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DDATA3_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/ddata3.rs", "rank": 72, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register WDATA\"]\n\npub type R = crate::R<u32, super::WDATA>;\n\n#[doc = \"Writer for register WDATA\"]\n\npub type W = crate::W<u32, super::WDATA>;\n\n#[doc = \"Register WDATA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::WDATA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `WDATA`\"]\n\npub type WDATA_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `WDATA`\"]\n\npub struct WDATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WDATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/msc/wdata.rs", "rank": 73, "score": 75.85468848111563 }, { 
"content": "#[doc = \"Reader of register DATA0XOR\"]\n\npub type R = crate::R<u32, super::DATA0XOR>;\n\n#[doc = \"Writer for register DATA0XOR\"]\n\npub type W = crate::W<u32, super::DATA0XOR>;\n\n#[doc = \"Register DATA0XOR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DATA0XOR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA0XOR`\"]\n\npub type DATA0XOR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DATA0XOR`\"]\n\npub struct DATA0XOR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA0XOR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/data0xor.rs", "rank": 74, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register SPECADDR4BOTTOM\"]\n\npub type R = crate::R<u32, super::SPECADDR4BOTTOM>;\n\n#[doc = \"Writer for register SPECADDR4BOTTOM\"]\n\npub type W = crate::W<u32, super::SPECADDR4BOTTOM>;\n\n#[doc = \"Register SPECADDR4BOTTOM `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SPECADDR4BOTTOM {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDR`\"]\n\npub type ADDR_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/specaddr4bottom.rs", "rank": 75, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register FRAMESRXED256\"]\n\npub type R = crate::R<u32, super::FRAMESRXED256>;\n\n#[doc = \"Writer for register FRAMESRXED256\"]\n\npub type W = crate::W<u32, super::FRAMESRXED256>;\n\n#[doc = \"Register FRAMESRXED256 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESRXED256 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framesrxed256.rs", "rank": 76, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register FRAMESTXED65\"]\n\npub type R = crate::R<u32, super::FRAMESTXED65>;\n\n#[doc = \"Writer for register FRAMESTXED65\"]\n\npub type W = crate::W<u32, super::FRAMESTXED65>;\n\n#[doc = \"Register FRAMESTXED65 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESTXED65 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framestxed65.rs", "rank": 77, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register FRAMESRXED1024\"]\n\npub type R = crate::R<u32, super::FRAMESRXED1024>;\n\n#[doc = \"Writer for register FRAMESRXED1024\"]\n\npub type W = crate::W<u32, super::FRAMESRXED1024>;\n\n#[doc = \"Register FRAMESRXED1024 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESRXED1024 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type 
COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framesrxed1024.rs", "rank": 78, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register FRAMESTXED256\"]\n\npub type R = crate::R<u32, super::FRAMESTXED256>;\n\n#[doc = \"Writer for register FRAMESTXED256\"]\n\npub type W = crate::W<u32, super::FRAMESTXED256>;\n\n#[doc = \"Register FRAMESTXED256 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FRAMESTXED256 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framestxed256.rs", "rank": 79, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register QDATA1BIG\"]\n\npub type R = crate::R<u32, super::QDATA1BIG>;\n\n#[doc = \"Writer for register QDATA1BIG\"]\n\npub type W = crate::W<u32, super::QDATA1BIG>;\n\n#[doc = \"Register QDATA1BIG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::QDATA1BIG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `QDATA1BIG`\"]\n\npub type QDATA1BIG_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `QDATA1BIG`\"]\n\npub struct QDATA1BIG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> QDATA1BIG_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/qdata1big.rs", "rank": 80, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register CNT\"]\n\npub type R = crate::R<u32, super::CNT>;\n\n#[doc = \"Writer for register CNT\"]\n\npub type W = crate::W<u32, super::CNT>;\n\n#[doc = \"Register CNT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CNT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CNT`\"]\n\npub type CNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `CNT`\"]\n\npub struct CNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/timer4/cnt.rs", "rank": 81, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register QDATA0\"]\n\npub type R = crate::R<u32, super::QDATA0>;\n\n#[doc = \"Writer for register QDATA0\"]\n\npub type W = crate::W<u32, super::QDATA0>;\n\n#[doc = \"Register QDATA0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::QDATA0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `QDATA0`\"]\n\npub type QDATA0_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `QDATA0`\"]\n\npub struct QDATA0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> QDATA0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/qdata0.rs", "rank": 82, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register FRAMESTXED128\"]\n\npub type R = crate::R<u32, super::FRAMESTXED128>;\n\n#[doc = \"Writer for register FRAMESTXED128\"]\n\npub type W = crate::W<u32, super::FRAMESTXED128>;\n\n#[doc = \"Register FRAMESTXED128 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for 
super::FRAMESTXED128 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/framestxed128.rs", "rank": 83, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register SCANMASK\"]\n\npub type R = crate::R<u32, super::SCANMASK>;\n\n#[doc = \"Writer for register SCANMASK\"]\n\npub type W = crate::W<u32, super::SCANMASK>;\n\n#[doc = \"Register SCANMASK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SCANMASK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SCANINPUTEN`\"]\n\npub type SCANINPUTEN_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SCANINPUTEN`\"]\n\npub struct SCANINPUTEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCANINPUTEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/adc0/scanmask.rs", "rank": 84, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register OCTETSRXEDBOTTOM\"]\n\npub type R = crate::R<u32, super::OCTETSRXEDBOTTOM>;\n\n#[doc = \"Writer for register OCTETSRXEDBOTTOM\"]\n\npub type W = crate::W<u32, super::OCTETSRXEDBOTTOM>;\n\n#[doc = \"Register OCTETSRXEDBOTTOM `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OCTETSRXEDBOTTOM {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/octetsrxedbottom.rs", "rank": 85, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register SINGLECOLS\"]\n\npub type R = crate::R<u32, super::SINGLECOLS>;\n\n#[doc = \"Writer for register SINGLECOLS\"]\n\npub type W = crate::W<u32, super::SINGLECOLS>;\n\n#[doc = \"Register SINGLECOLS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SINGLECOLS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COUNT`\"]\n\npub type COUNT_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COUNT`\"]\n\npub struct COUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/singlecols.rs", "rank": 86, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register ADDRB\"]\n\npub type R = crate::R<u32, super::ADDRB>;\n\n#[doc = \"Writer for register ADDRB\"]\n\npub type W = crate::W<u32, super::ADDRB>;\n\n#[doc = \"Register ADDRB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ADDRB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDRB`\"]\n\npub type ADDRB_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `ADDRB`\"]\n\npub struct ADDRB_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDRB_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/msc/addrb.rs", "rank": 87, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register EMA\"]\n\npub type R = crate::R<u32, 
super::EMA>;\n\n#[doc = \"Writer for register EMA\"]\n\npub type W = crate::W<u32, super::EMA>;\n\n#[doc = \"Register EMA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EMA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `EMA`\"]\n\npub type EMA_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `EMA`\"]\n\npub struct EMA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EMA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/csen/ema.rs", "rank": 88, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register KEYBUF\"]\n\npub type R = crate::R<u32, super::KEYBUF>;\n\n#[doc = \"Writer for register KEYBUF\"]\n\npub type W = crate::W<u32, super::KEYBUF>;\n\n#[doc = \"Register KEYBUF `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::KEYBUF {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `KEYBUF`\"]\n\npub type KEYBUF_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `KEYBUF`\"]\n\npub struct KEYBUF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> KEYBUF_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/keybuf.rs", "rank": 89, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register DDATA2\"]\n\npub type R = crate::R<u32, super::DDATA2>;\n\n#[doc = \"Writer for register DDATA2\"]\n\npub type W = crate::W<u32, super::DDATA2>;\n\n#[doc = \"Register DDATA2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DDATA2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DDATA2`\"]\n\npub type DDATA2_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DDATA2`\"]\n\npub struct DDATA2_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DDATA2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/ddata2.rs", "rank": 90, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register SCANMASK0\"]\n\npub type R = crate::R<u32, super::SCANMASK0>;\n\n#[doc = \"Writer for register SCANMASK0\"]\n\npub type W = crate::W<u32, super::SCANMASK0>;\n\n#[doc = \"Register SCANMASK0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SCANMASK0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SCANINPUTEN`\"]\n\npub type SCANINPUTEN_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SCANINPUTEN`\"]\n\npub struct SCANINPUTEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCANINPUTEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/csen/scanmask0.rs", "rank": 91, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register CHEN\"]\n\npub type R = crate::R<u32, super::CHEN>;\n\n#[doc = \"Writer for register CHEN\"]\n\npub type W = crate::W<u32, super::CHEN>;\n\n#[doc = \"Register CHEN `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CHEN {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CHEN`\"]\n\npub type CHEN_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `CHEN`\"]\n\npub struct CHEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CHEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ldma/chen.rs", "rank": 92, "score": 75.85468848111563 }, { "content": 
"#[doc = \"Reader of register SDMASYSADDR\"]\n\npub type R = crate::R<u32, super::SDMASYSADDR>;\n\n#[doc = \"Writer for register SDMASYSADDR\"]\n\npub type W = crate::W<u32, super::SDMASYSADDR>;\n\n#[doc = \"Register SDMASYSADDR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SDMASYSADDR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SDMASYSADDRARG`\"]\n\npub type SDMASYSADDRARG_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SDMASYSADDRARG`\"]\n\npub struct SDMASYSADDRARG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SDMASYSADDRARG_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/sdio/sdmasysaddr.rs", "rank": 93, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register TOPB\"]\n\npub type R = crate::R<u32, super::TOPB>;\n\n#[doc = \"Writer for register TOPB\"]\n\npub type W = crate::W<u32, super::TOPB>;\n\n#[doc = \"Register TOPB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TOPB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TOPB`\"]\n\npub type TOPB_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `TOPB`\"]\n\npub struct TOPB_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TOPB_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/timer6/topb.rs", "rank": 94, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register TFTMASK\"]\n\npub type R = crate::R<u32, super::TFTMASK>;\n\n#[doc = \"Writer for register TFTMASK\"]\n\npub type W = crate::W<u32, super::TFTMASK>;\n\n#[doc = \"Register TFTMASK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TFTMASK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TFTMASK`\"]\n\npub type TFTMASK_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `TFTMASK`\"]\n\npub struct TFTMASK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TFTMASK_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ebi/tftmask.rs", "rank": 95, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register DDATA1\"]\n\npub type R = crate::R<u32, super::DDATA1>;\n\n#[doc = \"Writer for register DDATA1\"]\n\npub type W = crate::W<u32, super::DDATA1>;\n\n#[doc = \"Register DDATA1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DDATA1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DDATA1`\"]\n\npub type DDATA1_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DDATA1`\"]\n\npub struct DDATA1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DDATA1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/ddata1.rs", "rank": 96, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register TSUNSECCMP\"]\n\npub type R = crate::R<u32, super::TSUNSECCMP>;\n\n#[doc = \"Writer for register TSUNSECCMP\"]\n\npub type W = crate::W<u32, super::TSUNSECCMP>;\n\n#[doc = \"Register TSUNSECCMP `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TSUNSECCMP {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPVAL`\"]\n\npub type COMPVAL_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `COMPVAL`\"]\n\npub struct COMPVAL_W<'a> {\n\n w: &'a mut 
W,\n\n}\n\nimpl<'a> COMPVAL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/eth/tsunseccmp.rs", "rank": 97, "score": 75.85468848111563 }, { "content": "#[doc = \"Reader of register DDATA4\"]\n\npub type R = crate::R<u32, super::DDATA4>;\n\n#[doc = \"Writer for register DDATA4\"]\n\npub type W = crate::W<u32, super::DDATA4>;\n\n#[doc = \"Register DDATA4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DDATA4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DDATA4`\"]\n\npub type DDATA4_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `DDATA4`\"]\n\npub struct DDATA4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DDATA4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/crypto0/ddata4.rs", "rank": 98, "score": 75.85468848111562 }, { "content": "#[doc = \"Reader of register TFTFRAMEBASE\"]\n\npub type R = crate::R<u32, super::TFTFRAMEBASE>;\n\n#[doc = \"Writer for register TFTFRAMEBASE\"]\n\npub type W = crate::W<u32, super::TFTFRAMEBASE>;\n\n#[doc = \"Register TFTFRAMEBASE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::TFTFRAMEBASE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `FRAMEBASE`\"]\n\npub type FRAMEBASE_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `FRAMEBASE`\"]\n\npub struct FRAMEBASE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FRAMEBASE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/ebi/tftframebase.rs", "rank": 99, "score": 75.85468848111562 } ]
Rust
crates/core/plugin_sm/tests/plugin_manager.rs
PradeepKiruvale/localworkflow
b5f3c97c835cb36ae87f14b8697bedcca5d22619
#[cfg(test)]
mod tests {
    use plugin_sm::plugin_manager::{ExternalPlugins, Plugins};
    use std::{fs::File, path::PathBuf, str::FromStr};
    use tempfile::NamedTempFile;

    #[test]
    fn plugin_manager_load_plugins_empty() {
        let temp_dir = tempfile::tempdir().unwrap();
        let plugin_dir = temp_dir.path().to_owned();
        let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap();
        let _ = plugins.load();
        assert!(plugins.empty());
    }

    #[test]
    fn plugin_manager_load_plugins_some_non_executables() {
        let temp_dir = tempfile::tempdir().unwrap();
        let _file = create_some_plugin_in(&temp_dir);
        let plugin_dir = temp_dir.path().to_owned();
        let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap();
        let _ = plugins.load();
        assert!(plugins.empty());
    }

    #[test]
    fn plugin_manager_load_plugins_some_by_plugins_none() {
        let temp_dir = tempfile::tempdir().unwrap();
        let _file = create_some_plugin_in(&temp_dir);
        let _file = create_some_plugin_in(&temp_dir);
        let plugin_dir = temp_dir.path().to_owned();
        let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap();
        let _ = plugins.load();
        assert!(plugins.by_software_type("test").is_none());
        assert!(plugins.by_file_extension("test").is_none());
        assert!(plugins.default().is_none());
    }

    #[test]
    fn plugin_manager_load_plugins_some_by_plugins_some() {
        let temp_dir = tempfile::tempdir().unwrap();
        let plugin1 = create_some_plugin_in(&temp_dir);
        let plugin2 = create_some_plugin_in(&temp_dir);
        let plugin_name1 = plugin1
            .path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let plugin_name2 = plugin2
            .path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin1.path());
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin2.path());
        let (_, _path) = plugin1.keep().unwrap();
        let (_, _path) = plugin2.keep().unwrap();
        let plugin_dir = temp_dir.path().to_owned();
        dbg!(&plugin_dir);
        let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap();
        let _ = plugins.load();
        assert!(plugins.by_software_type(&plugin_name1).is_some());
        assert!(plugins.by_software_type(&plugin_name2).is_some());
        assert!(plugins.by_file_extension(&plugin_name1).is_none());
        assert!(plugins.default().is_none());
    }

    #[test]
    fn explicit_default_plugin() {
        let plugin_dir = tempfile::tempdir().unwrap();
        let plugin1 = create_some_plugin_in(&plugin_dir);
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin1.path());
        let (_, _path) = plugin1.keep().unwrap();
        let plugin2 = create_some_plugin_in(&plugin_dir);
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin2.path());
        let plugin_name2 = plugin2
            .path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let (_, _path) = plugin2.keep().unwrap();
        let plugin3 = create_some_plugin_in(&plugin_dir);
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin3.path());
        let (_, _path) = plugin3.keep().unwrap();
        let mut plugins =
            ExternalPlugins::open(plugin_dir.into_path(), Some(plugin_name2.clone()), None)
                .unwrap();
        plugins.load().unwrap();
        assert_eq!(
            plugins.by_software_type("default").unwrap().name,
            plugin_name2
        );
        assert_eq!(plugins.default().unwrap().name, plugin_name2);
    }

    #[test]
    fn implicit_default_plugin_with_only_one_plugin() {
        let plugin_dir = tempfile::tempdir().unwrap();
        let plugin = create_some_plugin_in(&plugin_dir);
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin.path());
        let plugin_name = plugin
            .path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let (_, _path) = plugin.keep().unwrap();
        let mut plugins = ExternalPlugins::open(plugin_dir.into_path(), None, None).unwrap();
        plugins.load().unwrap();
        assert_eq!(
            plugins.by_software_type("default").unwrap().name,
            plugin_name
        );
        assert_eq!(plugins.default().unwrap().name, plugin_name);
    }

    #[test]
    fn invalid_default_plugin_pass_through() -> anyhow::Result<()> {
        let plugin_dir = tempfile::tempdir().unwrap();
        let plugin_file_path = plugin_dir.path().join("apt");
        let _ = File::create(plugin_file_path).unwrap();
        let result = ExternalPlugins::open(plugin_dir.into_path(), Some("dummy".into()), None)?;
        assert!(result.empty());
        assert!(result.default().is_none());
        Ok(())
    }

    fn create_some_plugin_in(dir: &tempfile::TempDir) -> NamedTempFile {
        tempfile::Builder::new()
            .suffix(".0")
            .tempfile_in(dir)
            .unwrap()
    }

    fn get_dummy_plugin_path() -> PathBuf {
        let package_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
        let dummy_plugin_path = PathBuf::from_str(package_dir.as_str())
            .unwrap()
            .parent()
            .unwrap()
            .parent()
            .unwrap()
            .parent()
            .unwrap()
            .join("target/debug/tedge_dummy_plugin");
        dummy_plugin_path
    }
}
#[cfg(test)]
mod tests {
    use plugin_sm::plugin_manager::{ExternalPlugins, Plugins};
    use std::{fs::File, path::PathBuf, str::FromStr};
    use tempfile::NamedTempFile;

    #[test]
    fn plugin_manager_load_plugins_empty() {
        let temp_dir = tempfile::tempdir().unwrap();
        let plugin_dir = temp_dir.path().to_owned();
        let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap();
        let _ = plugins.load();
        assert!(plugins.empty());
    }

    #[test]
    fn plugin_manager_load_plugins_some_non_executables() {
        let temp_dir = tempfile::tempdir().unwrap();
        let _file = create_some_plugin_in(&temp_dir);
        let plugin_dir = temp_dir.path().to_owned();
        let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap();
        let _ = plugins.load();
        assert!(plugins.empty());
    }

    #[test]
    fn plugin_manager_load_plugins_some_by_plugins_none() {
        let temp_dir = tempfile::tempdir().unwrap();
        let _file = create_some_plugin_in(&temp_dir);
        let _file = create_some_plugin_in(&temp_dir);
        let plugin_dir = temp_dir.path().to_owned();
        let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap();
        let _ = plugins.load();
        assert!(plugins.by_software_type("test").is_none());
        assert!(plugins.by_file_extension("test").is_none());
        assert!(plugins.default().is_none());
    }

    #[test]
    fn plugin_manager_load_plugins_some_by_plugins_some() {
        let temp_dir = tempfile::tempdir().unwrap();
        let plugin1 = create_some_plugin_in(&temp_dir);
        let plugin2 = create_some_plugin_in(&temp_dir);
        let plugin_name1 = plugin1
            .path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let plugin_name2 = plugin2
            .path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin1.path());
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin2.path());
        let (_, _path) = plugin1.keep().unwrap();
        let (_, _path) = plugin2.keep().unwrap();
        let plugin_dir = temp_dir.path().to_owned();
        dbg!(&plugin_dir);
        let mut plugins = ExternalPlugins::open(plugin_dir, None, None).unwrap();
        let _ = plugins.load();
        assert!(plugins.by_software_type(&plugin_name1).is_some());
        assert!(plugins.by_software_type(&plugin_name2).is_some());
        assert!(plugins.by_file_extension(&plugin_name1).is_none());
        assert!(plugins.default().is_none());
    }

    #[test]
    fn explicit_default_plugin() {
        let plugin_dir = tempfile::tempdir().unwrap();
        let plugin1 = create_some_plugin_in(&plugin_dir);
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin1.path());
        let (_, _path) = plugin1.keep().unwrap();
        let plugin2 = create_some_plugin_in(&plugin_dir);
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin2.path());
        let plugin_name2 = plugin2
            .path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let (_, _path) = plugin2.keep().unwrap();
        let plugin3 = create_some_plugin_in(&plugin_dir);
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin3.path());
        let (_, _path) = plugin3.keep().unwrap();
        let mut plugins =
            ExternalPlugins::open(plugin_dir.into_path(), Some(plugin_name2.clone()), None)
                .unwrap();
        plugins.load().unwrap();
        assert_eq!(
            plugins.by_software_type("default").unwrap().name,
            plugin_name2
        );
        assert_eq!(plugins.default().unwrap().name, plugin_name2);
    }

    #[test]
    fn implicit_default_plugin_with_only_one_plugin() {
        let plugin_dir = tempfile::tempdir().unwrap();
        let plugin = create_some_plugin_in(&plugin_dir);
        let _res = std::fs::copy(get_dummy_plugin_path(), plugin.path());
        let plugin_name = plugin
            .path()
            .file_name()
            .unwrap()
            .to_str()
            .unwrap()
            .to_owned();
        let (_, _path) = plugin.keep().unwrap();
        let mut plugins = ExternalPlugins::open(plugin_dir.into_path(), None, None).unwrap();
        plugins.load().unwrap();
        assert_eq!(
            plugins.by_software_type("default").unwrap().name,
            plugin_name
        );
        assert_eq!(plugins.default().unwrap().name, plugin_name);
    }

    #[test]
    fn invalid_default_plugin_pass_through() -> anyhow::Result<()> {
        let plugin_dir = tempfile::tempdir().unwrap();
        let plugin_file_path = plugin_dir.path().join("apt");
        let _ = File::create(plugin_file_path).unwra
    fn create_some_plugin_in(dir: &tempfile::TempDir) -> NamedTempFile {
        tempfile::Builder::new()
            .suffix(".0")
            .tempfile_in(dir)
            .unwrap()
    }

    fn get_dummy_plugin_path() -> PathBuf {
        let package_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
        let dummy_plugin_path = PathBuf::from_str(package_dir.as_str())
            .unwrap()
            .parent()
            .unwrap()
            .parent()
            .unwrap()
            .parent()
            .unwrap()
            .join("target/debug/tedge_dummy_plugin");
        dummy_plugin_path
    }
}
p();
        let result = ExternalPlugins::open(plugin_dir.into_path(), Some("dummy".into()), None)?;
        assert!(result.empty());
        assert!(result.default().is_none());
        Ok(())
    }
function_block-function_prefixed
[ { "content": "fn get_project_name(tedge_apama_project_path: &Path) -> String {\n\n let tedge_apama_project_descriptor_path = tedge_apama_project_path.join(\".project\");\n\n if tedge_apama_project_descriptor_path.exists() {\n\n if let Ok(xml_content) = fs::read_to_string(tedge_apama_project_descriptor_path) {\n\n if let Ok(root) = roxmltree::Document::parse(xml_content.as_str()) {\n\n return root\n\n .descendants()\n\n .find(|node| node.has_tag_name(\"name\"))\n\n .and_then(|node| node.first_child())\n\n .and_then(|node| node.text())\n\n .map(str::to_string)\n\n .unwrap_or_else(|| DEFAULT_APAMA_PROJECT_NAME.into());\n\n }\n\n }\n\n }\n\n DEFAULT_APAMA_PROJECT_NAME.into()\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 0, "score": 210753.25877496897 }, { "content": "fn install_project(project_archive_path: &Path) -> Result<(), InternalError> {\n\n let tedge_apama_project_path: &Path = Path::new(TEDGE_APAMA_PROJECT_DIR);\n\n let tmp_apama_project_path: &Path = Path::new(TMP_APAMA_PROJECT_DIR);\n\n\n\n let archive_file = File::open(&project_archive_path)?;\n\n\n\n let mut archive = zip::ZipArchive::new(archive_file)?;\n\n\n\n if let Err(zip::result::ZipError::FileNotFound) = archive.by_name(\"project/\") {\n\n return Err(InternalError::InvalidProjectArchive);\n\n }\n\n\n\n println!(\"Extracting the archive at {:?}\", project_archive_path);\n\n archive.extract(tmp_apama_project_path)?;\n\n println!(\"Extraction successful\");\n\n\n\n // Deleting existing project as the rename API expects the target dir to be empty\n\n delete_project()?;\n\n\n\n println!(\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 1, "score": 206161.6529383801 }, { "content": "fn init(config_dir: &Path, logs_dir: &Path) -> Result<(), anyhow::Error> {\n\n info!(\"Creating supported operation files\");\n\n create_init_logs_directories_and_files(config_dir, logs_dir)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/c8y_log_plugin/src/main.rs", "rank": 2, "score": 205224.87359959199 }, { "content": "fn create_operation_files(config_dir: &Path) -> Result<(), anyhow::Error> {\n\n create_directory_with_user_group(\n\n format!(\"{}/c8y\", config_dir.display()),\n\n \"root\",\n\n \"root\",\n\n 0o1777,\n\n )?;\n\n let example_config = r#\"# Add the configurations to be managed by c8y-configuration-plugin\n\n\n\nfiles = [\n\n# { path = '/etc/tedge/tedge.toml' },\n\n# { path = '/etc/tedge/mosquitto-conf/c8y-bridge.conf', type = 'c8y-bridge.conf' },\n\n# { path = '/etc/tedge/mosquitto-conf/tedge-mosquitto.conf', type = 'tedge-mosquitto.conf' },\n\n# { path = '/etc/mosquitto/mosquitto.conf', type = 'mosquitto.conf' },\n\n# { path = '/etc/tedge/c8y/example.txt', type = 'example', user = 'tedge', group = 'tedge', mode = 0o444 }\n\n]\"#;\n\n\n\n create_file_with_user_group(\n\n format!(\"{}/c8y/c8y-configuration-plugin.toml\", config_dir.display()),\n\n \"root\",\n", "file_path": "plugins/c8y_configuration_plugin/src/main.rs", "rank": 3, "score": 189506.6413927304 }, { "content": "#[test]\n\nfn test_no_sm_plugin_dir() {\n\n let plugin_dir = tempfile::TempDir::new().unwrap();\n\n\n\n let actual = ExternalPlugins::open(plugin_dir.path(), None, None);\n\n assert!(actual.is_ok());\n\n}\n", "file_path": "crates/core/plugin_sm/src/plugin_manager.rs", "rank": 4, "score": 189000.71156336123 }, { "content": "#[test]\n\nfn test_no_duplicated_file_types() {\n\n let files = vec![\n\n FileEntry {\n\n path: \"a/path\".to_string(),\n\n config_type: \"type_one\".to_string(),\n\n },\n\n FileEntry {\n\n path: 
\"some/path\".to_string(),\n\n config_type: \"type_one\".to_string(),\n\n },\n\n ];\n\n let logs_config = LogPluginConfig { files: files };\n\n assert_eq!(\n\n logs_config.get_all_file_types(),\n\n vec![\"type_one\".to_string()]\n\n );\n\n}\n", "file_path": "plugins/c8y_log_plugin/src/config.rs", "rank": 5, "score": 185914.0791467899 }, { "content": "/// filter a vector of pathbufs according to `smartrest_obj.date_from` and `smartrest_obj.date_to`\n\nfn filter_logs_path_on_metadata(\n\n smartrest_obj: &SmartRestLogRequest,\n\n mut logs_path_vec: Vec<PathBuf>,\n\n) -> Result<Vec<PathBuf>, LogRetrievalError> {\n\n let mut out = vec![];\n\n\n\n logs_path_vec.sort_by_key(|pathbuf| {\n\n if let Ok(metadata) = std::fs::metadata(&pathbuf) {\n\n if let Ok(file_modified_time) = metadata.modified() {\n\n return OffsetDateTime::from(file_modified_time);\n\n }\n\n };\n\n // if the file metadata can not be read, we set the file's metadata\n\n // to UNIX_EPOCH (Jan 1st 1970)\n\n OffsetDateTime::UNIX_EPOCH\n\n });\n\n logs_path_vec.reverse(); // to get most recent\n\n\n\n for file_pathbuf in logs_path_vec {\n\n let metadata = std::fs::metadata(&file_pathbuf)?;\n", "file_path": "plugins/c8y_log_plugin/src/logfile_request.rs", "rank": 6, "score": 185726.1613539576 }, { "content": "#[test]\n\nfn test_device_id_is_none_when_there_is_no_certificate() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[device]\n\ncert_path = \"/path/to/cert\"\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, dummy_tedge_config_defaults())\n\n .load()?;\n\n\n\n assert!(config.query_optional(DeviceIdSetting).is_err());\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 7, "score": 168495.6153722159 }, { "content": "fn init(cfg_dir: PathBuf) -> Result<(), anyhow::Error> {\n\n info!(\"Creating supported operation files\");\n\n create_operation_files(&cfg_dir)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/c8y_configuration_plugin/src/main.rs", "rank": 8, "score": 164958.51587345495 }, { "content": "#[test]\n\nfn test_device_id_is_err_when_cert_path_is_not_a_certificate() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[device]\n\ncert_path = \"/path/to/cert\"\n\n\"#;\n\n\n\n let (tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let mut config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, dummy_tedge_config_defaults())\n\n .load()?;\n\n\n\n let cert_path = tempdir.path().join(\"not-a-certificate.pem\");\n\n std::fs::File::create(cert_path.clone()).expect(\"fail to create a fake certificate\");\n\n config.update(DeviceCertPathSetting, cert_path.into())?;\n\n\n\n match config.query(DeviceIdSetting) {\n\n Err(ConfigSettingError::DerivationFailed { key, cause }) => {\n\n assert_eq!(key, \"device.id\");\n\n assert_eq!(cause, \"PEM file format error\");\n\n }\n\n Err(_) => assert!(false, \"unexpected error\"),\n\n Ok(_) => assert!(false, \"unexpected ok result\"),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 9, "score": 164852.02681721616 }, { "content": "fn install_monitor(mon_file_path: &str) -> Result<(), InternalError> {\n\n run_cmd(\n\n APAMA_ENV_EXE,\n\n format!(\"{} {}\", ENGINE_INJECT_CMD, mon_file_path).as_str(),\n\n )\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 10, "score": 164745.90351301956 }, { 
"content": "pub fn validate_parent_dir_exists(path: impl AsRef<Path>) -> Result<(), PathsError> {\n\n let path = path.as_ref();\n\n if path.is_relative() {\n\n Err(PathsError::RelativePathNotPermitted { path: path.into() })\n\n } else {\n\n match path.parent() {\n\n None => Err(PathsError::ParentDirNotFound { path: path.into() }),\n\n Some(parent) => {\n\n if !parent.exists() {\n\n Err(PathsError::DirNotFound {\n\n path: parent.into(),\n\n })\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/paths.rs", "rank": 11, "score": 160137.3828005204 }, { "content": "fn install_or_update_monitor(mon_name: &str, mon_file_path: &str) -> Result<(), InternalError> {\n\n let installed_monitors = get_installed_monitors()?;\n\n // If an existing monitor needs to be updated, older version needs to be removed first before installing the new one\n\n if installed_monitors.contains(&mon_name.to_string()) {\n\n remove_monitor(mon_name)?;\n\n install_monitor(mon_file_path)\n\n } else {\n\n install_monitor(mon_file_path)\n\n }\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 12, "score": 153164.9318897239 }, { "content": "pub fn get_file_change_notification_message(file_path: &str, config_type: &str) -> Message {\n\n let notification = json!({ \"path\": file_path }).to_string();\n\n let topic = Topic::new(format!(\"{CONFIG_CHANGE_TOPIC}/{config_type}\").as_str())\n\n .unwrap_or_else(|_err| {\n\n warn!(\"The type cannot be used as a part of the topic name. Using {CONFIG_CHANGE_TOPIC} instead.\");\n\n Topic::new_unchecked(CONFIG_CHANGE_TOPIC)\n\n });\n\n Message::new(&topic, notification)\n\n}\n\n\n", "file_path": "plugins/c8y_configuration_plugin/src/download.rs", "rank": 13, "score": 153164.9318897239 }, { "content": "/// Return metadata when the given path exists and accessible by user\n\npub fn get_metadata(path: &Path) -> Result<fs::Metadata, FileError> {\n\n fs::metadata(&path).map_err(|_| FileError::PathNotAccessible {\n\n path: path.to_path_buf(),\n\n })\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/file.rs", "rank": 14, "score": 152451.21347792732 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"parse_stream\", |b| {\n\n b.iter(|| parse_stream(black_box(INPUT)))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "crates/core/thin_edge_json/benches/parsing.rs", "rank": 15, "score": 151258.89659216616 }, { "content": "/// Return filename if the given path contains a filename\n\npub fn get_filename(path: PathBuf) -> Option<String> {\n\n let filename = path.file_name()?.to_str()?.to_string();\n\n Some(filename)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::os::unix::fs::PermissionsExt;\n\n use std::path::Path;\n\n use tempfile::TempDir;\n\n\n\n #[test]\n\n fn create_file_correct_user_group() {\n\n let temp_dir = TempDir::new().unwrap();\n\n let file_path = temp_dir.path().join(\"file\").display().to_string();\n\n\n\n let user = whoami::username();\n\n let _ = create_file_with_user_group(&file_path, &user, &user, 0o644, None).unwrap();\n\n assert!(Path::new(file_path.as_str()).exists());\n", "file_path": "crates/common/tedge_utils/src/file.rs", "rank": 16, "score": 150227.4502997161 }, { "content": "fn read_cert_to_string(path: impl AsRef<Path>) -> Result<String, CertError> {\n\n let mut file = std::fs::File::open(path.as_ref()).map_err(|err| {\n\n let path = path.as_ref().display().to_string();\n\n 
CertError::CertificateReadFailed(err, path)\n\n })?;\n\n let mut content = String::new();\n\n file.read_to_string(&mut content)?;\n\n\n\n Ok(content)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use assert_matches::assert_matches;\n\n\n\n #[test]\n\n fn build_get_tenant_id_url_should_return_error_given_invalid_host() {\n\n let result = build_get_tenant_id_url(\"%\");\n\n\n", "file_path": "crates/core/tedge/src/cli/certificate/upload.rs", "rank": 17, "score": 149841.0564765554 }, { "content": "pub fn create_directories(dir_path: impl AsRef<Path>) -> Result<(), PathsError> {\n\n let dir_path = dir_path.as_ref();\n\n std::fs::create_dir_all(dir_path)\n\n .map_err(|error| PathsError::DirCreationFailed(error, dir_path.into()))\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/paths.rs", "rank": 18, "score": 149817.36089664034 }, { "content": "fn translate_50_measurements(c: &mut Criterion) {\n\n let id = \"Translate 50 measurements\";\n\n let message = flat_message(50);\n\n sanity_check(&message);\n\n\n\n c.bench_function(id, |b| b.iter(|| json::from_thin_edge_json(&message)));\n\n}\n\n\n", "file_path": "crates/core/c8y_translator/benches/thin_edge_json.rs", "rank": 19, "score": 149555.3060504162 }, { "content": "fn translate_2_measurements(c: &mut Criterion) {\n\n let id = \"Translate 2 measurements\";\n\n let message = r#\"{\n\n \"temperature\": 12.34,\n\n \"pressure\": 56.78\n\n }\"#;\n\n sanity_check(message);\n\n\n\n c.bench_function(id, |b| b.iter(|| json::from_thin_edge_json(message)));\n\n}\n\n\n", "file_path": "crates/core/c8y_translator/benches/thin_edge_json.rs", "rank": 20, "score": 149555.3060504162 }, { "content": "fn translate_ref_measurement(c: &mut Criterion) {\n\n let id = \"Translate reference measurement\";\n\n sanity_check_translate_reference_thin_edge_json()\n\n .expect(\"Expect a valid thin-edge-json message\");\n\n\n\n c.bench_function(id, |b| {\n\n b.iter(|| json::from_thin_edge_json(REFERENCE_THIN_EDGE_JSON))\n\n });\n\n}\n\n\n", "file_path": "crates/core/c8y_translator/benches/thin_edge_json.rs", "rank": 21, "score": 147914.86856436706 }, { "content": "fn main() {\n\n // Emulate plugin's API.\n\n let apt = PluginCli::parse();\n\n\n\n match apt.operation {\n\n PluginOp::List\n\n | PluginOp::Prepare\n\n | PluginOp::Finalize\n\n | PluginOp::UpdateList\n\n | PluginOp::Install { .. }\n\n | PluginOp::Remove { .. 
} => process_call_with_file(),\n\n };\n\n}\n\n\n", "file_path": "plugins/tedge_dummy_plugin/src/main.rs", "rank": 22, "score": 147104.9128778435 }, { "content": "fn main() {\n\n // On usage error, the process exits with a status code of 1\n\n\n\n let apt = match AptCli::try_parse() {\n\n Ok(aptcli) => aptcli,\n\n Err(err) => {\n\n eprintln!(\"ERROR: {}\", err);\n\n AptCli::command()\n\n .print_help()\n\n .expect(\"Failed to print usage help\");\n\n // re-write the clap exit_status from 2 to 1, if parse fails\n\n std::process::exit(1)\n\n }\n\n };\n\n\n\n match run(apt.operation) {\n\n Ok(status) if status.success() => {\n\n std::process::exit(0);\n\n }\n\n\n", "file_path": "plugins/tedge_apt_plugin/src/main.rs", "rank": 23, "score": 147104.9128778435 }, { "content": "fn main() {\n\n // On usage error, the process exits with a status code of 1\n\n let apama = match ApamaCli::try_parse() {\n\n Ok(apamacli) => apamacli,\n\n Err(err) => {\n\n eprintln!(\"ERROR: {}\", err);\n\n ApamaCli::command()\n\n .print_help()\n\n .expect(\"Failed to print command usage help message\");\n\n // re-write the clap exit_status from 2 to 1, if parse fails\n\n std::process::exit(1)\n\n }\n\n };\n\n\n\n match run(apama.operation) {\n\n Ok(()) => {\n\n std::process::exit(0);\n\n }\n\n\n\n Err(err) => {\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 24, "score": 147104.9128778435 }, { "content": "fn translate_17x3_multi_measurements(c: &mut Criterion) {\n\n let id = \"Translate 17x3 multi-measurements\";\n\n let message = group_message(17, 3);\n\n sanity_check(&message);\n\n\n\n c.bench_function(id, |b| b.iter(|| json::from_thin_edge_json(&message)));\n\n}\n\n\n", "file_path": "crates/core/c8y_translator/benches/thin_edge_json.rs", "rank": 25, "score": 146334.1363607345 }, { "content": "fn get_installer(\n\n module: String,\n\n version: Option<String>,\n\n file_path: Option<String>,\n\n) -> Result<(String, Option<PackageMetadata>), InternalError> {\n\n match (&version, &file_path) {\n\n (None, None) => Ok((module, None)),\n\n\n\n (Some(version), None) => Ok((format!(\"{}={}\", module, version), None)),\n\n\n\n (None, Some(file_path)) => {\n\n let mut package = PackageMetadata::try_new(file_path)?;\n\n package.validate_package(&[&format!(\"Package: {}\", &module), \"Debian package\"])?;\n\n Ok((format!(\"{}\", package.file_path().display()), Some(package)))\n\n }\n\n\n\n (Some(version), Some(file_path)) => {\n\n let mut package = PackageMetadata::try_new(file_path)?;\n\n package.validate_package(&[\n\n &format!(\"Version: {}\", &version),\n\n &format!(\"Package: {}\", &module),\n\n \"Debian package\",\n\n ])?;\n\n\n\n Ok((format!(\"{}\", package.file_path().display()), Some(package)))\n\n }\n\n }\n\n}\n\n\n", "file_path": "plugins/tedge_apt_plugin/src/main.rs", "rank": 26, "score": 145365.85579156532 }, { "content": "fn initialize_tedge(config_dir: &Path) -> anyhow::Result<()> {\n\n create_directory_with_user_group(config_dir, \"tedge\", \"tedge\", 0o775)?;\n\n create_directory_with_user_group(\"/var/log/tedge\", \"tedge\", \"tedge\", 0o775)?;\n\n create_directory_with_user_group(\n\n format!(\"{}/mosquitto-conf\", config_dir.display()),\n\n \"tedge\",\n\n \"tedge\",\n\n 0o775,\n\n )?;\n\n create_directory_with_user_group(\n\n format!(\"{}/operations\", config_dir.display()),\n\n \"tedge\",\n\n \"tedge\",\n\n 0o775,\n\n )?;\n\n create_directory_with_user_group(\n\n format!(\"{}/plugins\", config_dir.display()),\n\n \"tedge\",\n\n \"tedge\",\n\n 0o775,\n\n )?;\n\n 
create_directory_with_user_group(\n\n format!(\"{}/device-certs\", config_dir.display()),\n\n \"mosquitto\",\n\n \"mosquitto\",\n\n 0o775,\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "crates/core/tedge/src/main.rs", "rank": 27, "score": 144802.1101857343 }, { "content": "pub fn get_operation(path: PathBuf) -> Result<Operation, OperationsError> {\n\n let mut details = match fs::read(&path) {\n\n Ok(bytes) => toml::from_slice::<Operation>(bytes.as_slice())\n\n .map_err(|e| OperationsError::TomlError(path.to_path_buf(), e))?,\n\n\n\n Err(err) => return Err(OperationsError::FromIo(err)),\n\n };\n\n\n\n details.name = path\n\n .file_name()\n\n .and_then(|filename| filename.to_str())\n\n .ok_or_else(|| OperationsError::InvalidOperationName(path.to_owned()))?\n\n .to_owned();\n\n\n\n Ok(details)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io::Write;\n", "file_path": "crates/core/c8y_smartrest/src/operations.rs", "rank": 28, "score": 144026.92518539788 }, { "content": "fn process_call_with_file() {\n\n // When plugins are called by the agent they are forced to use `/tmp` as cwd.\n\n // If called not by the agent they will use users cwd, unless specifically told not to\n\n // and therefore `.tedge_dummy_plugin` directory should exist in that cwd.\n\n let tedge_dummy_path = std::env::current_dir()\n\n .unwrap()\n\n .join(\".tedge_dummy_plugin/\");\n\n\n\n // List all the files from `.tedge_dummy_plugin` and use them as source of output.\n\n // The file should be name as per following scheme:\n\n // <dummy_name>.<desired_exit_code>\n\n // The file contents should be exactly as expected stdout response.\n\n // The process will terminate with code 0 if there is no files available to parse.\n\n let mut list = match std::fs::read_dir(tedge_dummy_path) {\n\n Ok(read_dir) => read_dir,\n\n Err(_) => std::process::exit(0),\n\n };\n\n\n\n let data_path = if let Some(Ok(dir_entry)) = list.next() {\n\n dir_entry.path()\n", "file_path": "plugins/tedge_dummy_plugin/src/main.rs", "rank": 29, "score": 143685.22885894316 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n translate_ref_measurement(c);\n\n translate_2_measurements(c);\n\n translate_50_measurements(c);\n\n translate_17x3_multi_measurements(c);\n\n}\n\n\n\nconst REFERENCE_THIN_EDGE_JSON: &str = r#\"{\n\n \"time\": \"2021-06-22T17:03:14.123456789+05:00\",\n\n \"temperature\": 25.01,\n\n \"location\": {\n\n \"latitude\": 32.54,\n\n \"longitude\": -117.67,\n\n \"altitude\": 98.6\n\n },\n\n \"pressure\": 98.01\n\n }\"#;\n\n\n", "file_path": "crates/core/c8y_translator/benches/thin_edge_json.rs", "rank": 30, "score": 143318.72190127114 }, { "content": "fn filter_logs_on_type(\n\n smartrest_obj: &SmartRestLogRequest,\n\n plugin_config: &LogPluginConfig,\n\n) -> Result<Vec<PathBuf>, LogRetrievalError> {\n\n let mut files_to_send = Vec::new();\n\n for files in &plugin_config.files {\n\n let maybe_file_path = files.path.as_str(); // because it can be a glob pattern\n\n let file_type = files.config_type.as_str();\n\n\n\n if !file_type.eq(&smartrest_obj.log_type) {\n\n continue;\n\n } else {\n\n for entry in glob(maybe_file_path)? 
{\n\n let file_path = entry?;\n\n files_to_send.push(file_path)\n\n }\n\n }\n\n }\n\n if files_to_send.is_empty() {\n\n Err(LogRetrievalError::NoLogsAvailableForType {\n\n log_type: smartrest_obj.log_type.to_string(),\n\n })\n\n } else {\n\n Ok(files_to_send)\n\n }\n\n}\n\n\n", "file_path": "plugins/c8y_log_plugin/src/logfile_request.rs", "rank": 31, "score": 142059.99883046985 }, { "content": "fn read_log_content(\n\n logfile: &Path,\n\n mut line_counter: usize,\n\n max_lines: usize,\n\n filter_text: &Option<String>,\n\n) -> Result<(usize, String), LogRetrievalError> {\n\n if line_counter >= max_lines {\n\n Err(LogRetrievalError::MaxLines)\n\n } else {\n\n let mut file_content_as_vec = VecDeque::new();\n\n let file = std::fs::File::open(&logfile)?;\n\n let file_name = format!(\n\n \"filename: {}\\n\",\n\n logfile.file_name().unwrap().to_str().unwrap() // never fails because we check file exists\n\n );\n\n let reader = EasyReader::new(file);\n\n match reader {\n\n Ok(mut reader) => {\n\n reader.eof();\n\n while line_counter < max_lines {\n", "file_path": "plugins/c8y_log_plugin/src/logfile_request.rs", "rank": 32, "score": 142059.99883046985 }, { "content": "pub fn persist_tempfile(file: NamedTempFile, path_to: impl AsRef<Path>) -> Result<(), PathsError> {\n\n let path_to = path_to.as_ref();\n\n let _ = file\n\n .persist(path_to)\n\n .map_err(|error| PathsError::FileCreationFailed(error, path_to.into()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/paths.rs", "rank": 33, "score": 141540.14959454793 }, { "content": "fn create_certificate(\n\n path: std::path::PathBuf,\n\n device_id: &str,\n\n) -> Result<(), certificate::CertificateError> {\n\n let keypair = certificate::KeyCertPair::new_selfsigned_certificate(\n\n &certificate::NewCertificateConfig::default(),\n\n device_id,\n\n )?;\n\n let pem = keypair.certificate_pem_string()?;\n\n let mut file = std::fs::File::create(path)?;\n\n file.write_all(pem.as_bytes())?;\n\n Ok(())\n\n}\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 34, "score": 140590.17817817617 }, { "content": "/// for the log plugin to work the following directories and files are needed:\n\n///\n\n/// Directories:\n\n/// - LOGS_DIR/tedge/agent\n\n/// - CONFIG_DIR/operations/c8y\n\n/// - CONFIG_DIR/c8y\n\n///\n\n/// Files:\n\n/// - CONFIG_DIR/operations/c8y/c8y_LogfileRequest\n\n/// - CONFIG_DIR/c8y/c8y-log-plugin.toml\n\nfn create_init_logs_directories_and_files(\n\n config_dir: &Path,\n\n logs_dir: &Path,\n\n) -> Result<(), anyhow::Error> {\n\n // creating logs_dir\n\n create_directory_with_user_group(\n\n format!(\"{}/tedge\", logs_dir.display()),\n\n \"tedge\",\n\n \"tedge\",\n\n 0o755,\n\n )?;\n\n create_directory_with_user_group(\n\n format!(\"{}/tedge/agent\", logs_dir.display()),\n\n \"tedge\",\n\n \"tedge\",\n\n 0o755,\n\n )?;\n\n // creating /operations/c8y directories\n\n create_directory_with_user_group(\n\n format!(\"{}/operations\", config_dir.display()),\n", "file_path": "plugins/c8y_log_plugin/src/main.rs", "rank": 35, "score": 140492.67690364405 }, { "content": "pub fn deserialize_module_info(\n\n module_type: String,\n\n input: impl std::io::Read,\n\n) -> Result<Vec<SoftwareModule>, SoftwareError> {\n\n let mut records = ReaderBuilder::new()\n\n .has_headers(false)\n\n .delimiter(b'\\t')\n\n .flexible(true)\n\n .from_reader(input);\n\n let mut software_list = Vec::new();\n\n for module in records.deserialize() {\n\n let minfo: ModuleInfo = module?;\n\n software_list.push(SoftwareModule {\n\n 
name: minfo.name,\n\n version: minfo.version,\n\n module_type: Some(module_type.clone()),\n\n file_path: None,\n\n url: None,\n\n });\n\n }\n\n Ok(software_list)\n\n}\n", "file_path": "crates/core/plugin_sm/src/plugin.rs", "rank": 36, "score": 138206.95675937878 }, { "content": "/// read any log file comming from `smartrest_obj.log_type`\n\npub fn new_read_logs(\n\n smartrest_obj: &SmartRestLogRequest,\n\n plugin_config: &LogPluginConfig,\n\n) -> Result<String, anyhow::Error> {\n\n let mut output = String::new();\n\n // first filter logs on type\n\n let mut logfiles_to_read = filter_logs_on_type(smartrest_obj, plugin_config)?;\n\n logfiles_to_read = filter_logs_path_on_metadata(smartrest_obj, logfiles_to_read)?;\n\n\n\n let mut line_counter = 0usize;\n\n for logfile in logfiles_to_read {\n\n match read_log_content(\n\n logfile.as_path(),\n\n line_counter,\n\n smartrest_obj.lines,\n\n &smartrest_obj.needle,\n\n ) {\n\n Ok((lines, file_content)) => {\n\n line_counter = lines;\n\n output.push_str(&file_content);\n", "file_path": "plugins/c8y_log_plugin/src/logfile_request.rs", "rank": 37, "score": 136634.30443432918 }, { "content": "fn create_directories(config_dir: &Path) -> Result<(), anyhow::Error> {\n\n create_directory_with_user_group(\n\n format!(\"{}/operations/c8y\", config_dir.display()),\n\n \"tedge\",\n\n \"tedge\",\n\n 0o775,\n\n )?;\n\n create_file_with_user_group(\n\n format!(\"{}/operations/c8y/c8y_SoftwareUpdate\", config_dir.display()),\n\n \"tedge\",\n\n \"tedge\",\n\n 0o644,\n\n None,\n\n )?;\n\n create_file_with_user_group(\n\n format!(\"{}/operations/c8y/c8y_Restart\", config_dir.display()),\n\n \"tedge\",\n\n \"tedge\",\n\n 0o644,\n\n None,\n", "file_path": "crates/core/tedge_mapper/src/c8y/mapper.rs", "rank": 38, "score": 136113.4720222007 }, { "content": "fn spawn_broker(port: u16) {\n\n let config = get_rumqttd_config(port);\n\n let mut broker = Broker::new(config);\n\n let mut tx = broker.link(\"localclient\").unwrap();\n\n\n\n std::thread::spawn(move || {\n\n eprintln!(\"MQTT-TEST INFO: start test MQTT broker (port = {})\", port);\n\n if let Err(err) = broker.start() {\n\n eprintln!(\n\n \"MQTT-TEST ERROR: fail to start the test MQTT broker: {:?}\",\n\n err\n\n );\n\n }\n\n });\n\n\n\n std::thread::spawn(move || {\n\n let mut rx = tx.connect(200).unwrap();\n\n tx.subscribe(\"#\").unwrap();\n\n\n\n loop {\n", "file_path": "crates/tests/mqtt_tests/src/test_mqtt_server.rs", "rank": 39, "score": 135994.94692431373 }, { "content": "fn create_new_file(path: impl AsRef<Path>, user: &str, group: &str) -> Result<File, CertError> {\n\n let file = OpenOptions::new()\n\n .write(true)\n\n .create_new(true)\n\n .open(path.as_ref())?;\n\n\n\n // Ignore errors - This was the behavior with the now deprecated user manager.\n\n // - When `tedge cert create` is not run as root, a certificate is created but owned by the user running the command.\n\n // - A better approach could be to remove this `chown` and run the command as mosquitto.\n\n let _ = tedge_utils::file::change_user_and_group(path.as_ref(), user, group);\n\n\n\n Ok(file)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use assert_matches::assert_matches;\n\n use std::fs;\n\n use tempfile::*;\n", "file_path": "crates/core/tedge/src/cli/certificate/create.rs", "rank": 40, "score": 135213.09907433926 }, { "content": "fn delete_project() -> Result<(), InternalError> {\n\n println!(\"Removing existing project at {}\", TEDGE_APAMA_PROJECT_DIR);\n\n let result = fs::remove_dir_all(TEDGE_APAMA_PROJECT_DIR);\n\n if let 
Err(err) = result {\n\n if err.kind() != ErrorKind::NotFound {\n\n return Err(InternalError::from(err));\n\n }\n\n }\n\n println!(\"Removal of existing project successful\");\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 41, "score": 133464.22876640793 }, { "content": "fn init_logger() {\n\n let logger = env_logger::Logger::from_default_env();\n\n let task_id = 1;\n\n\n\n async_log::Logger::wrap(logger, move || task_id)\n\n .start(log::LevelFilter::Trace)\n\n .unwrap();\n\n}\n", "file_path": "crates/tests/sawtooth_publisher/src/main.rs", "rank": 42, "score": 132876.55386154173 }, { "content": "fn change_user(file: &Path, user: &str) -> Result<(), FileError> {\n\n let ud = get_user_by_name(user)\n\n .map(|u| u.uid())\n\n .ok_or_else(|| FileError::UserNotFound { user: user.into() })?;\n\n\n\n let uid = get_metadata(Path::new(file))?.st_uid();\n\n\n\n // if user is same as existing, then do not change\n\n if ud != uid {\n\n chown(file, Some(Uid::from_raw(ud)), None)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/file.rs", "rank": 43, "score": 132681.53326909282 }, { "content": "fn change_mode(file: &Path, mode: u32) -> Result<(), FileError> {\n\n let mut perm = get_metadata(Path::new(file))?.permissions();\n\n perm.set_mode(mode);\n\n\n\n fs::set_permissions(file, perm).map_err(|e| FileError::MetaDataError {\n\n name: file.display().to_string(),\n\n from: e,\n\n })?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/file.rs", "rank": 44, "score": 132681.53326909282 }, { "content": "fn change_group(file: &Path, group: &str) -> Result<(), FileError> {\n\n let gd = get_group_by_name(group)\n\n .map(|g| g.gid())\n\n .ok_or_else(|| FileError::GroupNotFound {\n\n group: group.into(),\n\n })?;\n\n\n\n let gid = get_metadata(Path::new(file))?.st_gid();\n\n\n\n // if group is same as existing, then do not change\n\n if gd != gid {\n\n chown(file, None, Some(Gid::from_raw(gd)))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/file.rs", "rank": 45, "score": 132681.53326909282 }, { "content": "pub fn create_full_tedge_dir_structure() {\n\n let ttd = TempTedgeDir::new();\n\n ttd.file(\"tedge.toml\");\n\n ttd.dir(\".agent\").file(\"current-operation\");\n\n ttd.dir(\"c8y\")\n\n .file(\"c8y-log-plugin.toml\")\n\n .with_toml_content(toml::toml! 
{\n\n files = [\n\n {type = \"software-management\", path = \"/var/log/tedge/agent/software-*\" }\n\n ]\n\n });\n\n ttd.dir(\"contrib\").dir(\"collectd\").file(\"collectd.conf\");\n\n ttd.dir(\"device\").file(\"inventory.json\");\n\n ttd.dir(\"device-certs\");\n\n ttd.dir(\"mosquitto-conf\").file(\"c8y-bridge.conf\");\n\n ttd.dir(\"mosquitto-conf\").file(\"tedge-mosquitto.conf\");\n\n ttd.dir(\"operations\")\n\n .dir(\"c8y\")\n\n .file(\"c8y_LogfileRequest\")\n\n .with_raw_content(\"\");\n", "file_path": "crates/tests/tedge_test_utils/src/fs.rs", "rank": 46, "score": 132219.14101297437 }, { "content": "fn get_default_plugin(\n\n config_location: &TEdgeConfigLocation,\n\n) -> Result<Option<SoftwareType>, AgentError> {\n\n let config_repository = tedge_config::TEdgeConfigRepository::new(config_location.clone());\n\n let tedge_config = config_repository.load()?;\n\n\n\n Ok(tedge_config.query_string_optional(SoftwarePluginDefaultSetting)?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use std::path::PathBuf;\n\n\n\n use assert_json_diff::assert_json_include;\n\n use serde_json::{json, Value};\n\n\n\n use super::*;\n\n\n\n use tedge_test_utils::fs::TempTedgeDir;\n", "file_path": "crates/core/tedge_agent/src/agent.rs", "rank": 47, "score": 132200.08325973252 }, { "content": "fn restart_apama_service() -> Result<(), InternalError> {\n\n println!(\"Restarting apama to load the new project\");\n\n run_cmd(\"service\", \"apama restart\")?;\n\n println!(\"Restart of apama service successful\");\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 48, "score": 131941.52993449743 }, { "content": "fn stop_apama_service() -> Result<(), InternalError> {\n\n println!(\"Stopping apama service\");\n\n run_cmd(\"service\", \"apama stop\")?;\n\n println!(\"Stopping apama service successful\");\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 49, "score": 131941.52993449743 }, { "content": "pub fn test_mqtt_broker() -> &'static MqttProcessHandler {\n\n Lazy::force(&SERVER)\n\n}\n\n\n\npub struct MqttProcessHandler {\n\n pub port: u16,\n\n}\n\n\n\nimpl MqttProcessHandler {\n\n pub fn new(port: u16) -> MqttProcessHandler {\n\n spawn_broker(port);\n\n MqttProcessHandler { port }\n\n }\n\n\n\n pub async fn publish(&self, topic: &str, payload: &str) -> Result<(), anyhow::Error> {\n\n crate::test_mqtt_client::publish(self.port, topic, payload, QoS::AtLeastOnce, false).await\n\n }\n\n\n\n pub async fn publish_with_opts(\n\n &self,\n", "file_path": "crates/tests/mqtt_tests/src/test_mqtt_server.rs", "rank": 50, "score": 131730.62368111787 }, { "content": "fn run(operation: PluginOp) -> Result<(), InternalError> {\n\n let tedge_env_exe_path = Path::new(APAMA_ENV_EXE);\n\n let tedge_apama_project_path: &Path = Path::new(TEDGE_APAMA_PROJECT_DIR);\n\n let tmp_apama_project_path: &Path = Path::new(TMP_APAMA_PROJECT_DIR);\n\n\n\n if !tedge_env_exe_path.exists() {\n\n return Err(InternalError::ApamaNotInstalled);\n\n }\n\n\n\n match operation {\n\n PluginOp::List => {\n\n if tedge_apama_project_path.exists() {\n\n // Print the project name\n\n println!(\"{}::project\\t\", get_project_name(tedge_apama_project_path));\n\n\n\n // Print the installed monitors\n\n for monitor in get_installed_monitors()? 
{\n\n println!(\"{}::mon\\t\", monitor)\n\n }\n\n }\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 51, "score": 131315.94699415244 }, { "content": "fn get_rumqttd_config(port: u16) -> Config {\n\n let router_config = librumqttd::rumqttlog::Config {\n\n id: 0,\n\n dir: \"/tmp/rumqttd\".into(),\n\n max_segment_size: 10240,\n\n max_segment_count: 10,\n\n max_connections: 10,\n\n };\n\n\n\n let connections_settings = ConnectionSettings {\n\n connection_timeout_ms: 1,\n\n max_client_id_len: 256,\n\n throttle_delay_ms: 0,\n\n max_payload_size: 268435455,\n\n max_inflight_count: 200,\n\n max_inflight_size: 1024,\n\n login_credentials: None,\n\n };\n\n\n\n let server_config = ServerSettings {\n", "file_path": "crates/tests/mqtt_tests/src/test_mqtt_server.rs", "rank": 52, "score": 130705.18108139388 }, { "content": "fn wait_for_apama_correlator_ready() -> Result<(), InternalError> {\n\n println!(\"Waiting for apama correator to be ready for upto 10 seconds\");\n\n run_cmd(APAMA_ENV_EXE, \"engine_management --waitFor 10\")?;\n\n println!(\"Apama correator is ready\");\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 53, "score": 130466.33835609765 }, { "content": "fn remove_old_logs(\n\n log_tracker: &mut BinaryHeap<Reverse<String>>,\n\n dir_path: &Path,\n\n n: usize,\n\n) -> Result<(), OperationLogsError> {\n\n while log_tracker.len() > n {\n\n if let Some(rname) = log_tracker.pop() {\n\n let name = rname.0;\n\n let path = dir_path.join(name.clone());\n\n if let Err(err) = std::fs::remove_file(&path) {\n\n log::warn!(\"Fail to remove out-dated log file {} : {}\", name, err);\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "crates/core/plugin_sm/src/operation_logs.rs", "rank": 54, "score": 130383.08330846268 }, { "content": "#[test]\n\nfn test_parse_unsupported_keys() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\nhey=\"tedge\"\n\n[c8y]\n\nhello=\"tedge\"\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let result = TEdgeConfigRepository::new(config_location).load();\n\n\n\n assert_matches!(\n\n result,\n\n Err(TEdgeConfigError::FromTOMLParse(_)),\n\n \"Expected the parsing to fail with TOMLParseError\"\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 55, "score": 129683.94609480602 }, { "content": "#[test]\n\nfn test_store_config_with_all_values() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[device]\n\nkey_path = \"/path/to/key\"\n\ncert_path = \"/path/to/cert\"\n\n\n\n[c8y]\n\nurl = \"your-tenant.cumulocity.com\"\n\nroot_cert_path = \"/path/to/c8y/root/cert\"\n\n\n\n[az]\n\nurl = \"MyAzure.azure-devices.net\"\n\nroot_cert_path = \"/path/to/azure/root/cert\"\n\nmapper_timestamp = false\n\n\n\n[mqtt]\n\nport = 1883\n\nbind_address = \"0.0.0.0\"\n\n\"#;\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 56, "score": 129683.94609480602 }, { "content": "#[test]\n\nfn test_invalid_mqtt_port() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[mqtt]\n\nport = \"1883\"\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let result = TEdgeConfigRepository::new(config_location).load();\n\n\n\n let expected_err =\n\n \"invalid type: string \\\"1883\\\", expected u16 for key `mqtt.port` at line 3 column 8\";\n\n\n\n match result {\n\n Err(TEdgeConfigError::FromTOMLParse(err)) => 
assert_eq!(err.to_string(), expected_err),\n\n\n\n _ => assert!(false, \"Expected the parsing to fail with TOMLParseError\"),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 57, "score": 129683.94609480602 }, { "content": "#[test]\n\nfn test_crud_config_value() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[device]\n\nkey_path = \"/path/to/key\"\n\ncert_path = \"/path/to/cert\"\n\n\n\n[c8y]\n\nurl = \"your-tenant.cumulocity.com\"\n\nroot_cert_path = \"/path/to/c8y/root/cert\"\n\n\n\n[az]\n\nurl = \"MyAzure.azure-devices.net\"\n\nroot_cert_path = \"/path/to/azure/root/cert\"\n\nmapper_timestamp = false\n\n\n\n[mqtt]\n\nport = 1024\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 58, "score": 129683.94609480602 }, { "content": "#[test]\n\nfn test_parse_config_with_all_values() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[device]\n\nkey_path = \"/path/to/key\"\n\ncert_path = \"/path/to/cert\"\n\n\n\n[c8y]\n\nurl = \"your-tenant.cumulocity.com\"\n\nroot_cert_path = \"/path/to/c8y/root/cert\"\n\nconnect = \"true\"\n\n\n\n[az]\n\nurl = \"MyAzure.azure-devices.net\"\n\nroot_cert_path = \"/path/to/azure/root/cert\"\n\nconnect = \"false\"\n\nmapper_timestamp = true\n\n\n\n[mqtt]\n\nport = 1234\n\nexternal_port = 2345\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 59, "score": 129683.94609480602 }, { "content": "#[test]\n\nfn test_parse_config_with_only_az_configuration() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[az]\n\nurl = \"MyAzure.azure-devices.net\"\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n\n\n let config_defaults = TEdgeConfigDefaults {\n\n default_device_cert_path: FilePath::from(\"/etc/ssl/certs/tedge-certificate.pem\"),\n\n default_device_key_path: FilePath::from(\"/etc/ssl/certs/tedge-private-key.pem\"),\n\n ..dummy_tedge_config_defaults()\n\n };\n\n\n\n let config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, config_defaults).load()?;\n\n\n\n assert!(config.query_optional(DeviceIdSetting).is_err());\n\n assert_eq!(\n\n config.query(DeviceCertPathSetting)?,\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 60, "score": 128523.3758248322 }, { "content": "#[test]\n\nfn test_parse_config_no_config_file() -> Result<(), TEdgeConfigError> {\n\n let config_location = TEdgeConfigLocation::from_custom_root(\"/non/existent/path\");\n\n let config = TEdgeConfigRepository::new(config_location).load()?;\n\n\n\n assert!(config.query_optional(DeviceIdSetting).is_err());\n\n assert_eq!(\n\n config.query(DeviceCertPathSetting)?,\n\n FilePath::from(\"/non/existent/path/device-certs/tedge-certificate.pem\")\n\n );\n\n assert_eq!(\n\n config.query(DeviceKeyPathSetting)?,\n\n FilePath::from(\"/non/existent/path/device-certs/tedge-private-key.pem\"),\n\n );\n\n\n\n assert!(config.query_optional(C8yUrlSetting)?.is_none());\n\n assert_eq!(\n\n config.query(C8yRootCertPathSetting)?,\n\n FilePath::from(\"/etc/ssl/certs\")\n\n );\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 61, "score": 128523.3758248322 }, { "content": "#[test]\n\nfn test_parse_config_empty_file() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = \"\";\n\n\n\n let (_tempdir, config_location) = 
create_temp_tedge_config(toml_conf)?;\n\n\n\n let config_defaults = TEdgeConfigDefaults {\n\n default_device_cert_path: FilePath::from(\"/etc/ssl/certs/tedge-certificate.pem\"),\n\n default_device_key_path: FilePath::from(\"/etc/ssl/certs/tedge-private-key.pem\"),\n\n default_c8y_root_cert_path: FilePath::from(\"/etc/ssl/certs\"),\n\n default_azure_root_cert_path: FilePath::from(\"/etc/ssl/certs\"),\n\n ..dummy_tedge_config_defaults()\n\n };\n\n\n\n let config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, config_defaults).load()?;\n\n\n\n assert!(config.query_optional(DeviceIdSetting).is_err());\n\n\n\n assert_eq!(\n\n config.query(DeviceCertPathSetting)?,\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 62, "score": 128523.3758248322 }, { "content": "#[test]\n\nfn test_parse_invalid_toml_file() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n <abcde>\n\n \"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let result = TEdgeConfigRepository::new(config_location).load();\n\n\n\n assert_matches!(\n\n result,\n\n Err(TEdgeConfigError::FromTOMLParse(_)),\n\n \"Expected the parsing to fail with TOMLParseError\"\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 63, "score": 128523.3758248322 }, { "content": "#[test]\n\nfn test_parse_config_with_only_c8y_configuration() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[c8y]\n\nurl = \"your-tenant.cumulocity.com\"\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n\n\n let config_defaults = TEdgeConfigDefaults {\n\n default_device_cert_path: FilePath::from(\"/etc/ssl/certs/tedge-certificate.pem\"),\n\n default_device_key_path: FilePath::from(\"/etc/ssl/certs/tedge-private-key.pem\"),\n\n ..dummy_tedge_config_defaults()\n\n };\n\n\n\n let config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, config_defaults).load()?;\n\n\n\n assert!(config.query_optional(DeviceIdSetting).is_err());\n\n assert_eq!(\n\n config.query(DeviceCertPathSetting)?,\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 64, "score": 128523.3758248322 }, { "content": "#[test]\n\nfn test_parse_config_with_only_device_configuration() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[device]\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let config_defaults = TEdgeConfigDefaults {\n\n default_device_cert_path: FilePath::from(\"/etc/ssl/certs/tedge-certificate.pem\"),\n\n default_device_key_path: FilePath::from(\"/etc/ssl/certs/tedge-private-key.pem\"),\n\n default_c8y_root_cert_path: FilePath::from(\"/etc/ssl/certs\"),\n\n default_azure_root_cert_path: FilePath::from(\"/etc/ssl/certs\"),\n\n ..dummy_tedge_config_defaults()\n\n };\n\n\n\n let config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, config_defaults).load()?;\n\n\n\n assert!(config.query_optional(DeviceIdSetting).is_err());\n\n assert_eq!(\n\n config.query(DeviceCertPathSetting)?,\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 65, "score": 128523.3758248322 }, { "content": "#[test]\n\nfn test_parse_config_with_only_mqtt_configuration() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[mqtt]\n\nport = 2222\n\nbind_address = \"1.2.3.4\"\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n\n\n let 
config_defaults = TEdgeConfigDefaults {\n\n default_device_cert_path: FilePath::from(\"/etc/ssl/certs/tedge-certificate.pem\"),\n\n default_device_key_path: FilePath::from(\"/etc/ssl/certs/tedge-private-key.pem\"),\n\n ..dummy_tedge_config_defaults()\n\n };\n\n\n\n let config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, config_defaults).load()?;\n\n\n\n assert!(config.query_optional(DeviceIdSetting).is_err());\n\n assert_eq!(\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 66, "score": 128523.3758248322 }, { "content": "fn dummy_tedge_config_defaults() -> TEdgeConfigDefaults {\n\n TEdgeConfigDefaults {\n\n default_device_cert_path: FilePath::from(\"/dev/null\"),\n\n default_device_key_path: FilePath::from(\"/dev/null\"),\n\n default_c8y_root_cert_path: FilePath::from(\"/dev/null\"),\n\n default_azure_root_cert_path: FilePath::from(\"/dev/null\"),\n\n default_mapper_timestamp: Flag(true),\n\n default_mqtt_port: Port(1883),\n\n default_tmp_path: FilePath::from(\"/tmp\"),\n\n default_logs_path: FilePath::from(\"/var/log\"),\n\n default_run_path: FilePath::from(\"/run\"),\n\n default_device_type: String::from(\"test\"),\n\n default_mqtt_bind_address: IpAddress(IpAddr::V4(Ipv4Addr::LOCALHOST)),\n\n default_c8y_smartrest_templates: TemplatesSet::default(),\n\n }\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 67, "score": 128105.42789217382 }, { "content": "fn get_bridge_config_file_path(\n\n config_location: &TEdgeConfigLocation,\n\n bridge_config: &BridgeConfig,\n\n) -> PathBuf {\n\n config_location\n\n .tedge_config_root_path\n\n .join(TEDGE_BRIDGE_CONF_DIR_PATH)\n\n .join(&bridge_config.config_file)\n\n}\n\n\n", "file_path": "crates/core/tedge/src/cli/connect/command.rs", "rank": 68, "score": 128024.05891461275 }, { "content": "fn is_fixture(e: &DirEntry) -> bool {\n\n matches!((e.file_type().is_file(), e.path().extension()), (true, Some(ext)) if ext == \"json\")\n\n}\n", "file_path": "crates/core/thin_edge_json/tests/test_suite.rs", "rank": 69, "score": 127853.25003282295 }, { "content": "#[test]\n\nfn test_from_custom_root() {\n\n let config_location = TEdgeConfigLocation::from_custom_root(\"/opt/etc/tedge\");\n\n assert_eq!(\n\n config_location.tedge_config_root_path,\n\n PathBuf::from(\"/opt/etc/tedge\")\n\n );\n\n assert_eq!(\n\n config_location.tedge_config_file_path,\n\n PathBuf::from(\"/opt/etc/tedge/tedge.toml\")\n\n );\n\n}\n\n\n", "file_path": "crates/common/tedge_config/src/tedge_config_location.rs", "rank": 70, "score": 127496.93128445545 }, { "content": "#[test]\n\nfn test_any_device_id_provided_by_the_configuration_is_ignored() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[device]\n\nid = \"ABCD1234\" # ignored for backward compatibility\n\ncert_path = \"/path/to/cert\"\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, dummy_tedge_config_defaults())\n\n .load()?;\n\n\n\n assert!(config.query_optional(DeviceIdSetting).is_err());\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 71, "score": 127393.54613828585 }, { "content": "#[test]\n\nfn test_device_id_is_extracted_from_device_certificate() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[device]\n\ncert_path = \"/path/to/cert\"\n\n\"#;\n\n let device_id = \"device-serial-number\";\n\n\n\n let (tempdir, config_location) = 
create_temp_tedge_config(toml_conf)?;\n\n let mut config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, dummy_tedge_config_defaults())\n\n .load()?;\n\n\n\n let cert_path = tempdir.path().join(\"certificate.pem\");\n\n create_certificate(cert_path.clone(), device_id).expect(\"fail to create a certificate\");\n\n config.update(DeviceCertPathSetting, cert_path.into())?;\n\n\n\n assert_eq!(config.query(DeviceIdSetting)?, device_id);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 72, "score": 127393.54613828585 }, { "content": "fn run(operation: PluginOp) -> Result<ExitStatus, InternalError> {\n\n let status = match operation {\n\n PluginOp::List {} => {\n\n let apt = Command::new(\"apt\")\n\n .args(vec![\"--manual-installed\", \"list\"])\n\n .stdout(Stdio::piped()) // To pipe apt.stdout into awk.stdin\n\n .spawn()\n\n .map_err(|err| InternalError::exec_error(\"apt\", err))?;\n\n\n\n // apt output = openssl/focal-security,now 1.1.1f-1ubuntu2.3 amd64 [installed]\n\n // awk -F '[/ ]' = $1 ^ $2 ^ $3 ^ $4\n\n // awk print = name ^ ^ version ^\n\n Command::new(\"awk\")\n\n .args(vec![\n\n \"-F\",\n\n \"[/ ]\",\n\n r#\"{if ($1 != \"Listing...\") { print $1\"\\t\"$3}}\"#,\n\n ])\n\n .stdin(apt.stdout.unwrap()) // Cannot panic: apt.stdout has been set\n\n .status()\n", "file_path": "plugins/tedge_apt_plugin/src/main.rs", "rank": 73, "score": 126790.10917916222 }, { "content": "fn get_common_mosquitto_config_file_path(\n\n config_location: &TEdgeConfigLocation,\n\n common_mosquitto_config: &CommonMosquittoConfig,\n\n) -> PathBuf {\n\n config_location\n\n .tedge_config_root_path\n\n .join(TEDGE_BRIDGE_CONF_DIR_PATH)\n\n .join(&common_mosquitto_config.config_file)\n\n}\n\n\n", "file_path": "crates/core/tedge/src/cli/connect/command.rs", "rank": 74, "score": 126385.49855330576 }, { "content": "#[test]\n\nfn it_transforms_valid_thin_edge_json() -> anyhow::Result<()> {\n\n let mut had_missing_test_fixtures = false;\n\n\n\n for fixture in fixtures(\"tests/fixtures/valid\")?.iter() {\n\n let input = std::fs::read_to_string(fixture.path())?;\n\n\n\n let output = {\n\n let mut builder = thin_edge_json::serialize::ThinEdgeJsonSerializer::new();\n\n let res = thin_edge_json::parser::parse_str(&input, &mut builder);\n\n assert!(res.is_ok());\n\n builder.into_string()?\n\n };\n\n\n\n if let Ok(expected_output) =\n\n std::fs::read_to_string(fixture.path().with_extension(\"expected_output\"))\n\n {\n\n assert_eq!(expected_output, output);\n\n } else {\n\n // we don't have a test fixture yet. 
Create one and abort.\n\n std::fs::write(fixture.path().with_extension(\"expected_output\"), output)?;\n\n had_missing_test_fixtures = true;\n\n }\n\n }\n\n\n\n assert!(!had_missing_test_fixtures, \"Test fixtures were missing.\");\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/core/thin_edge_json/tests/test_suite.rs", "rank": 75, "score": 126270.72764442024 }, { "content": "#[test]\n\nfn it_rejects_invalid_thin_edge_json() -> anyhow::Result<()> {\n\n let mut had_missing_test_fixtures = false;\n\n\n\n for fixture in fixtures(\"tests/fixtures/invalid\")?.iter() {\n\n let input = std::fs::read_to_string(fixture.path())?;\n\n println!(\"Fixture: {:?}\", fixture.path());\n\n\n\n let res: anyhow::Result<_> = {\n\n let mut builder = thin_edge_json::builder::ThinEdgeJsonBuilder::default();\n\n thin_edge_json::parser::parse_str(&input, &mut builder)\n\n .map_err(Into::into)\n\n .and_then(|_| builder.done().map_err(Into::into))\n\n };\n\n\n\n assert!(res.is_err());\n\n let err_msg = res.unwrap_err().to_string();\n\n\n\n if let Ok(expected_error) =\n\n std::fs::read_to_string(fixture.path().with_extension(\"expected_error\"))\n\n {\n", "file_path": "crates/core/thin_edge_json/tests/test_suite.rs", "rank": 76, "score": 126270.72764442024 }, { "content": "#[test]\n\nfn test_from_tedge_config_location() {\n\n let config_location = TEdgeConfigLocation::from_custom_root(\"/opt/etc/_tedge\");\n\n let defaults = TEdgeConfigDefaults::from(&config_location);\n\n\n\n assert_eq!(\n\n defaults,\n\n TEdgeConfigDefaults {\n\n default_device_cert_path: FilePath::from(\n\n \"/opt/etc/_tedge/device-certs/tedge-certificate.pem\"\n\n ),\n\n default_device_key_path: FilePath::from(\n\n \"/opt/etc/_tedge/device-certs/tedge-private-key.pem\"\n\n ),\n\n default_azure_root_cert_path: FilePath::from(\"/etc/ssl/certs\"),\n\n default_c8y_root_cert_path: FilePath::from(\"/etc/ssl/certs\"),\n\n default_mapper_timestamp: Flag(true),\n\n default_mqtt_port: Port(DEFAULT_PORT),\n\n default_tmp_path: FilePath::from(\"/tmp\"),\n\n default_logs_path: FilePath::from(\"/var/log\"),\n\n default_run_path: FilePath::from(\"/run\"),\n\n default_device_type: DEFAULT_DEVICE_TYPE.into(),\n\n default_mqtt_bind_address: IpAddress::default(),\n\n default_c8y_smartrest_templates: TemplatesSet::default(),\n\n }\n\n );\n\n}\n", "file_path": "crates/common/tedge_config/src/tedge_config_defaults.rs", "rank": 77, "score": 125837.48531991197 }, { "content": "#[test]\n\nfn test_from_default_system_location() {\n\n let config_location = TEdgeConfigLocation::default();\n\n assert_eq!(\n\n config_location.tedge_config_root_path,\n\n PathBuf::from(\"/etc/tedge\")\n\n );\n\n assert_eq!(\n\n config_location.tedge_config_file_path,\n\n PathBuf::from(\"/etc/tedge/tedge.toml\")\n\n );\n\n}\n", "file_path": "crates/common/tedge_config/src/tedge_config_location.rs", "rank": 78, "score": 125837.48531991197 }, { "content": "fn get_installed_monitors() -> Result<Vec<String>, InternalError> {\n\n // Run `engine_inspect -m -r` command to list all monitors in raw format\n\n let output = Command::new(APAMA_ENV_EXE)\n\n .arg(ENGINE_INSPECT_CMD)\n\n .arg(\"-m\")\n\n .arg(\"-r\")\n\n .stdin(Stdio::null())\n\n .output()\n\n .map_err(|err| InternalError::exec_error(ENGINE_INSPECT_CMD, err))?;\n\n let output = String::from_utf8(output.stdout)?;\n\n\n\n // The output contains monitor names and their instance counts separated by a space as follows:\n\n // ```\n\n // TedgeDemoMonitor 1\n\n // TedgeTestMonitor 1\n\n // ```\n\n let mon_files = output\n\n .lines()\n\n // The first line 
of the output could \"WARNING: JAVA_HOME not set\" which is filtered out\n\n .filter(|line| !line.starts_with(\"WARNING:\"))\n\n // The counts are filtered out too\n\n .filter_map(|line| line.split_whitespace().next())\n\n .map(|line| line.into())\n\n .collect();\n\n\n\n Ok(mon_files)\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 79, "score": 124128.13131630604 }, { "content": "fn remove_monitor(mon_name: &str) -> Result<(), InternalError> {\n\n run_cmd(\n\n APAMA_ENV_EXE,\n\n format!(\"{} {}\", ENGINE_DELETE_CMD, mon_name).as_str(),\n\n )\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 80, "score": 124128.13131630604 }, { "content": "fn remove_project(_project_name: &str) -> Result<(), InternalError> {\n\n let tedge_apama_project_path: &Path = Path::new(TEDGE_APAMA_PROJECT_DIR);\n\n\n\n if tedge_apama_project_path.exists() {\n\n stop_apama_service()?;\n\n delete_project()?;\n\n } else {\n\n println!(\"Doing nothing as there's no project installed\");\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 81, "score": 124128.13131630604 }, { "content": "type UnsetConfigValue<C> = Box<dyn Fn(&mut C) -> ConfigSettingResult<()>>;\n\n\n\nimpl std::fmt::Debug for ConfigKey {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"ConfigKey({})\", self.key)\n\n }\n\n}\n\n\n\nmacro_rules! config_key {\n\n ($setting:tt) => {\n\n ConfigKey {\n\n key: $setting::KEY,\n\n description: $setting::DESCRIPTION,\n\n get: Box::new(move |config: &TEdgeConfig| config.query_string($setting)),\n\n set: Box::new(move |config: &mut TEdgeConfig, value: String| {\n\n config.update_string($setting, value)\n\n }),\n\n unset: Box::new(move |config: &mut TEdgeConfig| config.unset($setting)),\n\n }\n\n };\n", "file_path": "crates/core/tedge/src/cli/config/config_key.rs", "rank": 82, "score": 122941.54731047907 }, { "content": "#[cfg(windows)]\n\npub fn set_permission(_file: &File, _mode: u32) -> Result<(), std::io::Error> {\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/paths.rs", "rank": 83, "score": 121815.65312752026 }, { "content": "#[test]\n\nfn read_az_keys_from_old_version_config() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[azure]\n\nurl = \"MyAzure.azure-devices.net\"\n\nroot_cert_path = \"/path/to/azure/root/cert\"\n\nmapper_timestamp = true\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let config_defaults = dummy_tedge_config_defaults();\n\n\n\n let config =\n\n TEdgeConfigRepository::new_with_defaults(config_location, config_defaults).load()?;\n\n\n\n assert_eq!(\n\n config.query(AzureUrlSetting)?.as_str(),\n\n \"MyAzure.azure-devices.net\"\n\n );\n\n assert_eq!(\n\n config.query(AzureRootCertPathSetting)?,\n\n FilePath::from(\"/path/to/azure/root/cert\")\n\n );\n\n assert_eq!(config.query(AzureMapperTimestamp)?, Flag(true));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 84, "score": 121177.56319795652 }, { "content": "#[test]\n\nfn set_az_keys_from_old_version_config() -> Result<(), TEdgeConfigError> {\n\n let toml_conf = r#\"\n\n[azure]\n\n\"#;\n\n\n\n let (_tempdir, config_location) = create_temp_tedge_config(toml_conf)?;\n\n let config_defaults = TEdgeConfigDefaults {\n\n default_azure_root_cert_path: FilePath::from(\"default_azure_root_cert_path\"),\n\n ..dummy_tedge_config_defaults()\n\n };\n\n let config_repo = 
TEdgeConfigRepository::new_with_defaults(config_location, config_defaults);\n\n let updated_azure_url = \"OtherAzure.azure-devices.net\";\n\n\n\n {\n\n let mut config = config_repo.load()?;\n\n\n\n assert!(config.query_optional(AzureUrlSetting)?.is_none());\n\n assert_eq!(\n\n config.query(AzureRootCertPathSetting)?,\n\n FilePath::from(\"default_azure_root_cert_path\")\n", "file_path": "crates/common/tedge_config/tests/test_tedge_config.rs", "rank": 85, "score": 121177.56319795652 }, { "content": "fn apama_module_from_string(module: &str) -> Result<ApamaModule, InternalError> {\n\n match module.rsplit_once(\"::\") {\n\n Some((prefix, suffix)) => match suffix {\n\n PROJECT_SUFFIX => Ok(ApamaModule::Project(prefix.into())),\n\n MON_SUFFIX => Ok(ApamaModule::MonFile(prefix.into())),\n\n unsupported_type => Err(InternalError::UnsupportedModuleType {\n\n module_type: unsupported_type.into(),\n\n }),\n\n },\n\n None => Err(InternalError::ModuleTypeNotProvided {\n\n module_name: module.into(),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 86, "score": 119456.80020383313 }, { "content": "fn run_cmd(cmd: &str, args: &str) -> Result<(), InternalError> {\n\n let args: Vec<&str> = args.split_whitespace().collect();\n\n let exit_status = Command::new(cmd)\n\n .args(args)\n\n .stdin(Stdio::null())\n\n .status()\n\n .map_err(|err| InternalError::exec_error(cmd, err))?;\n\n\n\n if exit_status.success() {\n\n Ok(())\n\n } else {\n\n Err(InternalError::ExecFailure {\n\n cmd: cmd.into(),\n\n exit_status,\n\n })\n\n }\n\n}\n\n\n", "file_path": "plugins/tedge_apama_plugin/src/main.rs", "rank": 87, "score": 119117.61930942834 }, { "content": "fn get_operations(dir: impl AsRef<Path>, cloud_name: &str) -> Result<Operations, OperationsError> {\n\n let mut operations = Operations::default();\n\n\n\n let path = dir.as_ref().join(&cloud_name);\n\n let dir_entries = fs::read_dir(&path)\n\n .map_err(|_| OperationsError::ReadDirError {\n\n dir: PathBuf::from(&path),\n\n })?\n\n .map(|entry| entry.map(|e| e.path()))\n\n .collect::<Result<Vec<PathBuf>, _>>()?\n\n .into_iter()\n\n .filter(|path| path.is_file())\n\n .collect::<Vec<PathBuf>>();\n\n\n\n for path in dir_entries {\n\n if let Some(file_name) = path.file_name().and_then(|file_name| file_name.to_str()) {\n\n if !is_valid_operation_name(file_name) {\n\n continue;\n\n }\n\n\n", "file_path": "crates/core/c8y_smartrest/src/operations.rs", "rank": 88, "score": 119056.20156609794 }, { "content": "pub fn change_user_and_group(file: &Path, user: &str, group: &str) -> Result<(), FileError> {\n\n let ud = get_user_by_name(user)\n\n .map(|u| u.uid())\n\n .ok_or_else(|| FileError::UserNotFound { user: user.into() })?;\n\n\n\n let uid = get_metadata(Path::new(file))?.st_uid();\n\n\n\n let gd = get_group_by_name(group)\n\n .map(|g| g.gid())\n\n .ok_or_else(|| FileError::GroupNotFound {\n\n group: group.into(),\n\n })?;\n\n\n\n let gid = get_metadata(Path::new(file))?.st_gid();\n\n\n\n // if user and group are same as existing, then do not change\n\n if (ud != uid) && (gd != gid) {\n\n chown(file, Some(Uid::from_raw(ud)), Some(Gid::from_raw(gd)))?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/common/tedge_utils/src/file.rs", "rank": 89, "score": 119056.20156609794 }, { "content": "pub fn create_directory_with_mode(dir: impl AsRef<Path>, mode: u32) -> Result<(), FileError> {\n\n let perm_entry = PermissionEntry::new(None, None, Some(mode));\n\n perm_entry.create_directory(dir.as_ref())\n\n}\n\n\n", "file_path": 
"crates/common/tedge_utils/src/file.rs", "rank": 90, "score": 119056.20156609794 }, { "content": "type SetConfigStringValue<C> = Box<dyn Fn(&mut C, String) -> ConfigSettingResult<()>>;\n", "file_path": "crates/core/tedge/src/cli/config/config_key.rs", "rank": 91, "score": 117834.11019595567 }, { "content": "fn remove_whitespace(s: &str) -> String {\n\n let mut s = String::from(s);\n\n s.retain(|c| !c.is_whitespace());\n\n s\n\n}\n\n\n\nasync fn publish_a_fake_jwt_token(broker: &MqttProcessHandler) {\n\n broker.publish(\"c8y/s/dat\", \"71,1111\").await.unwrap();\n\n}\n", "file_path": "crates/core/tedge_mapper/src/c8y/tests.rs", "rank": 92, "score": 117814.38436927927 }, { "content": "#[test]\n\nfn test_serialize() -> anyhow::Result<()> {\n\n let common_mosquitto_config = CommonMosquittoConfig::default();\n\n\n\n let mut buffer = Vec::new();\n\n common_mosquitto_config.serialize(&mut buffer)?;\n\n\n\n let contents = String::from_utf8(buffer).unwrap();\n\n let config_set: std::collections::HashSet<&str> = contents\n\n .lines()\n\n .filter(|str| !str.is_empty() && !str.starts_with('#'))\n\n .collect();\n\n let mut expected = std::collections::HashSet::new();\n\n\n\n expected.insert(\"listener 1883 localhost\");\n\n expected.insert(\"allow_anonymous true\");\n\n expected.insert(\"connection_messages true\");\n\n\n\n expected.insert(\"log_type error\");\n\n expected.insert(\"log_type warning\");\n\n expected.insert(\"log_type notice\");\n", "file_path": "crates/core/tedge/src/cli/connect/common_mosquitto_config.rs", "rank": 93, "score": 117216.43820785108 }, { "content": "/// Validate if the provided module version matches the currently installed version\n\nfn validate_version(module_name: &str, module_version: &str) -> Result<(), InternalError> {\n\n // Get the current installed version of the provided package\n\n let output = Command::new(\"apt\")\n\n .arg(\"list\")\n\n .arg(\"--installed\")\n\n .arg(module_name)\n\n .output()\n\n .map_err(|err| InternalError::exec_error(\"apt-get\", err))?;\n\n\n\n let stdout = String::from_utf8(output.stdout)?;\n\n\n\n // Check if the installed version and the provided version match\n\n let second_line = stdout.lines().nth(1); //Ignore line 0 which is always 'Listing...'\n\n if let Some(package_info) = second_line {\n\n if let Some(installed_version) = package_info.split_whitespace().nth(1)\n\n // Value at index 0 is the package name\n\n {\n\n if installed_version != module_version {\n\n return Err(InternalError::MetaDataMismatch {\n\n package: module_name.into(),\n", "file_path": "plugins/tedge_apt_plugin/src/main.rs", "rank": 94, "score": 116465.26535324978 }, { "content": "fn fixtures(subdir: &str) -> anyhow::Result<Vec<DirEntry>> {\n\n let base = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n let fixtures: Result<Vec<DirEntry>, _> = WalkDir::new(Path::join(&base, subdir))\n\n .sort_by_file_name()\n\n .into_iter()\n\n .collect();\n\n\n\n Ok(fixtures?.into_iter().filter(is_fixture).collect())\n\n}\n\n\n", "file_path": "crates/core/thin_edge_json/tests/test_suite.rs", "rank": 95, "score": 116403.47951734676 }, { "content": "fn create_packet(size: usize) -> String {\n\n let data: String = \"Some data!\".into();\n\n let loops = size / data.len();\n\n let mut buffer = String::with_capacity(size);\n\n for _ in 0..loops {\n\n buffer.push_str(\"Some data!\");\n\n }\n\n buffer\n\n}\n\n\n", "file_path": "crates/core/tedge_mapper/src/c8y/tests.rs", "rank": 96, "score": 116154.93840473579 }, { "content": "#[test]\n\nfn test_serialize_with_opts() -> 
anyhow::Result<()> {\n\n let common_mosquitto_config = CommonMosquittoConfig::default();\n\n let mosquitto_config_with_opts = common_mosquitto_config\n\n .with_internal_opts(1234, \"1.2.3.4\".into())\n\n .with_external_opts(\n\n Some(2345),\n\n Some(\"0.0.0.0\".to_string()),\n\n Some(\"wlan0\".into()),\n\n Some(\"/etc/ssl/certs\".into()),\n\n Some(\"cert.pem\".into()),\n\n Some(\"key.pem\".into()),\n\n );\n\n\n\n assert!(mosquitto_config_with_opts\n\n .internal_listener\n\n .port\n\n .eq(&Some(1234)));\n\n\n\n let mut buffer = Vec::new();\n\n mosquitto_config_with_opts.serialize(&mut buffer)?;\n", "file_path": "crates/core/tedge/src/cli/connect/common_mosquitto_config.rs", "rank": 97, "score": 115676.66699401972 }, { "content": "fn run_cmd(cmd: &str, args: &str) -> Result<ExitStatus, InternalError> {\n\n let args: Vec<&str> = args.split_whitespace().collect();\n\n let status = Command::new(cmd)\n\n .args(args)\n\n .stdin(Stdio::null())\n\n .status()\n\n .map_err(|err| InternalError::exec_error(cmd, err))?;\n\n Ok(status)\n\n}\n\n\n", "file_path": "plugins/tedge_apt_plugin/src/main.rs", "rank": 98, "score": 114987.25472814668 }, { "content": "fn create_thin_edge_measurement(size: usize) -> String {\n\n let mut map = serde_json::Map::new();\n\n let data = r#\"\"temperature\":25\"#;\n\n let loops = size / data.len();\n\n for i in 0..loops {\n\n map.insert(format!(\"temperature{i}\"), json!(i));\n\n }\n\n let obj = serde_json::Value::Object(map);\n\n serde_json::to_string(&obj).unwrap()\n\n}\n\n\n\npub struct FakeC8YHttpProxy {}\n\n\n\n#[async_trait::async_trait]\n\nimpl C8YHttpProxy for FakeC8YHttpProxy {\n\n async fn init(&mut self) -> Result<(), SMCumulocityMapperError> {\n\n Ok(())\n\n }\n\n\n\n fn url_is_in_my_tenant_domain(&self, _url: &str) -> bool {\n", "file_path": "crates/core/tedge_mapper/src/c8y/tests.rs", "rank": 99, "score": 113017.23781785584 } ]
Rust
xtask/src/main.rs
YruamaLairba/rust-lv2-more-examples
0d19fd3e120ec3563ad7e7cd471e1396cbf8e512
#![allow(clippy::try_err)] extern crate getopts; use getopts::Options; use std::env; use std::fs; use std::fs::File; use std::io::BufRead; use std::io::BufReader; use std::io::BufWriter; use std::io::Write; use std::iter::Iterator; use std::path::{Path, PathBuf}; use std::process::Command; type DynError = Box<dyn std::error::Error>; #[derive(Clone, Copy)] struct PackageConf<'a> { name: &'a str, post_build: fn(conf: &Config) -> Result<(), DynError>, } const PACKAGES_CONF: &[PackageConf] = &[ PackageConf { name: "eg-worker-rs", post_build: |conf| { let lib_file_name = [&conf.lib_prefix(), "eg_worker_rs", &conf.lib_suffix()].concat(); let subs: &[(&str, &str)] = &[("@LIB_FILE_NAME@", &lib_file_name)]; let src_dir = workspace_root().join("eg-worker-rs"); let out_dir = conf.build_dir().join("lv2").join("eg-worker-rs"); fs::create_dir_all(&out_dir).unwrap(); subst( src_dir.join("manifest.ttl"), out_dir.join("manifest.ttl"), subs, ) .unwrap(); for e in &["worker.ttl"] { fs::copy(src_dir.join(e), out_dir.join(e)).unwrap(); } fs::copy( conf.build_dir().join(&lib_file_name), out_dir.join(&lib_file_name), ) .unwrap(); Ok(()) }, }, PackageConf { name: "eg-preset-rs", post_build: |conf| { let lib_file_name = [&conf.lib_prefix(), "eg_preset_rs", &conf.lib_suffix()].concat(); let subs: &[(&str, &str)] = &[("@LIB_FILE_NAME@", &lib_file_name)]; let src_dir = workspace_root().join("eg-preset-rs"); let out_dir = conf.build_dir().join("lv2").join("eg-preset-rs"); fs::create_dir_all(&out_dir).unwrap(); subst( src_dir.join("manifest.ttl"), out_dir.join("manifest.ttl"), subs, ) .unwrap(); for e in &["eg-preset-rs.ttl", "presets.ttl"] { fs::copy(src_dir.join(e), out_dir.join(e)).unwrap(); } fs::copy( conf.build_dir().join(&lib_file_name), out_dir.join(&lib_file_name), ) .unwrap(); Ok(()) }, }, ]; struct Config<'a> { subcommand: String, target: String, target_dir: String, release: bool, packages_conf: Vec<PackageConf<'a>>, opts: Options, } impl<'a> Config<'a> { fn from_env() -> Result<Self, DynError> { let mut args = env::args(); let subcommand = if let Some(arg) = args.nth(1) { arg } else { String::from("") }; let mut opts_args = Vec::<String>::new(); for e in args { if e == "--" { break; } opts_args.push(e); } let mut opts = Options::new(); opts.optmulti("p", "project", "project to build", "NAME"); opts.optflag("", "all", "build all projects"); opts.optflag("", "release", "build in release mode, with optimization"); opts.optopt("", "target", "build for the target triple", "TRIPLE"); opts.optopt( "", "target-dir", "directory for all generated artifacts", "DIRECTORY", ); opts.optflag("h", "help", "print this help menu"); let matches = opts.parse(&opts_args)?; let target = if let Some(s) = matches.opt_str("target") { s } else if let Some(var) = env::var_os("CARGO_BUILD_TARGET") { var.into_string().unwrap() } else { String::from("") }; let target_dir = if let Some(s) = matches.opt_str("target-dir") { s } else if let Some(var) = env::var_os("CARGO_TARGET_DIR") { var.into_string().unwrap() } else if let Some(var) = env::var_os("CARGO_BUILD_TARGET_DIR") { var.into_string().unwrap() } else { String::from("target") }; let release = matches.opt_present("release"); let packages_conf = if matches.opt_present("all") || !matches.opt_present("project") { PACKAGES_CONF.iter().copied().collect::<Vec<PackageConf>>() } else { let mut tmp = Vec::<PackageConf>::new(); let project = matches.opt_strs("p"); 'proj_loop: for proj in project { for pkg_conf in PACKAGES_CONF { if proj == pkg_conf.name { tmp.push(*pkg_conf); continue 
'proj_loop; } } return Err(format!("No project named `{}`", proj).into()); } tmp }; Ok(Self { subcommand, target, target_dir, release, packages_conf, opts, }) } fn print_help(&self) { let brief = "Usage: cargo xtask SUBCOMMAND [options]"; let mut usage = self.opts.usage(&brief); let more_help= " Subcomands are: build build lv2 project(s) Handled environnement variable: CARGO_BUILD_TARGET CARGO_TARGET_DIR CARGO_BUILD_TARGET_DIR "; usage.push_str(&more_help); print!("{}", usage); } fn build_dir(&self) -> PathBuf { let profile_dir = if self.release { "release" } else { "debug" }; workspace_root() .join(&self.target_dir) .join(&self.target) .join(profile_dir) } fn packages_conf(&self) -> Vec<PackageConf> { self.packages_conf.clone() } fn lib_prefix(&self) -> String { let prefix = if self.target.contains("apple") { "lib" } else if self.target.contains("windows") { "" } else if cfg!(target_vendor = "apple") { "lib" } else if cfg!(target_os = "windows") { "" } else { "lib" }; String::from(prefix) } fn lib_suffix(&self) -> String { let suffix = if self.target.contains("apple") { ".dylib" } else if self.target.contains("windows") { ".dll" } else if cfg!(target_vendor = "apple") { ".dylib" } else if cfg!(target_os = "windows") { ".dll" } else { ".so" }; String::from(suffix) } } fn main() { if let Err(e) = try_main() { eprintln!("{}", e); std::process::exit(-1); } } fn try_main() -> Result<(), DynError> { let mut conf = Config::from_env()?; match conf.subcommand.as_ref() { "build" => build(&mut conf)?, "debug" => debug(&mut conf)?, _ => conf.print_help(), } Ok(()) } fn build(conf: &mut Config) -> Result<(), DynError> { let mut cargo_args = Vec::<String>::new(); if conf.release { cargo_args.push(String::from("--release")); } if conf.target != "" { cargo_args.push(String::from("--target")); cargo_args.push(conf.target.clone()); } cargo_args.push(String::from("--target-dir")); cargo_args.push(conf.target_dir.clone()); for p in conf.packages_conf() { cargo_args.push(String::from("-p")); cargo_args.push(String::from(p.name)); } println!("Building binarie(s)"); cargo("build", &cargo_args)?; println!("Post build step(s)"); for p in conf.packages_conf() { (p.post_build)(conf)?; } println!("Finished"); println!(); Ok(()) } fn subst<P: AsRef<Path>, Q: AsRef<Path>>( in_path: P, out_path: Q, subs: &[(&str, &str)], ) -> Result<(), DynError> { let mut in_file = BufReader::new(File::open(in_path)?); let mut out_file = BufWriter::new(File::create(out_path)?); let mut buf = String::new(); while in_file.read_line(&mut buf).unwrap() != 0 { for (token, value) in subs { buf = buf.replace(token, value); } write!(out_file, "{}", buf)?; buf.clear(); } Ok(()) } macro_rules! 
print_env { ( $x:expr) => {{ println!( stringify!($x {}), env::var(stringify!($x)).unwrap_or_else(|e| format!("{}", e)) ); }}; } fn debug(_conf: &mut Config) -> Result<(), DynError> { print_env!(CARGO); print_env!(CARGO_MANIFEST_DIR); print_env!(CARGO_PKG_VERSION); print_env!(CARGO_PKG_VERSION_MAJOR); print_env!(CARGO_PKG_VERSION_MINOR); print_env!(CARGO_PKG_VERSION_PATCH); print_env!(CARGO_PKG_VERSION_PRE); print_env!(CARGO_PKG_AUTHORS); print_env!(CARGO_PKG_NAME); print_env!(CARGO_PKG_DESCRIPTION); print_env!(CARGO_PKG_HOMEPAGE); print_env!(CARGO_PKG_REPOSITORY); print_env!(OUT_DIR); print_env!(TARGET); print_env!(CARGO_CFG_TARGET_OS); Ok(()) } fn cargo(cmd: &str, args: &[String]) -> Result<(), DynError> { let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()); let status = Command::new(cargo) .current_dir(workspace_root()) .arg(cmd) .args(args) .status()?; if !status.success() { Err(format!("cargo {} failed", cmd))?; } Ok(()) } fn workspace_root() -> PathBuf { Path::new(&env!("CARGO_MANIFEST_DIR")) .ancestors() .nth(1) .unwrap() .to_path_buf() }
#![allow(clippy::try_err)] extern crate getopts; use getopts::Options; use std::env; use std::fs; use std::fs::File; use std::io::BufRead; use std::io::BufReader; use std::io::BufWriter; use std::io::Write; use std::iter::Iterator; use std::path::{Path, PathBuf}; use std::process::Command; type DynError = Box<dyn std::error::Error>; #[derive(Clone, Copy)] struct PackageConf<'a> { name: &'a str, post_build: fn(conf: &Config) -> Result<(), DynError>, } const PACKAGES_CONF: &[PackageConf] = &[ PackageConf { name: "eg-worker-rs", post_build: |conf| { let lib_file_name = [&conf.lib_prefix(), "eg_worker_rs", &conf.lib_suffix()].concat(); let subs: &[(&str, &str)] = &[("@LIB_FILE_NAME@", &lib_file_name)]; let src_dir = workspace_root().join("eg-worker-rs"); let out_dir = conf.build_dir().join("lv2").join("eg-worker-rs"); fs::create_dir_all(&out_dir).unwrap(); subst( src_dir.join("manifest.ttl"), out_dir.join("manifest.ttl"), subs, ) .unwrap(); for e in &["worker.ttl"] { fs::copy(src_dir.join(e), out_dir.join(e)).unwrap(); } fs::copy( conf.build_dir().join(&lib_file_name), out_dir.join(&lib_file_name), ) .unwrap(); Ok(()) }, }, PackageConf { name: "eg-preset-rs", post_build: |conf| { let lib_file_name = [&conf.lib_prefix(), "eg_preset_rs", &conf.lib_suffix()].concat(); let subs: &[(&str, &str)] = &[("@LIB_FILE_NAME@", &lib_file_name)]; let src_dir = workspace_root().join("eg-preset-rs"); let out_dir = conf.build_dir().join("lv2").join("eg-preset-rs"); fs::create_dir_all(&out_dir).unwrap(); subst( src_dir.join("manifest.ttl"), out_dir.join("manifest.ttl"), subs, ) .unwrap(); for e in &["eg-preset-rs.ttl", "presets.ttl"] { fs::copy(src_dir.join(e), out_dir.join(e)).unwrap(); } fs::copy( conf.build_dir().join(&lib_file_name), out_dir.join(&lib_file_name), ) .unwrap(); Ok(()) }, }, ]; struct Config<'a> { subcommand: String, target: String, target_dir: String, release: bool, packages_conf: Vec<PackageConf<'a>>, opts: Options, } impl<'a> Config<'a> { fn from_env() -> Result<Self, DynError> { let mut args = env::args(); let subcommand = if let Some(arg) = args.nth(1) { arg } else { String::from("") }; let mut opts_args = Vec::<String>::new(); for e in args { if e == "--" { break; } opts_args.push(e); } let mut opts = Options::new(); opts.optmulti("p", "project", "project to build", "NAME"); opts.optflag("", "all", "build all projects"); opts.optflag("", "release", "build in release mode, with optimization"); opts.optopt("", "target", "build for the target triple", "TRIPLE"); opts.optopt( "", "target-dir", "directory for all generated artifacts", "DIRECTORY", ); opts.optflag("h", "help", "print this help menu"); let matches = opts.parse(&opts_args)?; let target = if let Some(s) = matches.opt_str("target") { s } else if let Some(var) = env::var_os("CARGO_BUILD_TARGET") { var.into_string().unwrap() } else { String::from("") }; let target_dir = if let Some(s) = matches.opt_str("target-dir") { s } else if let Some(var) = env::var_os("CARGO_TARGET_DIR") { var.into_string().unwrap() } else if let Some(var) = env::var_os("CARGO_BUILD_TARGET_DIR") { var.into_string().unwrap() } else { String::from("target") }; let release = matches.opt_present("release"); let packages_conf = if matches.opt_present("all") || !matches.opt_present("project") { PACKAGES_CONF.iter().copied().collect::<Vec<PackageConf>>() } else { let mut tmp = Vec::<PackageConf>::new(); let project = matches.opt_strs("p"); 'proj_loop: for proj in project { for pkg_conf in PACKAGES_CONF { if proj == pkg_conf.name { tmp.push(*pkg_conf); continue 
'proj_loop; } } return Err(format!("No project named `{}`", proj).into()); } tmp }; Ok(Self { subcommand, target, target_dir, release, packages_conf, opts, }) } fn print_help(&self) { let brief = "Usage: cargo xtask SUBCOMMAND [options]"; let mut usage = self.opts.usage(&brief); let more_help= " Subcomands are: build build lv2 project(s) Handled environnement variable: CARGO_BUILD_TARGET CARGO_TARGET_DIR CARGO_BUILD_TARGET_DIR "; usage.push_str(&more_help); print!("{}", usage); } fn build_dir(&self) -> PathBuf { let profile_dir = if self.release { "release" } else { "debug" }; workspace_root() .join(&self.target_dir) .join(&self.target) .join(profile_dir) } fn packages_conf(&self) -> Vec<PackageConf> { self.packages_conf.clone() } fn lib_prefix(&self) -> String { let prefix = if self.target.contains("apple") { "lib" } else
; String::from(prefix) } fn lib_suffix(&self) -> String { let suffix = if self.target.contains("apple") { ".dylib" } else if self.target.contains("windows") { ".dll" } else if cfg!(target_vendor = "apple") { ".dylib" } else if cfg!(target_os = "windows") { ".dll" } else { ".so" }; String::from(suffix) } } fn main() { if let Err(e) = try_main() { eprintln!("{}", e); std::process::exit(-1); } } fn try_main() -> Result<(), DynError> { let mut conf = Config::from_env()?; match conf.subcommand.as_ref() { "build" => build(&mut conf)?, "debug" => debug(&mut conf)?, _ => conf.print_help(), } Ok(()) } fn build(conf: &mut Config) -> Result<(), DynError> { let mut cargo_args = Vec::<String>::new(); if conf.release { cargo_args.push(String::from("--release")); } if conf.target != "" { cargo_args.push(String::from("--target")); cargo_args.push(conf.target.clone()); } cargo_args.push(String::from("--target-dir")); cargo_args.push(conf.target_dir.clone()); for p in conf.packages_conf() { cargo_args.push(String::from("-p")); cargo_args.push(String::from(p.name)); } println!("Building binarie(s)"); cargo("build", &cargo_args)?; println!("Post build step(s)"); for p in conf.packages_conf() { (p.post_build)(conf)?; } println!("Finished"); println!(); Ok(()) } fn subst<P: AsRef<Path>, Q: AsRef<Path>>( in_path: P, out_path: Q, subs: &[(&str, &str)], ) -> Result<(), DynError> { let mut in_file = BufReader::new(File::open(in_path)?); let mut out_file = BufWriter::new(File::create(out_path)?); let mut buf = String::new(); while in_file.read_line(&mut buf).unwrap() != 0 { for (token, value) in subs { buf = buf.replace(token, value); } write!(out_file, "{}", buf)?; buf.clear(); } Ok(()) } macro_rules! print_env { ( $x:expr) => {{ println!( stringify!($x {}), env::var(stringify!($x)).unwrap_or_else(|e| format!("{}", e)) ); }}; } fn debug(_conf: &mut Config) -> Result<(), DynError> { print_env!(CARGO); print_env!(CARGO_MANIFEST_DIR); print_env!(CARGO_PKG_VERSION); print_env!(CARGO_PKG_VERSION_MAJOR); print_env!(CARGO_PKG_VERSION_MINOR); print_env!(CARGO_PKG_VERSION_PATCH); print_env!(CARGO_PKG_VERSION_PRE); print_env!(CARGO_PKG_AUTHORS); print_env!(CARGO_PKG_NAME); print_env!(CARGO_PKG_DESCRIPTION); print_env!(CARGO_PKG_HOMEPAGE); print_env!(CARGO_PKG_REPOSITORY); print_env!(OUT_DIR); print_env!(TARGET); print_env!(CARGO_CFG_TARGET_OS); Ok(()) } fn cargo(cmd: &str, args: &[String]) -> Result<(), DynError> { let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()); let status = Command::new(cargo) .current_dir(workspace_root()) .arg(cmd) .args(args) .status()?; if !status.success() { Err(format!("cargo {} failed", cmd))?; } Ok(()) } fn workspace_root() -> PathBuf { Path::new(&env!("CARGO_MANIFEST_DIR")) .ancestors() .nth(1) .unwrap() .to_path_buf() }
if self.target.contains("windows") { "" } else if cfg!(target_vendor = "apple") { "lib" } else if cfg!(target_os = "windows") { "" } else { "lib" }
if_condition
[ { "content": "#[derive(PortCollection)]\n\nstruct Ports {\n\n trigger_task: InputPort<Control>,\n\n}\n\n\n\n/// Requested features\n", "file_path": "eg-worker-rs/src/lib.rs", "rank": 9, "score": 48153.91537814452 }, { "content": "#[derive(PortCollection)]\n\nstruct Ports {\n\n _param1: InputPort<Control>,\n\n _param2: InputPort<Control>,\n\n _param3: InputPort<Control>,\n\n}\n\n\n\n/// A plugin to demonstrate how to make preset. This is fully handled by rdf spec, so the plugin\n\n/// does nothing.\n", "file_path": "eg-preset-rs/src/lib.rs", "rank": 10, "score": 48153.91537814452 }, { "content": "#[uri(\"urn:rust-lv2-more-examples:eg-preset-rs\")]\n\nstruct EgPreset {}\n\n\n\nimpl Plugin for EgPreset {\n\n type Ports = Ports;\n\n type InitFeatures = ();\n\n type AudioFeatures = ();\n\n\n\n fn new(_plugin_info: &PluginInfo, _features: &mut Self::InitFeatures) -> Option<Self> {\n\n Some(Self {})\n\n }\n\n\n\n fn run(&mut self, _ports: &mut Ports, _features: &mut Self::AudioFeatures) {}\n\n}\n\n\n\nlv2_descriptors!(EgPreset);\n", "file_path": "eg-preset-rs/src/lib.rs", "rank": 11, "score": 46787.718477866845 }, { "content": "#[uri(\"urn:rust-lv2-more-examples:eg-worker-rs\")]\n\nstruct EgWorker {\n\n //false for off, true for on\n\n last_trigger_task: bool,\n\n}\n\n\n\nimpl Plugin for EgWorker {\n\n type Ports = Ports;\n\n type InitFeatures = ();\n\n type AudioFeatures = AudioFeatures<'static>;\n\n\n\n fn new(_plugin_info: &PluginInfo, _features: &mut Self::InitFeatures) -> Option<Self> {\n\n Some(Self {\n\n last_trigger_task: false,\n\n })\n\n }\n\n\n\n fn run(&mut self, ports: &mut Ports, features: &mut Self::AudioFeatures) {\n\n if *ports.trigger_task > 0f32 && !self.last_trigger_task {\n\n self.last_trigger_task = true;\n\n let message = Task::Say(\"New task triggered\");\n", "file_path": "eg-worker-rs/src/lib.rs", "rank": 12, "score": 46787.718477866845 }, { "content": "#[derive(FeatureCollection)]\n\nstruct AudioFeatures<'a> {\n\n ///host feature allowing to schedule some work\n\n schedule: Schedule<'a, EgWorker>,\n\n}\n\n\n", "file_path": "eg-worker-rs/src/lib.rs", "rank": 13, "score": 44513.32385632753 }, { "content": " received_data: Self::WorkData,\n\n ) -> Result<(), WorkerError> {\n\n match received_data {\n\n Task::Say(message) => {\n\n println!(\"{}\", message);\n\n let _ = response_handler.respond(Ok(()));\n\n Ok(())\n\n },\n\n }\n\n }\n\n\n\n fn work_response(\n\n &mut self,\n\n data: Self::ResponseData,\n\n _features: &mut Self::AudioFeatures,\n\n ) -> Result<(), WorkerError> {\n\n if let Err(()) = data {\n\n //printing should normally be avoided in the audio thread\n\n println!(\"oops work returned an error\")\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nlv2_descriptors!(EgWorker);\n", "file_path": "eg-worker-rs/src/lib.rs", "rank": 15, "score": 16929.220412185216 }, { "content": "use core::any::Any;\n\nuse lv2_core::feature::*;\n\nuse lv2_core::prelude::*;\n\nuse lv2_worker::*;\n\nuse urid::*;\n\n\n\n#[derive(PortCollection)]\n", "file_path": "eg-worker-rs/src/lib.rs", "rank": 16, "score": 16926.570626116525 }, { "content": "use lv2_core::prelude::*;\n\nuse urid::*;\n\n\n\n#[derive(PortCollection)]\n", "file_path": "eg-preset-rs/src/lib.rs", "rank": 17, "score": 16925.836535829847 }, { "content": " let _ = features.schedule.schedule_work(message);\n\n } else if *ports.trigger_task <= 0f32 && self.last_trigger_task {\n\n self.last_trigger_task = false;\n\n }\n\n }\n\n\n\n fn extension_data(uri: &Uri) -> Option<&'static dyn Any> {\n\n match_extensions![uri, WorkerDescriptor<Self>]\n\n 
}\n\n}\n\n\n\n// Actually implementing the extension.\n\nimpl Worker for EgWorker {\n\n /// data type sended by the schedule handler and received by the `work` method.\n\n type WorkData = Task;\n\n /// data type sended by the response handler and received by the `work_response` method.\n\n type ResponseData = Result<(),()>;\n\n fn work(\n\n //response handler is associated to the plugin type.\n\n response_handler: &ResponseHandler<Self>,\n", "file_path": "eg-worker-rs/src/lib.rs", "rank": 18, "score": 16923.892284097048 }, { "content": "//Data type for scheduling work\n\nenum Task {\n\n Say(&'static str),\n\n}\n\n\n\n/// A plugin that do some work in another thread\n", "file_path": "eg-worker-rs/src/lib.rs", "rank": 19, "score": 16190.669003164914 }, { "content": "# Programming LV2 Plugins - Rust Edition\n\n\n\nThis repository contains examples showing how to use the rust-lv2 framework.\n\nThe main target is to have examples focused on one aspect or extension of the\n\nLV2 Spec.\n\n\n\n## Building the samples\n\nThis project use a custom script to handle post-compilation step :\n\n\n\n- Use `cargo xtask build --all` to build all projects.\n\n- Use `cargo xtask build -p <project>` to build a specific example.\n\n- Use `cargo xtask` to see more option.\n\n\n\nBuilded plugins are in the lv2 folder inside the cargo output dir\n\n(`target/debug` by default).\n\n\n\n## Licensing\n\n\n\nLike original C and rust-lv2-book examples, the code is published under the\n\n`ISC` license. See the [LICENSE file](LICENSE.md) for more info.\n", "file_path": "README.md", "rank": 29, "score": 12445.213014925235 }, { "content": "Copyright 2020 Amaury ABRIAL aka Yruama_Lairba.\n\n\n\nPermission to use, copy, modify, and/or distribute this software for any\n\npurpose with or without fee is hereby granted, provided that the above\n\ncopyright notice and this permission notice appear in all copies.\n\n\n\nTHIS SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES\n\nWITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF\n\nMERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR\n\nANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES\n\nWHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN\n\nACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF\n\nOR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.\n", "file_path": "LICENSE.md", "rank": 30, "score": 12434.834534231222 }, { "content": "# Worker Example\n\n\n\nThis plugin is a minimalistic but working example of how to use the lv2-worker\n\ncrate of [rust-lv2](https://github.com/RustAudio/rust-lv2) project. In this\n\nexample, a task is triggered each time the input control is toggled from 0 to\n\n1.\n", "file_path": "eg-worker-rs/README.md", "rank": 31, "score": 11374.29507382308 }, { "content": "# Preset Example\n\n\n\nThis plugin show how to make preset for a plugin. As presets are fully\n\ndescribed in rdf, there is nothing to do inside the .rs. Look at manifest.ttl\n\nand presets.tll to see how it can be done.\n", "file_path": "eg-preset-rs/README.md", "rank": 32, "score": 11367.820998330004 } ]
Rust
src/view/mod.rs
matthias-t/Smith
e89ded89a4ce2147ca3c8be6ed065a8f1a808fff
mod screen; use self::screen::Screen; use data::{Editable, Named, Selectable}; use std::{cmp, iter}; use termion::{color, style, terminal_size}; pub struct View { message: Option<String>, is_prompt: bool, line_offset: usize, screen: Screen, } const TAB_LENGTH: usize = 4; impl View { pub fn new() -> Self { View { message: None, is_prompt: false, line_offset: 0, screen: Screen::new(), } } pub fn message(&mut self, message: &str) { self.is_prompt = false; self.message = Some(String::from(message)); } pub fn prompt(&mut self, prompt: &str, message: &str) { self.is_prompt = true; let msg = String::from(prompt) + message; self.message = Some(msg); } pub fn quiet(&mut self) { self.is_prompt = false; self.message = None; } pub fn center_view(&mut self, line: usize) { self.line_offset = line .checked_sub(self.lines_height() as usize / 2) .unwrap_or(0); } pub fn adjust_view(&mut self, line: usize) { if line < self.line_offset { self.line_offset = line; } else if line + 1 >= self.line_offset + self.lines_height() { self.line_offset = 1 + line - self.lines_height(); } } pub fn scroll_view<T: Editable>(&mut self, offset: isize, content: &T) { self.line_offset = cmp::min( cmp::max((self.line_offset as isize) + offset, 0), (content.line_count() as isize) - 1, ) as usize; } pub fn render<T>(&mut self, content: &T) where T: Editable + Named + Selectable, { self.screen.clear(&color::Reset); self.paint_lines(content); self.paint_status(content); self.paint_message(); self.paint_cursor(content); self.screen.present(); } pub fn translate_coordinates<T>(&self, content: &T, x: u16, y: u16) -> (usize, usize) where T: Editable, { let line = cmp::min( (y as isize + self.line_offset as isize - 1) as usize, content.line_count() - 1, ); let visual_col = (cmp::max( 0, x as isize - self.line_number_width(content.line_count()) as isize - 2, )) as usize; let col = content .iter_line(line) .scan(0, |state, x| { *state += if x == '\t' { TAB_LENGTH } else { 1 }; Some(*state) }).take_while(|&x| x <= visual_col) .count(); (line, col) } fn paint_message(&self) { if let Some(ref message) = self.message { let y = self.lines_height() + 1; self.screen.draw(0, y, message); } } fn paint_cursor<T>(&mut self, content: &T) where T: Editable + Selectable, { if (content.line()) < self.line_offset || content.line() >= self.line_offset + self.lines_height() || content.col() >= self.lines_width(content.line_count()) || content.sel().is_some() { self.screen.hide_cursor(); return; } let (x, y) = if self.is_prompt { ( self.message.clone().unwrap().chars().count(), self.lines_height() + 1, ) } else { let (a, b) = self.cursor_pos(content); (a, b) }; self.screen.move_cursor(x, y); self.screen.show_cursor(); } fn paint_status<T>(&self, content: &T) where T: Editable + Named, { let line = content.line(); let column = content.col(); let line_count = content.line_count(); let advance = ((line + 1) as f64 / line_count as f64 * 100.0).floor(); let (screen_width, _) = terminal_size().unwrap(); let empty_line = (0..screen_width).map(|_| ' ').collect::<String>(); let y = self.lines_height(); let style = format!("{}{}", color::Fg(color::White), style::Invert); self.screen.draw_with_style(0, y, &empty_line, &style); self.screen.draw_with_style(0, y, content.name(), &style); let position_info = format!("{}% {}/{}: {}", advance, line + 1, line_count, column); let x = screen_width as usize - position_info.len(); self.screen.draw_with_style(x, y, &position_info, &style); } fn paint_lines<T>(&self, content: &T) where T: Editable + Selectable, { let line_offset = 
self.line_offset as usize; let lines_height = self.lines_height() as usize; let lines_width = self.lines_width(content.line_count()) as usize; let line_count = content.line_count(); let line_start = self.line_number_width(line_count) as usize + 1; for (y, line) in content .lines() .skip(line_offset) .take(cmp::min(lines_height, line_count)) .enumerate() { let line_index = line_offset + y; self.screen.draw_with_style( 0, y, &format!("{}", 1 + line_index), &format!("{}", color::Fg(color::White)), ); if line.len_chars() > 1 { let line_start_char_index = content.line_index_to_char_index(line_index); for (x, c) in line .chars() .flat_map(|c| { if c == '\t' { iter::repeat(' ').take(TAB_LENGTH) } else { iter::repeat(c).take(1) } }).enumerate() { let char_index = line_start_char_index + x; if x < lines_width { if content.in_sel(char_index) { self.screen.draw_with_style( x + line_start, y, &format!("{}", c), &format!("{}", style::Invert), ); } else { self.screen.draw(x + line_start, y, &format!("{}", c)); } } } } else if content.line_in_sel(line_offset + y) { self.screen .draw_with_style(line_start, y, " ", &format!("{}", style::Invert)); } } } fn cursor_pos<T: Editable>(&self, content: &T) -> (usize, usize) { let line = content.line(); let first_line = self.line_offset; let y = line - first_line as usize; let visual_col = content.col(); let column: usize = content .iter_line(line) .map(|x| if x == '\t' { TAB_LENGTH } else { 1 }) .take(visual_col) .sum(); ( (self.line_number_width(content.line_count()) as usize + 1 + column), y, ) } fn line_number_width(&self, line_count: usize) -> u16 { line_count.to_string().len() as u16 } fn status_height(&self) -> u16 { 2 } pub fn lines_height(&self) -> usize { let (_, screen_height) = terminal_size().unwrap(); let incompressible = self.status_height() as usize; cmp::max(screen_height as usize, incompressible) - incompressible } pub fn lines_width(&self, line_count: usize) -> usize { let (screen_width, _) = terminal_size().unwrap(); let incompressible = self.line_number_width(line_count) as usize + 1; cmp::max(screen_width as usize, incompressible) - incompressible } }
mod screen; use self::screen::Screen; use data::{Editable, Named, Selectable}; use std::{cmp, iter}; use termion::{color, style, terminal_size}; pub struct View { message: Option<String>, is_prompt: bool, line_offset: usize, screen: Screen, } const TAB_LENGTH: usize = 4; impl View { pub fn new() -> Self { View { message: None, is_prompt: false, line_offset: 0, screen: Screen::new(), } } pub fn message(&mut self, message: &str) { self.is_prompt = false; self.message = Some(String::from(message)); } pub fn prompt(&mut self, prompt: &str, message: &str) { self.is_prompt = true; let msg = String::from(prompt) + message; self.message = Some(msg); } pub fn quiet(&mut self) { self.is_prompt = false; self.message = None; } pub fn center_view(&mut self, line: usize) { self.line_offset = line .checked_sub(self.lines_height() as usize / 2) .unwrap_or(0); } pub fn adjust_view(&mut self, line: usize) { if line < self.line_offset { self.line_offset = line; } else if line + 1 >= self.line_offset + self.lines_height() { self.line_offset = 1 + line - self.lines_height(); } } pub fn scroll_view<T: Editable>(&mut self, offset: isize, content: &T) { self.line_offset = cmp::min( cmp::max((self.line_offset as isize) + offset, 0), (content.line_count() as isize) - 1, ) as usize; } pub fn render<T>(&mut self, content: &T) where T: Editable + Named + Selectable, { self.screen.clear(&color::Reset); self.paint_lines(content); self.paint_status(content); self.paint_message(); self.paint_cursor(content); self.screen.present(); } pub fn translate_coordinates<T>(&self, content: &T, x: u16, y: u16) -> (usize, usize) where T: Editable, { let line = cmp::min( (y as isize + self.line_offset as isize - 1) as usize, content.line_count() - 1, ); let visual_col = (cmp::max( 0, x as isize - self.line_number_width(content.line_count()) as isize - 2, )) as usize; let col = content .iter_line(line) .scan(0, |state, x| { *state += if x == '\t' { TAB_LENGTH } else { 1 }; Some(*state) }).take_while(|&x| x <= visual_col) .count(); (line, col) } fn paint_message(&self) { if let Some(ref message) = self.message { let y = self.lines_height() + 1; self.screen.draw(0, y, message); } } fn paint_cursor<T>(&mut self, content: &T) where T: Editable + Selectable, { if (content.line()) < self.line_offset || content.line() >= self.line_offset + self.lines_height() || content.col() >= self.lines_width(content.line_count()) || content.sel().is_some() { self.screen.hide_cursor(); return; } let (x, y) = if self.is_prompt { ( self.message.clone().unwrap().chars().count(), self.lines_height() + 1, ) } else { let (a, b) = self.cursor_pos(content); (a, b) }; self.screen.move_cursor(x, y); self.screen.show_cursor(); } fn paint_status<T>(&self, content: &T) where T: Editable + Named, { let line = content.line(); let column = content.col(); let line_count = content.line_count(); let advance = ((line + 1) as f64 / line_count as f64 * 100.0).floor(); let (screen_width, _) = terminal_size().unwrap(); let empty_line = (0..screen_width).map(|_| ' ').collect::<String>(); let y = self.lines_height(); let style = format!("{}{}", color::Fg(color::White), style::Invert); self.screen.draw_with_style(0, y, &empty_line, &style); self.screen.draw_with_style(0, y, content.name(), &style); let position_info = format!("{}% {}/{}: {}", advance, line + 1, line_count, column); let x = screen_width as usize - position_info.len(); self.screen.draw_with_style(x, y, &position_info, &style); }
fn cursor_pos<T: Editable>(&self, content: &T) -> (usize, usize) { let line = content.line(); let first_line = self.line_offset; let y = line - first_line as usize; let visual_col = content.col(); let column: usize = content .iter_line(line) .map(|x| if x == '\t' { TAB_LENGTH } else { 1 }) .take(visual_col) .sum(); ( (self.line_number_width(content.line_count()) as usize + 1 + column), y, ) } fn line_number_width(&self, line_count: usize) -> u16 { line_count.to_string().len() as u16 } fn status_height(&self) -> u16 { 2 } pub fn lines_height(&self) -> usize { let (_, screen_height) = terminal_size().unwrap(); let incompressible = self.status_height() as usize; cmp::max(screen_height as usize, incompressible) - incompressible } pub fn lines_width(&self, line_count: usize) -> usize { let (screen_width, _) = terminal_size().unwrap(); let incompressible = self.line_number_width(line_count) as usize + 1; cmp::max(screen_width as usize, incompressible) - incompressible } }
fn paint_lines<T>(&self, content: &T) where T: Editable + Selectable, { let line_offset = self.line_offset as usize; let lines_height = self.lines_height() as usize; let lines_width = self.lines_width(content.line_count()) as usize; let line_count = content.line_count(); let line_start = self.line_number_width(line_count) as usize + 1; for (y, line) in content .lines() .skip(line_offset) .take(cmp::min(lines_height, line_count)) .enumerate() { let line_index = line_offset + y; self.screen.draw_with_style( 0, y, &format!("{}", 1 + line_index), &format!("{}", color::Fg(color::White)), ); if line.len_chars() > 1 { let line_start_char_index = content.line_index_to_char_index(line_index); for (x, c) in line .chars() .flat_map(|c| { if c == '\t' { iter::repeat(' ').take(TAB_LENGTH) } else { iter::repeat(c).take(1) } }).enumerate() { let char_index = line_start_char_index + x; if x < lines_width { if content.in_sel(char_index) { self.screen.draw_with_style( x + line_start, y, &format!("{}", c), &format!("{}", style::Invert), ); } else { self.screen.draw(x + line_start, y, &format!("{}", c)); } } } } else if content.line_in_sel(line_offset + y) { self.screen .draw_with_style(line_start, y, " ", &format!("{}", style::Invert)); } } }
function_block-full_function
[]
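The editor-view record above renders each tab as TAB_LENGTH spaces and converts a visual (screen) column back to a character column with a scan/take_while pass in translate_coordinates. Below is a minimal, self-contained sketch of that mapping; the function name and the demo strings are invented for illustration and are not part of the record.

```rust
// Sketch only: mirrors the tab-width accounting used by translate_coordinates.
const TAB_LENGTH: usize = 4;

// Count how many characters of `line` fit within `visual_col` screen cells,
// treating '\t' as TAB_LENGTH cells and every other char as one cell.
fn visual_to_char_col(line: &str, visual_col: usize) -> usize {
    line.chars()
        .scan(0usize, |width, c| {
            *width += if c == '\t' { TAB_LENGTH } else { 1 };
            Some(*width)
        })
        .take_while(|&w| w <= visual_col)
        .count()
}

fn main() {
    // "\tfn" renders as four spaces then "fn": visual column 5 lands on 'n' (char index 2).
    assert_eq!(visual_to_char_col("\tfn", 5), 2);
    // Without tabs, visual and character columns coincide.
    assert_eq!(visual_to_char_col("abc", 2), 2);
    println!("ok");
}
```

Running it confirms that a leading tab advances the visual column by four while the character index advances by one, which is the asymmetry the view code has to undo when translating mouse coordinates.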
Rust
liblz4stego/src/compressor.rs
m4tx/lz4stego
5e53272900a74c1c88b993f92087ae7e588130b8
use std::cmp::min; use log::debug; use xxhash_rust::xxh32::{xxh32, Xxh32}; use crate::constants::{ END_LITERAL_NUM, LZ4_MAGIC_NUMBER, MATCH_LENGTH_OFFSET, MAX_BLOCK_SIZE, MIN_COMPRESS_LENGTH, TOKEN_MAX_VAL, }; use crate::descriptors::{BdByte, BlockSize, FlgByte, Token}; use crate::numeral_coding; use crate::occurrence_map::OccurrenceMap; use byteorder::{WriteBytesExt, LE}; use std::collections::VecDeque; use std::io::Write; pub struct Compressor<'a, W: Write> { output_write: W, buffer: VecDeque<u8>, hash: Xxh32, hidden_data_encoder: numeral_coding::Decoder<'a>, prefer_hidden: bool, } impl<'a, W: Write> Compressor<'a, W> { pub fn new_with_hidden_data( writer: W, hidden_data: &'a [u8], prefer_hidden: bool, ) -> Result<Self, std::io::Error> { let mut compressor = Self { output_write: writer, buffer: VecDeque::new(), hash: Xxh32::new(0), hidden_data_encoder: numeral_coding::Decoder::new(hidden_data), prefer_hidden, }; compressor.init()?; Ok(compressor) } pub fn new(writer: W) -> Result<Self, std::io::Error> { let mut compressor = Self { output_write: writer, buffer: VecDeque::new(), hash: Xxh32::new(0), hidden_data_encoder: numeral_coding::Decoder::new(b""), prefer_hidden: false, }; compressor.init()?; Ok(compressor) } fn init(&mut self) -> Result<(), std::io::Error> { self.write_header()?; Ok(()) } fn get_available_bytes(&self) -> usize { self.hidden_data_encoder.get_available_bytes() } pub fn finish(mut self) -> Result<usize, std::io::Error> { if !self.buffer.is_empty() { self.output_block(true)?; } self.write_footer()?; self.output_write.flush()?; Ok(self.get_available_bytes()) } fn write_header(&mut self) -> Result<(), std::io::Error> { self.output_write.write_u32::<LE>(LZ4_MAGIC_NUMBER)?; let frame_descriptor = self.build_frame_descriptor(); self.output_write.write(&frame_descriptor)?; Ok(()) } fn build_frame_descriptor(&self) -> Vec<u8> { let mut output = Vec::new(); let mut flag = FlgByte(0); flag.set_version(1); flag.set_block_independent(true); flag.set_content_checksum_added(true); output.write_u8(flag.0).unwrap(); let mut bd = BdByte(0); bd.set_block_max_size(7); output.write_u8(bd.0).unwrap(); let hc = ((xxh32(&output, 0) >> 8) & 0xFF) as u8; output.write_u8(hc).unwrap(); output } fn write_footer(&mut self) -> Result<(), std::io::Error> { self.output_write.write_u32::<LE>(0)?; self.output_write.write_u32::<LE>(self.hash.digest())?; Ok(()) } fn output_block(&mut self, force_write: bool) -> Result<(), std::io::Error> { let mut data = self.buffer.make_contiguous(); let mut to_shrink = 0; while !data.is_empty() && (data.len() >= MAX_BLOCK_SIZE || force_write) { if data.len() < MIN_COMPRESS_LENGTH { output_uncompressed_block(&mut self.output_write, data)?; to_shrink += data.len(); break; } let block_size = min(data.len(), MAX_BLOCK_SIZE); output_compressed_block( &mut self.output_write, &data[..block_size], &mut self.hidden_data_encoder, self.prefer_hidden, )?; to_shrink += block_size; data = &mut data[block_size..]; } self.buffer.drain(..to_shrink); Ok(()) } } impl<'a, W: Write> Write for Compressor<'a, W> { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.buffer.extend(buf); self.hash.update(buf); if self.buffer.len() >= MAX_BLOCK_SIZE { self.output_block(false)?; } Ok(buf.len()) } fn flush(&mut self) -> Result<(), std::io::Error> { if !self.buffer.is_empty() { self.output_block(true)?; self.output_write.flush()?; } Ok(()) } } fn output_uncompressed_block<W: Write>( mut output_write: W, data: &[u8], ) -> Result<(), std::io::Error> { debug!("Outputting 
uncompressed block with length: {}", data.len()); let mut block_size = BlockSize(0); block_size.set_block_uncompressed(true); block_size.set_block_size(data.len() as u32); output_write.write_u32::<LE>(block_size.0)?; output_write.write(data)?; Ok(()) } fn output_compressed_block<W: Write>( mut output_write: W, data: &[u8], hidden_data_encoder: &mut numeral_coding::Decoder, prefer_hidden: bool, ) -> Result<(), std::io::Error> { let mut output = Vec::new(); output.write_u32::<LE>(0).unwrap(); let mut occur = OccurrenceMap::new(data, prefer_hidden); let mut literals = Vec::new(); let mut i = 0; while i < data.len() - END_LITERAL_NUM { let occurrences = occur.get_occurrences(i); if occurrences.len() > 0 { let chosen_index = hidden_data_encoder.decode_value(occurrences.len() as u16); let (index, match_length) = occurrences.choose_occurrence(chosen_index as usize); if match_length < 4 { literals.push(data[i]); i += 1; continue; } let offset = (i - index) as u16; output_sequence(&literals, offset, match_length as u32, &mut output); literals.clear(); occur.add_occurrences(i, match_length); i += match_length; } else { literals.push(data[i]); occur.add_occurrences(i, 1); i += 1; } } literals.extend_from_slice(&data[data.len() - END_LITERAL_NUM..]); output_sequence(&literals, 0, MATCH_LENGTH_OFFSET, &mut output); let mut block_size = BlockSize(0); block_size.set_block_uncompressed(false); let block_size_num = output.len() - 4; block_size.set_block_size(block_size_num as u32); output.splice(0..4, block_size.0.to_le_bytes()); debug!("Block size: {}, data size: {}", block_size_num, data.len()); if block_size_num <= MAX_BLOCK_SIZE { output_write.write(&output)?; } else { output_uncompressed_block(output_write, data)?; } Ok(()) } fn output_sequence(literals: &Vec<u8>, offset: u16, match_length: u32, output: &mut Vec<u8>) { debug!( "Outputting sequence: literals {:?}, offset={}, match_length={}", literals, offset, match_length ); let literals_len = literals.len() as u32; let match_length_saved = match_length - MATCH_LENGTH_OFFSET; let mut token = Token(0); token.set_literals_length(min(literals_len, TOKEN_MAX_VAL as u32) as u8); token.set_match_length(min(match_length_saved, TOKEN_MAX_VAL as u32) as u8); output.write_u8(token.0).unwrap(); output_lsic_int(literals_len, TOKEN_MAX_VAL, output); output.extend_from_slice(literals.as_slice()); if offset != 0 { output.write_u16::<LE>(offset).unwrap(); output_lsic_int(match_length_saved, TOKEN_MAX_VAL, output); } } fn output_lsic_int(val: u32, max_val: u8, output: &mut Vec<u8>) { if val < max_val as u32 { return; } let mut new_val = val - max_val as u32; while new_val > 255 { output.write_u8(255).unwrap(); new_val -= 255; } output.write_u8(new_val as u8).unwrap(); }
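The compressor code above stores literal and match lengths with LZ4's 4-bit token nibble plus overflow bytes (see output_sequence and output_lsic_int). As a rough, standalone sketch of that linear small-integer code, written against the usual frame-format convention of 255-valued continuation bytes terminated by a byte below 255 (the function name and values here are made up and not taken from the crate):

```rust
// Sketch: encode a length as (token nibble, extra bytes) in LZ4's LSIC style.
fn encode_lsic(len: u32) -> (u8, Vec<u8>) {
    // The token field can hold at most 15.
    let nibble = len.min(15) as u8;
    let mut extra = Vec::new();
    if len >= 15 {
        // Once the nibble saturates, the remainder follows as bytes of 255
        // until a byte smaller than 255 closes the value.
        let mut rest = len - 15;
        while rest >= 255 {
            extra.push(255);
            rest -= 255;
        }
        extra.push(rest as u8);
    }
    (nibble, extra)
}

fn main() {
    assert_eq!(encode_lsic(7), (7, vec![]));           // fits in the nibble
    assert_eq!(encode_lsic(15), (15, vec![0]));        // saturated nibble + terminating 0
    assert_eq!(encode_lsic(284), (15, vec![255, 14])); // 15 + 255 + 14
    println!("ok");
}
```

A decoder reverses this by adding bytes to the nibble until it reads one smaller than 255, which is exactly what get_lsic_int does in the matching decompressor shown later in this record's context items.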
use std::cmp::min; use log::debug; use xxhash_rust::xxh32::{xxh32, Xxh32}; use crate::constants::{ END_LITERAL_NUM, LZ4_MAGIC_NUMBER, MATCH_LENGTH_OFFSET, MAX_BLOCK_SIZE, MIN_COMPRESS_LENGTH, TOKEN_MAX_VAL, }; use crate::descriptors::{BdByte, BlockSize, FlgByte, Token}; use crate::numeral_coding; use crate::occurrence_map::OccurrenceMap; use byteorder::{WriteBytesExt, LE}; use std::collections::VecDeque; use std::io::Write; pub struct Compressor<'a, W: Write> { output_write: W, buffer: VecDeque<u8>, hash: Xxh32, hidden_data_encoder: numeral_coding::Decoder<'a>, prefer_hidden: bool, } impl<'a, W: Write> Compressor<'a, W> { pub fn new_with_hidden_data( writer: W, hidden_data: &'a [u8], prefer_hidden: bool, ) ->
pub fn new(writer: W) -> Result<Self, std::io::Error> { let mut compressor = Self { output_write: writer, buffer: VecDeque::new(), hash: Xxh32::new(0), hidden_data_encoder: numeral_coding::Decoder::new(b""), prefer_hidden: false, }; compressor.init()?; Ok(compressor) } fn init(&mut self) -> Result<(), std::io::Error> { self.write_header()?; Ok(()) } fn get_available_bytes(&self) -> usize { self.hidden_data_encoder.get_available_bytes() } pub fn finish(mut self) -> Result<usize, std::io::Error> { if !self.buffer.is_empty() { self.output_block(true)?; } self.write_footer()?; self.output_write.flush()?; Ok(self.get_available_bytes()) } fn write_header(&mut self) -> Result<(), std::io::Error> { self.output_write.write_u32::<LE>(LZ4_MAGIC_NUMBER)?; let frame_descriptor = self.build_frame_descriptor(); self.output_write.write(&frame_descriptor)?; Ok(()) } fn build_frame_descriptor(&self) -> Vec<u8> { let mut output = Vec::new(); let mut flag = FlgByte(0); flag.set_version(1); flag.set_block_independent(true); flag.set_content_checksum_added(true); output.write_u8(flag.0).unwrap(); let mut bd = BdByte(0); bd.set_block_max_size(7); output.write_u8(bd.0).unwrap(); let hc = ((xxh32(&output, 0) >> 8) & 0xFF) as u8; output.write_u8(hc).unwrap(); output } fn write_footer(&mut self) -> Result<(), std::io::Error> { self.output_write.write_u32::<LE>(0)?; self.output_write.write_u32::<LE>(self.hash.digest())?; Ok(()) } fn output_block(&mut self, force_write: bool) -> Result<(), std::io::Error> { let mut data = self.buffer.make_contiguous(); let mut to_shrink = 0; while !data.is_empty() && (data.len() >= MAX_BLOCK_SIZE || force_write) { if data.len() < MIN_COMPRESS_LENGTH { output_uncompressed_block(&mut self.output_write, data)?; to_shrink += data.len(); break; } let block_size = min(data.len(), MAX_BLOCK_SIZE); output_compressed_block( &mut self.output_write, &data[..block_size], &mut self.hidden_data_encoder, self.prefer_hidden, )?; to_shrink += block_size; data = &mut data[block_size..]; } self.buffer.drain(..to_shrink); Ok(()) } } impl<'a, W: Write> Write for Compressor<'a, W> { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.buffer.extend(buf); self.hash.update(buf); if self.buffer.len() >= MAX_BLOCK_SIZE { self.output_block(false)?; } Ok(buf.len()) } fn flush(&mut self) -> Result<(), std::io::Error> { if !self.buffer.is_empty() { self.output_block(true)?; self.output_write.flush()?; } Ok(()) } } fn output_uncompressed_block<W: Write>( mut output_write: W, data: &[u8], ) -> Result<(), std::io::Error> { debug!("Outputting uncompressed block with length: {}", data.len()); let mut block_size = BlockSize(0); block_size.set_block_uncompressed(true); block_size.set_block_size(data.len() as u32); output_write.write_u32::<LE>(block_size.0)?; output_write.write(data)?; Ok(()) } fn output_compressed_block<W: Write>( mut output_write: W, data: &[u8], hidden_data_encoder: &mut numeral_coding::Decoder, prefer_hidden: bool, ) -> Result<(), std::io::Error> { let mut output = Vec::new(); output.write_u32::<LE>(0).unwrap(); let mut occur = OccurrenceMap::new(data, prefer_hidden); let mut literals = Vec::new(); let mut i = 0; while i < data.len() - END_LITERAL_NUM { let occurrences = occur.get_occurrences(i); if occurrences.len() > 0 { let chosen_index = hidden_data_encoder.decode_value(occurrences.len() as u16); let (index, match_length) = occurrences.choose_occurrence(chosen_index as usize); if match_length < 4 { literals.push(data[i]); i += 1; continue; } let offset = (i - index) as u16; 
output_sequence(&literals, offset, match_length as u32, &mut output); literals.clear(); occur.add_occurrences(i, match_length); i += match_length; } else { literals.push(data[i]); occur.add_occurrences(i, 1); i += 1; } } literals.extend_from_slice(&data[data.len() - END_LITERAL_NUM..]); output_sequence(&literals, 0, MATCH_LENGTH_OFFSET, &mut output); let mut block_size = BlockSize(0); block_size.set_block_uncompressed(false); let block_size_num = output.len() - 4; block_size.set_block_size(block_size_num as u32); output.splice(0..4, block_size.0.to_le_bytes()); debug!("Block size: {}, data size: {}", block_size_num, data.len()); if block_size_num <= MAX_BLOCK_SIZE { output_write.write(&output)?; } else { output_uncompressed_block(output_write, data)?; } Ok(()) } fn output_sequence(literals: &Vec<u8>, offset: u16, match_length: u32, output: &mut Vec<u8>) { debug!( "Outputting sequence: literals {:?}, offset={}, match_length={}", literals, offset, match_length ); let literals_len = literals.len() as u32; let match_length_saved = match_length - MATCH_LENGTH_OFFSET; let mut token = Token(0); token.set_literals_length(min(literals_len, TOKEN_MAX_VAL as u32) as u8); token.set_match_length(min(match_length_saved, TOKEN_MAX_VAL as u32) as u8); output.write_u8(token.0).unwrap(); output_lsic_int(literals_len, TOKEN_MAX_VAL, output); output.extend_from_slice(literals.as_slice()); if offset != 0 { output.write_u16::<LE>(offset).unwrap(); output_lsic_int(match_length_saved, TOKEN_MAX_VAL, output); } } fn output_lsic_int(val: u32, max_val: u8, output: &mut Vec<u8>) { if val < max_val as u32 { return; } let mut new_val = val - max_val as u32; while new_val > 255 { output.write_u8(255).unwrap(); new_val -= 255; } output.write_u8(new_val as u8).unwrap(); }
Result<Self, std::io::Error> { let mut compressor = Self { output_write: writer, buffer: VecDeque::new(), hash: Xxh32::new(0), hidden_data_encoder: numeral_coding::Decoder::new(hidden_data), prefer_hidden, }; compressor.init()?; Ok(compressor) }
function_block-function_prefixed
[ { "content": "fn compress(data: &[u8]) -> Vec<u8> {\n\n let mut output = Vec::new();\n\n let mut compressor = Compressor::new(&mut output).unwrap();\n\n compressor.write(data).unwrap();\n\n compressor.finish().unwrap();\n\n\n\n output\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 2, "score": 69313.52708815748 }, { "content": "fn decompress(data: &[u8]) -> DecompressResult<(Vec<u8>, Vec<u8>)> {\n\n let mut output = Vec::new();\n\n let mut decompressor = Decompressor::new(data, false);\n\n decompressor.read_to_end(&mut output).unwrap();\n\n\n\n Ok((output, decompressor.finish()))\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 3, "score": 68369.5140671898 }, { "content": "fn decompress(data: &[u8]) -> DecompressResult<(Vec<u8>, Vec<u8>)> {\n\n let mut output = Vec::new();\n\n let mut decompressor = Decompressor::new(data, false);\n\n decompressor.read_to_end(&mut output).unwrap();\n\n\n\n Ok((output, decompressor.finish()))\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/stego.rs", "rank": 4, "score": 68369.5140671898 }, { "content": "pub fn decompress(\n\n input_path: &str,\n\n output_path: &str,\n\n hidden_path_opt: Option<&str>,\n\n prefer_hidden: bool,\n\n) {\n\n let input_file = fs::File::open(input_path).unwrap();\n\n let mut output_file = fs::File::create(output_path).unwrap();\n\n let mut decompressor = liblz4stego::decompressor::Decompressor::new(input_file, prefer_hidden);\n\n\n\n const BUFFER_SIZE: usize = 4 * 1024 * 1024;\n\n let mut buffer = Vec::with_capacity(BUFFER_SIZE);\n\n unsafe {\n\n buffer.set_len(BUFFER_SIZE);\n\n }\n\n loop {\n\n let bytes_read = decompressor.read(&mut buffer).unwrap();\n\n if bytes_read == 0 {\n\n break;\n\n }\n\n\n\n output_file.write(&buffer[..bytes_read]).unwrap();\n\n }\n\n\n\n let hidden_data = decompressor.finish();\n\n if let Some(hidden_path) = hidden_path_opt {\n\n fs::write(hidden_path, hidden_data).unwrap();\n\n }\n\n}\n", "file_path": "lz4stego/src/library_ctrl.rs", "rank": 5, "score": 68156.27996847901 }, { "content": "pub fn compress(\n\n input_path: &str,\n\n output_path: &str,\n\n hidden_path_opt: Option<&str>,\n\n count: bool,\n\n prefer_hidden: bool,\n\n) {\n\n let mut input_file = fs::File::open(input_path).unwrap();\n\n let output_file = fs::File::create(output_path).unwrap();\n\n let hidden_data = if let Some(hidden_path) = hidden_path_opt {\n\n fs::read(hidden_path).unwrap()\n\n } else {\n\n vec![]\n\n };\n\n let mut compressor = liblz4stego::compressor::Compressor::new_with_hidden_data(\n\n output_file,\n\n &hidden_data,\n\n prefer_hidden,\n\n )\n\n .unwrap();\n", "file_path": "lz4stego/src/library_ctrl.rs", "rank": 6, "score": 68156.27996847901 }, { "content": "fn decode_lz4(data: &Vec<u8>) -> Vec<u8> {\n\n let mut buffer = Vec::new();\n\n let mut decoder = Decoder::new(data.as_slice()).unwrap();\n\n decoder.read_to_end(&mut buffer).unwrap();\n\n\n\n buffer\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/stego.rs", "rank": 7, "score": 65186.17210346773 }, { "content": "fn decode_lz4(data: &Vec<u8>) -> Vec<u8> {\n\n let mut buffer = Vec::new();\n\n let mut decoder = Decoder::new(data.as_slice()).unwrap();\n\n decoder.read_to_end(&mut buffer).unwrap();\n\n\n\n buffer\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 8, "score": 65186.17210346773 }, { "content": "fn get_common_prefix_len(a: &[u8], b: &[u8]) -> usize {\n\n a.iter().zip(b).take_while(|(x, y)| x == y).count()\n\n}\n\n\n\npub struct Occurrences<'a> {\n\n data: Option<&'a [u8]>,\n\n occur: 
Vec<usize>,\n\n match_length: Option<usize>,\n\n index: usize,\n\n}\n\n\n\nimpl<'a> Occurrences<'a> {\n\n fn new_with_match_length(occur: Vec<usize>, match_length: usize) -> Self {\n\n Self {\n\n data: None,\n\n occur,\n\n match_length: Some(match_length),\n\n index: 0,\n\n }\n\n }\n", "file_path": "liblz4stego/src/occurrence_map.rs", "rank": 9, "score": 65186.17210346773 }, { "content": "fn get_lsic_int(data: &mut &[u8], initial_val: u8, max_val: u8) -> u32 {\n\n let mut val = initial_val as u32;\n\n\n\n if initial_val < max_val {\n\n return val;\n\n }\n\n\n\n loop {\n\n let current_val = data.read_u8().unwrap();\n\n val += current_val as u32;\n\n\n\n if current_val < 255 {\n\n return val;\n\n }\n\n }\n\n}\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 10, "score": 64394.83040811493 }, { "content": "pub fn init_logging() -> Result<(), SetLoggerError> {\n\n log::set_logger(&LOGGER).map(|()| log::set_max_level(LevelFilter::Debug))\n\n}\n\n\n", "file_path": "lz4stego/src/main.rs", "rank": 12, "score": 57084.28631219505 }, { "content": "fn get_block_max_size(index: u8) -> DecompressResult<usize> {\n\n if index < 3 || index > 7 {\n\n return Err(Lz4DecompressError::from_static_str(\n\n \"Block max size is invalid\",\n\n ));\n\n }\n\n\n\n Ok((index as usize - 4).pow(4) * 65536)\n\n}\n\n\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 14, "score": 51586.25903565465 }, { "content": "fn decode(data: &[u8], max_values: &[u16]) -> Vec<u16> {\n\n let mut result = Vec::new();\n\n let mut decoder = Decoder::new(data);\n\n\n\n for max_val in max_values {\n\n result.push(decoder.decode_value(*max_val));\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 15, "score": 47882.849860649716 }, { "content": "fn encode(values: &[u16], max_values: &[u16]) -> Vec<u8> {\n\n let mut encoder = Encoder::new();\n\n\n\n for (value, max_value) in values.iter().zip(max_values) {\n\n encoder.add_value(*value, *max_value);\n\n }\n\n\n\n encoder.finish()\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 16, "score": 47882.849860649716 }, { "content": "struct Logger;\n\n\n\nimpl log::Log for Logger {\n\n fn enabled(&self, metadata: &Metadata) -> bool {\n\n metadata.level() <= Level::Debug\n\n }\n\n\n\n fn log(&self, record: &Record) {\n\n if self.enabled(record.metadata()) {\n\n println!(\n\n \"[{}] [{}] {}\",\n\n record.level(),\n\n record.target(),\n\n record.args()\n\n );\n\n }\n\n }\n\n\n\n fn flush(&self) {}\n\n}\n\n\n\nstatic LOGGER: Logger = Logger;\n\n\n", "file_path": "lz4stego/src/main.rs", "rank": 17, "score": 47719.88914418482 }, { "content": "fn calc_match_length(data: &[u8], index: usize, occur_index: usize) -> usize {\n\n min(\n\n get_common_prefix_len(\n\n &data[index + MAP_PREF_SIZE..],\n\n &data[occur_index + MAP_PREF_SIZE..],\n\n ) + MAP_PREF_SIZE,\n\n data.len() - END_LITERAL_NUM - index,\n\n )\n\n}\n\n\n", "file_path": "liblz4stego/src/occurrence_map.rs", "rank": 18, "score": 44833.63610741217 }, { "content": "fn main() {\n\n let matches = App::new(PROJ_NAME)\n\n .version(PROJ_VERSION)\n\n .author(PROJ_AUTHORS)\n\n .about(\"LZ4 compressor and decompressor with steganography\")\n\n .arg(\n\n Arg::with_name(\"decompress\")\n\n .short(\"d\")\n\n .long(\"decompress\")\n\n .help(\"Decompress instead of compressing\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"count\")\n\n .short(\"c\")\n\n .long(\"count\")\n\n .help(\"Count how many bytes of data can be hidden\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"hidden\")\n\n 
.short(\"i\")\n", "file_path": "lz4stego/src/main.rs", "rank": 19, "score": 28199.4437533653 }, { "content": "#[test]\n\nfn decompress_empty() {\n\n let bytes = include_bytes!(\"test_data/empty.lz4\");\n\n let result = decompress(bytes);\n\n\n\n assert!(result.is_ok(), \"{}\", result.unwrap_err());\n\n assert_eq!(result.unwrap().0, b\"\");\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 20, "score": 26357.43301733103 }, { "content": "fn decompress_block_data(\n\n buffer: &mut Vec<u8>,\n\n hash: &mut Xxh32,\n\n mut data: &[u8],\n\n) -> DecompressResult<(usize, Vec<(u32, u32)>)> {\n\n let start_len = buffer.len();\n\n let mut matches: Vec<(u32, u32)> = Vec::new();\n\n let start_index = buffer.len();\n\n\n\n loop {\n\n let token = Token(data.read_u8()?);\n\n\n\n let literals_length_initial = token.get_literals_length();\n\n let literals_length =\n\n get_lsic_int(&mut data, literals_length_initial, TOKEN_MAX_VAL) as usize;\n\n buffer.extend_from_slice(&data[..literals_length]);\n\n data = &data[literals_length as usize..];\n\n\n\n if data.is_empty() {\n\n // End of block\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 21, "score": 26357.43301733103 }, { "content": "#[test]\n\nfn compress_medium() {\n\n let data = include_bytes!(\"test_data/medium\");\n\n let result = compress(data);\n\n let decoded = decode_lz4(&result);\n\n\n\n assert_eq!(data, decoded.as_slice());\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 22, "score": 26357.43301733103 }, { "content": "#[test]\n\nfn compress_empty() {\n\n let data = b\"\";\n\n let result = compress(data);\n\n let decoded = decode_lz4(&result);\n\n\n\n assert_eq!(data, decoded.as_slice());\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 23, "score": 26357.43301733103 }, { "content": "#[test]\n\nfn decompress_medium() {\n\n let bytes = include_bytes!(\"test_data/medium.lz4\");\n\n let expected = include_bytes!(\"test_data/medium\");\n\n let result = decompress(bytes);\n\n\n\n assert!(result.is_ok(), \"{}\", result.unwrap_err());\n\n assert_eq!(result.unwrap().0, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 24, "score": 26357.43301733103 }, { "content": "#[test]\n\nfn decompress_short_uncompressed() {\n\n let bytes = include_bytes!(\"test_data/short_uncompressed.lz4\");\n\n let result = decompress(bytes);\n\n\n\n assert!(result.is_ok(), \"{}\", result.unwrap_err());\n\n assert_eq!(result.unwrap().0, b\"aaa\");\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 25, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn compress_single_byte() {\n\n let data = b\"a\";\n\n let result = compress(data);\n\n let decoded = decode_lz4(&result);\n\n\n\n assert_eq!(data, decoded.as_slice());\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 26, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn decompress_single_byte() {\n\n let bytes = include_bytes!(\"test_data/single_byte.lz4\");\n\n let result = decompress(bytes);\n\n\n\n assert!(result.is_ok(), \"{}\", result.unwrap_err());\n\n assert_eq!(result.unwrap().0, b\"a\");\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 27, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn compress_short_compressed() {\n\n let data = b\"aaaaaaaaaaaaaaaaaaaa\";\n\n let result = compress(data);\n\n let decoded = decode_lz4(&result);\n\n\n\n assert_eq!(data, decoded.as_slice());\n\n}\n\n\n", "file_path": 
"liblz4stego/src/tests/compressor.rs", "rank": 28, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn test_single_byte() {\n\n let data = b\"ala a ala b ala c ala d ala e ala f ala g ala h ala i ala j ala k ala l ala\";\n\n let hidden_data = b\"ab\";\n\n let mut result = Vec::new();\n\n let mut compressor = Compressor::new_with_hidden_data(&mut result, hidden_data, true).unwrap();\n\n compressor.write(data).unwrap();\n\n compressor.finish().unwrap();\n\n\n\n let decoded_expected = decode_lz4(&result);\n\n let decoded_actual = decompress(&result);\n\n\n\n assert!(decoded_actual.is_ok(), \"{}\", decoded_actual.unwrap_err());\n\n let result = decoded_actual.unwrap();\n\n assert_eq!(result.0, decoded_expected);\n\n assert_eq!(result.1, hidden_data);\n\n}\n", "file_path": "liblz4stego/src/tests/stego.rs", "rank": 29, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn compress_long_match() {\n\n let mut data = b\"a\".repeat(100_000);\n\n data.extend_from_slice(&b\"b\".repeat(100_000));\n\n data.extend_from_slice(&b\"a\".repeat(100));\n\n let result = compress(&data);\n\n let decoded = decode_lz4(&result);\n\n\n\n assert_eq!(data, decoded.as_slice());\n\n}\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 30, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn decompress_short_compressed() {\n\n let bytes = include_bytes!(\"test_data/short_compressed.lz4\");\n\n let result = decompress(bytes);\n\n\n\n assert!(result.is_ok(), \"{}\", result.unwrap_err());\n\n assert_eq!(result.unwrap().0, b\"aaaaaaaaaaaaaaaaaaaa\");\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 31, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn decode_empty() {\n\n let data = vec![];\n\n let max_values = vec![2];\n\n let expected = vec![0];\n\n\n\n let result = decode(&data, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 32, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn decode_bytes() {\n\n let data = vec![199, 5, 15, 129, 215, 66, 100, 121];\n\n let max_values = vec![256, 256, 256, 256, 256, 256, 256, 256];\n\n let expected = vec![15, 129, 215, 66, 100, 121, 5, 199];\n\n\n\n let result = decode(&data, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 33, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn decode_single() {\n\n let data = vec![1];\n\n let max_values = vec![2];\n\n let expected = vec![1];\n\n\n\n let result = decode(&data, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 34, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn encode_bytes() {\n\n let values = vec![15, 129, 215, 66, 100, 121, 5, 199];\n\n let max_values = vec![256, 256, 256, 256, 256, 256, 256, 256];\n\n let expected = vec![199, 5, 15, 129, 215, 66, 100, 121];\n\n\n\n let result = encode(&values, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 35, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn encode_empty() {\n\n let values = vec![0];\n\n let max_values = vec![2];\n\n let expected = vec![];\n\n\n\n let result = encode(&values, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 36, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn 
encode_single() {\n\n let values = vec![1];\n\n let max_values = vec![2];\n\n let expected = vec![1];\n\n\n\n let result = encode(&values, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 37, "score": 25546.93370521774 }, { "content": "#[test]\n\nfn encode_mixed_long() {\n\n let values = vec![\n\n 709, 337, 145, 429, 945, 234, 267, 218, 259, 449, 596, 795, 377, 979, 407, 205, 769, 224,\n\n 760, 545, 993, 467, 439, 55, 331, 767, 316, 463, 860, 56, 355, 679, 365, 725, 959, 444,\n\n 669, 999, 549, 990, 738, 560, 951, 397, 748, 0, 125, 166, 214, 445,\n\n ];\n\n let max_values = vec![\n\n 1366, 1160, 4796, 7814, 4199, 9361, 7434, 7265, 5424, 945, 7381, 2125, 8772, 3415, 8975,\n\n 7452, 7106, 2832, 3473, 4004, 1300, 4373, 6373, 9408, 5104, 7354, 4471, 1195, 8586, 9304,\n\n 8251, 3802, 5539, 8627, 7941, 1512, 7303, 5146, 559, 5481, 6868, 5058, 9318, 3976, 5698,\n\n 696, 3358, 3568, 344, 9533,\n\n ];\n\n let expected = vec![\n\n 36, 172, 175, 113, 189, 145, 9, 221, 117, 234, 135, 161, 96, 242, 83, 213, 47, 73, 89, 33,\n\n 113, 156, 242, 11, 182, 213, 213, 163, 96, 68, 216, 33, 29, 203, 152, 232, 105, 183, 83,\n\n 187, 127, 142, 72, 237, 197, 50, 8, 68, 54, 167, 36, 80, 49, 117, 124, 168, 108, 225, 241,\n\n 151, 196, 243, 3, 206, 112, 198, 171, 137, 245, 252, 232, 20, 63, 70, 206,\n\n ];\n\n\n\n let result = encode(&values, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 38, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn decode_mixed_long() {\n\n let data = vec![\n\n 36, 172, 175, 113, 189, 145, 9, 221, 117, 234, 135, 161, 96, 242, 83, 213, 47, 73, 89, 33,\n\n 113, 156, 242, 11, 182, 213, 213, 163, 96, 68, 216, 33, 29, 203, 152, 232, 105, 183, 83,\n\n 187, 127, 142, 72, 237, 197, 50, 8, 68, 54, 167, 36, 80, 49, 117, 124, 168, 108, 225, 241,\n\n 151, 196, 243, 3, 206, 112, 198, 171, 137, 245, 252, 232, 20, 63, 70, 206,\n\n ];\n\n let max_values = vec![\n\n 1366, 1160, 4796, 7814, 4199, 9361, 7434, 7265, 5424, 945, 7381, 2125, 8772, 3415, 8975,\n\n 7452, 7106, 2832, 3473, 4004, 1300, 4373, 6373, 9408, 5104, 7354, 4471, 1195, 8586, 9304,\n\n 8251, 3802, 5539, 8627, 7941, 1512, 7303, 5146, 559, 5481, 6868, 5058, 9318, 3976, 5698,\n\n 696, 3358, 3568, 344, 9533,\n\n ];\n\n let expected = vec![\n\n 709, 337, 145, 429, 945, 234, 267, 218, 259, 449, 596, 795, 377, 979, 407, 205, 769, 224,\n\n 760, 545, 993, 467, 439, 55, 331, 767, 316, 463, 860, 56, 355, 679, 365, 725, 959, 444,\n\n 669, 999, 549, 990, 738, 560, 951, 397, 748, 0, 125, 166, 214, 445,\n\n ];\n\n\n\n let result = decode(&data, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 39, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn decode_bytes_short() {\n\n let data = vec![129, 15];\n\n let max_values = vec![256, 256];\n\n let expected = vec![15, 129];\n\n\n\n let result = decode(&data, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 40, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn compress_large_two_blocks() {\n\n let data = b\"a\".repeat(8_388_608);\n\n let result = compress(&data);\n\n let decoded = decode_lz4(&result);\n\n\n\n assert_eq!(data, decoded.as_slice());\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 41, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn 
decode_mixed_medium() {\n\n let data = vec![57, 191, 128, 100, 22, 191];\n\n let max_values = vec![10, 7, 53, 256, 2133, 100, 15, 256];\n\n let expected = vec![3, 6, 10, 100, 57, 42, 13, 20];\n\n\n\n let result = decode(&data, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 42, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn decode_mixed_short() {\n\n let data = vec![2, 251];\n\n let max_values = vec![10, 7, 53];\n\n let expected = vec![3, 6, 10];\n\n\n\n let result = decode(&data, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 43, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn compress_large_two_parts() {\n\n let data = include_bytes!(\"test_data/large_two_parts\");\n\n let result = compress(data);\n\n let decoded = decode_lz4(&result);\n\n\n\n assert_eq!(data, decoded.as_slice());\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 44, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn encode_mixed_short() {\n\n let values = vec![3, 6, 10];\n\n let max_values = vec![10, 7, 53];\n\n let expected = vec![2, 251];\n\n\n\n let result = encode(&values, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 45, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn encode_bytes_short() {\n\n let values = vec![15, 129];\n\n let max_values = vec![256, 256];\n\n let expected = vec![129, 15];\n\n\n\n let result = encode(&values, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 46, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn decompress_large_two_blocks() {\n\n let bytes = include_bytes!(\"test_data/large_two_blocks.lz4\");\n\n let result = decompress(bytes);\n\n\n\n assert!(result.is_ok(), \"{}\", result.unwrap_err());\n\n assert_eq!(result.unwrap().0, b\"a\".repeat(8_388_608));\n\n}\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 47, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn encode_mixed_medium() {\n\n let values = vec![3, 6, 10, 100, 57, 42, 13, 20];\n\n let max_values = vec![10, 7, 53, 256, 2133, 100, 15, 256];\n\n let expected = vec![57, 191, 128, 100, 22, 191];\n\n\n\n let result = encode(&values, &max_values);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 48, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn decompress_large_two_parts() {\n\n let bytes = include_bytes!(\"test_data/large_two_parts.lz4\");\n\n let expected = include_bytes!(\"test_data/large_two_parts\");\n\n let result = decompress(bytes);\n\n\n\n assert!(result.is_ok(), \"{}\", result.unwrap_err());\n\n assert_eq!(result.unwrap().0, expected);\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 49, "score": 24798.77258109033 }, { "content": "#[test]\n\nfn decompress_large_single_character() {\n\n let bytes = include_bytes!(\"test_data/large_single_character.lz4\");\n\n let result = decompress(bytes);\n\n\n\n assert!(result.is_ok(), \"{}\", result.unwrap_err());\n\n assert_eq!(result.unwrap().0, b\"a\".repeat(1_000_000));\n\n}\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 50, "score": 24798.77258109033 }, { "content": "use xxhash_rust::xxh32::Xxh32;\n\n\n\nuse crate::constants::{LZ4_MAGIC_NUMBER, 
MATCH_LENGTH_OFFSET, TOKEN_MAX_VAL};\n\nuse crate::descriptors::{BdByte, BlockSize, FlgByte, Token};\n\nuse crate::errors::{DecompressResult, Lz4DecompressError};\n\n\n\nuse crate::numeral_coding;\n\nuse crate::occurrence_map::OccurrenceMap;\n\nuse byteorder::{ReadBytesExt, LE};\n\nuse std::cmp::min;\n\nuse std::io::Read;\n\n\n\npub struct Decompressor<R: Read> {\n\n input_read: R,\n\n input_buffer: Vec<u8>,\n\n buffer: Vec<u8>,\n\n hash: Xxh32,\n\n hidden_data_decoder: numeral_coding::Encoder,\n\n\n\n header_read: bool,\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 53, "score": 12.412373904915789 }, { "content": "use crate::constants::{END_LITERAL_NUM, MAP_PREF_SIZE, MAX_OFFSET};\n\nuse std::cmp::min;\n\nuse std::collections::hash_map::Entry::{Occupied, Vacant};\n\nuse std::collections::{HashMap, VecDeque};\n\n\n\npub struct OccurrenceMap<'a> {\n\n data: &'a [u8],\n\n occur: HashMap<&'a [u8], VecDeque<usize>>,\n\n\n\n prefer_hidden: bool,\n\n}\n\n\n\nimpl<'a> OccurrenceMap<'a> {\n\n pub fn new(data: &'a [u8], prefer_hidden: bool) -> Self {\n\n Self {\n\n data,\n\n occur: HashMap::new(),\n\n prefer_hidden,\n\n }\n\n }\n", "file_path": "liblz4stego/src/occurrence_map.rs", "rank": 56, "score": 10.102934880370464 }, { "content": " content_checksum_added: bool,\n\n buffer_start: usize,\n\n prefer_hidden: bool,\n\n}\n\n\n\nimpl<R: Read> Decompressor<R> {\n\n pub fn new(input_read: R, prefer_hidden: bool) -> Self {\n\n const INPUT_BUFFER_SIZE: usize = 4 * 1024 * 1024;\n\n\n\n let mut input_buffer = Vec::with_capacity(INPUT_BUFFER_SIZE);\n\n unsafe {\n\n input_buffer.set_len(INPUT_BUFFER_SIZE);\n\n }\n\n\n\n Self {\n\n input_read,\n\n input_buffer,\n\n buffer: Vec::new(),\n\n hash: Xxh32::new(0),\n\n hidden_data_decoder: numeral_coding::Encoder::new(),\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 58, "score": 8.333085140884348 }, { "content": " self.buffer.extend_from_slice(new_data);\n\n\n\n Ok(block_size)\n\n } else {\n\n let (bytes_read, matches) = decompress_block_data(\n\n &mut self.buffer,\n\n &mut self.hash,\n\n &self.input_buffer[..block_size],\n\n )?;\n\n self.analyze_matches(start_index, matches);\n\n\n\n Ok(bytes_read)\n\n }\n\n }\n\n\n\n fn check_checksum(&mut self) -> DecompressResult<()> {\n\n if self.content_checksum_added {\n\n let file_checksum = self.input_read.read_u32::<LE>()?;\n\n let computed_checksum = self.hash.digest();\n\n if file_checksum != computed_checksum {\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 60, "score": 6.335835852406254 }, { "content": "pub const LZ4_MAGIC_NUMBER: u32 = 0x184D2204;\n\n\n\npub const MIN_COMPRESS_LENGTH: usize = 13;\n\npub const END_LITERAL_NUM: usize = 5;\n\npub const MAP_PREF_SIZE: usize = 4;\n\npub const MAX_BLOCK_SIZE: usize = 4 * 1024 * 1024 - 12;\n\npub const MAX_OFFSET: usize = 65535;\n\npub const TOKEN_MAX_VAL: u8 = 15;\n\npub const MATCH_LENGTH_OFFSET: u32 = 4;\n", "file_path": "liblz4stego/src/constants.rs", "rank": 61, "score": 5.972023853112066 }, { "content": "\n\nbitfield! {\n\n pub struct BlockSize(u32);\n\n impl Debug;\n\n\n\n pub get_block_size, set_block_size: 30, 0;\n\n pub is_uncompressed, set_block_uncompressed: 31;\n\n}\n\n\n\nbitfield! 
{\n\n pub struct Token(u8);\n\n impl Debug;\n\n\n\n pub get_literals_length, set_literals_length: 7, 4;\n\n pub get_match_length, set_match_length: 3, 0;\n\n}\n", "file_path": "liblz4stego/src/descriptors.rs", "rank": 62, "score": 5.875858178573962 }, { "content": "use std::fs;\n\nuse std::io::{Read, Write};\n\n\n", "file_path": "lz4stego/src/library_ctrl.rs", "rank": 63, "score": 5.7248457488532 }, { "content": "use lz4::Decoder;\n\nuse std::io::{Read, Write};\n\n\n\nuse crate::compressor::Compressor;\n\n\n", "file_path": "liblz4stego/src/tests/compressor.rs", "rank": 64, "score": 5.605324036020015 }, { "content": "use bitfield::bitfield;\n\n\n\nbitfield! {\n\n pub struct FlgByte(u8);\n\n impl Debug;\n\n\n\n pub get_version, set_version: 7, 6;\n\n pub is_block_independent, set_block_independent: 5;\n\n pub is_block_checksum_added, set_block_checksum_added: 4;\n\n pub is_content_size_added, set_content_size_added: 3;\n\n pub is_content_checksum_added, set_content_checksum_added: 2;\n\n pub is_dictionary_id_set, set_dictionary_id_set: 0;\n\n}\n\n\n\nbitfield! {\n\n pub struct BdByte(u8);\n\n impl Debug;\n\n\n\n pub get_block_max_size, set_block_max_size: 6, 4;\n\n}\n", "file_path": "liblz4stego/src/descriptors.rs", "rank": 65, "score": 5.511041663419601 }, { "content": " hash.update(&buffer[start_len..]);\n\n return Ok((buffer.len() - start_len, matches));\n\n }\n\n\n\n let offset = data.read_u16::<LE>()? as usize;\n\n if offset == 0 {\n\n return Err(Lz4DecompressError::from_static_str(\"Offset is 0\"));\n\n }\n\n let match_length_initial = token.get_match_length();\n\n let match_length =\n\n get_lsic_int(&mut data, match_length_initial, TOKEN_MAX_VAL) + MATCH_LENGTH_OFFSET;\n\n let next_pos = buffer.len();\n\n let next_pos_block = next_pos - start_index;\n\n matches.push((next_pos_block as u32, next_pos_block as u32 - offset as u32));\n\n\n\n let match_pos = next_pos - offset;\n\n if offset == 1 {\n\n buffer.resize(buffer.len() + match_length as usize, buffer[match_pos]);\n\n } else {\n\n let mut to_copy = match_length as usize;\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 66, "score": 5.384100212554831 }, { "content": "use std::io::{Read, Write};\n\n\n\nuse lz4::Decoder;\n\n\n\nuse crate::compressor::Compressor;\n\nuse crate::decompressor::Decompressor;\n\nuse crate::errors::DecompressResult;\n\n\n", "file_path": "liblz4stego/src/tests/stego.rs", "rank": 67, "score": 5.279714331063115 }, { "content": "use byteorder::ReadBytesExt;\n\n\n\npub struct Encoder {\n\n values: Vec<(u16, u16)>,\n\n encoded: Vec<u8>,\n\n x: u32,\n\n}\n\n\n\nimpl Encoder {\n\n pub fn new() -> Self {\n\n Self {\n\n values: Vec::new(),\n\n encoded: Vec::new(),\n\n x: 0,\n\n }\n\n }\n\n\n\n pub fn add_value(&mut self, value: u16, max_value: u16) {\n\n assert!(value < max_value);\n\n self.values.push((value, max_value));\n", "file_path": "liblz4stego/src/numeral_coding.rs", "rank": 68, "score": 4.877411545408335 }, { "content": " }\n\n}\n\n\n\npub struct Decoder<'a> {\n\n data: &'a [u8],\n\n x: u32,\n\n available_bits: f64,\n\n}\n\n\n\nimpl<'a> Decoder<'a> {\n\n pub fn new(data: &'a [u8]) -> Self {\n\n Self {\n\n data,\n\n x: 0,\n\n available_bits: 0.0,\n\n }\n\n }\n\n\n\n pub fn decode_value(&mut self, max_value: u16) -> u16 {\n\n self.available_bits += (max_value as f64).log2();\n", "file_path": "liblz4stego/src/numeral_coding.rs", "rank": 69, "score": 4.788203088344828 }, { "content": "use clap::{App, Arg};\n\nuse log::{Level, LevelFilter, Metadata, Record, SetLoggerError};\n\n\n\nmod 
library_ctrl;\n\n\n\npub const PROJ_NAME: &str = env!(\"CARGO_PKG_NAME\");\n\npub const PROJ_VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n\npub const PROJ_AUTHORS: &str = env!(\"CARGO_PKG_AUTHORS\");\n\n\n", "file_path": "lz4stego/src/main.rs", "rank": 71, "score": 4.460708201511987 }, { "content": " }\n\n\n\n pub fn finish(mut self) -> Vec<u8> {\n\n for (val, max_val) in self.values.iter().rev() {\n\n self.x = self.x * (*max_val as u32) + (*val as u32);\n\n\n\n while self.x >= (1 << 16) {\n\n self.encoded.push((self.x & 0xFF) as u8);\n\n self.x >>= 8;\n\n }\n\n }\n\n\n\n while self.x > 0 {\n\n self.encoded.push((self.x & 0xFF) as u8);\n\n self.x >>= 8;\n\n }\n\n\n\n self.encoded.reverse();\n\n\n\n self.encoded\n", "file_path": "liblz4stego/src/numeral_coding.rs", "rank": 72, "score": 4.410631714193274 }, { "content": "\n\n const BUFFER_SIZE: usize = 4 * 1024 * 1024;\n\n let mut buffer = Vec::with_capacity(BUFFER_SIZE);\n\n unsafe {\n\n buffer.set_len(BUFFER_SIZE);\n\n }\n\n loop {\n\n let bytes_read = input_file.read(&mut buffer).unwrap();\n\n if bytes_read == 0 {\n\n break;\n\n }\n\n\n\n compressor.write(&buffer[..bytes_read]).unwrap();\n\n }\n\n\n\n let available_bytes = compressor.finish().unwrap();\n\n if count {\n\n eprintln!(\"Available hidden data bytes: {}\", available_bytes);\n\n }\n\n}\n\n\n", "file_path": "lz4stego/src/library_ctrl.rs", "rank": 73, "score": 4.387877278750674 }, { "content": "\n\n while to_copy > 0 {\n\n let current_to_copy = min(to_copy, buffer.len() - match_pos);\n\n buffer.extend_from_within(match_pos..match_pos + current_to_copy);\n\n to_copy -= current_to_copy;\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<R: Read> Read for Decompressor<R> {\n\n fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {\n\n if !self.header_read {\n\n self.read_header()?;\n\n }\n\n\n\n if self.buffer.is_empty() || self.buffer_start >= self.buffer.len() {\n\n self.buffer.clear();\n\n self.buffer_start = 0;\n\n let bytes_read = self.read_block()?;\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 74, "score": 4.132579942086245 }, { "content": " self.header_read = true;\n\n\n\n Ok(())\n\n }\n\n\n\n fn read_block(&mut self) -> DecompressResult<usize> {\n\n let block_size_val = self.input_read.read_u32::<LE>()?;\n\n if block_size_val == 0 {\n\n self.check_checksum()?;\n\n return Ok(0);\n\n }\n\n\n\n let block_size_desc = BlockSize(block_size_val);\n\n let start_index = self.buffer.len();\n\n let block_size = block_size_desc.get_block_size() as usize;\n\n\n\n self.input_read.read(&mut self.input_buffer[..block_size])?;\n\n if block_size_desc.is_uncompressed() {\n\n let new_data = &self.input_buffer[..block_size];\n\n self.hash.update(new_data);\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 75, "score": 4.012771132770508 }, { "content": "use std::{error, fmt};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Lz4DecompressError {\n\n message: String,\n\n}\n\n\n\nimpl Lz4DecompressError {\n\n pub fn from_string(message: String) -> Self {\n\n Lz4DecompressError { message }\n\n }\n\n\n\n pub fn from_static_str(message: &'static str) -> Self {\n\n Lz4DecompressError {\n\n message: message.to_owned(),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for Lz4DecompressError {}\n", "file_path": "liblz4stego/src/errors.rs", "rank": 76, "score": 3.6127032860967976 }, { "content": "\n\n if bytes_read == 0 {\n\n return Ok(0);\n\n }\n\n }\n\n\n\n let to_return = min(buf.len(), self.buffer.len() - self.buffer_start);\n\n buf[..to_return]\n\n 
.copy_from_slice(&self.buffer[self.buffer_start..self.buffer_start + to_return]);\n\n self.buffer_start += to_return;\n\n Ok(to_return)\n\n }\n\n}\n\n\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 77, "score": 3.3440352151935246 }, { "content": "\n\n header_read: false,\n\n content_checksum_added: false,\n\n buffer_start: 0,\n\n prefer_hidden,\n\n }\n\n }\n\n\n\n fn read_header(&mut self) -> DecompressResult<()> {\n\n if self.input_read.read_u32::<LE>()? != LZ4_MAGIC_NUMBER {\n\n return Err(Lz4DecompressError::from_static_str(\"Invalid header\"));\n\n }\n\n\n\n // TODO check all releveant flags\n\n let flg = FlgByte(self.input_read.read_u8()?);\n\n if flg.get_version() != 1 {\n\n return Err(Lz4DecompressError::from_static_str(\"Version is not 1\"));\n\n }\n\n\n\n if flg.is_dictionary_id_set() {\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 78, "score": 3.3096274667742454 }, { "content": "use crate::decompressor::Decompressor;\n\nuse crate::errors::DecompressResult;\n\nuse std::io::Read;\n\n\n", "file_path": "liblz4stego/src/tests/decompressor.rs", "rank": 79, "score": 3.262149415047462 }, { "content": " let max_val = max_value as u32;\n\n\n\n while self.x < (max_val << 8) && !self.data.is_empty() {\n\n self.x <<= 8;\n\n self.x += self.data.read_u8().unwrap() as u32;\n\n }\n\n\n\n let result = (self.x % max_val) as u16;\n\n self.x = self.x / max_val;\n\n\n\n result\n\n }\n\n\n\n pub fn get_available_bytes(&self) -> usize {\n\n (self.available_bits / 8.0) as usize\n\n }\n\n}\n", "file_path": "liblz4stego/src/numeral_coding.rs", "rank": 80, "score": 3.19844368314565 }, { "content": " return Err(Lz4DecompressError::from_static_str(\"Checksum is invalid\"));\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn finish(self) -> Vec<u8> {\n\n self.hidden_data_decoder.finish()\n\n }\n\n\n\n fn analyze_matches(&mut self, start_index: usize, matches: Vec<(u32, u32)>) {\n\n let data = &self.buffer[start_index..];\n\n let mut occur = OccurrenceMap::new(data, self.prefer_hidden);\n\n\n\n let mut last_index: u32 = 0;\n\n\n\n for (index, match_index) in matches {\n\n occur.add_occurrences(last_index as usize, index as usize - last_index as usize);\n\n last_index = index;\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 81, "score": 3.117009654003166 }, { "content": "\n\n fn new(occur: Vec<usize>, data: &'a [u8], index: usize) -> Self {\n\n Self {\n\n data: Some(data),\n\n occur,\n\n match_length: None,\n\n index,\n\n }\n\n }\n\n\n\n fn empty() -> Self {\n\n Self {\n\n data: None,\n\n occur: Default::default(),\n\n match_length: Some(0),\n\n index: 0,\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n", "file_path": "liblz4stego/src/occurrence_map.rs", "rank": 82, "score": 3.093217297408165 }, { "content": "mod constants;\n\nmod descriptors;\n\nmod numeral_coding;\n\nmod occurrence_map;\n\n\n\npub mod compressor;\n\npub mod decompressor;\n\npub mod errors;\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "liblz4stego/src/lib.rs", "rank": 84, "score": 2.842652359257671 }, { "content": " return Err(Lz4DecompressError::from_static_str(\n\n \"Dictionary ID is not supported\",\n\n ));\n\n }\n\n\n\n if !flg.is_block_independent() {\n\n return Err(Lz4DecompressError::from_static_str(\n\n \"Blocks must be independent\",\n\n ));\n\n }\n\n\n\n self.content_checksum_added = flg.is_content_checksum_added();\n\n\n\n let byte = BdByte(self.input_read.read_u8()?);\n\n let block_max_size = get_block_max_size(byte.get_block_max_size())?;\n\n self.buffer.reserve(block_max_size);\n\n\n\n 
// TODO check HC\n\n self.input_read.read_u8()?;\n\n\n", "file_path": "liblz4stego/src/decompressor.rs", "rank": 86, "score": 2.7629430642175428 }, { "content": "use crate::numeral_coding::{Decoder, Encoder};\n\n\n", "file_path": "liblz4stego/src/tests/numeral_coding.rs", "rank": 87, "score": 2.692507491581665 }, { "content": "\n\nimpl From<std::io::Error> for Lz4DecompressError {\n\n fn from(e: std::io::Error) -> Self {\n\n Self::from_string(e.to_string())\n\n }\n\n}\n\n\n\nimpl From<Lz4DecompressError> for std::io::Error {\n\n fn from(e: Lz4DecompressError) -> Self {\n\n Self::new(std::io::ErrorKind::InvalidInput, e)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Lz4DecompressError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"error during LZ4 decompressing at offset {}\",\n\n self.message\n\n )\n\n }\n\n}\n\n\n\npub type DecompressResult<T> = std::result::Result<T, Lz4DecompressError>;\n", "file_path": "liblz4stego/src/errors.rs", "rank": 88, "score": 2.067960348035838 }, { "content": " self.occur.len()\n\n }\n\n\n\n pub fn choose_occurrence(&self, index: usize) -> (usize, usize) {\n\n let occur_index = self.occur[index];\n\n\n\n let match_length = if let Some(fixed_match_length) = self.match_length {\n\n fixed_match_length\n\n } else {\n\n calc_match_length(self.data.unwrap(), self.index, occur_index)\n\n };\n\n\n\n (occur_index, match_length)\n\n }\n\n\n\n pub fn get_occurrence_index(&self, chosen_index: usize) -> Option<usize> {\n\n self.occur.iter().position(|x| *x == chosen_index)\n\n }\n\n}\n", "file_path": "liblz4stego/src/occurrence_map.rs", "rank": 89, "score": 1.8376507263042217 }, { "content": "\n\n pub fn add_occurrences(&mut self, index: usize, to_advance: usize) {\n\n let (start_index, actual_to_advance) = if to_advance > MAX_OFFSET {\n\n (index + to_advance - MAX_OFFSET, MAX_OFFSET)\n\n } else {\n\n (index, to_advance)\n\n };\n\n\n\n for i in start_index..start_index + actual_to_advance {\n\n let pref = &self.data[i..i + MAP_PREF_SIZE];\n\n self.occur.entry(pref).or_default().push_back(i);\n\n }\n\n }\n\n\n\n pub fn get_occurrences(&mut self, index: usize) -> Occurrences {\n\n let pref = &self.data[index..index + MAP_PREF_SIZE];\n\n let entry = self.occur.entry(pref);\n\n\n\n match entry {\n\n Occupied(mut entry_val) => {\n", "file_path": "liblz4stego/src/occurrence_map.rs", "rank": 90, "score": 1.601465699003429 }, { "content": "lz4stego\n\n========\n\n\n\nAn implementation of a bit recycling steganography for the LZ4 compression algorithm.\n\n\n\n## Description\n\n\n\nLZ4 format internally represents chunks of data either as literals, or matches of arbitrary lengths from some previous locations in the file. This allows it to achieve very high compression and decompression speed while maintaining decent compression ratio.\n\n\n\nHowever, since it does not use Huffman encoding to encode match location offsets (like for instance Deflate algorithm does), it allows one to choose some specific match each time in the file without sacrificing compression ratio. The technique is being called the bit recycling and is the base of what happens in lz4stego. Each time a 4-byte substring has appeared in the file at least twice, we can choose one of the previous matches of our choosing, thus hiding some bits of data. Most decompressors won't ever see any difference, but lz4stego decompressor can bring back the original hidden message. 
\n\n\n\n## Usage\n\n\n\n### Compressing\n\n\n\n```\n\nlz4stego -i <hidden_file_path> <input_file_path> <output_file_path>\n\n```\n\n\n\nYou can optionally use `-c/--count` flag to tell lz4stego to output the maximum possible number of bytes that can be hidden.\n\n\n\n`-p/--prefer-hidden` flag tells lz4stego to sacrifice compression ratio and try to output as many bytes of hidden data as possible. This is achieved by ignoring the match lengths and not trying to output the longest match - any match that is at least 4 bytes long is usable. Note that in this mode, the compression ratio depends on the actual contents of the hidden data.\n\n\n\n### Decompressing\n\n\n\n```\n\nlz4stego -d -i <hidden_file_path> <input_file_path> <output_file_path>\n\n```\n\n\n\nPlease note that if you used `-p/--prefer-hidden` flag for compressing, it must also be used when decompressing data.\n\n\n", "file_path": "README.md", "rank": 91, "score": 1.383912601507046 }, { "content": " .long(\"hidden\")\n\n .value_name(\"FILE\")\n\n .help(\"Hidden data file path\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"prefer-hidden\")\n\n .short(\"p\")\n\n .long(\"prefer-hidden\")\n\n .help(\"Prefer hidden data capacity over compression ratio. Must be set for decompressing as well\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"verbose\")\n\n .short(\"v\")\n\n .long(\"verbose\")\n\n .help(\"Verbose output\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"INPUT\")\n\n .help(\"input filename\")\n\n .required(true)\n", "file_path": "lz4stego/src/main.rs", "rank": 92, "score": 1.2400014218255717 }, { "content": "## Steganography benchmark\n\n\n\n| Dataset | Original size | Compressed size | Hidden capacity | Hidden capacity with `-p` flag |\n\n|---------------------------------------|---------------|-----------------|-----------------|--------------------------------|\n\n| First 1MB of [enwik8][1] | 1,000,000B | 428,658B | 17,320B | 60,041B |\n\n| 4MiB output of /dev/urandom | 4,194,304B | 4,194,327B | 0B | 0B |\n\n| sao file from [Silesia Corpus][2] | 7,251,944B | 5,808,448B | 141,736B | 280,296B |\n\n| reymont file from [Silesia Corpus][2] | 6,627,202B | 2,245,208B | 98,789B | 672,126B |\n\n\n\n[1]: https://cs.fit.edu/~mmahoney/compression/textdata.html\n\n[2]: http://sun.aei.polsl.pl/~sdeor/index.php?page=silesia\n\n\n\n## Building\n\nlz4stego is being built using [Cargo and Rust stable](https://www.rust-lang.org/tools/install).\n\n\n\n```\n\ncargo build --release\n\n```\n\n\n\nThe binary will be created in `target/release/lz4stego`.\n", "file_path": "README.md", "rank": 93, "score": 0.839515040024919 } ]
Rust
qlib/buddyallocator.rs
CentaurusInfra/Quark
1079b36efa7e537f8fec39f037ee5ccc71977e7d
use super::mutex::*; use alloc::slice; use alloc::vec::Vec; use core::ops::Deref; use super::addr::*; use super::common::*; pub fn ZeroPage(pageStart: u64) { unsafe { let arr = slice::from_raw_parts_mut(pageStart as *mut u64, 512); for i in 0..512 { arr[i] = 0 } } } #[derive(PartialEq, Copy, Clone, Default)] pub struct MemAllocatorInternal { ba: BuddyAllocator, baseAddr: u64, } impl MemAllocatorInternal { pub fn New() -> Self { return Self { ba: BuddyAllocator::New(0, 0), baseAddr: 0, }; } pub fn Init(baseAddr: u64, ord: u64) -> Self { let mut ba = BuddyAllocator::New(ord, baseAddr); let baSize = 1 << (ord + 1); let mut baPages = baSize >> PAGE_SHIFT; if (baSize & PAGE_MASK) != 0 { baPages += 1; } let addr = ba.allocate(baPages) as u64; assert_eq!(addr, 0); return Self { ba, baseAddr }; } pub fn Load(&mut self, baseAddr: u64, ord: u64) { self.ba.Load(ord, baseAddr); self.baseAddr = baseAddr; } pub fn Alloc(&mut self, pages: u64) -> Result<u64> { let pageOff = self.ba.allocate(pages); if pageOff == -1 { info!("buddyalloc ..."); Err(Error::NoEnoughMemory) } else { Ok(self.baseAddr + ((pageOff as u64) << PAGE_SHIFT)) } } pub fn Free(&mut self, addr: u64, pages: u64) -> Result<()> { let pageOff = (addr - self.baseAddr) as u64 >> PAGE_SHIFT; let ret = self.ba.free(pageOff, pages); if ret { Ok(()) } else { Err(Error::InvalidInput) } } } pub struct MemAllocator(QMutex<MemAllocatorInternal>); impl Deref for MemAllocator { type Target = QMutex<MemAllocatorInternal>; fn deref(&self) -> &QMutex<MemAllocatorInternal> { &self.0 } } impl RefMgr for MemAllocator { fn Ref(&self, _addr: u64) -> Result<u64> { return Ok(1); } fn Deref(&self, addr: u64) -> Result<u64> { self.FreePage(addr).unwrap(); Ok(0) } fn GetRef(&self, _addr: u64) -> Result<u64> { Ok(1) } } impl Allocator for MemAllocator { fn AllocPage(&self, _incrRef: bool) -> Result<u64> { let res = self.lock().Alloc(1); return res; } fn FreePage(&self, addr: u64) -> Result<()> { ZeroPage(addr); return self.lock().Free(addr, 1); } } impl MemAllocator { pub fn New() -> Self { return Self(QMutex::new(MemAllocatorInternal::New())); } pub fn Init(baseAddr: u64, ord: u64) -> Self { return Self(QMutex::new(MemAllocatorInternal::Init(baseAddr, ord))); } pub fn Load(&self, baseAddr: u64, ord: u64) { self.lock().Load(baseAddr, ord); } pub fn Alloc(&self, pages: u64) -> Result<u64> { return self.lock().Alloc(pages); } pub fn Free(&self, addr: u64, pages: u64) -> Result<()> { return self.lock().Free(addr, pages); } } #[repr(u8)] #[derive(PartialEq, Copy, Clone, Debug)] enum Node { Unused = 0, Used, Split, Full, } #[derive(PartialEq, Copy, Clone, Default)] pub struct BuddyAllocator { levels: u64, size: u64, root: u64, } impl BuddyAllocator { pub fn New(levels: u64, addr: u64) -> BuddyAllocator { let size: u64 = (1 << (levels + 1)) - 1; return BuddyAllocator { levels: levels, size: size, root: addr, }; } pub fn Load(&mut self, levels: u64, addr: u64) { let size: u64 = (1 << (levels + 1)) - 1; self.levels = levels; self.size = size; self.root = addr; } fn tree(&self) -> &mut [Node] { unsafe { slice::from_raw_parts_mut(self.root as *mut Node, self.size as usize) } } fn alloc(&mut self, idx: u64, t_level: u64, c_level: u64) -> isize { if c_level == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let current_level_offset = (1 << self.levels - c_level) - 1; return (idx - current_level_offset) as isize * (1 << c_level); } else { return -1; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match 
self.tree()[idx as usize] { Node::Used | Node::Full => return -1, Node::Unused => { self.tree()[idx as usize] = Node::Split; return self.alloc(left_child, t_level, c_level - 1); } Node::Split => { let mut res = self.alloc(left_child, t_level, c_level - 1); if res == -1 { res = self.alloc(right_child, t_level, c_level - 1); } self.CheckParentFull(idx); return res; } } } fn alloc1(&mut self, t_level: u64) -> isize { let mut stack: Vec<(u32, u32)> = Vec::with_capacity(self.levels as usize + 1); stack.push((0, self.levels as u32)); while stack.len() > 0 { let (idx, c_level) = stack.pop().unwrap(); if c_level as u64 == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let current_level_offset = (1 << self.levels - c_level as u64) - 1; if idx != 0 { let mut parent = (idx + 1) / 2 - 1; 'inner: loop { self.CheckParentFull(parent as u64); if parent == 0 { break 'inner; } parent = (parent + 1) / 2 - 1; } } return (idx - current_level_offset) as isize * (1 << c_level); } else { continue; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match self.tree()[idx as usize] { Node::Used | Node::Full => continue, Node::Unused => { self.tree()[idx as usize] = Node::Split; stack.push((left_child, c_level - 1)); } Node::Split => { stack.push((right_child, c_level - 1)); stack.push((left_child, c_level - 1)); } } } return -1; } const STACK_LEN: usize = 28; fn alloc2(&mut self, t_level: u64) -> isize { let mut stack: [(u32, u32); Self::STACK_LEN] = [(0, 0); Self::STACK_LEN]; let mut top = 0; stack[top] = (0, self.levels as u32); top += 1; while top > 0 { let (idx, c_level) = stack[top - 1]; top -= 1; if c_level as u64 == t_level { if self.tree()[idx as usize] == Node::Unused { self.tree()[idx as usize] = Node::Used; let current_level_offset = (1 << self.levels - c_level as u64) - 1; if idx != 0 { let mut parent = (idx + 1) / 2 - 1; 'inner: loop { self.CheckParentFull(parent as u64); if parent == 0 { break 'inner; } parent = (parent + 1) / 2 - 1; } } return (idx - current_level_offset) as isize * (1 << c_level); } else { continue; } } let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; match self.tree()[idx as usize] { Node::Used | Node::Full => continue, Node::Unused => { self.tree()[idx as usize] = Node::Split; stack[top] = (left_child, c_level - 1); top += 1; } Node::Split => { stack[top] = (right_child, c_level - 1); top += 1; stack[top] = (left_child, c_level - 1); top += 1; } } } return -1; } pub fn CheckParentFull(&mut self, idx: u64) { let mut idx = idx; while idx != 0 { let left_child = idx * 2 + 1; let right_child = idx * 2 + 2; let left_child_used_or_full = self.tree()[left_child as usize] == Node::Full || self.tree()[left_child as usize] == Node::Used; let right_child_used_or_full = self.tree()[right_child as usize] == Node::Full || self.tree()[right_child as usize] == Node::Used; if left_child_used_or_full && right_child_used_or_full { self.tree()[idx as usize] = Node::Full; } idx = (idx + 1) / 2 - 1; } } pub fn allocate(&mut self, num_pages: u64) -> isize { let requested_level = self.get_level_from_num_pages(num_pages); if requested_level > self.levels { return -1; } return self.alloc2(requested_level); } pub fn free(&mut self, page_offset: u64, num_pages: u64) -> bool { if self.root <= page_offset && page_offset <= self.root + self.size { return false; } let requested_level = self.get_level_from_num_pages(num_pages); let level_offset = page_offset / (1 << requested_level); let current_level_offset = (1 << self.levels - 
requested_level) - 1; let mut idx = current_level_offset + level_offset; if idx as usize > self.tree().len() - 1 { panic!("offset {} is > length of tree() {}", idx, self.tree().len()); } if self.tree()[idx as usize] != Node::Used { return false; } self.tree()[idx as usize] = Node::Unused; while idx != 0 { let parent = (idx + 1) / 2 - 1; let left_child = parent * 2 + 1; let right_child = parent * 2 + 2; if self.tree()[left_child as usize] == Node::Unused && self.tree()[right_child as usize] == Node::Unused { self.tree()[parent as usize] = Node::Unused; } else { self.tree()[parent as usize] = Node::Split; } idx = parent } return true; } fn get_level_from_num_pages(&self, num_pages: u64) -> u64 { let requested_pages; if num_pages == 0 { requested_pages = 1; } else { requested_pages = num_pages.next_power_of_two(); } let requested_level = self.log_base_2(requested_pages); requested_level } fn log_base_2(&self, requested_pages: u64) -> u64 { let mut exp = 0; let mut find_msb_bit = requested_pages; find_msb_bit >>= 1; while find_msb_bit > 0 { find_msb_bit >>= 1; exp += 1; } return exp; } /*pub fn dump(&self) -> String { let mut out = "".to_string(); let mut row = "".to_string(); let mut level = 0; let mut index = 0; loop { if index == self.tree().len() { break } match self.tree()[index] { Node::Used => row += "U", Node::Unused => row += "O", Node::Split => row += "S", Node::Full => row += "F", } if row.len() == 1 << level { out += &(row + "\n"); row = "".to_string(); level += 1; } index += 1; } return out; }*/ } #[cfg(test)] mod tests { use super::*; #[test] fn test_alloc() { let mem = [0 as u8; 15]; let mut alloc = buddyallocator::New(3, &mem[0] as *const _ as u64); assert_eq!(alloc.allocate(9), -1); let offset1 = alloc.allocate(1); assert_eq!(offset1, 0); let offset2 = alloc.allocate(3); assert_eq!(offset2, 4); alloc.free(offset2 as u64, 3); alloc.free(offset1 as u64, 1); let offset3 = alloc.allocate(8); assert_eq!(offset3, 0); alloc.free(offset3 as u64, 8); let offset4 = alloc.allocate(9); assert_eq!(offset4, -1); } #[test] fn test_alloc1() { let mut alloc = buddyallocator::New(0, 0); assert_eq!(alloc.allocate(9), -1); } }
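A minimal usage sketch of the allocator defined above, assuming a writable backing region at base_addr that is large enough for the buddy tree plus the pages it manages; the function name and the order value 12 are illustrative assumptions, not part of the original source.

// Illustrative only: exercises MemAllocator::Init / Alloc / Free from the file above.
fn allocator_smoke_test(base_addr: u64) -> Result<()> {
    // Manage a region of 2^12 pages starting at base_addr; Init() reserves the
    // first pages of the region for the buddy tree itself and asserts that the
    // reservation lands at offset 0.
    let allocator = MemAllocator::Init(base_addr, 12);

    // Allocate one page, zero it, then hand it back to the allocator.
    let page = allocator.Alloc(1)?;
    ZeroPage(page);
    allocator.Free(page, 1)?;
    Ok(())
}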
pub fn Free(&mut self, addr: u64, pages: u64) -> Result<()> {
    let pageOff = (addr - self.baseAddr) as u64 >> PAGE_SHIFT;
    let ret = self.ba.free(pageOff, pages);
    if ret {
        Ok(())
    } else {
        Err(Error::InvalidInput)
    }
}
function_block-full_function
[ { "content": "#[inline(always)]\n\npub fn CmpExchg(addr: u64, old: u64, new: u64) -> u64 {\n\n let mut ret: u64;\n\n unsafe {\n\n llvm_asm!(\"\n\n lock cmpxchgq $2, ($3)\n\n \"\n\n : \"={rax}\"(ret)\n\n : \"{rax}\"(old), \"{rdx}\"(new), \"{rcx}\"(addr)\n\n : \"memory\" : \"volatile\" );\n\n };\n\n\n\n return ret;\n\n}\n\n\n", "file_path": "qlib/mutex.rs", "rank": 0, "score": 412380.59196372936 }, { "content": "pub fn MapVDSOParamPage(task: &mut Task, virtualAddr: u64, vdsoParamPageAddr: u64) -> Result<u64> {\n\n let mut moptions = MMapOpts::NewAnonOptions(\"[vvar]\".to_string())?;\n\n moptions.Length = MemoryDef::PAGE_SIZE;\n\n moptions.Addr = virtualAddr;\n\n moptions.Fixed = true;\n\n moptions.Perms = AccessType::ReadOnly();\n\n moptions.MaxPerms = AccessType::ReadOnly();\n\n moptions.Private = true;\n\n moptions.VDSO = true;\n\n moptions.Kernel = false;\n\n moptions.Offset = vdsoParamPageAddr; //use offset to store the phyaddress\n\n\n\n let addr = task.mm.MMap(task, &mut moptions)?;\n\n return Ok(addr);\n\n}\n\n\n", "file_path": "qlib/kernel/loader/loader.rs", "rank": 1, "score": 382822.30708076 }, { "content": "pub fn MapVDSOPage(task: &mut Task, virtualAddr: u64, vdsoAddr: u64) -> Result<u64> {\n\n let mut moptions = MMapOpts::NewAnonOptions(\"[vdso]\".to_string())?;\n\n moptions.Length = 2 * MemoryDef::PAGE_SIZE;\n\n moptions.Addr = virtualAddr;\n\n moptions.Fixed = true;\n\n moptions.Perms = AccessType::Executable();\n\n moptions.MaxPerms = AccessType::Executable();\n\n moptions.Private = false;\n\n moptions.VDSO = true;\n\n moptions.Kernel = false;\n\n moptions.Offset = vdsoAddr; //use offset to store the phyaddress\n\n\n\n let addr = task.mm.MMap(task, &mut moptions)?;\n\n return Ok(addr);\n\n}\n\n\n", "file_path": "qlib/kernel/loader/loader.rs", "rank": 2, "score": 381138.91543516866 }, { "content": "#[inline(always)]\n\npub fn LoadOnce(addr: u64) -> u64 {\n\n let ret: u64;\n\n unsafe {\n\n llvm_asm!(\"\n\n movq ($1), $0\n\n lfence\n\n \"\n\n : \"={rax}\"(ret)\n\n : \"{rdi}\"(addr)\n\n : \"memory\" : \"volatile\" );\n\n };\n\n\n\n return ret;\n\n}\n\n\n\nimpl<T: ?Sized> QMutexIntern<T> {\n\n #[inline(always)]\n\n pub fn CmpExchg(&self, old: u64, new: u64) -> u64 {\n\n /*match self.lock.compare_exchange(old, new, QOrdering::ACQUIRE, QOrdering::RELAXED) {\n\n Ok(v) => return v,\n", "file_path": "qlib/mutex.rs", "rank": 3, "score": 363380.4799794749 }, { "content": "#[inline(always)]\n\npub fn CopyPage(to: u64, from: u64) {\n\n unsafe {\n\n CopyPageUnsafe(to, from);\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Copy, Clone)]\n\n#[repr(C)]\n\npub struct LibcStat {\n\n pub st_dev: u64,\n\n pub st_ino: u64,\n\n pub st_nlink: u64,\n\n pub st_mode: u32,\n\n pub st_uid: u32,\n\n pub st_gid: u32,\n\n pub pad0: i32,\n\n pub st_rdev: u64,\n\n pub st_size: i64,\n\n pub st_blksize: i64,\n\n pub st_blocks: i64,\n", "file_path": "qlib/linux_def.rs", "rank": 4, "score": 362932.1980293063 }, { "content": "// copyItimerValOut copies an ItimerVal to the untrusted app range.\n\n// The ItimerVal may be either 32 or 64 bits.\n\n// A NULL address is allowed, in which case no copy takes place\n\npub fn CopyItimerValOut(task: &mut Task, addr: u64, itv: &ItimerVal) -> Result<()> {\n\n if addr == 0 {\n\n return Ok(());\n\n }\n\n\n\n //*task.GetTypeMut(addr)? 
= *itv;\n\n task.CopyOutObj(itv, addr)?;\n\n return Ok(());\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_timer.rs", "rank": 5, "score": 362537.82765263313 }, { "content": "pub fn IsValidSegmentBase(addr: u64) -> bool {\n\n return addr < MAX_ADDR64;\n\n}\n\n\n\nconst DEFAULT_STACK_SIZE: usize = MemoryDef::DEFAULT_STACK_SIZE as usize;\n\nconst DEFAULT_STACK_PAGES: u64 = DEFAULT_STACK_SIZE as u64 / (4 * 1024);\n\nconst DEFAULT_STACK_MAST: u64 = !(DEFAULT_STACK_SIZE as u64 - 1);\n\n\n\n#[derive(Debug, Copy, Clone, Default)]\n\npub struct SharingOptions {\n\n // If NewAddressSpace is true, the task should have an independent virtual\n\n // address space.\n\n pub NewAddressSpace: bool,\n\n\n\n // If NewSignalHandlers is true, the task should use an independent set of\n\n // signal handlers.\n\n pub NewSignalHandlers: bool,\n\n\n\n // If NewThreadGroup is true, the task should be the leader of its own\n\n // thread group. TerminationSignal is the signal that the thread group\n", "file_path": "qlib/kernel/threadmgr/task_clone.rs", "rank": 7, "score": 354341.4536638021 }, { "content": "#[inline(always)]\n\npub fn WriteOnce(addr: u64, val: u64) {\n\n unsafe {\n\n llvm_asm!(\"\n\n mfence\n\n mov $1, ($0)\n\n \"\n\n :\n\n : \"r\"(addr), \"r\"(val)\n\n : \"memory\" : \"volatile\" );\n\n };\n\n}\n\n\n", "file_path": "qlib/mutex.rs", "rank": 8, "score": 350972.8581646242 }, { "content": "#[inline]\n\npub fn Invlpg(addr: u64) {\n\n if !super::SHARESPACE.config.read().KernelPagetable {\n\n unsafe {\n\n llvm_asm!(\"\n\n invlpg ($0)\n\n \" :: \"r\" (addr): \"memory\" : \"volatile\" )\n\n };\n\n }\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 9, "score": 339294.812266673 }, { "content": "pub fn stmxcsr(addr: u64) {\n\n unsafe {\n\n llvm_asm!(\"\\\n\n STMXCSR [rax]\n\n \" : : \"{rax}\"(addr)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 10, "score": 339294.812266673 }, { "content": "pub fn fxsave(addr: u64) {\n\n unsafe {\n\n llvm_asm!(\"\\\n\n fxsave64 [rbx + 0]\n\n \" : : \"{rbx}\"(addr)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 11, "score": 339294.812266673 }, { "content": "pub fn FSTCW(addr: u64) {\n\n unsafe {\n\n llvm_asm!(\"\\\n\n FSTCW [rax]\n\n \" : : \"{rax}\"(addr)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 12, "score": 339294.812266673 }, { "content": "pub fn xsaveopt(addr: u64) {\n\n let negtive1: u64 = 0xffffffff;\n\n unsafe {\n\n llvm_asm!(\"\\\n\n xsaveopt64 [rdi + 0]\n\n \" : : \"{rdi}\"(addr), \"{eax}\"(negtive1), \"{edx}\"(negtive1)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 13, "score": 339294.812266673 }, { "content": "#[inline]\n\npub fn SetGs(addr: u64) {\n\n WriteMsr(MSR::MSR_KERNEL_GS_BASE as u32, addr);\n\n}\n\n\n", "file_path": "qlib/kernel/vcpu.rs", "rank": 14, "score": 339294.812266673 }, { "content": "#[inline]\n\npub fn SetFs(addr: u64) {\n\n //println!(\"SetFs from {:x} to {:x}\", GetFs(), addr);\n\n WriteMsr(MSR::MSR_FS_BASE as u32, addr);\n\n //println!(\"the input value is {:x}, the get fs result is {:x}\", addr, ReadMsr(MSR::MSR_FS_BASE as u32));\n\n}\n\n\n", "file_path": "qlib/kernel/vcpu.rs", "rank": 15, "score": 339294.812266673 }, { "content": "pub fn fxrstor(addr: u64) {\n\n unsafe {\n\n llvm_asm!(\"\\\n\n fxrstor64 [rbx + 0]\n\n \" : : \"{rbx}\"(addr)\n\n : \"memory\" : \"intel\", 
\"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 16, "score": 339294.812266673 }, { "content": "pub fn FLDCW(addr: u64) {\n\n unsafe {\n\n llvm_asm!(\"\\\n\n FLDCW [rax]\n\n \" : : \"{rax}\"(addr)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 17, "score": 339294.812266673 }, { "content": "#[inline]\n\npub fn Clflush(addr: u64) {\n\n unsafe { llvm_asm!(\"clflush ($0)\" :: \"r\" (addr): \"memory\" : \"volatile\" ) }\n\n}\n\n\n\n// muldiv64 multiplies two 64-bit numbers, then divides the result by another\n\n// 64-bit number.\n\n//\n\n// It requires that the result fit in 64 bits, but doesn't require that\n\n// intermediate values do; in particular, the result of the multiplication may\n\n// require 128 bits.\n\n//\n\n// It returns !ok if divisor is zero or the result does not fit in 64 bits.\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 18, "score": 339294.812266673 }, { "content": "pub fn xsave(addr: u64) {\n\n unsafe {\n\n llvm_asm!(\"\\\n\n xsave64 [rdi + 0]\n\n \" : : \"{rdi}\"(addr)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 19, "score": 339294.812266673 }, { "content": "pub fn ldmxcsr(addr: u64) {\n\n unsafe {\n\n llvm_asm!(\"\\\n\n LDMXCSR [rax]\n\n \" : : \"{rax}\"(addr)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 20, "score": 339294.812266673 }, { "content": "pub fn xrstor(addr: u64) {\n\n let negtive1: u64 = 0xffffffff;\n\n unsafe {\n\n llvm_asm!(\"\\\n\n xrstor64 [rdi + 0]\n\n \" : : \"{rdi}\"(addr), \"{eax}\"(negtive1), \"{edx}\"(negtive1)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 21, "score": 339294.812266673 }, { "content": "pub fn CopyInSigSetWithSize(task: &Task, addr: u64) -> Result<(u64, usize)> {\n\n let mask: SigMask = task.CopyInObj(addr)?;\n\n return Ok((mask.addr, mask.len));\n\n}\n\n\n\npub const SIGNAL_SET_SIZE: usize = 8;\n\n\n", "file_path": "qlib/kernel/SignalDef.rs", "rank": 22, "score": 337025.0206960834 }, { "content": "pub fn RegisterSysCall(addr: u64) {\n\n //WriteMsr(MSR::MSR_STAR as u32, 0x00200008<<32);\n\n WriteMsr(MSR::MSR_STAR as u32, 0x00100008 << 32);\n\n WriteMsr(MSR::MSR_SYSCALL_MASK as u32, 0x3f7fd5);\n\n WriteMsr(MSR::MSR_LSTAR as u32, addr);\n\n}\n\n\n", "file_path": "qlib/kernel/vcpu.rs", "rank": 23, "score": 333746.632261263 }, { "content": "pub fn CopyInNodemask(task: &Task, addr: u64, maxnode: u32) -> Result<u64> {\n\n // \"nodemask points to a bit mask of node IDs that contains up to maxnode\n\n // bits. The bit mask size is rounded to the next multiple of\n\n // sizeof(unsigned long), but the kernel will use bits only up to maxnode.\n\n // A NULL value of nodemask or a maxnode value of zero specifies the empty\n\n // set of nodes. If the value of maxnode is zero, the nodemask argument is\n\n // ignored.\" - set_mempolicy(2). 
Unfortunately, most of this is inaccurate\n\n // because of what appears to be a bug: mm/mempolicy.c:get_nodes() uses\n\n // maxnode-1, not maxnode, as the number of bits.\n\n if maxnode == 0 {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n let bits = maxnode - 1;\n\n if bits as u64 > MemoryDef::PAGE_SIZE * 8 {\n\n // also handles overflow from maxnode == 0\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n if bits == 0 {\n", "file_path": "qkernel/src/syscalls/sys_mempolicy.rs", "rank": 24, "score": 332227.5629892041 }, { "content": "pub fn SaveFloatingPoint(addr: u64) {\n\n if SUPPORT_XSAVEOPT.load(Ordering::Acquire) {\n\n xsaveopt(addr);\n\n } else if SUPPORT_XSAVE.load(Ordering::Acquire) {\n\n xsave(addr);\n\n } else {\n\n fxsave(addr);\n\n }\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 25, "score": 328510.42719314375 }, { "content": "pub fn LoadFloatingPoint(addr: u64) {\n\n if SUPPORT_XSAVE.load(Ordering::Acquire) {\n\n xrstor(addr);\n\n } else {\n\n fxrstor(addr);\n\n }\n\n}\n\n\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 26, "score": 328510.42719314375 }, { "content": "pub fn HugepageDontNeed(addr: u64) {\n\n let ret = HostSpace::Madvise(\n\n addr,\n\n MemoryDef::HUGE_PAGE_SIZE as usize,\n\n MAdviseOp::MADV_DONTNEED,\n\n );\n\n assert!(ret == 0, \"HugepageDontNeed fail with {}\", ret)\n\n}\n\n\n\nimpl IOMgr {\n\n pub fn Init() -> Result<Self> {\n\n return Err(Error::Common(format!(\"IOMgr can't init in kernel\")))\n\n }\n\n}", "file_path": "qkernel/src/kernel_def.rs", "rank": 27, "score": 328510.42719314375 }, { "content": "pub fn HugepageDontNeed(addr: u64) {\n\n let ret = unsafe {\n\n libc::madvise(\n\n addr as _,\n\n MemoryDef::HUGE_PAGE_SIZE as usize,\n\n MAdviseOp::MADV_DONTNEED,\n\n )\n\n };\n\n assert!(ret == 0, \"HugepageDontNeed::Host fail with {}\", ret)\n\n}\n", "file_path": "qvisor/src/kernel_def.rs", "rank": 28, "score": 328510.42719314375 }, { "content": "pub fn Pipe2(task: &mut Task, addr: u64, flags: i32) -> Result<i64> {\n\n if flags & !(Flags::O_NONBLOCK | Flags::O_CLOEXEC) != 0 {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n let (r, w) = NewConnectedPipe(task, DEFAULT_PIPE_SIZE, MemoryDef::PAGE_SIZE as usize);\n\n\n\n r.SetFlags(task, FileFlags::FromFlags(flags as u32).SettableFileFlags());\n\n r.flags.lock().0.NonSeekable = true;\n\n w.SetFlags(task, FileFlags::FromFlags(flags as u32).SettableFileFlags());\n\n w.flags.lock().0.NonSeekable = true;\n\n\n\n //let fds : &mut [i32; 2] = task.GetTypeMut(addr)?;\n\n\n\n let mut fds: [i32; 2] = [0, 0];\n\n let rfd = task.NewFDFrom(\n\n 0,\n\n &r,\n\n &FDFlags {\n\n CloseOnExec: flags & Flags::O_CLOEXEC != 0,\n", "file_path": "qkernel/src/syscalls/sys_pipe.rs", "rank": 29, "score": 328505.9197695248 }, { "content": "pub fn GetVDSOParamPageAddr() -> u64 {\n\n return TIME_KEEPER.read().params.GetParamPageAddr();\n\n}\n\n\n", "file_path": "qlib/kernel/kernel/timer/mod.rs", "rank": 30, "score": 327850.97690429457 }, { "content": "pub fn seccomp(_task: &mut Task, _mode: u64, _flags: u64, _addr: u64) -> Result<i64> {\n\n return Err(Error::SysError(SysErr::ENOSYS));\n\n}\n", "file_path": "qkernel/src/syscalls/sys_seccomp.rs", "rank": 31, "score": 325642.1028035259 }, { "content": "pub fn Statfs(task: &Task, addr: u64, statfsAddr: u64) -> Result<u64> {\n\n let (path, _dirPath) = copyInPath(task, addr, false)?;\n\n\n\n info!(\"Statfs path is {}\", &path);\n\n fileOpOn(\n\n task,\n\n ATType::AT_FDCWD,\n\n &path,\n\n true,\n\n &mut |_root: &Dirent, d: &Dirent, 
_remainingTraversals: u32| -> Result<()> {\n\n return statfsImpl(task, d, statfsAddr);\n\n },\n\n )?;\n\n\n\n return Ok(0);\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_stat.rs", "rank": 32, "score": 323406.1883088495 }, { "content": "pub fn CopyOutNodemask(task: &Task, addr: u64, maxnode: u32, val: u64) -> Result<()> {\n\n // mm/mempolicy.c:copy_nodes_to_user() also uses maxnode-1 as the number of\n\n // bits.\n\n let bits = maxnode - 1;\n\n if bits as u64 > MemoryDef::PAGE_SIZE * 8 {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n // Copy out the first unsigned long in the nodemask.\n\n //*task.GetTypeMut(addr)? = val;\n\n task.CopyOutObj(&val, addr)?;\n\n\n\n // Zero out remaining unsigned longs in the nodemask.\n\n if bits > 64 {\n\n let mut remAddr = addr + 8;\n\n\n\n let remU64 = (bits - 65) / 64;\n\n for _i in 0..remU64 as usize {\n\n //*task.GetTypeMut(remAddr)? = 0;\n\n task.CopyOutObj(&(0 as u64), remAddr)?;\n\n remAddr += 8;\n\n }\n\n }\n\n\n\n return Ok(());\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_mempolicy.rs", "rank": 33, "score": 323249.80482741434 }, { "content": "pub fn CreateCloneTask(fromTask: &Task, toTask: &mut Task, userSp: u64) {\n\n let mut from = fromTask.GetKernelSp();\n\n let fromSp = fromTask.GetPtRegs() as *const _ as u64;\n\n\n\n let mut to = toTask.GetKernelSp();\n\n let toPtRegs = toTask.GetPtRegs();\n\n\n\n unsafe {\n\n while from >= fromSp {\n\n *(to as *mut u64) = *(from as *const u64);\n\n from -= 8;\n\n to -= 8;\n\n }\n\n\n\n toTask.context.SetReady(1);\n\n toTask.context.fs = fromTask.context.fs;\n\n toTask.context.rsp = toTask.GetPtRegs() as *const _ as u64 - 8;\n\n toTask.context.rdi = userSp;\n\n toTask.context.X86fpstate = Box::new(fromTask.context.X86fpstate.Fork());\n\n toPtRegs.rax = 0;\n", "file_path": "qlib/kernel/threadmgr/task_clone.rs", "rank": 34, "score": 319354.4009715081 }, { "content": "// copyOutEvents copies epoll events from the kernel to user memory.\n\npub fn CopyOutEvents(task: &Task, addr: u64, e: &[Event]) -> Result<()> {\n\n let itemLen: usize = 12;\n\n\n\n Addr(addr).AddLen((itemLen * e.len()) as u64)?;\n\n\n\n //error!(\"epool CopyOutEvents events is {:x?}\", e);\n\n for i in 0..e.len() {\n\n /*let output : &mut Event = task.GetTypeMut(addr + (i * itemLen) as u64)?;\n\n output.Events = e[i].Events;\n\n output.Data[0] = e[i].Data[0];\n\n output.Data[1] = e[i].Data[1];*/\n\n task.CopyOutObj(&e[i], addr + (i * itemLen) as u64)?;\n\n }\n\n\n\n return Ok(());\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_epoll.rs", "rank": 35, "score": 319151.9363499429 }, { "content": "#[inline(always)]\n\npub fn trace1(rip: u64, rsp: u64, rbp: u64, cb: &mut dyn FnMut(&Frame) -> bool) {\n\n let curframe = Frame::new(rbp, rsp, rip);\n\n trace_from(curframe, cb);\n\n}\n\n\n", "file_path": "qlib/kernel/backtracer.rs", "rank": 36, "score": 315301.63190658373 }, { "content": "#[inline(always)]\n\npub fn trace1(rip: u64, rsp: u64, rbp: u64, cb: &mut dyn FnMut(&Frame) -> bool) {\n\n let curframe = Frame::new(rbp, rsp, rip);\n\n trace_from(curframe, cb);\n\n}\n\n\n", "file_path": "qkernel/src/backtracer.rs", "rank": 37, "score": 315301.6319065837 }, { "content": "// copyItimerValIn copies an ItimerVal from the untrusted app range to the\n\n// kernel. 
The ItimerVal may be either 32 or 64 bits.\n\n// A NULL address is allowed because because Linux allows\n\n// setitimer(which, NULL, &old_value) which disables the timer.\n\n// There is a KERN_WARN message saying this misfeature will be removed.\n\n// However, that hasn't happened as of 3.19, so we continue to support it.\n\npub fn CopyItimerValIn(task: &Task, addr: u64) -> Result<ItimerVal> {\n\n if addr == 0 {\n\n return Ok(ItimerVal::default());\n\n }\n\n\n\n let itv: ItimerVal = task.CopyInObj(addr)?;\n\n return Ok(itv);\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_timer.rs", "rank": 38, "score": 314602.5267234813 }, { "content": "pub fn IsValidSegmentBase(addr: u64) -> bool {\n\n return addr < MAX_ADDR64;\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_tls.rs", "rank": 39, "score": 311090.2208289001 }, { "content": "pub fn NewUID() -> u64 {\n\n return UID.fetch_add(1, atomic::Ordering::SeqCst);\n\n}\n", "file_path": "qkernel/src/uid.rs", "rank": 40, "score": 310897.835657271 }, { "content": "pub fn NewUID() -> u64 {\n\n return UID.fetch_add(1, atomic::Ordering::SeqCst);\n\n}\n", "file_path": "qlib/kernel/uid.rs", "rank": 41, "score": 310897.835657271 }, { "content": "pub fn InitTimeKeeper(vdsoParamPageAddr: u64) {\n\n TIME_KEEPER.Initialization(vdsoParamPageAddr)\n\n}\n\n\n", "file_path": "qlib/kernel/kernel/timer/mod.rs", "rank": 42, "score": 310694.6382124863 }, { "content": "pub fn Stat(task: &Task, addr: u64, statAddr: u64) -> Result<i64> {\n\n let (path, dirPath) = copyInPath(task, addr, false)?;\n\n info!(\"Stat path is {}\", &path);\n\n\n\n fileOpOn(\n\n task,\n\n ATType::AT_FDCWD,\n\n &path,\n\n true,\n\n &mut |_root: &Dirent, d: &Dirent, _remainingTraversals: u32| -> Result<()> {\n\n return stat(task, d, dirPath, statAddr);\n\n },\n\n )?;\n\n\n\n return Ok(0);\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_stat.rs", "rank": 43, "score": 308462.8947643379 }, { "content": "pub fn Lstat(task: &Task, addr: u64, statAddr: u64) -> Result<i64> {\n\n let (path, dirPath) = copyInPath(task, addr, false)?;\n\n\n\n info!(\"Lstat path is {}\", &path);\n\n let resolve = dirPath;\n\n\n\n fileOpOn(\n\n task,\n\n ATType::AT_FDCWD,\n\n &path,\n\n resolve,\n\n &mut |_root: &Dirent, d: &Dirent, _remainingTraversals: u32| -> Result<()> {\n\n return stat(task, d, dirPath, statAddr);\n\n },\n\n )?;\n\n\n\n return Ok(0);\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_stat.rs", "rank": 44, "score": 308462.8947643379 }, { "content": "pub fn MemfdCreate(task: &Task, addr: u64, flags: u64) -> Result<u64> {\n\n let memfdPrefix = \"/memfd:\";\n\n let memfdAllFlags = MfdType::MFD_CLOEXEC | MfdType::MFD_ALLOW_SEALING;\n\n let memfdMaxNameLen = NAME_MAX - memfdPrefix.len() + 1;\n\n\n\n let flags = flags as u32;\n\n\n\n if flags & !memfdAllFlags != 0 {\n\n return Err(Error::SysError(SysErr::EINVAL))\n\n }\n\n\n\n let allowSeals = flags & MfdType::MFD_ALLOW_SEALING != 0;\n\n let cloExec = flags & MfdType::MFD_CLOEXEC != 0;\n\n\n\n let name = task.CopyInString(addr, PATH_MAX - memfdPrefix.len())?;\n\n\n\n if name.len() > memfdMaxNameLen {\n\n return Err(Error::SysError(SysErr::EINVAL))\n\n }\n\n\n\n let name = memfdPrefix.to_string() + &name;\n\n}*/\n", "file_path": "qkernel/src/syscalls/sys_file.rs", "rank": 45, "score": 308077.67573306244 }, { "content": "pub fn NewUID() -> u64 {\n\n return RDMAUID.fetch_add(1, atomic::Ordering::SeqCst);\n\n}\n\n\n\n#[derive(Default, Copy, Clone, Debug, Eq, PartialEq, Hash)]\n\n#[repr(transparent)]\n\npub struct Gid {\n\n raw: [u8; 16],\n\n}\n\n\n\nimpl 
Gid {\n\n /// Expose the subnet_prefix component of the `Gid` as a u64. This is\n\n /// equivalent to accessing the `global.subnet_prefix` component of the\n\n /// `rdmaffi::ibv_gid` union.\n\n #[allow(dead_code)]\n\n fn subnet_prefix(&self) -> u64 {\n\n u64::from_be_bytes(self.raw[..8].try_into().unwrap())\n\n }\n\n\n\n /// Expose the interface_id component of the `Gid` as a u64. This is\n", "file_path": "rdma_srv/src/rdma.rs", "rank": 46, "score": 305008.857353712 }, { "content": "pub fn NewUID() -> u64 {\n\n return UID.fetch_add(1, atomic::Ordering::SeqCst);\n\n}\n\n\n", "file_path": "qvisor/src/vmspace/mod.rs", "rank": 47, "score": 305008.857353712 }, { "content": "pub fn ComparePage(from: u64, to: u64) -> bool {\n\n unsafe {\n\n let cnt = 512;\n\n let fromArr = slice::from_raw_parts(from as *const u64, cnt);\n\n let toArr = slice::from_raw_parts_mut(to as *mut u64, cnt);\n\n for i in 0..cnt {\n\n if toArr[i] != fromArr[i] {\n\n return false;\n\n }\n\n }\n\n\n\n return true;\n\n }\n\n}\n\n\n", "file_path": "qlib/linux_def.rs", "rank": 48, "score": 304752.4624885267 }, { "content": "// TotalMemory returns the \"total usable memory\" available.\n\n//\n\n// This number doesn't really have a true value so it's based on the following\n\n// inputs and further bounded to be above some minimum guaranteed value (2GB),\n\n// additionally ensuring that total memory reported is always less than used.\n\n//\n\n// memSize should be the platform.Memory size reported by platform.Memory.TotalSize()\n\n// used is the total memory reported by MemoryLocked.Total()\n\npub fn TotalMemory(memSize: u64, used: u64) -> u64 {\n\n let mut memSize = memSize;\n\n if memSize < MINIMUM_TOTAL_MEMORY_BYTES {\n\n memSize = MINIMUM_TOTAL_MEMORY_BYTES;\n\n }\n\n\n\n if memSize < used {\n\n memSize = used;\n\n // Bump totalSize to the next largest power of 2, if one exists, so\n\n // that MemFree isn't 0.\n\n for i in 0..64 {\n\n let size = 1 << i;\n\n if size as u64 >= memSize {\n\n return size;\n\n }\n\n }\n\n }\n\n\n\n return memSize;\n\n}\n", "file_path": "qlib/usage/memory.rs", "rank": 49, "score": 304244.72883787076 }, { "content": "pub fn ZeroPage(pageStart: u64) {\n\n use alloc::slice;\n\n unsafe {\n\n let arr = slice::from_raw_parts_mut(pageStart as *mut u64, 512);\n\n for i in 0..512 {\n\n arr[i] = 0\n\n }\n\n }\n\n\n\n super::super::asm::sfence();\n\n}\n\n\n", "file_path": "qlib/kernel/memmgr/pmamgr.rs", "rank": 50, "score": 302826.6831181865 }, { "content": "pub fn LogInit(pages: u64) {\n\n let bs = self::qlib::bytestream::ByteStream::Init(pages); // 4MB\n\n *SHARESPACE.logBuf.lock() = Some(bs);\n\n}\n\n\n", "file_path": "qkernel/src/lib.rs", "rank": 51, "score": 298774.34487733804 }, { "content": "pub fn Invlpg(_addr: u64) {}\n", "file_path": "rdma_srv/src/asm.rs", "rank": 52, "score": 298698.60986856284 }, { "content": "pub fn Invlpg(_addr: u64) {}\n", "file_path": "qvisor/src/asm/mod.rs", "rank": 53, "score": 298698.60986856284 }, { "content": "pub fn Invlpg(_addr: u64) {}\n", "file_path": "rdma_cli/src/asm.rs", "rank": 54, "score": 298698.60986856284 }, { "content": "pub fn CheckZeroPage(pageStart: u64) {\n\n use alloc::slice;\n\n unsafe {\n\n let arr = slice::from_raw_parts_mut(pageStart as *mut u64, 512);\n\n for i in 0..512 {\n\n if arr[i] != 0 {\n\n panic!(\"alloc non zero page {:x}\", pageStart);\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct PagePool {\n\n //refCount for whole pma\n\n pub refCount: u64,\n\n pub refs: BTreeMap<u64, u32>,\n\n pub allocator: AlignedAllocator,\n\n}\n\n\n\nimpl PagePool {\n", 
"file_path": "qlib/kernel/memmgr/pmamgr.rs", "rank": 55, "score": 298017.7771623286 }, { "content": "pub fn SetZeroPage(pageStart: u64) {\n\n use alloc::slice;\n\n unsafe {\n\n let arr = slice::from_raw_parts_mut(pageStart as *mut u64, 512);\n\n for i in 0..512 {\n\n arr[i] = 0;\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct GlobalVcpuAllocator {\n\n pub init: AtomicBool,\n\n}\n\n\n\nimpl GlobalVcpuAllocator {\n\n pub const fn New() -> Self {\n\n return Self {\n\n init: AtomicBool::new(false),\n\n };\n", "file_path": "qlib/mem/list_allocator.rs", "rank": 56, "score": 298017.7771623286 }, { "content": "pub fn CheckZeroPage(pageStart: u64) {\n\n use alloc::slice;\n\n unsafe {\n\n let arr = slice::from_raw_parts_mut(pageStart as *mut u64, 512);\n\n for i in 0..512 {\n\n if arr[i] != 0 {\n\n panic!(\"alloc non zero page {:x}\", pageStart);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "qlib/mem/list_allocator.rs", "rank": 57, "score": 298017.7771623286 }, { "content": "pub fn LoadVDSO(task: &mut Task) -> Result<u64> {\n\n let vAddr = task\n\n .mm\n\n .FindAvailableSeg(task, 0, 3 * MemoryDef::PAGE_SIZE)?;\n\n\n\n let vdsoParamPageAddr = GetVDSOParamPageAddr();\n\n let paramVAddr = MapVDSOParamPage(task, vAddr, vdsoParamPageAddr)?;\n\n assert!(paramVAddr == vAddr, \"LoadVDSO paramVAddr doesn't match\");\n\n let vdsoVAddr = MapVDSOPage(\n\n task,\n\n paramVAddr + MemoryDef::PAGE_SIZE,\n\n vdsoParamPageAddr + MemoryDef::PAGE_SIZE,\n\n )?;\n\n\n\n //info!(\"vdsoParamPageAddr is {:x}, phyaddr is {:x}\", vdsoParamPageAddr, task.VirtualToPhy(paramVAddr)?);\n\n //info!(\"paramVAddr is {:x}, phyaddr is {:x}\", paramVAddr, task.VirtualToPhy(paramVAddr)?);\n\n //info!(\"vdsoVAddr is {:x}, phyaddr is {:x}\", vdsoVAddr, task.VirtualToPhy(vdsoVAddr)?);\n\n //info!(\"paramVAddr is {:x}, vdsoVAddr is {:x}\", paramVAddr, vdsoVAddr);\n\n\n\n return Ok(vdsoVAddr);\n\n}\n\n\n", "file_path": "qlib/kernel/loader/loader.rs", "rank": 58, "score": 296877.9044138228 }, { "content": "#[inline]\n\npub fn EnterUser(entry: u64, userStackAddr: u64, kernelStackAddr: u64) {\n\n //PerfGoto(PerfType::User);\n\n unsafe {\n\n llvm_asm!(\"\n\n fninit\n\n //mov gs:0, rsp\n\n mov gs:0, rdx\n\n\n\n mov rcx, rdi\n\n mov r11, 0x2 | 1<<9\n\n\n\n mov rsp, rsi\n\n\n\n /* clean up registers */\n\n xor rax, rax\n\n xor rbx, rbx\n\n xor rdx, rdx\n\n xor rdi, rdi\n\n xor rsi, rsi\n\n xor rbp, rbp\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 59, "score": 296400.27877794456 }, { "content": "pub fn ReadCr3() -> u64 { 0 }\n", "file_path": "rdma_cli/src/asm.rs", "rank": 60, "score": 291206.3994420098 }, { "content": "pub fn ReadCr3() -> u64 { 0 }\n", "file_path": "rdma_srv/src/asm.rs", "rank": 61, "score": 291206.3994420098 }, { "content": "//return (path, whether it is dir)\n\npub fn copyInPath(task: &Task, addr: u64, allowEmpty: bool) -> Result<(String, bool)> {\n\n let str = CString::ToString(task, addr)?;\n\n\n\n if &str == \"\" && !allowEmpty {\n\n return Err(Error::SysError(SysErr::ENOENT));\n\n }\n\n\n\n let (path, dirPath) = TrimTrailingSlashes(&str);\n\n\n\n return Ok((path.to_string(), dirPath));\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_file.rs", "rank": 62, "score": 291194.3478753218 }, { "content": "pub fn NewUID() -> u64 {\n\n return RDMAUID.fetch_add(1, atomic::Ordering::SeqCst);\n\n}\n\n\n\n#[derive(Default, Copy, Clone, Debug, Eq, PartialEq, Hash)]\n\n#[repr(transparent)]\n\npub struct Gid {\n\n raw: [u8; 16],\n\n}\n\n\n\nimpl Gid {\n\n /// Expose the subnet_prefix component of the `Gid` as a u64. 
This is\n\n /// equivalent to accessing the `global.subnet_prefix` component of the\n\n /// `rdmaffi::ibv_gid` union.\n\n #[allow(dead_code)]\n\n fn subnet_prefix(&self) -> u64 {\n\n u64::from_be_bytes(self.raw[..8].try_into().unwrap())\n\n }\n\n\n\n /// Expose the interface_id component of the `Gid` as a u64. This is\n", "file_path": "qvisor/src/vmspace/HostFileMap/rdma.rs", "rank": 63, "score": 289274.4186174371 }, { "content": "pub fn ReadLinkAt(task: &Task, dirFd: i32, addr: u64, bufAddr: u64, size: u64) -> Result<i64> {\n\n let size = size as u32;\n\n\n\n return readlinkAt(task, dirFd, addr, bufAddr, size);\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_file.rs", "rank": 64, "score": 288642.9376135258 }, { "content": "#[inline]\n\npub fn child_clone(_userSp: u64) {}\n\n\n", "file_path": "qvisor/src/kernel_def.rs", "rank": 65, "score": 287970.69665108115 }, { "content": "#[inline]\n\npub fn child_clone(userSp: u64) {\n\n let currTask = Task::Current();\n\n CPULocal::SetUserStack(userSp);\n\n CPULocal::SetKernelStack(currTask.GetKernelSp());\n\n\n\n currTask.AccountTaskEnter(SchedState::RunningApp);\n\n let pt = currTask.GetPtRegs();\n\n\n\n let kernalRsp = pt as *const _ as u64;\n\n CPULocal::Myself().SetEnterAppTimestamp(TSC.Rdtsc());\n\n currTask.mm.HandleTlbShootdown();\n\n SyscallRet(kernalRsp)\n\n}\n\n\n\nextern \"C\" {\n\n pub fn initX86FPState(data: u64, useXsave: bool);\n\n}\n\n\n", "file_path": "qkernel/src/kernel_def.rs", "rank": 66, "score": 287970.69665108115 }, { "content": "pub fn runInCgroup(cg: &Option<Cgroup>, mut f: impl FnMut() -> Result<()>) -> Result<()> {\n\n if cg.is_none() {\n\n return f();\n\n }\n\n\n\n let restore = cg.as_ref().unwrap().Join()?;\n\n f()?;\n\n restore();\n\n return Ok(());\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Default, Debug)]\n\npub struct ExecArgs {\n\n pub Argv: Vec<String>,\n\n pub Envv: Vec<String>,\n\n pub Root: String,\n\n pub WorkDir: String,\n\n pub KUID: KUID,\n\n pub KGID: KGID,\n\n pub ExtraKGIDs: Vec<KGID>,\n", "file_path": "qvisor/src/runc/container/container.rs", "rank": 67, "score": 287680.63982881163 }, { "content": "pub fn StartSubContainerProcess(elfEntry: u64, userStackAddr: u64, kernelStackAddr: u64) {\n\n super::StartSubContainerProcess(elfEntry, userStackAddr, kernelStackAddr)\n\n}\n\n\n\nextern \"C\" {\n\n pub fn CopyPageUnsafe(to: u64, from: u64);\n\n}\n\n\n\nimpl CPULocal {\n\n pub fn CpuId() -> usize {\n\n return GetVcpuId();\n\n }\n\n}\n\n\n\nimpl PageMgrInternal {\n\n pub fn CopyVsysCallPages(&self) {\n\n CopyPage(self.vsyscallPages[0], __vsyscall_page as u64);\n\n }\n\n}\n\n\n", "file_path": "qkernel/src/kernel_def.rs", "rank": 68, "score": 286679.9014840572 }, { "content": "pub fn StartSubContainerProcess(_elfEntry: u64, _userStackAddr: u64, _kernelStackAddr: u64) {}\n\n\n\npub unsafe fn CopyPageUnsafe(_to: u64, _from: u64) {}\n\n\n\nimpl CPULocal {\n\n pub fn CpuId() -> usize {\n\n return ThreadId() as _;\n\n }\n\n\n\n pub fn Wakeup(&self) {\n\n let val: u64 = 8;\n\n let ret = unsafe { libc::write(self.eventfd, &val as *const _ as *const libc::c_void, 8) };\n\n if ret < 0 {\n\n panic!(\"KIOThread::Wakeup fail...\");\n\n }\n\n }\n\n}\n\n\n\nimpl PageMgrInternal {\n\n pub fn CopyVsysCallPages(&self) {}\n\n}\n\n\n", "file_path": "qvisor/src/kernel_def.rs", "rank": 69, "score": 286679.9014840572 }, { "content": "pub fn StartSubContainerProcess(_elfEntry: u64, _userStackAddr: u64, _kernelStackAddr: u64) {}\n\n\n\npub unsafe fn CopyPageUnsafe(_to: u64, _from: u64) {}\n\n\n\nimpl CPULocal {\n\n pub fn 
CpuId() -> usize {\n\n return 0;\n\n }\n\n\n\n pub fn Wakeup(&self) {}\n\n}\n\n\n\nimpl PageMgrInternal {\n\n pub fn CopyVsysCallPages(&self) {}\n\n}\n\n\n", "file_path": "rdma_cli/src/kernel_def.rs", "rank": 70, "score": 283669.53199863096 }, { "content": "pub fn StartSubContainerProcess(_elfEntry: u64, _userStackAddr: u64, _kernelStackAddr: u64) {}\n\n\n\npub unsafe fn CopyPageUnsafe(_to: u64, _from: u64) {}\n\n\n\nimpl CPULocal {\n\n pub fn CpuId() -> usize {\n\n return 0;\n\n }\n\n\n\n pub fn Wakeup(&self) {}\n\n}\n\n\n\nimpl PageMgrInternal {\n\n pub fn CopyVsysCallPages(&self) {}\n\n}\n\n\n", "file_path": "rdma_srv/src/kernel_def.rs", "rank": 71, "score": 283669.53199863096 }, { "content": "#[inline]\n\npub fn child_clone(_userSp: u64) {}\n\n\n", "file_path": "rdma_srv/src/kernel_def.rs", "rank": 72, "score": 283019.32821394876 }, { "content": "#[inline]\n\npub fn child_clone(_userSp: u64) {}\n\n\n", "file_path": "rdma_cli/src/kernel_def.rs", "rank": 73, "score": 283019.32821394876 }, { "content": "pub fn HostIoctlIFConf(task: &Task, hostfd: i32, request: u64, addr: u64) -> Result<()> {\n\n let mut ifc: IFConf = task.CopyInObj(addr)?;\n\n\n\n const MAX_LEN: usize = 64 * 0x1000; // 256 KB\n\n\n\n // todo: how to handle very large ifconf?\n\n let len = if MAX_LEN > ifc.Len as usize {\n\n ifc.Len as usize\n\n } else {\n\n MAX_LEN\n\n };\n\n\n\n let buf = DataBuff::New(len);\n\n\n\n let mut ifr = IFConf {\n\n Len: len as i32,\n\n ..Default::default()\n\n };\n\n\n\n if ifc.Ptr != 0 {\n", "file_path": "qlib/kernel/socket/hostinet/socket.rs", "rank": 74, "score": 281611.94994186715 }, { "content": "// pass the ioctl to the shadow hostfd\n\npub fn HostIoctlIFReq(task: &Task, hostfd: i32, request: u64, addr: u64) -> Result<()> {\n\n let mut ifr: IFReq = task.CopyInObj(addr)?;\n\n let res = HostSpace::IoCtl(hostfd, request, &mut ifr as *const _ as u64);\n\n if res < 0 {\n\n return Err(Error::SysError(-res as i32));\n\n }\n\n\n\n task.CopyOutObj(&ifr, addr)?;\n\n return Ok(());\n\n}\n\n\n", "file_path": "qlib/kernel/socket/hostinet/socket.rs", "rank": 75, "score": 281611.94994186715 }, { "content": "pub fn CopyTimespecIntoDuration(task: &Task, timespecAddr: u64) -> Result<Duration> {\n\n let mut timeout = -1 as Duration;\n\n if timespecAddr != 0 {\n\n let timespec: Timespec = task.CopyInObj(timespecAddr)?;\n\n if !timespec.IsValid() {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n timeout = timespec.ToDuration()?;\n\n if timeout <= TIMEOUT_PROCESS_TIME {\n\n timeout = 0;\n\n }\n\n }\n\n\n\n return Ok(timeout);\n\n}\n", "file_path": "qkernel/src/syscalls/sys_poll.rs", "rank": 76, "score": 281398.20673834236 }, { "content": "pub fn Ioctl(task: &mut Task, fd: i32, request: u64, val: u64) -> Result<()> {\n\n let file = task.GetFile(fd)?;\n\n\n\n //let fops = file.FileOp.clone();\n\n //let inode = file.Dirent.Inode();\n\n //error!(\"Ioctl inodetype is {:?}, fopstype is {:?}\", inode.InodeType(), fops.FopsType());\n\n\n\n match request {\n\n IoCtlCmd::FIONCLEX => {\n\n task.SetFlags(fd, &FDFlags { CloseOnExec: false })?;\n\n\n\n return Ok(());\n\n }\n\n IoCtlCmd::FIOCLEX => {\n\n task.SetFlags(fd, &FDFlags { CloseOnExec: true })?;\n\n\n\n return Ok(());\n\n }\n\n IoCtlCmd::FIONBIO => {\n\n let set: u32 = task.CopyInObj(val)?;\n", "file_path": "qkernel/src/syscalls/sys_file.rs", "rank": 77, "score": 280235.79050494405 }, { "content": "pub fn ToLinux(l: u64) -> u64 {\n\n if l == INFINITY {\n\n return RLIM_INFINITY;\n\n }\n\n\n\n return l;\n\n}\n\n\n", "file_path": "qlib/limits.rs", 
"rank": 78, "score": 279490.4968387609 }, { "content": "pub fn Fstatat(task: &Task, fd: i32, addr: u64, statAddr: u64, flags: i32) -> Result<i64> {\n\n let (path, dirPath) = copyInPath(task, addr, flags & ATType::AT_EMPTY_PATH != 0)?;\n\n\n\n info!(\n\n \"Fstatat path is {} dirPath {}, flags & ATType::AT_SYMLINK_NOFOLLOW {:x}\",\n\n &path,\n\n dirPath,\n\n flags & ATType::AT_SYMLINK_NOFOLLOW\n\n );\n\n if path.len() == 0 {\n\n let file = task.GetFile(fd)?;\n\n\n\n fstat(task, &file, statAddr)?;\n\n return Ok(0);\n\n }\n\n\n\n let resolve = dirPath || flags & ATType::AT_SYMLINK_NOFOLLOW == 0;\n\n\n\n let ret = fileOpOn(\n\n task,\n", "file_path": "qkernel/src/syscalls/sys_stat.rs", "rank": 79, "score": 278893.7719090722 }, { "content": "pub fn Poll(task: &mut Task, pfdAddr: u64, nfds: u32, timeout: Duration) -> Result<i64> {\n\n if nfds > 4096 {\n\n // linux support poll max 4096 fds\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n let (remain, res) = DoPoll(task, pfdAddr, nfds, timeout);\n\n match res {\n\n Err(Error::SysError(SysErr::EINTR)) => {\n\n let b = Box::new(PollRestartBlock {\n\n pfdAddr: pfdAddr,\n\n nfds: nfds,\n\n timeout: remain,\n\n });\n\n task.SetSyscallRestartBlock(b);\n\n return Err(Error::SysError(SysErr::ERESTART_RESTARTBLOCK));\n\n }\n\n Err(e) => return Err(e),\n\n Ok(n) => return Ok(n as i64),\n\n }\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_poll.rs", "rank": 80, "score": 278425.1855192674 }, { "content": "pub fn ReturnToApp(pt: &mut PtRegs) -> ! {\n\n let kernalRsp = pt as *const _ as u64;\n\n SyscallRet(kernalRsp);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn DivByZeroHandler(sf: &mut PtRegs) {\n\n ExceptionHandler(ExceptionStackVec::DivideByZero, sf, 0);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn DebugHandler(sf: &mut PtRegs) {\n\n ExceptionHandler(ExceptionStackVec::Debug, sf, 0);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn NonmaskableInterrupt(sf: &mut PtRegs) {\n\n ExceptionHandler(ExceptionStackVec::NMI, sf, 0);\n\n}\n\n\n", "file_path": "qkernel/src/interrupt/mod.rs", "rank": 81, "score": 276978.48581671156 }, { "content": "pub fn FromLinux(rl: u64) -> u64 {\n\n if rl == RLIM_INFINITY {\n\n return INFINITY;\n\n }\n\n\n\n return rl;\n\n}\n\n\n", "file_path": "qlib/limits.rs", "rank": 82, "score": 275096.9287060363 }, { "content": "fn TryLockPid(task: &mut Task, addr: u64, private: bool) -> Result<()> {\n\n let waitEntry = task.blocker.generalEntry.clone();\n\n let tid = task.Thread().ThreadID();\n\n let locked = task\n\n .futexMgr\n\n .LockPI(&waitEntry, task, addr, tid as u32, private, true)?;\n\n if !locked {\n\n task.futexMgr.WaitComplete(&waitEntry);\n\n return Err(Error::SysError(SysErr::EWOULDBLOCK));\n\n }\n\n\n\n return Ok(());\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_futex.rs", "rank": 83, "score": 274570.74878103274 }, { "content": "pub fn ReadAll(task: &mut Task, file: &File, data: &mut [u8], offset: u64) -> Result<usize> {\n\n let mut data = data;\n\n let mut offset = offset;\n\n let mut cnt = 0;\n\n\n\n while data.len() > 0 {\n\n let mut iovecs: [IoVec; 1] = [IoVec {\n\n start: &data[0] as *const _ as u64,\n\n len: data.len(),\n\n }];\n\n\n\n let l = file.Preadv(task, &mut iovecs, offset as i64)? 
as usize;\n\n cnt += l;\n\n\n\n if l == data.len() || l == 0 {\n\n return Ok(cnt);\n\n }\n\n\n\n data = &mut data[l..];\n\n offset += l as u64;\n\n }\n\n\n\n return Ok(cnt);\n\n}\n\n\n", "file_path": "qlib/kernel/loader/elf.rs", "rank": 84, "score": 269250.239931867 }, { "content": "pub fn Accept4(task: &Task, fd: i32, addr: u64, addrlen: u64, flags: i32) -> Result<i64> {\n\n if flags & !(SocketFlags::SOCK_CLOEXEC | SocketFlags::SOCK_NONBLOCK) != 0 {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n let file = task.GetFile(fd)?;\n\n\n\n let sock = file.FileOp.clone();\n\n\n\n let blocking = !file.Flags().NonBlocking;\n\n\n\n let len = if addrlen == 0 {\n\n 0\n\n } else {\n\n let len = task.CopyInObj::<i32>(addrlen)?;\n\n\n\n if len < 0 {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n len as u32\n", "file_path": "qkernel/src/syscalls/sys_socket.rs", "rank": 85, "score": 266736.1252579675 }, { "content": "pub fn RandU128() -> Result<(u64, u64)> {\n\n let res: [u64; 2] = [0; 2];\n\n Random(&res[0] as *const _ as u64, 18, GRND_RANDOM)?;\n\n return Ok((res[0], res[1]));\n\n}\n", "file_path": "qlib/kernel/kernel_util.rs", "rank": 86, "score": 263251.9574961076 }, { "content": "#[inline]\n\npub fn GetFs() -> u64 {\n\n //unsafe{ llvm_asm!(\"movw $0, %fs \" :: \"r\" (0) : \"memory\");}\n\n return ReadMsr(MSR::MSR_FS_BASE as u32);\n\n}\n\n\n", "file_path": "qlib/kernel/vcpu.rs", "rank": 87, "score": 261065.2735442234 }, { "content": "#[inline]\n\npub fn GetGs() -> u64 {\n\n return ReadMsr(MSR::MSR_KERNEL_GS_BASE as u32);\n\n}\n\n\n\nimpl CPULocal {\n\n pub fn Myself() -> &'static Self {\n\n return &CPU_LOCAL[Self::CpuId() as usize];\n\n }\n\n\n\n pub fn NextUringIdx(cnt: u64) -> usize {\n\n let cpuId = Self::CpuId() as usize;\n\n return CPU_LOCAL[Self::CpuId() as usize].IncrUringMsgCnt(cnt) as usize + cpuId;\n\n }\n\n\n\n pub fn SetKernelStack(task: u64) {\n\n Self::Myself().kernelStack.store(task, Ordering::Relaxed); //the data only read in current thread\n\n }\n\n\n\n pub fn KernelStack() -> u64 {\n\n return Self::Myself().kernelStack.load(Ordering::Relaxed); //the data only read in current thread\n", "file_path": "qlib/kernel/vcpu.rs", "rank": 88, "score": 261065.2735442234 }, { "content": "pub fn xgetbv() -> u64 {\n\n let reg = 0u64;\n\n let val_l: u32;\n\n let val_h: u32;\n\n unsafe {\n\n llvm_asm!(\"\\\n\n xgetbv\n\n \" : \"={edx}\"(val_h), \"={eax}\"(val_l) : \"{rcx}\"(reg)\n\n : \"memory\" : \"intel\", \"volatile\")\n\n };\n\n let val = ((val_h as u64) << 32) | ((val_l as u64) & 0xffff);\n\n return val;\n\n}\n", "file_path": "qlib/kernel/asm/mod.rs", "rank": 89, "score": 261065.2735442234 }, { "content": "pub fn CopyInSigSet(task: &Task, sigSetAddr: u64, size: usize) -> Result<SignalSet> {\n\n if size != SIGNAL_SET_SIZE {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n let mask: u64 = task.CopyInObj(sigSetAddr)?;\n\n return Ok(SignalSet(mask & !UnblockableSignals().0));\n\n}\n", "file_path": "qlib/kernel/SignalDef.rs", "rank": 90, "score": 261040.5711775795 }, { "content": "pub fn RandU64() -> Result<u64> {\n\n let res: u64 = 0;\n\n Random(&res as *const _ as u64, 8, GRND_RANDOM)?;\n\n return Ok(res);\n\n}\n\n\n", "file_path": "qlib/kernel/kernel_util.rs", "rank": 91, "score": 260832.2074923747 }, { "content": "fn statfsImpl(task: &Task, d: &Dirent, addr: u64) -> Result<()> {\n\n let inode = d.Inode();\n\n let sattr = inode.lock().StableAttr().clone();\n\n\n\n let info = inode.StatFS(task)?;\n\n\n\n let statfs = LibcStatfs {\n\n Type: info.Type,\n\n 
BlockSize: sattr.BlockSize,\n\n Blocks: info.TotalBlocks,\n\n BlocksFree: info.FreeBlocks,\n\n BlocksAvailable: info.FreeBlocks,\n\n Files: info.TotalFiles,\n\n FilesFree: info.FreeFiles,\n\n NameLength: NAME_MAX as u64,\n\n FragmentSize: sattr.BlockSize,\n\n ..Default::default()\n\n };\n\n\n\n //let out: &mut LibcStatfs = task.GetTypeMut::<LibcStatfs>(addr)?;\n\n //*out = statfs;\n\n\n\n task.CopyOutObj(&statfs, addr)?;\n\n\n\n return Ok(());\n\n}\n", "file_path": "qkernel/src/syscalls/sys_stat.rs", "rank": 92, "score": 259799.04211363944 }, { "content": "pub fn ExceptionHandler(ev: ExceptionStackVec, ptRegs: &mut PtRegs, errorCode: u64) {\n\n let PRINT_EXECPTION: bool = SHARESPACE.config.read().PrintException;\n\n\n\n let currTask = Task::Current();\n\n\n\n let mut rflags = ptRegs.eflags;\n\n rflags &= !USER_FLAGS_CLEAR;\n\n rflags |= USER_FLAGS_SET;\n\n ptRegs.eflags = rflags;\n\n\n\n // is this call from user\n\n if ptRegs.ss & 0x3 != 0 {\n\n //PerfGofrom(PerfType::User);\n\n currTask.AccountTaskLeave(SchedState::RunningApp);\n\n } else {\n\n print!(\n\n \"get non page fault exception from kernel ... {:#x?}/ev {:#x?}\",\n\n ptRegs, ev\n\n );\n\n\n", "file_path": "qkernel/src/interrupt/mod.rs", "rank": 93, "score": 259031.75786939968 }, { "content": "pub fn InitX86FPState(data: u64, useXsave: bool) {\n\n unsafe { initX86FPState(data, useXsave) }\n\n}\n\n\n\nimpl HostAllocator {\n\n pub const fn New() -> Self {\n\n return Self {\n\n listHeapAddr: AtomicU64::new(0),\n\n initialized: AtomicBool::new(true),\n\n };\n\n }\n\n\n\n pub fn Init(&self, heapAddr: u64) {\n\n self.listHeapAddr.store(heapAddr, Ordering::SeqCst)\n\n }\n\n}\n\n\n\nunsafe impl GlobalAlloc for HostAllocator {\n\n unsafe fn alloc(&self, layout: Layout) -> *mut u8 {\n\n return self.Allocator().alloc(layout);\n\n }\n\n\n\n unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {\n\n self.Allocator().dealloc(ptr, layout);\n\n }\n\n}\n\n\n", "file_path": "qkernel/src/kernel_def.rs", "rank": 94, "score": 258969.4916687354 }, { "content": "pub fn InitX86FPState(_data: u64, _useXsave: bool) {}\n\n\n", "file_path": "qvisor/src/kernel_def.rs", "rank": 95, "score": 258969.49166873546 }, { "content": "pub fn PagesInChunk(r: &Range, chunkStart: u64) -> i32 {\n\n assert!(\n\n chunkStart & CHUNK_MASK == 0,\n\n \"chunkStart is {:x}\",\n\n chunkStart\n\n );\n\n let chunkRange = Range::New(chunkStart, CHUNK_SIZE);\n\n return (r.Intersect(&chunkRange).Len() / MemoryDef::PAGE_SIZE) as i32;\n\n}\n\n\n\nimpl Default for MappableInternal {\n\n fn default() -> Self {\n\n return Self {\n\n //lock: QLock::default(),\n\n f2pmap: BTreeMap::new(),\n\n mapping: AreaSet::New(0, core::u64::MAX),\n\n chunkrefs: BTreeMap::new(),\n\n };\n\n }\n\n}\n", "file_path": "qlib/kernel/fs/host/hostinodeop.rs", "rank": 96, "score": 258591.2935464232 }, { "content": "#[inline]\n\npub fn SysCall(task: &mut Task, nr: u64, args: &SyscallArguments) -> TaskRunState {\n\n let idx = nr as usize;\n\n let func = SYS_CALL_TABLE.get(idx).unwrap();\n\n match func(task, args) {\n\n Err(Error::SysCallRetCtrlWithRet(state, ret)) => {\n\n task.SetReturn(ret);\n\n return state;\n\n }\n\n Err(Error::SysCallRetCtrl(state)) => {\n\n return state;\n\n }\n\n Ok(res) => {\n\n task.SetReturn(res as u64);\n\n return TaskRunState::RunApp;\n\n }\n\n Err(Error::SysError(e)) => {\n\n task.haveSyscallReturn = true;\n\n task.SetReturn(-e as u64);\n\n return TaskRunState::RunApp;\n\n }\n", "file_path": "qkernel/src/syscalls/syscalls.rs", "rank": 97, "score": 257557.41416559176 }, { "content": "pub 
fn CaptureAddress(task: &Task, addr: u64, addrlen: u32) -> Result<Vec<u8>> {\n\n if addrlen > MAX_ADDR_LEN {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n //task.CheckPermission(addr, addrlen as u64, false, false)?;\n\n\n\n return task.CopyInVec(addr, addrlen as usize);\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct SockaddrIn {\n\n pub sin_family: u16,\n\n pub sin_port: u16,\n\n pub sin_addr: [u8; 4],\n\n pub sin_zero: [u8; 8],\n\n}\n\n\n", "file_path": "qkernel/src/syscalls/sys_socket.rs", "rank": 98, "score": 257360.6077488435 }, { "content": "pub fn accessAt(task: &Task, dirFd: i32, addr: u64, mode: u32) -> Result<()> {\n\n const R_OK: u32 = 4;\n\n const W_OK: u32 = 2;\n\n const X_OK: u32 = 1;\n\n\n\n let (path, _) = copyInPath(task, addr, false)?;\n\n\n\n info!(\"accessAt dirfd is {}, path is {}\", dirFd, &path);\n\n if mode & !(R_OK | W_OK | X_OK) != 0 {\n\n return Err(Error::SysError(SysErr::EINVAL));\n\n }\n\n\n\n return fileOpOn(\n\n task,\n\n dirFd,\n\n &path.to_string(),\n\n true,\n\n &mut |_root: &Dirent, d: &Dirent, _remainingTraversals: u32| -> Result<()> {\n\n {\n\n let creds = task.Creds().Fork();\n", "file_path": "qkernel/src/syscalls/sys_file.rs", "rank": 99, "score": 257360.6077488435 } ]
Rust
crates/plugins/physics-rapier/src/lib.rs
Hihaheho-Studios/desk
7f8ad48a3b9a5439e566d07aecab6185c2d95012
use core::DeskSystem; use bevy::prelude::*; use bevy_rapier2d::prelude::*; use physics::{shape::Shape, widget::WidgetId, DragState, Velocity}; pub struct PhysicsPlugin; const LINEAR_DAMPING: f32 = 8.0; impl Plugin for PhysicsPlugin { fn build(&self, app: &mut bevy::app::AppBuilder) { app.add_plugin(RapierPhysicsPlugin::<NoUserData>::default()) .add_plugin(RapierRenderPlugin) .add_startup_system(walls.system()) .insert_resource(RapierConfiguration { scale: 100.0, gravity: Vec2::ZERO.into(), ..Default::default() }) .add_system( add_physics_components .system() .after(DeskSystem::Shell) .before(DeskSystem::PrePhysics), ) .add_system_set( SystemSet::new() .label(DeskSystem::PrePhysics) .with_system(update_shape.system()) .with_system(update_velocity.system()) .with_system(update_drag_state.system()), ); } } fn walls(mut commands: Commands) { let mut camera = OrthographicCameraBundle::new_2d(); camera.transform.translation.x = 630.0; camera.transform.translation.y = 350.0; commands.spawn_bundle(LightBundle { light: Light { intensity: 100_000.0, range: 6000.0, ..Default::default() }, ..Default::default() }); commands.spawn_bundle(camera); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 0.0).into(), shape: ColliderShape::cuboid(0.1, 9.0), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(10.0, 0.0).into(), shape: ColliderShape::cuboid(0.1, 9.0), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 0.0).into(), shape: ColliderShape::cuboid(12.0, 0.1), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 7.0).into(), shape: ColliderShape::cuboid(12.0, 0.1), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); } fn add_physics_components( rapier: Res<RapierConfiguration>, mut commands: Commands, query: Query<(Entity, &GlobalTransform), Added<WidgetId>>, ) { for (card, transform) in query.iter() { commands .entity(card) .insert_bundle(RigidBodyBundle { position: (transform.translation / rapier.scale).into(), mass_properties: RigidBodyMassPropsFlags::ROTATION_LOCKED.into(), damping: RigidBodyDamping { linear_damping: LINEAR_DAMPING, ..Default::default() }, ..Default::default() }) .insert(RigidBodyPositionSync::Discrete) .with_children(|build| { build.spawn_bundle(ColliderBundle { shape: ColliderShape::cuboid(0.1, 0.1), ..Default::default() }); }); } } fn update_shape( rapier: Res<RapierConfiguration>, shape: Query<(&Shape, Entity, &Children)>, mut collider: Query<(&mut ColliderShape, &mut ColliderParent)>, ) { for (shape, entity, children) in shape.iter() { std::iter::once(&entity) .to_owned() .chain(children.iter()) .for_each(|&entity| { if let Ok((mut collider_shape, mut parent)) = collider.get_mut(entity) { use Shape::*; match shape { Rect { width, height } => { let width = *width / rapier.scale / 2.0; let height = *height / rapier.scale / 2.0; *collider_shape = ColliderShape::cuboid(width, height); parent.pos_wrt_parent.translation = Vec2::new(width, -height).into(); } Blank => {} _ => todo!(), }; } }); } } fn update_velocity( rapier: Res<RapierConfiguration>, mut query: Query<(&mut RigidBodyVelocity, &Velocity), Changed<Velocity>>, ) { for (mut rapier_velocity, velocity) in 
query.iter_mut() { rapier_velocity.linvel.x = velocity.0.x / rapier.scale; rapier_velocity.linvel.y = velocity.0.y / rapier.scale; } } fn update_drag_state(mut query: Query<(&mut RigidBodyDamping, &DragState), Changed<DragState>>) { for (mut damping, drag_state) in query.iter_mut() { use DragState::*; match drag_state { Dragging => { damping.linear_damping = 0.0; } NotDragging => { damping.linear_damping = LINEAR_DAMPING; } } } }
use core::DeskSystem; use bevy::prelude::*; use bevy_rapier2d::prelude::*; use physics::{shape::Shape, widget::WidgetId, DragState, Velocity}; pub struct PhysicsPlugin; const LINEAR_DAMPING: f32 = 8.0; impl Plugin for PhysicsPlugin { fn build(&self, app: &mut bevy::app::AppBuilder) { app.add_plugin(RapierPhysicsPlugin::<NoUserData>::default()) .add_plugin(RapierRenderPlugin) .add_startup_system(walls.system()) .insert_resource(RapierConfiguration { scale: 100.0, gravity: Vec2::ZERO.into(), ..Default::default() }) .add_system( add_physics_components .system() .after(DeskSystem::Shell) .before(DeskSystem::PrePhysics), ) .add_system_set( SystemSet::new() .label(DeskSystem::PrePhysics) .with_system(update_shape.system()) .with_system(update_velocity.system()) .with_system(update_drag_state.system()), ); } } fn walls(mut commands: Commands) { let mut camera = OrthographicCameraBundle::new_2d(); camera.transform.translation.x = 630.0; camera.transform.translation.y = 350.0; commands.spawn_bundle(LightBundle { light: Light { intensity: 100_000.0, range: 6000.0, ..Default::default() }, ..Default::default() }); commands.spawn_bundle(camera); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 0.0).into(), shape: ColliderShape::cuboid(0.1, 9.0), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(10.0, 0.0).into(), shape: ColliderShape::cuboid(0.1, 9.0), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 0.0).into(), shape: ColliderShape::cuboid(12.0, 0.1), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); commands .spawn_bundle(ColliderBundle { position: Vec2::new(0.0, 7.0).into(), shape: ColliderShape::cuboid(12.0, 0.1), ..Default::default() }) .insert(ColliderPositionSync::Discrete) .insert(ColliderDebugRender::default()); } fn add_physics_components( rapier: Res<RapierConfiguration>, mut commands: Commands, query: Query<(Entity, &GlobalTransform), Added<WidgetId>>, ) { for (card, transform) in query.iter() { commands .entity(card) .insert_bundle(RigidBodyBundle { position: (transform.translation / rapier.scale).into(), mass_properties: RigidBodyMassPropsFlags::ROTATION_LOCKED.into(), damping: RigidBodyDamping { linear_damping: LINEAR_DAMPING, ..Default::default() }, ..Default::default() }) .insert(RigidBodyPositionSync::Discrete) .with_children(|build| { build.spawn_bundle(ColliderBundle { shape: ColliderShape::cuboid(0.1, 0.1), ..Default::default() }); }); } } fn update_shape( rapier: Res<RapierConfiguration>, shape: Query<(&Shape, Entity, &Children)>, mut collider: Query<(&mut ColliderShape, &mut ColliderParent)>, ) { for (shape, entity, children) in shape.iter() { std::iter::once(&entity) .to_owned() .chain(children.iter()) .for_each(|&entity| { if let Ok((mut collider_shape, mut parent)) = collider.get_mut(entity) { use Shape::*; match shape { Rect { width, height } => { let width = *width / rapier.scale / 2.0; let height = *height / rapier.scale / 2.0; *collider_shape = ColliderShape::cuboid(width, height); parent.pos_wrt_parent.translation = Vec2::new(width, -height).into(); } Blank => {} _ => todo!(), }; } }); } } fn update_velocity( rapier: Res<RapierConfiguration>, mut query: Query<(&mut RigidBodyVelocity, &Velocity), Changed<Velocity>>, ) { for (mut rapier_velocity, velocity) in 
query.iter_mut() { rapier_velocity.linvel.x = velocity.0.x / rapier.scale; rapier_velocity.linvel.y = velocity.0.y / rapier.scale; } } fn update_drag_state(mut query: Query<(&mut RigidBodyDam
ping, &DragState), Changed<DragState>>) { for (mut damping, drag_state) in query.iter_mut() { use DragState::*; match drag_state { Dragging => { damping.linear_damping = 0.0; } NotDragging => { damping.linear_damping = LINEAR_DAMPING; } } } }
function_block-function_prefixed
[ { "content": "fn reset_velocity(mut query: Query<&mut Velocity>) {\n\n for mut velocity in query.iter_mut() {\n\n velocity.0 = Vec2::ZERO;\n\n }\n\n}\n\n\n", "file_path": "crates/plugins/shell/src/lib.rs", "rank": 0, "score": 207939.65898868084 }, { "content": "fn translate_position(pos: Vec2, wnd: &Window, camera: &Transform) -> Vec2 {\n\n // get the size of the window\n\n let size = Vec2::new(wnd.width() as f32, wnd.height() as f32);\n\n\n\n // the default orthographic projection is in pixels from the center;\n\n // just undo the translation\n\n let p = pos - size / 2.0;\n\n\n\n // apply the camera transform\n\n (camera.compute_matrix() * p.extend(0.0).extend(1.0))\n\n .truncate()\n\n .truncate()\n\n}\n", "file_path": "crates/plugins/shell/src/cursor_systems.rs", "rank": 1, "score": 200978.73188732495 }, { "content": "pub fn follow(\n\n mut query_set: QuerySet<(\n\n Query<&Follow<Entity>>,\n\n Query<&Transform>,\n\n Query<(&mut Velocity, &Transform, &Follow<Entity>)>,\n\n )>,\n\n) {\n\n let mut entities = Vec::new();\n\n for follow in query_set.q0().iter() {\n\n entities.push(follow.target);\n\n }\n\n let mut vecs = HashMap::new();\n\n for entity in entities {\n\n if let Ok(transform) = query_set.q1().get(entity) {\n\n vecs.insert(entity, transform.translation.truncate());\n\n }\n\n }\n\n for (mut velocity, transform, follow) in query_set.q2_mut().iter_mut() {\n\n if let Some(target) = vecs.get(&follow.target) {\n\n let vec = transform.translation.truncate();\n\n *velocity = &*velocity + follow.parameters.follow_vector(&vec, target);\n\n }\n\n }\n\n}\n", "file_path": "crates/plugins/shell/src/follow_system.rs", "rank": 4, "score": 164542.28184474734 }, { "content": "#[derive(Bundle)]\n\nstruct CardBundle {\n\n code_id: CodeId,\n\n code: Code,\n\n transform: Transform,\n\n global_transform: GlobalTransform,\n\n code_operations: CodeOperations,\n\n widget_event_handler: CodeWidgetEventHandler,\n\n}\n\n\n\nimpl Default for CardBundle {\n\n fn default() -> Self {\n\n CardBundle {\n\n code_id: CodeId::new(),\n\n code: n::string(\"\"),\n\n transform: Transform::default(),\n\n global_transform: GlobalTransform::default(),\n\n code_operations: Default::default(),\n\n widget_event_handler: CodeWidgetEventHandler,\n\n }\n\n }\n", "file_path": "crates/plugins/shell/src/card_systems.rs", "rank": 5, "score": 164074.56832174404 }, { "content": "fn handle_operation(mut query: Query<(&mut Code, &CodeOperations)>) {\n\n for (mut code, operations) in query.iter_mut() {\n\n for operation in operations.iter() {\n\n let applied = code.apply_operation(operation);\n\n if let Ok(applied) = applied {\n\n *code = applied;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/plugins/language/src/lib.rs", "rank": 6, "score": 162287.37606295221 }, { "content": "pub fn connect_websocket(mut client_res: ResMut<Option<BoxClient>>) {\n\n use std::mem::forget;\n\n use tokio::runtime::Runtime;\n\n\n\n let rt = Runtime::new().expect(\"failed to start a runtime\");\n\n\n\n // Spawn the root task\n\n rt.block_on(connect(\"ws://127.0.0.1:5000/ws\".into()))\n\n .map(|client| {\n\n *client_res = Some(Box::new(client));\n\n })\n\n .unwrap_or_else(|err| error!(\"{}\", err));\n\n\n\n forget(rt);\n\n}\n\n\n", "file_path": "crates/plugins/protocol/src/native.rs", "rank": 7, "score": 155307.0034447463 }, { "content": "pub fn set_client(mut client_res: ResMut<Option<BoxClient>>) {\n\n let mut map = CLIENTS.lock().unwrap();\n\n if let Some(client) = map.remove(&DEFAULT_CLIENT) {\n\n *client_res = Some(client);\n\n }\n\n}\n\n\n", 
"file_path": "crates/plugins/protocol/src/wasm.rs", "rank": 8, "score": 155307.0034447463 }, { "content": "pub fn toggle_follow_for_drag_state(\n\n mut commands: Commands,\n\n query_set: QuerySet<(\n\n Query<(Entity, &Transform), With<Cursor>>,\n\n Query<(Entity, &Transform, &DragState), Changed<DragState>>,\n\n )>,\n\n) {\n\n let (cursor, cursor_vec) = if let Ok((entity, transform)) = query_set.q0().single() {\n\n (entity, transform.translation.truncate())\n\n } else {\n\n return;\n\n };\n\n for (entity, transform, drag_state) in query_set.q1().iter() {\n\n match drag_state {\n\n DragState::Dragging => {\n\n let follow: Follow<Entity> = Follow {\n\n target: cursor,\n\n parameters: FollowParams {\n\n position_offset: cursor_vec - transform.translation.truncate(),\n\n ignore_area_size: 5.0,\n", "file_path": "crates/plugins/shell/src/drag_system.rs", "rank": 9, "score": 154899.50097447602 }, { "content": "pub fn block_on<T>(future: impl Future<Output = T>) {\n\n future::block_on(future);\n\n}\n", "file_path": "crates/plugins/protocol/src/native.rs", "rank": 10, "score": 150998.5066859462 }, { "content": "fn send_commands(client: ResMut<Option<BoxClient>>, mut commands: ResMut<protocol::Commands>) {\n\n if let Some(ref client) = *client {\n\n for command in commands.iter() {\n\n let command = command.clone();\n\n let mut sender = client.sender();\n\n block_on(async move {\n\n sender.send(command).await.unwrap_or_else(|err| {\n\n error!(\"{}\", err);\n\n })\n\n });\n\n }\n\n commands.clear();\n\n }\n\n}\n", "file_path": "crates/plugins/protocol/src/lib.rs", "rank": 11, "score": 149288.02930381618 }, { "content": "pub fn blank() -> Component {\n\n Component::Blank\n\n}\n\n\n", "file_path": "crates/components/physics/src/widget/component/sugar.rs", "rank": 12, "score": 149172.50321143956 }, { "content": "pub fn block_on<T>(future: impl Future<Output = T> + 'static) {\n\n wasm_bindgen_futures::spawn_local(async { future.map(|_| ()).await });\n\n}\n", "file_path": "crates/plugins/protocol/src/wasm.rs", "rank": 13, "score": 145950.71492880493 }, { "content": "#[bevy_main]\n\npub fn main() {\n\n let mut app = App::build();\n\n\n\n app.add_plugins(DefaultPlugins)\n\n .add_plugin(ShellPlugin)\n\n .add_plugin(LanguagePlugin)\n\n .add_plugin(PhysicsPlugin)\n\n .add_plugin(ProtocolPlugin)\n\n .add_plugin(EguiBackendPlugin);\n\n\n\n #[cfg(target_arch = \"wasm32\")]\n\n app.add_plugin(plugin_wasm_target::WasmTargetPlugin);\n\n\n\n #[cfg(feature = \"bevy_mod_debugdump\")]\n\n {\n\n std::fs::write(\n\n \"target/schedule_graph.dot\",\n\n bevy_mod_debugdump::schedule_graph::schedule_graph_dot(&app.app.schedule),\n\n );\n\n std::process::exit(0);\n\n }\n\n\n\n app.run();\n\n}\n", "file_path": "crates/apps/client/src/main.rs", "rank": 16, "score": 136810.17259958666 }, { "content": "pub fn connect_websocket() {\n\n spawn_local(\n\n connect(DEFAULT_CLIENT, \"ws://127.0.0.1:5000/ws\".into()).then(|client| {\n\n ready(\n\n client\n\n .map(|client| -> BoxClient { Box::new(client) })\n\n .map(|client| {\n\n CLIENTS.lock().unwrap().insert(DEFAULT_CLIENT, client);\n\n })\n\n .unwrap_or_else(|err| error!(\"{}\", err)),\n\n )\n\n }),\n\n );\n\n}\n\n\n", "file_path": "crates/plugins/protocol/src/wasm.rs", "rank": 17, "score": 133091.6642948424 }, { "content": "pub fn http_client(ctx: &mut Context) {\n\n let client = Client::new();\n\n ctx.insert(client);\n\n}\n\n\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 18, "score": 132530.47823881902 }, { "content": "pub fn start_mock_server(ctx: &mut 
Context) {\n\n let server = MockServer::start();\n\n ctx.insert::<Uri>(server.base_url().try_into().unwrap());\n\n ctx.insert(server);\n\n}\n\n\n\npub struct ContainerId(pub String);\n\n\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 19, "score": 130561.89922944191 }, { "content": "pub fn start_desk_server(ctx: &mut Context) {\n\n let port = pick_unused_port().unwrap();\n\n let container_id = run_fun! {\n\n docker run --rm -d -p $port:8080 -e PORT=8080 gcr.io/hihaheho/desk-server:latest;\n\n }\n\n .unwrap();\n\n let url = format!(\"http://localhost:{}\", port);\n\n while run_cmd!(curl $url > /dev/null).is_err() {\n\n sleep(Duration::from_secs(1));\n\n }\n\n ctx.insert::<Uri>(url.try_into().unwrap());\n\n ctx.insert(ContainerId(container_id));\n\n ctx.after(stop_desk_server);\n\n}\n\n\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 20, "score": 130561.89922944191 }, { "content": "pub fn string(value: impl Into<String>) -> Code {\n\n node(\n\n CodeData::Literal {\n\n value: LiteralValue::String(value.into()),\n\n },\n\n Type::String,\n\n )\n\n}\n\n\n", "file_path": "crates/components/language/src/code/node/sugar.rs", "rank": 21, "score": 126609.19778425117 }, { "content": "fn resize(mut windows: ResMut<Windows>) {\n\n let js_window = web_sys::window().unwrap();\n\n let window = windows.get_primary_mut().unwrap();\n\n window.set_resolution(\n\n js_window.inner_width().unwrap().as_f64().unwrap() as f32,\n\n js_window.inner_height().unwrap().as_f64().unwrap() as f32,\n\n );\n\n}\n", "file_path": "crates/plugins/wasm-target/src/lib.rs", "rank": 22, "score": 125425.65112900232 }, { "content": "#[derive(Bundle)]\n\nstruct CursorBundle {\n\n cursor: Cursor,\n\n}\n\n\n\npub(crate) fn add_cursor(mut commands: Commands) {\n\n commands\n\n .spawn()\n\n .insert_bundle((Cursor, Transform::default()));\n\n}\n\npub(crate) fn move_cursor(\n\n windows: Res<Windows>,\n\n mut query_set: QuerySet<(\n\n Query<&Transform, With<Camera>>,\n\n Query<&mut Transform, With<Cursor>>,\n\n )>,\n\n) {\n\n if let Some((window, position)) = windows\n\n .get_primary()\n\n .and_then(|window| window.cursor_position().map(|pos| (window, pos)))\n\n {\n", "file_path": "crates/plugins/shell/src/cursor_systems.rs", "rank": 23, "score": 122858.43036932273 }, { "content": "#[derive(Bundle, Default)]\n\nstruct TerminalBundle {\n\n shell: Terminal,\n\n terminal_operations: TerminalOperations,\n\n transform: Transform,\n\n global_transform: GlobalTransform,\n\n widget_event_handler: TerminalWidgetEventHandler,\n\n}\n\npub(crate) fn widget_adding_for_terminal(\n\n mut command: Commands,\n\n cursor: Query<Entity, With<Cursor>>,\n\n query: Query<Entity, Added<Terminal>>,\n\n) {\n\n for entity in query.iter() {\n\n command\n\n .entity(entity)\n\n .insert(WidgetId::from(\"terminal\"))\n\n .insert(Follow {\n\n target: cursor.single().unwrap(),\n\n parameters: FollowParams {\n\n ignore_area_size: 80.0,\n", "file_path": "crates/plugins/shell/src/terminal_systems.rs", "rank": 24, "score": 122858.43036932273 }, { "content": "pub fn typing(_node: &mut Code) -> Result<(), TypingError> {\n\n todo!()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_literal;\n", "file_path": "crates/components/language/src/code/typing/mod.rs", "rank": 25, "score": 122043.56088433108 }, { "content": "fn receive_events(mut client: ResMut<Option<BoxClient>>, mut events: ResMut<protocol::Events>) {\n\n if let Some(ref mut client) = *client {\n\n if let Some(vec) = client.poll_once() {\n\n for event in vec {\n\n events.push(event);\n\n 
}\n\n }\n\n }\n\n}\n", "file_path": "crates/plugins/protocol/src/lib.rs", "rank": 26, "score": 117684.28733418789 }, { "content": "pub fn render_node(node: &Code) -> Component {\n\n use CodeData::*;\n\n match &node.data {\n\n Literal { value } => match value {\n\n language::code::node::LiteralValue::Unit => c::blank(),\n\n language::code::node::LiteralValue::Label(_) => {\n\n todo!()\n\n }\n\n language::code::node::LiteralValue::Bool(_) => {\n\n todo!()\n\n }\n\n language::code::node::LiteralValue::String(value) => c::input_string(\"TODO\", value),\n\n language::code::node::LiteralValue::Number(value) => render_literal_number(value),\n\n language::code::node::LiteralValue::Array(_) => {\n\n todo!()\n\n }\n\n language::code::node::LiteralValue::Product(_) => {\n\n todo!()\n\n }\n\n language::code::node::LiteralValue::Sum(_) => {\n", "file_path": "crates/systems/shell-language/src/lib.rs", "rank": 27, "score": 115024.57751861474 }, { "content": "pub fn render_terminal(_terminal: &Terminal) -> Component {\n\n c::vertical_array(vec![\n\n c::label(\"I'm your friend.\"),\n\n c::horizontal_array(vec![c::label(\">\"), c::input_string(\"command\", \"\")]),\n\n ])\n\n}\n", "file_path": "crates/systems/shell-terminal/src/lib.rs", "rank": 28, "score": 115024.57751861474 }, { "content": "fn add_backend(mut backends: ResMut<Backends>, egui_context: ResMut<EguiContext>) {\n\n let backend = EguiBackend {\n\n ctx: egui_context.ctx().clone(),\n\n };\n\n backends.insert(EGUI_BACKEND, backend);\n\n}\n\n\n", "file_path": "crates/plugins/egui/src/lib.rs", "rank": 29, "score": 113997.00102521318 }, { "content": "pub trait IntoEventHandlerSystem {\n\n fn event_handler_system(self) -> ParallelSystemDescriptor;\n\n}\n\n\n\nimpl<T: EventHandler> IntoEventHandlerSystem for EventHandlerWrapper<T> {\n\n fn event_handler_system(self) -> ParallelSystemDescriptor {\n\n handle_widget_event::<T>\n\n .system()\n\n .label(EventHandlerSystem::<T>::Handle)\n\n }\n\n}\n\n\n", "file_path": "crates/plugins/shell/src/event_handler.rs", "rank": 30, "score": 112508.34792824375 }, { "content": "pub fn render_literal_number(number: &NumberLiteral) -> Component {\n\n use NumberLiteral::*;\n\n match number {\n\n Integer(value) => c::input_integer(\"TODO\", *value),\n\n _ => todo!(),\n\n }\n\n}\n", "file_path": "crates/systems/shell-language/src/literal_value.rs", "rank": 32, "score": 109130.34409502984 }, { "content": "}\n\n\n\npub(crate) fn widget_adding_for_cards(\n\n mut command: Commands,\n\n query: Query<(Entity, &CodeId), Added<CodeId>>,\n\n) {\n\n for (entity, id) in query.iter() {\n\n command\n\n .entity(entity)\n\n .insert(WidgetId::from(id.to_string()))\n\n .insert_bundle(WidgetBundle::default());\n\n }\n\n}\n\n\n\npub(crate) fn card_rendering(mut query: Query<(&Code, Option<&Computed>, &mut Component)>) {\n\n for (node, _computed, mut component) in query.iter_mut() {\n\n let new_component = render_node(node);\n\n if *component != new_component {\n\n *component = new_component;\n\n }\n\n }\n\n}\n", "file_path": "crates/plugins/shell/src/card_systems.rs", "rank": 33, "score": 100036.8378970507 }, { "content": "}\n\n\n\npub(crate) fn create_card(mut commands: Commands) {\n\n // commands.spawn_bundle(CardBundle {\n\n // node: sugar::add(sugar::integer(1), sugar::integer(2)),\n\n // transform: Transform::from_xyz(100.0, 300.0, 0.0),\n\n // ..Default::default()\n\n // });\n\n\n\n commands.spawn_bundle(CardBundle {\n\n code: n::integer(1),\n\n transform: Transform::from_xyz(300.0, 200.0, 0.0),\n\n ..Default::default()\n\n });\n\n\n\n 
commands.spawn_bundle(CardBundle {\n\n code: n::string(\"aaaa\"),\n\n transform: Transform::from_xyz(100.0, 500.0, 0.0),\n\n ..Default::default()\n\n });\n", "file_path": "crates/plugins/shell/src/card_systems.rs", "rank": 34, "score": 100033.37906861036 }, { "content": "use bevy::prelude::*;\n\nuse language::{\n\n code::{\n\n node::{sugar as n, Code},\n\n operation::CodeOperations,\n\n CodeId,\n\n },\n\n Computed,\n\n};\n\nuse physics::widget::{component::Component, WidgetId};\n\nuse shell_language::{render_node, CodeWidgetEventHandler};\n\n\n\nuse crate::widget_bundle::WidgetBundle;\n\n\n\n#[derive(Bundle)]\n", "file_path": "crates/plugins/shell/src/card_systems.rs", "rank": 35, "score": 100021.92684114608 }, { "content": "fn handle_event(_widget: &Widget, event: &WidgetEvent) -> impl Iterator<Item = CodeOperation> {\n\n use WidgetEvent::*;\n\n match event {\n\n UpdateString { id: _id, value } => {\n\n // TODO handle id\n\n vec![CodeOperation::UpdateString(value.clone())].into_iter()\n\n }\n\n _ => vec![].into_iter(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use bevy_math::Vec3;\n\n use physics::widget::component::sugar as c;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn updates_string() {\n", "file_path": "crates/systems/shell-language/src/event_handler.rs", "rank": 36, "score": 96822.50853691917 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct Config {\n\n #[serde(default = \"default_port\")]\n\n port: u16,\n\n}\n\n\n\n#[tokio::main]\n\npub async fn main() {\n\n let subscriber = Registry::default().with(fmt::Layer::new().json());\n\n tracing::subscriber::set_global_default(subscriber).expect(\"set_global_default failed\");\n\n\n\n let config = envy::from_env::<Config>().unwrap();\n\n let app = route(\"/ws\", ws(handle_socket));\n\n\n\n hyper::Server::bind(&SocketAddr::from(([0, 0, 0, 0], config.port)))\n\n .serve(app.into_make_service())\n\n .await\n\n .unwrap();\n\n}\n\n\n\nasync fn handle_socket(socket: WebSocket) {\n", "file_path": "crates/apps/server/src/main.rs", "rank": 37, "score": 96229.94144017054 }, { "content": "fn add_font(egui_context: ResMut<EguiContext>) {\n\n let ctx = egui_context.ctx();\n\n let mut fonts = FontDefinitions::default();\n\n\n\n fonts.font_data.insert(\n\n \"mplus\".to_owned(),\n\n std::borrow::Cow::Borrowed(include_bytes!(\n\n \"../../../../assets/fonts/MPLUSRounded1c-Regular.ttf\"\n\n )),\n\n );\n\n\n\n // Put my font first (highest priority):\n\n fonts\n\n .fonts_for_family\n\n .get_mut(&FontFamily::Proportional)\n\n .unwrap()\n\n .insert(0, \"mplus\".to_owned());\n\n\n\n ctx.set_fonts(fonts);\n\n}\n\n\n", "file_path": "crates/plugins/egui/src/lib.rs", "rank": 38, "score": 95533.91009725213 }, { "content": "pub fn builder() -> ContextBuilder {\n\n Default::default()\n\n}\n\n\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 39, "score": 93759.8742947131 }, { "content": "fn call(context: &mut Context) {\n\n let called = context.get::<Rc<RefCell<bool>>>().unwrap().clone();\n\n *called.borrow_mut() = true;\n\n}\n\n\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 40, "score": 92108.71640544273 }, { "content": "fn stop_desk_server(ctx: &mut Context) {\n\n let container_id = ctx.get::<ContainerId>().unwrap().0.clone();\n\n let _ = spawn!(\n\n docker stop $container_id > /dev/null;\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 41, "score": 89292.39054056632 }, { "content": "fn render(ui: &mut Ui, event_buffer: &mut Vec<WidgetEvent>, component: 
&Component) {\n\n use Component::*;\n\n match component {\n\n InputInteger { id: _, value } => {\n\n let mut value = value.0;\n\n ui.add(egui::Slider::new(&mut value, 0..=10));\n\n }\n\n Blank => {}\n\n InputString { id, value } => {\n\n let mut value = value.to_owned();\n\n let response = ui.text_edit_singleline(&mut value);\n\n\n\n if response.changed() {\n\n let id = id.to_owned();\n\n event_buffer.push(WidgetEvent::UpdateString { id, value });\n\n }\n\n if response.lost_focus() {\n\n let id = id.to_owned();\n\n event_buffer.push(WidgetEvent::LostFocus { id });\n\n }\n", "file_path": "crates/adapters/widget-backend-egui/src/lib.rs", "rank": 42, "score": 88104.55922092564 }, { "content": "pub fn integer(value: i32) -> Code {\n\n node(\n\n CodeData::Literal {\n\n value: LiteralValue::Number(NumberLiteral::Integer(value)),\n\n },\n\n Type::Number,\n\n )\n\n}\n\n\n", "file_path": "crates/components/language/src/code/node/sugar.rs", "rank": 43, "score": 86752.51408725945 }, { "content": "pub trait WidgetSystem {\n\n fn render(&self) -> Widget;\n\n fn update(&mut self, events: dyn Iterator<Item = WidgetEvent>) -> WidgetEvent;\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Widget {\n\n pub id: WidgetId,\n\n pub backend_id: WidgetBackendId,\n\n pub position: Vec3,\n\n pub component: Component,\n\n}\n", "file_path": "crates/components/physics/src/widget/mod.rs", "rank": 44, "score": 84841.40664721437 }, { "content": "fn run(\n\n mut commands: Commands,\n\n mut runtimes: ResMut<Runtimes>,\n\n query: Query<(Entity, &Code, Option<&RuntimeId>), Changed<Code>>,\n\n) {\n\n for (entity, code, runtime_id) in query.iter() {\n\n if let Some(runtime) = runtime_id.and_then(|id| runtimes.get_mut(id)).or(None) {\n\n commands\n\n .entity(entity)\n\n .insert(Computed(runtime.run(code).unwrap()));\n\n }\n\n }\n\n}\n", "file_path": "crates/plugins/language/src/lib.rs", "rank": 45, "score": 83442.49865401335 }, { "content": "fn http_server_returns_200(ctx: &mut Context) -> MockRef {\n\n let server: &mut MockServer = ctx.get_mut().unwrap();\n\n server.mock(|when, then| {\n\n when.method(Method::GET).path(\"/ok\");\n\n then.status(200);\n\n })\n\n}\n", "file_path": "crates/tests/desk-server/src/support/http_request.rs", "rank": 46, "score": 82915.51108091511 }, { "content": "pub fn horizontal_array(items: Vec<Component>) -> Component {\n\n Component::Array {\n\n orientation: super::Orientation::Horizontal,\n\n items,\n\n }\n\n}\n\n\n", "file_path": "crates/components/physics/src/widget/component/sugar.rs", "rank": 47, "score": 81888.67249303297 }, { "content": "pub fn vertical_array(items: Vec<Component>) -> Component {\n\n Component::Array {\n\n orientation: super::Orientation::Vertical,\n\n items,\n\n }\n\n}\n\n\n", "file_path": "crates/components/physics/src/widget/component/sugar.rs", "rank": 48, "score": 81888.67249303297 }, { "content": "fn handle_events(\n\n mut state: ResMut<Option<ClientStateDispatcher>>,\n\n mut commands: ResMut<protocol::Commands>,\n\n events: ResMut<protocol::Events>,\n\n) {\n\n if let Some(ref mut state) = *state {\n\n let mut context = ClientContext {\n\n commands: Default::default(),\n\n };\n\n for event in events.iter() {\n\n *state = state.handle(&mut context, &ClientInput::Event(event.clone()));\n\n }\n\n *commands = context.commands;\n\n }\n\n}\n\n\n", "file_path": "crates/plugins/protocol/src/lib.rs", "rank": 49, "score": 81568.52238252202 }, { "content": "fn update_widget(\n\n mut commands: Commands,\n\n windows: Res<Windows>,\n\n camera: Query<(&Camera, 
&GlobalTransform)>,\n\n mut query: Query<(\n\n Entity,\n\n Option<&mut Widget>,\n\n &WidgetId,\n\n &Component,\n\n &GlobalTransform,\n\n )>,\n\n) {\n\n let (camera, camera_transform) = camera.single().unwrap();\n\n for (entity, widget, id, component, transform) in query.iter_mut() {\n\n if let Some(position) =\n\n camera.world_to_screen(&windows, camera_transform, transform.translation)\n\n {\n\n let new_widget = Widget {\n\n id: id.to_owned(),\n\n backend_id: EGUI_BACKEND.into(),\n", "file_path": "crates/plugins/egui/src/lib.rs", "rank": 50, "score": 81568.52238252202 }, { "content": "fn widget_rendering(\n\n _time: Res<Time>,\n\n mut backends: ResMut<Backends>,\n\n mut query: Query<(\n\n &Widget,\n\n &mut Shape,\n\n Option<&mut DragState>,\n\n &mut WidgetEvents,\n\n )>,\n\n) {\n\n for (widget, mut shape, drag_state, mut widget_events) in query.iter_mut() {\n\n if let Some(backend) = backends.get_mut(&widget.backend_id) {\n\n let response = backend.render(widget);\n\n if *shape != response.shape {\n\n *shape = response.shape.clone();\n\n }\n\n\n\n if let Some(mut drag_state) = drag_state {\n\n if *drag_state != response.drag_state {\n\n *drag_state = response.drag_state.clone();\n\n }\n\n }\n\n *widget_events = response.events;\n\n }\n\n }\n\n}\n", "file_path": "crates/plugins/shell/src/lib.rs", "rank": 51, "score": 81568.52238252202 }, { "content": "fn command_sender(\n\n tx: impl Sink<Vec<u8>, Error = String> + Clone + Send + Sync + 'static + Unpin,\n\n) -> impl Sink<Command, Error = String> + Clone + Send + Sync + 'static + Unpin {\n\n tx.with(|command: Command| -> Ready<Result<Vec<u8>, String>> {\n\n match serde_cbor::to_vec(&command) {\n\n Ok(vec) => ready(Ok(vec)),\n\n Err(err) => {\n\n error!(\"{}\", err);\n\n ready(Err(err.to_string()))\n\n }\n\n }\n\n })\n\n}\n", "file_path": "crates/adapters/client-websocket/src/lib.rs", "rank": 52, "score": 80795.29202911112 }, { "content": "pub fn add(left: Code, right: Code) -> Code {\n\n node(\n\n CodeData::ApplyBinaryOperator {\n\n operator: BinaryOperator::Arithmetic(BinaryArithmeticOperator::Add),\n\n operands: (Box::new(left), Box::new(right)),\n\n },\n\n Type::Number,\n\n )\n\n}\n", "file_path": "crates/components/language/src/code/node/sugar.rs", "rank": 53, "score": 79977.20015916516 }, { "content": "fn add_client_state(\n\n client: ResMut<Option<BoxClient>>,\n\n mut state: ResMut<Option<ClientStateDispatcher>>,\n\n) {\n\n if client.is_some() && state.is_none() {\n\n *state = Some(Default::default())\n\n }\n\n}\n\n\n", "file_path": "crates/plugins/protocol/src/lib.rs", "rank": 54, "score": 79799.88923553014 }, { "content": "fn default_port() -> u16 {\n\n 5000\n\n}\n\n\n", "file_path": "crates/apps/server/src/main.rs", "rank": 55, "score": 78995.48088432294 }, { "content": "pub fn node(data: CodeData, type_: Type) -> Code {\n\n Code {\n\n data,\n\n type_,\n\n metadata: None,\n\n }\n\n}\n\n\n", "file_path": "crates/components/language/src/code/node/sugar.rs", "rank": 56, "score": 78739.0221199069 }, { "content": "pub fn label<T: Into<String>>(label: T) -> Component {\n\n Component::Label(label.into())\n\n}\n\n\n", "file_path": "crates/components/physics/src/widget/component/sugar.rs", "rank": 57, "score": 78300.84267057077 }, { "content": "fn handle_widget_event<Handler: EventHandler>(\n\n mut query: Query<(\n\n &Handler::Context,\n\n &Handler,\n\n &Handler::Events,\n\n &mut Handler::Output,\n\n )>,\n\n) {\n\n for (context, handler, events, mut output) in query.iter_mut() {\n\n *output = handler.handle(context, events)\n\n }\n\n}\n", 
"file_path": "crates/plugins/shell/src/event_handler.rs", "rank": 58, "score": 70307.66397632219 }, { "content": "pub fn input_string<I: Into<InputId>, T: Into<String>>(id: I, default: T) -> Component {\n\n Component::InputString {\n\n id: id.into(),\n\n value: default.into(),\n\n }\n\n}\n\n\n", "file_path": "crates/components/physics/src/widget/component/sugar.rs", "rank": 59, "score": 67657.85285599164 }, { "content": "pub fn input_integer<T: Into<InputId>, I: Into<Integer>>(id: T, default: I) -> Component {\n\n Component::InputInteger {\n\n id: id.into(),\n\n value: default.into(),\n\n }\n\n}\n", "file_path": "crates/components/physics/src/widget/component/sugar.rs", "rank": 60, "score": 67657.85285599164 }, { "content": " let camera = {\n\n if let Ok(camera) = query_set.q0().single() {\n\n *camera\n\n } else {\n\n return;\n\n }\n\n };\n\n if let Ok(mut cursor) = query_set.q1_mut().single_mut() {\n\n let position = translate_position(position, window, &camera);\n\n cursor.translation.x = position.x;\n\n cursor.translation.y = position.y;\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/plugins/shell/src/cursor_systems.rs", "rank": 64, "score": 66122.76288599415 }, { "content": " velocity_power: 1.6,\n\n velocity_max: 1500.0,\n\n velocity_coefficient: 0.4,\n\n ..Default::default()\n\n },\n\n })\n\n .insert_bundle(WidgetBundle::default())\n\n .remove::<DragState>();\n\n }\n\n}\n\n\n\npub(crate) fn terminal_rendering(mut query: Query<(&Terminal, &mut Component)>) {\n\n for (terminal, mut component) in query.iter_mut() {\n\n let new_component = render_terminal(terminal);\n\n if *component != new_component {\n\n *component = new_component;\n\n }\n\n }\n\n}\n", "file_path": "crates/plugins/shell/src/terminal_systems.rs", "rank": 65, "score": 66119.50542866775 }, { "content": "use bevy::prelude::*;\n\n\n\nuse physics::{\n\n widget::{component::Component, WidgetId},\n\n DragState, Follow, FollowParams,\n\n};\n\nuse shell_terminal::{render_terminal, TerminalWidgetEventHandler};\n\nuse terminal::{terminal::Terminal, Cursor, TerminalOperations};\n\n\n\nuse crate::widget_bundle::WidgetBundle;\n\n\n\npub(crate) fn create_terminal(mut commands: Commands) {\n\n commands.spawn_bundle(TerminalBundle {\n\n transform: Transform::from_translation([100.0, 100.0, 0.].into()),\n\n ..Default::default()\n\n });\n\n}\n\n\n\n#[derive(Bundle, Default)]\n", "file_path": "crates/plugins/shell/src/terminal_systems.rs", "rank": 66, "score": 66119.42605677442 }, { "content": " velocity_coefficient: 10.0,\n\n velocity_power: 1.2,\n\n velocity_max: 2000.0,\n\n ..Default::default()\n\n },\n\n };\n\n commands.entity(entity).insert(follow);\n\n }\n\n DragState::NotDragging => {\n\n commands.entity(entity).remove::<Follow<Entity>>();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "crates/plugins/shell/src/drag_system.rs", "rank": 67, "score": 66119.21072810885 }, { "content": "#[derive(SystemLabel, PartialEq, Eq, Debug, Hash, Clone)]\n\npub enum ProtocolSystem {\n\n ReceiveEvents,\n\n HandleEvents,\n\n SendCommands,\n\n}\n\n\n\n#[derive(Derivative)]\n\n#[derivative(\n\n PartialEq(bound = \"\"),\n\n Eq(bound = \"\"),\n\n Debug(bound = \"\"),\n\n Hash(bound = \"\"),\n\n Clone(bound = \"\")\n\n)]\n\npub enum EventHandlerSystem<T> {\n\n Before,\n\n Handle,\n\n After,\n\n _Phantom(std::convert::Infallible, std::marker::PhantomData<T>),\n\n}\n\n\n\nimpl<T: Send + Sync + 'static> SystemLabel for EventHandlerSystem<T> {\n\n fn dyn_clone(&self) -> Box<dyn SystemLabel> {\n\n Box::new(self.clone())\n\n }\n\n}\n", "file_path": 
"crates/plugins/core/src/system_labels.rs", "rank": 68, "score": 66116.03542954121 }, { "content": "use bevy::prelude::*;\n\nuse derivative::*;\n\n\n\n#[derive(Debug, Hash, PartialEq, Eq, Clone, SystemLabel)]\n\npub enum DeskSystem {\n\n UpdateStatesToLatest,\n\n Shell,\n\n HandleOperations,\n\n PrePhysics,\n\n}\n\n\n\n#[derive(SystemLabel, PartialEq, Eq, Debug, Hash, Clone)]\n\npub enum ShellSystem {\n\n Add,\n\n UpdateComponent,\n\n UpdateWidget,\n\n Render,\n\n HandleEvents,\n\n}\n\n\n", "file_path": "crates/plugins/core/src/system_labels.rs", "rank": 69, "score": 66116.03465291816 }, { "content": "use bevy::{prelude::*, render::camera::Camera};\n\nuse terminal::Cursor;\n\n\n\n#[derive(Bundle)]\n", "file_path": "crates/plugins/shell/src/cursor_systems.rs", "rank": 70, "score": 66114.94777432464 }, { "content": "use std::collections::HashMap;\n\n\n\nuse bevy::prelude::*;\n\nuse physics::{Follow, Velocity};\n\n\n", "file_path": "crates/plugins/shell/src/follow_system.rs", "rank": 71, "score": 66114.3149851174 }, { "content": "use bevy::prelude::*;\n\nuse physics::{DragState, Follow, FollowParams};\n\nuse terminal::Cursor;\n\n\n", "file_path": "crates/plugins/shell/src/drag_system.rs", "rank": 72, "score": 66111.06283017986 }, { "content": "pub trait Operator {}\n", "file_path": "crates/components/language/src/lib.rs", "rank": 73, "score": 56016.77212232654 }, { "content": "pub trait Runtime {\n\n fn run(&self, code: &Code) -> Result<Code, RuntimeError>;\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct RuntimeId(pub String);\n\n\n\nimpl<T: Into<String>> From<T> for RuntimeId {\n\n fn from(from: T) -> Self {\n\n Self(from.into())\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Runtimes {\n\n runtimes: HashMap<RuntimeId, Box<dyn Runtime + Send + Sync>>,\n\n default_id: Option<RuntimeId>,\n\n}\n\n\n\nimpl Runtimes {\n", "file_path": "crates/components/language/src/runtime.rs", "rank": 74, "score": 56016.77212232654 }, { "content": "pub trait Client {\n\n fn sender(&self) -> Box<dyn Sink<Command, Error = String> + Send + Sync + Unpin + 'static>;\n\n fn poll_once(&mut self) -> Option<Vec<Event>>;\n\n}\n\n\n\n#[derive(Clone, Hash, Eq, PartialEq)]\n\npub struct ClientName(pub Cow<'static, str>);\n\n\n\npub type BoxClient = Box<dyn Client + Send + Sync + 'static>;\n", "file_path": "crates/components/protocol/src/client.rs", "rank": 75, "score": 56016.77212232654 }, { "content": "#[async_trait]\n\npub trait ServerContext {\n\n async fn send_event(&mut self, event: Event);\n\n async fn authenticate(&mut self, token: &Token) -> Result<UserId, AuthenticationError>;\n\n}\n\n\n\n#[async_trait]\n\nimpl<T, U> ServerContext for SinkAndStreamServerContext<T, U>\n\nwhere\n\n T: Sink<Event> + Unpin + Send + Sync,\n\n U: Sink<InboundEntranceCommand> + Unpin + Send + Sync,\n\n{\n\n async fn send_event(&mut self, event: Event) {\n\n self.event_sender\n\n .send(event)\n\n .await\n\n .unwrap_or_else(|_err| error!(\"error to send an event\"));\n\n }\n\n\n\n async fn authenticate(&mut self, token: &Token) -> Result<UserId, AuthenticationError> {\n\n self.user_authentication_handler.authenticate(token).await\n", "file_path": "crates/components/protocol/src/server_context.rs", "rank": 76, "score": 54250.60018009357 }, { "content": "pub trait CodeSyntax {}\n", "file_path": "crates/components/language/src/code_syntax.rs", "rank": 77, "score": 54250.60018009357 }, { "content": "pub trait WidgetBackend {\n\n fn render(&mut self, widget: &Widget) -> RenderResponse;\n\n}\n\n\n", "file_path": 
"crates/components/physics/src/widget/backend.rs", "rank": 78, "score": 54250.60018009357 }, { "content": "#[async_trait]\n\n#[enum_dispatch]\n\npub trait ServerState {\n\n async fn handle(\n\n self,\n\n context: &mut (impl ServerContext + Send + Sync),\n\n input: &ServerInput,\n\n ) -> ServerStateDispatcher;\n\n}\n\n\n\npub async fn handle_unexpected_input(\n\n context: &mut (impl ServerContext + Send + Sync),\n\n input: &ServerInput,\n\n) {\n\n if let ServerInput::Command(_) = input {\n\n let event = Event::Error {\n\n code: ErrorCode::UnexpectedOperation,\n\n message: \"unexpected\".into(),\n\n };\n\n context.send_event(event).await;\n\n }\n\n}\n", "file_path": "crates/components/protocol/src/server_state/mod.rs", "rank": 79, "score": 53438.01346916786 }, { "content": "#[async_trait]\n\npub trait UserAuthenticationHandler {\n\n async fn authenticate(&self, token: &Token) -> Result<UserId, AuthenticationError>;\n\n}\n\n\n\n#[cfg(test)]\n\npub mod mock {\n\n use super::*;\n\n use mock_it::Mock;\n\n\n\n #[derive(Clone)]\n\n pub struct MockUserAuthenticationHandler {\n\n pub authenticate: Mock<Token, Result<UserId, AuthenticationError>>,\n\n }\n\n\n\n impl Default for MockUserAuthenticationHandler {\n\n fn default() -> Self {\n\n Self {\n\n authenticate: Mock::new(Err(AuthenticationError {\n\n error_code: AuthenticationErrorCode::InternalError,\n\n message: \"UserAuthenticationHandler.authenticate mock not found\".into(),\n", "file_path": "crates/components/protocol/src/authentication_handler.rs", "rank": 80, "score": 53438.01346916786 }, { "content": "#[enum_dispatch]\n\npub trait ClientState {\n\n fn handle(&self, commands: &mut ClientContext, event: &ClientInput) -> ClientStateDispatcher;\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Default)]\n\npub struct ClientContext {\n\n pub commands: Commands,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum ClientInput {\n\n Event(Event),\n\n}\n", "file_path": "crates/components/protocol/src/client_state/mod.rs", "rank": 81, "score": 53438.01346916786 }, { "content": "fn main() {\n\n println!(\n\n \"{}\",\n\n serde_cbor::to_vec(&Command::Login(Login {\n\n token: vec![100, 100,].into()\n\n }))\n\n .unwrap()\n\n .len()\n\n );\n\n}\n", "file_path": "crates/components/protocol/src/main.rs", "rank": 82, "score": 50659.35885542963 }, { "content": "#[test]\n\nfn before() {\n\n fn add(context: &mut Context) {\n\n context.insert::<u8>(42);\n\n }\n\n\n\n let context = builder().before(add).build();\n\n assert_eq!(context.get::<u8>(), Some(&42_u8));\n\n}\n\n\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 83, "score": 49733.460728073835 }, { "content": "fn event_receiver(\n\n rx: impl Stream<Item = Result<Vec<u8>>> + Send + Sync + 'static + Unpin,\n\n) -> impl Stream<Item = Event> + Send + Sync + 'static + Unpin {\n\n rx.map(|bytes| serde_cbor::from_slice(&bytes.unwrap()).unwrap())\n\n}\n\n\n", "file_path": "crates/adapters/client-websocket/src/lib.rs", "rank": 84, "score": 48859.610752654466 }, { "content": "#[test]\n\nfn product() {\n\n assert_eq!(\n\n Type::product(vec![Type::Number, Type::String]),\n\n Type::Product(Set::new(vec![Type::Number, Type::String]))\n\n );\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 85, "score": 48859.610752654466 }, { "content": "#[test]\n\nfn sum() {\n\n assert_eq!(\n\n Type::sum(vec![Type::Number, Type::String]),\n\n Type::Sum(Set::new(vec![Type::Number, Type::String]))\n\n );\n\n}\n\n\n", "file_path": 
"crates/components/language/src/type_/test_type.rs", "rank": 86, "score": 48859.610752654466 }, { "content": "#[test]\n\nfn context_after() {\n\n let called = Rc::new(RefCell::new(false));\n\n let mut context = builder().build();\n\n context.insert(called.clone());\n\n context.after(call);\n\n assert!(!*called.borrow());\n\n std::mem::drop(context);\n\n assert!(*called.borrow());\n\n}\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 87, "score": 48859.610752654466 }, { "content": "#[test]\n\nfn does_not_simplifies() {\n\n assert_eq!(Type::Number.remove_verbose_composite_type(), &Type::Number);\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 88, "score": 48859.610752654466 }, { "content": "#[test]\n\nfn builder_after() {\n\n let called = Rc::new(RefCell::new(false));\n\n let mut context = builder().after(call).build();\n\n context.insert(called.clone());\n\n assert!(!*called.borrow());\n\n std::mem::drop(context);\n\n assert!(*called.borrow());\n\n}\n\n\n", "file_path": "crates/tests/desk-server/src/support/mod.rs", "rank": 89, "score": 48859.610752654466 }, { "content": "#[test]\n\nfn does_not_simplifies_product() {\n\n assert_eq!(\n\n Type::product(vec![Type::Number, Type::String]).remove_verbose_composite_type(),\n\n &Type::product(vec![Type::Number, Type::String])\n\n )\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 90, "score": 48033.54018203466 }, { "content": "#[test]\n\nfn is_subtype_of_simplifies() {\n\n assert!(Type::product(vec![Type::Number]).is_subtype_of(&Type::sum(vec![Type::Number])));\n\n assert!(Type::sum(vec![Type::Number]).is_subtype_of(&Type::product(vec![Type::Number])));\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 91, "score": 48033.54018203466 }, { "content": "#[test]\n\nfn simplifies_into_unit() {\n\n assert_eq!(\n\n Type::sum(vec![]).remove_verbose_composite_type(),\n\n &Type::Unit\n\n );\n\n assert_eq!(\n\n Type::product(vec![]).remove_verbose_composite_type(),\n\n &Type::Unit\n\n );\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 92, "score": 48033.54018203466 }, { "content": "#[test]\n\nfn does_not_simplifies_sum() {\n\n assert_eq!(\n\n Type::sum(vec![Type::Number, Type::String]).remove_verbose_composite_type(),\n\n &Type::sum(vec![Type::Number, Type::String])\n\n )\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 93, "score": 48033.54018203466 }, { "content": "#[test]\n\nfn number_literal() {}\n", "file_path": "crates/components/language/src/code/typing/test_literal.rs", "rank": 94, "score": 47251.434650681585 }, { "content": "#[test]\n\nfn is_subtype_of_returns_false() {\n\n assert!(!Trait::new(vec![Arrow::new(Type::Number, Type::String)])\n\n .is_subtype_of(&Trait::new(vec![Arrow::new(Type::String, Type::Number)])));\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_trait.rs", "rank": 95, "score": 47251.434650681585 }, { "content": "pub trait EventHandler: Send + Sync + 'static {\n\n type Context: Send + Sync + 'static + std::fmt::Debug;\n\n type Events: Send + Sync + 'static + std::fmt::Debug;\n\n type Output: Send + Sync + 'static + std::fmt::Debug;\n\n\n\n fn handle(&self, context: &Self::Context, events: &Self::Events) -> Self::Output;\n\n}\n", "file_path": "crates/components/physics/src/event_handler.rs", "rank": 96, "score": 46570.64905264786 }, { "content": "#[test]\n\nfn simplifies_single_item_product() {\n\n 
assert_eq!(\n\n Type::product(vec![Type::Number]).remove_verbose_composite_type(),\n\n &Type::Number\n\n )\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 97, "score": 46509.875284175825 }, { "content": "#[test]\n\nfn is_subtype_of_returns_true_if_unit() {\n\n assert!(Type::Unit.is_subtype_of(&Type::Unit));\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 98, "score": 46509.875284175825 }, { "content": "#[test]\n\nfn simplifies_single_item_sum() {\n\n assert_eq!(\n\n Type::sum(vec![Type::Number]).remove_verbose_composite_type(),\n\n &Type::Number\n\n )\n\n}\n\n\n", "file_path": "crates/components/language/src/type_/test_type.rs", "rank": 99, "score": 46509.875284175825 } ]
Rust
crypto-msg-parser/src/exchanges/binance/binance_all.rs
CPT-Jack-A-Castle/crypto-crawler-rs
e7b8a2d51989e69779c69e3e7755351fe5fcb3bb
use crypto_market_type::MarketType; use crate::{FundingRateMsg, MessageType, Order, OrderBookMsg, TradeMsg, TradeSide}; use super::super::utils::calc_quantity_and_volume; use serde::{Deserialize, Serialize}; use serde_json::{Result, Value}; use std::collections::HashMap; const EXCHANGE_NAME: &str = "binance"; #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct AggTradeMsg { e: String, E: i64, s: String, a: i64, p: String, q: String, f: i64, l: i64, T: i64, m: bool, #[serde(flatten)] extra: HashMap<String, Value>, } #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawTradeMsg { e: String, E: i64, s: String, t: i64, p: String, q: String, b: i64, a: i64, T: i64, m: bool, #[serde(flatten)] extra: HashMap<String, Value>, } pub type RawOrder = [String; 2]; #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawOrderbookMsg { e: String, E: i64, T: Option<i64>, s: String, U: i64, u: i64, b: Vec<RawOrder>, a: Vec<RawOrder>, #[serde(flatten)] extra: HashMap<String, Value>, } #[derive(Serialize, Deserialize)] struct WebsocketMsg<T: Sized> { stream: String, data: T, } pub(crate) fn parse_trade(market_type: MarketType, msg: &str) -> Result<Vec<TradeMsg>> { let obj = serde_json::from_str::<HashMap<String, Value>>(&msg)?; let data = obj.get("data").unwrap(); let event_type = data.get("e").unwrap().as_str().unwrap(); match event_type { "aggTrade" => { let agg_trade: AggTradeMsg = serde_json::from_value(data.clone()).unwrap(); let pair = crypto_pair::normalize_pair(&agg_trade.s, EXCHANGE_NAME).unwrap(); let price = agg_trade.p.parse::<f64>().unwrap(); let quantity = agg_trade.q.parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity); let trade = TradeMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: agg_trade.s.clone(), pair, msg_type: MessageType::Trade, timestamp: agg_trade.T, price, quantity_base, quantity_quote, quantity_contract, side: if agg_trade.m { TradeSide::Sell } else { TradeSide::Buy }, trade_id: agg_trade.a.to_string(), raw: serde_json::from_str(msg)?, }; Ok(vec![trade]) } "trade" => { let raw_trade: RawTradeMsg = serde_json::from_value(data.clone()).unwrap(); let pair = crypto_pair::normalize_pair(&raw_trade.s, EXCHANGE_NAME).unwrap(); let price = raw_trade.p.parse::<f64>().unwrap(); let quantity = raw_trade.q.parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity); let trade = TradeMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: raw_trade.s.clone(), pair, msg_type: MessageType::Trade, timestamp: raw_trade.T, price, quantity_base, quantity_quote, quantity_contract, side: if raw_trade.m { TradeSide::Sell } else { TradeSide::Buy }, trade_id: raw_trade.t.to_string(), raw: serde_json::from_str(msg)?, }; Ok(vec![trade]) } _ => panic!("Unsupported event type {}", event_type), } } pub(crate) fn parse_l2(market_type: MarketType, msg: &str) -> Result<Vec<OrderBookMsg>> { let ws_msg = serde_json::from_str::<WebsocketMsg<RawOrderbookMsg>>(&msg)?; let pair = crypto_pair::normalize_pair(&ws_msg.data.s, EXCHANGE_NAME).unwrap(); let parse_order = |raw_order: &RawOrder| -> Order { let price = raw_order[0].parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume( EXCHANGE_NAME, market_type, &pair, price, raw_order[1].parse::<f64>().unwrap(), ); Order { price, quantity_base, 
quantity_quote, quantity_contract, } }; let orderbook = OrderBookMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: ws_msg.data.s.clone(), pair: pair.clone(), msg_type: MessageType::L2Event, timestamp: if market_type == MarketType::Spot { ws_msg.data.E } else { ws_msg.data.T.unwrap() }, asks: ws_msg .data .a .iter() .map(|raw_order| parse_order(raw_order)) .collect::<Vec<Order>>(), bids: ws_msg .data .b .iter() .map(|raw_order| parse_order(raw_order)) .collect::<Vec<Order>>(), snapshot: false, raw: serde_json::from_str(msg)?, }; Ok(vec![orderbook]) } #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawFundingRateMsg { e: String, E: i64, s: String, p: String, i: Option<String>, P: String, r: String, T: i64, #[serde(flatten)] extra: HashMap<String, Value>, } pub(crate) fn parse_funding_rate( market_type: MarketType, msg: &str, ) -> Result<Vec<FundingRateMsg>> { let obj = serde_json::from_str::<HashMap<String, Value>>(&msg)?; let stream = obj.get("stream").unwrap().as_str().unwrap(); let data = if stream == "!markPrice@arr" { obj.get("data") .unwrap() .as_array() .unwrap() .iter() .map(|x| serde_json::from_value::<RawFundingRateMsg>(x.clone()).unwrap()) .collect() } else if stream.ends_with("@markPrice") { vec![serde_json::from_value::<RawFundingRateMsg>(obj.get("data").unwrap().clone()).unwrap()] } else { panic!("Unknown funding rate messaeg {}", msg); }; let funding_rates: Vec<FundingRateMsg> = data .into_iter() .filter(|x| !x.r.is_empty()) .map(|raw_msg| FundingRateMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: raw_msg.s.clone(), pair: crypto_pair::normalize_pair(&raw_msg.s, EXCHANGE_NAME).unwrap(), msg_type: MessageType::FundingRate, timestamp: raw_msg.E, funding_rate: raw_msg.r.parse::<f64>().unwrap(), funding_time: raw_msg.T, estimated_rate: None, raw: if stream == "!markPrice@arr" { serde_json::to_value(&raw_msg).unwrap() } else { serde_json::from_str(msg).unwrap() }, }) .collect(); Ok(funding_rates) }
use crypto_market_type::MarketType; use crate::{FundingRateMsg, MessageType, Order, OrderBookMsg, TradeMsg, TradeSide}; use super::super::utils::calc_quantity_and_volume; use serde::{Deserialize, Serialize}; use serde_json::{Result, Value}; use std::collections::HashMap; const EXCHANGE_NAME: &str = "binance"; #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct AggTradeMsg { e: String, E: i64, s: String, a: i64, p: String, q: String, f: i64, l: i64, T: i64, m: bool, #[serde(flatten)] extra: HashMap<String, Value>, } #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawTradeMsg { e: String, E: i64, s: String, t: i64, p: String, q: String, b: i64, a: i64, T: i64, m: bool, #[serde(flatten)] extra: HashMap<String, Value>, } pub type RawOrder = [String; 2]; #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawOrderbookMsg { e: String, E: i64, T: Option<i64>, s: String, U: i64, u: i64, b: Vec<RawOrder>, a: Vec<RawOrder>, #[serde(flatten)] extra: HashMap<String, Value>, } #[derive(Serialize, Deserialize)] struct WebsocketMsg<T: Sized> { stream: String, data: T, } pub(crate) fn parse_trade(market_type: MarketType, msg: &str) -> Result<Vec<TradeMsg>> { let obj = serde_json::from_str::<HashMap<String, Value>>(&msg)?; let data = obj.get("data").unwrap(); let event_type = data.get("e").unwrap().as_str().unwrap(); match event_type { "aggTrade" => { let agg_trade: AggTradeMsg = serde_json::from_value(data.clone()).unwrap(); let pair = crypto_pair::normalize_pair(&agg_trade.s, EXCHANGE_NAME).unwrap(); let price = agg_trade.p.parse::<f64>().unwrap(); let quantity = agg_trade.q.parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity); let trade = TradeMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: agg_trade.s.clone(), pair, msg_type: MessageType::Trade, timestamp: agg_trade.T, price, quantity_base, quantity_quote, quantity_contract, side: if agg_trade.m { TradeSide::Sell } else { TradeSide::Buy }, trade_id: agg_trade.a.to_string(), raw: serde_json::from_str(msg)?, }; Ok(vec![trade]) } "trade" => { let raw_trade: RawTradeMsg = serde_json::from_value(data.clone()).unwrap(); let pair = crypto_pair::normalize_pair(&raw_trade.s, EXCHANGE_NAME).unwrap(); let price = raw_trade.p.parse::<f64>().unwrap(); let quantity = raw_trade.q.parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity); let trade = TradeMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: raw_trade.s.clone(), pair, msg_type: MessageType::Trade, timestamp: raw_trade.T, price, quantity_base, quantity_quote, quantity_contract, side: if raw_trade.m { TradeSide::Sell } else { TradeSide::Buy }, trade_id: raw_trade.t.to_string(), raw: serde_json::from_str(msg)?, }; Ok(vec![trade]) } _ => panic!("Unsupported event type {}", event_type), } } pub(crate) fn parse_l2(market_type: MarketType, msg: &str) -> Result<Vec<OrderBookMsg>> { let ws_msg = serde_json::from_str::<WebsocketMsg<RawOrderbookMsg>>(&msg)?; let pair = crypto_pair::normalize_pair(&ws_msg.data.s, EXCHANGE_NAME).unwrap(); let parse_order = |raw_order: &RawOrder| -> Order { let price = raw_order[0].parse::<f64>().unwrap(); let (quantity_base, quantity_quote, quantity_contract) = calc_quantity_and_volume( EXCHANGE_NAME, market_type, &pair, price, raw_order[1].parse::<f64>().unwrap(), ); Order { price, quantity_base, 
quantity_quote, quantity_contract, } }; let orderbook = OrderBookMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: ws_msg.data.s.clone(), pair: pair.clone(), msg_type: MessageType::L2Event, timestamp:
, asks: ws_msg .data .a .iter() .map(|raw_order| parse_order(raw_order)) .collect::<Vec<Order>>(), bids: ws_msg .data .b .iter() .map(|raw_order| parse_order(raw_order)) .collect::<Vec<Order>>(), snapshot: false, raw: serde_json::from_str(msg)?, }; Ok(vec![orderbook]) } #[derive(Serialize, Deserialize)] #[allow(non_snake_case)] struct RawFundingRateMsg { e: String, E: i64, s: String, p: String, i: Option<String>, P: String, r: String, T: i64, #[serde(flatten)] extra: HashMap<String, Value>, } pub(crate) fn parse_funding_rate( market_type: MarketType, msg: &str, ) -> Result<Vec<FundingRateMsg>> { let obj = serde_json::from_str::<HashMap<String, Value>>(&msg)?; let stream = obj.get("stream").unwrap().as_str().unwrap(); let data = if stream == "!markPrice@arr" { obj.get("data") .unwrap() .as_array() .unwrap() .iter() .map(|x| serde_json::from_value::<RawFundingRateMsg>(x.clone()).unwrap()) .collect() } else if stream.ends_with("@markPrice") { vec![serde_json::from_value::<RawFundingRateMsg>(obj.get("data").unwrap().clone()).unwrap()] } else { panic!("Unknown funding rate message {}", msg); }; let funding_rates: Vec<FundingRateMsg> = data .into_iter() .filter(|x| !x.r.is_empty()) .map(|raw_msg| FundingRateMsg { exchange: EXCHANGE_NAME.to_string(), market_type, symbol: raw_msg.s.clone(), pair: crypto_pair::normalize_pair(&raw_msg.s, EXCHANGE_NAME).unwrap(), msg_type: MessageType::FundingRate, timestamp: raw_msg.E, funding_rate: raw_msg.r.parse::<f64>().unwrap(), funding_time: raw_msg.T, estimated_rate: None, raw: if stream == "!markPrice@arr" { serde_json::to_value(&raw_msg).unwrap() } else { serde_json::from_str(msg).unwrap() }, }) .collect(); Ok(funding_rates) }
if market_type == MarketType::Spot { ws_msg.data.E } else { ws_msg.data.T.unwrap() }
if_condition
[ { "content": "pub fn check_trade_fields(exchange: &str, market_type: MarketType, pair: String, trade: &TradeMsg) {\n\n assert_eq!(trade.exchange, exchange);\n\n assert_eq!(trade.market_type, market_type);\n\n assert_eq!(trade.pair, pair);\n\n assert_eq!(trade.msg_type, MessageType::Trade);\n\n assert!(trade.price > 0.0);\n\n assert!(trade.quantity_base > 0.0);\n\n assert!(trade.quantity_quote > 0.0);\n\n if exchange != \"bitmex\" {\n\n assert!(approx_eq!(\n\n f64,\n\n trade.quantity_quote,\n\n trade.price * trade.quantity_base,\n\n epsilon = 0.0000000001\n\n ));\n\n }\n\n assert!(!trade.trade_id.is_empty());\n\n assert_eq!(trade.timestamp.to_string().len(), 13);\n\n}\n\n\n\n// TODO: weird, it is actually being used\n", "file_path": "crypto-msg-parser/tests/utils.rs", "rank": 0, "score": 447605.6732327044 }, { "content": "/// Normalize a trading currency.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `currency` - The exchange-specific currency\n\n/// * `exchange` - The normalized symbol\n\npub fn normalize_currency(symbol: &str, exchange: &str) -> String {\n\n match exchange {\n\n \"bitfinex\" => exchanges::bitfinex::normalize_currency(symbol),\n\n \"bitmex\" => exchanges::bitmex::normalize_currency(symbol),\n\n \"kraken\" => exchanges::kraken::normalize_currency(symbol),\n\n \"kucoin\" => exchanges::kucoin::normalize_currency(symbol),\n\n _ => symbol.to_uppercase(),\n\n }\n\n}\n\n\n", "file_path": "crypto-pair/src/lib.rs", "rank": 1, "score": 388306.67840979144 }, { "content": "/// Normalize a cryptocurrency trading pair.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `symbol` - The original pair of an exchange\n\n/// * `exchange` - The exchange name\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use crypto_pair::normalize_pair;\n\n///\n\n/// assert_eq!(Some(\"BTC/USD\".to_string()), normalize_pair(\"XBTUSD\", \"bitmex\"));\n\n/// assert_eq!(Some(\"BTC/USD\".to_string()), normalize_pair(\"XBTH21\", \"bitmex\"));\n\n/// assert_eq!(Some(\"BTC/USDT\".to_string()), normalize_pair(\"BTCUSDT\", \"binance\"));\n\n/// assert_eq!(Some(\"BTC/USDT\".to_string()), normalize_pair(\"btcusdt\", \"huobi\"));\n\n/// assert_eq!(Some(\"BTC/USDT\".to_string()), normalize_pair(\"BTCUST\", \"bitfinex\"));\n\n/// ```\n\npub fn normalize_pair(symbol: &str, exchange: &str) -> Option<String> {\n\n match exchange {\n\n \"binance\" => exchanges::binance::normalize_pair(symbol),\n\n \"bitfinex\" => exchanges::bitfinex::normalize_pair(symbol),\n\n \"bitget\" => exchanges::bitget::normalize_pair(symbol),\n\n \"bithumb\" => Some(symbol.replace(\"-\", \"/\")),\n\n \"bitmex\" => exchanges::bitmex::normalize_pair(symbol),\n\n \"bitstamp\" => exchanges::bitstamp::normalize_pair(symbol),\n\n \"bitz\" => Some(symbol.replace(\"_\", \"/\").to_uppercase()),\n\n \"bybit\" => exchanges::bybit::normalize_pair(symbol),\n\n \"coinbase_pro\" => Some(symbol.replace(\"-\", \"/\")),\n\n \"deribit\" => exchanges::deribit::normalize_pair(symbol),\n\n \"ftx\" => exchanges::ftx::normalize_pair(symbol),\n\n \"gate\" => {\n\n let (base, quote) = {\n\n let v: Vec<&str> = symbol.split('_').collect();\n\n (v[0].to_string(), v[1].to_string())\n\n };\n\n\n\n Some(format!(\"{}/{}\", base, quote))\n", "file_path": "crypto-pair/src/lib.rs", "rank": 2, "score": 387401.7437720853 }, { "content": "/// Parse trade messages.\n\npub fn parse_trade(exchange: &str, market_type: MarketType, msg: &str) -> Result<Vec<TradeMsg>> {\n\n match exchange {\n\n \"binance\" => exchanges::binance::parse_trade(market_type, msg),\n\n \"bitfinex\" => 
exchanges::bitfinex::parse_trade(market_type, msg),\n\n \"bitget\" => exchanges::bitget::parse_trade(market_type, msg),\n\n \"bithumb\" => exchanges::bithumb::parse_trade(market_type, msg),\n\n \"bitmex\" => exchanges::bitmex::parse_trade(market_type, msg),\n\n \"bitstamp\" => exchanges::bitstamp::parse_trade(market_type, msg),\n\n \"bitz\" => exchanges::bitz::parse_trade(market_type, msg),\n\n \"bybit\" => exchanges::bybit::parse_trade(market_type, msg),\n\n \"coinbase_pro\" => exchanges::coinbase_pro::parse_trade(market_type, msg),\n\n \"deribit\" => exchanges::deribit::parse_trade(market_type, msg),\n\n \"ftx\" => exchanges::ftx::parse_trade(market_type, msg),\n\n \"gate\" => exchanges::gate::parse_trade(market_type, msg),\n\n \"huobi\" => exchanges::huobi::parse_trade(market_type, msg),\n\n \"kraken\" => exchanges::kraken::parse_trade(market_type, msg),\n\n \"kucoin\" => exchanges::kucoin::parse_trade(market_type, msg),\n\n \"mxc\" => exchanges::mxc::parse_trade(market_type, msg),\n\n \"okex\" => exchanges::okex::parse_trade(market_type, msg),\n\n \"zbg\" => exchanges::zbg::parse_trade(market_type, msg),\n\n _ => panic!(\"Unknown exchange {}\", exchange),\n\n }\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/lib.rs", "rank": 3, "score": 365618.3380635824 }, { "content": "/// Parse level2 orderbook messages.\n\npub fn parse_l2(exchange: &str, market_type: MarketType, msg: &str) -> Result<Vec<OrderBookMsg>> {\n\n let ret = match exchange {\n\n \"binance\" => exchanges::binance::parse_l2(market_type, msg),\n\n \"bitfinex\" => exchanges::bitfinex::parse_l2(market_type, msg),\n\n \"bitget\" => exchanges::bitget::parse_l2(market_type, msg),\n\n \"bithumb\" => exchanges::bithumb::parse_l2(market_type, msg),\n\n \"bitmex\" => exchanges::bitmex::parse_l2(market_type, msg),\n\n \"bitstamp\" => exchanges::bitstamp::parse_l2(market_type, msg),\n\n \"bitz\" => exchanges::bitz::parse_l2(market_type, msg),\n\n \"bybit\" => exchanges::bybit::parse_l2(market_type, msg),\n\n \"coinbase_pro\" => exchanges::coinbase_pro::parse_l2(market_type, msg),\n\n \"deribit\" => exchanges::deribit::parse_l2(market_type, msg),\n\n \"ftx\" => exchanges::ftx::parse_l2(market_type, msg),\n\n \"gate\" => exchanges::gate::parse_l2(market_type, msg),\n\n \"huobi\" => exchanges::huobi::parse_l2(market_type, msg),\n\n \"kraken\" => exchanges::kraken::parse_l2(market_type, msg),\n\n \"kucoin\" => exchanges::kucoin::parse_l2(market_type, msg),\n\n \"mxc\" => exchanges::mxc::parse_l2(market_type, msg),\n\n \"okex\" => exchanges::okex::parse_l2(market_type, msg),\n\n \"zbg\" => exchanges::zbg::parse_l2(market_type, msg),\n", "file_path": "crypto-msg-parser/src/lib.rs", "rank": 4, "score": 348430.773753875 }, { "content": "/// Fetch level2 orderbook snapshot.\n\npub fn fetch_l2_snapshot(exchange: &str, market_type: MarketType, symbol: &str) -> Result<String> {\n\n match exchange {\n\n \"binance\" => exchanges::binance::fetch_l2_snapshot(market_type, symbol),\n\n \"bitfinex\" => exchanges::bitfinex::BitfinexRestClient::fetch_l2_snapshot(symbol),\n\n \"bitget\" => exchanges::bitget::fetch_l2_snapshot(market_type, symbol),\n\n \"bithumb\" => exchanges::bithumb::BithumbRestClient::fetch_l2_snapshot(symbol),\n\n \"bitmex\" => exchanges::bitmex::BitmexRestClient::fetch_l2_snapshot(symbol),\n\n \"bitstamp\" => exchanges::bitstamp::BitstampRestClient::fetch_l2_snapshot(symbol),\n\n \"bitz\" => exchanges::bitz::fetch_l2_snapshot(market_type, symbol),\n\n \"bybit\" => exchanges::bybit::BybitRestClient::fetch_l2_snapshot(symbol),\n\n 
\"coinbase_pro\" => exchanges::coinbase_pro::CoinbaseProRestClient::fetch_l2_snapshot(symbol),\n\n \"deribit\" => exchanges::deribit::DeribitRestClient::fetch_l2_snapshot(symbol),\n\n \"ftx\" => exchanges::ftx::FtxRestClient::fetch_l2_snapshot(symbol),\n\n \"gate\" => exchanges::gate::fetch_l2_snapshot(market_type, symbol),\n\n \"huobi\" => exchanges::huobi::fetch_l2_snapshot(market_type, symbol),\n\n \"kraken\" => exchanges::kraken::KrakenRestClient::fetch_l2_snapshot(symbol),\n\n \"kucoin\" => exchanges::kucoin::fetch_l2_snapshot(market_type, symbol),\n\n \"mxc\" => exchanges::mxc::fetch_l2_snapshot(market_type, symbol),\n\n \"okex\" => exchanges::okex::OkexRestClient::fetch_l2_snapshot(symbol),\n\n \"zbg\" => exchanges::zbg::fetch_l2_snapshot(market_type, symbol),\n\n _ => panic!(\"Unknown exchange {}\", exchange),\n\n }\n\n}\n\n\n", "file_path": "crypto-rest-client/src/lib.rs", "rank": 5, "score": 342961.5850093646 }, { "content": "/// Fetch level3 orderbook snapshot.\n\npub fn fetch_l3_snapshot(exchange: &str, market_type: MarketType, symbol: &str) -> Result<String> {\n\n match exchange {\n\n \"bitfinex\" => exchanges::bitfinex::BitfinexRestClient::fetch_l3_snapshot(symbol),\n\n \"bitstamp\" => exchanges::bitstamp::BitstampRestClient::fetch_l3_snapshot(symbol),\n\n \"coinbase_pro\" => exchanges::coinbase_pro::CoinbaseProRestClient::fetch_l3_snapshot(symbol),\n\n \"kucoin\" => exchanges::kucoin::fetch_l3_snapshot(market_type, symbol),\n\n _ => panic!(\n\n \"{} {} does NOT provide level3 orderbook data\",\n\n exchange, market_type\n\n ),\n\n }\n\n}\n", "file_path": "crypto-rest-client/src/lib.rs", "rank": 6, "score": 342961.5850093645 }, { "content": "pub fn get_contract_value(exchange: &str, market_type: MarketType, pair: &str) -> Option<f64> {\n\n if market_type == MarketType::Spot {\n\n return Some(1.0);\n\n }\n\n\n\n match exchange {\n\n \"binance\" => exchanges::binance::get_contract_value(market_type, pair),\n\n \"bitfinex\" => exchanges::bitfinex::get_contract_value(market_type, pair),\n\n \"bitget\" => exchanges::bitget::get_contract_value(market_type, pair),\n\n \"bitmex\" => exchanges::bitmex::get_contract_value(market_type, pair),\n\n \"bybit\" => exchanges::bybit::get_contract_value(market_type, pair),\n\n \"deribit\" => exchanges::deribit::get_contract_value(market_type, pair),\n\n \"ftx\" => exchanges::ftx::get_contract_value(market_type, pair),\n\n \"gate\" => exchanges::gate::get_contract_value(market_type, pair),\n\n \"huobi\" => exchanges::huobi::get_contract_value(market_type, pair),\n\n \"kucoin\" => exchanges::kucoin::get_contract_value(market_type, pair),\n\n \"mxc\" => exchanges::mxc::get_contract_value(market_type, pair),\n\n \"okex\" => exchanges::okex::get_contract_value(market_type, pair),\n\n _ => panic!(\"Unknown exchange {}\", exchange),\n\n }\n\n}\n", "file_path": "crypto-contract-value/src/lib.rs", "rank": 7, "score": 336335.8229632268 }, { "content": "fn parse_one_trade(market_type: MarketType, symbol: &str, nums: &[f64]) -> TradeMsg {\n\n assert_eq!(4, nums.len());\n\n let pair = crypto_pair::normalize_pair(symbol, EXCHANGE_NAME).unwrap();\n\n let trade_id = nums[0] as i64;\n\n let timestamp = nums[1] as i64;\n\n let quantity = f64::abs(nums[2]);\n\n let price = nums[3];\n\n\n\n let (quantity_base, quantity_quote, quantity_contract) =\n\n calc_quantity_and_volume(EXCHANGE_NAME, market_type, &pair, price, quantity);\n\n\n\n TradeMsg {\n\n exchange: EXCHANGE_NAME.to_string(),\n\n market_type,\n\n symbol: symbol.to_string(),\n\n pair,\n\n msg_type: 
MessageType::Trade,\n\n timestamp,\n\n price,\n\n quantity_base,\n", "file_path": "crypto-msg-parser/src/exchanges/bitfinex.rs", "rank": 8, "score": 333536.08350312675 }, { "content": "fn parse_order(raw_order: &[String; 2]) -> Order {\n\n let price = raw_order[0].parse::<f64>().unwrap();\n\n let quantity_base = raw_order[1].parse::<f64>().unwrap();\n\n\n\n Order {\n\n price,\n\n quantity_base,\n\n quantity_quote: price * quantity_base,\n\n quantity_contract: None,\n\n }\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/exchanges/coinbase_pro.rs", "rank": 9, "score": 333403.06040378835 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}@{}\", pair.to_lowercase(), channel)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, BinanceWSClient, subscribe_trade, \"aggTrade\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, BinanceWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(BBO, BinanceWSClient, subscribe_bbo, \"bookTicker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, BinanceWSClient, subscribe_orderbook, \"depth@100ms\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBookSnapshot, BinanceWSClient, subscribe_orderbook_snapshot, \"depth20\", to_raw_channel);\n\n\n", "file_path": "crypto-ws-client/src/clients/binance.rs", "rank": 10, "score": 330582.50138275465 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}@{}\", pair, channel)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, BinanceOptionWSClient, subscribe_trade, \"trade\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, BinanceOptionWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(BBO, BinanceOptionWSClient, subscribe_bbo, \"bookTicker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, BinanceOptionWSClient, subscribe_orderbook, \"depth@100ms\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBookSnapshot, BinanceOptionWSClient, subscribe_orderbook_snapshot, \"depth100\", to_raw_channel);\n\n\n", "file_path": "crypto-ws-client/src/clients/binance_option.rs", "rank": 13, "score": 325319.8535115282 }, { "content": "/// Fetch trading symbols.\n\npub fn fetch_symbols(exchange: &str, market_type: MarketType) -> Result<Vec<String>> {\n\n match exchange {\n\n \"binance\" => exchanges::binance::fetch_symbols(market_type),\n\n \"bitfinex\" => exchanges::bitfinex::fetch_symbols(market_type),\n\n \"bitget\" => exchanges::bitget::fetch_symbols(market_type),\n\n \"bithumb\" => exchanges::bithumb::fetch_symbols(market_type),\n\n \"bitmex\" => exchanges::bitmex::fetch_symbols(market_type),\n\n \"bitstamp\" => exchanges::bitstamp::fetch_symbols(market_type),\n\n \"bitz\" => exchanges::bitz::fetch_symbols(market_type),\n\n \"bybit\" => exchanges::bybit::fetch_symbols(market_type),\n\n \"coinbase_pro\" => exchanges::coinbase_pro::fetch_symbols(market_type),\n\n \"deribit\" => exchanges::deribit::fetch_symbols(market_type),\n\n \"ftx\" => exchanges::ftx::fetch_symbols(market_type),\n\n \"gate\" => exchanges::gate::fetch_symbols(market_type),\n\n \"huobi\" => exchanges::huobi::fetch_symbols(market_type),\n\n \"kraken\" => exchanges::kraken::fetch_symbols(market_type),\n\n \"kucoin\" => exchanges::kucoin::fetch_symbols(market_type),\n\n \"mxc\" => exchanges::mxc::fetch_symbols(market_type),\n\n \"okex\" => exchanges::okex::fetch_symbols(market_type),\n\n \"zbg\" => exchanges::zbg::fetch_symbols(market_type),\n\n _ => 
panic!(\"Unsupported exchange {}\", exchange),\n\n }\n\n}\n\n\n", "file_path": "crypto-markets/src/lib.rs", "rank": 14, "score": 319455.3140865311 }, { "content": "fn to_candlestick_raw_channel(pair: &str, interval: u32) -> String {\n\n let interval_str = match interval {\n\n 60 => \"1m\",\n\n 180 => \"3m\",\n\n 300 => \"5m\",\n\n 900 => \"15m\",\n\n 1800 => \"30m\",\n\n 3600 => \"1h\",\n\n 7200 => \"2h\",\n\n 14400 => \"4h\",\n\n 21600 => \"6h\",\n\n 28800 => \"8h\",\n\n 43200 => \"12h\",\n\n 86400 => \"1d\",\n\n 259200 => \"3d\",\n\n 604800 => \"1w\",\n\n 2592000 => \"1M\",\n\n _ => panic!(\"Binance has intervals 1m,3m,5m,15m,30m,1h,2h,4h,6h,8h,12h,1d,3d,1w,1M\"),\n\n };\n\n format!(\"{}@kline_{}\", pair, interval_str)\n", "file_path": "crypto-ws-client/src/clients/binance.rs", "rank": 15, "score": 301797.6382712487 }, { "content": "fn pair_channels_to_command(pair: &str, channels: &[String], subscribe: bool) -> String {\n\n format!(\n\n r#\"{{\"action\":\"Topic.{}\", \"data\":{{\"symbol\":\"{}\", \"type\":\"{}\", \"_CDID\":\"100002\", \"dataType\":\"1\"}}, \"msg_id\":{}}}\"#,\n\n if subscribe { \"sub\" } else { \"unsub\" },\n\n pair,\n\n channels.join(\",\"),\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n\n .as_millis(),\n\n )\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/bitz/bitz_spot.rs", "rank": 16, "score": 299779.6763724081 }, { "content": "#[test_case(MarketType::Spot, \"BTCUSDT\")]\n\n#[test_case(MarketType::InverseFuture, \"BTCUSD_210924\")]\n\n#[test_case(MarketType::LinearFuture, \"BTCUSDT_210924\")]\n\n#[test_case(MarketType::InverseSwap, \"BTCUSD_PERP\")]\n\n#[test_case(MarketType::LinearSwap, \"BTCUSDT\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-210129-40000-C\"; \"inconclusive\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/binance.rs", "rank": 17, "score": 299276.0610770685 }, { "content": "fn parse_change(raw_order: &[String; 3]) -> Order {\n\n let price = raw_order[1].parse::<f64>().unwrap();\n\n let quantity_base = raw_order[2].parse::<f64>().unwrap();\n\n\n\n Order {\n\n price,\n\n quantity_base,\n\n quantity_quote: price * quantity_base,\n\n quantity_contract: None,\n\n }\n\n}\n\n\n\npub(crate) fn parse_l2(market_type: MarketType, msg: &str) -> Result<Vec<OrderBookMsg>> {\n\n let snapshot = {\n\n let obj = serde_json::from_str::<HashMap<String, Value>>(msg)?;\n\n obj.get(\"type\").unwrap().as_str().unwrap() == \"snapshot\"\n\n };\n\n if snapshot {\n\n let orderbook_snapshot = serde_json::from_str::<OrderbookSnapshotMsg>(msg)?;\n\n let symbol = orderbook_snapshot.product_id;\n", "file_path": "crypto-msg-parser/src/exchanges/coinbase_pro.rs", "rank": 18, "score": 298672.0091299183 }, { "content": "// get the contract_size field.\n\nfn fetch_contract_size(url: &str) -> BTreeMap<String, f64> {\n\n #[derive(Serialize, Deserialize)]\n\n struct RawMarket {\n\n symbol: String,\n\n contract_code: String,\n\n contract_size: f64,\n\n }\n\n\n\n #[derive(Serialize, Deserialize)]\n\n struct Response {\n\n status: String,\n\n data: Vec<RawMarket>,\n\n ts: i64,\n\n }\n\n\n\n let mut mapping: BTreeMap<String, f64> = BTreeMap::new();\n\n\n\n let txt = http_get(url).unwrap_or_else(|_| \"[]\".to_string());\n\n let response = serde_json::from_str::<Response>(&txt).unwrap();\n\n for market in response.data.iter() {\n", "file_path": 
"crypto-contract-value/src/exchanges/huobi.rs", "rank": 19, "score": 298070.131712255 }, { "content": "fn to_candlestick_raw_channel(pair: &str, interval: u32) -> String {\n\n let interval_str = match interval {\n\n 60 => \"1m\",\n\n 300 => \"5m\",\n\n 900 => \"15m\",\n\n 1800 => \"30m\",\n\n 3600 => \"1h\",\n\n 14400 => \"4h\",\n\n 86400 => \"1d\",\n\n 604800 => \"1w\",\n\n _ => panic!(\"Binance has intervals 1m,5m,15m,30m,1h4h,1d,1w\"),\n\n };\n\n format!(\"{}@kline_{}\", pair, interval_str)\n\n}\n\n\n\nimpl_candlestick!(BinanceOptionWSClient);\n", "file_path": "crypto-ws-client/src/clients/binance_option.rs", "rank": 20, "score": 296741.7489329183 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}{}{}\", channel, CHANNEL_PAIR_DELIMITER, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, KrakenWSClient, subscribe_trade, \"trade\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, KrakenWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(BBO, KrakenWSClient, subscribe_bbo, \"spread\", to_raw_channel);\n\n\n\nimpl<'a> OrderBook for KrakenWSClient<'a> {\n\n fn subscribe_orderbook(&self, pairs: &[String]) {\n\n let command = format!(\n\n r#\"{{\"event\":\"subscribe\",\"pair\":{},\"subscription\":{{\"name\":\"book\", \"depth\":25}}}}\"#,\n\n serde_json::to_string(pairs).unwrap(),\n\n );\n\n let channels = vec![command];\n\n\n\n self.client.subscribe(&channels);\n", "file_path": "crypto-ws-client/src/clients/kraken.rs", "rank": 21, "score": 295670.61419720354 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}/{}:{}\", pair_to_market_type(pair), channel, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, OkexWSClient, subscribe_trade, \"trade\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, OkexWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, OkexWSClient, subscribe_orderbook, \"depth_l2_tbt\", to_raw_channel);\n\nimpl_trait!(\n\n OrderBookSnapshot,\n\n OkexWSClient,\n\n subscribe_orderbook_snapshot,\n\n \"depth5\",\n\n to_raw_channel\n\n);\n\n\n\nimpl<'a> BBO for OkexWSClient<'a> {\n\n fn subscribe_bbo(&self, _pairs: &[String]) {\n\n panic!(\"OKEx WebSocket does NOT have BBO channel\");\n\n }\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/okex.rs", "rank": 22, "score": 295670.61419720354 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}{}{}\", channel, CHANNEL_PAIR_DELIMITER, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, MxcSpotWSClient, subscribe_trade, \"symbol\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, MxcSwapWSClient, subscribe_trade, \"deal\", to_raw_channel);\n\n\n\nimpl<'a> Ticker for MxcSpotWSClient<'a> {\n\n fn subscribe_ticker(&self, _pairs: &[String]) {\n\n panic!(\"MXC Spot WebSocket does NOT have ticker channel\");\n\n }\n\n}\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, MxcSwapWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, MxcSpotWSClient, subscribe_orderbook, \"symbol\", to_raw_channel);\n\n#[rustfmt::skip]\n", "file_path": "crypto-ws-client/src/clients/mxc.rs", "rank": 23, "score": 295670.61419720354 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"market.{}.{}\", pair, channel)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, HuobiWSClient, subscribe_trade, \"trade.detail\", 
to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, HuobiWSClient, subscribe_ticker, \"detail\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(BBO, HuobiWSClient, subscribe_bbo, \"bbo\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBookSnapshot, HuobiWSClient, subscribe_orderbook_snapshot, \"depth.step0\", to_raw_channel);\n\n\n\nimpl<'a> OrderBook for HuobiWSClient<'a> {\n\n fn subscribe_orderbook(&self, pairs: &[String]) {\n\n let pair_to_raw_channel = |pair: &String| {\n\n format!(\n\n r#\"{{\"sub\": \"market.{}.depth.size_150.high_freq\",\"data_type\":\"incremental\",\"id\": \"crypto-ws-client\"}}\"#,\n\n pair\n\n )\n\n };\n\n\n\n let channels = pairs\n\n .iter()\n\n .map(pair_to_raw_channel)\n\n .collect::<Vec<String>>();\n\n self.client.subscribe(&channels);\n\n }\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/huobi.rs", "rank": 24, "score": 295670.61419720354 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}{}{}\", channel, CHANNEL_PAIR_DELIMITER, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, BithumbWSClient, subscribe_trade, \"TRADE\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, BithumbWSClient, subscribe_ticker, \"TICKER\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, BithumbWSClient, subscribe_orderbook, \"ORDERBOOK\", to_raw_channel);\n\n\n\nimpl<'a> BBO for BithumbWSClient<'a> {\n\n fn subscribe_bbo(&self, _pairs: &[String]) {\n\n panic!(\"CoinbasePro WebSocket does NOT have BBO channel\");\n\n }\n\n}\n\n\n\nimpl<'a> OrderBookSnapshot for BithumbWSClient<'a> {\n\n fn subscribe_orderbook_snapshot(&self, _pairs: &[String]) {\n\n panic!(\"CoinbasePro does NOT have orderbook snapshot channel\");\n", "file_path": "crypto-ws-client/src/clients/bithumb.rs", "rank": 25, "score": 295670.61419720354 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n match channel {\n\n \"trade\" => format!(\"trades.{}.raw\", pair),\n\n \"ticker\" => format!(\"ticker.{}.100ms\", pair),\n\n \"orderbook\" => format!(\"book.{}.100ms\", pair),\n\n \"orderbook_snapshot\" => format!(\"book.{}.none.20.100ms\", pair),\n\n \"bbo\" => format!(\"quote.{}\", pair),\n\n _ => panic!(\"Unknown channel {}\", channel),\n\n }\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, DeribitWSClient, subscribe_trade, \"trade\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, DeribitWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, DeribitWSClient, subscribe_orderbook, \"orderbook\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBookSnapshot, DeribitWSClient, subscribe_orderbook_snapshot, \"orderbook_snapshot\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(BBO, DeribitWSClient, subscribe_bbo, \"bbo\", to_raw_channel);\n\n\n", "file_path": "crypto-ws-client/src/clients/deribit.rs", "rank": 26, "score": 295670.61419720354 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}{}{}\", channel, CHANNEL_PAIR_DELIMITER, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, FtxWSClient, subscribe_trade, \"trades\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(BBO, FtxWSClient, subscribe_bbo, \"ticker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, FtxWSClient, subscribe_orderbook, \"orderbook\", to_raw_channel);\n\n\n\nimpl<'a> OrderBookSnapshot for FtxWSClient<'a> {\n\n fn subscribe_orderbook_snapshot(&self, _pairs: &[String]) {\n\n panic!(\"FTX 
does NOT have orderbook snapshot channel\");\n\n }\n\n}\n\n\n\nimpl<'a> Ticker for FtxWSClient<'a> {\n\n fn subscribe_ticker(&self, _pairs: &[String]) {\n\n panic!(\"FTX does NOT have ticker channel\");\n", "file_path": "crypto-ws-client/src/clients/ftx.rs", "rank": 27, "score": 295670.61419720354 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}_{}\", channel, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, BitstampWSClient, subscribe_trade, \"live_trades\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, BitstampWSClient, subscribe_orderbook, \"diff_order_book\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBookSnapshot, BitstampWSClient, subscribe_orderbook_snapshot, \"order_book\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Level3OrderBook, BitstampWSClient, subscribe_l3_orderbook, \"live_orders\", to_raw_channel);\n\n\n\nimpl<'a> Ticker for BitstampWSClient<'a> {\n\n fn subscribe_ticker(&self, _pairs: &[String]) {\n\n panic!(\"Bitstamp WebSocket does NOT have ticker channel\");\n\n }\n\n}\n\n\n\nimpl<'a> BBO for BitstampWSClient<'a> {\n", "file_path": "crypto-ws-client/src/clients/bitstamp.rs", "rank": 28, "score": 295670.61419720354 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}{}{}\", channel, CHANNEL_PAIR_DELIMITER, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, BitmexWSClient, subscribe_trade, \"trade\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(BBO, BitmexWSClient, subscribe_bbo, \"quote\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, BitmexWSClient, subscribe_orderbook, \"orderBookL2_25\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBookSnapshot, BitmexWSClient, subscribe_orderbook_snapshot, \"orderBook10\", to_raw_channel);\n\n\n\nimpl<'a> Ticker for BitmexWSClient<'a> {\n\n fn subscribe_ticker(&self, _pairs: &[String]) {\n\n panic!(\"BitMEX WebSocket does NOT have ticker channel\");\n\n }\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/bitmex.rs", "rank": 29, "score": 295670.61419720354 }, { "content": "#[test_case(MarketType::Spot, \"BTCUSDT\")]\n\n#[test_case(MarketType::InverseFuture, \"BTCUSD_210924\")]\n\n#[test_case(MarketType::LinearFuture, \"BTCUSDT_210924\")]\n\n#[test_case(MarketType::InverseSwap, \"BTCUSD_PERP\")]\n\n#[test_case(MarketType::LinearSwap, \"BTCUSDT\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-210129-40000-C\"; \"inconclusive\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/binance.rs", "rank": 30, "score": 294139.9360399535 }, { "content": "// get the contract_val field\n\n// market_type, futures, swap, option\n\nfn fetch_contract_val(market_type: &str) -> BTreeMap<String, f64> {\n\n #[derive(Serialize, Deserialize)]\n\n struct Instrument {\n\n instrument_id: String,\n\n underlying: String,\n\n contract_val: String,\n\n is_inverse: String,\n\n }\n\n let mut mapping: BTreeMap<String, f64> = BTreeMap::new();\n\n\n\n let txt = http_get(&format!(\n\n \"https://www.okex.com/api/{}/v3/instruments\",\n\n market_type\n\n ))\n\n .unwrap();\n\n let instruments = serde_json::from_str::<Vec<Instrument>>(&txt).unwrap();\n\n\n\n for instrument in instruments.iter().filter(|x| x.is_inverse == \"false\") {\n\n let pair = crypto_pair::normalize_pair(&instrument.instrument_id, 
\"okex\").unwrap();\n\n mapping.insert(pair, instrument.contract_val.parse::<f64>().unwrap());\n", "file_path": "crypto-contract-value/src/exchanges/okex.rs", "rank": 31, "score": 292524.4893463734 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}{}{}\", channel, CHANNEL_PAIR_DELIMITER, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, CoinbaseProWSClient, subscribe_trade, \"matches\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, CoinbaseProWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, CoinbaseProWSClient, subscribe_orderbook, \"level2\", to_raw_channel);\n\n\n\nimpl<'a> BBO for CoinbaseProWSClient<'a> {\n\n fn subscribe_bbo(&self, _pairs: &[String]) {\n\n panic!(\"CoinbasePro WebSocket does NOT have BBO channel\");\n\n }\n\n}\n\n\n\nimpl<'a> OrderBookSnapshot for CoinbaseProWSClient<'a> {\n\n fn subscribe_orderbook_snapshot(&self, _pairs: &[String]) {\n\n panic!(\"CoinbasePro does NOT have orderbook snapshot channel\");\n", "file_path": "crypto-ws-client/src/clients/coinbase_pro.rs", "rank": 32, "score": 291296.2302943265 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n if !SYMBOL_CONTRACT_ID_MAP.read().unwrap().contains_key(pair) {\n\n // found new symbols\n\n reload_contract_ids();\n\n }\n\n let contract_id = *SYMBOL_CONTRACT_ID_MAP\n\n .read()\n\n .unwrap()\n\n .get(pair)\n\n .unwrap_or_else(|| panic!(\"Failed to find contract_id for {}\", pair));\n\n format!(\"{}-{}\", channel, contract_id)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, ZbgSwapWSClient, subscribe_trade, \"future_tick\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, ZbgSwapWSClient, subscribe_orderbook, \"future_snapshot_depth\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, ZbgSwapWSClient, subscribe_ticker, \"future_snapshot_indicator\", to_raw_channel);\n\n\n", "file_path": "crypto-ws-client/src/clients/zbg/zbg_swap.rs", "rank": 34, "score": 287126.2034886021 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n if !SYMBOL_ID_MAP.read().unwrap().contains_key(pair) {\n\n // found new symbols\n\n reload_symbol_ids();\n\n }\n\n let symbol_id = SYMBOL_ID_MAP\n\n .read()\n\n .unwrap()\n\n .get(pair)\n\n .unwrap_or_else(|| panic!(\"Failed to find symbol_id for {}\", pair))\n\n .clone();\n\n if channel == \"TRADE_STATISTIC_24H\" {\n\n format!(\"{}_{}\", symbol_id, channel)\n\n } else {\n\n format!(\"{}_{}_{}\", symbol_id, channel, pair.to_uppercase())\n\n }\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, ZbgSpotWSClient, subscribe_trade, \"TRADE\", to_raw_channel);\n", "file_path": "crypto-ws-client/src/clients/zbg/zbg_spot.rs", "rank": 35, "score": 287126.2034886021 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}{}{}\", channel, CHANNEL_PAIR_DELIMITER, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, BitzSpotWSClient, subscribe_trade, \"order\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, BitzSpotWSClient, subscribe_orderbook, \"depth\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, BitzSpotWSClient, subscribe_ticker, \"market\", to_raw_channel);\n\n\n\nimpl<'a> BBO for BitzSpotWSClient<'a> {\n\n fn subscribe_bbo(&self, _pairs: &[String]) {\n\n panic!(\"Bitz does NOT have BBO channel\");\n\n }\n\n}\n\n\n\nimpl<'a> OrderBookSnapshot for BitzSpotWSClient<'a> {\n\n fn subscribe_orderbook_snapshot(&self, _pairs: &[String]) {\n\n 
panic!(\"Bitz does NOT have orderbook snapshot channel\");\n\n }\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/bitz/bitz_spot.rs", "rank": 36, "score": 287126.2034886021 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"swap/{}:{}\", channel, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, BitgetSwapWSClient, subscribe_trade, \"trade\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(BBO, BitgetSwapWSClient, subscribe_bbo, \"depth5\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, BitgetSwapWSClient, subscribe_orderbook, \"depth\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, BitgetSwapWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n\n\nimpl<'a> OrderBookSnapshot for BitgetSwapWSClient<'a> {\n\n fn subscribe_orderbook_snapshot(&self, _pairs: &[String]) {\n\n panic!(\"Bitget does NOT have orderbook snapshot channel\");\n\n }\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/bitget/bitget_swap.rs", "rank": 37, "score": 287126.2034886021 }, { "content": "fn to_raw_channel(channel: &str, pair: &str) -> String {\n\n format!(\"{}{}{}\", channel, CHANNEL_PAIR_DELIMITER, pair)\n\n}\n\n\n\n#[rustfmt::skip]\n\nimpl_trait!(Trade, GateSpotWSClient, subscribe_trade, \"trades\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(OrderBook, GateSpotWSClient, subscribe_orderbook, \"depth\", to_raw_channel);\n\n#[rustfmt::skip]\n\nimpl_trait!(Ticker, GateSpotWSClient, subscribe_ticker, \"ticker\", to_raw_channel);\n\n\n\nimpl<'a> BBO for GateSpotWSClient<'a> {\n\n fn subscribe_bbo(&self, _pairs: &[String]) {\n\n panic!(\"Bitz does NOT have BBO channel\");\n\n }\n\n}\n\n\n\nimpl<'a> OrderBookSnapshot for GateSpotWSClient<'a> {\n\n fn subscribe_orderbook_snapshot(&self, _pairs: &[String]) {\n\n panic!(\"Bitz does NOT have orderbook snapshot channel\");\n\n }\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/gate/gate_spot.rs", "rank": 38, "score": 287126.2034886021 }, { "content": "fn pair_to_market_type(pair: &str) -> &'static str {\n\n if pair.ends_with(\"-SWAP\") {\n\n \"swap\"\n\n } else {\n\n let c = pair.matches('-').count();\n\n if c == 1 {\n\n \"spot\"\n\n } else if c == 2 {\n\n let date = &pair[(pair.len() - 6)..];\n\n debug_assert!(date.parse::<i64>().is_ok());\n\n \"futures\"\n\n } else {\n\n debug_assert!(pair.ends_with(\"-C\") || pair.ends_with(\"-P\"));\n\n \"option\"\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "crypto-rest-client/src/exchanges/okex.rs", "rank": 39, "score": 286552.31024059904 }, { "content": "fn name_pairs_to_command(name: &str, pairs: &[String], subscribe: bool) -> String {\n\n format!(\n\n r#\"{{\"event\":\"{}\",\"pair\":{},\"subscription\":{{\"name\":\"{}\"}}}}\"#,\n\n if subscribe {\n\n \"subscribe\"\n\n } else {\n\n \"unsubscribe\"\n\n },\n\n serde_json::to_string(pairs).unwrap(),\n\n name\n\n )\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/kraken.rs", "rank": 40, "score": 284405.06583840906 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RawOrderbookMsg {\n\n action: String, // partial, update\n\n bids: Vec<[f64; 2]>,\n\n asks: Vec<[f64; 2]>,\n\n time: f64,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/exchanges/ftx.rs", "rank": 41, "score": 282802.4565666085 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RawOrderbookMsg {\n\n #[serde(rename = \"type\")]\n\n type_: String, // snapshot, change\n\n timestamp: i64,\n\n instrument_name: 
String,\n\n bids: Vec<[Value; 3]>,\n\n asks: Vec<[Value; 3]>,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/exchanges/deribit.rs", "rank": 42, "score": 282802.4565666085 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RawOrderbookMsg {\n\n topic: String,\n\n #[serde(rename = \"type\")]\n\n type_: String,\n\n data: Value,\n\n timestamp_e6: Value, // i64 or String\n\n}\n\n\n\npub(crate) fn parse_trade(market_type: MarketType, msg: &str) -> Result<Vec<TradeMsg>> {\n\n match market_type {\n\n MarketType::InverseSwap | MarketType::InverseFuture => {\n\n let ws_msg = serde_json::from_str::<WebsocketMsg<InverseTradeMsg>>(msg)?;\n\n\n\n let trades: Vec<TradeMsg> = ws_msg\n\n .data\n\n .into_iter()\n\n .map(|raw_trade| TradeMsg {\n\n exchange: EXCHANGE_NAME.to_string(),\n\n market_type,\n\n symbol: raw_trade.symbol.clone(),\n", "file_path": "crypto-msg-parser/src/exchanges/bybit.rs", "rank": 43, "score": 282802.4565666085 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RawOrderbookMsg {\n\n instrument_id: String,\n\n timestamp: String,\n\n asks: Vec<[String; 4]>,\n\n bids: Vec<[String; 4]>,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/exchanges/okex.rs", "rank": 44, "score": 282802.4565666085 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RawTradeMsg {\n\n id: i64,\n\n price: f64,\n\n size: f64,\n\n side: String, // buy, sell\n\n liquidation: bool,\n\n time: String,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n\n// https://docs.ftx.com/#orderbooks\n", "file_path": "crypto-msg-parser/src/exchanges/ftx.rs", "rank": 45, "score": 282690.94475016766 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RawTradeMsg {\n\n instrument_id: String,\n\n trade_id: String,\n\n price: String,\n\n size: Option<String>,\n\n qty: Option<String>,\n\n trade_side: Option<String>, // buy, sell, for option/trades only\n\n side: Option<String>, // buy, sell, for other\n\n timestamp: String,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n\n// https://www.okex.com/docs/en/#spot_ws-full_depth\n\n// https://www.okex.com/docs/en/#futures_ws-full_depth\n\n// https://www.okex.com/docs/en/#ws_swap-full_depth\n\n// https://www.okex.com/docs/en/#option_ws-full_depth\n", "file_path": "crypto-msg-parser/src/exchanges/okex.rs", "rank": 46, "score": 282690.94475016766 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RawTradeMsg {\n\n trade_seq: i64,\n\n trade_id: String,\n\n timestamp: i64,\n\n price: f64,\n\n instrument_name: String,\n\n direction: String, // buy, sell\n\n amount: f64,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n\n// https://docs.deribit.com/?javascript#book-instrument_name-interval\n", "file_path": "crypto-msg-parser/src/exchanges/deribit.rs", "rank": 47, "score": 282690.94475016766 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct RawTradeMsg {\n\n timestamp: String,\n\n symbol: String,\n\n side: String, // Sell, Buy'\n\n size: f64,\n\n price: f64,\n\n tickDirection: String, // MinusTick, PlusTick, ZeroMinusTick, ZeroPlusTick\n\n trdMatchID: String,\n\n grossValue: f64,\n\n homeNotional: f64,\n\n foreignNotional: f64,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/exchanges/bitmex.rs", "rank": 48, "score": 282690.23069551575 }, { "content": "fn channel_pairs_to_command(channel: 
&str, pairs: &[String], subscribe: bool) -> String {\n\n format!(\n\n r#\"{{\"id\":\"crypto-ws-client\",\"type\":\"{}\",\"topic\":\"{}:{}\",\"privateChannel\":false,\"response\":true}}\"#,\n\n if subscribe {\n\n \"subscribe\"\n\n } else {\n\n \"unsubscribe\"\n\n },\n\n channel,\n\n pairs.join(\",\")\n\n )\n\n}\n\n\n\npub(super) fn channels_to_commands(channels: &[String], subscribe: bool) -> Vec<String> {\n\n let mut all_commands: Vec<String> = channels\n\n .iter()\n\n .filter(|ch| ch.starts_with('{'))\n\n .map(|s| s.to_string())\n\n .collect();\n\n\n", "file_path": "crypto-ws-client/src/clients/kucoin/utils.rs", "rank": 49, "score": 280612.82331367757 }, { "content": "fn channel_pairs_to_command(channel: &str, pairs: &[String], subscribe: bool) -> String {\n\n let params: Vec<Value> = pairs\n\n .iter()\n\n .map(|pair| {\n\n if channel == \"depth\" {\n\n serde_json::json!([pair, 30, \"0\"])\n\n } else {\n\n serde_json::json!(pair)\n\n }\n\n })\n\n .collect();\n\n\n\n format!(\n\n r#\"{{\"id\":9527, \"method\":\"{}.{}\", \"params\":{}}}\"#,\n\n channel,\n\n if subscribe {\n\n \"subscribe\"\n\n } else {\n\n \"unsubscribe\"\n\n },\n\n serde_json::to_string(&params).unwrap(),\n\n )\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/gate/gate_spot.rs", "rank": 50, "score": 276975.3800925765 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct RawOrderbookMsg {\n\n asks: Vec<[f64; 3]>,\n\n bids: Vec<[f64; 3]>,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/exchanges/mxc/mxc_swap.rs", "rank": 51, "score": 272420.4831581685 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct RawOrderbookMsg {\n\n contractId: i64,\n\n asks: Vec<[String; 2]>,\n\n bids: Vec<[String; 2]>,\n\n time: i64,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/exchanges/zbg/zbg_swap.rs", "rank": 52, "score": 272420.4831581685 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct RawTradeMsg {\n\n p: String, // price\n\n q: String, // quantity\n\n T: i64, // 1, buy; 2, sell\n\n t: i64, // timestamp\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n", "file_path": "crypto-msg-parser/src/exchanges/mxc/mxc_spot.rs", "rank": 53, "score": 272314.6457274141 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct RawTradeMsg {\n\n p: f64, // price\n\n v: f64, // quantity\n\n T: i64, // 1, buy; 2, sell\n\n t: i64, // timestamp\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n\n// https://mxcdevelop.github.io/APIDoc/contract.api.cn.html#a1128a972d\n", "file_path": "crypto-msg-parser/src/exchanges/mxc/mxc_swap.rs", "rank": 54, "score": 272314.6457274141 }, { "content": "#[derive(Serialize, Deserialize)]\n\n#[allow(non_snake_case)]\n\nstruct RawTradeMsg {\n\n contractId: i64,\n\n trades: Vec<Value>,\n\n #[serde(flatten)]\n\n extra: HashMap<String, Value>,\n\n}\n\n\n\n// https://www.zbgpro.com/docs/future/v1/cn/#1529c9267f\n", "file_path": "crypto-msg-parser/src/exchanges/zbg/zbg_swap.rs", "rank": 55, "score": 272314.6457274141 }, { "content": "fn channel_pairs_to_command(channel: &str, pairs: &[String], subscribe: bool) -> Vec<String> {\n\n match channel {\n\n \"trades\" | \"tickers\" => {\n\n vec![format!(\n\n r#\"{{\"channel\":\"{}\",\"event\":\"{}\", \"payload\":{}}}\"#,\n\n format!(\"futures.{}\", channel),\n\n if subscribe {\n\n 
\"subscribe\"\n\n } else {\n\n \"unsubscribe\"\n\n },\n\n serde_json::to_string(pairs).unwrap(),\n\n )]\n\n }\n\n _ => {\n\n let commands: Vec<String> = pairs\n\n .iter()\n\n .map(|pair| {\n\n let command = serde_json::json!({\n\n \"channel\": format!(\"futures.{}\", channel),\n", "file_path": "crypto-ws-client/src/clients/gate/utils.rs", "rank": 56, "score": 271895.6610910479 }, { "content": "fn to_candlestick_raw_channel(pair: &str, interval: u32) -> String {\n\n let valid_set: Vec<u32> = vec![\n\n 60, 180, 300, 900, 1800, 3600, 7200, 14400, 21600, 43200, 86400, 604800,\n\n ];\n\n if !valid_set.contains(&interval) {\n\n let joined = valid_set\n\n .into_iter()\n\n .map(|x| x.to_string())\n\n .collect::<Vec<String>>()\n\n .join(\",\");\n\n panic!(\"OKEx has intervals {}\", joined);\n\n }\n\n let channel = format!(\"candle{}s\", interval);\n\n to_raw_channel(&channel, pair)\n\n}\n\n\n\nimpl_candlestick!(OkexWSClient);\n\n\n\ndefine_client!(\n\n OkexWSClient,\n", "file_path": "crypto-ws-client/src/clients/okex.rs", "rank": 57, "score": 267774.015054047 }, { "content": "fn to_candlestick_raw_channel(pair: &str, interval: u32) -> String {\n\n let interval_str = match interval {\n\n 60 => \"1\",\n\n 180 => \"3\",\n\n 300 => \"5\",\n\n 600 => \"10\",\n\n 900 => \"15\",\n\n 1800 => \"30\",\n\n 3600 => \"60\",\n\n 7200 => \"120\",\n\n 10800 => \"180\",\n\n 21600 => \"360\",\n\n 43200 => \"720\",\n\n 86400 => \"1D\",\n\n _ => panic!(\"Unknown interval {}\", interval),\n\n };\n\n format!(\"chart.trades.{}.{}\", pair, interval_str)\n\n}\n\n\n\nimpl_candlestick!(DeribitWSClient);\n", "file_path": "crypto-ws-client/src/clients/deribit.rs", "rank": 58, "score": 267774.015054047 }, { "content": "fn to_candlestick_raw_channel(pair: &str, interval: u32) -> String {\n\n let interval_str = match interval {\n\n 60 => \"1m\",\n\n 300 => \"5m\",\n\n 3600 => \"1h\",\n\n 86400 => \"1d\",\n\n _ => panic!(\"BitMEX has intervals 1m,5m,1h,1d\"),\n\n };\n\n format!(\"tradeBin{}:{}\", interval_str, pair)\n\n}\n\n\n\nimpl_candlestick!(BitmexWSClient);\n\n\n\ndefine_client!(\n\n BitmexWSClient,\n\n EXCHANGE_NAME,\n\n WEBSOCKET_URL,\n\n channels_to_commands,\n\n on_misc_msg,\n\n Some(CLIENT_PING_INTERVAL_AND_MSG),\n", "file_path": "crypto-ws-client/src/clients/bitmex.rs", "rank": 59, "score": 267774.015054047 }, { "content": "fn to_candlestick_raw_channel(pair: &str, interval: u32) -> String {\n\n let interval_str = match interval {\n\n 60 => \"1min\",\n\n 300 => \"5min\",\n\n 900 => \"15min\",\n\n 1800 => \"30min\",\n\n 3600 => \"60min\",\n\n 14400 => \"4hour\",\n\n 86400 => \"1day\",\n\n 604800 => \"1week\",\n\n 2592000 => \"1mon\",\n\n _ => panic!(\"Huobi has intervals 1min,5min,15min,30min,60min,4hour,1day,1week,1mon\"),\n\n };\n\n format!(\"market.{}.kline.{}\", pair, interval_str)\n\n}\n\n\n\nimpl_candlestick!(HuobiWSClient);\n\n\n\n/// Define market specific client.\n\nmacro_rules! 
define_market_client {\n", "file_path": "crypto-ws-client/src/clients/huobi.rs", "rank": 60, "score": 267774.015054047 }, { "content": "fn to_candlestick_raw_channel(symbol: &str, interval: u32) -> String {\n\n let interval_str = match interval {\n\n 60 => \"1m\",\n\n 300 => \"5m\",\n\n 900 => \"15m\",\n\n 1800 => \"30m\",\n\n 3600 => \"1h\",\n\n 10800 => \"3h\",\n\n 21600 => \"6h\",\n\n 43200 => \"12h\",\n\n 86400 => \"1D\",\n\n 604800 => \"7D\",\n\n 1209600 => \"14D\",\n\n 2592000 => \"1M\",\n\n _ => panic!(\"Bitfinex has intervals 1m,5m,15m,30m,1h,3h,6h,12h,1D,7D,14D,1M\"),\n\n };\n\n\n\n format!(\n\n r#\"{{\n\n \"event\": \"subscribe\",\n", "file_path": "crypto-ws-client/src/clients/bitfinex.rs", "rank": 61, "score": 267308.76585972373 }, { "content": "// get the quanto_multiplier field from:\n\n// https://api.gateio.ws/api/v4/futures/usdt/contracts\n\n// https://api.gateio.ws/api/v4/delivery/usdt/contracts\n\nfn fetch_quanto_multipliers(url: &str) -> BTreeMap<String, f64> {\n\n #[derive(Serialize, Deserialize)]\n\n struct RawMarket {\n\n name: String,\n\n quanto_multiplier: String,\n\n }\n\n\n\n let mut mapping: BTreeMap<String, f64> = BTreeMap::new();\n\n\n\n let txt = http_get(url).unwrap_or_else(|_| \"[]\".to_string());\n\n let markets = serde_json::from_str::<Vec<RawMarket>>(&txt).unwrap();\n\n for market in markets.iter() {\n\n mapping.insert(\n\n crypto_pair::normalize_pair(&market.name, \"gate\").unwrap(),\n\n market.quanto_multiplier.parse::<f64>().unwrap(),\n\n );\n\n }\n\n\n\n mapping\n\n}\n", "file_path": "crypto-contract-value/src/exchanges/gate.rs", "rank": 62, "score": 265287.96486162534 }, { "content": "#[test_case(MarketType::Spot, \"btcusdt\")]\n\n#[test_case(MarketType::InverseFuture, \"BTC_CQ\")]\n\n#[test_case(MarketType::InverseSwap, \"BTC-USD\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC-USDT\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-USDT-210625-P-27000\"; \"inconclusive\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/huobi.rs", "rank": 63, "score": 264367.32355445705 }, { "content": "#[test_case(MarketType::InverseFuture, \"XBTU21\")]\n\n#[test_case(MarketType::LinearFuture, \"ETHU21\")]\n\n#[test_case(MarketType::QuantoFuture, \"ETHUSDU21\")]\n\n#[test_case(MarketType::InverseSwap, \"XBTUSD\")]\n\n#[test_case(MarketType::QuantoSwap, \"ETHUSD\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitmex.rs", "rank": 64, "score": 264364.1657458053 }, { "content": "#[test_case(MarketType::Spot, \"BTC-USDT\")]\n\n#[test_case(MarketType::InverseSwap, \"XBTUSDM\")]\n\n#[test_case(MarketType::LinearSwap, \"XBTUSDTM\")]\n\n#[test_case(MarketType::InverseFuture, \"XBTMU21\"; \"inconclusive\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/kucoin.rs", "rank": 65, "score": 264364.06946735067 }, { "content": "#[test_case(MarketType::Spot, \"BTC-USDT\")]\n\n#[test_case(MarketType::InverseFuture, \"BTC-USD-210924\")]\n\n#[test_case(MarketType::LinearFuture, \"BTC-USDT-210924\")]\n\n#[test_case(MarketType::InverseSwap, 
\"BTC-USD-SWAP\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC-USDT-SWAP\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-USD-210702-35000-C\"; \"inconclusive\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/okex.rs", "rank": 66, "score": 264364.04321177216 }, { "content": "#[test_case(MarketType::Spot, \"BTC_USDT\")]\n\n#[test_case(MarketType::InverseSwap, \"BTC_USD\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC_USDT\")]\n\n#[test_case(MarketType::LinearFuture, \"BTC_USDT_20210924\"; \"inconclusive\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/gate.rs", "rank": 67, "score": 264363.9878143997 }, { "content": "#[test_case(MarketType::InverseFuture, \"BTCUSDU21\")]\n\n#[test_case(MarketType::InverseSwap, \"BTCUSD\")]\n\n#[test_case(MarketType::LinearSwap, \"BTCUSDT\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bybit.rs", "rank": 68, "score": 264363.9866096834 }, { "content": "#[test_case(MarketType::Spot, \"BTC_USDT\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC_USDT\")]\n\n#[test_case(MarketType::InverseSwap, \"BTC_USD\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/mxc.rs", "rank": 69, "score": 264363.9160315612 }, { "content": "#[test_case(MarketType::InverseSwap, \"BTC-PERPETUAL\")]\n\n#[test_case(MarketType::InverseFuture, \"BTC-24SEP21\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-2JUL21-37000-C\"; \"inconclusive\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/deribit.rs", "rank": 70, "score": 264363.8134834611 }, { "content": "#[test_case(MarketType::Spot, \"tBTCUSD\")]\n\n#[test_case(MarketType::LinearSwap, \"tBTCF0:USTF0\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitfinex.rs", "rank": 71, "score": 264363.7779215462 }, { "content": "#[test_case(MarketType::Spot, \"BTC-USDT\")]\n\n#[test_case(MarketType::Spot, \"ETH-USDT\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bithumb.rs", "rank": 72, "score": 264363.7779215462 }, { "content": "#[test_case(MarketType::InverseSwap, \"btcusd\")]\n\n#[test_case(MarketType::LinearSwap, \"cmt_btcusdt\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitget.rs", "rank": 73, "score": 264363.7287382432 
}, { "content": "// #[test_case(MarketType::Move, \"BTC-MOVE-2021Q3\")]\n\n// #[test_case(MarketType::BVOL, \"BVOL/USD\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/ftx.rs", "rank": 74, "score": 264363.7287382432 }, { "content": "// #[test_case(MarketType::InverseSwap, \"BTC_USD\")]\n\n// #[test_case(MarketType::LinearSwap, \"BTC_USDT\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n\n#[test_case(MarketType::Spot, \"btc_usdt\")]\n", "file_path": "crypto-crawler/tests/bitz.rs", "rank": 75, "score": 264363.6804571105 }, { "content": "// #[test_case(MarketType::InverseSwap, \"BTC_USD-R\")]\n\n// #[test_case(MarketType::LinearSwap, \"BTC_USDT\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n\n#[test_case(MarketType::Spot, \"btc_usdt\")]\n", "file_path": "crypto-crawler/tests/zbg.rs", "rank": 76, "score": 264363.6330535508 }, { "content": "#[test_case(MarketType::Spot, \"btcusd\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitstamp.rs", "rank": 77, "score": 264363.31832122424 }, { "content": "#[test_case(MarketType::Spot, \"XBT/USD\")]\n\nfn test_crawl_trade(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_trade,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Trade\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/kraken.rs", "rank": 78, "score": 264363.23685923713 }, { "content": "#[test_case(MarketType::Spot, \"BTCUSDT\")]\n\n#[test_case(MarketType::InverseFuture, \"BTCUSD_210924\")]\n\n#[test_case(MarketType::LinearFuture, \"BTCUSDT_210924\")]\n\n#[test_case(MarketType::InverseSwap, \"BTCUSD_PERP\")]\n\n#[test_case(MarketType::LinearSwap, \"BTCUSDT\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-210129-40000-C\"; \"inconclusive\")]\n\nfn test_crawl_ticker(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_ticker,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::Ticker\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/binance.rs", "rank": 79, "score": 264046.4947246289 }, { "content": "// Example: symbol:BTC_USDT -> 42[\"sub.symbol\",{\"symbol\":\"BTC_USDT\"}]\n\nfn spot_channel_to_command(raw_channel: &str, subscribe: bool) -> String {\n\n if raw_channel.starts_with('[') {\n\n return format!(\"{}{}\", SOCKETIO_PREFIX, raw_channel);\n\n }\n\n let v: Vec<&str> = raw_channel.split(CHANNEL_PAIR_DELIMITER).collect();\n\n let channel = v[0];\n\n let pair = v[1];\n\n\n\n let (command, ch) = if channel.starts_with(\"get.\") {\n\n let v: Vec<&str> = channel.split('.').collect();\n\n (v[0], v[1])\n\n } else {\n\n (if subscribe { \"sub\" } else { \"unsub\" }, channel)\n\n };\n\n format!(\n\n r#\"{}[\"{}.{}\",{{\"symbol\":\"{}\"}}]\"#,\n\n SOCKETIO_PREFIX, command, ch, pair\n\n )\n\n}\n\n\n", "file_path": "crypto-ws-client/src/clients/mxc.rs", "rank": 80, "score": 264043.0313544671 }, { "content": "/// Get market types of a cryptocurrency exchange.\n\npub fn 
get_market_types(exchange: &str) -> Vec<MarketType> {\n\n match exchange {\n\n \"binance\" => vec![\n\n MarketType::Spot,\n\n MarketType::LinearFuture,\n\n MarketType::InverseFuture,\n\n MarketType::LinearSwap,\n\n MarketType::InverseSwap,\n\n MarketType::EuropeanOption,\n\n ],\n\n \"bitfinex\" => vec![MarketType::Spot, MarketType::LinearSwap],\n\n \"bitget\" => vec![\n\n MarketType::Spot,\n\n MarketType::InverseSwap,\n\n MarketType::LinearSwap,\n\n ],\n\n \"bithumb\" => vec![MarketType::Spot],\n\n // BitMEX only handles Bitcoin. All profit and loss is in Bitcoin\n\n \"bitmex\" => vec![\n\n MarketType::InverseSwap,\n", "file_path": "crypto-market-type/src/lib.rs", "rank": 81, "score": 260639.78170654265 }, { "content": "#[test_case(MarketType::Spot, \"btcusdt\")]\n\n#[test_case(MarketType::InverseFuture, \"BTC_CQ\")]\n\n#[test_case(MarketType::InverseSwap, \"BTC-USD\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC-USDT\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-USDT-210625-P-27000\"; \"inconclusive\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/huobi.rs", "rank": 82, "score": 260119.46248569147 }, { "content": "#[test_case(MarketType::InverseFuture, \"XBTU21\")]\n\n#[test_case(MarketType::LinearFuture, \"ETHU21\")]\n\n#[test_case(MarketType::QuantoFuture, \"ETHUSDU21\")]\n\n#[test_case(MarketType::InverseSwap, \"XBTUSD\")]\n\n#[test_case(MarketType::QuantoSwap, \"ETHUSD\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitmex.rs", "rank": 83, "score": 260116.30467703973 }, { "content": "#[test_case(MarketType::Spot, \"BTC/USD\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC-PERP\")]\n\n#[test_case(MarketType::LinearFuture, \"BTC-0924\")]\n\n#[test_case(MarketType::Move, \"BTC-MOVE-2021Q3\")]\n\n#[test_case(MarketType::BVOL, \"BVOL/USD\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/ftx.rs", "rank": 84, "score": 260116.25931755046 }, { "content": "#[test_case(MarketType::Spot, \"BTC-USDT\")]\n\n#[test_case(MarketType::InverseSwap, \"XBTUSDM\")]\n\n#[test_case(MarketType::LinearSwap, \"XBTUSDTM\")]\n\n#[test_case(MarketType::InverseFuture, \"XBTMU21\")]\n\nfn test_crawl_l3_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l3_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L3Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/kucoin.rs", "rank": 85, "score": 260116.23615538067 }, { "content": "#[test_case(MarketType::Spot, \"BTC-USDT\")]\n\n#[test_case(MarketType::InverseSwap, \"XBTUSDM\")]\n\n#[test_case(MarketType::LinearSwap, \"XBTUSDTM\")]\n\n#[test_case(MarketType::InverseFuture, \"XBTMU21\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/kucoin.rs", "rank": 86, "score": 260116.23615538073 }, { "content": "#[test_case(MarketType::Spot, 
\"BTC-USDT\")]\n\n#[test_case(MarketType::InverseFuture, \"BTC-USD-210924\")]\n\n#[test_case(MarketType::LinearFuture, \"BTC-USDT-210924\")]\n\n#[test_case(MarketType::InverseSwap, \"BTC-USD-SWAP\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC-USDT-SWAP\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-USD-210702-35000-C\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/okex.rs", "rank": 87, "score": 260116.2004440874 }, { "content": "#[test_case(MarketType::Spot, \"BTC_USDT\")]\n\n#[test_case(MarketType::InverseSwap, \"BTC_USD\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC_USDT\")]\n\n#[test_case(MarketType::LinearFuture, \"BTC_USDT_20210924\"; \"inconclusive\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/gate.rs", "rank": 88, "score": 260116.12674563413 }, { "content": "#[test_case(MarketType::InverseFuture, \"BTCUSDU21\")]\n\n#[test_case(MarketType::InverseSwap, \"BTCUSD\")]\n\n#[test_case(MarketType::LinearSwap, \"BTCUSDT\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bybit.rs", "rank": 89, "score": 260116.1255409178 }, { "content": "#[test_case(MarketType::Spot, \"BTC_USDT\")]\n\n#[test_case(MarketType::LinearSwap, \"BTC_USDT\")]\n\n#[test_case(MarketType::InverseSwap, \"BTC_USD\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/mxc.rs", "rank": 90, "score": 260116.05496279564 }, { "content": "#[test_case(MarketType::InverseSwap, \"BTC-PERPETUAL\")]\n\n#[test_case(MarketType::InverseFuture, \"BTC-24SEP21\")]\n\n#[test_case(MarketType::EuropeanOption, \"BTC-2JUL21-37000-C\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/deribit.rs", "rank": 91, "score": 260115.98616602068 }, { "content": "#[test_case(MarketType::Spot, \"BTC-USDT\")]\n\n#[test_case(MarketType::Spot, \"ETH-USDT\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bithumb.rs", "rank": 92, "score": 260115.91685278062 }, { "content": "#[test_case(MarketType::Spot, \"tBTCUSD\")]\n\n#[test_case(MarketType::LinearSwap, \"tBTCF0:USTF0\")]\n\nfn test_crawl_l3_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l3_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L3Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitfinex.rs", "rank": 93, "score": 260115.91685278062 }, { "content": "#[test_case(MarketType::Spot, \"tBTCUSD\")]\n\n#[test_case(MarketType::LinearSwap, \"tBTCF0:USTF0\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n 
gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitfinex.rs", "rank": 94, "score": 260115.91685278062 }, { "content": "#[test_case(MarketType::InverseSwap, \"btcusd\")]\n\n#[test_case(MarketType::LinearSwap, \"cmt_btcusdt\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitget.rs", "rank": 95, "score": 260115.86766947765 }, { "content": "// #[test_case(MarketType::InverseSwap, \"BTC_USD\")]\n\n// #[test_case(MarketType::LinearSwap, \"BTC_USDT\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitz.rs", "rank": 96, "score": 260115.81938834494 }, { "content": "// #[test_case(MarketType::InverseSwap, \"BTC_USD-R\")]\n\n// #[test_case(MarketType::LinearSwap, \"BTC_USDT\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n\n#[test_case(MarketType::Spot, \"btc_usdt\")]\n", "file_path": "crypto-crawler/tests/zbg.rs", "rank": 97, "score": 260115.7719847852 }, { "content": "#[test_case(MarketType::Spot, \"btcusd\")]\n\nfn test_crawl_l2_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l2_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L2Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitstamp.rs", "rank": 98, "score": 260115.45725245867 }, { "content": "#[test_case(MarketType::Spot, \"btcusd\")]\n\nfn test_crawl_l3_event(market_type: MarketType, symbol: &str) {\n\n gen_test_code!(\n\n crawl_l3_event,\n\n EXCHANGE_NAME,\n\n market_type,\n\n symbol,\n\n MessageType::L3Event\n\n )\n\n}\n\n\n", "file_path": "crypto-crawler/tests/bitstamp.rs", "rank": 99, "score": 260115.45725245867 } ]
Rust
lapce-data/src/rich_text.rs
mirchandani-mohnish/lapce
d20ddbee3bd39c03aae6d59e7bd1c61eb3c45e9f
use std::{ ops::{Range, RangeBounds}, sync::Arc, }; use druid::{ piet::TextStorage as PietTextStorage, piet::{PietTextLayoutBuilder, TextLayoutBuilder}, text::{Attribute, AttributeSpans, Link}, text::{EnvUpdateCtx, TextStorage}, ArcStr, Color, Command, Data, Env, FontDescriptor, FontFamily, FontStyle, FontWeight, KeyOrValue, }; #[derive(Clone, Debug, Data)] pub struct RichText { buffer: ArcStr, attrs: Arc<AttributeSpans>, line_height: f64, } impl RichText { pub fn new(buffer: ArcStr) -> Self { RichText::new_with_attributes(buffer, Default::default()) } pub fn new_with_attributes(buffer: ArcStr, attributes: AttributeSpans) -> Self { RichText { buffer, attrs: Arc::new(attributes), line_height: 0.0, } } pub fn with_attribute( mut self, range: impl RangeBounds<usize>, attr: Attribute, ) -> Self { self.add_attribute(range, attr); self } pub fn len(&self) -> usize { self.buffer.len() } pub fn is_empty(&self) -> bool { self.buffer.is_empty() } pub fn add_attribute( &mut self, range: impl RangeBounds<usize>, attr: Attribute, ) { let range = druid::piet::util::resolve_range(range, self.buffer.len()); Arc::make_mut(&mut self.attrs).add(range, attr); } } impl PietTextStorage for RichText { fn as_str(&self) -> &str { self.buffer.as_str() } } impl TextStorage for RichText { fn add_attributes( &self, mut builder: PietTextLayoutBuilder, env: &Env, ) -> PietTextLayoutBuilder { for (range, attr) in self.attrs.to_piet_attrs(env) { builder = builder.range_attribute(range, attr); } if self.line_height > 0.0 { builder = builder.set_line_height(self.line_height); } builder } fn env_update(&self, _ctx: &EnvUpdateCtx) -> bool { false } fn links(&self) -> &[Link] { &[] } } #[derive(Default)] pub struct RichTextBuilder { buffer: String, attrs: AttributeSpans, links: Vec<Link>, line_height: f64, } impl RichTextBuilder { pub fn new() -> Self { Self::default() } pub fn push(&mut self, string: &str) -> AttributesAdder { let range = self.buffer.len()..(self.buffer.len() + string.len()); self.buffer.push_str(string); self.add_attributes_for_range(range) } pub fn set_line_height(&mut self, line_height: f64) { self.line_height = line_height; } #[doc(hidden)] pub fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> AttributesAdder { use std::fmt::Write; let start = self.buffer.len(); self.buffer .write_fmt(fmt) .expect("a formatting trait implementation returned an error"); self.add_attributes_for_range(start..self.buffer.len()) } pub fn add_attributes_for_range( &mut self, range: impl RangeBounds<usize>, ) -> AttributesAdder { let range = druid::piet::util::resolve_range(range, self.buffer.len()); AttributesAdder { rich_text_builder: self, range, } } pub fn build(self) -> RichText { RichText { buffer: self.buffer.into(), attrs: self.attrs.into(), line_height: self.line_height, } } } pub struct AttributesAdder<'a> { rich_text_builder: &'a mut RichTextBuilder, range: Range<usize>, } impl AttributesAdder<'_> { pub fn add_attr(&mut self, attr: Attribute) -> &mut Self { self.rich_text_builder.attrs.add(self.range.clone(), attr); self } pub fn size(&mut self, size: impl Into<KeyOrValue<f64>>) -> &mut Self { self.add_attr(Attribute::size(size)); self } pub fn text_color(&mut self, color: impl Into<KeyOrValue<Color>>) -> &mut Self { self.add_attr(Attribute::text_color(color)); self } pub fn font_family(&mut self, family: FontFamily) -> &mut Self { self.add_attr(Attribute::font_family(family)); self } pub fn weight(&mut self, weight: FontWeight) -> &mut Self { self.add_attr(Attribute::weight(weight)); self } pub fn style(&mut 
self, style: FontStyle) -> &mut Self { self.add_attr(Attribute::style(style)); self } pub fn underline(&mut self, underline: bool) -> &mut Self { self.add_attr(Attribute::underline(underline)); self } pub fn font_descriptor( &mut self, font: impl Into<KeyOrValue<FontDescriptor>>, ) -> &mut Self { self.add_attr(Attribute::font_descriptor(font)); self } pub fn link(&mut self, command: impl Into<Command>) -> &mut Self { self.rich_text_builder .links .push(Link::new(self.range.clone(), command.into())); self } }
use std::{ ops::{Range, RangeBounds}, sync::Arc, }; use druid::{ piet::TextStorage as PietTextStorage, piet::{PietTextLayoutBuilder, TextLayoutBuilder}, text::{Attribute, AttributeSpans, Link}, text::{EnvUpdateCtx, TextStorage}, ArcStr, Color, Command, Data, Env, FontDescriptor, FontFamily, FontStyle, FontWeight, KeyOrValue, }; #[derive(Clone, Debug, Data)] pub struct RichText { buffer: ArcStr, attrs: Arc<AttributeSpans>, line_height: f64, } impl RichText { pub fn new(buffer: ArcStr) -> Self { RichText::new_with_attributes(buffer, Default::default()) } pub fn new_with_attributes(buffer: ArcStr, attributes: AttributeSpans) -> Self { RichText { buffer, attrs: Arc::new(attributes), line_height: 0.0, } } pub fn with_attribute( mut self, range: impl RangeBounds<usize>, attr: Attribute, ) -> Self { self.add_attribute(range, attr); self } pub fn len(&self) -> usize { self.buffer.len() } pub fn is_empty(&self) -> bool { self.buffer.is_empty() } pub fn add_attribute( &mut self, range: impl RangeBounds<usize>, attr: Attribute, ) { let range = druid::piet::util::resolve_range(range, self.buffer.len()); Arc::make_mut(&mut self.attrs).add(range, attr); } } impl PietTextStorage for RichText { fn as_str(&self) -> &str { self.buffer.as_str() } } impl TextStorage for RichText { fn add_attributes( &self, mut builder: PietTextLayoutBuilder, env: &Env, ) -> PietTextLayoutBuilder { for (range, attr) in self.attrs.to_piet_attrs(env) { builder = builder.range_attribute(range, attr); } if self.line_height > 0.0 { builder = builder.set_line_height(self.line_height); } builder } fn env_update(&self, _ctx: &EnvUpdateCtx) -> bool { false } fn links(&self) -> &[Link] { &[] } } #[derive(Default)] pub struct RichTextBuilder { buffer: String, attrs: AttributeSpans, links: Vec<Link>, line_height: f64, } impl RichTextBuilder { pub fn
t_family(&mut self, family: FontFamily) -> &mut Self { self.add_attr(Attribute::font_family(family)); self } pub fn weight(&mut self, weight: FontWeight) -> &mut Self { self.add_attr(Attribute::weight(weight)); self } pub fn style(&mut self, style: FontStyle) -> &mut Self { self.add_attr(Attribute::style(style)); self } pub fn underline(&mut self, underline: bool) -> &mut Self { self.add_attr(Attribute::underline(underline)); self } pub fn font_descriptor( &mut self, font: impl Into<KeyOrValue<FontDescriptor>>, ) -> &mut Self { self.add_attr(Attribute::font_descriptor(font)); self } pub fn link(&mut self, command: impl Into<Command>) -> &mut Self { self.rich_text_builder .links .push(Link::new(self.range.clone(), command.into())); self } }
new() -> Self { Self::default() } pub fn push(&mut self, string: &str) -> AttributesAdder { let range = self.buffer.len()..(self.buffer.len() + string.len()); self.buffer.push_str(string); self.add_attributes_for_range(range) } pub fn set_line_height(&mut self, line_height: f64) { self.line_height = line_height; } #[doc(hidden)] pub fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> AttributesAdder { use std::fmt::Write; let start = self.buffer.len(); self.buffer .write_fmt(fmt) .expect("a formatting trait implementation returned an error"); self.add_attributes_for_range(start..self.buffer.len()) } pub fn add_attributes_for_range( &mut self, range: impl RangeBounds<usize>, ) -> AttributesAdder { let range = druid::piet::util::resolve_range(range, self.buffer.len()); AttributesAdder { rich_text_builder: self, range, } } pub fn build(self) -> RichText { RichText { buffer: self.buffer.into(), attrs: self.attrs.into(), line_height: self.line_height, } } } pub struct AttributesAdder<'a> { rich_text_builder: &'a mut RichTextBuilder, range: Range<usize>, } impl AttributesAdder<'_> { pub fn add_attr(&mut self, attr: Attribute) -> &mut Self { self.rich_text_builder.attrs.add(self.range.clone(), attr); self } pub fn size(&mut self, size: impl Into<KeyOrValue<f64>>) -> &mut Self { self.add_attr(Attribute::size(size)); self } pub fn text_color(&mut self, color: impl Into<KeyOrValue<Color>>) -> &mut Self { self.add_attr(Attribute::text_color(color)); self } pub fn fon
random
[]
Rust
src/proc/bin/starnix/fs/fuchsia/remote.rs
dahliaOS/fuchsia-pi4
5b534fccefd918b5f03205393c1fe5fddf8031d0
use fidl_fuchsia_io as fio; use fidl_fuchsia_kernel as fkernel; use fuchsia_component::client::connect_channel_to_protocol; use fuchsia_zircon as zx; use lazy_static::lazy_static; use log::info; use crate::fd_impl_seekable; use crate::fs::*; use crate::task::*; use crate::types::*; lazy_static! { static ref VMEX_RESOURCE: zx::Resource = { let (client_end, server_end) = zx::Channel::create().unwrap(); connect_channel_to_protocol::<fkernel::VmexResourceMarker>(server_end) .expect("couldn't connect to fuchsia.kernel.VmexResource"); let service = fkernel::VmexResourceSynchronousProxy::new(client_end); service.get(zx::Time::INFINITE).expect("couldn't talk to fuchsia.kernel.VmexResource") }; } pub struct RemoteFile { node: RemoteNode, } enum RemoteNode { File(fio::FileSynchronousProxy), Directory(fio::DirectorySynchronousProxy), Other(fio::NodeSynchronousProxy), } impl RemoteNode { fn get_attr(&self) -> Result<(i32, fio::NodeAttributes), fidl::Error> { match self { RemoteNode::File(n) => n.get_attr(zx::Time::INFINITE), RemoteNode::Directory(n) => n.get_attr(zx::Time::INFINITE), RemoteNode::Other(n) => n.get_attr(zx::Time::INFINITE), } } } impl RemoteFile { pub fn from_description(description: syncio::DescribedNode) -> FileHandle { let node = match description.info { fio::NodeInfo::Directory(_) => RemoteNode::Directory( fio::DirectorySynchronousProxy::new(description.node.into_channel()), ), fio::NodeInfo::File(_) => { RemoteNode::File(fio::FileSynchronousProxy::new(description.node.into_channel())) } _ => RemoteNode::Other(description.node), }; FileObject::new(RemoteFile { node }) } } const BYTES_PER_BLOCK: i64 = 512; impl FileOps for RemoteFile { fd_impl_seekable!(); fn read_at( &self, _fd: &FileObject, task: &Task, offset: usize, buf: &[iovec_t], ) -> Result<usize, Errno> { let mut total = 0; for vec in buf { total += vec.iov_len; } let (status, data) = match self.node { RemoteNode::File(ref n) => { n.read_at(total as u64, offset as u64, zx::Time::INFINITE).map_err(fidl_error) } RemoteNode::Directory(_) => Err(EISDIR), RemoteNode::Other(_) => Err(EINVAL), }?; zx::Status::ok(status).map_err(fio_error)?; let mut offset = 0; for vec in buf { let end = std::cmp::min(offset + vec.iov_len, data.len()); task.mm.write_memory(vec.iov_base, &data[offset..end])?; offset = end; if offset == data.len() { break; } } Ok(data.len()) } fn write_at( &self, _fd: &FileObject, _task: &Task, _offset: usize, _data: &[iovec_t], ) -> Result<usize, Errno> { Err(ENOSYS) } fn get_vmo( &self, _fd: &FileObject, _task: &Task, mut prot: zx::VmarFlags, _flags: u32, ) -> Result<zx::Vmo, Errno> { let has_execute = prot.contains(zx::VmarFlags::PERM_EXECUTE); prot -= zx::VmarFlags::PERM_EXECUTE; let (status, buffer) = match self.node { RemoteNode::File(ref n) => { n.get_buffer(prot.bits(), zx::Time::INFINITE).map_err(fidl_error) } _ => Err(ENODEV), }?; zx::Status::ok(status).map_err(fio_error)?; let mut vmo = buffer.unwrap().vmo; if has_execute { vmo = vmo.replace_as_executable(&VMEX_RESOURCE).expect("replace_as_executable failed"); } Ok(vmo) } fn fstat(&self, _fd: &FileObject, task: &Task) -> Result<stat_t, Errno> { let (status, attrs) = self.node.get_attr().map_err(fidl_error)?; zx::Status::ok(status).map_err(fio_error)?; Ok(stat_t { st_mode: attrs.mode, st_ino: attrs.id, st_size: attrs.content_size as i64, st_blocks: attrs.storage_size as i64 / BYTES_PER_BLOCK, st_uid: task.creds.uid, st_gid: task.creds.gid, st_nlink: attrs.link_count, ..stat_t::default() }) } } fn fidl_error(err: fidl::Error) -> Errno { info!("fidl error: 
{}", err); EIO } fn fio_error(status: zx::Status) -> Errno { Errno::from_status_like_fdio(status) }
use fidl_fuchsia_io as fio; use fidl_fuchsia_kernel as fkernel; use fuchsia_component::client::connect_channel_to_protocol; use fuchsia_zircon as zx; use lazy_static::lazy_static; use log::info; use crate::fd_impl_seekable; use crate::fs::*; use crate::task::*; use crate::types::*; lazy_static! { static ref VMEX_RESOURCE: zx::Resource = { let (client_end, server_end) = zx::Channel::create().unwrap(); connect_channel_to_protocol::<fkernel::VmexResourceMarker>(server_end)
s, buffer) = match self.node { RemoteNode::File(ref n) => { n.get_buffer(prot.bits(), zx::Time::INFINITE).map_err(fidl_error) } _ => Err(ENODEV), }?; zx::Status::ok(status).map_err(fio_error)?; let mut vmo = buffer.unwrap().vmo; if has_execute { vmo = vmo.replace_as_executable(&VMEX_RESOURCE).expect("replace_as_executable failed"); } Ok(vmo) } fn fstat(&self, _fd: &FileObject, task: &Task) -> Result<stat_t, Errno> { let (status, attrs) = self.node.get_attr().map_err(fidl_error)?; zx::Status::ok(status).map_err(fio_error)?; Ok(stat_t { st_mode: attrs.mode, st_ino: attrs.id, st_size: attrs.content_size as i64, st_blocks: attrs.storage_size as i64 / BYTES_PER_BLOCK, st_uid: task.creds.uid, st_gid: task.creds.gid, st_nlink: attrs.link_count, ..stat_t::default() }) } } fn fidl_error(err: fidl::Error) -> Errno { info!("fidl error: {}", err); EIO } fn fio_error(status: zx::Status) -> Errno { Errno::from_status_like_fdio(status) }
.expect("couldn't connect to fuchsia.kernel.VmexResource"); let service = fkernel::VmexResourceSynchronousProxy::new(client_end); service.get(zx::Time::INFINITE).expect("couldn't talk to fuchsia.kernel.VmexResource") }; } pub struct RemoteFile { node: RemoteNode, } enum RemoteNode { File(fio::FileSynchronousProxy), Directory(fio::DirectorySynchronousProxy), Other(fio::NodeSynchronousProxy), } impl RemoteNode { fn get_attr(&self) -> Result<(i32, fio::NodeAttributes), fidl::Error> { match self { RemoteNode::File(n) => n.get_attr(zx::Time::INFINITE), RemoteNode::Directory(n) => n.get_attr(zx::Time::INFINITE), RemoteNode::Other(n) => n.get_attr(zx::Time::INFINITE), } } } impl RemoteFile { pub fn from_description(description: syncio::DescribedNode) -> FileHandle { let node = match description.info { fio::NodeInfo::Directory(_) => RemoteNode::Directory( fio::DirectorySynchronousProxy::new(description.node.into_channel()), ), fio::NodeInfo::File(_) => { RemoteNode::File(fio::FileSynchronousProxy::new(description.node.into_channel())) } _ => RemoteNode::Other(description.node), }; FileObject::new(RemoteFile { node }) } } const BYTES_PER_BLOCK: i64 = 512; impl FileOps for RemoteFile { fd_impl_seekable!(); fn read_at( &self, _fd: &FileObject, task: &Task, offset: usize, buf: &[iovec_t], ) -> Result<usize, Errno> { let mut total = 0; for vec in buf { total += vec.iov_len; } let (status, data) = match self.node { RemoteNode::File(ref n) => { n.read_at(total as u64, offset as u64, zx::Time::INFINITE).map_err(fidl_error) } RemoteNode::Directory(_) => Err(EISDIR), RemoteNode::Other(_) => Err(EINVAL), }?; zx::Status::ok(status).map_err(fio_error)?; let mut offset = 0; for vec in buf { let end = std::cmp::min(offset + vec.iov_len, data.len()); task.mm.write_memory(vec.iov_base, &data[offset..end])?; offset = end; if offset == data.len() { break; } } Ok(data.len()) } fn write_at( &self, _fd: &FileObject, _task: &Task, _offset: usize, _data: &[iovec_t], ) -> Result<usize, Errno> { Err(ENOSYS) } fn get_vmo( &self, _fd: &FileObject, _task: &Task, mut prot: zx::VmarFlags, _flags: u32, ) -> Result<zx::Vmo, Errno> { let has_execute = prot.contains(zx::VmarFlags::PERM_EXECUTE); prot -= zx::VmarFlags::PERM_EXECUTE; let (statu
random
[]
Rust
policy-test/tests/e2e_authorization_policy.rs
giantswarm/linkerd2
9d868c097d6c01f63d371578b960ff5d844303cf
use linkerd_policy_controller_k8s_api::{ self as k8s, policy::{LocalTargetRef, NamespacedTargetRef}, }; use linkerd_policy_test::{create, create_ready_pod, curl, nginx, with_temp_ns, LinkerdInject}; #[tokio::test(flavor = "current_thread")] async fn meshtls() { with_temp_ns(|client, ns| async move { let (srv, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&all_mtls)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!( injected_status, 0, "injected curl must contact nginx" ); assert_ne!(uninjected_status, 0, "uninjected curl must fail to contact nginx"); }) .await; } #[tokio::test(flavor = "current_thread")] async fn targets_namespace() { with_temp_ns(|client, ns| async move { let (_srv, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef { group: None, kind: "Namespace".to_string(), name: ns.clone(), }, Some(NamespacedTargetRef::from_resource(&all_mtls)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!(injected_status, 0, "injected curl must contact nginx"); assert_ne!( uninjected_status, 0, "uninjected curl must fail to contact nginx" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn meshtls_namespace() { with_temp_ns(|client, ns| async move { let (srv, mtls_ns) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, ns_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&mtls_ns)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!(injected_status, 0, "injected curl must contact nginx"); assert_ne!( uninjected_status, 0, "uninjected curl must fail to contact nginx" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn network() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let blessed = curl .run("curl-blessed", "http://nginx", LinkerdInject::Disabled) .await; let blessed_ip = blessed.ip().await; tracing::debug!(curl.blessed.ip = %blessed_ip); let (srv, allow_ips) = tokio::join!( 
create(&client, nginx::server(&ns)), create(&client, allow_ips(&ns, Some(blessed_ip))) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&allow_ips)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); curl.delete_lock().await; let status = blessed.exit_code().await; assert_eq!(status, 0, "blessed curl pod must succeed"); let status = curl .run("curl-cursed", "http://nginx", LinkerdInject::Disabled) .await .exit_code() .await; assert_ne!(status, 0, "cursed curl pod must fail"); }) .await; } #[tokio::test(flavor = "current_thread")] async fn both() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let (blessed_injected, blessed_uninjected) = tokio::join!( curl.run( "curl-blessed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-blessed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (blessed_injected_ip, blessed_uninjected_ip) = tokio::join!(blessed_injected.ip(), blessed_uninjected.ip(),); tracing::debug!(curl.blessed.injected.ip = ?blessed_injected_ip); tracing::debug!(curl.blessed.uninjected.ip = ?blessed_uninjected_ip); let (srv, allow_ips, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create( &client, allow_ips(&ns, vec![blessed_injected_ip, blessed_uninjected_ip]), ), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), vec![ NamespacedTargetRef::from_resource(&allow_ips), NamespacedTargetRef::from_resource(&all_mtls), ], ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); curl.delete_lock().await; tracing::info!("unblocked curl"); let (blessed_injected_status, blessed_uninjected_status) = tokio::join!(blessed_injected.exit_code(), blessed_uninjected.exit_code()); assert_eq!( blessed_injected_status, 0, "blessed injected curl pod must succeed" ); assert_ne!( blessed_uninjected_status, 0, "blessed uninjected curl pod must NOT succeed" ); let (cursed_injected, cursed_uninjected) = tokio::join!( curl.run( "curl-cursed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-cursed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (cursed_injected_status, cursed_uninjected_status) = tokio::join!(cursed_injected.exit_code(), cursed_uninjected.exit_code(),); assert_ne!( cursed_injected_status, 0, "cursed injected curl pod must fail" ); assert_ne!( cursed_uninjected_status, 0, "cursed uninjected curl pod must fail" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn either() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let (blessed_injected, blessed_uninjected) = tokio::join!( curl.run( "curl-blessed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-blessed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (blessed_injected_ip, blessed_uninjected_ip) = tokio::join!(blessed_injected.ip(), blessed_uninjected.ip()); tracing::debug!(curl.blessed.injected.ip = ?blessed_injected_ip); tracing::debug!(curl.blessed.uninjected.ip = ?blessed_uninjected_ip); let (srv, allow_ips, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, allow_ips(&ns, vec![blessed_uninjected_ip])), create(&client, all_authenticated(&ns)) ); tokio::join!( create( &client, 
authz_policy( &ns, "nginx-from-ip", LocalTargetRef::from_resource(&srv), vec![NamespacedTargetRef::from_resource(&allow_ips)], ), ), create( &client, authz_policy( &ns, "nginx-from-id", LocalTargetRef::from_resource(&srv), vec![NamespacedTargetRef::from_resource(&all_mtls)], ), ) ); tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)), ); curl.delete_lock().await; tracing::info!("unblocking curl"); let (blessed_injected_status, blessed_uninjected_status) = tokio::join!(blessed_injected.exit_code(), blessed_uninjected.exit_code()); assert_eq!( blessed_injected_status, 0, "blessed injected curl pod must succeed" ); assert_eq!( blessed_uninjected_status, 0, "blessed uninjected curl pod must succeed" ); let (cursed_injected, cursed_uninjected) = tokio::join!( curl.run( "curl-cursed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-cursed-uninjected", "http://nginx", LinkerdInject::Disabled, ), ); let (cursed_injected_status, cursed_uninjected_status) = tokio::join!(cursed_injected.exit_code(), cursed_uninjected.exit_code()); assert_eq!( cursed_injected_status, 0, "cursed injected curl pod must succeed" ); assert_ne!( cursed_uninjected_status, 0, "cursed uninjected curl pod must fail" ); }) .await; } fn authz_policy( ns: &str, name: &str, target: LocalTargetRef, authns: impl IntoIterator<Item = NamespacedTargetRef>, ) -> k8s::policy::AuthorizationPolicy { k8s::policy::AuthorizationPolicy { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some(name.to_string()), ..Default::default() }, spec: k8s::policy::AuthorizationPolicySpec { target_ref: target, required_authentication_refs: authns.into_iter().collect(), }, } } fn all_authenticated(ns: &str) -> k8s::policy::MeshTLSAuthentication { k8s::policy::MeshTLSAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("all-authenticated".to_string()), ..Default::default() }, spec: k8s::policy::MeshTLSAuthenticationSpec { identity_refs: None, identities: Some(vec!["*".to_string()]), }, } } fn ns_authenticated(ns: &str) -> k8s::policy::MeshTLSAuthentication { k8s::policy::MeshTLSAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("all-authenticated".to_string()), ..Default::default() }, spec: k8s::policy::MeshTLSAuthenticationSpec { identity_refs: Some(vec![NamespacedTargetRef { group: None, kind: "Namespace".to_string(), name: ns.to_string(), namespace: None, }]), identities: None, }, } } fn allow_ips( ns: &str, ips: impl IntoIterator<Item = std::net::IpAddr>, ) -> k8s::policy::NetworkAuthentication { k8s::policy::NetworkAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("allow-pod".to_string()), ..Default::default() }, spec: k8s::policy::NetworkAuthenticationSpec { networks: ips .into_iter() .map(|ip| k8s::policy::Network { cidr: ip.into(), except: None, }) .collect(), }, } }
use linkerd_policy_controller_k8s_api::{ self as k8s, policy::{LocalTargetRef, NamespacedTargetRef}, }; use linkerd_policy_test::{create, create_ready_pod, curl, nginx, with_temp_ns, LinkerdInject}; #[tokio::test(flavor = "current_thread")] async fn meshtls() { with_temp_ns(|client, ns| async move { let (srv, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&all_mtls)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!( injected_status, 0, "injected curl must contact nginx" ); assert_ne!(uninjected_status, 0, "uninjected curl must fail to contact nginx"); }) .await; } #[tokio::test(flavor = "current_thread")] async fn targets_namespace() { with_temp_ns(|client, ns| async move { let (_srv, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef { group: None, kind: "Namespace".to_string(), name: ns.clone(), }, Some(NamespacedTargetRef::from_resource(&all_mtls)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!(injected_status, 0, "injected curl must contact nginx"); assert_ne!( uninjected_status, 0, "uninjected curl must fail to contact nginx" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn meshtls_namespace() { with_temp_ns(|client, ns| async move { let (srv, mtls_ns) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, ns_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&mtls_ns)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); let curl = curl::Runner::init(&client, &ns).await; let (injected, uninjected) = tokio::join!( curl.run("curl-injected", "http://nginx", LinkerdInject::Enabled), curl.run("curl-uninjected", "http://nginx", LinkerdInject::Disabled), ); let (injected_status, uninjected_status) = tokio::join!(injected.exit_code(), uninjected.exit_code()); assert_eq!(injected_status, 0, "injected curl must contact nginx"); assert_ne!( uninjected_status, 0, "uninjected curl must fail to contact nginx" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn network() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let blessed = curl .run("curl-blessed", "http://nginx", LinkerdInject::Disabled) .await; let blessed_ip = blessed.ip().await; tracing::debug!(curl.blessed.ip = %blessed_ip); let (srv, allow_ips) = tokio::join!( 
create(&client, nginx::server(&ns)), create(&client, allow_ips(&ns, Some(blessed_ip))) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), Some(NamespacedTargetRef::from_resource(&allow_ips)), ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); curl.delete_lock().await; let status = blessed.exit_code().await; assert_eq!(status, 0, "blessed curl pod must succeed"); let status = curl .run("curl-cursed", "http://nginx", LinkerdInject::Disabled) .await .exit_code() .await; assert_ne!(status, 0, "cursed curl pod must fail"); }) .await; } #[tokio::test(flavor = "current_thread")] async fn both() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let (blessed_injected, blessed_uninjected) = tokio::join!( curl.run( "curl-blessed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-blessed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (blessed_injected_ip, blessed_uninjected_ip) = tokio::join!(blessed_injected.ip(), blessed_uninjected.ip(),); tracing::debug!(curl.blessed.injected.ip = ?blessed_injected_ip); tracing::debug!(curl.blessed.uninjected.ip = ?blessed_uninjected_ip); let (srv, allow_ips, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create( &client, allow_ips(&ns, vec![blessed_injected_ip, blessed_uninjected_ip]), ), create(&client, all_authenticated(&ns)) ); create( &client, authz_policy( &ns, "nginx", LocalTargetRef::from_resource(&srv), vec![ NamespacedTargetRef::from_resource(&allow_ips), NamespacedTargetRef::from_resource(&all_mtls), ], ), ) .await; tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)) ); curl.delete_lock().await; tracing::info!("unblocked curl"); let (blessed_injected_status, blessed_uninjected_status) = tokio::join!(blessed_injected.exit_code(), blessed_uninjected.exit_code()); assert_eq!( blessed_injected_status, 0, "blessed injected curl pod must succeed" ); assert_ne!( blessed_uninjected_status, 0, "blessed uninjected curl pod must NOT succeed" ); let (cursed_injected, cursed_uninjected) = tokio::join!( curl.run( "curl-cursed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-cursed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (cursed_injected_status, cursed_uninjected_status) = tokio::join!(cursed_injected.exit_code(), cursed_uninjected.exit_code(),); assert_ne!( cursed_injected_status, 0, "cursed injected curl pod must fail" ); assert_ne!( cursed_uninjected_status, 0, "cursed uninjected curl pod must fail" ); }) .await; } #[tokio::test(flavor = "current_thread")] async fn either() { with_temp_ns(|client, ns| async move { let curl = curl::Runner::init(&client, &ns).await; curl.create_lock().await; let (blessed_injected, blessed_uninjected) = tokio::join!( curl.run( "curl-blessed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-blessed-uninjected", "http://nginx", LinkerdInject::Disabled, ) ); let (blessed_injected_ip, blessed_uninjected_ip) = tokio::join!(blessed_injected.ip(), blessed_uninjected.ip()); tracing::debug!(curl.blessed.injected.ip = ?blessed_injected_ip); tracing::debug!(curl.blessed.uninjected.ip = ?blessed_uninjected_ip); let (srv, allow_ips, all_mtls) = tokio::join!( create(&client, nginx::server(&ns)), create(&client, allow_ips(&ns, vec![blessed_uninjected_ip])), create(&client, all_authenticated(&ns)) ); tokio::join!( create( &client, 
authz_policy( &ns, "nginx-from-ip", LocalTargetRef::from_resource(&srv), vec![NamespacedTargetRef::from_resource(&allow_ips)], ), ), create( &client, authz_policy( &ns, "nginx-from-id", LocalTargetRef::from_resource(&srv), vec![NamespacedTargetRef::from_resource(&all_mtls)], ), ) ); tokio::join!( create(&client, nginx::service(&ns)), create_ready_pod(&client, nginx::pod(&ns)), ); curl.delete_lock().await; tracing::info!("unblocking curl"); let (blessed_injected_status, blessed_uninjected_status) = tokio::join!(blessed_injected.exit_code(), blessed_uninjected.exit_code()); assert_eq!( blessed_injected_status, 0, "blessed injected curl pod must succeed" ); assert_eq!( blessed_uninjected_status, 0, "blessed uninjected curl pod must succeed" ); let (cursed_injected, cursed_uninjected) = tokio::join!( curl.run( "curl-cursed-injected", "http://nginx", LinkerdInject::Enabled, ), curl.run( "curl-cursed-uninjected", "http://nginx", LinkerdInject::Disabled, ), ); let (cursed_injected_status, cursed_uninjected_status) = tokio::join!(cursed_injected.exit_code(), cursed_uninjected.exit_code()); assert_eq!( cursed_injected_status, 0, "cursed injected curl pod must succeed" ); assert_ne!( cursed_uninjected_status, 0, "cursed uninjected curl pod must fail" ); }) .await; }
fn all_authenticated(ns: &str) -> k8s::policy::MeshTLSAuthentication { k8s::policy::MeshTLSAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("all-authenticated".to_string()), ..Default::default() }, spec: k8s::policy::MeshTLSAuthenticationSpec { identity_refs: None, identities: Some(vec!["*".to_string()]), }, } } fn ns_authenticated(ns: &str) -> k8s::policy::MeshTLSAuthentication { k8s::policy::MeshTLSAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("all-authenticated".to_string()), ..Default::default() }, spec: k8s::policy::MeshTLSAuthenticationSpec { identity_refs: Some(vec![NamespacedTargetRef { group: None, kind: "Namespace".to_string(), name: ns.to_string(), namespace: None, }]), identities: None, }, } } fn allow_ips( ns: &str, ips: impl IntoIterator<Item = std::net::IpAddr>, ) -> k8s::policy::NetworkAuthentication { k8s::policy::NetworkAuthentication { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some("allow-pod".to_string()), ..Default::default() }, spec: k8s::policy::NetworkAuthenticationSpec { networks: ips .into_iter() .map(|ip| k8s::policy::Network { cidr: ip.into(), except: None, }) .collect(), }, } }
fn authz_policy( ns: &str, name: &str, target: LocalTargetRef, authns: impl IntoIterator<Item = NamespacedTargetRef>, ) -> k8s::policy::AuthorizationPolicy { k8s::policy::AuthorizationPolicy { metadata: k8s::ObjectMeta { namespace: Some(ns.to_string()), name: Some(name.to_string()), ..Default::default() }, spec: k8s::policy::AuthorizationPolicySpec { target_ref: target, required_authentication_refs: authns.into_iter().collect(), }, } }
function_block-full_function
[ { "content": "pub fn pod(ns: &str) -> k8s::Pod {\n\n k8s::Pod {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: Some(\"nginx\".to_string()),\n\n annotations: Some(convert_args!(btreemap!(\n\n \"linkerd.io/inject\" => \"enabled\",\n\n \"config.linkerd.io/proxy-log-level\" => \"linkerd=trace,info\",\n\n ))),\n\n labels: Some(convert_args!(btreemap!(\n\n \"app\" => \"nginx\",\n\n ))),\n\n ..Default::default()\n\n },\n\n spec: Some(k8s::PodSpec {\n\n containers: vec![k8s::api::core::v1::Container {\n\n name: \"nginx\".to_string(),\n\n image: Some(\"docker.io/library/nginx:latest\".to_string()),\n\n ports: Some(vec![k8s::api::core::v1::ContainerPort {\n\n container_port: 80,\n\n ..Default::default()\n\n }]),\n\n ..Default::default()\n\n }],\n\n ..Default::default()\n\n }),\n\n ..k8s::Pod::default()\n\n }\n\n}\n\n\n", "file_path": "policy-test/src/nginx.rs", "rank": 0, "score": 277125.7688997681 }, { "content": "fn mk_pause(ns: &str, name: &str) -> k8s::Pod {\n\n k8s::Pod {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: Some(name.to_string()),\n\n annotations: Some(convert_args!(btreemap!(\n\n \"linkerd.io/inject\" => \"enabled\",\n\n ))),\n\n ..Default::default()\n\n },\n\n spec: Some(k8s::PodSpec {\n\n containers: vec![k8s::api::core::v1::Container {\n\n name: \"pause\".to_string(),\n\n image: Some(\"gcr.io/google_containers/pause:3.2\".to_string()),\n\n ..Default::default()\n\n }],\n\n ..Default::default()\n\n }),\n\n ..k8s::Pod::default()\n\n }\n\n}\n\n\n", "file_path": "policy-test/tests/api.rs", "rank": 1, "score": 253813.48666609963 }, { "content": "fn group_kind_name<T>(resource: &T) -> (Option<String>, String, String)\n\nwhere\n\n T: kube::Resource,\n\n T::DynamicType: Default,\n\n{\n\n let dt = Default::default();\n\n\n\n let group = match T::group(&dt) {\n\n g if (*g).is_empty() => None,\n\n g => Some(g.to_string()),\n\n };\n\n\n\n let kind = T::kind(&dt).to_string();\n\n\n\n let name = resource\n\n .meta()\n\n .name\n\n .clone()\n\n .expect(\"resource must have a name\");\n\n\n", "file_path": "policy-controller/k8s/api/src/policy/target_ref.rs", "rank": 2, "score": 197827.49948935848 }, { "content": "fn canonical_kind(group: Option<&str>, kind: &str) -> String {\n\n if let Some(group) = group {\n\n format!(\"{}.{}\", kind, group)\n\n } else {\n\n kind.to_string()\n\n }\n\n}\n\n\n", "file_path": "policy-controller/k8s/api/src/policy/target_ref.rs", "rank": 3, "score": 194776.21648214542 }, { "content": "#[test]\n\nfn pod_must_exist_for_lookup() {\n\n let test = TestConfig::default();\n\n test.index\n\n .write()\n\n .pod_server_rx(\"ns-0\", \"pod-0\", 8080)\n\n .expect_err(\"pod-0.ns-0 must not exist\");\n\n}\n\n\n", "file_path": "policy-controller/k8s/index/src/tests.rs", "rank": 4, "score": 192004.3760354552 }, { "content": "fn targets_kind<T>(group: Option<&str>, kind: &str) -> bool\n\nwhere\n\n T: kube::Resource,\n\n T::DynamicType: Default,\n\n{\n\n let dt = Default::default();\n\n\n\n let mut t_group = &*T::group(&dt);\n\n if t_group.is_empty() {\n\n t_group = \"core\";\n\n }\n\n\n\n group.unwrap_or(\"core\").eq_ignore_ascii_case(t_group)\n\n && kind.eq_ignore_ascii_case(&*T::kind(&dt))\n\n}\n\n\n", "file_path": "policy-controller/k8s/api/src/policy/target_ref.rs", "rank": 5, "score": 189229.48974818055 }, { "content": "fn mk_admin_server(ns: &str, name: &str) -> k8s::policy::Server {\n\n k8s::policy::Server {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: Some(name.to_string()),\n\n 
..Default::default()\n\n },\n\n spec: k8s::policy::ServerSpec {\n\n pod_selector: k8s::labels::Selector::default(),\n\n port: k8s::policy::server::Port::Number(4191),\n\n proxy_protocol: Some(k8s::policy::server::ProxyProtocol::Http1),\n\n },\n\n }\n\n}\n\n\n\nasync fn retry_watch_server(\n\n client: &kube::Client,\n\n ns: &str,\n\n pod_name: &str,\n\n) -> tonic::Streaming<grpc::inbound::Server> {\n", "file_path": "policy-test/tests/api.rs", "rank": 6, "score": 181829.2354406154 }, { "content": "pub fn server(ns: &str) -> k8s::policy::Server {\n\n k8s::policy::Server {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: Some(\"nginx\".to_string()),\n\n ..Default::default()\n\n },\n\n spec: k8s::policy::ServerSpec {\n\n pod_selector: k8s::labels::Selector::from_iter(Some((\"app\", \"nginx\"))),\n\n port: k8s::policy::server::Port::Number(80),\n\n proxy_protocol: Some(k8s::policy::server::ProxyProtocol::Http1),\n\n },\n\n }\n\n}\n\n\n", "file_path": "policy-test/src/nginx.rs", "rank": 7, "score": 181750.0415895624 }, { "content": "pub fn service(ns: &str) -> k8s::api::core::v1::Service {\n\n k8s::api::core::v1::Service {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: Some(\"nginx\".to_string()),\n\n ..Default::default()\n\n },\n\n spec: Some(k8s::api::core::v1::ServiceSpec {\n\n type_: Some(\"ClusterIP\".to_string()),\n\n selector: Some(convert_args!(btreemap!(\n\n \"app\" => \"nginx\"\n\n ))),\n\n ports: Some(vec![k8s::api::core::v1::ServicePort {\n\n port: 80,\n\n ..Default::default()\n\n }]),\n\n ..Default::default()\n\n }),\n\n ..Default::default()\n\n }\n\n}\n", "file_path": "policy-test/src/nginx.rs", "rank": 8, "score": 172203.50726062222 }, { "content": "func (conf *ResourceConfig) uninjectPodSpec(report *Report) {\n\n\tt := conf.pod.spec\n\n\tinitContainers := []v1.Container{}\n\n\tfor _, container := range t.InitContainers {\n\n\t\tif container.Name != k8s.InitContainerName {\n\n\t\t\tinitContainers = append(initContainers, container)\n\n\t\t} else {\n\n\t\t\treport.Uninjected.ProxyInit = true\n\n\t\t}\n\n\t}\n\n\tt.InitContainers = initContainers\n\n\n\n\tcontainers := []v1.Container{}\n\n\tfor _, container := range t.Containers {\n\n\t\tif container.Name != k8s.ProxyContainerName {\n\n\t\t\tcontainers = append(containers, container)\n\n\t\t} else {\n\n\t\t\treport.Uninjected.Proxy = true\n\n\t\t}\n\n\t}\n\n\tt.Containers = containers\n\n\n\n\tvolumes := []v1.Volume{}\n\n\tfor _, volume := range t.Volumes {\n\n\t\tif volume.Name != k8s.IdentityEndEntityVolumeName && volume.Name != k8s.InitXtablesLockVolumeMountName && volume.Name != k8s.LinkerdTokenVolumeMountName {\n\n\t\t\tvolumes = append(volumes, volume)\n\n\t\t}\n\n\t}\n\n\tt.Volumes = volumes\n", "file_path": "pkg/inject/uninject.go", "rank": 9, "score": 171679.0936425395 }, { "content": "fn to_server(srv: &InboundServer, cluster_networks: &[IpNet]) -> proto::Server {\n\n // Convert the protocol object into a protobuf response.\n\n let protocol = proto::ProxyProtocol {\n\n kind: match srv.protocol {\n\n ProxyProtocol::Detect { timeout } => Some(proto::proxy_protocol::Kind::Detect(\n\n proto::proxy_protocol::Detect {\n\n timeout: Some(timeout.into()),\n\n },\n\n )),\n\n ProxyProtocol::Http1 => Some(proto::proxy_protocol::Kind::Http1(\n\n proto::proxy_protocol::Http1::default(),\n\n )),\n\n ProxyProtocol::Http2 => Some(proto::proxy_protocol::Kind::Http2(\n\n proto::proxy_protocol::Http2::default(),\n\n )),\n\n ProxyProtocol::Grpc => 
Some(proto::proxy_protocol::Kind::Grpc(\n\n proto::proxy_protocol::Grpc::default(),\n\n )),\n\n ProxyProtocol::Opaque => Some(proto::proxy_protocol::Kind::Opaque(\n\n proto::proxy_protocol::Opaque {},\n", "file_path": "policy-controller/grpc/src/lib.rs", "rank": 10, "score": 170972.2910594464 }, { "content": "package inject\n\n\n\nimport (\n\n\t\"strings\"\n\n\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\tv1 \"k8s.io/api/core/v1\"\n\n\tmetav1 \"k8s.io/apimachinery/pkg/apis/meta/v1\"\n\n)\n\n\n\n// Uninject removes from the workload in conf the init and proxy containers,\n\n// the TLS volumes and the extra annotations/labels that were added\n\nfunc (conf *ResourceConfig) Uninject(report *Report) ([]byte, error) {\n\n\tif conf.IsNamespace() || conf.IsService() {\n\n\t\tuninjectObjectMeta(conf.workload.Meta, report)\n\n\t\treturn conf.YamlMarshalObj()\n\n\t}\n\n\n\n\tif conf.pod.spec == nil {\n\n\t\treturn nil, nil\n\n\t}\n\n\n\n\tconf.uninjectPodSpec(report)\n\n\n\n\tif conf.workload.Meta != nil {\n\n\t\tuninjectObjectMeta(conf.workload.Meta, report)\n\n\t}\n\n\n\n\tuninjectObjectMeta(conf.pod.meta, report)\n\n\treturn conf.YamlMarshalObj()\n\n}\n\n\n\n// Given a PodSpec, update the PodSpec in place with the sidecar\n\n// and init-container uninjected\n\nfunc (conf *ResourceConfig) uninjectPodSpec(report *Report) {\n\n\tt := conf.pod.spec\n\n\tinitContainers := []v1.Container{}\n\n\tfor _, container := range t.InitContainers {\n\n\t\tif container.Name != k8s.InitContainerName {\n\n\t\t\tinitContainers = append(initContainers, container)\n\n\t\t} else {\n\n\t\t\treport.Uninjected.ProxyInit = true\n\n\t\t}\n\n\t}\n\n\tt.InitContainers = initContainers\n\n\n\n\tcontainers := []v1.Container{}\n\n\tfor _, container := range t.Containers {\n\n\t\tif container.Name != k8s.ProxyContainerName {\n\n\t\t\tcontainers = append(containers, container)\n\n\t\t} else {\n\n\t\t\treport.Uninjected.Proxy = true\n\n\t\t}\n\n\t}\n\n\tt.Containers = containers\n\n\n\n\tvolumes := []v1.Volume{}\n\n\tfor _, volume := range t.Volumes {\n\n\t\tif volume.Name != k8s.IdentityEndEntityVolumeName && volume.Name != k8s.InitXtablesLockVolumeMountName && volume.Name != k8s.LinkerdTokenVolumeMountName {\n\n\t\t\tvolumes = append(volumes, volume)\n\n\t\t}\n\n\t}\n\n\tt.Volumes = volumes\n\n}\n\n\n\nfunc uninjectObjectMeta(t *metav1.ObjectMeta, report *Report) {\n\n\t// We only uninject control plane components in the context\n\n\t// of doing an inject --manual. 
This is done as a way to update\n\n\t// something about the injection configuration - for example\n\n\t// adding a debug sidecar to the identity service.\n\n\t// With that in mind it is not really necessary to strip off\n\n\t// the linkerd.io/* metadata from the pod during uninjection.\n\n\t// This is why we skip that part for control plane components.\n\n\t// Furthermore the latter will never have linkerd.io/inject as\n\n\t// they are always manually injected.\n\n\tif _, ok := t.Labels[k8s.ControllerComponentLabel]; !ok {\n\n\t\tnewAnnotations := make(map[string]string)\n\n\t\tfor key, val := range t.Annotations {\n\n\t\t\tif !strings.HasPrefix(key, k8s.Prefix) ||\n\n\t\t\t\t(key == k8s.ProxyInjectAnnotation && val == k8s.ProxyInjectDisabled) {\n\n\t\t\t\tnewAnnotations[key] = val\n\n\t\t\t} else {\n\n\t\t\t\treport.Uninjected.Proxy = true\n\n\t\t\t}\n\n\n\n\t\t}\n\n\t\tt.Annotations = newAnnotations\n\n\n\n\t\tlabels := make(map[string]string)\n\n\t\tfor key, val := range t.Labels {\n\n\t\t\tif !strings.HasPrefix(key, k8s.Prefix) {\n\n\t\t\t\tlabels[key] = val\n\n\t\t\t}\n\n\t\t}\n\n\t\tt.Labels = labels\n\n\t}\n\n}\n", "file_path": "pkg/inject/uninject.go", "rank": 11, "score": 168949.31057037215 }, { "content": "\tpod struct {\n\n\t\tmeta *metav1.ObjectMeta\n\n\t\t// This fields hold labels and annotations which are to be added to the\n\n\t\t// injected resource. This is different from meta.Labels and\n\n\t\t// meta.Annotations which are the labels and annotations on the original\n\n\t\t// resource before injection.\n\n\t\tlabels map[string]string\n\n\t\tannotations map[string]string\n\n\t\tspec *corev1.PodSpec\n", "file_path": "pkg/inject/inject.go", "rank": 12, "score": 168719.22445575852 }, { "content": "\tKind string\n", "file_path": "pkg/inject/report.go", "rank": 13, "score": 156244.76592489053 }, { "content": "\tName string\n", "file_path": "pkg/inject/report.go", "rank": 14, "score": 156041.99582998457 }, { "content": "\t\tName string `json:\"name\"`\n", "file_path": "web/srv/api_handlers.go", "rank": 15, "score": 153996.16839248326 }, { "content": "func (x *StatTable_PodGroup_Row) GetFailedPodCount() uint64 {\n\n\tif x != nil {\n\n\t\treturn x.FailedPodCount\n\n\t}\n\n\treturn 0\n", "file_path": "viz/metrics-api/gen/viz/viz.pb.go", "rank": 16, "score": 152898.94235548883 }, { "content": "type group struct {\n\n\tfactory internalinterfaces.SharedInformerFactory\n\n\tnamespace string\n\n\ttweakListOptions internalinterfaces.TweakListOptionsFunc\n", "file_path": "controller/gen/client/informers/externalversions/link/interface.go", "rank": 17, "score": 147547.9805840695 }, { "content": "type group struct {\n\n\tfactory internalinterfaces.SharedInformerFactory\n\n\tnamespace string\n\n\ttweakListOptions internalinterfaces.TweakListOptionsFunc\n", "file_path": "controller/gen/client/informers/externalversions/serviceprofile/interface.go", "rank": 18, "score": 147547.9805840695 }, { "content": "type group struct {\n\n\tfactory internalinterfaces.SharedInformerFactory\n\n\tnamespace string\n\n\ttweakListOptions internalinterfaces.TweakListOptionsFunc\n", "file_path": "controller/gen/client/informers/externalversions/policy/interface.go", "rank": 19, "score": 147547.9805840695 }, { "content": "type group struct {\n\n\tfactory internalinterfaces.SharedInformerFactory\n\n\tnamespace string\n\n\ttweakListOptions internalinterfaces.TweakListOptionsFunc\n", "file_path": "controller/gen/client/informers/externalversions/server/interface.go", "rank": 20, "score": 147547.9805840695 }, { 
"content": "type group struct {\n\n\tfactory internalinterfaces.SharedInformerFactory\n\n\tnamespace string\n\n\ttweakListOptions internalinterfaces.TweakListOptionsFunc\n", "file_path": "controller/gen/client/informers/externalversions/serverauthorization/interface.go", "rank": 21, "score": 147547.9805840695 }, { "content": "func (c *networkAuthentications) Create(ctx context.Context, networkAuthentication *v1alpha1.NetworkAuthentication, opts v1.CreateOptions) (result *v1alpha1.NetworkAuthentication, err error) {\n\n\tresult = &v1alpha1.NetworkAuthentication{}\n\n\terr = c.client.Post().\n\n\t\tNamespace(c.ns).\n\n\t\tResource(\"networkauthentications\").\n\n\t\tVersionedParams(&opts, scheme.ParameterCodec).\n\n\t\tBody(networkAuthentication).\n\n\t\tDo(ctx).\n\n\t\tInto(result)\n\n\treturn\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/networkauthentication.go", "rank": 22, "score": 147034.79320503373 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/server/v1beta1/server.go", "rank": 23, "score": 143882.46064368615 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/serviceprofile/v1alpha2/serviceprofile.go", "rank": 24, "score": 143882.46064368615 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/networkauthentication.go", "rank": 25, "score": 143882.46064368615 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/link/v1alpha1/link.go", "rank": 26, "score": 143882.46064368615 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/meshtlsauthentication.go", "rank": 27, "score": 143882.46064368615 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/authorizationpolicy.go", "rank": 28, "score": 143882.46064368615 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/serverauthorization/v1beta1/serverauthorization.go", "rank": 29, "score": 143882.46064368615 }, { "content": "/// Attempts to read a default policy override from an annotation map.\n\nfn default_policy(\n\n ann: &std::collections::BTreeMap<String, String>,\n\n) -> Result<Option<DefaultPolicy>> {\n\n if let Some(v) = ann.get(\"config.linkerd.io/default-inbound-policy\") {\n\n let mode = v.parse()?;\n\n return Ok(Some(mode));\n\n }\n\n\n\n Ok(None)\n\n}\n\n\n", "file_path": "policy-controller/k8s/index/src/pod.rs", "rank": 30, "score": 142333.15565659138 }, { "content": "/// Reads `annotation` from the provided set of annotations, parsing it as a port set. 
If the\n\n/// annotation is not set or is invalid, the empty set is returned.\n\nfn ports_annotation(\n\n annotations: &std::collections::BTreeMap<String, String>,\n\n annotation: &str,\n\n) -> PortSet {\n\n annotations\n\n .get(annotation)\n\n .map(|spec| {\n\n parse_portset(spec).unwrap_or_else(|error| {\n\n tracing::info!(%spec, %error, %annotation, \"Invalid ports list\");\n\n Default::default()\n\n })\n\n })\n\n .unwrap_or_default()\n\n}\n\n\n", "file_path": "policy-controller/k8s/index/src/pod.rs", "rank": 31, "score": 142333.15565659138 }, { "content": "fn mk_pod(\n\n ns: impl ToString,\n\n name: impl ToString,\n\n containers: impl IntoIterator<Item = (impl ToString, impl IntoIterator<Item = ContainerPort>)>,\n\n) -> k8s::Pod {\n\n k8s::Pod {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: Some(name.to_string()),\n\n ..Default::default()\n\n },\n\n spec: Some(k8s::api::core::v1::PodSpec {\n\n containers: containers\n\n .into_iter()\n\n .map(|(name, ports)| k8s::api::core::v1::Container {\n\n name: name.to_string(),\n\n ports: Some(ports.into_iter().collect()),\n\n ..Default::default()\n\n })\n\n .collect(),\n\n ..Default::default()\n\n }),\n\n ..k8s::Pod::default()\n\n }\n\n}\n\n\n", "file_path": "policy-controller/k8s/index/src/tests.rs", "rank": 32, "score": 142333.15565659138 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/fake/fake_networkauthentication.go", "rank": 33, "score": 140465.9400729742 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/fake/fake_authorizationpolicy.go", "rank": 34, "score": 140465.9400729742 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/server/v1beta1/fake/fake_server.go", "rank": 35, "score": 140465.9400729742 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/link/v1alpha1/fake/fake_link.go", "rank": 36, "score": 140465.9400729742 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/fake/fake_meshtlsauthentication.go", "rank": 37, "score": 140465.9400729742 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/serverauthorization/v1beta1/fake/fake_serverauthorization.go", "rank": 38, "score": 140465.9400729742 }, { "content": "\tns string\n", "file_path": "controller/gen/client/clientset/versioned/typed/serviceprofile/v1alpha2/fake/fake_serviceprofile.go", "rank": 39, "score": 140465.9400729742 }, { "content": "func (x *StatTable_PodGroup_Row) GetSrvStats() *ServerStats {\n\n\tif x != nil {\n\n\t\treturn x.SrvStats\n\n\t}\n\n\treturn nil\n", "file_path": "viz/metrics-api/gen/viz/viz.pb.go", "rank": 40, "score": 140307.42294337906 }, { "content": "func (c *FakeNetworkAuthentications) Create(ctx context.Context, networkAuthentication *v1alpha1.NetworkAuthentication, opts v1.CreateOptions) (result *v1alpha1.NetworkAuthentication, err error) {\n\n\tobj, err := c.Fake.\n\n\t\tInvokes(testing.NewCreateAction(networkauthenticationsResource, c.ns, networkAuthentication), &v1alpha1.NetworkAuthentication{})\n\n\n\n\tif obj == nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn obj.(*v1alpha1.NetworkAuthentication), err\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/fake/fake_networkauthentication.go", "rank": 41, "score": 139952.81103590754 }, { "content": "fn client_authz(\n\n client: 
k8s::policy::server_authorization::Client,\n\n namespace: &str,\n\n cluster: &ClusterInfo,\n\n) -> Result<ClientAuthorization> {\n\n let networks = client\n\n .networks\n\n .into_iter()\n\n .flatten()\n\n .map(|net| NetworkMatch {\n\n net: net.cidr.into(),\n\n except: net.except.into_iter().flatten().map(Into::into).collect(),\n\n })\n\n .collect();\n\n\n\n let authentication = if client.unauthenticated {\n\n ClientAuthentication::Unauthenticated\n\n } else if let Some(mtls) = client.mesh_tls {\n\n client_mtls_authn(mtls, namespace, cluster)?\n\n } else {\n\n anyhow::bail!(\"no client authentication configured\");\n\n };\n\n\n\n Ok(ClientAuthorization {\n\n networks,\n\n authentication,\n\n })\n\n}\n\n\n", "file_path": "policy-controller/k8s/index/src/server_authorization.rs", "rank": 42, "score": 139799.4137717963 }, { "content": "/// Validates the target of an `AuthorizationPolicy`.\n\nfn validate_policy_target(ns: &str, tgt: &LocalTargetRef) -> Result<()> {\n\n if tgt.targets_kind::<Server>() {\n\n return Ok(());\n\n }\n\n\n\n if tgt.targets_kind::<Namespace>() {\n\n if tgt.name != ns {\n\n bail!(\"cannot target another namespace: {}\", tgt.name);\n\n }\n\n return Ok(());\n\n }\n\n\n\n bail!(\"invalid targetRef kind: {}\", tgt.canonical_kind());\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Validate<AuthorizationPolicySpec> for Admission {\n\n async fn validate(self, ns: &str, _name: &str, spec: AuthorizationPolicySpec) -> Result<()> {\n\n validate_policy_target(ns, &spec.target_ref)?;\n\n\n", "file_path": "policy-controller/src/admission.rs", "rank": 43, "score": 139282.00334695625 }, { "content": "fn client_mtls_authn(\n\n mtls: MeshTls,\n\n namespace: &str,\n\n cluster: &ClusterInfo,\n\n) -> Result<ClientAuthentication> {\n\n if mtls.unauthenticated_tls {\n\n return Ok(ClientAuthentication::TlsUnauthenticated);\n\n }\n\n\n\n let ids = mtls\n\n .identities\n\n .into_iter()\n\n .flatten()\n\n .map(|id| match id.parse() {\n\n Ok(id) => id,\n\n Err(e) => match e {},\n\n });\n\n\n\n let sas = mtls.service_accounts.into_iter().flatten().map(|sa| {\n\n let ns = sa.namespace.as_deref().unwrap_or(namespace);\n", "file_path": "policy-controller/k8s/index/src/server_authorization.rs", "rank": 45, "score": 137793.75325038447 }, { "content": "fn mk_meshtls_authentication(\n\n ns: impl ToString,\n\n name: impl ToString,\n\n identities: impl IntoIterator<Item = String>,\n\n refs: impl IntoIterator<Item = NamespacedTargetRef>,\n\n) -> k8s::policy::MeshTLSAuthentication {\n\n let identities = identities.into_iter().collect::<Vec<_>>();\n\n let identity_refs = refs.into_iter().collect::<Vec<_>>();\n\n k8s::policy::MeshTLSAuthentication {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: Some(name.to_string()),\n\n ..Default::default()\n\n },\n\n spec: k8s::policy::MeshTLSAuthenticationSpec {\n\n identities: if identities.is_empty() {\n\n None\n\n } else {\n\n Some(identities)\n\n },\n\n identity_refs: if identity_refs.is_empty() {\n\n None\n\n } else {\n\n Some(identity_refs)\n\n },\n\n },\n\n }\n\n}\n\n\n", "file_path": "policy-controller/k8s/index/src/tests/authorization_policy.rs", "rank": 46, "score": 136503.6897315066 }, { "content": "fn mk_network_authentication(\n\n ns: impl ToString,\n\n name: impl ToString,\n\n networks: impl IntoIterator<Item = k8s::policy::network_authentication::Network>,\n\n) -> k8s::policy::NetworkAuthentication {\n\n k8s::policy::NetworkAuthentication {\n\n metadata: k8s::ObjectMeta {\n\n namespace: Some(ns.to_string()),\n\n name: 
Some(name.to_string()),\n\n ..Default::default()\n\n },\n\n spec: k8s::policy::NetworkAuthenticationSpec {\n\n networks: networks.into_iter().collect(),\n\n },\n\n }\n\n}\n", "file_path": "policy-controller/k8s/index/src/tests/authorization_policy.rs", "rank": 47, "score": 136444.53936189765 }, { "content": "#[test]\n\nfn links_named_server_port() {\n\n let test = TestConfig::default();\n\n\n\n let mut pod = mk_pod(\n\n \"ns-0\",\n\n \"pod-0\",\n\n Some((\n\n \"container-0\",\n\n Some(ContainerPort {\n\n name: Some(\"admin-http\".to_string()),\n\n container_port: 8080,\n\n protocol: Some(\"TCP\".to_string()),\n\n ..ContainerPort::default()\n\n }),\n\n )),\n\n );\n\n pod.labels_mut()\n\n .insert(\"app\".to_string(), \"app-0\".to_string());\n\n test.index.write().apply(pod);\n\n\n", "file_path": "policy-controller/k8s/index/src/tests/server.rs", "rank": 48, "score": 136321.40819887674 }, { "content": "#[test]\n\nfn server_update_deselects_pod() {\n\n let test = TestConfig::default();\n\n\n\n test.index.write().reset(\n\n vec![mk_pod(\"ns-0\", \"pod-0\", Some((\"container-0\", None)))],\n\n Default::default(),\n\n );\n\n\n\n let mut srv = mk_server(\n\n \"ns-0\",\n\n \"srv-0\",\n\n Port::Number(2222),\n\n None,\n\n None,\n\n Some(k8s::policy::server::ProxyProtocol::Http2),\n\n );\n\n test.index\n\n .write()\n\n .reset(vec![srv.clone()], Default::default());\n\n\n", "file_path": "policy-controller/k8s/index/src/tests/server.rs", "rank": 49, "score": 136287.51583110247 }, { "content": "#[test]\n\nfn links_server_authz_by_name() {\n\n link_server_authz(ServerSelector::Name(\"srv-8080\".to_string()))\n\n}\n\n\n", "file_path": "policy-controller/k8s/index/src/tests/server_authorization.rs", "rank": 50, "score": 134448.8190184422 }, { "content": "#[test]\n\nfn links_authorization_policy_with_mtls_name() {\n\n let test = TestConfig::default();\n\n\n\n let mut pod = mk_pod(\"ns-0\", \"pod-0\", Some((\"container-0\", None)));\n\n pod.labels_mut()\n\n .insert(\"app\".to_string(), \"app-0\".to_string());\n\n test.index.write().apply(pod);\n\n\n\n let mut rx = test\n\n .index\n\n .write()\n\n .pod_server_rx(\"ns-0\", \"pod-0\", 8080)\n\n .expect(\"pod-0.ns-0 should exist\");\n\n assert_eq!(*rx.borrow_and_update(), test.default_server());\n\n\n\n test.index.write().apply(mk_server(\n\n \"ns-0\",\n\n \"srv-8080\",\n\n Port::Number(8080),\n\n None,\n", "file_path": "policy-controller/k8s/index/src/tests/authorization_policy.rs", "rank": 51, "score": 132641.6129247358 }, { "content": "func (conf *ResourceConfig) injectPodSpec(values *podPatch) {\n\n\tsaVolumeMount := conf.serviceAccountVolumeMount()\n\n\n\n\t// use the primary container's capabilities to ensure psp compliance, if\n\n\t// enabled\n\n\tif conf.pod.spec.Containers != nil && len(conf.pod.spec.Containers) > 0 {\n\n\t\tif sc := conf.pod.spec.Containers[0].SecurityContext; sc != nil && sc.Capabilities != nil {\n\n\t\t\tvalues.Proxy.Capabilities = &l5dcharts.Capabilities{\n\n\t\t\t\tAdd: []string{},\n\n\t\t\t\tDrop: []string{},\n\n\t\t\t}\n\n\t\t\tfor _, add := range sc.Capabilities.Add {\n\n\t\t\t\tvalues.Proxy.Capabilities.Add = append(values.Proxy.Capabilities.Add, string(add))\n\n\t\t\t}\n\n\t\t\tfor _, drop := range sc.Capabilities.Drop {\n\n\t\t\t\tvalues.Proxy.Capabilities.Drop = append(values.Proxy.Capabilities.Drop, string(drop))\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tif saVolumeMount != nil {\n\n\t\tvalues.Proxy.SAMountPath = &l5dcharts.VolumeMountPath{\n\n\t\t\tName: saVolumeMount.Name,\n\n\t\t\tMountPath: 
saVolumeMount.MountPath,\n\n\t\t\tReadOnly: saVolumeMount.ReadOnly,\n\n\t\t}\n\n\t}\n\n\n\n\tif v := conf.pod.meta.Annotations[k8s.ProxyEnableDebugAnnotation]; v != \"\" {\n\n\t\tdebug, err := strconv.ParseBool(v)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"unrecognized value used for the %s annotation: %s\", k8s.ProxyEnableDebugAnnotation, v)\n\n\t\t\tdebug = false\n\n\t\t}\n\n\n\n\t\tif debug {\n\n\t\t\tlog.Infof(\"inject debug container\")\n\n\t\t\tvalues.DebugContainer = &l5dcharts.DebugContainer{\n\n\t\t\t\tImage: &l5dcharts.Image{\n\n\t\t\t\t\tName: conf.values.DebugContainer.Image.Name,\n\n\t\t\t\t\tVersion: conf.values.DebugContainer.Image.Version,\n\n\t\t\t\t\tPullPolicy: conf.values.DebugContainer.Image.PullPolicy,\n\n\t\t\t\t},\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tconf.injectProxyInit(values)\n\n\tvalues.AddRootVolumes = len(conf.pod.spec.Volumes) == 0\n", "file_path": "pkg/inject/inject.go", "rank": 53, "score": 125108.41755337891 }, { "content": "func (conf *ResourceConfig) injectPodAnnotations(values *podPatch) {\n\n\t// ObjectMetaAnnotations.Annotations is nil for new empty structs, but we always initialize\n\n\t// it to an empty map in parse() above, so we follow suit here.\n\n\temptyMeta := &metav1.ObjectMeta{Annotations: map[string]string{}}\n\n\t// Cronjobs might have an empty `spec.jobTemplate.spec.template.metadata`\n\n\t// field so we make sure to create it if needed, before attempting adding annotations\n\n\tvalues.AddRootMetadata = reflect.DeepEqual(conf.pod.meta, emptyMeta)\n\n\tvalues.AddRootAnnotations = len(conf.pod.meta.Annotations) == 0\n\n\n\n\tfor _, k := range sortedKeys(conf.pod.annotations) {\n\n\t\tvalues.Annotations[k] = conf.pod.annotations[k]\n\n\n\n\t\t// append any additional pod annotations to the pod's meta.\n\n\t\t// for e.g., annotations that were converted from CLI inject options.\n\n\t\tconf.pod.meta.Annotations[k] = conf.pod.annotations[k]\n\n\t}\n", "file_path": "pkg/inject/inject.go", "rank": 54, "score": 125097.55105128886 }, { "content": "func uninjectAndInject(inputs []io.Reader, errWriter, outWriter io.Writer, transformer *resourceTransformerInject) int {\n\n\tvar out bytes.Buffer\n\n\tif exitCode := runUninjectSilentCmd(inputs, errWriter, &out, transformer.values); exitCode != 0 {\n\n\t\treturn exitCode\n\n\t}\n\n\treturn runInjectCmd([]io.Reader{&out}, errWriter, outWriter, transformer)\n", "file_path": "cli/cmd/inject.go", "rank": 55, "score": 124413.93198843556 }, { "content": "\tnsAnnotations map[string]string\n", "file_path": "pkg/inject/inject.go", "rank": 56, "score": 124396.33047161004 }, { "content": "type podPatch struct {\n\n\tl5dcharts.Values\n\n\tPathPrefix string `json:\"pathPrefix\"`\n\n\tAddRootMetadata bool `json:\"addRootMetadata\"`\n\n\tAddRootAnnotations bool `json:\"addRootAnnotations\"`\n\n\tAnnotations map[string]string `json:\"annotations\"`\n\n\tAddRootLabels bool `json:\"addRootLabels\"`\n\n\tAddRootInitContainers bool `json:\"addRootInitContainers\"`\n\n\tAddRootVolumes bool `json:\"addRootVolumes\"`\n\n\tLabels map[string]string `json:\"labels\"`\n\n\tDebugContainer *l5dcharts.DebugContainer `json:\"debugContainer\"`\n", "file_path": "pkg/inject/inject.go", "rank": 57, "score": 124194.03717956212 }, { "content": "func (conf *ResourceConfig) Uninject(report *Report) ([]byte, error) {\n\n\tif conf.IsNamespace() || conf.IsService() {\n\n\t\tuninjectObjectMeta(conf.workload.Meta, report)\n\n\t\treturn conf.YamlMarshalObj()\n\n\t}\n\n\n\n\tif conf.pod.spec == nil {\n\n\t\treturn nil, 
nil\n\n\t}\n\n\n\n\tconf.uninjectPodSpec(report)\n\n\n\n\tif conf.workload.Meta != nil {\n\n\t\tuninjectObjectMeta(conf.workload.Meta, report)\n\n\t}\n\n\n\n\tuninjectObjectMeta(conf.pod.meta, report)\n\n\treturn conf.YamlMarshalObj()\n", "file_path": "pkg/inject/uninject.go", "rank": 58, "score": 122612.44331021374 }, { "content": "func uninjectObjectMeta(t *metav1.ObjectMeta, report *Report) {\n\n\t// We only uninject control plane components in the context\n\n\t// of doing an inject --manual. This is done as a way to update\n\n\t// something about the injection configuration - for example\n\n\t// adding a debug sidecar to the identity service.\n\n\t// With that in mind it is not really necessary to strip off\n\n\t// the linkerd.io/* metadata from the pod during uninjection.\n\n\t// This is why we skip that part for control plane components.\n\n\t// Furthermore the latter will never have linkerd.io/inject as\n\n\t// they are always manually injected.\n\n\tif _, ok := t.Labels[k8s.ControllerComponentLabel]; !ok {\n\n\t\tnewAnnotations := make(map[string]string)\n\n\t\tfor key, val := range t.Annotations {\n\n\t\t\tif !strings.HasPrefix(key, k8s.Prefix) ||\n\n\t\t\t\t(key == k8s.ProxyInjectAnnotation && val == k8s.ProxyInjectDisabled) {\n\n\t\t\t\tnewAnnotations[key] = val\n\n\t\t\t} else {\n\n\t\t\t\treport.Uninjected.Proxy = true\n\n\t\t\t}\n\n\n\n\t\t}\n\n\t\tt.Annotations = newAnnotations\n\n\n\n\t\tlabels := make(map[string]string)\n\n\t\tfor key, val := range t.Labels {\n\n\t\t\tif !strings.HasPrefix(key, k8s.Prefix) {\n\n\t\t\t\tlabels[key] = val\n\n\t\t\t}\n\n\t\t}\n\n\t\tt.Labels = labels\n\n\t}\n", "file_path": "pkg/inject/uninject.go", "rank": 59, "score": 122612.44331021374 }, { "content": "func (conf *ResourceConfig) WithKind(kind string) *ResourceConfig {\n\n\tconf.workload.metaType = metav1.TypeMeta{Kind: kind}\n\n\treturn conf\n", "file_path": "pkg/inject/inject.go", "rank": 60, "score": 122563.00037706288 }, { "content": "\tallowNsInject bool\n", "file_path": "cli/cmd/inject.go", "rank": 61, "score": 122544.33694902241 }, { "content": "func (conf *ResourceConfig) IsPod() bool {\n\n\treturn strings.ToLower(conf.workload.metaType.Kind) == k8s.Pod\n", "file_path": "pkg/inject/inject.go", "rank": 62, "score": 122362.2817902013 }, { "content": "fn is_kind<T>(req: &AdmissionRequest) -> bool\n\nwhere\n\n T: Resource,\n\n T::DynamicType: Default,\n\n{\n\n let dt = Default::default();\n\n *req.kind.group == *T::group(&dt) && *req.kind.kind == *T::kind(&dt)\n\n}\n\n\n", "file_path": "policy-controller/src/admission.rs", "rank": 63, "score": 122258.11370497092 }, { "content": "func TestInjectAutoPod(t *testing.T) {\n\n\tpodsYAML, err := testutil.ReadFile(\"testdata/pods.yaml\")\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed to read inject test file\",\n\n\t\t\t\"failed to read inject test file: %s\", err)\n\n\t}\n\n\n\n\tinjectNS := \"inject-pod-test\"\n\n\tpodName := \"inject-pod-test-terminus\"\n\n\topaquePodName := \"inject-opaque-pod-test-terminus\"\n\n\tnsAnnotations := map[string]string{\n\n\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\tk8s.ProxyOpaquePortsAnnotation: opaquePorts,\n\n\t}\n\n\n\n\ttruthy := true\n\n\tfalsy := false\n\n\treg := \"cr.l5d.io/linkerd\"\n\n\tif override := os.Getenv(flags.EnvOverrideDockerRegistry); override != \"\" {\n\n\t\treg = override\n\n\t}\n\n\texpectedInitContainer := v1.Container{\n\n\t\tName: k8s.InitContainerName,\n\n\t\tImage: reg + \"/proxy-init:\" + version.ProxyInitVersion,\n\n\t\tArgs: 
[]string{\n\n\t\t\t\"--incoming-proxy-port\", \"4143\",\n\n\t\t\t\"--outgoing-proxy-port\", \"4140\",\n\n\t\t\t\"--proxy-uid\", \"2102\",\n\n\t\t\t// 1234,5678 were added at install time in `install_test.go`'s helmOverridesEdge()\n\n\t\t\t\"--inbound-ports-to-ignore\", \"4190,4191,1234,5678\",\n\n\t\t\t\"--outbound-ports-to-ignore\", \"4567,4568\",\n\n\t\t},\n\n\t\tResources: v1.ResourceRequirements{\n\n\t\t\tLimits: v1.ResourceList{\n\n\t\t\t\tv1.ResourceName(\"cpu\"): resource.MustParse(\"100m\"),\n\n\t\t\t\tv1.ResourceName(\"memory\"): resource.MustParse(\"50Mi\"),\n\n\t\t\t},\n\n\t\t\tRequests: v1.ResourceList{\n\n\t\t\t\tv1.ResourceName(\"cpu\"): resource.MustParse(\"10m\"),\n\n\t\t\t\tv1.ResourceName(\"memory\"): resource.MustParse(\"10Mi\"),\n\n\t\t\t},\n\n\t\t},\n\n\t\tVolumeMounts: []v1.VolumeMount{\n\n\t\t\t{\n\n\t\t\t\tName: \"linkerd-proxy-init-xtables-lock\",\n\n\t\t\t\tReadOnly: false,\n\n\t\t\t\tMountPath: \"/run\",\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tReadOnly: true,\n\n\t\t\t\tMountPath: \"/var/run/secrets/kubernetes.io/serviceaccount\",\n\n\t\t\t},\n\n\t\t},\n\n\t\tTerminationMessagePath: \"/dev/termination-log\",\n\n\t\tImagePullPolicy: \"IfNotPresent\",\n\n\t\tSecurityContext: &v1.SecurityContext{\n\n\t\t\tCapabilities: &v1.Capabilities{\n\n\t\t\t\tAdd: []v1.Capability{v1.Capability(\"NET_ADMIN\"), v1.Capability(\"NET_RAW\")},\n\n\t\t\t},\n\n\t\t\tPrivileged: &falsy,\n\n\t\t\tRunAsNonRoot: &truthy,\n\n\t\t\tAllowPrivilegeEscalation: &falsy,\n\n\t\t\tReadOnlyRootFilesystem: &truthy,\n\n\t\t},\n\n\t\tTerminationMessagePolicy: v1.TerminationMessagePolicy(\"FallbackToLogsOnError\"),\n\n\t}\n\n\n\n\tctx := context.Background()\n\n\n\n\tTestHelper.WithDataPlaneNamespace(ctx, injectNS, nsAnnotations, t, func(t *testing.T, ns string) {\n\n\t\to, err := TestHelper.Kubectl(podsYAML, \"--namespace\", ns, \"create\", \"-f\", \"-\")\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to create pods\",\n\n\t\t\t\t\"failed to create pods in namespace %s for %s: %s\", ns, err, o)\n\n\t\t}\n\n\n\n\t\to, err = TestHelper.Kubectl(\"\", \"--namespace\", ns, \"wait\", \"--for=condition=initialized\", \"--timeout=120s\", \"pod/\"+podName)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to wait for condition=initialized\",\n\n\t\t\t\t\"failed to wait for condition=initialized for pod/%s in namespace %s: %s: %s\", podName, ns, err, o)\n\n\t\t}\n\n\n\n\t\t// Check that pods with no annotation inherit from the namespace.\n\n\t\tpods, err := TestHelper.GetPods(ctx, ns, map[string]string{\"app\": podName})\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get pods\", \"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tif len(pods) != 1 {\n\n\t\t\ttestutil.Fatalf(t, \"wrong number of pods returned for namespace %s: %d\", ns, len(pods))\n\n\t\t}\n\n\t\tannotation, ok := pods[0].Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif !ok {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s did not inherit opaque ports annotation\", ns)\n\n\t\t}\n\n\t\tif annotation != opaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod in namespace %s to have %s opaque ports, but it had %s\", ns, opaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\t// Check that pods with an annotation do not inherit from the\n\n\t\t// namespace.\n\n\t\topaquePods, err := TestHelper.GetPods(ctx, ns, map[string]string{\"app\": opaquePodName})\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get pods\", \"failed to get pods for namespace %s: 
%s\", ns, err)\n\n\t\t}\n\n\t\tif len(opaquePods) != 1 {\n\n\t\t\ttestutil.Fatalf(t, \"wrong number of pods returned for namespace %s: %d\", ns, len(opaquePods))\n\n\t\t}\n\n\t\tannotation = opaquePods[0].Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif annotation != manualOpaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod in namespace %s to have %s opaque ports, but it had %s\", ns, manualOpaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\tcontainers := pods[0].Spec.Containers\n\n\t\tif proxyContainer := testutil.GetProxyContainer(containers); proxyContainer == nil {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s wasn't injected with the proxy container\", ns)\n\n\t\t}\n\n\n\n\t\tif !TestHelper.CNI() {\n\n\t\t\tinitContainers := pods[0].Spec.InitContainers\n\n\t\t\tif len(initContainers) == 0 {\n\n\t\t\t\ttestutil.Fatalf(t, \"pod in namespace %s wasn't injected with the init container\", ns)\n\n\t\t\t}\n\n\t\t\tinitContainer := initContainers[0]\n\n\t\t\tif mounts := initContainer.VolumeMounts; len(mounts) == 0 {\n\n\t\t\t\ttestutil.AnnotatedFatalf(t, \"init container doesn't have volume mounts\", \"init container doesn't have volume mounts: %#v\", initContainer)\n\n\t\t\t}\n\n\t\t\t// Removed token volume name from comparison because it contains a random string\n\n\t\t\tinitContainer.VolumeMounts[1].Name = \"\"\n\n\t\t\tif diff := deep.Equal(expectedInitContainer, initContainer); diff != nil {\n\n\t\t\t\ttestutil.AnnotatedFatalf(t, \"malformed init container\", \"malformed init container:\\n%v\", diff)\n\n\t\t\t}\n\n\t\t}\n\n\t})\n", "file_path": "test/integration/install/inject/inject_test.go", "rank": 64, "score": 122075.94389838754 }, { "content": "func TestUninjectAndInject(t *testing.T) {\n\n\tdefaultValues := defaultConfig()\n\n\n\n\toverrideConfig := defaultConfig()\n\n\toverrideConfig.Proxy.Image.Version = \"override\"\n\n\n\n\tproxyResourceConfig := defaultConfig()\n\n\tproxyResourceConfig.Proxy.Resources = &linkerd2.Resources{\n\n\t\tCPU: linkerd2.Constraints{\n\n\t\t\tRequest: \"110m\",\n\n\t\t\tLimit: \"160m\",\n\n\t\t},\n\n\t\tMemory: linkerd2.Constraints{\n\n\t\t\tRequest: \"100Mi\",\n\n\t\t\tLimit: \"150Mi\",\n\n\t\t},\n\n\t}\n\n\n\n\tcniEnabledConfig := defaultConfig()\n\n\tcniEnabledConfig.CNIEnabled = true\n\n\n\n\topaquePortsConfig := defaultConfig()\n\n\topaquePortsConfig.Proxy.OpaquePorts = \"3000,5000-6000,mysql\"\n\n\n\n\tingressConfig := defaultConfig()\n\n\tingressConfig.Proxy.IsIngress = true\n\n\n\n\tproxyIgnorePortsConfig := defaultConfig()\n\n\tproxyIgnorePortsConfig.ProxyInit.IgnoreInboundPorts = \"22,8100-8102\"\n\n\tproxyIgnorePortsConfig.ProxyInit.IgnoreOutboundPorts = \"5432\"\n\n\n\n\ttestCases := []testCase{\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_overridden_noinject.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: false,\n\n\t\t\ttestInjectConfig: func() *linkerd2.Values {\n\n\t\t\t\tvalues := defaultConfig()\n\n\t\t\t\tvalues.Proxy.Ports.Admin = 1234\n\n\t\t\t\treturn values\n\n\t\t\t}(),\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: 
\"inject_emojivoto_deployment_overridden.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: func() *linkerd2.Values {\n\n\t\t\t\tvalues := defaultConfig()\n\n\t\t\t\tvalues.Proxy.Ports.Admin = 1234\n\n\t\t\t\treturn values\n\n\t\t\t}(),\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_access_log.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: func() *linkerd2.Values {\n\n\t\t\t\tvalues := defaultConfig()\n\n\t\t\t\tvalues.Proxy.AccessLog = \"apache\"\n\n\t\t\t\treturn values\n\n\t\t\t}(),\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_list.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_list.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_list.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment_hostNetwork_false.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_hostNetwork_false.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment_hostNetwork_false.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment_capabilities.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_capabilities.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment_injectDisabled.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_injectDisabled.input.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment_injectDisabled.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment_controller_name.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_controller_name.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment_controller_name.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_statefulset.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_statefulset.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_statefulset.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_cronjob.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_cronjob.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_cronjob.report\",\n\n\t\t\tinjectProxy: false,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_cronjob_nometa.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_cronjob_nometa.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_cronjob.report\",\n\n\t\t\tinjectProxy: false,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_pod.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_pod.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_pod.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: 
\"inject_emojivoto_pod_with_requests.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_pod_with_requests.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_pod_with_requests.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: proxyResourceConfig,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment_udp.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_udp.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment_udp.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_already_injected.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_already_injected.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_already_injected.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_contour.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_contour.golden.yml\",\n\n\t\t\treportFileName: \"inject_contour.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment_empty_resources.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_empty_resources.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment_empty_resources.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_list_empty_resources.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_list_empty_resources.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_list_empty_resources.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_no_init_container.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: cniEnabledConfig,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment_config_overrides.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_config_overrides.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: overrideConfig,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_debug.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t\tenableDebugSidecarFlag: true,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_tap_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_tap_deployment_debug.golden.yml\",\n\n\t\t\treportFileName: \"inject_tap_deployment_debug.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: defaultValues,\n\n\t\t\tenableDebugSidecarFlag: true,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_namespace_good.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_namespace_good.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_namespace_good.golden.report\",\n\n\t\t\tinjectProxy: false,\n\n\t\t\ttestInjectConfig: defaultConfig(),\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_namespace_good.input.yml\",\n\n\t\t\tgoldenFileName: 
\"inject_emojivoto_namespace_overidden_good.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_namespace_good.golden.report\",\n\n\t\t\tinjectProxy: false,\n\n\t\t\ttestInjectConfig: defaultConfig(),\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_proxyignores.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: proxyIgnorePortsConfig,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_pod.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_pod_proxyignores.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_pod.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: proxyIgnorePortsConfig,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_opaque_ports.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment_opaque_ports.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: opaquePortsConfig,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_pod.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_pod_ingress.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_pod_ingress.report\",\n\n\t\t\tinjectProxy: true,\n\n\t\t\ttestInjectConfig: ingressConfig,\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_deployment.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_deployment_default_inbound_policy.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_deployment_default_inbound_policy.golden.report\",\n\n\t\t\tinjectProxy: false,\n\n\t\t\ttestInjectConfig: func() *linkerd2.Values {\n\n\t\t\t\tvalues := defaultConfig()\n\n\t\t\t\tvalues.Proxy.DefaultInboundPolicy = k8s.AllAuthenticated\n\n\t\t\t\treturn values\n\n\t\t\t}(),\n\n\t\t},\n\n\t\t{\n\n\t\t\tinputFileName: \"inject_emojivoto_pod.input.yml\",\n\n\t\t\tgoldenFileName: \"inject_emojivoto_pod_default_inbound_policy.golden.yml\",\n\n\t\t\treportFileName: \"inject_emojivoto_pod_default_inbound_policy.golden.report\",\n\n\t\t\tinjectProxy: false,\n\n\t\t\ttestInjectConfig: func() *linkerd2.Values {\n\n\t\t\t\tvalues := defaultConfig()\n\n\t\t\t\tvalues.Proxy.DefaultInboundPolicy = k8s.AllAuthenticated\n\n\t\t\t\treturn values\n\n\t\t\t}(),\n\n\t\t},\n\n\t}\n\n\n\n\tfor i, tc := range testCases {\n\n\t\ttc := tc // pin\n\n\t\tverbose = true\n\n\t\tt.Run(fmt.Sprintf(\"%d: %s --verbose\", i, tc.inputFileName), func(t *testing.T) {\n\n\t\t\ttestUninjectAndInject(t, tc)\n\n\t\t})\n\n\t\tverbose = false\n\n\t\tt.Run(fmt.Sprintf(\"%d: %s\", i, tc.inputFileName), func(t *testing.T) {\n\n\t\t\ttestUninjectAndInject(t, tc)\n\n\t\t})\n\n\t}\n", "file_path": "cli/cmd/inject_test.go", "rank": 65, "score": 120776.73835723022 }, { "content": "func testUninjectAndInject(t *testing.T, tc testCase) {\n\n\tfile, err := os.Open(\"testdata/\" + tc.inputFileName)\n\n\tif err != nil {\n\n\t\tt.Errorf(\"error opening test input file: %v\\n\", err)\n\n\t}\n\n\n\n\tread := bufio.NewReader(file)\n\n\n\n\toutput := new(bytes.Buffer)\n\n\treport := new(bytes.Buffer)\n\n\ttransformer := &resourceTransformerInject{\n\n\t\tinjectProxy: tc.injectProxy,\n\n\t\tvalues: tc.testInjectConfig,\n\n\t\toverrideAnnotations: getOverrideAnnotations(tc.testInjectConfig, defaultConfig()),\n\n\t\tenableDebugSidecar: tc.enableDebugSidecarFlag,\n\n\t\tallowNsInject: true,\n\n\t}\n\n\n\n\tif exitCode := 
uninjectAndInject([]io.Reader{read}, report, output, transformer); exitCode != 0 {\n\n\t\tt.Errorf(\"Unexpected error injecting YAML: %v\", report)\n\n\t}\n\n\tif err := testDataDiffer.DiffTestYAML(tc.goldenFileName, output.String()); err != nil {\n\n\t\tt.Error(err)\n\n\t}\n\n\n\n\treportFileName := mkFilename(tc.reportFileName, verbose)\n\n\ttestDataDiffer.DiffTestdata(t, reportFileName, report.String())\n", "file_path": "cli/cmd/inject_test.go", "rank": 66, "score": 120776.73835723022 }, { "content": "func (conf *ResourceConfig) WithNsAnnotations(m map[string]string) *ResourceConfig {\n\n\tconf.nsAnnotations = m\n\n\treturn conf\n", "file_path": "pkg/inject/inject.go", "rank": 67, "score": 120765.93986843414 }, { "content": "func TestInjectDisabledAutoPod(t *testing.T) {\n\n\tpodsYAML, err := testutil.ReadFile(\"testdata/pods.yaml\")\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed to read inject test file\",\n\n\t\t\t\"failed to read inject test file: %s\", err)\n\n\t}\n\n\n\n\tns := \"inject-disabled-pod-test\"\n\n\tpodName := \"inject-pod-test-terminus\"\n\n\topaquePodName := \"inject-opaque-pod-test-terminus\"\n\n\tnsAnnotations := map[string]string{\n\n\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\tk8s.ProxyOpaquePortsAnnotation: opaquePorts,\n\n\t}\n\n\tctx := context.Background()\n\n\tTestHelper.WithDataPlaneNamespace(ctx, ns, nsAnnotations, t, func(t *testing.T, ns string) {\n\n\t\to, err := TestHelper.Kubectl(podsYAML, \"--namespace\", ns, \"create\", \"-f\", \"-\")\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to create pods\",\n\n\t\t\t\t\"failed to create pods in namespace %s for %s: %s\", ns, err, o)\n\n\t\t}\n\n\n\n\t\to, err = TestHelper.Kubectl(\"\", \"--namespace\", ns, \"wait\", \"--for=condition=initialized\", \"--timeout=120s\", \"pod/\"+podName)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to wait for condition=initialized\",\n\n\t\t\t\t\"failed to wait for condition=initialized for pod/%s in namespace %s: %s: %s\", podName, ns, err, o)\n\n\t\t}\n\n\n\n\t\t// Check that pods with no annotation inherit from the namespace.\n\n\t\tpods, err := TestHelper.GetPods(ctx, ns, map[string]string{\"app\": podName})\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get pods\", \"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tif len(pods) != 1 {\n\n\t\t\ttestutil.Fatalf(t, \"wrong number of pods returned for namespace %s: %d\", ns, len(pods))\n\n\t\t}\n\n\t\tannotation, ok := pods[0].Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif !ok {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s did not inherit opaque ports annotation\", ns)\n\n\t\t}\n\n\t\tif annotation != opaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod in namespace %s to have %s opaque ports, but it had %s\", ns, opaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\t// Check that pods with an annotation do not inherit from the\n\n\t\t// namespace.\n\n\t\topaquePods, err := TestHelper.GetPods(ctx, ns, map[string]string{\"app\": opaquePodName})\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get pods\", \"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tif len(opaquePods) != 1 {\n\n\t\t\ttestutil.Fatalf(t, \"wrong number of pods returned for namespace %s: %d\", ns, len(opaquePods))\n\n\t\t}\n\n\t\tannotation = opaquePods[0].Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif annotation != manualOpaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod 
in namespace %s to have %s opaque ports, but it had %s\", ns, manualOpaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\tcontainers := pods[0].Spec.Containers\n\n\t\tif proxyContainer := testutil.GetProxyContainer(containers); proxyContainer != nil {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s should not have been injected\", ns)\n\n\t\t}\n\n\t})\n", "file_path": "test/integration/install/inject/inject_test.go", "rank": 68, "score": 120626.8156521286 }, { "content": "func (conf *ResourceConfig) HasPodTemplate() bool {\n\n\treturn conf.pod.meta != nil && conf.pod.spec != nil\n", "file_path": "pkg/inject/inject.go", "rank": 69, "score": 120570.97082726956 }, { "content": "func (iv *InjectValidator) ValidatePod(pod *v1.PodSpec) error {\n\n\n\n\tif err := iv.validateProxyContainer(pod); err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\tif err := iv.validateInitContainer(pod); err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\tif err := iv.validateDebugContainer(pod); err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\treturn nil\n", "file_path": "testutil/inject_validator.go", "rank": 70, "score": 120564.94570866694 }, { "content": "func getPodInboundPorts(podSpec *corev1.PodSpec) string {\n\n\tports := make(map[int32]struct{})\n\n\tif podSpec != nil {\n\n\t\tfor _, container := range podSpec.Containers {\n\n\t\t\tfor _, port := range container.Ports {\n\n\t\t\t\tports[port.ContainerPort] = struct{}{}\n\n\t\t\t}\n\n\n\n\t\t\tif readiness := container.ReadinessProbe; readiness != nil {\n\n\t\t\t\tif port, ok := getProbePort(readiness); ok {\n\n\t\t\t\t\tports[port] = struct{}{}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tif liveness := container.LivenessProbe; liveness != nil {\n\n\t\t\t\tif port, ok := getProbePort(liveness); ok {\n\n\t\t\t\t\tports[port] = struct{}{}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tportList := make([]string, 0, len(ports))\n\n\tfor port := range ports {\n\n\t\tportList = append(portList, strconv.Itoa(int(port)))\n\n\t}\n\n\n\n\t// sort slice in ascending order\n\n\tsort.Strings(portList)\n\n\treturn strings.Join(portList, \",\")\n", "file_path": "pkg/inject/inject.go", "rank": 71, "score": 120564.94570866694 }, { "content": "func (conf *ResourceConfig) CreateAnnotationPatch(opaquePorts string) ([]byte, error) {\n\n\taddRootAnnotations := false\n\n\tif conf.IsPod() {\n\n\t\taddRootAnnotations = len(conf.pod.meta.Annotations) == 0\n\n\t} else {\n\n\t\taddRootAnnotations = len(conf.workload.Meta.Annotations) == 0\n\n\t}\n\n\n\n\tpatch := &annotationPatch{\n\n\t\tAddRootAnnotations: addRootAnnotations,\n\n\t\tOpaquePorts: opaquePorts,\n\n\t}\n\n\tt, err := template.New(\"tpl\").Parse(tpl)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tvar patchJSON bytes.Buffer\n\n\tif err = t.Execute(&patchJSON, patch); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn patchJSON.Bytes(), nil\n", "file_path": "pkg/inject/inject.go", "rank": 72, "score": 119008.03801254333 }, { "content": "func (conf *ResourceConfig) AppendPodAnnotations(annotations map[string]string) {\n\n\tfor annotation, value := range annotations {\n\n\t\tconf.pod.annotations[annotation] = value\n\n\t}\n", "file_path": "pkg/inject/inject.go", "rank": 73, "score": 118838.94304080924 }, { "content": "func (conf *ResourceConfig) AppendPodAnnotation(k, v string) {\n\n\tconf.pod.annotations[k] = v\n", "file_path": "pkg/inject/inject.go", "rank": 74, "score": 118838.87368944507 }, { "content": "func (conf *ResourceConfig) GetPodPatch(injectProxy bool) ([]byte, error) {\n\n\n\n\tvalues, err := 
conf.GetOverriddenValues()\n\n\tif err != nil {\n\n\t\treturn nil, fmt.Errorf(\"could not generate Overridden Values: %w\", err)\n\n\t}\n\n\n\n\tif values.ClusterNetworks != \"\" {\n\n\t\tfor _, network := range strings.Split(strings.Trim(values.ClusterNetworks, \",\"), \",\") {\n\n\t\t\tif _, _, err := net.ParseCIDR(network); err != nil {\n\n\t\t\t\treturn nil, fmt.Errorf(\"cannot parse destination get networks: %w\", err)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tpatch := &podPatch{\n\n\t\tValues: *values,\n\n\t\tAnnotations: map[string]string{},\n\n\t\tLabels: map[string]string{},\n\n\t}\n\n\tswitch strings.ToLower(conf.workload.metaType.Kind) {\n\n\tcase k8s.Pod:\n\n\tcase k8s.CronJob:\n\n\t\tpatch.PathPrefix = \"/spec/jobTemplate/spec/template\"\n\n\tdefault:\n\n\t\tpatch.PathPrefix = \"/spec/template\"\n\n\t}\n\n\n\n\tif conf.pod.spec != nil {\n\n\t\tconf.injectPodAnnotations(patch)\n\n\t\tif injectProxy {\n\n\t\t\tconf.injectObjectMeta(patch)\n\n\t\t\tconf.injectPodSpec(patch)\n\n\t\t} else {\n\n\t\t\tpatch.Proxy = nil\n\n\t\t\tpatch.ProxyInit = nil\n\n\t\t}\n\n\t}\n\n\n\n\trawValues, err := yaml.Marshal(patch)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\tfiles := []*loader.BufferedFile{\n\n\t\t{Name: chartutil.ChartfileName},\n\n\t\t{Name: \"requirements.yaml\"},\n\n\t\t{Name: \"templates/patch.json\"},\n\n\t}\n\n\n\n\tchart := &charts.Chart{\n\n\t\tName: \"patch\",\n\n\t\tDir: \"patch\",\n\n\t\tNamespace: conf.namespace,\n\n\t\tRawValues: rawValues,\n\n\t\tFiles: files,\n\n\t\tFs: static.Templates,\n\n\t}\n\n\tbuf, err := chart.Render()\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\t// Get rid of invalid trailing commas\n\n\tres := rTrail.ReplaceAll(buf.Bytes(), []byte(\"}\\n]\"))\n\n\n\n\treturn res, nil\n", "file_path": "pkg/inject/inject.go", "rank": 75, "score": 118837.99787619105 }, { "content": "/// Read a comma-separated of ports or port ranges from the given string.\n\nfn parse_portset(s: &str) -> Result<PortSet> {\n\n let mut ports = PortSet::default();\n\n\n\n for spec in s.split(',') {\n\n match spec.split_once('-') {\n\n None => {\n\n if !spec.trim().is_empty() {\n\n let port = spec.trim().parse().context(\"parsing port\")?;\n\n if port == 0 {\n\n bail!(\"port must not be 0\")\n\n }\n\n ports.insert(port);\n\n }\n\n }\n\n Some((floor, ceil)) => {\n\n let floor = floor.trim().parse::<u16>().context(\"parsing port\")?;\n\n let ceil = ceil.trim().parse::<u16>().context(\"parsing port\")?;\n\n if floor == 0 {\n\n bail!(\"port must not be 0\")\n\n }\n", "file_path": "policy-controller/k8s/index/src/pod.rs", "rank": 76, "score": 118243.25921231491 }, { "content": "package inject\n\n\n\nimport (\n\n\t\"bytes\"\n\n\t\"encoding/json\"\n\n\t\"errors\"\n\n\t\"fmt\"\n\n\t\"html/template\"\n\n\t\"net\"\n\n\t\"reflect\"\n\n\t\"regexp\"\n\n\t\"sort\"\n\n\t\"strconv\"\n\n\t\"strings\"\n\n\t\"time\"\n\n\n\n\tjsonfilter \"github.com/clarketm/json\"\n\n\t\"github.com/linkerd/linkerd2/pkg/charts\"\n\n\t\"github.com/linkerd/linkerd2/pkg/charts/linkerd2\"\n\n\tl5dcharts \"github.com/linkerd/linkerd2/pkg/charts/linkerd2\"\n\n\t\"github.com/linkerd/linkerd2/pkg/charts/static\"\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\t\"github.com/linkerd/linkerd2/pkg/util\"\n\n\tlog \"github.com/sirupsen/logrus\"\n\n\t\"helm.sh/helm/v3/pkg/chart/loader\"\n\n\t\"helm.sh/helm/v3/pkg/chartutil\"\n\n\tappsv1 \"k8s.io/api/apps/v1\"\n\n\tbatchv1 \"k8s.io/api/batch/v1\"\n\n\tcorev1 \"k8s.io/api/core/v1\"\n\n\tk8sResource \"k8s.io/apimachinery/pkg/api/resource\"\n\n\tmetav1 
\"k8s.io/apimachinery/pkg/apis/meta/v1\"\n\n\t\"k8s.io/apimachinery/pkg/runtime\"\n\n\t\"k8s.io/apimachinery/pkg/util/intstr\"\n\n\t\"sigs.k8s.io/yaml\"\n\n)\n\n\n\nvar (\n\n\trTrail = regexp.MustCompile(`\\},\\s*\\]`)\n\n\n\n\t// ProxyAnnotations is the list of possible annotations that can be applied on a pod or namespace\n\n\tProxyAnnotations = []string{\n\n\t\tk8s.ProxyAdminPortAnnotation,\n\n\t\tk8s.ProxyControlPortAnnotation,\n\n\t\tk8s.ProxyEnableDebugAnnotation,\n\n\t\tk8s.ProxyEnableExternalProfilesAnnotation,\n\n\t\tk8s.ProxyImagePullPolicyAnnotation,\n\n\t\tk8s.ProxyInboundPortAnnotation,\n\n\t\tk8s.ProxyInitImageAnnotation,\n\n\t\tk8s.ProxyInitImageVersionAnnotation,\n\n\t\tk8s.ProxyOutboundPortAnnotation,\n\n\t\tk8s.ProxyPodInboundPortsAnnotation,\n\n\t\tk8s.ProxyCPULimitAnnotation,\n\n\t\tk8s.ProxyCPURequestAnnotation,\n\n\t\tk8s.ProxyImageAnnotation,\n\n\t\tk8s.ProxyLogFormatAnnotation,\n\n\t\tk8s.ProxyLogLevelAnnotation,\n\n\t\tk8s.ProxyMemoryLimitAnnotation,\n\n\t\tk8s.ProxyMemoryRequestAnnotation,\n\n\t\tk8s.ProxyEphemeralStorageLimitAnnotation,\n\n\t\tk8s.ProxyEphemeralStorageRequestAnnotation,\n\n\t\tk8s.ProxyUIDAnnotation,\n\n\t\tk8s.ProxyVersionOverrideAnnotation,\n\n\t\tk8s.ProxyRequireIdentityOnInboundPortsAnnotation,\n\n\t\tk8s.ProxyIgnoreInboundPortsAnnotation,\n\n\t\tk8s.ProxyOpaquePortsAnnotation,\n\n\t\tk8s.ProxyIgnoreOutboundPortsAnnotation,\n\n\t\tk8s.ProxyOutboundConnectTimeout,\n\n\t\tk8s.ProxyInboundConnectTimeout,\n\n\t\tk8s.ProxyAwait,\n\n\t\tk8s.ProxyDefaultInboundPolicyAnnotation,\n\n\t\tk8s.ProxySkipSubnetsAnnotation,\n\n\t\tk8s.ProxyAccessLogAnnotation,\n\n\t}\n\n\t// ProxyAlphaConfigAnnotations is the list of all alpha configuration\n\n\t// (config.alpha prefix) that can be applied to a pod or namespace.\n\n\tProxyAlphaConfigAnnotations = []string{\n\n\t\tk8s.ProxyWaitBeforeExitSecondsAnnotation,\n\n\t}\n\n)\n\n\n\n// Origin defines where the input YAML comes from. Refer the ResourceConfig's\n\n// 'origin' field\n\ntype Origin int\n\n\n\nconst (\n\n\t// OriginCLI is the value of the ResourceConfig's 'origin' field if the input\n\n\t// YAML comes from the CLI\n\n\tOriginCLI Origin = iota\n\n\n\n\t// OriginWebhook is the value of the ResourceConfig's 'origin' field if the input\n\n\t// YAML comes from the CLI\n\n\tOriginWebhook\n\n\n\n\t// OriginUnknown is the value of the ResourceConfig's 'origin' field if the\n\n\t// input YAML comes from an unknown source\n\n\tOriginUnknown\n\n)\n\n\n\n// OwnerRetrieverFunc is a function that returns a pod's owner reference\n\n// kind and name\n\ntype OwnerRetrieverFunc func(*corev1.Pod) (string, string)\n\n\n\n// ResourceConfig contains the parsed information for a given workload\n\ntype ResourceConfig struct {\n\n\t// These values used for the rendering of the patch may be further\n\n\t// overridden by the annotations on the resource or the resource's\n\n\t// namespace.\n\n\tvalues *l5dcharts.Values\n\n\n\n\tnamespace string\n\n\n\n\t// These annotations from the resources's namespace are used as a base.\n\n\t// The resources's annotations will be applied on top of these, which\n\n\t// allows the nsAnnotations to act as a default.\n\n\tnsAnnotations map[string]string\n\n\townerRetriever OwnerRetrieverFunc\n\n\torigin Origin\n\n\n\n\tworkload struct {\n\n\t\tobj runtime.Object\n\n\t\tmetaType metav1.TypeMeta\n\n\t\t// Meta is the workload's metadata. 
It's exported so that metadata of\n\n\t\t// non-workload resources can be unmarshalled by the YAML parser\n\n\t\tMeta *metav1.ObjectMeta `json:\"metadata,omitempty\" protobuf:\"bytes,1,opt,name=metadata\"`\n\n\t\townerRef *metav1.OwnerReference\n\n\t}\n\n\n\n\tpod struct {\n\n\t\tmeta *metav1.ObjectMeta\n\n\t\t// This fields hold labels and annotations which are to be added to the\n\n\t\t// injected resource. This is different from meta.Labels and\n\n\t\t// meta.Annotations which are the labels and annotations on the original\n\n\t\t// resource before injection.\n\n\t\tlabels map[string]string\n\n\t\tannotations map[string]string\n\n\t\tspec *corev1.PodSpec\n\n\t}\n\n}\n\n\n\ntype podPatch struct {\n\n\tl5dcharts.Values\n\n\tPathPrefix string `json:\"pathPrefix\"`\n\n\tAddRootMetadata bool `json:\"addRootMetadata\"`\n\n\tAddRootAnnotations bool `json:\"addRootAnnotations\"`\n\n\tAnnotations map[string]string `json:\"annotations\"`\n\n\tAddRootLabels bool `json:\"addRootLabels\"`\n\n\tAddRootInitContainers bool `json:\"addRootInitContainers\"`\n\n\tAddRootVolumes bool `json:\"addRootVolumes\"`\n\n\tLabels map[string]string `json:\"labels\"`\n\n\tDebugContainer *l5dcharts.DebugContainer `json:\"debugContainer\"`\n\n}\n\n\n\ntype annotationPatch struct {\n\n\tAddRootAnnotations bool\n\n\tOpaquePorts string\n\n}\n\n\n\n// NewResourceConfig creates and initializes a ResourceConfig\n\nfunc NewResourceConfig(values *l5dcharts.Values, origin Origin, ns string) *ResourceConfig {\n\n\tconfig := &ResourceConfig{\n\n\t\tnamespace: ns,\n\n\t\tnsAnnotations: make(map[string]string),\n\n\t\tvalues: values,\n\n\t\torigin: origin,\n\n\t}\n\n\n\n\tconfig.workload.Meta = &metav1.ObjectMeta{}\n\n\tconfig.pod.meta = &metav1.ObjectMeta{}\n\n\n\n\tconfig.pod.labels = map[string]string{k8s.ControllerNSLabel: ns}\n\n\tconfig.pod.annotations = map[string]string{}\n\n\treturn config\n\n}\n\n\n\n// WithKind enriches ResourceConfig with the workload kind\n\nfunc (conf *ResourceConfig) WithKind(kind string) *ResourceConfig {\n\n\tconf.workload.metaType = metav1.TypeMeta{Kind: kind}\n\n\treturn conf\n\n}\n\n\n\n// WithNsAnnotations enriches ResourceConfig with the namespace annotations, that can\n\n// be used in shouldInject()\n\nfunc (conf *ResourceConfig) WithNsAnnotations(m map[string]string) *ResourceConfig {\n\n\tconf.nsAnnotations = m\n\n\treturn conf\n\n}\n\n\n\n// WithOwnerRetriever enriches ResourceConfig with a function that allows to retrieve\n\n// the kind and name of the workload's owner reference\n\nfunc (conf *ResourceConfig) WithOwnerRetriever(f OwnerRetrieverFunc) *ResourceConfig {\n\n\tconf.ownerRetriever = f\n\n\treturn conf\n\n}\n\n\n\n// GetOwnerRef returns a reference to the resource's owner resource, if any\n\nfunc (conf *ResourceConfig) GetOwnerRef() *metav1.OwnerReference {\n\n\treturn conf.workload.ownerRef\n\n}\n\n\n\n// AppendNamespaceAnnotations allows pods to inherit config specific annotations\n\n// from the namespace they belong to. 
If the namespace has a valid config key\n\n// that the pod does not, then it is appended to the pod's template\n\nfunc (conf *ResourceConfig) AppendNamespaceAnnotations() {\n\n\tfor _, key := range ProxyAnnotations {\n\n\t\tif _, found := conf.nsAnnotations[key]; !found {\n\n\t\t\tcontinue\n\n\t\t}\n\n\t\tif val, ok := conf.GetConfigAnnotation(key); ok {\n\n\t\t\tconf.AppendPodAnnotation(key, val)\n\n\t\t}\n\n\t}\n\n\n\n\tfor _, key := range ProxyAlphaConfigAnnotations {\n\n\t\tif _, found := conf.nsAnnotations[key]; !found {\n\n\t\t\tcontinue\n\n\t\t}\n\n\t\tif val, ok := conf.GetConfigAnnotation(key); ok {\n\n\t\t\tconf.AppendPodAnnotation(key, val)\n\n\t\t}\n\n\t}\n\n}\n\n\n\n// AppendPodAnnotations appends the given annotations to the pod spec in conf\n\nfunc (conf *ResourceConfig) AppendPodAnnotations(annotations map[string]string) {\n\n\tfor annotation, value := range annotations {\n\n\t\tconf.pod.annotations[annotation] = value\n\n\t}\n\n}\n\n\n\n// AppendPodAnnotation appends the given single annotation to the pod spec in conf\n\nfunc (conf *ResourceConfig) AppendPodAnnotation(k, v string) {\n\n\tconf.pod.annotations[k] = v\n\n}\n\n\n\n// YamlMarshalObj returns the yaml for the workload in conf\n\nfunc (conf *ResourceConfig) YamlMarshalObj() ([]byte, error) {\n\n\tj, err := getFilteredJSON(conf.workload.obj)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn yaml.JSONToYAML(j)\n\n}\n\n\n\n// ParseMetaAndYAML extracts the workload metadata and pod specs from the given\n\n// input bytes. The results are stored in the conf's fields.\n\nfunc (conf *ResourceConfig) ParseMetaAndYAML(bytes []byte) (*Report, error) {\n\n\tif err := conf.parse(bytes); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\treturn newReport(conf), nil\n\n}\n\n\n\n// GetValues returns the values used for rendering patches.\n\nfunc (conf *ResourceConfig) GetValues() *linkerd2.Values {\n\n\treturn conf.values\n\n}\n\n\n\n// GetOverriddenValues returns the final Values struct which is created\n\n// by overriding annotated configuration on top of default Values\n\nfunc (conf *ResourceConfig) GetOverriddenValues() (*linkerd2.Values, error) {\n\n\t// Make a copy of Values and mutate that\n\n\tcopyValues, err := conf.values.DeepCopy()\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\tcopyValues.Proxy.PodInboundPorts = getPodInboundPorts(conf.pod.spec)\n\n\tconf.applyAnnotationOverrides(copyValues)\n\n\treturn copyValues, nil\n\n}\n\n\n\n// GetPodPatch returns the JSON patch containing the proxy and init containers specs, if any.\n\n// If injectProxy is false, only the config.linkerd.io annotations are set.\n\nfunc (conf *ResourceConfig) GetPodPatch(injectProxy bool) ([]byte, error) {\n\n\n\n\tvalues, err := conf.GetOverriddenValues()\n\n\tif err != nil {\n\n\t\treturn nil, fmt.Errorf(\"could not generate Overridden Values: %w\", err)\n\n\t}\n\n\n\n\tif values.ClusterNetworks != \"\" {\n\n\t\tfor _, network := range strings.Split(strings.Trim(values.ClusterNetworks, \",\"), \",\") {\n\n\t\t\tif _, _, err := net.ParseCIDR(network); err != nil {\n\n\t\t\t\treturn nil, fmt.Errorf(\"cannot parse destination get networks: %w\", err)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tpatch := &podPatch{\n\n\t\tValues: *values,\n\n\t\tAnnotations: map[string]string{},\n\n\t\tLabels: map[string]string{},\n\n\t}\n\n\tswitch strings.ToLower(conf.workload.metaType.Kind) {\n\n\tcase k8s.Pod:\n\n\tcase k8s.CronJob:\n\n\t\tpatch.PathPrefix = \"/spec/jobTemplate/spec/template\"\n\n\tdefault:\n\n\t\tpatch.PathPrefix = 
\"/spec/template\"\n\n\t}\n\n\n\n\tif conf.pod.spec != nil {\n\n\t\tconf.injectPodAnnotations(patch)\n\n\t\tif injectProxy {\n\n\t\t\tconf.injectObjectMeta(patch)\n\n\t\t\tconf.injectPodSpec(patch)\n\n\t\t} else {\n\n\t\t\tpatch.Proxy = nil\n\n\t\t\tpatch.ProxyInit = nil\n\n\t\t}\n\n\t}\n\n\n\n\trawValues, err := yaml.Marshal(patch)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\tfiles := []*loader.BufferedFile{\n\n\t\t{Name: chartutil.ChartfileName},\n\n\t\t{Name: \"requirements.yaml\"},\n\n\t\t{Name: \"templates/patch.json\"},\n\n\t}\n\n\n\n\tchart := &charts.Chart{\n\n\t\tName: \"patch\",\n\n\t\tDir: \"patch\",\n\n\t\tNamespace: conf.namespace,\n\n\t\tRawValues: rawValues,\n\n\t\tFiles: files,\n\n\t\tFs: static.Templates,\n\n\t}\n\n\tbuf, err := chart.Render()\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\t// Get rid of invalid trailing commas\n\n\tres := rTrail.ReplaceAll(buf.Bytes(), []byte(\"}\\n]\"))\n\n\n\n\treturn res, nil\n\n}\n\n\n\n// GetConfigAnnotation returns two values. The first value is the the annotation\n\n// value for a given key. The second is used to decide whether or not the caller\n\n// should add the annotation. The caller should not add the annotation if the\n\n// resource already has its own.\n\nfunc (conf *ResourceConfig) GetConfigAnnotation(annotationKey string) (string, bool) {\n\n\t_, ok := conf.pod.meta.Annotations[annotationKey]\n\n\tif ok {\n\n\t\tlog.Debugf(\"using pod %s %s annotation value\", conf.pod.meta.Name, annotationKey)\n\n\t\treturn \"\", false\n\n\t}\n\n\t_, ok = conf.workload.Meta.Annotations[annotationKey]\n\n\tif ok {\n\n\t\tlog.Debugf(\"using service %s %s annotation value\", conf.workload.Meta.Name, annotationKey)\n\n\t\treturn \"\", false\n\n\t}\n\n\tannotation, ok := conf.nsAnnotations[annotationKey]\n\n\tif ok {\n\n\t\tlog.Debugf(\"using namespace %s %s annotation value\", conf.workload.Meta.Namespace, annotationKey)\n\n\t\treturn annotation, true\n\n\t}\n\n\treturn \"\", false\n\n}\n\n\n\n// CreateOpaquePortsPatch creates a patch that will add the default\n\n// list of opaque ports.\n\nfunc (conf *ResourceConfig) CreateOpaquePortsPatch() ([]byte, error) {\n\n\tif conf.HasWorkloadAnnotation(k8s.ProxyOpaquePortsAnnotation) {\n\n\t\t// The workload already has the opaque ports annotation so a patch\n\n\t\t// does not need to be created.\n\n\t\treturn nil, nil\n\n\t}\n\n\topaquePorts, ok := conf.GetConfigAnnotation(k8s.ProxyOpaquePortsAnnotation)\n\n\tif ok {\n\n\t\t// The workload's namespace has the opaque ports annotation, so it\n\n\t\t// should inherit that value. 
A patch is created which adds that\n\n\t\t// list.\n\n\t\treturn conf.CreateAnnotationPatch(opaquePorts)\n\n\t}\n\n\n\n\t// Both the workload and the namespace do not have the annotation so a\n\n\t// patch is created which adds the default list.\n\n\tdefaultPorts := strings.Split(conf.GetValues().Proxy.OpaquePorts, \",\")\n\n\tvar filteredPorts []string\n\n\tif conf.IsPod() {\n\n\t\t// The workload is a pod so only add the default opaque ports that it\n\n\t\t// exposes as container ports.\n\n\t\tfilteredPorts = conf.FilterPodOpaquePorts(defaultPorts)\n\n\t} else if conf.IsService() {\n\n\t\t// The workload is a service so only add the default opaque ports that\n\n\t\t// are exposed as a service port, or targeted as a targetPort.\n\n\t\tservice := conf.workload.obj.(*corev1.Service)\n\n\t\tfor _, p := range service.Spec.Ports {\n\n\t\t\tport := strconv.Itoa(int(p.Port))\n\n\t\t\tif p.TargetPort.Type == 0 && p.TargetPort.IntVal == 0 {\n\n\t\t\t\t// The port's targetPort is not set, so add the port if is\n\n\t\t\t\t// opaque by default. Checking that targetPort is not set\n\n\t\t\t\t// avoids marking a port as opaque if it targets a port that\n\n\t\t\t\t// not opaque (e.g. port=3306 and targetPort=80; 3306 should\n\n\t\t\t\t// not be opaque)\n\n\t\t\t\tif util.ContainsString(port, defaultPorts) {\n\n\t\t\t\t\tfilteredPorts = append(filteredPorts, port)\n\n\t\t\t\t}\n\n\t\t\t} else if util.ContainsString(strconv.Itoa(int(p.TargetPort.IntVal)), defaultPorts) {\n\n\t\t\t\t// The port's targetPort is set; if it is opaque then port\n\n\t\t\t\t// should also be opaque.\n\n\t\t\t\tfilteredPorts = append(filteredPorts, port)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tif len(filteredPorts) == 0 {\n\n\t\t// There are no default opaque ports to add so a patch does not need\n\n\t\t// to be created.\n\n\t\treturn nil, nil\n\n\t}\n\n\tports := strings.Join(filteredPorts, \",\")\n\n\treturn conf.CreateAnnotationPatch(ports)\n\n}\n\n\n\n// FilterPodOpaquePorts returns a list of opaque ports that a pod exposes that\n\n// are also in the given default opaque ports list.\n\nfunc (conf *ResourceConfig) FilterPodOpaquePorts(defaultPorts []string) []string {\n\n\tvar filteredPorts []string\n\n\tfor _, c := range conf.pod.spec.Containers {\n\n\t\tfor _, p := range c.Ports {\n\n\t\t\tport := strconv.Itoa(int(p.ContainerPort))\n\n\t\t\tif util.ContainsString(port, defaultPorts) {\n\n\t\t\t\tfilteredPorts = append(filteredPorts, port)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\treturn filteredPorts\n\n}\n\n\n\n// HasWorkloadAnnotation returns true if the workload has the annotation set\n\n// by the resource config or its metadata.\n\nfunc (conf *ResourceConfig) HasWorkloadAnnotation(annotation string) bool {\n\n\tif _, ok := conf.pod.meta.Annotations[annotation]; ok {\n\n\t\treturn true\n\n\t}\n\n\tif _, ok := conf.workload.Meta.Annotations[annotation]; ok {\n\n\t\treturn true\n\n\t}\n\n\t_, ok := conf.pod.annotations[annotation]\n\n\treturn ok\n\n}\n\n\n\n// CreateAnnotationPatch returns a json patch which adds the opaque ports\n\n// annotation with the `opaquePorts` value.\n\nfunc (conf *ResourceConfig) CreateAnnotationPatch(opaquePorts string) ([]byte, error) {\n\n\taddRootAnnotations := false\n\n\tif conf.IsPod() {\n\n\t\taddRootAnnotations = len(conf.pod.meta.Annotations) == 0\n\n\t} else {\n\n\t\taddRootAnnotations = len(conf.workload.Meta.Annotations) == 0\n\n\t}\n\n\n\n\tpatch := &annotationPatch{\n\n\t\tAddRootAnnotations: addRootAnnotations,\n\n\t\tOpaquePorts: opaquePorts,\n\n\t}\n\n\tt, err := 
template.New(\"tpl\").Parse(tpl)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tvar patchJSON bytes.Buffer\n\n\tif err = t.Execute(&patchJSON, patch); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn patchJSON.Bytes(), nil\n\n}\n\n\n\n// Note this switch also defines what kinds are injectable\n\nfunc (conf *ResourceConfig) getFreshWorkloadObj() runtime.Object {\n\n\tswitch strings.ToLower(conf.workload.metaType.Kind) {\n\n\tcase k8s.Deployment:\n\n\t\treturn &appsv1.Deployment{}\n\n\tcase k8s.ReplicationController:\n\n\t\treturn &corev1.ReplicationController{}\n\n\tcase k8s.ReplicaSet:\n\n\t\treturn &appsv1.ReplicaSet{}\n\n\tcase k8s.Job:\n\n\t\treturn &batchv1.Job{}\n\n\tcase k8s.DaemonSet:\n\n\t\treturn &appsv1.DaemonSet{}\n\n\tcase k8s.StatefulSet:\n\n\t\treturn &appsv1.StatefulSet{}\n\n\tcase k8s.Pod:\n\n\t\treturn &corev1.Pod{}\n\n\tcase k8s.Namespace:\n\n\t\treturn &corev1.Namespace{}\n\n\tcase k8s.CronJob:\n\n\t\treturn &batchv1.CronJob{}\n\n\tcase k8s.Service:\n\n\t\treturn &corev1.Service{}\n\n\t}\n\n\n\n\treturn nil\n\n}\n\n\n\n// JSONToYAML is a replacement for the same function in sigs.k8s.io/yaml\n\n// that does conserve the field order as portrayed in k8s' api structs\n\nfunc (conf *ResourceConfig) JSONToYAML(bytes []byte) ([]byte, error) {\n\n\tobj := conf.getFreshWorkloadObj()\n\n\tif err := json.Unmarshal(bytes, obj); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\tj, err := getFilteredJSON(obj)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn yaml.JSONToYAML(j)\n\n}\n\n\n\n// parse parses the bytes payload, filling the gaps in ResourceConfig\n\n// depending on the workload kind\n\nfunc (conf *ResourceConfig) parse(bytes []byte) error {\n\n\t// The Kubernetes API is versioned and each version has an API modeled\n\n\t// with its own distinct Go types. If we tell `yaml.Unmarshal()` which\n\n\t// version we support then it will provide a representation of that\n\n\t// object using the given type if possible. However, it only allows us\n\n\t// to supply one object (of one type), so first we have to determine\n\n\t// what kind of object `bytes` represents so we can pass an object of\n\n\t// the correct type to `yaml.Unmarshal()`.\n\n\t// ---------------------------------------\n\n\t// Note: bytes is expected to be YAML and will only modify it when a\n\n\t// supported type is found. Otherwise, conf is left unmodified.\n\n\n\n\t// When injecting the linkerd proxy into a linkerd controller pod. The linkerd proxy's\n\n\t// LINKERD2_PROXY_DESTINATION_SVC_ADDR variable must be set to localhost for\n\n\t// the following reasons:\n\n\t//\t1. According to https://github.com/kubernetes/minikube/issues/1568, minikube has an issue\n\n\t// where pods are unable to connect to themselves through their associated service IP.\n\n\t// Setting the LINKERD2_PROXY_DESTINATION_SVC_ADDR to localhost allows the\n\n\t// proxy to bypass kube DNS name resolution as a workaround to this issue.\n\n\t// 2. We avoid the TLS overhead in encrypting and decrypting intra-pod traffic i.e. traffic\n\n\t// between containers in the same pod.\n\n\t// 3. Using a Service IP instead of localhost would mean intra-pod traffic would be load-balanced\n\n\t// across all controller pod replicas. This is undesirable as we would want all traffic between\n\n\t//\t containers to be self contained.\n\n\t// 4. 
We skip recording telemetry for intra-pod traffic within the control plane.\n\n\n\n\tif err := yaml.Unmarshal(bytes, &conf.workload.metaType); err != nil {\n\n\t\treturn err\n\n\t}\n\n\tobj := conf.getFreshWorkloadObj()\n\n\n\n\tswitch v := obj.(type) {\n\n\tcase *appsv1.Deployment:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tconf.pod.labels[k8s.ProxyDeploymentLabel] = v.Name\n\n\t\tconf.pod.labels[k8s.WorkloadNamespaceLabel] = v.Namespace\n\n\t\tconf.complete(&v.Spec.Template)\n\n\n\n\tcase *corev1.ReplicationController:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tconf.pod.labels[k8s.ProxyReplicationControllerLabel] = v.Name\n\n\t\tconf.pod.labels[k8s.WorkloadNamespaceLabel] = v.Namespace\n\n\t\tconf.complete(v.Spec.Template)\n\n\n\n\tcase *appsv1.ReplicaSet:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tconf.pod.labels[k8s.ProxyReplicaSetLabel] = v.Name\n\n\t\tconf.pod.labels[k8s.WorkloadNamespaceLabel] = v.Namespace\n\n\t\tconf.complete(&v.Spec.Template)\n\n\n\n\tcase *batchv1.Job:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tconf.pod.labels[k8s.ProxyJobLabel] = v.Name\n\n\t\tconf.pod.labels[k8s.WorkloadNamespaceLabel] = v.Namespace\n\n\t\tconf.complete(&v.Spec.Template)\n\n\n\n\tcase *appsv1.DaemonSet:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tconf.pod.labels[k8s.ProxyDaemonSetLabel] = v.Name\n\n\t\tconf.pod.labels[k8s.WorkloadNamespaceLabel] = v.Namespace\n\n\t\tconf.complete(&v.Spec.Template)\n\n\n\n\tcase *appsv1.StatefulSet:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tconf.pod.labels[k8s.ProxyStatefulSetLabel] = v.Name\n\n\t\tconf.pod.labels[k8s.WorkloadNamespaceLabel] = v.Namespace\n\n\t\tconf.complete(&v.Spec.Template)\n\n\n\n\tcase *corev1.Namespace:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tif conf.workload.Meta.Annotations == nil {\n\n\t\t\tconf.workload.Meta.Annotations = map[string]string{}\n\n\t\t}\n\n\n\n\tcase *batchv1.CronJob:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tconf.pod.labels[k8s.ProxyCronJobLabel] = v.Name\n\n\t\tconf.pod.labels[k8s.WorkloadNamespaceLabel] = v.Namespace\n\n\t\tconf.complete(&v.Spec.JobTemplate.Spec.Template)\n\n\n\n\tcase *corev1.Pod:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\n\n\t\tconf.workload.obj = v\n\n\t\tconf.pod.spec = &v.Spec\n\n\t\tconf.pod.meta = &v.ObjectMeta\n\n\n\n\t\tif conf.ownerRetriever != nil {\n\n\t\t\tkind, name := conf.ownerRetriever(v)\n\n\t\t\tconf.workload.ownerRef = &metav1.OwnerReference{Kind: kind, Name: name}\n\n\t\t\tswitch kind {\n\n\t\t\tcase 
k8s.Deployment:\n\n\t\t\t\tconf.pod.labels[k8s.ProxyDeploymentLabel] = name\n\n\t\t\tcase k8s.ReplicationController:\n\n\t\t\t\tconf.pod.labels[k8s.ProxyReplicationControllerLabel] = name\n\n\t\t\tcase k8s.ReplicaSet:\n\n\t\t\t\tconf.pod.labels[k8s.ProxyReplicaSetLabel] = name\n\n\t\t\tcase k8s.Job:\n\n\t\t\t\tconf.pod.labels[k8s.ProxyJobLabel] = name\n\n\t\t\tcase k8s.DaemonSet:\n\n\t\t\t\tconf.pod.labels[k8s.ProxyDaemonSetLabel] = name\n\n\t\t\tcase k8s.StatefulSet:\n\n\t\t\t\tconf.pod.labels[k8s.ProxyStatefulSetLabel] = name\n\n\t\t\t}\n\n\t\t}\n\n\t\tconf.pod.labels[k8s.WorkloadNamespaceLabel] = v.Namespace\n\n\t\tif conf.pod.meta.Annotations == nil {\n\n\t\t\tconf.pod.meta.Annotations = map[string]string{}\n\n\t\t}\n\n\n\n\tcase *corev1.Service:\n\n\t\tif err := yaml.Unmarshal(bytes, v); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\t\tconf.workload.obj = v\n\n\t\tconf.workload.Meta = &v.ObjectMeta\n\n\t\tif conf.workload.Meta.Annotations == nil {\n\n\t\t\tconf.workload.Meta.Annotations = map[string]string{}\n\n\t\t}\n\n\n\n\tdefault:\n\n\t\t// unmarshal the metadata of other resource kinds like namespace, secret,\n\n\t\t// config map etc. to be used in the report struct\n\n\t\tif err := yaml.Unmarshal(bytes, &conf.workload); err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\t}\n\n\n\n\treturn nil\n\n}\n\n\n\nfunc (conf *ResourceConfig) complete(template *corev1.PodTemplateSpec) {\n\n\tconf.pod.spec = &template.Spec\n\n\tconf.pod.meta = &template.ObjectMeta\n\n\tif conf.pod.meta.Annotations == nil {\n\n\t\tconf.pod.meta.Annotations = map[string]string{}\n\n\t}\n\n}\n\n\n\n// injectPodSpec adds linkerd sidecars to the provided PodSpec.\n\nfunc (conf *ResourceConfig) injectPodSpec(values *podPatch) {\n\n\tsaVolumeMount := conf.serviceAccountVolumeMount()\n\n\n\n\t// use the primary container's capabilities to ensure psp compliance, if\n\n\t// enabled\n\n\tif conf.pod.spec.Containers != nil && len(conf.pod.spec.Containers) > 0 {\n\n\t\tif sc := conf.pod.spec.Containers[0].SecurityContext; sc != nil && sc.Capabilities != nil {\n\n\t\t\tvalues.Proxy.Capabilities = &l5dcharts.Capabilities{\n\n\t\t\t\tAdd: []string{},\n\n\t\t\t\tDrop: []string{},\n\n\t\t\t}\n\n\t\t\tfor _, add := range sc.Capabilities.Add {\n\n\t\t\t\tvalues.Proxy.Capabilities.Add = append(values.Proxy.Capabilities.Add, string(add))\n\n\t\t\t}\n\n\t\t\tfor _, drop := range sc.Capabilities.Drop {\n\n\t\t\t\tvalues.Proxy.Capabilities.Drop = append(values.Proxy.Capabilities.Drop, string(drop))\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tif saVolumeMount != nil {\n\n\t\tvalues.Proxy.SAMountPath = &l5dcharts.VolumeMountPath{\n\n\t\t\tName: saVolumeMount.Name,\n\n\t\t\tMountPath: saVolumeMount.MountPath,\n\n\t\t\tReadOnly: saVolumeMount.ReadOnly,\n\n\t\t}\n\n\t}\n\n\n\n\tif v := conf.pod.meta.Annotations[k8s.ProxyEnableDebugAnnotation]; v != \"\" {\n\n\t\tdebug, err := strconv.ParseBool(v)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"unrecognized value used for the %s annotation: %s\", k8s.ProxyEnableDebugAnnotation, v)\n\n\t\t\tdebug = false\n\n\t\t}\n\n\n\n\t\tif debug {\n\n\t\t\tlog.Infof(\"inject debug container\")\n\n\t\t\tvalues.DebugContainer = &l5dcharts.DebugContainer{\n\n\t\t\t\tImage: &l5dcharts.Image{\n\n\t\t\t\t\tName: conf.values.DebugContainer.Image.Name,\n\n\t\t\t\t\tVersion: conf.values.DebugContainer.Image.Version,\n\n\t\t\t\t\tPullPolicy: conf.values.DebugContainer.Image.PullPolicy,\n\n\t\t\t\t},\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tconf.injectProxyInit(values)\n\n\tvalues.AddRootVolumes = len(conf.pod.spec.Volumes) == 
0\n\n}\n\n\n\nfunc (conf *ResourceConfig) injectProxyInit(values *podPatch) {\n\n\n\n\t// Fill common fields from Proxy into ProxyInit\n\n\tif values.Proxy.Capabilities != nil {\n\n\t\tvalues.ProxyInit.Capabilities = &l5dcharts.Capabilities{}\n\n\t\tvalues.ProxyInit.Capabilities.Add = values.Proxy.Capabilities.Add\n\n\t\tvalues.ProxyInit.Capabilities.Drop = []string{}\n\n\t\tfor _, drop := range values.Proxy.Capabilities.Drop {\n\n\t\t\t// Skip NET_RAW and NET_ADMIN as the init container requires them to setup iptables.\n\n\t\t\tif drop == \"NET_RAW\" || drop == \"NET_ADMIN\" {\n\n\t\t\t\tcontinue\n\n\t\t\t}\n\n\t\t\tvalues.ProxyInit.Capabilities.Drop = append(values.ProxyInit.Capabilities.Drop, drop)\n\n\t\t}\n\n\t}\n\n\n\n\tvalues.ProxyInit.SAMountPath = values.Proxy.SAMountPath\n\n\n\n\tif v := conf.pod.meta.Annotations[k8s.CloseWaitTimeoutAnnotation]; v != \"\" {\n\n\t\tcloseWait, err := time.ParseDuration(v)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"invalid duration value used for the %s annotation: %s\", k8s.CloseWaitTimeoutAnnotation, v)\n\n\t\t} else {\n\n\t\t\tvalues.ProxyInit.CloseWaitTimeoutSecs = int64(closeWait.Seconds())\n\n\t\t}\n\n\t}\n\n\n\n\tvalues.AddRootInitContainers = len(conf.pod.spec.InitContainers) == 0\n\n\n\n}\n\n\n\nfunc (conf *ResourceConfig) serviceAccountVolumeMount() *corev1.VolumeMount {\n\n\t// Probably always true, but want to be super-safe\n\n\tif containers := conf.pod.spec.Containers; len(containers) > 0 {\n\n\t\tfor _, vm := range containers[0].VolumeMounts {\n\n\t\t\tif vm.MountPath == k8s.MountPathServiceAccount {\n\n\t\t\t\tvm := vm // pin\n\n\t\t\t\treturn &vm\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\treturn nil\n\n}\n\n\n\n// Given a ObjectMeta, update ObjectMeta in place with the new labels and\n\n// annotations.\n\nfunc (conf *ResourceConfig) injectObjectMeta(values *podPatch) {\n\n\n\n\tvalues.Annotations[k8s.ProxyVersionAnnotation] = values.Proxy.Image.Version\n\n\n\n\tif len(conf.pod.labels) > 0 {\n\n\t\tvalues.AddRootLabels = len(conf.pod.meta.Labels) == 0\n\n\t\tfor _, k := range sortedKeys(conf.pod.labels) {\n\n\t\t\tvalues.Labels[k] = conf.pod.labels[k]\n\n\t\t}\n\n\t}\n\n}\n\n\n\nfunc (conf *ResourceConfig) injectPodAnnotations(values *podPatch) {\n\n\t// ObjectMetaAnnotations.Annotations is nil for new empty structs, but we always initialize\n\n\t// it to an empty map in parse() above, so we follow suit here.\n\n\temptyMeta := &metav1.ObjectMeta{Annotations: map[string]string{}}\n\n\t// Cronjobs might have an empty `spec.jobTemplate.spec.template.metadata`\n\n\t// field so we make sure to create it if needed, before attempting adding annotations\n\n\tvalues.AddRootMetadata = reflect.DeepEqual(conf.pod.meta, emptyMeta)\n\n\tvalues.AddRootAnnotations = len(conf.pod.meta.Annotations) == 0\n\n\n\n\tfor _, k := range sortedKeys(conf.pod.annotations) {\n\n\t\tvalues.Annotations[k] = conf.pod.annotations[k]\n\n\n\n\t\t// append any additional pod annotations to the pod's meta.\n\n\t\t// for e.g., annotations that were converted from CLI inject options.\n\n\t\tconf.pod.meta.Annotations[k] = conf.pod.annotations[k]\n\n\t}\n\n}\n\n\n\nfunc (conf *ResourceConfig) applyAnnotationOverrides(values *l5dcharts.Values) {\n\n\tannotations := make(map[string]string)\n\n\tfor k, v := range conf.pod.meta.Annotations {\n\n\t\tannotations[k] = v\n\n\t}\n\n\n\n\t// If injecting from CLI, skip applying overrides from new annotations;\n\n\t// overrides in this case should already be applied through flags.\n\n\tif conf.origin != OriginCLI {\n\n\t\t// Override base 
values inferred from current pod annotations with\n\n\t\t// values from annotations that will be applied to pod after the patch.\n\n\t\tfor k, v := range conf.pod.annotations {\n\n\t\t\tannotations[k] = v\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyInjectAnnotation]; ok {\n\n\t\tif override == k8s.ProxyInjectIngress {\n\n\t\t\tvalues.Proxy.IsIngress = true\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyImageAnnotation]; ok {\n\n\t\tvalues.Proxy.Image.Name = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyVersionOverrideAnnotation]; ok {\n\n\t\tvalues.Proxy.Image.Version = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyImagePullPolicyAnnotation]; ok {\n\n\t\tvalues.Proxy.Image.PullPolicy = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyInitImageVersionAnnotation]; ok {\n\n\t\tvalues.ProxyInit.Image.Version = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyControlPortAnnotation]; ok {\n\n\t\tcontrolPort, err := strconv.ParseInt(override, 10, 32)\n\n\t\tif err == nil {\n\n\t\t\tvalues.Proxy.Ports.Control = int32(controlPort)\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyInboundPortAnnotation]; ok {\n\n\t\tinboundPort, err := strconv.ParseInt(override, 10, 32)\n\n\t\tif err == nil {\n\n\t\t\tvalues.Proxy.Ports.Inbound = int32(inboundPort)\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyAdminPortAnnotation]; ok {\n\n\t\tadminPort, err := strconv.ParseInt(override, 10, 32)\n\n\t\tif err == nil {\n\n\t\t\tvalues.Proxy.Ports.Admin = int32(adminPort)\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyOutboundPortAnnotation]; ok {\n\n\t\toutboundPort, err := strconv.ParseInt(override, 10, 32)\n\n\t\tif err == nil {\n\n\t\t\tvalues.Proxy.Ports.Outbound = int32(outboundPort)\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyPodInboundPortsAnnotation]; ok {\n\n\t\tvalues.Proxy.PodInboundPorts = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyLogLevelAnnotation]; ok {\n\n\t\tvalues.Proxy.LogLevel = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyLogFormatAnnotation]; ok {\n\n\t\tvalues.Proxy.LogFormat = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyRequireIdentityOnInboundPortsAnnotation]; ok {\n\n\t\tvalues.Proxy.RequireIdentityOnInboundPorts = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyOutboundConnectTimeout]; ok {\n\n\t\tduration, err := time.ParseDuration(override)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"unrecognized proxy-outbound-connect-timeout duration value found on pod annotation: %s\", err.Error())\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.OutboundConnectTimeout = fmt.Sprintf(\"%dms\", int(duration.Seconds()*1000))\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyInboundConnectTimeout]; ok {\n\n\t\tduration, err := time.ParseDuration(override)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"unrecognized proxy-inbound-connect-timeout duration value found on pod annotation: %s\", err.Error())\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.InboundConnectTimeout = fmt.Sprintf(\"%dms\", int(duration.Seconds()*1000))\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyEnableGatewayAnnotation]; ok {\n\n\t\tvalue, err := strconv.ParseBool(override)\n\n\t\tif err == nil {\n\n\t\t\tvalues.Proxy.IsGateway = value\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyWaitBeforeExitSecondsAnnotation]; ok 
{\n\n\t\twaitBeforeExitSeconds, err := strconv.ParseUint(override, 10, 64)\n\n\t\tif nil != err {\n\n\t\t\tlog.Warnf(\"unrecognized value used for the %s annotation, uint64 is expected: %s\",\n\n\t\t\t\tk8s.ProxyWaitBeforeExitSecondsAnnotation, override)\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.WaitBeforeExitSeconds = waitBeforeExitSeconds\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyCPURequestAnnotation]; ok {\n\n\t\t_, err := k8sResource.ParseQuantity(override)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"%s (%s)\", err, k8s.ProxyCPURequestAnnotation)\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.Resources.CPU.Request = override\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyMemoryRequestAnnotation]; ok {\n\n\t\t_, err := k8sResource.ParseQuantity(override)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"%s (%s)\", err, k8s.ProxyMemoryRequestAnnotation)\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.Resources.Memory.Request = override\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyEphemeralStorageRequestAnnotation]; ok {\n\n\t\t_, err := k8sResource.ParseQuantity(override)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"%s (%s)\", err, k8s.ProxyEphemeralStorageRequestAnnotation)\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.Resources.EphemeralStorage.Request = override\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyCPULimitAnnotation]; ok {\n\n\t\tq, err := k8sResource.ParseQuantity(override)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"%s (%s)\", err, k8s.ProxyCPULimitAnnotation)\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.Resources.CPU.Limit = override\n\n\n\n\t\t\tn, err := ToWholeCPUCores(q)\n\n\t\t\tif err != nil {\n\n\t\t\t\tlog.Warnf(\"%s (%s)\", err, k8s.ProxyCPULimitAnnotation)\n\n\t\t\t}\n\n\t\t\tvalues.Proxy.Cores = n\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyMemoryLimitAnnotation]; ok {\n\n\t\t_, err := k8sResource.ParseQuantity(override)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"%s (%s)\", err, k8s.ProxyMemoryLimitAnnotation)\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.Resources.Memory.Limit = override\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyEphemeralStorageLimitAnnotation]; ok {\n\n\t\t_, err := k8sResource.ParseQuantity(override)\n\n\t\tif err != nil {\n\n\t\t\tlog.Warnf(\"%s (%s)\", err, k8s.ProxyEphemeralStorageLimitAnnotation)\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.Resources.EphemeralStorage.Limit = override\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyUIDAnnotation]; ok {\n\n\t\tv, err := strconv.ParseInt(override, 10, 64)\n\n\t\tif err == nil {\n\n\t\t\tvalues.Proxy.UID = v\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyEnableExternalProfilesAnnotation]; ok {\n\n\t\tvalue, err := strconv.ParseBool(override)\n\n\t\tif err == nil {\n\n\t\t\tvalues.Proxy.EnableExternalProfiles = value\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyInitImageAnnotation]; ok {\n\n\t\tvalues.ProxyInit.Image.Name = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyImagePullPolicyAnnotation]; ok {\n\n\t\tvalues.ProxyInit.Image.PullPolicy = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyIgnoreInboundPortsAnnotation]; ok {\n\n\t\tvalues.ProxyInit.IgnoreInboundPorts = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyIgnoreOutboundPortsAnnotation]; ok {\n\n\t\tvalues.ProxyInit.IgnoreOutboundPorts = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyOpaquePortsAnnotation]; ok {\n\n\t\topaquePortsStrs := 
util.ParseContainerOpaquePorts(override, conf.pod.spec.Containers)\n\n\t\tvalues.Proxy.OpaquePorts = strings.Join(opaquePortsStrs, \",\")\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.DebugImageAnnotation]; ok {\n\n\t\tvalues.DebugContainer.Image.Name = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.DebugImageVersionAnnotation]; ok {\n\n\t\tvalues.DebugContainer.Image.Version = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.DebugImagePullPolicyAnnotation]; ok {\n\n\t\tvalues.DebugContainer.Image.PullPolicy = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyAwait]; ok {\n\n\t\tif override == k8s.Enabled || override == k8s.Disabled {\n\n\t\t\tvalues.Proxy.Await = override == k8s.Enabled\n\n\t\t} else {\n\n\t\t\tlog.Warnf(\"unrecognized value used for the %s annotation, valid values are: [%s, %s]\", k8s.ProxyAwait, k8s.Enabled, k8s.Disabled)\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyDefaultInboundPolicyAnnotation]; ok {\n\n\t\tif override != k8s.AllUnauthenticated && override != k8s.AllAuthenticated && override != k8s.ClusterUnauthenticated && override != k8s.ClusterAuthenticated && override != k8s.Deny {\n\n\t\t\tlog.Warnf(\"unrecognized value used for the %s annotation, valid values are: [%s, %s, %s, %s, %s]\", k8s.ProxyDefaultInboundPolicyAnnotation, k8s.AllUnauthenticated, k8s.AllAuthenticated, k8s.ClusterUnauthenticated, k8s.ClusterAuthenticated, k8s.Deny)\n\n\t\t} else {\n\n\t\t\tvalues.Proxy.DefaultInboundPolicy = override\n\n\t\t}\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxySkipSubnetsAnnotation]; ok {\n\n\t\tvalues.ProxyInit.SkipSubnets = override\n\n\t}\n\n\n\n\tif override, ok := annotations[k8s.ProxyAccessLogAnnotation]; ok {\n\n\t\tvalues.Proxy.AccessLog = override\n\n\t}\n\n}\n\n\n\n// GetOverriddenConfiguration returns a map of the overridden proxy annotations\n\nfunc (conf *ResourceConfig) GetOverriddenConfiguration() map[string]string {\n\n\tproxyOverrideConfig := map[string]string{}\n\n\tfor _, annotation := range ProxyAnnotations {\n\n\t\tproxyOverrideConfig[annotation] = conf.pod.meta.Annotations[annotation]\n\n\t}\n\n\n\n\treturn proxyOverrideConfig\n\n}\n\n\n\n// IsControlPlaneComponent returns true if the component is part of linkerd control plane\n\nfunc (conf *ResourceConfig) IsControlPlaneComponent() bool {\n\n\t_, b := conf.pod.meta.Labels[k8s.ControllerComponentLabel]\n\n\treturn b\n\n}\n\n\n\nfunc sortedKeys(m map[string]string) []string {\n\n\tkeys := []string{}\n\n\tfor k := range m {\n\n\t\tkeys = append(keys, k)\n\n\t}\n\n\n\n\tsort.Strings(keys)\n\n\n\n\treturn keys\n\n}\n\n\n\n// IsNamespace checks if a given config is a workload of Kind namespace\n\nfunc (conf *ResourceConfig) IsNamespace() bool {\n\n\treturn strings.ToLower(conf.workload.metaType.Kind) == k8s.Namespace\n\n}\n\n\n\n// IsService checks if a given config is a workload of Kind service\n\nfunc (conf *ResourceConfig) IsService() bool {\n\n\treturn strings.ToLower(conf.workload.metaType.Kind) == k8s.Service\n\n}\n\n\n\n// IsPod checks if a given config is a workload of Kind pod.\n\nfunc (conf *ResourceConfig) IsPod() bool {\n\n\treturn strings.ToLower(conf.workload.metaType.Kind) == k8s.Pod\n\n}\n\n\n\n// HasPodTemplate checks if a given config has a pod template spec.\n\nfunc (conf *ResourceConfig) HasPodTemplate() bool {\n\n\treturn conf.pod.meta != nil && conf.pod.spec != nil\n\n}\n\n\n\n// AnnotateNamespace annotates a namespace resource config with `annotations`.\n\nfunc (conf *ResourceConfig) 
AnnotateNamespace(annotations map[string]string) ([]byte, error) {\n\n\tns, ok := conf.workload.obj.(*corev1.Namespace)\n\n\tif !ok {\n\n\t\treturn nil, errors.New(\"can't inject namespace. Type assertion failed\")\n\n\t}\n\n\tns.Annotations[k8s.ProxyInjectAnnotation] = k8s.ProxyInjectEnabled\n\n\tif len(annotations) > 0 {\n\n\t\tfor annotation, value := range annotations {\n\n\t\t\tns.Annotations[annotation] = value\n\n\t\t}\n\n\t}\n\n\tj, err := getFilteredJSON(ns)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn yaml.JSONToYAML(j)\n\n}\n\n\n\n// AnnotateService annotates a service resource config with `annotations`.\n\nfunc (conf *ResourceConfig) AnnotateService(annotations map[string]string) ([]byte, error) {\n\n\tservice, ok := conf.workload.obj.(*corev1.Service)\n\n\tif !ok {\n\n\t\treturn nil, errors.New(\"can't inject service. Type assertion failed\")\n\n\t}\n\n\tif len(annotations) > 0 {\n\n\t\tfor annotation, value := range annotations {\n\n\t\t\tservice.Annotations[annotation] = value\n\n\t\t}\n\n\t}\n\n\tj, err := getFilteredJSON(service)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn yaml.JSONToYAML(j)\n\n}\n\n\n\n// getFilteredJSON method performs JSON marshaling such that zero values of\n\n// empty structs are respected by `omitempty` tags. We make use of a drop-in\n\n// replacement of the standard json/encoding library, without which empty struct values\n\n// present in workload objects would make it into the marshaled JSON.\n\nfunc getFilteredJSON(conf runtime.Object) ([]byte, error) {\n\n\treturn jsonfilter.Marshal(&conf)\n\n}\n\n\n\n// ToWholeCPUCores coerces a k8s resource value to a whole integer value, rounding up.\n\nfunc ToWholeCPUCores(q k8sResource.Quantity) (int64, error) {\n\n\tq.RoundUp(0)\n\n\tif n, ok := q.AsInt64(); ok {\n\n\t\treturn n, nil\n\n\t}\n\n\treturn 0, fmt.Errorf(\"Could not parse cores: %s\", q.String())\n\n}\n\n\n\n// getPodInboundPorts will return a string-formatted list of ports (in ascending\n\n// order) based on a PodSpec object. The function will check each container in\n\n// the pod and extract any defined ports. Additionally, it will also extract any\n\n// healthcheck target probes, provided the probe is an HTTP healthcheck\n\nfunc getPodInboundPorts(podSpec *corev1.PodSpec) string {\n\n\tports := make(map[int32]struct{})\n\n\tif podSpec != nil {\n\n\t\tfor _, container := range podSpec.Containers {\n\n\t\t\tfor _, port := range container.Ports {\n\n\t\t\t\tports[port.ContainerPort] = struct{}{}\n\n\t\t\t}\n\n\n\n\t\t\tif readiness := container.ReadinessProbe; readiness != nil {\n\n\t\t\t\tif port, ok := getProbePort(readiness); ok {\n\n\t\t\t\t\tports[port] = struct{}{}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tif liveness := container.LivenessProbe; liveness != nil {\n\n\t\t\t\tif port, ok := getProbePort(liveness); ok {\n\n\t\t\t\t\tports[port] = struct{}{}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tportList := make([]string, 0, len(ports))\n\n\tfor port := range ports {\n\n\t\tportList = append(portList, strconv.Itoa(int(port)))\n\n\t}\n\n\n\n\t// sort slice in ascending order\n\n\tsort.Strings(portList)\n\n\treturn strings.Join(portList, \",\")\n\n}\n\n\n\n// getProbePort takes the healthcheck probe spec of a container and returns the\n\n// target port if the probe is configured to do an HTTPGet. 
The function returns\n\n// the probe's target port and a success value (if successful)\n\nfunc getProbePort(probe *corev1.Probe) (int32, bool) {\n\n\tif probe.HTTPGet != nil {\n\n\t\t// HTTPGet probes use a named port, in this case, do not return it. A\n\n\t\t// named port must be declared in the container's own ports; if probe uses\n\n\t\t// a named port it is likely the port has been seen before.\n\n\t\tswitch probe.HTTPGet.Port.Type {\n\n\t\tcase intstr.Int:\n\n\t\t\treturn probe.HTTPGet.Port.IntVal, true\n\n\t\t}\n\n\t}\n\n\n\n\treturn 0, false\n\n}\n", "file_path": "pkg/inject/inject.go", "rank": 77, "score": 117633.81234453995 }, { "content": "func (conf *ResourceConfig) CreateOpaquePortsPatch() ([]byte, error) {\n\n\tif conf.HasWorkloadAnnotation(k8s.ProxyOpaquePortsAnnotation) {\n\n\t\t// The workload already has the opaque ports annotation so a patch\n\n\t\t// does not need to be created.\n\n\t\treturn nil, nil\n\n\t}\n\n\topaquePorts, ok := conf.GetConfigAnnotation(k8s.ProxyOpaquePortsAnnotation)\n\n\tif ok {\n\n\t\t// The workload's namespace has the opaque ports annotation, so it\n\n\t\t// should inherit that value. A patch is created which adds that\n\n\t\t// list.\n\n\t\treturn conf.CreateAnnotationPatch(opaquePorts)\n\n\t}\n\n\n\n\t// Both the workload and the namespace do not have the annotation so a\n\n\t// patch is created which adds the default list.\n\n\tdefaultPorts := strings.Split(conf.GetValues().Proxy.OpaquePorts, \",\")\n\n\tvar filteredPorts []string\n\n\tif conf.IsPod() {\n\n\t\t// The workload is a pod so only add the default opaque ports that it\n\n\t\t// exposes as container ports.\n\n\t\tfilteredPorts = conf.FilterPodOpaquePorts(defaultPorts)\n\n\t} else if conf.IsService() {\n\n\t\t// The workload is a service so only add the default opaque ports that\n\n\t\t// are exposed as a service port, or targeted as a targetPort.\n\n\t\tservice := conf.workload.obj.(*corev1.Service)\n\n\t\tfor _, p := range service.Spec.Ports {\n\n\t\t\tport := strconv.Itoa(int(p.Port))\n\n\t\t\tif p.TargetPort.Type == 0 && p.TargetPort.IntVal == 0 {\n\n\t\t\t\t// The port's targetPort is not set, so add the port if is\n\n\t\t\t\t// opaque by default. Checking that targetPort is not set\n\n\t\t\t\t// avoids marking a port as opaque if it targets a port that\n\n\t\t\t\t// not opaque (e.g. 
port=3306 and targetPort=80; 3306 should\n\n\t\t\t\t// not be opaque)\n\n\t\t\t\tif util.ContainsString(port, defaultPorts) {\n\n\t\t\t\t\tfilteredPorts = append(filteredPorts, port)\n\n\t\t\t\t}\n\n\t\t\t} else if util.ContainsString(strconv.Itoa(int(p.TargetPort.IntVal)), defaultPorts) {\n\n\t\t\t\t// The port's targetPort is set; if it is opaque then port\n\n\t\t\t\t// should also be opaque.\n\n\t\t\t\tfilteredPorts = append(filteredPorts, port)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\tif len(filteredPorts) == 0 {\n\n\t\t// There are no default opaque ports to add so a patch does not need\n\n\t\t// to be created.\n\n\t\treturn nil, nil\n\n\t}\n\n\tports := strings.Join(filteredPorts, \",\")\n\n\treturn conf.CreateAnnotationPatch(ports)\n", "file_path": "pkg/inject/inject.go", "rank": 78, "score": 117324.58553560532 }, { "content": "func (conf *ResourceConfig) FilterPodOpaquePorts(defaultPorts []string) []string {\n\n\tvar filteredPorts []string\n\n\tfor _, c := range conf.pod.spec.Containers {\n\n\t\tfor _, p := range c.Ports {\n\n\t\t\tport := strconv.Itoa(int(p.ContainerPort))\n\n\t\t\tif util.ContainsString(port, defaultPorts) {\n\n\t\t\t\tfilteredPorts = append(filteredPorts, port)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\treturn filteredPorts\n", "file_path": "pkg/inject/inject.go", "rank": 79, "score": 117152.64674590302 }, { "content": "package inject\n\n\n\nimport (\n\n\tl5dcharts \"github.com/linkerd/linkerd2/pkg/charts/linkerd2\"\n\n\n\n\tfuzz \"github.com/AdaLogics/go-fuzz-headers\"\n\n)\n\n\n\n// FuzzInject fuzzes Pod injection.\n\nfunc FuzzInject(data []byte) int {\n\n\tf := fuzz.NewConsumer(data)\n\n\tyamlBytes, err := f.GetBytes()\n\n\tif err != nil {\n\n\t\treturn 0\n\n\t}\n\n\n\n\tv := &l5dcharts.Values{}\n\n\terr = f.GenerateStruct(v)\n\n\tif err != nil {\n\n\t\treturn 0\n\n\t}\n\n\tconf := NewResourceConfig(v, OriginUnknown, \"\")\n\n\t_, _ = conf.ParseMetaAndYAML(yamlBytes)\n\n\tinjectProxy, err := f.GetBool()\n\n\tif err != nil {\n\n\t\treturn 0\n\n\t}\n\n\t_, _ = conf.GetPodPatch(injectProxy)\n\n\t_, _ = conf.CreateOpaquePortsPatch()\n\n\n\n\treport := &Report{}\n\n\terr = f.GenerateStruct(report)\n\n\tif err == nil {\n\n\t\t_, _ = conf.Uninject(report)\n\n\t}\n\n\treturn 1\n\n}\n", "file_path": "pkg/inject/inject_fuzzer.go", "rank": 80, "score": 116994.52940191765 }, { "content": "package inject\n\n\n\nimport (\n\n\t\"testing\"\n\n\n\n\t\"github.com/go-test/deep\"\n\n\tl5dcharts \"github.com/linkerd/linkerd2/pkg/charts/linkerd2\"\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\t\"github.com/linkerd/linkerd2/pkg/version\"\n\n\tappsv1 \"k8s.io/api/apps/v1\"\n\n\tcorev1 \"k8s.io/api/core/v1\"\n\n\tk8sResource \"k8s.io/apimachinery/pkg/api/resource\"\n\n\tmetav1 \"k8s.io/apimachinery/pkg/apis/meta/v1\"\n\n\t\"sigs.k8s.io/yaml\"\n\n)\n\n\n\nfunc TestGetOverriddenValues(t *testing.T) {\n\n\t// this test uses an annotated deployment and a expected Values object to verify\n\n\t// the GetOverriddenValues function.\n\n\n\n\tvar (\n\n\t\tproxyVersionOverride = \"proxy-version-override\"\n\n\t\tpullPolicy = \"Always\"\n\n\t)\n\n\n\n\ttestConfig, err := l5dcharts.NewValues()\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"Unexpected error: %v\", err)\n\n\t}\n\n\n\n\tvar testCases = []struct {\n\n\t\tid string\n\n\t\tnsAnnotations map[string]string\n\n\t\tspec appsv1.DeploymentSpec\n\n\t\texpected func() *l5dcharts.Values\n\n\t}{\n\n\t\t{id: \"use overrides\",\n\n\t\t\tnsAnnotations: make(map[string]string),\n\n\t\t\tspec: appsv1.DeploymentSpec{\n\n\t\t\t\tTemplate: 
corev1.PodTemplateSpec{\n\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\n\t\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\t\tk8s.ProxyImageAnnotation: \"cr.l5d.io/linkerd/proxy\",\n\n\t\t\t\t\t\t\tk8s.ProxyImagePullPolicyAnnotation: pullPolicy,\n\n\t\t\t\t\t\t\tk8s.ProxyInitImageAnnotation: \"cr.l5d.io/linkerd/proxy-init\",\n\n\t\t\t\t\t\t\tk8s.ProxyControlPortAnnotation: \"4000\",\n\n\t\t\t\t\t\t\tk8s.ProxyInboundPortAnnotation: \"5000\",\n\n\t\t\t\t\t\t\tk8s.ProxyAdminPortAnnotation: \"5001\",\n\n\t\t\t\t\t\t\tk8s.ProxyOutboundPortAnnotation: \"5002\",\n\n\t\t\t\t\t\t\tk8s.ProxyPodInboundPortsAnnotation: \"1234,5678\",\n\n\t\t\t\t\t\t\tk8s.ProxyIgnoreInboundPortsAnnotation: \"4222,6222\",\n\n\t\t\t\t\t\t\tk8s.ProxyIgnoreOutboundPortsAnnotation: \"8079,8080\",\n\n\t\t\t\t\t\t\tk8s.ProxyCPURequestAnnotation: \"0.15\",\n\n\t\t\t\t\t\t\tk8s.ProxyMemoryRequestAnnotation: \"120\",\n\n\t\t\t\t\t\t\tk8s.ProxyEphemeralStorageRequestAnnotation: \"10\",\n\n\t\t\t\t\t\t\tk8s.ProxyCPULimitAnnotation: \"1.5\",\n\n\t\t\t\t\t\t\tk8s.ProxyMemoryLimitAnnotation: \"256\",\n\n\t\t\t\t\t\t\tk8s.ProxyEphemeralStorageLimitAnnotation: \"50\",\n\n\t\t\t\t\t\t\tk8s.ProxyUIDAnnotation: \"8500\",\n\n\t\t\t\t\t\t\tk8s.ProxyLogLevelAnnotation: \"debug,linkerd=debug\",\n\n\t\t\t\t\t\t\tk8s.ProxyLogFormatAnnotation: \"json\",\n\n\t\t\t\t\t\t\tk8s.ProxyEnableExternalProfilesAnnotation: \"false\",\n\n\t\t\t\t\t\t\tk8s.ProxyVersionOverrideAnnotation: proxyVersionOverride,\n\n\t\t\t\t\t\t\tk8s.ProxyWaitBeforeExitSecondsAnnotation: \"123\",\n\n\t\t\t\t\t\t\tk8s.ProxyRequireIdentityOnInboundPortsAnnotation: \"8888,9999\",\n\n\t\t\t\t\t\t\tk8s.ProxyOutboundConnectTimeout: \"6000ms\",\n\n\t\t\t\t\t\t\tk8s.ProxyInboundConnectTimeout: \"600ms\",\n\n\t\t\t\t\t\t\tk8s.ProxyOpaquePortsAnnotation: \"4320-4325,3306\",\n\n\t\t\t\t\t\t\tk8s.ProxyAwait: \"enabled\",\n\n\t\t\t\t\t\t\tk8s.ProxySkipSubnetsAnnotation: \"172.17.0.0/16\",\n\n\t\t\t\t\t\t\tk8s.ProxyAccessLogAnnotation: \"apache\",\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t\tSpec: corev1.PodSpec{},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\texpected: func() *l5dcharts.Values {\n\n\t\t\t\tvalues, _ := l5dcharts.NewValues()\n\n\n\n\t\t\t\tvalues.Proxy.Cores = 2\n\n\t\t\t\tvalues.Proxy.Image.Name = \"cr.l5d.io/linkerd/proxy\"\n\n\t\t\t\tvalues.Proxy.Image.PullPolicy = pullPolicy\n\n\t\t\t\tvalues.Proxy.Image.Version = proxyVersionOverride\n\n\t\t\t\tvalues.Proxy.PodInboundPorts = \"1234,5678\"\n\n\t\t\t\tvalues.Proxy.Ports.Control = 4000\n\n\t\t\t\tvalues.Proxy.Ports.Inbound = 5000\n\n\t\t\t\tvalues.Proxy.Ports.Admin = 5001\n\n\t\t\t\tvalues.Proxy.Ports.Outbound = 5002\n\n\t\t\t\tvalues.Proxy.WaitBeforeExitSeconds = 123\n\n\t\t\t\tvalues.Proxy.LogLevel = \"debug,linkerd=debug\"\n\n\t\t\t\tvalues.Proxy.LogFormat = \"json\"\n\n\t\t\t\tvalues.Proxy.Resources = &l5dcharts.Resources{\n\n\t\t\t\t\tCPU: l5dcharts.Constraints{\n\n\t\t\t\t\t\tLimit: \"1.5\",\n\n\t\t\t\t\t\tRequest: \"0.15\",\n\n\t\t\t\t\t},\n\n\t\t\t\t\tMemory: l5dcharts.Constraints{\n\n\t\t\t\t\t\tLimit: \"256\",\n\n\t\t\t\t\t\tRequest: \"120\",\n\n\t\t\t\t\t},\n\n\t\t\t\t\tEphemeralStorage: l5dcharts.Constraints{\n\n\t\t\t\t\t\tLimit: \"50\",\n\n\t\t\t\t\t\tRequest: \"10\",\n\n\t\t\t\t\t},\n\n\t\t\t\t}\n\n\t\t\t\tvalues.Proxy.UID = 8500\n\n\t\t\t\tvalues.ProxyInit.Image.Name = \"cr.l5d.io/linkerd/proxy-init\"\n\n\t\t\t\tvalues.ProxyInit.Image.PullPolicy = pullPolicy\n\n\t\t\t\tvalues.ProxyInit.Image.Version = version.ProxyInitVersion\n\n\t\t\t\tvalues.ProxyInit.IgnoreInboundPorts = 
\"4222,6222\"\n\n\t\t\t\tvalues.ProxyInit.IgnoreOutboundPorts = \"8079,8080\"\n\n\t\t\t\tvalues.ProxyInit.SkipSubnets = \"172.17.0.0/16\"\n\n\t\t\t\tvalues.Proxy.RequireIdentityOnInboundPorts = \"8888,9999\"\n\n\t\t\t\tvalues.Proxy.OutboundConnectTimeout = \"6000ms\"\n\n\t\t\t\tvalues.Proxy.InboundConnectTimeout = \"600ms\"\n\n\t\t\t\tvalues.Proxy.OpaquePorts = \"4320,4321,4322,4323,4324,4325,3306\"\n\n\t\t\t\tvalues.Proxy.Await = true\n\n\t\t\t\tvalues.Proxy.AccessLog = \"apache\"\n\n\t\t\t\treturn values\n\n\t\t\t},\n\n\t\t},\n\n\t\t{id: \"use defaults\",\n\n\t\t\tnsAnnotations: make(map[string]string),\n\n\t\t\tspec: appsv1.DeploymentSpec{\n\n\t\t\t\tTemplate: corev1.PodTemplateSpec{\n\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{},\n\n\t\t\t\t\tSpec: corev1.PodSpec{},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\texpected: func() *l5dcharts.Values {\n\n\t\t\t\tvalues, _ := l5dcharts.NewValues()\n\n\t\t\t\treturn values\n\n\t\t\t},\n\n\t\t},\n\n\t\t{id: \"use namespace overrides\",\n\n\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\tk8s.ProxyImageAnnotation: \"cr.l5d.io/linkerd/proxy\",\n\n\t\t\t\tk8s.ProxyImagePullPolicyAnnotation: pullPolicy,\n\n\t\t\t\tk8s.ProxyInitImageAnnotation: \"cr.l5d.io/linkerd/proxy-init\",\n\n\t\t\t\tk8s.ProxyControlPortAnnotation: \"4000\",\n\n\t\t\t\tk8s.ProxyInboundPortAnnotation: \"5000\",\n\n\t\t\t\tk8s.ProxyAdminPortAnnotation: \"5001\",\n\n\t\t\t\tk8s.ProxyOutboundPortAnnotation: \"5002\",\n\n\t\t\t\tk8s.ProxyPodInboundPortsAnnotation: \"1234,5678\",\n\n\t\t\t\tk8s.ProxyIgnoreInboundPortsAnnotation: \"4222,6222\",\n\n\t\t\t\tk8s.ProxyIgnoreOutboundPortsAnnotation: \"8079,8080\",\n\n\t\t\t\tk8s.ProxyCPURequestAnnotation: \"0.15\",\n\n\t\t\t\tk8s.ProxyMemoryRequestAnnotation: \"120\",\n\n\t\t\t\tk8s.ProxyCPULimitAnnotation: \"1.5\",\n\n\t\t\t\tk8s.ProxyMemoryLimitAnnotation: \"256\",\n\n\t\t\t\tk8s.ProxyUIDAnnotation: \"8500\",\n\n\t\t\t\tk8s.ProxyLogLevelAnnotation: \"debug,linkerd=debug\",\n\n\t\t\t\tk8s.ProxyLogFormatAnnotation: \"json\",\n\n\t\t\t\tk8s.ProxyEnableExternalProfilesAnnotation: \"false\",\n\n\t\t\t\tk8s.ProxyVersionOverrideAnnotation: proxyVersionOverride,\n\n\t\t\t\tk8s.ProxyWaitBeforeExitSecondsAnnotation: \"123\",\n\n\t\t\t\tk8s.ProxyOutboundConnectTimeout: \"6000ms\",\n\n\t\t\t\tk8s.ProxyInboundConnectTimeout: \"600ms\",\n\n\t\t\t\tk8s.ProxyOpaquePortsAnnotation: \"4320-4325,3306\",\n\n\t\t\t\tk8s.ProxyAwait: \"enabled\",\n\n\t\t\t\tk8s.ProxyAccessLogAnnotation: \"apache\",\n\n\t\t\t},\n\n\t\t\tspec: appsv1.DeploymentSpec{\n\n\t\t\t\tTemplate: corev1.PodTemplateSpec{\n\n\t\t\t\t\tSpec: corev1.PodSpec{},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\texpected: func() *l5dcharts.Values {\n\n\t\t\t\tvalues, _ := l5dcharts.NewValues()\n\n\n\n\t\t\t\tvalues.Proxy.Cores = 2\n\n\t\t\t\tvalues.Proxy.Image.Name = \"cr.l5d.io/linkerd/proxy\"\n\n\t\t\t\tvalues.Proxy.Image.PullPolicy = pullPolicy\n\n\t\t\t\tvalues.Proxy.Image.Version = proxyVersionOverride\n\n\t\t\t\tvalues.Proxy.PodInboundPorts = \"1234,5678\"\n\n\t\t\t\tvalues.Proxy.Ports.Control = 4000\n\n\t\t\t\tvalues.Proxy.Ports.Inbound = 5000\n\n\t\t\t\tvalues.Proxy.Ports.Admin = 5001\n\n\t\t\t\tvalues.Proxy.Ports.Outbound = 5002\n\n\t\t\t\tvalues.Proxy.WaitBeforeExitSeconds = 123\n\n\t\t\t\tvalues.Proxy.LogLevel = \"debug,linkerd=debug\"\n\n\t\t\t\tvalues.Proxy.LogFormat = \"json\"\n\n\t\t\t\tvalues.Proxy.Resources = &l5dcharts.Resources{\n\n\t\t\t\t\tCPU: l5dcharts.Constraints{\n\n\t\t\t\t\t\tLimit: \"1.5\",\n\n\t\t\t\t\t\tRequest: \"0.15\",\n\n\t\t\t\t\t},\n\n\t\t\t\t\tMemory: 
l5dcharts.Constraints{\n\n\t\t\t\t\t\tLimit: \"256\",\n\n\t\t\t\t\t\tRequest: \"120\",\n\n\t\t\t\t\t},\n\n\t\t\t\t}\n\n\t\t\t\tvalues.Proxy.UID = 8500\n\n\t\t\t\tvalues.ProxyInit.Image.Name = \"cr.l5d.io/linkerd/proxy-init\"\n\n\t\t\t\tvalues.ProxyInit.Image.PullPolicy = pullPolicy\n\n\t\t\t\tvalues.ProxyInit.Image.Version = version.ProxyInitVersion\n\n\t\t\t\tvalues.ProxyInit.IgnoreInboundPorts = \"4222,6222\"\n\n\t\t\t\tvalues.ProxyInit.IgnoreOutboundPorts = \"8079,8080\"\n\n\t\t\t\tvalues.Proxy.OutboundConnectTimeout = \"6000ms\"\n\n\t\t\t\tvalues.Proxy.InboundConnectTimeout = \"600ms\"\n\n\t\t\t\tvalues.Proxy.OpaquePorts = \"4320,4321,4322,4323,4324,4325,3306\"\n\n\t\t\t\tvalues.Proxy.Await = true\n\n\t\t\t\tvalues.Proxy.AccessLog = \"apache\"\n\n\t\t\t\treturn values\n\n\t\t\t},\n\n\t\t},\n\n\t\t{id: \"use invalid duration for TCP connect timeouts\",\n\n\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\tk8s.ProxyOutboundConnectTimeout: \"6000\",\n\n\t\t\t\tk8s.ProxyInboundConnectTimeout: \"600\",\n\n\t\t\t},\n\n\t\t\tspec: appsv1.DeploymentSpec{\n\n\t\t\t\tTemplate: corev1.PodTemplateSpec{\n\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{},\n\n\t\t\t\t\tSpec: corev1.PodSpec{},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\texpected: func() *l5dcharts.Values {\n\n\t\t\t\tvalues, _ := l5dcharts.NewValues()\n\n\t\t\t\treturn values\n\n\t\t\t},\n\n\t\t},\n\n\t\t{id: \"use valid duration for TCP connect timeouts\",\n\n\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\t// Validate we're converting time values into ms for the proxy to parse correctly.\n\n\t\t\t\tk8s.ProxyOutboundConnectTimeout: \"6s5ms\",\n\n\t\t\t\tk8s.ProxyInboundConnectTimeout: \"2s5ms\",\n\n\t\t\t},\n\n\t\t\tspec: appsv1.DeploymentSpec{\n\n\t\t\t\tTemplate: corev1.PodTemplateSpec{\n\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{},\n\n\t\t\t\t\tSpec: corev1.PodSpec{},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\texpected: func() *l5dcharts.Values {\n\n\t\t\t\tvalues, _ := l5dcharts.NewValues()\n\n\t\t\t\tvalues.Proxy.OutboundConnectTimeout = \"6005ms\"\n\n\t\t\t\tvalues.Proxy.InboundConnectTimeout = \"2005ms\"\n\n\t\t\t\treturn values\n\n\t\t\t},\n\n\t\t},\n\n\t\t{id: \"use named port for opaque ports\",\n\n\t\t\tnsAnnotations: make(map[string]string),\n\n\t\t\tspec: appsv1.DeploymentSpec{\n\n\t\t\t\tTemplate: corev1.PodTemplateSpec{\n\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\n\t\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\t\tk8s.ProxyOpaquePortsAnnotation: \"mysql\",\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t\tSpec: corev1.PodSpec{\n\n\t\t\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tPorts: []corev1.ContainerPort{\n\n\t\t\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\t\t\tName: \"mysql\",\n\n\t\t\t\t\t\t\t\t\t\tContainerPort: 3306,\n\n\t\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\texpected: func() *l5dcharts.Values {\n\n\t\t\t\tvalues, _ := l5dcharts.NewValues()\n\n\t\t\t\tvalues.Proxy.OpaquePorts = \"3306\"\n\n\t\t\t\tvalues.Proxy.PodInboundPorts = \"3306\"\n\n\t\t\t\treturn values\n\n\t\t\t},\n\n\t\t},\n\n\t}\n\n\n\n\tfor _, tc := range testCases {\n\n\t\ttestCase := tc\n\n\t\tt.Run(testCase.id, func(t *testing.T) {\n\n\t\t\tdata, err := yaml.Marshal(&appsv1.Deployment{Spec: testCase.spec})\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatal(err)\n\n\t\t\t}\n\n\n\n\t\t\tresourceConfig := NewResourceConfig(testConfig, OriginUnknown, \"linkerd\").\n\n\t\t\t\tWithKind(\"Deployment\").WithNsAnnotations(testCase.nsAnnotations)\n\n\t\t\tif err := 
resourceConfig.parse(data); err != nil {\n\n\t\t\t\tt.Fatal(err)\n\n\t\t\t}\n\n\n\n\t\t\tresourceConfig.AppendNamespaceAnnotations()\n\n\t\t\tactual, err := resourceConfig.GetOverriddenValues()\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatal(err)\n\n\t\t\t}\n\n\t\t\texpected := testCase.expected()\n\n\t\t\tif diff := deep.Equal(actual, expected); diff != nil {\n\n\t\t\t\tt.Errorf(\"%+v\", diff)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n\n}\n\n\n\nfunc TestWholeCPUCores(t *testing.T) {\n\n\tfor _, c := range []struct {\n\n\t\tv string\n\n\t\tn int\n\n\t}{\n\n\t\t{v: \"1\", n: 1},\n\n\t\t{v: \"1m\", n: 1},\n\n\t\t{v: \"1000m\", n: 1},\n\n\t\t{v: \"1001m\", n: 2},\n\n\t} {\n\n\t\tq, err := k8sResource.ParseQuantity(c.v)\n\n\t\tif err != nil {\n\n\t\t\tt.Fatal(err)\n\n\t\t}\n\n\t\tn, err := ToWholeCPUCores(q)\n\n\t\tif err != nil {\n\n\t\t\tt.Fatal(err)\n\n\t\t}\n\n\t\tif n != int64(c.n) {\n\n\t\t\tt.Fatalf(\"Unexpected value: %v != %v\", n, c.n)\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "pkg/inject/inject_test.go", "rank": 81, "score": 116994.52940191765 }, { "content": "package inject\n\n\n\nimport (\n\n\t\"context\"\n\n\t\"fmt\"\n\n\t\"os\"\n\n\t\"strings\"\n\n\t\"testing\"\n\n\t\"time\"\n\n\n\n\t\"github.com/go-test/deep\"\n\n\t\"github.com/linkerd/linkerd2/controller/gen/client/clientset/versioned/scheme\"\n\n\t\"github.com/linkerd/linkerd2/pkg/flags\"\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\t\"github.com/linkerd/linkerd2/pkg/version\"\n\n\t\"github.com/linkerd/linkerd2/testutil\"\n\n\tappsv1 \"k8s.io/api/apps/v1\"\n\n\tv1 \"k8s.io/api/core/v1\"\n\n\t\"k8s.io/apimachinery/pkg/api/resource\"\n\n\t\"k8s.io/apimachinery/pkg/runtime/serializer/json\"\n\n)\n\n\n\nconst (\n\n\topaquePorts = \"11211\"\n\n\tmanualOpaquePorts = \"22122\"\n\n)\n\n\n\n//////////////////////\n\n/// TEST SETUP ///\n\n//////////////////////\n\n\n\nvar TestHelper *testutil.TestHelper\n\n\n\nfunc TestMain(m *testing.M) {\n\n\tTestHelper = testutil.NewTestHelper()\n\n\t// Block test execution until control plane pods are running\n\n\tTestHelper.WaitUntilDeployReady(testutil.LinkerdDeployReplicasEdge)\n\n\tos.Exit(m.Run())\n\n}\n\n\n\n//////////////////////\n\n/// TEST EXECUTION ///\n\n//////////////////////\n\n\n\nfunc parseDeployment(yamlString string) (*appsv1.Deployment, error) {\n\n\ts := json.NewYAMLSerializer(json.DefaultMetaFactory, scheme.Scheme,\n\n\t\tscheme.Scheme)\n\n\tvar deploy appsv1.Deployment\n\n\t_, _, err := s.Decode([]byte(yamlString), nil, &deploy)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\treturn &deploy, nil\n\n}\n\n\n\nfunc TestInjectManualParams(t *testing.T) {\n\n\treg := \"cr.l5d.io/linkerd\"\n\n\tif override := os.Getenv(flags.EnvOverrideDockerRegistry); reg != \"\" {\n\n\t\treg = override\n\n\t}\n\n\n\n\tinjectionValidator := testutil.InjectValidator{\n\n\t\tNoInitContainer: TestHelper.CNI(),\n\n\t\tVersion: \"proxy-version\",\n\n\t\tImage: reg + \"/proxy-image\",\n\n\t\tInitImage: reg + \"/init-image\",\n\n\t\tImagePullPolicy: \"Never\",\n\n\t\tControlPort: 123,\n\n\t\tSkipInboundPorts: \"234,345\",\n\n\t\tSkipOutboundPorts: \"456,567\",\n\n\t\tInboundPort: 678,\n\n\t\tAdminPort: 789,\n\n\t\tOutboundPort: 890,\n\n\t\tCPURequest: \"10m\",\n\n\t\tMemoryRequest: \"10Mi\",\n\n\t\tCPULimit: \"20m\",\n\n\t\tMemoryLimit: \"20Mi\",\n\n\t\tUID: 1337,\n\n\t\tLogLevel: \"off\",\n\n\t\tEnableExternalProfiles: true,\n\n\t}\n\n\tflags, _ := injectionValidator.GetFlagsAndAnnotations()\n\n\n\n\t// TODO: test config.linkerd.io/proxy-version\n\n\tcmd := 
append([]string{\"inject\",\n\n\t\t\"--manual\",\n\n\t}, flags...)\n\n\n\n\tcmd = append(cmd, \"testdata/inject_test.yaml\")\n\n\n\n\tout, err := TestHelper.LinkerdRun(cmd...)\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatal(t, \"unexpected error\", err)\n\n\t}\n\n\n\n\tdeploy, err := parseDeployment(out)\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed parsing deployment\", \"failed parsing deployment\\n%s\", err.Error())\n\n\t}\n\n\n\n\terr = injectionValidator.ValidatePod(&deploy.Spec.Template.Spec)\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"received unexpected output\", \"received unexpected output\\n%s\", err.Error())\n\n\t}\n\n}\n\n\n\nfunc TestInjectAutoParams(t *testing.T) {\n\n\tinjectYAML, err := testutil.ReadFile(\"testdata/inject_test.yaml\")\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed to read inject test file\", \"failed to read inject test file: %s\", err)\n\n\t}\n\n\n\n\tinjectNS := \"inj-auto-params-test\"\n\n\tdeployName := \"inject-test-terminus-auto\"\n\n\n\n\tctx := context.Background()\n\n\n\n\tTestHelper.WithDataPlaneNamespace(ctx, injectNS, map[string]string{}, t, func(t *testing.T, ns string) {\n\n\t\tinjectionValidator := testutil.InjectValidator{\n\n\t\t\tNoInitContainer: TestHelper.CNI() || TestHelper.Calico(),\n\n\t\t\tAutoInject: true,\n\n\t\t\tAdminPort: 8888,\n\n\t\t\tControlPort: 8881,\n\n\t\t\tEnableExternalProfiles: true,\n\n\t\t\tEnableDebug: true,\n\n\t\t\tImagePullPolicy: \"Never\",\n\n\t\t\tInboundPort: 8882,\n\n\t\t\tInitImage: \"init-image\",\n\n\t\t\tInitImageVersion: \"init-image-version\",\n\n\t\t\tOutboundPort: 8883,\n\n\t\t\tCPULimit: \"160m\",\n\n\t\t\tCPURequest: \"150m\",\n\n\t\t\tMemoryLimit: \"150Mi\",\n\n\t\t\tMemoryRequest: \"100Mi\",\n\n\t\t\tEphemeralStorageLimit: \"50Mi\",\n\n\t\t\tEphemeralStorageRequest: \"10Mi\",\n\n\t\t\tImage: \"proxy-image\",\n\n\t\t\tLogLevel: \"proxy-log-level\",\n\n\t\t\tUID: 10,\n\n\t\t\tVersion: \"proxy-version\",\n\n\t\t\tRequireIdentityOnPorts: \"8884,8885\",\n\n\t\t\tOpaquePorts: \"8888,8889\",\n\n\t\t\tOutboundConnectTimeout: \"888ms\",\n\n\t\t\tInboundConnectTimeout: \"999ms\",\n\n\t\t\tSkipOutboundPorts: \"1111,2222,3333\",\n\n\t\t\tSkipInboundPorts: \"4444,5555,6666\",\n\n\t\t\tWaitBeforeExitSeconds: 10,\n\n\t\t}\n\n\n\n\t\t_, annotations := injectionValidator.GetFlagsAndAnnotations()\n\n\n\n\t\tpatchedYAML, err := testutil.PatchDeploy(injectYAML, deployName, annotations)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to patch inject test YAML\",\n\n\t\t\t\t\"failed to patch inject test YAML in namespace %s for deploy/%s: %s\", ns, deployName, err)\n\n\t\t}\n\n\n\n\t\to, err := TestHelper.Kubectl(patchedYAML, \"--namespace\", ns, \"create\", \"-f\", \"-\")\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to create deployment\", \"failed to create deploy/%s in namespace %s for %s: %s\", deployName, ns, err, o)\n\n\t\t}\n\n\n\n\t\tvar pod *v1.Pod\n\n\t\terr = TestHelper.RetryFor(30*time.Second, func() error {\n\n\t\t\tpods, err := TestHelper.GetPodsForDeployment(ctx, ns, deployName)\n\n\t\t\tif err != nil {\n\n\t\t\t\treturn fmt.Errorf(\"failed to get pods for namespace %s\", ns)\n\n\t\t\t}\n\n\n\n\t\t\tfor _, p := range pods {\n\n\t\t\t\tp := p // pin\n\n\t\t\t\tcreator, ok := p.Annotations[k8s.CreatedByAnnotation]\n\n\t\t\t\tif ok && strings.Contains(creator, \"proxy-injector\") {\n\n\t\t\t\t\tpod = &p\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif pod == nil {\n\n\t\t\t\treturn fmt.Errorf(\"failed 
to find auto injected pod for deployment %s\", deployName)\n\n\t\t\t}\n\n\t\t\treturn nil\n\n\t\t})\n\n\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to find autoinjected pod: \", err.Error())\n\n\t\t}\n\n\n\n\t\tif err := injectionValidator.ValidatePod(&pod.Spec); err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to validate auto injection\", err.Error())\n\n\t\t}\n\n\t})\n\n}\n\n\n\nfunc TestInjectAutoNamespaceOverrideAnnotations(t *testing.T) {\n\n\t// Check for Namespace level override of proxy Configurations\n\n\tinjectYAML, err := testutil.ReadFile(\"testdata/inject_test.yaml\")\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed to read inject test file\", \"failed to read inject test file: %s\", err)\n\n\t}\n\n\n\n\tinjectNS := \"inj-ns-override-test\"\n\n\tdeployName := \"inject-test-terminus\"\n\n\tnsProxyMemReq := \"50Mi\"\n\n\tnsProxyCPUReq := \"200m\"\n\n\n\n\t// Namespace level proxy configuration override\n\n\tnsAnnotations := map[string]string{\n\n\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\tk8s.ProxyCPURequestAnnotation: nsProxyCPUReq,\n\n\t\tk8s.ProxyMemoryRequestAnnotation: nsProxyMemReq,\n\n\t}\n\n\n\n\tctx := context.Background()\n\n\tTestHelper.WithDataPlaneNamespace(ctx, injectNS, nsAnnotations, t, func(t *testing.T, ns string) {\n\n\t\t// patch injectYAML with unique name and pod annotations\n\n\t\t// Pod Level proxy configuration override\n\n\t\tpodProxyCPUReq := \"600m\"\n\n\t\tpodAnnotations := map[string]string{\n\n\t\t\tk8s.ProxyCPURequestAnnotation: podProxyCPUReq,\n\n\t\t}\n\n\n\n\t\tpatchedYAML, err := testutil.PatchDeploy(injectYAML, deployName, podAnnotations)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to patch inject test YAML\",\n\n\t\t\t\t\"failed to patch inject test YAML in namespace %s for deploy/%s: %s\", ns, deployName, err)\n\n\t\t}\n\n\n\n\t\to, err := TestHelper.Kubectl(patchedYAML, \"--namespace\", ns, \"create\", \"-f\", \"-\")\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to create deployment\", \"failed to create deploy/%s in namespace %s for %s: %s\", deployName, ns, err, o)\n\n\t\t}\n\n\n\n\t\to, err = TestHelper.Kubectl(\"\", \"--namespace\", ns, \"wait\", \"--for=condition=available\", \"--timeout=120s\", \"deploy/\"+deployName)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, fmt.Sprintf(\"failed to wait for condition=available for deploy/%s in namespace %s\", deployName, ns),\n\n\t\t\t\t\"failed to wait for condition=available for deploy/%s in namespace %s: %s: %s\", deployName, ns, err, o)\n\n\t\t}\n\n\n\n\t\tpods, err := TestHelper.GetPodsForDeployment(ctx, ns, deployName)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, fmt.Sprintf(\"failed to get pods for namespace %s\", ns),\n\n\t\t\t\t\"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\n\n\t\tcontainers := pods[0].Spec.Containers\n\n\t\tproxyContainer := testutil.GetProxyContainer(containers)\n\n\n\n\t\t// Match the pod configuration with the namespace level overrides\n\n\t\tif proxyContainer.Resources.Requests[\"memory\"] != resource.MustParse(nsProxyMemReq) {\n\n\t\t\ttestutil.Fatalf(t, \"proxy memory resource request failed to match with namespace level override\")\n\n\t\t}\n\n\n\n\t\t// Match with proxy level override\n\n\t\tif proxyContainer.Resources.Requests[\"cpu\"] != resource.MustParse(podProxyCPUReq) {\n\n\t\t\ttestutil.Fatalf(t, \"proxy cpu resource request failed to match with pod level 
override\")\n\n\t\t}\n\n\t})\n\n}\n\n\n\nfunc TestInjectAutoAnnotationPermutations(t *testing.T) {\n\n\tinjectYAML, err := testutil.ReadFile(\"testdata/inject_test.yaml\")\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed to read inject test file\", \"failed to read inject test file: %s\", err)\n\n\t}\n\n\n\n\tinjectNS := \"inject-test\"\n\n\tdeployName := \"inject-test-terminus\"\n\n\tcontainerName := \"bb-terminus\"\n\n\tinjectAnnotations := []string{\"\", k8s.ProxyInjectDisabled, k8s.ProxyInjectEnabled}\n\n\n\n\t// deploy\n\n\tctx := context.Background()\n\n\tfor _, nsAnnotation := range injectAnnotations {\n\n\t\tnsAnnotation := nsAnnotation // pin\n\n\t\tnsPrefix := injectNS\n\n\t\tnsAnnotations := map[string]string{}\n\n\t\tif nsAnnotation != \"\" {\n\n\t\t\tnsAnnotations[k8s.ProxyInjectAnnotation] = nsAnnotation\n\n\t\t\tnsPrefix = fmt.Sprintf(\"%s-%s\", nsPrefix, nsAnnotation)\n\n\t\t}\n\n\n\n\t\tTestHelper.WithDataPlaneNamespace(ctx, nsPrefix, nsAnnotations, t, func(t *testing.T, ns string) {\n\n\t\t\tfor _, podAnnotation := range injectAnnotations {\n\n\t\t\t\t// patch injectYAML with unique name and pod annotations\n\n\t\t\t\tname := deployName\n\n\t\t\t\tpodAnnotations := map[string]string{}\n\n\t\t\t\tif podAnnotation != \"\" {\n\n\t\t\t\t\tpodAnnotations[k8s.ProxyInjectAnnotation] = podAnnotation\n\n\t\t\t\t\tname = fmt.Sprintf(\"%s-%s\", name, podAnnotation)\n\n\t\t\t\t}\n\n\n\n\t\t\t\tpatchedYAML, err := testutil.PatchDeploy(injectYAML, name, podAnnotations)\n\n\t\t\t\tif err != nil {\n\n\t\t\t\t\ttestutil.AnnotatedFatalf(t, fmt.Sprintf(\"failed to patch inject test YAML in namespace %s for deploy/%s\", ns, name),\n\n\t\t\t\t\t\t\"failed to patch inject test YAML in namespace %s for deploy/%s: %s\", ns, name, err)\n\n\t\t\t\t}\n\n\n\n\t\t\t\to, err := TestHelper.Kubectl(patchedYAML, \"--namespace\", ns, \"create\", \"-f\", \"-\")\n\n\t\t\t\tif err != nil {\n\n\t\t\t\t\ttestutil.AnnotatedFatalf(t, fmt.Sprintf(\"failed to create deploy/%s in namespace %s\", name, ns),\n\n\t\t\t\t\t\t\"failed to create deploy/%s in namespace %s for %s: %s\", name, ns, err, o)\n\n\t\t\t\t}\n\n\n\n\t\t\t\t// check for successful deploy\n\n\t\t\t\to, err = TestHelper.Kubectl(\"\", \"--namespace\", ns, \"wait\", \"--for=condition=available\", \"--timeout=120s\", \"deploy/\"+name)\n\n\t\t\t\tif err != nil {\n\n\t\t\t\t\ttestutil.AnnotatedFatalf(t, fmt.Sprintf(\"failed to wait for condition=available for deploy/%s in namespace %s\", name, ns),\n\n\t\t\t\t\t\t\"failed to wait for condition=available for deploy/%s in namespace %s: %s: %s\", name, ns, err, o)\n\n\t\t\t\t}\n\n\n\n\t\t\t\tpods, err := TestHelper.GetPodsForDeployment(ctx, ns, name)\n\n\t\t\t\tif err != nil {\n\n\t\t\t\t\ttestutil.AnnotatedFatalf(t, fmt.Sprintf(\"failed to get pods for namespace %s\", ns),\n\n\t\t\t\t\t\t\"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t\t\t}\n\n\n\n\t\t\t\tif len(pods) != 1 {\n\n\t\t\t\t\ttestutil.Fatalf(t, \"expected 1 pod for namespace %s, got %d\", ns, len(pods))\n\n\t\t\t\t}\n\n\n\n\t\t\t\tshouldBeInjected := false\n\n\t\t\t\tswitch nsAnnotation {\n\n\t\t\t\tcase \"\", k8s.ProxyInjectDisabled:\n\n\t\t\t\t\tswitch podAnnotation {\n\n\t\t\t\t\tcase k8s.ProxyInjectEnabled:\n\n\t\t\t\t\t\tshouldBeInjected = true\n\n\t\t\t\t\t}\n\n\t\t\t\tcase k8s.ProxyInjectEnabled:\n\n\t\t\t\t\tswitch podAnnotation {\n\n\t\t\t\t\tcase \"\", k8s.ProxyInjectEnabled:\n\n\t\t\t\t\t\tshouldBeInjected = true\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\n\n\t\t\t\tcontainers := 
pods[0].Spec.Containers\n\n\t\t\t\tinitContainers := pods[0].Spec.InitContainers\n\n\n\n\t\t\t\tif shouldBeInjected {\n\n\t\t\t\t\tif len(containers) != 2 {\n\n\t\t\t\t\t\ttestutil.Fatalf(t, \"expected 2 containers for pod %s/%s, got %d\", ns, pods[0].GetName(), len(containers))\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif containers[0].Name != containerName && containers[1].Name != containerName {\n\n\t\t\t\t\t\ttestutil.Fatalf(t, \"expected bb-terminus container in pod %s/%s, got %+v\", ns, pods[0].GetName(), containers[0])\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif containers[0].Name != k8s.ProxyContainerName && containers[1].Name != k8s.ProxyContainerName {\n\n\t\t\t\t\t\ttestutil.Fatalf(t, \"expected %s container in pod %s/%s, got %+v\", ns, pods[0].GetName(), k8s.ProxyContainerName, containers[0])\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif !TestHelper.CNI() && len(initContainers) != 1 {\n\n\t\t\t\t\t\ttestutil.Fatalf(t, \"expected 1 init container for pod %s/%s, got %d\", ns, pods[0].GetName(), len(initContainers))\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif !TestHelper.CNI() && initContainers[0].Name != k8s.InitContainerName {\n\n\t\t\t\t\t\ttestutil.Fatalf(t, \"expected %s init container in pod %s/%s, got %+v\", ns, pods[0].GetName(), k8s.InitContainerName, initContainers[0])\n\n\t\t\t\t\t}\n\n\t\t\t\t} else {\n\n\t\t\t\t\tif len(containers) != 1 {\n\n\t\t\t\t\t\ttestutil.Fatalf(t, \"expected 1 container for pod %s/%s, got %d\", ns, pods[0].GetName(), len(containers))\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif containers[0].Name != containerName {\n\n\t\t\t\t\t\ttestutil.Fatalf(t, \"expected bb-terminus container in pod %s/%s, got %s\", ns, pods[0].GetName(), containers[0].Name)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif len(initContainers) != 0 {\n\n\t\t\t\t\t\ttestutil.Fatalf(t, \"expected 0 init containers for pod %s/%s, got %d\", ns, pods[0].GetName(), len(initContainers))\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t})\n\n\n\n\t}\n\n}\n\n\n\nfunc TestInjectAutoPod(t *testing.T) {\n\n\tpodsYAML, err := testutil.ReadFile(\"testdata/pods.yaml\")\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed to read inject test file\",\n\n\t\t\t\"failed to read inject test file: %s\", err)\n\n\t}\n\n\n\n\tinjectNS := \"inject-pod-test\"\n\n\tpodName := \"inject-pod-test-terminus\"\n\n\topaquePodName := \"inject-opaque-pod-test-terminus\"\n\n\tnsAnnotations := map[string]string{\n\n\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\tk8s.ProxyOpaquePortsAnnotation: opaquePorts,\n\n\t}\n\n\n\n\ttruthy := true\n\n\tfalsy := false\n\n\treg := \"cr.l5d.io/linkerd\"\n\n\tif override := os.Getenv(flags.EnvOverrideDockerRegistry); override != \"\" {\n\n\t\treg = override\n\n\t}\n\n\texpectedInitContainer := v1.Container{\n\n\t\tName: k8s.InitContainerName,\n\n\t\tImage: reg + \"/proxy-init:\" + version.ProxyInitVersion,\n\n\t\tArgs: []string{\n\n\t\t\t\"--incoming-proxy-port\", \"4143\",\n\n\t\t\t\"--outgoing-proxy-port\", \"4140\",\n\n\t\t\t\"--proxy-uid\", \"2102\",\n\n\t\t\t// 1234,5678 were added at install time in `install_test.go`'s helmOverridesEdge()\n\n\t\t\t\"--inbound-ports-to-ignore\", \"4190,4191,1234,5678\",\n\n\t\t\t\"--outbound-ports-to-ignore\", \"4567,4568\",\n\n\t\t},\n\n\t\tResources: v1.ResourceRequirements{\n\n\t\t\tLimits: v1.ResourceList{\n\n\t\t\t\tv1.ResourceName(\"cpu\"): resource.MustParse(\"100m\"),\n\n\t\t\t\tv1.ResourceName(\"memory\"): resource.MustParse(\"50Mi\"),\n\n\t\t\t},\n\n\t\t\tRequests: v1.ResourceList{\n\n\t\t\t\tv1.ResourceName(\"cpu\"): resource.MustParse(\"10m\"),\n\n\t\t\t\tv1.ResourceName(\"memory\"): 
resource.MustParse(\"10Mi\"),\n\n\t\t\t},\n\n\t\t},\n\n\t\tVolumeMounts: []v1.VolumeMount{\n\n\t\t\t{\n\n\t\t\t\tName: \"linkerd-proxy-init-xtables-lock\",\n\n\t\t\t\tReadOnly: false,\n\n\t\t\t\tMountPath: \"/run\",\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tReadOnly: true,\n\n\t\t\t\tMountPath: \"/var/run/secrets/kubernetes.io/serviceaccount\",\n\n\t\t\t},\n\n\t\t},\n\n\t\tTerminationMessagePath: \"/dev/termination-log\",\n\n\t\tImagePullPolicy: \"IfNotPresent\",\n\n\t\tSecurityContext: &v1.SecurityContext{\n\n\t\t\tCapabilities: &v1.Capabilities{\n\n\t\t\t\tAdd: []v1.Capability{v1.Capability(\"NET_ADMIN\"), v1.Capability(\"NET_RAW\")},\n\n\t\t\t},\n\n\t\t\tPrivileged: &falsy,\n\n\t\t\tRunAsNonRoot: &truthy,\n\n\t\t\tAllowPrivilegeEscalation: &falsy,\n\n\t\t\tReadOnlyRootFilesystem: &truthy,\n\n\t\t},\n\n\t\tTerminationMessagePolicy: v1.TerminationMessagePolicy(\"FallbackToLogsOnError\"),\n\n\t}\n\n\n\n\tctx := context.Background()\n\n\n\n\tTestHelper.WithDataPlaneNamespace(ctx, injectNS, nsAnnotations, t, func(t *testing.T, ns string) {\n\n\t\to, err := TestHelper.Kubectl(podsYAML, \"--namespace\", ns, \"create\", \"-f\", \"-\")\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to create pods\",\n\n\t\t\t\t\"failed to create pods in namespace %s for %s: %s\", ns, err, o)\n\n\t\t}\n\n\n\n\t\to, err = TestHelper.Kubectl(\"\", \"--namespace\", ns, \"wait\", \"--for=condition=initialized\", \"--timeout=120s\", \"pod/\"+podName)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to wait for condition=initialized\",\n\n\t\t\t\t\"failed to wait for condition=initialized for pod/%s in namespace %s: %s: %s\", podName, ns, err, o)\n\n\t\t}\n\n\n\n\t\t// Check that pods with no annotation inherit from the namespace.\n\n\t\tpods, err := TestHelper.GetPods(ctx, ns, map[string]string{\"app\": podName})\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get pods\", \"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tif len(pods) != 1 {\n\n\t\t\ttestutil.Fatalf(t, \"wrong number of pods returned for namespace %s: %d\", ns, len(pods))\n\n\t\t}\n\n\t\tannotation, ok := pods[0].Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif !ok {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s did not inherit opaque ports annotation\", ns)\n\n\t\t}\n\n\t\tif annotation != opaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod in namespace %s to have %s opaque ports, but it had %s\", ns, opaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\t// Check that pods with an annotation do not inherit from the\n\n\t\t// namespace.\n\n\t\topaquePods, err := TestHelper.GetPods(ctx, ns, map[string]string{\"app\": opaquePodName})\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get pods\", \"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tif len(opaquePods) != 1 {\n\n\t\t\ttestutil.Fatalf(t, \"wrong number of pods returned for namespace %s: %d\", ns, len(opaquePods))\n\n\t\t}\n\n\t\tannotation = opaquePods[0].Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif annotation != manualOpaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod in namespace %s to have %s opaque ports, but it had %s\", ns, manualOpaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\tcontainers := pods[0].Spec.Containers\n\n\t\tif proxyContainer := testutil.GetProxyContainer(containers); proxyContainer == nil {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s wasn't injected with the proxy container\", ns)\n\n\t\t}\n\n\n\n\t\tif !TestHelper.CNI() 
{\n\n\t\t\tinitContainers := pods[0].Spec.InitContainers\n\n\t\t\tif len(initContainers) == 0 {\n\n\t\t\t\ttestutil.Fatalf(t, \"pod in namespace %s wasn't injected with the init container\", ns)\n\n\t\t\t}\n\n\t\t\tinitContainer := initContainers[0]\n\n\t\t\tif mounts := initContainer.VolumeMounts; len(mounts) == 0 {\n\n\t\t\t\ttestutil.AnnotatedFatalf(t, \"init container doesn't have volume mounts\", \"init container doesn't have volume mounts: %#v\", initContainer)\n\n\t\t\t}\n\n\t\t\t// Removed token volume name from comparison because it contains a random string\n\n\t\t\tinitContainer.VolumeMounts[1].Name = \"\"\n\n\t\t\tif diff := deep.Equal(expectedInitContainer, initContainer); diff != nil {\n\n\t\t\t\ttestutil.AnnotatedFatalf(t, \"malformed init container\", \"malformed init container:\\n%v\", diff)\n\n\t\t\t}\n\n\t\t}\n\n\t})\n\n}\n\n\n\nfunc TestInjectDisabledAutoPod(t *testing.T) {\n\n\tpodsYAML, err := testutil.ReadFile(\"testdata/pods.yaml\")\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed to read inject test file\",\n\n\t\t\t\"failed to read inject test file: %s\", err)\n\n\t}\n\n\n\n\tns := \"inject-disabled-pod-test\"\n\n\tpodName := \"inject-pod-test-terminus\"\n\n\topaquePodName := \"inject-opaque-pod-test-terminus\"\n\n\tnsAnnotations := map[string]string{\n\n\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\tk8s.ProxyOpaquePortsAnnotation: opaquePorts,\n\n\t}\n\n\tctx := context.Background()\n\n\tTestHelper.WithDataPlaneNamespace(ctx, ns, nsAnnotations, t, func(t *testing.T, ns string) {\n\n\t\to, err := TestHelper.Kubectl(podsYAML, \"--namespace\", ns, \"create\", \"-f\", \"-\")\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to create pods\",\n\n\t\t\t\t\"failed to create pods in namespace %s for %s: %s\", ns, err, o)\n\n\t\t}\n\n\n\n\t\to, err = TestHelper.Kubectl(\"\", \"--namespace\", ns, \"wait\", \"--for=condition=initialized\", \"--timeout=120s\", \"pod/\"+podName)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to wait for condition=initialized\",\n\n\t\t\t\t\"failed to wait for condition=initialized for pod/%s in namespace %s: %s: %s\", podName, ns, err, o)\n\n\t\t}\n\n\n\n\t\t// Check that pods with no annotation inherit from the namespace.\n\n\t\tpods, err := TestHelper.GetPods(ctx, ns, map[string]string{\"app\": podName})\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get pods\", \"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tif len(pods) != 1 {\n\n\t\t\ttestutil.Fatalf(t, \"wrong number of pods returned for namespace %s: %d\", ns, len(pods))\n\n\t\t}\n\n\t\tannotation, ok := pods[0].Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif !ok {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s did not inherit opaque ports annotation\", ns)\n\n\t\t}\n\n\t\tif annotation != opaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod in namespace %s to have %s opaque ports, but it had %s\", ns, opaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\t// Check that pods with an annotation do not inherit from the\n\n\t\t// namespace.\n\n\t\topaquePods, err := TestHelper.GetPods(ctx, ns, map[string]string{\"app\": opaquePodName})\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get pods\", \"failed to get pods for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tif len(opaquePods) != 1 {\n\n\t\t\ttestutil.Fatalf(t, \"wrong number of pods returned for namespace %s: %d\", ns, len(opaquePods))\n\n\t\t}\n\n\t\tannotation = 
opaquePods[0].Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif annotation != manualOpaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod in namespace %s to have %s opaque ports, but it had %s\", ns, manualOpaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\tcontainers := pods[0].Spec.Containers\n\n\t\tif proxyContainer := testutil.GetProxyContainer(containers); proxyContainer != nil {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s should not have been injected\", ns)\n\n\t\t}\n\n\t})\n\n}\n\n\n\nfunc TestInjectService(t *testing.T) {\n\n\tservicesYAML, err := testutil.ReadFile(\"testdata/services.yaml\")\n\n\tif err != nil {\n\n\t\ttestutil.AnnotatedFatalf(t, \"failed to read inject test file\",\n\n\t\t\t\"failed to read inject test file: %s\", err)\n\n\t}\n\n\n\n\tns := \"inject-service-test\"\n\n\tserviceName := \"service-test\"\n\n\topaqueServiceName := \"opaque-service-test\"\n\n\tnsAnnotations := map[string]string{\n\n\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\tk8s.ProxyOpaquePortsAnnotation: opaquePorts,\n\n\t}\n\n\tctx := context.Background()\n\n\tTestHelper.WithDataPlaneNamespace(ctx, ns, nsAnnotations, t, func(t *testing.T, ns string) {\n\n\t\to, err := TestHelper.Kubectl(servicesYAML, \"--namespace\", ns, \"create\", \"-f\", \"-\")\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to create services\",\n\n\t\t\t\t\"failed to create services in namespace %s for %s: %s\", ns, err, o)\n\n\t\t}\n\n\n\n\t\t// Check that the service with no annotation inherits from the namespace.\n\n\t\tservice, err := TestHelper.GetService(ctx, ns, serviceName)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get service\", \"failed to get service for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tannotation, ok := service.Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif !ok {\n\n\t\t\ttestutil.Fatalf(t, \"pod in namespace %s did not inherit opaque ports annotation\", ns)\n\n\t\t}\n\n\t\tif annotation != opaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected pod in namespace %s to have %s opaque ports, but it had %s\", ns, opaquePorts, annotation)\n\n\t\t}\n\n\n\n\t\t// Check that the service with no annotation did not inherit from the namespace.\n\n\t\tservice, err = TestHelper.GetService(ctx, ns, opaqueServiceName)\n\n\t\tif err != nil {\n\n\t\t\ttestutil.AnnotatedFatalf(t, \"failed to get service\", \"failed to get service for namespace %s: %s\", ns, err)\n\n\t\t}\n\n\t\tannotation = service.Annotations[k8s.ProxyOpaquePortsAnnotation]\n\n\t\tif annotation != manualOpaquePorts {\n\n\t\t\ttestutil.Fatalf(t, \"expected service in namespace %s to have %s opaque ports, but it had %s\", ns, manualOpaquePorts, annotation)\n\n\t\t}\n\n\t})\n\n}\n", "file_path": "test/integration/install/inject/inject_test.go", "rank": 82, "score": 115748.10780073502 }, { "content": "package client\n\n\n\nimport (\n\n\t\"context\"\n\n\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\tpb \"github.com/linkerd/linkerd2/viz/metrics-api/gen/viz\"\n\n\t\"go.opencensus.io/plugin/ocgrpc\"\n\n\t\"google.golang.org/grpc\"\n\n\t\"google.golang.org/grpc/credentials/insecure\"\n\n)\n\n\n\nconst (\n\n\tapiPort = 8085\n\n\tapiDeployment = \"metrics-api\"\n\n)\n\n\n\n// NewInternalClient creates a new Viz API client intended to run inside a\n\n// Kubernetes cluster.\n\nfunc NewInternalClient(addr string) (pb.ApiClient, error) {\n\n\tconn, err := grpc.Dial(addr, grpc.WithTransportCredentials(insecure.NewCredentials()), grpc.WithStatsHandler(&ocgrpc.ClientHandler{}))\n\n\tif err 
!= nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\treturn pb.NewApiClient(conn), nil\n\n}\n\n\n\n// NewExternalClient creates a new Viz API client intended to run from\n\n// outside a Kubernetes cluster.\n\nfunc NewExternalClient(ctx context.Context, namespace string, kubeAPI *k8s.KubernetesAPI) (pb.ApiClient, error) {\n\n\tportforward, err := k8s.NewPortForward(\n\n\t\tctx,\n\n\t\tkubeAPI,\n\n\t\tnamespace,\n\n\t\tapiDeployment,\n\n\t\t\"localhost\",\n\n\t\t0,\n\n\t\tapiPort,\n\n\t\tfalse,\n\n\t)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\taddr := portforward.AddressAndPort()\n\n\tif err = portforward.Init(); err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\tconn, err := grpc.Dial(addr, grpc.WithTransportCredentials(insecure.NewCredentials()), grpc.WithStatsHandler(&ocgrpc.ClientHandler{}))\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\n\n\treturn pb.NewApiClient(conn), nil\n\n}\n", "file_path": "viz/metrics-api/client/client.go", "rank": 83, "score": 115727.77600042056 }, { "content": "func (c *PolicyV1alpha1Client) NetworkAuthentications(namespace string) NetworkAuthenticationInterface {\n\n\treturn newNetworkAuthentications(c, namespace)\n", "file_path": "controller/gen/client/clientset/versioned/typed/policy/v1alpha1/policy_client.go", "rank": 84, "score": 114750.04219505236 }, { "content": "\tUninjected struct {\n\n\t\t// Proxy is true if a proxy container has been uninjected\n\n\t\tProxy bool\n\n\n\n\t\t// ProxyInit is true if a proxy-init container has been uninjected\n\n\t\tProxyInit bool\n", "file_path": "pkg/inject/report.go", "rank": 85, "score": 113632.2554224494 }, { "content": "package srv\n\n\n\nimport (\n\n\t\"bytes\"\n\n\t\"fmt\"\n\n\t\"net/http\"\n\n\t\"regexp\"\n\n\n\n\t\"github.com/julienschmidt/httprouter\"\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\tprofiles \"github.com/linkerd/linkerd2/pkg/profiles\"\n\n\tvizPb \"github.com/linkerd/linkerd2/viz/metrics-api/gen/viz\"\n\n\t\"github.com/patrickmn/go-cache\"\n\n\tlog \"github.com/sirupsen/logrus\"\n\n)\n\n\n\nvar proxyPathRegexp = regexp.MustCompile(\"/api/v1/namespaces/.*/proxy/\")\n\n\n\ntype (\n\n\trenderTemplate func(http.ResponseWriter, string, string, interface{}) error\n\n\n\n\thandler struct {\n\n\t\trender renderTemplate\n\n\t\tapiClient vizPb.ApiClient\n\n\t\tk8sAPI *k8s.KubernetesAPI\n\n\t\tuuid string\n\n\t\tversion string\n\n\t\tcontrollerNamespace string\n\n\t\tclusterDomain string\n\n\t\tgrafana string\n\n\t\tgrafanaExternalURL string\n\n\t\tgrafanaPrefix string\n\n\t\tjaeger string\n\n\t\tgrafanaProxy *reverseProxy\n\n\t\tjaegerProxy *reverseProxy\n\n\t\thc healthChecker\n\n\t\tstatCache *cache.Cache\n\n\t}\n\n)\n\n\n\nfunc (h *handler) handleIndex(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\t// when running the dashboard via `linkerd dashboard`, serve the index bundle at the right path\n\n\tpathPfx := proxyPathRegexp.FindString(req.URL.Path)\n\n\tif pathPfx == \"\" {\n\n\t\tpathPfx = \"/\"\n\n\t}\n\n\n\n\tparams := appParams{\n\n\t\tUUID: h.uuid,\n\n\t\tReleaseVersion: h.version,\n\n\t\tControllerNamespace: h.controllerNamespace,\n\n\t\tPathPrefix: pathPfx,\n\n\t\tGrafana: h.grafana,\n\n\t\tGrafanaExternalURL: h.grafanaExternalURL,\n\n\t\tGrafanaPrefix: h.grafanaPrefix,\n\n\t\tJaeger: h.jaeger,\n\n\t}\n\n\n\n\terr := h.render(w, \"app.tmpl.html\", \"base\", params)\n\n\tif err != nil {\n\n\t\tlog.Error(err)\n\n\t}\n\n}\n\n\n\nfunc (h *handler) handleProfileDownload(w http.ResponseWriter, req *http.Request, params httprouter.Params) {\n\n\tservice 
:= req.FormValue(\"service\")\n\n\tnamespace := req.FormValue(\"namespace\")\n\n\n\n\tif service == \"\" || namespace == \"\" {\n\n\t\terr := fmt.Errorf(\"Service and namespace must be provided to create a new profile\")\n\n\t\tlog.Error(err)\n\n\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\n\t\treturn\n\n\t}\n\n\n\n\tprofileYaml := &bytes.Buffer{}\n\n\terr := profiles.RenderProfileTemplate(namespace, service, h.clusterDomain, profileYaml)\n\n\n\n\tif err != nil {\n\n\t\tlog.Error(err)\n\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\tdispositionHeaderVal := fmt.Sprintf(\"attachment; filename=%s-profile.yml\", service)\n\n\n\n\tw.Header().Set(\"Content-Type\", \"text/yaml\")\n\n\tw.Header().Set(\"Content-Disposition\", dispositionHeaderVal)\n\n\n\n\tw.Write(profileYaml.Bytes())\n\n}\n\n\n\nfunc (h *handler) handleGrafana(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\th.grafanaProxy.ServeHTTP(w, req)\n\n}\n\n\n\nfunc (h *handler) handleJaeger(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\th.jaegerProxy.ServeHTTP(w, req)\n\n}\n", "file_path": "web/srv/handlers.go", "rank": 86, "score": 112191.93887291638 }, { "content": "package srv\n\n\n\nimport (\n\n\t\"fmt\"\n\n\t\"html\"\n\n\t\"html/template\"\n\n\t\"net/http\"\n\n\t\"path\"\n\n\t\"path/filepath\"\n\n\t\"regexp\"\n\n\t\"time\"\n\n\n\n\t\"github.com/julienschmidt/httprouter\"\n\n\t\"github.com/linkerd/linkerd2/pkg/filesonly\"\n\n\t\"github.com/linkerd/linkerd2/pkg/healthcheck\"\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\t\"github.com/linkerd/linkerd2/pkg/prometheus\"\n\n\tvizPb \"github.com/linkerd/linkerd2/viz/metrics-api/gen/viz\"\n\n\t\"github.com/patrickmn/go-cache\"\n\n\tlog \"github.com/sirupsen/logrus\"\n\n)\n\n\n\nconst (\n\n\ttimeout = 10 * time.Second\n\n\n\n\t// statExpiration indicates when items in the stat cache expire.\n\n\tstatExpiration = 1500 * time.Millisecond\n\n\n\n\t// statCleanupInterval indicates how often expired items in the stat cache\n\n\t// are cleaned up.\n\n\tstatCleanupInterval = 5 * time.Minute\n\n)\n\n\n\ntype (\n\n\t// Server encapsulates the Linkerd control plane's web dashboard server.\n\n\tServer struct {\n\n\t\ttemplateDir string\n\n\t\treload bool\n\n\t\ttemplates map[string]*template.Template\n\n\t\trouter *httprouter.Router\n\n\t\treHost *regexp.Regexp\n\n\t}\n\n\n\n\ttemplatePayload struct {\n\n\t\tContents interface{}\n\n\t}\n\n\tappParams struct {\n\n\t\tUUID string\n\n\t\tReleaseVersion string\n\n\t\tControllerNamespace string\n\n\t\tError bool\n\n\t\tErrorMessage string\n\n\t\tPathPrefix string\n\n\t\tJaeger string\n\n\t\tGrafana string\n\n\t\tGrafanaExternalURL string\n\n\t\tGrafanaPrefix string\n\n\t}\n\n\n\n\thealthChecker interface {\n\n\t\tRunChecks(observer healthcheck.CheckObserver) (bool, bool)\n\n\t}\n\n)\n\n\n\n// this is called by the HTTP server to actually respond to a request\n\nfunc (s *Server) ServeHTTP(w http.ResponseWriter, req *http.Request) {\n\n\tif !s.reHost.MatchString(req.Host) {\n\n\t\terr := fmt.Sprintf(`It appears that you are trying to reach this service with a host of '%s'.\n\nThis does not match /%s/ and has been denied for security reasons.\n\nPlease see https://linkerd.io/dns-rebinding for an explanation of what is happening and how to fix it.`,\n\n\t\t\thtml.EscapeString(req.Host),\n\n\t\t\thtml.EscapeString(s.reHost.String()))\n\n\t\thttp.Error(w, err, http.StatusBadRequest)\n\n\t\treturn\n\n\t}\n\n\tw.Header().Set(\"X-Content-Type-Options\", 
\"nosniff\")\n\n\tw.Header().Set(\"X-Frame-Options\", \"SAMEORIGIN\")\n\n\tw.Header().Set(\"X-XSS-Protection\", \"1; mode=block\")\n\n\ts.router.ServeHTTP(w, req)\n\n}\n\n\n\n// NewServer returns an initialized `http.Server`, configured to listen on an\n\n// address, render templates, and serve static assets, for a given Linkerd\n\n// control plane.\n\nfunc NewServer(\n\n\taddr string,\n\n\tgrafanaAddr string,\n\n\tgrafanaExternalAddr string,\n\n\tgrafanaPrefix string,\n\n\tjaegerAddr string,\n\n\ttemplateDir string,\n\n\tstaticDir string,\n\n\tuuid string,\n\n\tversion string,\n\n\tcontrollerNamespace string,\n\n\tclusterDomain string,\n\n\treload bool,\n\n\treHost *regexp.Regexp,\n\n\tapiClient vizPb.ApiClient,\n\n\tk8sAPI *k8s.KubernetesAPI,\n\n\thc healthChecker,\n\n) *http.Server {\n\n\tserver := &Server{\n\n\t\ttemplateDir: templateDir,\n\n\t\treload: reload,\n\n\t\treHost: reHost,\n\n\t}\n\n\n\n\tserver.router = &httprouter.Router{\n\n\t\tRedirectTrailingSlash: true,\n\n\t\tRedirectFixedPath: true,\n\n\t\tHandleMethodNotAllowed: false, // disable 405s\n\n\t}\n\n\n\n\twrappedServer := prometheus.WithTelemetry(server)\n\n\thandler := &handler{\n\n\t\tapiClient: apiClient,\n\n\t\tk8sAPI: k8sAPI,\n\n\t\trender: server.RenderTemplate,\n\n\t\tuuid: uuid,\n\n\t\tversion: version,\n\n\t\tcontrollerNamespace: controllerNamespace,\n\n\t\tclusterDomain: clusterDomain,\n\n\t\tjaegerProxy: newReverseProxy(jaegerAddr, \"\"),\n\n\t\tgrafana: grafanaAddr,\n\n\t\tgrafanaExternalURL: grafanaExternalAddr,\n\n\t\tgrafanaPrefix: grafanaPrefix,\n\n\t\tjaeger: jaegerAddr,\n\n\t\thc: hc,\n\n\t\tstatCache: cache.New(statExpiration, statCleanupInterval),\n\n\t}\n\n\n\n\t// Only create the grafana reverse proxy if we aren't using external grafana\n\n\tif grafanaExternalAddr == \"\" {\n\n\t\thandler.grafanaProxy = newReverseProxy(grafanaAddr, \"/grafana\")\n\n\t}\n\n\n\n\thttpServer := &http.Server{\n\n\t\tAddr: addr,\n\n\t\tReadTimeout: timeout,\n\n\t\tWriteTimeout: timeout,\n\n\t\tHandler: wrappedServer,\n\n\t}\n\n\n\n\t// webapp routes\n\n\tserver.router.GET(\"/\", handler.handleIndex)\n\n\tserver.router.GET(\"/controlplane\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces\", handler.handleIndex)\n\n\tserver.router.GET(\"/gateways\", handler.handleIndex)\n\n\n\n\t// paths for a list of resources by namespace\n\n\tserver.router.GET(\"/namespaces/:namespace/daemonsets\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/statefulsets\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/trafficsplits\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/jobs\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/deployments\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/services\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/replicationcontrollers\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/pods\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/cronjobs\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/replicasets\", handler.handleIndex)\n\n\n\n\t// legacy paths that are deprecated but should not 404\n\n\tserver.router.GET(\"/overview\", handler.handleIndex)\n\n\tserver.router.GET(\"/daemonsets\", handler.handleIndex)\n\n\tserver.router.GET(\"/statefulsets\", handler.handleIndex)\n\n\tserver.router.GET(\"/trafficsplits\", handler.handleIndex)\n\n\tserver.router.GET(\"/jobs\", 
handler.handleIndex)\n\n\tserver.router.GET(\"/deployments\", handler.handleIndex)\n\n\tserver.router.GET(\"/services\", handler.handleIndex)\n\n\tserver.router.GET(\"/replicationcontrollers\", handler.handleIndex)\n\n\tserver.router.GET(\"/pods\", handler.handleIndex)\n\n\n\n\t// paths for individual resource view\n\n\tserver.router.GET(\"/namespaces/:namespace\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/pods/:pod\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/daemonsets/:daemonset\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/statefulsets/:statefulset\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/trafficsplits/:trafficsplit\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/deployments/:deployment\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/services/:deployment\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/jobs/:job\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/replicationcontrollers/:replicationcontroller\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/cronjobs/:cronjob\", handler.handleIndex)\n\n\tserver.router.GET(\"/namespaces/:namespace/replicasets/:replicaset\", handler.handleIndex)\n\n\n\n\t// tools and community paths\n\n\tserver.router.GET(\"/tap\", handler.handleIndex)\n\n\tserver.router.GET(\"/top\", handler.handleIndex)\n\n\tserver.router.GET(\"/community\", handler.handleIndex)\n\n\tserver.router.GET(\"/routes\", handler.handleIndex)\n\n\tserver.router.GET(\"/extensions\", handler.handleIndex)\n\n\tserver.router.GET(\"/profiles/new\", handler.handleProfileDownload)\n\n\n\n\t// add catch-all parameter to match all files in dir\n\n\tserver.router.GET(\"/dist/*filepath\", mkStaticHandler(staticDir))\n\n\n\n\t// webapp api routes\n\n\tserver.router.GET(\"/api/version\", handler.handleAPIVersion)\n\n\t// Traffic Performance Summary. 
This route used to be called /api/stat\n\n\t// but was renamed to avoid triggering ad blockers.\n\n\t// See: https://github.com/linkerd/linkerd2/issues/970\n\n\tserver.router.GET(\"/api/tps-reports\", handler.handleAPIStat)\n\n\tserver.router.GET(\"/api/pods\", handler.handleAPIPods)\n\n\tserver.router.GET(\"/api/services\", handler.handleAPIServices)\n\n\tserver.router.GET(\"/api/tap\", handler.handleAPITap)\n\n\tserver.router.GET(\"/api/routes\", handler.handleAPITopRoutes)\n\n\tserver.router.GET(\"/api/edges\", handler.handleAPIEdges)\n\n\tserver.router.GET(\"/api/check\", handler.handleAPICheck)\n\n\tserver.router.GET(\"/api/resource-definition\", handler.handleAPIResourceDefinition)\n\n\tserver.router.GET(\"/api/gateways\", handler.handleAPIGateways)\n\n\tserver.router.GET(\"/api/extensions\", handler.handleGetExtensions)\n\n\n\n\t// grafana proxy, only used if external grafana is not in use\n\n\tif grafanaExternalAddr == \"\" {\n\n\t\tserver.handleAllOperationsForPath(\"/grafana/*grafanapath\", handler.handleGrafana)\n\n\t}\n\n\n\n\t// jaeger proxy\n\n\tserver.handleAllOperationsForPath(\"/jaeger/*jaegerpath\", handler.handleJaeger)\n\n\n\n\treturn httpServer\n\n}\n\n\n\n// RenderTemplate writes a rendered template into a buffer, given an HTTP\n\n// request and template information.\n\nfunc (s *Server) RenderTemplate(w http.ResponseWriter, templateFile, templateName string, args interface{}) error {\n\n\tlog.Debugf(\"emitting template %s\", templateFile)\n\n\ttemplate, err := s.loadTemplate(templateFile)\n\n\n\n\tif err != nil {\n\n\t\tlog.Error(err.Error())\n\n\t\thttp.Error(w, \"internal server error\", http.StatusInternalServerError)\n\n\t\treturn nil\n\n\t}\n\n\n\n\tw.Header().Set(\"Content-Type\", \"text/html\")\n\n\tif templateName == \"\" {\n\n\t\treturn template.Execute(w, args)\n\n\t}\n\n\n\n\treturn template.ExecuteTemplate(w, templateName, templatePayload{Contents: args})\n\n}\n\n\n\nfunc (s *Server) loadTemplate(templateFile string) (template *template.Template, err error) {\n\n\t// load template from disk if necessary\n\n\ttemplate = s.templates[templateFile]\n\n\n\n\tif template == nil || s.reload {\n\n\t\ttemplatePath := safelyJoinPath(s.templateDir, templateFile)\n\n\t\tincludes, err := filepath.Glob(filepath.Join(s.templateDir, \"includes\", \"*.tmpl.html\"))\n\n\t\tif err != nil {\n\n\t\t\treturn nil, err\n\n\t\t}\n\n\t\t// for cases where you're not calling a named template, the passed-in path needs to be first\n\n\t\ttemplateFiles := append([]string{templatePath}, includes...)\n\n\t\tlog.Debugf(\"loading templates from %v\", templateFiles)\n\n\t\ttemplate, err = template.ParseFiles(templateFiles...)\n\n\t\tif err == nil && !s.reload {\n\n\t\t\ts.templates[templateFile] = template\n\n\t\t}\n\n\t}\n\n\treturn template, err\n\n}\n\n\n\nfunc (s *Server) handleAllOperationsForPath(path string, handle httprouter.Handle) {\n\n\ts.router.DELETE(path, handle)\n\n\ts.router.GET(path, handle)\n\n\ts.router.HEAD(path, handle)\n\n\ts.router.OPTIONS(path, handle)\n\n\ts.router.PATCH(path, handle)\n\n\ts.router.POST(path, handle)\n\n\ts.router.PUT(path, handle)\n\n}\n\n\n\nfunc safelyJoinPath(rootPath, userPath string) string {\n\n\treturn filepath.Join(rootPath, path.Clean(\"/\"+userPath))\n\n}\n\n\n\nfunc mkStaticHandler(staticDir string) httprouter.Handle {\n\n\tfileServer := http.FileServer(filesonly.FileSystem(staticDir))\n\n\n\n\treturn func(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\t\tfilepath := p.ByName(\"filepath\")\n\n\t\tif filepath == 
\"/index_bundle.js\" {\n\n\t\t\t// don't cache the bundle because it references a hashed js file\n\n\t\t\tw.Header().Set(\"Cache-Control\", \"no-store, must-revalidate\")\n\n\t\t}\n\n\n\n\t\treq.URL.Path = filepath\n\n\t\tfileServer.ServeHTTP(w, req)\n\n\t}\n\n}\n", "file_path": "web/srv/server.go", "rank": 87, "score": 112191.93887291638 }, { "content": "func (s *networkAuthenticationLister) NetworkAuthentications(namespace string) NetworkAuthenticationNamespaceLister {\n\n\treturn networkAuthenticationNamespaceLister{indexer: s.indexer, namespace: namespace}\n", "file_path": "controller/gen/client/listers/policy/v1alpha1/networkauthentication.go", "rank": 88, "score": 112163.76388010493 }, { "content": "package inject\n\n\n\nimport (\n\n\t\"errors\"\n\n\t\"fmt\"\n\n\t\"strings\"\n\n\n\n\t\"github.com/linkerd/linkerd2/pkg/healthcheck\"\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\tv1 \"k8s.io/api/core/v1\"\n\n)\n\n\n\nconst (\n\n\thostNetworkEnabled = \"host_network_enabled\"\n\n\tsidecarExists = \"sidecar_already_exists\"\n\n\tunsupportedResource = \"unsupported_resource\"\n\n\tinjectEnableAnnotationAbsent = \"injection_enable_annotation_absent\"\n\n\tinjectDisableAnnotationPresent = \"injection_disable_annotation_present\"\n\n\tannotationAtNamespace = \"namespace\"\n\n\tannotationAtWorkload = \"workload\"\n\n\tinvalidInjectAnnotationWorkload = \"invalid_inject_annotation_at_workload\"\n\n\tinvalidInjectAnnotationNamespace = \"invalid_inject_annotation_at_ns\"\n\n\tdisabledAutomountServiceAccountToken = \"disabled_automount_service_account_token_account\"\n\n\tudpPortsEnabled = \"udp_ports_enabled\"\n\n)\n\n\n\nvar (\n\n\t// Reasons is a map of inject skip reasons with human readable sentences\n\n\tReasons = map[string]string{\n\n\t\thostNetworkEnabled: \"hostNetwork is enabled\",\n\n\t\tsidecarExists: \"pod has a sidecar injected already\",\n\n\t\tunsupportedResource: \"this resource kind is unsupported\",\n\n\t\tinjectEnableAnnotationAbsent: fmt.Sprintf(\"neither the namespace nor the pod have the annotation \\\"%s:%s\\\"\", k8s.ProxyInjectAnnotation, k8s.ProxyInjectEnabled),\n\n\t\tinjectDisableAnnotationPresent: fmt.Sprintf(\"pod has the annotation \\\"%s:%s\\\"\", k8s.ProxyInjectAnnotation, k8s.ProxyInjectDisabled),\n\n\t\tinvalidInjectAnnotationWorkload: fmt.Sprintf(\"invalid value for annotation \\\"%s\\\" at workload\", k8s.ProxyInjectAnnotation),\n\n\t\tinvalidInjectAnnotationNamespace: fmt.Sprintf(\"invalid value for annotation \\\"%s\\\" at namespace\", k8s.ProxyInjectAnnotation),\n\n\t\tdisabledAutomountServiceAccountToken: \"automountServiceAccountToken set to \\\"false\\\", with Values.identity.serviceAccountTokenProjection set to \\\"false\\\"\",\n\n\t\tudpPortsEnabled: \"UDP port(s) configured on pod spec\",\n\n\t}\n\n)\n\n\n\n// Report contains the Kind and Name for a given workload along with booleans\n\n// describing the result of the injection transformation\n\ntype Report struct {\n\n\tKind string\n\n\tName string\n\n\tHostNetwork bool\n\n\tSidecar bool\n\n\tUDP bool // true if any port in any container has `protocol: UDP`\n\n\tUnsupportedResource bool\n\n\tInjectDisabled bool\n\n\tInjectDisabledReason string\n\n\tInjectAnnotationAt string\n\n\tAnnotatable bool\n\n\tAnnotated bool\n\n\tAutomountServiceAccountToken bool\n\n\n\n\t// Uninjected consists of two boolean flags to indicate if a proxy and\n\n\t// proxy-init containers have been uninjected in this report\n\n\tUninjected struct {\n\n\t\t// Proxy is true if a proxy container has been uninjected\n\n\t\tProxy 
bool\n\n\n\n\t\t// ProxyInit is true if a proxy-init container has been uninjected\n\n\t\tProxyInit bool\n\n\t}\n\n}\n\n\n\n// newReport returns a new Report struct, initialized with the Kind and Name\n\n// from conf\n\nfunc newReport(conf *ResourceConfig) *Report {\n\n\tvar name string\n\n\tif conf.IsPod() {\n\n\t\tname = conf.pod.meta.Name\n\n\t\tif name == \"\" {\n\n\t\t\tname = conf.pod.meta.GenerateName\n\n\t\t}\n\n\t} else if m := conf.workload.Meta; m != nil {\n\n\t\tname = m.Name\n\n\t}\n\n\n\n\treport := &Report{\n\n\t\tKind: strings.ToLower(conf.workload.metaType.Kind),\n\n\t\tName: name,\n\n\t\tAutomountServiceAccountToken: true,\n\n\t}\n\n\n\n\tif conf.HasPodTemplate() {\n\n\t\treport.InjectDisabled, report.InjectDisabledReason, report.InjectAnnotationAt = report.disabledByAnnotation(conf)\n\n\t\treport.HostNetwork = conf.pod.spec.HostNetwork\n\n\t\treport.Sidecar = healthcheck.HasExistingSidecars(conf.pod.spec)\n\n\t\treport.UDP = checkUDPPorts(conf.pod.spec)\n\n\t\tif conf.pod.spec.AutomountServiceAccountToken != nil &&\n\n\t\t\t(conf.values != nil && !conf.values.Identity.ServiceAccountTokenProjection) {\n\n\t\t\treport.AutomountServiceAccountToken = *conf.pod.spec.AutomountServiceAccountToken\n\n\t\t}\n\n\t\tif conf.origin == OriginWebhook {\n\n\t\t\tif vm := conf.serviceAccountVolumeMount(); vm == nil {\n\n\t\t\t\t// set to false only if it is not using the new linkerd-token volume projection\n\n\t\t\t\tif conf.values != nil && !conf.values.Identity.ServiceAccountTokenProjection {\n\n\t\t\t\t\treport.AutomountServiceAccountToken = false\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t} else {\n\n\t\treport.UnsupportedResource = true\n\n\t}\n\n\n\n\tif conf.HasPodTemplate() || conf.IsService() || conf.IsNamespace() {\n\n\t\treport.Annotatable = true\n\n\t}\n\n\n\n\treturn report\n\n}\n\n\n\n// ResName returns a string \"Kind/Name\" for the workload referred in the report r\n\nfunc (r *Report) ResName() string {\n\n\treturn fmt.Sprintf(\"%s/%s\", r.Kind, r.Name)\n\n}\n\n\n\n// Injectable returns false if the report flags indicate that the workload is on a host network\n\n// or there is already a sidecar or the resource is not supported or inject is explicitly disabled.\n\n// If false, the second returned value describes the reason.\n\nfunc (r *Report) Injectable() (bool, []string) {\n\n\tvar reasons []string\n\n\tif r.HostNetwork {\n\n\t\treasons = append(reasons, hostNetworkEnabled)\n\n\t}\n\n\tif r.Sidecar {\n\n\t\treasons = append(reasons, sidecarExists)\n\n\t}\n\n\tif r.UnsupportedResource {\n\n\t\treasons = append(reasons, unsupportedResource)\n\n\t}\n\n\tif r.InjectDisabled {\n\n\t\treasons = append(reasons, r.InjectDisabledReason)\n\n\t}\n\n\n\n\tif !r.AutomountServiceAccountToken {\n\n\t\treasons = append(reasons, disabledAutomountServiceAccountToken)\n\n\t}\n\n\n\n\tif len(reasons) > 0 {\n\n\t\treturn false, reasons\n\n\t}\n\n\treturn true, nil\n\n}\n\n\n\n// IsAnnotatable returns true if the resource for a report can be annotated.\n\nfunc (r *Report) IsAnnotatable() bool {\n\n\treturn r.Annotatable\n\n}\n\n\n\nfunc checkUDPPorts(t *v1.PodSpec) bool {\n\n\t// Check for ports with `protocol: UDP`, which will not be routed by Linkerd\n\n\tfor _, container := range t.Containers {\n\n\t\tfor _, port := range container.Ports {\n\n\t\t\tif port.Protocol == v1.ProtocolUDP {\n\n\t\t\t\treturn true\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\treturn false\n\n}\n\n\n\n// disabledByAnnotation checks the workload and namespace for the annotation\n\n// that disables injection. 
It returns if it is disabled, why it is disabled,\n\n// and the location where the annotation was present.\n\nfunc (r *Report) disabledByAnnotation(conf *ResourceConfig) (bool, string, string) {\n\n\t// truth table of the effects of the inject annotation:\n\n\t//\n\n\t// origin | namespace | pod | inject? | return\n\n\t// ------- | --------- | -------- | -------- | ------\n\n\t// webhook | enabled | enabled | yes | false\n\n\t// webhook | enabled | \"\" | yes | false\n\n\t// webhook | enabled | disabled | no | true\n\n\t// webhook | disabled | enabled | yes | false\n\n\t// webhook | \"\" | enabled | yes | false\n\n\t// webhook | disabled | disabled | no | true\n\n\t// webhook | \"\" | disabled | no | true\n\n\t// webhook | disabled | \"\" | no | true\n\n\t// webhook | \"\" | \"\" | no | true\n\n\t// cli | n/a | enabled | yes | false\n\n\t// cli | n/a | \"\" | yes | false\n\n\t// cli | n/a | disabled | no | true\n\n\n\n\tpodAnnotation := conf.pod.meta.Annotations[k8s.ProxyInjectAnnotation]\n\n\tnsAnnotation := conf.nsAnnotations[k8s.ProxyInjectAnnotation]\n\n\n\n\tif conf.origin == OriginCLI {\n\n\t\treturn podAnnotation == k8s.ProxyInjectDisabled, \"\", \"\"\n\n\t}\n\n\n\n\tif !isInjectAnnotationValid(nsAnnotation) {\n\n\t\treturn true, invalidInjectAnnotationNamespace, \"\"\n\n\t}\n\n\n\n\tif !isInjectAnnotationValid(podAnnotation) {\n\n\t\treturn true, invalidInjectAnnotationWorkload, \"\"\n\n\t}\n\n\n\n\tif nsAnnotation == k8s.ProxyInjectEnabled || nsAnnotation == k8s.ProxyInjectIngress {\n\n\t\tif podAnnotation == k8s.ProxyInjectDisabled {\n\n\t\t\treturn true, injectDisableAnnotationPresent, annotationAtWorkload\n\n\t\t}\n\n\t\treturn false, \"\", annotationAtNamespace\n\n\t}\n\n\n\n\tif podAnnotation != k8s.ProxyInjectEnabled && podAnnotation != k8s.ProxyInjectIngress {\n\n\t\treturn true, injectEnableAnnotationAbsent, \"\"\n\n\t}\n\n\n\n\treturn false, \"\", annotationAtWorkload\n\n}\n\n\n\nfunc isInjectAnnotationValid(annotation string) bool {\n\n\tif annotation != \"\" && !(annotation == k8s.ProxyInjectEnabled || annotation == k8s.ProxyInjectDisabled || annotation == k8s.ProxyInjectIngress) {\n\n\t\treturn false\n\n\t}\n\n\treturn true\n\n}\n\n\n\n// ThrowInjectError errors out `inject` when the report contains errors\n\n// related to automountServiceAccountToken, hostNetwork, existing sidecar,\n\n// or udp ports\n\n// See - https://github.com/linkerd/linkerd2/issues/4214\n\nfunc (r *Report) ThrowInjectError() []error {\n\n\n\n\terrs := []error{}\n\n\n\n\tif !r.AutomountServiceAccountToken {\n\n\t\terrs = append(errs, errors.New(Reasons[disabledAutomountServiceAccountToken]))\n\n\t}\n\n\n\n\tif r.HostNetwork {\n\n\t\terrs = append(errs, errors.New(Reasons[hostNetworkEnabled]))\n\n\t}\n\n\n\n\tif r.Sidecar {\n\n\t\terrs = append(errs, errors.New(Reasons[sidecarExists]))\n\n\t}\n\n\n\n\tif r.UDP {\n\n\t\terrs = append(errs, errors.New(Reasons[udpPortsEnabled]))\n\n\t}\n\n\n\n\treturn errs\n\n}\n", "file_path": "pkg/inject/report.go", "rank": 89, "score": 112037.89457604362 }, { "content": "\tHostNetwork bool\n", "file_path": "pkg/inject/report.go", "rank": 90, "score": 111399.63186540913 }, { "content": "package srv\n\n\n\n// FakeServer provides a mock of a Server in `/web/srv`.\n\nfunc FakeServer() Server {\n\n\treturn Server{\n\n\t\ttemplateDir: \"../templates\",\n\n\t\treload: true,\n\n\t}\n\n}\n", "file_path": "web/srv/test_helpers.go", "rank": 91, "score": 111371.64452616076 }, { "content": "package srv\n\n\n\nimport 
(\n\n\t\"bytes\"\n\n\t\"encoding/json\"\n\n\t\"errors\"\n\n\t\"fmt\"\n\n\t\"io\"\n\n\t\"net/http\"\n\n\t\"regexp\"\n\n\t\"strings\"\n\n\t\"time\"\n\n\n\n\t\"github.com/gorilla/websocket\"\n\n\t\"github.com/julienschmidt/httprouter\"\n\n\t\"github.com/linkerd/linkerd2/pkg/healthcheck\"\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\t\"github.com/linkerd/linkerd2/pkg/protohttp\"\n\n\tmetricsPb \"github.com/linkerd/linkerd2/viz/metrics-api/gen/viz\"\n\n\tvizUtil \"github.com/linkerd/linkerd2/viz/metrics-api/util\"\n\n\ttapPb \"github.com/linkerd/linkerd2/viz/tap/gen/tap\"\n\n\ttappkg \"github.com/linkerd/linkerd2/viz/tap/pkg\"\n\n\tlog \"github.com/sirupsen/logrus\"\n\n\t\"google.golang.org/protobuf/encoding/protojson\"\n\n\t\"google.golang.org/protobuf/proto\"\n\n\tkerrors \"k8s.io/apimachinery/pkg/api/errors\"\n\n\tmetav1 \"k8s.io/apimachinery/pkg/apis/meta/v1\"\n\n\t\"sigs.k8s.io/yaml\"\n\n)\n\n\n\n// Control Frame payload size can be no bigger than 125 bytes. 2 bytes are\n\n// reserved for the status code when formatting the message.\n\nconst maxControlFrameMsgSize = 123\n\n\n\ntype (\n\n\tjsonError struct {\n\n\t\tError string `json:\"error\"`\n\n\t}\n\n)\n\n\n\nvar (\n\n\tdefaultResourceType = k8s.Deployment\n\n\tpbMarshaler = protojson.MarshalOptions{EmitUnpopulated: true}\n\n\tmaxMessageSize = 2048\n\n\twebsocketUpgrader = websocket.Upgrader{\n\n\t\tReadBufferSize: maxMessageSize,\n\n\t\tWriteBufferSize: maxMessageSize,\n\n\t}\n\n\n\n\t// Checks whose description matches the following regexp won't be included\n\n\t// in the handleApiCheck output. In the context of the dashboard, some\n\n\t// checks like cli or kubectl versions ones may not be relevant.\n\n\t//\n\n\t// TODO(tegioz): use more reliable way to identify the checks that should\n\n\t// not be displayed in the dashboard (hint anchor is not unique).\n\n\texcludedChecksRE = regexp.MustCompile(`(?i)cli|(?i)kubectl`)\n\n)\n\n\n\nfunc renderJSONError(w http.ResponseWriter, err error, status int) {\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tlog.Error(err.Error())\n\n\trsp, _ := json.Marshal(jsonError{Error: err.Error()})\n\n\tw.WriteHeader(status)\n\n\tw.Write(rsp)\n\n}\n\n\n\nfunc renderJSON(w http.ResponseWriter, resp interface{}) {\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tjsonResp, err := json.Marshal(resp)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\tw.Write(jsonResp)\n\n}\n\n\n\nfunc renderJSONPb(w http.ResponseWriter, msg proto.Message) {\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tjson, err := pbMarshaler.Marshal(msg)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusBadRequest)\n\n\t}\n\n\tw.Write(json)\n\n}\n\n\n\nfunc renderJSONBytes(w http.ResponseWriter, b []byte) {\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tw.Write(b)\n\n}\n\n\n\nfunc (h *handler) handleAPIVersion(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\tresp := map[string]interface{}{\n\n\t\t\"version\": h.version,\n\n\t}\n\n\trenderJSON(w, resp)\n\n}\n\n\n\nfunc (h *handler) handleAPIPods(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\tpods, err := h.apiClient.ListPods(req.Context(), &metricsPb.ListPodsRequest{\n\n\t\tSelector: &metricsPb.ResourceSelection{\n\n\t\t\tResource: &metricsPb.Resource{\n\n\t\t\t\tNamespace: req.FormValue(\"namespace\"),\n\n\t\t\t},\n\n\t\t},\n\n\t})\n\n\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, 
http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\trenderJSONPb(w, pods)\n\n}\n\n\n\nfunc (h *handler) handleAPIServices(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\tservices, err := h.apiClient.ListServices(req.Context(), &metricsPb.ListServicesRequest{\n\n\t\tNamespace: req.FormValue(\"namespace\"),\n\n\t})\n\n\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\trenderJSONPb(w, services)\n\n}\n\n\n\nfunc (h *handler) handleAPIStat(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\t// Try to get stat summary from cache using the query as key\n\n\tcachedResultJSON, ok := h.statCache.Get(req.URL.RawQuery)\n\n\tif ok {\n\n\t\t// Cache hit, render cached json result\n\n\t\trenderJSONBytes(w, cachedResultJSON.([]byte))\n\n\t\treturn\n\n\t}\n\n\n\n\ttrueStr := fmt.Sprintf(\"%t\", true)\n\n\n\n\trequestParams := vizUtil.StatsSummaryRequestParams{\n\n\t\tStatsBaseRequestParams: vizUtil.StatsBaseRequestParams{\n\n\t\t\tTimeWindow: req.FormValue(\"window\"),\n\n\t\t\tResourceName: req.FormValue(\"resource_name\"),\n\n\t\t\tResourceType: req.FormValue(\"resource_type\"),\n\n\t\t\tNamespace: req.FormValue(\"namespace\"),\n\n\t\t\tAllNamespaces: req.FormValue(\"all_namespaces\") == trueStr,\n\n\t\t},\n\n\t\tToName: req.FormValue(\"to_name\"),\n\n\t\tToType: req.FormValue(\"to_type\"),\n\n\t\tToNamespace: req.FormValue(\"to_namespace\"),\n\n\t\tFromName: req.FormValue(\"from_name\"),\n\n\t\tFromType: req.FormValue(\"from_type\"),\n\n\t\tFromNamespace: req.FormValue(\"from_namespace\"),\n\n\t\tSkipStats: req.FormValue(\"skip_stats\") == trueStr,\n\n\t\tTCPStats: req.FormValue(\"tcp_stats\") == trueStr,\n\n\t}\n\n\n\n\t// default to returning deployment stats\n\n\tif requestParams.ResourceType == \"\" {\n\n\t\trequestParams.ResourceType = defaultResourceType\n\n\t}\n\n\n\n\tstatRequest, err := vizUtil.BuildStatSummaryRequest(requestParams)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\tresult, err := h.apiClient.StatSummary(req.Context(), statRequest)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\t// Marshal result into json and cache it\n\n\tjson, err := pbMarshaler.Marshal(result)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\tvar resultJSON bytes.Buffer\n\n\tresultJSON.Write(json)\n\n\n\n\th.statCache.SetDefault(req.URL.RawQuery, resultJSON.Bytes())\n\n\n\n\trenderJSONBytes(w, resultJSON.Bytes())\n\n}\n\n\n\nfunc (h *handler) handleAPITopRoutes(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\trequestParams := vizUtil.TopRoutesRequestParams{\n\n\t\tStatsBaseRequestParams: vizUtil.StatsBaseRequestParams{\n\n\t\t\tTimeWindow: req.FormValue(\"window\"),\n\n\t\t\tResourceName: req.FormValue(\"resource_name\"),\n\n\t\t\tResourceType: req.FormValue(\"resource_type\"),\n\n\t\t\tNamespace: req.FormValue(\"namespace\"),\n\n\t\t},\n\n\t\tToName: req.FormValue(\"to_name\"),\n\n\t\tToType: req.FormValue(\"to_type\"),\n\n\t\tToNamespace: req.FormValue(\"to_namespace\"),\n\n\t}\n\n\n\n\ttopReq, err := vizUtil.BuildTopRoutesRequest(requestParams)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusBadRequest)\n\n\t\treturn\n\n\t}\n\n\n\n\tresult, err := h.apiClient.TopRoutes(req.Context(), topReq)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, 
http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\trenderJSONPb(w, result)\n\n}\n\n\n\n// Control frame payload size must be no longer than `maxControlFrameMsgSize`\n\n// bytes. In the case of an unexpected HTTP status code or unexpected error,\n\n// truncate the message after `maxControlFrameMsgSize` bytes so that the web\n\n// socket message is properly written.\n\nfunc validateControlFrameMsg(err error) string {\n\n\tlog.Debugf(\"tap error: %s\", err.Error())\n\n\n\n\tmsg := err.Error()\n\n\tif len(msg) > maxControlFrameMsgSize {\n\n\t\treturn msg[:maxControlFrameMsgSize]\n\n\t}\n\n\n\n\treturn msg\n\n}\n\n\n\nfunc websocketError(ws *websocket.Conn, wsError int, err error) {\n\n\tmsg := validateControlFrameMsg(err)\n\n\n\n\terr = ws.WriteControl(websocket.CloseMessage,\n\n\t\twebsocket.FormatCloseMessage(wsError, msg),\n\n\t\ttime.Time{})\n\n\tif err != nil {\n\n\t\tlog.Errorf(\"Unexpected websocket error: %s\", err)\n\n\t}\n\n}\n\n\n\nfunc (h *handler) handleAPITap(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\tws, err := websocketUpgrader.Upgrade(w, req, nil)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\tdefer ws.Close()\n\n\n\n\tmessageType, message, err := ws.ReadMessage()\n\n\tif err != nil {\n\n\t\twebsocketError(ws, websocket.CloseInternalServerErr, err)\n\n\t\treturn\n\n\t}\n\n\n\n\tif messageType != websocket.TextMessage {\n\n\t\twebsocketError(ws, websocket.CloseUnsupportedData, errors.New(\"messageType not supported\"))\n\n\t\treturn\n\n\t}\n\n\n\n\tvar requestParams tappkg.TapRequestParams\n\n\terr = json.Unmarshal(message, &requestParams)\n\n\tif err != nil {\n\n\t\twebsocketError(ws, websocket.CloseInternalServerErr, err)\n\n\t\treturn\n\n\t}\n\n\n\n\ttapReq, err := tappkg.BuildTapByResourceRequest(requestParams)\n\n\tif err != nil {\n\n\t\twebsocketError(ws, websocket.CloseInternalServerErr, err)\n\n\t\treturn\n\n\t}\n\n\n\n\tgo func() {\n\n\t\treader, body, err := tappkg.Reader(req.Context(), h.k8sAPI, tapReq)\n\n\t\tif err != nil {\n\n\t\t\t// If there was a [403] error when initiating a tap, close the\n\n\t\t\t// socket with `ClosePolicyViolation` status code so that the error\n\n\t\t\t// renders without the error prefix in the banner\n\n\t\t\tvar he protohttp.HTTPError\n\n\t\t\tif errors.Is(err, &he) && he.Code == http.StatusForbidden {\n\n\t\t\t\terr := fmt.Errorf(\"missing authorization, visit %s to remedy\", tappkg.TapRbacURL)\n\n\t\t\t\twebsocketError(ws, websocket.ClosePolicyViolation, err)\n\n\t\t\t\treturn\n\n\t\t\t}\n\n\n\n\t\t\t// All other errors from initiating a tap should close with\n\n\t\t\t// `CloseInternalServerErr` status code\n\n\t\t\twebsocketError(ws, websocket.CloseInternalServerErr, err)\n\n\t\t\treturn\n\n\t\t}\n\n\t\tdefer body.Close()\n\n\n\n\t\tfor {\n\n\t\t\tevent := tapPb.TapEvent{}\n\n\t\t\terr := protohttp.FromByteStreamToProtocolBuffers(reader, &event)\n\n\t\t\tif err != nil {\n\n\t\t\t\tif errors.Is(err, io.EOF) {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t\twebsocketError(ws, websocket.CloseInternalServerErr, err)\n\n\t\t\t\tbreak\n\n\t\t\t}\n\n\n\n\t\t\tjson, err := pbMarshaler.Marshal(&event)\n\n\t\t\tif err != nil {\n\n\t\t\t\twebsocketError(ws, websocket.CloseUnsupportedData, err)\n\n\t\t\t\tbreak\n\n\t\t\t}\n\n\t\t\tbuf := new(bytes.Buffer)\n\n\t\t\tbuf.Write(json)\n\n\n\n\t\t\tif err := ws.WriteMessage(websocket.TextMessage, buf.Bytes()); err != nil {\n\n\t\t\t\tif websocket.IsUnexpectedCloseError(err, websocket.CloseNormalClosure) 
{\n\n\t\t\t\t\tlog.Error(err)\n\n\t\t\t\t}\n\n\t\t\t\tbreak\n\n\t\t\t}\n\n\t\t}\n\n\t}()\n\n\n\n\tfor {\n\n\t\t_, _, err := ws.ReadMessage()\n\n\t\tif err != nil {\n\n\t\t\tlog.Debugf(\"Received close frame: %v\", err)\n\n\t\t\tif websocket.IsUnexpectedCloseError(err, websocket.CloseNormalClosure) {\n\n\t\t\t\tlog.Errorf(\"Unexpected close error: %s\", err)\n\n\t\t\t}\n\n\t\t\treturn\n\n\t\t}\n\n\t}\n\n}\n\n\n\nfunc (h *handler) handleAPIEdges(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\trequestParams := vizUtil.EdgesRequestParams{\n\n\t\tNamespace: req.FormValue(\"namespace\"),\n\n\t\tResourceType: req.FormValue(\"resource_type\"),\n\n\t}\n\n\n\n\tedgesRequest, err := vizUtil.BuildEdgesRequest(requestParams)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\tresult, err := h.apiClient.Edges(req.Context(), edgesRequest)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\trenderJSONPb(w, result)\n\n}\n\n\n\nfunc (h *handler) handleAPICheck(w http.ResponseWriter, req *http.Request, p httprouter.Params) {\n\n\ttype CheckResult struct {\n\n\t\t*healthcheck.CheckResult\n\n\t\tErrMsg string `json:\",omitempty\"`\n\n\t\tHintURL string `json:\",omitempty\"`\n\n\t}\n\n\n\n\tsuccess := true\n\n\tresults := make(map[healthcheck.CategoryID][]*CheckResult)\n\n\n\n\tcollectResults := func(result *healthcheck.CheckResult) {\n\n\t\tif result.Retry || excludedChecksRE.MatchString(result.Description) {\n\n\t\t\treturn\n\n\t\t}\n\n\t\tvar errMsg, hintURL string\n\n\t\tif result.Err != nil {\n\n\t\t\tif !result.Warning {\n\n\t\t\t\tsuccess = false\n\n\t\t\t}\n\n\t\t\terrMsg = result.Err.Error()\n\n\t\t\thintURL = result.HintURL\n\n\t\t}\n\n\t\tresults[result.Category] = append(results[result.Category], &CheckResult{\n\n\t\t\tCheckResult: result,\n\n\t\t\tErrMsg: errMsg,\n\n\t\t\tHintURL: hintURL,\n\n\t\t})\n\n\t}\n\n\t// TODO (tegioz): ignore runchecks results until we stop filtering checks\n\n\t// in this method (see #3670 for more details)\n\n\t_, _ = h.hc.RunChecks(collectResults)\n\n\n\n\trenderJSON(w, map[string]interface{}{\n\n\t\t\"success\": success,\n\n\t\t\"results\": results,\n\n\t})\n\n}\n\n\n\nfunc (h *handler) handleAPIResourceDefinition(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {\n\n\tvar missingParams []string\n\n\trequiredParams := []string{\"namespace\", \"resource_type\", \"resource_name\"}\n\n\tfor _, param := range requiredParams {\n\n\t\tif req.FormValue(param) == \"\" {\n\n\t\t\tmissingParams = append(missingParams, param)\n\n\t\t}\n\n\t}\n\n\tif len(missingParams) != 0 {\n\n\t\trenderJSONError(w, fmt.Errorf(\"required params not provided: %s\", strings.Join(missingParams, \", \")), http.StatusBadRequest)\n\n\t\treturn\n\n\t}\n\n\n\n\tnamespace := req.FormValue(\"namespace\")\n\n\tresourceType := req.FormValue(\"resource_type\")\n\n\tresourceName := req.FormValue(\"resource_name\")\n\n\n\n\tvar resource interface{}\n\n\tvar err error\n\n\toptions := metav1.GetOptions{}\n\n\tswitch resourceType {\n\n\tcase k8s.CronJob:\n\n\t\tresource, err = h.k8sAPI.BatchV1beta1().CronJobs(namespace).Get(req.Context(), resourceName, options)\n\n\tcase k8s.DaemonSet:\n\n\t\tresource, err = h.k8sAPI.AppsV1().DaemonSets(namespace).Get(req.Context(), resourceName, options)\n\n\tcase k8s.Deployment:\n\n\t\tresource, err = h.k8sAPI.AppsV1().Deployments(namespace).Get(req.Context(), resourceName, options)\n\n\tcase k8s.Service:\n\n\t\tresource, 
err = h.k8sAPI.CoreV1().Services(namespace).Get(req.Context(), resourceName, options)\n\n\tcase k8s.Job:\n\n\t\tresource, err = h.k8sAPI.BatchV1().Jobs(namespace).Get(req.Context(), resourceName, options)\n\n\tcase k8s.Pod:\n\n\t\tresource, err = h.k8sAPI.CoreV1().Pods(namespace).Get(req.Context(), resourceName, options)\n\n\tcase k8s.ReplicationController:\n\n\t\tresource, err = h.k8sAPI.CoreV1().ReplicationControllers(namespace).Get(req.Context(), resourceName, options)\n\n\tcase k8s.ReplicaSet:\n\n\t\tresource, err = h.k8sAPI.AppsV1().ReplicaSets(namespace).Get(req.Context(), resourceName, options)\n\n\tdefault:\n\n\t\trenderJSONError(w, errors.New(\"Invalid resource type: \"+resourceType), http.StatusBadRequest)\n\n\t\treturn\n\n\t}\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\tresourceDefinition, err := yaml.Marshal(resource)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\tw.Header().Set(\"Content-Type\", \"text/yaml\")\n\n\tw.Write(resourceDefinition)\n\n}\n\n\n\nfunc (h *handler) handleGetExtensions(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {\n\n\tctx := req.Context()\n\n\textensionName := req.FormValue(\"extension_name\")\n\n\n\n\ttype Extension struct {\n\n\t\tName string `json:\"name\"`\n\n\t\tUID string `json:\"uid\"`\n\n\t\tNamespace string `json:\"namespace\"`\n\n\t}\n\n\n\n\tresp := map[string]interface{}{}\n\n\tif extensionName != \"\" {\n\n\t\tns, err := h.k8sAPI.GetNamespaceWithExtensionLabel(ctx, extensionName)\n\n\t\tif err != nil && kerrors.IsNotFound(err) {\n\n\t\t\trenderJSON(w, resp)\n\n\t\t\treturn\n\n\t\t} else if err != nil {\n\n\t\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\t\treturn\n\n\t\t}\n\n\n\n\t\tresp[\"data\"] = Extension{\n\n\t\t\tUID: string(ns.UID),\n\n\t\t\tName: ns.GetLabels()[k8s.LinkerdExtensionLabel],\n\n\t\t\tNamespace: ns.Name,\n\n\t\t}\n\n\n\n\t\trenderJSON(w, resp)\n\n\t\treturn\n\n\t}\n\n\n\n\tinstalledExtensions, err := h.k8sAPI.GetAllNamespacesWithExtensionLabel(ctx)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\n\n\textensionList := make([]Extension, len(installedExtensions))\n\n\n\n\tfor i, installedExtension := range installedExtensions {\n\n\t\textensionList[i] = Extension{\n\n\t\t\tUID: string(installedExtension.GetObjectMeta().GetUID()),\n\n\t\t\tName: installedExtension.GetLabels()[k8s.LinkerdExtensionLabel],\n\n\t\t\tNamespace: installedExtension.GetName(),\n\n\t\t}\n\n\t}\n\n\n\n\tresp[\"extensions\"] = extensionList\n\n\trenderJSON(w, resp)\n\n}\n\n\n\nfunc (h *handler) handleAPIGateways(w http.ResponseWriter, req *http.Request, _ httprouter.Params) {\n\n\twindow := req.FormValue(\"window\")\n\n\tif window == \"\" {\n\n\t\twindow = \"1m\"\n\n\t}\n\n\t_, err := time.ParseDuration(window)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\tgatewayRequest := &metricsPb.GatewaysRequest{\n\n\t\tTimeWindow: window,\n\n\t\tGatewayNamespace: req.FormValue(\"gatewayNamespace\"),\n\n\t\tRemoteClusterName: req.FormValue(\"remoteClusterName\"),\n\n\t}\n\n\tresult, err := h.apiClient.Gateways(req.Context(), gatewayRequest)\n\n\tif err != nil {\n\n\t\trenderJSONError(w, err, http.StatusInternalServerError)\n\n\t\treturn\n\n\t}\n\n\trenderJSONPb(w, result)\n\n}\n", "file_path": "web/srv/api_handlers.go", "rank": 92, "score": 111371.64452616076 }, { 
"content": "package srv\n\n\n\nimport (\n\n\t\"net/http\"\n\n\t\"net/http/httputil\"\n\n\t\"strings\"\n\n)\n\n\n\n// reverseProxy is an HTTP reverse proxy that forwards all web requests\n\n// containing paths prefixed to the corresponding service. The proxy\n\n// strips the prefix and rewrites the Host header before sending.\n\ntype reverseProxy struct {\n\n\t*httputil.ReverseProxy\n\n}\n\n\n\nfunc newReverseProxy(addr string, prefix string) *reverseProxy {\n\n\tdirector := func(req *http.Request) {\n\n\t\treq.URL.Host = addr\n\n\t\treq.URL.Scheme = \"http\"\n\n\t\treq.URL.Path = strings.TrimPrefix(req.URL.Path, prefix)\n\n\n\n\t\t// the default director implementation does this, so we will too\n\n\t\tif _, ok := req.Header[\"User-Agent\"]; !ok {\n\n\t\t\t// explicitly disable User-Agent so it's not set to default value\n\n\t\t\treq.Header.Set(\"User-Agent\", \"\")\n\n\t\t}\n\n\t}\n\n\n\n\treturn &reverseProxy{\n\n\t\tReverseProxy: &httputil.ReverseProxy{Director: director},\n\n\t}\n\n}\n", "file_path": "web/srv/reverse_proxy.go", "rank": 93, "score": 111371.64452616076 }, { "content": "package srv\n\n\n\nimport (\n\n\t\"net/http\"\n\n\t\"net/http/httptest\"\n\n\t\"strings\"\n\n\t\"testing\"\n\n\n\n\t\"github.com/go-test/deep\"\n\n\t\"github.com/julienschmidt/httprouter\"\n\n\t\"github.com/linkerd/linkerd2/controller/gen/apis/serviceprofile/v1alpha2\"\n\n\thelpers \"github.com/linkerd/linkerd2/pkg/profiles\"\n\n\t\"sigs.k8s.io/yaml\"\n\n)\n\n\n\nconst releaseVersion = \"0.3.3\"\n\n\n\nfunc TestHandleIndex(t *testing.T) {\n\n\tserver := FakeServer()\n\n\n\n\thandler := &handler{\n\n\t\trender: server.RenderTemplate,\n\n\t\tversion: releaseVersion,\n\n\t}\n\n\n\n\trecorder := httptest.NewRecorder()\n\n\treq := httptest.NewRequest(\"GET\", \"/\", nil)\n\n\thandler.handleIndex(recorder, req, httprouter.Params{})\n\n\n\n\tif recorder.Code != http.StatusOK {\n\n\t\tt.Errorf(\"Incorrect StatusCode: %+v\", recorder.Code)\n\n\t\tt.Errorf(\"Expected %+v\", http.StatusOK)\n\n\t}\n\n\n\n\theader := http.Header{\n\n\t\t\"Content-Type\": []string{\"text/html\"},\n\n\t}\n\n\tif diff := deep.Equal(recorder.Header(), header); diff != nil {\n\n\t\tt.Errorf(\"Unexpected header: %v\", diff)\n\n\t}\n\n\n\n\tactualBody := recorder.Body.String()\n\n\n\n\texpectedSubstrings := []string{\n\n\t\t\"<div class=\\\"main\\\" id=\\\"main\\\"\",\n\n\t\t\"data-release-version=\\\"\" + releaseVersion + \"\\\"\",\n\n\t\t\"data-controller-namespace=\\\"\\\"\",\n\n\t\t\"data-uuid=\\\"\\\"\",\n\n\t}\n\n\tfor _, expectedSubstring := range expectedSubstrings {\n\n\t\tif !strings.Contains(actualBody, expectedSubstring) {\n\n\t\t\tt.Fatalf(\"Expected string [%s] to be present in [%s]\", expectedSubstring, actualBody)\n\n\t\t}\n\n\t}\n\n}\n\n\n\nfunc TestHandleConfigDownload(t *testing.T) {\n\n\tserver := FakeServer()\n\n\n\n\thandler := &handler{\n\n\t\trender: server.RenderTemplate,\n\n\t\tcontrollerNamespace: \"linkerd\",\n\n\t\tclusterDomain: \"mycluster.local\",\n\n\t}\n\n\n\n\trecorder := httptest.NewRecorder()\n\n\treq := httptest.NewRequest(\"GET\", \"/profiles/new?service=authors&namespace=booksns\", nil)\n\n\n\n\thandler.handleProfileDownload(recorder, req, httprouter.Params{})\n\n\n\n\tif recorder.Code != http.StatusOK {\n\n\t\tt.Errorf(\"Incorrect StatusCode: %+v\", recorder.Code)\n\n\t\tt.Errorf(\"Expected %+v\", http.StatusOK)\n\n\t}\n\n\n\n\theader := http.Header{\n\n\t\t\"Content-Type\": []string{\n\n\t\t\t\"text/yaml\",\n\n\t\t},\n\n\t\t\"Content-Disposition\": []string{\n\n\t\t\t\"attachment; 
filename=authors-profile.yml\",\n\n\t\t},\n\n\t}\n\n\tif diff := deep.Equal(recorder.Header(), header); diff != nil {\n\n\t\tt.Errorf(\"Unexpected header: %v\", diff)\n\n\t}\n\n\n\n\tvar serviceProfile v1alpha2.ServiceProfile\n\n\terr := yaml.Unmarshal(recorder.Body.Bytes(), &serviceProfile)\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"Error parsing service profile: %v\", err)\n\n\t}\n\n\n\n\texpectedServiceProfile := helpers.GenServiceProfile(\"authors\", \"booksns\", \"mycluster.local\")\n\n\n\n\terr = helpers.ServiceProfileYamlEquals(serviceProfile, expectedServiceProfile)\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"ServiceProfiles are not equal: %v\", err)\n\n\t}\n\n}\n", "file_path": "web/srv/handlers_test.go", "rank": 94, "score": 111371.64452616076 }, { "content": "\tpodName string\n", "file_path": "pkg/k8s/portforward.go", "rank": 95, "score": 111241.72461267145 }, { "content": "package inject\n\n\n\nimport (\n\n\t\"fmt\"\n\n\t\"testing\"\n\n\n\n\t\"github.com/linkerd/linkerd2/pkg/k8s\"\n\n\tcorev1 \"k8s.io/api/core/v1\"\n\n\tmetav1 \"k8s.io/apimachinery/pkg/apis/meta/v1\"\n\n)\n\n\n\nfunc TestInjectable(t *testing.T) {\n\n\tvar testCases = []struct {\n\n\t\tpodSpec *corev1.PodSpec\n\n\t\tpodMeta *metav1.ObjectMeta\n\n\t\tnsAnnotations map[string]string\n\n\t\tunsupportedResource bool\n\n\t\tinjectable bool\n\n\t\treasons []string\n\n\t}{\n\n\t\t{\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tHostNetwork: false,\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tinjectable: true,\n\n\t\t},\n\n\t\t{\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tHostNetwork: true,\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{hostNetworkEnabled},\n\n\t\t},\n\n\t\t{\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tName: k8s.ProxyContainerName,\n\n\t\t\t\t\t\tImage: \"cr.l5d.io/linkerd/proxy:\",\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{sidecarExists},\n\n\t\t},\n\n\t\t{\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tInitContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tName: k8s.InitContainerName,\n\n\t\t\t\t\t\tImage: \"cr.l5d.io/linkerd/proxy-init:\",\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tVolumeMounts: 
[]corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{sidecarExists},\n\n\t\t},\n\n\t\t{\n\n\t\t\tunsupportedResource: true,\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{unsupportedResource},\n\n\t\t},\n\n\t\t{\n\n\t\t\tunsupportedResource: true,\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tHostNetwork: true,\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t},\n\n\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{hostNetworkEnabled, unsupportedResource},\n\n\t\t},\n\n\t\t{\n\n\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t},\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tHostNetwork: true,\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t},\n\n\t\t\t},\n\n\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{hostNetworkEnabled, injectDisableAnnotationPresent},\n\n\t\t},\n\n\t\t{\n\n\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t},\n\n\t\t\tunsupportedResource: true,\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tHostNetwork: true,\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t},\n\n\t\t\t},\n\n\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{hostNetworkEnabled, unsupportedResource, injectDisableAnnotationPresent},\n\n\t\t},\n\n\t\t{\n\n\t\t\tunsupportedResource: true,\n\n\t\t\tpodSpec: &corev1.PodSpec{\n\n\t\t\t\tHostNetwork: true,\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: 
k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{},\n\n\t\t\t},\n\n\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{hostNetworkEnabled, unsupportedResource, injectEnableAnnotationAbsent},\n\n\t\t},\n\n\t\t{\n\n\t\t\tpodSpec: &corev1.PodSpec{HostNetwork: true,\n\n\t\t\t\tContainers: []corev1.Container{\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tName: k8s.ProxyContainerName,\n\n\t\t\t\t\t\tImage: \"cr.l5d.io/linkerd/proxy:\",\n\n\t\t\t\t\t\tVolumeMounts: []corev1.VolumeMount{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tMountPath: k8s.MountPathServiceAccount,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t}},\n\n\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\tAnnotations: map[string]string{},\n\n\t\t\t},\n\n\n\n\t\t\tinjectable: false,\n\n\t\t\treasons: []string{hostNetworkEnabled, sidecarExists, injectEnableAnnotationAbsent},\n\n\t\t},\n\n\t}\n\n\n\n\tfor i, testCase := range testCases {\n\n\t\ttestCase := testCase\n\n\t\tt.Run(fmt.Sprintf(\"test case #%d\", i), func(t *testing.T) {\n\n\t\t\tresourceConfig := &ResourceConfig{}\n\n\t\t\tresourceConfig.WithNsAnnotations(testCase.nsAnnotations)\n\n\t\t\tresourceConfig.pod.spec = testCase.podSpec\n\n\t\t\tresourceConfig.origin = OriginWebhook\n\n\t\t\tresourceConfig.pod.meta = testCase.podMeta\n\n\n\n\t\t\treport := newReport(resourceConfig)\n\n\t\t\treport.UnsupportedResource = testCase.unsupportedResource\n\n\n\n\t\t\tactual, reasons := report.Injectable()\n\n\t\t\tif testCase.injectable != actual {\n\n\t\t\t\tt.Errorf(\"Expected %t. Actual %t\", testCase.injectable, actual)\n\n\t\t\t}\n\n\n\n\t\t\tif len(reasons) != len(testCase.reasons) {\n\n\t\t\t\tt.Errorf(\"Expected %d number of reasons. Actual %d\", len(testCase.reasons), len(reasons))\n\n\t\t\t}\n\n\n\n\t\t\tfor i := range reasons {\n\n\t\t\t\tif testCase.reasons[i] != reasons[i] {\n\n\t\t\t\t\tt.Errorf(\"Expected reason '%s'. 
Actual reason '%s'\", testCase.reasons[i], reasons[i])\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t})\n\n\t}\n\n}\n\n\n\nfunc TestDisableByAnnotation(t *testing.T) {\n\n\tt.Run(\"webhook origin\", func(t *testing.T) {\n\n\t\tvar testCases = []struct {\n\n\t\t\tpodMeta *metav1.ObjectMeta\n\n\t\t\tnsAnnotations map[string]string\n\n\t\t\texpected bool\n\n\t\t}{\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\texpected: false,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t\texpected: false,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t},\n\n\t\t\t\texpected: false,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{},\n\n\t\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t\texpected: false,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t},\n\n\t\t\t\texpected: true,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t},\n\n\t\t\t\texpected: true,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\tnsAnnotations: map[string]string{},\n\n\t\t\t\texpected: true,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{},\n\n\t\t\t\tnsAnnotations: map[string]string{\n\n\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t},\n\n\t\t\t\texpected: true,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{},\n\n\t\t\t\tnsAnnotations: map[string]string{},\n\n\t\t\t\texpected: true,\n\n\t\t\t},\n\n\t\t}\n\n\n\n\t\tfor i, testCase := range testCases {\n\n\t\t\ttestCase := testCase\n\n\t\t\tt.Run(fmt.Sprintf(\"test case #%d\", i), func(t *testing.T) {\n\n\t\t\t\tresourceConfig := &ResourceConfig{origin: OriginWebhook}\n\n\t\t\t\tresourceConfig.WithNsAnnotations(testCase.nsAnnotations)\n\n\t\t\t\tresourceConfig.pod.meta = testCase.podMeta\n\n\t\t\t\tresourceConfig.pod.spec = &corev1.PodSpec{} // initialize empty spec to prevent test from failing\n\n\n\n\t\t\t\treport := newReport(resourceConfig)\n\n\t\t\t\tif actual, _, _ := report.disabledByAnnotation(resourceConfig); testCase.expected != actual {\n\n\t\t\t\t\tt.Errorf(\"Expected %t. 
Actual %t\", testCase.expected, actual)\n\n\t\t\t\t}\n\n\t\t\t})\n\n\t\t}\n\n\t})\n\n\n\n\tt.Run(\"CLI origin\", func(t *testing.T) {\n\n\t\tvar testCases = []struct {\n\n\t\t\tpodMeta *metav1.ObjectMeta\n\n\t\t\texpected bool\n\n\t\t}{\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{},\n\n\t\t\t\texpected: false,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectEnabled,\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\texpected: false,\n\n\t\t\t},\n\n\t\t\t{\n\n\t\t\t\tpodMeta: &metav1.ObjectMeta{\n\n\t\t\t\t\tAnnotations: map[string]string{\n\n\t\t\t\t\t\tk8s.ProxyInjectAnnotation: k8s.ProxyInjectDisabled,\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t\texpected: true,\n\n\t\t\t},\n\n\t\t}\n\n\n\n\t\tfor i, testCase := range testCases {\n\n\t\t\ttestCase := testCase\n\n\t\t\tt.Run(fmt.Sprintf(\"test case #%d\", i), func(t *testing.T) {\n\n\t\t\t\tresourceConfig := &ResourceConfig{origin: OriginCLI}\n\n\t\t\t\tresourceConfig.pod.meta = testCase.podMeta\n\n\t\t\t\tresourceConfig.pod.spec = &corev1.PodSpec{} // initialize empty spec to prevent test from failing\n\n\n\n\t\t\t\treport := newReport(resourceConfig)\n\n\t\t\t\tif actual, _, _ := report.disabledByAnnotation(resourceConfig); testCase.expected != actual {\n\n\t\t\t\t\tt.Errorf(\"Expected %t. Actual %t\", testCase.expected, actual)\n\n\t\t\t\t}\n\n\t\t\t})\n\n\t\t}\n\n\t})\n\n}\n", "file_path": "pkg/inject/report_test.go", "rank": 96, "score": 111219.42445005468 }, { "content": "package inject\n\n\n\nvar tpl = `[\n\n {{- if .AddRootAnnotations }}\n\n {\n\n \"op\": \"add\",\n\n \"path\": \"/metadata/annotations\",\n\n \"value\": {}\n\n },\n\n {{- end }}\n\n {\n\n \"op\": \"add\",\n\n \"path\": \"/metadata/annotations/config.linkerd.io~1opaque-ports\",\n\n \"value\": \"{{.OpaquePorts}}\"\n\n }\n\n]`\n", "file_path": "pkg/inject/annotation_patch.go", "rank": 97, "score": 111219.42445005468 }, { "content": "\t\tapiClient vizPb.ApiClient\n", "file_path": "web/srv/handlers.go", "rank": 98, "score": 110906.23397468332 }, { "content": "package srv\n\n\n\nimport (\n\n\t\"bytes\"\n\n\t\"encoding/json\"\n\n\t\"errors\"\n\n\t\"io/ioutil\"\n\n\t\"net/http\"\n\n\t\"net/http/httptest\"\n\n\t\"testing\"\n\n\n\n\t\"github.com/go-test/deep\"\n\n\t\"github.com/julienschmidt/httprouter\"\n\n\t\"github.com/linkerd/linkerd2/pkg/healthcheck\"\n\n\tvizApi \"github.com/linkerd/linkerd2/viz/metrics-api\"\n\n\tpb \"github.com/linkerd/linkerd2/viz/metrics-api/gen/viz\"\n\n)\n\n\n\ntype mockHealthChecker struct {\n\n\tresults []*healthcheck.CheckResult\n\n}\n\n\n\nfunc (c *mockHealthChecker) RunChecks(observer healthcheck.CheckObserver) (bool, bool) {\n\n\tfor _, result := range c.results {\n\n\t\tobserver(result)\n\n\t}\n\n\treturn true, false\n\n}\n\n\n\nfunc TestHandleApiCheck(t *testing.T) {\n\n\t// Setup handler using a mock health checker\n\n\tmockResults := []*healthcheck.CheckResult{\n\n\t\t{\n\n\t\t\tCategory: healthcheck.LinkerdConfigChecks,\n\n\t\t\tDescription: \"check3-description\",\n\n\t\t\tHintURL: healthcheck.DefaultHintBaseURL + \"check3-hint-anchor\",\n\n\t\t\tWarning: false,\n\n\t\t\tErr: nil,\n\n\t\t},\n\n\t\t{\n\n\t\t\tCategory: healthcheck.LinkerdConfigChecks,\n\n\t\t\tDescription: \"check4-description-kubectl\",\n\n\t\t\tHintURL: healthcheck.DefaultHintBaseURL + \"check4-hint-anchor\",\n\n\t\t\tWarning: true,\n\n\t\t\tErr: nil,\n\n\t\t},\n\n\t\t{\n\n\t\t\tCategory: healthcheck.KubernetesAPIChecks,\n\n\t\t\tDescription: 
\"check1-description\",\n\n\t\t\tHintURL: healthcheck.DefaultHintBaseURL + \"check1-hint-anchor\",\n\n\t\t\tWarning: false,\n\n\t\t\tErr: nil,\n\n\t\t},\n\n\t\t{\n\n\t\t\tCategory: healthcheck.KubernetesAPIChecks,\n\n\t\t\tDescription: \"check2-description\",\n\n\t\t\tHintURL: healthcheck.DefaultHintBaseURL + \"check2-hint-anchor\",\n\n\t\t\tWarning: true,\n\n\t\t\tErr: errors.New(\"check2-error\"),\n\n\t\t},\n\n\t}\n\n\th := &handler{\n\n\t\thc: &mockHealthChecker{\n\n\t\t\tresults: mockResults,\n\n\t\t},\n\n\t}\n\n\n\n\t// Handle request recording the response\n\n\treq := httptest.NewRequest(\"GET\", \"/api/check\", nil)\n\n\tw := httptest.NewRecorder()\n\n\th.handleAPICheck(w, req, httprouter.Params{})\n\n\tresp := w.Result()\n\n\tdefer resp.Body.Close()\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"not expecting error reading response body but got: %v\", err)\n\n\t}\n\n\n\n\t// Check we receive the headers and body expected\n\n\texpectedHeaders := http.Header{\n\n\t\t\"Content-Type\": []string{\"application/json\"},\n\n\t}\n\n\tif diff := deep.Equal(resp.Header, expectedHeaders); diff != nil {\n\n\t\tt.Errorf(\"Unexpected header: %v\", diff)\n\n\t}\n\n\tapiCheckOutputGolden, err := ioutil.ReadFile(\"testdata/api_check_output.json\")\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"not expecting error reading api check output golden file but got: %v\", err)\n\n\t}\n\n\tapiCheckOutputGoldenCompact := &bytes.Buffer{}\n\n\terr = json.Compact(apiCheckOutputGoldenCompact, apiCheckOutputGolden)\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"not expecting error compacting api check output golden file but got: %v\", err)\n\n\t}\n\n\tif !bytes.Equal(body, apiCheckOutputGoldenCompact.Bytes()) {\n\n\t\tt.Errorf(\"expecting response body to be\\n %s\\n but got\\n %s\", apiCheckOutputGoldenCompact.Bytes(), body)\n\n\t}\n\n}\n\n\n\nfunc TestHandleApiGateway(t *testing.T) {\n\n\tmockAPIClient := &vizApi.MockAPIClient{\n\n\t\tGatewaysResponseToReturn: &pb.GatewaysResponse{\n\n\t\t\tResponse: &pb.GatewaysResponse_Ok_{\n\n\t\t\t\tOk: &pb.GatewaysResponse_Ok{\n\n\t\t\t\t\tGatewaysTable: &pb.GatewaysTable{\n\n\t\t\t\t\t\tRows: []*pb.GatewaysTable_Row{\n\n\t\t\t\t\t\t\t{\n\n\t\t\t\t\t\t\t\tNamespace: \"test_namespace\",\n\n\t\t\t\t\t\t\t\tName: \"test_gateway\",\n\n\t\t\t\t\t\t\t\tClusterName: \"multi_cluster\",\n\n\t\t\t\t\t\t\t\tAlive: true,\n\n\t\t\t\t\t\t\t},\n\n\t\t\t\t\t\t},\n\n\t\t\t\t\t},\n\n\t\t\t\t},\n\n\t\t\t},\n\n\t\t},\n\n\t}\n\n\tserver := FakeServer()\n\n\n\n\thandler := &handler{\n\n\t\trender: server.RenderTemplate,\n\n\t\tapiClient: mockAPIClient,\n\n\t}\n\n\n\n\tt.Run(\"Returns expected gateway response\", func(t *testing.T) {\n\n\t\trecorder := httptest.NewRecorder()\n\n\t\treq := httptest.NewRequest(\"GET\", \"/api/gateways\", nil)\n\n\t\thandler.handleAPIGateways(recorder, req, httprouter.Params{})\n\n\t\tresp := recorder.Result()\n\n\t\tdefer resp.Body.Close()\n\n\t\tbody, err := ioutil.ReadAll(resp.Body)\n\n\t\tif err != nil {\n\n\t\t\tt.Fatalf(\"not expecting error reading response body but got: %v\", err)\n\n\t\t}\n\n\n\n\t\tif recorder.Code != http.StatusOK {\n\n\t\t\tt.Errorf(\"Incorrect StatusCode: %+v\", recorder.Code)\n\n\t\t\tt.Errorf(\"Expected %+v\", http.StatusOK)\n\n\t\t}\n\n\n\n\t\theader := http.Header{\n\n\t\t\t\"Content-Type\": []string{\"application/json\"},\n\n\t\t}\n\n\t\tif diff := deep.Equal(recorder.Header(), header); diff != nil {\n\n\t\t\tt.Errorf(\"Unexpected header: %v\", diff)\n\n\t\t}\n\n\n\n\t\tapiGatewayOutputGolden, err := 
ioutil.ReadFile(\"testdata/api_gateway_output.json\")\n\n\t\tif err != nil {\n\n\t\t\tt.Fatalf(\"not expecting error reading api check output golden file but got: %v\", err)\n\n\t\t}\n\n\t\tapiGatewayOutputGoldenCompact := &bytes.Buffer{}\n\n\t\terr = json.Compact(apiGatewayOutputGoldenCompact, apiGatewayOutputGolden)\n\n\t\tif err != nil {\n\n\t\t\tt.Fatalf(\"not expecting error compacting api check output golden file but got: %v\", err)\n\n\t\t}\n\n\t\tbodyCompact := &bytes.Buffer{}\n\n\t\terr = json.Compact(bodyCompact, body)\n\n\t\tif err != nil {\n\n\t\t\tt.Fatalf(\"failed to compact response body: %s\", err)\n\n\t\t}\n\n\t\tif !bytes.Equal(bodyCompact.Bytes(), apiGatewayOutputGoldenCompact.Bytes()) {\n\n\t\t\tt.Errorf(\"expecting response body to be\\n %s\\n but got\\n %s\", apiGatewayOutputGoldenCompact.Bytes(), bodyCompact.Bytes())\n\n\t\t}\n\n\t})\n\n\n\n\tt.Run(\"Returns error when invalid timeWindow is passed\", func(t *testing.T) {\n\n\t\trecorder := httptest.NewRecorder()\n\n\t\treq := httptest.NewRequest(\"GET\", \"/api/gateways?window=1t\", nil)\n\n\t\thandler.handleAPIGateways(recorder, req, httprouter.Params{})\n\n\t\tresp := recorder.Result()\n\n\t\tdefer resp.Body.Close()\n\n\t\t_, err := ioutil.ReadAll(resp.Body)\n\n\t\tif err != nil {\n\n\t\t\tt.Fatalf(\"not expecting error reading response body but got: %v\", err)\n\n\t\t}\n\n\t\tif recorder.Code == http.StatusOK {\n\n\t\t\tt.Errorf(\"Incorrect StatusCode: %+v\", recorder.Code)\n\n\t\t\tt.Errorf(\"Expected %+v\", http.StatusInternalServerError)\n\n\t\t}\n\n\t})\n\n}\n", "file_path": "web/srv/api_handlers_test.go", "rank": 99, "score": 110570.47036209295 } ]
Rust
modules/fdb/src/ro/mod.rs
enteryournamehere/assembly_rs
dd5250abb586e135b59bf574543c386a4c89cbd9
use std::{ops::Deref, sync::Arc}; use assembly_core::buffer::{CastError, MinimallyAligned, Repr}; use self::buffer::Buffer; use super::file::ArrayHeader; pub mod buffer; pub mod handle; pub mod slice; pub type ArcHandle<B, T> = BaseHandle<Arc<B>, T>; impl<B: AsRef<[u8]>> ArcHandle<B, ()> { pub fn new_arc(inner: B) -> Self { Self::new(Arc::new(inner)) } } impl<B: AsRef<[u8]>, T: Copy> ArcHandle<B, T> { pub fn as_bytes_handle(&self) -> Handle<T> { BaseHandle { mem: Buffer::new(self.mem.as_ref().as_ref()), raw: self.raw, } } } #[derive(Clone, Debug)] pub struct BaseHandle<P: Deref, T> where <P as Deref>::Target: AsRef<[u8]>, { pub(super) mem: P, pub(super) raw: T, } impl<P, T> Copy for BaseHandle<P, T> where P: Deref + Copy, T: Copy, <P as Deref>::Target: AsRef<[u8]>, { } impl<P: Deref> BaseHandle<P, ()> where <P as Deref>::Target: AsRef<[u8]>, { pub fn new(mem: P) -> Self { Self { mem, raw: () } } } impl<T, P: Deref> BaseHandle<P, Option<T>> where <P as Deref>::Target: AsRef<[u8]>, { pub fn transpose(self) -> Option<BaseHandle<P, T>> { if let Some(raw) = self.raw { Some(BaseHandle { mem: self.mem, raw }) } else { None } } } impl<P: Deref, T> BaseHandle<P, T> where <P as Deref>::Target: AsRef<[u8]>, { pub fn raw(&self) -> &T { &self.raw } pub fn raw_mut(&mut self) -> &mut T { &mut self.raw } pub fn as_bytes(&self) -> &[u8] { self.mem.deref().as_ref() } pub fn replace<O>(self, raw: O) -> BaseHandle<P, O> { BaseHandle { mem: self.mem, raw } } } pub type Handle<'a, T> = BaseHandle<Buffer<'a>, T>; impl<'a, T> Handle<'a, T> { pub fn buf(self) -> Buffer<'a> { self.mem } pub fn into_raw(self) -> T { self.raw } pub(crate) fn wrap<R>(&self, raw: R) -> Handle<'a, R> { Handle { mem: self.mem, raw } } pub(crate) fn try_map_cast<R: MinimallyAligned>( &self, offset: u32, ) -> Result<RefHandle<'a, R>, CastError> { let raw: &'a R = self.mem.try_cast(offset)?; Ok(self.wrap(raw)) } pub(crate) fn try_map_cast_slice<R: MinimallyAligned>( &self, offset: u32, count: u32, ) -> Result<RefHandle<'a, [R]>, CastError> { let raw: &'a [R] = self.mem.try_cast_slice(offset, count)?; Ok(self.wrap(raw)) } pub(crate) fn try_map_cast_array<R: MinimallyAligned>( &self, array: ArrayHeader, ) -> Result<RefHandle<'a, [R]>, CastError> { let raw: &'a [R] = self.mem.try_cast_slice(array.base_offset, array.count)?; Ok(self.wrap(raw)) } pub fn map<X>(self, mapper: impl Fn(Buffer<'a>, T) -> X) -> Handle<'a, X> { let raw = mapper(self.mem, self.raw); Handle { mem: self.mem, raw } } pub fn map_val<X>(self, mapper: impl Fn(T) -> X) -> Handle<'a, X> { let raw = mapper(self.raw); Handle { mem: self.mem, raw } } pub fn try_map<X, E>( self, mapper: impl Fn(Buffer<'a>, T) -> Result<X, E>, ) -> Result<Handle<'a, X>, E> { let raw = mapper(self.mem, self.raw)?; Ok(Handle { mem: self.mem, raw }) } } impl<'a, T> Iterator for Handle<'a, T> where T: Iterator, { type Item = Handle<'a, T::Item>; fn next(&mut self) -> Option<Self::Item> { self.raw.next().map(|raw| Handle { mem: self.mem, raw }) } } impl<'a, T> RefHandle<'a, [T]> { pub fn get(self, index: usize) -> Option<RefHandle<'a, T>> { self.raw.get(index).map(|raw| self.wrap(raw)) } } pub type RefHandle<'a, T> = Handle<'a, &'a T>; impl<'a, T: Repr> RefHandle<'a, T> { pub fn map_extract(self) -> Handle<'a, T::Value> { self.wrap(self.raw.extract()) } } pub type SliceHandle<'a, T> = RefHandle<'a, [T]>; pub type SliceIterHandle<'a, T> = Handle<'a, std::slice::Iter<'a, T>>;
use std::{ops::Deref, sync::Arc}; use assembly_core::buffer::{CastError, MinimallyAligned, Repr}; use self::buffer::Buffer; use super::file::ArrayHeader; pub mod buffer; pub mod handle; pub mod slice; pub type ArcHandle<B, T> = BaseHandle<Arc<B>, T>; impl<B: AsRef<[u8]>> ArcHandle<B, ()> { pub fn new_arc(inner: B) -> Self { Self::new(Arc::new(inner)) } } impl<B: AsRef<[u8]>, T: Copy> ArcHandle<B, T> { pub f
} #[derive(Clone, Debug)] pub struct BaseHandle<P: Deref, T> where <P as Deref>::Target: AsRef<[u8]>, { pub(super) mem: P, pub(super) raw: T, } impl<P, T> Copy for BaseHandle<P, T> where P: Deref + Copy, T: Copy, <P as Deref>::Target: AsRef<[u8]>, { } impl<P: Deref> BaseHandle<P, ()> where <P as Deref>::Target: AsRef<[u8]>, { pub fn new(mem: P) -> Self { Self { mem, raw: () } } } impl<T, P: Deref> BaseHandle<P, Option<T>> where <P as Deref>::Target: AsRef<[u8]>, { pub fn transpose(self) -> Option<BaseHandle<P, T>> { if let Some(raw) = self.raw { Some(BaseHandle { mem: self.mem, raw }) } else { None } } } impl<P: Deref, T> BaseHandle<P, T> where <P as Deref>::Target: AsRef<[u8]>, { pub fn raw(&self) -> &T { &self.raw } pub fn raw_mut(&mut self) -> &mut T { &mut self.raw } pub fn as_bytes(&self) -> &[u8] { self.mem.deref().as_ref() } pub fn replace<O>(self, raw: O) -> BaseHandle<P, O> { BaseHandle { mem: self.mem, raw } } } pub type Handle<'a, T> = BaseHandle<Buffer<'a>, T>; impl<'a, T> Handle<'a, T> { pub fn buf(self) -> Buffer<'a> { self.mem } pub fn into_raw(self) -> T { self.raw } pub(crate) fn wrap<R>(&self, raw: R) -> Handle<'a, R> { Handle { mem: self.mem, raw } } pub(crate) fn try_map_cast<R: MinimallyAligned>( &self, offset: u32, ) -> Result<RefHandle<'a, R>, CastError> { let raw: &'a R = self.mem.try_cast(offset)?; Ok(self.wrap(raw)) } pub(crate) fn try_map_cast_slice<R: MinimallyAligned>( &self, offset: u32, count: u32, ) -> Result<RefHandle<'a, [R]>, CastError> { let raw: &'a [R] = self.mem.try_cast_slice(offset, count)?; Ok(self.wrap(raw)) } pub(crate) fn try_map_cast_array<R: MinimallyAligned>( &self, array: ArrayHeader, ) -> Result<RefHandle<'a, [R]>, CastError> { let raw: &'a [R] = self.mem.try_cast_slice(array.base_offset, array.count)?; Ok(self.wrap(raw)) } pub fn map<X>(self, mapper: impl Fn(Buffer<'a>, T) -> X) -> Handle<'a, X> { let raw = mapper(self.mem, self.raw); Handle { mem: self.mem, raw } } pub fn map_val<X>(self, mapper: impl Fn(T) -> X) -> Handle<'a, X> { let raw = mapper(self.raw); Handle { mem: self.mem, raw } } pub fn try_map<X, E>( self, mapper: impl Fn(Buffer<'a>, T) -> Result<X, E>, ) -> Result<Handle<'a, X>, E> { let raw = mapper(self.mem, self.raw)?; Ok(Handle { mem: self.mem, raw }) } } impl<'a, T> Iterator for Handle<'a, T> where T: Iterator, { type Item = Handle<'a, T::Item>; fn next(&mut self) -> Option<Self::Item> { self.raw.next().map(|raw| Handle { mem: self.mem, raw }) } } impl<'a, T> RefHandle<'a, [T]> { pub fn get(self, index: usize) -> Option<RefHandle<'a, T>> { self.raw.get(index).map(|raw| self.wrap(raw)) } } pub type RefHandle<'a, T> = Handle<'a, &'a T>; impl<'a, T: Repr> RefHandle<'a, T> { pub fn map_extract(self) -> Handle<'a, T::Value> { self.wrap(self.raw.extract()) } } pub type SliceHandle<'a, T> = RefHandle<'a, [T]>; pub type SliceIterHandle<'a, T> = Handle<'a, std::slice::Iter<'a, T>>;
n as_bytes_handle(&self) -> Handle<T> { BaseHandle { mem: Buffer::new(self.mem.as_ref().as_ref()), raw: self.raw, } }
function_block-function_prefixed
[ { "content": "/// Expect an opening tag `<{key}>`\n\npub fn expect_elem<B: BufRead>(\n\n xml: &mut Reader<B>,\n\n buf: &mut Vec<u8>,\n\n key: &'static str,\n\n) -> Result<()> {\n\n if let Event::Start(start) = xml.read_event(buf)? {\n\n if start.name() == key.as_bytes() {\n\n buf.clear();\n\n Ok(())\n\n } else {\n\n todo!();\n\n }\n\n } else {\n\n todo!()\n\n }\n\n}\n\n\n", "file_path": "modules/xml/src/common/mod.rs", "rank": 0, "score": 270309.61857087445 }, { "content": "/// Expect a closing tag `</{key}>`\n\npub fn expect_end<B: BufRead>(\n\n xml: &mut Reader<B>,\n\n buf: &mut Vec<u8>,\n\n key: &'static str,\n\n) -> Result<()> {\n\n if let Event::End(end) = xml.read_event(buf)? {\n\n #[allow(clippy::branches_sharing_code)]\n\n if end.name() == key.as_bytes() {\n\n buf.clear();\n\n Ok(())\n\n } else {\n\n buf.clear();\n\n todo!()\n\n }\n\n } else {\n\n todo!()\n\n }\n\n}\n", "file_path": "modules/xml/src/common/mod.rs", "rank": 1, "score": 270309.6185708745 }, { "content": "/// Expects an opening `<database>`\n\npub fn expect_database<B: BufRead>(\n\n xml: &mut Reader<B>,\n\n buf: &mut Vec<u8>,\n\n) -> Result<Option<String>, XmlError> {\n\n expect_named_elem(xml, buf, \"database\", None)\n\n}\n\n\n", "file_path": "modules/xml/src/database/mod.rs", "rank": 2, "score": 270309.61857087445 }, { "content": "/// Expects an opening `<table>` tag or a closing `</database>` tag\n\npub fn expect_table<B: BufRead>(\n\n xml: &mut Reader<B>,\n\n buf: &mut Vec<u8>,\n\n) -> Result<Option<String>, XmlError> {\n\n expect_named_elem(xml, buf, \"table\", Some(\"database\"))\n\n}\n\n\n", "file_path": "modules/xml/src/database/mod.rs", "rank": 3, "score": 270309.61857087445 }, { "content": "/// Encode a byte slice into a vector\n\npub fn encode<B: AsRef<[u8]>>(\n\n data: B,\n\n output: &mut Vec<u8>,\n\n level: Compression,\n\n) -> write::Result<()> {\n\n let input = data.as_ref();\n\n let mut reader = Cursor::new(input);\n\n\n\n let writer = Cursor::new(output);\n\n\n\n let mut writer = write::SegmentedEncoder::new(writer, level)?;\n\n std::io::copy(&mut reader, &mut writer)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "modules/pack/src/sd0/mod.rs", "rank": 4, "score": 269673.383910425 }, { "content": "/// Expect an opening tag `<{key} name=\"…\">`\n\npub fn expect_named_elem<B: BufRead>(\n\n xml: &mut Reader<B>,\n\n buf: &mut Vec<u8>,\n\n key: &'static str,\n\n parent: Option<&'static str>,\n\n) -> Result<Option<String>> {\n\n match xml.read_event(buf)? {\n\n Event::Start(start) => {\n\n if start.name() == key.as_bytes() {\n\n let mut name = String::new();\n\n for attr in start.attributes() {\n\n let attr = attr?;\n\n if attr.key == b\"name\" {\n\n name = xml.decode(&attr.value).into_owned();\n\n break;\n\n }\n\n }\n\n buf.clear();\n\n Ok(Some(name))\n\n } else {\n", "file_path": "modules/xml/src/common/mod.rs", "rank": 5, "score": 267859.6721717376 }, { "content": "/// Expects an empty `<row …/>` tag or a closing `</rows>` tag\n\npub fn expect_row_or_end_rows<B: BufRead>(\n\n xml: &mut Reader<B>,\n\n buf: &mut Vec<u8>,\n\n load_attrs: bool,\n\n) -> Result<Option<HashMap<String, String>>, XmlError> {\n\n match xml.read_event(buf)? 
{\n\n Event::Empty(start) => {\n\n if start.name() == b\"row\" {\n\n let map = if load_attrs {\n\n let mut m = HashMap::new();\n\n for attr in start.attributes() {\n\n let attr = attr?;\n\n let key = xml.decode(attr.key).into_owned();\n\n let value = attr.unescape_and_decode_value(xml)?;\n\n m.insert(key, value);\n\n }\n\n m\n\n } else {\n\n HashMap::new()\n\n };\n", "file_path": "modules/xml/src/database/mod.rs", "rank": 6, "score": 265479.71602369886 }, { "content": "/// Expects an empty `<column …/>` tag or a closing `</columns>` tag\n\npub fn expect_column_or_end_columns<B: BufRead>(\n\n xml: &mut Reader<B>,\n\n buf: &mut Vec<u8>,\n\n) -> Result<Option<Column>, XmlError> {\n\n match xml.read_event(buf)? {\n\n Event::Empty(start) => {\n\n if start.name() == b\"column\" {\n\n let mut name = None;\n\n let mut data_type = None;\n\n for attr in start.attributes() {\n\n let attr = attr?;\n\n if attr.key == b\"name\" {\n\n name = Some(xml.decode(&attr.value).into_owned());\n\n }\n\n\n\n if attr.key == b\"type\" {\n\n data_type = Some(\n\n xml.decode(&attr.value)\n\n .parse()\n\n .expect(\"Expected well-known value type\"),\n", "file_path": "modules/xml/src/database/mod.rs", "rank": 7, "score": 265479.71602369886 }, { "content": "fn find_files_lines<B: BufRead>(buffer: &mut String, reader: &mut B) -> Result<()> {\n\n buffer.clear();\n\n loop {\n\n let len = reader.read_line(buffer)?;\n\n assert_ne!(len, 0);\n\n\n\n if buffer.trim() == Section::Files.as_header() {\n\n buffer.clear();\n\n break Ok(());\n\n }\n\n buffer.clear();\n\n }\n\n}\n\n\n\nimpl Manifest {\n\n /// Read a manifest from a [BufRead] implementation\n\n ///\n\n /// ```\n\n /// use std::{io::Cursor, collections::BTreeMap};\n\n /// use assembly_pack::{md5::MD5Sum, txt::{Manifest, VersionLine}};\n", "file_path": "modules/pack/src/txt/mod.rs", "rank": 8, "score": 261507.500689991 }, { "content": "fn find_version_line<B: BufRead>(buffer: &mut String, reader: &mut B) -> Result<VersionLine> {\n\n buffer.clear();\n\n loop {\n\n let len = reader.read_line(buffer)?;\n\n assert_ne!(len, 0);\n\n\n\n if buffer.trim() == Section::Version.as_header() {\n\n buffer.clear();\n\n reader.read_line(buffer)?;\n\n //panic!(\"{:?}\", buffer);\n\n let line = version_line(buffer.trim())?;\n\n //panic!(\"{:?}\", &line);\n\n break Ok(line);\n\n }\n\n buffer.clear();\n\n }\n\n}\n\n\n", "file_path": "modules/pack/src/txt/mod.rs", "rank": 9, "score": 255511.88762769 }, { "content": "/// Decode a byte slice into a vector\n\npub fn decode<B: AsRef<[u8]>>(data: B, output: &mut Vec<u8>) -> read::Result<()> {\n\n let mut writer = Cursor::new(output);\n\n\n\n let compressed = Cursor::new(data);\n\n let mut reader = read::SegmentedDecoder::new(compressed)?;\n\n\n\n std::io::copy(&mut reader, &mut writer)?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::sd0::encode;\n\n\n\n use super::{decode, Compression};\n\n use std::io;\n\n\n\n fn roundtrip(data: &[u8]) -> io::Result<Vec<u8>> {\n\n let mut compressed = Vec::with_capacity(data.len() / 2);\n\n super::encode(data, &mut compressed, Compression::best())?;\n", "file_path": "modules/pack/src/sd0/mod.rs", "rank": 10, "score": 252313.72237657776 }, { "content": "/// Cast a buffer to a slice\n\n///\n\n/// ## Panics\n\n///\n\n/// - If the `[offset, offset + len]` is not contained by the buffer\n\npub fn cast_slice<T: MinimallyAligned>(buffer: &[u8], offset: u32, len: u32) -> &[T] {\n\n try_cast_slice(buffer, offset, len).unwrap()\n\n}\n\n\n", "file_path": "modules/core/src/buffer.rs", "rank": 11, "score": 
249776.63835276983 }, { "content": "/// Expect an `<?xml …` declaration\n\npub fn expect_decl<B: BufRead>(xml: &mut Reader<B>, buf: &mut Vec<u8>) -> Result<()> {\n\n if let Event::Decl(_) = xml.read_event(buf)? {\n\n buf.clear();\n\n Ok(())\n\n } else {\n\n Err(XmlError::ExpectedDecl)\n\n }\n\n}\n\n\n", "file_path": "modules/xml/src/common/mod.rs", "rank": 12, "score": 248505.10716089688 }, { "content": "/// Expects an opening `<rows>` tag\n\npub fn expect_rows<B: BufRead>(xml: &mut Reader<B>, buf: &mut Vec<u8>) -> Result<(), XmlError> {\n\n expect_elem(xml, buf, \"rows\")\n\n}\n\n\n\n/// The information on a column\n\n#[cfg_attr(feature = \"serialize\", derive(Deserialize))]\n\npub struct Column {\n\n /// The name of the column\n\n pub name: String,\n\n /// The data type of the column\n\n pub r#type: ValueType,\n\n}\n\n\n\n/*#[derive(Deserialize)]\n\n/// The Columns struct\n\npub struct Columns {\n\n /// The columns\n\n columns: Vec<Column>\n\n}*/\n\n\n", "file_path": "modules/xml/src/database/mod.rs", "rank": 13, "score": 243038.74988118073 }, { "content": "/// Expects an opening `<columns>` tag\n\npub fn expect_columns<B: BufRead>(xml: &mut Reader<B>, buf: &mut Vec<u8>) -> Result<(), XmlError> {\n\n expect_elem(xml, buf, \"columns\")\n\n}\n\n\n", "file_path": "modules/xml/src/database/mod.rs", "rank": 14, "score": 243038.7498811807 }, { "content": "/// Run the function `run` and print the how much time the execution took.\n\npub fn time<F, E>(run: F) -> Result<(), E>\n\nwhere\n\n F: FnOnce() -> Result<(), E>,\n\n{\n\n let start = Instant::now();\n\n let res = run();\n\n let duration = start.elapsed();\n\n\n\n println!(\n\n \"{} in {}.{}s\",\n\n if res.is_ok() { \"Finished\" } else { \"Failed\" },\n\n duration.as_secs(),\n\n duration.subsec_millis(),\n\n );\n\n\n\n res\n\n}\n", "file_path": "modules/core/src/lib.rs", "rank": 15, "score": 234419.2032401344 }, { "content": "/// Try to cast a buffer to a slice\n\npub fn try_cast_slice<T: MinimallyAligned>(\n\n buffer: &[u8],\n\n offset: u32,\n\n len: u32,\n\n) -> Result<&[T], CastError> {\n\n let base = buffer.as_ptr();\n\n let buf_len = buffer.len();\n\n\n\n let ulen = len as usize;\n\n let needed = std::mem::size_of::<T>() * ulen;\n\n\n\n if offset as usize + needed <= buf_len {\n\n unsafe {\n\n let addr = base.offset(offset as isize) as *const T;\n\n Ok(std::slice::from_raw_parts(addr, ulen))\n\n }\n\n } else {\n\n Err(CastError::OutOfBounds { offset })\n\n }\n\n}\n\n\n", "file_path": "modules/core/src/buffer.rs", "rank": 16, "score": 233851.9704368277 }, { "content": "/// Combine a parser 2 times\n\npub fn count_2<I, O, E, F>(fun: F) -> impl Fn(I) -> IResult<I, [O; 2], E>\n\nwhere\n\n F: Fn(I) -> IResult<I, O, E>,\n\n{\n\n move |input: I| {\n\n let (input, o1) = fun(input)?;\n\n fun(input).map(|(i, o2)| (i, [o1, o2]))\n\n }\n\n}\n\n\n", "file_path": "modules/core/src/nom_ext.rs", "rank": 17, "score": 228735.50111408025 }, { "content": "/// Combine a parser 5 times\n\npub fn count_5<I, O, E, F>(fun: F) -> impl Fn(I) -> IResult<I, [O; 5], E>\n\nwhere\n\n F: Fn(I) -> IResult<I, O, E>,\n\n{\n\n move |input: I| {\n\n let (input, o1) = fun(input)?;\n\n let (input, o2) = fun(input)?;\n\n let (input, o3) = fun(input)?;\n\n let (input, o4) = fun(input)?;\n\n fun(input).map(|(i, o5)| (i, [o1, o2, o3, o4, o5]))\n\n }\n\n}\n", "file_path": "modules/core/src/nom_ext.rs", "rank": 18, "score": 228735.50111408025 }, { "content": "/// Expect an opening tag and return it\n\npub fn expect_start<'a, 'b, 'c, B: BufRead>(\n\n key: &'a str,\n\n reader: &'b 
mut XmlReader<B>,\n\n buf: &'c mut Vec<u8>,\n\n) -> Result<XmlBytesStart<'c>> {\n\n if let Ok(XmlEvent::Start(e)) = reader.read_event(buf) {\n\n if e.name() == key.as_bytes() {\n\n Ok(e)\n\n } else {\n\n Err(Error::ExpectedTag(\n\n key.to_owned(),\n\n reader.decode(e.name()).into_owned(),\n\n ))\n\n }\n\n } else {\n\n Err(Error::MissingTag(key.to_owned()))\n\n }\n\n}\n\n\n", "file_path": "modules/xml/src/common/exact.rs", "rank": 19, "score": 228099.58149796934 }, { "content": "/// Expect a closing tag and return it\n\npub fn expect_end<'a, 'b, 'c, B: BufRead>(\n\n key: &'a str,\n\n reader: &'b mut XmlReader<B>,\n\n buf: &'c mut Vec<u8>,\n\n) -> Result<XmlBytesEnd<'c>> {\n\n if let Ok(XmlEvent::End(e)) = reader.read_event(buf) {\n\n if e.name() == key.as_bytes() {\n\n Ok(e)\n\n } else {\n\n Err(Error::ExpectedEndTag(\n\n key.to_owned(),\n\n reader.decode(e.name()).into_owned(),\n\n ))\n\n }\n\n } else {\n\n Err(Error::MissingEndTag(key.to_owned()))\n\n }\n\n}\n\n\n", "file_path": "modules/xml/src/common/exact.rs", "rank": 20, "score": 228099.58149796934 }, { "content": "fn get_field_raw(buf: Buffer, data_type: ValueType, bytes: [u8; 4]) -> Field {\n\n match data_type {\n\n ValueType::Nothing => Field::Nothing,\n\n ValueType::Integer => Field::Integer(i32::from_le_bytes(bytes)),\n\n ValueType::Float => Field::Float(f32::from_le_bytes(bytes)),\n\n ValueType::Text => {\n\n let addr = u32::from_le_bytes(bytes);\n\n let text = get_latin1_str(buf.as_bytes(), addr);\n\n Field::Text(text)\n\n }\n\n ValueType::Boolean => Field::Boolean(bytes != [0, 0, 0, 0]),\n\n ValueType::BigInt => {\n\n let addr = u32::from_le_bytes(bytes);\n\n let val = buf.cast::<LEI64>(addr).extract();\n\n Field::BigInt(val)\n\n }\n\n ValueType::VarChar => {\n\n let addr = u32::from_le_bytes(bytes);\n\n let text = get_latin1_str(buf.as_bytes(), addr);\n\n Field::VarChar(text)\n", "file_path": "modules/fdb/src/mem/mod.rs", "rank": 21, "score": 224378.62914917633 }, { "content": "/// Cast a buffer to a reference\n\n///\n\n/// ## Panics\n\n///\n\n/// - If the `[offset, offset + size_of::<Self>]` is not contained by the buffer\n\npub fn cast<T: MinimallyAligned>(buffer: &[u8], offset: u32) -> &T {\n\n try_cast(buffer, offset).unwrap()\n\n}\n\n\n", "file_path": "modules/core/src/buffer.rs", "rank": 22, "score": 219308.63448565645 }, { "content": "/// Expect an attribute on an opening tag and return a parsed value\n\npub fn expect_attribute<T: FromStr, B: BufRead>(\n\n key: &str,\n\n reader: &XmlReader<B>,\n\n event: &XmlBytesStart,\n\n) -> Result<T>\n\nwhere\n\n <T as FromStr>::Err: std::error::Error + Send + Sync + Sized + 'static,\n\n{\n\n let attr = event\n\n .attributes()\n\n .next()\n\n .ok_or_else(|| Error::MissingAttribute(key.to_owned()))??;\n\n\n\n if attr.key == key.as_bytes() {\n\n let attr_unesc = attr.unescaped_value()?;\n\n let attr_str = reader.decode(&attr_unesc);\n\n let value = attr_str.parse().map_err(|e| {\n\n let b: Box<dyn StdError + Sync + Send> = Box::new(e);\n\n b\n\n })?;\n\n Ok(value)\n\n } else {\n\n Err(Error::ExpectedAttribute(\n\n key.to_owned(),\n\n reader.decode(attr.key).into_owned(),\n\n ))\n\n }\n\n}\n", "file_path": "modules/xml/src/common/exact.rs", "rank": 23, "score": 218426.24719573394 }, { "content": "/// Similar to `From<&U> for T`\n\npub trait Repr {\n\n /// The value that this struct encodes\n\n type Value;\n\n\n\n /// extract the contained value\n\n fn extract(&self) -> Self::Value;\n\n}\n\n\n\n/// little-endian u16\n\n#[repr(C, align(1))]\n\npub struct LEU16([u8; 2]);\n\n\n\n/// 
little-endian u32\n\n#[repr(C, align(1))]\n\n#[derive(Debug)]\n\npub struct LEU32([u8; 4]);\n\n\n\n/// little-endian u64\n\n#[repr(C, align(1))]\n\npub struct LEI64([u8; 8]);\n", "file_path": "modules/core/src/buffer.rs", "rank": 24, "score": 216184.8080287219 }, { "content": "/// Try to cast a buffer to a reference\n\npub fn try_cast<T: MinimallyAligned>(buffer: &[u8], offset: u32) -> Result<&T, CastError> {\n\n let base = buffer.as_ptr();\n\n let len = buffer.len();\n\n\n\n if offset as usize + std::mem::size_of::<T>() <= len {\n\n unsafe {\n\n let addr = base.offset(offset as isize);\n\n Ok(&*(addr as *const T))\n\n }\n\n } else {\n\n Err(CastError::OutOfBounds { offset })\n\n }\n\n}\n\n\n", "file_path": "modules/core/src/buffer.rs", "rank": 25, "score": 208662.41907930037 }, { "content": "/// Get a reference to a slice at the given address of this buffer\n\n///\n\n/// This functions checks whether the offset and alignment is valid\n\npub fn get_slice_at<T>(buf: &[u8], addr: usize, count: usize) -> Res<&[T]> {\n\n let base = buf.as_ptr();\n\n let len = buf.len();\n\n let size = std::mem::size_of::<T>();\n\n let align = std::mem::align_of::<T>();\n\n\n\n let slice_bytes = size\n\n .checked_mul(count)\n\n .ok_or(BufferError::OutOfBounds(addr..len))?;\n\n\n\n let needed = addr\n\n .checked_add(slice_bytes)\n\n .ok_or(BufferError::OutOfBounds(addr..len))?;\n\n\n\n if needed > len {\n\n return Err(BufferError::OutOfBounds(addr..needed));\n\n }\n\n\n\n let start = unsafe { base.add(addr) };\n\n if 0 != start.align_offset(align) {\n\n return Err(BufferError::Unaligned(addr));\n\n }\n\n\n\n Ok(unsafe { &*(std::ptr::slice_from_raw_parts(start as *const T, count)) })\n\n}\n\n\n\n/// Get the database header\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 26, "score": 208308.64276341358 }, { "content": "/// Expect some text and return it\n\npub fn expect_text<B: BufRead>(reader: &mut XmlReader<B>, buf: &mut Vec<u8>) -> Result<String> {\n\n if let Ok(XmlEvent::Text(e)) = reader.read_event(buf) {\n\n let text = e.unescape_and_decode(reader)?;\n\n Ok(text)\n\n } else {\n\n Err(Error::MissingText)\n\n }\n\n}\n\n\n", "file_path": "modules/xml/src/common/exact.rs", "rank": 27, "score": 206782.9179985219 }, { "content": "/// Create a PK filter from a string\n\npub fn pk_filter<T: Into<String>>(\n\n key: T,\n\n field_type: ValueType,\n\n) -> Result<PrimaryKeyFilter, PKFilterError> {\n\n match field_type {\n\n ValueType::Text => text_pk_filter(key.into()),\n\n ValueType::Integer => integer_pk_filter(key.into()),\n\n ValueType::BigInt => bigint_pk_filter(key.into()),\n\n _ => Err(PKFilterError::UnsupportedType(field_type)),\n\n }\n\n}\n", "file_path": "modules/fdb/src/query/mod.rs", "rank": 28, "score": 189666.40479318402 }, { "content": "pub fn parse_path_type(input: &[u8]) -> IResult<&[u8], PathType> {\n\n map_opt(le_u32, PathType::from_u32)(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 29, "score": 187168.1031017956 }, { "content": "/// Get the header of the file.\n\npub fn header(buf: &[u8], _: ()) -> Res<FDBHeader> {\n\n Ok(*header_ref(buf)?)\n\n}\n\n\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 30, "score": 183219.58638316678 }, { "content": "#[cfg(target_endian = \"little\")]\n\npub fn header_ref(buf: &[u8]) -> Res<&FDBHeader> {\n\n get_at(buf, 0)\n\n}\n\n\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 31, "score": 183197.54386413412 }, { "content": "/// Get a reference to a type at the given address of this buffer\n\n///\n\n/// This 
functions checks whether the offset and alignment is valid\n\npub fn get_at<T>(buf: &[u8], addr: usize) -> Res<&T> {\n\n let base = buf.as_ptr();\n\n let len = buf.len();\n\n let size = std::mem::size_of::<T>();\n\n let align = std::mem::align_of::<T>();\n\n\n\n let needed = addr\n\n .checked_add(size)\n\n .ok_or(BufferError::OutOfBounds(addr..len))?;\n\n\n\n if needed > len {\n\n return Err(BufferError::OutOfBounds(addr..needed));\n\n }\n\n\n\n let start = unsafe { base.add(addr) };\n\n if 0 != start.align_offset(align) {\n\n return Err(BufferError::Unaligned(addr));\n\n }\n\n Ok(unsafe { &*(start as *const T) })\n\n}\n\n\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 32, "score": 176473.91716741354 }, { "content": "fn get_field<'a>(buf: Buffer<'a>, data: &'a FDBFieldDataC) -> Field<'a> {\n\n let data_type = ValueType::try_from(data.data_type.extract()).unwrap();\n\n let bytes = data.value.0;\n\n get_field_raw(buf, data_type, bytes)\n\n}\n\n\n", "file_path": "modules/fdb/src/mem/mod.rs", "rank": 33, "score": 175611.0159043261 }, { "content": "/// Create a bigint PK filter\n\npub fn bigint_pk_filter(key: String) -> Result<PrimaryKeyFilter, PKFilterError> {\n\n let value: i64 = key.parse().map_err(PKFilterError::KeyError)?;\n\n let hash_value = (u64::from_ne_bytes(value.to_ne_bytes()) % 0x1_0000_0000) as u32;\n\n Ok(PrimaryKeyFilter {\n\n hash_value,\n\n value: Field::BigInt(value),\n\n })\n\n}\n\n\n", "file_path": "modules/fdb/src/query/mod.rs", "rank": 34, "score": 172394.76343656145 }, { "content": "/// Create an integer PK filter\n\npub fn integer_pk_filter(key: String) -> Result<PrimaryKeyFilter, PKFilterError> {\n\n let value: i32 = key.parse().map_err(PKFilterError::KeyError)?;\n\n let hash_value = u32::from_ne_bytes(value.to_ne_bytes());\n\n Ok(PrimaryKeyFilter {\n\n hash_value,\n\n value: Field::Integer(value),\n\n })\n\n}\n\n\n", "file_path": "modules/fdb/src/query/mod.rs", "rank": 35, "score": 172394.76343656145 }, { "content": "/// Create a text PK filter\n\npub fn text_pk_filter(key: String) -> Result<PrimaryKeyFilter, PKFilterError> {\n\n let hash_value = sfhash::digest(key.as_bytes());\n\n let value = Field::Text(key);\n\n Ok(PrimaryKeyFilter { hash_value, value })\n\n}\n\n\n", "file_path": "modules/fdb/src/query/mod.rs", "rank": 36, "score": 172394.76343656145 }, { "content": "fn get_row_header_list_entry(buf: Buffer, addr: u32) -> Option<&FDBRowHeaderListEntryC> {\n\n if addr == u32::MAX {\n\n None\n\n } else {\n\n Some(buf.cast::<FDBRowHeaderListEntryC>(addr))\n\n }\n\n}\n\n\n\n/*#[allow(clippy::needless_lifetimes)] // <- clippy gets this wrong\n", "file_path": "modules/fdb/src/mem/mod.rs", "rank": 37, "score": 171356.63571674208 }, { "content": "/// Get the table data header\n\npub fn table_data(buf: &[u8], header: FDBTableHeader) -> Res<FDBTableDataHeader> {\n\n table_data_ref(buf, header).map(|x| *x)\n\n}\n\n\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 38, "score": 170042.65097850095 }, { "content": "/// Get the table definition header\n\npub fn table_definition(buf: &[u8], header: FDBTableHeader) -> Res<FDBTableDefHeader> {\n\n table_definition_ref(buf, header).map(|x| *x)\n\n}\n\n\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 39, "score": 170042.65097850095 }, { "content": "/// Get the table definition reference\n\npub fn table_definition_ref(buf: &[u8], header: FDBTableHeader) -> Res<&FDBTableDefHeader> {\n\n get_at(buf, header.table_def_header_addr as usize)\n\n}\n\n\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 40, 
"score": 168568.67551285046 }, { "content": "/// Get the table data reference\n\npub fn table_data_ref(buf: &[u8], header: FDBTableHeader) -> Res<&FDBTableDataHeader> {\n\n get_at(buf, header.table_data_header_addr as usize)\n\n}\n\n\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 41, "score": 168568.67551285046 }, { "content": "/// Get the table slice\n\npub fn table_headers<'a>(buf: &'a [u8], header: &'a FDBHeader) -> Res<&'a [FDBTableHeader]> {\n\n get_slice_at(\n\n buf,\n\n header.tables.base_offset as usize,\n\n header.tables.count as usize,\n\n )\n\n}\n\n\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 42, "score": 167316.07915917004 }, { "content": "/// Compares the name given by `bytes` with the one referenced in `table_header`\n\npub fn cmp_table_header_name(buf: &[u8], bytes: &[u8], table_header: FDBTableHeader) -> Ordering {\n\n let def_header_addr = table_header.table_def_header_addr;\n\n // FIXME: what to do with this unwrap?\n\n let def_header = get_at::<FDBTableDefHeader>(buf, def_header_addr as usize).unwrap();\n\n let name_addr = def_header.table_name_addr as usize;\n\n\n\n let name_bytes = buf.get(name_addr..).unwrap();\n\n\n\n compare_bytes(bytes, name_bytes)\n\n}\n\n\n\nimpl<'a> Buffer<'a> {\n\n /// Creates a new instance.\n\n pub fn new(buf: &'a [u8]) -> Self {\n\n Self(buf)\n\n }\n\n\n\n /// Returns the contained byte slice\n\n pub fn as_bytes(self) -> &'a [u8] {\n\n self.0\n", "file_path": "modules/fdb/src/ro/buffer.rs", "rank": 43, "score": 166998.09422243177 }, { "content": "type StringArena = BTreeMap<usize, Vec<Latin1String>>;\n\n\n", "file_path": "modules/fdb/src/store/mod.rs", "rank": 44, "score": 159436.46378765412 }, { "content": "fn visit_entries_json<F>(\n\n mut entries: PackEntryAccessor<&mut BufReader<File>>,\n\n make_ser: impl Fn() -> serde_json::ser::Serializer<Stdout, F>,\n\n) -> color_eyre::Result<()>\n\nwhere\n\n F: Formatter,\n\n{\n\n let mut ser = make_ser();\n\n let seq = ser.serialize_map(None)?;\n\n let mut jv = JsonVisitor(seq);\n\n if let ControlFlow::Break(e) = entries.visit(&mut jv)? 
{\n\n return Err(color_eyre::Report::from(e));\n\n }\n\n jv.0.end()?;\n\n println!();\n\n Ok(())\n\n}\n\n\n", "file_path": "modules/pack/examples/pk-entries.rs", "rank": 45, "score": 158784.97345984878 }, { "content": "pub fn parse_spawn_point<'a>(\n\n input: &'a [u8],\n\n version: FileVersion,\n\n) -> IResult<&'a [u8], Option<Placement3D>> {\n\n let inner = |i: &'a [u8]| {\n\n let (i, a) = parse_vec3f(i)?;\n\n let (i, b) = parse_quat(i)?;\n\n Ok((i, Placement3D { pos: a, rot: b }))\n\n };\n\n\n\n cond(version.id() >= 0x26, inner)(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/parser.rs", "rank": 46, "score": 152329.67332310404 }, { "content": "type FnBucketToRowIter<'a> = fn(Bucket<'a>) -> RowHeaderIter<'a>;\n\n\n\n/// Iterator produced by [`Table::row_iter`]\n\npub struct TableRowIter<'a> {\n\n inner: Flatten<Map<BucketIter<'a>, FnBucketToRowIter<'a>>>,\n\n}\n\n\n\nimpl<'a> TableRowIter<'a> {\n\n /// Create a new row iter from a bucket iter\n\n pub fn new(inner: BucketIter<'a>) -> Self {\n\n Self {\n\n inner: inner.map(bucket_rows as FnBucketToRowIter<'a>).flatten(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for TableRowIter<'a> {\n\n type Item = Row<'a>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "modules/fdb/src/mem/iter.rs", "rank": 47, "score": 152096.32470714848 }, { "content": "pub fn parse_property_achievement_required(\n\n input: &[u8],\n\n) -> IResult<&[u8], PropertyAchievementRequired> {\n\n map_opt(le_u32, PropertyAchievementRequired::from_u32)(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 48, "score": 152064.383764138 }, { "content": "pub fn parse_objects_chunk_data<'a>(\n\n version: u32,\n\n i: &'a [u8],\n\n) -> IResult<&'a [u8], ObjectsChunkData<String>> {\n\n let parse_object = move |i: &'a [u8]| -> IResult<&'a [u8], Object<String>> {\n\n let (i, obj_id) = parse_object_id(i)?;\n\n let (i, lot) = parse_object_template(i)?;\n\n let (i, asset_type) = cond(version >= 0x26, le_u32)(i)?;\n\n let (i, value_1) = cond(version >= 0x20, le_u32)(i)?;\n\n let (i, position) = parse_vec3f(i)?;\n\n let (i, rotation) = parse_quat_wxyz(i)?;\n\n let (i, scale) = le_f32(i)?;\n\n let (i, settings) = parse_u32_wstring(i)?;\n\n let (i, extra) = cond(version >= 0x07, length_count(le_u32, parse_object_extra))(i)?;\n\n Ok((\n\n i,\n\n Object {\n\n obj_id,\n\n lot,\n\n asset_type,\n", "file_path": "modules/maps/src/lvl/parser.rs", "rank": 49, "score": 151040.70393147616 }, { "content": "pub fn parse_path_data_moving_platform(\n\n version: PathVersion,\n\n) -> fn(&[u8]) -> IResult<&[u8], PathDataMovingPlatform> {\n\n fn pre_v13(i: &[u8]) -> IResult<&[u8], PathDataMovingPlatform> {\n\n Ok((i, PathDataMovingPlatform::PreV13))\n\n }\n\n fn v13_to_17(i: &[u8]) -> IResult<&[u8], PathDataMovingPlatform> {\n\n let (i, platform_travel_sound) = parse_u8_wstring(i)?;\n\n Ok((\n\n i,\n\n PathDataMovingPlatform::V13ToV17 {\n\n platform_travel_sound,\n\n },\n\n ))\n\n }\n\n fn post_v18(i: &[u8]) -> IResult<&[u8], PathDataMovingPlatform> {\n\n let (i, something) = le_u8(i)?;\n\n Ok((i, PathDataMovingPlatform::PostV18 { something }))\n\n }\n\n match version.id() {\n\n 0..=12 => pre_v13,\n\n 13..=17 => v13_to_17,\n\n _ => post_v18,\n\n }\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 50, "score": 150849.5790356155 }, { "content": "pub fn main() -> Result<(), Error> {\n\n let opt = Opt::from_args();\n\n\n\n if !opt.input.exists() || !opt.input.is_file() {\n\n return Err(Error::FileNotFound);\n\n }\n\n\n\n let file = 
File::open(opt.input.as_path()).unwrap();\n\n let mut buf = BufReader::new(file);\n\n let header = buf.read_terrain_header().unwrap();\n\n let chunk1 = buf.read_terrain_chunk().unwrap();\n\n let hmh = buf.read_height_map_header().unwrap();\n\n let _hm_data = buf.read_height_map_data(hmh.width, hmh.height).unwrap();\n\n let _cm_data = buf.read_color_map_data().unwrap();\n\n let lm_data = buf.read_embedded_file().unwrap();\n\n let _cm2_data = buf.read_color_map_data().unwrap();\n\n\n\n let _padding1 = buf.read_u8().unwrap();\n\n let lm2_data = buf.read_embedded_file().unwrap();\n\n let _padding2 = buf.read_i32::<LE>().unwrap();\n", "file_path": "modules/maps/examples/read-raw.rs", "rank": 51, "score": 150201.70758719154 }, { "content": "pub fn parse_path_data_camera<'a>(\n\n version: PathVersion,\n\n) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], PathDataCamera> {\n\n let mut v1_parser = cond(version.min(14), le_u8);\n\n move |i: &'a [u8]| {\n\n let (i, next_path) = parse_u8_wstring(i)?;\n\n let (i, value_1) = v1_parser(i)?;\n\n Ok((i, PathDataCamera { next_path, value_1 }))\n\n }\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 52, "score": 149789.64082289147 }, { "content": "pub fn main() -> color_eyre::Result<()> {\n\n // Load the options\n\n let opt = Options::from_args();\n\n assembly_core::time(|| {\n\n // load the file\n\n let file = File::open(&opt.file)?;\n\n let mmap = unsafe { Mmap::map(&file)? };\n\n let buffer: &[u8] = &mmap;\n\n\n\n // create the database handle\n\n let db = Database::new(buffer);\n\n\n\n // prepare the output\n\n let mut count = 0;\n\n let mut output = PTable::new();\n\n output.set_format(*prettytable::format::consts::FORMAT_NO_LINESEP_WITH_TITLE);\n\n output.set_titles(PRow::new(vec![PCell::new(\"Name\")]));\n\n\n\n // loop through all tables\n\n let tables = db.tables()?;\n", "file_path": "modules/fdb/examples/fdb-tables.rs", "rank": 53, "score": 148912.73819556367 }, { "content": "fn map_table_header<'a>(handle: RefHandle<'a, FDBTableHeaderC>) -> Result<Table<'a>, CastError> {\n\n let table_header = handle.into_raw().extract();\n\n\n\n let def_header: &'a FDBTableDefHeaderC =\n\n handle.buf().try_cast(table_header.table_def_header_addr)?;\n\n let def_header = def_header.extract();\n\n\n\n let data_header: &'a FDBTableDataHeaderC =\n\n handle.buf().try_cast(table_header.table_data_header_addr)?;\n\n let data_header = data_header.extract();\n\n\n\n let name = get_latin1_str(handle.buf().as_bytes(), def_header.table_name_addr);\n\n\n\n let columns: RefHandle<'a, [FDBColumnHeaderC]> =\n\n handle.try_map_cast_slice(def_header.column_header_list_addr, def_header.column_count)?;\n\n\n\n let buckets: RefHandle<'a, [FDBBucketHeaderC]> =\n\n handle.try_map_cast_array(data_header.buckets)?;\n\n\n\n Ok(Table::new(handle.wrap(InnerTable {\n", "file_path": "modules/fdb/src/mem/mod.rs", "rank": 54, "score": 148807.60262239107 }, { "content": "pub fn parse_path_waypoint_data_moving_platform_sounds(\n\n input: &[u8],\n\n) -> IResult<&[u8], PathWaypointDataMovingPlatformSounds> {\n\n let (input, depart_sound) = parse_u8_wstring(input)?;\n\n let (input, arrive_sound) = parse_u8_wstring(input)?;\n\n Ok((\n\n input,\n\n PathWaypointDataMovingPlatformSounds {\n\n depart_sound,\n\n arrive_sound,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 55, "score": 148522.61725112918 }, { "content": "pub fn main() -> Result<(), anyhow::Error> {\n\n let args: Vec<String> = env::args().collect();\n\n let program = 
args[0].clone();\n\n\n\n let mut opts = Options::new();\n\n opts.optflag(\"h\", \"help\", \"print this help menu\");\n\n let matches = match opts.parse(&args[1..]) {\n\n Ok(m) => m,\n\n Err(f) => panic!(\"{}\", f),\n\n };\n\n if matches.opt_present(\"h\") {\n\n print_usage(&program, opts);\n\n return Ok(());\n\n }\n\n let input = if !matches.free.is_empty() {\n\n matches.free[0].clone()\n\n } else {\n\n print_usage(&program, opts);\n\n return Ok(());\n\n };\n\n load_database(&input).with_context(|| \"Loading database failed!\")\n\n}\n", "file_path": "modules/sysdiagram/examples/sysdiagrams.rs", "rank": 56, "score": 148206.75691364787 }, { "content": "pub fn parse_path_waypoint_data_moving_platform<'a>(\n\n version: PathVersion,\n\n) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], PathWaypointDataMovingPlatform> {\n\n let mut sounds_parser = cond(\n\n version.min(13),\n\n parse_path_waypoint_data_moving_platform_sounds,\n\n );\n\n move |input: &'a [u8]| {\n\n let (input, rotation) = parse_quat(input)?;\n\n let (input, lock_player) = parse_u8_bool(input)?;\n\n let (input, speed) = le_f32(input)?;\n\n let (input, wait) = le_f32(input)?;\n\n let (input, sounds) = sounds_parser(input)?;\n\n Ok((\n\n input,\n\n PathWaypointDataMovingPlatform {\n\n rotation,\n\n lock_player,\n\n speed,\n\n wait,\n\n sounds,\n\n },\n\n ))\n\n }\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 57, "score": 147394.73599775776 }, { "content": "pub fn parse_scene_count(version: FileVersion) -> fn(input: &[u8]) -> IResult<&[u8], usize> {\n\n fn pre_x25(input: &[u8]) -> IResult<&[u8], usize> {\n\n map_res(le_u8, usize::try_from)(input)\n\n }\n\n\n\n fn post_x25(input: &[u8]) -> IResult<&[u8], usize> {\n\n map_res(le_u32, usize::try_from)(input)\n\n }\n\n\n\n if version.id() >= 0x25 {\n\n post_x25\n\n } else {\n\n pre_x25\n\n }\n\n}\n\n\n", "file_path": "modules/maps/src/luz/parser.rs", "rank": 58, "score": 147272.43584762645 }, { "content": "/// Calculate the Cyclic-Redundancy-Check for a file path\n\n///\n\n/// The game uses [CRC-32/MPEG-2], transforms all letters to lowercase,\n\n/// replaces slashes with backslashes and appends 4 NULL bytes.\n\n///\n\n/// [CRC-32/MPEG-2]: https://reveng.sourceforge.io/crc-catalogue/17plus.htm#crc.cat.crc-32-mpeg-2\n\npub fn calculate_crc(path: &[u8]) -> u32 {\n\n let mut crc = ALG.digest();\n\n\n\n let mut s = 0;\n\n for (i, b) in path.iter().copied().enumerate() {\n\n let n = normalize_char(b);\n\n if n != b {\n\n if i > s {\n\n crc.update(&path[s..i]);\n\n }\n\n crc.update(&[n]);\n\n s = i + 1;\n\n }\n\n }\n\n crc.update(&path[s..]);\n\n\n\n // I have no clue why this was added\n\n crc.update(&[0, 0, 0, 0]);\n\n\n\n crc.finalize()\n\n}\n", "file_path": "modules/pack/src/crc.rs", "rank": 59, "score": 146921.63583826885 }, { "content": "/// Write the directory of a PK file.\n\n///\n\n/// This function takes a [Write] implementation and a CRCTree<PKEntryData>\n\n/// and writes the tree part of the PK directory to disk\n\npub fn write_pk_directory_tree<W: Write>(\n\n writer: &mut W,\n\n tree: &CRCTree<PKEntryData>,\n\n) -> io::Result<()> {\n\n write_crc_tree(writer, tree, write_pk_entry_data)\n\n}\n\n\n", "file_path": "modules/pack/src/pk/writer.rs", "rank": 60, "score": 146446.87035845645 }, { "content": "#[allow(dead_code)]\n\npub fn dump<T>(val: T) -> T\n\nwhere\n\n T: std::fmt::Debug,\n\n{\n\n println!(\"{:?}\", val);\n\n val\n\n}\n\n\n", "file_path": "modules/core/src/parser.rs", "rank": 61, "score": 145043.75811566273 }, { "content": "/// Write a CRC tree to 
a writer\n\npub fn write_crc_tree<V, W: Write>(\n\n writer: &mut W,\n\n tree: &BTreeMap<u32, V>,\n\n write_value: fn(&mut W, &V) -> io::Result<()>,\n\n) -> io::Result<()> {\n\n let len = tree.len() as u32;\n\n writer.write_all(&len.to_le_bytes())?;\n\n write_crc_tree_recursive(\n\n writer,\n\n &mut tree.iter().map(|(a, b)| (*a, b)),\n\n 0..len,\n\n write_value,\n\n )\n\n}\n", "file_path": "modules/pack/src/common/writer.rs", "rank": 62, "score": 144451.91968491278 }, { "content": "/// Write the full directory to disk\n\n///\n\n/// For a \"complete\" PK file, this function takes the dictionary as a sorted tree\n\n/// and writes the PK directory as well as the trailer to disk.\n\npub fn write_pk_directory<W: Write + Seek>(\n\n writer: &mut W,\n\n tree: &CRCTree<PKEntryData>,\n\n) -> io::Result<()> {\n\n let file_list_base_addr = writer.seek(SeekFrom::Current(0))? as u32;\n\n let num_compressed = tree\n\n .iter()\n\n .filter(|(_, &x)| x.is_compressed & 0xFF > 0)\n\n .count() as u32;\n\n let trailer = PKTrailer {\n\n file_list_base_addr,\n\n num_compressed,\n\n };\n\n write_pk_directory_tree(writer, tree)?;\n\n write_pk_trailer(writer, &trailer)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "modules/pack/src/pk/writer.rs", "rank": 63, "score": 144451.91968491278 }, { "content": "/// Parse a single line of text as a command\n\npub fn parse_line(line: &str) -> Option<Command> {\n\n let wo_comment = line.split_once('#').map(|x| x.0).unwrap_or(line);\n\n\n\n if wo_comment.trim().is_empty() {\n\n return None;\n\n }\n\n\n\n let (cmd, arg) = match wo_comment.split_once('=') {\n\n Some((l, r)) => (l.trim(), Some(r.trim())),\n\n None => (wo_comment.trim(), None),\n\n };\n\n\n\n match cmd {\n\n \"current_directory\" => {\n\n if let Some(value) = arg {\n\n return Some(Command::CurrentDirectory(value.to_owned()));\n\n }\n\n }\n\n \"pack_index\" => {\n\n if let Some(value) = arg {\n", "file_path": "modules/pack/src/txt/gen.rs", "rank": 64, "score": 142577.89027855548 }, { "content": "/// Parse a CRC node\n\npub fn parse_crc_node<'r, D, P, E>(\n\n mut parser: P,\n\n) -> impl FnMut(&'r [u8]) -> IResult<&'r [u8], CRCTreeNode<D>, E>\n\nwhere\n\n P: Parser<&'r [u8], D, E>,\n\n E: ParseError<&'r [u8]>,\n\n{\n\n move |input: &'r [u8]| -> IResult<&[u8], CRCTreeNode<D>, E> {\n\n let (input, crc) = le_u32(input)?;\n\n let (input, left) = le_i32(input)?;\n\n let (input, right) = le_i32(input)?;\n\n let (input, data) = parser.parse(input)?;\n\n Ok((\n\n input,\n\n CRCTreeNode {\n\n crc,\n\n left,\n\n right,\n\n data,\n\n },\n\n ))\n\n }\n\n}\n\n\n", "file_path": "modules/pack/src/common/parser.rs", "rank": 65, "score": 142577.89027855548 }, { "content": "/// Parse the magic bytes\n\npub fn parse_pk_magic(input: &[u8]) -> IResult<&[u8], ()> {\n\n let (rest, _) = tag(\"ndpk\")(input)?;\n\n Ok((rest, ()))\n\n}\n\n\n", "file_path": "modules/pack/src/pk/parser.rs", "rank": 66, "score": 141397.79018194432 }, { "content": "pub fn parse_control1(input: &[u8]) -> IResult<&[u8], Control1> {\n\n do_parse!(\n\n input,\n\n pos_count: le_u16\n\n >> d1: le_u16\n\n >> positions: count!(parse_position, usize::from(pos_count))\n\n >> _d2: take!(32)\n\n >> d3: le_u32\n\n >> d4: le_u32\n\n >> pos: parse_position\n\n >> d5: le_u32\n\n >> d6: le_u32\n\n >> d7: le_u32\n\n >> _d8: take!(6)\n\n >> d9: le_u32\n\n >> (Control1 {\n\n positions,\n\n pos,\n\n d1,\n\n /*d2,*/ d3,\n\n d4,\n\n d5,\n\n d6,\n\n d7,\n\n /*d8,*/ d9,\n\n })\n\n )\n\n}\n\n\n", "file_path": "modules/sysdiagram/src/parser.rs", "rank": 67, "score": 140814.1113168535 }, { "content": 
"/// Get the md5sum of a file\n\npub fn md5sum(path: &Path) -> io::Result<FileMeta> {\n\n let file = File::open(path)?;\n\n let mut md5sum = IOSum::new(file);\n\n\n\n let mut buf: Box<[u8]> = Box::from([0u8; 1204 * 16]);\n\n\n\n let mut c = 1;\n\n while c > 0 {\n\n c = md5sum.read(buf.as_mut())?;\n\n }\n\n\n\n let size = md5sum.byte_count() as u32;\n\n let (_, hash) = md5sum.into_inner();\n\n Ok(FileMeta { size, hash })\n\n}\n", "file_path": "modules/pack/src/md5/fs.rs", "rank": 68, "score": 139634.0112202423 }, { "content": "/// Push a command to the config struct\n\npub fn push_command(config: &mut Config, cmd: Command) {\n\n match cmd {\n\n Command::CurrentDirectory(v) => {\n\n config.directory = PathBuf::from(v);\n\n }\n\n Command::PackIndex(v) => {\n\n config.output = PathBuf::from(v);\n\n }\n\n Command::ManifestFile(v) => {\n\n config.manifest = PathBuf::from(v);\n\n }\n\n Command::Pack {\n\n filename,\n\n force_compression,\n\n } => config.pack_files.push(PackFileConfig {\n\n name: filename,\n\n compressed: force_compression,\n\n args: vec![],\n\n }),\n\n Command::AddDir(d) => {\n", "file_path": "modules/pack/src/txt/gen.rs", "rank": 69, "score": 139634.0112202423 }, { "content": "pub fn parse_path(input: &[u8]) -> IResult<&[u8], Path> {\n\n let (input, version) = parse_path_version(input)?;\n\n let (input, path_name) = parse_u8_wstring(input)?;\n\n let (input, path_type) = parse_path_type(input)?;\n\n let (input, value_1) = le_u32(input)?;\n\n let (input, path_composition) = parse_path_composition(input)?;\n\n let header = PathHeader {\n\n version,\n\n path_name,\n\n value_1,\n\n path_composition,\n\n };\n\n parse_path_data(input, path_type, header)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 70, "score": 138487.14953236718 }, { "content": "pub fn parse_sch_grid(input: &[u8]) -> IResult<&[u8], SchGrid> {\n\n do_parse!(\n\n input,\n\n d1: le_u32\n\n >> d2: le_u32\n\n >> size1: parse_size\n\n >> d3: le_u32\n\n >> d4: le_u32\n\n >> buf_len: map_res!(le_u32, usize::try_from)\n\n >> name: parse_wstring_nt\n\n >> take!(buf_len - name.len() * 2 - 2)\n\n >> _d5: take!(6 * 4)\n\n >> d6: le_u32\n\n >> _d7: take!(16 * 4)\n\n >> size2: parse_size\n\n >> _d8: take!(16 * 4)\n\n >> d9: le_u32\n\n >> _d10: take!(16 * 4)\n\n >> _d11: take!(11 * 4)\n\n >> d12: le_u32\n", "file_path": "modules/sysdiagram/src/parser.rs", "rank": 71, "score": 138487.14953236718 }, { "content": "/// Load a locale file\n\npub fn load_locale(path: &Path) -> Result<LocaleNode, LocaleError> {\n\n let file = File::open(path)?;\n\n let file = BufReader::new(file);\n\n\n\n let mut root = LocaleNode {\n\n value: None,\n\n int_children: BTreeMap::new(),\n\n str_children: BTreeMap::new(),\n\n };\n\n\n\n let mut reader = XmlReader::from_reader(file);\n\n reader.trim_text(true);\n\n\n\n let mut buf = Vec::new();\n\n\n\n // The `Reader` does not implement `Iterator` because it outputs borrowed data (`Cow`s)\n\n if let Ok(XmlEvent::Decl(_)) = reader.read_event(&mut buf) {}\n\n buf.clear();\n\n\n\n let _ = expect_start(\"localization\", &mut reader, &mut buf)?;\n", "file_path": "modules/xml/src/localization.rs", "rank": 72, "score": 138487.14953236718 }, { "content": "pub fn get_settings(val: String) -> Result<StringMap, SettingsError> {\n\n parse_connection_string(val.as_str())\n\n .map(|y| y.1)\n\n .map_err(|_| SettingsError)\n\n}\n\n\n", "file_path": "modules/sysdiagram/src/parser.rs", "rank": 73, "score": 138487.14953236718 }, { "content": "pub fn parse_sky_section(input: &[u8]) -> IResult<&[u8], 
SkySection> {\n\n let mut files: [String; 6] = Default::default();\n\n let (input, _) = fill(parse_u32_string, &mut files)(input)?;\n\n Ok((input, SkySection { files }))\n\n}\n", "file_path": "modules/maps/src/lvl/parser.rs", "rank": 74, "score": 137372.14147759898 }, { "content": "/// Parse a file list entry\n\npub fn parse_pk_entry(input: &[u8]) -> IResult<&[u8], PKEntry> {\n\n parse_crc_node(parse_pk_entry_data)(input)\n\n}\n\n\n", "file_path": "modules/pack/src/pk/parser.rs", "rank": 75, "score": 137372.14147759898 }, { "content": "pub fn parse_terrain_header(input: &[u8]) -> IResult<&[u8], TerrainHeader> {\n\n let (input, version) = le_u8(input)?;\n\n let (input, value_1) = le_u8(input)?;\n\n let (input, value_2) = le_u8(input)?;\n\n let (input, chunk_count) = le_u32(input)?;\n\n let (input, width_in_chunks) = le_u32(input)?;\n\n let (input, height_in_chunks) = le_u32(input)?;\n\n Ok((\n\n input,\n\n TerrainHeader {\n\n version,\n\n value_1,\n\n value_2,\n\n chunk_count,\n\n width_in_chunks,\n\n height_in_chunks,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "modules/maps/src/raw/parser.rs", "rank": 76, "score": 137372.14147759898 }, { "content": "pub fn parse_chunk_version(input: &[u8]) -> IResult<&[u8], ChunkVersion> {\n\n map(tuple((le_u16, le_u16)), |(header, data)| ChunkVersion {\n\n header,\n\n data,\n\n })(input)\n\n}\n\n\n", "file_path": "modules/maps/src/lvl/parser.rs", "rank": 77, "score": 137372.14147759898 }, { "content": "/// Parse the trailer\n\npub fn parse_pk_trailer(input: &[u8]) -> IResult<&[u8], PKTrailer> {\n\n map(tuple((le_u32, le_u32)), |(a, b)| PKTrailer {\n\n file_list_base_addr: a,\n\n num_compressed: b,\n\n })(input)\n\n}\n\n\n", "file_path": "modules/pack/src/pk/parser.rs", "rank": 78, "score": 137372.14147759898 }, { "content": "pub fn parse_file_version(input: &[u8]) -> IResult<&[u8], FileVersion> {\n\n map(le_u32, FileVersion::from)(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/parser.rs", "rank": 79, "score": 137372.14147759898 }, { "content": "pub fn parse_chunk_header(input: &[u8]) -> IResult<&[u8], ChunkHeader> {\n\n let (input, _) = tag(\"CHNK\")(input)?;\n\n let (input, id) = le_u32(input)?;\n\n let (input, version) = parse_chunk_version(input)?;\n\n let (input, size) = le_u32(input)?;\n\n let (input, offset) = le_u32(input)?;\n\n Ok((\n\n input,\n\n ChunkHeader {\n\n id,\n\n version,\n\n size,\n\n offset,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "modules/maps/src/lvl/parser.rs", "rank": 80, "score": 137372.14147759898 }, { "content": "/// Parse a complete PKI file from an in-memory buffer\n\npub fn parse_pki_file(input: &[u8]) -> IResult<&[u8], PackIndexFile> {\n\n let (input, _version) = tag(LE_THREE)(input)?;\n\n let (input, archives) = length_count(le_u32, parse_pack_file_ref)(input)?;\n\n let (input, file_count) = map_res(le_u32, usize::try_from)(input)?;\n\n let (input, files) = fold_many_m_n(\n\n file_count,\n\n file_count,\n\n parse_file_ref_node,\n\n BTreeMap::new,\n\n extend_map,\n\n )(input)?;\n\n Ok((input, PackIndexFile { archives, files }))\n\n}\n", "file_path": "modules/pack/src/pki/parser.rs", "rank": 81, "score": 136292.55517084408 }, { "content": "pub fn parse_zone_paths(input: &[u8]) -> IResult<&[u8], ZonePaths> {\n\n let (input, version) = parse_zone_paths_version(input)?;\n\n let (input, paths) = length_count(le_u32, parse_path)(input)?;\n\n Ok((input, ZonePaths { version, paths }))\n\n}\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 82, "score": 136287.6781498835 }, { "content": "pub fn 
parse_waypoint_config(input: &[u8]) -> IResult<&[u8], WaypointConfig> {\n\n let (input, count) = map_res(le_u32, usize::try_from)(input)?;\n\n fold_many_m_n(\n\n count,\n\n count,\n\n parse_waypoint_config_entry,\n\n HashMap::new,\n\n extend_config_map,\n\n )(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 83, "score": 136287.6781498835 }, { "content": "pub fn parse_path_composition(input: &[u8]) -> IResult<&[u8], PathComposition> {\n\n map_opt(le_u32, PathComposition::from_u32)(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 84, "score": 136287.6781498835 }, { "content": "pub fn parse_path_version(input: &[u8]) -> IResult<&[u8], PathVersion> {\n\n map_opt(le_u32, PathVersion::from_u32)(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 85, "score": 136287.6781498835 }, { "content": "pub fn parse_dsref_schema_contents(input: &[u8]) -> IResult<&[u8], DSRefSchemaContents> {\n\n do_parse!(\n\n input,\n\n _d1: take!(25)\n\n >> len: map!(le_u8, usize::from)\n\n >> _d2: take!(26)\n\n >> connection: parse_u32_bytes_wstring_nt\n\n >> settings: map_res!(value!(connection.clone()), get_settings)\n\n >> _d3: le_u32\n\n >> name: parse_u32_bytes_wstring_nt\n\n >> tables: count!(parse_entry, len)\n\n >> _d4: take!(22)\n\n >> guid: parse_u32_bytes_wstring_nt\n\n >> ({\n\n //println!(\"{:?}\", d1);\n\n //println!(\"{:?}\", d2);\n\n //println!(\"{:08X}\", d3);\n\n //println!(\"{:?}\", d4);\n\n\n\n DSRefSchemaContents {\n\n name,\n\n guid,\n\n tables,\n\n settings,\n\n }\n\n })\n\n )\n\n}\n\n\n", "file_path": "modules/sysdiagram/src/parser.rs", "rank": 86, "score": 135232.52138698823 }, { "content": "/// Parse a file list entry\n\npub fn parse_pk_entry_data(input: &[u8]) -> IResult<&[u8], PKEntryData> {\n\n let (input, orig_file_size) = le_u32(input)?;\n\n let (input, orig_file_hash) = parse_hash(input)?;\n\n let (input, _ofh_padding) = take(4usize)(input)?;\n\n let (input, compr_file_size) = le_u32(input)?;\n\n let (input, compr_file_hash) = parse_hash(input)?;\n\n let (input, _cfh_padding) = take(4usize)(input)?;\n\n let (input, file_data_addr) = le_u32(input)?;\n\n let (input, is_compressed) = parse_compressed(input)?;\n\n Ok((\n\n input,\n\n PKEntryData {\n\n orig_file_size,\n\n orig_file_hash,\n\n compr_file_size,\n\n compr_file_hash,\n\n file_data_addr,\n\n is_compressed,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "modules/pack/src/pk/parser.rs", "rank": 87, "score": 135232.52138698823 }, { "content": "#[allow(clippy::just_underscores_and_digits)]\n\npub fn parse_height_map_header(input: &[u8]) -> IResult<&[u8], HeightMapHeader> {\n\n let (input, width) = le_u32(input)?;\n\n let (input, height) = le_u32(input)?;\n\n let (input, pos_x) = le_f32(input)?;\n\n let (input, pos_z) = le_f32(input)?;\n\n let (input, _1) = le_u32(input)?;\n\n let (input, _2) = le_u32(input)?;\n\n let (input, _3) = le_u32(input)?;\n\n let (input, _4) = le_u32(input)?;\n\n let (input, _5) = le_f32(input)?;\n\n Ok((\n\n input,\n\n HeightMapHeader {\n\n width,\n\n height,\n\n pos_x,\n\n pos_z,\n\n _1,\n\n _2,\n\n _3,\n\n _4,\n\n _5,\n\n },\n\n ))\n\n}\n", "file_path": "modules/maps/src/raw/parser.rs", "rank": 88, "score": 135232.52138698823 }, { "content": "pub fn parse_env_chunk_data(input: &[u8]) -> IResult<&[u8], EnvironmentChunkData> {\n\n let (input, section1_address) = le_u32(input)?;\n\n let (input, sky_address) = le_u32(input)?;\n\n let (input, section3_address) = le_u32(input)?;\n\n Ok((\n\n input,\n\n EnvironmentChunkData {\n\n 
section1_address,\n\n sky_address,\n\n section3_address,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "modules/maps/src/lvl/parser.rs", "rank": 89, "score": 135232.52138698823 }, { "content": "pub fn parse_path_data_movement(input: &[u8]) -> IResult<&[u8], PathDataMovement> {\n\n Ok((input, PathDataMovement {}))\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 90, "score": 134205.49905476425 }, { "content": "pub fn parse_path_data_rail(input: &[u8]) -> IResult<&[u8], PathDataRail> {\n\n Ok((input, PathDataRail {}))\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 91, "score": 134205.49905476425 }, { "content": "pub fn parse_path_data_showcase(input: &[u8]) -> IResult<&[u8], PathDataShowcase> {\n\n Ok((input, PathDataShowcase {}))\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 92, "score": 134205.49905476425 }, { "content": "pub fn parse_path_data_spawner(input: &[u8]) -> IResult<&[u8], PathDataSpawner> {\n\n let (input, spawned_lot) = parse_object_template(input)?;\n\n let (input, respawn_time) = le_u32(input)?;\n\n let (input, max_to_spawn) = le_u32(input)?;\n\n let (input, min_to_spawn) = le_u32(input)?;\n\n let (input, spawner_obj_id) = parse_object_id(input)?;\n\n let (input, activate_network_on_load) = parse_u8_bool(input)?;\n\n Ok((\n\n input,\n\n PathDataSpawner {\n\n spawned_lot,\n\n respawn_time,\n\n max_to_spawn,\n\n min_to_spawn,\n\n spawner_obj_id,\n\n activate_network_on_load,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 93, "score": 134205.49905476425 }, { "content": "pub fn parse_zone_paths_version(input: &[u8]) -> IResult<&[u8], ZonePathsVersion> {\n\n map_opt(le_u32, ZonePathsVersion::from_u32)(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 94, "score": 134205.49905476425 }, { "content": "pub fn parse_path_data_property(input: &[u8]) -> IResult<&[u8], PathDataProperty> {\n\n let (input, value_1) = le_u32(input)?;\n\n let (input, price) = le_u32(input)?;\n\n let (input, rental_time) = le_u32(input)?;\n\n let (input, associated_map) = parse_world_id(input)?;\n\n let (input, value_2) = le_u32(input)?;\n\n let (input, display_name) = parse_u8_wstring(input)?;\n\n let (input, display_description) = parse_u32_wstring(input)?;\n\n let (input, value_3) = le_u32(input)?;\n\n let (input, clone_limit) = le_u32(input)?;\n\n let (input, reputation_multiplier) = le_f32(input)?;\n\n let (input, rental_time_unit) = parse_property_rental_time_unit(input)?;\n\n let (input, achievement_required) = parse_property_achievement_required(input)?;\n\n let (input, player_zone_coordinate) = parse_vec3f(input)?;\n\n let (input, max_build_height) = le_f32(input)?;\n\n Ok((\n\n input,\n\n PathDataProperty {\n\n value_1,\n\n price,\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 95, "score": 134205.49905476425 }, { "content": "pub fn parse_path_data_race(input: &[u8]) -> IResult<&[u8], PathDataRace> {\n\n Ok((input, PathDataRace {}))\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 96, "score": 134205.49905476425 }, { "content": "/// Try to export a database to a SQL connection\n\n///\n\n/// This function does the following:\n\n///\n\n/// 1. `BEGIN`s a transaction\n\n/// 2. For every table:\n\n/// a. Run `CREATE TABLE IF NOT EXISTS`\n\n/// b. Prepares an `INSERT` statement\n\n/// c. Runs the insert with data from every row\n\n/// 3. 
`COMMIT`s the transaction\n\npub fn try_export_db(conn: &mut Connection, db: Database) -> rusqlite::Result<()> {\n\n conn.execute(\"BEGIN\", rusqlite::params![])?;\n\n\n\n let tables = db.tables().unwrap();\n\n for table in tables.iter() {\n\n let table = table.unwrap();\n\n let mut create_query = format!(\"CREATE TABLE IF NOT EXISTS \\\"{}\\\"\\n(\\n\", table.name());\n\n let mut insert_query = format!(\"INSERT INTO \\\"{}\\\" (\", table.name());\n\n let mut first = true;\n\n for col in table.column_iter() {\n\n if first {\n\n first = false;\n\n } else {\n\n writeln!(create_query, \",\").unwrap();\n\n write!(insert_query, \", \").unwrap();\n\n }\n\n let typ = col.value_type().to_sqlite_type();\n\n write!(create_query, \" [{}] {}\", col.name(), typ).unwrap();\n\n write!(insert_query, \"[{}]\", col.name()).unwrap();\n\n }\n", "file_path": "modules/fdb/src/sqlite.rs", "rank": 97, "score": 134138.58051440516 }, { "content": "pub fn parse_waypoint_config_entry(input: &[u8]) -> IResult<&[u8], (String, String)> {\n\n tuple((parse_u8_wstring, parse_u8_wstring))(input)\n\n}\n\n\n", "file_path": "modules/maps/src/luz/paths/parser.rs", "rank": 98, "score": 133569.54171421303 }, { "content": "/// Parse the file list\n\npub fn parse_pk_entry_list(input: &[u8]) -> IResult<&[u8], Vec<PKEntry>> {\n\n length_count(le_u32, parse_pk_entry)(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EMPTY: &[u8] = &[];\n\n\n\n #[test]\n\n fn test_magic() {\n\n assert_eq!(parse_pk_magic(b\"ndpk\"), Ok((EMPTY, ())));\n\n }\n\n\n\n #[test]\n\n fn test_trailer() {\n\n assert_eq!(\n\n parse_pk_trailer(&[0x20, 0x10, 0, 0, 0, 0, 0, 0]),\n\n Ok((\n", "file_path": "modules/pack/src/pk/parser.rs", "rank": 99, "score": 133569.54171421303 } ]
Rust
src/main.rs
cspital/lsplit
dcab20d5aef4ff8ec4a35e57e28adbc18d3240b7
extern crate clap; use clap::{App, Arg, ArgMatches}; use std::env; use std::error; use std::error::Error; use std::fmt; use std::fs; use std::fs::File; use std::io; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::path::PathBuf; use std::str::FromStr; use std::sync::mpsc::{channel, Receiver, RecvError, SendError, Sender}; use std::thread; fn main() { let matches = App::new("By Line File Splitter") .version("0.1.0") .author("Cliff Spital <cspital@uw.edu>") .about("Splits a file on line ending, to chunks of specified size.") .arg( Arg::with_name("bytes") .value_name("bytes") .short("b") .long("bytes") .help("Specify the maximum size of a chunk in bytes, [k|m] may be appended to the end of this number to indicate [k]ilobytes or [m]egabytes.") .required(true) ).arg( Arg::with_name("file") .help("Specifies the file to split.") .required(true) .index(1), ).arg( Arg::with_name("dir") .help("Optionally specify the directory into which the files will be added.") .required(false) .index(2), ).get_matches(); let config = match Config::new(&matches) { Ok(c) => c, Err(e) => { println!("{}", e); return; } }; let splitter = Splitter::new(config); match splitter.split() { Ok(()) => return, Err(e) => println!("{}", e.description()), } } #[derive(Debug)] struct Config { size: u32, pwd: PathBuf, target: PathBuf, dir: Option<PathBuf>, } impl Config { fn new(matches: &ArgMatches) -> ConfigResult<Config> { let presize = matches.value_of("bytes").unwrap(); let size = Config::parse_size(presize)?; let pwd = env::current_dir()?; let target = PathBuf::from(matches.value_of("file").unwrap()); if !target.is_file() { return Err(ConfigError::StateError("target must be a file".to_owned())); } Ok(Config { size, pwd, target, dir: match matches.value_of("dir") { Some(s) => Some(PathBuf::from(s)), None => None, }, }) } #[inline] fn parse_size(arg: &str) -> ConfigResult<u32> { match arg.parse::<ByteSize>() { Ok(b) => { let ByteSize(s) = b; Ok(s) } Err(e) => Err(e), } } } type ConfigResult<T> = std::result::Result<T, ConfigError>; #[derive(Debug)] enum ConfigError { ByteSizeError(String), DirError(io::Error), StateError(String), } impl fmt::Display for ConfigError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ConfigError::ByteSizeError(msg) => write!(f, "{}", msg), ConfigError::DirError(err) => err.fmt(f), ConfigError::StateError(msg) => write!(f, "{}", msg), } } } impl error::Error for ConfigError { fn description(&self) -> &str { match self { ConfigError::ByteSizeError(msg) => msg, ConfigError::DirError(err) => err.description(), ConfigError::StateError(msg) => msg, } } fn cause(&self) -> Option<&error::Error> { match self { ConfigError::ByteSizeError(_) => None, ConfigError::DirError(err) => Some(err), ConfigError::StateError(_) => None, } } } impl From<io::Error> for ConfigError { fn from(err: io::Error) -> Self { ConfigError::DirError(err) } } #[derive(Debug)] struct ByteSize(u32); impl FromStr for ByteSize { type Err = ConfigError; fn from_str(arg: &str) -> Result<Self, Self::Err> { match arg.parse::<u32>() { Ok(s) => Ok(ByteSize(s)), _ => { let pivot = arg.len() - 1; let prefix = &arg[..pivot]; match prefix.parse::<u32>() { Ok(s) => { let last = &arg[pivot..]; match last { "k" => Ok(ByteSize(s * 1_000)), "m" => Ok(ByteSize(s * 1_000_000)), _ => Err(ConfigError::ByteSizeError(format!( "{} is not a support size suffix", last ))), } } _ => Err(ConfigError::ByteSizeError(format!( "{} is not numeric, only k or m is a supported size suffix", prefix ))), } } } } } type SplitterResult = 
Result<(), SplitterError>; type SplitterHandle = thread::JoinHandle<SplitterResult>; #[derive(Debug)] enum SplitterError { IOError(io::Error), SendError(SendError<Line>), RecvError(RecvError), Temp(String), } impl fmt::Display for SplitterError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { SplitterError::IOError(e) => e.fmt(f), SplitterError::Temp(s) => write!(f, "{}", s), SplitterError::SendError(e) => e.fmt(f), SplitterError::RecvError(e) => e.fmt(f), } } } impl error::Error for SplitterError { fn description(&self) -> &str { match self { SplitterError::IOError(e) => e.description(), SplitterError::Temp(s) => s, SplitterError::SendError(e) => e.description(), SplitterError::RecvError(e) => e.description(), } } fn cause(&self) -> Option<&error::Error> { match self { SplitterError::IOError(e) => Some(e), SplitterError::Temp(_) => None, SplitterError::SendError(e) => Some(e), SplitterError::RecvError(e) => Some(e), } } } impl From<io::Error> for SplitterError { fn from(err: io::Error) -> Self { SplitterError::IOError(err) } } impl From<SendError<Line>> for SplitterError { fn from(err: SendError<Line>) -> Self { SplitterError::SendError(err) } } impl From<RecvError> for SplitterError { fn from(err: RecvError) -> Self { SplitterError::RecvError(err) } } struct Line { content: String, size: u32, } impl Line { fn new(content: String, size: usize) -> Self { Line { content: content, size: size as u32, } } } impl<'a> From<&'a Line> for &'a [u8] { fn from(line: &'a Line) -> &'a [u8] { line.content.as_bytes() } } impl AsRef<Line> for Line { fn as_ref(&self) -> &Line { &self } } struct Splitter { chunk_size: u32, read: PathBuf, write_dir: PathBuf, } impl Splitter { fn new(cfg: Config) -> Self { Splitter { chunk_size: cfg.size, read: cfg.target, write_dir: match cfg.dir { Some(d) => d, None => cfg.pwd, }, } } fn split(&self) -> Result<(), SplitterError> { let (sender, receiver) = channel::<Line>(); let target = fs::File::open(&self.read)?; let split_reader = SplitReader::new(target); let split_writer = SplitWriter::new(self); let _read_result: SplitterHandle = thread::spawn(move || Ok(split_reader.stream(sender)?)); Ok(split_writer.stream(receiver)?) 
} } struct SplitWriter<'s> { splitter: &'s Splitter, } impl<'s> SplitWriter<'s> { fn new(splitter: &'s Splitter) -> Self { SplitWriter { splitter } } fn stream(&self, receiver: Receiver<Line>) -> SplitterResult { if let Ok(mut line) = receiver.recv() { let mut progress = 0; let mut file_num = 1; fs::create_dir_all(&self.splitter.write_dir)?; let mut writer = new_writer(file_num, self.splitter)?; while line.size > 0 { progress += line.size; if progress > self.splitter.chunk_size { if line.size > self.splitter.chunk_size { return Err(SplitterError::Temp( "line size exceeds maximum allowed chunk size".to_owned(), )); } file_num += 1; progress = line.size; writer.flush()?; writer = new_writer(file_num, self.splitter)?; } writer.write_all(line.as_ref().into())?; line = receiver.recv()?; } } Ok(()) } } fn new_writer(file_num: i32, splitter: &Splitter) -> Result<BufWriter<File>, SplitterError> { if let Some(new_path) = derive_new_path(file_num, splitter) { let new_file = File::create(new_path)?; return Ok(BufWriter::new(new_file)); } Err(SplitterError::Temp("Invalid filename.".to_string())) } fn derive_new_path(file_num: i32, splitter: &Splitter) -> Option<PathBuf> { match splitter.read.file_name() { None => None, Some(oss) => match oss.to_str() { None => None, Some(s) => { let dir = PathBuf::from(&splitter.write_dir); Some(dir.join(format!("{}_{}", file_num, s))) } }, } } #[derive(Debug)] struct SplitReader { read: File, } impl SplitReader { fn new(read: File) -> Self { SplitReader { read } } fn stream(&self, send: Sender<Line>) -> SplitterResult { let mut reader = BufReader::new(&self.read); let mut first = String::new(); if let Ok(mut count) = reader.read_line(&mut first) { send.send(Line::new(first, count))?; while count > 0 { let mut subs = String::new(); count = reader.read_line(&mut subs)?; send.send(Line::new(subs, count))?; } } Ok(send.send(Line::new(String::new(), 0))?) } } #[cfg(test)] mod tests { use super::*; #[test] fn bytesize_fromstr_numeric_ok() { let input = "2000"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2000); } #[test] fn bytesize_fromstr_kilo_ok() { let input = "2k"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2000); } #[test] fn bytesize_fromstr_mega_ok() { let input = "2m"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2_000_000); } #[test] fn bytesize_fromstr_invalid() { let input = "2km"; let size = input.parse::<ByteSize>(); assert!(size.is_err()); } }
extern crate clap; use clap::{App, Arg, ArgMatches}; use std::env; use std::error; use std::error::Error; use std::fmt; use std::fs; use std::fs::File; use std::io; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::path::PathBuf; use std::str::FromStr; use std::sync::mpsc::{channel, Receiver, RecvError, SendError, Sender}; use std::thread; fn main() { let matches = App::new("By Line File Splitter") .version("0.1.0") .author("Cliff Spital <cspital@uw.edu>") .about("Splits a file on line ending, to chunks of specified size.") .arg( Arg::with_name("bytes") .value_name("bytes") .short("b") .long("bytes") .help("Specify the maximum size of a chunk in bytes, [k|m] may be appended to the end of this number to indicate [k]ilobytes or [m]egabytes.") .required(true) ).arg( Arg::with_name("file") .help("Specifies the file to split.") .required(true) .index(1), ).arg( Arg::with_name("dir") .help("Optionally specify the directory into which the files will be added.") .required(false) .index(2), ).get_matches(); let config = match Config::new(&matches) { Ok(c) => c, Err(e) => { println!("{}", e); return; } }; let splitter = Splitter::new(config); match splitter.split() { Ok(()) => return, Err(e) => println!("{}", e.description()), } } #[derive(Debug)] struct Config { size: u32, pwd: PathBuf, target: PathBuf, dir: Option<PathBuf>, } impl Config { fn new(matches: &ArgMatches) -> ConfigResult<Config> { let presize = matches.value_of("bytes").unwrap(); let size = Config::parse_size(presize)?; let pwd = env::current_dir()?; let target = PathBuf::from(matches.value_of("file").unwrap()); if !target.is_file() { return Err(ConfigError::StateError("target must be a file".to_owned())); } Ok(Config { size, pwd, target, dir: match matches.value_of("dir") { Some(s) => Some(PathBuf::from(s)), None => None, }, }) } #[inline] fn parse_size(arg: &str) -> ConfigResult<u32> { match arg.parse::<ByteSize>() { Ok(b) => { let ByteSize(s) = b; Ok(s) } Err(e) => Err(e), } } } type ConfigResult<T> = std::result::Result<T, ConfigError>; #[derive(Debug)] enum ConfigError { ByteSizeError(String), DirError(io::Error), StateError(String), } impl fmt::Display for ConfigError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ConfigError::ByteSizeError(msg) => write!(f, "{}", msg), ConfigError::DirError(err) => err.fmt(f), ConfigError::StateError(msg) => write!(f, "{}", msg), } } } impl error::Error for ConfigError { fn description(&self) -> &str { match self { ConfigError::ByteSizeError(msg) => msg, ConfigError::DirError(err) => err.description(), ConfigError::StateError(msg) => msg, } } fn cause(&self) -> Option<&error::Error> { match self { ConfigError::ByteSizeError(_) => None, ConfigError::DirError(err) => Some(err), ConfigError::StateError(_) => None, } } } impl From<io::Error> for ConfigError { fn from(err: io::Error) -> Self { ConfigError::DirError(err) } } #[derive(Debug)] struct ByteSize(u32); impl FromStr for ByteSize { type Err = ConfigError; fn from_str(arg: &str) -> Result<Self, Self::Err> { match arg.parse::<u32>() { Ok(s) => Ok(ByteSize(s)), _ => { let pivot = arg.len() - 1; let prefix = &arg[..pivot]; match prefix.parse::<u32>() { Ok(s) => { let last = &arg[pivot..]; match last { "k" => Ok(ByteSize(s * 1_000)), "m" => Ok(ByteSize(s * 1_000_000)), _ => Err(ConfigError::ByteSizeError(format!( "{} is not a support size suffix", last ))), } } _ => Err(ConfigError::ByteSizeError(format!( "{} is not numeric, only k or m is a supported size suffix", prefix ))), } } } } } type SplitterResult = 
Result<(), SplitterError>; type SplitterHandle = thread::JoinHandle<SplitterResult>; #[derive(Debug)] enum SplitterError { IOError(io::Error), SendError(SendError<Line>), RecvError(RecvError), Temp(String), } impl fmt::Display for SplitterError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { SplitterError::IOError(e) => e.fmt(f), SplitterError::Temp(s) => write!(f, "{}", s), SplitterError::SendError(e) => e.fmt(f), SplitterError::RecvError(e) => e.fmt(f), } } } impl error::Error for SplitterError { fn description(&self) -> &str { match self { SplitterError::IOError(e) => e.description(), SplitterError::Temp(s) => s, SplitterError::SendError(e) => e.description(), SplitterError::RecvError(e) => e.description(), } } fn cause(&self) -> Option<&error::Error> { match self { SplitterError::IOError(e) => Some(e), SplitterError::Temp(_) => None, SplitterError::SendError(e) => Some(e), SplitterError::RecvError(e) => Some(e), } } } impl From<io::Error> for SplitterError { fn from(err: io::Error) -> Self { SplitterError::IOError(err) } } impl From<SendError<Line>> for SplitterError { fn from(err: SendError<Line>) -> Self { SplitterError::SendError(err) } } impl From<RecvError> for SplitterError { fn from(err: RecvError) -> Self { SplitterError::RecvError(err) } } struct Line { content: String, size: u32, } impl Line { fn new(content: String, size: usize) -> Self { Line { content: content, size: size as u32, } } } impl<'a> From<&'a Line> for &'a [u8] { fn from(line: &'a Line) -> &'a [u8] { line.content.as_bytes() } } impl AsRef<Line> for Line { fn as_ref(&self) -> &Line { &self } } struct Splitter { chunk_size: u32, read: PathBuf, write_dir: PathBuf, } impl Splitter { fn new(cfg: Config) -> Self { Splitter { chunk_size: cfg.size, read: cfg.target, write_dir: match cfg.dir { Some(d) => d, None => cfg.pwd, }, } } fn split(&self) -> Result<(), SplitterError> { let (sender, receiver) = channel::<Line>(); let target = fs::File::open(&self.read)?; let split_reader = SplitReader::new(target); let split_writer = SplitWriter::new(self); let _read_result: SplitterHandle = thread::spawn(move || Ok(split_reader.stream(sender)?)); Ok(split_writer.stream(receiver)?) } } struct SplitWriter<'s> { splitter: &'s Splitter, } impl<'s> SplitWriter<'s> { fn new(splitter: &'s Splitter) -> Self { SplitWriter { splitter } }
} fn new_writer(file_num: i32, splitter: &Splitter) -> Result<BufWriter<File>, SplitterError> { if let Some(new_path) = derive_new_path(file_num, splitter) { let new_file = File::create(new_path)?; return Ok(BufWriter::new(new_file)); } Err(SplitterError::Temp("Invalid filename.".to_string())) } fn derive_new_path(file_num: i32, splitter: &Splitter) -> Option<PathBuf> { match splitter.read.file_name() { None => None, Some(oss) => match oss.to_str() { None => None, Some(s) => { let dir = PathBuf::from(&splitter.write_dir); Some(dir.join(format!("{}_{}", file_num, s))) } }, } } #[derive(Debug)] struct SplitReader { read: File, } impl SplitReader { fn new(read: File) -> Self { SplitReader { read } } fn stream(&self, send: Sender<Line>) -> SplitterResult { let mut reader = BufReader::new(&self.read); let mut first = String::new(); if let Ok(mut count) = reader.read_line(&mut first) { send.send(Line::new(first, count))?; while count > 0 { let mut subs = String::new(); count = reader.read_line(&mut subs)?; send.send(Line::new(subs, count))?; } } Ok(send.send(Line::new(String::new(), 0))?) } } #[cfg(test)] mod tests { use super::*; #[test] fn bytesize_fromstr_numeric_ok() { let input = "2000"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2000); } #[test] fn bytesize_fromstr_kilo_ok() { let input = "2k"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2000); } #[test] fn bytesize_fromstr_mega_ok() { let input = "2m"; let ByteSize(size) = input.parse::<ByteSize>().unwrap(); assert_eq!(size, 2_000_000); } #[test] fn bytesize_fromstr_invalid() { let input = "2km"; let size = input.parse::<ByteSize>(); assert!(size.is_err()); } }
fn stream(&self, receiver: Receiver<Line>) -> SplitterResult { if let Ok(mut line) = receiver.recv() { let mut progress = 0; let mut file_num = 1; fs::create_dir_all(&self.splitter.write_dir)?; let mut writer = new_writer(file_num, self.splitter)?; while line.size > 0 { progress += line.size; if progress > self.splitter.chunk_size { if line.size > self.splitter.chunk_size { return Err(SplitterError::Temp( "line size exceeds maximum allowed chunk size".to_owned(), )); } file_num += 1; progress = line.size; writer.flush()?; writer = new_writer(file_num, self.splitter)?; } writer.write_all(line.as_ref().into())?; line = receiver.recv()?; } } Ok(()) }
function_block-full_function
[ { "content": "Split a file into byte sized chunks by line.\n\n\n", "file_path": "README.md", "rank": 24, "score": 11.377523898280417 } ]
Rust
clef/src/math/fraction.rs
dukguru/clef
edd54db5cd36ce41218453cd6c4d13e08da76310
use crate::math; use contracts::requires; use std::cmp::Ordering; use std::fmt; use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; #[derive(Clone, Copy, Debug, Eq, Ord)] pub struct Fraction { numerator: i32, denominator: i32, } impl Fraction { pub const ZERO: Fraction = Fraction { numerator: 0, denominator: 1, }; pub const ONE: Fraction = Fraction { numerator: 1, denominator: 1, }; pub const HALF: Fraction = Fraction { numerator: 1, denominator: 2, }; } impl Fraction { #[requires(denominator != 0, "denominator must not be zero")] pub fn new(numerator: i32, denominator: i32) -> Fraction { Self { numerator, denominator, } } pub fn numerator(&self) -> i32 { self.numerator } pub fn denominator(&self) -> i32 { self.denominator } pub fn signum(&self) -> i32 { self.numerator.signum() * self.denominator.signum() } pub fn to_irreducible(&self) -> Self { let gcd = math::gcd(self.numerator, self.denominator); Self { numerator: (self.numerator / gcd).abs() * self.signum(), denominator: (self.denominator / gcd).abs(), } } pub fn to_float(&self) -> f32 { self.numerator as f32 / self.denominator as f32 } } impl fmt::Display for Fraction { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}/{}", self.numerator, self.denominator) } } impl PartialEq for Fraction { fn eq(&self, other: &Self) -> bool { let l = self.to_irreducible(); let r = other.to_irreducible(); l.numerator == r.numerator && l.denominator == r.denominator } } impl PartialOrd for Fraction { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.to_float().partial_cmp(&other.to_float()) } } impl Add for Fraction { type Output = Self; fn add(self, other: Self) -> Self::Output { if self.denominator == other.denominator { Self::new(self.numerator + other.numerator, self.denominator).to_irreducible() } else { let numerator = self.numerator * other.denominator + other.numerator * self.denominator; let denominator = self.denominator * other.denominator; Self::new(numerator, denominator).to_irreducible() } } } impl Add<i32> for Fraction { type Output = Self; fn add(self, other: i32) -> Self::Output { self + Fraction::new(other, 1) } } impl AddAssign for Fraction { fn add_assign(&mut self, other: Self) { *self = *self + other; } } impl AddAssign<i32> for Fraction { fn add_assign(&mut self, other: i32) { *self = *self + other; } } impl Div for Fraction { type Output = Self; fn div(self, other: Self) -> Self::Output { Self::new( self.numerator * other.denominator, self.denominator * other.numerator, ) .to_irreducible() } } impl Div<i32> for Fraction { type Output = Self; fn div(self, other: i32) -> Self::Output { Self::new(self.numerator * other, self.denominator * other).to_irreducible() } } impl DivAssign for Fraction { fn div_assign(&mut self, other: Self) { *self = *self / other; } } impl DivAssign<i32> for Fraction { fn div_assign(&mut self, other: i32) { *self = *self / other; } } impl Mul for Fraction { type Output = Self; fn mul(self, other: Self) -> Self::Output { Self::new( self.numerator * other.numerator, self.denominator * other.denominator, ) .to_irreducible() } } impl Mul<i32> for Fraction { type Output = Self; fn mul(self, other: i32) -> Self::Output { Self::new(self.numerator * other, self.denominator).to_irreducible() } } impl MulAssign for Fraction { fn mul_assign(&mut self, other: Self) { *self = *self * other; } } impl MulAssign<i32> for Fraction { fn mul_assign(&mut self, other: i32) { *self = *self * other; } } impl Neg for Fraction { type Output = Self; fn neg(self) 
-> Self::Output { Self::new(self.numerator * -1, self.denominator).to_irreducible() } } impl Sub for Fraction { type Output = Self; fn sub(self, other: Self) -> Self::Output { self + -other } } impl Sub<i32> for Fraction { type Output = Self; fn sub(self, other: i32) -> Self::Output { self + -other } } impl SubAssign for Fraction { fn sub_assign(&mut self, other: Self) { *self += -other; } } impl SubAssign<i32> for Fraction { fn sub_assign(&mut self, other: i32) { *self += -other; } } #[cfg(test)] mod tests { use super::*; #[test] #[should_panic] fn test_zero_denominator() { let _illegal = Fraction::new(1, 0); } #[test] fn test_signum() { assert_eq!(Fraction::new(1, 2).signum(), 1); assert_eq!(Fraction::new(-1, 2).signum(), -1); assert_eq!(Fraction::new(1, -2).signum(), -1); assert_eq!(Fraction::new(-1, -2).signum(), 1); assert_eq!(Fraction::new(0, 2).signum(), 0); } #[test] fn test_to_irreducible() { assert_eq!(Fraction::new(3, 9).to_irreducible(), Fraction::new(1, 3)); assert_eq!(Fraction::new(27, 9).to_irreducible(), Fraction::new(3, 1)); assert_eq!( Fraction::new(11, 13).to_irreducible(), Fraction::new(11, 13) ); assert_eq!(Fraction::new(-3, 9).to_irreducible(), Fraction::new(-1, 3)); assert_eq!(Fraction::new(3, -9).to_irreducible(), Fraction::new(-1, 3)); assert_eq!(Fraction::new(-3, -9).to_irreducible(), Fraction::new(1, 3)); } #[test] fn test_to_float() { assert_eq!(0.25, Fraction::new(1, 4).to_float()); assert_eq!(-0.25, Fraction::new(-1, 4).to_float()); } #[test] fn test_op_eq() { assert!(Fraction::new(5, -10) == Fraction::new(-1, 2)); assert!(Fraction::new(5, 10) != Fraction::new(1, 3)); } #[test] fn test_op_ord() { assert!(Fraction::new(3, 5) < Fraction::new(4, 5)); assert!(Fraction::new(3, 5) <= Fraction::new(15, 25)); } #[test] fn test_op_add() { assert_eq!( Fraction::new(5, 10) + Fraction::new(5, 20), Fraction::new(3, 4) ); assert_eq!( Fraction::new(-5, 10) + Fraction::new(5, 20), Fraction::new(-1, 4) ); assert_eq!( Fraction::new(5, -10) + Fraction::new(5, 20), Fraction::new(-1, 4) ); assert_eq!( Fraction::new(5, 10) + Fraction::new(0, 20), Fraction::new(1, 2) ); assert_eq!(Fraction::ZERO + Fraction::new(3, 4), Fraction::new(3, 4)); assert_eq!(Fraction::new(3, 4) + Fraction::ZERO, Fraction::new(3, 4)); let mut a = Fraction::new(5, 10); let b = Fraction::new(5, 20); a += b; assert_eq!(a, Fraction::new(3, 4)); } #[test] fn test_op_sub() { assert_eq!( Fraction::new(5, 10) - Fraction::new(5, 20), Fraction::new(1, 4) ); assert_eq!( Fraction::new(5, 20) - Fraction::new(5, 10), Fraction::new(-1, 4) ); assert_eq!(Fraction::new(5, 20) - Fraction::new(5, 20), Fraction::ZERO); } #[test] fn test_op_neg() { assert_eq!(-Fraction::new(5, 10), Fraction::new(-1, 2)); assert_eq!(-Fraction::new(-5, 10), Fraction::new(1, 2)); assert_eq!(-Fraction::new(5, -10), Fraction::new(1, 2)); assert_eq!(-Fraction::new(-5, -10), Fraction::new(-1, 2)); } }
use crate::math; use contracts::requires; use std::cmp::Ordering; use std::fmt; use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign}; #[derive(Clone, Copy, Debug, Eq, Ord)] pub struct Fraction { numerator: i32, denominator: i32, } impl Fraction { pub const ZERO: Fraction = Fraction { numerator: 0, denominator: 1, }; pub const ONE: Fraction = Fraction { numerator: 1, denominator: 1, }; pub const HALF: Fraction = Fraction { numerator: 1, denominator: 2, }; } impl Fraction { #[requires(denominator != 0, "denominator must not be zero")] pub fn new(numerator: i32, denominator: i32) -> Fraction { Self { numerator, denominator, } } pub fn numerator(&self) -> i32 { self.numerator } pub fn denominator(&self) -> i32 { self.denominator } pub fn signum(&self) -> i32 { self.numerator.signum() * self.denominator.signum() } pub fn to_irreducible(&self) -> Self { let gcd = math::gcd(self.numerator, self.denominator); Self { numerator: (self.numerator / gcd).abs() * self.signum(), denominator: (self.denominator / gcd).abs(), } } pub fn to_float(&self) -> f32 { self.numerator as f32 / self.denominator as f32 } } impl fmt::Display for Fraction { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}/{}", self.numerator, self.denominator) } } impl PartialEq for Fraction { fn eq(&self, other: &Self) -> bool { let l = self.to_irreducible(); let r = other.to_irreducible(); l.numerator == r.numerator && l.denominator == r.denominator } } impl PartialOrd for Fraction { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.to_float().partial_cmp(&other.to_float()) } } impl Add for Fraction { type Output = Self; fn add(self, other: Self) -> Self::Output { if self.denominator == other.denominator { Self::new(self.numerator + other.numerator, self.denominator).to_irreducible() } else { let numerator = self.numerator * other.denominator + other.numerator * self.denominator; let denominator = self.denominator * other.denominator; Self::new(numerator, denominator).to_irreducible() } } } impl Add<i32> for Fraction { type Output = Self; fn add(self, other: i32) -> Self::Output { self + Fraction::new(other, 1) } } impl AddAssign for Fraction { fn add_assign(&mut self, other: Self) { *self = *self + other; } } impl AddAssign<i32> for Fraction { fn add_assign(&mut self, other: i32) { *self = *self + other; } } impl Div for Fraction { type Output = Self; fn div(self, other: Self) -> Self::Output { Self::new( self.numerator * other.denominator, self.denominator * other.numerator, ) .to_irreducible() } } impl Div<i32> for Fraction { type Output = Self; fn div(self, other: i32) -> Self::Output { Self::new(self.numerator * other, self.denominator * other).to_irreducible() } } impl DivAssign for Fraction { fn div_assign(&mut self, other: Self) { *self = *self / other; } } impl DivAssign<i32> for Fraction { fn div_assign(&mut self, other: i32) { *self = *self / other; } } impl Mul for Fraction { type Output = Self; fn mul(self, other: Self) -> Self::Output { Self::new( self.numerator * other.numerator, self.denominator * other.denominator, ) .to_irreducible() } } impl Mul<i32> for Fraction { type Output = Self; fn mul(self, other: i32) -> Self::Output { Self::new(self.numerator * other, self.denominator).to_irreducible() } } impl MulAssign for Fraction { fn mul_assign(&mut self, other: Self) { *self = *self * other; } } impl MulAssign<i32> for Fraction { fn mul_assign(&mut self, other: i32) { *self = *self * other; } } impl Neg for Fraction { type Output = Self; fn neg(self) 
-> Self::Output { Self::new(self.numerator * -1, self.denominator).to_irreducible() } } impl Sub for Fraction { type Output = Self; fn sub(self, other: Self) -> Self::Output { self + -other } } impl Sub<i32> for Fraction { type Output = Self; fn sub(self, other: i32) -> Self::Output { self + -other } } impl SubAssign for Fraction { fn sub_assign(&mut self, other: Self) { *self += -other; } } impl SubAssign<i32> for Fraction { fn sub_assign(&mut self, other: i32) { *self += -other; } } #[cfg(test)] mod tests { use super::*; #[test] #[should_panic] fn test_zero_denominator() { let _illegal = Fraction::new(1, 0); } #[test] fn test_signum() { assert_eq!(Fraction::new(1, 2).signum(), 1); assert_eq!(Fraction::new(-1, 2).signum(), -1); assert_eq!(Fraction::new(1, -2).signum(), -1); assert_eq!(Fraction::new(-1, -2).signum(), 1); assert_eq!(Fraction::new(0, 2).signum(), 0); } #[test] fn test_to_irreducible() { assert_eq!(Fraction::new(3, 9).to_irreducible(), Fraction::new(1, 3)); assert_eq!(Fraction::new(27, 9).to_irreducible(), Fraction::new(3, 1)); assert_eq!( Fraction::new(11, 13).to_irreducible(), Fraction::new(11, 13) ); assert_eq!(Fraction::new(-3, 9).to_irreducible(), Fraction::new(-1, 3)); assert_eq!(Fraction::new(3, -9).to_irreducible(), Fraction::new(-1, 3)); assert_eq!(Fraction::new(-3, -9).to_irreducible(), Fraction::new(1, 3)); } #[test] fn test_to_float() { assert_eq!(0.25, Fraction::new(1, 4).to_float()); assert_eq!(-0.25, Fraction::new(-1, 4).to_float()); } #[test] fn test_op_eq() { assert!(Fraction::new(5, -10) == Fraction::new(-1, 2)); assert!(Fraction::new(5, 10) != Fraction::new(1, 3)); } #[test] fn test_op_ord() { assert!(Fraction::new(3, 5) < Fraction::new(4, 5)); assert!(Fraction::new(3, 5) <= Fraction::new(15, 25)); } #[test] fn test_op_add() { assert_eq!( Fraction::new(5, 10) + Fraction::new(5, 20), Fraction::new(3, 4) ); assert_eq!( Fraction::new(-5, 10) + Fraction::new(5, 20), Fraction::new(-1, 4) ); assert_eq!( Fraction::new(5, -10) + Fraction::new(5, 20), Fraction::new(-1, 4) ); assert_eq!( Fraction::new(5, 10) + Fraction::new(0, 20), Fraction::new(1, 2) ); assert_eq!(Fraction::ZERO + Fraction::new(3, 4), Fraction::new(3, 4)); assert_eq!(Fraction::new(3, 4) + Fraction::ZERO, Fraction::new(3, 4)); let mut a = Fraction::new(5, 10); let b = Fraction::new(5, 20); a += b; assert_eq!(a, Fraction::new(3, 4)); } #[test] fn test_op_sub() { assert_eq!( Fraction::new(5, 10) - Fraction::new(5, 20), Fraction::new(1, 4) ); assert_eq!( Fraction::new(5, 20) - Fraction::new(5, 10), Fraction::new(-1, 4) ); assert_eq!(Fraction::new(5, 20) - Fraction::new(5, 20), Fraction::ZERO); } #[test] fn test_op_neg() { assert_eq!(-Fraction::new(5, 10), Fraction::new(-1, 2)); assert_eq!(-Fraction::new(-5, 10), Fraction::new(1,
}
2)); assert_eq!(-Fraction::new(5, -10), Fraction::new(1, 2)); assert_eq!(-Fraction::new(-5, -10), Fraction::new(-1, 2)); }
function_block-function_prefixed
[ { "content": "pub fn gcd(a: i32, b: i32) -> i32 {\n\n let mut a = a.abs();\n\n let mut b = b.abs();\n\n\n\n while a != 0 && b != 0 {\n\n if a > b {\n\n a %= b;\n\n } else {\n\n b %= a;\n\n }\n\n }\n\n\n\n cmp::max(a, b)\n\n}\n\n\n", "file_path": "clef/src/math/mod.rs", "rank": 0, "score": 105508.95296841541 }, { "content": "pub fn is_power_of_2(x: u32) -> bool {\n\n (x != 0) && ((x & (x - 1)) == 0)\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_gcd() {\n\n assert_eq!(gcd(11, 21), 1);\n\n assert_eq!(gcd(14, 35), 7);\n\n assert_eq!(gcd(35, 14), 7);\n\n assert_eq!(gcd(14, -35), 7);\n\n assert_eq!(gcd(-35, 14), 7);\n\n assert_eq!(gcd(-35, -14), 7);\n\n assert_eq!(gcd(0, 0), 0);\n\n assert_eq!(gcd(0, 14), 14);\n\n assert_eq!(gcd(14, 0), 14);\n", "file_path": "clef/src/math/mod.rs", "rank": 1, "score": 75617.07549922937 }, { "content": "mod duration;\n\n\n\npub use duration::Duration;\n\npub mod pitch;\n\npub use pitch::*;\n\n\n\npub mod tune_sys;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub struct NoteName(i32);\n\npub const C: NoteName = NoteName(0);\n\npub const D: NoteName = NoteName(2);\n\npub const E: NoteName = NoteName(4);\n\npub const F: NoteName = NoteName(5);\n\npub const G: NoteName = NoteName(7);\n\npub const A: NoteName = NoteName(9);\n\npub const B: NoteName = NoteName(11);\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub struct Accidental(i32);\n\npub const NATURAL: Accidental = Accidental(0);\n\npub const SHARP: Accidental = Accidental(1);\n\npub const FLAT: Accidental = Accidental(-1);\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub struct Chord {\n\n root: Pitch\n\n}", "file_path": "clef/src/music/mod.rs", "rank": 2, "score": 21628.052495507374 }, { "content": "use std::cmp;\n\n\n\nmod fraction;\n\npub use fraction::Fraction;\n\n\n", "file_path": "clef/src/math/mod.rs", "rank": 3, "score": 21623.65822462317 }, { "content": " assert_eq!(gcd(-14, 0), 14);\n\n assert_eq!(gcd(0, -14), 14);\n\n }\n\n\n\n #[test]\n\n fn test_is_power_of_2() {\n\n assert!(is_power_of_2(1));\n\n assert!(is_power_of_2(128));\n\n assert!(!is_power_of_2(0));\n\n assert!(!is_power_of_2(1023));\n\n }\n\n}\n", "file_path": "clef/src/math/mod.rs", "rank": 4, "score": 21621.02018849015 }, { "content": "#[contract_trait]\n\npub trait TuningSystem {\n\n #[ensures(ret > 0f32)]\n\n fn to_hertz(&self, pitch: &Pitch) -> f32;\n\n #[requires(hertz > 0f32)]\n\n fn to_pitch(&self, hertz: f32) -> Pitch;\n\n}\n\n\n\npub struct EqualTemperament {\n\n a4_hertz: f32,\n\n}\n\n\n\nimpl EqualTemperament {\n\n const TWELFTH_ROOT_OF_TWO: f32 = 1.05946309435929526456182;\n\n const LN_TWELFTH_ROOT_OF_TWO: f32 = 0.05776226504666210911809767902434;\n\n\n\n pub fn new(a4_hertz: f32) -> Self {\n\n Self { a4_hertz }\n\n }\n\n}\n\n\n", "file_path": "clef/src/music/tune_sys.rs", "rank": 5, "score": 21139.21332084363 }, { "content": "\n\n pub fn accidental(&self) -> Accidental {\n\n self.accidental\n\n }\n\n}\n\n\n\nimpl Sub for Pitch {\n\n type Output = i32;\n\n\n\n fn sub(self, other: Self) -> Self::Output {\n\n let from: i32 = self.name.0 + self.octave * 12 + self.accidental.0;\n\n let to: i32 = other.name.0 + other.octave * 12 + other.accidental.0;\n\n from - to\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "clef/src/music/pitch.rs", "rank": 22, "score": 23.286334734758228 }, { "content": " fraction -= half;\n\n dots += 1;\n\n half *= Fraction::HALF;\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n if fraction != 
Fraction::ZERO {\n\n return Err(\"not a durational fraction\")\n\n }\n\n\n\n Ok(Duration::new_with_dots(denominator, dots))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "clef/src/music/duration.rs", "rank": 23, "score": 19.55863935503554 }, { "content": "#![allow(non_upper_case_globals)]\n\n\n\nuse super::*;\n\nuse super::{FLAT, NATURAL, SHARP};\n\nuse contracts::requires;\n\nuse std::ops::Sub;\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub struct Pitch {\n\n name: NoteName,\n\n octave: i32,\n\n accidental: Accidental,\n\n}\n\n\n\nimpl Pitch {\n\n #[requires(octave >= -1 && octave <= 9, \"octave must be in the range -1 to 9\")]\n\n pub fn new(name: NoteName, octave: i32) -> Pitch {\n\n Pitch {\n\n name,\n\n octave,\n", "file_path": "clef/src/music/pitch.rs", "rank": 24, "score": 17.724535282984615 }, { "content": "use crate::math;\n\nuse crate::math::Fraction;\n\nuse contracts::requires;\n\nuse std::convert::TryFrom;\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct Duration {\n\n denominator: u16,\n\n dots: u8,\n\n}\n\n\n\nimpl Duration {\n\n #[requires(math::is_power_of_2(denominator as u32), \"denominator must be a power of 2\")]\n\n pub fn new(denominator: u16) -> Duration {\n\n Duration {\n\n denominator,\n\n dots: 0,\n\n }\n\n }\n\n\n", "file_path": "clef/src/music/duration.rs", "rank": 25, "score": 17.259297770400767 }, { "content": "pub mod math;\n\n\n\nmod music;\n\npub use music::pitch::*;\n\npub use music::tune_sys;\n\npub use music::Accidental;\n\npub use music::Chord;\n\npub use music::Duration;\n\npub use music::NoteName;\n\npub use music::{A, B, C, D, E, F, G};\n\npub use music::{FLAT, NATURAL, SHARP};\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n let _half = crate::Duration::new(2);\n\n }\n\n}\n", "file_path": "clef/src/lib.rs", "rank": 26, "score": 15.218769005502194 }, { "content": " #[requires(math::is_power_of_2(denominator as u32), \"denominator must be a power of 2\")]\n\n #[requires(dots <= 4, \"dots must be no more than 4\")]\n\n pub fn new_with_dots(denominator: u16, dots: u8) -> Duration {\n\n Duration { denominator, dots }\n\n }\n\n\n\n pub fn denominator(&self) -> u16 {\n\n self.denominator\n\n }\n\n\n\n pub fn dots(&self) -> u8 {\n\n self.dots\n\n }\n\n\n\n pub fn to_fraction(&self) -> Fraction {\n\n let denominator = self.denominator as i32;\n\n let mut fraction = Fraction::new(1, denominator);\n\n\n\n if self.dots > 0 {\n\n let mut base_pow = 2;\n", "file_path": "clef/src/music/duration.rs", "rank": 27, "score": 14.796383440577033 }, { "content": " let mut dots = 0;\n\n\n\n let mut d = 1i32;\n\n while d <= 128 {\n\n let base = Fraction::new(1, d);\n\n if fraction >= base {\n\n fraction -= base;\n\n denominator = d as u16;\n\n break;\n\n }\n\n d *= 2\n\n }\n\n\n\n if denominator == 0 {\n\n return Err(\"denominator too large\");\n\n }\n\n\n\n let mut half = Fraction::new(1, (denominator * 2).into());\n\n while dots < 4 {\n\n if fraction >= half {\n", "file_path": "clef/src/music/duration.rs", "rank": 28, "score": 14.619844326403928 }, { "content": " for _ in 1..=self.dots {\n\n fraction += Fraction::new(1, denominator * base_pow);\n\n base_pow *= 2;\n\n }\n\n }\n\n\n\n fraction\n\n }\n\n}\n\n\n\nimpl TryFrom<Fraction> for Duration {\n\n type Error = &'static str;\n\n\n\n fn try_from(fraction: Fraction) -> Result<Self, Self::Error> {\n\n if fraction.signum() <= 0 {\n\n return Err(\"fraction must be positive\");\n\n }\n\n\n\n let mut fraction = fraction;\n\n let mut 
denominator = 0u16;\n", "file_path": "clef/src/music/duration.rs", "rank": 29, "score": 14.598328174920729 }, { "content": " \n\n if tone < 0 {\n\n tone += 12;\n\n octave -= 1;\n\n }\n\n \n\n self.ref_hertz * self.ratio[tone as usize] * 2.0f32.powi(octave)\n\n }\n\n\n\n fn to_pitch(&self, _hertz: f32) -> Pitch {\n\n C4\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn et_to_hertz_test() {\n", "file_path": "clef/src/music/tune_sys.rs", "rank": 30, "score": 12.132750540113012 }, { "content": " 15.0 / 8.0,\n\n ];\n\n}\n\n\n\nimpl<'a> JustIntonation<'a> {\n\n pub fn new(ref_pitch: Pitch, ref_hertz: f32) -> JustIntonation<'a> {\n\n Self {\n\n ref_pitch,\n\n ref_hertz,\n\n ratio: &Self::standard_ratio,\n\n }\n\n }\n\n}\n\n\n\n#[contract_trait]\n\nimpl<'a> TuningSystem for JustIntonation<'a> {\n\n fn to_hertz(&self, pitch: &Pitch) -> f32 {\n\n let intervals: i32 = *pitch - self.ref_pitch;\n\n let mut octave = intervals / 12;\n\n let mut tone = intervals % 12;\n", "file_path": "clef/src/music/tune_sys.rs", "rank": 31, "score": 11.88328983144886 }, { "content": " #[test]\n\n #[should_panic]\n\n fn test_not_power_of_2_denominator() {\n\n let _illegal = Duration::new(3);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn test_more_than_4_dots() {\n\n let _illegal = Duration::new_with_dots(4, 5);\n\n }\n\n\n\n #[test]\n\n fn test_to_fraction() {\n\n assert_eq!(Fraction::new(1, 1), Duration::new(1).to_fraction());\n\n assert_eq!(Fraction::new(1, 4), Duration::new(4).to_fraction());\n\n assert_eq!(\n\n Fraction::new(1, 8),\n\n Duration::new_with_dots(8, 0).to_fraction()\n\n );\n", "file_path": "clef/src/music/duration.rs", "rank": 32, "score": 11.10125218675245 }, { "content": " assert_eq!(\n\n Fraction::new(7, 16),\n\n Duration::new_with_dots(4, 2).to_fraction()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_from_fraction() {\n\n assert_eq!(Duration::new(4), Duration::try_from(Fraction::new(1, 4)).unwrap());\n\n assert_eq!(Duration::new_with_dots(4, 2), Duration::try_from(Fraction::new(7, 16)).unwrap());\n\n }\n\n\n\n #[test]\n\n fn test_from_illegal_fraction() {\n\n assert!(Duration::try_from(Fraction::new(9, 16)).is_err());\n\n assert!(Duration::try_from(Fraction::new(1, 256)).is_err());\n\n }\n\n}\n", "file_path": "clef/src/music/duration.rs", "rank": 33, "score": 9.209141809538625 }, { "content": "#[contract_trait]\n\nimpl TuningSystem for EqualTemperament {\n\n fn to_hertz(&self, pitch: &Pitch) -> f32 {\n\n let intervals: i32 = *pitch - A4;\n\n self.a4_hertz * Self::TWELFTH_ROOT_OF_TWO.powi(intervals)\n\n }\n\n\n\n fn to_pitch(&self, hertz: f32) -> Pitch {\n\n let intervals = (hertz / self.a4_hertz).ln() / Self::LN_TWELFTH_ROOT_OF_TWO;\n\n let intervals_rounded = intervals.round();\n\n\n\n let accidental = match intervals - intervals_rounded {\n\n d if d > 0.0 => FLAT,\n\n _ => SHARP,\n\n };\n\n\n\n let intervals = intervals_rounded as i32;\n\n let mut octave = A4.octave() + intervals / 12;\n\n let mut tone = A4.name().0 + intervals % 12;\n\n\n", "file_path": "clef/src/music/tune_sys.rs", "rank": 34, "score": 9.107178124397157 }, { "content": " #[test]\n\n fn test_op_sub() {\n\n assert_eq!(0, C4 - C4);\n\n assert_eq!(-12, C4 - C5);\n\n }\n\n}\n\n\n\npub const C_1: Pitch = Pitch {\n\n name: C,\n\n octave: -1,\n\n accidental: NATURAL,\n\n};\n\npub const Cs_1: Pitch = Pitch {\n\n name: C,\n\n octave: -1,\n\n accidental: SHARP,\n\n};\n\npub const Db_1: Pitch = Pitch {\n\n name: D,\n\n octave: -1,\n", "file_path": "clef/src/music/pitch.rs", "rank": 35, "score": 
8.796838999821208 }, { "content": "\n\npub struct JustIntonation<'a> {\n\n ref_pitch: Pitch,\n\n ref_hertz: f32,\n\n ratio: &'a [f32; 12],\n\n}\n\n\n\nimpl<'a> JustIntonation<'a> {\n\n const standard_ratio: [f32; 12] = [\n\n 1.0,\n\n 25.0 / 24.0,\n\n 9.0 / 8.0,\n\n 6.0 / 5.0,\n\n 5.0 / 4.0,\n\n 4.0 / 3.0,\n\n 45.0 / 32.0,\n\n 3.0 / 2.0,\n\n 8.0 / 5.0,\n\n 5.0 / 3.0,\n\n 9.0 / 5.0,\n", "file_path": "clef/src/music/tune_sys.rs", "rank": 36, "score": 7.5354059720726045 }, { "content": " accidental: NATURAL,\n\n }\n\n }\n\n\n\n #[requires(octave >= -1 && octave <= 9, \"octave must be in the range -1 to 9\")]\n\n pub fn new_with_accidental(name: NoteName, accidental: Accidental, octave: i32) -> Pitch {\n\n Pitch {\n\n name,\n\n octave,\n\n accidental,\n\n }\n\n }\n\n\n\n pub fn name(&self) -> NoteName {\n\n self.name\n\n }\n\n\n\n pub fn octave(&self) -> i32 {\n\n self.octave\n\n }\n", "file_path": "clef/src/music/pitch.rs", "rank": 37, "score": 6.705551585706242 }, { "content": "use super::*;\n\nuse contracts::{contract_trait, ensures, requires};\n\n\n\n#[contract_trait]\n", "file_path": "clef/src/music/tune_sys.rs", "rank": 38, "score": 6.005478619284605 }, { "content": " let et = EqualTemperament::new(440.0);\n\n assert_eq!(440.0, et.to_hertz(&A4));\n\n assert_eq!(523.2512, et.to_hertz(&C5));\n\n assert_eq!(830.6098, et.to_hertz(&Gs5));\n\n assert_eq!(233.08176, et.to_hertz(&Bb3));\n\n assert_eq!(219.99988, et.to_hertz(&A3));\n\n }\n\n\n\n #[test]\n\n fn et_to_pitch_test() {\n\n let et = EqualTemperament::new(440.0);\n\n assert_eq!(A4, et.to_pitch(440.0));\n\n assert_eq!(C5, et.to_pitch(523.2512));\n\n assert_eq!(Gs5, et.to_pitch(830.6));\n\n assert_eq!(Ab5, et.to_pitch(830.7));\n\n assert_eq!(A3, et.to_pitch(219.99988));\n\n assert_eq!(Fs4, et.to_pitch(369.9));\n\n }\n\n\n\n #[test]\n", "file_path": "clef/src/music/tune_sys.rs", "rank": 39, "score": 5.781515003469148 }, { "content": " accidental: SHARP,\n\n};\n\npub const Bb_1: Pitch = Pitch {\n\n name: B,\n\n octave: -1,\n\n accidental: FLAT,\n\n};\n\npub const B_1: Pitch = Pitch {\n\n name: B,\n\n octave: -1,\n\n accidental: NATURAL,\n\n};\n\n\n\npub const C0: Pitch = Pitch {\n\n name: C,\n\n octave: 0,\n\n accidental: NATURAL,\n\n};\n\npub const Cs0: Pitch = Pitch {\n\n name: C,\n", "file_path": "clef/src/music/pitch.rs", "rank": 40, "score": 5.358592944431133 }, { "content": " accidental: NATURAL,\n\n};\n\npub const F_1: Pitch = Pitch {\n\n name: F,\n\n octave: -1,\n\n accidental: NATURAL,\n\n};\n\npub const Fs_1: Pitch = Pitch {\n\n name: F,\n\n octave: -1,\n\n accidental: SHARP,\n\n};\n\npub const Gb_1: Pitch = Pitch {\n\n name: G,\n\n octave: -1,\n\n accidental: FLAT,\n\n};\n\npub const G_1: Pitch = Pitch {\n\n name: G,\n\n octave: -1,\n", "file_path": "clef/src/music/pitch.rs", "rank": 41, "score": 5.321103940469907 }, { "content": " accidental: SHARP,\n\n};\n\npub const Bb9: Pitch = Pitch {\n\n name: B,\n\n octave: 9,\n\n accidental: FLAT,\n\n};\n\npub const B9: Pitch = Pitch {\n\n name: B,\n\n octave: 9,\n\n accidental: NATURAL,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 42, "score": 5.176791127976123 }, { "content": " fn just_to_hertz_test() {\n\n let just = JustIntonation::new(C4, 261.63);\n\n assert_eq!(418.608 * 0.25, just.to_hertz(&Ab2));\n\n assert_eq!(418.608 * 0.5, just.to_hertz(&Ab3));\n\n assert_eq!(436.05, just.to_hertz(&A4));\n\n assert_eq!(418.608, just.to_hertz(&Ab4));\n\n assert_eq!(523.26, just.to_hertz(&C5));\n\n assert_eq!(418.608 * 2.0, just.to_hertz(&Ab5));\n\n }\n\n}\n", "file_path": 
"clef/src/music/tune_sys.rs", "rank": 43, "score": 5.063004733257948 }, { "content": " octave: 0,\n\n accidental: NATURAL,\n\n};\n\npub const As0: Pitch = Pitch {\n\n name: A,\n\n octave: 0,\n\n accidental: SHARP,\n\n};\n\npub const Bb0: Pitch = Pitch {\n\n name: B,\n\n octave: 0,\n\n accidental: FLAT,\n\n};\n\npub const B0: Pitch = Pitch {\n\n name: B,\n\n octave: 0,\n\n accidental: NATURAL,\n\n};\n\n\n\npub const C1: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 44, "score": 5.0276225043953975 }, { "content": "};\n\npub const As3: Pitch = Pitch {\n\n name: A,\n\n octave: 3,\n\n accidental: SHARP,\n\n};\n\npub const Bb3: Pitch = Pitch {\n\n name: B,\n\n octave: 3,\n\n accidental: FLAT,\n\n};\n\npub const B3: Pitch = Pitch {\n\n name: B,\n\n octave: 3,\n\n accidental: NATURAL,\n\n};\n\n\n\npub const C4: Pitch = Pitch {\n\n name: C,\n\n octave: 4,\n", "file_path": "clef/src/music/pitch.rs", "rank": 45, "score": 5.0276225043953975 }, { "content": "pub const A7: Pitch = Pitch {\n\n name: A,\n\n octave: 7,\n\n accidental: NATURAL,\n\n};\n\npub const As7: Pitch = Pitch {\n\n name: A,\n\n octave: 7,\n\n accidental: SHARP,\n\n};\n\npub const Bb7: Pitch = Pitch {\n\n name: B,\n\n octave: 7,\n\n accidental: FLAT,\n\n};\n\npub const B7: Pitch = Pitch {\n\n name: B,\n\n octave: 7,\n\n accidental: NATURAL,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 46, "score": 4.986627199355519 }, { "content": " accidental: FLAT,\n\n};\n\npub const A4: Pitch = Pitch {\n\n name: A,\n\n octave: 4,\n\n accidental: NATURAL,\n\n};\n\npub const As4: Pitch = Pitch {\n\n name: A,\n\n octave: 4,\n\n accidental: SHARP,\n\n};\n\npub const Bb4: Pitch = Pitch {\n\n name: B,\n\n octave: 4,\n\n accidental: FLAT,\n\n};\n\npub const B4: Pitch = Pitch {\n\n name: B,\n\n octave: 4,\n", "file_path": "clef/src/music/pitch.rs", "rank": 47, "score": 4.986627199355519 }, { "content": " name: A,\n\n octave: 6,\n\n accidental: SHARP,\n\n};\n\npub const Bb6: Pitch = Pitch {\n\n name: B,\n\n octave: 6,\n\n accidental: FLAT,\n\n};\n\npub const B6: Pitch = Pitch {\n\n name: B,\n\n octave: 6,\n\n accidental: NATURAL,\n\n};\n\n\n\npub const C7: Pitch = Pitch {\n\n name: C,\n\n octave: 7,\n\n accidental: NATURAL,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 48, "score": 4.9108338381259635 }, { "content": "};\n\npub const E3: Pitch = Pitch {\n\n name: E,\n\n octave: 3,\n\n accidental: NATURAL,\n\n};\n\npub const F3: Pitch = Pitch {\n\n name: F,\n\n octave: 3,\n\n accidental: NATURAL,\n\n};\n\npub const Fs3: Pitch = Pitch {\n\n name: F,\n\n octave: 3,\n\n accidental: SHARP,\n\n};\n\npub const Gb3: Pitch = Pitch {\n\n name: G,\n\n octave: 3,\n\n accidental: FLAT,\n", "file_path": "clef/src/music/pitch.rs", "rank": 49, "score": 4.906705338659228 }, { "content": "pub const Eb7: Pitch = Pitch {\n\n name: E,\n\n octave: 7,\n\n accidental: FLAT,\n\n};\n\npub const E7: Pitch = Pitch {\n\n name: E,\n\n octave: 7,\n\n accidental: NATURAL,\n\n};\n\npub const F7: Pitch = Pitch {\n\n name: F,\n\n octave: 7,\n\n accidental: NATURAL,\n\n};\n\npub const Fs7: Pitch = Pitch {\n\n name: F,\n\n octave: 7,\n\n accidental: SHARP,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 50, "score": 4.906705338659228 }, { "content": "pub const F2: Pitch = Pitch {\n\n name: F,\n\n octave: 2,\n\n accidental: NATURAL,\n\n};\n\npub const Fs2: Pitch = Pitch {\n\n name: F,\n\n octave: 2,\n\n accidental: SHARP,\n\n};\n\npub const Gb2: Pitch = Pitch {\n\n name: G,\n\n octave: 2,\n\n accidental: FLAT,\n\n};\n\npub const G2: Pitch = 
Pitch {\n\n name: G,\n\n octave: 2,\n\n accidental: NATURAL,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 51, "score": 4.906705338659228 }, { "content": " name: E,\n\n octave: 6,\n\n accidental: NATURAL,\n\n};\n\npub const F6: Pitch = Pitch {\n\n name: F,\n\n octave: 6,\n\n accidental: NATURAL,\n\n};\n\npub const Fs6: Pitch = Pitch {\n\n name: F,\n\n octave: 6,\n\n accidental: SHARP,\n\n};\n\npub const Gb6: Pitch = Pitch {\n\n name: G,\n\n octave: 6,\n\n accidental: FLAT,\n\n};\n\npub const G6: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 52, "score": 4.906705338659228 }, { "content": "pub const Bb2: Pitch = Pitch {\n\n name: B,\n\n octave: 2,\n\n accidental: FLAT,\n\n};\n\npub const B2: Pitch = Pitch {\n\n name: B,\n\n octave: 2,\n\n accidental: NATURAL,\n\n};\n\n\n\npub const C3: Pitch = Pitch {\n\n name: C,\n\n octave: 3,\n\n accidental: NATURAL,\n\n};\n\npub const Cs3: Pitch = Pitch {\n\n name: C,\n\n octave: 3,\n\n accidental: SHARP,\n", "file_path": "clef/src/music/pitch.rs", "rank": 53, "score": 4.906705338659228 }, { "content": " accidental: SHARP,\n\n};\n\npub const Eb4: Pitch = Pitch {\n\n name: E,\n\n octave: 4,\n\n accidental: FLAT,\n\n};\n\npub const E4: Pitch = Pitch {\n\n name: E,\n\n octave: 4,\n\n accidental: NATURAL,\n\n};\n\npub const F4: Pitch = Pitch {\n\n name: F,\n\n octave: 4,\n\n accidental: NATURAL,\n\n};\n\npub const Fs4: Pitch = Pitch {\n\n name: F,\n\n octave: 4,\n", "file_path": "clef/src/music/pitch.rs", "rank": 54, "score": 4.906705338659228 }, { "content": " octave: 0,\n\n accidental: FLAT,\n\n};\n\npub const E0: Pitch = Pitch {\n\n name: E,\n\n octave: 0,\n\n accidental: NATURAL,\n\n};\n\npub const F0: Pitch = Pitch {\n\n name: F,\n\n octave: 0,\n\n accidental: NATURAL,\n\n};\n\npub const Fs0: Pitch = Pitch {\n\n name: F,\n\n octave: 0,\n\n accidental: SHARP,\n\n};\n\npub const Gb0: Pitch = Pitch {\n\n name: G,\n", "file_path": "clef/src/music/pitch.rs", "rank": 55, "score": 4.906705338659228 }, { "content": " accidental: NATURAL,\n\n};\n\npub const F9: Pitch = Pitch {\n\n name: F,\n\n octave: 9,\n\n accidental: NATURAL,\n\n};\n\npub const Fs9: Pitch = Pitch {\n\n name: F,\n\n octave: 9,\n\n accidental: SHARP,\n\n};\n\npub const Gb9: Pitch = Pitch {\n\n name: G,\n\n octave: 9,\n\n accidental: FLAT,\n\n};\n\npub const G9: Pitch = Pitch {\n\n name: G,\n\n octave: 9,\n", "file_path": "clef/src/music/pitch.rs", "rank": 56, "score": 4.906705338659228 }, { "content": " name: A,\n\n octave: 1,\n\n accidental: FLAT,\n\n};\n\npub const A1: Pitch = Pitch {\n\n name: A,\n\n octave: 1,\n\n accidental: NATURAL,\n\n};\n\npub const As1: Pitch = Pitch {\n\n name: A,\n\n octave: 1,\n\n accidental: SHARP,\n\n};\n\npub const Bb1: Pitch = Pitch {\n\n name: B,\n\n octave: 1,\n\n accidental: FLAT,\n\n};\n\npub const B1: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 57, "score": 4.2868858466246 }, { "content": "};\n\npub const Ab8: Pitch = Pitch {\n\n name: A,\n\n octave: 8,\n\n accidental: FLAT,\n\n};\n\npub const A8: Pitch = Pitch {\n\n name: A,\n\n octave: 8,\n\n accidental: NATURAL,\n\n};\n\npub const As8: Pitch = Pitch {\n\n name: A,\n\n octave: 8,\n\n accidental: SHARP,\n\n};\n\npub const Bb8: Pitch = Pitch {\n\n name: B,\n\n octave: 8,\n\n accidental: FLAT,\n", "file_path": "clef/src/music/pitch.rs", "rank": 58, "score": 4.2868858466246 }, { "content": " octave: 5,\n\n accidental: SHARP,\n\n};\n\npub const Ab5: Pitch = Pitch {\n\n name: A,\n\n octave: 5,\n\n accidental: FLAT,\n\n};\n\npub const A5: Pitch = Pitch 
{\n\n name: A,\n\n octave: 5,\n\n accidental: NATURAL,\n\n};\n\npub const As5: Pitch = Pitch {\n\n name: A,\n\n octave: 5,\n\n accidental: SHARP,\n\n};\n\npub const Bb5: Pitch = Pitch {\n\n name: B,\n", "file_path": "clef/src/music/pitch.rs", "rank": 59, "score": 4.2868858466246 }, { "content": " octave: 5,\n\n accidental: FLAT,\n\n};\n\npub const B5: Pitch = Pitch {\n\n name: B,\n\n octave: 5,\n\n accidental: NATURAL,\n\n};\n\n\n\npub const C6: Pitch = Pitch {\n\n name: C,\n\n octave: 6,\n\n accidental: NATURAL,\n\n};\n\npub const Cs6: Pitch = Pitch {\n\n name: C,\n\n octave: 6,\n\n accidental: SHARP,\n\n};\n\npub const Db6: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 60, "score": 4.246185299832634 }, { "content": "};\n\npub const B8: Pitch = Pitch {\n\n name: B,\n\n octave: 8,\n\n accidental: NATURAL,\n\n};\n\n\n\npub const C9: Pitch = Pitch {\n\n name: C,\n\n octave: 9,\n\n accidental: NATURAL,\n\n};\n\npub const Cs9: Pitch = Pitch {\n\n name: C,\n\n octave: 9,\n\n accidental: SHARP,\n\n};\n\npub const Db9: Pitch = Pitch {\n\n name: D,\n\n octave: 9,\n", "file_path": "clef/src/music/pitch.rs", "rank": 61, "score": 4.246185299832634 }, { "content": " octave: 5,\n\n accidental: NATURAL,\n\n};\n\npub const Fs5: Pitch = Pitch {\n\n name: F,\n\n octave: 5,\n\n accidental: SHARP,\n\n};\n\npub const Gb5: Pitch = Pitch {\n\n name: G,\n\n octave: 5,\n\n accidental: FLAT,\n\n};\n\npub const G5: Pitch = Pitch {\n\n name: G,\n\n octave: 5,\n\n accidental: NATURAL,\n\n};\n\npub const Gs5: Pitch = Pitch {\n\n name: G,\n", "file_path": "clef/src/music/pitch.rs", "rank": 62, "score": 4.167404448460103 }, { "content": "};\n\npub const Fs8: Pitch = Pitch {\n\n name: F,\n\n octave: 8,\n\n accidental: SHARP,\n\n};\n\npub const Gb8: Pitch = Pitch {\n\n name: G,\n\n octave: 8,\n\n accidental: FLAT,\n\n};\n\npub const G8: Pitch = Pitch {\n\n name: G,\n\n octave: 8,\n\n accidental: NATURAL,\n\n};\n\npub const Gs8: Pitch = Pitch {\n\n name: G,\n\n octave: 8,\n\n accidental: SHARP,\n", "file_path": "clef/src/music/pitch.rs", "rank": 63, "score": 4.167404448460103 }, { "content": " name: F,\n\n octave: 1,\n\n accidental: SHARP,\n\n};\n\npub const Gb1: Pitch = Pitch {\n\n name: G,\n\n octave: 1,\n\n accidental: FLAT,\n\n};\n\npub const G1: Pitch = Pitch {\n\n name: G,\n\n octave: 1,\n\n accidental: NATURAL,\n\n};\n\npub const Gs1: Pitch = Pitch {\n\n name: G,\n\n octave: 1,\n\n accidental: SHARP,\n\n};\n\npub const Ab1: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 64, "score": 4.167404448460103 }, { "content": " octave: 5,\n\n accidental: NATURAL,\n\n};\n\npub const Ds5: Pitch = Pitch {\n\n name: D,\n\n octave: 5,\n\n accidental: SHARP,\n\n};\n\npub const Eb5: Pitch = Pitch {\n\n name: E,\n\n octave: 5,\n\n accidental: FLAT,\n\n};\n\npub const E5: Pitch = Pitch {\n\n name: E,\n\n octave: 5,\n\n accidental: NATURAL,\n\n};\n\npub const F5: Pitch = Pitch {\n\n name: F,\n", "file_path": "clef/src/music/pitch.rs", "rank": 65, "score": 4.167404448460103 }, { "content": "};\n\npub const Ds8: Pitch = Pitch {\n\n name: D,\n\n octave: 8,\n\n accidental: SHARP,\n\n};\n\npub const Eb8: Pitch = Pitch {\n\n name: E,\n\n octave: 8,\n\n accidental: FLAT,\n\n};\n\npub const E8: Pitch = Pitch {\n\n name: E,\n\n octave: 8,\n\n accidental: NATURAL,\n\n};\n\npub const F8: Pitch = Pitch {\n\n name: F,\n\n octave: 8,\n\n accidental: NATURAL,\n", "file_path": "clef/src/music/pitch.rs", "rank": 66, "score": 4.167404448460103 }, { "content": " name: D,\n\n octave: 1,\n\n accidental: 
SHARP,\n\n};\n\npub const Eb1: Pitch = Pitch {\n\n name: E,\n\n octave: 1,\n\n accidental: FLAT,\n\n};\n\npub const E1: Pitch = Pitch {\n\n name: E,\n\n octave: 1,\n\n accidental: NATURAL,\n\n};\n\npub const F1: Pitch = Pitch {\n\n name: F,\n\n octave: 1,\n\n accidental: NATURAL,\n\n};\n\npub const Fs1: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 67, "score": 4.167404448460103 }, { "content": " name: B,\n\n octave: 1,\n\n accidental: NATURAL,\n\n};\n\n\n\npub const C2: Pitch = Pitch {\n\n name: C,\n\n octave: 2,\n\n accidental: NATURAL,\n\n};\n\npub const Cs2: Pitch = Pitch {\n\n name: C,\n\n octave: 2,\n\n accidental: SHARP,\n\n};\n\npub const Db2: Pitch = Pitch {\n\n name: D,\n\n octave: 2,\n\n accidental: FLAT,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 68, "score": 4.125425604984535 }, { "content": " 5 => Pitch::new(F, octave),\n\n 6 => match accidental {\n\n FLAT => Pitch::new_with_accidental(G, FLAT, octave),\n\n _ => Pitch::new_with_accidental(F, SHARP, octave),\n\n },\n\n 7 => Pitch::new(G, octave),\n\n 8 => match accidental {\n\n FLAT => Pitch::new_with_accidental(A, FLAT, octave),\n\n _ => Pitch::new_with_accidental(G, SHARP, octave),\n\n },\n\n 9 => Pitch::new(A, octave),\n\n 10 => match accidental {\n\n FLAT => Pitch::new_with_accidental(B, FLAT, octave),\n\n _ => Pitch::new_with_accidental(A, SHARP, octave),\n\n },\n\n 11 => Pitch::new(B, octave),\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n", "file_path": "clef/src/music/tune_sys.rs", "rank": 69, "score": 3.9703574366562373 }, { "content": "# clef\n\nMusic library written in Rust.\n\n\n", "file_path": "clef/README.md", "rank": 70, "score": 2.8316636459355093 }, { "content": "# clef\n\nMusic library written in Rust.\n\n\n", "file_path": "README.md", "rank": 71, "score": 2.8316636459355093 }, { "content": " accidental: NATURAL,\n\n};\n\npub const Gs_1: Pitch = Pitch {\n\n name: G,\n\n octave: -1,\n\n accidental: SHARP,\n\n};\n\npub const Ab_1: Pitch = Pitch {\n\n name: A,\n\n octave: -1,\n\n accidental: FLAT,\n\n};\n\npub const A_1: Pitch = Pitch {\n\n name: A,\n\n octave: -1,\n\n accidental: NATURAL,\n\n};\n\npub const As_1: Pitch = Pitch {\n\n name: A,\n\n octave: -1,\n", "file_path": "clef/src/music/pitch.rs", "rank": 72, "score": 2.554687046983491 }, { "content": " accidental: NATURAL,\n\n};\n\npub const Gs9: Pitch = Pitch {\n\n name: G,\n\n octave: 9,\n\n accidental: SHARP,\n\n};\n\npub const Ab9: Pitch = Pitch {\n\n name: A,\n\n octave: 9,\n\n accidental: FLAT,\n\n};\n\npub const A9: Pitch = Pitch {\n\n name: A,\n\n octave: 9,\n\n accidental: NATURAL,\n\n};\n\npub const As9: Pitch = Pitch {\n\n name: A,\n\n octave: 9,\n", "file_path": "clef/src/music/pitch.rs", "rank": 73, "score": 2.522091295507701 }, { "content": "pub const Gs2: Pitch = Pitch {\n\n name: G,\n\n octave: 2,\n\n accidental: SHARP,\n\n};\n\npub const Ab2: Pitch = Pitch {\n\n name: A,\n\n octave: 2,\n\n accidental: FLAT,\n\n};\n\npub const A2: Pitch = Pitch {\n\n name: A,\n\n octave: 2,\n\n accidental: NATURAL,\n\n};\n\npub const As2: Pitch = Pitch {\n\n name: A,\n\n octave: 2,\n\n accidental: SHARP,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 74, "score": 2.522091295507701 }, { "content": " name: G,\n\n octave: 6,\n\n accidental: NATURAL,\n\n};\n\npub const Gs6: Pitch = Pitch {\n\n name: G,\n\n octave: 6,\n\n accidental: SHARP,\n\n};\n\npub const Ab6: Pitch = Pitch {\n\n name: A,\n\n octave: 6,\n\n accidental: FLAT,\n\n};\n\npub const A6: Pitch = Pitch {\n\n name: A,\n\n octave: 6,\n\n accidental: 
NATURAL,\n\n};\n\npub const As6: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 75, "score": 2.5061033641236845 }, { "content": "};\n\npub const G3: Pitch = Pitch {\n\n name: G,\n\n octave: 3,\n\n accidental: NATURAL,\n\n};\n\npub const Gs3: Pitch = Pitch {\n\n name: G,\n\n octave: 3,\n\n accidental: SHARP,\n\n};\n\npub const Ab3: Pitch = Pitch {\n\n name: A,\n\n octave: 3,\n\n accidental: FLAT,\n\n};\n\npub const A3: Pitch = Pitch {\n\n name: A,\n\n octave: 3,\n\n accidental: NATURAL,\n", "file_path": "clef/src/music/pitch.rs", "rank": 76, "score": 2.5061033641236845 }, { "content": " octave: 0,\n\n accidental: FLAT,\n\n};\n\npub const G0: Pitch = Pitch {\n\n name: G,\n\n octave: 0,\n\n accidental: NATURAL,\n\n};\n\npub const Gs0: Pitch = Pitch {\n\n name: G,\n\n octave: 0,\n\n accidental: SHARP,\n\n};\n\npub const Ab0: Pitch = Pitch {\n\n name: A,\n\n octave: 0,\n\n accidental: FLAT,\n\n};\n\npub const A0: Pitch = Pitch {\n\n name: A,\n", "file_path": "clef/src/music/pitch.rs", "rank": 77, "score": 2.5061033641236845 }, { "content": " accidental: NATURAL,\n\n};\n\n\n\npub const C5: Pitch = Pitch {\n\n name: C,\n\n octave: 5,\n\n accidental: NATURAL,\n\n};\n\npub const Cs5: Pitch = Pitch {\n\n name: C,\n\n octave: 5,\n\n accidental: SHARP,\n\n};\n\npub const Db5: Pitch = Pitch {\n\n name: D,\n\n octave: 5,\n\n accidental: FLAT,\n\n};\n\npub const D5: Pitch = Pitch {\n\n name: D,\n", "file_path": "clef/src/music/pitch.rs", "rank": 78, "score": 2.490316855884565 }, { "content": "pub const Gb7: Pitch = Pitch {\n\n name: G,\n\n octave: 7,\n\n accidental: FLAT,\n\n};\n\npub const G7: Pitch = Pitch {\n\n name: G,\n\n octave: 7,\n\n accidental: NATURAL,\n\n};\n\npub const Gs7: Pitch = Pitch {\n\n name: G,\n\n octave: 7,\n\n accidental: SHARP,\n\n};\n\npub const Ab7: Pitch = Pitch {\n\n name: A,\n\n octave: 7,\n\n accidental: FLAT,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 79, "score": 2.490316855884565 }, { "content": " accidental: SHARP,\n\n};\n\npub const Gb4: Pitch = Pitch {\n\n name: G,\n\n octave: 4,\n\n accidental: FLAT,\n\n};\n\npub const G4: Pitch = Pitch {\n\n name: G,\n\n octave: 4,\n\n accidental: NATURAL,\n\n};\n\npub const Gs4: Pitch = Pitch {\n\n name: G,\n\n octave: 4,\n\n accidental: SHARP,\n\n};\n\npub const Ab4: Pitch = Pitch {\n\n name: A,\n\n octave: 4,\n", "file_path": "clef/src/music/pitch.rs", "rank": 80, "score": 2.490316855884565 }, { "content": "};\n\npub const Db3: Pitch = Pitch {\n\n name: D,\n\n octave: 3,\n\n accidental: FLAT,\n\n};\n\npub const D3: Pitch = Pitch {\n\n name: D,\n\n octave: 3,\n\n accidental: NATURAL,\n\n};\n\npub const Ds3: Pitch = Pitch {\n\n name: D,\n\n octave: 3,\n\n accidental: SHARP,\n\n};\n\npub const Eb3: Pitch = Pitch {\n\n name: E,\n\n octave: 3,\n\n accidental: FLAT,\n", "file_path": "clef/src/music/pitch.rs", "rank": 81, "score": 2.474727988188846 }, { "content": "pub const D2: Pitch = Pitch {\n\n name: D,\n\n octave: 2,\n\n accidental: NATURAL,\n\n};\n\npub const Ds2: Pitch = Pitch {\n\n name: D,\n\n octave: 2,\n\n accidental: SHARP,\n\n};\n\npub const Eb2: Pitch = Pitch {\n\n name: E,\n\n octave: 2,\n\n accidental: FLAT,\n\n};\n\npub const E2: Pitch = Pitch {\n\n name: E,\n\n octave: 2,\n\n accidental: NATURAL,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 82, "score": 2.474727988188846 }, { "content": " accidental: FLAT,\n\n};\n\npub const D9: Pitch = Pitch {\n\n name: D,\n\n octave: 9,\n\n accidental: NATURAL,\n\n};\n\npub const Ds9: Pitch = Pitch {\n\n name: D,\n\n octave: 9,\n\n 
accidental: SHARP,\n\n};\n\npub const Eb9: Pitch = Pitch {\n\n name: E,\n\n octave: 9,\n\n accidental: FLAT,\n\n};\n\npub const E9: Pitch = Pitch {\n\n name: E,\n\n octave: 9,\n", "file_path": "clef/src/music/pitch.rs", "rank": 83, "score": 2.474727988188846 }, { "content": " octave: 0,\n\n accidental: SHARP,\n\n};\n\npub const Db0: Pitch = Pitch {\n\n name: D,\n\n octave: 0,\n\n accidental: FLAT,\n\n};\n\npub const D0: Pitch = Pitch {\n\n name: D,\n\n octave: 0,\n\n accidental: NATURAL,\n\n};\n\npub const Ds0: Pitch = Pitch {\n\n name: D,\n\n octave: 0,\n\n accidental: SHARP,\n\n};\n\npub const Eb0: Pitch = Pitch {\n\n name: E,\n", "file_path": "clef/src/music/pitch.rs", "rank": 84, "score": 2.474727988188846 }, { "content": "\n\npub const C8: Pitch = Pitch {\n\n name: C,\n\n octave: 8,\n\n accidental: NATURAL,\n\n};\n\npub const Cs8: Pitch = Pitch {\n\n name: C,\n\n octave: 8,\n\n accidental: SHARP,\n\n};\n\npub const Db8: Pitch = Pitch {\n\n name: D,\n\n octave: 8,\n\n accidental: FLAT,\n\n};\n\npub const D8: Pitch = Pitch {\n\n name: D,\n\n octave: 8,\n\n accidental: NATURAL,\n", "file_path": "clef/src/music/pitch.rs", "rank": 85, "score": 2.474727988188846 }, { "content": " accidental: NATURAL,\n\n};\n\npub const Cs4: Pitch = Pitch {\n\n name: C,\n\n octave: 4,\n\n accidental: SHARP,\n\n};\n\npub const Db4: Pitch = Pitch {\n\n name: D,\n\n octave: 4,\n\n accidental: FLAT,\n\n};\n\npub const D4: Pitch = Pitch {\n\n name: D,\n\n octave: 4,\n\n accidental: NATURAL,\n\n};\n\npub const Ds4: Pitch = Pitch {\n\n name: D,\n\n octave: 4,\n", "file_path": "clef/src/music/pitch.rs", "rank": 86, "score": 2.474727988188846 }, { "content": " accidental: FLAT,\n\n};\n\npub const D_1: Pitch = Pitch {\n\n name: D,\n\n octave: -1,\n\n accidental: NATURAL,\n\n};\n\npub const Ds_1: Pitch = Pitch {\n\n name: D,\n\n octave: -1,\n\n accidental: SHARP,\n\n};\n\npub const Eb_1: Pitch = Pitch {\n\n name: E,\n\n octave: -1,\n\n accidental: FLAT,\n\n};\n\npub const E_1: Pitch = Pitch {\n\n name: E,\n\n octave: -1,\n", "file_path": "clef/src/music/pitch.rs", "rank": 87, "score": 2.474727988188846 }, { "content": "pub const Cs7: Pitch = Pitch {\n\n name: C,\n\n octave: 7,\n\n accidental: SHARP,\n\n};\n\npub const Db7: Pitch = Pitch {\n\n name: D,\n\n octave: 7,\n\n accidental: FLAT,\n\n};\n\npub const D7: Pitch = Pitch {\n\n name: D,\n\n octave: 7,\n\n accidental: NATURAL,\n\n};\n\npub const Ds7: Pitch = Pitch {\n\n name: D,\n\n octave: 7,\n\n accidental: SHARP,\n\n};\n", "file_path": "clef/src/music/pitch.rs", "rank": 88, "score": 2.474727988188846 }, { "content": " name: D,\n\n octave: 6,\n\n accidental: FLAT,\n\n};\n\npub const D6: Pitch = Pitch {\n\n name: D,\n\n octave: 6,\n\n accidental: NATURAL,\n\n};\n\npub const Ds6: Pitch = Pitch {\n\n name: D,\n\n octave: 6,\n\n accidental: SHARP,\n\n};\n\npub const Eb6: Pitch = Pitch {\n\n name: E,\n\n octave: 6,\n\n accidental: FLAT,\n\n};\n\npub const E6: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 89, "score": 2.474727988188846 }, { "content": " name: C,\n\n octave: 1,\n\n accidental: NATURAL,\n\n};\n\npub const Cs1: Pitch = Pitch {\n\n name: C,\n\n octave: 1,\n\n accidental: SHARP,\n\n};\n\npub const Db1: Pitch = Pitch {\n\n name: D,\n\n octave: 1,\n\n accidental: FLAT,\n\n};\n\npub const D1: Pitch = Pitch {\n\n name: D,\n\n octave: 1,\n\n accidental: NATURAL,\n\n};\n\npub const Ds1: Pitch = Pitch {\n", "file_path": "clef/src/music/pitch.rs", "rank": 90, "score": 2.474727988188846 } ]
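The clef row above exercises gcd-based fraction reduction (the gcd helper shown in the context items feeding Fraction::to_irreducible). As a quick illustration only, and not code taken from the clef crate, the same reduction convention can be checked with a small standalone sketch:

fn gcd(a: i32, b: i32) -> i32 {
    // Euclid's algorithm on absolute values, mirroring the context snippet.
    let (mut a, mut b) = (a.abs(), b.abs());
    while a != 0 && b != 0 {
        if a > b {
            a %= b;
        } else {
            b %= a;
        }
    }
    a.max(b)
}

fn reduce(numerator: i32, denominator: i32) -> (i32, i32) {
    // Divide out the gcd and keep the sign on the numerator,
    // matching the convention used by Fraction::to_irreducible.
    let g = gcd(numerator, denominator);
    let sign = numerator.signum() * denominator.signum();
    ((numerator / g).abs() * sign, (denominator / g).abs())
}

fn main() {
    assert_eq!(reduce(3, -9), (-1, 3));
    assert_eq!(reduce(27, 9), (3, 1));
    println!("reduction checks pass");
}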
Rust
src/arena.rs
scottjmaddox/rust-memory-arena
66dfdf6a683cd2d0066ab1742100ade8256d3a7b
use core::cell::Cell;
use arena_box::ArenaBox;

pub struct Arena {
    size: usize,
    used: Cell<usize>,
    mem: *mut u8,
}

impl Arena {
    pub fn new(size: usize, alignment: usize) -> Result<Self, ::alloc::AllocError> {
        if size == 0 {
            Ok(Self {
                size: size,
                used: Cell::new(0),
                mem: 1 as *mut u8,
            })
        } else {
            unsafe {
                let mem = ::alloc::aligned_alloc(size, alignment)?;
                Ok(Self {
                    size: size,
                    used: Cell::new(0),
                    mem: mem,
                })
            }
        }
    }

    fn aligned_alloc(&self, size: usize, alignment: usize) -> Option<*mut u8> {
        assert!(alignment.count_ones() == 1);
        let unaligned_p = self.mem as usize + self.used.get();
        let aligned_p = (unaligned_p + alignment - 1) & !(alignment - 1);
        let offset = aligned_p - unaligned_p;
        if self.used.get() + size + offset > self.size {
            return None;
        }
        self.used.set(self.used.get() + size + offset);
        Some(aligned_p as *mut u8)
    }

    fn alloc<T>(&self) -> Option<*mut T> {
        let size = ::core::mem::size_of::<T>();
        if size == 0 {
            return Some(::core::mem::align_of::<T>() as *mut T);
        }
        let alignment = ::core::mem::align_of::<T>();
        match self.aligned_alloc(size, alignment) {
            None => None,
            Some(p) => Some(p as *mut T),
        }
    }

    pub fn new_box<'a, T>(&'a self, x: T) -> Result<ArenaBox<'a, T>, T> {
        match self.alloc::<T>() {
            None => Err(x),
            Some(p) => {
                unsafe {
                    ::core::ptr::write(p, x);
                }
                Ok(unsafe { ArenaBox::from_raw(p) })
            }
        }
    }
}

impl Drop for Arena {
    fn drop(&mut self) {
        unsafe {
            ::alloc::free(self.mem);
        }
    }
}

#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;

    #[test]
    fn arena_box() {
        let alignment = 1024;
        let size = 1024;
        let a = Arena::new(size, alignment).unwrap();
        let mut num = a.new_box(42).unwrap();
        assert_eq!(*num, 42);
        *num += 1;
        assert_eq!(*num, 43);
    }

    #[test]
    fn arena_out_of_memory() {
        let alignment = 512;
        let size = 1;
        let a = Arena::new(size, alignment).unwrap();
        let i: usize = 42;
        assert_eq!(a.new_box(i), Err(42));
    }

    #[test]
    fn arena_aligned_alloc() {
        let a = Arena::new(1024, 1024).unwrap();
        let p1 = a.aligned_alloc(1, 1).unwrap();
        let p2 = a.aligned_alloc(1, 4).unwrap();
        let p3 = a.aligned_alloc(1, 8).unwrap();
        let p4 = a.aligned_alloc(1, 512).unwrap();
        assert!(((p1 as usize) % 1024) == 0);
        assert!(((p2 as usize) % 4) == 0);
        assert!(((p3 as usize) % 8) == 0);
        assert!(((p4 as usize) % 512) == 0);
    }

    #[test]
    #[should_panic]
    fn arena_invalid_alignment() {
        let _ = Arena::new(1024, 1025).unwrap();
    }

    #[test]
    #[should_panic]
    fn arena_aligned_alloc_invalid_alignment() {
        let a = Arena::new(1024, 1024).unwrap();
        let _ = a.aligned_alloc(1, 3).unwrap();
    }
}
use core::cell::Cell;
use arena_box::ArenaBox;

pub struct Arena {
    size: usize,
    used: Cell<usize>,
    mem: *mut u8,
}

impl Arena {
    pub fn new(size: usize, alignment: usize) -> Result<Self, ::alloc::AllocError> {
        if size == 0 {
            Ok(Self {
                size: size,
                used: Cell::new(0),
                mem: 1 as *mut u8,
            })
        } else {
            unsafe {
                let mem = ::alloc::aligned_alloc(size, alignment)?;
                Ok(Self {
                    size: size,
                    used: Cell::new(0),
                    mem: mem,
                })
            }
        }
    }

    fn aligned_alloc(&self, size: usize, alignment: usize) -> Option<*mut u8> {
    fn alloc<T>(&self) -> Option<*mut T> {
        let size = ::core::mem::size_of::<T>();
        if size == 0 {
            return Some(::core::mem::align_of::<T>() as *mut T);
        }
        let alignment = ::core::mem::align_of::<T>();
        match self.aligned_alloc(size, alignment) {
            None => None,
            Some(p) => Some(p as *mut T),
        }
    }

    pub fn new_box<'a, T>(&'a self, x: T) -> Result<ArenaBox<'a, T>, T> {
        match self.alloc::<T>() {
            None => Err(x),
            Some(p) => {
                unsafe {
                    ::core::ptr::write(p, x);
                }
                Ok(unsafe { ArenaBox::from_raw(p) })
            }
        }
    }
}

impl Drop for Arena {
    fn drop(&mut self) {
        unsafe {
            ::alloc::free(self.mem);
        }
    }
}

#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;

    #[test]
    fn arena_box() {
        let alignment = 1024;
        let size = 1024;
        let a = Arena::new(size, alignment).unwrap();
        let mut num = a.new_box(42).unwrap();
        assert_eq!(*num, 42);
        *num += 1;
        assert_eq!(*num, 43);
    }

    #[test]
    fn arena_out_of_memory() {
        let alignment = 512;
        let size = 1;
        let a = Arena::new(size, alignment).unwrap();
        let i: usize = 42;
        assert_eq!(a.new_box(i), Err(42));
    }

    #[test]
    fn arena_aligned_alloc() {
        let a = Arena::new(1024, 1024).unwrap();
        let p1 = a.aligned_alloc(1, 1).unwrap();
        let p2 = a.aligned_alloc(1, 4).unwrap();
        let p3 = a.aligned_alloc(1, 8).unwrap();
        let p4 = a.aligned_alloc(1, 512).unwrap();
        assert!(((p1 as usize) % 1024) == 0);
        assert!(((p2 as usize) % 4) == 0);
        assert!(((p3 as usize) % 8) == 0);
        assert!(((p4 as usize) % 512) == 0);
    }

    #[test]
    #[should_panic]
    fn arena_invalid_alignment() {
        let _ = Arena::new(1024, 1025).unwrap();
    }

    #[test]
    #[should_panic]
    fn arena_aligned_alloc_invalid_alignment() {
        let a = Arena::new(1024, 1024).unwrap();
        let _ = a.aligned_alloc(1, 3).unwrap();
    }
}
        assert!(alignment.count_ones() == 1);
        let unaligned_p = self.mem as usize + self.used.get();
        let aligned_p = (unaligned_p + alignment - 1) & !(alignment - 1);
        let offset = aligned_p - unaligned_p;
        if self.used.get() + size + offset > self.size {
            return None;
        }
        self.used.set(self.used.get() + size + offset);
        Some(aligned_p as *mut u8)
    }
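The middle above rounds the arena's bump pointer up to the next multiple of a power-of-two alignment with (p + alignment - 1) & !(alignment - 1). A tiny standalone check of that identity (illustrative only, not part of this dataset row):

fn round_up(p: usize, align: usize) -> usize {
    // Only valid for power-of-two alignments, as asserted in the row's code.
    assert!(align.is_power_of_two());
    (p + align - 1) & !(align - 1)
}

fn main() {
    assert_eq!(round_up(13, 8), 16); // bumped up to the next multiple of 8
    assert_eq!(round_up(16, 8), 16); // already aligned values are unchanged
    assert_eq!(round_up(1, 512), 512);
    println!("alignment checks pass");
}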
function_block-function_prefix_line
[ { "content": "/// Types that can be \"unsized\" to a dynamically-sized type.\n\n///\n\n/// For example, the sized array type `[i8; 2]` implements `Unsize<[i8]>` and\n\n/// `Unsize<fmt::Debug>`.\n\n///\n\n/// All implementations of `Unsize` are provided automatically by the compiler.\n\n///\n\n/// `Unsize` is implemented for:\n\n///\n\n/// - `[T; N]` is `Unsize<[T]>`\n\n/// - `T` is `Unsize<Trait>` when `T: Trait`\n\n/// - `Foo<..., T, ...>` is `Unsize<Foo<..., U, ...>>` if:\n\n/// - `T: Unsize<U>`\n\n/// - Foo is a struct\n\n/// - Only the last field of `Foo` has a type involving `T`\n\n/// - `T` is not part of the type of any other fields\n\n/// - `Bar<T>: Unsize<Bar<U>>`, if the last field of `Foo` has type `Bar<T>`\n\n///\n\n/// `Unsize` is used along with [`ops::CoerceUnsized`][coerceunsized] to allow\n\n/// \"user-defined\" containers such as [`rc::Rc`][rc] to contain dynamically-sized\n\n/// types. See the [DST coercion RFC][RFC982] and [the nomicon entry on coercion][nomicon-coerce]\n\n/// for more details.\n\n///\n\n/// [coerceunsized]: ../ops/trait.CoerceUnsized.html\n\n/// [rc]: ../../std/rc/struct.Rc.html\n\n/// [RFC982]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md\n\n/// [nomicon-coerce]: ../../nomicon/coercions.html\n\npub trait Unsize<T: ?Sized> {\n\n // Empty.\n\n}\n", "file_path": "src/unsize.rs", "rank": 0, "score": 42679.2344976347 }, { "content": "fn main() {\n\n let arena = Arena::new(1024, 1024).unwrap();\n\n let a = arena.new_box(Choice::A).unwrap();\n\n let b = arena.new_box(Choice::B).unwrap();\n\n println!(\"{:?}\", a);\n\n println!(\"{:?}\", b);\n\n}\n", "file_path": "examples/enum.rs", "rank": 1, "score": 31786.785855157133 }, { "content": "fn main() {\n\n let a = Arena::new(1024, 1024).unwrap();\n\n let list = a.new_box(List::Nil).unwrap();\n\n let list = a.new_box(List::Cons(1, list)).unwrap();\n\n let list = a.new_box(List::Cons(2, list)).unwrap();\n\n let list = a.new_box(List::Cons(3, list)).unwrap();\n\n println!(\"{:?}\", list);\n\n}\n", "file_path": "examples/list.rs", "rank": 2, "score": 31786.785855157133 }, { "content": "/// Trait that indicates that this is a pointer or a wrapper for one,\n\n/// where unsizing can be performed on the pointee.\n\n///\n\n/// See the [DST coercion RfC][dst-coerce] and [the nomicon entry on coercion][nomicon-coerce]\n\n/// for more details.\n\n///\n\n/// For builtin pointer types, pointers to `T` will coerce to pointers to `U` if `T: Unsize<U>`\n\n/// by converting from a thin pointer to a fat pointer.\n\n///\n\n/// For custom types, the coercion here works by coercing `Foo<T>` to `Foo<U>`\n\n/// provided an impl of `CoerceUnsized<Foo<U>> for Foo<T>` exists.\n\n/// Such an impl can only be written if `Foo<T>` has only a single non-phantomdata\n\n/// field involving `T`. If the type of that field is `Bar<T>`, an implementation\n\n/// of `CoerceUnsized<Bar<U>> for Bar<T>` must exist. The coercion will work by\n\n/// coercing the `Bar<T>` field into `Bar<U>` and filling in the rest of the fields\n\n/// from `Foo<T>` to create a `Foo<U>`. This will effectively drill down to a pointer\n\n/// field and coerce that.\n\n///\n\n/// Generally, for smart pointers you will implement\n\n/// `CoerceUnsized<Ptr<U>> for Ptr<T> where T: Unsize<U>, U: ?Sized`, with an\n\n/// optional `?Sized` bound on `T` itself. 
For wrapper types that directly embed `T`\n\n/// like `Cell<T>` and `RefCell<T>`, you\n\n/// can directly implement `CoerceUnsized<Wrap<U>> for Wrap<T> where T: CoerceUnsized<U>`.\n\n/// This will let coercions of types like `Cell<Box<T>>` work.\n\n///\n\n/// [`Unsize`][unsize] is used to mark types which can be coerced to DSTs if behind\n\n/// pointers. It is implemented automatically by the compiler.\n\n///\n\n/// [dst-coerce]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md\n\n/// [unsize]: ../marker/trait.Unsize.html\n\n/// [nomicon-coerce]: ../../nomicon/coercions.html\n\npub trait CoerceUnsized<T> {\n\n // Empty.\n\n}\n\n\n\n// &mut T -> &mut U\n\nimpl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {}\n\n// &mut T -> &U\n\nimpl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {}\n\n// &mut T -> *mut U\n\nimpl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {}\n\n// &mut T -> *const U\n\nimpl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {}\n\n\n\n// &T -> &U\n\nimpl<'a, 'b: 'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<&'a U> for &'b T {}\n\n// &T -> *const U\n\nimpl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for &'a T {}\n\n\n\n// *mut T -> *mut U\n\nimpl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*mut U> for *mut T {}\n\n// *mut T -> *const U\n\nimpl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {}\n\n\n\n// *const T -> *const U\n\nimpl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {}\n", "file_path": "src/coerce_unsized.rs", "rank": 3, "score": 24815.428906710607 }, { "content": "impl<'a, I: Iterator + ?Sized> Iterator for ArenaBox<'a, I> {\n\n type Item = I::Item;\n\n fn next(&mut self) -> Option<I::Item> {\n\n (**self).next()\n\n }\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n (**self).size_hint()\n\n }\n\n fn nth(&mut self, n: usize) -> Option<I::Item> {\n\n (**self).nth(n)\n\n }\n\n}\n\n\n\nimpl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for ArenaBox<'a, I> {\n\n fn next_back(&mut self) -> Option<I::Item> {\n\n (**self).next_back()\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> borrow::Borrow<T> for ArenaBox<'a, T> {\n", "file_path": "src/arena_box.rs", "rank": 13, "score": 18179.36413125112 }, { "content": " fmt::Pointer::fmt(&ptr, f)\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> Deref for ArenaBox<'a, T> {\n\n type Target = T;\n\n\n\n #[warn(unconditional_recursion)]\n\n fn deref(&self) -> &T {\n\n unsafe { self.value.as_ref() }\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> DerefMut for ArenaBox<'a, T> {\n\n #[warn(unconditional_recursion)]\n\n fn deref_mut(&mut self) -> &mut T {\n\n unsafe { self.value.as_mut() }\n\n }\n\n}\n\n\n", "file_path": "src/arena_box.rs", "rank": 14, "score": 18178.674927881733 }, { "content": "impl<'a, T: ?Sized + Ord> Ord for ArenaBox<'a, T> {\n\n #[inline]\n\n fn cmp(&self, other: &ArenaBox<T>) -> Ordering {\n\n Ord::cmp(&**self, &**other)\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized + Eq> Eq for ArenaBox<'a, T> {}\n\n\n\nimpl<'a, T: ?Sized + Hash> Hash for ArenaBox<'a, T> {\n\n fn hash<H: hash::Hasher>(&self, state: &mut H) {\n\n (**self).hash(state);\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized + Hasher> Hasher for ArenaBox<'a, T> {\n\n fn finish(&self) -> u64 {\n\n (**self).finish()\n\n }\n\n fn write(&mut self, bytes: &[u8]) {\n", "file_path": "src/arena_box.rs", "rank": 15, "score": 18178.198773509004 }, { "content": "// unsafe {\n\n// let 
raw: *mut Any = ArenaBox::into_raw(self);\n\n// Ok(ArenaBox::from_raw(raw as *mut T))\n\n// }\n\n// } else {\n\n// Err(self)\n\n// }\n\n// }\n\n// }\n\n\n\n//TODO: fix this\n\n// impl<'a> ArenaBox<'a, Any + Send> {\n\n// #[inline]\n\n// /// Attempt to downcast the box to a concrete type.\n\n// ///\n\n// /// # Examples\n\n// ///\n\n// /// ```\n\n// /// # use memory_arena::*;\n\n// /// use core::any::Any;\n", "file_path": "src/arena_box.rs", "rank": 16, "score": 18178.132408338235 }, { "content": " fn borrow(&self) -> &T {\n\n &**self\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> borrow::BorrowMut<T> for ArenaBox<'a, T> {\n\n fn borrow_mut(&mut self) -> &mut T {\n\n &mut **self\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> AsRef<T> for ArenaBox<'a, T> {\n\n fn as_ref(&self) -> &T {\n\n &**self\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> AsMut<T> for ArenaBox<'a, T> {\n\n fn as_mut(&mut self) -> &mut T {\n\n &mut **self\n", "file_path": "src/arena_box.rs", "rank": 17, "score": 18177.485709014825 }, { "content": "//! for a `Cons`. By introducing an `ArenaBox`, which has a defined size, we\n\n//! know how big `Cons` needs to be.\n\n\n\nuse core::borrow;\n\nuse core::cmp::Ordering;\n\nuse core::fmt;\n\nuse core::hash::{self, Hash, Hasher};\n\nuse core::ops::{Deref, DerefMut};\n\nuse core::marker::PhantomData;\n\n\n\nuse unique::Unique;\n\nuse Arena;\n\n\n\n/// A pointer type for a value that lives in an `Arena`.\n\n///\n\n/// See the [module-level documentation](../arena_box/) for more.\n\npub struct ArenaBox<'a, T: ?Sized> {\n\n value: Unique<T>,\n\n phantom: PhantomData<&'a Arena>,\n\n}\n", "file_path": "src/arena_box.rs", "rank": 18, "score": 18177.027763903585 }, { "content": "impl<'a, T: ?Sized> Drop for ArenaBox<'a, T> {\n\n fn drop(&mut self) {\n\n unsafe { ::core::ptr::drop_in_place(self.value.as_ptr()) }\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized + PartialEq> PartialEq for ArenaBox<'a, T> {\n\n #[inline]\n\n fn eq(&self, other: &ArenaBox<T>) -> bool {\n\n PartialEq::eq(&**self, &**other)\n\n }\n\n #[inline]\n\n fn ne(&self, other: &ArenaBox<T>) -> bool {\n\n PartialEq::ne(&**self, &**other)\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized + PartialOrd> PartialOrd for ArenaBox<'a, T> {\n\n #[inline]\n\n fn partial_cmp(&self, other: &ArenaBox<T>) -> Option<Ordering> {\n", "file_path": "src/arena_box.rs", "rank": 19, "score": 18176.865692487252 }, { "content": " ///\n\n /// Note: this is an associated function, which means that you have\n\n /// to call it as `ArenaBox::into_raw(b)` instead of `b.into_raw()`. 
This\n\n /// is so that there is no conflict with a method on the inner type.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// # use memory_arena::*;\n\n /// let a = Arena::new(1024, 1024).unwrap();\n\n /// let x = a.new_box(5).unwrap();\n\n /// let ptr = ArenaBox::into_raw(x);\n\n /// ```\n\n pub fn into_raw(b: ArenaBox<T>) -> *mut T {\n\n let p = b.value.as_ptr();\n\n ::core::mem::forget(b);\n\n p\n\n }\n\n}\n\n\n", "file_path": "src/arena_box.rs", "rank": 20, "score": 18176.484293670263 }, { "content": "// }\n\n// }\n\n\n\nimpl<'a, T: fmt::Display + ?Sized> fmt::Display for ArenaBox<'a, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Display::fmt(&**self, f)\n\n }\n\n}\n\n\n\nimpl<'a, T: fmt::Debug + ?Sized> fmt::Debug for ArenaBox<'a, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Debug::fmt(&**self, f)\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> fmt::Pointer for ArenaBox<'a, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n // It's not possible to extract the inner Uniq directly from the ArenaBox,\n\n // instead we cast it to a *const which aliases the Unique\n\n let ptr: *const T = &**self;\n", "file_path": "src/arena_box.rs", "rank": 21, "score": 18175.97413553368 }, { "content": " /// # use memory_arena::*;\n\n /// let a = Arena::new(1024, 1024).unwrap();\n\n /// let x = a.new_box(5).unwrap();\n\n /// let ptr = ArenaBox::into_raw(x);\n\n /// let x = unsafe { ArenaBox::from_raw(ptr) };\n\n /// ```\n\n #[inline]\n\n pub unsafe fn from_raw(raw: *mut T) -> Self {\n\n ArenaBox {\n\n value: Unique::new_unchecked(raw),\n\n phantom: PhantomData,\n\n }\n\n }\n\n\n\n /// Consumes the `ArenaBox`, returning the wrapped raw pointer.\n\n ///\n\n /// After calling this function, the caller is responsible for the\n\n /// memory previously managed by the `ArenaBox`. In particular, the\n\n /// caller should properly destroy `T`, by calling\n\n /// `std::ptr::drop_in_place` on the pointer.\n", "file_path": "src/arena_box.rs", "rank": 22, "score": 18175.685728494627 }, { "content": " (**self).write(bytes)\n\n }\n\n fn write_u8(&mut self, i: u8) {\n\n (**self).write_u8(i)\n\n }\n\n fn write_u16(&mut self, i: u16) {\n\n (**self).write_u16(i)\n\n }\n\n fn write_u32(&mut self, i: u32) {\n\n (**self).write_u32(i)\n\n }\n\n fn write_u64(&mut self, i: u64) {\n\n (**self).write_u64(i)\n\n }\n\n fn write_usize(&mut self, i: usize) {\n\n (**self).write_usize(i)\n\n }\n\n fn write_i8(&mut self, i: i8) {\n\n (**self).write_i8(i)\n\n }\n", "file_path": "src/arena_box.rs", "rank": 23, "score": 18175.53975872017 }, { "content": "\n\nimpl<'a, T: ?Sized> ArenaBox<'a, T> {\n\n /// Constructs an arena box from a raw pointer.\n\n ///\n\n /// After calling this function, the raw pointer is owned by the\n\n /// resulting `ArenaBox`. Specifically, the `ArenaBox` destructor will call\n\n /// the destructor of `T`. Since the\n\n /// way `ArenaBox` allocates and releases memory is unspecified, the\n\n /// only valid pointer to pass to this function is the one taken\n\n /// from another `ArenaBox` via the [`ArenaBox::into_raw`] function.\n\n ///\n\n /// This function is unsafe because improper use may lead to\n\n /// memory problems. 
For example, a double-free may occur if the\n\n /// function is called twice on the same raw pointer.\n\n ///\n\n /// [`ArenaBox::into_raw`]: struct.ArenaBox.html#method.into_raw\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n", "file_path": "src/arena_box.rs", "rank": 24, "score": 18174.541074771954 }, { "content": "// ///\n\n// /// fn print_if_string<'a>(value: ArenaBox<'a, Any + Send>) {\n\n// /// if let Ok(string) = value.downcast::<String>() {\n\n// /// println!(\"String ({}): {}\", string.len(), string);\n\n// /// }\n\n// /// }\n\n// ///\n\n// /// fn main() {\n\n// /// let my_string = \"Hello World\".to_string();\n\n// /// let a = Arena::new(1024, 1024).unwrap();\n\n// /// print_if_string(a.new_box(my_string).unwrap());\n\n// /// print_if_string(a.new_box(0i8).unwrap());\n\n// /// }\n\n// /// ```\n\n// pub fn downcast<T: Any>(self) -> Result<ArenaBox<'a, T>, ArenaBox<'a, Any + Send>> {\n\n// let s: ArenaBox<'a, Any + 'static> = unsafe { transmute(self) };\n\n// <ArenaBox<'a, Any>>::downcast(s).map_err(|s| unsafe {\n\n// // reapply the Send marker\n\n// ArenaBox::from_raw(ArenaBox::into_raw(s) as *mut (Any + Send))\n\n// })\n", "file_path": "src/arena_box.rs", "rank": 25, "score": 18174.26827030726 }, { "content": "// ///\n\n// /// ```\n\n// /// # use memory_arena::*;\n\n// /// use core::any::Any;\n\n// ///\n\n// /// fn print_if_string(value: ArenaBox<Any>) {\n\n// /// if let Ok(string) = value.downcast::<String>() {\n\n// /// println!(\"String ({}): {}\", string.len(), string);\n\n// /// }\n\n// /// }\n\n// ///\n\n// /// fn main() {\n\n// /// let a = Arena::new(1024, 1024).unwrap();\n\n// /// let my_string = \"Hello World\".to_string();\n\n// /// print_if_string(a.new_box(my_string).unwrap());\n\n// /// print_if_string(a.new_box(0i8).unwrap());\n\n// /// }\n\n// /// ```\n\n// pub fn downcast<T: Any>(self) -> Result<ArenaBox<'a, T>, ArenaBox<'a, Any>> {\n\n// if self.is::<T>() {\n", "file_path": "src/arena_box.rs", "rank": 26, "score": 18173.185012178015 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[allow(unused_imports)]\n\n use super::*;\n\n\n\n #[test]\n\n fn ln112() {\n\n let a = Arena::new(1024, 1024).unwrap();\n\n let x = a.new_box(5).unwrap();\n\n let ptr = ArenaBox::into_raw(x);\n\n let _ = unsafe { ArenaBox::from_raw(ptr) };\n\n }\n\n\n\n #[test]\n\n fn ln140() {\n\n let a = Arena::new(1024, 1024).unwrap();\n\n let x = a.new_box(5).unwrap();\n\n let _ = ArenaBox::into_raw(x);\n\n }\n\n}", "file_path": "src/arena_box.rs", "rank": 27, "score": 18173.126995476527 }, { "content": " fn write_i16(&mut self, i: i16) {\n\n (**self).write_i16(i)\n\n }\n\n fn write_i32(&mut self, i: i32) {\n\n (**self).write_i32(i)\n\n }\n\n fn write_i64(&mut self, i: i64) {\n\n (**self).write_i64(i)\n\n }\n\n fn write_isize(&mut self, i: isize) {\n\n (**self).write_isize(i)\n\n }\n\n}\n\n\n\n//TODO: fix this\n\n// impl<'a> ArenaBox<'a, Any> {\n\n// #[inline]\n\n// /// Attempt to downcast the box to a concrete type.\n\n// ///\n\n// /// # Examples\n", "file_path": "src/arena_box.rs", "rank": 28, "score": 18173.067637736942 }, { "content": "//! ```\n\n//! # use memory_arena::*;\n\n//! let a = Arena::new(1024, 1024).unwrap();\n\n//! let x = a.new_box(5).unwrap();\n\n//! ```\n\n//!\n\n//! Creating a recursive data structure:\n\n//!\n\n//! ```\n\n//! # use memory_arena::*;\n\n//! #[derive(Debug)]\n\n//! enum List<'a, T> {\n\n//! Nil,\n\n//! Cons(T, ArenaBox<'a, List<'a, T>>),\n\n//! }\n\n//!\n\n//! fn main() {\n\n//! let a = Arena::new(1024, 1024).unwrap();\n\n//! 
let list = a.new_box(List::Nil).unwrap();\n\n//! let list = a.new_box(List::Cons(1, list)).unwrap();\n", "file_path": "src/arena_box.rs", "rank": 29, "score": 18171.748654868366 }, { "content": "//! let list = a.new_box(List::Cons(2, list)).unwrap();\n\n//! let list = a.new_box(List::Cons(3, list)).unwrap();\n\n//! println!(\"{:?}\", list);\n\n//! }\n\n//! ```\n\n//!\n\n//! This will print `Cons(3, Cons(2, Cons(1, Nil)))`.\n\n//!\n\n//! Recursive structures must be boxed, because if the definition of `Cons`\n\n//! looked like this:\n\n//!\n\n//! ```compile_fail,E0072\n\n//! # use memory_arena::*;\n\n//! # enum List<T> {\n\n//! Cons(T, List<T>),\n\n//! # }\n\n//! ```\n\n//!\n\n//! It wouldn't work. This is because the size of a `List` depends on how many\n\n//! elements are in the list, and so we don't know how much memory to allocate\n", "file_path": "src/arena_box.rs", "rank": 30, "score": 18171.01604342909 }, { "content": "\n\n//! A pointer type for a value that lives in an `Arena`.\n\n//!\n\n//! `ArenaBox<T>`, casually referred to as an 'arena box', provides a safe\n\n//! interface around allocation from a memory arena allocation. Arena boxes\n\n//! provide ownership for the allocated value, and drop their contained value\n\n//! when they go out of scope.\n\n//!\n\n//! Unlike the `Box<T>` type from the standard library, out-of-scope arena boxes\n\n//! do not get free'd through the Rust allocator. Instead, they continue to take\n\n//! up space in their memory arena until the memory arena itself goes out of\n\n//! scope and is dropped. While this can temporarily result in higher memory\n\n//! usage, it can greatly reduce the performance impact of allocating and\n\n//! freeing, since allocations are a just a (checked) pointer bump, and frees\n\n//! cost nothing.\n\n//!\n\n//! # Examples\n\n//!\n\n//! Creating an arena box:\n\n//!\n", "file_path": "src/arena_box.rs", "rank": 31, "score": 18169.695370803573 }, { "content": " PartialOrd::partial_cmp(&**self, &**other)\n\n }\n\n #[inline]\n\n fn lt(&self, other: &ArenaBox<T>) -> bool {\n\n PartialOrd::lt(&**self, &**other)\n\n }\n\n #[inline]\n\n fn le(&self, other: &ArenaBox<T>) -> bool {\n\n PartialOrd::le(&**self, &**other)\n\n }\n\n #[inline]\n\n fn ge(&self, other: &ArenaBox<T>) -> bool {\n\n PartialOrd::ge(&**self, &**other)\n\n }\n\n #[inline]\n\n fn gt(&self, other: &ArenaBox<T>) -> bool {\n\n PartialOrd::gt(&**self, &**other)\n\n }\n\n}\n\n\n", "file_path": "src/arena_box.rs", "rank": 32, "score": 18169.538717796328 }, { "content": "// See the COPYRIGHT file at the top-level directory of this distribution.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n//\n\n// This file has been modified from the original version in the\n\n// Rust core and/or standard library. The original copyright is below:\n\n//\n\n// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT\n\n// file at the top-level directory of this distribution and at\n\n// http://rust-lang.org/COPYRIGHT.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n", "file_path": "src/arena_box.rs", "rank": 33, "score": 18166.838756288773 }, { "content": "# Memory Arena\n\n\n\n## License\n\n\n\nThis software and associated documentation files (the \"Software\") is licensed\n\nunder either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or\n\n http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion into the Software by you, as defined in the Apache-2.0 license,\n", "file_path": "Readme.md", "rank": 34, "score": 10449.286439069843 }, { "content": " if size == 0 {\n\n return Err(AllocError::ZeroSizeAlloc);\n\n }\n\n let mem = _aligned_malloc(size, alignment);\n\n if mem.is_null() {\n\n let mut errno: c_int = 0;\n\n _get_errno(&mut errno);\n\n Err(AllocError::Errno(errno))\n\n } else {\n\n Ok(mem as *mut u8)\n\n }\n\n}\n\n\n\n#[cfg(not(windows))]\n\npub(crate) unsafe fn free(ptr: *mut u8) {\n\n c_free(ptr as *mut c_void);\n\n}\n\n\n\n#[cfg(windows)]\n\npub(crate) unsafe fn free(ptr: *mut u8) {\n", "file_path": "src/alloc.rs", "rank": 35, "score": 17.174579853910338 }, { "content": "/// unenforced by the type system; the abstraction using the\n\n/// `Unique` must enforce it.\n\nunsafe impl<T: Send + ?Sized> Send for Unique<T> {}\n\n\n\n/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they\n\n/// reference is unaliased. Note that this aliasing invariant is\n\n/// unenforced by the type system; the abstraction using the\n\n/// `Unique` must enforce it.\n\nunsafe impl<T: Sync + ?Sized> Sync for Unique<T> {}\n\n\n\n// impl<T: Sized> Unique<T> {\n\n// /// Creates a new `Unique` that is dangling, but well-aligned.\n\n// ///\n\n// /// This is useful for initializing types which lazily allocate, like\n\n// /// `Vec::new` does.\n\n// pub fn empty() -> Self {\n\n// unsafe {\n\n// let ptr = mem::align_of::<T>() as *mut T;\n\n// Unique {\n\n// pointer: NonZero::new_unchecked(ptr),\n", "file_path": "src/unique.rs", "rank": 36, "score": 14.339190638264022 }, { "content": "mod tests {\n\n #[allow(unused_imports)]\n\n use super::*;\n\n #[test]\n\n fn aligned_alloc_and_free() {\n\n unsafe {\n\n let alignment = 1024;\n\n let ptr = aligned_alloc(alignment, ::core::mem::size_of::<isize>()).unwrap();\n\n let iptr = ptr as *mut isize;\n\n *iptr = 0;\n\n free(ptr);\n\n }\n\n }\n\n}\n", "file_path": "src/alloc.rs", "rank": 37, "score": 12.52042485137375 }, { "content": "\n\n /// Mutably dereferences the content.\n\n ///\n\n /// The resulting lifetime is bound to self so this behaves \"as if\"\n\n /// it were actually an instance of T that is getting borrowed. 
If a longer\n\n /// (unbound) lifetime is needed, use `&mut *my_ptr.ptr()`.\n\n pub unsafe fn as_mut(&mut self) -> &mut T {\n\n &mut *self.as_ptr()\n\n }\n\n}\n\n\n\nimpl<T: ?Sized> Clone for Unique<T> {\n\n fn clone(&self) -> Self {\n\n *self\n\n }\n\n}\n\n\n\nimpl<T: ?Sized> Copy for Unique<T> {}\n\n\n\nimpl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T>\n", "file_path": "src/unique.rs", "rank": 38, "score": 12.119127875952248 }, { "content": "// See the COPYRIGHT file at the top-level directory of this distribution.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse core::fmt;\n\nuse core::result;\n\n#[allow(unused_imports)]\n\nuse libc::{c_int, c_void, size_t};\n\n\n\n\n\n#[cfg(not(windows))]\n\npub(crate) use libc::posix_memalign;\n\n#[cfg(not(windows))]\n\npub(crate) use libc::free as c_free;\n\n#[cfg(windows)]\n\nextern {\n\n fn _aligned_malloc(size: size_t, alignment: size_t) -> *mut c_void;\n\n fn _get_errno(p: *mut c_int) -> c_int;\n\n fn _aligned_free(p: *mut c_void);\n\n}\n\n\n", "file_path": "src/alloc.rs", "rank": 39, "score": 10.633500109208946 }, { "content": "\n\nuse coerce_unsized::CoerceUnsized;\n\n\n\n/// Unsafe trait to indicate what types are usable with the NonZero struct\n\npub unsafe trait Zeroable {\n\n /// Whether this value is zero\n\n fn is_zero(&self) -> bool;\n\n}\n\n\n\nmacro_rules! impl_zeroable_for_pointer_types {\n\n ( $( $Ptr: ty )+ ) => {\n\n $(\n\n /// For fat pointers to be considered \"zero\", only the \"data\" part needs to be null.\n\n unsafe impl<T: ?Sized> Zeroable for $Ptr {\n\n #[inline]\n\n fn is_zero(&self) -> bool {\n\n (*self).is_null()\n\n }\n\n }\n\n )+\n", "file_path": "src/nonzero.rs", "rank": 40, "score": 9.479748891183718 }, { "content": " _aligned_free(ptr as *mut c_void);\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum AllocError {\n\n ZeroSizeAlloc,\n\n Errno(c_int),\n\n}\n\n\n\nimpl fmt::Display for AllocError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n AllocError::ZeroSizeAlloc => write!(f, \"zero sized allocation is not supported\"),\n\n\n\n AllocError::Errno(errno) => write!(f, \"system allocation error number: {}\", errno),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/alloc.rs", "rank": 41, "score": 8.968917471563376 }, { "content": "// _marker: PhantomData,\n\n// }\n\n// }\n\n// }\n\n// }\n\n\n\nimpl<T: ?Sized> Unique<T> {\n\n /// Creates a new `Unique`.\n\n ///\n\n /// # Safety\n\n ///\n\n /// `ptr` must be non-null.\n\n pub unsafe fn new_unchecked(ptr: *mut T) -> Self {\n\n Unique {\n\n pointer: NonZero::new_unchecked(ptr),\n\n _marker: PhantomData,\n\n }\n\n }\n\n\n\n /// Creates a new `Unique` if `ptr` is non-null.\n", "file_path": "src/unique.rs", "rank": 42, "score": 8.312430838563404 }, { "content": " // #[inline]\n\n // pub fn new(inner: T) -> Option<Self> {\n\n // if inner.is_zero() {\n\n // None\n\n // } else {\n\n // Some(NonZero(inner))\n\n // }\n\n // }\n\n\n\n /// Gets the inner value.\n\n pub fn get(self) -> T {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<T: Zeroable + CoerceUnsized<U>, U: Zeroable> CoerceUnsized<NonZero<U>> for NonZero<T> {}\n\n\n\nimpl<'a, T: ?Sized> From<&'a mut T> for NonZero<*mut T> {\n\n fn from(reference: &'a mut T) -> Self {\n\n 
NonZero(reference)\n", "file_path": "src/nonzero.rs", "rank": 43, "score": 8.113892613377818 }, { "content": " // pub fn new(ptr: *mut T) -> Option<Self> {\n\n // NonZero::new(ptr as *const T).map(|nz| Unique {\n\n // pointer: nz,\n\n // _marker: PhantomData,\n\n // })\n\n // }\n\n\n\n /// Acquires the underlying `*mut` pointer.\n\n pub fn as_ptr(self) -> *mut T {\n\n self.pointer.get() as *mut T\n\n }\n\n\n\n /// Dereferences the content.\n\n ///\n\n /// The resulting lifetime is bound to self so this behaves \"as if\"\n\n /// it were actually an instance of T that is getting borrowed. If a longer\n\n /// (unbound) lifetime is needed, use `&*my_ptr.ptr()`.\n\n pub unsafe fn as_ref(&self) -> &T {\n\n &*self.as_ptr()\n\n }\n", "file_path": "src/unique.rs", "rank": 44, "score": 7.897267011278791 }, { "content": "\n\nimpl_zeroable_for_integer_types! {\n\n usize u8 u16 u32 u64\n\n isize i8 i16 i32 i64\n\n}\n\n\n\n/// A wrapper type for raw pointers and integers that will never be\n\n/// NULL or 0 that might allow certain optimizations.\n\n#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)]\n\npub struct NonZero<T: Zeroable>(T);\n\n\n\nimpl<T: Zeroable> NonZero<T> {\n\n /// Creates an instance of NonZero with the provided value.\n\n /// You must indeed ensure that the value is actually \"non-zero\".\n\n #[inline]\n\n pub unsafe fn new_unchecked(inner: T) -> Self {\n\n NonZero(inner)\n\n }\n\n\n\n /// Creates an instance of NonZero with the provided value.\n", "file_path": "src/nonzero.rs", "rank": 45, "score": 7.631533007843753 }, { "content": " }\n\n}\n\n\n\nimpl<'a, T: ?Sized> From<&'a mut T> for NonZero<*const T> {\n\n fn from(reference: &'a mut T) -> Self {\n\n let ptr: *mut T = reference;\n\n NonZero(ptr)\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> From<&'a T> for NonZero<*const T> {\n\n fn from(reference: &'a T) -> Self {\n\n NonZero(reference)\n\n }\n\n}\n", "file_path": "src/nonzero.rs", "rank": 46, "score": 7.314769838689757 }, { "content": "where\n\n T: Unsize<U>,\n\n{\n\n}\n\n\n\nimpl<T: ?Sized> fmt::Pointer for Unique<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Pointer::fmt(&self.as_ptr(), f)\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> From<&'a mut T> for Unique<T> {\n\n fn from(reference: &'a mut T) -> Self {\n\n Unique {\n\n pointer: NonZero::from(reference),\n\n _marker: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T: ?Sized> From<&'a T> for Unique<T> {\n\n fn from(reference: &'a T) -> Self {\n\n Unique {\n\n pointer: NonZero::from(reference),\n\n _marker: PhantomData,\n\n }\n\n }\n\n}\n", "file_path": "src/unique.rs", "rank": 47, "score": 6.987445734036493 }, { "content": " }\n\n}\n\n\n\nmacro_rules! impl_zeroable_for_integer_types {\n\n ( $( $Int: ty )+ ) => {\n\n $(\n\n unsafe impl Zeroable for $Int {\n\n #[inline]\n\n fn is_zero(&self) -> bool {\n\n *self == 0\n\n }\n\n }\n\n )+\n\n }\n\n}\n\n\n\nimpl_zeroable_for_pointer_types! {\n\n *const T\n\n *mut T\n\n}\n", "file_path": "src/nonzero.rs", "rank": 48, "score": 6.970304096542305 }, { "content": "// See the COPYRIGHT file at the top-level directory of this distribution.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n#![no_std]\n\n\n\nextern crate libc;\n\n\n\nmod coerce_unsized;\n\nmod nonzero;\n\nmod unsize;\n\nmod unique;\n\nmod alloc;\n\nmod arena_box;\n\nmod arena;\n\n\n\npub use arena::Arena;\n\npub use arena_box::ArenaBox;\n", "file_path": "src/lib.rs", "rank": 49, "score": 6.53973270664463 }, { "content": "/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer\n\n/// is never dereferenced. This is so that enums may use this forbidden value\n\n/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.\n\n/// However the pointer may still dangle if it isn't dereferenced.\n\n///\n\n/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct\n\n/// for any type which upholds Unique's aliasing requirements.\n\n#[allow(missing_debug_implementations)]\n\npub struct Unique<T: ?Sized> {\n\n pointer: NonZero<*const T>,\n\n // NOTE: this marker has no consequences for variance, but is necessary\n\n // for dropck to understand that we logically own a `T`.\n\n //\n\n // For details, see:\n\n // https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data\n\n _marker: PhantomData<T>,\n\n}\n\n\n\n/// `Unique` pointers are `Send` if `T` is `Send` because the data they\n\n/// reference is unaliased. Note that this aliasing invariant is\n", "file_path": "src/unique.rs", "rank": 50, "score": 5.236297220413448 }, { "content": "extern crate memory_arena;\n\nuse memory_arena::*;\n\n\n\n#[derive(Debug)]\n", "file_path": "examples/list.rs", "rank": 51, "score": 5.113257422516758 }, { "content": "extern crate memory_arena;\n\nuse memory_arena::*;\n\n\n\n#[derive(Debug)]\n", "file_path": "examples/enum.rs", "rank": 52, "score": 5.113257422516758 }, { "content": "\n\nuse nonzero::NonZero;\n\nuse core::marker::PhantomData;\n\nuse core::fmt;\n\nuse coerce_unsized::CoerceUnsized;\n\nuse unsize::Unsize;\n\n\n\n/// A wrapper around a raw non-null `*mut T` that indicates that the possessor\n\n/// of this wrapper owns the referent. Useful for building abstractions like\n\n/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.\n\n///\n\n/// Unlike `*mut T`, `Unique<T>` behaves \"as if\" it were an instance of `T`.\n\n/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies\n\n/// the kind of strong aliasing guarantees an instance of `T` can expect:\n\n/// the referent of the pointer should not be modified without a unique path to\n\n/// its owning Unique.\n\n///\n\n/// If you're uncertain of whether it's correct to use `Unique` for your purposes,\n\n/// consider using `Shared`, which has weaker semantics.\n\n///\n", "file_path": "src/unique.rs", "rank": 53, "score": 4.3152598317153075 }, { "content": "///\n\n/// Generally, for smart pointers you will implement\n\n/// `CoerceUnsized<Ptr<U>> for Ptr<T> where T: Unsize<U>, U: ?Sized`, with an\n\n/// optional `?Sized` bound on `T` itself. For wrapper types that directly embed `T`\n\n/// like `Cell<T>` and `RefCell<T>`, you\n\n/// can directly implement `CoerceUnsized<Wrap<U>> for Wrap<T> where T: CoerceUnsized<U>`.\n\n/// This will let coercions of types like `Cell<Box<T>>` work.\n\n///\n\n/// [`Unsize`][unsize] is used to mark types which can be coerced to DSTs if behind\n\n/// pointers. 
It is implemented automatically by the compiler.\n\n///\n\n/// [dst-coerce]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md\n\n/// [unsize]: ../marker/trait.Unsize.html\n\n/// [nomicon-coerce]: ../../nomicon/coercions.html\n", "file_path": "src/coerce_unsized.rs", "rank": 54, "score": 2.7072248742646234 }, { "content": "\n\n/// Types that can be \"unsized\" to a dynamically-sized type.\n\n///\n\n/// For example, the sized array type `[i8; 2]` implements `Unsize<[i8]>` and\n\n/// `Unsize<fmt::Debug>`.\n\n///\n\n/// All implementations of `Unsize` are provided automatically by the compiler.\n\n///\n\n/// `Unsize` is implemented for:\n\n///\n\n/// - `[T; N]` is `Unsize<[T]>`\n\n/// - `T` is `Unsize<Trait>` when `T: Trait`\n\n/// - `Foo<..., T, ...>` is `Unsize<Foo<..., U, ...>>` if:\n\n/// - `T: Unsize<U>`\n\n/// - Foo is a struct\n\n/// - Only the last field of `Foo` has a type involving `T`\n\n/// - `T` is not part of the type of any other fields\n\n/// - `Bar<T>: Unsize<Bar<U>>`, if the last field of `Foo` has type `Bar<T>`\n\n///\n\n/// `Unsize` is used along with [`ops::CoerceUnsized`][coerceunsized] to allow\n\n/// \"user-defined\" containers such as [`rc::Rc`][rc] to contain dynamically-sized\n\n/// types. See the [DST coercion RFC][RFC982] and [the nomicon entry on coercion][nomicon-coerce]\n\n/// for more details.\n\n///\n\n/// [coerceunsized]: ../ops/trait.CoerceUnsized.html\n\n/// [rc]: ../../std/rc/struct.Rc.html\n\n/// [RFC982]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md\n\n/// [nomicon-coerce]: ../../nomicon/coercions.html\n", "file_path": "src/unsize.rs", "rank": 55, "score": 2.671064589432289 }, { "content": "\n\nuse unsize::Unsize;\n\n\n\n/// Trait that indicates that this is a pointer or a wrapper for one,\n\n/// where unsizing can be performed on the pointee.\n\n///\n\n/// See the [DST coercion RfC][dst-coerce] and [the nomicon entry on coercion][nomicon-coerce]\n\n/// for more details.\n\n///\n\n/// For builtin pointer types, pointers to `T` will coerce to pointers to `U` if `T: Unsize<U>`\n\n/// by converting from a thin pointer to a fat pointer.\n\n///\n\n/// For custom types, the coercion here works by coercing `Foo<T>` to `Foo<U>`\n\n/// provided an impl of `CoerceUnsized<Foo<U>> for Foo<T>` exists.\n\n/// Such an impl can only be written if `Foo<T>` has only a single non-phantomdata\n\n/// field involving `T`. If the type of that field is `Bar<T>`, an implementation\n\n/// of `CoerceUnsized<Bar<U>> for Bar<T>` must exist. The coercion will work by\n\n/// coercing the `Bar<T>` field into `Bar<U>` and filling in the rest of the fields\n\n/// from `Foo<T>` to create a `Foo<U>`. This will effectively drill down to a pointer\n\n/// field and coerce that.\n", "file_path": "src/coerce_unsized.rs", "rank": 56, "score": 2.5594667362543975 } ]
Rust
src/bin/server.rs
srgsrg/machiavelli
ebbaae04e4867a123c652f5cad6292a176034a06
use std::process; use std::fs::File; use std::thread; use std::env; use rand::{ thread_rng, Rng }; use machiavelli::lib_server::*; const SAVE_EXTENSION: &str = ".sav"; fn get_port() -> usize { println!("Which port should I use?"); loop { match get_input() { Ok(s) => match s.trim().parse::<usize>() { Ok(p)=> return p, Err(_) => println!("Could not parse the input") } Err(_) => println!("Could not parse the input") } } } fn main() { let mut args = env::args(); args.next(); print!("\x1b[2J\x1b[1;1H"); println!("Machiavelli server\n"); let name_file_port_server = "Config/port_server.dat"; let port = match std::fs::read_to_string(name_file_port_server) { Ok(s) => match s.trim().parse::<usize>() { Ok(n) => n, Err(_) => get_port() } Err(_) => get_port() }; let load: bool; let load_from_command_line: bool; match args.next() { Some(s) => { load_from_command_line = true; match s.trim().parse::<u8>() { Ok(1) => { println!("Loading a previous game"); load = true; }, Ok(121) => { println!("Loading a previous game"); load = true; }, _ => load = false }; } None => { load_from_command_line = false; println!("Load a previous game? (y/n)"); load = match get_input().unwrap().trim() { "y" => true, _ => false }; } }; let mut config = Config { n_decks: 0, n_jokers: 0, n_cards_to_start: 0, custom_rule_jokers: false, n_players: 0 }; let mut savefile = "machiavelli_save".to_string(); if !load { match get_config_from_file(&"Config/config.dat") { Ok(conf) => { config = conf.0; savefile = conf.1; }, Err(_) => { println!("Could not read the config from the file!"); match get_config_and_savefile() { Ok(conf) => { config = conf.0; savefile = conf.1; }, Err(_) => { println!("Invalid input!"); process::exit(1); } } } }; } let mut starting_player: u8; let mut table = Table::new(); let mut deck: Sequence; let mut hands: Vec<Sequence>; let mut player: usize; let mut player_names = Vec::<String>::new(); let mut rng = thread_rng(); if load { let mut fname = String::new(); let mut bytes = Vec::<u8>::new(); if load_from_command_line { match args.next() { Some(s) => fname = s, None => fname = savefile.clone() + SAVE_EXTENSION }; } loop { if fname.len() == 0 { println!("Name of the save file (nothing for the default file):"); match stdin().read_line(&mut fname) { Ok(_) => (), Err(_) => { println!("Could not read the input"); continue; } }; } fname = fname.trim().to_string(); if fname.len() == 0 { fname = savefile.clone() + SAVE_EXTENSION; } let mut file: File; match File::open(fname.clone()) { Ok(f) => file = f, Err(_) => { println!("Could not open the file!"); fname.clear(); continue; } }; match file.read_to_end(&mut bytes) { Ok(_) => (), Err(_) => { println!("Could not read from the file!"); bytes.clear(); fname.clear(); continue; } }; bytes = encode::xor(&bytes, &fname.as_bytes()); match load_game(&bytes) { Ok(lg) => { config = lg.0; starting_player = lg.1; player = lg.2 as usize; table = lg.3; hands = lg.4; deck = lg.5; player_names = lg.6; }, Err(_) => { println!("Error loading the save file!"); bytes.clear(); fname.clear(); continue; } }; break; } } else { deck = Sequence::multi_deck(config.n_decks, config.n_jokers, &mut rng); starting_player = rng.gen_range(0..config.n_players); player = starting_player as usize; hands = vec![Sequence::new(); config.n_players as usize]; for i in 0..config.n_players { for _ in 0..config.n_cards_to_start { hands[i as usize].add_card(deck.draw_card().unwrap()); } } } let mut n_clients: u8 = 0; let mut client_threads = Vec::<thread::JoinHandle<(TcpStream, String, usize)>>::new(); let mut 
client_streams = Vec::<TcpStream>::new(); { let listener = TcpListener::bind(format!("0.0.0.0:{}", port)).unwrap(); let names_taken = Arc::new(Mutex::new(Vec::<String>::new())); println!("\nserver listening to port {}", port); for stream_res in listener.incoming() { match stream_res { Ok(stream) => { n_clients += 1; println!("New connection: {} (player {})", stream.peer_addr().unwrap(), n_clients); if load { let player_names_ = player_names.clone(); let arc = names_taken.clone(); client_threads.push(thread::spawn(move || { handle_client_load(stream, &player_names_, arc).unwrap() })); } else { client_threads.push(thread::spawn(move || {handle_client(stream).unwrap()})); } }, Err(e) => { println!("Error: {}", e); } } if n_clients == config.n_players { break; } } if load { for _i in 0..config.n_players { client_streams.push(TcpStream::connect(format!("0.0.0.0:{}", port)).unwrap()); } for thread in client_threads { let output = thread.join().unwrap(); client_streams[output.2] = output.0; } } else { for thread in client_threads { let output = thread.join().unwrap(); client_streams.push(output.0); player_names.push(output.1); } ensure_names_are_different(&mut player_names, &mut client_streams).unwrap(); } } let save_name = &(savefile.clone() + SAVE_EXTENSION); let backup_name = &(savefile.clone() + &"_bak" + SAVE_EXTENSION); let mut sort_modes: Vec<u8> = vec![0; config.n_players as usize]; let mut play_again = true; let mut previous_messages: Vec<Option<String>> = vec![None; config.n_players as usize]; while play_again { loop { if deck.number_cards() == 0 { send_message_all_players(&mut client_streams, &"\n\x1b[1mNo more cards in the deck—it's a draw!\x1b[0m\n"); break; } let mut bytes = game_to_bytes(starting_player as u8, player as u8, &table, &hands, &deck, &config, &player_names); bytes = encode::xor(&bytes, save_name.as_bytes()); match File::create(save_name) { Ok(mut f) => match f.write_all(&bytes) { Ok(_) => (), Err(_) => { println!("Could not write to the save file!"); } }, Err(_) => { println!("Could not create the save file!"); } }; match std::fs::copy(&save_name, &backup_name) { Ok(_) => (), Err(_) => println!("Could not create the backup file!") }; clear_and_send_message_all_players(&mut client_streams, &format!("\x1b[1m{}'s turn:{}", &player_names[player], &reset_style_string())); let mut string_n_cards = format!("\nNumber of cards ({} remaining in the deck):", deck.number_cards()); for i in 0..(config.n_players as usize) { string_n_cards += &format!("\n {}: {}", &player_names[i], &hands[i].number_cards()); } string_n_cards += "\n"; for i in 0..(config.n_players as usize) { loop { match send_message_to_client(&mut client_streams[i], &format!("{}{}", &string_n_cards, &situation_to_string(&table, &hands[i], &Sequence::new())) ) { Ok(_) => break, Err(_) => { send_message_all_players( &mut client_streams, &format!("{} seems to have disconnected... 
Waiting for them to reconnect.\n", &player_names[i]) ); println!("Lost connection with player {}", i + 1); wait_for_reconnection(&mut client_streams[i], &player_names[i], port).unwrap(); println!("Player {} is back", i + 1); send_message_all_players( &mut client_streams, &format!("{} is back!\n", &player_names[i]) ); } }; } if let Some(s) = &previous_messages[i] { send_message_to_client(&mut client_streams[i], &format!("\n{}", s)).unwrap(); }; } previous_messages[player] = match start_player_turn(&mut table, &mut hands, &mut deck, config.custom_rule_jokers, &player_names, player, config.n_players as usize, &mut client_streams, port, &mut sort_modes[player], &previous_messages) { Ok(o_m) => o_m, Err(err) => { println!("{}", err); process::exit(1); } }; if hands[player].number_cards() == 0 { send_message_all_players(&mut client_streams, &format!("\n\u{0007}\u{0007}\u{0007}\x1b[1m{} wins! Congratulations!\x1b[0m{}\n\n", player_names[player], &reset_style_string()) ); break; } player += 1; if player >= config.n_players as usize { player = 0; } } send_message_all_players(&mut client_streams, &"Play again? (‘y’ for yes)\n".to_string()); for stream in &mut client_streams { let reply = match get_string_from_client(stream) { Ok(s) => s, Err(_) => "y".to_string() }; if !is_yes(reply.trim()) { play_again = false; match stream.write(&mut [5]) { Ok(_) => {}, Err(_) => println!("Could not send the exit signal") }; } } if play_again { deck = Sequence::multi_deck(config.n_decks, config.n_jokers, &mut rng); hands = vec![Sequence::new(); config.n_players as usize]; table = Table::new(); for i in 0..config.n_players { for _ in 0..config.n_cards_to_start { hands[i as usize].add_card(deck.draw_card().unwrap()); } } starting_player += 1; if starting_player >= config.n_players { starting_player = 0; } player = starting_player as usize; } } for i in 0..config.n_players as usize { match client_streams[i].write(&mut [5]) { Ok(_) => {}, Err(_) => println!("Could not send the exit signal to client {}", i) }; } }
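A note on the save-file handling in the server source above: `game_to_bytes` serializes the whole game, the result is masked with `encode::xor` keyed on the save-file name before being written, and the load path applies the very same call before handing the bytes to `load_game`. That works because XOR-masking with a fixed key is its own inverse. Below is a minimal round-trip sketch, assuming only that `machiavelli::encode::xor(&[u8], &[u8]) -> Vec<u8>` behaves as it is used above; the concrete byte values are made up for illustration.

```rust
use machiavelli::encode;

fn main() {
    // Hypothetical serialized game state, plus the key the server actually
    // uses above: the save-file name itself.
    let game_bytes: Vec<u8> = vec![7, 13, 42, 0, 255];
    let save_name = "machiavelli_save.sav";

    // Masking on save...
    let masked = encode::xor(&game_bytes, save_name.as_bytes());
    // ...and the identical call on load restores the original bytes,
    // because XOR-ing with the same key twice is the identity.
    let restored = encode::xor(&masked, save_name.as_bytes());

    assert_eq!(game_bytes, restored);
}
```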
use std::process; use std::fs::File; use std::thread; use std::env; use rand::{ thread_rng, Rng }; use machiavelli::lib_server::*; const SAVE_EXTENSION: &str = ".sav"; fn get_port() -> usize { println!("Which port should I use?"); loop { match get_input() { Ok(s) => match s.trim().parse::<usize>() { Ok(p)=> return p, Err(_) => println!("Could not parse the input") } Err(_) => println!("Could not parse the input") } } } fn main() { let mut args = env::args(); args.next(); print!("\x1b[2J\x1b[1;1H"); println!("Machiavelli server\n"); let name_file_port_server = "Config/port_server.dat"; let port = match std::fs::read_to_string(name_file_port_server) { Ok(s) => match s.trim().parse::<usize>() { Ok(n) => n, Err(_) => get_port() } Err(_) => get_port() }; let load: bool; let load_from_command_line: bool; match args.next() { Some(s) => { load_from_command_line = true; match s.trim().parse::<u8>() { Ok(1) => { println!("Loading a previous game"); load = true; }, Ok(121) => { println!("Loading a previous game"); load = true; }, _ => load = false }; } None => { load_from_command_line = false; println!("Load a previous game? (y/n)"); load = match get_input().unwrap().trim() { "y" => true, _ => false }; } };
let mut savefile = "machiavelli_save".to_string(); if !load { match get_config_from_file(&"Config/config.dat") { Ok(conf) => { config = conf.0; savefile = conf.1; }, Err(_) => { println!("Could not read the config from the file!"); match get_config_and_savefile() { Ok(conf) => { config = conf.0; savefile = conf.1; }, Err(_) => { println!("Invalid input!"); process::exit(1); } } } }; } let mut starting_player: u8; let mut table = Table::new(); let mut deck: Sequence; let mut hands: Vec<Sequence>; let mut player: usize; let mut player_names = Vec::<String>::new(); let mut rng = thread_rng(); if load { let mut fname = String::new(); let mut bytes = Vec::<u8>::new(); if load_from_command_line { match args.next() { Some(s) => fname = s, None => fname = savefile.clone() + SAVE_EXTENSION }; } loop { if fname.len() == 0 { println!("Name of the save file (nothing for the default file):"); match stdin().read_line(&mut fname) { Ok(_) => (), Err(_) => { println!("Could not read the input"); continue; } }; } fname = fname.trim().to_string(); if fname.len() == 0 { fname = savefile.clone() + SAVE_EXTENSION; } let mut file: File; match File::open(fname.clone()) { Ok(f) => file = f, Err(_) => { println!("Could not open the file!"); fname.clear(); continue; } }; match file.read_to_end(&mut bytes) { Ok(_) => (), Err(_) => { println!("Could not read from the file!"); bytes.clear(); fname.clear(); continue; } }; bytes = encode::xor(&bytes, &fname.as_bytes()); match load_game(&bytes) { Ok(lg) => { config = lg.0; starting_player = lg.1; player = lg.2 as usize; table = lg.3; hands = lg.4; deck = lg.5; player_names = lg.6; }, Err(_) => { println!("Error loading the save file!"); bytes.clear(); fname.clear(); continue; } }; break; } } else { deck = Sequence::multi_deck(config.n_decks, config.n_jokers, &mut rng); starting_player = rng.gen_range(0..config.n_players); player = starting_player as usize; hands = vec![Sequence::new(); config.n_players as usize]; for i in 0..config.n_players { for _ in 0..config.n_cards_to_start { hands[i as usize].add_card(deck.draw_card().unwrap()); } } } let mut n_clients: u8 = 0; let mut client_threads = Vec::<thread::JoinHandle<(TcpStream, String, usize)>>::new(); let mut client_streams = Vec::<TcpStream>::new(); { let listener = TcpListener::bind(format!("0.0.0.0:{}", port)).unwrap(); let names_taken = Arc::new(Mutex::new(Vec::<String>::new())); println!("\nserver listening to port {}", port); for stream_res in listener.incoming() { match stream_res { Ok(stream) => { n_clients += 1; println!("New connection: {} (player {})", stream.peer_addr().unwrap(), n_clients); if load { let player_names_ = player_names.clone(); let arc = names_taken.clone(); client_threads.push(thread::spawn(move || { handle_client_load(stream, &player_names_, arc).unwrap() })); } else { client_threads.push(thread::spawn(move || {handle_client(stream).unwrap()})); } }, Err(e) => { println!("Error: {}", e); } } if n_clients == config.n_players { break; } } if load { for _i in 0..config.n_players { client_streams.push(TcpStream::connect(format!("0.0.0.0:{}", port)).unwrap()); } for thread in client_threads { let output = thread.join().unwrap(); client_streams[output.2] = output.0; } } else { for thread in client_threads { let output = thread.join().unwrap(); client_streams.push(output.0); player_names.push(output.1); } ensure_names_are_different(&mut player_names, &mut client_streams).unwrap(); } } let save_name = &(savefile.clone() + SAVE_EXTENSION); let backup_name = &(savefile.clone() + &"_bak" + 
SAVE_EXTENSION); let mut sort_modes: Vec<u8> = vec![0; config.n_players as usize]; let mut play_again = true; let mut previous_messages: Vec<Option<String>> = vec![None; config.n_players as usize]; while play_again { loop { if deck.number_cards() == 0 { send_message_all_players(&mut client_streams, &"\n\x1b[1mNo more cards in the deck—it's a draw!\x1b[0m\n"); break; } let mut bytes = game_to_bytes(starting_player as u8, player as u8, &table, &hands, &deck, &config, &player_names); bytes = encode::xor(&bytes, save_name.as_bytes()); match File::create(save_name) { Ok(mut f) => match f.write_all(&bytes) { Ok(_) => (), Err(_) => { println!("Could not write to the save file!"); } }, Err(_) => { println!("Could not create the save file!"); } }; match std::fs::copy(&save_name, &backup_name) { Ok(_) => (), Err(_) => println!("Could not create the backup file!") }; clear_and_send_message_all_players(&mut client_streams, &format!("\x1b[1m{}'s turn:{}", &player_names[player], &reset_style_string())); let mut string_n_cards = format!("\nNumber of cards ({} remaining in the deck):", deck.number_cards()); for i in 0..(config.n_players as usize) { string_n_cards += &format!("\n {}: {}", &player_names[i], &hands[i].number_cards()); } string_n_cards += "\n"; for i in 0..(config.n_players as usize) { loop { match send_message_to_client(&mut client_streams[i], &format!("{}{}", &string_n_cards, &situation_to_string(&table, &hands[i], &Sequence::new())) ) { Ok(_) => break, Err(_) => { send_message_all_players( &mut client_streams, &format!("{} seems to have disconnected... Waiting for them to reconnect.\n", &player_names[i]) ); println!("Lost connection with player {}", i + 1); wait_for_reconnection(&mut client_streams[i], &player_names[i], port).unwrap(); println!("Player {} is back", i + 1); send_message_all_players( &mut client_streams, &format!("{} is back!\n", &player_names[i]) ); } }; } if let Some(s) = &previous_messages[i] { send_message_to_client(&mut client_streams[i], &format!("\n{}", s)).unwrap(); }; } previous_messages[player] = match start_player_turn(&mut table, &mut hands, &mut deck, config.custom_rule_jokers, &player_names, player, config.n_players as usize, &mut client_streams, port, &mut sort_modes[player], &previous_messages) { Ok(o_m) => o_m, Err(err) => { println!("{}", err); process::exit(1); } }; if hands[player].number_cards() == 0 { send_message_all_players(&mut client_streams, &format!("\n\u{0007}\u{0007}\u{0007}\x1b[1m{} wins! Congratulations!\x1b[0m{}\n\n", player_names[player], &reset_style_string()) ); break; } player += 1; if player >= config.n_players as usize { player = 0; } } send_message_all_players(&mut client_streams, &"Play again? 
(‘y’ for yes)\n".to_string()); for stream in &mut client_streams { let reply = match get_string_from_client(stream) { Ok(s) => s, Err(_) => "y".to_string() }; if !is_yes(reply.trim()) { play_again = false; match stream.write(&mut [5]) { Ok(_) => {}, Err(_) => println!("Could not send the exit signal") }; } } if play_again { deck = Sequence::multi_deck(config.n_decks, config.n_jokers, &mut rng); hands = vec![Sequence::new(); config.n_players as usize]; table = Table::new(); for i in 0..config.n_players { for _ in 0..config.n_cards_to_start { hands[i as usize].add_card(deck.draw_card().unwrap()); } } starting_player += 1; if starting_player >= config.n_players { starting_player = 0; } player = starting_player as usize; } } for i in 0..config.n_players as usize { match client_streams[i].write(&mut [5]) { Ok(_) => {}, Err(_) => println!("Could not send the exit signal to client {}", i) }; } }
let mut config = Config { n_decks: 0, n_jokers: 0, n_cards_to_start: 0, custom_rule_jokers: false, n_players: 0 };
assignment_statement
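For context on the `Config` assignment that fills the gap above: when no saved game is loaded, the server reads these values from `Config/config.dat` via `get_config_from_file` (quoted among the retrieved snippets below). That parser takes the first whitespace-separated word of each of six lines: number of decks, number of jokers, starting hand size, custom-joker-rule flag (`1` to enable), number of players, and the save-file base name. A plausible file, with made-up values, could therefore look like this; everything after the first word of each line is ignored by the parser.

```
2 decks
4 jokers
13 cards dealt to each player at the start
0 (custom joker rule disabled)
3 players
machiavelli_save
```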
[ { "content": "/// wait for a player to reconnect\n\npub fn wait_for_reconnection(stream: &mut TcpStream, name: &str, port: usize) \n\n -> Result<(), StreamError>\n\n{\n\n\n\n // wait for a connection\n\n\n\n // set-up the tcp listener\n\n let listener = TcpListener::bind(format!(\"0.0.0.0:{}\", port))?;\n\n\n\n // get connections and check the player is the right one\n\n for stream_res in listener.incoming() {\n\n match stream_res {\n\n Ok(mut new_stream) => {\n\n println!(\"New connection: {}\", new_stream.peer_addr()?);\n\n\n\n // get the name \n\n match get_str_from_client(&mut new_stream) {\n\n Ok(s) => {\n\n if s == name {\n\n new_stream.write(&[1]).unwrap_or(1);\n", "file_path": "src/lib_server.rs", "rank": 0, "score": 140429.55401223822 }, { "content": "/// check if a string is a synonym of ‘yes’\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use machiavelli::lib_server::is_yes;\n\n///\n\n/// let example_yes = &\"ja\";\n\n/// let example_no = &\"nein\";\n\n///\n\n/// assert!(is_yes(example_yes));\n\n/// assert!(!is_yes(example_no));\n\n/// ```\n\npub fn is_yes(s: &str) -> bool {\n\n let s_l = s.to_lowercase();\n\n for &synonym in &YES_VALUES {\n\n if s_l == synonym {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 1, "score": 123028.91185019503 }, { "content": "fn main() {\n\n\n\n // set the style\n\n reset_style();\n\n\n\n // clear the terminal\n\n print!(\"\\x1b[2J\\x1b[1;1H\");\n\n\n\n // get the config\n\n println!(\"Hi there! Up for a game of Machiavelli?\\n\");\n\n let mut config = match get_config() {\n\n Ok(conf) => conf, \n\n Err(_) => {\n\n println!(\"Invalid input!\");\n\n process::exit(1);\n\n },\n\n };\n\n \n\n // create the table\n\n let mut table = Table::new();\n", "file_path": "src/main.rs", "rank": 2, "score": 118988.75282147352 }, { "content": "/// get the vector of player names from a file\n\npub fn load_names(fname: &str) -> Result<Vec<String>, InvalidInputError> {\n\n let content = std::fs::read_to_string(fname)?;\n\n Ok(content.trim().split(\"\\n\").map(String::from).collect())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 100349.2352939624 }, { "content": "/// convert a string to a sequence of bytes and sent it to the server\n\npub fn send_str_to_server(stream: &mut TcpStream, s: &str) -> Result<(), StreamError> {\n\n send_bytes_to_server(stream, &s.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 4, "score": 97760.24746513556 }, { "content": "/// send a message as a string to a client\n\npub fn send_str_to_client(stream: &mut TcpStream, s: &str) -> Result<(), StreamError> {\n\n send_bytes_to_client(stream, &s.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 5, "score": 97760.24746513556 }, { "content": "/// send a message and get the response\n\npub fn send_message_get_reply(stream: &mut TcpStream, message: &str) \n\n -> Result<Vec<u8>, StreamError>\n\n{\n\n stream.write(&mut [3])?;\n\n send_str_to_client(stream, message)?;\n\n get_bytes_from_client(stream)\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 6, "score": 96443.26349603823 }, { "content": "/// send the same message to all players\n\npub fn send_message_all_players(client_streams: &mut [TcpStream], message: &str) {\n\n\n\n let n_players: usize = client_streams.len();\n\n\n\n // send the messages\n\n for i in 0..n_players {\n\n client_streams[i].write(&mut [1]).unwrap_or(1);\n\n send_bytes_to_client_no_wait(&mut client_streams[i], 
&message.as_bytes()).unwrap_or(());\n\n }\n\n\n\n // wait until all clients have confirmed reception\n\n for i in 0..n_players {\n\n client_streams[i].read(&mut [0]).unwrap_or(0);\n\n }\n\n \n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 7, "score": 96443.26349603823 }, { "content": "/// clear the screens and send the same message to all players\n\npub fn clear_and_send_message_all_players(client_streams: &mut [TcpStream], message: &str) {\n\n\n\n let n_players: usize = client_streams.len();\n\n\n\n // send the messages\n\n for i in 0..n_players {\n\n client_streams[i].write(&mut [2]).unwrap_or(1);\n\n send_bytes_to_client_no_wait(&mut client_streams[i], &message.as_bytes()).unwrap_or(());\n\n }\n\n\n\n // wait until all clients have confirmed reception\n\n for i in 0..n_players {\n\n client_streams[i].read(&mut [0]).unwrap_or(1);\n\n }\n\n \n\n}\n\n\n\n// errors\n\n\n\n#[derive(Debug)]\n", "file_path": "src/lib_server.rs", "rank": 8, "score": 94656.61477227366 }, { "content": "fn first_word(s: &str) -> Result<String,InvalidInputError> {\n\n match s.split(' ').next() {\n\n Some(res) => Ok(res.to_string()),\n\n None => Err(InvalidInputError {})\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 92913.94924840276 }, { "content": "/// Encrypt a string\n\n///\n\n/// # Example\n\n/// ```\n\n/// use machiavelli::encode::encrypt_str;\n\n///\n\n/// let message = \"I am a string literal!\";\n\n/// let password = \"passw0rd\";\n\n///\n\n/// let cipher = encrypt_str(&message, &password);\n\n///\n\n/// ```\n\npub fn encrypt_str(message: &str, password: &str) -> Vec<u8> {\n\n let plaintext_u8 = message.as_bytes();\n\n let password_u8 = password.as_bytes();\n\n xor(&plaintext_u8, &password_u8)\n\n}\n\n\n", "file_path": "src/encode.rs", "rank": 10, "score": 88337.5558092292 }, { "content": "/// send the instruction to print a message to the client, then send a message to the same client\n\npub fn send_message_to_client(stream: &mut TcpStream, msg: &str) -> Result<(), StreamError>{\n\n stream.write(&mut [1])?;\n\n send_str_to_client(stream, msg)\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 11, "score": 87286.66687455558 }, { "content": "/// send the instruction to clear the screen and send back a message to the client, and read the \n\n/// response as a string\n\npub fn clear_and_send_message_to_client(stream: &mut TcpStream, msg: &str) -> Result<(), StreamError>{\n\n stream.write(&mut [2])?;\n\n send_str_to_client(stream, msg)\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 12, "score": 85726.44542990641 }, { "content": "pub fn instructions_no_save(must_pick_a_card: bool, print_reset_option: bool) \n\n -> String \n\n{\n\n let mut will_pick_a_card = &\"\";\n\n let mut reset_option = &\"\";\n\n if must_pick_a_card {\n\n will_pick_a_card = &\" (and pick a card)\";\n\n }\n\n if print_reset_option {\n\n reset_option = &\"g: Give up and reset\\n\";\n\n }\n\n format!(\"{}{}\\n{}\\n{}\\n{}\\n{}\\n{}\\n\",\n\n \"e: End your turn\",\n\n will_pick_a_card,\n\n \"p x y ...: Play the sequence x y ...\",\n\n \"t x y ...: Take the sequences x, y, ... from the table\",\n\n \"a x y z ...: Add the sequence y z ... 
to sequence x on the table\",\n\n \"r, s: Sort cards by rank or suit\",\n\n reset_option\n\n )\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 13, "score": 85696.67434655281 }, { "content": "fn print_str_from_server(stream: &mut TcpStream) -> Result<(), StreamError> {\n\n print!(\"{}\", get_str_from_server(stream)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 14, "score": 85120.16260245926 }, { "content": "pub fn get_input() -> Result<String, InvalidInputError> {\n\n let mut buffer = String::new();\n\n match stdin().read_line(&mut buffer) {\n\n Ok(_) => (),\n\n Err(_) => return Err(InvalidInputError {})\n\n }\n\n Ok(buffer)\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 15, "score": 84674.6868701266 }, { "content": "pub fn give_up(table: &mut Table, hand: &mut Sequence, deck: &mut Sequence, \n\n hand_start_round: &Sequence, table_start_round: &Table,\n\n cards_from_table: &mut Sequence) {\n\n \n\n // reset the situation\n\n *hand = hand_start_round.clone();\n\n *table = table_start_round.clone();\n\n *cards_from_table = Sequence::new();\n\n\n\n // penalty\n\n for _i in 0..PENALTY_RESET {\n\n match pick_a_card(hand, deck) {\n\n Ok(_) => (),\n\n Err(_) => {\n\n println!(\"No more card to draw!\");\n\n break;\n\n }\n\n };\n\n }\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 16, "score": 83754.12319759536 }, { "content": "fn clear_and_print_str_from_server(stream: &mut TcpStream) -> Result<(), StreamError> {\n\n clear_terminal();\n\n println!(\"{}\", get_str_from_server(stream)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 17, "score": 83201.3508224162 }, { "content": "pub fn player_turn(table: &mut Table, hand: &mut Sequence, deck: &mut Sequence, \n\n custom_rule_jokers: bool, player_name: &String) -> bool {\n\n\n\n // copy the initial hand\n\n let hand_start_round = hand.clone();\n\n \n\n // copy the initial table\n\n let table_start_round = table.clone();\n\n\n\n // get the player choice\n\n let mut message = String::new();\n\n loop {\n\n \n\n // clear the terminal\n\n clear_terminal();\n\n \n\n println!(\"\\x1b[1m{}'s turn\", player_name);\n\n reset_style();\n\n \n\n print_situation(table, hand, deck);\n", "file_path": "src/lib.rs", "rank": 18, "score": 82441.96682166767 }, { "content": "fn take_sequence(table: &mut Table, hand: &mut Sequence) -> String {\n\n println!(\"Which sequence would you like to take?\");\n\n match get_input().unwrap_or_else(|_| {\"\".to_string()})\n\n .trim().parse::<usize>() {\n\n Ok(n) => match table.take(n) {\n\n Some(seq) => {\n\n hand.merge(seq);\n\n return String::new();\n\n },\n\n None => return \"This sequence is not on the table\".to_string()\n\n },\n\n Err(_) => return \"Error parsing the input!\".to_string()\n\n };\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 81924.76042684764 }, { "content": "fn play_sequence(hand: &mut Sequence, table: &mut Table) -> String {\n\n println!(\"Please enter the sequence, separated by spaces\");\n\n let hand_and_indices = hand.show_indices();\n\n println!(\"{}\", hand_and_indices.0);\n\n reset_style();\n\n println!(\"{}\", hand_and_indices.1);\n\n let mut seq = Sequence::new();\n\n \n\n let mut s = get_input().unwrap_or_else(|_| {\"\".to_string()});\n\n s.pop();\n\n let mut seq_i = Vec::<usize>::new();\n\n for item in s.split(' ') {\n\n match item.parse::<usize>() {\n\n Ok(n) => {\n\n let mut n_i = 0;\n\n for &i in &seq_i {\n\n if i < n {\n\n n_i += 1;\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 20, "score": 81924.76042684764 }, { "content": 
"fn play_sequence_remote(hand: &mut Sequence, cards_from_table: &mut Sequence,\n\n table: &mut Table, mes: &[u8]) \n\n -> Result<Option<String>, StreamError>\n\n{\n\n // copy the initial hand and cards from tables\n\n let hand_copy = hand.clone();\n\n let cards_from_table_copy = cards_from_table.clone();\n\n\n\n // combine the hand and cards from the table\n\n let mut full_hand = hand.clone();\n\n let buffer = cards_from_table.clone();\n\n full_hand.merge(buffer.reverse());\n\n \n\n let mut seq = Sequence::new();\n\n \n\n let s = String::from_utf8(mes.to_vec())?;\n\n \n\n let mut seq_i_hand = Vec::<usize>::new();\n\n let mut seq_i_cft = Vec::<usize>::new();\n\n let n_hand = hand.number_cards();\n", "file_path": "src/lib_server.rs", "rank": 21, "score": 81123.39448833503 }, { "content": "fn add_to_table_sequence_remote(table: &mut Table, hand: &mut Sequence, \n\n cards_from_table: &mut Sequence, mes: &[u8]) \n\n -> Result<Option<String>, StreamError> \n\n{\n\n \n\n // copy the initial hand and cards from tables\n\n let hand_copy = hand.clone();\n\n let cards_from_table_copy = cards_from_table.clone();\n\n\n\n let mut seq_from_table: Sequence;\n\n let mut seq_from_hand = Sequence::new();\n\n let mut seq_from_hand_from_table = Sequence::new();\n\n\n\n // parse the request\n\n let content = String::from_utf8(mes.to_vec())?;\n\n let mut content = content.trim().split(\" \");\n\n\n\n // parse the index of the sequence to which to add cards\n\n match content.next() {\n\n Some(x) => match x.parse::<usize>() {\n", "file_path": "src/lib_server.rs", "rank": 22, "score": 81123.39448833503 }, { "content": "/// load the config from a file\n\npub fn get_config_from_file(fname: &str) -> Result<(Config,String),InvalidInputError> {\n\n \n\n // open the file\n\n let content = std::fs::read_to_string(fname)?;\n\n let content: Vec<&str> = content.split(\"\\n\").collect();\n\n\n\n // check that the file has at least the right number of lines\n\n if content.len() < 6 {\n\n return Err(InvalidInputError {});\n\n }\n\n\n\n // get the config\n\n let n_decks = first_word(&content[0])?.parse::<u8>()?;\n\n let n_jokers = first_word(&content[1])?.parse::<u8>()?;\n\n let n_cards_to_start = first_word(&content[2])?.parse::<u16>()?;\n\n let custom_rule_jokers = first_word(&content[3])? 
== \"1\";\n\n let n_players = first_word(&content[4])?.parse::<u8>()?;\n\n let savefile = first_word(&content[5])?;\n\n \n\n // print the parameters\n", "file_path": "src/lib.rs", "rank": 23, "score": 79852.07217431508 }, { "content": "/// save the vector of player names to a file\n\npub fn save_names(names: &Vec<String>, fname: &str) -> Result<(), InvalidInputError> {\n\n let names_single_string = names.join(\"\\n\");\n\n let mut file = std::fs::File::create(fname)?;\n\n file.write_all(names_single_string.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 24, "score": 78087.40290730502 }, { "content": "/// Decrypt an array of bytes into a string\n\n///\n\n/// # Example\n\n/// ```\n\n/// use machiavelli::encode::{ encrypt_str, decrypt_str };\n\n///\n\n/// let message = \"I am a string literal!\";\n\n/// let password = \"passw0rd\";\n\n///\n\n/// let cipher = encrypt_str(&message, &password);\n\n/// let decrypted = decrypt_str(&cipher, &password).unwrap();\n\n///\n\n/// assert_eq!(message.to_string(), decrypted);\n\n///\n\n/// ```\n\npub fn decrypt_str(cipher: &[u8], password: &str) -> Result<String, std::str::Utf8Error> {\n\n let password_u8 = password.as_bytes();\n\n match std::str::from_utf8(&xor(&cipher, &password_u8)) {\n\n Ok(s) => return Ok(s.to_string()),\n\n Err(e) => return Err(e)\n\n };\n\n}\n", "file_path": "src/encode.rs", "rank": 25, "score": 77601.96064186918 }, { "content": "/// player turn\n\npub fn start_player_turn(table: &mut Table, hands: &mut Vec<Sequence>, deck: &mut Sequence, \n\n custom_rule_jokers: bool, player_names: &Vec<String>, current_player: usize, \n\n n_players: usize, streams: &mut Vec<TcpStream>, port: usize, \n\n sort_mode: &mut u8, previous_messages: &Vec<Option<String>>)\n\n -> Result<Option<String>,StreamError> {\n\n\n\n // copy the initial hand\n\n let hand_start_round = hands[current_player].clone();\n\n\n\n // copy the initial table\n\n let table_start_round = table.clone();\n\n \n\n // cards taken from the table\n\n let mut cards_from_table = Sequence::new();\n\n \n\n // send the instructions\n\n send_message_to_client(&mut streams[current_player], \n\n &format!(\"\\u{0007}\\n{}\", instructions_no_save(true,false)))?;\n\n\n\n // get and process the player choice\n", "file_path": "src/lib_server.rs", "rank": 26, "score": 77476.05792319783 }, { "content": "/// get a sequence of bytes from the server and convert it to a string\n\npub fn get_str_from_server(stream: &mut TcpStream) -> Result<String, StreamError> {\n\n let bytes = get_bytes_from_server(stream)?;\n\n match String::from_utf8(bytes) {\n\n Ok(s) => Ok(s),\n\n Err(_) => Err(StreamError::from(BytesToStringError {}))\n\n }\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 27, "score": 77132.87839892888 }, { "content": "/// get a message (string) from a client\n\npub fn get_str_from_client(stream: &mut TcpStream) -> Result<String, StreamError> {\n\n let bytes = get_bytes_from_client(stream)?;\n\n match String::from_utf8(bytes) {\n\n Ok(s) => Ok(s),\n\n Err(_) => Err(StreamError::from(BytesToStringError {}))\n\n }\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 28, "score": 77132.87839892888 }, { "content": "fn take_sequence_remote(table: &mut Table, hand: &mut Sequence, mes: &[u8], stream: &mut TcpStream) \n\n -> Result<(), StreamError> \n\n{\n\n let content = String::from_utf8(mes.to_vec())?;\n\n let content = content.trim().split(\" \");\n\n let mut seq_i = Vec::<usize>::new();\n\n for s in content {\n\n match s.parse::<usize>() {\n\n Ok(n) => {\n\n let mut 
n_i: usize = 0;\n\n for &i in &seq_i {\n\n if i < n {\n\n n_i += 1;\n\n }\n\n }\n\n seq_i.push(n);\n\n match table.take(n-n_i) {\n\n Some(seq) => {\n\n hand.merge(seq.reverse());\n\n },\n\n None => send_message_to_client(stream, &\"This sequence is not on the table\\n\")?\n\n }\n\n },\n\n Err(_) => send_message_to_client(stream, &\"Error parsing the input!\\n\")?\n\n };\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 29, "score": 76417.63750748438 }, { "content": "fn pick_a_card(hand: &mut Sequence, deck: &mut Sequence) -> Result<Card, NoMoreCards> {\n\n let card = match deck.draw_card() {\n\n Some(c) => c,\n\n None => return Err(NoMoreCards {})\n\n };\n\n hand.add_card(card.clone());\n\n Ok(card)\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 30, "score": 75334.42250226151 }, { "content": "/// get the player name\n\npub fn handle_client(mut stream: TcpStream) -> Result<(TcpStream, String, usize), StreamError> {\n\n let mut player_name: String = \"\".to_string();\n\n match get_str_from_client(&mut stream) {\n\n Ok(s) => {\n\n // great the player\n\n player_name = s.clone();\n\n let msg = format!(\"Hello {}!\\nWaiting for other players to join...\", &s);\n\n stream.write(&[1])?;\n\n send_str_to_client(&mut stream, &msg)?;\n\n },\n\n Err(_)=> {\n\n println!(\"An error occured while reading the stream; terminating connection with {}\", \n\n stream.peer_addr()?);\n\n stream.shutdown(Shutdown::Both)?;\n\n }\n\n };\n\n Ok((stream, player_name, 0))\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 31, "score": 70887.5172265905 }, { "content": "/// check that no players have the same name; if yes, rename players\n\npub fn ensure_names_are_different(player_names: &mut Vec<String>, client_streams: &mut Vec<TcpStream>) \n\n -> Result<(), StreamError>\n\n{\n\n let mut cont = true;\n\n while cont {\n\n cont = false;\n\n for i in 0..player_names.len() {\n\n for j in (i+1)..player_names.len() {\n\n if player_names[j] == player_names[i] {\n\n cont = true;\n\n match String::from_utf8(send_message_get_reply(&mut client_streams[j], \n\n &format!(\"The name {} is already taken! Please choose a different one.\\n\",\n\n &player_names[j]))?) {\n\n Ok(n) => player_names[j] = n,\n\n Err(_) => send_message_to_client(&mut client_streams[j], &\"Could not read the input!\")?\n\n }\n\n }\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 32, "score": 69357.67937435854 }, { "content": "fn send_message(stream: &mut TcpStream) -> Result<(), StreamError> {\n\n let mut reply = String::new();\n\n let mut cont = true;\n\n while cont {\n\n match get_input() {\n\n Ok(s) => {\n\n reply = s.trim().to_string();\n\n cont = false\n\n },\n\n Err(_) => println!(\"Could not parse the input\")\n\n };\n\n }\n\n send_str_to_server(stream, &reply)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 33, "score": 68170.61275754844 }, { "content": "fn print_and_reply(stream: &mut TcpStream) -> Result<(), StreamError> {\n\n println!(\"{}\", get_str_from_server(stream)?);\n\n send_message(stream)\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 34, "score": 68170.61275754844 }, { "content": "/// get a request from te server and act accordingly\n\n///\n\n/// The request is initially encoded in a single byte sent by the server to `stream`. 
\n\n/// Five values are currently supported: \n\n///\n\n/// * 1: print the next message sent by the server\n\n/// * 2: clear the terminal and print the next message sent by the server\n\n/// * 3: print the next message sent by the server and send back a message from stdin\n\n/// * 4: send a message from stdin\n\n/// * 5: close the client\n\npub fn handle_server_request(single_byte_buffer: &mut [u8; 1], stream: &mut TcpStream) -> Result<(), StreamError> {\n\n stream.read(single_byte_buffer)?;\n\n match single_byte_buffer[0] {\n\n \n\n // value 1: print the message from the server\n\n 1 => print_str_from_server(stream)?,\n\n \n\n // value 2: clear the terminal and print the message from the server\n\n 2 => clear_and_print_str_from_server(stream)?,\n\n \n\n // value 3: print the message and return a reply in bytes\n\n 3 => print_and_reply(stream)?,\n\n \n\n // value 4: send a message\n\n 4 => send_message(stream)?,\n\n \n\n // value 5: exit\n\n 5 => {\n\n print!(\"\\x1b[0m\\x1b[?25h\"); // reset the style and show the cursor\n\n print!(\"\\x1b[2J\\x1b[1;1H\"); // clear the screen\n\n print!(\"\\x1b[K\"); // redraw the screen\n\n std::process::exit(0)\n\n },\n\n\n\n _ => ()\n\n };\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 35, "score": 66813.0264280156 }, { "content": "/// get the player name and check that it is in the list of players and not already taken\n\npub fn handle_client_load(mut stream: TcpStream, names: &Vec<String>, names_taken: Arc<Mutex<Vec<String>>>) \n\n -> Result<(TcpStream, String, usize), StreamError> \n\n{\n\n let mut player_name: String;\n\n let position: usize;\n\n loop {\n\n match get_str_from_client(&mut stream) {\n\n Ok(s) => {\n\n player_name = s.clone();\n\n \n\n // check if the name is in the list\n\n match names.iter().position(|x| x == &player_name) {\n\n Some(i) => {\n\n // check if it is not already taken\n\n let mut lock = names_taken.lock().unwrap();\n\n match lock.iter().position(|x| x == &player_name) {\n\n Some(_) => {\n\n stream.write(&[0])?;\n\n let msg = format!(\"Sorry, this name is already taken!\\n\");\n\n send_str_to_client(&mut stream, &msg)?;\n", "file_path": "src/lib_server.rs", "rank": 36, "score": 63088.08929207615 }, { "content": "/// load the game info from a sequence of bytes\n\npub fn load_game(bytes: &[u8]) -> Result<(Config, u8, u8, Table, Vec<Sequence>, Sequence, Vec<String>), LoadingError> {\n\n let mut i_byte: usize = 0; // index of the current element in bytes\n\n\n\n // load the config\n\n let n_bytes_config: usize = 6;\n\n let config = Config::from_bytes(&bytes[i_byte..n_bytes_config]);\n\n i_byte += n_bytes_config;\n\n \n\n // load the starting player\n\n let starting_player = bytes[i_byte];\n\n i_byte += 1;\n\n \n\n // load the current player\n\n let player = bytes[i_byte];\n\n i_byte += 1;\n\n \n\n // hand of each player\n\n let mut hands = Vec::<Sequence>::new();\n\n for _i_player in 0..config.n_players {\n\n \n", "file_path": "src/lib.rs", "rank": 37, "score": 62049.66362050237 }, { "content": "/// try to connect to the server and send the player name\n\n///\n\n/// If the connection is successful, clear the terminal, print the reply from the server, and\n\n/// return a `TcpStream`. 
\n\n/// If not, return a `StreamError`.\n\npub fn say_hello(mut name: String) -> Result<TcpStream, StreamError> {\n\n\n\n // host address\n\n let name_file_port_server = \"Config/port_client.dat\";\n\n let host = match std::fs::read_to_string(name_file_port_server) {\n\n Ok(s) => s.trim().to_string(),\n\n Err(_) => get_address()\n\n };\n\n\n\n match TcpStream::connect(&host) {\n\n Ok(mut stream) => {\n\n println!(\"Successfully connected to {}\", &host);\n\n \n\n loop {\n\n \n\n if name.len() == 0 {\n\n // get the player name\n\n let mut cont = true;\n\n println!(\"Player name:\");\n\n while cont {\n", "file_path": "src/lib_client.rs", "rank": 38, "score": 61263.89513738445 }, { "content": "/// send the instruction to send a message to the client, and read the response as a string\n\npub fn get_string_from_client(stream: &mut TcpStream) -> Result<String, StreamError> {\n\n let msg = get_message_from_client(stream)?;\n\n match String::from_utf8(msg) {\n\n Ok(s) => Ok(s),\n\n Err(_) => Err(StreamError { message: \"Could not convert the input to a string\".to_string() })\n\n }\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 39, "score": 60081.76352843795 }, { "content": "fn get_message_from_client(stream: &mut TcpStream) -> Result<Vec<u8>, StreamError>{\n\n stream.write(&mut [4])?;\n\n get_bytes_from_client(stream)\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 40, "score": 60081.76352843795 }, { "content": "fn send_bytes_to_client_no_wait(stream: &mut TcpStream, bytes: &[u8]) -> Result<(), StreamError> {\n\n \n\n // ensure that the number of bytes is small enough\n\n if bytes.len() > MAX_N_BUFFERS * BUFFER_SIZE {\n\n return Err(StreamError { message: format!(\n\n \"Stream too long: size: {}, maximum size: {}\",\n\n bytes.len(), MAX_N_BUFFERS*BUFFER_SIZE\n\n ) })\n\n }\n\n\n\n // the first bytes will determine the number of times the buffer should be read\n\n let mut n_buffers: u8 = (bytes.len() / BUFFER_SIZE) as u8;\n\n if bytes.len() % BUFFER_SIZE != 0 {\n\n n_buffers += 1;\n\n }\n\n stream.write(&[n_buffers])?;\n\n\n\n // write the data stream\n\n for i in 0..((n_buffers-1) as usize) {\n\n stream.write(&bytes[i*BUFFER_SIZE..(i+1)*BUFFER_SIZE])?;\n\n }\n\n stream.write(&bytes[((n_buffers-1) as usize)*BUFFER_SIZE..])?;\n\n \n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 41, "score": 58981.747520563076 }, { "content": "/// send a message as bytes to a client\n\npub fn send_bytes_to_client(stream: &mut TcpStream, bytes: &[u8]) -> Result<(), StreamError> {\n\n \n\n send_bytes_to_client_no_wait(stream, bytes)?;\n\n \n\n // wait for a reply to be sent from the receiver\n\n stream.read(&mut [0])?;\n\n \n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 42, "score": 57251.74911584409 }, { "content": "/// send a sequence of bytes to the server and wait for confirmation that it has been received\n\npub fn send_bytes_to_server(stream: &mut TcpStream, bytes: &[u8]) -> Result<(), StreamError> {\n\n \n\n // ensure that the number of bytes is small enough\n\n if bytes.len() > MAX_N_BUFFERS * BUFFER_SIZE {\n\n return Err(StreamError { message: format!(\n\n \"Stream too long: size: {}, maximum size: {}\",\n\n bytes.len(), MAX_N_BUFFERS*BUFFER_SIZE\n\n ) })\n\n }\n\n\n\n // the first bytes will determine the number of times the buffer should be read\n\n let mut n_buffers: u8 = (bytes.len() / BUFFER_SIZE) as u8;\n\n if bytes.len() % BUFFER_SIZE != 0 {\n\n n_buffers += 1;\n\n }\n\n stream.write(&[n_buffers])?;\n\n\n\n // write the data stream\n\n for i in 
1..(n_buffers as usize) {\n\n stream.write(&bytes[(i-1)*BUFFER_SIZE..i*BUFFER_SIZE])?;\n", "file_path": "src/lib_client.rs", "rank": 43, "score": 57251.74911584409 }, { "content": "/// get a message (bytes) from a client\n\npub fn get_bytes_from_client(stream: &mut TcpStream) -> Result<Vec<u8>, StreamError> {\n\n \n\n // buffer\n\n let mut buffer: [u8; BUFFER_SIZE] = [0; BUFFER_SIZE];\n\n\n\n // the first bytes will determine the number of times the buffer should be read\n\n let mut n_buffers: [u8; 1] = [0];\n\n stream.read(&mut n_buffers)?;\n\n\n\n // vector containing the result\n\n let mut res = Vec::<u8>::new();\n\n\n\n // read the data stream\n\n let mut size;\n\n for _i in 0..n_buffers[0] {\n\n size = stream.read(&mut buffer)?;\n\n res.extend_from_slice(&buffer[..size]);\n\n }\n\n \n\n // send something to confirm I have received the data\n\n stream.write(&[0])?;\n\n \n\n // return the result\n\n Ok(res)\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 44, "score": 57251.74911584409 }, { "content": "/// get a sequence of bytes from the server\n\npub fn get_bytes_from_server(stream: &mut TcpStream) -> Result<Vec<u8>, StreamError> {\n\n \n\n // buffer\n\n let mut buffer: [u8; BUFFER_SIZE] = [0; BUFFER_SIZE];\n\n\n\n // the first bytes will determine the number of times the buffer should be read\n\n let mut n_buffers: [u8; 1] = [0];\n\n stream.read_exact(&mut n_buffers)?;\n\n\n\n // vector containing the result\n\n let mut res = Vec::<u8>::new();\n\n\n\n // read the data stream\n\n let mut size;\n\n for _i in 0..n_buffers[0] {\n\n size = stream.read(&mut buffer)?;\n\n res.extend_from_slice(&buffer[..size]);\n\n }\n\n \n\n // send something to confirm I have received the data\n\n stream.write(&[0])?;\n\n\n\n // return the result\n\n Ok(res)\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 45, "score": 57251.74911584409 }, { "content": "/// ask the user for the game information and return a Config\n\npub fn get_config() -> Result<Config,InvalidInputError> {\n\n \n\n println!(\"Number of decks (integer between 1 and 255) (enter 0 to load a previously saved game): \");\n\n let mut n_decks: u8 = 0;\n\n let mut load = false;\n\n while n_decks == 0 {\n\n n_decks = match get_input()?.trim().parse::<u8>() {\n\n Ok(0) => {\n\n load = true;\n\n 1\n\n },\n\n Ok(n) => n,\n\n Err(_) => {\n\n println!(\"Invalid input\");\n\n 0\n\n }\n\n };\n\n }\n\n\n\n if load {\n", "file_path": "src/lib.rs", "rank": 46, "score": 56852.10865946117 }, { "content": "/// ask the user for the game information and savefile name\n\npub fn get_config_and_savefile() -> Result<(Config, String),InvalidInputError> {\n\n let conf = get_config()?;\n\n println!(\"Name of the save file: \");\n\n let savefile = get_input()?.trim().to_string();\n\n Ok((conf, savefile))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 47, "score": 52443.342068683065 }, { "content": "fn instructions() -> String {\n\n format!(\"{}\\n{}\\n{}\\n{}\\n{}\\n{}\\n{}\\n\",\n\n \"q: Save and quit\",\n\n \"c: Pick a card\",\n\n \"p: Play a sequence\",\n\n \"t: Take from the table\",\n\n \"a: Pass\",\n\n \"r, s: Sort cards by rank or suit\",\n\n \"g: Give up and reset\"\n\n )\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 48, "score": 48263.9565838593 }, { "content": "/// wait a moment (`N_MILLISECONDS_WAIT` in milliseconds)\n\npub fn wait() {\n\n std::thread::sleep(std::time::Duration::from_millis(N_MILLISECONDS_WAIT));\n\n}\n\n\n\n\n\n// errors\n\n\n\n/// generic error raised when reading from or writing to a stream fails\n\n#[derive(Debug)]\n\npub 
struct StreamError {\n\n message: String\n\n}\n\n\n\n/// generic error raised when conversion from a sequence of bytes to a string fails\n\n#[derive(Debug)]\n\npub struct BytesToStringError {}\n\n\n\nimpl std::fmt::Display for StreamError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"StreamError: {}\", self.message)\n", "file_path": "src/lib_client.rs", "rank": 49, "score": 46893.93984080716 }, { "content": "/// clear the terminal\n\npub fn clear_terminal() {\n\n print!(\"\\x1b[2J\\x1b[1;1H\");\n\n}\n\n\n\n\n\n/// Structure to store the game configuration\n\n#[derive(Debug, PartialEq)]\n\npub struct Config {\n\n pub n_decks: u8,\n\n pub n_jokers: u8,\n\n pub n_cards_to_start: u16,\n\n pub custom_rule_jokers: bool,\n\n pub n_players: u8\n\n}\n\n\n\n\n\nimpl Config {\n\n\n\n /// Convert the config structure to a sequence of bytes\n\n ///\n", "file_path": "src/lib.rs", "rank": 50, "score": 46890.37876646343 }, { "content": "/// wait a moment\n\npub fn wait() {\n\n std::thread::sleep(std::time::Duration::from_millis(N_MILLISECONDS_WAIT));\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 51, "score": 46890.37876646343 }, { "content": "/// reset the terminal output style\n\npub fn reset_style() {\n\n print!(\"{}\", reset_style_string());\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 52, "score": 46890.37876646343 }, { "content": "// ask for the port\n\nfn get_address() -> String {\n\n println!(\"Address and port of the server?\");\n\n loop {\n\n match get_input() {\n\n Ok(s) => return s.trim().to_string(),\n\n Err(_) => println!(\"Could not parse the input\")\n\n };\n\n }\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 53, "score": 45657.793009815905 }, { "content": "/// wait a longer moment\n\npub fn long_wait() {\n\n std::thread::sleep(std::time::Duration::from_millis(N_MILLISECONDS_LONG_WAIT));\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 54, "score": 45653.904834722605 }, { "content": "/// convert the game info to a sequence of bytes\n\npub fn game_to_bytes (starting_player: u8, player: u8, table: &Table, hands: &Vec<Sequence>, \n\n deck: &Sequence, config: &Config, player_names: &Vec<String>) -> Vec<u8> {\n\n \n\n // construct the sequence of bytes to be saved\n\n let mut bytes = Vec::<u8>::new();\n\n \n\n // config\n\n bytes.append(&mut config.to_bytes());\n\n\n\n // starting player\n\n bytes.push(starting_player);\n\n \n\n // player about to play\n\n bytes.push(player);\n\n \n\n // hand of each player\n\n for i_player in 0..config.n_players {\n\n \n\n // number of cards in the hand as 2 u8\n\n let n_cards_in_hand = hands[i_player as usize].number_cards() as u16;\n", "file_path": "src/lib.rs", "rank": 55, "score": 44374.05741075377 }, { "content": "pub fn reset_style_string() -> String {\n\n [\n\n \"\\x1b[0m\", // reset attributes\n\n \"\\x1b[30;47m\", // set the foreground and background colours\n\n \"\\x1b[?25l\", // hide the cursor\n\n \"\\x1b[K\" // redraw the prompt\n\n ].join(\"\")\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 56, "score": 42238.594676915134 }, { "content": "fn suit_to_int(suit: Suit) -> u8 {\n\n match suit {\n\n Heart => 1,\n\n Club => 2,\n\n Diamond => 3,\n\n Spade => 4,\n\n }\n\n}\n\n\n", "file_path": "src/sequence_cards.rs", "rank": 57, "score": 39356.8760701993 }, { "content": "/// assign a value to each card, with the value given higher weight than the suit\n\nfn value_card_by_rank(card: &Card) -> u8 {\n\n match *card {\n\n Joker => 255,\n\n RegularCard(suit, val) => 4 * val + 
suit_to_int(suit)\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use Card::{ RegularCard, Joker };\n\n use rand::thread_rng;\n\n\n\n #[test]\n\n fn take_jokers_1() {\n\n let mut seq = Sequence::from_cards(&[\n\n Joker, \n\n Joker\n", "file_path": "src/sequence_cards.rs", "rank": 58, "score": 38427.82840045709 }, { "content": "/// assign a value to each card, with the suit given higher weight than the value\n\nfn value_card_by_suit(card: &Card) -> u8 {\n\n match *card {\n\n Joker => 255,\n\n RegularCard(suit, val) => (MAX_VAL + 1) * suit_to_int(suit) + val\n\n }\n\n}\n\n\n\n\n", "file_path": "src/sequence_cards.rs", "rank": 59, "score": 38427.82840045709 }, { "content": "fn int_to_suit(s: u8) -> Option<Suit> {\n\n match s {\n\n 1 => Some(Heart),\n\n 2 => Some(Club),\n\n 3 => Some(Diamond),\n\n 4 => Some(Spade),\n\n _ => None\n\n }\n\n}\n\n\n\nimpl Card {\n\n\n\n fn from_byte(x: u8) -> Option<Card> {\n\n if x == 0 {\n\n return Some(Joker);\n\n }\n\n let mut val = x % MAX_VAL;\n\n if val == 0 {\n\n val = MAX_VAL;\n\n }\n", "file_path": "src/sequence_cards.rs", "rank": 60, "score": 37813.12926039426 }, { "content": "/// A simple sorting function with quadratic runtime\n\n///\n\n/// # Example\n\n/// ```\n\n/// use machiavelli::sort::sort;\n\n///\n\n/// let unsorted = vec![1,5,3,2,4];\n\n/// let sorted = sort(&unsorted, Box::new(|x: &i8| {-x}));\n\n///\n\n/// assert_eq!(vec![5,4,3,2,1], sorted);\n\n/// ```\n\npub fn sort<T: Clone, U: Ord+Clone> (a: &Vec<T>, f: Box<dyn Fn(&T) -> U>) -> Vec<T> {\n\n let mut sorted = Vec::<T>::new();\n\n let mut sorted_f = Vec::<U>::new();\n\n let mut a_f = Vec::<U>::new();\n\n \n\n for x in a {\n\n a_f.push((*f)(x));\n\n }\n\n \n\n for j in 0..a.len() {\n\n let mut inserted = false;\n\n for i in 0..sorted.len() {\n\n if a_f[j] <= sorted_f[i] {\n\n sorted.insert(i, a[j].clone());\n\n sorted_f.insert(i, a_f[j].clone());\n\n inserted = true;\n\n break;\n\n }\n\n }\n\n if !inserted {\n\n sorted.push(a[j].clone());\n\n sorted_f.push(a_f[j].clone());\n\n }\n\n } \n\n sorted\n\n}\n", "file_path": "src/sort.rs", "rank": 61, "score": 36028.46606817303 }, { "content": "pub fn situation_to_string(table: &Table, hand: &Sequence, \n\n cards_from_table: &Sequence) -> String {\n\n \n\n let hi = hand.show_indices();\n\n let ht = cards_from_table.show_indices_shifted(hand.number_cards());\n\n if cards_from_table.number_cards() == 0 {\n\n format!(\"\\n{}\\n{}\\n{}\\n{}\\n{}{}\\n\",\n\n \"Table:\", table, \"Your hand:\", hi.0, reset_style_string(), hi.1)\n\n } else {\n\n format!(\"\\n{}\\n{}\\n{}\\n{}{}\\n{}\\n\\n{}\\n{}\\n{}{}\\n\", \n\n \"Table:\", table, \"Your hand:\", hi.0, reset_style_string(), hi.1,\n\n \"Cards from the table:\", ht.0, reset_style_string(), ht.1)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 62, "score": 35584.784455218265 }, { "content": "fn print_situation(table: &Table, hand: &Sequence, deck: &Sequence) {\n\n \n\n println!(\"\\n{} cards remaining in the deck\", deck.number_cards());\n\n \n\n // print the table\n\n println!(\"Table: \\n{}\", table);\n\n\n\n // print the player hand\n\n println!(\"Your hand:\\n{}\", hand);\n\n reset_style();\n\n\n\n}\n\n\n\n\n", "file_path": "src/lib.rs", "rank": 63, "score": 33624.38815610245 }, { "content": "/// Encrypt a plaintext by xoring it with a password\n\n///\n\n/// # Example\n\n/// ```\n\n/// use machiavelli::encode::xor;\n\n///\n\n/// let plaintext: Vec<u8> = vec![1,2,3,4,5];\n\n/// let password: Vec<u8> = vec![0,1];\n\n///\n\n/// let cipher = xor(&plaintext, 
&password);\n\n///\n\n/// assert_eq!(vec![1,3,3,5,5], cipher);\n\n///\n\n/// ```\n\npub fn xor(plaintext: &[u8], password: &[u8]) -> Vec<u8> {\n\n let mut cipher = Vec::<u8>::new();\n\n let n = password.len();\n\n for i in 0..plaintext.len() {\n\n cipher.push(plaintext[i] ^ password[i%n])\n\n }\n\n cipher\n\n}\n\n\n", "file_path": "src/encode.rs", "rank": 64, "score": 32673.40498954508 }, { "content": " Err(_) => retry = true\n\n };\n\n\n\n fname = fname.trim().to_string();\n\n\n\n if !retry {\n\n\n\n // load the data from the file\n\n let mut file: File; \n\n match File::open(fname.clone()) {\n\n Ok(f) => file = f,\n\n Err(_) => {\n\n println!(\"Could not open the file!\");\n\n retry = true;\n\n fname.clear();\n\n continue;\n\n }\n\n };\n\n match file.read_to_end(&mut bytes) {\n\n Ok(_) => (),\n", "file_path": "src/main.rs", "rank": 65, "score": 31129.585551341712 }, { "content": " }\n\n }\n\n\n\n // get the players name\n\n for i in 0..config.n_players {\n\n println!(\"Player {}'s name: \", i+1);\n\n let mut cont = true;\n\n while cont {\n\n match get_input() {\n\n Ok(s) => {\n\n player_names.push(s.trim().to_string());\n\n cont = false\n\n },\n\n Err(_) => println!(\"Could not parse the input\")\n\n };\n\n }\n\n }\n\n\n\n\n\n }\n", "file_path": "src/main.rs", "rank": 66, "score": 31128.557481781223 }, { "content": " let mut deck = Sequence::new();\n\n let mut hands = Vec::<Sequence>::new();\n\n let mut starting_player: u8 = 0;\n\n let mut player: u8 = 0;\n\n let mut player_names = Vec::<String>::new();\n\n\n\n if config.n_decks == 0 {\n\n \n\n // load the previous game\n\n println!(\"Name of the save file:\");\n\n let mut fname = String::new();\n\n let mut bytes = Vec::<u8>::new();\n\n let mut retry = true;\n\n while retry {\n\n\n\n retry = false;\n\n \n\n // get the file name\n\n match stdin().read_line(&mut fname) {\n\n Ok(_) => (),\n", "file_path": "src/main.rs", "rank": 67, "score": 31128.128816766286 }, { "content": " \n\n // play until a player wins, there is no card left in the deck, or the player decides to save\n\n // and quit\n\n let mut save_and_quit: bool;\n\n loop {\n\n if deck.number_cards() == 0 {\n\n println!(\"\\x1b[1mNo more cards in the deck—It's a draw!\\x1b[0m\\n\");\n\n break;\n\n }\n\n save_and_quit = player_turn(&mut table, &mut hands[player as usize], \n\n &mut deck, config.custom_rule_jokers, &player_names[player as usize]);\n\n if save_and_quit {\n\n \n\n // convert the game data to a sequence of bytes\n\n let mut bytes = game_to_bytes(starting_player, player, &table, &hands, &deck, &config, &player_names);\n\n\n\n println!(\"Name of the save file:\");\n\n let mut fname = String::new();\n\n let mut retry = true;\n\n while retry {\n", "file_path": "src/main.rs", "rank": 68, "score": 31127.875591608426 }, { "content": "//! # Machiavelli\n\n//!\n\n//! 
A simple machiavelli card game *(work in progress)*\n\n\n\nuse std::process;\n\nuse std::io::{ stdin, Read, Write };\n\nuse std::fs::File;\n\nuse rand::thread_rng;\n\nuse machiavelli::*;\n\n\n", "file_path": "src/main.rs", "rank": 69, "score": 31127.57439636215 }, { "content": " bytes = Vec::<u8>::new();\n\n },\n\n Err(_) => {\n\n println!(\"Error loading the save file!\");\n\n }\n\n };\n\n }\n\n }\n\n\n\n } else {\n\n\n\n // build the deck\n\n let mut rng = thread_rng();\n\n deck = Sequence::multi_deck(config.n_decks, config.n_jokers, &mut rng);\n\n \n\n // build the hands\n\n hands = vec![Sequence::new(); config.n_players as usize];\n\n for i in 0..config.n_players {\n\n for _ in 0..config.n_cards_to_start {\n\n hands[i as usize].add_card(deck.draw_card().unwrap());\n", "file_path": "src/main.rs", "rank": 70, "score": 31126.268732822104 }, { "content": "\n\n retry = false;\n\n \n\n // get the file name\n\n match stdin().read_line(&mut fname) {\n\n Ok(_) => (),\n\n Err(_) => retry = true\n\n };\n\n fname = fname.trim().to_string();\n\n\n\n // obfuscate the save file (not very secure!)\n\n bytes = encode::xor(&bytes, &fname.as_bytes());\n\n \n\n if !retry {\n\n\n\n // save the data to the file\n\n let mut file: File; \n\n match File::create(fname.clone()) {\n\n Ok(f) => file = f,\n\n Err(_) => {\n", "file_path": "src/main.rs", "rank": 71, "score": 31125.905149971433 }, { "content": " Err(_) => {\n\n println!(\"Could not read from the file!\");\n\n retry = true;\n\n bytes.clear();\n\n fname.clear();\n\n }\n\n };\n\n \n\n // decode the sequence of bytes\n\n bytes = encode::xor(&bytes, &fname.as_bytes());\n\n\n\n match load_game(&bytes) {\n\n Ok(lg) => {\n\n config = lg.0;\n\n starting_player = lg.1; \n\n player = lg.2; \n\n table = lg.3;\n\n hands = lg.4; \n\n deck = lg.5;\n\n player_names = lg.6;\n", "file_path": "src/main.rs", "rank": 72, "score": 31125.88229284635 }, { "content": " println!(\"Could not create the file!\");\n\n retry = true;\n\n continue;\n\n }\n\n };\n\n match file.write_all(&bytes) {\n\n Ok(_) => (),\n\n Err(_) => {\n\n println!(\"Could not write to the file!\");\n\n retry = true;\n\n }\n\n };\n\n }\n\n }\n\n\n\n break;\n\n }\n\n if hands[player as usize].number_cards() == 0 {\n\n println!(\"\\x1b[1mPlayer {} wins! 
Congratulations!\\x1b[0m\\n\", player+1);\n\n break;\n\n }\n\n player = (player + 1) % config.n_players;\n\n }\n\n \n\n // reset the style\n\n println!(\"\\x1b[0m\");\n\n print!(\"\\x1b[?25h\");\n\n}\n", "file_path": "src/main.rs", "rank": 73, "score": 31124.709176928154 }, { "content": "fn print_situation_remote(table: &Table, hands: &Vec<Sequence>, deck: &Sequence, \n\n player_names: &Vec<String>, player: usize, current_player: usize, \n\n stream: &mut TcpStream, print_instructions: bool, cards_from_table: &Sequence, \n\n has_played_something: bool, print_reset_option: bool) \n\n -> Result<(), StreamError>\n\n{\n\n // string with the number of cards each player has\n\n let mut string_n_cards = format!(\"\\nNumber of cards ({} remaining in the deck):\", deck.number_cards());\n\n for i in 0..(hands.len()) {\n\n string_n_cards += &format!(\"\\n {}: {}\", &player_names[i], &hands[i].number_cards());\n\n }\n\n string_n_cards += \"\\n\";\n\n\n\n clear_and_send_message_to_client(stream, \n\n &format!(\"\\x1b[1m{}'s turn:{}\", player_names[current_player], &reset_style_string()))?;\n\n send_message_to_client(stream, &string_n_cards)?;\n\n send_message_to_client(stream, &situation_to_string(table, &hands[player], cards_from_table))?;\n\n if print_instructions {\n\n send_message_to_client(stream, &\"\\n\")?;\n\n send_message_to_client(stream, &instructions_no_save(!has_played_something, print_reset_option))?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 74, "score": 30490.08901061261 }, { "content": " Ok(n) => match table.take(n) {\n\n Some(seq) => {\n\n seq_from_table = seq;\n\n },\n\n None => {\n\n let message = format!(\"Sequence {} is not on the table\\n\", n);\n\n return Ok(Some(message))\n\n }\n\n },\n\n Err(_) => {\n\n let message = \"Error parsing the input!\\n\".to_string();\n\n return Ok(Some(message))\n\n }\n\n },\n\n None => return Ok(None)\n\n }\n\n\n\n // parse the sequence to play\n\n let mut seq_i_hand = Vec::<usize>::new();\n\n let mut seq_i_cft = Vec::<usize>::new();\n", "file_path": "src/lib_server.rs", "rank": 75, "score": 16.519706717805782 }, { "content": "//! 
Library for the game client\n\n\n\nuse super::*;\n\npub use std::net::TcpStream;\n\npub use std::io::{ Read, Write };\n\npub use std::str::from_utf8;\n\n\n\nconst BUFFER_SIZE: usize = 50;\n\nconst MAX_N_BUFFERS: usize = 255;\n\nconst N_MILLISECONDS_WAIT: u64 = 10;\n\n\n\n// ask for the port\n", "file_path": "src/lib_client.rs", "rank": 76, "score": 16.438323482168514 }, { "content": " match get_input() {\n\n Ok(s) => {\n\n name = s.trim().to_string();\n\n cont = false\n\n },\n\n Err(_) => println!(\"Could not parse the input\")\n\n };\n\n }\n\n }\n\n\n\n send_str_to_server(&mut stream, &name)?;\n\n println!(\"Sent the name to server; awaiting reply...\");\n\n \n\n let mut buffer: [u8; 1] = [0];\n\n stream.read_exact(&mut buffer)?;\n\n match buffer[0] {\n\n 1 => {\n\n match get_str_from_server(&mut stream) {\n\n Ok(s) => {\n\n \n", "file_path": "src/lib_client.rs", "rank": 77, "score": 15.209284428443386 }, { "content": " for item in s.trim().split(' ') {\n\n match item.parse::<usize>() {\n\n Ok(n) => {\n\n if n <= n_hand {\n\n let mut n_i = 0;\n\n for &i in &seq_i_hand {\n\n if i < n {\n\n n_i += 1;\n\n }\n\n }\n\n let card = match hand.take_card(n-n_i) {\n\n Some(c) => c,\n\n None => continue\n\n };\n\n seq.add_card(card);\n\n seq_i_hand.push(n);\n\n } else {\n\n let m = n - n_hand;\n\n let mut n_i = 0;\n\n for &i in &seq_i_cft {\n", "file_path": "src/lib_server.rs", "rank": 78, "score": 12.893097246402235 }, { "content": " return Ok(Config {\n\n n_decks: 0,\n\n n_jokers: 0,\n\n n_cards_to_start: 0,\n\n custom_rule_jokers: false,\n\n n_players: 0\n\n });\n\n }\n\n \n\n println!(\"Number of jokers (integer between 0 and 255): \");\n\n let mut n_jokers: u8 = 0; \n\n let mut set = false;\n\n while !set {\n\n n_jokers = match get_input()?.trim().parse::<u8>() {\n\n Ok(n) => {\n\n set = true;\n\n n\n\n },\n\n Err(_) => {\n\n println!(\"Invalid input\");\n", "file_path": "src/lib.rs", "rank": 79, "score": 12.890621657155709 }, { "content": "//! 
Library for the game server\n\n\n\npub use super::*;\n\npub use std::io::{ stdin, Read, Write };\n\npub use std::net::{ TcpListener, TcpStream, Shutdown };\n\npub use std::str::from_utf8;\n\npub use std::sync::{ Arc, Mutex };\n\nuse std::string::FromUtf8Error;\n\n\n\nconst BUFFER_SIZE: usize = 50;\n\nconst MAX_N_BUFFERS: usize = 255;\n\nconst N_MILLISECONDS_WAIT: u64 = 10;\n\nconst N_MILLISECONDS_LONG_WAIT: u64 = 1000;\n\nconst YES_VALUES: [&str;10] = [\"y\", \"yes\", \"yeah\", \"aye\", \"oui\", \"ja\", \"da\", \"ok\", \"si\", \"sim\"];\n\n\n\n/// check if a string is a synonym of ‘yes’\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use machiavelli::lib_server::is_yes;\n\n///\n\n/// let example_yes = &\"ja\";\n\n/// let example_no = &\"nein\";\n\n///\n\n/// assert!(is_yes(example_yes));\n\n/// assert!(!is_yes(example_no));\n\n/// ```\n", "file_path": "src/lib_server.rs", "rank": 80, "score": 12.744546631312904 }, { "content": " let n_hand = hand.number_cards();\n\n while let Some(s) = content.next() {\n\n match s.parse::<usize>() {\n\n Ok(n) => {\n\n if n <= n_hand {\n\n let mut n_i = 0;\n\n for &i in &seq_i_hand {\n\n if i < n {\n\n n_i += 1;\n\n }\n\n }\n\n let card = match hand.take_card(n-n_i) {\n\n Some(c) => c,\n\n None => continue\n\n };\n\n seq_from_hand.add_card(card);\n\n seq_i_hand.push(n);\n\n } else {\n\n let m = n - n_hand;\n\n let mut n_i = 0;\n", "file_path": "src/lib_server.rs", "rank": 81, "score": 12.738389784415812 }, { "content": " Err(_) => return Err(InvalidInputError {})\n\n };\n\n }\n\n \n\n println!(\"Custom rule—jokers must be played immediately (y/n): \");\n\n let custom_rule_jokers = match get_input()?.trim() {\n\n \"y\" => true,\n\n _ => false\n\n };\n\n \n\n println!(\"Number of players: \");\n\n let mut n_players = 0;\n\n while n_players == 0 {\n\n n_players = match get_input()?.trim().parse::<u8>() {\n\n Ok(0) => {\n\n println!(\"I need at least one player!\");\n\n 0\n\n }\n\n Ok(n) => n,\n\n Err(_) => {\n", "file_path": "src/lib.rs", "rank": 82, "score": 12.716698182006903 }, { "content": " }\n\n true\n\n }\n\n\n\n /// check if the sequence is a valid one with a unique suit\n\n fn is_valid_sequence_same_suit(&mut self) -> bool {\n\n let mut jokers = self.take_jokers();\n\n let mut common_suit = Club;\n\n let mut current_value: u8 = 0;\n\n let mut di: usize = 0;\n\n for i in 0..self.0.len() {\n\n let card = &self.0[i+di];\n\n match card {\n\n RegularCard(suit, value) => {\n\n if current_value == 0 {\n\n common_suit = *suit;\n\n current_value = *value;\n\n } else {\n\n if *suit != common_suit {\n\n self.merge(jokers);\n", "file_path": "src/sequence_cards.rs", "rank": 83, "score": 10.59376451039946 }, { "content": " while let Some(card) = seq.draw_card() {\n\n self.add_card(card);\n\n }\n\n }\n\n\n\n /// Build a randomly-shuffled deck of cards\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `n_decks`: the number of copies of a full deck of 52 cards\n\n /// * `n_jokers`: the number of jokers\n\n /// * `rng`: mutable reference to the random-number generator used foor shuffling\n\n ///\n\n /// # Example\n\n ///\n\n /// ```\n\n /// use rand::thread_rng;\n\n /// use machiavelli::sequence_cards::Sequence;\n\n ///\n\n /// let mut rng = thread_rng();\n", "file_path": "src/sequence_cards.rs", "rank": 84, "score": 10.546665316400428 }, { "content": " Ok(card) => message = format!(\"You picked a {}{}\\n\", &card, &reset_style_string()),\n\n Err(_) => message = \"No more card to draw!\\n\".to_string()\n\n };\n\n match *sort_mode {\n\n 1 => 
hands[current_player].sort_by_rank(),\n\n 2 => hands[current_player].sort_by_suit(),\n\n _ => ()\n\n }\n\n return Ok(Some(message));\n\n } else {\n\n break\n\n }\n\n },\n\n \n\n // value 'p': play a sequence\n\n 112 => {\n\n match play_sequence_remote(&mut hands[current_player], &mut cards_from_table,\n\n table, &mes[1..]) {\n\n Ok(None) => {\n\n \n", "file_path": "src/lib_server.rs", "rank": 85, "score": 10.137727853811112 }, { "content": " match int_to_suit((x-1) / MAX_VAL + 1) { \n\n Some(suit) => Some(RegularCard(suit, val)),\n\n None => None\n\n }\n\n }\n\n\n\n fn to_byte(&self) -> u8 {\n\n match self {\n\n Joker => 0,\n\n RegularCard(suit, value) => (suit_to_int(*suit)-1) * MAX_VAL + value\n\n }\n\n }\n\n\n\n}\n\n\n\nimpl fmt::Display for Card {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n RegularCard(suit, val) => {\n\n let str_val = match val {\n", "file_path": "src/sequence_cards.rs", "rank": 86, "score": 9.975852860463474 }, { "content": " \n\n /// check if the sequence contains only jokers\n\n fn has_only_jokers(&self) -> bool {\n\n for card in &self.0 {\n\n if *card != Joker {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n // /// count the number of jokers in the sequence\n\n // fn n_jokers(&self) -> u8 {\n\n // let mut res = 0;\n\n // for card in &self.0 {\n\n // match card {\n\n // Joker => res += 1,\n\n // _ => ()\n\n // };\n\n // }\n", "file_path": "src/sequence_cards.rs", "rank": 87, "score": 9.725404549129788 }, { "content": "\n\n /// determine if the sequence contains another one\n\n pub fn contains(&self, seq: &Sequence) -> bool {\n\n let count_rhs = seq.count_cards();\n\n let count_self = self.count_cards();\n\n for (card, count) in count_rhs {\n\n if !count_self.contains_key(&card) {\n\n return false;\n\n }\n\n if count_self[&card] < count {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n /// randomly shuffle the sequence\n\n fn shuffle(&mut self, rng: &mut ThreadRng) {\n\n self.0.shuffle(rng);\n\n }\n", "file_path": "src/sequence_cards.rs", "rank": 88, "score": 9.523871470373106 }, { "content": " /// # Example\n\n ///\n\n /// ```\n\n /// use machiavelli::sequence_cards::{ Sequence, Card::* , Suit::*};\n\n ///\n\n /// let cards = [\n\n /// RegularCard(Heart, 1),\n\n /// Joker, \n\n /// RegularCard(Heart, 3),\n\n /// ];\n\n /// let mut sequence = Sequence::from_cards(&cards);\n\n ///\n\n /// assert_eq!(sequence.is_valid(), true);\n\n /// ```\n\n pub fn is_valid(&mut self) -> bool {\n\n \n\n if self.0.len() == 0 {\n\n return false;\n\n }\n\n \n", "file_path": "src/sequence_cards.rs", "rank": 89, "score": 9.450106833417804 }, { "content": " match take_sequence_remote(table, &mut cards_from_table, &mes[1..], \n\n &mut streams[current_player]) {\n\n Ok(()) => {\n\n\n\n // print the new situation for the current player\n\n print_situation_remote(&table, &hands, deck, player_names, \n\n current_player, current_player, \n\n &mut streams[current_player], true, &cards_from_table,\n\n false, cards_from_table.number_cards() > 0)?;\n\n\n\n // print the new situation for the other players\n\n for i in 0..n_players {\n\n if i != current_player {\n\n print_situation_remote(&table, &hands, deck, player_names, \n\n i, current_player, &mut streams[i],\n\n false, &cards_from_table, false, false)?;\n\n match &previous_messages[i] {\n\n Some(s) => send_message_to_client(&mut streams[i], &s).unwrap(),\n\n None => ()\n\n };\n", "file_path": "src/lib_server.rs", "rank": 90, "score": 9.249713997205886 }, { "content": " },\n\n None => {\n\n position 
= i;\n\n stream.write(&[1])?;\n\n let msg = format!(\"Hello {}!\\nWaiting for other players to join...\", &s);\n\n send_str_to_client(&mut stream, &msg)?;\n\n lock.push(player_name.clone());\n\n break;\n\n }\n\n }\n\n },\n\n None => {\n\n stream.write(&[0])?;\n\n let msg = format!(\"Sorry, {} is not in the list of players!\\n\", &s);\n\n send_str_to_client(&mut stream, &msg)?;\n\n }\n\n }\n\n\n\n },\n\n Err(_)=> {\n\n println!(\"An error occured while reading the stream; terminating connection with {}\", \n\n stream.peer_addr()?);\n\n stream.shutdown(Shutdown::Both)?;\n\n }\n\n };\n\n }\n\n Ok((stream, player_name, position))\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 91, "score": 9.148067709353993 }, { "content": " }\n\n }\n\n },\n\n\n\n Err(_) => send_message_to_client(&mut streams[current_player], &\"Communication error\\n\")?\n\n };\n\n },\n\n \n\n // value 'a': add cards to a sequence already on the table\n\n 97 => {\n\n match add_to_table_sequence_remote(table, &mut hands[current_player], \n\n &mut cards_from_table, &mes[1..]) {\n\n Ok(None) => {\n\n\n\n // print the new situation for the current player\n\n print_situation_remote(&table, &hands, deck, player_names, \n\n current_player, current_player, \n\n &mut streams[current_player], true, &cards_from_table,\n\n !hands[current_player].contains(&hand_start_round),\n\n cards_from_table.number_cards() > 0)?;\n", "file_path": "src/lib_server.rs", "rank": 92, "score": 8.862020487060537 }, { "content": " wait_for_reconnection(&mut streams[current_player], &player_names[current_player], port)?;\n\n println!(\"Player {} is back\", current_player + 1);\n\n print_situation_remote(&table, &hands, deck, player_names, current_player,\n\n current_player, &mut streams[current_player],\n\n true, &cards_from_table, \n\n !hands[current_player].contains(&hand_start_round),\n\n cards_from_table.number_cards() > 0)?;\n\n send_message_all_players(\n\n streams,\n\n &format!(\"{} is back!\\n\", \n\n &player_names[current_player])\n\n );\n\n }\n\n };\n\n }\n\n Ok(None)\n\n}\n\n\n", "file_path": "src/lib_server.rs", "rank": 93, "score": 8.818927736933524 }, { "content": " 0\n\n }\n\n };\n\n }\n\n \n\n println!(\"Number of cards to start with (integer): \");\n\n let mut n_cards_to_start: u16 = 0;\n\n while n_cards_to_start == 0 {\n\n n_cards_to_start = match get_input()?.trim().parse::<u16>() {\n\n Ok(n) => {\n\n let mut res = 0;\n\n if n==0 {\n\n println!(\"You need to start with at least one card\");\n\n } else if n > ((52 * (n_decks as u16)) + (n_jokers as u16)) {\n\n println!(\"You can't draw more cards than there are in the deck\");\n\n } else {\n\n res = n;\n\n }\n\n res\n\n },\n", "file_path": "src/lib.rs", "rank": 94, "score": 8.724581029784627 }, { "content": " // set the terminal appearance\n\n reset_style();\n\n\n\n // clear the terminal\n\n clear_terminal();\n\n\n\n // print the message sent by the server\n\n println!(\"{}\", s);\n\n }\n\n Err(e) => {\n\n println!(\"Failed to receive data: {}\", e);\n\n }\n\n }\n\n break;\n\n },\n\n 2 => {\n\n match get_str_from_server(&mut stream) {\n\n Ok(s) => { \n\n // print the message sent by the server\n\n println!(\"{}\", s);\n", "file_path": "src/lib_client.rs", "rank": 95, "score": 8.679374520689297 }, { "content": " let mut message: String;\n\n loop {\n\n match get_message_from_client(&mut streams[current_player]) {\n\n Ok(mes) => {\n\n if mes.len() == 0 {\n\n ()\n\n } else {\n\n match mes[0] {\n\n \n\n // value 'e': end the turn\n\n 101 => {\n\n if cards_from_table.number_cards() != 0 
{\n\n message = \"You can't end your turn until you've played all the cards you've taken from the table!\\n\"\n\n .to_string();\n\n send_message_to_client(&mut streams[current_player], &message)?;\n\n } else if custom_rule_jokers && hands[current_player].contains_joker() {\n\n message = \"Jokers must be played!\\n\".to_string();\n\n send_message_to_client(&mut streams[current_player], &message)?;\n\n } else if hands[current_player].contains(&hand_start_round) {\n\n match pick_a_card(&mut hands[current_player], deck) {\n", "file_path": "src/lib_server.rs", "rank": 96, "score": 8.389656624646245 }, { "content": "\n\n // print the new situation for the other players\n\n for i in 0..n_players {\n\n if i != current_player {\n\n print_situation_remote(&table, &hands, deck, player_names, \n\n i, current_player, &mut streams[i],\n\n false, &cards_from_table, false, false)?;\n\n match &previous_messages[i] {\n\n Some(s) => send_message_to_client(&mut streams[i], &s).unwrap(),\n\n None => ()\n\n };\n\n }\n\n }\n\n \n\n // if the player has no more card, end the turn \n\n if hands[current_player].number_cards() == 0 {\n\n break;\n\n }\n\n },\n\n Ok(Some(s)) => {\n", "file_path": "src/lib_server.rs", "rank": 97, "score": 8.237231915274204 }, { "content": " }\n\n if n_buffers > 0 {\n\n stream.write(&bytes[((n_buffers-1) as usize)*BUFFER_SIZE..])?;\n\n }\n\n\n\n // wait for a reply to be sent from the receiver\n\n while let Err(_) = stream.read_exact(&mut [0]) {}\n\n \n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib_client.rs", "rank": 98, "score": 8.007165717445002 }, { "content": " send_str_to_client(&mut new_stream, \n\n &reset_style_string()).unwrap_or(());\n\n *stream = new_stream;\n\n break;\n\n } else {\n\n new_stream.write(&[2]).unwrap_or(1);\n\n send_str_to_client(&mut new_stream, \n\n &\"Sorry; you're not the player we're expecting\\n\").unwrap_or(());\n\n new_stream.write(&[5]).unwrap_or(1);\n\n }\n\n },\n\n _ => ()\n\n }\n\n },\n\n _ => ()\n\n };\n\n }\n\n Ok(())\n\n} \n\n\n", "file_path": "src/lib_server.rs", "rank": 99, "score": 7.927347891574086 } ]
Rust
src/lib.rs
tstellanova/ist8310
851f9767759073520eb538fe6987e51c12b53c1b
/* Copyright (c) 2020 Todd Stellanova LICENSE: BSD3 (see LICENSE file) */ #![no_std] use embedded_hal as hal; use hal::blocking::delay::DelayMs; #[derive(Debug)] pub enum Error<CommE> { Comm(CommE), OutOfRange, Configuration, UnknownChipId, } pub const ADDR_0_0_7BIT:u8 = 0x0C; pub const ADDR_0_1_7BIT:u8 = 0x0D; pub const ADDR_1_0_7BIT:u8 = 0x0E; pub const ADDR_1_1_7BIT:u8 = 0x0F; pub const ADDR_7BIT_DEFAULT:u8 = 0x0E; pub const ADDR_0_0_8BIT:u8 = 0x18; pub const ADDR_0_1_8BIT:u8 = 0x1A; pub const ADDR_1_0_8BIT:u8 = 0x1C; pub const ADDR_1_1_8BIT:u8 = 0x1E; pub const ADDR_8BIT_DEFAULT:u8 = 0x1C; pub const DEFAULT_ADDRESS:u8 = ADDR_7BIT_DEFAULT; pub const REG_WAI:u8 = 0x00; const REG_DATA_X:u8 = 0x03; const REG_MAG_DATA_START:u8 = REG_DATA_X; const REG_CTRL1: u8 = 0x0A; pub const REG_CTRL2: u8 = 0x0B; pub const REG_AVG_CTRL:u8 = 0x41; pub const REG_SENS_MODE_SELECT:u8 = 0x42; const AVG_CTRL_16X: u8 = 0x24; const SRPD_MODE_LOW_POWER: u8 = 0xC0; const BLOCK_BUF_LEN: usize = 32; pub struct IST8310<I2C> { i2c_port: I2C, address: u8, block_buf: [u8; BLOCK_BUF_LEN], avg_ctrl_reg_set: u8, srpd_ctrl_reg_set: u8, } impl<I2C, CommE> IST8310<I2C> where I2C: hal::blocking::i2c::Write<Error = CommE> + hal::blocking::i2c::Read<Error = CommE> + hal::blocking::i2c::WriteRead<Error = CommE>, { pub fn default(i2c: I2C) -> Result<Self, Error<CommE>> { Self::new(i2c, DEFAULT_ADDRESS) } pub fn new(i2c_port: I2C, address: u8) -> Result<Self, Error<CommE>> { let mut inst = Self { i2c_port, address, block_buf: [0; BLOCK_BUF_LEN], avg_ctrl_reg_set: 0, srpd_ctrl_reg_set: 0, }; inst.reset()?; Ok(inst) } fn reset(&mut self) -> Result<(), Error<CommE>> { const SRST_POR_FLAG: u8 = 0x01 << 0; const EXPECTED_PROD_ID:u8 = 0x10; self.write_reg(REG_CTRL2, SRST_POR_FLAG)?; self.avg_ctrl_reg_set = AVG_CTRL_16X; self.write_reg(REG_AVG_CTRL, self.avg_ctrl_reg_set)?; self.srpd_ctrl_reg_set = SRPD_MODE_LOW_POWER; self.write_reg(REG_SENS_MODE_SELECT, self.srpd_ctrl_reg_set)?; let product_id = self.read_reg(REG_WAI)?; if product_id != EXPECTED_PROD_ID { return Err(Error::UnknownChipId) } Ok(()) } fn read_block(&mut self, reg: u8, recv_count: usize) -> Result<(), Error<CommE>> { let cmd_buf = [reg]; self.i2c_port .write_read(self.address, &cmd_buf, &mut self.block_buf[..recv_count]) .map_err(Error::Comm)?; Ok(()) } fn read_reg(&mut self, reg: u8 ) -> Result<u8, Error<CommE>> { self.read_block(reg,1)?; Ok(self.block_buf[0]) } fn write_reg(&mut self, reg: u8, val: u8) -> Result<(), Error<CommE>> { self.block_buf[0] = reg; self.block_buf[1] = val; self.i2c_port .write(self.address, &self.block_buf[..2]) .map_err(Error::Comm)?; Ok(()) } fn reading_in_range(sample: &[i16; 3]) -> bool { const MDR_XY_AXES: i16 = 1600; const MDR_Z_AXIS: i16 = 2500; const RESO_PER_BIT: f32 = 0.3; const MAX_VAL_XY: i16 = (((MDR_XY_AXES as f32) / RESO_PER_BIT) as i16) + 1; const MAX_VAL_Z: i16 = (((MDR_Z_AXIS as f32) / RESO_PER_BIT) as i16) + 1; sample[0].abs() < MAX_VAL_XY && sample[1].abs() < MAX_VAL_XY && sample[2].abs() < MAX_VAL_Z } fn raw_reading_to_i16(buf: &[u8], idx: usize) -> i16 { let val: i16 = (buf[idx] as i16) | ((buf[idx+1] as i16) << 8) ; val } pub fn get_mag_vector(&mut self, delay_source: &mut impl DelayMs<u8>) -> Result<[i16; 3], Error<CommE>> { const SINGLE_MEASURE_MODE: u8 = 0x01; const XYZ_DATA_LEN: usize = 6; self.write_reg(REG_CTRL1, SINGLE_MEASURE_MODE)?; delay_source.delay_ms(6); self.read_block(REG_MAG_DATA_START, XYZ_DATA_LEN)?; let sample_i16 = [ Self::raw_reading_to_i16(&self.block_buf, 0), Self::raw_reading_to_i16(&self.block_buf, 
2), Self::raw_reading_to_i16(&self.block_buf, 4) ]; if !Self::reading_in_range(&sample_i16) { return Err(Error::OutOfRange) } Ok(sample_i16) } }
/* Copyright (c) 2020 Todd Stellanova LICENSE: BSD3 (see LICENSE file) */ #![no_std] use embedded_hal as hal; use hal::blocking::delay::DelayMs; #[derive(Debug)] pub enum Error<CommE> { Comm(CommE), OutOfRange, Configuration, UnknownChipId, } pub const ADDR_0_0_7BIT:u8 = 0x0C; pub const ADDR_0_1_7BIT:u8 = 0x0D; pub const ADDR_1_0_7BIT:u8 = 0x0E; pub const ADDR_1_1_7BIT:u8 = 0x0F; pub const ADDR_7BIT_DEFAULT:u8 = 0x0E; pub const ADDR_0_0_8BIT:u8 = 0x18; pub const ADDR_0_1_8BIT:u8 = 0x1A; pub const ADDR_1_0_8BIT:u8 = 0x1C; pub const ADDR_1_1_8BIT:u8 = 0x1E; pub const ADDR_8BIT_DEFAULT:u8 = 0x1C; pub const DEFAULT_ADDRESS:u8 = ADDR_7BIT_DEFAULT; pub const REG_WAI:u8 = 0x00; const REG_DATA_X:u8 = 0x03; const REG_MAG_DATA_START:u8 = REG_DATA_X; const REG_CTRL1: u8 = 0x0A; pub const REG_CTRL2: u8 = 0x0B; pub const REG_AVG_CTRL:u8 = 0x41; pub const REG_SENS_MODE_SELECT:u8 = 0x42; const AVG_CTRL_16X: u8 = 0x24; const SRPD_MODE_LOW_POWER: u8 = 0xC0; const BLOCK_BUF_LEN: usize = 32; pub struct IST8310<I2C> { i2c_port: I2C, address: u8, block_buf: [u8; BLOCK_BUF_LEN], avg_ctrl_reg_set: u8, srpd_ctrl_reg_set: u8, } impl<I2C, CommE> IST8310<I2C> where I2C: hal::blocking::i2c::Write<Error = CommE> + hal::blocking::i2c::Read<Error = CommE> + hal::blocking::i2c::WriteRead<Error = CommE>, { pub fn default(i2c: I2C) -> Result<Self, Error<CommE>> { Self::new(i2c, DEFAULT_ADDRESS) } pub fn new(i2c_port: I2C, address: u8) -> Result<Self, Error<CommE>> { let mut inst = Self { i2c_port, address, block_buf: [0; BLOCK_BUF_LEN], avg_ctrl_reg_set: 0, srpd_ctrl_reg_set: 0, }; inst.reset()?; Ok(inst) } fn reset(&mut self) -> Result<(), Error<CommE>> { const SRST_POR_FLAG: u8 = 0x01 << 0; const EXPECTED_PROD_ID:u8 = 0x10; self.write_reg(REG_CTRL2, SRST_POR_FLAG)?; self.avg_ctrl_reg_set = AVG_CTRL_16X; self.write_reg(REG_AVG_CTRL, self.avg_ctrl_reg_set)?; self.srpd_ctrl_reg_set = SRPD_MODE_LOW_POWER; self.write_reg(REG_SENS_MODE_SELECT, self.srpd_ctrl_reg_set)?; let product_id = self.read_reg(REG_WAI)?; if product_id != EXPECTED_PROD_ID { return Err(Error::UnknownChipId) } Ok(()) } fn read_block(&mut self, reg: u8, recv_count: usize) -> Result<(), Error<CommE>> { let cmd_buf = [reg]; self.i2c_port .write_read(self.address, &cmd_buf, &mut self.block_buf[..recv_count]) .map_err(Error::Comm)?; Ok(()) } fn read_reg(&mut self, reg: u8 ) -> Result<u8, Error<CommE>> { self.read_block(reg,1)?; Ok(self.block_buf[0]) } fn write_reg(&mut self, reg: u8, val: u8) -> Result<(), Error<CommE>> { self.block_buf[0] = reg; self.block_buf[1] = val; self.i2c_port .write(self.address, &self.block_buf[..2]) .map_err(Error::Comm)?; Ok(()) } fn reading_in_range(sample: &[i16; 3]) -> bool { const MDR_XY_AXES: i16 = 1600; const MDR_Z_AXIS: i16 = 2500; const RESO_PER_BIT: f32 = 0.3; const MAX_VAL_XY: i16 = (((MDR_XY_AXES as f32) / RESO_PER_BIT) as i16) + 1; const MAX_VAL_Z: i16 = (((MDR_Z_AXIS as f32) / RESO_PER_BIT) as i16) + 1; sample[0].abs() < MAX_VAL_XY && sample[1].abs() < MAX_VAL_XY && sample[2].abs() < MAX_VAL_Z } fn raw_reading_to_i16(buf: &[u8], idx: usize) -> i16 { let val: i16 = (buf[idx] as i16) | ((buf[idx+1] as i16) << 8) ; val }
}
pub fn get_mag_vector(&mut self, delay_source: &mut impl DelayMs<u8>) -> Result<[i16; 3], Error<CommE>> { const SINGLE_MEASURE_MODE: u8 = 0x01; const XYZ_DATA_LEN: usize = 6; self.write_reg(REG_CTRL1, SINGLE_MEASURE_MODE)?; delay_source.delay_ms(6); self.read_block(REG_MAG_DATA_START, XYZ_DATA_LEN)?; let sample_i16 = [ Self::raw_reading_to_i16(&self.block_buf, 0), Self::raw_reading_to_i16(&self.block_buf, 2), Self::raw_reading_to_i16(&self.block_buf, 4) ]; if !Self::reading_in_range(&sample_i16) { return Err(Error::OutOfRange) } Ok(sample_i16) }
function_block-full_function
[ { "content": "#[test]\n\nfn test_init() {\n\n const SRST_POR_FLAG: u8 = 0x01 << 0;\n\n const SRPD_MODE_LOW_POWER: u8 = 0xC0;\n\n const AVG_CTRL_16X: u8 = 0x24;\n\n\n\n let addr = ist8310::DEFAULT_ADDRESS;\n\n\n\n // Configure expectations\n\n let expectations = [\n\n I2cTransaction::write(addr, vec![ist8310::REG_CTRL2, SRST_POR_FLAG]),\n\n I2cTransaction::write(addr, vec![ist8310::REG_AVG_CTRL, AVG_CTRL_16X]),\n\n I2cTransaction::write(addr, vec![ist8310::REG_SENS_MODE_SELECT, SRPD_MODE_LOW_POWER]),\n\n I2cTransaction::write_read(addr, vec![ist8310::REG_WAI], vec![0x10]),\n\n ];\n\n\n\n let i2c_port = I2cMock::new(&expectations);\n\n let sensor_res = IST8310::default(i2c_port);\n\n assert!(sensor_res.is_ok());\n\n\n\n}", "file_path": "tests/integration_tests.rs", "rank": 0, "score": 14024.652800147574 }, { "content": "\n\nuse ist8310::IST8310;\n\n\n\n\n\n// use embedded_hal::prelude::*;\n\n// use embedded_hal::blocking::i2c::{Read, Write, WriteRead};\n\nuse embedded_hal_mock::i2c::{Mock as I2cMock, Transaction as I2cTransaction};\n\n\n\n\n\n#[test]\n", "file_path": "tests/integration_tests.rs", "rank": 12, "score": 5.710807453557022 }, { "content": "# ist8310\n\n\n\nA rust embedded-hal driver for the \n\nIsentek IST8310 \n\n3-axis magnetometer.\n\n\n\nThis sensor is claimed to be pin-compatible with the obsolete\n\nHoneywell HMC5883 magnetometer. \n\nLike the HMC5883, the IST8310 only supports an I2C interface.\n\n\n\nThe register map of the IST8310 differs significantly from\n\nthe HMC5883, and the reset and configuration sequence is different.\n\n\n\n## Status\n\n\n\n- [x] Basic i2c setup support\n\n- [x] read of main xyz magentometer vector\n\n- [ ] Tests with mock embedded hal\n\n- [ ] Periodic configuration check (for poor i2c connections)\n\n- [ ] Usage example with `cortex-m` hal\n\n- [ ] Doc comments\n\n- [ ] CI\n\n- [ ] support for cross-axis flow calibration\n\n\n\n\n\n\n\n\n\n\n", "file_path": "README.md", "rank": 14, "score": 4.558202126623536 } ]
Rust
src/server/rpc/client.rs
gavento/rain
9372c66d82180ecae12af065a81631565c0d40dc
use capnp::capability::Promise; use std::net::SocketAddr; use futures::{future, Future}; use common::resources::Resources; use common::id::{DataObjectId, SId, TaskId}; use common::convert::{FromCapnp, ToCapnp}; use client_capnp::client_service; use server::state::StateRef; use server::graph::{ClientRef, DataObjectRef, SessionError, TaskInput, TaskRef}; use errors::{Error, ErrorKind, Result}; use common::Attributes; use common::RcSet; use server::rpc::ClientDataStoreImpl; use common::events::{ObjectDescriptor, TaskDescriptor}; pub struct ClientServiceImpl { state: StateRef, client: ClientRef, } impl ClientServiceImpl { pub fn new(state: &StateRef, address: &SocketAddr) -> Result<Self> { Ok(Self { state: state.clone(), client: state.get_mut().add_client(address.clone())?, }) } } impl Drop for ClientServiceImpl { fn drop(&mut self) { let mut s = self.state.get_mut(); info!("Client {} disconnected", self.client.get_id()); s.remove_client(&self.client) .expect("client connection drop"); } } impl client_service::Server for ClientServiceImpl { fn get_server_info( &mut self, _: client_service::GetServerInfoParams, mut results: client_service::GetServerInfoResults, ) -> Promise<(), ::capnp::Error> { debug!("Client asked for info"); let s = self.state.get(); let futures: Vec<_> = s.graph .workers .iter() .map(|(worker_id, worker)| { let w = worker.get(); let control = w.control.as_ref().unwrap(); let worker_id = worker_id.clone(); let resources = w.resources.clone(); control .get_info_request() .send() .promise .map(move |r| (worker_id, r, resources)) }) .collect(); Promise::from_future(future::join_all(futures).map(move |rs| { let results = results.get(); let mut workers = results.init_workers(rs.len() as u32); for (i, &(ref worker_id, ref r, ref resources)) in rs.iter().enumerate() { let mut w = workers.borrow().get(i as u32); let r = r.get().unwrap(); w.set_tasks(r.get_tasks().unwrap()).unwrap(); w.set_objects(r.get_objects().unwrap()).unwrap(); w.set_objects_to_delete(r.get_objects_to_delete().unwrap()) .unwrap(); resources.to_capnp(&mut w.borrow().get_resources().unwrap()); worker_id.to_capnp(&mut w.get_worker_id().unwrap()); } () })) } fn new_session( &mut self, _: client_service::NewSessionParams, mut results: client_service::NewSessionResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let session = pry!(s.add_session(&self.client)); results.get().set_session_id(session.get_id()); debug!("Client asked for a new session, got {:?}", session.get_id()); Promise::ok(()) } fn close_session( &mut self, params: client_service::CloseSessionParams, _: client_service::CloseSessionResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let mut s = self.state.get_mut(); let session = pry!(s.session_by_id(params.get_session_id())); s.remove_session(&session).unwrap(); Promise::ok(()) } fn submit( &mut self, params: client_service::SubmitParams, _: client_service::SubmitResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let params = pry!(params.get()); let tasks = pry!(params.get_tasks()); let objects = pry!(params.get_objects()); info!( "New task submission ({} tasks, {} data objects) from client {}", tasks.len(), objects.len(), self.client.get_id() ); debug!("Sessions: {:?}", s.graph.sessions); let mut created_tasks = Vec::<TaskRef>::new(); let mut created_objects = Vec::<DataObjectRef>::new(); let res: Result<()> = (|| { for co in objects.iter() { let id = DataObjectId::from_capnp(&co.borrow().get_id()?); let session = 
s.session_by_id(id.get_session_id())?; let data = if co.get_has_data() { Some(co.get_data()?.into()) } else { None }; let attributes = Attributes::from_capnp(&co.get_attributes()?); let o = s.add_object( &session, id, co.get_keep(), co.get_label()?.to_string(), data, attributes, )?; created_objects.push(o); } for ct in tasks.iter() { let id = TaskId::from_capnp(&ct.get_id()?); let session = s.session_by_id(id.get_session_id())?; let attributes = Attributes::from_capnp(&ct.get_attributes().unwrap()); let resources: Resources = attributes.get("resources")?; let mut inputs = Vec::<TaskInput>::new(); for ci in ct.get_inputs()?.iter() { inputs.push(TaskInput { object: s.object_by_id(DataObjectId::from_capnp(&ci.get_id()?))?, label: ci.get_label()?.into(), path: ci.get_path()?.into(), }); } let mut outputs = Vec::<DataObjectRef>::new(); for co in ct.get_outputs()?.iter() { outputs.push(s.object_by_id(DataObjectId::from_capnp(&co))?); } let t = s.add_task( &session, id, inputs, outputs, ct.get_task_type()?.to_string(), attributes, resources, )?; created_tasks.push(t); } debug!("New tasks: {:?}", created_tasks); debug!("New objects: {:?}", created_objects); s.logger.add_client_submit_event( created_tasks .iter() .map(|t| TaskDescriptor::from(&t.get())) .collect(), created_objects .iter() .map(|o| ObjectDescriptor::from(&o.get())) .collect(), ); s.verify_submit(&created_tasks, &created_objects) })(); if res.is_err() { debug!("Error: {:?}", res); for t in created_tasks { pry!(s.remove_task(&t)); } for o in created_objects { pry!(s.remove_object(&o)); } pry!(res); } Promise::ok(()) } fn get_data_store( &mut self, _params: client_service::GetDataStoreParams, mut results: client_service::GetDataStoreResults, ) -> Promise<(), ::capnp::Error> { debug!("server data store requested from client"); let datastore = ::datastore_capnp::data_store::ToClient::new(ClientDataStoreImpl::new( &self.state, )).from_server::<::capnp_rpc::Server>(); results.get().set_store(datastore); Promise::ok(()) } fn wait( &mut self, params: client_service::WaitParams, mut result: client_service::WaitResults, ) -> Promise<(), ::capnp::Error> { fn set_error(result: &mut ::common_capnp::unit_result::Builder, error: &SessionError) { error.to_capnp(&mut result.borrow().init_error()); } let s = self.state.get_mut(); let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New wait request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); if task_ids.len() == 1 && object_ids.len() == 0 && task_ids.get(0).get_id() == ::common_capnp::ALL_TASKS_ID { let session_id = task_ids.get(0).get_session_id(); debug!("Waiting for all session session_id={}", session_id); let session = match s.session_by_id(session_id) { Ok(s) => s, Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; if let &Some(ref e) = session.get().get_error() { set_error(&mut result.get(), e); return Promise::ok(()); } let session2 = session.clone(); return Promise::from_future(session.get_mut().wait().then(move |r| { match r { Ok(_) => result.get().set_ok(()), Err(_) => { set_error( &mut result.get(), session2.get().get_error().as_ref().unwrap(), ); } }; Ok(()) })); } let mut sessions = RcSet::new(); let mut task_futures = Vec::new(); for id in task_ids.iter() { match s.task_by_id_check_session(TaskId::from_capnp(&id)) { Ok(t) => { let mut task = t.get_mut(); sessions.insert(task.session.clone()); if task.is_finished() { continue; } 
task_futures.push(task.wait()); } Err(Error(ErrorKind::SessionErr(ref e), _)) => { set_error(&mut result.get(), e); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; } debug!("{} waiting futures", task_futures.len()); if task_futures.is_empty() { result.get().set_ok(()); return Promise::ok(()); } Promise::from_future(::futures::future::join_all(task_futures).then(move |r| { match r { Ok(_) => result.get().set_ok(()), Err(_) => { let session = sessions.iter().find(|s| s.get().is_failed()).unwrap(); set_error( &mut result.get(), session.get().get_error().as_ref().unwrap(), ); } }; Ok(()) })) } fn wait_some( &mut self, params: client_service::WaitSomeParams, _results: client_service::WaitSomeResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New wait_some request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); Promise::err(::capnp::Error::failed( "wait_sone is not implemented yet".to_string(), )) } fn unkeep( &mut self, params: client_service::UnkeepParams, mut results: client_service::UnkeepResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let params = pry!(params.get()); let object_ids = pry!(params.get_object_ids()); debug!( "New unkeep request ({} data objects) from client", object_ids.len() ); let mut objects = Vec::new(); for oid in object_ids.iter() { let id: DataObjectId = DataObjectId::from_capnp(&oid); match s.object_by_id_check_session(id) { Ok(obj) => objects.push(obj), Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; } for o in objects.iter() { s.unkeep_object(&o); } s.logger .add_client_unkeep_event(objects.iter().map(|o| o.get().id).collect()); Promise::ok(()) } fn get_state( &mut self, params: client_service::GetStateParams, mut results: client_service::GetStateResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New get_state request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); let s = self.state.get(); let tasks: Vec<_> = match task_ids .iter() .map(|id| s.task_by_id_check_session(TaskId::from_capnp(&id))) .collect() { Ok(tasks) => tasks, Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().get_state().unwrap().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; let objects: Vec<_> = match object_ids .iter() .map(|id| s.object_by_id_check_session(DataObjectId::from_capnp(&id))) .collect() { Ok(tasks) => tasks, Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().get_state().unwrap().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; let mut results = results.get(); { let mut task_updates = results.borrow().init_tasks(tasks.len() as u32); for (i, task) in tasks.iter().enumerate() { let mut update = task_updates.borrow().get(i as u32); let t = task.get(); t.id.to_capnp(&mut update.borrow().get_id().unwrap()); t.attributes.to_capnp(&mut update.get_attributes().unwrap()); } } { let mut obj_updates = 
results.borrow().init_objects(objects.len() as u32); for (i, obj) in objects.iter().enumerate() { let mut update = obj_updates.borrow().get(i as u32); let o = obj.get(); o.attributes .to_capnp(&mut update.borrow().get_attributes().unwrap()); o.id.to_capnp(&mut update.get_id().unwrap()); } } results.get_state().unwrap().set_ok(()); Promise::ok(()) } }
use capnp::capability::Promise; use std::net::SocketAddr; use futures::{future, Future}; use common::resources::Resources; use common::id::{DataObjectId, SId, TaskId}; use common::convert::{FromCapnp, ToCapnp}; use client_capnp::client_service; use server::state::StateRef; use server::graph::{ClientRef, DataObjectRef, SessionError, TaskInput, TaskRef}; use errors::{Error, ErrorKind, Result}; use common::Attributes; use common::RcSet; use server::rpc::ClientDataStoreImpl; use common::events::{ObjectDescriptor, TaskDescriptor}; pub struct ClientServiceImpl { state: StateRef, client: ClientRef, } impl ClientServiceImpl { pub fn new(state: &StateRef, address: &SocketAddr) -> Result<Self> { Ok(Self { state: state.clone(), client: state.get_mut().add_client(address.clone())?, }) } } impl Drop for ClientServiceImpl {
} impl client_service::Server for ClientServiceImpl { fn get_server_info( &mut self, _: client_service::GetServerInfoParams, mut results: client_service::GetServerInfoResults, ) -> Promise<(), ::capnp::Error> { debug!("Client asked for info"); let s = self.state.get(); let futures: Vec<_> = s.graph .workers .iter() .map(|(worker_id, worker)| { let w = worker.get(); let control = w.control.as_ref().unwrap(); let worker_id = worker_id.clone(); let resources = w.resources.clone(); control .get_info_request() .send() .promise .map(move |r| (worker_id, r, resources)) }) .collect(); Promise::from_future(future::join_all(futures).map(move |rs| { let results = results.get(); let mut workers = results.init_workers(rs.len() as u32); for (i, &(ref worker_id, ref r, ref resources)) in rs.iter().enumerate() { let mut w = workers.borrow().get(i as u32); let r = r.get().unwrap(); w.set_tasks(r.get_tasks().unwrap()).unwrap(); w.set_objects(r.get_objects().unwrap()).unwrap(); w.set_objects_to_delete(r.get_objects_to_delete().unwrap()) .unwrap(); resources.to_capnp(&mut w.borrow().get_resources().unwrap()); worker_id.to_capnp(&mut w.get_worker_id().unwrap()); } () })) } fn new_session( &mut self, _: client_service::NewSessionParams, mut results: client_service::NewSessionResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let session = pry!(s.add_session(&self.client)); results.get().set_session_id(session.get_id()); debug!("Client asked for a new session, got {:?}", session.get_id()); Promise::ok(()) } fn close_session( &mut self, params: client_service::CloseSessionParams, _: client_service::CloseSessionResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let mut s = self.state.get_mut(); let session = pry!(s.session_by_id(params.get_session_id())); s.remove_session(&session).unwrap(); Promise::ok(()) } fn submit( &mut self, params: client_service::SubmitParams, _: client_service::SubmitResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let params = pry!(params.get()); let tasks = pry!(params.get_tasks()); let objects = pry!(params.get_objects()); info!( "New task submission ({} tasks, {} data objects) from client {}", tasks.len(), objects.len(), self.client.get_id() ); debug!("Sessions: {:?}", s.graph.sessions); let mut created_tasks = Vec::<TaskRef>::new(); let mut created_objects = Vec::<DataObjectRef>::new(); let res: Result<()> = (|| { for co in objects.iter() { let id = DataObjectId::from_capnp(&co.borrow().get_id()?); let session = s.session_by_id(id.get_session_id())?; let data = if co.get_has_data() { Some(co.get_data()?.into()) } else { None }; let attributes = Attributes::from_capnp(&co.get_attributes()?); let o = s.add_object( &session, id, co.get_keep(), co.get_label()?.to_string(), data, attributes, )?; created_objects.push(o); } for ct in tasks.iter() { let id = TaskId::from_capnp(&ct.get_id()?); let session = s.session_by_id(id.get_session_id())?; let attributes = Attributes::from_capnp(&ct.get_attributes().unwrap()); let resources: Resources = attributes.get("resources")?; let mut inputs = Vec::<TaskInput>::new(); for ci in ct.get_inputs()?.iter() { inputs.push(TaskInput { object: s.object_by_id(DataObjectId::from_capnp(&ci.get_id()?))?, label: ci.get_label()?.into(), path: ci.get_path()?.into(), }); } let mut outputs = Vec::<DataObjectRef>::new(); for co in ct.get_outputs()?.iter() { outputs.push(s.object_by_id(DataObjectId::from_capnp(&co))?); } let t = s.add_task( &session, id, inputs, outputs, 
ct.get_task_type()?.to_string(), attributes, resources, )?; created_tasks.push(t); } debug!("New tasks: {:?}", created_tasks); debug!("New objects: {:?}", created_objects); s.logger.add_client_submit_event( created_tasks .iter() .map(|t| TaskDescriptor::from(&t.get())) .collect(), created_objects .iter() .map(|o| ObjectDescriptor::from(&o.get())) .collect(), ); s.verify_submit(&created_tasks, &created_objects) })(); if res.is_err() { debug!("Error: {:?}", res); for t in created_tasks { pry!(s.remove_task(&t)); } for o in created_objects { pry!(s.remove_object(&o)); } pry!(res); } Promise::ok(()) } fn get_data_store( &mut self, _params: client_service::GetDataStoreParams, mut results: client_service::GetDataStoreResults, ) -> Promise<(), ::capnp::Error> { debug!("server data store requested from client"); let datastore = ::datastore_capnp::data_store::ToClient::new(ClientDataStoreImpl::new( &self.state, )).from_server::<::capnp_rpc::Server>(); results.get().set_store(datastore); Promise::ok(()) } fn wait( &mut self, params: client_service::WaitParams, mut result: client_service::WaitResults, ) -> Promise<(), ::capnp::Error> { fn set_error(result: &mut ::common_capnp::unit_result::Builder, error: &SessionError) { error.to_capnp(&mut result.borrow().init_error()); } let s = self.state.get_mut(); let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New wait request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); if task_ids.len() == 1 && object_ids.len() == 0 && task_ids.get(0).get_id() == ::common_capnp::ALL_TASKS_ID { let session_id = task_ids.get(0).get_session_id(); debug!("Waiting for all session session_id={}", session_id); let session = match s.session_by_id(session_id) { Ok(s) => s, Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; if let &Some(ref e) = session.get().get_error() { set_error(&mut result.get(), e); return Promise::ok(()); } let session2 = session.clone(); return Promise::from_future(session.get_mut().wait().then(move |r| { match r { Ok(_) => result.get().set_ok(()), Err(_) => { set_error( &mut result.get(), session2.get().get_error().as_ref().unwrap(), ); } }; Ok(()) })); } let mut sessions = RcSet::new(); let mut task_futures = Vec::new(); for id in task_ids.iter() { match s.task_by_id_check_session(TaskId::from_capnp(&id)) { Ok(t) => { let mut task = t.get_mut(); sessions.insert(task.session.clone()); if task.is_finished() { continue; } task_futures.push(task.wait()); } Err(Error(ErrorKind::SessionErr(ref e), _)) => { set_error(&mut result.get(), e); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; } debug!("{} waiting futures", task_futures.len()); if task_futures.is_empty() { result.get().set_ok(()); return Promise::ok(()); } Promise::from_future(::futures::future::join_all(task_futures).then(move |r| { match r { Ok(_) => result.get().set_ok(()), Err(_) => { let session = sessions.iter().find(|s| s.get().is_failed()).unwrap(); set_error( &mut result.get(), session.get().get_error().as_ref().unwrap(), ); } }; Ok(()) })) } fn wait_some( &mut self, params: client_service::WaitSomeParams, _results: client_service::WaitSomeResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New wait_some request ({} tasks, {} data objects) from client", 
task_ids.len(), object_ids.len() ); Promise::err(::capnp::Error::failed( "wait_sone is not implemented yet".to_string(), )) } fn unkeep( &mut self, params: client_service::UnkeepParams, mut results: client_service::UnkeepResults, ) -> Promise<(), ::capnp::Error> { let mut s = self.state.get_mut(); let params = pry!(params.get()); let object_ids = pry!(params.get_object_ids()); debug!( "New unkeep request ({} data objects) from client", object_ids.len() ); let mut objects = Vec::new(); for oid in object_ids.iter() { let id: DataObjectId = DataObjectId::from_capnp(&oid); match s.object_by_id_check_session(id) { Ok(obj) => objects.push(obj), Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; } for o in objects.iter() { s.unkeep_object(&o); } s.logger .add_client_unkeep_event(objects.iter().map(|o| o.get().id).collect()); Promise::ok(()) } fn get_state( &mut self, params: client_service::GetStateParams, mut results: client_service::GetStateResults, ) -> Promise<(), ::capnp::Error> { let params = pry!(params.get()); let task_ids = pry!(params.get_task_ids()); let object_ids = pry!(params.get_object_ids()); info!( "New get_state request ({} tasks, {} data objects) from client", task_ids.len(), object_ids.len() ); let s = self.state.get(); let tasks: Vec<_> = match task_ids .iter() .map(|id| s.task_by_id_check_session(TaskId::from_capnp(&id))) .collect() { Ok(tasks) => tasks, Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().get_state().unwrap().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; let objects: Vec<_> = match object_ids .iter() .map(|id| s.object_by_id_check_session(DataObjectId::from_capnp(&id))) .collect() { Ok(tasks) => tasks, Err(Error(ErrorKind::SessionErr(ref e), _)) => { e.to_capnp(&mut results.get().get_state().unwrap().init_error()); return Promise::ok(()); } Err(e) => return Promise::err(::capnp::Error::failed(e.description().to_string())), }; let mut results = results.get(); { let mut task_updates = results.borrow().init_tasks(tasks.len() as u32); for (i, task) in tasks.iter().enumerate() { let mut update = task_updates.borrow().get(i as u32); let t = task.get(); t.id.to_capnp(&mut update.borrow().get_id().unwrap()); t.attributes.to_capnp(&mut update.get_attributes().unwrap()); } } { let mut obj_updates = results.borrow().init_objects(objects.len() as u32); for (i, obj) in objects.iter().enumerate() { let mut update = obj_updates.borrow().get(i as u32); let o = obj.get(); o.attributes .to_capnp(&mut update.borrow().get_attributes().unwrap()); o.id.to_capnp(&mut update.get_id().unwrap()); } } results.get_state().unwrap().set_ok(()); Promise::ok(()) } }
    fn drop(&mut self) {
        let mut s = self.state.get_mut();
        info!("Client {} disconnected", self.client.get_id());
        s.remove_client(&self.client)
            .expect("client connection drop");
    }
function_block-function_prefixed
[ { "content": "pub fn task_run(state: &mut State, task_ref: TaskRef) -> TaskResult {\n\n let state_ref = state.self_ref();\n\n let config: RunConfig = task_ref.get().attributes.get(\"config\")?;\n\n\n\n let (dir, future, stderr_path) = {\n\n // Parse arguments\n\n let name = config.args.get(0).ok_or_else(|| \"Arguments are empty\")?;\n\n let task = task_ref.get();\n\n\n\n let dir = state.work_dir().make_task_temp_dir(task.id)?;\n\n\n\n // Map inputs\n\n let mut in_io = Stdio::null();\n\n\n\n for (path, input) in config.in_paths.iter().zip(&task.inputs) {\n\n let obj = input.object.get();\n\n obj.data().map_to_path(&dir.path().join(path))?;\n\n if path == \"+in\" {\n\n let in_id = File::open(dir.path().join(\"+in\"))?.into_raw_fd();\n\n in_io = unsafe { Stdio::from_raw_fd(in_id) };\n", "file_path": "src/worker/tasks/run.rs", "rank": 0, "score": 188822.5555325966 }, { "content": "/// Open external file\n\npub fn task_open(state: &mut State, task_ref: TaskRef) -> TaskResult {\n\n {\n\n let task = task_ref.get();\n\n task.check_number_of_args(0)?;\n\n }\n\n let state_ref = state.self_ref();\n\n Ok(Box::new(future::lazy(move || {\n\n {\n\n let task = task_ref.get();\n\n let config: OpenConfig = task.attributes.get(\"config\")?;\n\n let path = Path::new(&config.path);\n\n if !path.is_absolute() {\n\n bail!(\"Path {:?} is not absolute\", path);\n\n }\n\n let target_path = state_ref.get().work_dir().new_path_for_dataobject();\n\n let data = Data::new_by_fs_copy(&path, target_path)?;\n\n let output = task_ref.get().output(0);\n\n output.get_mut().set_data(Arc::new(data));\n\n }\n\n Ok(())\n\n })))\n\n}\n\n\n", "file_path": "src/worker/tasks/basic.rs", "rank": 1, "score": 188822.5555325966 }, { "content": "/// Task that returns the input argument after a given number of milliseconds\n\npub fn task_sleep(state: &mut State, task_ref: TaskRef) -> TaskResult {\n\n let sleep_ms: u64 = {\n\n let task = task_ref.get();\n\n task.check_number_of_args(1)?;\n\n task.attributes.get(\"config\")?\n\n };\n\n debug!(\"Starting sleep task for {} ms\", sleep_ms);\n\n let duration = ::std::time::Duration::from_millis(sleep_ms);\n\n Ok(Box::new(\n\n state\n\n .timer()\n\n .sleep(duration)\n\n .map_err(|e| e.into())\n\n .map(move |()| {\n\n {\n\n let task = task_ref.get();\n\n let output = task.output(0);\n\n output.get_mut().set_data(task.input_data(0));\n\n }\n\n ()\n\n }),\n\n ))\n\n}\n\n\n", "file_path": "src/worker/tasks/basic.rs", "rank": 2, "score": 188822.5555325966 }, { "content": "/// Task that merge all input blobs and merge them into one blob\n\npub fn task_concat(_state: &mut State, task_ref: TaskRef) -> TaskResult {\n\n let inputs = {\n\n let task = task_ref.get();\n\n task.inputs_data()\n\n };\n\n\n\n for (i, input) in inputs.iter().enumerate() {\n\n if !input.is_blob() {\n\n bail!(\"Input {} object is not blob\", i);\n\n }\n\n }\n\n\n\n Ok(Box::new(future::lazy(move || {\n\n let result_size: usize = inputs.iter().map(|d| d.size()).sum();\n\n let mut builder = DataBuilder::new();\n\n builder.set_size(result_size);\n\n for input in inputs {\n\n builder.write_blob(&input).unwrap();\n\n }\n\n let result = builder.build();\n\n let output = task_ref.get().output(0);\n\n output.get_mut().set_data(Arc::new(result));\n\n Ok(())\n\n })))\n\n}\n\n\n", "file_path": "src/worker/tasks/basic.rs", "rank": 3, "score": 180523.3539233545 }, { "content": "pub fn test_scheduler(state: &mut State) {\n\n for oref in state.updates.new_objects.clone() {\n\n let config: Option<TestConfig> = 
oref.get().attributes.find(\"__test\").unwrap();\n\n if let Some(c) = config {\n\n oref.get_mut().size = Some(c.size);\n\n\n\n for worker_id in c.workers {\n\n debug!(\n\n \"Forcing object id={} to worker={} with fake size={}\",\n\n oref.get_mut().id,\n\n worker_id,\n\n c.size\n\n );\n\n let wref = state\n\n .graph\n\n .workers\n\n .get(&worker_id.parse().unwrap())\n\n .unwrap()\n\n .clone();\n\n wref.get_mut().scheduled_objects.insert(oref.clone());\n\n oref.get_mut().scheduled.insert(wref.clone());\n\n state.update_object_assignments(&oref, Some(&wref));\n\n state.updates.new_objects.remove(&oref);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/server/testmode.rs", "rank": 4, "score": 177394.01391940392 }, { "content": "/// Export internal file to external file system\n\npub fn task_export(_: &mut State, task_ref: TaskRef) -> TaskResult {\n\n {\n\n let task = task_ref.get();\n\n task.check_number_of_args(1)?;\n\n }\n\n Ok(Box::new(future::lazy(move || {\n\n let task = task_ref.get();\n\n let config: ExportConfig = task.attributes.get(\"config\")?;\n\n let path = Path::new(&config.path);\n\n if !path.is_absolute() {\n\n bail!(\"Path {:?} is not absolute\", path);\n\n }\n\n let input = task.input_data(0);\n\n input.export_to_path(path)\n\n })))\n\n}\n", "file_path": "src/worker/tasks/basic.rs", "rank": 5, "score": 171895.87258685453 }, { "content": "fn lite_dashboard(state: &StateRef) -> ResponseFuture {\n\n Box::new(::futures::future::ok(make_text_response(format!(\n\n \"<html>\n\n <style>\n\n table, th, td {{\n\n border: 1px solid black;\n\n border-collapse: collapse;\n\n }}\n\n </style>\n\n <body>\n\n <h1>Rain / Dashboard Lite</h1>\n\n <p>{time}</p>\n\n <h2>Workers</h2>\n\n <table>\n\n <thead><tr><th>ID<th>cpus</tr>\n\n </thead>\n\n {worker_tab}\n\n </table>\n\n </body>\n\n </html>\",\n", "file_path": "src/server/http.rs", "rank": 6, "score": 171161.5569167232 }, { "content": "fn get_events(state: &StateRef, body: &str) -> ResponseFuture {\n\n let state = state.clone();\n\n match ::serde_json::from_str(body) {\n\n Ok(search_criteria) => Box::new(state.get().logger.get_events(search_criteria).map(\n\n |events| {\n\n let chunks: Vec<_> = events\n\n .iter()\n\n .map(|&(id, time, ref event)| {\n\n format!(\n\n \"{{\\\"id\\\":{}, \\\"time\\\":\\\"{}\\\", \\\"event\\\":{}}}\",\n\n id, time, event\n\n )\n\n })\n\n .collect();\n\n let result = format!(\"[{}]\", chunks.join(\",\"));\n\n make_text_response(result)\n\n },\n\n )),\n\n Err(e) => Box::new(::futures::future::failed(e.into())),\n\n }\n\n}\n\n\n", "file_path": "src/server/http.rs", "rank": 7, "score": 154840.10962977714 }, { "content": "fn fail_unknown_type(_state: &mut State, task_ref: TaskRef) -> TaskResult {\n\n bail!(\"Unknown task type {}\", task_ref.get().task_type)\n\n}\n\n\n", "file_path": "src/worker/tasks/instance.rs", "rank": 8, "score": 141502.23930256948 }, { "content": "// Create a new pack stream for given dataobject\n\npub fn new_pack_stream(data: Arc<Data>) -> Result<Box<PackStream>> {\n\n let data_ref = data.clone();\n\n Ok(match data.storage() {\n\n &Storage::Memory(_) => Box::new(MemoryPackStream {\n\n data: data_ref,\n\n position: 0,\n\n }),\n\n // TODO: Directory\n\n &Storage::Path(ref p) => Box::new(MmapPackStream {\n\n data: data_ref,\n\n position: 0,\n\n mmap: unsafe { ::memmap::Mmap::map(&File::open(&p.path)?) 
}?,\n\n }),\n\n })\n\n}\n\n\n", "file_path": "src/worker/data/pack.rs", "rank": 9, "score": 129236.0213539152 }, { "content": "// TODO: Remove box when impl Trait\n\npub fn fetch_from_reader(\n\n reader: ::datastore_capnp::reader::Client,\n\n size: Option<usize>,\n\n) -> Box<Future<Item = Data, Error = Error>> {\n\n let builder = DataBuilder::new();\n\n let fetch_size = size.unwrap_or(1 << 20 /* 1 MB */);\n\n Box::new(future::loop_fn(builder, move |mut builder| {\n\n let mut req = reader.read_request();\n\n req.get().set_size(fetch_size as u64);\n\n req.send()\n\n .promise\n\n .map_err(|e| Error::with_chain(e, \"Read failed\"))\n\n .and_then(move |r| {\n\n let read = r.get().unwrap();\n\n builder.write(read.get_data().unwrap());\n\n match read.get_status().unwrap() {\n\n ::datastore_capnp::read_reply::Status::Ok => {\n\n Ok(future::Loop::Continue(builder))\n\n }\n\n ::datastore_capnp::read_reply::Status::Eof => {\n\n Ok(future::Loop::Break(builder.build()))\n\n }\n\n }\n\n })\n\n }))\n\n}\n", "file_path": "src/worker/rpc/fetch.rs", "rank": 10, "score": 125339.63286482636 }, { "content": "pub fn subworker_command(\n\n work_dir: &WorkDir,\n\n log_dir: &LogDir,\n\n subworker_id: SubworkerId,\n\n subworker_type: &str,\n\n program_name: &str,\n\n program_args: &[String],\n\n) -> Result<(Command, ::tempdir::TempDir)> {\n\n let (log_path_out, log_path_err) = log_dir.subworker_log_paths(subworker_id);\n\n let subworker_dir = work_dir.make_subworker_work_dir(subworker_id)?;\n\n\n\n info!(\n\n \"Staring new subworker type={} id={}\",\n\n subworker_type, subworker_id\n\n );\n\n info!(\"Subworker stdout log: {:?}\", log_path_out);\n\n info!(\"Subworker stderr log: {:?}\", log_path_err);\n\n\n\n // --- Open log files ---\n\n let log_path_out_id = File::create(log_path_out)\n", "file_path": "src/worker/graph/subworker.rs", "rank": 11, "score": 125334.99043438226 }, { "content": "pub fn data_from_capnp(\n\n state: &State,\n\n subworker_dir: &Path,\n\n reader: &::subworker_capnp::local_data::Reader,\n\n) -> Result<Arc<Data>> {\n\n match reader.get_storage().which()? 
{\n\n ::subworker_capnp::local_data::storage::Memory(data) => {\n\n Ok(Arc::new(Data::new(Storage::Memory(data?.into()))))\n\n }\n\n ::subworker_capnp::local_data::storage::Path(data) => {\n\n let source_path = Path::new(data?);\n\n if !source_path.is_absolute() {\n\n bail!(\"Path of dataobject is not absolute\");\n\n }\n\n if !source_path.starts_with(subworker_dir) {\n\n bail!(\"Path of dataobject is not in subworker dir\");\n\n }\n\n let target_path = state.work_dir().new_path_for_dataobject();\n\n Ok(Arc::new(Data::new_by_fs_move(\n\n &Path::new(source_path),\n", "file_path": "src/worker/rpc/subworker.rs", "rank": 12, "score": 125334.99043438226 }, { "content": "pub fn get_hostname() -> String {\n\n let mut buf = [0u8; 256];\n\n gethostname(&mut buf).unwrap().to_str().unwrap().to_string()\n\n}\n", "file_path": "src/common/sys.rs", "rank": 13, "score": 118406.22856206153 }, { "content": "pub fn new_rpc_system<Stream>(\n\n stream: Stream,\n\n bootstrap: Option<::capnp::capability::Client>,\n\n) -> RpcSystem<twoparty::VatId>\n\nwhere\n\n Stream: AsyncRead + AsyncWrite + 'static,\n\n{\n\n let (reader, writer) = stream.split();\n\n let network = Box::new(twoparty::VatNetwork::new(\n\n reader,\n\n writer,\n\n rpc_twoparty_capnp::Side::Client,\n\n Default::default(),\n\n ));\n\n RpcSystem::new(network, bootstrap)\n\n}\n", "file_path": "src/common/rpc.rs", "rank": 14, "score": 116147.73879210555 }, { "content": "pub fn empty_worker_id() -> WorkerId {\n\n SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0)\n\n}\n", "file_path": "src/common/id.rs", "rank": 15, "score": 114018.65861922794 }, { "content": "/// Reference to subworker. When dropped it calls \"kill()\" method\n\nstruct KillOnDrop {\n\n subworker_ref: Option<SubworkerRef>,\n\n}\n\n\n\nimpl KillOnDrop {\n\n pub fn new(subworker_ref: SubworkerRef) -> Self {\n\n KillOnDrop {\n\n subworker_ref: Some(subworker_ref),\n\n }\n\n }\n\n\n\n pub fn deactive(&mut self) -> SubworkerRef {\n\n ::std::mem::replace(&mut self.subworker_ref, None).unwrap()\n\n }\n\n}\n\n\n\nimpl Drop for KillOnDrop {\n\n fn drop(&mut self) {\n\n if let Some(ref sw) = self.subworker_ref {\n\n sw.get_mut().kill();\n", "file_path": "src/worker/tasks/instance.rs", "rank": 16, "score": 108674.06835740738 }, { "content": "/// Create \"ready file\", a file that is created when Rain is fully initialized\n\n/// What it exactly means depends on type of execution (server/worker/...)\n\n/// When creation failed, the program is terminated, since the outer waiter\n\n/// cannot be informed about progress\n\npub fn create_ready_file(path: &Path) {\n\n match ::std::fs::File::create(path) {\n\n Ok(mut file) => {\n\n file.write_all(b\"ready\\n\").unwrap();\n\n debug!(\"Ready file {:?} created\", path);\n\n }\n\n Err(e) => {\n\n error!(\"Cannot create ready file: {}\", e.description());\n\n exit(1);\n\n }\n\n }\n\n}\n", "file_path": "src/common/fs/fs.rs", "rank": 17, "score": 106803.49659090479 }, { "content": "fn read_stderr(path: &Path) -> Result<String> {\n\n // TODO: If the file is too big, truncate the beginning\n\n let mut file = File::open(path)?;\n\n let mut s = String::new();\n\n file.read_to_string(&mut s)?;\n\n Ok(s)\n\n}\n\n\n", "file_path": "src/worker/tasks/run.rs", "rank": 18, "score": 102862.99453376302 }, { "content": "fn ensure_directory(dir: &Path, name: &str) -> Result<()> {\n\n if !dir.exists() {\n\n debug!(\"{} not found, creating ... 
{:?}\", name, dir);\n\n if let Err(e) = std::fs::create_dir_all(dir) {\n\n bail!(format!(\n\n \"{} {:?} cannot by created: {}\",\n\n name,\n\n dir,\n\n e.description()\n\n ));\n\n }\n\n } else if !dir.is_dir() {\n\n bail!(\"{} {:?} exists but it is not a directory\", name, dir);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 19, "score": 101622.42445871305 }, { "content": "fn read_host_file(path: &Path) -> Result<Vec<String>> {\n\n let file = BufReader::new(File::open(path).map_err(|e| {\n\n format!(\n\n \"Cannot open worker host file {:?}: {}\",\n\n path,\n\n ::std::error::Error::description(&e)\n\n )\n\n })?);\n\n let mut result = Vec::new();\n\n for line in file.lines() {\n\n let line = line?;\n\n let trimmed_line = line.trim();\n\n if !trimmed_line.is_empty() && !trimmed_line.starts_with('#') {\n\n result.push(trimmed_line.to_string());\n\n }\n\n }\n\n Ok(result)\n\n}\n\n\n\nimpl Starter {\n", "file_path": "src/start/starter.rs", "rank": 20, "score": 97708.02341183924 }, { "content": "fn make_where_string(column: &str, mode: &str) -> Result<String> {\n\n match mode {\n\n \"=\" | \"<\" | \">\" | \"<=\" | \">=\" => Ok(format!(\"{} {} ?\", column, mode)),\n\n _ => bail!(\"Invalid search criteria\"),\n\n }\n\n}\n\n\n\nimpl Logger for SQLiteLogger {\n\n fn get_events(\n\n &self,\n\n search_criteria: SearchCriteria,\n\n ) -> Box<Future<Item = QueryEvents, Error = Error>> {\n\n let (sx, rx) = oneshot::channel();\n\n self.queue\n\n .unbounded_send(LoggerMessage::LoadEvents(search_criteria, sx))\n\n .unwrap();\n\n Box::new(rx.map_err(|_| \"Invalid logger query\".into()))\n\n }\n\n\n\n fn flush_events(&mut self) {\n", "file_path": "src/common/logging/sqlite_logger.rs", "rank": 21, "score": 91533.1114774878 }, { "content": "class Client:\n\n \"\"\"\n\n A client connection object. Can hold multiple\n\n :py:class:`Session`\\ s.\n\n \"\"\"\n\n\n\n def __init__(self, address, port):\n\n self._rpc_client = capnp.TwoPartyClient(\"{}:{}\".format(address, port))\n\n\n\n bootstrap = self._rpc_client.bootstrap().cast_as(\n\n rpc.server.ServerBootstrap)\n\n registration = bootstrap.registerAsClient(CLIENT_PROTOCOL_VERSION)\n\n self._service = registration.wait().service\n\n self._datastore = self._service.getDataStore().wait().store\n\n\n\n def new_session(self):\n\n \"\"\"\n\n Creates a new session.\n\n\n\n Note the session is destroyed server-side when the client disconnects.\n\n\n\n Returns:\n\n :class:`Session`: A new session\n\n \"\"\"\n\n session_id = self._service.newSession().wait().sessionId\n\n return Session(self, session_id)\n\n\n\n def get_server_info(self):\n\n \"\"\"\n\n Returns basic server info. 
Unstable.\n\n\n\n Returns:\n\n dict: A JSON-like dictionary.\n\n \"\"\"\n\n info = self._service.getServerInfo().wait()\n\n return {\n\n \"workers\": [{\"worker_id\": worker_id_from_capnp(w.workerId),\n\n \"tasks\": [id_from_capnp(t) for t in w.tasks],\n\n \"objects\": [id_from_capnp(o) for o in w.objects],\n\n \"objects_to_delete\": [id_from_capnp(o) for o in w.objectsToDelete],\n\n \"resources\": {\"cpus\": w.resources.nCpus}}\n\n for w in info.workers]\n\n }\n\n\n\n def _submit(self, tasks, dataobjs):\n\n req = self._service.submit_request()\n\n\n\n # Serialize tasks\n\n req.init(\"tasks\", len(tasks))\n\n for i in range(len(tasks)):\n\n tasks[i].to_capnp(req.tasks[i])\n\n\n\n # Serialize objects\n\n req.init(\"objects\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs[i].to_capnp(req.objects[i])\n\n\n\n req.send().wait()\n\n\n\n def _fetch(self, dataobj):\n\n \"Fetch the object data and update its state.\"\n\n if not dataobj._keep:\n\n raise RainException(\n\n \"Can't fetch object {} without keep flag.\".format(dataobj))\n\n\n\n if dataobj.state is None:\n\n raise RainException(\n\n \"Object {} is not submitted.\".format(dataobj))\n\n\n\n req = self._datastore.createReader_request()\n\n id_to_capnp(dataobj.id, req.id)\n\n req.offset = 0\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n reader = result.reader\n\n FETCH_SIZE = 2 << 20 # 2MB\n\n eof = False\n\n data = []\n\n while not eof:\n\n r = reader.read(FETCH_SIZE).wait()\n\n data.append(r.data)\n\n eof = r.status == \"eof\"\n\n bytedata = b\"\".join(data)\n\n self._get_state((), (dataobj, ))\n\n return DataInstance(data=bytedata,\n\n data_object=dataobj)\n\n\n\n def _wait(self, tasks, dataobjs):\n\n req = self._service.wait_request()\n\n\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n task = tasks[i]\n\n if task.state is None:\n\n raise RainException(\"Task {} is not submitted\".format(task))\n\n id_to_capnp(task.id, req.taskIds[i])\n\n\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n def _close_session(self, session):\n\n self._service.closeSession(session.session_id).wait()\n\n\n\n def _wait_some(self, tasks, dataobjs):\n\n req = self._service.waitSome_request()\n\n\n\n tasks_dict = {}\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n tasks_dict[tasks[i].id] = tasks[i]\n\n id_to_capnp(tasks[i].id, req.taskIds[i])\n\n\n\n dataobjs_dict = {}\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs_dict[dataobjs[i].id] = dataobjs[i]\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n finished = req.send().wait()\n\n finished_tasks = [tasks_dict[f_task.id]\n\n for f_task in finished.finishedTasks]\n\n finished_dataobjs = [dataobjs_dict[f_dataobj.id]\n\n for f_dataobj in finished.finishedObjects]\n\n\n\n return finished_tasks, finished_dataobjs\n\n\n\n def _wait_all(self, session_id):\n\n req = self._service.wait_request()\n\n req.init(\"taskIds\", 1)\n\n req.taskIds[0].id = rpc.common.allTasksId\n\n req.taskIds[0].sessionId = session_id\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n def _unkeep(self, dataobjs):\n\n req = self._service.unkeep_request()\n\n\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n def 
update(self, items):\n\n tasks, dataobjects = split_items(items)\n\n self._get_state(tasks, dataobjects)\n\n\n\n def _get_state(self, tasks, dataobjs):\n\n req = self._service.getState_request()\n\n\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n id_to_capnp(tasks[i].id, req.taskIds[i])\n\n\n\n dataobjs_dict = {}\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs_dict[dataobjs[i].id.id] = dataobjs[i]\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n results = req.send().wait()\n\n check_result(results.state)\n\n\n\n for task_update, task in zip(results.tasks, tasks):\n\n task.state = task_update.state\n\n new_attributes = attributes.attributes_from_capnp(\n\n task_update.attributes)\n\n task.attributes.update(new_attributes)\n\n\n\n for object_update in results.objects:\n\n dataobj = dataobjs_dict[object_update.id.id]\n\n dataobj.state = object_update.state\n\n dataobj.size = object_update.size\n\n dataobj.attributes = attributes.attributes_from_capnp(\n", "file_path": "python/rain/client/client.py", "rank": 22, "score": 88609.44062050275 }, { "content": "/// Common trait for `TaskId` and `DataObjectID`.\n\npub trait SId\n\n : for<'a> ToCapnp<'a> + for<'a> FromCapnp<'a> + WriteCapnp + ReadCapnp {\n\n fn new(session_id: SessionId, id: Id) -> Self;\n\n fn get_id(&self) -> Id;\n\n fn get_session_id(&self) -> SessionId;\n\n}\n\n\n\n/// ID type for task objects.\n\n#[derive(Copy, Clone, Debug, Ord, Eq, PartialEq, PartialOrd, Hash, Serialize, Deserialize)]\n\npub struct TaskId {\n\n session_id: SessionId,\n\n id: Id,\n\n}\n\n\n\nimpl SId for TaskId {\n\n #[inline]\n\n fn new(session_id: SessionId, id: Id) -> Self {\n\n TaskId {\n\n session_id: session_id,\n\n id: id,\n", "file_path": "src/common/id.rs", "rank": 23, "score": 88550.61858089792 }, { "content": "fn save_events(conn: &mut Connection, events: Vec<EventWrapper>) -> Result<()> {\n\n debug!(\"Saving {} events into log\", events.len());\n\n let tx = conn.transaction()?;\n\n {\n\n let mut stmt = tx.prepare_cached(\n\n \"INSERT INTO events (timestamp, event_type, session, event) VALUES (?, ?, ?, ?)\",\n\n )?;\n\n\n\n for e in events.iter() {\n\n stmt.execute(&[\n\n &e.timestamp,\n\n &e.event.event_type(),\n\n &e.event.session_id(),\n\n &serde_json::to_string(&e.event)?,\n\n ])?;\n\n }\n\n }\n\n tx.commit()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/common/logging/sqlite_logger.rs", "rank": 24, "score": 86144.06936250697 }, { "content": "fn static_data_response(data: &'static [u8]) -> ResponseFuture {\n\n Box::new(::futures::future::ok(\n\n Response::new()\n\n .with_header(ContentLength(data.len() as u64))\n\n .with_body(data),\n\n ))\n\n}\n\n\n", "file_path": "src/server/http.rs", "rank": 25, "score": 84614.06300584455 }, { "content": "fn static_gzipped_response(data: &'static [u8]) -> ResponseFuture {\n\n Box::new(::futures::future::ok(\n\n Response::new()\n\n .with_header(ContentEncoding(vec![Encoding::Gzip]))\n\n .with_header(ContentLength(data.len() as u64))\n\n .with_body(data),\n\n ))\n\n}\n\n\n\nimpl Service for RequestHandler {\n\n type Request = Request;\n\n type Response = Response;\n\n type Error = Error;\n\n\n\n type Future = Box<futures::Future<Item = Self::Response, Error = Self::Error>>;\n\n\n\n fn call(&self, req: Request) -> Self::Future {\n\n let state_ref = self.state.clone();\n\n debug!(\"HTTP request: {}\", req.path());\n\n let path = req.path().to_string();\n", "file_path": "src/server/http.rs", "rank": 26, "score": 84614.06300584455 }, { 
"content": "fn load_events(conn: &mut Connection, search_criteria: &SearchCriteria) -> Result<QueryEvents> {\n\n let mut args: Vec<&::rusqlite::types::ToSql> = Vec::new();\n\n let mut where_conds = Vec::new();\n\n\n\n if let Some(ref v) = search_criteria.id {\n\n where_conds.push(make_where_string(\"id\", &v.mode)?);\n\n args.push(&v.value);\n\n }\n\n\n\n if let Some(ref v) = search_criteria.event_type {\n\n where_conds.push(make_where_string(\"event_type\", &v.mode)?);\n\n args.push(&v.value);\n\n }\n\n\n\n if let Some(ref v) = search_criteria.session {\n\n where_conds.push(make_where_string(\"session\", &v.mode)?);\n\n args.push(&v.value);\n\n }\n\n\n\n let query_str = if where_conds.is_empty() {\n", "file_path": "src/common/logging/sqlite_logger.rs", "rank": 27, "score": 83270.95976553544 }, { "content": "/// Generic trait for storing the value into a Capnp `Builder`.\n\npub trait ToCapnp<'a> {\n\n type Builder: traits::FromPointerBuilder<'a>;\n\n fn to_capnp(self: &Self, build: &mut Self::Builder);\n\n}\n\n\n", "file_path": "src/common/convert.rs", "rank": 28, "score": 82865.14198531758 }, { "content": "def check_result(result):\n\n if result.which() == \"ok\":\n\n return # Do nothing\n\n elif result.which() == \"error\":\n\n message = result.error.message\n\n if result.error.debug:\n\n message += \"\\nDebug:\\n\" + result.error.debug\n\n raise RainException(message)\n\n else:\n", "file_path": "python/rain/client/client.py", "rank": 29, "score": 82144.78877408298 }, { "content": " def _get_state(self, tasks, dataobjs):\n\n req = self._service.getState_request()\n\n\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n id_to_capnp(tasks[i].id, req.taskIds[i])\n\n\n\n dataobjs_dict = {}\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs_dict[dataobjs[i].id.id] = dataobjs[i]\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n results = req.send().wait()\n\n check_result(results.state)\n\n\n\n for task_update, task in zip(results.tasks, tasks):\n\n task.state = task_update.state\n\n new_attributes = attributes.attributes_from_capnp(\n\n task_update.attributes)\n\n task.attributes.update(new_attributes)\n\n\n\n for object_update in results.objects:\n\n dataobj = dataobjs_dict[object_update.id.id]\n\n dataobj.state = object_update.state\n\n dataobj.size = object_update.size\n\n dataobj.attributes = attributes.attributes_from_capnp(\n", "file_path": "python/rain/client/client.py", "rank": 30, "score": 81541.23463649889 }, { "content": "fn main() {\n\n capnpc::CompilerCommand::new()\n\n .file(\"capnp/common.capnp\")\n\n .file(\"capnp/server.capnp\")\n\n .file(\"capnp/client.capnp\")\n\n .file(\"capnp/datastore.capnp\")\n\n .file(\"capnp/worker.capnp\")\n\n .file(\"capnp/subworker.capnp\")\n\n .file(\"capnp/monitor.capnp\")\n\n .run()\n\n .expect(\"schema compiler command\");\n\n}\n", "file_path": "build.rs", "rank": 31, "score": 74247.80302623018 }, { "content": " def client(self):\n\n if self._client is not None:\n\n return self._client\n\n import rain # noqa\n\n if self.running_port is None:\n\n raise Exception(\"Server was not started in test environment\")\n\n client = rain.client.Client(\"127.0.0.1\", self.running_port)\n\n self._client = client\n", "file_path": "tests/pytests/conftest.py", "rank": 32, "score": 73842.4940126983 }, { "content": "fn main() {\n\n init_log();\n\n\n\n // We do not use clap macro to build parser,\n\n // since it cannot handle \"-\" in name of long arguments\n\n let args = App::new(\"Rain\")\n\n 
.version(VERSION)\n\n .about(\"Task-based workflow manager and executor\")\n\n .subcommand( // ---- SERVER ----\n\n SubCommand::with_name(\"server\")\n\n .about(\"Rain server\")\n\n .arg(Arg::with_name(\"LISTEN_ADDRESS\")\n\n .short(\"l\")\n\n .long(\"--listen\")\n\n .help(\"Listening port/address/address:port (default 0.0.0.0:7210)\")\n\n .takes_value(true))\n\n .arg(Arg::with_name(\"HTTP_LISTEN_ADDRESS\")\n\n .long(\"--http-listen\")\n\n .value_name(\"ADDRESS\")\n\n .help(\"Listening HTTP port/address/address:port (default = 0.0.0.0:8080)\")\n", "file_path": "src/bin.rs", "rank": 33, "score": 72687.80622687479 }, { "content": "fn init_log() {\n\n // T emporary simple logger for better module log control, default level is INFO\n\n // TODO: replace with Fern or log4rs later\n\n if std::env::var(\"RUST_LOG\").is_err() {\n\n std::env::set_var(\"RUST_LOG\", \"info\");\n\n }\n\n if ::atty::is(::atty::Stream::Stdout) {\n\n ::env_logger::Builder::new()\n\n .format(|buf, record| {\n\n use env_logger::Color;\n\n use log::Level;\n\n\n\n let ts = buf.timestamp();\n\n let level = record.level();\n\n let mut level_style = buf.style();\n\n\n\n match level {\n\n Level::Trace => level_style.set_color(Color::White),\n\n Level::Debug => level_style.set_color(Color::Blue),\n\n Level::Info => level_style.set_color(Color::Green),\n", "file_path": "src/bin.rs", "rank": 34, "score": 71232.14127649608 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestConfig {\n\n workers: Vec<String>,\n\n size: usize,\n\n}\n\n\n", "file_path": "src/server/testmode.rs", "rank": 35, "score": 70288.3542327915 }, { "content": "struct TransportStream {\n\n data: Arc<Data>,\n\n transport_type: TransportStreamType,\n\n position: usize\n\n}\n\n\n\nimpl TransportStream {\n\n pub fn new(data: Arc<Data>) -> Result<Self> {\n\n let transport_type = match data.storage {\n\n Storage::Memory(_) => TransportStreamType::MemoryBlob,\n\n Storage::Path(ref path) => TransportStreamType::MMap(\n\n ::memmap::Mmap::open_path(&path.path, ::memmap::Protection::Read)?)\n\n };\n\n Ok(TransportStream {\n\n position: 0, transport_type, data\n\n })\n\n }\n\n\n\n pub fn read(&mut self, size: usize) -> (&[u8], bool) {\n\n match self.transport_type {\n\n TransportStreamType::MemoryBlob\n\n }\n\n }\n\n}*/\n", "file_path": "src/worker/data/pack.rs", "rank": 36, "score": 69006.54728735317 }, { "content": "#[derive(Serialize)]\n\nstruct AttributeInfo {\n\n worker: String,\n\n start: String,\n\n duration: i64,\n\n}\n\n\n", "file_path": "src/worker/tasks/instance.rs", "rank": 37, "score": 69006.54728735317 }, { "content": "#[derive(Deserialize)]\n\nstruct ExportConfig {\n\n path: String,\n\n}\n\n\n", "file_path": "src/worker/tasks/basic.rs", "rank": 38, "score": 69006.54728735317 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RunConfig {\n\n pub args: Vec<String>,\n\n pub in_paths: Vec<String>,\n\n pub out_paths: Vec<String>,\n\n}\n\n\n", "file_path": "src/worker/tasks/run.rs", "rank": 39, "score": 69006.54728735317 }, { "content": "#[derive(Deserialize)]\n\nstruct OpenConfig {\n\n path: String,\n\n}\n\n\n", "file_path": "src/worker/tasks/basic.rs", "rank": 40, "score": 69006.54728735317 }, { "content": "struct MemoryPackStream {\n\n data: Arc<Data>,\n\n position: usize,\n\n}\n\n\n\nimpl PackStream for MemoryPackStream {\n\n fn read(&mut self, read_size: usize) -> (&[u8], bool) {\n\n let start = self.position;\n\n let data_size = self.data.size();\n\n let (end, eof) = if start + read_size < data_size {\n\n (start + read_size, false)\n\n } else 
{\n\n (data_size, true)\n\n };\n\n\n\n if let &Storage::Memory(ref mem) = self.data.storage() {\n\n self.position = end;\n\n (&mem[start..end], eof)\n\n } else {\n\n unreachable!()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/worker/data/pack.rs", "rank": 41, "score": 67802.65103896352 }, { "content": "struct MmapPackStream {\n\n data: Arc<Data>,\n\n mmap: ::memmap::Mmap,\n\n position: usize,\n\n}\n\n\n\nimpl PackStream for MmapPackStream {\n\n fn read(&mut self, read_size: usize) -> (&[u8], bool) {\n\n let start = self.position;\n\n let data_size = self.data.size();\n\n let (end, eof) = if start + read_size < data_size {\n\n (start + read_size, false)\n\n } else {\n\n (data_size, true)\n\n };\n\n (&self.mmap[start..end], eof)\n\n }\n\n}\n\n\n\n/*enum TransportStreamType {\n\n MemoryBlob,\n\n MMap(::memmap::Mmap)\n\n}\n\n\n", "file_path": "src/worker/data/pack.rs", "rank": 42, "score": 67802.65103896352 }, { "content": "enum State<T> {\n\n // Object is still in initialization, vector contains callbacks when\n\n // object is ready\n\n Initing(Vec<unsync::oneshot::Sender<()>>),\n\n\n\n // Value is ready\n\n Ready(T),\n\n}\n\n\n\npub struct AsyncInitWrapper<T> {\n\n state: State<T>,\n\n}\n\n\n\nimpl<T> AsyncInitWrapper<T> {\n\n pub fn new() -> Self {\n\n Self {\n\n state: State::Initing(Vec::new()),\n\n }\n\n }\n\n\n", "file_path": "src/common/asycinit.rs", "rank": 43, "score": 64700.507213375386 }, { "content": "/// Generic trait for reading the value as a Capnp message from `Read`.\n\n/// All values are copied into `Self`.\n\npub trait ReadCapnp {\n\n fn read_capnp<R: Read>(r: &mut R) -> Self;\n\n}\n\n\n\n/* NOTE: This general impl does not work (lifetime problems) :-(\n\nimpl<'a, T: FromCapnp<'a>> ReadCapnp for T {\n\n fn read_capnp<R: Read>(r: &mut R) -> Self {\n\n let msg = serialize::read_message(r, Default::default()).unwrap();\n\n let read = msg.get_root::<T::Reader>().unwrap();\n\n T::from_capnp(&read)\n\n }\n\n}\n\n*/\n\n\n", "file_path": "src/common/convert.rs", "rank": 44, "score": 63969.689515594204 }, { "content": "/// Common trait for objects with checkable consistency\n\npub trait ConsistencyCheck {\n\n fn check_consistency(&self) -> ::errors::Result<()>;\n\n\n\n /// Run check_consistency depending on DEBUG_CHECK_CONSISTENCY.\n\n fn check_consistency_opt(&self) -> ::errors::Result<()> {\n\n if ::DEBUG_CHECK_CONSISTENCY.load(::std::sync::atomic::Ordering::Relaxed) {\n\n self.check_consistency()\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "src/common/mod.rs", "rank": 45, "score": 63969.689515594204 }, { "content": "pub trait Logger {\n\n fn add_event(&mut self, event: Event) {\n\n self.add_event_with_timestamp(event, Utc::now());\n\n }\n\n\n\n fn add_event_with_timestamp(&mut self, event: Event, ::chrono::DateTime<::chrono::Utc>);\n\n\n\n fn flush_events(&mut self);\n\n\n\n fn add_new_worker_event(&mut self, worker: WorkerId) {\n\n self.add_event(Event::WorkerNew(events::WorkerNewEvent { worker }));\n\n }\n\n\n\n fn add_worker_removed_event(&mut self, worker: WorkerId, error_msg: String) {\n\n self.add_event(Event::WorkerRemoved(events::WorkerRemovedEvent {\n\n worker,\n\n error_msg,\n\n }));\n\n }\n\n\n", "file_path": "src/common/logging/logger.rs", "rank": 46, "score": 63969.689515594204 }, { "content": "/// Generic trait for writing the value into a `Write` as a Capnp message.\n\npub trait WriteCapnp {\n\n fn write_capnp<W: Write>(self: &Self, w: &mut W);\n\n}\n\n\n\nimpl<T> WriteCapnp for T\n\nwhere\n\n T: for<'a> ToCapnp<'a>,\n\n{\n\n fn write_capnp<W: 
Write>(self: &Self, w: &mut W) {\n\n let mut msg = message::Builder::new_default();\n\n self.to_capnp(&mut msg.get_root::<T::Builder>().unwrap());\n\n serialize::write_message(w, &msg).unwrap();\n\n }\n\n}\n", "file_path": "src/common/convert.rs", "rank": 47, "score": 63969.689515594204 }, { "content": "pub trait PackStream {\n\n fn read(&mut self, size: usize) -> (&[u8], bool);\n\n}\n\n\n", "file_path": "src/worker/data/pack.rs", "rank": 48, "score": 62768.171923740316 }, { "content": "fn default_working_directory() -> PathBuf {\n\n let pid = getpid();\n\n let hostname = ::librain::common::sys::get_hostname();\n\n PathBuf::from(\"/tmp/rain-work\").join(format!(\"worker-{}-{}\", hostname, pid))\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 49, "score": 62566.81851064195 }, { "content": "import capnp\n\nfrom rain.client import rpc\n\nfrom rain.common import RainException\n\nfrom rain.client.task import Task\n\nfrom rain.client.data import DataObject\n\nfrom ..common import attributes, DataInstance\n\nfrom ..common.ids import id_from_capnp, id_to_capnp, worker_id_from_capnp\n\nfrom .session import Session\n\n\n\nCLIENT_PROTOCOL_VERSION = 0\n\n\n\n\n\ndef check_result(result):\n\n if result.which() == \"ok\":\n\n return # Do nothing\n\n elif result.which() == \"error\":\n\n message = result.error.message\n\n if result.error.debug:\n\n message += \"\\nDebug:\\n\" + result.error.debug\n\n raise RainException(message)\n\n else:\n\n raise Exception(\"Invalid result\")\n\n\n\n\n\nclass Client:\n\n \"\"\"\n\n A client connection object. Can hold multiple\n\n :py:class:`Session`\\ s.\n\n \"\"\"\n\n\n\n def __init__(self, address, port):\n\n self._rpc_client = capnp.TwoPartyClient(\"{}:{}\".format(address, port))\n\n\n\n bootstrap = self._rpc_client.bootstrap().cast_as(\n\n rpc.server.ServerBootstrap)\n\n registration = bootstrap.registerAsClient(CLIENT_PROTOCOL_VERSION)\n\n self._service = registration.wait().service\n\n self._datastore = self._service.getDataStore().wait().store\n\n\n\n def new_session(self):\n\n \"\"\"\n\n Creates a new session.\n\n\n\n Note the session is destroyed server-side when the client disconnects.\n\n\n\n Returns:\n\n :class:`Session`: A new session\n\n \"\"\"\n\n session_id = self._service.newSession().wait().sessionId\n\n return Session(self, session_id)\n\n\n\n def get_server_info(self):\n\n \"\"\"\n\n Returns basic server info. 
Unstable.\n\n\n\n Returns:\n\n dict: A JSON-like dictionary.\n\n \"\"\"\n\n info = self._service.getServerInfo().wait()\n\n return {\n\n \"workers\": [{\"worker_id\": worker_id_from_capnp(w.workerId),\n\n \"tasks\": [id_from_capnp(t) for t in w.tasks],\n\n \"objects\": [id_from_capnp(o) for o in w.objects],\n\n \"objects_to_delete\": [id_from_capnp(o) for o in w.objectsToDelete],\n\n \"resources\": {\"cpus\": w.resources.nCpus}}\n\n for w in info.workers]\n\n }\n\n\n\n def _submit(self, tasks, dataobjs):\n\n req = self._service.submit_request()\n\n\n\n # Serialize tasks\n\n req.init(\"tasks\", len(tasks))\n\n for i in range(len(tasks)):\n\n tasks[i].to_capnp(req.tasks[i])\n\n\n\n # Serialize objects\n\n req.init(\"objects\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs[i].to_capnp(req.objects[i])\n\n\n\n req.send().wait()\n\n\n\n def _fetch(self, dataobj):\n\n \"Fetch the object data and update its state.\"\n\n if not dataobj._keep:\n\n raise RainException(\n\n \"Can't fetch object {} without keep flag.\".format(dataobj))\n\n\n\n if dataobj.state is None:\n\n raise RainException(\n\n \"Object {} is not submitted.\".format(dataobj))\n\n\n\n req = self._datastore.createReader_request()\n\n id_to_capnp(dataobj.id, req.id)\n\n req.offset = 0\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n reader = result.reader\n\n FETCH_SIZE = 2 << 20 # 2MB\n\n eof = False\n\n data = []\n\n while not eof:\n\n r = reader.read(FETCH_SIZE).wait()\n\n data.append(r.data)\n\n eof = r.status == \"eof\"\n\n bytedata = b\"\".join(data)\n\n self._get_state((), (dataobj, ))\n\n return DataInstance(data=bytedata,\n\n data_object=dataobj)\n\n\n\n def _wait(self, tasks, dataobjs):\n\n req = self._service.wait_request()\n\n\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n task = tasks[i]\n\n if task.state is None:\n\n raise RainException(\"Task {} is not submitted\".format(task))\n\n id_to_capnp(task.id, req.taskIds[i])\n\n\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n def _close_session(self, session):\n\n self._service.closeSession(session.session_id).wait()\n\n\n\n def _wait_some(self, tasks, dataobjs):\n\n req = self._service.waitSome_request()\n\n\n\n tasks_dict = {}\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n tasks_dict[tasks[i].id] = tasks[i]\n\n id_to_capnp(tasks[i].id, req.taskIds[i])\n\n\n\n dataobjs_dict = {}\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs_dict[dataobjs[i].id] = dataobjs[i]\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n finished = req.send().wait()\n\n finished_tasks = [tasks_dict[f_task.id]\n\n for f_task in finished.finishedTasks]\n\n finished_dataobjs = [dataobjs_dict[f_dataobj.id]\n\n for f_dataobj in finished.finishedObjects]\n\n\n\n return finished_tasks, finished_dataobjs\n\n\n\n def _wait_all(self, session_id):\n\n req = self._service.wait_request()\n\n req.init(\"taskIds\", 1)\n\n req.taskIds[0].id = rpc.common.allTasksId\n\n req.taskIds[0].sessionId = session_id\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n def _unkeep(self, dataobjs):\n\n req = self._service.unkeep_request()\n\n\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n def 
update(self, items):\n\n tasks, dataobjects = split_items(items)\n\n self._get_state(tasks, dataobjects)\n\n\n\n def _get_state(self, tasks, dataobjs):\n\n req = self._service.getState_request()\n\n\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n id_to_capnp(tasks[i].id, req.taskIds[i])\n\n\n\n dataobjs_dict = {}\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs_dict[dataobjs[i].id.id] = dataobjs[i]\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n results = req.send().wait()\n\n check_result(results.state)\n\n\n\n for task_update, task in zip(results.tasks, tasks):\n\n task.state = task_update.state\n\n new_attributes = attributes.attributes_from_capnp(\n\n task_update.attributes)\n\n task.attributes.update(new_attributes)\n\n\n\n for object_update in results.objects:\n\n dataobj = dataobjs_dict[object_update.id.id]\n\n dataobj.state = object_update.state\n\n dataobj.size = object_update.size\n\n dataobj.attributes = attributes.attributes_from_capnp(\n\n object_update.attributes)\n\n\n\n\n\ndef split_items(items):\n\n \"\"\"Split items into 'tasks' and 'dataobjects'\n\n Throws an error if an item is not task nor object\"\"\"\n\n tasks = []\n\n dataobjects = []\n\n for item in items:\n\n if isinstance(item, Task):\n\n tasks.append(item)\n\n elif isinstance(item, DataObject):\n\n dataobjects.append(item)\n\n else:\n\n raise RainException(\n\n \"'{}' is not tasks nor dataobject\".format(item))\n\n return tasks, dataobjects\n", "file_path": "python/rain/client/client.py", "rank": 50, "score": 62486.31782473314 }, { "content": "/// Generic trait for reading the value from a Capnp `Reader`.\n\n/// All values are copied into `Self`.\n\npub trait FromCapnp<'a>\n\nwhere\n\n Self: Sized,\n\n{\n\n type Reader: traits::FromPointerReader<'a>;\n\n fn from_capnp(read: &'a Self::Reader) -> Self;\n\n}\n\n\n", "file_path": "src/common/convert.rs", "rank": 51, "score": 61777.003573577116 }, { "content": "fn parse_listen_arg(key: &str, args: &ArgMatches, default_port: u16) -> SocketAddr {\n\n if !args.is_present(key) {\n\n return SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), default_port);\n\n }\n\n\n\n value_t!(args, key, SocketAddr).unwrap_or_else(|_| match value_t!(args, key, IpAddr) {\n\n Ok(ip) => SocketAddr::new(ip, default_port),\n\n _ => SocketAddr::new(\n\n IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)),\n\n value_t_or_exit!(args, key, u16),\n\n ),\n\n })\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 52, "score": 55291.16172583961 }, { "content": "fn make_text_response(data: String) -> Response {\n\n Response::new()\n\n .with_header(ContentLength(data.len() as u64))\n\n .with_header(AccessControlAllowOrigin::Any)\n\n .with_body(data)\n\n /* Err(e) => {\n\n warn!(\"Http request error: {}\", e.description());\n\n Response::new()\n\n .with_status(StatusCode::InternalServerError)\n\n .with_header(AccessControlAllowOrigin::Any)\n\n }\n\n }*/\n\n}\n\n\n", "file_path": "src/server/http.rs", "rank": 53, "score": 54322.29834687746 }, { "content": "fn default_logging_directory(basename: &str) -> PathBuf {\n\n let pid = getpid();\n\n let hostname = ::librain::common::sys::get_hostname();\n\n PathBuf::from(\"/tmp/rain-logs\").join(format!(\"{}-{}-{}\", basename, hostname, pid))\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 54, "score": 54322.29834687746 }, { "content": "\"\"\"\n\nGlobal stack of active sessions\n\nDo not directly acces to this array\n\nbut use\n\n\n\n>>> with session:\n\n... 
pass\n\n\n\nor function\n\n\n\nget_active_session()\n\n\"\"\"\n\n\n\nimport weakref\n\n\n\nfrom rain.client import rpc\n\nfrom ..common import RainException, ID\n\nfrom . import graph\n\n\n\n_global_sessions = []\n\n\n\n# TODO: Check attribute \"active\" before making remote calls\n\n\n\n\n\ndef global_session_push(session):\n\n global _global_sessions\n\n if not session.active:\n\n raise RainException(\"Session is closed\")\n\n _global_sessions.append(session)\n\n\n\n\n\ndef global_session_pop():\n\n global _global_sessions\n\n return _global_sessions.pop()\n\n\n\n\n\nclass SessionBinder:\n\n \"\"\"This class is returned when session.bind_only() is used\"\"\"\n\n\n\n def __init__(self, session):\n\n self.session = session\n\n\n\n def __enter__(self):\n\n global_session_push(self.session)\n\n return self.session\n\n\n\n def __exit__(self, type, value, traceback):\n\n s = global_session_pop()\n\n assert self.session is s\n\n\n\n\n\nclass Session:\n\n \"\"\"\n\n A container for one task graph.\n\n\n\n Do not create directly, rather using :func:`Client.new_session`.\n\n When used as a context manager, all new objects and tasks are created\n\n within the session. Note the session is closed afterwards.\n\n\n\n >>> with client.new_session() as s:\n\n ... bl = blob(\"Hello rain!\")\n\n ... tsk = tasks.sleep(1.0, bl)\n\n ... tsk.output.keep()\n\n ... s.submit()\n\n ... print(tsk.output.fetch()) # waits for completion\n\n\n\n Currently, the graph and objects are alive on the server only as long as\n\n the `Session` exists.\n\n \"\"\"\n\n\n\n def __init__(self, client, session_id):\n\n self.active = True # True if a session is live in server\n\n self.client = client\n\n self.session_id = session_id\n\n\n\n self._tasks = [] # Unsubmitted task\n\n self._dataobjs = [] # Unsubmitted objects\n\n self._id_counter = 9\n\n self._submitted_tasks = []\n\n self._submitted_dataobjs = []\n\n\n\n # Cache for not submited constants: bytes/str -> DataObject\n\n # It is cleared on submit\n\n # TODO: It is not now implemented\n\n self._const_cache = {}\n\n\n\n # Static data serves for internal usage of client.\n\n # It is not directly available to user\n\n # It is used to store e.g. for serialized Python objects\n\n self._static_data = {}\n\n\n\n @property\n\n def task_count(self):\n\n \"\"\"The number of unsubmitted tasks.\"\"\"\n\n return len(self._tasks)\n\n\n\n @property\n\n def dataobj_count(self):\n\n \"\"\"The number of unsubmitted objects.\"\"\"\n\n return len(self._dataobjs)\n\n\n\n def __enter__(self):\n\n global_session_push(self)\n\n return self\n\n\n\n def __exit__(self, type, value, traceback):\n\n s = global_session_pop()\n\n assert s is self\n\n self.close()\n\n\n\n def __repr__(self):\n\n return \"<Session session_id={}>\".format(self.session_id)\n\n\n\n def close(self):\n\n \"\"\"Closes session; all tasks are stopped, all objects freed.\"\"\"\n\n if self.active and self.client:\n\n self.client._close_session(self)\n\n self._tasks = []\n\n self._dataobjs = []\n\n self._submitted_dataobjs = []\n\n self._submitted_dataobjs = []\n\n self.active = False\n\n\n\n def bind_only(self):\n\n \"\"\"\n\n This method serves to bind session without autoclose functionality.\n\n\n\n >>> with session.bind_only() as s:\n\n ... 
doSometing()\n\n\n\n binds the session, but do not close it at the end (so it may be bound\n\n again either with `bind_only` or normally with `with session: ...`).\n\n \"\"\"\n\n return SessionBinder(self)\n\n\n\n def _register_task(self, task):\n\n \"\"\"Register task into session.\n\n\n\n Returns:\n\n ID: the assigned id.\"\"\"\n\n assert task.session == self and task.id is None\n\n self._tasks.append(task)\n\n self._id_counter += 1\n\n return ID(session_id=self.session_id, id=self._id_counter)\n\n\n\n def _register_dataobj(self, dataobj):\n\n \"\"\"Register data object into session.\n\n\n\n Returns:\n\n ID: the assigned id.\"\"\"\n\n assert dataobj.session == self and dataobj.id is None\n\n self._dataobjs.append(dataobj)\n\n self._id_counter += 1\n\n return ID(session_id=self.session_id, id=self._id_counter)\n\n\n\n def keep_all(self):\n\n \"\"\"Set keep flag for all unsubmitted objects\"\"\"\n\n for dataobj in self._dataobjs:\n\n dataobj.keep()\n\n\n\n def submit(self):\n\n \"\"\"\"Submit all unsubmitted objects.\"\"\"\n\n self.client._submit(self._tasks, self._dataobjs)\n\n for task in self._tasks:\n\n task.state = rpc.common.TaskState.notAssigned\n\n self._submitted_tasks.append(weakref.ref(task))\n\n for dataobj in self._dataobjs:\n\n dataobj.state = rpc.common.DataObjectState.unfinished\n\n self._submitted_dataobjs.append(weakref.ref(dataobj))\n\n self._tasks = []\n\n self._dataobjs = []\n\n\n\n def _split_tasks_objects(self, items):\n\n \"\"\"Split `items` into `Task`s and `DataObject`s, raisong error on anything else.\n\n\n\n Returns:\n\n `(tasks, dataobjs)`\"\"\"\n\n from . import Task, DataObject\n\n tasks, dataobjs = [], []\n\n for i in items:\n\n if isinstance(i, Task):\n\n tasks.append(i)\n\n elif isinstance(i, DataObject):\n\n dataobjs.append(i)\n\n else:\n\n raise TypeError(\"Neither Task or DataObject: {!r}\".format(i))\n\n return (tasks, dataobjs)\n\n\n\n def wait(self, items):\n\n \"\"\"Wait until *all* specified tasks and dataobjects are finished.\"\"\"\n\n tasks, dataobjs = self._split_tasks_objects(items)\n\n self.client._wait(tasks, dataobjs)\n\n\n\n for task in tasks:\n\n task.state = rpc.common.TaskState.finished\n\n\n\n for dataobj in dataobjs:\n\n dataobj.state = rpc.common.DataObjectState.finished\n\n\n\n def wait_some(self, items):\n\n \"\"\"Wait until *some* of specified tasks/dataobjects are finished.\n\n\n\n Returns:\n\n `(finished_tasks, finished_dataobjs)`\"\"\"\n\n tasks, dataobjs = self._split_tasks_objects(items)\n\n finished_tasks, finished_dataobjs = self.client._wait_some(\n\n tasks, dataobjs)\n\n\n\n for task in finished_tasks:\n\n task.state = rpc.common.TaskState.finished\n\n\n\n for dataobj in finished_dataobjs:\n\n dataobj.state = rpc.common.DataObjectState.finished\n\n\n\n return finished_tasks, finished_dataobjs\n\n\n\n def wait_all(self):\n\n \"\"\"Wait until all submitted tasks and objects are finished.\"\"\"\n\n self.client._wait_all(self.session_id)\n\n\n\n for task in self._submitted_tasks:\n\n t = task()\n\n if t:\n\n t.state = rpc.common.TaskState.finished\n\n\n\n for dataobj in self._submitted_dataobjs:\n\n o = dataobj()\n\n if o:\n\n o.state = rpc.common.DataObjectState.finished\n\n\n\n def fetch(self, dataobject):\n\n \"\"\"Wait for the object to finish, update its state and\n\n fetch the object data.\n\n\n\n Returns:\n\n `DataInstance`: The object data proxy.\"\"\"\n\n return self.client._fetch(dataobject)\n\n\n\n def unkeep(self, dataobjects):\n\n \"\"\"Unset keep flag for given objects.\"\"\"\n\n submitted = []\n\n from . 
import DataObject\n\n for dataobj in dataobjects:\n\n if not isinstance(dataobj, DataObject):\n\n raise TypeError(\"Not a DataObject: {!r}\".format(dataobj))\n\n if not dataobj.is_kept():\n\n raise RainException(\"Object {} is not kept\".format(dataobj.id))\n\n if dataobj.state is not None:\n\n submitted.append(dataobj)\n\n else:\n\n dataobj._keep = False\n\n\n\n if not submitted:\n\n return\n\n\n\n self.client._unkeep(submitted)\n\n\n\n for dataobj in submitted:\n\n dataobj._free()\n\n\n\n def update(self, items):\n\n \"\"\"Update the status and metadata of given tasks and objects.\"\"\"\n\n self.client.update(items)\n\n\n\n def make_graph(self, show_ids=True):\n\n \"\"\"Create a graph of tasks and objects that were *not yet* submitted.\"\"\"\n\n\n\n def add_obj(o):\n\n if o is None:\n\n return\n\n node = g.node(o)\n\n node.label = o.id\n\n node.shape = \"box\"\n\n node.color = \"none\"\n\n node.fillcolor = \"#0088aa\"\n\n node.fillcolor = \"#44ccff\"\n\n if o.is_kept():\n\n node.fillcolor = \"#44ccff\"\n\n node.color = \"black\"\n\n\n\n def add_task(t):\n\n if t is None:\n\n return\n\n node = g.node(t)\n\n node.label = \"{}\\n{}\".format(t.id_pair, t.task_type)\n\n node.shape = \"oval\"\n\n node.fillcolor = \"#0088aa\"\n\n node.color = \"none\"\n\n for i, (key, o) in enumerate(t.inputs.items()):\n\n if key is None:\n\n label = str(i)\n\n else:\n\n label = \"{}: {}\".format(i, key)\n\n g.node(o).add_arc(node, label)\n\n\n\n for i, (key, o) in enumerate(t.outputs.items()):\n\n if key is None:\n\n label = str(i)\n\n else:\n\n label = \"{}: {}\".format(i, key)\n\n node.add_arc(g.node(o), label)\n\n\n\n g = graph.Graph()\n\n\n\n for o in self._dataobjs:\n\n add_obj(o)\n\n\n\n for o in self._submitted_dataobjs:\n\n add_obj(o())\n\n\n\n for t in self._tasks:\n\n add_task(t)\n\n\n\n for t in self._submitted_tasks:\n\n add_task(t())\n\n\n\n return g\n\n\n\n\n\ndef get_active_session():\n\n \"\"\"Internal helper to get innermost active `Session`.\"\"\"\n\n if not _global_sessions:\n\n raise RainException(\"No active session\")\n\n else:\n\n return _global_sessions[-1]\n", "file_path": "python/rain/client/session.py", "rank": 55, "score": 51956.46350700229 }, { "content": "from .task import Task\n\nfrom .data import to_data\n\nfrom .input import Input\n\nfrom .output import Output\n\nfrom .data import DataObject\n\n\n\nimport shlex\n\n\n\n\n\ndef concat(objs):\n\n \"\"\"Creates a task concatenating data objects\"\"\"\n\n return Task(\"!concat\", inputs=tuple(objs), outputs=1)\n\n\n\n\n\ndef sleep(timeout, dataobj, cpus=1):\n\n \"\"\"Task that forwards argument 'dataobj' after 'timeout' seconds.\n\n The type of resulting data object is the same as type of input data object\n\n This task serves for testing purpose\"\"\"\n\n time_ms = int(timeout * 1000)\n\n dataobj = to_data(dataobj)\n\n return Task(\"!sleep\",\n\n time_ms,\n\n inputs=(dataobj,),\n\n outputs=(dataobj.__class__(\"output\"),),\n\n cpus=cpus)\n\n\n\n\n\ndef open(filename):\n\n return Task(\"!open\", {\"path\": filename}, outputs=1)\n\n\n\n\n\ndef export(dataobj, filename):\n\n return Task(\"!export\", {\"path\": filename}, inputs=(dataobj,))\n\n\n\n\n\ndef execute(args,\n\n stdout=None,\n\n stdin=None,\n\n input_files=(),\n\n output_files=(),\n\n shell=False,\n\n cpus=1):\n\n\n\n ins = []\n\n outs = []\n\n\n\n if stdout is not None:\n\n if stdout is True:\n\n stdout = \"stdout\"\n\n stdout = Output._for_program(stdout, label=\"stdout\", execute=True)\n\n # '+out' is the file name of where stdout is redirected\n\n stdout.path = 
\"+out\"\n\n outs.append(stdout)\n\n\n\n if stdin is not None:\n\n # '+in' is the file name of where stdin is redirected\n\n stdin = Input._for_program(stdin, label=\"stdin\", execute=True)\n\n stdin.path = \"+in\"\n\n ins.append(stdin)\n\n\n\n ins += [Input._for_program(obj, execute=True, label_as_path=True)\n\n for obj in input_files]\n\n outs += [Output._for_program(obj, execute=True, label_as_path=True)\n\n for obj in output_files]\n\n\n\n if isinstance(args, str):\n\n args = shlex.split(args)\n\n\n\n proc_args = []\n\n for i, a in enumerate(args):\n\n argname = \"arg{}\".format(i)\n\n if isinstance(a, str):\n\n proc_args.append(a)\n\n elif isinstance(a, Input) or isinstance(a, DataObject) or isinstance(a, Task):\n\n arg = Input._for_program(a, execute=True, label=argname)\n\n ins.append(arg)\n\n proc_args.append(arg.path)\n\n elif isinstance(a, Output):\n\n arg = Output._for_program(a, execute=True, label=argname)\n\n outs.append(arg)\n\n proc_args.append(arg.path)\n\n else:\n\n raise Exception(\"Argument {!r} is invalid\".format(arg))\n\n\n\n if shell:\n\n proc_args = (\"/bin/sh\", \"-c\", \" \".join(proc_args))\n\n# proc_args = (\"/bin/sh\", \"-c\", \" \".join(shlex.quote(a) for a in proc_args))\n\n\n\n task_inputs = [obj.dataobj for obj in ins]\n\n task_outputs = [output.create_data_object() for output in outs]\n\n return Task(\"!run\",\n\n {\n\n \"args\": proc_args,\n\n \"in_paths\": [obj.path for obj in ins],\n\n \"out_paths\": [obj.path for obj in outs],\n\n },\n\n inputs=task_inputs,\n\n outputs=task_outputs,\n\n cpus=cpus)\n", "file_path": "python/rain/client/tasks.py", "rank": 56, "score": 51952.20397497059 }, { "content": "class Arc(object):\n\n\n\n def __init__(self, node, data):\n\n self.node = node\n\n self.data = data\n\n\n\n\n\nclass Node(object):\n\n\n\n color = None\n\n fillcolor = None\n\n label = \"\"\n\n shape = \"circle\"\n\n fontcolor = None\n\n\n\n def __init__(self, key):\n\n self.key = key\n\n self.arcs = []\n\n\n\n def add_arc(self, node, data=None):\n\n self.arcs.append(Arc(node, data))\n\n\n\n def arc_by_data(self, data):\n\n for arc in self.arcs:\n\n if arc.data == data:\n\n return arc\n\n return None\n\n\n\n def merge_arcs(self, merge_fn):\n\n if len(self.arcs) < 2:\n\n return\n\n node_to_arcs = {}\n\n for arc in self.arcs[:]:\n\n a = node_to_arcs.get(arc.node)\n\n if a is None:\n\n node_to_arcs[arc.node] = arc\n\n else:\n\n self.arcs.remove(arc)\n\n a.data = merge_fn(a.data, arc.data)\n\n\n\n def __repr__(self):\n\n return \"<Node {}>\".format(self.key)\n\n\n\n\n\nclass Graph(object):\n\n\n\n def __init__(self):\n\n self.nodes = {}\n\n\n\n @property\n\n def size(self):\n\n return len(self.nodes)\n\n\n\n def has_node(self, key):\n\n return key in self.nodes\n\n\n\n def node_check(self, key):\n\n node = self.nodes.get(key)\n\n if node is not None:\n\n return (node, True)\n\n node = Node(key)\n\n self.nodes[key] = node\n\n return (node, False)\n\n\n\n def node(self, key):\n\n node = self.nodes.get(key)\n\n if node is not None:\n\n return node\n\n node = Node(key)\n\n self.nodes[key] = node\n\n return node\n\n\n\n def show(self):\n\n run_xdot(self.make_dot(\"G\"))\n\n\n\n def write(self, filename):\n\n dot = self.make_dot(\"G\")\n\n with open(filename, \"w\") as f:\n\n f.write(dot)\n\n\n\n def make_dot(self, name):\n\n stream = [\"digraph \" + name + \" {\\n\"]\n\n for node in self.nodes.values():\n\n extra = \"\"\n\n if node.color is not None:\n\n extra += \" color=\\\"{}\\\"\".format(node.color)\n\n if node.fillcolor is not None:\n\n extra += \" 
style=filled fillcolor=\\\"{}\\\"\" \\\n\n .format(node.fillcolor)\n\n stream.append(\"v{} [label=\\\"{}\\\" shape=\\\"{}\\\"{}]\\n\".format(\n\n id(node), node.label, node.shape, extra))\n\n for arc in node.arcs:\n\n stream.append(\"v{} -> v{} [label=\\\"{}\\\"]\\n\".format(\n\n id(node), id(arc.node), str(arc.data)))\n\n stream.append(\"}\\n\")\n\n return \"\".join(stream)\n\n\n\n def merge_arcs(self, merge_fn):\n\n for node in self.nodes.values():\n\n node.merge_arcs(merge_fn)\n\n\n\n\n\ndef run_xdot(dot):\n\n import subprocess\n\n import tempfile\n\n with tempfile.NamedTemporaryFile() as f:\n\n f.write(dot)\n\n f.flush()\n\n subprocess.call((\"xdot\", f.name))\n", "file_path": "python/rain/client/graph.py", "rank": 57, "score": 51952.20397497059 }, { "content": "from .session import get_active_session\n\nfrom .data import DataObject, to_data\n\nfrom .output import Output\n\nfrom ..common import RainException, ID, LabeledList, ids\n\nfrom ..common.attributes import attributes_to_capnp\n\n\n\n\n\nclass Task:\n\n \"\"\"\n\n A single task instance in the task graph.\n\n\n\n `__init__` creates a single task instance, inserts it into `Session`\n\n and assigns it an `ID`. Creates output `DataObject` instances based\n\n on `outputs` given.\n\n\n\n `Task` is commonly created by functions in `rain.client.tasks`, or task builders\n\n created by `Remote` or `Program`. Always belongs to a `Session` and has a valid `ID`.\n\n You may wish to call it explicitely (or subclass it) when creating your own task-types.\n\n\n\n Particular task types are not realized via subclasses but\n\n with string `task_type` attribute. (Subclassing may be introduced later.)\n\n\n\n The task state is *not* automatically updated by the server. The state and\n\n attributes are updated on `Task.update()`, `Task.fetch()` and `Task.wait()`.\n\n\n\n Args:\n\n task_type (`str`): Task-type name known to rain workers.\n\n config: Any task-specific config.\n\n inputs (`LabeledList` or sequence): Sequence of `Input` or `DataObject`.\n\n outputs (`LabeledList` or sequence): Specification of `Output`\\ s for the task.\n\n session (`Session` or `None`): Session to create the task in.\n\n If not specified, the current `Session` is used.\n\n cpus (`int`): Number of cpus.\n\n\n\n Attributes:\n\n id (`ID`): Auto-assigned task ID.\n\n inputs (`LabeledList[DataObject]`): Input objects.\n\n outputs (`LabeledList[DataObject]`): Output objects created by the task.\n\n output (`DataObject`): Shortcut for `outputs[0]`. Raises Exception on multiple outputs.\n\n attributes (`dict`): Task attributes. 
See attributes_ for details.\n\n state (`TaskState` enum): Task state on last update.\n\n \"\"\"\n\n # State of object\n\n # None = Not submitted\n\n state = None\n\n id = None\n\n config = None\n\n\n\n def __init__(self,\n\n task_type,\n\n config=None,\n\n inputs=(),\n\n outputs=None,\n\n session=None,\n\n cpus=1):\n\n if session is None:\n\n session = get_active_session()\n\n self.session = session\n\n self.id = session._register_task(self)\n\n assert isinstance(self.id, ID)\n\n\n\n self.task_type = task_type\n\n self.attributes = {}\n\n\n\n if config is not None:\n\n self.attributes[\"config\"] = config\n\n\n\n if cpus is not None:\n\n self.attributes[\"resources\"] = {\"cpus\": cpus}\n\n\n\n def to_data_object(o):\n\n if isinstance(o, int):\n\n o = \"out{}\".format(o)\n\n if isinstance(o, str):\n\n return DataObject(label=o, session=session)\n\n if isinstance(o, Output):\n\n return o.create_data_object(session=session)\n\n if isinstance(o, DataObject):\n\n return o\n\n raise TypeError(\"Only `Output` and `str` allowed as outputs.\")\n\n\n\n if outputs is None:\n\n outputs = ()\n\n elif isinstance(outputs, int):\n\n outputs = tuple(to_data_object(i) for i in range(outputs))\n\n else:\n\n outputs = tuple(to_data_object(obj) for obj in outputs)\n\n\n\n self.outputs = LabeledList(pairs=((output.label, output)\n\n for output in outputs))\n\n\n\n input_pairs = []\n\n for input in inputs:\n\n if isinstance(input, tuple):\n\n label, inp = input\n\n input_pairs.append((label, to_data(inp)))\n\n else:\n\n input_pairs.append((None, to_data(input)))\n\n self.inputs = LabeledList(pairs=input_pairs)\n\n\n\n def keep_outputs(self):\n\n \"\"\"Keep all output objects of the task.\"\"\"\n\n for output in self.outputs:\n\n output.keep()\n\n\n\n def unkeep_outputs(self):\n\n \"\"\"Unkeep all output objects of the task.\"\"\"\n\n self.session.unkeep(self.outputs)\n\n\n\n def fetch_outputs(self):\n\n \"\"\"Fetch all outputs of the task.\n\n\n\n Returns:\n\n [`DataInstance`]: Fetched output data.\"\"\"\n\n return [output.fetch() for output in self.outputs]\n\n\n\n @property\n\n def output(self):\n\n count = len(self.outputs)\n\n if count == 0 or count > 1:\n\n raise RainException(\"Task {!r} has no unique output (outputs={})\"\n\n .format(self, count))\n\n return self.outputs[0]\n\n\n\n def to_capnp(self, out):\n\n ids.id_to_capnp(self.id, out.id)\n\n out.init(\"inputs\", len(self.inputs))\n\n\n\n for i, (key, dataobj) in enumerate(self.inputs.items()):\n\n ids.id_to_capnp(dataobj.id, out.inputs[i].id)\n\n if key:\n\n out.inputs[i].label = key\n\n\n\n out.init(\"outputs\", len(self.outputs))\n\n for i, dataobj in enumerate(self.outputs):\n\n ids.id_to_capnp(dataobj.id, out.outputs[i])\n\n\n\n out.taskType = self.task_type\n\n out.taskType = self.task_type\n\n attributes_to_capnp(self.attributes, out.attributes)\n\n\n\n def wait(self):\n\n \"\"\"Wait for the task to complete. See `Session.wait()`.\"\"\"\n\n self.session.wait((self,))\n\n\n\n def update(self):\n\n \"\"\"Update task state and attributes. See `Session.update()`.\"\"\"\n\n self.session.update((self,))\n\n\n\n def __repr__(self):\n\n return \"<Task id={}/{} type={}>\".format(\n\n self.session.session_id, self.id, self.task_type)\n\n\n\n def __reduce__(self):\n\n \"\"\"Speciaization to replace with subworker.unpickle_input_object\n\n in Python task args while (cloud)pickling. Raises RainError when\n\n using task with `len(outputs) != 1` as a data object.\"\"\"\n\n from . 
import pycode\n\n if pycode._global_pickle_inputs is None:\n\n # call normal __reduce__\n\n return super().__reduce__()\n\n return self.output.__reduce__()\n", "file_path": "python/rain/client/task.py", "rank": 58, "score": 51952.20397497059 }, { "content": "from .data import to_data, DataObject\n\nfrom .task import Task\n\n\n\n\n\nclass Input:\n\n\n\n dataobj = None\n\n label = None\n\n path = None\n\n load = None\n\n content_type = None\n\n\n\n def __init__(self, label=None, path=None, dataobj=None, load=None, content_type=None):\n\n\n\n if label is not None and not isinstance(label, str):\n\n raise Exception(\"Label has to be string, not {!r}\".format(label))\n\n self.label = label\n\n if path is None:\n\n if label:\n\n path = label\n\n else:\n\n path = \"input_{}\".format(id(self))\n\n self.path = path\n\n if dataobj is not None:\n\n self.dataobj = to_data(dataobj)\n\n self.load = load\n\n self.content_type = content_type\n\n\n\n def __repr__(self):\n\n args = []\n\n if self.path:\n\n args.append(\"path={}\".format(self.path))\n\n if self.dataobj:\n\n args.append(\"data={}\".format(self.dataobj))\n\n return \"<Input '{}'>\".format(self.label, \" \".join(args))\n\n\n\n @classmethod\n\n def _for_data_object(cls, do):\n\n assert isinstance(do, DataObject)\n\n return cls(label=do.label, dataobj=do, content_type=do.content_type)\n\n\n\n @classmethod\n\n def _for_program(cls, inp, label=None, execute=False, label_as_path=False):\n\n \"\"\"\n\n Create `Input` from `Input`, `DataObject`, `Task` (single output)\n\n or `str` for `Program` or `execute`.\n\n \"\"\"\n\n inp0 = inp\n\n if isinstance(inp, str):\n\n inp = cls(inp)\n\n if isinstance(inp, Task):\n\n inp = inp.output\n\n if isinstance(inp, DataObject):\n\n inp = Input._for_data_object(inp)\n\n if not isinstance(inp, Input):\n\n raise TypeError(\"Object {!r} cannot be used as input\".format(inp0))\n\n if inp.label is None:\n\n inp.label = label\n\n if inp.label is None:\n\n raise ValueError(\"Program/execute Inputs need `label`\")\n\n if inp.load is not None:\n\n raise ValueError(\"Program/execute Inputs do not accept `load`.\")\n\n if execute and inp.dataobj is None:\n\n raise(ValueError(\"`execute` Inputs need `dataobj`\"))\n\n if not execute and inp.dataobj is not None:\n\n raise(ValueError(\"`Program` Inputs can't have `dataobj`\"))\n\n\n\n if execute and inp.path is None:\n\n if label_as_path:\n\n inp.path = inp.label\n\n else:\n\n inp.path = \"in_{}_{}\".format(inp.label, inp.dataobj.id[1])\n\n\n\n return inp\n", "file_path": "python/rain/client/input.py", "rank": 59, "score": 51952.20397497059 }, { "content": "import shlex\n\n\n\nfrom .tasks import execute\n\nfrom .input import Input\n\nfrom .output import Output\n\n\n\nfrom copy import copy\n\n\n\n\n\nclass Program:\n\n # Input filenames\n\n input_files = ()\n\n # Output filenames\n\n output_files = ()\n\n stdin = None\n\n stdout = None\n\n shell = False\n\n\n\n def __init__(self,\n\n args,\n\n stdout=None, stdin=None,\n\n input_files=(), output_files=(),\n\n shell=False,\n\n cpus=1):\n\n\n\n if stdin is not None:\n\n self.stdin = Input._for_program(stdin, label=\"stdin\")\n\n\n\n if stdout:\n\n if stdout is True:\n\n stdout = \"stdout\"\n\n self.stdout = Output._for_program(stdout, label=\"stdout\")\n\n\n\n self.input_files = tuple(Input._for_program(obj, label_as_path=True)\n\n for obj in input_files)\n\n self.output_files = tuple(Output._for_program(obj, label_as_path=True)\n\n for obj in output_files)\n\n self.cpus = cpus\n\n\n\n if isinstance(args, str):\n\n args = 
shlex.split(args)\n\n self.args = []\n\n for i, a in enumerate(args):\n\n if isinstance(a, str):\n\n self.args.append(a)\n\n elif isinstance(a, Input):\n\n if a.label is None:\n\n a.label = \"arg{}\".format(i)\n\n self.args.append(Input._for_program(a))\n\n elif isinstance(a, Output):\n\n if a.label is None:\n\n a.label = \"arg{}\".format(i)\n\n self.args.append(Output._for_program(a))\n\n else:\n\n raise TypeError(\"Can't use {!r} in program argument list.\"\n\n .format(a))\n\n\n\n self.shell = shell\n\n\n\n def __repr__(self):\n\n return \"<Program {}>\".format(self.args)\n\n\n\n def __call__(self, **kw):\n\n def apply_data(obj):\n\n if isinstance(obj, Input):\n\n new = copy(obj)\n\n new.dataobj = kw[obj.label]\n\n return new\n\n else:\n\n return obj\n\n\n\n return execute([apply_data(obj) for obj in self.args],\n\n stdout=self.stdout,\n\n stdin=apply_data(self.stdin),\n\n input_files=[apply_data(obj) for obj in self.input_files],\n\n output_files=[obj for obj in self.output_files],\n\n shell=self.shell,\n\n cpus=self.cpus)\n", "file_path": "python/rain/client/program.py", "rank": 60, "score": 51952.20397497059 }, { "content": "import capnp\n\n\n\nfrom .session import get_active_session\n\nfrom ..common import RainException, ids, ID\n\nfrom ..common.attributes import attributes_to_capnp\n\nfrom ..common.content_type import check_content_type, encode_value\n\n\n\n\n\nclass DataObject:\n\n\n\n id = None\n\n\n\n # Flag if data object should be kept on server\n\n _keep = False\n\n\n\n # State of object\n\n # None = Not submitted\n\n state = None\n\n\n\n # Value of data object (value can be filled by client if it is constant,\n\n # or by fetching from server)\n\n data = None\n\n\n\n def __init__(self, label=None, session=None, content_type=None):\n\n if session is None:\n\n session = get_active_session()\n\n self.session = session\n\n self.label = label\n\n self.id = session._register_dataobj(self)\n\n assert isinstance(self.id, ID)\n\n self.attributes = {\n\n \"spec\": {\"content_type\": content_type}\n\n }\n\n\n\n @property\n\n def content_type(self):\n\n return self.attributes[\"spec\"][\"content_type\"]\n\n\n\n def _free(self):\n\n \"\"\"Set flag that object is not available on the server \"\"\"\n\n self._keep = False\n\n\n\n def unkeep(self):\n\n \"\"\"Remove data object from the server\"\"\"\n\n self.session.unkeep((self,))\n\n\n\n def keep(self):\n\n \"\"\"Set flag that is object should be kept on the server\"\"\"\n\n if self.state is not None:\n\n raise RainException(\"Cannot keep submitted task\")\n\n self._keep = True\n\n\n\n def is_kept(self):\n\n \"\"\"Returns the value of self._keep\"\"\"\n\n return self._keep\n\n\n\n def to_capnp(self, out):\n\n ids.id_to_capnp(self.id, out.id)\n\n out.keep = self._keep\n\n if self.label:\n\n out.label = self.label\n\n\n\n if self.data is not None:\n\n out.hasData = True\n\n out.data = self.data\n\n\n\n attributes_to_capnp(self.attributes, out.attributes)\n\n\n\n def wait(self):\n\n self.session.wait((self,))\n\n\n\n def fetch(self):\n\n \"\"\"\n\n Fetch the object data and update its state.\n\n\n\n Returns:\n\n DataInstance\n\n \"\"\"\n\n return self.session.fetch(self)\n\n\n\n def update(self):\n\n self.session.update((self,))\n\n\n\n def __del__(self):\n\n if self.state is not None and self._keep:\n\n try:\n\n self.session.client._unkeep((self,))\n\n except capnp.lib.capnp.KjException:\n\n # Ignore capnp exception, since this constructor may be\n\n # called when connection is closed\n\n pass\n\n\n\n def is_blob(self):\n\n return 
self.content_type != \"dir\"\n\n\n\n def is_directory(self):\n\n return self.content_type == \"dir\"\n\n\n\n def __reduce__(self):\n\n \"\"\"Speciaization to replace with subworker.unpickle_input_object\n\n in Python task args while (cloud)pickling.\"\"\"\n\n from . import pycode\n\n from ..subworker import subworker\n\n if pycode._global_pickle_inputs is None:\n\n # call normal __reduce__\n\n return super().__reduce__()\n\n base_name, counter, inputs, input_proto = pycode._global_pickle_inputs\n\n input_name = \"{}{{{}}}\".format(base_name, counter)\n\n pycode._global_pickle_inputs[1] += 1\n\n inputs.append((input_name, self))\n\n return (subworker.unpickle_input_object,\n\n (input_name, len(inputs) - 1,\n\n input_proto.load, input_proto.content_type))\n\n\n\n def __repr__(self):\n\n return \"<DObj {} {}:{} {}>\".format(\n\n self.label, self.session.session_id, self.id, self.attributes)\n\n\n\n\n\ndef blob(value, label=\"const\", content_type=None, encode=None):\n\n \"\"\"\n\n Create a constant data object with accompanying data.\n\n\n\n Given `value` may be either `bytes` or any object to be encoded with\n\n `encoding` content type. Strings are encoded with utf-8 by default.\n\n Specify at most one of `content_type` and `encode`.\n\n \"\"\"\n\n\n\n if content_type is not None:\n\n if encode is not None:\n\n raise RainException(\"Specify only one of content_type and encode\")\n\n if not isinstance(value, bytes):\n\n raise RainException(\"content_type only allowed for `bytes`\")\n\n\n\n if encode is None and isinstance(value, str):\n\n encode = \"text:utf-8\"\n\n if content_type is not None:\n\n raise RainException(\"content_type not allowed for `str`, use `encode=...`\")\n\n\n\n if encode is not None:\n\n check_content_type(encode)\n\n value = encode_value(value, content_type=encode)\n\n content_type = encode\n\n\n\n if not isinstance(value, bytes):\n\n raise RainException(\n\n \"Invalid blob type (only str or bytes are allowed without `encode`)\")\n\n\n\n dataobj = DataObject(label, content_type=content_type)\n\n dataobj.data = value\n\n return dataobj\n\n\n\n\n\ndef pickled(val, label=\"pickle\"):\n\n \"\"\"\n\n Create a data object with pickled `val`.\n\n\n\n A shorthand for `blob(val, ancode='pickle')`.\n\n The default label is \"pickle\".\n\n \"\"\"\n\n return blob(val, encode='pickle', label=label)\n\n\n\n\n\ndef to_data(obj):\n\n \"\"\"Convert an object to DataObject/DataObjectPart\"\"\"\n\n if isinstance(obj, DataObject):\n\n return obj\n\n from .task import Task\n\n if isinstance(obj, Task):\n\n if len(obj.outputs) == 1:\n\n return obj.outputs[0]\n\n if len(obj.outputs) == 0:\n\n raise RainException(\"{} does not have any output\".format(obj))\n\n else:\n\n raise RainException(\"{} returns multiple outputs\".format(obj))\n\n\n\n if isinstance(obj, str) or isinstance(obj, bytes):\n\n raise RainException(\n\n \"Instance of {!r} cannot be used as a data object.\\n\"\n\n \"Hint: Wrap it with `blob` to use it as data object.\"\n\n .format(type(obj)))\n\n\n\n raise RainException(\n\n \"Instance of {!r} cannot be used as a data object.\\n\"\n\n \"Hint: Wrap it with `pickled` or `blob(encode=...)` to use it as a data object.\"\n\n .format(type(obj)))\n", "file_path": "python/rain/client/data.py", "rank": 61, "score": 51952.20397497059 }, { "content": "from ..common.fs import load_capnp\n\n\n\ncommon = load_capnp(\"common.capnp\")\n\nserver = load_capnp(\"server.capnp\")\n", "file_path": "python/rain/client/rpc.py", "rank": 62, "score": 51952.20397497059 }, { "content": "from .data import 
DataObject\n\nfrom ..common.content_type import check_content_type, merge_content_types\n\nfrom ..common import LabeledList\n\nfrom copy import copy\n\nimport collections\n\n\n\n\n\nclass Output:\n\n \"\"\"\n\n A multi-purpose object for specifying output data objects of tasks.\n\n\n\n May be used in task factory construction (e.g. in `@remote` and `Program`),\n\n or in concrete task instantiation (as `outputs=[...]` or `output=...`).\n\n\n\n A default label is the number of the output in the task.\n\n \"\"\"\n\n\n\n def __init__(self, label=None, *, size_hint=None, content_type=None,\n\n mode=None, encode=None, path=None):\n\n\n\n self.label = label\n\n self.size_hint = size_hint\n\n self.content_type = content_type\n\n check_content_type(self.content_type)\n\n assert mode is None, \"Data object modes not supported yet\"\n\n self.encode = encode\n\n if (self.encode is not None and self.content_type is not None and\n\n self.content_type != self.encode and self.content_type != \"\"):\n\n raise ValueError(\n\n \"When specifying both encode and content_type \" +\n\n \"for Output, they must match.\")\n\n\n\n self.path = path\n\n\n\n def to_json(self):\n\n return {k: v for (k, v) in self.__dict__.items() if v is not None}\n\n\n\n def _check_for_task(self, task, order):\n\n \"Check the output for a task instance creation.\"\n\n if self.encode is not None or self.path is not None:\n\n raise ValueError(\"Task Outputs do not accept `encode`, `path`.\")\n\n\n\n def _check_for_remote(self, pytask):\n\n \"Check and finalize the output for a Remote (pytask) factory.\"\n\n if self.path is not None:\n\n raise ValueError(\"Python remote task Outputs do not accept `path`.\")\n\n\n\n def __repr__(self):\n\n if self.path is not None:\n\n return \"<Output {!r} path={!r}>\".format(self.label, self.path)\n\n else:\n\n return \"<Output {!r}>\".format(self.label)\n\n\n\n def merge_with_prototype(self, proto):\n\n \"Return a copy of self updated with `Output` `proto` properties.\"\n\n assert isinstance(proto, Output)\n\n o = copy(self)\n\n if o.size_hint is None:\n\n o.size_hint == proto.size_hint\n\n if o.label is None:\n\n o.label = proto.label\n\n if o.path is None:\n\n o.path = proto.path\n\n o.content_type = merge_content_types(o.content_type, proto.content_type)\n\n o.encode = merge_content_types(o.encode, proto.encode)\n\n return o\n\n\n\n def create_data_object(self, session=None):\n\n d = DataObject(label=self.label, session=session, content_type=self.content_type)\n\n if self.size_hint is not None:\n\n d.attributes['size_hint'] = self.size_hint\n\n return d\n\n\n\n @classmethod\n\n def _for_program(cls, out, label=None, execute=False, label_as_path=False):\n\n \"\"\"\n\n Create `Output` from `Output` or `str` for `Program` or `execute`.\n\n \"\"\"\n\n if isinstance(out, str):\n\n out = cls(out)\n\n if not isinstance(out, Output):\n\n raise TypeError(\"Object {!r} cannot be used as output\".format(out))\n\n if out.label is None:\n\n out.label = label\n\n if out.label is None:\n\n raise ValueError(\"Program/execute Outputs need `label`\")\n\n if out.encode is not None:\n\n raise ValueError(\"Program/execute Outputs do not accept `encode`.\")\n\n\n\n if execute and out.path is None:\n\n if label_as_path:\n\n out.path = out.label\n\n else:\n\n out.path = \"out_{}\".format(out.label)\n\n\n\n return out\n\n\n\n\n\ndef to_output(obj):\n\n if isinstance(obj, Output):\n\n return obj\n\n if isinstance(obj, str):\n\n return Output(obj)\n\n raise Exception(\"Object {!r} cannot be used as 
output\".format(obj))\n\n\n\n\n\nclass OutputSpec:\n\n \"\"\"\n\n A base class for task outputs list.\n\n Provides input and output specification, checking and instantiation.\n\n \"\"\"\n\n\n\n # Required / default outputs; LabeledList of `Output`s\n\n outputs = ()\n\n\n\n def __init__(self, outputs=None, output=None):\n\n\n\n if output is not None:\n\n if outputs is not None:\n\n raise ValueError(\"Both `output` and `outputs` not allowed.\")\n\n outputs = (output,)\n\n\n\n if isinstance(outputs, int):\n\n self.outputs = LabeledList(Output() for i in range(outputs))\n\n elif isinstance(outputs, LabeledList):\n\n self.outputs = outputs\n\n elif isinstance(outputs, collections.Sequence):\n\n self.outputs = LabeledList(outputs)\n\n else:\n\n raise TypeError(\"expected int, LabeledList or a sequence \"\n\n \"for `outputs`, got {:r}\".format(type(outputs)))\n\n\n\n for i, (label, output) in enumerate(self.outputs.items()):\n\n if isinstance(output, str):\n\n self.outputs.set(i, Output(label=output), label=output)\n\n elif not isinstance(output, Output):\n\n raise TypeError(\"Only string labels and `Output` accepted in output list.\")\n\n\n\n def instantiate(self, outputs=None, output=None, session=None):\n\n \"\"\"\n\n Create new output `DataObject`s for `Output`s given.\n\n\n\n Returns a tuple of `LabeledList`s `(outputs, data_objects)`.\n\n If both `output=None` and `outputs=None`, creates builder prototype outputs.\n\n \"\"\"\n\n\n\n if output is not None:\n\n if outputs is not None:\n\n raise ValueError(\"Both `output` and `outputs` not allowed.\")\n\n outputs = (output,)\n\n\n\n if outputs is None:\n\n outputs = LabeledList(self.outputs)\n\n if not isinstance(outputs, LabeledList):\n\n if not isinstance(outputs, collections.Sequence):\n\n raise TypeError(\"`outputs` must be None or a sequence type.\")\n\n outputs = LabeledList(outputs)\n\n\n\n if len(outputs) != len(self.outputs):\n\n raise ValueError(\"Got {} outputs, {} expected.\"\n\n .format(len(outputs), len(self.outputs)))\n\n\n\n objs = LabeledList()\n\n for i, (label, out) in enumerate(outputs.items()):\n\n if i < len(self.outputs):\n\n proto = self.outputs[i]\n\n else:\n\n proto = self.more_outputs\n\n if isinstance(out, str):\n\n out = Output(label=out)\n\n if out is None:\n\n out = Output()\n\n if not isinstance(out, Output):\n\n raise TypeError(\"Only `Output` and `str` instances accepted in output list.\")\n\n out_merged = out.merge_with_prototype(proto)\n\n if out_merged.label is None:\n\n out_merged.label = \"out{}\".format(i)\n\n do = out_merged.create_data_object(session=session)\n\n if out_merged.encode is not None:\n\n do.attributes['spec']['encode'] = out_merged.encode\n\n do.attributes['spec']['content_type'] = out_merged.encode\n\n if out_merged.size_hint is not None:\n\n do.attributes['spec']['size_hint'] = out_merged.size_hint\n\n objs.append(do, label=do.label)\n\n\n\n return objs\n", "file_path": "python/rain/client/output.py", "rank": 63, "score": 51952.20397497059 }, { "content": "import inspect\n\nimport contextlib\n\nimport time\n\nimport base64\n\nimport cloudpickle\n\nfrom collections import OrderedDict\n\n\n\nfrom .task import Task\n\nfrom .data import blob\n\nfrom .session import get_active_session\n\nfrom ..common import RainException, RainWarning\n\nfrom .input import Input\n\nfrom .output import OutputSpec\n\n\n\n\n\nPICKLE_ARG_SIZE_LIMIT = 256 * 1024\n\nPICKLE_ARG_TIME_LIMIT = 1.0\n\n\n\n\n\n# Base name of current argument and growing list of input data objects\n\n# while Py task arguments are 
pickled.\n\n# `[arg_base_name, counter, inputs_list, input_prototype]`\n\n_global_pickle_inputs = None\n\n\n\n\n\n@contextlib.contextmanager\n\ndef _pickle_inputs_context(name, inputs, input_prototype):\n\n \"\"\"Context manager to store current argument name and growing input\n\n objects list while Py task arguments are unpickled. Internal, not\n\n thread safe, not reentrant.\"\"\"\n\n global _global_pickle_inputs\n\n assert _global_pickle_inputs is None\n\n _global_pickle_inputs = [name, 0, inputs, input_prototype]\n\n try:\n\n yield\n\n finally:\n\n _global_pickle_inputs = None\n\n\n\n\n\ndef _checked_cloudpickle(d, name=None):\n\n \"\"\"Perform cloudpickle.dumps and issue a warning if the result is\n\n unexpectedly big (PICKLE_ARG_SIZE_LIMIT) or it takes too\n\n long (PICKLE_ARG_TIME_LIMIT).\"\"\"\n\n t0 = time.clock()\n\n p = cloudpickle.dumps(d)\n\n if len(p) > PICKLE_ARG_SIZE_LIMIT:\n\n raise RainWarning(\"Pickled object {} length {} > PICKLE_ARG_SIZE_LIMIT={}. \"\n\n \"Consider using a blob() for the data.\"\n\n .format(name or '<unknown>', len(d), PICKLE_ARG_SIZE_LIMIT))\n\n if time.clock() - t0 > PICKLE_ARG_TIME_LIMIT:\n\n raise RainWarning(\"Pickling object {} took {} s > PICKLE_ARG_TIME_LIMIT={}. \"\n\n \"Consider using a blob() for the data.\"\n\n .format(name or '<unknown>', len(d), PICKLE_ARG_TIME_LIMIT))\n\n return p\n\n\n\n\n\ndef _checked_cloudpickle_to_string(d, name=None):\n\n \"\"\"Same as _changed_pickle but encodes result to base64 string\"\"\"\n\n return base64.b64encode(_checked_cloudpickle(d, name)).decode(\"ascii\")\n\n\n\n\n\ndef remote(*,\n\n outputs=None,\n\n inputs=(),\n\n auto_load=None,\n\n auto_encode=None,\n\n cpus=1):\n\n \"Decorator for :py:class:`Remote`, see the documentation there.\"\n\n def make_remote(fn):\n\n if not inspect.isfunction(fn):\n\n raise RainException(\n\n \"remote() arg {!r} is not a function\".format(fn))\n\n return Remote(fn,\n\n outputs=outputs,\n\n inputs=inputs,\n\n auto_load=auto_load,\n\n auto_encode=auto_encode,\n\n cpus=cpus)\n\n return make_remote\n\n\n\n\n\nclass Remote:\n\n # The function to run remotely\n\n fn = None\n\n # OutputSpec for output data objects\n\n outputs = None\n\n # Dict of named argument Input specs, including args and kwargs\n\n inputs = None\n\n\n\n def __init__(self,\n\n fn, *,\n\n inputs=None,\n\n outputs=None,\n\n auto_load=False,\n\n auto_encode=None,\n\n cpus=1):\n\n self.fn = fn\n\n code = self.fn.__code__\n\n self.cpus = cpus\n\n\n\n if 'return' in fn.__annotations__:\n\n assert outputs is None\n\n outputs = fn.__annotations__['return']\n\n elif outputs is None:\n\n outputs = 1\n\n self.outputs = OutputSpec(outputs=outputs)\n\n for o in self.outputs.outputs:\n\n if o.encode is None:\n\n o.encode = auto_encode\n\n\n\n self.inputs = {}\n\n for name in code.co_varnames:\n\n if name in inputs:\n\n assert name not in self.fn.__annotations__\n\n inp = inputs[name]\n\n elif name in self.fn.__annotations__:\n\n inp = self.fn.__annotations__[name]\n\n else:\n\n inp = Input(label=name)\n\n assert isinstance(inp, Input)\n\n if inp.load is None:\n\n inp.load = auto_load\n\n self.inputs[name] = inp\n\n\n\n def __call__(self, *args, output=None, outputs=None, session=None, **kwargs):\n\n # TODO(gavento): Use Input()s arguments\n\n if session is None:\n\n session = get_active_session()\n\n\n\n # cache the code in a static blob\n\n fn_blob = session._static_data.get(self.fn)\n\n if fn_blob is None:\n\n d = _checked_cloudpickle(self.fn, self.fn.__name__)\n\n fn_blob = blob(d, self.fn.__name__, 
content_type=\"cloudpickle\")\n\n fn_blob.keep()\n\n session._static_data[self.fn] = fn_blob\n\n\n\n input_objs = [fn_blob]\n\n\n\n # Check the parameter compatibility for fn\n\n # Note that the first arg is the context\n\n sig = inspect.signature(self.fn)\n\n sig.bind(None, *args, **kwargs)\n\n code = self.fn.__code__\n\n\n\n # Pickle positional args\n\n pickled_args = []\n\n for i, argval in enumerate(args):\n\n if i < code.co_argcount - 1:\n\n name = code.co_varnames[i + 1]\n\n input_proto = self.inputs[name]\n\n else:\n\n args_name = code.co_varnames[code.co_argcount +\n\n code.co_kwonlyargcount]\n\n name = \"{}[{}]\".format(args_name, i + 1 - code.co_argcount)\n\n input_proto = self.inputs[args_name]\n\n # Within this session state, the DataObjects are seialized as\n\n # subworker.unpickle_input_object call\n\n assert isinstance(input_proto, Input)\n\n with _pickle_inputs_context(name, input_objs, input_proto):\n\n d = _checked_cloudpickle_to_string(argval, name=name)\n\n pickled_args.append(d)\n\n\n\n # Pickle keyword args\n\n pickled_kwargs = OrderedDict()\n\n for name, argval in kwargs.items():\n\n input_proto = self.inputs[code.co_varnames[-1]]\n\n # Within this session state, the DataObjects are seialized as\n\n # subworker.unpickle_input_object call\n\n with _pickle_inputs_context(name, input_objs, input_proto):\n\n d = _checked_cloudpickle_to_string(argval, name=name)\n\n pickled_kwargs[name] = d\n\n\n\n # create list of Output objects and DO instances\n\n output_objs = self.outputs.instantiate(\n\n output=output, outputs=outputs, session=session)\n\n\n\n task_config = {\n\n 'args': pickled_args,\n\n 'kwargs': pickled_kwargs,\n\n 'encode_outputs': [o.attributes['spec'].get('encode') for o in output_objs]\n\n }\n\n\n\n return Task(\"py\", task_config, input_objs, output_objs, cpus=self.cpus)\n", "file_path": "python/rain/client/pycode.py", "rank": 64, "score": 51952.20397497059 }, { "content": "from .input import Input # noqa\n\nfrom .output import Output # noqa\n\nfrom .data import blob, pickled, DataObject # noqa\n\nfrom .task import Task # noqa\n\nfrom ..common import RainException, RainWarning # noqa\n\nfrom .pycode import remote, Remote # noqa\n\nfrom .client import Client # noqa\n\nfrom .program import Program # noqa\n\nfrom .session import Session # noqa\n", "file_path": "python/rain/client/__init__.py", "rank": 65, "score": 50617.60684571669 }, { "content": "type ResponseFuture = Box<futures::Future<Item = Response, Error = ::errors::Error>>;\n\n\n", "file_path": "src/server/http.rs", "rank": 66, "score": 50129.848642637204 }, { "content": "fn run_starter(_global_args: &ArgMatches, cmd_args: &ArgMatches) {\n\n let listen_address = parse_listen_arg(\"LISTEN_ADDRESS\", cmd_args, DEFAULT_SERVER_PORT);\n\n let http_listen_address =\n\n parse_listen_arg(\"HTTP_LISTEN_ADDRESS\", cmd_args, DEFAULT_HTTP_SERVER_PORT);\n\n let log_dir = cmd_args\n\n .value_of(\"LOG_DIR\")\n\n .map(PathBuf::from)\n\n .unwrap_or_else(|| default_logging_directory(\"worker\"));\n\n\n\n info!(\"Log directory: {}\", log_dir.to_str().unwrap());\n\n\n\n ensure_directory(&log_dir, \"logging directory\").unwrap_or_else(|e| {\n\n error!(\"{}\", e);\n\n exit(1);\n\n });\n\n\n\n let mut local_workers = Vec::new();\n\n\n\n if cmd_args.is_present(\"SIMPLE\") && cmd_args.is_present(\"LOCAL_WORKERS\") {\n\n error!(\"--simple and --local-workers are mutually exclusive\");\n", "file_path": "src/bin.rs", "rank": 67, "score": 49447.21188587551 }, { "content": "fn run_server(_global_args: &ArgMatches, cmd_args: 
&ArgMatches) {\n\n let listen_address = parse_listen_arg(\"LISTEN_ADDRESS\", cmd_args, DEFAULT_SERVER_PORT);\n\n let http_listen_address =\n\n parse_listen_arg(\"HTTP_LISTEN_ADDRESS\", cmd_args, DEFAULT_HTTP_SERVER_PORT);\n\n let ready_file = cmd_args.value_of(\"READY_FILE\");\n\n info!(\n\n \"Starting Rain {} server at port {}\",\n\n VERSION, listen_address\n\n );\n\n\n\n let log_dir = cmd_args\n\n .value_of(\"LOG_DIR\")\n\n .map(PathBuf::from)\n\n .unwrap_or_else(|| default_logging_directory(\"server\"));\n\n\n\n ensure_directory(&log_dir, \"logging directory\").unwrap_or_else(|e| {\n\n error!(\"{}\", e);\n\n exit(1);\n\n });\n\n\n", "file_path": "src/bin.rs", "rank": 68, "score": 49447.21188587551 }, { "content": "fn run_worker(_global_args: &ArgMatches, cmd_args: &ArgMatches) {\n\n let ready_file = cmd_args.value_of(\"READY_FILE\");\n\n let listen_address = parse_listen_arg(\"LISTEN_ADDRESS\", cmd_args, DEFAULT_WORKER_PORT);\n\n let mut server_address = cmd_args.value_of(\"SERVER_ADDRESS\").unwrap().to_string();\n\n if !server_address.contains(':') {\n\n server_address = format!(\"{}:{}\", server_address, DEFAULT_SERVER_PORT);\n\n }\n\n\n\n let server_addr = match server_address.to_socket_addrs() {\n\n Err(_) => {\n\n error!(\"Cannot resolve server address\");\n\n exit(1);\n\n }\n\n Ok(mut addrs) => match addrs.next() {\n\n None => {\n\n error!(\"Cannot resolve server address\");\n\n exit(1);\n\n }\n\n Some(ref addr) => *addr,\n\n },\n", "file_path": "src/bin.rs", "rank": 69, "score": 49447.21188587551 }, { "content": "fn random_worker(g: &mut Graph, seed: usize) -> WorkerRef {\n\n let ws: Vec<_> = g.workers.values().collect();\n\n assert!(ws.len() > 0);\n\n ws[seed % ws.len()].clone()\n\n}\n\n*/\n", "file_path": "src/server/scheduler.rs", "rank": 70, "score": 47401.82360972077 }, { "content": " def _fetch(self, dataobj):\n\n \"Fetch the object data and update its state.\"\n\n if not dataobj._keep:\n\n raise RainException(\n\n \"Can't fetch object {} without keep flag.\".format(dataobj))\n\n\n\n if dataobj.state is None:\n\n raise RainException(\n\n \"Object {} is not submitted.\".format(dataobj))\n\n\n\n req = self._datastore.createReader_request()\n\n id_to_capnp(dataobj.id, req.id)\n\n req.offset = 0\n\n result = req.send().wait()\n\n check_result(result)\n\n\n\n reader = result.reader\n\n FETCH_SIZE = 2 << 20 # 2MB\n\n eof = False\n\n data = []\n\n while not eof:\n\n r = reader.read(FETCH_SIZE).wait()\n\n data.append(r.data)\n\n eof = r.status == \"eof\"\n\n bytedata = b\"\".join(data)\n\n self._get_state((), (dataobj, ))\n\n return DataInstance(data=bytedata,\n", "file_path": "python/rain/client/client.py", "rank": 71, "score": 46846.82495973894 }, { "content": " def _wait_all(self, session_id):\n\n req = self._service.wait_request()\n\n req.init(\"taskIds\", 1)\n\n req.taskIds[0].id = rpc.common.allTasksId\n\n req.taskIds[0].sessionId = session_id\n\n result = req.send().wait()\n", "file_path": "python/rain/client/client.py", "rank": 72, "score": 46842.306514457836 }, { "content": " def _wait_some(self, tasks, dataobjs):\n\n req = self._service.waitSome_request()\n\n\n\n tasks_dict = {}\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n tasks_dict[tasks[i].id] = tasks[i]\n\n id_to_capnp(tasks[i].id, req.taskIds[i])\n\n\n\n dataobjs_dict = {}\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs_dict[dataobjs[i].id] = dataobjs[i]\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n finished = req.send().wait()\n\n 
finished_tasks = [tasks_dict[f_task.id]\n\n for f_task in finished.finishedTasks]\n\n finished_dataobjs = [dataobjs_dict[f_dataobj.id]\n\n for f_dataobj in finished.finishedObjects]\n\n\n", "file_path": "python/rain/client/client.py", "rank": 73, "score": 46842.306514457836 }, { "content": " def _unkeep(self, dataobjs):\n\n req = self._service.unkeep_request()\n\n\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n result = req.send().wait()\n", "file_path": "python/rain/client/client.py", "rank": 74, "score": 46842.306514457836 }, { "content": " def __init__(self, address, port):\n\n self._rpc_client = capnp.TwoPartyClient(\"{}:{}\".format(address, port))\n\n\n\n bootstrap = self._rpc_client.bootstrap().cast_as(\n\n rpc.server.ServerBootstrap)\n\n registration = bootstrap.registerAsClient(CLIENT_PROTOCOL_VERSION)\n\n self._service = registration.wait().service\n", "file_path": "python/rain/client/client.py", "rank": 75, "score": 46842.306514457836 }, { "content": " def update(self, items):\n\n tasks, dataobjects = split_items(items)\n", "file_path": "python/rain/client/client.py", "rank": 76, "score": 46842.306514457836 }, { "content": " def _wait(self, tasks, dataobjs):\n\n req = self._service.wait_request()\n\n\n\n req.init(\"taskIds\", len(tasks))\n\n for i in range(len(tasks)):\n\n task = tasks[i]\n\n if task.state is None:\n\n raise RainException(\"Task {} is not submitted\".format(task))\n\n id_to_capnp(task.id, req.taskIds[i])\n\n\n\n req.init(\"objectIds\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n id_to_capnp(dataobjs[i].id, req.objectIds[i])\n\n\n\n result = req.send().wait()\n", "file_path": "python/rain/client/client.py", "rank": 77, "score": 46842.306514457836 }, { "content": " def _submit(self, tasks, dataobjs):\n\n req = self._service.submit_request()\n\n\n\n # Serialize tasks\n\n req.init(\"tasks\", len(tasks))\n\n for i in range(len(tasks)):\n\n tasks[i].to_capnp(req.tasks[i])\n\n\n\n # Serialize objects\n\n req.init(\"objects\", len(dataobjs))\n\n for i in range(len(dataobjs)):\n\n dataobjs[i].to_capnp(req.objects[i])\n\n\n", "file_path": "python/rain/client/client.py", "rank": 78, "score": 46842.306514457836 }, { "content": " def new_session(self):\n\n \"\"\"\n\n Creates a new session.\n\n\n\n Note the session is destroyed server-side when the client disconnects.\n\n\n\n Returns:\n\n :class:`Session`: A new session\n\n \"\"\"\n\n session_id = self._service.newSession().wait().sessionId\n", "file_path": "python/rain/client/client.py", "rank": 79, "score": 45967.54913912803 }, { "content": " def _close_session(self, session):\n", "file_path": "python/rain/client/client.py", "rank": 80, "score": 45963.34208682607 }, { "content": "def split_items(items):\n\n \"\"\"Split items into 'tasks' and 'dataobjects'\n\n Throws an error if an item is not task nor object\"\"\"\n\n tasks = []\n\n dataobjects = []\n\n for item in items:\n\n if isinstance(item, Task):\n\n tasks.append(item)\n\n elif isinstance(item, DataObject):\n\n dataobjects.append(item)\n\n else:\n\n raise RainException(\n\n \"'{}' is not tasks nor dataobject\".format(item))\n", "file_path": "python/rain/client/client.py", "rank": 81, "score": 45963.34208682607 }, { "content": " def get_server_info(self):\n\n \"\"\"\n\n Returns basic server info. 
Unstable.\n\n\n\n Returns:\n\n dict: A JSON-like dictionary.\n\n \"\"\"\n\n info = self._service.getServerInfo().wait()\n\n return {\n\n \"workers\": [{\"worker_id\": worker_id_from_capnp(w.workerId),\n\n \"tasks\": [id_from_capnp(t) for t in w.tasks],\n\n \"objects\": [id_from_capnp(o) for o in w.objects],\n\n \"objects_to_delete\": [id_from_capnp(o) for o in w.objectsToDelete],\n\n \"resources\": {\"cpus\": w.resources.nCpus}}\n\n for w in info.workers]\n", "file_path": "python/rain/client/client.py", "rank": 82, "score": 45116.75644751198 }, { "content": "fn wrap_elements<I>(open_tag: &str, close_tag: &str, elements: I) -> String\n\nwhere\n\n I: IntoIterator<Item = String>,\n\n{\n\n let mut result = String::new();\n\n for e in elements.into_iter() {\n\n result.push_str(open_tag);\n\n result.push_str(&e);\n\n result.push_str(close_tag);\n\n }\n\n result\n\n}\n\n\n\nimpl RequestHandler {\n\n pub fn new(state: ::server::state::StateRef) -> Self {\n\n Self { state: state }\n\n }\n\n}\n\n\n", "file_path": "src/server/http.rs", "rank": 83, "score": 43607.723960006726 }, { "content": " for wr in self.graph.workers.values() {\n\n wr.check_consistency()?;\n\n }\n\n for sr in self.graph.sessions.values() {\n\n sr.check_consistency()?;\n\n }\n\n for cr in self.graph.clients.values() {\n\n cr.check_consistency()?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Note: No `Drop` impl as a `State` is assumed to live forever.\n\npub type StateRef = WrappedRcRefCell<State>;\n\n\n\nimpl StateRef {\n\n pub fn new(\n\n handle: Handle,\n\n listen_address: SocketAddr,\n", "file_path": "src/server/state.rs", "rank": 84, "score": 42986.25802406919 }, { "content": " pub logger: Box<Logger>,\n\n\n\n timer: tokio_timer::Timer,\n\n\n\n /// Listening port and address.\n\n listen_address: SocketAddr,\n\n\n\n /// Listening port for HTTP interface\n\n http_listen_address: SocketAddr,\n\n}\n\n\n\nimpl State {\n\n /// Add new worker, register it in the graph\n\n pub fn add_worker(\n\n &mut self,\n\n address: SocketAddr,\n\n control: Option<::worker_capnp::worker_control::Client>,\n\n resources: Resources,\n\n ) -> Result<WorkerRef> {\n\n debug!(\"New worker {}\", address);\n", "file_path": "src/server/state.rs", "rank": 85, "score": 42984.77715270228 }, { "content": "use std::net::SocketAddr;\n\nuse std::path::PathBuf;\n\nuse std::time::Duration;\n\nuse std::collections::HashSet;\n\n\n\nuse futures::{Future, Stream};\n\nuse tokio_core::reactor::Handle;\n\nuse tokio_core::net::{TcpListener, TcpStream};\n\nuse tokio_timer;\n\n\n\nuse errors::Result;\n\nuse common::RcSet;\n\nuse common::id::{ClientId, DataObjectId, SId, SessionId, TaskId, WorkerId};\n\nuse common::rpc::new_rpc_system;\n\nuse server::graph::{ClientRef, DataObjectRef, DataObjectState, Graph, SessionError, SessionRef,\n\n TaskInput, TaskRef, TaskState, WorkerRef};\n\nuse server::rpc::ServerBootstrapImpl;\n\nuse server::scheduler::{ReactiveScheduler, UpdatedIn};\n\nuse common::convert::ToCapnp;\n\nuse common::wrapped::WrappedRcRefCell;\n", "file_path": "src/server/state.rs", "rank": 86, "score": 42982.8267699599 }, { "content": " debug!(\"Failing worker {} with cause {:?}\", worker.get_id(), cause);\n\n assert!(worker.get_mut().error.is_none());\n\n worker.get_mut().error = Some(cause.clone());\n\n // TODO: Cleanup and recovery if possible\n\n self.logger\n\n .add_worker_removed_event(worker.get_id(), cause.clone());\n\n panic!(\"Worker {} error: {:?}\", worker.get_id(), cause);\n\n }\n\n\n\n /// Add new client, register it in the graph\n\n pub fn add_client(&mut self, 
address: SocketAddr) -> Result<ClientRef> {\n\n debug!(\"New client {}\", address);\n\n if self.graph.clients.contains_key(&address) {\n\n bail!(\"State already contains client {}\", address);\n\n }\n\n let c = ClientRef::new(address);\n\n self.graph.clients.insert(c.get().id, c.clone());\n\n self.logger.add_new_client_event(c.get().id);\n\n Ok(c)\n\n }\n", "file_path": "src/server/state.rs", "rank": 87, "score": 42980.74526598891 }, { "content": " self.update_task_assignment(tref);\n\n }\n\n self.underload_workers = self.graph.workers.values().map(|w| w.clone()).collect();\n\n }\n\n\n\n pub fn handle(&self) -> &Handle {\n\n &self.handle\n\n }\n\n}\n\n\n\nimpl ConsistencyCheck for State {\n\n /// Check consistency of all tasks, objects, workers, clients and sessions. Quite slow.\n\n fn check_consistency(&self) -> Result<()> {\n\n debug!(\"Checking State consistency\");\n\n for tr in self.graph.tasks.values() {\n\n tr.check_consistency()?;\n\n }\n\n for or in self.graph.objects.values() {\n\n or.check_consistency()?;\n\n }\n", "file_path": "src/server/state.rs", "rank": 88, "score": 42980.20911095307 }, { "content": "use worker::tasks::TaskInstance;\n\nuse worker::rpc::{SubworkerUpstreamImpl, WorkerControlImpl};\n\nuse worker::fs::workdir::WorkDir;\n\n\n\nuse futures::Future;\n\nuse futures::Stream;\n\nuse futures::IntoFuture;\n\nuse tokio_core::reactor::Handle;\n\nuse tokio_core::net::TcpListener;\n\nuse tokio_core::net::TcpStream;\n\nuse tokio_timer;\n\nuse tokio_uds::{UnixListener, UnixStream};\n\nuse capnp_rpc::rpc_twoparty_capnp;\n\nuse capnp::capability::Promise;\n\nuse errors::{Error, ErrorKind, Result};\n\n\n\nuse WORKER_PROTOCOL_VERSION;\n\n\n\nconst MONITORING_INTERVAL: u64 = 5; // Monitoring interval in seconds\n\nconst DELETE_WAIT_LIST_INTERVAL: u64 = 2; // How often is delete_wait_list checked in seconds\n", "file_path": "src/worker/state.rs", "rank": 89, "score": 42979.66927196295 }, { "content": " ignored_sessions: Default::default(),\n\n });\n\n s.get_mut().self_ref = Some(s.clone());\n\n s\n\n }\n\n\n\n pub fn start(&self) {\n\n let listen_address = self.get().listen_address;\n\n let http_listen_address = self.get().http_listen_address;\n\n let handle = self.get().handle.clone();\n\n let listener = TcpListener::bind(&listen_address, &handle).unwrap();\n\n\n\n let state = self.clone();\n\n let future = listener\n\n .incoming()\n\n .for_each(move |(stream, addr)| {\n\n state.on_connection(stream, addr);\n\n Ok(())\n\n })\n\n .map_err(|e| {\n", "file_path": "src/server/state.rs", "rank": 90, "score": 42978.963591458945 }, { "content": "\n\n let up_impl = SubworkerUpstreamImpl::new(self);\n\n let subworker_id_rc = up_impl.subworker_id_rc();\n\n let upstream = ::subworker_capnp::subworker_upstream::ToClient::new(up_impl)\n\n .from_server::<::capnp_rpc::Server>();\n\n let rpc_system = ::common::rpc::new_rpc_system(stream, Some(upstream.client));\n\n let inner = self.get();\n\n\n\n let state_ref = self.clone();\n\n\n\n inner\n\n .handle\n\n .spawn(\n\n rpc_system\n\n .map_err(|e| error!(\"RPC error: {:?}\", e))\n\n .then(move |result| {\n\n debug!(\"Subworker cleanup\");\n\n let mut s = state_ref.get_mut();\n\n if let Some(subworker_id) = subworker_id_rc.get() {\n\n let sw = s.graph.subworkers.remove(&subworker_id).unwrap();\n", "file_path": "src/worker/state.rs", "rank": 91, "score": 42978.18565206296 }, { "content": " subworker_args: subworkers,\n\n self_ref: None,\n\n });\n\n state.get_mut().self_ref = Some(state.clone());\n\n state\n\n }\n\n\n\n // This is called when an 
incoming connection arrives\n\n fn on_connection(&self, stream: TcpStream, address: SocketAddr) {\n\n // Handle an incoming connection; spawn gate object for it\n\n\n\n info!(\"New connection from {}\", address);\n\n stream.set_nodelay(true).unwrap();\n\n\n\n let bootstrap = ::datastore_capnp::data_store::ToClient::new(\n\n ::worker::rpc::datastore::DataStoreImpl::new(self),\n\n ).from_server::<::capnp_rpc::Server>();\n\n let rpc_system = ::common::rpc::new_rpc_system(stream, Some(bootstrap.client));\n\n self.get()\n\n .spawn_panic_on_error(rpc_system.map_err(|e| e.into()));\n", "file_path": "src/worker/state.rs", "rank": 92, "score": 42977.83331420176 }, { "content": " #[inline]\n\n pub fn is_object_ignored(&self, object_id: &DataObjectId) -> bool {\n\n self.ignored_sessions.contains(&object_id.get_session_id())\n\n }\n\n\n\n pub fn worker_by_id(&self, id: WorkerId) -> Result<WorkerRef> {\n\n match self.graph.workers.get(&id) {\n\n Some(w) => Ok(w.clone()),\n\n None => Err(format!(\"Worker {:?} not found\", id))?,\n\n }\n\n }\n\n\n\n pub fn client_by_id(&self, id: ClientId) -> Result<ClientRef> {\n\n match self.graph.clients.get(&id) {\n\n Some(c) => Ok(c.clone()),\n\n None => Err(format!(\"Client {:?} not found\", id))?,\n\n }\n\n }\n\n\n\n pub fn session_by_id(&self, id: SessionId) -> Result<SessionRef> {\n", "file_path": "src/server/state.rs", "rank": 93, "score": 42977.27547574113 }, { "content": " self.get_mut().distribute_tasks();\n\n !self.get().stop_server\n\n }\n\n\n\n fn on_connection(&self, stream: TcpStream, address: SocketAddr) {\n\n // Handle an incoming connection; spawn gate object for it\n\n\n\n info!(\"New connection from {}\", address);\n\n stream.set_nodelay(true).unwrap();\n\n let bootstrap = ::server_capnp::server_bootstrap::ToClient::new(ServerBootstrapImpl::new(\n\n self,\n\n address,\n\n )).from_server::<::capnp_rpc::Server>();\n\n\n\n let rpc_system = new_rpc_system(stream, Some(bootstrap.client));\n\n self.get()\n\n .handle\n\n .spawn(rpc_system.map_err(|e| panic!(\"RPC error: {:?}\", e)));\n\n }\n\n\n\n #[inline]\n\n pub fn handle(&self) -> Handle {\n\n self.get().handle.clone()\n\n }\n\n}\n", "file_path": "src/server/state.rs", "rank": 94, "score": 42977.03465510734 }, { "content": "pub type StateRef = WrappedRcRefCell<State>;\n\n\n\nimpl State {\n\n #[inline]\n\n pub fn work_dir(&self) -> &WorkDir {\n\n &self.work_dir\n\n }\n\n\n\n #[inline]\n\n pub fn handle(&self) -> &Handle {\n\n &self.handle\n\n }\n\n\n\n #[inline]\n\n pub fn worker_id(&self) -> &WorkerId {\n\n &self.worker_id\n\n }\n\n\n\n #[inline]\n\n pub fn timer(&self) -> &tokio_timer::Timer {\n", "file_path": "src/worker/state.rs", "rank": 95, "score": 42976.76517487513 }, { "content": " if self.graph.workers.contains_key(&address) {\n\n bail!(\"State already contains worker {}\", address);\n\n }\n\n let w = WorkerRef::new(address, control, resources);\n\n self.graph.workers.insert(w.get_id(), w.clone());\n\n self.underload_workers.insert(w.clone());\n\n self.logger.add_new_worker_event(w.get_id());\n\n Ok(w)\n\n }\n\n\n\n /// Remove the worker from the graph, forcefully unassigning all tasks and objects.\n\n /// TODO: better specs and context of worker removal\n\n pub fn remove_worker(&mut self, _worker: &WorkerRef) -> Result<()> {\n\n unimplemented!() /*\n\n pub fn delete(self, graph: &mut Graph) {\n\n debug!(\"Deleting worker {}\", self.get_id());\n\n // remove from objects\n\n for o in self.get_mut().assigned_objects.iter() {\n\n assert!(o.get_mut().assigned.remove(&self));\n\n }\n", 
"file_path": "src/server/state.rs", "rank": 96, "score": 42976.38308540076 }, { "content": "\n\n /// Create a new session fr a client, register it in the graph.\n\n pub fn add_session(&mut self, client: &ClientRef) -> Result<SessionRef> {\n\n let s = SessionRef::new(self.graph.new_session_id(), client);\n\n self.graph.sessions.insert(s.get_id(), s.clone());\n\n self.logger\n\n .add_new_session_event(s.get_id(), client.get().id);\n\n Ok(s)\n\n }\n\n\n\n /// Helper for .remove_session() and .fail_session(). Remove all session tasks,\n\n /// objects and cancel all finish hooks.\n\n fn clear_session(&mut self, s: &SessionRef) -> Result<()> {\n\n let session_id = s.get().id.clone();\n\n debug!(\"Clearing session {}\", session_id);\n\n self.scheduler.clear_session(&s);\n\n\n\n let state_ref = self.self_ref.clone().unwrap();\n\n assert!(self.ignored_sessions.insert(session_id));\n\n let duration = ::std::time::Duration::from_secs(IGNORE_ID_TIME_SECONDS);\n", "file_path": "src/server/state.rs", "rank": 97, "score": 42976.17255916521 }, { "content": " self.spawn_panic_on_error(req.send().promise.map(|_| ()).map_err(|e| e.into()));\n\n }\n\n\n\n fn subworker_cleanup(&mut self, subworker_ref: &SubworkerRef) {\n\n for (_, obj_ref) in &self.graph.objects {\n\n obj_ref.get_mut().subworker_cache.remove(&subworker_ref);\n\n }\n\n }\n\n\n\n pub fn get_subworker(\n\n &mut self,\n\n subworker_type: &str,\n\n ) -> Result<Box<Future<Item = SubworkerRef, Error = Error>>> {\n\n use tokio_process::CommandExt;\n\n let sw_result = self.graph\n\n .idle_subworkers\n\n .iter()\n\n .find(|sw| sw.get().subworker_type() == subworker_type)\n\n .cloned();\n\n match sw_result {\n", "file_path": "src/worker/state.rs", "rank": 98, "score": 42976.0581840749 }, { "content": " self.graph.sessions.remove(&session.get_id()).unwrap();\n\n // unlink\n\n session.unlink();\n\n Ok(())\n\n }\n\n\n\n /// Put the session into a failed state, removing all tasks and objects,\n\n /// cancelling all finish_hooks.\n\n /// Debug message string is propagated together with error message\n\n /// it usually comes from task debug string\n\n pub fn fail_session(\n\n &mut self,\n\n session: &SessionRef,\n\n cause: String,\n\n debug: Option<String>,\n\n ) -> Result<()> {\n\n debug!(\n\n \"Failing session {} of client {} with cause {:?}\",\n\n session.get_id(),\n\n session.get().client.get_id(),\n", "file_path": "src/server/state.rs", "rank": 99, "score": 42975.981392974834 } ]
Rust
src/hierarchies.rs
esrlabs/cgroups-rs
556dea62b963fb75c3b1234f9cab6164706fc594
use std::fs; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::PathBuf; use crate::blkio::BlkIoController; use crate::cpu::CpuController; use crate::cpuacct::CpuAcctController; use crate::cpuset::CpuSetController; use crate::devices::DevicesController; use crate::freezer::FreezerController; use crate::hugetlb::HugeTlbController; use crate::memory::MemController; use crate::net_cls::NetClsController; use crate::net_prio::NetPrioController; use crate::perf_event::PerfEventController; use crate::pid::PidController; use crate::rdma::RdmaController; use crate::systemd::SystemdController; use crate::{Controllers, Hierarchy, Subsystem}; use crate::cgroup::Cgroup; #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct Mountinfo { pub mount_point: PathBuf, pub fs_type: (String, Option<String>), pub super_opts: Vec<String>, } pub(crate) fn parse_mountinfo_for_line(line: &str) -> Option<Mountinfo> { let s_values: Vec<_> = line.split(" - ").collect(); if s_values.len() != 2 { return None; } let s0_values: Vec<_> = s_values[0].trim().split(' ').collect(); let s1_values: Vec<_> = s_values[1].trim().split(' ').collect(); if s0_values.len() < 6 || s1_values.len() < 3 { return None; } let mount_point = PathBuf::from(s0_values[4]); let fs_type_values: Vec<_> = s1_values[0].trim().split('.').collect(); let fs_type = match fs_type_values.len() { 1 => (fs_type_values[0].to_string(), None), 2 => ( fs_type_values[0].to_string(), Some(fs_type_values[1].to_string()), ), _ => return None, }; let super_opts: Vec<String> = s1_values[2].trim().split(',').map(String::from).collect(); Some(Mountinfo { mount_point, fs_type, super_opts, }) } fn mountinfo_file(file: &mut File) -> Vec<Mountinfo> { let mut r = Vec::new(); for line in BufReader::new(file).lines() { match line { Ok(line) => { if let Some(mi) = parse_mountinfo_for_line(&line) { if mi.fs_type.0 == "cgroup" { r.push(mi); } } } Err(_) => break, } } r } pub fn mountinfo_self() -> Vec<Mountinfo> { match File::open("/proc/self/mountinfo") { Ok(mut file) => mountinfo_file(&mut file), Err(_) => vec![], } } #[derive(Debug, Clone)] pub struct V1 { mountinfo: Vec<Mountinfo>, } #[derive(Debug, Clone)] pub struct V2 { root: String, } impl Hierarchy for V1 { fn v2(&self) -> bool { false } fn subsystems(&self) -> Vec<Subsystem> { let mut subs = vec![]; if let Some(root) = self.get_mount_point(Controllers::BlkIo) { subs.push(Subsystem::BlkIo(BlkIoController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Mem) { subs.push(Subsystem::Mem(MemController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Pids) { subs.push(Subsystem::Pid(PidController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::CpuSet) { subs.push(Subsystem::CpuSet(CpuSetController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::CpuAcct) { subs.push(Subsystem::CpuAcct(CpuAcctController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Cpu) { subs.push(Subsystem::Cpu(CpuController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Devices) { subs.push(Subsystem::Devices(DevicesController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Freezer) { subs.push(Subsystem::Freezer(FreezerController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::NetCls) { subs.push(Subsystem::NetCls(NetClsController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::PerfEvent) { 
subs.push(Subsystem::PerfEvent(PerfEventController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::NetPrio) { subs.push(Subsystem::NetPrio(NetPrioController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::HugeTlb) { subs.push(Subsystem::HugeTlb(HugeTlbController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Rdma) { subs.push(Subsystem::Rdma(RdmaController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Systemd) { subs.push(Subsystem::Systemd(SystemdController::new(root, false))); } subs } fn root_control_group(&self) -> Cgroup { Cgroup::load(auto(), "") } fn root(&self) -> PathBuf { self.mountinfo .iter() .find_map(|m| { if m.fs_type.0 == "cgroup" { return Some(m.mount_point.parent().unwrap()); } None }) .unwrap() .to_path_buf() } } impl Hierarchy for V2 { fn v2(&self) -> bool { true } fn subsystems(&self) -> Vec<Subsystem> { let p = format!("{}/{}", UNIFIED_MOUNTPOINT, "cgroup.controllers"); let ret = fs::read_to_string(p.as_str()); if ret.is_err() { return vec![]; } let mut subs = vec![]; let controllers = ret.unwrap().trim().to_string(); let mut controller_list: Vec<&str> = controllers.split(' ').collect(); controller_list.push("freezer"); for s in controller_list { match s { "cpu" => { subs.push(Subsystem::Cpu(CpuController::new(self.root(), true))); } "io" => { subs.push(Subsystem::BlkIo(BlkIoController::new(self.root(), true))); } "cpuset" => { subs.push(Subsystem::CpuSet(CpuSetController::new(self.root(), true))); } "memory" => { subs.push(Subsystem::Mem(MemController::new(self.root(), true))); } "pids" => { subs.push(Subsystem::Pid(PidController::new(self.root(), true))); } "freezer" => { subs.push(Subsystem::Freezer(FreezerController::new( self.root(), true, ))); } "hugetlb" => { subs.push(Subsystem::HugeTlb(HugeTlbController::new( self.root(), true, ))); } _ => {} } } subs } fn root_control_group(&self) -> Cgroup { Cgroup::load(auto(), "") } fn root(&self) -> PathBuf { PathBuf::from(self.root.clone()) } } impl V1 { pub fn new() -> V1 { V1 { mountinfo: mountinfo_self(), } } pub fn get_mount_point(&self, controller: Controllers) -> Option<PathBuf> { self.mountinfo.iter().find_map(|m| { if m.fs_type.0 == "cgroup" && m.super_opts.contains(&controller.to_string()) { return Some(m.mount_point.clone()); } None }) } } impl Default for V1 { fn default() -> Self { Self::new() } } impl V2 { pub fn new() -> V2 { V2 { root: String::from(UNIFIED_MOUNTPOINT), } } } impl Default for V2 { fn default() -> Self { Self::new() } } pub const UNIFIED_MOUNTPOINT: &str = "/sys/fs/cgroup"; #[cfg(any( all(target_os = "linux", not(target_env = "musl")), target_os = "android" ))] pub fn is_cgroup2_unified_mode() -> bool { use nix::sys::statfs; let path = std::path::Path::new(UNIFIED_MOUNTPOINT); let fs_stat = statfs::statfs(path); if fs_stat.is_err() { return false; } fs_stat.unwrap().filesystem_type() == statfs::CGROUP2_SUPER_MAGIC } pub const INIT_CGROUP_PATHS: &str = "/proc/1/cgroup"; #[cfg(all(target_os = "linux", target_env = "musl"))] pub fn is_cgroup2_unified_mode() -> bool { let lines = fs::read_to_string(INIT_CGROUP_PATHS); if lines.is_err() { return false; } for line in lines.unwrap().lines() { let fields: Vec<&str> = line.split(':').collect(); if fields.len() != 3 { continue; } if fields[0] != "0" { return false; } } true } pub fn auto() -> Box<dyn Hierarchy> { if is_cgroup2_unified_mode() { Box::new(V2::new()) } else { Box::new(V1::new()) } } #[cfg(test)] mod tests { use super::*; #[test] fn 
test_parse_mount() { let mountinfo = vec![ ("29 26 0:26 / /sys/fs/cgroup/cpuset,cpu,cpuacct rw,nosuid,nodev,noexec,relatime shared:10 - cgroup cgroup rw,cpuset,cpu,cpuacct", Mountinfo{mount_point: PathBuf::from("/sys/fs/cgroup/cpuset,cpu,cpuacct"), fs_type: ("cgroup".to_string(), None), super_opts: vec![ "rw".to_string(), "cpuset".to_string(), "cpu".to_string(), "cpuacct".to_string(), ]}), ("121 1731 0:42 / /shm rw,nosuid,nodev,noexec,relatime shared:68 master:66 - tmpfs shm rw,size=65536k", Mountinfo{mount_point: PathBuf::from("/shm"), fs_type: ("tmpfs".to_string(), None), super_opts: vec![ "rw".to_string(), "size=65536k".to_string(), ]}), ("121 1731 0:42 / /shm rw,nosuid,nodev,noexec,relatime shared:68 master:66 - tmpfs.123 shm rw,size=65536k", Mountinfo{mount_point: PathBuf::from("/shm"), fs_type: ("tmpfs".to_string(), Some("123".to_string())), super_opts: vec![ "rw".to_string(), "size=65536k".to_string(), ]}), ]; for mi in mountinfo { let info = parse_mountinfo_for_line(mi.0).unwrap(); assert_eq!(info, mi.1) } } }
use std::fs; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::PathBuf; use crate::blkio::BlkIoController; use crate::cpu::CpuController; use crate::cpuacct::CpuAcctController; use crate::cpuset::CpuSetController; use crate::devices::DevicesController; use crate::freezer::FreezerController; use crate::hugetlb::HugeTlbController; use crate::memory::MemController; use crate::net_cls::NetClsController; use crate::net_prio::NetPrioController; use crate::perf_event::PerfEventController; use crate::pid::PidController; use crate::rdma::RdmaController; use crate::systemd::SystemdController; use crate::{Controllers, Hierarchy, Subsystem}; use crate::cgroup::Cgroup; #[derive(Debug, PartialEq, Eq, Hash, Clone)] pub struct Mountinfo { pub mount_point: PathBuf, pub fs_type: (String, Option<String>), pub super_opts: Vec<String>, } pub(crate) fn parse_mountinfo_for_line(line: &str) -> Option<Mountinfo> { let s_values: Vec<_> = line.split(" - ").collect(); if s_values.len() != 2 { return None; } let s0_values: Vec<_> = s_values[0].trim().split(' ').collect(); let s1_values: Vec<_> = s_values[1].trim().split(' ').collect(); if s0_values.len() < 6 || s1_values.len() < 3 { return None; } let mount_point = PathBuf::from(s0_values[4]); let fs_type_values: Vec<_> = s1_values[0].trim().split('.').collect(); let fs_type = match fs_type_values.len() { 1 => (fs_type_values[0].to_string(), None), 2 => ( fs_type_values[0].to_string(), Some(fs_type_values[1].to_string()), ), _ => return None, }; let super_opts: Vec<String> = s1_values[2].trim().split(',').map(String::from).collect(); Some(Mountinfo { mount_point, fs_type, super_opts, }) } fn mountinfo_file(file: &mut File) -> Vec<Mountinfo> { let mut r = Vec::new(); for line in BufReader::new(file).lines() { match line { Ok(line) => { if let Some(mi) = parse_mountinfo_for_line(&line) { if mi.fs_type.0 == "cgroup" { r.push(mi); } } } Err(_) => break, } } r } pub fn mountinfo_self() -> Vec<Mountinfo> { match File::open("/proc/self/mountinfo") { Ok(mut file) => mountinfo_file(&mut file), Err(_) => vec![], } } #[derive(Debug, Clone)] pub struct V1 { mountinfo: Vec<Mountinfo>, } #[derive(Debug, Clone)] pub struct V2 { root: String, } impl Hierarchy for V1 { fn v2(&self) -> bool { false } fn subsystems(&self) -> Vec<Subsystem> { let mut subs = vec![]; if let Some(root) = self.get_mount_point(Controllers::BlkIo) { subs.push(Subsystem::BlkIo(BlkIoController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Mem) { subs.push(Subsystem::Mem(MemController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Pids) { subs.push(Subsystem::Pid(PidController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::CpuSet) { subs.push(Subsystem::CpuSet(CpuSetController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::CpuAcct) { subs.push(Subsystem::CpuAcct(CpuAcctController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Cpu) { subs.push(Subsystem::Cpu(CpuController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Devices) { subs.push(Subsystem::Devices(DevicesController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Freezer) { subs.push(Subsystem::Freezer(FreezerController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::NetCls) { subs.push(Subsystem::NetCls(NetClsController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::PerfEvent) { 
subs.push(Subsystem::PerfEvent(PerfEventController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::NetPrio) { subs.push(Subsystem::NetPrio(NetPrioController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::HugeTlb) { subs.push(Subsystem::HugeTlb(HugeTlbController::new(root, false))); } if let Some(root) = self.get_mount_point(Controllers::Rdma) { subs.push(Subsystem::Rdma(RdmaController::new(root))); } if let Some(root) = self.get_mount_point(Controllers::Systemd) { subs.push(Subsystem::Systemd(SystemdController::new(root, false))); } subs } fn root_control_group(&self) -> Cgroup { Cgroup::load(auto(), "") } fn root(&self) -> PathBuf { self.mountinfo .iter() .find_map(|m| { if m.fs_type.0 == "cgroup" { return Some(m.mount_point.parent().unwrap()); } None }) .unwrap() .to_path_buf() } } impl Hierarchy for V2 { fn v2(&self) -> bool { true } fn subsystems(&self) -> Vec<Subsystem> { let p = format!("{}/{}", UNIFIED_MOUNTPOINT, "cgroup.controllers"); let ret = fs::read_to_string(p.as_str()); if ret.is_err() { return vec![]; } let mut subs = vec![]; let controllers = ret.unwrap().trim().to_string(); let mut controller_list: Vec<&str> = controllers.split(' ').collect(); controller_list.push("freezer"); for s in controller_list { match s { "cpu" => { subs.push(Subsystem::Cpu(CpuController::new(self.root(), true))); } "io" => { subs.push(Subsystem::BlkIo(BlkIoController::new(self.root(), true))); } "cpuset" => { subs.push(Subsystem::CpuSet(CpuSetController::new(self.root(), true))); } "memory" => { subs.push(Subsystem::Mem(MemController::new(self.root(), true))); } "pids" => { subs.push(Subsystem::Pid(PidController::new(self.root(), true))); } "freezer" => { subs.push(Subsystem::Freezer(FreezerController::new( self.root(), true, ))); } "hugetlb" => { subs.push(Subsystem::HugeTlb(HugeTlbController::new( self.root(), true, ))); } _ => {} } } subs } fn root_control_group(&self) -> Cgroup { Cgroup::load(auto(), "") } fn root(&self) -> PathBuf { PathBuf::from(self.root.clone()) } } impl V1 { pub fn new() -> V1 { V1 { mountinfo: mountinfo_self(), } } pub fn get_mount_point(&self, controller: Controllers) -> Option<PathBuf> { self.mountinfo.iter().find_map(|m| { if m.fs_type.0 == "cgroup" && m.super_opts.contains(&controller.to_string()) { return Some(m.mount_point.clone()); } None }) } } impl Default for V1 { fn default() -> Self { Self::new() } } impl V2 { pub fn new() -> V2 { V2 { root: String::from(UNIFIED_MOUNTPOINT), } } } impl Default for V2 { fn default() -> Self { Self::new() } } pub const UNIFIED_MOUNTPOINT: &str = "/sys/fs/cgroup"; #[cfg(any( all(target_os = "linux", not(target_env = "musl")), target_os = "android" ))] pub fn is_cgroup2_unified_mode() -> bool { use nix::sys::statfs; let path = std::path::Path::new(UNIFIED_MOUNTPOINT); let fs_stat = statfs::statfs(path); if fs_stat.is_err() { return false; } fs_stat.unwrap().filesystem_type() == statfs::CGROUP2_SUPER_MAGIC } pub const INIT_CGROUP_PATHS: &str = "/proc/1/cgroup"; #[cfg(all(target_os = "linux", target_env = "musl"))]
pub fn auto() -> Box<dyn Hierarchy> { if is_cgroup2_unified_mode() { Box::new(V2::new()) } else { Box::new(V1::new()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_parse_mount() { let mountinfo = vec![ ("29 26 0:26 / /sys/fs/cgroup/cpuset,cpu,cpuacct rw,nosuid,nodev,noexec,relatime shared:10 - cgroup cgroup rw,cpuset,cpu,cpuacct", Mountinfo{mount_point: PathBuf::from("/sys/fs/cgroup/cpuset,cpu,cpuacct"), fs_type: ("cgroup".to_string(), None), super_opts: vec![ "rw".to_string(), "cpuset".to_string(), "cpu".to_string(), "cpuacct".to_string(), ]}), ("121 1731 0:42 / /shm rw,nosuid,nodev,noexec,relatime shared:68 master:66 - tmpfs shm rw,size=65536k", Mountinfo{mount_point: PathBuf::from("/shm"), fs_type: ("tmpfs".to_string(), None), super_opts: vec![ "rw".to_string(), "size=65536k".to_string(), ]}), ("121 1731 0:42 / /shm rw,nosuid,nodev,noexec,relatime shared:68 master:66 - tmpfs.123 shm rw,size=65536k", Mountinfo{mount_point: PathBuf::from("/shm"), fs_type: ("tmpfs".to_string(), Some("123".to_string())), super_opts: vec![ "rw".to_string(), "size=65536k".to_string(), ]}), ]; for mi in mountinfo { let info = parse_mountinfo_for_line(mi.0).unwrap(); assert_eq!(info, mi.1) } } }
pub fn is_cgroup2_unified_mode() -> bool {
    let lines = fs::read_to_string(INIT_CGROUP_PATHS);
    if lines.is_err() {
        return false;
    }
    for line in lines.unwrap().lines() {
        let fields: Vec<&str> = line.split(':').collect();
        if fields.len() != 3 {
            continue;
        }
        if fields[0] != "0" {
            return false;
        }
    }
    true
}
function_block-full_function
[]
Rust
stake-pool/program/tests/set_fee.rs
honeydefi/NFT-farm
dc97a5439c6ab85e7b0ed4c86f3f5c6939d07c99
#![cfg(feature = "test-bpf")] mod helpers; use { helpers::*, solana_program_test::*, solana_sdk::{ borsh::try_from_slice_unchecked, instruction::InstructionError, signature::{Keypair, Signer}, transaction::{Transaction, TransactionError}, }, spl_stake_pool::{ error, id, instruction, state::{Fee, FeeType, StakePool}, }, }; async fn setup() -> (ProgramTestContext, StakePoolAccounts, Fee) { let mut context = program_test().start_with_context().await; let stake_pool_accounts = StakePoolAccounts::new(); stake_pool_accounts .initialize_stake_pool( &mut context.banks_client, &context.payer, &context.last_blockhash, 1, ) .await .unwrap(); let new_fee = Fee { numerator: 10, denominator: 10, }; (context, stake_pool_accounts, new_fee) } #[tokio::test] async fn success() { let (mut context, stake_pool_accounts, new_fee) = setup().await; let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); let old_fee = stake_pool.fee; let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); context .banks_client .process_transaction(transaction) .await .unwrap(); let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); assert_eq!(stake_pool.fee, old_fee); assert_eq!(stake_pool.next_epoch_fee, Some(new_fee)); let first_normal_slot = context.genesis_config().epoch_schedule.first_normal_slot; let slots_per_epoch = context.genesis_config().epoch_schedule.slots_per_epoch; context .warp_to_slot(first_normal_slot + slots_per_epoch) .unwrap(); stake_pool_accounts .update_all( &mut context.banks_client, &context.payer, &context.last_blockhash, &[], false, ) .await; let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); assert_eq!(stake_pool.fee, new_fee); assert_eq!(stake_pool.next_epoch_fee, None); } #[tokio::test] async fn fail_wrong_manager() { let (mut context, stake_pool_accounts, new_fee) = setup().await; let wrong_manager = Keypair::new(); let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &wrong_manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &wrong_manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::WrongManager as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs while malicious try to set manager"), } } #[tokio::test] async fn fail_high_fee() { let (mut context, stake_pool_accounts, _new_fee) = setup().await; let new_fee = Fee { numerator: 11, denominator: 10, }; let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], 
Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::FeeTooHigh as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs when setting fee too high"), } } #[tokio::test] async fn fail_not_updated() { let mut context = program_test().start_with_context().await; let stake_pool_accounts = StakePoolAccounts::new(); stake_pool_accounts .initialize_stake_pool( &mut context.banks_client, &context.payer, &context.last_blockhash, 1, ) .await .unwrap(); let new_fee = Fee { numerator: 10, denominator: 100, }; context.warp_to_slot(50_000).unwrap(); let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::StakeListAndPoolOutOfDate as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs when stake pool out of date"), } }
#![cfg(feature = "test-bpf")] mod helpers; use { helpers::*, solana_program_test::*, solana_sdk::{ borsh::try_from_slice_unchecked, instruction::InstructionError, signature::{Keypair, Signer}, transaction::{Transaction, TransactionError}, }, spl_stake_pool::{ error, id, instruction, state::{Fee, FeeType, StakePool}, }, }; async fn setup() -> (ProgramTestContext, StakePoolAccounts, Fee) { let mut context = program_test().start_with_context().await; let stake_pool_accounts = StakePoolAccounts::new(); stake_pool_accounts .initialize_stake_pool( &mut context.banks_client, &context.payer, &context.last_blockhash, 1, ) .await .unwrap(); let new_fee = Fee { numerator: 10, denominator: 10, }; (context, stake_pool_accounts, new_fee) } #[tokio::test] async fn success() { let (mut context, stake_pool_accounts, new_fee) = setup().await; let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); let old_fee = stake_pool.fee; let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); context .banks_client .process_transaction(transaction) .await .unwrap(); let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); assert_eq!(stake_pool.fee, old_fee); assert_eq!(stake_pool.next_epoch_fee, Some(new_fee)); let first_normal_slot = context.genesis_config().epoch_schedule.first_normal_slot; let slots_per_epoch = context.genesis_config().epoch_schedule.slots_per_epoch; context .warp_to_slot(first_normal_slot + slots_per_epoch) .unwrap(); stake_pool_accounts .update_all( &mut context.banks_client, &context.payer, &context.last_blockhash, &[], false, ) .await; let stake_pool = get_account( &mut context.banks_client, &stake_pool_accounts.stake_pool.pubkey(), ) .await; let stake_pool = try_from_slice_unchecked::<StakePool>(&stake_pool.data.as_slice()).unwrap(); assert_eq!(stake_pool.fee, new_fee); assert_eq!(stake_pool.next_epoch_fee, None); } #[tokio::test] async fn fail_wrong_manager() { let (mut context, stake_pool_acco
#[tokio::test] async fn fail_high_fee() { let (mut context, stake_pool_accounts, _new_fee) = setup().await; let new_fee = Fee { numerator: 11, denominator: 10, }; let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::FeeTooHigh as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs when setting fee too high"), } } #[tokio::test] async fn fail_not_updated() { let mut context = program_test().start_with_context().await; let stake_pool_accounts = StakePoolAccounts::new(); stake_pool_accounts .initialize_stake_pool( &mut context.banks_client, &context.payer, &context.last_blockhash, 1, ) .await .unwrap(); let new_fee = Fee { numerator: 10, denominator: 100, }; context.warp_to_slot(50_000).unwrap(); let transaction = Transaction::new_signed_with_payer( &[instruction::set_fee( &id(), &stake_pool_accounts.stake_pool.pubkey(), &stake_pool_accounts.manager.pubkey(), FeeType::Epoch(new_fee), )], Some(&context.payer.pubkey()), &[&context.payer, &stake_pool_accounts.manager], context.last_blockhash, ); let error = context .banks_client .process_transaction(transaction) .await .err() .unwrap() .unwrap(); match error { TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => { let program_error = error::StakePoolError::StakeListAndPoolOutOfDate as u32; assert_eq!(error_index, program_error); } _ => panic!("Wrong error occurs when stake pool out of date"), } }
unts, new_fee) = setup().await;
    let wrong_manager = Keypair::new();
    let transaction = Transaction::new_signed_with_payer(
        &[instruction::set_fee(
            &id(),
            &stake_pool_accounts.stake_pool.pubkey(),
            &wrong_manager.pubkey(),
            FeeType::Epoch(new_fee),
        )],
        Some(&context.payer.pubkey()),
        &[&context.payer, &wrong_manager],
        context.last_blockhash,
    );
    let error = context
        .banks_client
        .process_transaction(transaction)
        .await
        .err()
        .unwrap()
        .unwrap();
    match error {
        TransactionError::InstructionError(_, InstructionError::Custom(error_index)) => {
            let program_error = error::StakePoolError::WrongManager as u32;
            assert_eq!(error_index, program_error);
        }
        _ => panic!("Wrong error occurs while malicious try to set manager"),
    }
}
function_block-function_prefixed
[ { "content": "fn validate_fraction(numerator: u64, denominator: u64) -> Result<(), SwapError> {\n\n if denominator == 0 && numerator == 0 {\n\n Ok(())\n\n } else if numerator >= denominator {\n\n Err(SwapError::InvalidFee)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Fees {\n\n /// Calculate the withdraw fee in pool tokens\n\n pub fn owner_withdraw_fee(&self, pool_tokens: u128) -> Option<u128> {\n\n calculate_fee(\n\n pool_tokens,\n\n u128::try_from(self.owner_withdraw_fee_numerator).ok()?,\n\n u128::try_from(self.owner_withdraw_fee_denominator).ok()?,\n\n )\n\n }\n\n\n", "file_path": "token-swap/program/src/curve/fees.rs", "rank": 0, "score": 232132.94119047144 }, { "content": "pub fn add_usdc_oracle(test: &mut ProgramTest) -> TestOracle {\n\n add_oracle(\n\n test,\n\n // Mock with SRM since Pyth doesn't have USDC yet\n\n Pubkey::from_str(SRM_PYTH_PRODUCT).unwrap(),\n\n Pubkey::from_str(SRM_PYTH_PRICE).unwrap(),\n\n // Set USDC price to $1\n\n Decimal::from(1u64),\n\n )\n\n}\n\n\n", "file_path": "token-lending/program/tests/helpers/mod.rs", "rank": 1, "score": 211719.0609397907 }, { "content": "pub fn add_sol_oracle(test: &mut ProgramTest) -> TestOracle {\n\n add_oracle(\n\n test,\n\n Pubkey::from_str(SOL_PYTH_PRODUCT).unwrap(),\n\n Pubkey::from_str(SOL_PYTH_PRICE).unwrap(),\n\n // Set SOL price to $20\n\n Decimal::from(20u64),\n\n )\n\n}\n\n\n", "file_path": "token-lending/program/tests/helpers/mod.rs", "rank": 2, "score": 211719.0609397907 }, { "content": "pub fn add_usdc_mint(test: &mut ProgramTest) -> TestMint {\n\n let authority = Keypair::new();\n\n let pubkey = Pubkey::from_str(USDC_MINT).unwrap();\n\n let decimals = 6;\n\n test.add_packable_account(\n\n pubkey,\n\n u32::MAX as u64,\n\n &Mint {\n\n is_initialized: true,\n\n mint_authority: COption::Some(authority.pubkey()),\n\n decimals,\n\n ..Mint::default()\n\n },\n\n &spl_token::id(),\n\n );\n\n TestMint {\n\n pubkey,\n\n authority,\n\n decimals,\n\n }\n\n}\n\n\n\npub struct TestOracle {\n\n pub product_pubkey: Pubkey,\n\n pub price_pubkey: Pubkey,\n\n pub price: Decimal,\n\n}\n\n\n", "file_path": "token-lending/program/tests/helpers/mod.rs", "rank": 3, "score": 211719.0609397907 }, { "content": "pub fn add_lending_market(test: &mut ProgramTest) -> TestLendingMarket {\n\n let lending_market_pubkey = Pubkey::new_unique();\n\n let (lending_market_authority, bump_seed) =\n\n Pubkey::find_program_address(&[lending_market_pubkey.as_ref()], &spl_token_lending::id());\n\n\n\n let lending_market_owner =\n\n read_keypair_file(\"tests/fixtures/lending_market_owner.json\").unwrap();\n\n let oracle_program_id = read_keypair_file(\"tests/fixtures/oracle_program_id.json\")\n\n .unwrap()\n\n .pubkey();\n\n\n\n test.add_packable_account(\n\n lending_market_pubkey,\n\n u32::MAX as u64,\n\n &LendingMarket::new(InitLendingMarketParams {\n\n bump_seed,\n\n owner: lending_market_owner.pubkey(),\n\n quote_currency: QUOTE_CURRENCY,\n\n token_program_id: spl_token::id(),\n\n oracle_program_id,\n", "file_path": "token-lending/program/tests/helpers/mod.rs", "rank": 4, "score": 208249.04249432398 }, { "content": "fn pack_bool(boolean: bool, dst: &mut [u8; 1]) {\n\n *dst = (boolean as u8).to_le_bytes()\n\n}\n\n\n", "file_path": "token-lending/program/src/state/mod.rs", "rank": 5, "score": 197638.22860588675 }, { "content": "/// Creates FlagInstructionError instruction\n\npub fn flag_instruction_error(\n\n program_id: &Pubkey,\n\n // Accounts\n\n proposal: &Pubkey,\n\n token_owner_record: &Pubkey,\n\n governance_authority: &Pubkey,\n\n 
proposal_instruction: &Pubkey,\n\n) -> Instruction {\n\n let accounts = vec![\n\n AccountMeta::new(*proposal, false),\n\n AccountMeta::new_readonly(*token_owner_record, false),\n\n AccountMeta::new_readonly(*governance_authority, true),\n\n AccountMeta::new(*proposal_instruction, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ];\n\n\n\n let instruction = GovernanceInstruction::FlagInstructionError {};\n\n\n\n Instruction {\n\n program_id: *program_id,\n\n accounts,\n\n data: instruction.try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "governance/program/src/instruction.rs", "rank": 6, "score": 195168.01787610166 }, { "content": "/// Processes FlagInstructionError instruction\n\npub fn process_flag_instruction_error(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n) -> ProgramResult {\n\n let account_info_iter = &mut accounts.iter();\n\n\n\n let proposal_info = next_account_info(account_info_iter)?; // 0\n\n let token_owner_record_info = next_account_info(account_info_iter)?; // 1\n\n let governance_authority_info = next_account_info(account_info_iter)?; // 2\n\n\n\n let proposal_instruction_info = next_account_info(account_info_iter)?; // 3\n\n\n\n let clock_info = next_account_info(account_info_iter)?; // 4\n\n let clock = Clock::from_account_info(clock_info)?;\n\n\n\n let mut proposal_data = get_proposal_data(program_id, proposal_info)?;\n\n\n\n let mut proposal_instruction_data = get_proposal_instruction_data_for_proposal(\n\n program_id,\n\n proposal_instruction_info,\n", "file_path": "governance/program/src/processor/process_flag_instruction_error.rs", "rank": 7, "score": 193566.18757572133 }, { "content": "fn checked_transaction_with_signers<T: Signers>(\n\n config: &Config,\n\n instructions: &[Instruction],\n\n signers: &T,\n\n) -> Result<Transaction, Error> {\n\n let (recent_blockhash, fee_calculator) = config.rpc_client.get_recent_blockhash()?;\n\n let transaction = Transaction::new_signed_with_payer(\n\n instructions,\n\n Some(&config.fee_payer.pubkey()),\n\n signers,\n\n recent_blockhash,\n\n );\n\n\n\n check_fee_payer_balance(config, fee_calculator.calculate_fee(transaction.message()))?;\n\n Ok(transaction)\n\n}\n\n\n", "file_path": "stake-pool/cli/src/main.rs", "rank": 8, "score": 185793.9973436392 }, { "content": "pub fn load_mut<T: Pod>(data: &mut [u8]) -> Result<&mut T, PodCastError> {\n\n let size = size_of::<T>();\n\n Ok(from_bytes_mut(cast_slice_mut::<u8, u8>(\n\n try_cast_slice_mut(&mut data[0..size])?,\n\n )))\n\n}\n", "file_path": "token-lending/program/src/pyth.rs", "rank": 9, "score": 183084.63655258546 }, { "content": "/// Creates a 'set fee' instruction.\n\npub fn set_fee(\n\n program_id: &Pubkey,\n\n stake_pool: &Pubkey,\n\n manager: &Pubkey,\n\n fee: FeeType,\n\n) -> Instruction {\n\n let accounts = vec![\n\n AccountMeta::new(*stake_pool, false),\n\n AccountMeta::new_readonly(*manager, true),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ];\n\n Instruction {\n\n program_id: *program_id,\n\n accounts,\n\n data: StakePoolInstruction::SetFee { fee }.try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "stake-pool/program/src/instruction.rs", "rank": 10, "score": 181724.38108320886 }, { "content": "/// Processes an instruction\n\npub fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n input: &[u8],\n\n) -> ProgramResult {\n\n let instruction = GovernanceInstruction::try_from_slice(input)\n\n .map_err(|_| ProgramError::InvalidInstructionData)?;\n\n\n\n if let 
GovernanceInstruction::InsertInstruction {\n\n index,\n\n hold_up_time,\n\n instruction: _,\n\n } = instruction\n\n {\n\n // Do not dump instruction data into logs\n\n msg!(\n\n \"GOVERNANCE-INSTRUCTION: InsertInstruction {{ index: {:?}, hold_up_time: {:?} }}\",\n\n index,\n\n hold_up_time\n\n );\n", "file_path": "governance/program/src/processor/mod.rs", "rank": 11, "score": 181557.78540526703 }, { "content": "pub fn add_reserve(\n\n test: &mut ProgramTest,\n\n lending_market: &TestLendingMarket,\n\n oracle: &TestOracle,\n\n user_accounts_owner: &Keypair,\n\n args: AddReserveArgs,\n\n) -> TestReserve {\n\n let AddReserveArgs {\n\n name,\n\n config,\n\n liquidity_amount,\n\n liquidity_mint_pubkey,\n\n liquidity_mint_decimals,\n\n user_liquidity_amount,\n\n borrow_amount,\n\n initial_borrow_rate,\n\n collateral_amount,\n\n mark_fresh,\n\n slots_elapsed,\n\n } = args;\n", "file_path": "token-lending/program/tests/helpers/mod.rs", "rank": 12, "score": 178406.98627201092 }, { "content": "pub fn add_oracle(\n\n test: &mut ProgramTest,\n\n product_pubkey: Pubkey,\n\n price_pubkey: Pubkey,\n\n price: Decimal,\n\n) -> TestOracle {\n\n let oracle_program_id = read_keypair_file(\"tests/fixtures/oracle_program_id.json\").unwrap();\n\n\n\n // Add Pyth product account\n\n test.add_account_with_file_data(\n\n product_pubkey,\n\n u32::MAX as u64,\n\n oracle_program_id.pubkey(),\n\n &format!(\"{}.bin\", product_pubkey.to_string()),\n\n );\n\n\n\n // Add Pyth price account after setting the price\n\n let filename = &format!(\"{}.bin\", price_pubkey.to_string());\n\n let mut pyth_price_data = read_file(find_file(filename).unwrap_or_else(|| {\n\n panic!(\"Unable to locate {}\", filename);\n", "file_path": "token-lending/program/tests/helpers/mod.rs", "rank": 13, "score": 178406.98627201092 }, { "content": "pub fn add_obligation(\n\n test: &mut ProgramTest,\n\n lending_market: &TestLendingMarket,\n\n user_accounts_owner: &Keypair,\n\n args: AddObligationArgs,\n\n) -> TestObligation {\n\n let AddObligationArgs {\n\n deposits,\n\n borrows,\n\n mark_fresh,\n\n slots_elapsed,\n\n } = args;\n\n\n\n let obligation_keypair = Keypair::new();\n\n let obligation_pubkey = obligation_keypair.pubkey();\n\n\n\n let (obligation_deposits, test_deposits) = deposits\n\n .iter()\n\n .map(|(deposit_reserve, collateral_amount)| {\n\n let mut collateral = ObligationCollateral::new(deposit_reserve.pubkey);\n", "file_path": "token-lending/program/tests/helpers/mod.rs", "rank": 14, "score": 178406.98627201092 }, { "content": "/// Create a `RecordInstruction::CloseAccount` instruction\n\npub fn close_account(record_account: &Pubkey, signer: &Pubkey, receiver: &Pubkey) -> Instruction {\n\n Instruction::new_with_borsh(\n\n id(),\n\n &RecordInstruction::CloseAccount,\n\n vec![\n\n AccountMeta::new(*record_account, false),\n\n AccountMeta::new_readonly(*signer, true),\n\n AccountMeta::new(*receiver, false),\n\n ],\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::state::tests::TEST_DATA;\n\n use solana_program::program_error::ProgramError;\n\n\n\n #[test]\n\n fn serialize_initialize() {\n", "file_path": "record/program/src/instruction.rs", "rank": 15, "score": 178004.8601791127 }, { "content": "pub fn map_transaction_error(transport_error: TransportError) -> ProgramError {\n\n match transport_error {\n\n TransportError::TransactionError(TransactionError::InstructionError(\n\n _,\n\n InstructionError::Custom(error_index),\n\n )) => ProgramError::Custom(error_index),\n\n 
TransportError::TransactionError(TransactionError::InstructionError(\n\n _,\n\n instruction_error,\n\n )) => ProgramError::try_from(instruction_error).unwrap_or_else(|ie| match ie {\n\n InstructionError::IncorrectAuthority => {\n\n ProgramInstructionError::IncorrectAuthority.into()\n\n }\n\n InstructionError::PrivilegeEscalation => {\n\n ProgramInstructionError::PrivilegeEscalation.into()\n\n }\n\n _ => panic!(\"TEST-INSTRUCTION-ERROR {:?}\", ie),\n\n }),\n\n\n\n _ => panic!(\"TEST-TRANSPORT-ERROR: {:?}\", transport_error),\n\n }\n\n}\n\n\n", "file_path": "governance/test-sdk/src/tools.rs", "rank": 16, "score": 177576.63091726828 }, { "content": "// Helpers\n\nfn pack_decimal(decimal: Decimal, dst: &mut [u8; 16]) {\n\n *dst = decimal\n\n .to_scaled_val()\n\n .expect(\"Decimal cannot be packed\")\n\n .to_le_bytes();\n\n}\n\n\n", "file_path": "token-lending/program/src/state/mod.rs", "rank": 17, "score": 175243.78609462606 }, { "content": "pub fn add_account_for_program(\n\n test: &mut ProgramTest,\n\n program_derived_account: &Pubkey,\n\n amount: u64,\n\n mint_pubkey: &Pubkey,\n\n) -> Pubkey {\n\n let program_owned_token_account = Keypair::new();\n\n test.add_packable_account(\n\n program_owned_token_account.pubkey(),\n\n u32::MAX as u64,\n\n &Token {\n\n mint: *mint_pubkey,\n\n owner: *program_derived_account,\n\n amount,\n\n state: AccountState::Initialized,\n\n is_native: COption::None,\n\n ..Token::default()\n\n },\n\n &spl_token::id(),\n\n );\n", "file_path": "token-lending/program/tests/helpers/mod.rs", "rank": 18, "score": 174825.94655804904 }, { "content": "fn unpack_bool(src: &[u8; 1]) -> Result<bool, ProgramError> {\n\n match u8::from_le_bytes(*src) {\n\n 0 => Ok(false),\n\n 1 => Ok(true),\n\n _ => {\n\n msg!(\"Boolean cannot be unpacked\");\n\n Err(ProgramError::InvalidAccountData)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn initial_collateral_rate_sanity() {\n\n assert_eq!(\n\n INITIAL_COLLATERAL_RATIO.checked_mul(WAD).unwrap(),\n\n INITIAL_COLLATERAL_RATE\n\n );\n\n }\n\n}\n", "file_path": "token-lending/program/src/state/mod.rs", "rank": 19, "score": 172163.74897731893 }, { "content": "pub fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n Processor::process(program_id, accounts, instruction_data)\n\n}\n\n\n\npub struct Processor;\n\nimpl Processor {\n\n pub fn process(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n ) -> ProgramResult {\n\n let instruction = FlashLoanReceiverInstruction::unpack(instruction_data)?;\n\n\n\n match instruction {\n\n FlashLoanReceiverInstruction::ReceiveFlashLoan { amount } => {\n\n msg!(\"Instruction: Receive Flash Loan\");\n", "file_path": "token-lending/program/tests/helpers/flash_loan_receiver.rs", "rank": 20, "score": 170930.30080875396 }, { "content": "pub fn program_test() -> ProgramTest {\n\n ProgramTest::new(\n\n \"spl_stake_pool\",\n\n id(),\n\n processor!(processor::Processor::process),\n\n )\n\n}\n\n\n\npub async fn get_account(banks_client: &mut BanksClient, pubkey: &Pubkey) -> Account {\n\n banks_client\n\n .get_account(*pubkey)\n\n .await\n\n .expect(\"account not found\")\n\n .expect(\"account empty\")\n\n}\n\n\n\npub async fn create_mint(\n\n banks_client: &mut BanksClient,\n\n payer: &Keypair,\n\n recent_blockhash: &Hash,\n", "file_path": "stake-pool/program/tests/helpers/mod.rs", "rank": 21, "score": 167883.2917589849 }, { "content": "/// A more efficient 
`copy_from_slice` implementation.\n\nfn fast_copy(mut src: &[u8], mut dst: &mut [u8]) {\n\n while src.len() >= 8 {\n\n #[allow(clippy::ptr_offset_with_cast)]\n\n let (src_word, src_rem) = array_refs![src, 8; ..;];\n\n #[allow(clippy::ptr_offset_with_cast)]\n\n let (dst_word, dst_rem) = mut_array_refs![dst, 8; ..;];\n\n *dst_word = *src_word;\n\n src = src_rem;\n\n dst = dst_rem;\n\n }\n\n unsafe {\n\n std::ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len());\n\n }\n\n}\n\n\n\n/// Deserializes only the particular input parameters that the shared memory\n\n/// program uses. For more information about the format of the serialized input\n\n/// parameters see `solana_sdk::entrypoint::deserialize`\n\nunsafe fn deserialize_input_parameters<'a>(\n\n input: *mut u8,\n", "file_path": "shared-memory/program/src/lib.rs", "rank": 22, "score": 167198.96198108856 }, { "content": "/// Create a `RecordInstruction::Write` instruction\n\npub fn write(record_account: &Pubkey, signer: &Pubkey, offset: u64, data: Vec<u8>) -> Instruction {\n\n Instruction::new_with_borsh(\n\n id(),\n\n &RecordInstruction::Write { offset, data },\n\n vec![\n\n AccountMeta::new(*record_account, false),\n\n AccountMeta::new_readonly(*signer, true),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "record/program/src/instruction.rs", "rank": 23, "score": 166745.765516123 }, { "content": "fn check_fee_payer_balance(config: &Config, required_balance: u64) -> Result<(), Error> {\n\n let balance = config.rpc_client.get_balance(&config.fee_payer.pubkey())?;\n\n if balance < required_balance {\n\n Err(format!(\n\n \"Fee payer, {}, has insufficient balance: {} required, {} available\",\n\n config.fee_payer.pubkey(),\n\n lamports_to_sol(required_balance),\n\n lamports_to_sol(balance)\n\n )\n\n .into())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "token-lending/cli/src/main.rs", "rank": 24, "score": 166733.67066759896 }, { "content": "fn check_fee_payer_balance(config: &Config, required_balance: u64) -> Result<(), Error> {\n\n let balance = config.rpc_client.get_balance(&config.fee_payer.pubkey())?;\n\n if balance < required_balance {\n\n Err(format!(\n\n \"Fee payer, {}, has insufficient balance: {} required, {} available\",\n\n config.fee_payer.pubkey(),\n\n Sol(required_balance),\n\n Sol(balance)\n\n )\n\n .into())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "stake-pool/cli/src/main.rs", "rank": 25, "score": 166733.67066759896 }, { "content": "/// Utility function that checks index is between MIN_SIGNERS and MAX_SIGNERS\n\npub fn is_valid_signer_index(index: usize) -> bool {\n\n (MIN_SIGNERS..=MAX_SIGNERS).contains(&index)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_instruction_packing() {\n\n let check = TokenInstruction::InitializeMint {\n\n decimals: 2,\n\n mint_authority: Pubkey::new(&[1u8; 32]),\n\n freeze_authority: COption::None,\n\n };\n\n let packed = check.pack();\n\n let mut expect = Vec::from([0u8, 2]);\n\n expect.extend_from_slice(&[1u8; 32]);\n\n expect.extend_from_slice(&[0]);\n\n assert_eq!(packed, expect);\n", "file_path": "token/program/src/instruction.rs", "rank": 26, "score": 162342.72692383107 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let app_matches = App::new(crate_name!())\n\n .about(crate_description!())\n\n .version(crate_version!())\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .arg({\n\n let arg = Arg::with_name(\"config_file\")\n\n .short(\"C\")\n\n .long(\"config\")\n\n .value_name(\"PATH\")\n\n 
.takes_value(true)\n\n .global(true)\n\n .help(\"Configuration file to use\");\n\n if let Some(ref config_file) = *solana_cli_config::CONFIG_FILE {\n\n arg.default_value(config_file)\n\n } else {\n\n arg\n\n }\n\n })\n\n .arg(\n", "file_path": "feature-proposal/cli/src/main.rs", "rank": 27, "score": 159290.2277276085 }, { "content": "/// puff metadata account instruction\n\npub fn puff_metadata_account(program_id: Pubkey, metadata_account: Pubkey) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![AccountMeta::new(metadata_account, false)],\n\n data: MetadataInstruction::PuffMetadata.try_to_vec().unwrap(),\n\n }\n\n}\n\n\n\n/// creates a update_primary_sale_happened_via_token instruction\n", "file_path": "token-metadata/program/src/instruction.rs", "rank": 28, "score": 158563.61490772926 }, { "content": "fn new_throwaway_signer() -> (Box<dyn Signer>, Pubkey) {\n\n let keypair = Keypair::new();\n\n let pubkey = keypair.pubkey();\n\n (Box::new(keypair) as Box<dyn Signer>, pubkey)\n\n}\n\n\n", "file_path": "token/cli/src/main.rs", "rank": 29, "score": 157151.09950744736 }, { "content": "fn string_to_array(value: &str) -> Result<[u8; 32], TransportError> {\n\n if value.len() > 32 {\n\n return Err(TransportError::Custom(\"String too long\".to_string()));\n\n }\n\n let mut result: [u8; 32] = Default::default();\n\n &result[0..value.len()].copy_from_slice(value.as_bytes());\n\n Ok(result)\n\n}\n\n\n\npub async fn get_account(banks_client: &mut BanksClient, pubkey: &Pubkey) -> Account {\n\n banks_client\n\n .get_account(*pubkey)\n\n .await\n\n .expect(\"account not found\")\n\n .expect(\"account empty\")\n\n}\n\n\n\npub async fn create_mint(\n\n banks_client: &mut BanksClient,\n\n payer: &Keypair,\n", "file_path": "auction/program/tests/helpers.rs", "rank": 30, "score": 155913.687461346 }, { "content": "/// Check stake program address\n\nfn check_stake_program(program_id: &Pubkey) -> Result<(), ProgramError> {\n\n if *program_id != stake_program::id() {\n\n msg!(\n\n \"Expected stake program {}, received {}\",\n\n stake_program::id(),\n\n program_id\n\n );\n\n Err(ProgramError::IncorrectProgramId)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "stake-pool/program/src/processor.rs", "rank": 31, "score": 153547.72759745037 }, { "content": "/// Check system program address\n\nfn check_system_program(program_id: &Pubkey) -> Result<(), ProgramError> {\n\n if *program_id != system_program::id() {\n\n msg!(\n\n \"Expected system program {}, received {}\",\n\n system_program::id(),\n\n program_id\n\n );\n\n Err(ProgramError::IncorrectProgramId)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "stake-pool/program/src/processor.rs", "rank": 32, "score": 153547.72759745037 }, { "content": "/// Build a memo instruction, possibly signed\n\n///\n\n/// Accounts expected by this instruction:\n\n///\n\n/// 0. ..0+N. 
`[signer]` Expected signers; if zero provided, instruction will be processed as a\n\n/// normal, unsigned spl-memo\n\n///\n\npub fn build_memo(memo: &[u8], signer_pubkeys: &[&Pubkey]) -> Instruction {\n\n Instruction {\n\n program_id: id(),\n\n accounts: signer_pubkeys\n\n .iter()\n\n .map(|&pubkey| AccountMeta::new_readonly(*pubkey, true))\n\n .collect(),\n\n data: memo.to_vec(),\n\n }\n\n}\n", "file_path": "memo/program/src/lib.rs", "rank": 33, "score": 152402.89203325607 }, { "content": "fn get_number_from_data(data: &Ref<&mut [u8]>, data_type: TupleNumericType, offset: usize) -> u64 {\n\n return match data_type {\n\n TupleNumericType::U8 => data[offset] as u64,\n\n TupleNumericType::U16 => u16::from_le_bytes(*array_ref![data, offset, 2]) as u64,\n\n TupleNumericType::U32 => u32::from_le_bytes(*array_ref![data, offset, 4]) as u64,\n\n TupleNumericType::U64 => u64::from_le_bytes(*array_ref![data, offset, 8]),\n\n _ => 0,\n\n };\n\n}\n\n\n", "file_path": "metaplex/program/src/state.rs", "rank": 34, "score": 151534.58417092566 }, { "content": "/// Processes ExecuteInstruction instruction\n\npub fn process_execute_instruction(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult {\n\n let account_info_iter = &mut accounts.iter();\n\n\n\n let governance_info = next_account_info(account_info_iter)?; // 0\n\n let proposal_info = next_account_info(account_info_iter)?; // 1\n\n let proposal_instruction_info = next_account_info(account_info_iter)?; // 2\n\n\n\n let clock_info = next_account_info(account_info_iter)?; // 3\n\n let clock = Clock::from_account_info(clock_info)?;\n\n\n\n let governance_data = get_governance_data(program_id, governance_info)?;\n\n\n\n let mut proposal_data =\n\n get_proposal_data_for_governance(program_id, proposal_info, governance_info.key)?;\n\n\n\n let mut proposal_instruction_data = get_proposal_instruction_data_for_proposal(\n\n program_id,\n\n proposal_instruction_info,\n\n proposal_info.key,\n\n )?;\n", "file_path": "governance/program/src/processor/process_execute_instruction.rs", "rank": 35, "score": 151253.65398089716 }, { "content": "/// Processes RemoveInstruction instruction\n\npub fn process_remove_instruction(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult {\n\n let account_info_iter = &mut accounts.iter();\n\n\n\n let proposal_info = next_account_info(account_info_iter)?; // 0\n\n let token_owner_record_info = next_account_info(account_info_iter)?; // 1\n\n let governance_authority_info = next_account_info(account_info_iter)?; // 2\n\n\n\n let proposal_instruction_info = next_account_info(account_info_iter)?; // 3\n\n\n\n let beneficiary_info = next_account_info(account_info_iter)?; // 4\n\n\n\n let mut proposal_data = get_proposal_data(program_id, proposal_info)?;\n\n proposal_data.assert_can_edit_instructions()?;\n\n\n\n let token_owner_record_data = get_token_owner_record_data_for_proposal_owner(\n\n program_id,\n\n token_owner_record_info,\n\n &proposal_data.token_owner_record,\n\n )?;\n\n\n", "file_path": "governance/program/src/processor/process_remove_instruction.rs", "rank": 36, "score": 151253.65398089716 }, { "content": "pub fn get_config_count(data: &Ref<&mut [u8]>) -> core::result::Result<usize, ProgramError> {\n\n return Ok(u32::from_le_bytes(*array_ref![data, CONFIG_ARRAY_START, 4]) as usize);\n\n}\n\n\n", "file_path": "nft-candy-machine/src/lib.rs", "rank": 37, "score": 149392.96555681847 }, { "content": "/// Unpacks a reference from a bytes buffer.\n\n/// TODO actually pack / unpack instead of relying on normal 
memory layout.\n\npub fn unpack<T>(input: &[u8]) -> Result<&T, ProgramError> {\n\n if input.len() < size_of::<u8>() + size_of::<T>() {\n\n return Err(ProgramError::InvalidAccountData);\n\n }\n\n #[allow(clippy::cast_ptr_alignment)]\n\n let val: &T = unsafe { &*(&input[1] as *const u8 as *const T) };\n\n Ok(val)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use crate::curve::{base::CurveType, stable::StableCurve};\n\n\n\n #[test]\n\n fn pack_intialize() {\n\n let trade_fee_numerator: u64 = 1;\n\n let trade_fee_denominator: u64 = 4;\n\n let owner_trade_fee_numerator: u64 = 2;\n", "file_path": "token-swap/program/src/instruction.rs", "rank": 38, "score": 147883.7177116883 }, { "content": "/// Creates RemoveInstruction instruction\n\npub fn remove_instruction(\n\n program_id: &Pubkey,\n\n // Accounts\n\n proposal: &Pubkey,\n\n token_owner_record: &Pubkey,\n\n governance_authority: &Pubkey,\n\n proposal_instruction: &Pubkey,\n\n beneficiary: &Pubkey,\n\n) -> Instruction {\n\n let accounts = vec![\n\n AccountMeta::new(*proposal, false),\n\n AccountMeta::new_readonly(*token_owner_record, false),\n\n AccountMeta::new_readonly(*governance_authority, true),\n\n AccountMeta::new(*proposal_instruction, false),\n\n AccountMeta::new(*beneficiary, false),\n\n ];\n\n\n\n let instruction = GovernanceInstruction::RemoveInstruction {};\n\n\n\n Instruction {\n\n program_id: *program_id,\n\n accounts,\n\n data: instruction.try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "governance/program/src/instruction.rs", "rank": 39, "score": 147462.8269561262 }, { "content": "/// Creates ExecuteInstruction instruction\n\npub fn execute_instruction(\n\n program_id: &Pubkey,\n\n // Accounts\n\n governance: &Pubkey,\n\n proposal: &Pubkey,\n\n proposal_instruction: &Pubkey,\n\n instruction_program_id: &Pubkey,\n\n instruction_accounts: &[AccountMeta],\n\n) -> Instruction {\n\n let mut accounts = vec![\n\n AccountMeta::new_readonly(*governance, false),\n\n AccountMeta::new(*proposal, false),\n\n AccountMeta::new(*proposal_instruction, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n AccountMeta::new_readonly(*instruction_program_id, false),\n\n ];\n\n\n\n accounts.extend_from_slice(instruction_accounts);\n\n\n\n let instruction = GovernanceInstruction::ExecuteInstruction {};\n\n\n\n Instruction {\n\n program_id: *program_id,\n\n accounts,\n\n data: instruction.try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "governance/program/src/instruction.rs", "rank": 40, "score": 147462.8269561262 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn insert_instruction(\n\n program_id: &Pubkey,\n\n // Accounts\n\n governance: &Pubkey,\n\n proposal: &Pubkey,\n\n token_owner_record: &Pubkey,\n\n governance_authority: &Pubkey,\n\n payer: &Pubkey,\n\n // Args\n\n index: u16,\n\n hold_up_time: u32,\n\n instruction: InstructionData,\n\n) -> Instruction {\n\n let proposal_instruction_address =\n\n get_proposal_instruction_address(program_id, proposal, &index.to_le_bytes());\n\n\n\n let accounts = vec![\n\n AccountMeta::new_readonly(*governance, false),\n\n AccountMeta::new(*proposal, false),\n\n AccountMeta::new_readonly(*token_owner_record, false),\n", "file_path": "governance/program/src/instruction.rs", "rank": 41, "score": 147455.78666644386 }, { "content": "fn run_fuzz_instruction(\n\n fuzz_instruction: FuzzInstruction,\n\n token_swap: &mut NativeTokenSwap,\n\n token_a_accounts: &mut HashMap<AccountId, NativeAccountData>,\n\n token_b_accounts: &mut HashMap<AccountId, 
NativeAccountData>,\n\n pool_accounts: &mut HashMap<AccountId, NativeAccountData>,\n\n) {\n\n let result = match fuzz_instruction {\n\n FuzzInstruction::Swap {\n\n token_a_id,\n\n token_b_id,\n\n trade_direction,\n\n instruction,\n\n } => {\n\n let mut token_a_account = token_a_accounts.get_mut(&token_a_id).unwrap();\n\n let mut token_b_account = token_b_accounts.get_mut(&token_b_id).unwrap();\n\n match trade_direction {\n\n TradeDirection::AtoB => {\n\n token_swap.swap_a_to_b(&mut token_a_account, &mut token_b_account, instruction)\n\n }\n", "file_path": "token-swap/program/fuzz/src/instructions.rs", "rank": 42, "score": 147335.70967743977 }, { "content": "#[test]\n\nfn assert_instruction_count() {\n\n let program_id = Pubkey::new_unique();\n\n\n\n // Create new policies\n\n let policies_key = Pubkey::new_unique();\n\n let scalars = vec![1u64.into(), 2u64.into()];\n\n //let scalars = vec![\n\n // 1u64.into(),\n\n // 1u64.into(),\n\n // 1u64.into(),\n\n // 1u64.into(),\n\n // 1u64.into(),\n\n // 1u64.into(),\n\n // 1u64.into(),\n\n // 1u64.into(),\n\n // 1u64.into(),\n\n // 1u64.into(), //10\n\n // 2u64.into(),\n\n // 2u64.into(),\n\n // 2u64.into(),\n", "file_path": "themis/client_ristretto/tests/assert_instruction_count.rs", "rank": 43, "score": 145163.88867318997 }, { "content": "#[test]\n\nfn assert_instruction_count() {\n\n let program_id = Pubkey::new_unique();\n\n let source_key = Pubkey::new_unique();\n\n let source_account = SolanaAccount::new_ref(u64::MAX, Account::get_packed_len(), &program_id);\n\n let destination_key = Pubkey::new_unique();\n\n let destination_account =\n\n SolanaAccount::new_ref(u64::MAX, Account::get_packed_len(), &program_id);\n\n let owner_key = Pubkey::new_unique();\n\n let owner_account = RefCell::new(SolanaAccount::default());\n\n let mint_key = Pubkey::new_unique();\n\n let mint_account = SolanaAccount::new_ref(0, Mint::get_packed_len(), &program_id);\n\n let rent_key = rent::id();\n\n let rent_account = RefCell::new(create_account(&Rent::free(), 42));\n\n\n\n // Create new mint\n\n let instruction_data = TokenInstruction::InitializeMint {\n\n decimals: 9,\n\n mint_authority: owner_key,\n\n freeze_authority: COption::None,\n\n }\n", "file_path": "token/perf-monitor/tests/assert_instruction_count.rs", "rank": 44, "score": 145163.88867318997 }, { "content": "/// Creates an PlaceBid instruction.\n\npub fn place_bid_instruction(\n\n program_id: Pubkey,\n\n bidder_pubkey: Pubkey,\n\n bidder_token_pubkey: Pubkey,\n\n bidder_pot_token_pubkey: Pubkey,\n\n token_mint_pubkey: Pubkey,\n\n transfer_authority: Pubkey,\n\n payer: Pubkey,\n\n args: PlaceBidArgs,\n\n) -> Instruction {\n\n // Derive Auction Key\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n program_id.as_ref(),\n\n args.resource.as_ref(),\n\n ];\n\n let (auction_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n", "file_path": "auction/program/src/instruction.rs", "rank": 45, "score": 145096.2278985604 }, { "content": "/// Creates an CreateAuction instruction.\n\npub fn create_auction_instruction(\n\n program_id: Pubkey,\n\n creator_pubkey: Pubkey,\n\n args: CreateAuctionArgs,\n\n) -> Instruction {\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n &program_id.as_ref(),\n\n args.resource.as_ref(),\n\n ];\n\n let (auction_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n program_id.as_ref(),\n\n args.resource.as_ref(),\n\n EXTENDED.as_bytes(),\n\n ];\n\n let (auction_extended_pubkey, _) = 
Pubkey::find_program_address(seeds, &program_id);\n\n\n", "file_path": "auction/program/src/instruction.rs", "rank": 46, "score": 145096.2278985604 }, { "content": "/// Creates an StartAuction instruction.\n\npub fn start_auction_instruction(\n\n program_id: Pubkey,\n\n authority_pubkey: Pubkey,\n\n args: StartAuctionArgs,\n\n) -> Instruction {\n\n // Derive Auction Key\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n &program_id.as_ref(),\n\n args.resource.as_ref(),\n\n ];\n\n let (auction_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(authority_pubkey, true),\n\n AccountMeta::new(auction_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ],\n\n data: AuctionInstruction::StartAuction(args).try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "auction/program/src/instruction.rs", "rank": 47, "score": 145096.2278985604 }, { "content": "/// Creates an SetAuthority instruction.\n\npub fn set_authority_instruction(\n\n program_id: Pubkey,\n\n resource: Pubkey,\n\n authority: Pubkey,\n\n new_authority: Pubkey,\n\n) -> Instruction {\n\n let seeds = &[PREFIX.as_bytes(), &program_id.as_ref(), resource.as_ref()];\n\n let (auction_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(auction_pubkey, false),\n\n AccountMeta::new_readonly(authority, true),\n\n AccountMeta::new_readonly(new_authority, false),\n\n ],\n\n data: AuctionInstruction::SetAuthority.try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "auction/program/src/instruction.rs", "rank": 48, "score": 145096.2278985604 }, { "content": "pub fn claim_bid_instruction(\n\n program_id: Pubkey,\n\n destination_pubkey: Pubkey,\n\n authority_pubkey: Pubkey,\n\n bidder_pubkey: Pubkey,\n\n bidder_pot_token_pubkey: Pubkey,\n\n token_mint_pubkey: Pubkey,\n\n auction_extended_pubkey: Option<Pubkey>,\n\n args: ClaimBidArgs,\n\n) -> Instruction {\n\n // Derive Auction Key\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n &program_id.as_ref(),\n\n args.resource.as_ref(),\n\n ];\n\n let (auction_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n\n\n // Derive Bidder Pot\n\n let seeds = &[\n", "file_path": "auction/program/src/instruction.rs", "rank": 49, "score": 145090.11494062492 }, { "content": "pub fn end_auction_instruction(\n\n program_id: Pubkey,\n\n authority_pubkey: Pubkey,\n\n args: EndAuctionArgs,\n\n) -> Instruction {\n\n // Derive Auction Key\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n &program_id.as_ref(),\n\n args.resource.as_ref(),\n\n ];\n\n let (auction_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(authority_pubkey, true),\n\n AccountMeta::new(auction_pubkey, false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ],\n\n data: AuctionInstruction::EndAuction(args).try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "auction/program/src/instruction.rs", "rank": 50, "score": 145090.11494062492 }, { "content": "/// Creates an CancelBidinstruction.\n\npub fn cancel_bid_instruction(\n\n program_id: Pubkey,\n\n bidder_pubkey: Pubkey,\n\n bidder_token_pubkey: Pubkey,\n\n bidder_pot_token_pubkey: Pubkey,\n\n token_mint_pubkey: Pubkey,\n\n args: CancelBidArgs,\n\n) -> Instruction {\n\n // Derive Auction Key\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n program_id.as_ref(),\n\n args.resource.as_ref(),\n\n ];\n\n let 
(auction_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n program_id.as_ref(),\n\n args.resource.as_ref(),\n", "file_path": "auction/program/src/instruction.rs", "rank": 51, "score": 145090.11494062492 }, { "content": "/// Create PreciseSquareRoot instruction\n\npub fn noop() -> Instruction {\n\n Instruction {\n\n program_id: id(),\n\n accounts: vec![],\n\n data: MathInstruction::Noop.try_to_vec().unwrap(),\n\n }\n\n}\n", "file_path": "libraries/math/src/instruction.rs", "rank": 52, "score": 143928.5327209359 }, { "content": "/// Helper function for calculating swap fee\n\npub fn calculate_fee(\n\n token_amount: u128,\n\n fee_numerator: u128,\n\n fee_denominator: u128,\n\n) -> Option<u128> {\n\n if fee_numerator == 0 || token_amount == 0 {\n\n Some(0)\n\n } else {\n\n let fee = token_amount\n\n .checked_mul(fee_numerator)?\n\n .checked_div(fee_denominator)?;\n\n if fee == 0 {\n\n Some(1) // minimum fee of one token\n\n } else {\n\n Some(fee)\n\n }\n\n }\n\n}\n\n\n", "file_path": "token-swap/program/src/curve/fees.rs", "rank": 53, "score": 143694.6230404575 }, { "content": "/// Creates an SetStore instruction\n\npub fn create_set_store_instruction(\n\n program_id: Pubkey,\n\n store: Pubkey,\n\n admin: Pubkey,\n\n payer: Pubkey,\n\n public: bool,\n\n) -> Instruction {\n\n let accounts = vec![\n\n AccountMeta::new(store, false),\n\n AccountMeta::new_readonly(admin, true),\n\n AccountMeta::new_readonly(payer, true),\n\n AccountMeta::new_readonly(spl_token::id(), false),\n\n AccountMeta::new_readonly(spl_token_vault::id(), false),\n\n AccountMeta::new_readonly(spl_token_metadata::id(), false),\n\n AccountMeta::new_readonly(spl_auction::id(), false),\n\n AccountMeta::new_readonly(solana_program::system_program::id(), false),\n\n AccountMeta::new_readonly(sysvar::rent::id(), false),\n\n ];\n\n Instruction {\n\n program_id,\n\n accounts,\n\n data: MetaplexInstruction::SetStore(SetStoreArgs { public })\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 54, "score": 142830.73041600757 }, { "content": "/// Creates an CreateAuctionV2 instruction.\n\npub fn create_auction_instruction_v2(\n\n program_id: Pubkey,\n\n creator_pubkey: Pubkey,\n\n args: CreateAuctionArgsV2,\n\n) -> Instruction {\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n &program_id.as_ref(),\n\n args.resource.as_ref(),\n\n ];\n\n let (auction_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n\n\n let seeds = &[\n\n PREFIX.as_bytes(),\n\n program_id.as_ref(),\n\n args.resource.as_ref(),\n\n EXTENDED.as_bytes(),\n\n ];\n\n let (auction_extended_pubkey, _) = Pubkey::find_program_address(seeds, &program_id);\n\n\n", "file_path": "auction/program/src/instruction.rs", "rank": 55, "score": 142830.66619897087 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_end_auction_instruction(\n\n program_id: Pubkey,\n\n auction_manager: Pubkey,\n\n auction: Pubkey,\n\n auction_data_extended: Pubkey,\n\n auction_manager_authority: Pubkey,\n\n store: Pubkey,\n\n end_auction_args: EndAuctionArgs,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(auction_manager, false),\n\n AccountMeta::new(auction, false),\n\n AccountMeta::new_readonly(auction_data_extended, false),\n\n AccountMeta::new_readonly(auction_manager_authority, true),\n\n AccountMeta::new_readonly(store, false),\n\n AccountMeta::new_readonly(spl_auction::id(), false),\n\n 
AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ],\n\n data: MetaplexInstruction::EndAuction(end_auction_args)\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 56, "score": 142824.61745807208 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_redeem_bid_instruction(\n\n program_id: Pubkey,\n\n auction_manager: Pubkey,\n\n safety_deposit_token_store: Pubkey,\n\n destination: Pubkey,\n\n bid_redemption: Pubkey,\n\n safety_deposit_box: Pubkey,\n\n vault: Pubkey,\n\n fraction_mint: Pubkey,\n\n auction: Pubkey,\n\n auction_extended: Pubkey,\n\n bidder_metadata: Pubkey,\n\n bidder: Pubkey,\n\n payer: Pubkey,\n\n store: Pubkey,\n\n transfer_authority: Pubkey,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 57, "score": 142824.61745807208 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_start_auction_instruction(\n\n program_id: Pubkey,\n\n auction_manager: Pubkey,\n\n auction: Pubkey,\n\n auction_manager_authority: Pubkey,\n\n store: Pubkey,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(auction_manager, false),\n\n AccountMeta::new(auction, false),\n\n AccountMeta::new_readonly(auction_manager_authority, true),\n\n AccountMeta::new_readonly(store, false),\n\n AccountMeta::new_readonly(spl_auction::id(), false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ],\n\n data: MetaplexInstruction::StartAuction.try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 58, "score": 142824.61745807208 }, { "content": "fn command_unwrap(\n\n config: &Config,\n\n wallet_address: Pubkey,\n\n address: Option<Pubkey>,\n\n) -> CommandResult {\n\n let use_associated_account = address.is_none();\n\n let address = address\n\n .unwrap_or_else(|| get_associated_token_address(&wallet_address, &native_mint::id()));\n\n println_display(config, format!(\"Unwrapping {}\", address));\n\n if !config.sign_only {\n\n let lamports = config.rpc_client.get_balance(&address)?;\n\n if lamports == 0 {\n\n if use_associated_account {\n\n return Err(\"No wrapped SOL in associated account; did you mean to specify an auxiliary address?\".to_string().into());\n\n } else {\n\n return Err(format!(\"No wrapped SOL in {}\", address).into());\n\n }\n\n }\n\n println_display(\n\n config,\n", "file_path": "token/cli/src/main.rs", "rank": 59, "score": 142274.18615223377 }, { "content": "fn get_signer(\n\n matches: &ArgMatches<'_>,\n\n keypair_name: &str,\n\n wallet_manager: &mut Option<Arc<RemoteWalletManager>>,\n\n) -> Option<(Box<dyn Signer>, Pubkey)> {\n\n matches.value_of(keypair_name).map(|path| {\n\n let signer =\n\n signer_from_path(matches, path, keypair_name, wallet_manager).unwrap_or_else(|e| {\n\n eprintln!(\"error: {}\", e);\n\n exit(1);\n\n });\n\n let signer_pubkey = signer.pubkey();\n\n (signer, signer_pubkey)\n\n })\n\n}\n\n\n\npub(crate) fn check_fee_payer_balance(config: &Config, required_balance: u64) -> Result<(), Error> {\n\n let balance = config.rpc_client.get_balance(&config.fee_payer)?;\n\n if balance < required_balance {\n\n Err(format!(\n", "file_path": "token/cli/src/main.rs", "rank": 60, "score": 142254.89931755664 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n if let Err(error) = 
processor::process_instruction(program_id, accounts, instruction_data) {\n\n error.print::<AuctionError>();\n\n msg!(\"{}\", error);\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n", "file_path": "auction/program/src/entrypoint.rs", "rank": 61, "score": 141466.65610482256 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n if let Err(error) = Processor::process(program_id, accounts, instruction_data) {\n\n // catch the error so we can print it\n\n error.print::<TokenError>();\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n", "file_path": "token/program/src/entrypoint.rs", "rank": 62, "score": 141466.65610482256 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n crate::processor::process_instruction(program_id, accounts, instruction_data)\n\n}\n", "file_path": "record/program/src/entrypoint.rs", "rank": 63, "score": 141466.65610482256 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n crate::processor::process_instruction(program_id, accounts, instruction_data)\n\n}\n", "file_path": "memo/program/src/entrypoint.rs", "rank": 64, "score": 141466.65610482256 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n if let Err(error) = processor::process_instruction(program_id, accounts, instruction_data) {\n\n // catch the error so we can print it\n\n error.print::<GovernanceError>();\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n", "file_path": "governance/program/src/entrypoint.rs", "rank": 65, "score": 141466.65610482256 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n crate::processor::process_instruction(program_id, accounts, instruction_data)\n\n}\n", "file_path": "libraries/math/src/entrypoint.rs", "rank": 66, "score": 141466.65610482256 }, { "content": "/// Creates an DecommissionAuctionManager instruction\n\npub fn create_decommission_auction_manager_instruction(\n\n program_id: Pubkey,\n\n auction_manager: Pubkey,\n\n auction: Pubkey,\n\n authority: Pubkey,\n\n vault: Pubkey,\n\n store: Pubkey,\n\n) -> Instruction {\n\n let accounts = vec![\n\n AccountMeta::new(auction_manager, false),\n\n AccountMeta::new(auction, false),\n\n AccountMeta::new_readonly(authority, true),\n\n AccountMeta::new_readonly(vault, false),\n\n AccountMeta::new_readonly(store, false),\n\n AccountMeta::new_readonly(spl_auction::id(), false),\n\n AccountMeta::new_readonly(sysvar::clock::id(), false),\n\n ];\n\n Instruction {\n\n program_id,\n\n accounts,\n\n data: MetaplexInstruction::DecommissionAuctionManager\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n\n/// Creates an RedeemPrintingV2Bid instruction\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 67, "score": 140658.84519472107 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn deprecated_create_validate_participation_instruction(\n\n program_id: Pubkey,\n\n auction_manager: Pubkey,\n\n open_edition_metadata: Pubkey,\n\n open_edition_master_edition: Pubkey,\n\n printing_authorization_token_account: Pubkey,\n\n auction_manager_authority: Pubkey,\n\n whitelisted_creator: Pubkey,\n\n store: Pubkey,\n\n safety_deposit_box: Pubkey,\n\n 
safety_deposit_box_token_store: Pubkey,\n\n vault: Pubkey,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(auction_manager, false),\n\n AccountMeta::new_readonly(open_edition_metadata, false),\n\n AccountMeta::new_readonly(open_edition_master_edition, false),\n\n AccountMeta::new_readonly(printing_authorization_token_account, false),\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 68, "score": 140652.7964538223 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_add_shares_instruction(\n\n program_id: Pubkey,\n\n source: Pubkey,\n\n fraction_treasury: Pubkey,\n\n vault: Pubkey,\n\n transfer_authority: Pubkey,\n\n vault_authority: Pubkey,\n\n number_of_shares: u64,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(source, false),\n\n AccountMeta::new(fraction_treasury, false),\n\n AccountMeta::new_readonly(vault, false),\n\n AccountMeta::new_readonly(transfer_authority, true),\n\n AccountMeta::new_readonly(vault_authority, true),\n\n AccountMeta::new_readonly(spl_token::id(), false),\n\n AccountMeta::new_readonly(sysvar::rent::id(), false),\n\n ],\n\n data: VaultInstruction::AddSharesToTreasury(NumberOfShareArgs { number_of_shares })\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 69, "score": 140652.7964538223 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_activate_vault_instruction(\n\n program_id: Pubkey,\n\n vault: Pubkey,\n\n fraction_mint: Pubkey,\n\n fraction_treasury: Pubkey,\n\n fraction_mint_authority: Pubkey,\n\n vault_authority: Pubkey,\n\n number_of_shares: u64,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(vault, false),\n\n AccountMeta::new(fraction_mint, false),\n\n AccountMeta::new(fraction_treasury, false),\n\n AccountMeta::new_readonly(fraction_mint_authority, false),\n\n AccountMeta::new_readonly(vault_authority, true),\n\n AccountMeta::new_readonly(spl_token::id(), false),\n\n ],\n\n data: VaultInstruction::ActivateVault(NumberOfShareArgs { number_of_shares })\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n\n/// Creates an CombineVault instruction\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 70, "score": 140652.7964538223 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_init_vault_instruction(\n\n program_id: Pubkey,\n\n fraction_mint: Pubkey,\n\n redeem_treasury: Pubkey,\n\n fraction_treasury: Pubkey,\n\n vault: Pubkey,\n\n vault_authority: Pubkey,\n\n external_price_account: Pubkey,\n\n allow_further_share_creation: bool,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(fraction_mint, false),\n\n AccountMeta::new(redeem_treasury, false),\n\n AccountMeta::new(fraction_treasury, false),\n\n AccountMeta::new(vault, false),\n\n AccountMeta::new_readonly(vault_authority, false),\n\n AccountMeta::new_readonly(external_price_account, false),\n\n AccountMeta::new_readonly(spl_token::id(), false),\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 71, "score": 140652.7964538223 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_withdraw_shares_instruction(\n\n program_id: Pubkey,\n\n destination: Pubkey,\n\n fraction_treasury: Pubkey,\n\n vault: Pubkey,\n\n transfer_authority: Pubkey,\n\n vault_authority: Pubkey,\n\n number_of_shares: u64,\n\n) -> Instruction {\n\n Instruction 
{\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(destination, false),\n\n AccountMeta::new(fraction_treasury, false),\n\n AccountMeta::new_readonly(vault, false),\n\n AccountMeta::new_readonly(transfer_authority, false),\n\n AccountMeta::new_readonly(vault_authority, true),\n\n AccountMeta::new_readonly(spl_token::id(), false),\n\n AccountMeta::new_readonly(sysvar::rent::id(), false),\n\n ],\n\n data: VaultInstruction::WithdrawSharesFromTreasury(NumberOfShareArgs { number_of_shares })\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 72, "score": 140652.7964538223 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_mint_shares_instruction(\n\n program_id: Pubkey,\n\n fraction_treasury: Pubkey,\n\n fraction_mint: Pubkey,\n\n vault: Pubkey,\n\n fraction_mint_authority: Pubkey,\n\n vault_authority: Pubkey,\n\n number_of_shares: u64,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(fraction_treasury, false),\n\n AccountMeta::new(fraction_mint, false),\n\n AccountMeta::new_readonly(vault, false),\n\n AccountMeta::new_readonly(fraction_mint_authority, false),\n\n AccountMeta::new_readonly(vault_authority, true),\n\n AccountMeta::new_readonly(spl_token::id(), false),\n\n ],\n\n data: VaultInstruction::MintFractionalShares(NumberOfShareArgs { number_of_shares })\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 73, "score": 140652.7964538223 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_redeem_shares_instruction(\n\n program_id: Pubkey,\n\n outstanding_shares_account: Pubkey,\n\n proceeds_account: Pubkey,\n\n fraction_mint: Pubkey,\n\n redeem_treasury: Pubkey,\n\n transfer_authority: Pubkey,\n\n burn_authority: Pubkey,\n\n vault: Pubkey,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(outstanding_shares_account, false),\n\n AccountMeta::new(proceeds_account, false),\n\n AccountMeta::new(fraction_mint, false),\n\n AccountMeta::new(redeem_treasury, false),\n\n AccountMeta::new_readonly(transfer_authority, false),\n\n AccountMeta::new_readonly(burn_authority, true),\n\n AccountMeta::new_readonly(vault, false),\n\n AccountMeta::new_readonly(spl_token::id(), false),\n\n AccountMeta::new_readonly(sysvar::rent::id(), false),\n\n ],\n\n data: VaultInstruction::RedeemShares.try_to_vec().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 74, "score": 140652.7964538223 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_combine_vault_instruction(\n\n program_id: Pubkey,\n\n vault: Pubkey,\n\n outstanding_share_token_account: Pubkey,\n\n paying_token_account: Pubkey,\n\n fraction_mint: Pubkey,\n\n fraction_treasury: Pubkey,\n\n redeem_treasury: Pubkey,\n\n new_authority: Pubkey,\n\n vault_authority: Pubkey,\n\n paying_transfer_authority: Pubkey,\n\n uncirculated_burn_authority: Pubkey,\n\n external_pricing_account: Pubkey,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(vault, false),\n\n AccountMeta::new(outstanding_share_token_account, false),\n\n AccountMeta::new(paying_token_account, false),\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 75, "score": 140652.7964538223 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_withdraw_tokens_instruction(\n\n program_id: Pubkey,\n\n 
destination: Pubkey,\n\n safety_deposit_box: Pubkey,\n\n store: Pubkey,\n\n vault: Pubkey,\n\n fraction_mint: Pubkey,\n\n vault_authority: Pubkey,\n\n transfer_authority: Pubkey,\n\n amount: u64,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(destination, false),\n\n AccountMeta::new(safety_deposit_box, false),\n\n AccountMeta::new(store, false),\n\n AccountMeta::new(vault, false),\n\n AccountMeta::new_readonly(fraction_mint, false),\n\n AccountMeta::new_readonly(vault_authority, true),\n\n AccountMeta::new_readonly(transfer_authority, false),\n\n AccountMeta::new_readonly(spl_token::id(), false),\n\n AccountMeta::new_readonly(sysvar::rent::id(), false),\n\n ],\n\n data: VaultInstruction::WithdrawTokenFromSafetyDepositBox(AmountArgs { amount })\n\n .try_to_vec()\n\n .unwrap(),\n\n }\n\n}\n\n\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 76, "score": 140652.7964538223 }, { "content": "pub fn create_set_authority_instruction(\n\n program_id: Pubkey,\n\n vault: Pubkey,\n\n current_authority: Pubkey,\n\n new_authority: Pubkey,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(vault, false),\n\n AccountMeta::new_readonly(current_authority, true),\n\n AccountMeta::new_readonly(new_authority, false),\n\n ],\n\n data: VaultInstruction::SetAuthority.try_to_vec().unwrap(),\n\n }\n\n}\n", "file_path": "token-vault/program/src/instruction.rs", "rank": 77, "score": 140652.7964538223 }, { "content": "pub fn puff_out_data_fields(metadata: &mut Metadata) {\n\n let mut array_of_zeroes = vec![];\n\n while array_of_zeroes.len() < MAX_NAME_LENGTH - metadata.data.name.len() {\n\n array_of_zeroes.push(0u8);\n\n }\n\n metadata.data.name =\n\n metadata.data.name.clone() + std::str::from_utf8(&array_of_zeroes).unwrap();\n\n\n\n let mut array_of_zeroes = vec![];\n\n while array_of_zeroes.len() < MAX_SYMBOL_LENGTH - metadata.data.symbol.len() {\n\n array_of_zeroes.push(0u8);\n\n }\n\n metadata.data.symbol =\n\n metadata.data.symbol.clone() + std::str::from_utf8(&array_of_zeroes).unwrap();\n\n\n\n let mut array_of_zeroes = vec![];\n\n while array_of_zeroes.len() < MAX_URI_LENGTH - metadata.data.uri.len() {\n\n array_of_zeroes.push(0u8);\n\n }\n\n metadata.data.uri = metadata.data.uri.clone() + std::str::from_utf8(&array_of_zeroes).unwrap();\n", "file_path": "token-metadata/program/src/utils.rs", "rank": 78, "score": 140633.85529957712 }, { "content": "fn send_transaction(\n\n config: &Config,\n\n transaction: Transaction,\n\n) -> solana_client::client_error::Result<()> {\n\n if config.dry_run {\n\n let result = config.rpc_client.simulate_transaction(&transaction)?;\n\n println!(\"Simulate result: {:?}\", result);\n\n } else {\n\n let signature = config\n\n .rpc_client\n\n .send_and_confirm_transaction_with_spinner(&transaction)?;\n\n println!(\"Signature: {}\", signature);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "stake-pool/cli/src/main.rs", "rank": 79, "score": 139476.8808710936 }, { "content": "fn send_transaction(\n\n config: &Config,\n\n transaction: Transaction,\n\n) -> solana_client::client_error::Result<()> {\n\n if config.dry_run {\n\n let result = config.rpc_client.simulate_transaction(&transaction)?;\n\n println!(\"Simulate result: {:?}\", result);\n\n } else {\n\n let signature = config\n\n .rpc_client\n\n .send_and_confirm_transaction_with_spinner(&transaction)?;\n\n println!(\"Signature: {}\", signature);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "token-lending/cli/src/main.rs", 
"rank": 80, "score": 139476.8808710936 }, { "content": "fn get_signer(\n\n matches: &ArgMatches<'_>,\n\n keypair_name: &str,\n\n keypair_path: &str,\n\n wallet_manager: &mut Option<Arc<RemoteWalletManager>>,\n\n) -> Box<dyn Signer> {\n\n signer_from_path(\n\n matches,\n\n matches.value_of(keypair_name).unwrap_or(keypair_path),\n\n keypair_name,\n\n wallet_manager,\n\n )\n\n .unwrap_or_else(|e| {\n\n eprintln!(\"error: {}\", e);\n\n exit(1);\n\n })\n\n}\n\n\n", "file_path": "stake-pool/cli/src/main.rs", "rank": 81, "score": 139467.1634372312 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n crate::processor::process_instruction(program_id, accounts, instruction_data)\n\n}\n", "file_path": "feature-proposal/program/src/entrypoint.rs", "rank": 82, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n Processor::process(program_id, accounts, instruction_data)\n\n}\n", "file_path": "binary-option/program/src/entrypoint.rs", "rank": 83, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n if let Err(error) = Processor::process(program_id, accounts, instruction_data) {\n\n // catch the error so we can print it\n\n error.print::<StakePoolError>();\n\n Err(error)\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "stake-pool/program/src/entrypoint.rs", "rank": 84, "score": 138699.81979361002 }, { "content": "fn main() {\n\n loop {\n\n fuzz!(|fuzz_data: FuzzData| { run_fuzz(fuzz_data) });\n\n }\n\n}\n\n\n", "file_path": "token-swap/program/fuzz/src/instructions.rs", "rank": 85, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n crate::processor::process_instruction(program_id, accounts, instruction_data)?;\n\n Ok(())\n\n}\n", "file_path": "themis/program_ristretto/src/entrypoint.rs", "rank": 86, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n if let Err(error) = Processor::process(program_id, accounts, instruction_data) {\n\n // catch the error so we can print it\n\n error.print::<SwapError>();\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n", "file_path": "token-swap/program/src/entrypoint.rs", "rank": 87, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n if let Err(error) = processor::process_instruction(program_id, accounts, instruction_data) {\n\n // catch the error so we can print it\n\n error.print::<LendingError>();\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n", "file_path": "token-lending/program/src/entrypoint.rs", "rank": 88, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n if let Err(error) = processor::process_instruction(program_id, accounts, instruction_data) {\n\n // catch the error so we can print it\n\n error.print::<GovernanceChatError>();\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n", "file_path": 
"governance/chat/program/src/entrypoint.rs", "rank": 89, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n if let Err(error) = processor::process_instruction(program_id, accounts, instruction_data) {\n\n // catch the error so we can print it\n\n error.print::<VaultError>();\n\n return Err(error);\n\n }\n\n Ok(())\n\n}\n", "file_path": "token-vault/program/src/entrypoint.rs", "rank": 90, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n crate::processor::process_instruction(program_id, accounts, instruction_data)\n\n}\n", "file_path": "examples/rust/logging/src/entrypoint.rs", "rank": 91, "score": 138699.81979361002 }, { "content": "fn process_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_data: &[u8],\n\n) -> ProgramResult {\n\n crate::processor::process_instruction(program_id, accounts, instruction_data)\n\n}\n", "file_path": "examples/rust/sysvar/src/entrypoint.rs", "rank": 92, "score": 138699.81979361002 }, { "content": "#[allow(non_snake_case)]\n\npub fn NopOverride<T>(_: &mut T) {}\n", "file_path": "governance/test-sdk/src/tools.rs", "rank": 93, "score": 138690.3157969146 }, { "content": "/// Processes InsertInstruction instruction\n\npub fn process_insert_instruction(\n\n program_id: &Pubkey,\n\n accounts: &[AccountInfo],\n\n instruction_index: u16,\n\n hold_up_time: u32,\n\n instruction: InstructionData,\n\n) -> ProgramResult {\n\n let account_info_iter = &mut accounts.iter();\n\n\n\n let governance_info = next_account_info(account_info_iter)?; // 0\n\n let proposal_info = next_account_info(account_info_iter)?; // 1\n\n let token_owner_record_info = next_account_info(account_info_iter)?; // 2\n\n let governance_authority_info = next_account_info(account_info_iter)?; // 3\n\n\n\n let proposal_instruction_info = next_account_info(account_info_iter)?; // 4\n\n\n\n let payer_info = next_account_info(account_info_iter)?; // 5\n\n let system_info = next_account_info(account_info_iter)?; // 6\n\n\n\n let rent_sysvar_info = next_account_info(account_info_iter)?; // 7\n", "file_path": "governance/program/src/processor/process_insert_instruction.rs", "rank": 94, "score": 138575.76346063652 }, { "content": "/// Deserializes ProposalInstruction account and checks owner program\n\npub fn get_proposal_instruction_data(\n\n program_id: &Pubkey,\n\n proposal_instruction_info: &AccountInfo,\n\n) -> Result<ProposalInstruction, ProgramError> {\n\n get_account_data::<ProposalInstruction>(proposal_instruction_info, program_id)\n\n}\n\n\n", "file_path": "governance/program/src/state/proposal_instruction.rs", "rank": 95, "score": 138574.6474421142 }, { "content": "/// Deserializes ProposalInstruction account and checks it belongs to the given Proposal\n\npub fn assert_proposal_instruction_for_proposal(\n\n program_id: &Pubkey,\n\n proposal_instruction_info: &AccountInfo,\n\n proposal: &Pubkey,\n\n) -> Result<(), ProgramError> {\n\n get_proposal_instruction_data_for_proposal(program_id, proposal_instruction_info, proposal)\n\n .map(|_| ())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n\n\n use std::str::FromStr;\n\n\n\n use solana_program::bpf_loader_upgradeable;\n\n\n\n use super::*;\n\n\n\n fn create_test_account_meta_data() -> AccountMetaData {\n\n AccountMetaData {\n", "file_path": 
"governance/program/src/state/proposal_instruction.rs", "rank": 96, "score": 138574.5871086657 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_init_auction_manager_v2_instruction(\n\n program_id: Pubkey,\n\n auction_manager: Pubkey,\n\n vault: Pubkey,\n\n auction: Pubkey,\n\n auction_manager_authority: Pubkey,\n\n payer: Pubkey,\n\n accept_payment_account_key: Pubkey,\n\n store: Pubkey,\n\n amount_type: TupleNumericType,\n\n length_type: TupleNumericType,\n\n max_ranges: u64,\n\n) -> Instruction {\n\n Instruction {\n\n program_id,\n\n accounts: vec![\n\n AccountMeta::new(auction_manager, false),\n\n AccountMeta::new_readonly(vault, false),\n\n AccountMeta::new_readonly(auction, false),\n\n AccountMeta::new_readonly(auction_manager_authority, false),\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 97, "score": 138568.72317095418 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_deprecated_redeem_participation_bid_instruction(\n\n program_id: Pubkey,\n\n auction_manager: Pubkey,\n\n safety_deposit_token_store: Pubkey,\n\n destination: Pubkey,\n\n bid_redemption: Pubkey,\n\n safety_deposit_box: Pubkey,\n\n vault: Pubkey,\n\n fraction_mint: Pubkey,\n\n auction: Pubkey,\n\n auction_extended: Pubkey,\n\n bidder_metadata: Pubkey,\n\n bidder: Pubkey,\n\n payer: Pubkey,\n\n store: Pubkey,\n\n transfer_authority: Pubkey,\n\n accept_payment: Pubkey,\n\n paying_token_account: Pubkey,\n\n printing_authorization_token_account: Pubkey,\n\n) -> Instruction {\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 98, "score": 138568.72317095418 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn create_redeem_printing_v2_bid_instruction(\n\n program_id: Pubkey,\n\n auction_manager: Pubkey,\n\n safety_deposit_token_store: Pubkey,\n\n destination: Pubkey,\n\n bid_redemption: Pubkey,\n\n safety_deposit_box: Pubkey,\n\n vault: Pubkey,\n\n auction: Pubkey,\n\n auction_extended: Pubkey,\n\n bidder_metadata: Pubkey,\n\n bidder: Pubkey,\n\n payer: Pubkey,\n\n store: Pubkey,\n\n new_metadata: Pubkey,\n\n original_mint: Pubkey,\n\n new_mint: Pubkey,\n\n new_mint_authority: Pubkey,\n\n edition: u64,\n\n win_index: u64,\n", "file_path": "metaplex/program/src/instruction.rs", "rank": 99, "score": 138568.72317095418 } ]
Rust
samples/d3d12-hello-world/d3d12-hello-window/src/main.rs
damyanp/directx-graphics-samples-rs
d1b04382984b3ad1facf5f52ffd7901cc6d00488
use d3dx12::*; use dxsample::*; use windows::{ core::*, Win32::{ Foundation::*, Graphics::{Direct3D12::*, Dxgi::Common::*, Dxgi::*}, }, }; mod d3d12_hello_window { use std::convert::TryInto; use super::*; const FRAME_COUNT: usize = 2; pub struct Sample { dxgi_factory: IDXGIFactory4, device: ID3D12Device, resources: Option<Resources>, } struct Resources { command_queue: SynchronizedCommandQueue, swap_chain: IDXGISwapChain3, frame_index: usize, render_targets: [ID3D12Resource; FRAME_COUNT], rtv_heap: RtvDescriptorHeap, command_allocator: ID3D12CommandAllocator, command_list: ID3D12GraphicsCommandList, } impl DXSample for Sample { fn new(command_line: &SampleCommandLine) -> Result<Self> { let (dxgi_factory, device) = create_device(command_line)?; Ok(Sample { dxgi_factory, device, resources: None, }) } fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()> { let command_queue = SynchronizedCommandQueue::new(&self.device, D3D12_COMMAND_LIST_TYPE_DIRECT)?; let (width, height) = self.window_size(); let swap_chain_desc = DXGI_SWAP_CHAIN_DESC1 { Width: width as u32, Height: height as u32, Format: DXGI_FORMAT_R8G8B8A8_UNORM, SampleDesc: DXGI_SAMPLE_DESC { Count: 1, ..Default::default() }, BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT, BufferCount: FRAME_COUNT as u32, SwapEffect: DXGI_SWAP_EFFECT_FLIP_DISCARD, ..Default::default() }; let swap_chain: IDXGISwapChain3 = unsafe { self.dxgi_factory.CreateSwapChainForHwnd( &command_queue.queue, hwnd, &swap_chain_desc, std::ptr::null(), None, ) }? .cast()?; unsafe { self.dxgi_factory .MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER) }?; let frame_index = unsafe { swap_chain.GetCurrentBackBufferIndex() } .try_into() .unwrap(); let rtv_heap = RtvDescriptorHeap::new(&self.device, FRAME_COUNT)?; let render_targets: [ID3D12Resource; FRAME_COUNT] = array_init::try_array_init(|i: usize| -> Result<ID3D12Resource> { let render_target: ID3D12Resource = unsafe { swap_chain.GetBuffer(i as u32) }?; unsafe { rtv_heap.create_render_target_view(&self.device, &render_target, None, i); } Ok(render_target) })?; let command_allocator = unsafe { self.device .CreateCommandAllocator(D3D12_COMMAND_LIST_TYPE_DIRECT) }?; let command_list: ID3D12GraphicsCommandList = unsafe { self.device.CreateCommandList( 0, D3D12_COMMAND_LIST_TYPE_DIRECT, &command_allocator, None, ) }?; unsafe { command_list.Close() }?; self.resources = Some(Resources { command_queue, swap_chain, frame_index, render_targets, rtv_heap, command_allocator, command_list, }); Ok(()) } fn title(&self) -> String { "D3D12 Hello Window".into() } fn window_size(&self) -> (i32, i32) { (1280, 720) } fn render(&mut self) { let resources = match &mut self.resources { Some(it) => it, _ => return, }; populate_command_list(resources).unwrap(); let command_list = ID3D12CommandList::from(&resources.command_list); unsafe { resources .command_queue .ExecuteCommandLists(1, &mut Some(command_list)) }; unsafe { resources.swap_chain.Present(1, 0) }.ok().unwrap(); wait_for_previous_frame(resources); } } fn populate_command_list(resources: &Resources) -> Result<()> { unsafe { resources.command_allocator.Reset() }?; let command_list = &resources.command_list; unsafe { command_list.Reset(&resources.command_allocator, None) }?; let barrier = transition_barrier( &resources.render_targets[resources.frame_index as usize], D3D12_RESOURCE_STATE_PRESENT, D3D12_RESOURCE_STATE_RENDER_TARGET, ); unsafe { command_list.ResourceBarrier(1, &barrier) }; let rtv_handle = resources .rtv_heap .get_cpu_descriptor_handle(resources.frame_index); unsafe { 
command_list.OMSetRenderTargets(1, &rtv_handle, false, std::ptr::null()) }; unsafe { command_list.ClearRenderTargetView( rtv_handle, [0.0, 0.2, 0.4, 1.0].as_ptr(), 0, std::ptr::null(), ); command_list.ResourceBarrier( 1, &transition_barrier( &resources.render_targets[resources.frame_index as usize], D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_PRESENT, ), ); } unsafe { command_list.Close() } } fn wait_for_previous_frame(resources: &mut Resources) { resources.command_queue.signal_and_wait_for_gpu().unwrap(); resources.frame_index = unsafe { resources .swap_chain .GetCurrentBackBufferIndex() .try_into() .unwrap() }; } } fn main() -> Result<()> { run_sample::<d3d12_hello_window::Sample>()?; Ok(()) }
use d3dx12::*; use dxsample::*; use windows::{ core::*, Win32::{ Foundation::*, Graphics::{Direct3D12::*, Dxgi::Common::*, Dxgi::*}, }, }; mod d3d12_hello_wi
}); Ok(()) } fn title(&self) -> String { "D3D12 Hello Window".into() } fn window_size(&self) -> (i32, i32) { (1280, 720) } fn render(&mut self) { let resources = match &mut self.resources { Some(it) => it, _ => return, }; populate_command_list(resources).unwrap(); let command_list = ID3D12CommandList::from(&resources.command_list); unsafe { resources .command_queue .ExecuteCommandLists(1, &mut Some(command_list)) }; unsafe { resources.swap_chain.Present(1, 0) }.ok().unwrap(); wait_for_previous_frame(resources); } } fn populate_command_list(resources: &Resources) -> Result<()> { unsafe { resources.command_allocator.Reset() }?; let command_list = &resources.command_list; unsafe { command_list.Reset(&resources.command_allocator, None) }?; let barrier = transition_barrier( &resources.render_targets[resources.frame_index as usize], D3D12_RESOURCE_STATE_PRESENT, D3D12_RESOURCE_STATE_RENDER_TARGET, ); unsafe { command_list.ResourceBarrier(1, &barrier) }; let rtv_handle = resources .rtv_heap .get_cpu_descriptor_handle(resources.frame_index); unsafe { command_list.OMSetRenderTargets(1, &rtv_handle, false, std::ptr::null()) }; unsafe { command_list.ClearRenderTargetView( rtv_handle, [0.0, 0.2, 0.4, 1.0].as_ptr(), 0, std::ptr::null(), ); command_list.ResourceBarrier( 1, &transition_barrier( &resources.render_targets[resources.frame_index as usize], D3D12_RESOURCE_STATE_RENDER_TARGET, D3D12_RESOURCE_STATE_PRESENT, ), ); } unsafe { command_list.Close() } } fn wait_for_previous_frame(resources: &mut Resources) { resources.command_queue.signal_and_wait_for_gpu().unwrap(); resources.frame_index = unsafe { resources .swap_chain .GetCurrentBackBufferIndex() .try_into() .unwrap() }; } } fn main() -> Result<()> { run_sample::<d3d12_hello_window::Sample>()?; Ok(()) }
ndow { use std::convert::TryInto; use super::*; const FRAME_COUNT: usize = 2; pub struct Sample { dxgi_factory: IDXGIFactory4, device: ID3D12Device, resources: Option<Resources>, } struct Resources { command_queue: SynchronizedCommandQueue, swap_chain: IDXGISwapChain3, frame_index: usize, render_targets: [ID3D12Resource; FRAME_COUNT], rtv_heap: RtvDescriptorHeap, command_allocator: ID3D12CommandAllocator, command_list: ID3D12GraphicsCommandList, } impl DXSample for Sample { fn new(command_line: &SampleCommandLine) -> Result<Self> { let (dxgi_factory, device) = create_device(command_line)?; Ok(Sample { dxgi_factory, device, resources: None, }) } fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()> { let command_queue = SynchronizedCommandQueue::new(&self.device, D3D12_COMMAND_LIST_TYPE_DIRECT)?; let (width, height) = self.window_size(); let swap_chain_desc = DXGI_SWAP_CHAIN_DESC1 { Width: width as u32, Height: height as u32, Format: DXGI_FORMAT_R8G8B8A8_UNORM, SampleDesc: DXGI_SAMPLE_DESC { Count: 1, ..Default::default() }, BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT, BufferCount: FRAME_COUNT as u32, SwapEffect: DXGI_SWAP_EFFECT_FLIP_DISCARD, ..Default::default() }; let swap_chain: IDXGISwapChain3 = unsafe { self.dxgi_factory.CreateSwapChainForHwnd( &command_queue.queue, hwnd, &swap_chain_desc, std::ptr::null(), None, ) }? .cast()?; unsafe { self.dxgi_factory .MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER) }?; let frame_index = unsafe { swap_chain.GetCurrentBackBufferIndex() } .try_into() .unwrap(); let rtv_heap = RtvDescriptorHeap::new(&self.device, FRAME_COUNT)?; let render_targets: [ID3D12Resource; FRAME_COUNT] = array_init::try_array_init(|i: usize| -> Result<ID3D12Resource> { let render_target: ID3D12Resource = unsafe { swap_chain.GetBuffer(i as u32) }?; unsafe { rtv_heap.create_render_target_view(&self.device, &render_target, None, i); } Ok(render_target) })?; let command_allocator = unsafe { self.device .CreateCommandAllocator(D3D12_COMMAND_LIST_TYPE_DIRECT) }?; let command_list: ID3D12GraphicsCommandList = unsafe { self.device.CreateCommandList( 0, D3D12_COMMAND_LIST_TYPE_DIRECT, &command_allocator, None, ) }?; unsafe { command_list.Close() }?; self.resources = Some(Resources { command_queue, swap_chain, frame_index, render_targets, rtv_heap, command_allocator, command_list,
random
[ { "content": "use windows::Win32::Graphics::{Direct3D12::*, Dxgi::Common::*};\n\n\n\nmod descriptor_heaps;\n\npub use descriptor_heaps::*;\n\n\n\nmod pipeline_states;\n\npub use pipeline_states::*;\n\n\n\npub mod build;\n\n\n", "file_path": "d3dx12/src/lib.rs", "rank": 0, "score": 34164.99046050119 }, { "content": "use std::path::Path;\n\nuse std::{env::var, fs::copy};\n\n\n", "file_path": "d3dx12/src/build.rs", "rank": 1, "score": 34152.01110355462 }, { "content": " Width: width,\n\n Height: height,\n\n ..ResourceDesc::default()\n\n }\n\n }\n\n\n\n fn default() -> Self {\n\n D3D12_RESOURCE_DESC {\n\n Dimension: D3D12_RESOURCE_DIMENSION_UNKNOWN,\n\n Alignment: 0,\n\n Width: 1,\n\n Height: 1,\n\n DepthOrArraySize: 1,\n\n MipLevels: 1,\n\n Format: DXGI_FORMAT_UNKNOWN,\n\n SampleDesc: DXGI_SAMPLE_DESC {\n\n Count: 1,\n\n Quality: 0,\n\n },\n\n Layout: D3D12_TEXTURE_LAYOUT_UNKNOWN,\n\n Flags: D3D12_RESOURCE_FLAG_NONE,\n\n }\n\n }\n\n}\n\n\n", "file_path": "d3dx12/src/lib.rs", "rank": 2, "score": 34150.75132721436 }, { "content": "use std::mem::transmute;\n\nuse windows::core::*;\n\nuse windows::Win32::Graphics::Direct3D::{D3D_FEATURE_LEVEL, D3D_FEATURE_LEVEL_11_0};\n\nuse windows::Win32::UI::Input::KeyboardAndMouse::VIRTUAL_KEY;\n\nuse windows::Win32::{\n\n Foundation::*,\n\n Graphics::{Direct3D12::*, Dxgi::*},\n\n System::LibraryLoader::*,\n\n System::Threading::*,\n\n System::WindowsProgramming::*,\n\n UI::WindowsAndMessaging::*,\n\n};\n\n\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 3, "score": 32995.208303002386 }, { "content": " let size = sample.window_size();\n\n\n\n let atom = unsafe { RegisterClassExA(&wc) };\n\n debug_assert_ne!(atom, 0);\n\n\n\n let mut window_rect = RECT {\n\n left: 0,\n\n top: 0,\n\n right: size.0,\n\n bottom: size.1,\n\n };\n\n unsafe { AdjustWindowRect(&mut window_rect, WS_OVERLAPPEDWINDOW, false) };\n\n\n\n let mut title = sample.title();\n\n\n\n if command_line.use_warp_device {\n\n title.push_str(\" (WARP)\");\n\n }\n\n\n\n let hwnd = unsafe {\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 4, "score": 32982.22676909808 }, { "content": "#[cfg(target_pointer_width = \"32\")]\n\nunsafe fn SetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX, value: isize) -> isize {\n\n SetWindowLongA(window, index, value as _) as _\n\n}\n\n\n\n#[allow(non_snake_case)]\n\n#[cfg(target_pointer_width = \"64\")]\n\nunsafe fn SetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX, value: isize) -> isize {\n\n SetWindowLongPtrA(window, index, value)\n\n}\n\n\n\n#[allow(non_snake_case)]\n\n#[cfg(target_pointer_width = \"32\")]\n\nunsafe fn GetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX) -> isize {\n\n GetWindowLongA(window, index) as _\n\n}\n\n\n\n#[allow(non_snake_case)]\n\n#[cfg(target_pointer_width = \"64\")]\n\nunsafe fn GetWindowLong(window: HWND, index: WINDOW_LONG_PTR_INDEX) -> isize {\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 5, "score": 32980.800426298665 }, { "content": " CreateWindowExA(\n\n Default::default(),\n\n \"RustWindowClass\",\n\n title,\n\n WS_OVERLAPPEDWINDOW,\n\n CW_USEDEFAULT,\n\n CW_USEDEFAULT,\n\n window_rect.right - window_rect.left,\n\n window_rect.bottom - window_rect.top,\n\n None, // no parent window\n\n None, // no menus\n\n instance,\n\n &mut sample as *mut _ as _,\n\n )\n\n };\n\n debug_assert_ne!(hwnd, 0);\n\n\n\n sample.bind_to_window(&hwnd)?;\n\n\n\n unsafe { ShowWindow(hwnd, SW_SHOW) };\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 6, "score": 32980.72021012861 }, { "content": " 
GetWindowLongPtrA(window, index)\n\n}\n\n\n\nextern \"system\" fn wndproc<S: DXSample>(\n\n window: HWND,\n\n message: u32,\n\n wparam: WPARAM,\n\n lparam: LPARAM,\n\n) -> LRESULT {\n\n match message {\n\n WM_CREATE => {\n\n unsafe {\n\n let create_struct: &CREATESTRUCTA = transmute(lparam);\n\n SetWindowLong(window, GWLP_USERDATA, create_struct.lpCreateParams as _);\n\n }\n\n LRESULT::default()\n\n }\n\n WM_DESTROY => {\n\n unsafe { PostQuitMessage(0) };\n\n LRESULT::default()\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 7, "score": 32980.196368770135 }, { "content": " }\n\n _ => {\n\n let user_data = unsafe { GetWindowLong(window, GWLP_USERDATA) };\n\n let sample = std::ptr::NonNull::<S>::new(user_data as _);\n\n let handled = sample.map_or(false, |mut s| {\n\n sample_wndproc(unsafe { s.as_mut() }, message, wparam)\n\n });\n\n\n\n if handled {\n\n LRESULT::default()\n\n } else {\n\n unsafe { DefWindowProcA(window, message, wparam, lparam) }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 8, "score": 32980.179446754046 }, { "content": " } else {\n\n get_hardware_adapter(&dxgi_factory)\n\n }?;\n\n\n\n let mut device: Option<ID3D12Device> = None;\n\n unsafe { D3D12CreateDevice(adapter, D3D_FEATURE_LEVEL_11_0, &mut device) }?;\n\n\n\n Ok((dxgi_factory, device.unwrap()))\n\n}\n\n\n\n/// A command queue, a fence, and an event. This allows us to synchronize the\n\n/// GPU or CPU. with each other.\n\npub struct SynchronizedCommandQueue {\n\n pub queue: ID3D12CommandQueue,\n\n pub fence: ID3D12Fence,\n\n fence_value: u64,\n\n fence_event: HANDLE,\n\n}\n\n\n\nimpl SynchronizedCommandQueue {\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 9, "score": 32978.883343796704 }, { "content": " /// # Safety\n\n /// commandlists is expected to be an array of size numcommandlists. 
Make\n\n /// sure it is!\n\n pub unsafe fn ExecuteCommandLists(\n\n &self,\n\n numcommandlists: u32,\n\n commandlists: *mut Option<ID3D12CommandList>,\n\n ) {\n\n self.queue\n\n .ExecuteCommandLists(numcommandlists, commandlists)\n\n }\n\n\n\n pub fn execute_command_lists(&self, command_lists: &[ID3D12GraphicsCommandList]) {\n\n unsafe {\n\n self.ExecuteCommandLists(\n\n command_lists.len() as u32,\n\n command_lists.as_ptr() as *mut Option<ID3D12CommandList>,\n\n );\n\n }\n\n }\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 10, "score": 32975.96137044653 }, { "content": "\n\n pub fn enqueue_signal(&mut self) -> Result<u64> {\n\n unsafe { self.queue.Signal(&self.fence, self.fence_value) }?;\n\n\n\n let signaled_value = self.fence_value;\n\n self.fence_value += 1;\n\n\n\n Ok(signaled_value)\n\n }\n\n\n\n pub fn wait_for_gpu(&self, signaled_value: u64) -> Result<()> {\n\n unsafe {\n\n self.fence\n\n .SetEventOnCompletion(signaled_value, self.fence_event)?;\n\n WaitForSingleObject(self.fence_event, INFINITE);\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn signal_and_wait_for_gpu(&mut self) -> Result<()> {\n\n let enqueued_signal = self.enqueue_signal()?;\n\n self.wait_for_gpu(enqueued_signal)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 11, "score": 32975.96137044653 }, { "content": " pub fn new(device: &ID3D12Device, queue_type: D3D12_COMMAND_LIST_TYPE) -> Result<Self> {\n\n let command_queue = unsafe {\n\n device.CreateCommandQueue(&D3D12_COMMAND_QUEUE_DESC {\n\n Type: queue_type,\n\n ..Default::default()\n\n })\n\n }?;\n\n\n\n let fence = unsafe { device.CreateFence(0, D3D12_FENCE_FLAG_NONE) }?;\n\n let fence_event = unsafe { CreateEventA(std::ptr::null_mut(), false, false, None) };\n\n\n\n Ok(SynchronizedCommandQueue {\n\n queue: command_queue,\n\n fence,\n\n fence_value: 1,\n\n fence_event,\n\n })\n\n }\n\n\n\n #[allow(non_snake_case)]\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 12, "score": 32975.96137044653 }, { "content": " ppdevice: *mut *mut ::std::ffi::c_void,\n\n ) -> HRESULT;\n\n }\n\n\n\n // Check to see whether the adapter supports Direct3D 12, but don't\n\n // create the actual device yet.\n\n if unsafe {\n\n D3D12CreateDevice(\n\n std::mem::transmute_copy(&adapter),\n\n D3D_FEATURE_LEVEL_11_0,\n\n &ID3D12Device::IID,\n\n std::ptr::null_mut(),\n\n )\n\n }\n\n .is_ok()\n\n {\n\n return Ok(adapter);\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 13, "score": 32975.96137044653 }, { "content": "\n\n loop {\n\n let mut message = MSG::default();\n\n\n\n if unsafe { PeekMessageA(&mut message, None, 0, 0, PM_REMOVE) }.into() {\n\n unsafe {\n\n TranslateMessage(&message);\n\n DispatchMessageA(&message);\n\n }\n\n\n\n if message.message == WM_QUIT {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 14, "score": 32975.96137044653 }, { "content": "use windows::core::*;\n\nuse windows::Win32::Graphics::{Direct3D12::*, Dxgi::Common::DXGI_FORMAT};\n\n\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 15, "score": 32703.30589655247 }, { "content": "use windows::Win32::Graphics::{Direct3D::ID3DBlob, Direct3D12::*};\n\n\n", "file_path": "d3dx12/src/pipeline_states.rs", "rank": 16, "score": 32694.473701436553 }, { "content": "}\n\n\n\nimpl DescriptorHeap for SamplerDescriptorHeap {\n\n fn from_fields(\n\n heap: ID3D12DescriptorHeap,\n\n start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n start_gpu_handle: 
D3D12_GPU_DESCRIPTOR_HANDLE,\n\n increment: usize,\n\n ) -> Self {\n\n SamplerDescriptorHeap {\n\n heap,\n\n start_cpu_handle,\n\n start_gpu_handle,\n\n increment,\n\n }\n\n }\n\n\n\n fn start_cpu_handle(&self) -> D3D12_CPU_DESCRIPTOR_HANDLE {\n\n self.start_cpu_handle\n\n }\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 17, "score": 32683.621180793158 }, { "content": " }\n\n\n\n fn from_descriptor_heap(\n\n device: &ID3D12Device,\n\n heap: ID3D12DescriptorHeap,\n\n heap_type: D3D12_DESCRIPTOR_HEAP_TYPE,\n\n flags: D3D12_DESCRIPTOR_HEAP_FLAGS,\n\n ) -> Self\n\n where\n\n Self: Sized,\n\n {\n\n let increment = unsafe { device.GetDescriptorHandleIncrementSize(heap_type) } as usize;\n\n let start_cpu_handle = unsafe { heap.GetCPUDescriptorHandleForHeapStart() };\n\n let start_gpu_handle = if flags == D3D12_DESCRIPTOR_HEAP_FLAG_SHADER_VISIBLE {\n\n unsafe { heap.GetGPUDescriptorHandleForHeapStart() }\n\n } else {\n\n D3D12_GPU_DESCRIPTOR_HANDLE { ptr: 0 }\n\n };\n\n\n\n Self::from_fields(heap, start_cpu_handle, start_gpu_handle, increment)\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 18, "score": 32683.621180793158 }, { "content": " pub heap: ID3D12DescriptorHeap,\n\n pub start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n pub start_gpu_handle: D3D12_GPU_DESCRIPTOR_HANDLE,\n\n pub increment: usize,\n\n}\n\n\n\nimpl DescriptorHeap for CbvSrvUavDescriptorHeap {\n\n fn from_fields(\n\n heap: ID3D12DescriptorHeap,\n\n start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n start_gpu_handle: D3D12_GPU_DESCRIPTOR_HANDLE,\n\n increment: usize,\n\n ) -> Self {\n\n CbvSrvUavDescriptorHeap {\n\n heap,\n\n start_cpu_handle,\n\n start_gpu_handle,\n\n increment,\n\n }\n\n }\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 19, "score": 32683.621180793158 }, { "content": " }\n\n\n\n pub fn get_descriptor_handles(&self, index: usize) -> DescriptorHandles {\n\n DescriptorHandles {\n\n cpu: self.get_cpu_descriptor_handle(index),\n\n gpu: self.get_gpu_descriptor_handle(index),\n\n }\n\n }\n\n\n\n /// Creates a SRV in this heap.\n\n ///\n\n /// # Safety\n\n /// Ensure that dest_index is a valid index in the heap and that the desc is\n\n /// valid.\n\n pub unsafe fn create_shader_resource_view<'a>(\n\n &self,\n\n device: &ID3D12Device,\n\n resource: impl IntoParam<'a, ID3D12Resource>,\n\n desc: Option<&D3D12_SHADER_RESOURCE_VIEW_DESC>,\n\n dest_index: usize,\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 20, "score": 32683.621180793158 }, { "content": " RenderTargetWriteMask: D3D12_COLOR_WRITE_ENABLE_ALL as u8,\n\n },\n\n D3D12_RENDER_TARGET_BLEND_DESC::default(),\n\n D3D12_RENDER_TARGET_BLEND_DESC::default(),\n\n D3D12_RENDER_TARGET_BLEND_DESC::default(),\n\n D3D12_RENDER_TARGET_BLEND_DESC::default(),\n\n D3D12_RENDER_TARGET_BLEND_DESC::default(),\n\n D3D12_RENDER_TARGET_BLEND_DESC::default(),\n\n D3D12_RENDER_TARGET_BLEND_DESC::default(),\n\n ],\n\n }\n\n }\n\n}\n", "file_path": "d3dx12/src/pipeline_states.rs", "rank": 21, "score": 32683.621180793158 }, { "content": " }\n\n\n\n fn from_fields(\n\n heap: ID3D12DescriptorHeap,\n\n start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n start_gpu_handle: D3D12_GPU_DESCRIPTOR_HANDLE,\n\n increment: usize,\n\n ) -> Self;\n\n\n\n fn start_cpu_handle(&self) -> D3D12_CPU_DESCRIPTOR_HANDLE;\n\n fn start_gpu_handle(&self) -> D3D12_GPU_DESCRIPTOR_HANDLE;\n\n fn increment(&self) -> usize;\n\n\n\n fn get_cpu_descriptor_handle(&self, index: usize) -> D3D12_CPU_DESCRIPTOR_HANDLE {\n\n D3D12_CPU_DESCRIPTOR_HANDLE {\n\n ptr: 
self.start_cpu_handle().ptr + self.increment() * index,\n\n }\n\n }\n\n\n\n fn get_gpu_descriptor_handle(&self, index: usize) -> D3D12_GPU_DESCRIPTOR_HANDLE {\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 22, "score": 32683.621180793158 }, { "content": " D3D12_GPU_DESCRIPTOR_HANDLE {\n\n ptr: self.start_gpu_handle().ptr + (self.increment() * index) as u64,\n\n }\n\n }\n\n}\n\n\n\npub struct RtvDescriptorHeap {\n\n pub heap: ID3D12DescriptorHeap,\n\n pub start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n pub increment: usize,\n\n}\n\n\n\nimpl DescriptorHeap for RtvDescriptorHeap {\n\n fn from_fields(\n\n heap: ID3D12DescriptorHeap,\n\n start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n start_gpu_handle: D3D12_GPU_DESCRIPTOR_HANDLE,\n\n increment: usize,\n\n ) -> Self {\n\n std::assert_eq!(start_gpu_handle.ptr, 0);\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 23, "score": 32683.621180793158 }, { "content": " ) {\n\n let desc_ptr: *const D3D12_SHADER_RESOURCE_VIEW_DESC = if let Some(desc) = desc {\n\n desc\n\n } else {\n\n std::ptr::null()\n\n };\n\n\n\n device.CreateShaderResourceView(\n\n resource,\n\n desc_ptr,\n\n self.get_cpu_descriptor_handle(dest_index),\n\n );\n\n }\n\n\n\n /// Creates a CBV in this heap\n\n ///\n\n /// # Safety\n\n /// Ensure that dest_index is a valid index in the heap and that the desc is\n\n /// valid.\n\n pub unsafe fn create_constant_buffer_view(\n\n &self,\n\n device: &ID3D12Device,\n\n desc: &D3D12_CONSTANT_BUFFER_VIEW_DESC,\n\n dest_index: usize,\n\n ) {\n\n device.CreateConstantBufferView(desc, self.get_cpu_descriptor_handle(dest_index));\n\n }\n\n}\n\n\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 24, "score": 32683.621180793158 }, { "content": " D3D12_GPU_DESCRIPTOR_HANDLE::default(),\n\n self.increment(),\n\n )\n\n }\n\n\n\n /// Creates a DSV in this heap.\n\n ///\n\n /// # Safety\n\n /// Ensure that dest_index is a valid index in the heap and that the desc is\n\n /// valid.\n\n pub unsafe fn create_depth_stencil_view<'a>(\n\n &self,\n\n device: &ID3D12Device,\n\n resource: impl IntoParam<'a, ID3D12Resource>,\n\n desc: Option<&D3D12_DEPTH_STENCIL_VIEW_DESC>,\n\n dest_index: usize,\n\n ) {\n\n let desc_ptr: *const D3D12_DEPTH_STENCIL_VIEW_DESC = if let Some(desc) = desc {\n\n desc\n\n } else {\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 25, "score": 32683.621180793158 }, { "content": " DescriptorHeap::create(\n\n device,\n\n D3D12_DESCRIPTOR_HEAP_TYPE_RTV,\n\n num_descriptors,\n\n D3D12_DESCRIPTOR_HEAP_FLAG_NONE,\n\n )\n\n }\n\n\n\n /// Creates an RTV in this heap.\n\n ///\n\n /// # Safety\n\n /// Ensure that dest_index is a valid index in the heap and that the desc is\n\n /// valid.\n\n pub unsafe fn create_render_target_view<'a>(\n\n &self,\n\n device: &ID3D12Device,\n\n resource: impl IntoParam<'a, ID3D12Resource>,\n\n desc: Option<&D3D12_RENDER_TARGET_VIEW_DESC>,\n\n dest_index: usize,\n\n ) {\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 26, "score": 32683.621180793158 }, { "content": "\n\n fn start_cpu_handle(&self) -> D3D12_CPU_DESCRIPTOR_HANDLE {\n\n self.start_cpu_handle\n\n }\n\n\n\n fn start_gpu_handle(&self) -> D3D12_GPU_DESCRIPTOR_HANDLE {\n\n self.start_gpu_handle\n\n }\n\n\n\n fn increment(&self) -> usize {\n\n self.increment\n\n }\n\n}\n\n\n\npub struct DescriptorHandles {\n\n pub cpu: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n pub gpu: D3D12_GPU_DESCRIPTOR_HANDLE,\n\n}\n\n\n\nimpl CbvSrvUavDescriptorHeap {\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 27, "score": 
32683.621180793158 }, { "content": "impl DescriptorHeap for DsvDescriptorHeap {\n\n fn from_fields(\n\n heap: ID3D12DescriptorHeap,\n\n start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n start_gpu_handle: D3D12_GPU_DESCRIPTOR_HANDLE,\n\n increment: usize,\n\n ) -> Self {\n\n std::assert_eq!(start_gpu_handle.ptr, 0);\n\n DsvDescriptorHeap {\n\n heap,\n\n start_cpu_handle,\n\n increment,\n\n }\n\n }\n\n\n\n fn start_cpu_handle(&self) -> D3D12_CPU_DESCRIPTOR_HANDLE {\n\n self.start_cpu_handle\n\n }\n\n fn start_gpu_handle(&self) -> D3D12_GPU_DESCRIPTOR_HANDLE {\n\n std::panic!();\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 28, "score": 32683.621180793158 }, { "content": " }\n\n fn increment(&self) -> usize {\n\n self.increment\n\n }\n\n}\n\n\n\nimpl DsvDescriptorHeap {\n\n pub fn new(device: &ID3D12Device, num_descriptors: usize) -> Result<Self> {\n\n DescriptorHeap::create(\n\n device,\n\n D3D12_DESCRIPTOR_HEAP_TYPE_DSV,\n\n num_descriptors,\n\n D3D12_DESCRIPTOR_HEAP_FLAG_NONE,\n\n )\n\n }\n\n\n\n pub fn slice(&self, start_index: usize) -> Self {\n\n Self::from_fields(\n\n self.heap.clone(), // TODO: this clone is icky\n\n self.get_cpu_descriptor_handle(start_index),\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 29, "score": 32683.621180793158 }, { "content": "\n\n fn start_gpu_handle(&self) -> D3D12_GPU_DESCRIPTOR_HANDLE {\n\n self.start_gpu_handle\n\n }\n\n\n\n fn increment(&self) -> usize {\n\n self.increment\n\n }\n\n}\n\n\n\nimpl SamplerDescriptorHeap {\n\n pub fn new(\n\n device: &ID3D12Device,\n\n num_descriptors: usize,\n\n flags: D3D12_DESCRIPTOR_HEAP_FLAGS,\n\n ) -> Result<Self> {\n\n DescriptorHeap::create(\n\n device,\n\n D3D12_DESCRIPTOR_HEAP_TYPE_SAMPLER,\n\n num_descriptors,\n\n flags,\n\n )\n\n }\n\n}\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 30, "score": 32683.621180793158 }, { "content": " std::ptr::null()\n\n };\n\n\n\n device.CreateDepthStencilView(\n\n resource,\n\n desc_ptr,\n\n self.get_cpu_descriptor_handle(dest_index),\n\n );\n\n }\n\n}\n\n\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 31, "score": 32683.621180793158 }, { "content": " RtvDescriptorHeap {\n\n heap,\n\n start_cpu_handle,\n\n increment,\n\n }\n\n }\n\n\n\n fn start_cpu_handle(&self) -> D3D12_CPU_DESCRIPTOR_HANDLE {\n\n self.start_cpu_handle\n\n }\n\n fn start_gpu_handle(&self) -> D3D12_GPU_DESCRIPTOR_HANDLE {\n\n std::panic!();\n\n }\n\n fn increment(&self) -> usize {\n\n self.increment\n\n }\n\n}\n\n\n\nimpl RtvDescriptorHeap {\n\n pub fn new(device: &ID3D12Device, num_descriptors: usize) -> Result<Self> {\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 32, "score": 32683.621180793158 }, { "content": " pub fn new(\n\n device: &ID3D12Device,\n\n num_descriptors: usize,\n\n flags: D3D12_DESCRIPTOR_HEAP_FLAGS,\n\n ) -> Result<Self> {\n\n DescriptorHeap::create(\n\n device,\n\n D3D12_DESCRIPTOR_HEAP_TYPE_CBV_SRV_UAV,\n\n num_descriptors,\n\n flags,\n\n )\n\n }\n\n\n\n pub fn slice(&self, start_index: usize) -> Self {\n\n Self::from_fields(\n\n self.heap.clone(), // TODO: this clone is icky\n\n self.get_cpu_descriptor_handle(start_index),\n\n self.get_gpu_descriptor_handle(start_index),\n\n self.increment(),\n\n )\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 33, "score": 32683.621180793158 }, { "content": " let desc_ptr: *const D3D12_RENDER_TARGET_VIEW_DESC = if let Some(desc) = desc {\n\n desc\n\n } else {\n\n std::ptr::null()\n\n };\n\n\n\n device.CreateRenderTargetView(\n\n resource,\n\n desc_ptr,\n\n 
self.get_cpu_descriptor_handle(dest_index),\n\n );\n\n }\n\n}\n\n\n\npub struct DsvDescriptorHeap {\n\n pub heap: ID3D12DescriptorHeap,\n\n pub start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n pub increment: usize,\n\n}\n\n\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 34, "score": 32683.621180793158 }, { "content": "pub trait ResourceDesc {\n\n fn default() -> Self;\n\n fn buffer(size: usize) -> Self;\n\n fn tex2d(format: DXGI_FORMAT, width: u64, height: u32) -> Self;\n\n}\n\n\n\nimpl ResourceDesc for D3D12_RESOURCE_DESC {\n\n fn buffer(size: usize) -> Self {\n\n D3D12_RESOURCE_DESC {\n\n Dimension: D3D12_RESOURCE_DIMENSION_BUFFER,\n\n Width: size as u64,\n\n Layout: D3D12_TEXTURE_LAYOUT_ROW_MAJOR,\n\n ..ResourceDesc::default()\n\n }\n\n }\n\n\n\n fn tex2d(format: DXGI_FORMAT, width: u64, height: u32) -> Self {\n\n D3D12_RESOURCE_DESC {\n\n Format: format,\n\n Dimension: D3D12_RESOURCE_DIMENSION_TEXTURE2D,\n", "file_path": "d3dx12/src/lib.rs", "rank": 35, "score": 30102.464462499007 }, { "content": "pub fn transition_barrier(\n\n resource: &ID3D12Resource,\n\n state_before: D3D12_RESOURCE_STATES,\n\n state_after: D3D12_RESOURCE_STATES,\n\n) -> D3D12_RESOURCE_BARRIER {\n\n D3D12_RESOURCE_BARRIER {\n\n Type: D3D12_RESOURCE_BARRIER_TYPE_TRANSITION,\n\n Flags: D3D12_RESOURCE_BARRIER_FLAG_NONE,\n\n Anonymous: D3D12_RESOURCE_BARRIER_0 {\n\n Transition: std::mem::ManuallyDrop::new(D3D12_RESOURCE_TRANSITION_BARRIER {\n\n pResource: Some(resource.clone()),\n\n StateBefore: state_before,\n\n StateAfter: state_after,\n\n Subresource: D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,\n\n }),\n\n },\n\n }\n\n}\n\n\n", "file_path": "d3dx12/src/lib.rs", "rank": 36, "score": 30102.464462499007 }, { "content": "pub trait HeapProperties {\n\n fn default() -> Self;\n\n fn standard(heap_type: D3D12_HEAP_TYPE) -> Self;\n\n}\n\n\n\nimpl HeapProperties for D3D12_HEAP_PROPERTIES {\n\n fn default() -> Self {\n\n D3D12_HEAP_PROPERTIES {\n\n Type: D3D12_HEAP_TYPE_DEFAULT,\n\n CPUPageProperty: D3D12_CPU_PAGE_PROPERTY_UNKNOWN,\n\n MemoryPoolPreference: D3D12_MEMORY_POOL_UNKNOWN,\n\n CreationNodeMask: 1,\n\n VisibleNodeMask: 1,\n\n }\n\n }\n\n\n\n fn standard(heap_type: D3D12_HEAP_TYPE) -> Self {\n\n D3D12_HEAP_PROPERTIES {\n\n Type: heap_type,\n\n ..HeapProperties::default()\n\n }\n\n }\n\n}\n", "file_path": "d3dx12/src/lib.rs", "rank": 37, "score": 30102.464462499007 }, { "content": "pub trait DXSample {\n\n fn new(command_line: &SampleCommandLine) -> Result<Self>\n\n where\n\n Self: Sized;\n\n\n\n fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()>;\n\n\n\n fn update(&mut self) {}\n\n fn render(&mut self) {}\n\n fn on_key_up(&mut self, _key: VIRTUAL_KEY) {}\n\n fn on_key_down(&mut self, _key: VIRTUAL_KEY) {}\n\n\n\n fn title(&self) -> String {\n\n \"D3D12 Hello Triangle\".into()\n\n }\n\n\n\n fn window_size(&self) -> (i32, i32) {\n\n (640, 480)\n\n }\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct SampleCommandLine {\n\n pub use_warp_device: bool,\n\n}\n\n\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 38, "score": 29217.986551931353 }, { "content": "pub trait DescriptorHeap {\n\n fn create(\n\n device: &ID3D12Device,\n\n heap_type: D3D12_DESCRIPTOR_HEAP_TYPE,\n\n num_descriptors: usize,\n\n flags: D3D12_DESCRIPTOR_HEAP_FLAGS,\n\n ) -> Result<Self>\n\n where\n\n Self: Sized,\n\n {\n\n let heap = unsafe {\n\n device.CreateDescriptorHeap(&D3D12_DESCRIPTOR_HEAP_DESC {\n\n Type: heap_type,\n\n NumDescriptors: num_descriptors as u32,\n\n Flags: flags,\n\n NodeMask: 0,\n\n })\n\n }?;\n\n\n\n 
Ok(Self::from_descriptor_heap(device, heap, heap_type, flags))\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 39, "score": 28958.961754020955 }, { "content": "pub trait RasterizerDesc {\n\n fn reasonable_default() -> Self;\n\n}\n\n\n\nimpl RasterizerDesc for D3D12_RASTERIZER_DESC {\n\n fn reasonable_default() -> Self {\n\n D3D12_RASTERIZER_DESC {\n\n FillMode: D3D12_FILL_MODE_SOLID,\n\n CullMode: D3D12_CULL_MODE_BACK,\n\n FrontCounterClockwise: false.into(),\n\n DepthBias: D3D12_DEFAULT_DEPTH_BIAS as i32,\n\n DepthBiasClamp: D3D12_DEFAULT_DEPTH_BIAS_CLAMP,\n\n SlopeScaledDepthBias: D3D12_DEFAULT_SLOPE_SCALED_DEPTH_BIAS,\n\n DepthClipEnable: true.into(),\n\n MultisampleEnable: false.into(),\n\n AntialiasedLineEnable: false.into(),\n\n ForcedSampleCount: 0,\n\n ConservativeRaster: D3D12_CONSERVATIVE_RASTERIZATION_MODE_OFF,\n\n }\n\n }\n\n}\n\n\n", "file_path": "d3dx12/src/pipeline_states.rs", "rank": 40, "score": 28958.961754020955 }, { "content": "pub trait BlendDesc {\n\n fn reasonable_default() -> Self;\n\n}\n\n\n\nimpl BlendDesc for D3D12_BLEND_DESC {\n\n fn reasonable_default() -> Self {\n\n D3D12_BLEND_DESC {\n\n AlphaToCoverageEnable: false.into(),\n\n IndependentBlendEnable: false.into(),\n\n RenderTarget: [\n\n D3D12_RENDER_TARGET_BLEND_DESC {\n\n BlendEnable: false.into(),\n\n LogicOpEnable: false.into(),\n\n SrcBlend: D3D12_BLEND_ONE,\n\n DestBlend: D3D12_BLEND_ZERO,\n\n BlendOp: D3D12_BLEND_OP_ADD,\n\n SrcBlendAlpha: D3D12_BLEND_ONE,\n\n DestBlendAlpha: D3D12_BLEND_ZERO,\n\n BlendOpAlpha: D3D12_BLEND_OP_ADD,\n\n LogicOp: D3D12_LOGIC_OP_NOOP,\n", "file_path": "d3dx12/src/pipeline_states.rs", "rank": 41, "score": 28958.961754020955 }, { "content": "pub trait ShaderBytecode {\n\n fn from_blob(blob: &ID3DBlob) -> Self;\n\n}\n\n\n\nimpl ShaderBytecode for D3D12_SHADER_BYTECODE {\n\n fn from_blob(blob: &ID3DBlob) -> Self {\n\n D3D12_SHADER_BYTECODE {\n\n pShaderBytecode: unsafe { blob.GetBufferPointer() },\n\n BytecodeLength: unsafe { blob.GetBufferSize() },\n\n }\n\n }\n\n}\n\n\n", "file_path": "d3dx12/src/pipeline_states.rs", "rank": 42, "score": 28958.961754020955 }, { "content": "pub fn run_sample<S>() -> Result<()>\n\nwhere\n\n S: DXSample,\n\n{\n\n let instance = unsafe { GetModuleHandleA(None) };\n\n debug_assert_ne!(instance, 0);\n\n\n\n let wc = WNDCLASSEXA {\n\n cbSize: std::mem::size_of::<WNDCLASSEXA>() as u32,\n\n style: CS_HREDRAW | CS_VREDRAW,\n\n lpfnWndProc: Some(wndproc::<S>),\n\n hInstance: instance,\n\n hCursor: unsafe { LoadCursorW(None, IDC_ARROW) },\n\n lpszClassName: PSTR(b\"RustWindowClass\\0\".as_ptr() as _),\n\n ..Default::default()\n\n };\n\n\n\n let command_line = build_command_line();\n\n let mut sample = S::new(&command_line)?;\n\n\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 54, "score": 27154.91809895853 }, { "content": "pub trait ShaderResourceViewDesc {\n\n fn texture2d(format: DXGI_FORMAT, srv: D3D12_TEX2D_SRV) -> Self;\n\n}\n\n\n\nimpl ShaderResourceViewDesc for D3D12_SHADER_RESOURCE_VIEW_DESC {\n\n fn texture2d(format: DXGI_FORMAT, srv: D3D12_TEX2D_SRV) -> Self {\n\n D3D12_SHADER_RESOURCE_VIEW_DESC {\n\n Format: format,\n\n ViewDimension: D3D12_SRV_DIMENSION_TEXTURE2D,\n\n Shader4ComponentMapping: D3D12_DEFAULT_SHADER_4_COMPONENT_MAPPING,\n\n Anonymous: D3D12_SHADER_RESOURCE_VIEW_DESC_0 { Texture2D: srv },\n\n }\n\n }\n\n}\n\n\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 55, "score": 26914.182921660995 }, { "content": "pub trait ConstantBufferViewDesc {\n\n fn entire_resource(resource: &ID3D12Resource) -> 
Self;\n\n}\n\n\n\nimpl ConstantBufferViewDesc for D3D12_CONSTANT_BUFFER_VIEW_DESC {\n\n fn entire_resource(resource: &ID3D12Resource) -> Self {\n\n unsafe {\n\n D3D12_CONSTANT_BUFFER_VIEW_DESC {\n\n BufferLocation: resource.GetGPUVirtualAddress(),\n\n SizeInBytes: resource.GetDesc().Width as u32,\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct SamplerDescriptorHeap {\n\n pub heap: ID3D12DescriptorHeap,\n\n pub start_cpu_handle: D3D12_CPU_DESCRIPTOR_HANDLE,\n\n pub start_gpu_handle: D3D12_GPU_DESCRIPTOR_HANDLE,\n\n pub increment: usize,\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 56, "score": 26914.182921660995 }, { "content": "pub trait DepthStencilViewDesc {\n\n fn tex2d(format: DXGI_FORMAT, mip_slice: u32) -> Self;\n\n}\n\n\n\nimpl DepthStencilViewDesc for D3D12_DEPTH_STENCIL_VIEW_DESC {\n\n fn tex2d(format: DXGI_FORMAT, mip_slice: u32) -> D3D12_DEPTH_STENCIL_VIEW_DESC {\n\n D3D12_DEPTH_STENCIL_VIEW_DESC {\n\n Format: format,\n\n ViewDimension: D3D12_DSV_DIMENSION_TEXTURE2D,\n\n Anonymous: D3D12_DEPTH_STENCIL_VIEW_DESC_0 {\n\n Texture2D: D3D12_TEX2D_DSV {\n\n MipSlice: mip_slice,\n\n },\n\n },\n\n Flags: D3D12_DSV_FLAG_NONE,\n\n }\n\n }\n\n}\n\n\n\npub struct CbvSrvUavDescriptorHeap {\n", "file_path": "d3dx12/src/descriptor_heaps.rs", "rank": 57, "score": 26914.182921660995 }, { "content": "pub fn copy_data_file(source_path: &str) {\n\n println!(\"!cargo:rerun-if-changed={}\", source_path);\n\n\n\n let out_dir = var(\"OUT_DIR\").unwrap();\n\n let out_dir = Path::new(&out_dir);\n\n let dest_filename = Path::new(source_path).file_name().expect(\"dest_filename\");\n\n let dest = out_dir.ancestors().nth(3).expect(\"dest directory\").join(dest_filename);\n\n\n\n println!(\"dest: {}\", dest.to_str().expect(\"to str\"));\n\n copy(source_path, dest).expect(\"Copy\");\n\n}\n", "file_path": "d3dx12/src/build.rs", "rank": 58, "score": 25996.386535233036 }, { "content": "pub fn build_command_line() -> SampleCommandLine {\n\n let mut use_warp_device = false;\n\n\n\n for arg in std::env::args() {\n\n if arg.eq_ignore_ascii_case(\"-warp\") || arg.eq_ignore_ascii_case(\"/warp\") {\n\n use_warp_device = true;\n\n }\n\n }\n\n\n\n SampleCommandLine { use_warp_device }\n\n}\n\n\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 60, "score": 25363.979306637146 }, { "content": "fn get_hardware_adapter(factory: &IDXGIFactory4) -> Result<IDXGIAdapter1> {\n\n for i in 0.. {\n\n let adapter = unsafe { factory.EnumAdapters1(i) }?;\n\n let desc = unsafe { adapter.GetDesc1() }?;\n\n\n\n if (DXGI_ADAPTER_FLAG::from(desc.Flags) & DXGI_ADAPTER_FLAG_SOFTWARE)\n\n != DXGI_ADAPTER_FLAG_NONE\n\n {\n\n // Don't select the Basic Render Driver adapter. 
If you want a\n\n // software adapter, pass in \"/warp\" on the command line.\n\n continue;\n\n }\n\n\n\n // We need the variant where we pass in NULL for the outparam.\n\n #[link(name = \"d3d12\")]\n\n extern \"system\" {\n\n pub fn D3D12CreateDevice(\n\n padapter: RawPtr,\n\n minimumfeaturelevel: D3D_FEATURE_LEVEL,\n\n riid: *const GUID,\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 61, "score": 23794.65853515298 }, { "content": "fn sample_wndproc<S: DXSample>(sample: &mut S, message: u32, wparam: WPARAM) -> bool {\n\n match message {\n\n WM_KEYDOWN => {\n\n sample.on_key_down(wparam as u16);\n\n true\n\n }\n\n WM_KEYUP => {\n\n sample.on_key_up(wparam as u16);\n\n true\n\n }\n\n WM_PAINT => {\n\n sample.update();\n\n sample.render();\n\n true\n\n }\n\n _ => false,\n\n }\n\n}\n\n\n\n#[allow(non_snake_case)]\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 62, "score": 21174.446724693786 }, { "content": "pub fn create_device(command_line: &SampleCommandLine) -> Result<(IDXGIFactory4, ID3D12Device)> {\n\n if cfg!(debug_assertions) {\n\n unsafe {\n\n let mut debug: Option<ID3D12Debug> = None;\n\n if let Some(debug) = D3D12GetDebugInterface(&mut debug).ok().and(debug) {\n\n debug.EnableDebugLayer();\n\n }\n\n }\n\n }\n\n\n\n let dxgi_factory_flags = if cfg!(debug_assertions) {\n\n DXGI_CREATE_FACTORY_DEBUG\n\n } else {\n\n 0\n\n };\n\n\n\n let dxgi_factory: IDXGIFactory4 = unsafe { CreateDXGIFactory2(dxgi_factory_flags) }?;\n\n\n\n let adapter = if command_line.use_warp_device {\n\n unsafe { dxgi_factory.EnumWarpAdapter() }\n", "file_path": "samples/dxsample/src/lib.rs", "rank": 63, "score": 21174.446724693786 }, { "content": "use d3dx12::*;\n\nuse dxsample::*;\n\nuse std::convert::TryInto;\n\nuse std::mem::transmute;\n\nuse windows::{\n\n core::*,\n\n Win32::{\n\n Foundation::*,\n\n Graphics::{\n\n Direct3D::{*, Fxc::*},\n\n Direct3D12::*,\n\n Dxgi::Common::*,\n\n Dxgi::*,\n\n },\n\n },\n\n};\n\n\n\nmod d3d12_hello_texture {\n\n\n\n use super::*;\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-texture/src/main.rs", "rank": 64, "score": 27.300890151272487 }, { "content": "use d3dx12::*;\n\nuse dxsample::*;\n\nuse windows::{\n\n core::*,\n\n Win32::{\n\n Foundation::*,\n\n Graphics::{\n\n Direct3D::{Fxc::*, *},\n\n Direct3D12::*,\n\n Dxgi::{Common::*, *},\n\n },\n\n },\n\n};\n\n\n\nmod d3d12_hello_triangle {\n\n use std::convert::TryInto;\n\n\n\n use super::*;\n\n\n\n const FRAME_COUNT: usize = 2;\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-triangle/src/main.rs", "rank": 65, "score": 26.793330510097366 }, { "content": "use d3dx12::*;\n\nuse dxsample::*;\n\nuse windows::{\n\n core::*,\n\n Win32::{\n\n Foundation::*,\n\n Graphics::{Direct3D::Fxc::*, Direct3D::*, Direct3D12::*, Dxgi::Common::*, Dxgi::*},\n\n },\n\n};\n\n\n\nmod d3d12_hello_frame_buffering {\n\n use std::convert::TryInto;\n\n\n\n use super::*;\n\n\n\n const FRAME_COUNT: usize = 2;\n\n\n\n pub struct Sample {\n\n dxgi_factory: IDXGIFactory4,\n\n device: ID3D12Device,\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-frame-buffering/src/main.rs", "rank": 66, "score": 25.02066726918158 }, { "content": "use d3dx12::*;\n\nuse dxsample::*;\n\nuse std::convert::TryInto;\n\nuse windows::{\n\n core::*,\n\n Win32::{\n\n Foundation::*,\n\n Graphics::{Direct3D::Fxc::*, Direct3D::*, Direct3D12::*, Dxgi::Common::*, Dxgi::*},\n\n },\n\n};\n\n\n\nextern crate static_assertions as sa;\n\n\n\nmod d3d12_hello_constbuffers {\n\n\n\n use std::intrinsics::transmute;\n\n\n\n use super::*;\n\n\n\n const 
FRAME_COUNT: usize = 2;\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-constbuffers/src/main.rs", "rank": 67, "score": 24.88753777926192 }, { "content": "use array_init::try_array_init;\n\nuse async_std::task;\n\nuse cgmath::{point3, vec3, vec4, Deg, Matrix4, Point3, SquareMatrix, Vector3, Vector4, Zero};\n\nuse d3dx12::*;\n\nuse dxsample::*;\n\nuse static_assertions::const_assert_eq;\n\nuse std::{intrinsics::transmute, sync::Arc};\n\nuse windows::{\n\n core::*,\n\n Win32::{\n\n Foundation::{HWND, RECT},\n\n Graphics::{Direct3D12::*, Dxgi::Common::*, Dxgi::*},\n\n },\n\n};\n\n\n\nuse crate::State;\n\n\n", "file_path": "samples/d3d12-multithreading/src/rendering.rs", "rank": 68, "score": 20.851309572759174 }, { "content": "use array_init::{array_init, try_array_init};\n\nuse d3dx12::*;\n\nuse dxsample::SynchronizedCommandQueue;\n\nuse std::{fs::File, intrinsics::transmute, os::windows::prelude::FileExt};\n\nuse windows::{\n\n core::*,\n\n Win32::{\n\n Foundation::{PSTR, RECT},\n\n Graphics::{\n\n Direct3D::{\n\n Fxc::{\n\n D3DCompileFromFile, D3DCOMPILE_DEBUG, D3DCOMPILE_OPTIMIZATION_LEVEL3,\n\n D3DCOMPILE_SKIP_OPTIMIZATION,\n\n },\n\n ID3DBlob, D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST,\n\n },\n\n Direct3D12::*,\n\n Dxgi::Common::*,\n\n },\n\n },\n", "file_path": "samples/d3d12-multithreading/src/rendering/squidroom.rs", "rank": 69, "score": 19.02767720160364 }, { "content": "//*********************************************************\n\n//\n\n// Copyright (c) Microsoft. All rights reserved.\n\n// This code is licensed under the MIT License (MIT).\n\n// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF\n\n// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY\n\n// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR\n\n// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.\n\n//\n\n//*********************************************************\n\n\n\nuse camera::{Camera, ViewAndProjectionMatrices};\n\nuse cgmath::{point3, vec3, Deg, InnerSpace, Matrix3, Rad, Transform};\n\nuse dxsample::{run_sample, DXSample, SampleCommandLine};\n\nuse rendering::*;\n\nuse timer::Timer;\n\nuse windows::core::*;\n\nuse windows::Win32::UI::Input::KeyboardAndMouse::*;\n\nuse windows::Win32::{\n\n Foundation::HWND,\n", "file_path": "samples/d3d12-multithreading/src/main.rs", "rank": 70, "score": 16.16031952587745 }, { "content": " Graphics::Dxgi::{\n\n DXGIDeclareAdapterRemovalSupport, DXGI_ERROR_DEVICE_REMOVED, DXGI_ERROR_DEVICE_RESET,\n\n },\n\n};\n\n\n\nmod camera;\n\nmod rendering;\n\nmod timer;\n\n\n\n#[derive(Default)]\n", "file_path": "samples/d3d12-multithreading/src/main.rs", "rank": 71, "score": 8.93934110031605 }, { "content": "use d3dx12::build::*;\n\n\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-constbuffers/build.rs", "rank": 72, "score": 8.14867029733741 }, { "content": "use d3dx12::build::*;\n\n\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-frame-buffering/build.rs", "rank": 73, "score": 8.14867029733741 }, { "content": "use d3dx12::build::copy_data_file;\n\n\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-triangle/build.rs", "rank": 74, "score": 7.737329040530879 }, { "content": "use d3dx12::build::copy_data_file;\n\n\n", "file_path": "samples/d3d12-multithreading/build.rs", "rank": 75, "score": 7.737329040530879 }, { "content": "use d3dx12::build::copy_data_file;\n\n\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-texture/build.rs", "rank": 76, "score": 7.737329040530879 }, { "content": " fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()> {\n\n let mut 
command_queue =\n\n SynchronizedCommandQueue::new(&self.device, D3D12_COMMAND_LIST_TYPE_DIRECT)?;\n\n\n\n let (width, height) = self.window_size();\n\n\n\n let swap_chain_desc = DXGI_SWAP_CHAIN_DESC1 {\n\n BufferCount: FRAME_COUNT as u32,\n\n Width: width as u32,\n\n Height: height as u32,\n\n Format: DXGI_FORMAT_R8G8B8A8_UNORM,\n\n BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT,\n\n SwapEffect: DXGI_SWAP_EFFECT_FLIP_DISCARD,\n\n SampleDesc: DXGI_SAMPLE_DESC {\n\n Count: 1,\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n };\n\n\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-texture/src/main.rs", "rank": 77, "score": 6.580341108316487 }, { "content": " }\n\n\n\n impl DXSample for Sample {\n\n fn new(command_line: &SampleCommandLine) -> Result<Self> {\n\n let (dxgi_factory, device) = create_device(command_line)?;\n\n\n\n Ok(Sample {\n\n dxgi_factory,\n\n device,\n\n resources: None,\n\n })\n\n }\n\n\n\n fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()> {\n\n let command_queue =\n\n SynchronizedCommandQueue::new(&self.device, D3D12_COMMAND_LIST_TYPE_DIRECT)?;\n\n\n\n let (width, height) = self.window_size();\n\n\n\n let swap_chain_desc = DXGI_SWAP_CHAIN_DESC1 {\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-constbuffers/src/main.rs", "rank": 78, "score": 6.570958868186224 }, { "content": " { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 14155776, 524288, 2048 }, } }, // marbel drum texture_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 14680064, 524288, 2048 }, } }, // marbel drum texture _Nrml_1024.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 15204352, 131072, 1024 }, } }, // Catwalk_02_Diffuse512.dds\n\n { 1, 1, 1, DXGI_FORMAT_R8G8B8A8_UNORM, { { 15335424, 4, 4 }, } }, // default-normalmap.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 15335428, 131072, 1024 }, } }, // Catwalk_03_Diffuse_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 15466500, 131072, 1024 }, } }, // shelves3_diff_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 15597572, 131072, 1024 }, } }, // shelves3_nor_512.dds\n\n { 2048, 2048, 1, DXGI_FORMAT_BC1_UNORM, { { 15728644, 2097152, 4096 }, } }, // Misc_Boss_3_2048.dds\n\n { 2048, 2048, 1, DXGI_FORMAT_BC1_UNORM, { { 17825796, 2097152, 4096 }, } }, // Misc_Boss_3_normal2048R.dds\n\n { 1, 1, 1, DXGI_FORMAT_R8G8B8A8_UNORM, { { 19922948, 4, 4 }, } }, // default.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 19922952, 131072, 1024 }, } }, // Hanghing Light_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 20054024, 131072, 1024 }, } }, // Hanghing Light_normal_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 20185096, 131072, 1024 }, } }, // Hanging_bundle_normal_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 20316168, 131072, 1024 }, } }, // Hanging_bundle_marble_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 20447240, 131072, 1024 }, } }, // Hanging_bundle_marble_normal_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 20578312, 131072, 1024 }, } }, // window_Diff512.dds\n\n { 512, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 20709384, 262144, 1024 }, } }, // Sliding Steel Door_Diff_512.dds\n\n { 512, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 20971528, 262144, 1024 }, } }, // Sliding Steel Door_Norm_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 21233672, 131072, 1024 }, } }, // window_Norm512.dds\n\n { 512, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 21364744, 262144, 1024 }, } }, // Door_Diff_512.dds\n", "file_path": "samples/d3d12-multithreading/src/rendering/squidroom.rs", "rank": 79, "score": 
6.360275302505146 }, { "content": " vbv: D3D12_VERTEX_BUFFER_VIEW,\n\n }\n\n\n\n impl DXSample for Sample {\n\n fn new(command_line: &SampleCommandLine) -> Result<Self> {\n\n let (dxgi_factory, device) = create_device(command_line)?;\n\n\n\n Ok(Sample {\n\n dxgi_factory,\n\n device,\n\n resources: None,\n\n })\n\n }\n\n\n\n fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()> {\n\n let command_queue =\n\n SynchronizedCommandQueue::new(&self.device, D3D12_COMMAND_LIST_TYPE_DIRECT)?;\n\n\n\n let (width, height) = self.window_size();\n\n\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-triangle/src/main.rs", "rank": 80, "score": 6.076535684249012 }, { "content": " fence_value: u64,\n\n command_allocator: ID3D12CommandAllocator,\n\n }\n\n\n\n impl DXSample for Sample {\n\n fn new(command_line: &SampleCommandLine) -> Result<Self> {\n\n let (dxgi_factory, device) = create_device(command_line)?;\n\n\n\n Ok(Sample {\n\n dxgi_factory,\n\n device,\n\n resources: None,\n\n })\n\n }\n\n\n\n fn bind_to_window(&mut self, hwnd: &HWND) -> Result<()> {\n\n let command_queue =\n\n SynchronizedCommandQueue::new(&self.device, D3D12_COMMAND_LIST_TYPE_DIRECT)?;\n\n\n\n let (width, height) = self.window_size();\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-frame-buffering/src/main.rs", "rank": 81, "score": 5.996333273053222 }, { "content": " let swap_chain: IDXGISwapChain3 = unsafe {\n\n self.dxgi_factory.CreateSwapChainForHwnd(\n\n &command_queue.queue,\n\n hwnd,\n\n &swap_chain_desc,\n\n std::ptr::null(),\n\n None,\n\n )\n\n }?\n\n .cast()?;\n\n\n\n // This sample does not support fullscreen transitions\n\n unsafe {\n\n self.dxgi_factory\n\n .MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER)\n\n }?;\n\n\n\n let frame_index = unsafe { swap_chain.GetCurrentBackBufferIndex() }\n\n .try_into()\n\n .unwrap();\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-texture/src/main.rs", "rank": 82, "score": 5.817607634003104 }, { "content": " std::ptr::null(),\n\n None,\n\n )\n\n }?\n\n .cast()?;\n\n\n\n // This sample does not support fullscreen transitions\n\n unsafe {\n\n self.dxgi_factory\n\n .MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER)\n\n }?;\n\n\n\n let frame_index = unsafe { swap_chain.GetCurrentBackBufferIndex() }\n\n .try_into()\n\n .unwrap();\n\n\n\n let rtv_heap = RtvDescriptorHeap::new(&self.device, FRAME_COUNT)?;\n\n\n\n let frames = array_init::try_array_init(|i| -> Result<FrameData> {\n\n let render_target: ID3D12Resource = unsafe { swap_chain.GetBuffer(i as u32) }?;\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-frame-buffering/src/main.rs", "rank": 83, "score": 4.973789415764408 }, { "content": " )?\n\n }\n\n .cast()?;\n\n\n\n // This sample does not support fullscreen transitions\n\n unsafe {\n\n self.dxgi_factory\n\n .MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER)\n\n }?;\n\n\n\n let frame_index = unsafe { swap_chain.GetCurrentBackBufferIndex() }\n\n .try_into()\n\n .unwrap();\n\n\n\n let rtv_heap = RtvDescriptorHeap::new(&self.device, FRAME_COUNT)?;\n\n\n\n let render_targets: [ID3D12Resource; FRAME_COUNT] =\n\n array_init::try_array_init(|i: usize| -> Result<ID3D12Resource> {\n\n let render_target: ID3D12Resource = unsafe { swap_chain.GetBuffer(i as u32) }?;\n\n unsafe {\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-constbuffers/src/main.rs", "rank": 84, "score": 4.864591515739399 }, { "content": " None,\n\n )\n\n }?\n\n .cast()?;\n\n\n\n // This sample does not support fullscreen transitions\n\n unsafe {\n\n self.dxgi_factory\n\n 
.MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER)\n\n }?;\n\n\n\n let frame_index = unsafe { swap_chain.GetCurrentBackBufferIndex() }\n\n .try_into()\n\n .unwrap();\n\n\n\n let rtv_heap = RtvDescriptorHeap::new(&self.device, FRAME_COUNT)?;\n\n\n\n let render_targets: [ID3D12Resource; FRAME_COUNT] =\n\n array_init::try_array_init(|i: usize| -> Result<ID3D12Resource> {\n\n let render_target: ID3D12Resource = unsafe { swap_chain.GetBuffer(i as u32) }?;\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-triangle/src/main.rs", "rank": 85, "score": 4.864591515739399 }, { "content": " { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 7602176, 524288, 2048 }, } }, // Back_Alley_box _norm_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 8126464, 524288, 2048 }, } }, // gameCrates_01_Diff_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 8650752, 524288, 2048 }, } }, // gameCrates_01_Nor_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 9175040, 524288, 2048 }, } }, // RaceCar_Strorage_Diff512.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 9699328, 524288, 2048 }, } }, // RaceCar_Strorage_Norm512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 10223616, 131072, 1024 }, } }, // hats_02_diff_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 10354688, 131072, 1024 }, } }, // hats_02_norm_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 10485760, 131072, 1024 }, } }, // hats_01_diff_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 10616832, 131072, 1024 }, } }, // hats_01_norm_512.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 10747904, 524288, 2048 }, } }, // Misc_Boss_1_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 11272192, 524288, 2048 }, } }, // Misc_Boss_1_normal_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 11796480, 524288, 2048 }, } }, // gameCrates_03_Diff_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 12320768, 524288, 2048 }, } }, // gameCrates_03_Nor_1024.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 12845056, 131072, 1024 }, } }, // gameCrates_02_Diff_512.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 12976128, 524288, 2048 }, } }, // Back_Alley_Drum.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 13500416, 131072, 1024 }, } }, // Back_Alley_Drum _norm_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 13631488, 131072, 1024 }, } }, // shelves2_diff_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 13762560, 131072, 1024 }, } }, // shelves2_nor_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 13893632, 131072, 1024 }, } }, // shelves2_diff2_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 14024704, 131072, 1024 }, } }, // shelves2_nm2_512.dds\n", "file_path": "samples/d3d12-multithreading/src/rendering/squidroom.rs", "rank": 86, "score": 4.5779053846283375 }, { "content": " { 512, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 21626888, 262144, 1024 }, } }, // Door_Norm_512.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 21889032, 524288, 2048 }, } }, // floor_Diff_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 22413320, 524288, 2048 }, } }, // floor_Normal_1024.dds\n\n { 2048, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 22937608, 1048576, 4096 }, } }, // wall03_Diff_2048.dds\n\n { 2048, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 23986184, 1048576, 4096 }, } }, // wall03_Normal_2048.dds\n\n { 2048, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 25034760, 1048576, 4096 }, } }, // wall01_Diff_2048.dds\n\n { 2048, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 26083336, 1048576, 4096 }, } }, // 
wall01_Normal_2048.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 27131912, 524288, 2048 }, } }, // Roof_Diff1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 27656200, 524288, 2048 }, } }, // Roof_Normal1024.dds\n\n { 512, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 28180488, 262144, 1024 }, } }, // pillar_Diff_512.dds\n\n { 512, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 28442632, 262144, 1024 }, } }, // pillar_Norm_512.dds\n\n { 512, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 28704776, 262144, 1024 }, } }, // Broken_Pillar_Diff_512.dds\n\n { 512, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 28966920, 262144, 1024 }, } }, // Broken_Pillar_Norm_512.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 29229064, 524288, 2048 }, } }, // Golfclub_dm_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 29753352, 524288, 2048 }, } },// Golfclub_nm_1024.dds\n\n};\n\n\n\nconst DRAWS: [DrawParameters; 1025] = draws_array! {\n\n { 0, 1, -1, 0, 15198, 0 }, // subset0_squard_room_platform_3_dif1\n\n { 2, 3, -1, 15198, 438, 6051 }, // subset0_squard_room_platform_2_dif\n", "file_path": "samples/d3d12-multithreading/src/rendering/squidroom.rs", "rank": 87, "score": 4.510333981747334 }, { "content": "const TEXTURES: [TextureResource; 74] = textures_array! {\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 0, 131072, 1024 }, } }, // squard room platform_3_diff_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 131072, 131072, 1024 }, } }, // squard room platform_3_norm_512.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 262144, 524288, 2048 }, } }, // squard room platform_2_diff_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 786432, 524288, 2048 }, } }, // squard room platform_2_norm_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 1310720, 524288, 2048 }, } }, // squard room platform_1_diff_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 1835008, 524288, 2048 }, } }, // squard room platform_1_norm_1024.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 2359296, 131072, 1024 }, } }, // shelves2_diff1_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 2490368, 131072, 1024 }, } }, // shelves2_nm1_512.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 2621440, 524288, 2048 }, } }, // Misc_Boss_2 1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 3145728, 524288, 2048 }, } }, // Misc_Boss_2_normal1024.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 3670016, 131072, 1024 }, } }, // Hanging_bundle_512.dds\n\n { 512, 512, 1, DXGI_FORMAT_BC1_UNORM, { { 3801088, 131072, 1024 }, } }, // Catwalk_03_Normal_512.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 3932160, 524288, 2048 }, } }, // Stack_ Boxes_Diff02_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 4456448, 524288, 2048 }, } }, // Stack_ Boxes_Nm02_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 4980736, 524288, 2048 }, } }, // Stack_ Boxes_Diff03_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 5505024, 524288, 2048 }, } }, // Stack_ Boxes_Nm03_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 6029312, 524288, 2048 }, } }, // Stack_ Boxes_Diff01_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 6553600, 524288, 2048 }, } }, // Stack_ Boxes_Nm01_1024.dds\n\n { 1024, 1024, 1, DXGI_FORMAT_BC1_UNORM, { { 7077888, 524288, 2048 }, } }, // Back_Alley_box_1024.dds\n", "file_path": "samples/d3d12-multithreading/src/rendering/squidroom.rs", "rank": 88, "score": 4.509486819296537 }, { "content": "\n\n wait_for_previous_frame(resources);\n\n }\n\n\n\n fn title(&self) -> String {\n\n \"D3D12 Hello 
Triangle\".into()\n\n }\n\n\n\n fn window_size(&self) -> (i32, i32) {\n\n (1280, 720)\n\n }\n\n }\n\n\n\n fn populate_command_list(resources: &Resources) -> Result<()> {\n\n // Command list allocators can only be reset when the associated\n\n // command lists have finished execution on the GPU; apps should use\n\n // fences to determine GPU execution progress.\n\n unsafe { resources.command_allocator.Reset() }?;\n\n\n\n let command_list = &resources.command_list;\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-triangle/src/main.rs", "rank": 89, "score": 4.490580295193784 }, { "content": "\n\n // Present the frame.\n\n unsafe { resources.swap_chain.Present(1, 0) }.ok().unwrap();\n\n\n\n move_to_next_frame(resources);\n\n }\n\n\n\n fn title(&self) -> String {\n\n \"D3D12 Hello Frame Buffering\".into()\n\n }\n\n\n\n fn window_size(&self) -> (i32, i32) {\n\n (1280, 720)\n\n }\n\n }\n\n\n\n fn populate_command_list(resources: &Resources) -> Result<()> {\n\n // Command list allocators can only be reset when the associated\n\n // command lists have finished execution on the GPU; apps should use\n\n // fences to determine GPU execution progress.\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-frame-buffering/src/main.rs", "rank": 90, "score": 4.490580295193784 }, { "content": "\n\n let swap_chain_desc = DXGI_SWAP_CHAIN_DESC1 {\n\n Width: width as u32,\n\n Height: height as u32,\n\n Format: DXGI_FORMAT_R8G8B8A8_UNORM,\n\n SampleDesc: DXGI_SAMPLE_DESC {\n\n Count: 1,\n\n ..Default::default()\n\n },\n\n BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT,\n\n BufferCount: FRAME_COUNT as u32,\n\n SwapEffect: DXGI_SWAP_EFFECT_FLIP_DISCARD,\n\n ..Default::default()\n\n };\n\n\n\n let swap_chain: IDXGISwapChain3 = unsafe {\n\n self.dxgi_factory.CreateSwapChainForHwnd(\n\n &command_queue.queue,\n\n hwnd,\n\n &swap_chain_desc,\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-frame-buffering/src/main.rs", "rank": 91, "score": 4.130752372171318 }, { "content": " let swap_chain_desc = DXGI_SWAP_CHAIN_DESC1 {\n\n Width: width as u32,\n\n Height: height as u32,\n\n Format: DXGI_FORMAT_R8G8B8A8_UNORM,\n\n SampleDesc: DXGI_SAMPLE_DESC {\n\n Count: 1,\n\n ..Default::default()\n\n },\n\n BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT,\n\n BufferCount: FRAME_COUNT as u32,\n\n SwapEffect: DXGI_SWAP_EFFECT_FLIP_DISCARD,\n\n ..Default::default()\n\n };\n\n\n\n let swap_chain: IDXGISwapChain3 = unsafe {\n\n self.dxgi_factory.CreateSwapChainForHwnd(\n\n &command_queue.queue,\n\n hwnd,\n\n &swap_chain_desc,\n\n std::ptr::null(),\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-triangle/src/main.rs", "rank": 92, "score": 4.0933056096641 }, { "content": " Width: width as u32,\n\n Height: height as u32,\n\n Format: DXGI_FORMAT_R8G8B8A8_UNORM,\n\n SampleDesc: DXGI_SAMPLE_DESC {\n\n Count: 1,\n\n ..Default::default()\n\n },\n\n BufferUsage: DXGI_USAGE_RENDER_TARGET_OUTPUT,\n\n BufferCount: FRAME_COUNT as u32,\n\n SwapEffect: DXGI_SWAP_EFFECT_FLIP_DISCARD,\n\n ..Default::default()\n\n };\n\n\n\n let swap_chain: IDXGISwapChain3 = unsafe {\n\n self.dxgi_factory.CreateSwapChainForHwnd(\n\n &command_queue.queue,\n\n hwnd,\n\n &swap_chain_desc,\n\n std::ptr::null(),\n\n None,\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-constbuffers/src/main.rs", "rank": 93, "score": 3.977138972290163 }, { "content": " window_size.0 as f32,\n\n window_size.1 as f32,\n\n );\n\n\n\n *light = LightState {\n\n position,\n\n view,\n\n projection,\n\n ..*light\n\n };\n\n\n\n *camera = Camera { eye, at, up };\n\n }\n\n }\n\n 
}\n\n\n\n fn render(&mut self) {\n\n let renderer = match &mut self.renderer {\n\n Some(it) => it,\n\n _ => return,\n", "file_path": "samples/d3d12-multithreading/src/main.rs", "rank": 94, "score": 3.9225250069828195 }, { "content": " },\n\n InputLayout: input_layout,\n\n PrimitiveTopologyType: D3D12_PRIMITIVE_TOPOLOGY_TYPE_TRIANGLE,\n\n NumRenderTargets: 1,\n\n RTVFormats: array_init(|i| {\n\n if i == 0 {\n\n DXGI_FORMAT_R8G8B8A8_UNORM\n\n } else {\n\n DXGI_FORMAT_UNKNOWN\n\n }\n\n }),\n\n DSVFormat: DXGI_FORMAT_D32_FLOAT,\n\n SampleDesc: DXGI_SAMPLE_DESC {\n\n Count: 1,\n\n Quality: 0,\n\n },\n\n ..Default::default()\n\n };\n\n\n\n let pso = unsafe { device.CreateGraphicsPipelineState(&pso_desc) }?;\n", "file_path": "samples/d3d12-multithreading/src/rendering/squidroom.rs", "rank": 95, "score": 3.9139474000090737 }, { "content": "\n\n fn window_size(&self) -> (i32, i32) {\n\n (1280, 720)\n\n }\n\n }\n\n\n\n fn populate_command_list(resources: &Resources) -> Result<()> {\n\n // Command list allocators can only be reset when the associated\n\n // command lists have finished execution on the GPU; apps should use\n\n // fences to determine GPU execution progress.\n\n unsafe { resources.command_allocator.Reset() }?;\n\n\n\n let command_list = &resources.command_list;\n\n\n\n // However, when ExecuteCommandList() is called on a particular\n\n // command list, that command list can then be reset at any time and\n\n // must be before re-recording.\n\n unsafe { command_list.Reset(&resources.command_allocator, &resources.pso) }?;\n\n\n\n // Set necessary state.\n", "file_path": "samples/d3d12-hello-world/d3d12-hello-constbuffers/src/main.rs", "rank": 96, "score": 3.81377844211258 }, { "content": " SemanticIndex: $semantic_index,\n\n Format: $format,\n\n InputSlot: $slot,\n\n AlignedByteOffset: $offset,\n\n InputSlotClass: $class,\n\n InstanceDataStepRate: $rate\n\n },\n\n )* ]\n\n };\n\n}\n\n\n\nconst STANDARD_VERTEX_DESCRIPTION: [D3D12_INPUT_ELEMENT_DESC; 4] = input_element_desc! 
{\n\n { \"POSITION\", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA, 0 },\n\n { \"NORMAL\", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA, 0 },\n\n { \"TEXCOORD\", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA, 0 },\n\n { \"TANGENT\", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 32, D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA, 0 }\n\n};\n\n\n\nconst STANDARD_VERTEX_STRIDE: u32 = 44;\n\n\n\nconst STANDARD_INDEX_FORMAT: DXGI_FORMAT = DXGI_FORMAT_R32_UINT;\n\n\n", "file_path": "samples/d3d12-multithreading/src/rendering/squidroom.rs", "rank": 97, "score": 3.773988313615859 }, { "content": "\n\n let swap_chain: IDXGISwapChain3 = unsafe {\n\n factory.CreateSwapChainForHwnd(command_queue, hwnd, &desc, std::ptr::null(), None)\n\n }?\n\n .cast()?;\n\n\n\n unsafe { factory.MakeWindowAssociation(hwnd, DXGI_MWA_NO_ALT_ENTER) }?;\n\n\n\n Ok(swap_chain)\n\n}\n\n\n\nimpl Frames {\n\n fn new(\n\n device: &ID3D12Device,\n\n swap_chain: IDXGISwapChain3,\n\n rtv_descriptor_heap: &RtvDescriptorHeap,\n\n dsv_descriptor_heap: &DsvDescriptorHeap,\n\n gpu_descriptor_heap: &CbvSrvUavDescriptorHeap,\n\n resources: Arc<Resources>,\n\n ) -> Result<Frames> {\n", "file_path": "samples/d3d12-multithreading/src/rendering.rs", "rank": 98, "score": 3.7107913945346525 }, { "content": "\n\n // Alter the description and create the PSO for rendering the shadow map.\n\n // The shadow map does not use a pixel shader or render targets.\n\n let pso_shadow_desc = D3D12_GRAPHICS_PIPELINE_STATE_DESC {\n\n PS: Default::default(),\n\n NumRenderTargets: 0,\n\n RTVFormats: [DXGI_FORMAT_UNKNOWN; 8],\n\n ..pso_desc\n\n };\n\n\n\n let pso_shadow = unsafe { device.CreateGraphicsPipelineState(&pso_shadow_desc) }?;\n\n\n\n Ok((pso, pso_shadow))\n\n}\n\n\n\nmacro_rules! input_element_desc {\n\n { $( { $name:literal, $semantic_index:expr, $format:expr, $slot:expr, $offset:expr, $class:expr, $rate:expr } ),* }\n\n => { [\n\n $( D3D12_INPUT_ELEMENT_DESC{\n\n SemanticName: PSTR(concat!($name, \"\\0\").as_ptr() as _),\n", "file_path": "samples/d3d12-multithreading/src/rendering/squidroom.rs", "rank": 99, "score": 3.342685599281362 } ]
Rust
src/device/vga/crt.rs
shift-crops/x64emu
18f661a9a64bfbfce76c15dc7039abee73e4e128
use packed_struct::prelude::*;

#[derive(Debug, Default)]
pub(super) struct CRT {
    pub ccir: CRTCtrlIndex,
    htr: u8,
    pub hdeer: u8,
    hbsr: u8,
    hber: HorBlnkEnd,
    hssr: u8,
    hser: HorSyncEnd,
    vtr: u8,
    ofr: Overflow,
    prsr: PresetRowScan,
    mslr: MaxScanLine,
    tcsr: TextCurStart,
    tcer: TextCurEnd,
    sahr: u8,
    salr: u8,
    tclhr: u8,
    tcllr: u8,
    vssr: u8,
    vser: VertSyncEnd,
    pub vdeer: u8,
    or: u8,
    ulr: UnderLocate,
    vbsr: u8,
    vber: u8,
    cmr: CRTMode,
    lcr: u8,
    pub latch: u8,
}

impl CRT {
    pub fn get(&self) -> u8 {
        match self.ccir.idx {
            0x00 => self.htr,
            0x01 => self.hdeer,
            0x02 => self.hbsr,
            0x03 => self.hber.pack().unwrap()[0],
            0x04 => self.hssr,
            0x05 => self.hser.pack().unwrap()[0],
            0x06 => self.vtr,
            0x07 => self.ofr.pack().unwrap()[0],
            0x08 => self.prsr.pack().unwrap()[0],
            0x09 => self.mslr.pack().unwrap()[0],
            0x0a => self.tcsr.pack().unwrap()[0],
            0x0b => self.tcer.pack().unwrap()[0],
            0x0c => self.sahr,
            0x0d => self.salr,
            0x0e => self.tclhr,
            0x0f => self.tcllr,
            0x10 => self.vssr,
            0x11 => self.vser.pack().unwrap()[0],
            0x12 => self.vdeer,
            0x13 => self.or,
            0x14 => self.ulr.pack().unwrap()[0],
            0x15 => self.vbsr,
            0x16 => self.vber,
            0x17 => self.cmr.pack().unwrap()[0],
            0x18 => self.lcr,
            0x22 => self.latch,
            _ => 0,
        }
    }

    pub fn set(&mut self, v: u8) -> () {
        let data = &[v];
        match self.ccir.idx {
            0x00 => self.htr = v,
            0x01 => self.hdeer = v,
            0x02 => self.hbsr = v,
            0x03 => self.hber = HorBlnkEnd::unpack(data).unwrap(),
            0x04 => self.hssr = v,
            0x05 => self.hser = HorSyncEnd::unpack(data).unwrap(),
            0x06 => self.vtr = v,
            0x07 => self.ofr = Overflow::unpack(data).unwrap(),
            0x08 => self.prsr = PresetRowScan::unpack(data).unwrap(),
            0x09 => self.mslr = MaxScanLine::unpack(data).unwrap(),
            0x0a => self.tcsr = TextCurStart::unpack(data).unwrap(),
            0x0b => self.tcer = TextCurEnd::unpack(data).unwrap(),
            0x0c => self.sahr = v,
            0x0d => self.salr = v,
            0x0e => self.tclhr = v,
            0x0f => self.tcllr = v,
            0x10 => self.vssr = v,
            0x11 => self.vser = VertSyncEnd::unpack(data).unwrap(),
            0x12 => self.vdeer = v,
            0x13 => self.or = v,
            0x14 => self.ulr = UnderLocate::unpack(data).unwrap(),
            0x15 => self.vbsr = v,
            0x16 => self.vber = v,
            0x17 => self.cmr = CRTMode::unpack(data).unwrap(),
            0x18 => self.lcr = v,
            _ => {},
        }
    }

    pub fn get_windowsize(&self) -> (u32, u32) {
        (8 * self.hdeer as u32, 8 * self.vdeer as u32)
    }

    pub fn char_height(&self) -> u8 {
        self.mslr.scan_count + 1
    }

    pub fn pixel_to_pos(&self, pxl: u32) -> (u32, u32) {
        let (x_size, _) = self.get_windowsize();
        (pxl % x_size, pxl / x_size)
    }

    pub fn pos_to_chridx(&self, x: u32, y: u32) -> u16 {
        (y/self.char_height() as u32 * self.hdeer as u32 + x/8) as u16
    }

    pub fn get_cursor(&self, idx: u16) -> Option<(std::ops::RangeInclusive<u8>, u8)> {
        let loc = ((self.tclhr as u16) << 8) + self.tcllr as u16;
        if !self.tcsr.cur_off && idx == loc {
            let (start, end) = (self.tcsr.cur_srt, self.tcer.cur_end);
            if start > end { None } else { Some((start..=end, self.tcer.cur_skew)) }
        } else {
            None
        }
    }
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct CRTCtrlIndex {
    #[packed_field(bits="0:6")] idx: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct HorBlnkEnd {
    #[packed_field(bits="0:4")] bl_end: u8,
    #[packed_field(bits="5:6")] skew_ctrl: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct HorSyncEnd {
    #[packed_field(bits="0:4")] end: u8,
    #[packed_field(bits="5:6")] delay: u8,
    #[packed_field(bits="7")] bl_end: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct Overflow {
    #[packed_field(bits="0")] vt_total8: u8,
    #[packed_field(bits="1")] vt_disp_ena8: u8,
    #[packed_field(bits="2")] vt_sync_str8: u8,
    #[packed_field(bits="3")] vt_bl_str8: u8,
    #[packed_field(bits="4")] line_cmp8: u8,
    #[packed_field(bits="5")] vt_total9: u8,
    #[packed_field(bits="6")] vt_disp_ena9: u8,
    #[packed_field(bits="7")] vt_sync_str9: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct PresetRowScan {
    #[packed_field(bits="0:4")] scan_count: u8,
    #[packed_field(bits="5:6")] byte_pan: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct MaxScanLine {
    #[packed_field(bits="0:4")] scan_count: u8,
    #[packed_field(bits="5")] vt_bl_str9: u8,
    #[packed_field(bits="6")] line_cmp9: u8,
    #[packed_field(bits="7")] dbl_scan: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct TextCurStart {
    #[packed_field(bits="0:4")] cur_srt: u8,
    #[packed_field(bits="5")] cur_off: bool,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct TextCurEnd {
    #[packed_field(bits="0:4")] cur_end: u8,
    #[packed_field(bits="5:6")] cur_skew: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct VertSyncEnd {
    #[packed_field(bits="0:3")] end: u8,
    #[packed_field(bits="4")] int_clr: u8,
    #[packed_field(bits="5")] int_ena: u8,
    #[packed_field(bits="7")] prot_reg: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct UnderLocate {
    #[packed_field(bits="0:4")] location: u8,
    #[packed_field(bits="5")] count: u8,
    #[packed_field(bits="6")] dword: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct CRTMode {
    #[packed_field(bits="0")] compat: u8,
    #[packed_field(bits="1")] row_ctrl: u8,
    #[packed_field(bits="2")] hor_sel: u8,
    #[packed_field(bits="3")] count: u8,
    #[packed_field(bits="5")] addr_wrap: u8,
    #[packed_field(bits="6")] wb_mode: u8,
    #[packed_field(bits="7")] ctrl_reset: u8,
}
use packed_struct::prelude::*;

#[derive(Debug, Default)]
pub(super) struct CRT {
    pub ccir: CRTCtrlIndex,
    htr: u8,
    pub hdeer: u8,
    hbsr: u8,
    hber: HorBlnkEnd,
    hssr: u8,
    hser: HorSyncEnd,
    vtr: u8,
    ofr: Overflow,
    prsr: PresetRowScan,
    mslr: MaxScanLine,
    tcsr: TextCurStart,
    tcer: TextCurEnd,
    sahr: u8,
    salr: u8,
    tclhr: u8,
    tcllr: u8,
    vssr: u8,
    vser: VertSyncEnd,
    pub vdeer: u8,
    or: u8,
    ulr: UnderLocate,
    vbsr: u8,
    vber: u8,
    cmr: CRTMode,
    lcr: u8,
    pub latch: u8,
}

impl CRT {
    pub fn get(&self) -> u8 {
        match self.ccir.idx {
            0x00 => self.htr,
            0x01 => self.hdeer,
            0x02 => self.hbsr,
            0x03 => self.hber.pack().unwrap()[0],
            0x04 => self.hssr,
            0x05 => self.hser.pack().unwrap()[0],
            0x06 => self.vtr,
            0x07 => self.ofr.pack().unwrap()[0],
            0x08 => self.prsr.pack().unwrap()[0],
            0x09 => self.mslr.pack().unwrap()[0],
            0x0a => self.tcsr.pack().unwrap()[0],
            0x0b => self.tcer.pack().unwrap()[0],
            0x0c => self.sahr,
            0x0d => self.salr,
            0x0e => self.tclhr,
            0x0f => self.tcllr,
            0x10 => self.vssr,
            0x11 => self.vser.pack().unwrap()[0],
            0x12 => self.vdeer,
            0x13 => self.or,
            0x14 => self.ulr.pack().unwrap()[0],
            0x15 => self.vbsr,
            0x16 => self.vber,
            0x17 => self.cmr.pack().unwrap()[0],
            0x18 => self.lcr,
            0x22 => self.latch,
            _ => 0,
        }
    }

    pub fn set(&mut self, v: u8) -> () {
        let data = &[v];
        match self.ccir.idx {
            0x00 => self.htr = v,
            0x01 => self.hdeer = v,
            0x02 => self.hbsr = v,
            0x03 => self.hber = HorBlnkEnd::unpack(data).unwrap(),
            0x04 => self.hssr = v,
            0x05 => self.hser = HorSyncEnd::unpack(data).unwrap(),
            0x06 => self.vtr = v,
            0x07 => self.ofr = Overflow::unpack(data).unwrap(),
            0x08 => self.prsr = PresetRowScan::unpack(data).unwrap(),
            0x09 => self.mslr = MaxScanLine::unpack(data).unwrap(),
            0x0a => self.tcsr = TextCurStart::unpack(data).unwrap(),
            0x0b => self.tcer = TextCurEnd::unpack(data).unwrap(),
            0x0c => self.sahr = v,
            0x0d => self.salr = v,
            0x0e => self.tclhr = v,
            0x0f => self.tcllr = v,
            0x10 => self.vssr = v,
            0x11 => self.vser = VertSyncEnd::unpack(data).unwrap(),
            0x12 => self.vdeer = v,
            0x13 => self.or = v,
            0x14 => self.ulr = UnderLocate::unpack(data).unwrap(),
            0x15 => self.vbsr = v,
            0x16 => self.vber = v,
            0x17 => self.cmr = CRTMode::unpack(data).unwrap(),
            0x18 => self.lcr = v,
            _ => {},
        }
    }

    pub fn get_windowsize(&self) -> (u32, u32) {
        (8 * self.hdeer as u32, 8 * self.vdeer as u32)
    }

    pub fn char_height(&self) -> u8 {
        self.mslr.scan_count + 1
    }

    pub fn pixel_to_pos(&self, pxl: u32) -> (u32, u32) {
        let (x_size, _) = self.get_windowsize();
        (pxl % x_size, pxl / x_size)
    }

    pub fn pos_to_chridx(&self, x: u32, y: u32) -> u16 {
        (y/self.char_height() as u32 * self.hdeer as u32 + x/8) as u16
    }

    pub fn get_cursor(&self, idx: u16) -> Option<(std::ops::RangeInclusive<u8>, u8)> {
        let loc = ((self.tclhr as u16) << 8) + self.tcllr as u16;
        if !self.tcsr.cur_off && idx == loc {
            let (start, end) = (self.tcsr.cur_srt, self.tcer.cur_end);
            if start > end { None } else { Some((start..=end, self.tcer.cur_skew)) }
        } else {
            None
        }
    }
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct CRTCtrlIndex {
    #[packed_field(bits="0:6")] idx: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct HorBlnkEnd {
    #[packed_field(bits="0:4")] bl_end: u8,
    #[packed_field(bits="5:6")] skew_ctrl: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct HorSyncEnd {
    #[packed_field(bits="0:4")] end: u8,
    #[packed_field(bits="5:6")] delay: u8,
    #[packed_field(bits="7")] bl_end: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct Overflow {
    #[packed_field(bits="0")] vt_total8: u8,
    #[packed_field(bits="1")] vt_disp_ena8: u8,
    #[packed_field(bits="2")] vt_sync_str8: u8,
    #[packed_field(bits="3")] vt_bl_str8: u8,
    #[packed_field(bits="4")] line_cmp8: u8,
    #[packed_field(bits="5")] vt_total9: u8,
    #[packed_field(bits="6")] vt_disp_ena9: u8,
    #[packed_field(bits="7")] vt_sync_str9: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct PresetRowScan {
    #[packed_field(bits="0:4")] scan_count: u8,
    #[packed_field(bits="5:6")] byte_pan: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct MaxScanLine {
    #[packed_field(bits="0:4")] scan_count: u8,
    #[packed_field(bits="5")] vt_bl_str9: u8,
    #[packed_field(bits="6")] line_cmp9:
: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct UnderLocate {
    #[packed_field(bits="0:4")] location: u8,
    #[packed_field(bits="5")] count: u8,
    #[packed_field(bits="6")] dword: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct CRTMode {
    #[packed_field(bits="0")] compat: u8,
    #[packed_field(bits="1")] row_ctrl: u8,
    #[packed_field(bits="2")] hor_sel: u8,
    #[packed_field(bits="3")] count: u8,
    #[packed_field(bits="5")] addr_wrap: u8,
    #[packed_field(bits="6")] wb_mode: u8,
    #[packed_field(bits="7")] ctrl_reset: u8,
}
 u8,
    #[packed_field(bits="7")] dbl_scan: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct TextCurStart {
    #[packed_field(bits="0:4")] cur_srt: u8,
    #[packed_field(bits="5")] cur_off: bool,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct TextCurEnd {
    #[packed_field(bits="0:4")] cur_end: u8,
    #[packed_field(bits="5:6")] cur_skew: u8,
}

#[derive(Debug, Default, PackedStruct)]
#[packed_struct(bit_numbering="lsb0", size_bytes="1")]
pub struct VertSyncEnd {
    #[packed_field(bits="0:3")] end: u8,
    #[packed_field(bits="4")] int_clr: u8,
    #[packed_field(bits="5")] int_ena: u8,
    #[packed_field(bits="7")] prot_reg
random
[ { "content": "pub fn wait_for_tcp(port: u16) -> DynResult<TcpStream> {\n\n let sockaddr = format!(\"127.0.0.1:{}\", port);\n\n eprintln!(\"Waiting for a GDB connection on {:?}...\", sockaddr);\n\n\n\n let sock = TcpListener::bind(sockaddr)?;\n\n let (stream, addr) = sock.accept()?;\n\n eprintln!(\"Debugger connected from {}\", addr);\n\n\n\n Ok(stream)\n\n}\n\n\n\nimpl Target for emulator::Emulator {\n\n type Arch = arch::x86::X86_64_SSE;\n\n type Error = &'static str;\n\n\n\n fn base_ops(&mut self) -> target::ext::base::BaseOps<Self::Arch, Self::Error> {\n\n target::ext::base::BaseOps::SingleThread(self)\n\n }\n\n}\n\n\n", "file_path": "src/interface/gdbserver_64.rs", "rank": 0, "score": 145325.2486727186 }, { "content": "pub fn wait_for_tcp(port: u16) -> DynResult<TcpStream> {\n\n let sockaddr = format!(\"127.0.0.1:{}\", port);\n\n eprintln!(\"Waiting for a GDB connection on {:?}...\", sockaddr);\n\n\n\n let sock = TcpListener::bind(sockaddr)?;\n\n let (stream, addr) = sock.accept()?;\n\n eprintln!(\"Debugger connected from {}\", addr);\n\n\n\n Ok(stream)\n\n}\n\n\n\nimpl Target for emulator::Emulator {\n\n type Arch = arch::x86::X86_SSE;\n\n type Error = &'static str;\n\n\n\n fn base_ops(&mut self) -> target::ext::base::BaseOps<Self::Arch, Self::Error> {\n\n target::ext::base::BaseOps::SingleThread(self)\n\n }\n\n\n\n fn sw_breakpoint(&mut self) -> Option<target::ext::breakpoints::SwBreakpointOps<Self>> {\n", "file_path": "src/interface/gdbserver.rs", "rank": 1, "score": 145325.2486727186 }, { "content": "fn classify_descriptor(raw: &[u8; 16]) -> Option<DescType> {\n\n let desc = Desc::unpack(&raw).unwrap_or(Default::default());\n\n\n\n if desc.S == 0 { // system\n\n if let Ok(t) = SysTypes::try_from(desc.Type&7) {\n\n let sysdsc = match t {\n\n SysTypes::TSSAvl | SysTypes::TSSBsy => SysDescType::TSS(TSSDesc::unpack(&raw).unwrap()),\n\n SysTypes::LDT => SysDescType::LDT(LDTDesc::unpack(&raw).unwrap()),\n\n SysTypes::Call => SysDescType::Call(CallGateDesc::unpack(&raw).unwrap()),\n\n SysTypes::Task => SysDescType::Task(TaskGateDesc::unpack(&raw).unwrap()),\n\n SysTypes::Intr => SysDescType::Intr(IntrTrapGateDesc::unpack(&raw).unwrap()),\n\n SysTypes::Trap => SysDescType::Trap(IntrTrapGateDesc::unpack(&raw).unwrap()),\n\n };\n\n debug!(\"{:x?}\", sysdsc);\n\n return Some(DescType::System(sysdsc));\n\n }\n\n } else { // segment\n\n let sg = SegDesc::unpack(&raw).unwrap();\n\n let segdsc = if desc.Type & 8 == 0 { SegDescType::Data(sg) } else { SegDescType::Code(sg) };\n\n debug!(\"{:x?}\", segdsc);\n", "file_path": "src/emulator/access/descriptor.rs", "rank": 2, "score": 97653.91901738686 }, { "content": "pub fn init_cmn_opcode(op: &mut super::OpcodeArr){\n\n macro_rules! 
setcmnop {\n\n ($n:expr, $fnc:ident, $flg:expr) => { op[$n & 0x1ff] = OpcodeType{func:$fnc, flag:$flg} }\n\n }\n\n\n\n setcmnop!(0x00, add_rm8_r8, OpFlags::MODRM);\n\n setcmnop!(0x02, add_r8_rm8, OpFlags::MODRM);\n\n setcmnop!(0x04, add_al_imm8, OpFlags::IMM8);\n\n setcmnop!(0x08, or_rm8_r8, OpFlags::MODRM);\n\n setcmnop!(0x0a, or_r8_rm8, OpFlags::MODRM);\n\n setcmnop!(0x0c, or_al_imm8, OpFlags::IMM8);\n\n setcmnop!(0x10, adc_rm8_r8, OpFlags::MODRM);\n\n setcmnop!(0x12, adc_r8_rm8, OpFlags::MODRM);\n\n setcmnop!(0x14, adc_al_imm8, OpFlags::IMM8);\n\n setcmnop!(0x18, sbb_rm8_r8, OpFlags::MODRM);\n\n setcmnop!(0x1a, sbb_r8_rm8, OpFlags::MODRM);\n\n setcmnop!(0x1c, sbb_al_imm8, OpFlags::IMM8);\n\n setcmnop!(0x20, and_rm8_r8, OpFlags::MODRM);\n\n setcmnop!(0x22, and_r8_rm8, OpFlags::MODRM);\n\n setcmnop!(0x24, and_al_imm8, OpFlags::IMM8);\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 3, "score": 92989.82527596476 }, { "content": "pub fn undefined(_exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n Err(EmuException::CPUException(CPUException::UD))\n\n}", "file_path": "src/emulator/instruction/opcode.rs", "rank": 4, "score": 90098.7835329411 }, { "content": "pub fn code_82(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n code_80(exec)\n\n}\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 5, "score": 88674.2295409374 }, { "content": "fn interrupt_vector(ac: &mut Access, ivec: u8, hw: bool) -> Result<(), EmuException> {\n\n let idtr = &ac.core.dtregs.idtr;\n\n\n\n match ac.mode {\n\n CpuMode::Real => {\n\n let ivt_ofs = (ivec as u32) << 2;\n\n\n\n if ivt_ofs > idtr.limit { return Err(EmuException::CPUException(CPUException::GP(None))); }\n\n\n\n let mut ivt: IVT = Default::default();\n\n ac.read_l(&mut ivt as *mut IVT as *mut _, idtr.base + ivt_ofs as u64, std::mem::size_of_val(&ivt))?;\n\n\n\n ac.save_regs(AcsSize::BIT16, None)?;\n\n ac.load_segment(SgReg::CS, ivt.segment)?;\n\n ac.set_ip(ivt.offset as u64)?;\n\n },\n\n CpuMode::Protected | CpuMode::Long => {\n\n let cpl = ac.get_cpl()?;\n\n match ac.obtain_i_desc(ivec)? 
{\n\n Some(DescType::System(SysDescType::Intr(gate))) => {\n", "file_path": "src/emulator/interrupt.rs", "rank": 6, "score": 84109.07497024043 }, { "content": "#[derive(Debug, Default)]\n\nstruct PagingStructIndex {\n\n legacy: bool,\n\n pml5: Option<u64>,\n\n pml4: Option<u64>,\n\n pdpt: Option<u64>,\n\n pd: u64,\n\n pt: u64,\n\n}\n\nimpl From<&LAddrLegacy> for PagingStructIndex {\n\n fn from(l: &LAddrLegacy) -> Self {\n\n Self { legacy: true, pml5: None, pml4: None, pdpt: None, pd: l.pd_ofs as u64, pt: l.pt_ofs as u64 }\n\n }\n\n}\n\nimpl From<&LAddrPAE> for PagingStructIndex {\n\n fn from(l: &LAddrPAE) -> Self {\n\n Self { legacy: false, pml5: None, pml4: None, pdpt: Some(l.pdpt_ofs as u64), pd: l.pd_ofs as u64, pt: l.pt_ofs as u64 }\n\n }\n\n}\n\nimpl From<&LAddrIa32e> for PagingStructIndex {\n\n fn from(l: &LAddrIa32e) -> Self {\n\n Self { legacy: false, pml5: Some(l.pml5_ofs as u64), pml4: Some(l.pml4_ofs as u64), pdpt: Some(l.pdpt_ofs as u64), pd: l.pd_ofs as u64, pt: l.pt_ofs as u64 }\n\n }\n\n}\n\n\n", "file_path": "src/emulator/access/memory.rs", "rank": 7, "score": 73558.02633835677 }, { "content": "#[derive(Debug)]\n\nstruct Args {\n\n input: Vec<String>,\n\n gdbport: Option<u16>,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 68427.29121086792 }, { "content": "struct IORequest {\n\n ty: IOReqType,\n\n rw: IOReqRW,\n\n}\n\n\n\npub struct IOResult {\n\n data: Option<Vec<u8>>,\n\n}\n\n\n", "file_path": "src/device.rs", "rank": 9, "score": 67182.04362222293 }, { "content": "#[derive(Default, Debug)]\n\n#[repr(C)]\n\nstruct TSS16 {\n\n prev_task: u16,\n\n sp0: u16,\n\n ss0: u16,\n\n sp1: u16,\n\n ss1: u16,\n\n sp2: u16,\n\n ss2: u16,\n\n ip: u16,\n\n flags: u16,\n\n ax: u16,\n\n cx: u16,\n\n dx: u16,\n\n bx: u16,\n\n sp: u16,\n\n bp: u16,\n\n si: u16,\n\n di: u16,\n\n es: u16,\n\n cs: u16,\n\n ss: u16,\n\n ds: u16,\n\n ldtr: u16,\n\n}\n\n\n", "file_path": "src/emulator/access/descriptor.rs", "rank": 10, "score": 66015.77220560497 }, { "content": "#[derive(Default, Debug)]\n\n#[repr(C)]\n\nstruct TSS32 {\n\n prev_task: u16,\n\n esp0: u32,\n\n ss0: u16,\n\n esp1: u32,\n\n ss1: u16,\n\n esp2: u32,\n\n ss2: u16,\n\n cr3: u32,\n\n eip: u32,\n\n eflags: u32,\n\n eax: u32,\n\n ecx: u32,\n\n edx: u32,\n\n ebx: u32,\n\n esp: u32,\n\n ebp: u32,\n\n esi: u32,\n\n edi: u32,\n\n es: u16,\n", "file_path": "src/emulator/access/descriptor.rs", "rank": 11, "score": 66015.77220560497 }, { "content": "#[derive(Debug, Default, Clone, Copy)]\n\nstruct PageCache {\n\n RW: bool,\n\n US: bool,\n\n PWT: bool,\n\n PCD: bool,\n\n G: bool,\n\n base: u64,\n\n XD: bool,\n\n}\n\nimpl From<&PDPTE> for PageCache {\n\n fn from(e: &PDPTE) -> Self {\n\n Self { RW: e.RW, US: e.US, PWT: e.PWT, PCD: e.PCD, G: e.G, base: (e.pdt_base as u64) << 12, XD: e.XD, }\n\n }\n\n}\n\nimpl From<&PDE> for PageCache {\n\n fn from(e: &PDE) -> Self {\n\n Self { RW: e.RW, US: e.US, PWT: e.PWT, PCD: e.PCD, G: e.G, base: (e.pt_base as u64) << 12, XD: e.XD, }\n\n }\n\n}\n\nimpl From<&PTE> for PageCache {\n", "file_path": "src/emulator/access/memory.rs", "rank": 12, "score": 64913.40990567657 }, { "content": "fn main() {\n\n let args = parse_args();\n\n\n\n env_logger::init();\n\n\n\n let hw = hardware::Hardware::new(0x400*0x400);\n\n let gui = interface::gui::GUI::new(320, 200);\n\n\n\n let (mut dev, chan_dev) = device::Device::new();\n\n dev.init_devices(chan_dev, hw.mem.clone(), gui.buffer.clone());\n\n\n\n let mut emu = emulator::Emulator::new(hw, dev);\n\n\n\n emu.map_binary(0xffff0, 
include_bytes!(\"bios/crt0.bin\")).expect(\"Failed to map\");\n\n emu.map_binary(0xf0000, include_bytes!(\"bios/bios.bin\")).expect(\"Failed to map\");\n\n\n\n let imgname = if args.input.len() > 0 { args.input[0].clone() } else { \"/tmp/test\".to_string() };\n\n emu.load_binfile(0x7c00, imgname).expect(\"Failed to load binary\");\n\n \n\n std::thread::spawn(move || {\n", "file_path": "src/main.rs", "rank": 13, "score": 64534.80743784274 }, { "content": "struct IOQueue<T> {\n\n que: Mutex<VecDeque<T>>,\n\n cvar: Condvar\n\n}\n\n\n\nimpl<T> IOQueue<T> {\n\n fn new() -> Self {\n\n Self {\n\n que: Mutex::new(VecDeque::new()),\n\n cvar: Condvar::new(),\n\n }\n\n }\n\n\n\n fn enqueue(&self, req: T) -> () {\n\n self.que.lock().unwrap().push_back(req);\n\n }\n\n\n\n fn enqueue_notify(&self, req: T) -> () {\n\n self.que.lock().unwrap().push_back(req);\n\n self.cvar.notify_one();\n", "file_path": "src/device.rs", "rank": 14, "score": 63139.38165415621 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn mem_test(){\n\n let mut mem = Memory::new(0x1000);\n\n\n\n mem.write16(0x100, 0xbabe);\n\n mem.write16(0x102, 0xcafe);\n\n mem.write32(0x104, 0xdeadbeef);\n\n assert_eq!(mem.read64(0x100), 0xdeadbeefcafebabe);\n\n\n\n let mut x = mem.as_mut_ptr(0x200).unwrap() as *mut u32;\n\n unsafe {\n\n *x = 0x55667788;\n\n x = (x as usize + 4) as *mut u32;\n\n *x = 0x11223344;\n\n }\n\n assert_eq!(mem.read64(0x200), 0x1122334455667788);\n\n\n\n mem.write64(0x1100, 0xdeadbeef);\n\n assert_eq!(mem.read64(0x1100), 0x0);\n\n}\n\n\n", "file_path": "src/hardware/memory.rs", "rank": 15, "score": 62178.463283812715 }, { "content": "#[cfg(test)]\n\n#[test]\n\n#[should_panic]\n\nfn mem_test_panic(){\n\n let mem = Memory::new(0x1000);\n\n let mut v = vec![0; 0x20];\n\n\n\n mem.read_data(v.as_mut_ptr() as *mut _, 0xff0, v.len()).unwrap();\n\n}", "file_path": "src/hardware/memory.rs", "rank": 16, "score": 61103.22661565681 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn sgreg_test() {\n\n let mut reg = SgRegisters::new();\n\n\n\n let mut sel = reg.get_mut(SgReg::ES).selector;\n\n sel.from_u16(0x2e);\n\n assert_eq!(sel.IDX, 5);\n\n assert_eq!(sel.TI, 1);\n\n assert_eq!(sel.RPL, 2);\n\n}\n\n\n", "file_path": "src/hardware/processor/segment.rs", "rank": 17, "score": 61103.22661565681 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn exec_test() {\n\n use crate::hardware;\n\n use crate::device;\n\n use crate::emulator::access::register::*;\n\n\n\n let hw = hardware::Hardware::new(0x1000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = super::access::Access::new(hw, dev);\n\n let parse: parse::ParseInstr = Default::default();\n\n\n\n let exe = Exec::new(&mut ac, &parse);\n\n exe.ac.set_gpreg(GpReg64::RSP, 0xf20).unwrap();\n\n exe.ac.push_u64(0xdeadbeef).unwrap();\n\n exe.ac.push_u64(0xcafebabe).unwrap();\n\n assert_eq!(exe.ac.pop_u64().unwrap(), 0xcafebabe);\n\n assert_eq!(exe.ac.pop_u64().unwrap(), 0xdeadbeef);\n\n\n\n let mut x = exe.ac.mem.write().unwrap().as_mut_ptr(0xf20).unwrap() as *mut u64;\n\n unsafe {\n\n *x = 0x11223344;\n\n x = (x as usize + 8) as *mut u64;\n\n *x = 0x55667788;\n\n }\n\n assert_eq!(exe.ac.pop_u64().unwrap(), 0x11223344);\n\n assert_eq!(exe.ac.pop_u64().unwrap(), 0x55667788);\n\n}\n", "file_path": "src/emulator/instruction/exec.rs", "rank": 18, "score": 61103.22661565681 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn laddr_test() {\n\n let laddr_32: u32 = 0x44332211;\n\n let laddr_64: u64 = 0x77665544332211;\n\n\n\n let legacy = LAddrLegacy::unpack(&laddr_32.to_be_bytes()).unwrap();\n\n 
assert_eq!(legacy.p_ofs, 0x211);\n\n assert_eq!(legacy.pt_ofs, 0x332);\n\n assert_eq!(legacy.pd_ofs, 0x110);\n\n\n\n let pae = LAddrPAE::unpack(&laddr_32.to_be_bytes()).unwrap();\n\n assert_eq!(pae.p_ofs, 0x211);\n\n assert_eq!(pae.pt_ofs, 0x132);\n\n assert_eq!(pae.pd_ofs, 0x21);\n\n assert_eq!(pae.pdpt_ofs, 0x1);\n\n\n\n let ia32e = LAddrIa32e::unpack(&laddr_64.to_be_bytes()).unwrap();\n\n assert_eq!(ia32e.p_ofs, 0x211);\n\n assert_eq!(ia32e.pt_ofs, 0x132);\n\n assert_eq!(ia32e.pd_ofs, 0x21);\n\n assert_eq!(ia32e.pdpt_ofs, 0x155);\n\n assert_eq!(ia32e.pml4_ofs, 0xcc);\n\n assert_eq!(ia32e.pml5_ofs, 0x77);\n\n}\n\n\n", "file_path": "src/emulator/access/memory.rs", "rank": 19, "score": 61103.22661565681 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn rflags_test() {\n\n let mut flag: RFlags = Default::default();\n\n\n\n flag.from_u64(0);\n\n assert_eq!(flag.to_u64(), 2);\n\n flag.set_carry(true);\n\n assert_eq!(flag.to_u64(), 3);\n\n}", "file_path": "src/hardware/processor/rflags.rs", "rank": 20, "score": 61103.22661565681 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn gpreg_test() {\n\n let mut reg = GpRegisters::new();\n\n\n\n reg.set64(GpReg64::RAX, 0xdeadbeefcafebabe);\n\n reg.set16(GpReg16::AX, 0x1122);\n\n reg.set8h(GpReg8h::AH, 0x00);\n\n reg.update64(GpReg64::RAX, -0x10);\n\n assert_eq!(reg.get64(GpReg64::RAX), 0xdeadbeefcafe0012);\n\n reg.update32(GpReg32::EAX, 0x10000000);\n\n assert_eq!(reg.get64(GpReg64::RAX), 0xdafe0012);\n\n\n\n reg.set32(GpReg32::EAX, 0x11223344);\n\n\n\n reg.set32(GpReg32::EDI, 0xc0bebeef);\n\n reg.set8l(GpReg8l::DIL, 0xff);\n\n assert_eq!(reg.get64(GpReg64::RDI), 0xc0bebeff);\n\n}\n\n\n", "file_path": "src/hardware/processor/general.rs", "rank": 21, "score": 61103.22661565681 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn cr_test() {\n\n let mut cr: CRegisters = Default::default();\n\n\n\n cr.0.from_u32(0x50001);\n\n assert_eq!(cr.0.PE, 1);\n\n assert_eq!(cr.0.WP, 1);\n\n assert_eq!(cr.0.AM, 1);\n\n assert_eq!(cr.0.PG, 0);\n\n\n\n cr.3.PWT = 1;\n\n cr.3.PageDirBase = 0xdead;\n\n assert_eq!(cr.3.to_u32(), 0xdead008);\n\n}", "file_path": "src/hardware/processor/control.rs", "rank": 22, "score": 61103.22661565681 }, { "content": "pub trait CRAccess {\n\n fn to_u32(&self) -> u32;\n\n fn from_u32(&mut self, v: u32) -> ();\n\n fn to_u64(&self) -> u64 { self.to_u32() as u64 }\n\n fn from_u64(&mut self, v: u64) -> () { self.from_u32(v as u32); }\n\n}\n\n\n\n#[derive(Debug, Default, PackedStruct)]\n\n#[packed_struct(bit_numbering=\"lsb0\", size_bytes=\"4\")]\n\npub struct CR0 {\n\n #[packed_field(bits=\"0\")] pub PE: u8,\n\n #[packed_field(bits=\"1\")] MP: u8,\n\n #[packed_field(bits=\"2\")] EM: u8,\n\n #[packed_field(bits=\"3\")] pub TS: u8,\n\n #[packed_field(bits=\"4\")] ET: u8,\n\n #[packed_field(bits=\"5\")] NE: u8,\n\n #[packed_field(bits=\"16\")] WP: u8,\n\n #[packed_field(bits=\"18\")] AM: u8,\n\n #[packed_field(bits=\"29\")] NW: u8,\n\n #[packed_field(bits=\"30\")] CD: u8,\n", "file_path": "src/hardware/processor/control.rs", "rank": 23, "score": 61055.39751111614 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn msr_test() {\n\n let mut msr: ModelSpecific = Default::default();\n\n\n\n msr.efer.from_u64(0x401);\n\n assert_eq!(msr.efer.LMA, 1);\n\n assert_eq!(msr.efer.LME, 0);\n\n assert_eq!(msr.efer.SCE, 1);\n\n\n\n msr.apic.from_u64(0xdead100);\n\n assert_eq!(msr.apic.BSP, 1);\n\n assert_eq!(msr.apic.G, 0);\n\n assert_eq!(msr.apic.Base, 0xdead);\n\n\n\n\n\n msr.star.from_u64(0x11223344deadbeef);\n\n assert_eq!(msr.star.ip, 0xdeadbeef);\n\n 
assert_eq!(msr.star.cs, 0x3344);\n\n\n\n msr.fmask.from_u64(0xdeadbeef);\n\n assert_eq!(msr.fmask.mask, 0xdeadbeef);\n\n}", "file_path": "src/hardware/processor/model_specific.rs", "rank": 24, "score": 60088.84569873218 }, { "content": "#[cfg(test)]\n\n#[test]\n\n#[should_panic]\n\nfn gpreg_test_panic() {\n\n use std::convert::TryFrom;\n\n let reg = GpRegisters::new();\n\n reg.get64(GpReg64::try_from(20).unwrap());\n\n}", "file_path": "src/hardware/processor/general.rs", "rank": 25, "score": 60088.84569873218 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn page_table_test() {\n\n let paddr: u64 = 0xdeadbee000;\n\n let mut raw = paddr.to_le_bytes();\n\n raw[0] |= 0x01;\n\n raw[1] |= 0x01;\n\n raw[7] |= 0x80;\n\n raw.reverse();\n\n\n\n let pte = PTE::unpack(&raw).unwrap();\n\n assert_eq!(pte.P, true);\n\n assert_eq!(pte.G, true);\n\n assert_eq!(pte.page_base, 0xdeadbee);\n\n assert_eq!(pte.XD, true);\n\n}\n\n\n", "file_path": "src/emulator/access/memory.rs", "rank": 26, "score": 60088.84569873218 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn real_mem_test() {\n\n let hw = hardware::Hardware::new(0x1000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = super::Access::new(hw, dev);\n\n\n\n ac.set_data32((SgReg::DS, 0x10), 0xdeadbeef).unwrap();\n\n assert_eq!(ac.get_data8((SgReg::DS, 0x10)).unwrap(), 0xef);\n\n\n\n ac.set_data32((SgReg::DS, 0x1010), 0xdeadbeef).unwrap();\n\n assert_eq!(ac.get_data8((SgReg::DS, 0x1010)).unwrap(), 0);\n\n}", "file_path": "src/emulator/access/memory.rs", "rank": 27, "score": 60088.84569873218 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn access_msr_test() {\n\n let hw = hardware::Hardware::new(0x1000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = access::Access::new(hw, dev);\n\n\n\n ac.core.msr.efer.LMA = 1;\n\n assert_eq!(ac.read_msr(MSRAddress::IA32_EFER as u32).unwrap(), 0x400);\n\n\n\n ac.write_msr(0xc0000100, 0xdeadbeef).unwrap();\n\n assert_eq!(ac.core.sgregs.get(SgReg::FS).cache.base, 0xdeadbeef);\n\n}\n\n\n", "file_path": "src/emulator/access/msr.rs", "rank": 28, "score": 60088.84569873218 }, { "content": "#[cfg(test)]\n\n#[test]\n\n#[should_panic]\n\nfn sgreg_test_panic() {\n\n use std::convert::TryFrom;\n\n\n\n let reg = SgRegisters::new();\n\n reg.get(SgReg::try_from(10).unwrap());\n\n}", "file_path": "src/hardware/processor/segment.rs", "rank": 29, "score": 60088.84569873218 }, { "content": "pub trait MSRAccess {\n\n fn to_u64(&self) -> u64;\n\n fn from_u64(&mut self, v: u64) -> ();\n\n}\n\n\n\n#[derive(Default, PackedStruct)]\n\n#[packed_struct(bit_numbering=\"lsb0\", size_bytes=\"8\")]\n\npub struct IA32_EFER {\n\n #[packed_field(bits=\"0\")] SCE: u8,\n\n #[packed_field(bits=\"1:7\")] _r01: ReservedZero<packed_bits::Bits7>,\n\n #[packed_field(bits=\"8\")] pub LME: u8,\n\n #[packed_field(bits=\"9\")] _r09: ReservedZero<packed_bits::Bits1>,\n\n #[packed_field(bits=\"10\")] pub LMA: u8,\n\n #[packed_field(bits=\"11\")] pub NXE: u8,\n\n #[packed_field(bits=\"12\")] SVME: u8,\n\n #[packed_field(bits=\"13\")] LMSLE: u8,\n\n #[packed_field(bits=\"14\")] FFXSR: u8,\n\n #[packed_field(bits=\"15\")] TCE: u8,\n\n #[packed_field(bits=\"16:63\")] _r11: ReservedZero<packed_bits::Bits48>,\n\n}\n", "file_path": "src/hardware/processor/model_specific.rs", "rank": 30, "score": 60072.352913839364 }, { "content": "#[derive(Debug, Default, Clone, Copy)]\n\nstruct RGB([Color; 3]);\n\n\n\n#[derive(Debug, Default, Clone, Copy, PackedStruct)]\n\n#[packed_struct(bit_numbering=\"lsb0\", size_bytes=\"1\")]\n\npub struct Color {\n\n 
#[packed_field(bits=\"0:6\")] v: u8,\n\n}\n\n\n", "file_path": "src/device/vga/dac.rs", "rank": 31, "score": 59700.82606581605 }, { "content": "fn parse_args() -> Args {\n\n let args: Vec<String> = env::args().collect();\n\n let program = args[0].clone();\n\n\n\n let mut opts = Options::new();\n\n opts.optopt(\"s\", \"gdb\", \"set gdb tcp port\", \"1234\");\n\n opts.optflag(\"h\", \"help\", \"print this help menu\");\n\n\n\n let matches = opts.parse(&args[1..])\n\n .unwrap_or_else(|f| panic!(\"{}\", f.to_string()));\n\n\n\n if matches.opt_present(\"h\") {\n\n print_usage(&program, &opts);\n\n }\n\n\n\n /*\n\n if matches.free.is_empty() {\n\n print_usage(&program, &opts);\n\n }\n\n */\n\n\n\n Args {\n\n input: matches.free.clone(),\n\n gdbport: matches.opt_get(\"s\").unwrap(),\n\n }\n\n}", "file_path": "src/main.rs", "rank": 32, "score": 59602.293332337416 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn page_walk_ia32e4l_test() {\n\n let hw = hardware::Hardware::new(0x5000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = super::Access::new(hw, dev);\n\n\n\n ac.pgmd = Some(super::PagingMode::Ia32e4Lv);\n\n ac.core.cregs.3.from_u64(0);\n\n {\n\n let mut mem = ac.mem.write().unwrap();\n\n\n\n let mut pdpte: PML4E = Default::default();\n\n pdpte.P = true;\n\n pdpte.pdpt_base = 0x1;\n\n mem.write64(0 + 8*0xff, u64::from_be_bytes(pdpte.pack().unwrap()));\n\n\n\n let mut pdpte: PML4E = Default::default();\n\n pdpte.P = true;\n\n pdpte.pdpt_base = 0x2;\n\n mem.write64(0 + 8*0x100, u64::from_be_bytes(pdpte.pack().unwrap()));\n\n\n", "file_path": "src/emulator/access/memory.rs", "rank": 33, "score": 59130.296284302465 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn page_walk_legacy_test() {\n\n let hw = hardware::Hardware::new(0x2000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = super::Access::new(hw, dev);\n\n\n\n ac.pgmd = Some(super::PagingMode::Legacy);\n\n ac.core.cregs.3.from_u64(0);\n\n ac.core.cregs.4.PSE = 1;\n\n {\n\n let mut mem = ac.mem.write().unwrap();\n\n\n\n let mut pde: PDE = Default::default();\n\n pde.P = true;\n\n pde.pt_base = 0x1;\n\n mem.write32(0 + 4*0x32b, u64::from_be_bytes(pde.pack().unwrap()) as u32);\n\n\n\n let mut pde: PDE = Default::default();\n\n pde.P = true;\n\n pde.PS = true;\n\n pde.pt_base = 0x2fc00;\n", "file_path": "src/emulator/access/memory.rs", "rank": 34, "score": 59130.296284302465 }, { "content": "#[cfg(test)]\n\n#[test]\n\n#[should_panic]\n\nfn page_walk_test_panic() {\n\n let hw = hardware::Hardware::new(0x1000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = super::Access::new(hw, dev);\n\n\n\n ac.pgmd = Some(super::PagingMode::Legacy);\n\n ac.core.cregs.3.from_u64(0);\n\n\n\n ac.trans_l2p(MemAccessMode::Read, 0xdeadbeef).unwrap();\n\n}\n\n\n", "file_path": "src/emulator/access/memory.rs", "rank": 35, "score": 59130.296284302465 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn page_walk_pae_test() {\n\n let hw = hardware::Hardware::new(0x3000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = super::Access::new(hw, dev);\n\n\n\n ac.pgmd = Some(super::PagingMode::LegacyPAE);\n\n ac.core.cregs.3.from_u64(0);\n\n {\n\n let mut mem = ac.mem.write().unwrap();\n\n\n\n let mut pdpte: PDPTE = Default::default();\n\n pdpte.P = true;\n\n pdpte.pdt_base = 0x1;\n\n mem.write64(0 + 8*0x3, u64::from_be_bytes(pdpte.pack().unwrap()));\n\n\n\n let mut pde: PDE = Default::default();\n\n pde.P = true;\n\n pde.pt_base = 0x2;\n\n mem.write64(0x1000 + 8*0x57, u64::from_be_bytes(pde.pack().unwrap()));\n\n\n", "file_path": 
"src/emulator/access/memory.rs", "rank": 36, "score": 59130.296284302465 }, { "content": "#[cfg(test)]\n\n#[test]\n\n#[should_panic]\n\nfn access_msr_test_panic() {\n\n let hw = hardware::Hardware::new(0x1000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = access::Access::new(hw, dev);\n\n\n\n ac.write_msr(0xc0000103, 0xdeadbeef).unwrap();\n\n}", "file_path": "src/emulator/access/msr.rs", "rank": 37, "score": 59130.296284302465 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn page_walk_ia32e5l_test() {\n\n let hw = hardware::Hardware::new(0x6000);\n\n let (dev, _) = device::Device::new();\n\n let mut ac = super::Access::new(hw, dev);\n\n\n\n ac.pgmd = Some(super::PagingMode::Ia32e5Lv);\n\n ac.core.cregs.3.from_u64(0);\n\n {\n\n let mut mem = ac.mem.write().unwrap();\n\n\n\n let mut pdpte: PML5E = Default::default();\n\n pdpte.P = true;\n\n pdpte.pml4_base = 0x1;\n\n mem.write64(0 + 8*0x100, u64::from_be_bytes(pdpte.pack().unwrap()));\n\n\n\n let mut pdpte: PML4E = Default::default();\n\n pdpte.P = true;\n\n pdpte.pdpt_base = 0x2;\n\n mem.write64(0x1000 + 8*0xff, u64::from_be_bytes(pdpte.pack().unwrap()));\n\n\n", "file_path": "src/emulator/access/memory.rs", "rank": 38, "score": 59130.296284302465 }, { "content": "enum IOReqType { PortIO(u16), MemIO(u64) }\n", "file_path": "src/device.rs", "rank": 39, "score": 51624.93993098142 }, { "content": "fn print_usage(program: &str, opts: &Options) {\n\n let brief = format!(\"Usage: {} IMGFILE [options]\", program);\n\n print!(\"{}\", opts.usage(&brief));\n\n process::exit(0);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 40, "score": 51239.944427761526 }, { "content": "enum IOReqRW { Read(usize), Write(Vec<u8>) }\n\n\n", "file_path": "src/device.rs", "rank": 41, "score": 51093.41141836026 }, { "content": "type PortIOMap<'a> = Vec<(Range<u16>, &'a mut dyn PortIO)>;\n", "file_path": "src/device.rs", "rank": 42, "score": 46185.74938618261 }, { "content": "fn code_80(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n match exec.idata.modrm.reg as u8 {\n\n 0 => add_rm8_imm8(exec)?,\n\n 1 => or_rm8_imm8(exec)?,\n\n 2 => adc_rm8_imm8(exec)?,\n\n 3 => sbb_rm8_imm8(exec)?,\n\n 4 => and_rm8_imm8(exec)?,\n\n 5 => sub_rm8_imm8(exec)?,\n\n 6 => xor_rm8_imm8(exec)?,\n\n 7 => cmp_rm8_imm8(exec)?,\n\n _ => { return Err(EmuException::UnexpectedError); },\n\n }\n\n Ok(())\n\n}\n\n\n\nadd_dst_src!(8, rm8, imm8);\n\nor_dst_src!(8, rm8, imm8);\n\nadc_dst_src!(8, rm8, imm8);\n\nsbb_dst_src!(8, rm8, imm8);\n\nand_dst_src!(8, rm8, imm8);\n\nsub_dst_src!(8, rm8, imm8);\n\nxor_dst_src!(8, rm8, imm8);\n\ncmp_dst_src!(8, rm8, imm8);\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 43, "score": 44796.59674748475 }, { "content": "fn code_fe(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n match exec.idata.modrm.reg as u8 {\n\n 0 => inc_rm8(exec)?,\n\n 1 => dec_rm8(exec)?,\n\n _ => { return Err(EmuException::UnexpectedError); },\n\n }\n\n Ok(())\n\n}\n\n\n\ninc_dst!(rm8);\n\ndec_dst!(rm8);\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 44, "score": 44093.30585009551 }, { "content": "fn code_c0(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n match exec.idata.modrm.reg as u8 {\n\n /*\n\n 0 => rol_rm8_imm8(exec)?,\n\n 1 => ror_rm8_imm8(exec)?,\n\n 2 => rcl_rm8_imm8(exec)?,\n\n 3 => rcr_rm8_imm8(exec)?,\n\n */\n\n 4 => shl_rm8_imm8(exec)?,\n\n 5 => shr_rm8_imm8(exec)?,\n\n 6 => sal_rm8_imm8(exec)?,\n\n 7 => sar_rm8_imm8(exec)?,\n\n _ => { return Err(EmuException::UnexpectedError); },\n\n }\n\n 
Ok(())\n\n}\n\n\n\n/*\n\nrol_dst_src!(8, rm8, imm8);\n\nror_dst_src!(8, rm8, imm8);\n\nrcl_dst_src!(8, rm8, imm8);\n\nrcr_dst_src!(8, rm8, imm8);\n\n*/\n\nshl_dst_src!(8, rm8, imm8);\n\nshr_dst_src!(8, rm8, imm8);\n\nsal_dst_src!(8, rm8, imm8);\n\nsar_dst_src!(8, rm8, imm8);\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 45, "score": 44093.30585009551 }, { "content": "fn code_0f00(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n match exec.idata.modrm.reg as u16 {\n\n 2 => lldt_rm16(exec)?,\n\n 3 => ltr_rm16(exec)?,\n\n _ => { return Err(EmuException::NotImplementedOpcode); },\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 46, "score": 44093.30585009551 }, { "content": "fn code_f6(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let back = match exec.idata.modrm.reg as u8 {\n\n 0 => { test_rm8_imm8(exec)?; 0},\n\n 2 => { not_rm8(exec)?; -1},\n\n 3 => { neg_rm8(exec)?; -1},\n\n 4 => { mul_ah_al_rm8(exec)?; -1},\n\n 5 => { imul_ah_al_rm8(exec)?; -1},\n\n 6 => { div_al_ah_rm8(exec)?; -1},\n\n 7 => { idiv_al_ah_rm8(exec)?; -1},\n\n _ => { return Err(EmuException::UnexpectedError); },\n\n };\n\n exec.ac.update_ip(back)\n\n}\n\n\n\ntest_dst_src!(8, rm8, imm8);\n\nnot_dst!(8, rm8);\n\nneg_dst!(8, rm8);\n\nmul_high_low_src!(8, ah, al, rm8);\n\nimul_high_low_src!(8, ah, al, rm8);\n\ndiv_quot_rem_src!(8, al, ah, rm8);\n\nidiv_quot_rem_src!(8, al, ah, rm8);\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 47, "score": 44093.30585009551 }, { "content": "fn lldt_rm16(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n if exec.ac.test_cpumode(access::CpuMode::Real) {\n\n return Err(EmuException::CPUException(CPUException::UD));\n\n }\n\n\n\n let sel = exec.get_rm16()?;\n\n exec.ac.set_ldtr(sel)\n\n}\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 48, "score": 44093.30585009551 }, { "content": "fn ltr_rm16(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n if exec.ac.test_cpumode(access::CpuMode::Real) {\n\n return Err(EmuException::CPUException(CPUException::UD));\n\n }\n\n\n\n let sel = exec.get_rm16()?;\n\n exec.ac.set_tr(sel)\n\n}\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 49, "score": 44093.30585009551 }, { "content": "fn code_d2(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n match exec.idata.modrm.reg as u8 {\n\n /*\n\n 0 => rol_rm8_cl(exec)?,\n\n 1 => ror_rm8_cl(exec)?,\n\n 2 => rcl_rm8_cl(exec)?,\n\n 3 => rcr_rm8_cl(exec)?,\n\n */\n\n 4 => shl_rm8_cl(exec)?,\n\n 5 => shr_rm8_cl(exec)?,\n\n 6 => sal_rm8_cl(exec)?,\n\n 7 => sar_rm8_cl(exec)?,\n\n _ => { return Err(EmuException::UnexpectedError); },\n\n }\n\n Ok(())\n\n}\n\n\n\n/*\n\nrol_dst_src!(8, rm8, cl);\n\nror_dst_src!(8, rm8, cl);\n\nrcl_dst_src!(8, rm8, cl);\n\nrcr_dst_src!(8, rm8, cl);\n\n*/\n\nshl_dst_src!(8, rm8, cl);\n\nshr_dst_src!(8, rm8, cl);\n\nsal_dst_src!(8, rm8, cl);\n\nsar_dst_src!(8, rm8, cl);\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 50, "score": 44093.30585009551 }, { "content": "fn nop(_exec: &mut exec::Exec) -> Result<(), EmuException> { Ok(()) }\n\n\n\nmov_dst_src!(8, al, moffs8);\n\nmov_dst_src!(8, moffs8, al);\n\n\n\ntest_dst_src!(8, al, imm8);\n\n\n\nmov_dst_src!(8, opr8, imm8);\n\n\n\nmov_dst_src!(8, rm8, imm8);\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 51, "score": 43142.861360820505 }, { "content": "fn rdmsr(exec: &mut exec::Exec) -> Result<(), EmuException> { 
exec.msr_to_reg() }\n\n\n\nsetcc_dst!(8, o, rm8);\n\nsetcc_dst!(8, b, rm8);\n\nsetcc_dst!(8, z, rm8);\n\nsetcc_dst!(8, be, rm8);\n\nsetcc_dst!(8, s, rm8);\n\nsetcc_dst!(8, p, rm8);\n\nsetcc_dst!(8, l, rm8);\n\nsetcc_dst!(8, le, rm8);\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 52, "score": 41831.45343953029 }, { "content": "fn wrmsr(exec: &mut exec::Exec) -> Result<(), EmuException> { exec.msr_from_reg() }\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 53, "score": 41831.45343953029 }, { "content": "fn mov_cr_r32(exec: &mut exec::Exec) -> Result<(), EmuException> { exec.cr_from_reg() }\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 54, "score": 40633.25108363386 }, { "content": "fn mov_r32_cr(exec: &mut exec::Exec) -> Result<(), EmuException> { exec.cr_to_reg() }\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 55, "score": 40633.25108363386 }, { "content": "fn hlt(_exec: &mut exec::Exec) -> Result<(), EmuException> { Err(EmuException::Halt) }\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 56, "score": 39638.544184130296 }, { "content": "fn cli(exec: &mut exec::Exec) -> Result<(), EmuException> { exec.ac.core.rflags.set_interrupt(false); Ok(()) }\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 68, "score": 37437.498580988606 }, { "content": "fn cld(exec: &mut exec::Exec) -> Result<(), EmuException> { exec.ac.core.rflags.set_direction(false); Ok(()) }\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 69, "score": 37437.498580988606 }, { "content": "fn std(exec: &mut exec::Exec) -> Result<(), EmuException> { exec.ac.core.rflags.set_direction(true); Ok(()) }\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 70, "score": 37437.498580988606 }, { "content": "fn sti(exec: &mut exec::Exec) -> Result<(), EmuException> { exec.ac.core.rflags.set_interrupt(true); Ok(()) }\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 71, "score": 37437.498580988606 }, { "content": "fn into(_exec: &mut exec::Exec) -> Result<(), EmuException> { Err(EmuException::CPUException(CPUException::OF)) }\n\n\n\nin_reg_port!(8, al, imm8);\n\nout_port_reg!(8, imm8, al);\n\n\n\njmp_rel!(8, imm8);\n\n\n\nin_reg_port!(8, al, dx);\n\nout_port_reg!(8, dx, al);\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 72, "score": 37230.1712977297 }, { "content": "fn int_imm8(exec: &mut exec::Exec) -> Result<(), EmuException> { Err(EmuException::Interrupt(exec.get_imm8()?)) }\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 73, "score": 36773.2833515557 }, { "content": "fn icebp(_exec: &mut exec::Exec) -> Result<(), EmuException> { Err(EmuException::CPUException(CPUException::DB)) }\n\n\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 74, "score": 36176.14563729434 }, { "content": "fn int3(_exec: &mut exec::Exec) -> Result<(), EmuException> { Err(EmuException::CPUException(CPUException::BP)) }\n", "file_path": "src/emulator/instruction/opcode/common.rs", "rank": 75, "score": 36176.14563729434 }, { "content": " ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<shr_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src: u32 = exec.[<get_ $src>]()? 
as u32;\n\n debug!(\"shr: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_shr(dst, src)?;\n\n exec.[<set_ $dst>](dst.wrapping_shr(src))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! sal_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<sal_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as i!($size);\n\n let src: u32 = exec.[<get_ $src>]()? as u32;\n\n debug!(\"sal: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_shl(dst as u!($size), src)?;\n\n exec.[<set_ $dst>](dst.wrapping_shl(src) as u!($size))\n\n }\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 76, "score": 35849.401107153986 }, { "content": " let src: i!($ssize) = exec.[<get_ $src>]()? as i!($ssize);\n\n debug!(\"movzsx: {:02x}\", src);\n\n exec.[<set_ $dst>](src as u!($dsize))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! shl_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<shl_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src: u32 = exec.[<get_ $src>]()? as u32;\n\n debug!(\"shl: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_shl(dst, src)?;\n\n exec.[<set_ $dst>](dst.wrapping_shl(src))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! shr_dst_src {\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 77, "score": 35848.846589690125 }, { "content": " } };\n\n}\n\n\n\nmacro_rules! sar_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<sar_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as i!($size);\n\n let src: u32 = exec.[<get_ $src>]()? as u32;\n\n debug!(\"sar: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_sar(dst, src)?;\n\n exec.[<set_ $dst>](dst.wrapping_shr(src) as u!($size))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! not_dst {\n\n ( $size:expr, $dst:ident ) => { paste::item! {\n\n fn [<not_ $dst>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let v = exec.[<get_ $dst>]()? as u!($size);\n\n debug!(\"not: {:02x}\", v);\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 78, "score": 35848.78419124861 }, { "content": " let port = exec.[<get_ $port>]()? as u16;\n\n let v = exec.[<get_ $reg>]()?;\n\n debug!(\"out: {:04x}\", port);\n\n exec.ac.[<out_ $size>](port, v)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! call_rel {\n\n ( $size:expr, $rel:ident ) => { paste::item! {\n\n fn [<call_ $rel>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let offs = exec.[<get_ $rel>]()? as i64;\n\n let rip = exec.ac.get_ip()?;\n\n debug!(\"call: 0x{:04x}\", rip as i64 + offs);\n\n exec.ac.[<push_u $size>](rip as u!($size))?;\n\n exec.ac.update_ip(offs)\n\n }\n\n } };\n\n}\n\n\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 79, "score": 35848.02470639233 }, { "content": " }\n\n } };\n\n}\n\n\n\nmacro_rules! callf_abs {\n\n ( $size:expr, $sel:ident, $abs:ident ) => { paste::item! {\n\n fn [<callf_ $sel _ $abs>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let sel: u16 = exec.[<get_ $sel>]()?;\n\n let abs = exec.[<get_ $abs>]()? as u!($size);\n\n debug!(\"callf: {:04x}:{:04x}\", sel, abs);\n\n exec.[<call_far_u $size>](sel, abs)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! pushf {\n\n ( $size:expr ) => { paste::item! {\n\n fn pushf(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let flag = exec.ac.get_rflags()? 
as u!($size);\n\n debug!(\"pushf: {:08x}\", flag);\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 80, "score": 35847.901545647044 }, { "content": " fn iret(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n exec.[<int_ret_u $size>]()\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! in_reg_port {\n\n ( $size:expr, $reg:ident, $port:ident ) => { paste::item! {\n\n fn [<in_ $reg _ $port>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let port = exec.[<get_ $port>]()? as u16;\n\n let v = exec.ac.[<in_ $size>](port)?;\n\n debug!(\"in: {:04x}\", port);\n\n exec.[<set_ $reg>](v)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! out_port_reg {\n\n ( $size:expr, $port:ident, $reg:ident ) => { paste::item! {\n\n fn [<out_ $port _ $reg>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 81, "score": 35847.86845652319 }, { "content": "macro_rules! jmp_rel {\n\n ( $size:expr, $rel:ident ) => { paste::item! {\n\n fn [<jmp_ $rel>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let rel = exec.[<get_ $rel>]()? as i!($size);\n\n debug!(\"jmp: {:04x}\", rel);\n\n exec.ac.update_ip(rel as i64)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! jmpf_abs {\n\n ( $size:expr, $sel:ident, $abs:ident ) => { paste::item! {\n\n fn [<jmpf_ $sel _ $abs>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let sel: u16 = exec.[<get_ $sel>]()?;\n\n let abs = exec.[<get_ $abs>]()? as u!($size);\n\n debug!(\"jmpf: {:04x}:{:04x}\", sel, abs);\n\n exec.[<jmp_far_u $size>](sel, abs)\n\n }\n\n } };\n\n}\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 82, "score": 35847.75924843276 }, { "content": " } };\n\n}\n\n\n\nmacro_rules! sbb_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<sbb_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? as u!($size);\n\n let cf = exec.check_rflags_b()? as u!($size);\n\n\n\n debug!(\"sbb: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_sbb(dst, src, cf)?;\n\n exec.[<set_ $dst>](dst.wrapping_sub(src).wrapping_sub(cf))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! and_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<and_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 83, "score": 35845.40607212905 }, { "content": " debug!(\"imul: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_mul(dst, src)?;\n\n exec.[<set_ $dst>](dst.wrapping_mul(src))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! movzx_dst_src {\n\n ( $dsize:expr, $dst:ident, $ssize:expr, $src:ident ) => { paste::item! {\n\n fn [<movzx_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let src: u!($ssize) = exec.[<get_ $src>]()? as u!($ssize);\n\n debug!(\"movzx: {:02x}\", src);\n\n exec.[<set_ $dst>](src as u!($dsize))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! movsx_dst_src {\n\n ( $dsize:expr, $dst:ident, $ssize:expr, $src:ident ) => { paste::item! {\n\n fn [<movsx_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 84, "score": 35845.40607212905 }, { "content": " }\n\n } };\n\n}\n\n\n\nmacro_rules! mov_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! 
{\n\n fn [<mov_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let src = exec.[<get_ $src>]()? as u!($size);\n\n debug!(\"mov: {:02x}\", src);\n\n exec.[<set_ $dst>](src)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! lea_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<lea_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let src = exec.get_m()?.1 as u!($size);\n\n debug!(\"lea: {:02x}\", src);\n\n exec.[<set_ $dst>](src)\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 85, "score": 35845.40607212905 }, { "content": " exec.ac.[<push_u $size>](flag)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! popf {\n\n ( $size:expr ) => { paste::item! {\n\n fn popf(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let flag = exec.ac.[<pop_u $size>]()?;\n\n debug!(\"popf: {:08x}\", flag);\n\n exec.ac.set_rflags(flag as u64)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! movs_dst_src {\n\n ( $opsize:expr, $adsize:expr ) => { paste::item! {\n\n fn [<movs_m $adsize>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n exec.[<move_str $adsize>]()?;\n\n exec.[<repeat_ $opsize>]()\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 86, "score": 35845.40607212905 }, { "content": " exec.[<set_ $dst>](v.wrapping_sub(1))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! push_src {\n\n ( $size:expr, $src:ident ) => { paste::item! {\n\n fn [<push_ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let v = exec.[<get_ $src>]()? as u!($size);\n\n debug!(\"push: {:02x}\", v);\n\n exec.ac.[<push_u $size>](v)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! pop_dst {\n\n ( $size:expr, $dst:ident ) => { paste::item! {\n\n fn [<pop_ $dst>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let v = exec.ac.[<pop_u $size>]()? as u!($size);\n\n debug!(\"pop: {:02x}\", v);\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 87, "score": 35845.40607212905 }, { "content": "macro_rules! ret {\n\n ( $size:expr ) => { paste::item! {\n\n fn ret(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let ret = exec.ac.[<pop_u $size>]()? as u64;\n\n debug!(\"ret: {:04x}\", ret);\n\n exec.ac.set_ip(ret)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! retf {\n\n ( $size:expr ) => { paste::item! {\n\n fn retf(exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n exec.[<ret_far_u $size>]()\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! iret {\n\n ( $size:expr ) => { paste::item! {\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 88, "score": 35845.40607212905 }, { "content": "}\n\n\n\nmacro_rules! lods_dst_src {\n\n ( $opsize:expr, $adsize:expr ) => { paste::item! {\n\n fn [<lods_m $adsize>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n exec.[<load_str $adsize>]()?;\n\n exec.[<repeat_ $opsize>]()\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! scas_src_dst {\n\n ( $opsize:expr, $adsize:expr ) => { paste::item! {\n\n fn [<scas_m $adsize>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n exec.[<scan_str $adsize>]()?;\n\n exec.[<repeat_ $opsize>]()\n\n }\n\n } };\n\n}\n\n\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 89, "score": 35845.40607212905 }, { "content": " exec.[<set_ $dst>](v)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! imul_dst_src1_src2 {\n\n ( $size:expr, $dst:ident, $src1:ident, $src2:ident ) => { paste::item! {\n\n fn [<imul_ $dst _ $src1 _ $src2>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let src1 = exec.[<get_ $src1>]()? 
as i!($size);\n\n let src2 = exec.[<get_ $src2>]()? as i!($size);\n\n debug!(\"imul: {:02x}, {:02x}\", src1, src2);\n\n exec.update_rflags_imul(src1, src2)?;\n\n exec.[<set_ $dst>](src1.wrapping_mul(src2) as u!($size))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! jcc_rel {\n\n ( $size:expr, $cc:ident, $rel:ident ) => { paste::item! {\n\n fn [<j $cc _ $rel>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 90, "score": 35845.40607212905 }, { "content": "macro_rules! i { ($size:expr) => { paste::item! { [<i $size>] } }}\n\nmacro_rules! u { ($size:expr) => { paste::item! { [<u $size>] } }}\n\n\n\nmacro_rules! add_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<add_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? as u!($size);\n\n\n\n debug!(\"add: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_add(dst, src)?;\n\n exec.[<set_ $dst>](dst.wrapping_add(src))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! or_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<or_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 91, "score": 35845.40607212905 }, { "content": " let src = exec.[<get_ $src>]()? as u!($size);\n\n\n\n debug!(\"or: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_or(dst, src)?;\n\n exec.[<set_ $dst>](dst | src)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! adc_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<adc_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? as u!($size);\n\n let cf = exec.check_rflags_b()? as u!($size);\n\n\n\n debug!(\"adc: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_adc(dst, src, cf)?;\n\n exec.[<set_ $dst>](dst.wrapping_add(src).wrapping_add(cf))\n\n }\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 92, "score": 35845.40607212905 }, { "content": " let src = exec.[<get_ $src>]()? as u!($size);\n\n debug!(\"cmp: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_sub(dst, src)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! inc_dst {\n\n ( $dst:ident ) => { paste::item! {\n\n fn [<inc_ $dst>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let v = exec.[<get_ $dst>]()?;\n\n exec.[<set_ $dst>](v.wrapping_add(1))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! dec_dst {\n\n ( $dst:ident ) => { paste::item! {\n\n fn [<dec_ $dst>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let v = exec.[<get_ $dst>]()?;\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 93, "score": 35845.40607212905 }, { "content": " ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<test_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? as u!($size);\n\n debug!(\"test: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_and(dst, src)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! xchg_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<xchg_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? 
as u!($size);\n\n\n\n debug!(\"xchg: {:02x}, {:02x}\", dst, src);\n\n exec.[<set_ $dst>](src)?;\n\n exec.[<set_ $src>](dst)?;\n\n Ok(())\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 94, "score": 35845.40607212905 }, { "content": "\n\nmacro_rules! setcc_dst {\n\n ( $size:expr, $cc:ident, $dst:ident ) => { paste::item! {\n\n fn [<set $cc _ $dst>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let flag: bool = exec.[<check_rflags_ $cc>]()?;\n\n exec.[<set_ $dst>](flag as u!($size))\n\n }\n\n\n\n fn [<setn $cc _ $dst>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let flag: bool = exec.[<check_rflags_ $cc>]()?;\n\n exec.[<set_ $dst>](!flag as u!($size))\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! imul_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<imul_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? as u!($size);\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 95, "score": 35845.40607212905 }, { "content": " } };\n\n}\n\n\n\nmacro_rules! xor_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<xor_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? as u!($size);\n\n\n\n debug!(\"xor: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_xor(dst, src)?;\n\n exec.[<set_ $dst>](dst ^ src)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! cmp_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! {\n\n fn [<cmp_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 96, "score": 35845.40607212905 }, { "content": " }\n\n } };\n\n}\n\n\n\nmacro_rules! cmps_src_dst {\n\n ( $opsize:expr, $adsize:expr ) => { paste::item! {\n\n fn [<cmps_m $adsize>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n exec.[<cmp_str $adsize>]()?;\n\n exec.[<repeat_ $opsize>]()\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! stos_dst_src {\n\n ( $opsize:expr, $adsize:expr ) => { paste::item! {\n\n fn [<stos_m $adsize>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n exec.[<store_str $adsize>]()?;\n\n exec.[<repeat_ $opsize>]()\n\n }\n\n } };\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 97, "score": 35845.40607212905 }, { "content": " if(exec.[<check_rflags_ $cc>]()?){\n\n let rel = exec.[<get_ $rel>]()? as i!($size);\n\n debug!(\"jmp: {}\", rel);\n\n exec.ac.update_ip(rel as i64)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn [<jn $cc _ $rel>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n if(!exec.[<check_rflags_ $cc>]()?){\n\n let rel = exec.[<get_ $rel>]()? as i!($size);\n\n debug!(\"jmp: {}\", rel);\n\n exec.ac.update_ip(rel as i64)?;\n\n }\n\n Ok(())\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! test_dst_src {\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 98, "score": 35845.40607212905 }, { "content": " let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? as u!($size);\n\n\n\n debug!(\"and: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_and(dst, src)?;\n\n exec.[<set_ $dst>](dst & src)\n\n }\n\n } };\n\n}\n\n\n\nmacro_rules! sub_dst_src {\n\n ( $size:expr, $dst:ident, $src:ident ) => { paste::item! 
{\n\n fn [<sub_ $dst _ $src>](exec: &mut exec::Exec) -> Result<(), EmuException> {\n\n let dst = exec.[<get_ $dst>]()? as u!($size);\n\n let src = exec.[<get_ $src>]()? as u!($size);\n\n\n\n debug!(\"sub: {:02x}, {:02x}\", dst, src);\n\n exec.update_rflags_sub(dst, src)?;\n\n exec.[<set_ $dst>](dst.wrapping_sub(src))\n\n }\n", "file_path": "src/emulator/instruction/opcode/fn_macro.rs", "rank": 99, "score": 35845.40607212905 } ]
Rust
src/photosdir.rs
kaj/rphotos
640dc328eb6338368b66831061530d8c894722f6
use crate::models::Photo;
use crate::myexif::ExifData;
use image::imageops::FilterType;
use image::{self, GenericImageView, ImageError, ImageFormat};
use log::{debug, info, warn};
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::{fs, io};
use tokio::task::{spawn_blocking, JoinError};

pub struct PhotosDir {
    basedir: PathBuf,
}

impl PhotosDir {
    pub fn new(basedir: &Path) -> Self {
        PhotosDir {
            basedir: basedir.into(),
        }
    }

    pub fn get_raw_path(&self, photo: &Photo) -> PathBuf {
        self.basedir.join(&photo.path)
    }

    pub fn has_file<S: AsRef<OsStr> + ?Sized>(&self, path: &S) -> bool {
        self.basedir.join(Path::new(path)).is_file()
    }

    pub fn find_files(
        &self,
        dir: &Path,
        cb: &dyn Fn(&str, &ExifData),
    ) -> io::Result<()> {
        let absdir = self.basedir.join(dir);
        if fs::metadata(&absdir)?.is_dir() {
            debug!("Should look in {:?}", absdir);
            for entry in fs::read_dir(absdir)? {
                let path = entry?.path();
                if fs::metadata(&path)?.is_dir() {
                    self.find_files(&path, cb)?;
                } else if let Some(exif) = load_meta(&path) {
                    cb(self.subpath(&path)?, &exif);
                } else {
                    debug!("{:?} is no pic.", path)
                }
            }
        }
        Ok(())
    }

    fn subpath<'a>(&self, fullpath: &'a Path) -> Result<&'a str, io::Error> {
        let path = fullpath
            .strip_prefix(&self.basedir)
            .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
        path.to_str().ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::InvalidInput,
                format!("Non-utf8 path {:?}", path),
            )
        })
    }
}

fn load_meta(path: &Path) -> Option<ExifData> {
    if let Ok(mut exif) = ExifData::read_from(&path) {
        if exif.width.is_none() || exif.height.is_none() {
            if let Ok((width, height)) = actual_image_size(&path) {
                exif.width = Some(width);
                exif.height = Some(height);
            }
        }
        Some(exif)
    } else if let Ok((width, height)) = actual_image_size(&path) {
        let mut meta = ExifData::default();
        meta.width = Some(width);
        meta.height = Some(height);
        Some(meta)
    } else {
        None
    }
}

fn actual_image_size(path: &Path) -> Result<(u32, u32), ImageError> {
    let image = image::open(&path)?;
    Ok((image.width(), image.height()))
}

#[derive(Debug)]
pub enum ImageLoadFailed {
    File(io::Error),
    Image(image::ImageError),
    Join(JoinError),
}

impl std::error::Error for ImageLoadFailed {}
impl std::fmt::Display for ImageLoadFailed {
    fn fmt(&self, out: &mut std::fmt::Formatter) -> std::fmt::Result {
        match &self {
            ImageLoadFailed::File(e) => e.fmt(out),
            ImageLoadFailed::Image(e) => e.fmt(out),
            ImageLoadFailed::Join(e) => e.fmt(out),
        }
    }
}

impl From<io::Error> for ImageLoadFailed {
    fn from(e: io::Error) -> ImageLoadFailed {
        ImageLoadFailed::File(e)
    }
}
impl From<image::ImageError> for ImageLoadFailed {
    fn from(e: image::ImageError) -> ImageLoadFailed {
        ImageLoadFailed::Image(e)
    }
}
impl From<JoinError> for ImageLoadFailed {
    fn from(e: JoinError) -> ImageLoadFailed {
        ImageLoadFailed::Join(e)
    }
}

pub async fn get_scaled_jpeg(
    path: PathBuf,
    rotation: i16,
    size: u32,
) -> Result<Vec<u8>, ImageLoadFailed> {
    spawn_blocking(move || {
        info!("Should open {:?}", path);
        let img = if is_jpeg(&path) {
            use std::fs::File;
            use std::io::BufReader;
            let file = BufReader::new(File::open(path)?);
            let mut decoder = image::jpeg::JpegDecoder::new(file)?;
            decoder.scale(size as u16, size as u16)?;
            image::DynamicImage::from_decoder(decoder)?
        } else {
            image::open(path)?
        };
        let img = if 3 * size <= img.width() || 3 * size <= img.height() {
            info!("T-nail from {}x{} to {}", img.width(), img.height(), size);
            img.thumbnail(size, size)
        } else if size < img.width() || size < img.height() {
            info!("Scaling from {}x{} to {}", img.width(), img.height(), size);
            img.resize(size, size, FilterType::CatmullRom)
        } else {
            img
        };
        let img = match rotation {
            _x @ 0..=44 | _x @ 315..=360 => img,
            _x @ 45..=134 => img.rotate90(),
            _x @ 135..=224 => img.rotate180(),
            _x @ 225..=314 => img.rotate270(),
            x => {
                warn!("Should rotate photo {} deg, which is unsupported", x);
                img
            }
        };
        let mut buf = Vec::new();
        img.write_to(&mut buf, ImageFormat::Jpeg)?;
        Ok(buf)
    })
    .await?
}

fn is_jpeg(path: &Path) -> bool {
    if let Some(suffix) = path.extension().and_then(|s| s.to_str()) {
        suffix.eq_ignore_ascii_case("jpg") || suffix.eq_ignore_ascii_case("jpeg")
    } else {
        false
    }
}
use crate::models::Photo;
use crate::myexif::ExifData;
use image::imageops::FilterType;
use image::{self, GenericImageView, ImageError, ImageFormat};
use log::{debug, info, warn};
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::{fs, io};
use tokio::task::{spawn_blocking, JoinError};

pub struct PhotosDir {
    basedir: PathBuf,
}

impl PhotosDir {
    pub fn new(basedir: &Path) -> Self {
        PhotosDir {
            basedir: basedir.into(),
        }
    }

    pub fn get_raw_path(&self, photo: &Photo) -> PathBuf {
        self.basedir.join(&photo.path)
    }

    pub fn has_file<S: AsRef<OsStr> + ?Sized>(&self, path: &S) -> bool {
        self.basedir.join(Path::new(path)).is_file()
    }

    pub fn find_files(
        &self,
        dir: &Path,
        cb: &dyn Fn(&str, &ExifData),
    ) -> io::Result<()> {
        let absdir = self.basedir.join(dir);
        if fs::metadata(&absdir)?.is_dir() {
            debug!("Should look in {:?}", absdir);
            for entry in fs::read_dir(absdir)? {
                let path = entry?.path();
                if fs::metadata(&path)?.is_dir() {
                    self.find_files(&path, cb)?;
                } else if let Some(exif) = load_meta(&path) {
                    cb(self.subpath(&path)?, &exif);
                } else {
                    debug!("{:?} is no pic.", path)
                }
            }
        }
        Ok(())
    }

    fn subpath<'a>(&self, fullpath: &'a Path) -> Resu
}

fn load_meta(path: &Path) -> Option<ExifData> {
    if let Ok(mut exif) = ExifData::read_from(&path) {
        if exif.width.is_none() || exif.height.is_none() {
            if let Ok((width, height)) = actual_image_size(&path) {
                exif.width = Some(width);
                exif.height = Some(height);
            }
        }
        Some(exif)
    } else if let Ok((width, height)) = actual_image_size(&path) {
        let mut meta = ExifData::default();
        meta.width = Some(width);
        meta.height = Some(height);
        Some(meta)
    } else {
        None
    }
}

fn actual_image_size(path: &Path) -> Result<(u32, u32), ImageError> {
    let image = image::open(&path)?;
    Ok((image.width(), image.height()))
}

#[derive(Debug)]
pub enum ImageLoadFailed {
    File(io::Error),
    Image(image::ImageError),
    Join(JoinError),
}

impl std::error::Error for ImageLoadFailed {}
impl std::fmt::Display for ImageLoadFailed {
    fn fmt(&self, out: &mut std::fmt::Formatter) -> std::fmt::Result {
        match &self {
            ImageLoadFailed::File(e) => e.fmt(out),
            ImageLoadFailed::Image(e) => e.fmt(out),
            ImageLoadFailed::Join(e) => e.fmt(out),
        }
    }
}

impl From<io::Error> for ImageLoadFailed {
    fn from(e: io::Error) -> ImageLoadFailed {
        ImageLoadFailed::File(e)
    }
}
impl From<image::ImageError> for ImageLoadFailed {
    fn from(e: image::ImageError) -> ImageLoadFailed {
        ImageLoadFailed::Image(e)
    }
}
impl From<JoinError> for ImageLoadFailed {
    fn from(e: JoinError) -> ImageLoadFailed {
        ImageLoadFailed::Join(e)
    }
}

pub async fn get_scaled_jpeg(
    path: PathBuf,
    rotation: i16,
    size: u32,
) -> Result<Vec<u8>, ImageLoadFailed> {
    spawn_blocking(move || {
        info!("Should open {:?}", path);
        let img = if is_jpeg(&path) {
            use std::fs::File;
            use std::io::BufReader;
            let file = BufReader::new(File::open(path)?);
            let mut decoder = image::jpeg::JpegDecoder::new(file)?;
            decoder.scale(size as u16, size as u16)?;
            image::DynamicImage::from_decoder(decoder)?
        } else {
            image::open(path)?
        };
        let img = if 3 * size <= img.width() || 3 * size <= img.height() {
            info!("T-nail from {}x{} to {}", img.width(), img.height(), size);
            img.thumbnail(size, size)
        } else if size < img.width() || size < img.height() {
            info!("Scaling from {}x{} to {}", img.width(), img.height(), size);
            img.resize(size, size, FilterType::CatmullRom)
        } else {
            img
        };
        let img = match rotation {
            _x @ 0..=44 | _x @ 315..=360 => img,
            _x @ 45..=134 => img.rotate90(),
            _x @ 135..=224 => img.rotate180(),
            _x @ 225..=314 => img.rotate270(),
            x => {
                warn!("Should rotate photo {} deg, which is unsupported", x);
                img
            }
        };
        let mut buf = Vec::new();
        img.write_to(&mut buf, ImageFormat::Jpeg)?;
        Ok(buf)
    })
    .await?
}

fn is_jpeg(path: &Path) -> bool {
    if let Some(suffix) = path.extension().and_then(|s| s.to_str()) {
        suffix.eq_ignore_ascii_case("jpg") || suffix.eq_ignore_ascii_case("jpeg")
    } else {
        false
    }
}
lt<&'a str, io::Error> {
        let path = fullpath
            .strip_prefix(&self.basedir)
            .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
        path.to_str().ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::InvalidInput,
                format!("Non-utf8 path {:?}", path),
            )
        })
    }
function_block-function_prefixed
[ { "content": "pub fn to_dir(dir: &str) -> Result<(), Error> {\n\n let dir: &Path = dir.as_ref();\n\n for s in STATICS {\n\n // s.name may contain directory components.\n\n if let Some(parent) = dir.join(s.name).parent() {\n\n create_dir_all(parent)?;\n\n }\n\n File::create(dir.join(s.name)).and_then(|mut f| f.write(s.content))?;\n\n\n\n let limit = s.content.len() - 10; // Compensate a few bytes overhead\n\n let gz = gzipped(s.content)?;\n\n if gz.len() < limit {\n\n File::create(dir.join(format!(\"{}.gz\", s.name)))\n\n .and_then(|mut f| f.write(&gz))?;\n\n }\n\n let br = brcompressed(s.content)?;\n\n if br.len() < limit {\n\n File::create(dir.join(format!(\"{}.br\", s.name)))\n\n .and_then(|mut f| f.write(&br))?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/adm/storestatics.rs", "rank": 2, "score": 161282.01304882905 }, { "content": "pub fn get_positions(photos: &[Photo], c: &PgConnection) -> Vec<(Coord, i32)> {\n\n use crate::schema::positions::dsl::*;\n\n positions\n\n .filter(photo_id.eq_any(photos.iter().map(|p| p.id)))\n\n .select((photo_id, latitude, longitude))\n\n .load(c)\n\n .map_err(|e| warn!(\"Failed to load positions: {}\", e))\n\n .unwrap_or_default()\n\n .into_iter()\n\n .map(|(p_id, lat, long): (i32, i32, i32)| ((lat, long).into(), p_id))\n\n .collect()\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 4, "score": 140767.14676748577 }, { "content": "pub fn person_routes(s: ContextFilter) -> BoxedFilter<(impl Reply,)> {\n\n end()\n\n .and(s.clone())\n\n .and(get())\n\n .map(person_all)\n\n .or(s\n\n .and(param())\n\n .and(end())\n\n .and(get())\n\n .and(query())\n\n .map(person_one))\n\n .boxed()\n\n}\n", "file_path": "src/server/views_by_category.rs", "rank": 5, "score": 137705.3001646678 }, { "content": "pub fn tag_routes(s: ContextFilter) -> BoxedFilter<(impl Reply,)> {\n\n end()\n\n .and(s.clone())\n\n .and(get())\n\n .map(tag_all)\n\n .or(s\n\n .and(param())\n\n .and(end())\n\n .and(get())\n\n .and(query())\n\n .map(tag_one))\n\n .boxed()\n\n}\n\n\n", "file_path": "src/server/views_by_category.rs", "rank": 6, "score": 137705.3001646678 }, { "content": "pub fn place_routes(s: ContextFilter) -> BoxedFilter<(impl Reply,)> {\n\n end()\n\n .and(s.clone())\n\n .and(get())\n\n .map(place_all)\n\n .or(s\n\n .and(param())\n\n .and(end())\n\n .and(get())\n\n .and(query())\n\n .map(place_one))\n\n .boxed()\n\n}\n", "file_path": "src/server/views_by_category.rs", "rank": 7, "score": 137705.3001646678 }, { "content": "pub fn handle_pid_file(pidfile: &str, replace: bool) -> Result<(), String> {\n\n if replace {\n\n if let Some(oldpid) = read_pid_file(pidfile)? 
{\n\n info!(\"Killing old pid {}.\", oldpid);\n\n unsafe {\n\n kill(oldpid, SIGHUP);\n\n }\n\n }\n\n } else if Path::new(pidfile).exists() {\n\n return Err(format!(\"Pid file {:?} exists.\", pidfile));\n\n }\n\n let pid = unsafe { getpid() };\n\n debug!(\"Should write pid {} to {}\", pid, pidfile);\n\n File::create(pidfile)\n\n .and_then(|mut f| writeln!(f, \"{}\", pid))\n\n .map_err(|e| format!(\"Failed to write {}: {}\", pidfile, e))\n\n}\n\n\n", "file_path": "src/pidfiles.rs", "rank": 8, "score": 136737.18842571892 }, { "content": "pub fn routes(s: BoxedFilter<(Context,)>) -> BoxedFilter<(impl Reply,)> {\n\n use warp::filters::method::{get, post};\n\n use warp::path::{end, path};\n\n use warp::{body, query};\n\n let login = path(\"login\")\n\n .and(end())\n\n .and(post())\n\n .and(s.clone())\n\n .and(body::json())\n\n .map(login)\n\n .map(w);\n\n let gimg = end().and(get()).and(s.clone()).and(query()).map(get_img);\n\n let pimg = path(\"makepublic\")\n\n .and(end())\n\n .and(post())\n\n .and(s)\n\n .and(body::json())\n\n .map(make_public);\n\n\n\n login\n", "file_path": "src/server/api.rs", "rank": 9, "score": 136534.268298355 }, { "content": "pub fn routes(s: BoxedFilter<(Context,)>) -> BoxedFilter<(impl Reply,)> {\n\n use warp::{body::form, path, post};\n\n let route = path(\"grade\")\n\n .and(s.clone())\n\n .and(form())\n\n .and_then(set_grade)\n\n .or(path(\"locate\")\n\n .and(s.clone())\n\n .and(form())\n\n .and_then(set_location))\n\n .unify()\n\n .or(path(\"person\")\n\n .and(s.clone())\n\n .and(form())\n\n .and_then(set_person))\n\n .unify()\n\n .or(path(\"rotate\").and(s.clone()).and(form()).map(rotate))\n\n .unify()\n\n .or(path(\"tag\").and(s).and(form()).and_then(set_tag))\n\n .unify();\n\n post().and(route).boxed()\n\n}\n\n\n", "file_path": "src/server/admin.rs", "rank": 10, "score": 136534.268298355 }, { "content": "pub fn routes(s: BoxedFilter<(Context,)>) -> BoxedFilter<(impl Reply,)> {\n\n end()\n\n .and(get())\n\n .and(s.clone())\n\n .and(query())\n\n .map(auto_complete_any)\n\n .or(path(\"tag\")\n\n .and(get())\n\n .and(s.clone())\n\n .and(query())\n\n .map(auto_complete_tag))\n\n .or(path(\"person\")\n\n .and(get())\n\n .and(s)\n\n .and(query())\n\n .map(auto_complete_person))\n\n .boxed()\n\n}\n\n\n\nsql_function!(fn lower(string: Text) -> Text);\n\nsql_function!(fn strpos(string: Text, substring: Text) -> Integer);\n\n\n", "file_path": "src/server/autocomplete.rs", "rank": 11, "score": 136534.268298355 }, { "content": "pub fn auto_complete_any(context: Context, query: AcQ) -> impl Reply {\n\n let qq = query.q.to_lowercase();\n\n\n\n let tpos = strpos(lower(t::tag_name), &qq);\n\n let query = t::tags\n\n .select((t::tag_name, t::slug, tpos))\n\n .filter(tpos.gt(0))\n\n .into_boxed();\n\n let query = if context.is_authorized() {\n\n query\n\n } else {\n\n use crate::schema::photo_tags::dsl as tp;\n\n query.filter(t::id.eq_any(tp::photo_tags.select(tp::tag_id).filter(\n\n tp::photo_id.eq_any(p::photos.select(p::id).filter(p::is_public)),\n\n )))\n\n };\n\n let db = context.db().unwrap();\n\n let mut tags = query\n\n .order((tpos, t::tag_name))\n\n .limit(10)\n", "file_path": "src/server/autocomplete.rs", "rank": 12, "score": 134321.25209493298 }, { "content": "pub fn auto_complete_tag(context: Context, query: AcQ) -> impl Reply {\n\n use crate::schema::tags::dsl::{tag_name, tags};\n\n let tpos = strpos(lower(tag_name), query.q.to_lowercase());\n\n let q = tags\n\n .select(tag_name)\n\n .filter((&tpos).gt(0))\n\n .order((&tpos, tag_name))\n\n .limit(10);\n\n 
reply::json(&q.load::<String>(&context.db().unwrap()).unwrap())\n\n}\n\n\n", "file_path": "src/server/autocomplete.rs", "rank": 13, "score": 132231.19750090863 }, { "content": "pub fn auto_complete_person(context: Context, query: AcQ) -> impl Reply {\n\n use crate::schema::people::dsl::{people, person_name};\n\n let mpos = strpos(lower(person_name), query.q.to_lowercase());\n\n let q = people\n\n .select(person_name)\n\n .filter((&mpos).gt(0))\n\n .order((&mpos, person_name))\n\n .limit(10);\n\n reply::json(&q.load::<String>(&context.db().unwrap()).unwrap())\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct AcQ {\n\n pub q: String,\n\n}\n\n\n", "file_path": "src/server/autocomplete.rs", "rank": 14, "score": 132231.19750090863 }, { "content": "fn split(group: &[Photo]) -> (&[Photo], &[Photo]) {\n\n fn gradeval(p: &Photo) -> u64 {\n\n 1 + p.grade.unwrap_or(30) as u64\n\n }\n\n let l = group.len();\n\n let gradesum = group.iter().fold(0u64, |sum, p| sum + gradeval(p));\n\n let mut lsum = 0;\n\n let edge = l / 16;\n\n let mut pos = 0;\n\n let mut largest = 0;\n\n for i in edge..l - 1 - edge {\n\n let interval = timestamp(&group[i]) - timestamp(&group[i + 1]);\n\n let interval = if interval < 0 {\n\n panic!(\n\n \"Got images {:?}, {:?} in wrong order\",\n\n group[i],\n\n group[i + 1]\n\n )\n\n } else {\n\n interval as u64\n", "file_path": "src/server/splitlist.rs", "rank": 15, "score": 132111.5201322423 }, { "content": "fn split_to_groups(photos: &[Photo]) -> Option<Vec<&[Photo]>> {\n\n let wanted_groups = match photos.len() {\n\n l if l <= 18 => return None,\n\n l if l < 120 => 10,\n\n l if l < 256 => (l as f64).sqrt() as usize,\n\n _ => 16,\n\n };\n\n let mut groups = vec![photos];\n\n while groups.len() < wanted_groups {\n\n let i = find_largest(&groups);\n\n let (a, b) = split(groups[i]);\n\n groups[i] = a;\n\n groups.insert(i + 1, b);\n\n }\n\n Some(groups)\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 16, "score": 121598.0964338526 }, { "content": "pub fn all_for_day(\n\n year: i32,\n\n month: u32,\n\n day: u32,\n\n range: ImgRange,\n\n context: Context,\n\n) -> Response {\n\n let thedate = NaiveDate::from_ymd(year, month, day).and_hms(0, 0, 0);\n\n use crate::schema::photos::dsl::date;\n\n\n\n let photos = Photo::query(context.is_authorized())\n\n .filter(date.ge(thedate))\n\n .filter(date.lt(thedate + Duration::days(1)));\n\n let (links, coords) = links_by_time(&context, photos, range, false);\n\n\n\n if links.is_empty() {\n\n not_found(&context)\n\n } else {\n\n Builder::new()\n\n .html(|o| {\n", "file_path": "src/server/views_by_date.rs", "rank": 17, "score": 118818.30442731231 }, { "content": "pub fn split_to_group_links(\n\n photos: &[Photo],\n\n path: &UrlString,\n\n with_date: bool,\n\n) -> Vec<PhotoLink> {\n\n if let Some(groups) = split_to_groups(&photos) {\n\n groups\n\n .iter()\n\n .map(|g| PhotoLink::for_group(g, path.clone(), with_date))\n\n .collect()\n\n } else {\n\n let make_link = if with_date {\n\n PhotoLink::date_title\n\n } else {\n\n PhotoLink::no_title\n\n };\n\n photos.iter().map(make_link).collect()\n\n }\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 18, "score": 116680.53438753514 }, { "content": "pub fn links_by_time<'a>(\n\n context: &Context,\n\n photos: photos::BoxedQuery<'a, Pg>,\n\n range: ImgRange,\n\n with_date: bool,\n\n) -> (Vec<PhotoLink>, Vec<(Coord, i32)>) {\n\n let c = context.db().unwrap();\n\n use crate::schema::photos::dsl::{date, id};\n\n let photos =\n\n if let Some(from_date) = range.from.map(|i| date_of_img(&c, 
i)) {\n\n photos.filter(date.ge(from_date))\n\n } else {\n\n photos\n\n };\n\n let photos = if let Some(to_date) = range.to.map(|i| date_of_img(&c, i)) {\n\n photos.filter(date.le(to_date))\n\n } else {\n\n photos\n\n };\n\n let photos = photos\n\n .order((date.desc().nulls_last(), id.desc()))\n\n .load(&c)\n\n .unwrap();\n\n let baseurl = UrlString::new(context.path_without_query());\n\n (\n\n split_to_group_links(&photos, &baseurl, with_date),\n\n get_positions(&photos, &c),\n\n )\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 19, "score": 114596.279019306 }, { "content": "fn timestamp(p: &Photo) -> i64 {\n\n p.date.map(|d| d.timestamp()).unwrap_or(0)\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 20, "score": 111756.3176273401 }, { "content": "fn find_largest(groups: &[&[Photo]]) -> usize {\n\n let mut found = 0;\n\n let mut largest = 0.0;\n\n for (i, g) in groups.iter().enumerate() {\n\n let time = 1 + g.first().map(|p| timestamp(p)).unwrap_or(0)\n\n - g.last().map(|p| timestamp(p)).unwrap_or(0);\n\n let score = (g.len() as f64).powi(3) * (time as f64);\n\n if score > largest {\n\n largest = score;\n\n found = i;\n\n }\n\n }\n\n found\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 21, "score": 107628.0012078115 }, { "content": "pub fn date_of_img(db: &PgConnection, photo_id: i32) -> Option<NaiveDateTime> {\n\n use crate::schema::photos::dsl::{date, photos};\n\n photos.find(photo_id).select(date).first(db).unwrap_or(None)\n\n}\n\n\n", "file_path": "src/server/views_by_date.rs", "rank": 22, "score": 105501.68674153564 }, { "content": "pub fn logout(_context: Context) -> Response {\n\n Builder::new()\n\n .header(\n\n header::SET_COOKIE,\n\n \"EXAUTH=; Max-Age=0; SameSite=Strict; HttpOnly\",\n\n )\n\n .redirect(\"/\")\n\n}\n", "file_path": "src/server/login.rs", "rank": 23, "score": 103657.67195729414 }, { "content": "#[derive(StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\nstruct DirOpt {\n\n /// Path to the root directory storing all actual photos.\n\n #[structopt(long, env = \"RPHOTOS_DIR\")]\n\n photos_dir: PathBuf,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n dotenv().ok();\n\n env_logger::init();\n\n match run(&RPhotos::from_args()).await {\n\n Ok(()) => (),\n\n Err(err) => {\n\n println!(\"{}\", err);\n\n exit(1);\n\n }\n\n }\n\n}\n\n\n\nasync fn run(args: &RPhotos) -> Result<(), Error> {\n", "file_path": "src/main.rs", "rank": 24, "score": 102254.46785823477 }, { "content": "CREATE UNIQUE INDEX photos_path_idx ON photos (path);\n", "file_path": "migrations/20160603223947_create_photos_table/up.sql", "rank": 25, "score": 102079.9186648915 }, { "content": "pub fn by_file_list<In: BufRead + Sized>(\n\n db: &PgConnection,\n\n list: In,\n\n) -> Result<(), Error> {\n\n for line in list.lines() {\n\n one(db, &line?)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/adm/makepublic.rs", "rank": 26, "score": 101791.1868002181 }, { "content": "pub fn all_years(context: Context) -> Response {\n\n use crate::schema::photos::dsl::{date, grade};\n\n let db = context.db().unwrap();\n\n let groups = Photo::query(context.is_authorized())\n\n .select(sql::<(Nullable<Integer>, BigInt)>(\n\n \"cast(extract(year from date) as int) y, count(*)\",\n\n ))\n\n .group_by(sql::<Nullable<Integer>>(\"y\"))\n\n .order(sql::<Nullable<Integer>>(\"y\").desc().nulls_last())\n\n .load::<(Option<i32>, i64)>(&db)\n\n .unwrap()\n\n .iter()\n\n .map(|&(year, count)| {\n\n let q = Photo::query(context.is_authorized())\n\n .order((grade.desc().nulls_last(), 
date.asc()))\n\n .limit(1);\n\n let photo = if let Some(year) = year {\n\n q.filter(date.ge(start_of_year(year)))\n\n .filter(date.lt(start_of_year(year + 1)))\n\n } else {\n", "file_path": "src/server/views_by_date.rs", "rank": 27, "score": 101791.1868002181 }, { "content": "pub fn on_this_day(context: Context) -> Response {\n\n use crate::schema::photos::dsl::{date, grade};\n\n use crate::schema::positions::dsl::{\n\n latitude, longitude, photo_id, positions,\n\n };\n\n\n\n let (month, day) = {\n\n let today = Local::now();\n\n (today.month(), today.day())\n\n };\n\n let db = context.db().unwrap();\n\n let pos = Photo::query(context.is_authorized())\n\n .inner_join(positions)\n\n .filter(\n\n sql(\"extract(month from date)=\").bind::<Integer, _>(month as i32),\n\n )\n\n .filter(sql(\"extract(day from date)=\").bind::<Integer, _>(day as i32))\n\n .select((photo_id, latitude, longitude))\n\n .load(&db)\n\n .map_err(|e| warn!(\"Failed to load positions: {}\", e))\n", "file_path": "src/server/views_by_date.rs", "rank": 28, "score": 101791.1868002181 }, { "content": "pub fn all_null_date(context: Context) -> Response {\n\n use crate::schema::photos::dsl::{date, path};\n\n\n\n Builder::new()\n\n .html(|o| {\n\n templates::index(\n\n o,\n\n &context,\n\n \"Photos without a date\",\n\n &[],\n\n &Photo::query(context.is_authorized())\n\n .filter(date.is_null())\n\n .order(path.asc())\n\n .limit(500)\n\n .load(&context.db().unwrap())\n\n .unwrap()\n\n .iter()\n\n .map(PhotoLink::no_title)\n\n .collect::<Vec<_>>(),\n\n &[], // Don't care about positions here\n\n )\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/server/views_by_date.rs", "rank": 29, "score": 100041.37432746592 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct LoginOk {\n\n token: String,\n\n}\n\n\n", "file_path": "src/server/api.rs", "rank": 30, "score": 99696.08428970526 }, { "content": "pub fn monthname(n: u32) -> &'static str {\n\n match n {\n\n 1 => \"january\",\n\n 2 => \"february\",\n\n 3 => \"march\",\n\n 4 => \"april\",\n\n 5 => \"may\",\n\n 6 => \"june\",\n\n 7 => \"july\",\n\n 8 => \"august\",\n\n 9 => \"september\",\n\n 10 => \"october\",\n\n 11 => \"november\",\n\n 12 => \"december\",\n\n _ => \"non-month\",\n\n }\n\n}\n", "file_path": "src/server/views_by_date.rs", "rank": 31, "score": 99008.53623651611 }, { "content": "pub fn create_session_filter(args: &Args) -> ContextFilter {\n\n let global = Arc::new(GlobalContext::new(args));\n\n let g1 = global.clone();\n\n warp::any()\n\n .and(path::full())\n\n .and(\n\n cookie::cookie(\"EXAUTH\")\n\n .or(header::header(\"Authorization\"))\n\n .unify()\n\n .map(move |key: String| {\n\n g1.verify_key(&key)\n\n .map_err(|e| warn!(\"Auth failed: {}\", e))\n\n .ok()\n\n })\n\n .or(warp::any().map(|| None))\n\n .unify(),\n\n )\n\n .map(move |path, user| {\n\n let global = global.clone();\n\n Context { global, path, user }\n\n })\n\n .boxed()\n\n}\n\n\n", "file_path": "src/server/context.rs", "rank": 32, "score": 98397.62640061587 }, { "content": "pub fn list(db: &PgConnection) -> Result<(), Error> {\n\n use crate::schema::users::dsl::*;\n\n println!(\n\n \"Existing users: {:?}.\",\n\n users.select(username).load::<String>(db)?,\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "src/adm/users.rs", "rank": 33, "score": 97258.72376376393 }, { "content": "pub fn show_stats(db: &PgConnection) -> Result<(), Error> {\n\n println!(\n\n \"There are {} photos in total.\",\n\n photos.select(count_star()).first::<i64>(db)?,\n\n );\n\n\n\n println!(\n\n \"There are {} persons, {} places, and 
{} tags mentioned.\",\n\n people.select(count_star()).first::<i64>(db)?,\n\n places.select(count_star()).first::<i64>(db)?,\n\n tags.select(count_star()).first::<i64>(db)?,\n\n );\n\n\n\n // Something like this should be possible, I guess?\n\n //\n\n // use schema::photos::dsl::date;\n\n // let year = date_part(\"year\", date).aliased(\"y\");\n\n // println!(\"Count per year: {:?}\",\n\n // photos.select((year, count_star()))\n\n // .group_by(year)\n", "file_path": "src/adm/stats.rs", "rank": 34, "score": 95614.97583691387 }, { "content": "#[cfg(test)]\n\nfn paths<'a>(\n\n (a, b): (&'a [Photo], &'a [Photo]),\n\n) -> (Vec<&'a str>, Vec<&'a str>) {\n\n (\n\n a.iter().map(|p| p.path.as_ref()).collect(),\n\n b.iter().map(|p| p.path.as_ref()).collect(),\n\n )\n\n}\n", "file_path": "src/server/splitlist.rs", "rank": 35, "score": 94322.74427215348 }, { "content": "fn save_photo(\n\n db: &PgConnection,\n\n file_path: &str,\n\n exif: &ExifData,\n\n) -> Result<(), Error> {\n\n let width = exif.width.ok_or(Error::MissingWidth)?;\n\n let height = exif.height.ok_or(Error::MissingHeight)?;\n\n let photo = match Photo::create_or_set_basics(\n\n db,\n\n file_path,\n\n width as i32,\n\n height as i32,\n\n exif.date(),\n\n exif.rotation()?,\n\n find_camera(db, exif)?,\n\n )? {\n\n Modification::Created(photo) => {\n\n info!(\"Created #{}, {}\", photo.id, photo.path);\n\n photo\n\n }\n", "file_path": "src/adm/findphotos.rs", "rank": 36, "score": 93577.24464789245 }, { "content": "pub fn prev_image(context: Context, param: FromParam) -> Response {\n\n use crate::schema::photos::dsl::{date, id};\n\n let db = context.db().unwrap();\n\n if let Some(from_date) = date_of_img(&db, param.from) {\n\n let q = Photo::query(context.is_authorized())\n\n .select(id)\n\n .filter(\n\n date.lt(from_date)\n\n .or(date.eq(from_date).and(id.lt(param.from))),\n\n )\n\n .order((date.desc().nulls_last(), id.desc()));\n\n if let Ok(photo) = q.first::<i32>(&db) {\n\n return redirect_to_img(photo);\n\n }\n\n }\n\n not_found(&context)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct FromParam {\n\n from: i32,\n\n}\n\n\n", "file_path": "src/server/views_by_date.rs", "rank": 37, "score": 91604.00743108214 }, { "content": "pub fn next_image(context: Context, param: FromParam) -> Response {\n\n use crate::schema::photos::dsl::{date, id};\n\n let db = context.db().unwrap();\n\n if let Some(from_date) = date_of_img(&db, param.from) {\n\n let q = Photo::query(context.is_authorized())\n\n .select(id)\n\n .filter(\n\n date.gt(from_date)\n\n .or(date.eq(from_date).and(id.gt(param.from))),\n\n )\n\n .order((date, id));\n\n if let Ok(photo) = q.first::<i32>(&db) {\n\n return redirect_to_img(photo);\n\n }\n\n }\n\n not_found(&context)\n\n}\n\n\n", "file_path": "src/server/views_by_date.rs", "rank": 38, "score": 91604.00743108214 }, { "content": "pub fn months_in_year(year: i32, context: Context) -> Response {\n\n use crate::schema::photos::dsl::{date, grade};\n\n\n\n let title: String = format!(\"Photos from {}\", year);\n\n let db = context.db().unwrap();\n\n let groups = Photo::query(context.is_authorized())\n\n .filter(date.ge(start_of_year(year)))\n\n .filter(date.lt(start_of_year(year + 1)))\n\n .select(sql::<(Integer, BigInt)>(\n\n \"cast(extract(month from date) as int) m, count(*)\",\n\n ))\n\n .group_by(sql::<Integer>(\"m\"))\n\n .order(sql::<Integer>(\"m\").desc().nulls_last())\n\n .load::<(i32, i64)>(&db)\n\n .unwrap()\n\n .iter()\n\n .map(|&(month, count)| {\n\n let month = month as u32;\n\n let photo = 
Photo::query(context.is_authorized())\n\n .filter(date.ge(start_of_month(year, month)))\n", "file_path": "src/server/views_by_date.rs", "rank": 39, "score": 91604.00743108214 }, { "content": "pub fn get_login(context: Context, param: NextQ) -> Response {\n\n info!(\"Got request for login form. Param: {:?}\", param);\n\n let next = sanitize_next(param.next.as_ref().map(AsRef::as_ref));\n\n Builder::new()\n\n .html(|o| templates::login(o, &context, next, None))\n\n .unwrap()\n\n}\n\n\n\n#[derive(Debug, Default, Deserialize)]\n\npub struct NextQ {\n\n next: Option<String>,\n\n}\n\n\n", "file_path": "src/server/login.rs", "rank": 40, "score": 91604.00743108214 }, { "content": "pub fn post_login(context: Context, form: LoginForm) -> Response {\n\n let next = sanitize_next(form.next.as_ref().map(AsRef::as_ref));\n\n if let Some(user) = form.validate(&*context.db().unwrap()) {\n\n let token = context.make_token(&user).unwrap();\n\n return Builder::new()\n\n .header(\n\n header::SET_COOKIE,\n\n format!(\"EXAUTH={}; SameSite=Strict; HttpOnly\", token),\n\n )\n\n .redirect(next.unwrap_or(\"/\"));\n\n }\n\n let message = Some(\"Login failed, please try again\");\n\n Builder::new()\n\n .html(|o| templates::login(o, &context, next, message))\n\n .unwrap()\n\n}\n\n\n\n/// The data submitted by the login form.\n\n/// This does not derive Debug or Serialize, as the password is plain text.\n\n#[derive(Deserialize)]\n", "file_path": "src/server/login.rs", "rank": 41, "score": 91604.00743108214 }, { "content": "fn is_duplicate<T>(r: &Result<T, diesel::result::Error>) -> bool {\n\n use diesel::result::DatabaseErrorKind::UniqueViolation;\n\n use diesel::result::Error::DatabaseError;\n\n matches!(r, Err(DatabaseError(UniqueViolation, _)))\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n NoPosition(i32),\n\n Db(i32, diesel::result::Error),\n\n Pool(i32, String),\n\n Server(i32, reqwest::Error),\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::name_and_level;\n\n use serde_json::Value;\n\n\n\n #[test]\n", "file_path": "src/fetch_places.rs", "rank": 42, "score": 90133.19393070808 }, { "content": "pub fn one(db: &PgConnection, tpath: &str) -> Result<(), Error> {\n\n use crate::schema::photos::dsl::*;\n\n match update(photos.filter(path.eq(&tpath)))\n\n .set(is_public.eq(true))\n\n .get_result::<Photo>(db)\n\n {\n\n Ok(photo) => {\n\n println!(\"Made {} public: {:?}\", tpath, photo);\n\n Ok(())\n\n }\n\n Err(DieselError::NotFound) => {\n\n Err(Error::Other(format!(\"File {} is not known\", tpath,)))\n\n }\n\n Err(error) => Err(error.into()),\n\n }\n\n}\n\n\n", "file_path": "src/adm/makepublic.rs", "rank": 43, "score": 89407.01374922201 }, { "content": "pub fn passwd(db: &PgConnection, uname: &str) -> Result<(), Error> {\n\n let pword = random_password(14);\n\n let hashword = make_password(&pword);\n\n use crate::schema::users::dsl::*;\n\n match update(users.filter(username.eq(&uname)))\n\n .set(password.eq(&hashword))\n\n .execute(db)?\n\n {\n\n 1 => {\n\n println!(\"Updated password for {:?} to {:?}\", uname, pword);\n\n }\n\n 0 => {\n\n insert_into(users)\n\n .values((username.eq(uname), password.eq(&hashword)))\n\n .execute(db)?;\n\n println!(\"Created user {:?} with password {:?}\", uname, pword);\n\n }\n\n n => {\n\n println!(\n\n \"Strange, updated {} passwords for {:?} to {:?}\",\n\n n, uname, pword,\n\n );\n\n }\n\n };\n\n Ok(())\n\n}\n\n\n", "file_path": "src/adm/users.rs", "rank": 44, "score": 89407.01374922201 }, { "content": "pub fn search(context: Context, query: Vec<(String, String)>) -> 
Response {\n\n let query = SearchQuery::load(query, &context.db().unwrap()).unwrap();\n\n\n\n let mut photos = Photo::query(context.is_authorized());\n\n if let Some(since) = query.since.as_ref() {\n\n photos = photos.filter(p::date.ge(since));\n\n }\n\n if let Some(until) = query.until.as_ref() {\n\n photos = photos.filter(p::date.le(until));\n\n }\n\n for tag in &query.t {\n\n let ids = pt::photo_tags\n\n .select(pt::photo_id)\n\n .filter(pt::tag_id.eq(tag.item.id));\n\n photos = if tag.inc {\n\n photos.filter(p::id.eq_any(ids))\n\n } else {\n\n photos.filter(p::id.ne_all(ids))\n\n };\n\n }\n", "file_path": "src/server/search.rs", "rank": 45, "score": 87435.80303209773 }, { "content": "pub fn days_in_month(year: i32, month: u32, context: Context) -> Response {\n\n use crate::schema::photos::dsl::{date, grade};\n\n\n\n let lpath: Vec<Link> = vec![Link::year(year)];\n\n let title: String = format!(\"Photos from {} {}\", monthname(month), year);\n\n let db = context.db().unwrap();\n\n let groups = Photo::query(context.is_authorized())\n\n .filter(date.ge(start_of_month(year, month)))\n\n .filter(date.lt(start_of_month(year, month + 1)))\n\n .select(sql::<(Integer, BigInt)>(\n\n \"cast(extract(day from date) as int) d, count(*)\",\n\n ))\n\n .group_by(sql::<Integer>(\"d\"))\n\n .order(sql::<Integer>(\"d\").desc().nulls_last())\n\n .load::<(i32, i64)>(&db)\n\n .unwrap()\n\n .iter()\n\n .map(|&(day, count)| {\n\n let day = day as u32;\n\n let fromdate =\n", "file_path": "src/server/views_by_date.rs", "rank": 46, "score": 84599.59104067844 }, { "content": "fn photo_details(id: i32, context: Context) -> Response {\n\n use crate::schema::photos::dsl::photos;\n\n let c = context.db().unwrap();\n\n if let Ok(tphoto) = photos.find(id).first::<Photo>(&c) {\n\n if context.is_authorized() || tphoto.is_public() {\n\n return Builder::new()\n\n .html(|o| {\n\n templates::details(\n\n o,\n\n &context,\n\n &tphoto\n\n .date\n\n .map(|d| {\n\n vec![\n\n Link::year(d.year()),\n\n Link::month(d.year(), d.month()),\n\n Link::day(d.year(), d.month(), d.day()),\n\n Link::prev(tphoto.id),\n\n Link::next(tphoto.id),\n\n ]\n", "file_path": "src/server/mod.rs", "rank": 47, "score": 74622.52374538712 }, { "content": "fn login(context: Context, form: LoginForm) -> ApiResult<LoginOk> {\n\n let db = context.db()?;\n\n let user = form\n\n .validate(&db)\n\n .ok_or_else(|| ApiError::bad_request(\"login failed\"))?;\n\n Ok(LoginOk {\n\n token: context\n\n .make_token(&user)\n\n .ok_or_else(|| ApiError::bad_request(\"failed to make token\"))?,\n\n })\n\n}\n\n\n", "file_path": "src/server/api.rs", "rank": 48, "score": 69316.31802263591 }, { "content": "#[derive(StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\nstruct CacheOpt {\n\n /// How to connect to memcached.\n\n #[structopt(\n\n long,\n\n env = \"MEMCACHED_SERVER\",\n\n default_value = \"memcache://127.0.0.1:11211\"\n\n )]\n\n memcached_url: String,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 49, "score": 67607.58604579956 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct ImgQuery {\n\n id: Option<u32>,\n\n path: Option<String>,\n\n}\n\n\n\nimpl ImgQuery {\n\n fn validate(self) -> Result<ImgIdentifier, &'static str> {\n\n match (self.id, self.path) {\n\n (None, None) => Err(\"id or path required\"),\n\n (Some(id), None) => Ok(ImgIdentifier::Id(id)),\n\n (None, Some(path)) => Ok(ImgIdentifier::Path(path)),\n\n (Some(_), Some(_)) => Err(\"Conflicting arguments\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/server/api.rs", "rank": 50, "score": 
66332.64667917717 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct ImgLink {\n\n url: String,\n\n width: u32,\n\n height: u32,\n\n}\n\n\n\nimpl ImgLink {\n\n fn new(img: &Photo, size: SizeTag) -> Self {\n\n let (width, height) = img.get_size(size);\n\n ImgLink {\n\n url: format!(\"/img/{}-{}.jpg\", img.id, size.tag()),\n\n width,\n\n height,\n\n }\n\n }\n\n}\n", "file_path": "src/server/api.rs", "rank": 51, "score": 66332.64667917717 }, { "content": "#[derive(Debug, Serialize, Eq, PartialEq)]\n\nstruct SearchTag {\n\n /// Kind (may be \"p\" for person, \"t\" for tag, \"l\" for location).\n\n k: char,\n\n /// Title of the the tag\n\n t: String,\n\n /// Slug\n\n s: String,\n\n}\n\n\n\nimpl Ord for SearchTag {\n\n fn cmp(&self, o: &Self) -> Ordering {\n\n self.t.cmp(&o.t)\n\n }\n\n}\n\nimpl PartialOrd for SearchTag {\n\n fn partial_cmp(&self, o: &Self) -> Option<Ordering> {\n\n Some(self.cmp(o))\n\n }\n\n}\n", "file_path": "src/server/autocomplete.rs", "rank": 52, "score": 66332.48631977277 }, { "content": "// Does _not_ derive debug, copy or clone, since it contains the jwt\n\n// secret and some connection pools.\n\nstruct GlobalContext {\n\n db_pool: PgPool,\n\n photosdir: PhotosDir,\n\n memcache_pool: MemcachePool,\n\n jwt_secret: String,\n\n overpass: OverpassOpt,\n\n}\n\n\n\nimpl GlobalContext {\n\n fn new(args: &Args) -> Self {\n\n let mc_manager =\n\n MemcacheConnectionManager::new(args.cache.memcached_url.as_ref());\n\n GlobalContext {\n\n db_pool: args.db.create_pool().expect(\"Posgresql pool\"),\n\n photosdir: PhotosDir::new(&args.photos.photos_dir),\n\n memcache_pool: Pool::builder()\n\n .connection_timeout(Duration::from_secs(1))\n\n .build(mc_manager)\n\n .expect(\"Memcache pool\"),\n\n jwt_secret: args.jwt_key.clone(),\n", "file_path": "src/server/context.rs", "rank": 53, "score": 66332.29270242093 }, { "content": "struct ApiError {\n\n code: StatusCode,\n\n msg: &'static str,\n\n}\n\n\n\nconst NOT_FOUND: ApiError = ApiError::bad_request(\"not found\");\n\n\n\nimpl ApiError {\n\n const fn bad_request(msg: &'static str) -> Self {\n\n ApiError {\n\n code: StatusCode::BAD_REQUEST,\n\n msg,\n\n }\n\n }\n\n fn into_response(self) -> Response {\n\n let mut response =\n\n warp::reply::json(&ApiErrorMessage { err: self.msg })\n\n .into_response();\n\n *response.status_mut() = self.code;\n\n response\n", "file_path": "src/server/api.rs", "rank": 54, "score": 66328.89902050406 }, { "content": "#[derive(Deserialize)]\n\nstruct GradeForm {\n\n image: i32,\n\n grade: i16,\n\n}\n\n\n\nasync fn set_location(context: Context, form: CoordForm) -> WarpResult {\n\n if !context.is_authorized() {\n\n return permission_denied();\n\n }\n\n let image = form.image;\n\n let coord = form.coord();\n\n info!(\"Should set location of #{} to {:?}.\", image, coord);\n\n\n\n let (lat, lng) = ((coord.x * 1e6) as i32, (coord.y * 1e6) as i32);\n\n use crate::schema::positions::dsl::*;\n\n use diesel::insert_into;\n\n insert_into(positions)\n\n .values((photo_id.eq(image), latitude.eq(lat), longitude.eq(lng)))\n\n .on_conflict(photo_id)\n\n .do_update()\n", "file_path": "src/server/admin.rs", "rank": 55, "score": 66328.89902050406 }, { "content": "#[derive(Deserialize)]\n\nstruct TagForm {\n\n image: i32,\n\n tag: String,\n\n}\n\n\n\nasync fn set_person(context: Context, form: PersonForm) -> WarpResult {\n\n if !context.is_authorized() {\n\n return permission_denied();\n\n }\n\n let c = context.db().unwrap();\n\n use crate::models::{Person, PhotoPerson};\n\n let person = Person::get_or_create_name(&c, 
&form.person)\n\n .expect(\"Find or create person\");\n\n use crate::schema::photo_people::dsl::*;\n\n let q = photo_people\n\n .filter(photo_id.eq(form.image))\n\n .filter(person_id.eq(person.id));\n\n if q.first::<PhotoPerson>(&c).is_ok() {\n\n info!(\"Photo #{} already has {:?}\", form.image, person);\n\n } else {\n\n info!(\"Add {:?} on photo #{}!\", person, form.image);\n\n diesel::insert_into(photo_people)\n\n .values((photo_id.eq(form.image), person_id.eq(person.id)))\n\n .execute(&c)\n\n .expect(\"Name person in photo\");\n\n }\n\n Ok(redirect_to_img(form.image))\n\n}\n\n\n", "file_path": "src/server/admin.rs", "rank": 56, "score": 66328.89902050406 }, { "content": "#[derive(Deserialize)]\n\nstruct CoordForm {\n\n image: i32,\n\n lat: f64,\n\n lng: f64,\n\n}\n\n\n\nimpl CoordForm {\n\n fn coord(&self) -> Coord {\n\n Coord {\n\n x: self.lat,\n\n y: self.lng,\n\n }\n\n }\n\n}\n", "file_path": "src/server/admin.rs", "rank": 57, "score": 66328.89902050406 }, { "content": "#[derive(Deserialize)]\n\nstruct RotateForm {\n\n image: i32,\n\n angle: i16,\n\n}\n\n\n", "file_path": "src/server/admin.rs", "rank": 58, "score": 66328.89902050406 }, { "content": "#[derive(Deserialize)]\n\nstruct PersonForm {\n\n image: i32,\n\n person: String,\n\n}\n\n\n\nasync fn set_grade(context: Context, form: GradeForm) -> WarpResult {\n\n if !context.is_authorized() {\n\n return permission_denied();\n\n }\n\n if form.grade >= 0 && form.grade <= 100 {\n\n info!(\"Should set grade of #{} to {}\", form.image, form.grade);\n\n use crate::schema::photos::dsl::{grade, photos};\n\n let q =\n\n diesel::update(photos.find(form.image)).set(grade.eq(form.grade));\n\n match q.execute(&context.db().unwrap()) {\n\n Ok(1) => {\n\n return Ok(redirect_to_img(form.image));\n\n }\n\n Ok(0) => (),\n\n Ok(n) => {\n", "file_path": "src/server/admin.rs", "rank": 59, "score": 66328.89902050406 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct ApiErrorMessage {\n\n err: &'static str,\n\n}\n\n\n", "file_path": "src/server/api.rs", "rank": 60, "score": 65148.7155763218 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct GetImgResult {\n\n small: ImgLink,\n\n medium: ImgLink,\n\n public: bool,\n\n}\n\n\n\nimpl GetImgResult {\n\n fn for_img(img: &Photo) -> Self {\n\n GetImgResult {\n\n small: ImgLink::new(img, SizeTag::Small),\n\n medium: ImgLink::new(img, SizeTag::Medium),\n\n public: img.is_public,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/server/api.rs", "rank": 61, "score": 65148.7155763218 }, { "content": "pub trait Facet {\n\n fn by_slug(slug: &str, db: &PgConnection) -> Result<Self, Error>\n\n where\n\n Self: Sized;\n\n}\n\n\n\n#[derive(Debug, Clone, Queryable)]\n\npub struct Tag {\n\n pub id: i32,\n\n pub slug: String,\n\n pub tag_name: String,\n\n}\n\n\n\nimpl Facet for Tag {\n\n fn by_slug(slug: &str, db: &PgConnection) -> Result<Tag, Error> {\n\n t::tags.filter(t::slug.eq(slug)).first(db)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Queryable)]\n", "file_path": "src/models.rs", "rank": 62, "score": 63858.48769942949 }, { "content": "fn crawl(\n\n db: &PgConnection,\n\n photos: &PhotosDir,\n\n only_in: &Path,\n\n) -> Result<(), Error> {\n\n photos.find_files(\n\n only_in,\n\n &|path, exif| match save_photo(db, path, exif) {\n\n Ok(()) => debug!(\"Saved photo {}\", path),\n\n Err(e) => warn!(\"Failed to save photo {}: {:?}\", path, e),\n\n },\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/adm/findphotos.rs", "rank": 63, "score": 62085.61334783597 }, { "content": "fn datetime_from_parts(\n\n date: Option<&str>,\n\n time: 
Option<&str>,\n\n defaulttime: NaiveTime,\n\n) -> Option<NaiveDateTime> {\n\n date.and_then(|date| NaiveDate::parse_from_str(date, \"%Y-%m-%d\").ok())\n\n .map(|date| {\n\n date.and_time(\n\n time.and_then(|s| {\n\n NaiveTime::parse_from_str(s, \"%H:%M:%S\").ok()\n\n })\n\n .unwrap_or(defaulttime),\n\n )\n\n })\n\n}\n", "file_path": "src/server/search.rs", "rank": 64, "score": 60873.6649615453 }, { "content": "#[test]\n\nfn split_two() {\n\n let photos = [\n\n Photo::mock(2018, 08, 31, 21, 45, 48),\n\n Photo::mock(2018, 08, 31, 21, 45, 12),\n\n ];\n\n assert_eq!(paths(split(&photos)), paths((&photos[..1], &photos[1..])));\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 65, "score": 60873.6649615453 }, { "content": "fn find_camera(\n\n db: &PgConnection,\n\n exif: &ExifData,\n\n) -> Result<Option<Camera>, Error> {\n\n if let Some((make, model)) = exif.camera() {\n\n let cam = Camera::get_or_create(db, &make, &model)?;\n\n return Ok(Some(cam));\n\n }\n\n Ok(None)\n\n}\n", "file_path": "src/adm/findphotos.rs", "rank": 66, "score": 60873.6649615453 }, { "content": "fn verify_token(\n\n token: &Token<Header>,\n\n jwt_secret: &[u8],\n\n) -> Result<bool, String> {\n\n token\n\n .verify(jwt_secret)\n\n .map_err(|e| format!(\"Failed to verify token {:?}: {}\", token, e))\n\n}\n\n\n\n/// The request context, providing database, memcache and authorized user.\n\npub struct Context {\n\n global: Arc<GlobalContext>,\n\n path: FullPath,\n\n user: Option<String>,\n\n}\n\n\n\nimpl Context {\n\n pub fn db(&self) -> Result<PooledPg, Error> {\n\n self.global.db_pool.get()\n\n }\n", "file_path": "src/server/context.rs", "rank": 67, "score": 60873.6649615453 }, { "content": "pub trait BuilderExt {\n\n fn redirect(self, url: &str) -> Response;\n\n\n\n fn far_expires(self) -> Self;\n\n}\n\n\n\nimpl BuilderExt for Builder {\n\n fn redirect(self, url: &str) -> Response {\n\n self.status(StatusCode::FOUND)\n\n .header(header::LOCATION, url)\n\n .body(format!(\"Please refer to {}\", url).into())\n\n .unwrap()\n\n }\n\n\n\n fn far_expires(self) -> Self {\n\n let far_expires = Utc::now() + Duration::days(180);\n\n self.header(header::EXPIRES, far_expires.to_rfc2822())\n\n }\n\n}\n", "file_path": "src/server/render_ructe.rs", "rank": 68, "score": 60336.81169854343 }, { "content": "#[test]\n\nfn parse_good_imgname() {\n\n assert_eq!(\n\n \"4711-s.jpg\".parse(),\n\n Ok(ImgName {\n\n id: 4711,\n\n size: SizeTag::Small,\n\n })\n\n )\n\n}\n\n\n", "file_path": "src/server/image.rs", "rank": 69, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn test_sanitize_bad_2() {\n\n assert_eq!(None, sanitize_next(Some(\"//evil.org/\")))\n\n}\n", "file_path": "src/server/login.rs", "rank": 70, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn test_sanitize_bad_1() {\n\n assert_eq!(None, sanitize_next(Some(\"https://evil.org/\")))\n\n}\n\n\n", "file_path": "src/server/login.rs", "rank": 71, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn test_sanitize_good_2() {\n\n assert_eq!(Some(\"/2017/7/15\"), sanitize_next(Some(\"/2017/7/15\")))\n\n}\n\n\n", "file_path": "src/server/login.rs", "rank": 72, "score": 59748.29904204185 }, { "content": "fn get_or_create_place(\n\n c: &PgConnection,\n\n t_osm_id: i64,\n\n name: &str,\n\n level: i16,\n\n) -> Result<Place, diesel::result::Error> {\n\n use crate::schema::places::dsl::*;\n\n places\n\n .filter(\n\n osm_id\n\n .eq(Some(t_osm_id))\n\n .or(place_name.eq(name).and(osm_id.is_null())),\n\n )\n\n .first::<Place>(c)\n\n .or_else(|_| {\n\n let mut result = 
diesel::insert_into(places)\n\n .values((\n\n place_name.eq(&name),\n\n slug.eq(&slugify(&name)),\n\n osm_id.eq(Some(t_osm_id)),\n", "file_path": "src/fetch_places.rs", "rank": 73, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn parse_bad_imgname_1() {\n\n assert_eq!(\"4711-q.jpg\".parse::<ImgName>(), Err(BadImgName {}))\n\n}\n", "file_path": "src/server/image.rs", "rank": 74, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn test_sanitize_bad_3() {\n\n assert_eq!(None, sanitize_next(Some(\"/evil\\\"hack\")))\n\n}\n", "file_path": "src/server/login.rs", "rank": 75, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn parse_bad_imgname_2() {\n\n assert_eq!(\"blurgel\".parse::<ImgName>(), Err(BadImgName {}))\n\n}\n\n\n\nasync fn get_image_data(\n\n context: &Context,\n\n photo: &Photo,\n\n size: SizeTag,\n\n) -> Result<Vec<u8>, ImageLoadFailed> {\n\n let p = context.photos().get_raw_path(photo);\n\n let r = photo.rotation;\n\n context\n\n .cached_or(&photo.cache_key(size), || get_scaled_jpeg(p, r, size.px()))\n\n .await\n\n}\n", "file_path": "src/server/image.rs", "rank": 76, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn test_sanitize_bad_4() {\n\n assert_eq!(None, sanitize_next(Some(\"/evil'hack\")))\n\n}\n\n\n", "file_path": "src/server/login.rs", "rank": 77, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn split_group_same_time() {\n\n let photos = [\n\n Photo::mock(2018, 08, 31, 21, 45, 22),\n\n Photo::mock(2018, 08, 31, 21, 45, 22),\n\n Photo::mock(2018, 08, 31, 21, 45, 22),\n\n Photo::mock(2018, 08, 31, 21, 45, 22),\n\n ];\n\n assert_eq!(paths(split(&photos)), paths((&photos[..2], &photos[2..])));\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 78, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn test_sanitize_good_1() {\n\n assert_eq!(Some(\"/foo/\"), sanitize_next(Some(\"/foo/\")))\n\n}\n", "file_path": "src/server/login.rs", "rank": 79, "score": 59748.29904204185 }, { "content": "#[test]\n\nfn split_group_by_time() {\n\n let photos = [\n\n Photo::mock(2018, 08, 31, 21, 45, 22),\n\n Photo::mock(2018, 08, 31, 21, 45, 20),\n\n Photo::mock(2018, 08, 31, 21, 45, 18),\n\n Photo::mock(2018, 08, 31, 21, 45, 16),\n\n Photo::mock(2018, 08, 31, 21, 45, 14),\n\n Photo::mock(2018, 08, 31, 21, 45, 12),\n\n Photo::mock(2018, 08, 31, 21, 45, 10),\n\n Photo::mock(2018, 08, 15, 13, 15, 0),\n\n Photo::mock(2018, 08, 15, 13, 14, 0),\n\n ];\n\n assert_eq!(paths(split(&photos)), paths((&photos[..7], &photos[7..])));\n\n}\n\n\n", "file_path": "src/server/splitlist.rs", "rank": 80, "score": 59748.29904204185 }, { "content": "/// Get the current value for jwt NumericDate.\n\n///\n\n/// Defined in RFC 7519 section 2 to be equivalent to POSIX.1 \"Seconds\n\n/// Since the Epoch\". 
The RFC allows a NumericDate to be non-integer\n\n/// (for sub-second resolution), but the jwt crate uses u64.\n\nfn current_numeric_date() -> u64 {\n\n use std::time::{SystemTime, UNIX_EPOCH};\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .unwrap()\n\n .as_secs()\n\n}\n", "file_path": "src/server/context.rs", "rank": 81, "score": 56346.459545305144 }, { "content": "fn main() -> Result<(), RucteError> {\n\n let mut ructe = Ructe::from_env()?;\n\n let mut statics = ructe.statics()?;\n\n statics.add_sass_file(\"res/photos.scss\")?;\n\n statics.add_file(\"res/admin.js\")?;\n\n statics.add_file(\"res/ux.js\")?;\n\n statics.add_files_as(\"res/leaflet-1.4.0\", \"l140\")?;\n\n statics.add_files_as(\"res/leaflet-cluster-1.4.1\", \"lm141\")?;\n\n ructe.compile_templates(\"templates\")?;\n\n Ok(())\n\n}\n", "file_path": "src/build.rs", "rank": 82, "score": 55384.79920040723 }, { "content": "fn redirect(url: &str) -> Response {\n\n Builder::new().redirect(url)\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 83, "score": 52607.91798203776 }, { "content": "fn not_found(context: &Context) -> Response {\n\n Builder::new()\n\n .status(StatusCode::NOT_FOUND)\n\n .html(|o| {\n\n templates::not_found(\n\n o,\n\n context,\n\n StatusCode::NOT_FOUND,\n\n \"The resource you requested could not be located.\",\n\n )\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 84, "score": 52607.91798203776 }, { "content": "fn place_all(context: Context) -> Response {\n\n use crate::schema::places::dsl::{id, place_name, places};\n\n let query = places.into_boxed();\n\n let query = if context.is_authorized() {\n\n query\n\n } else {\n\n use crate::schema::photo_places::dsl as pp;\n\n use crate::schema::photos::dsl as p;\n\n query.filter(id.eq_any(pp::photo_places.select(pp::place_id).filter(\n\n pp::photo_id.eq_any(p::photos.select(p::id).filter(p::is_public)),\n\n )))\n\n };\n\n Builder::new()\n\n .html(|o| {\n\n templates::places(\n\n o,\n\n &context,\n\n &query\n\n .order(place_name)\n\n .load(&context.db().unwrap())\n\n .expect(\"List places\"),\n\n )\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/server/views_by_category.rs", "rank": 85, "score": 51630.03678911227 }, { "content": "fn permission_denied() -> Result<Response, Rejection> {\n\n error_response(StatusCode::UNAUTHORIZED)\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 86, "score": 51630.03678911227 }, { "content": "fn person_all(context: Context) -> Response {\n\n use crate::schema::people::dsl::{id, people, person_name};\n\n let query = people.into_boxed();\n\n let query = if context.is_authorized() {\n\n query\n\n } else {\n\n use crate::schema::photo_people::dsl as pp;\n\n use crate::schema::photos::dsl as p;\n\n query.filter(id.eq_any(pp::photo_people.select(pp::person_id).filter(\n\n pp::photo_id.eq_any(p::photos.select(p::id).filter(p::is_public)),\n\n )))\n\n };\n\n Builder::new()\n\n .html(|o| {\n\n templates::people(\n\n o,\n\n &context,\n\n &query\n\n .order(person_name)\n\n .load(&context.db().unwrap())\n\n .expect(\"list people\"),\n\n )\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/server/views_by_category.rs", "rank": 87, "score": 51630.03678911227 }, { "content": "fn tag_all(context: Context) -> Response {\n\n use crate::schema::tags::dsl::{id, tag_name, tags};\n\n let query = tags.order(tag_name).into_boxed();\n\n let query = if context.is_authorized() {\n\n query\n\n } else {\n\n use crate::schema::photo_tags::dsl as tp;\n\n use crate::schema::photos::dsl as p;\n\n 
query.filter(id.eq_any(tp::photo_tags.select(tp::tag_id).filter(\n\n tp::photo_id.eq_any(p::photos.select(p::id).filter(p::is_public)),\n\n )))\n\n };\n\n Builder::new()\n\n .html(|o| {\n\n templates::tags(\n\n o,\n\n &context,\n\n &query.load(&context.db().unwrap()).expect(\"List tags\"),\n\n )\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/server/views_by_category.rs", "rank": 88, "score": 51630.03678911227 }, { "content": "fn random_password(len: usize) -> String {\n\n let rng = thread_rng();\n\n // Note; I would like to have lowercase letters more probable\n\n use rand::distributions::Alphanumeric;\n\n String::from_utf8(rng.sample_iter(&Alphanumeric).take(len).collect())\n\n .unwrap()\n\n}\n", "file_path": "src/adm/users.rs", "rank": 89, "score": 51630.03678911227 }, { "content": "fn redirect_to_img(image: i32) -> Response {\n\n redirect(&format!(\"/img/{}\", image))\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 90, "score": 51630.03678911227 }, { "content": "fn random_image(context: Context) -> Response {\n\n use crate::schema::photos::dsl::id;\n\n use diesel::expression::dsl::sql;\n\n use diesel::sql_types::Integer;\n\n if let Ok(photo) = Photo::query(context.is_authorized())\n\n .select(id)\n\n .limit(1)\n\n .order(sql::<Integer>(\"random()\"))\n\n .first(&context.db().unwrap())\n\n {\n\n info!(\"Random: {:?}\", photo);\n\n redirect_to_img(photo)\n\n } else {\n\n not_found(&context)\n\n }\n\n}\n\n\n", "file_path": "src/server/mod.rs", "rank": 91, "score": 51630.03678911227 }, { "content": "fn osm_id(obj: &Value) -> Option<i64> {\n\n obj.get(\"id\").and_then(Value::as_i64)\n\n}\n\n\n\n#[rustfmt::skip] // This data is written in a more compact style\n\nstatic KNOWN: [(&str, &[(&str, i16)]); 16] = [\n\n (\"leisure\", &[\n\n (\"garden\", 18),\n\n (\"nature_reserve\", 12),\n\n (\"park\", 14),\n\n (\"pitch\", 15),\n\n (\"playground\", 16),\n\n (\"water_park\", 14),\n\n (\"fitness_station\", 17),\n\n ]),\n\n (\"tourism\", &[\n\n (\"attraction\", 16),\n\n (\"theme_park\", 14),\n\n (\"zoo\", 14),\n\n ]),\n", "file_path": "src/fetch_places.rs", "rank": 92, "score": 49209.790877037914 }, { "content": "fn start_of_year(year: i32) -> NaiveDateTime {\n\n NaiveDate::from_ymd(year, 1, 1).and_hms(0, 0, 0)\n\n}\n\n\n", "file_path": "src/server/views_by_date.rs", "rank": 93, "score": 49052.03769094951 }, { "content": "fn single_ascii(value: &Value) -> Result<&str, Error> {\n\n match value {\n\n &Value::Ascii(ref v) if v.len() == 1 => Ok(from_utf8(&v[0])?),\n\n &Value::Ascii(ref v) if v.len() > 1 => {\n\n for t in &v[1..] 
{\n\n if !t.is_empty() {\n\n return Err(Error::Other(format!(\n\n \"Got {:?}, expected single ascii value\",\n\n v,\n\n )));\n\n }\n\n }\n\n Ok(from_utf8(&v[0])?)\n\n }\n\n v => Err(Error::Other(format!(\n\n \"Got {:?}, expected single ascii value\",\n\n v,\n\n ))),\n\n }\n\n}\n", "file_path": "src/myexif.rs", "rank": 94, "score": 47887.238887475 }, { "content": "fn is_string(f: &Field, tag: Tag) -> Option<&str> {\n\n if f.tag == tag {\n\n match single_ascii(&f.value) {\n\n Ok(s) => Some(s),\n\n Err(err) => {\n\n println!(\"ERROR: Expected string for {}: {:?}\", tag, err);\n\n None\n\n }\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/myexif.rs", "rank": 95, "score": 47630.95472212325 }, { "content": "fn is_u32(f: &Field, tag: Tag) -> Option<u32> {\n\n if f.tag == tag {\n\n match &f.value {\n\n &Value::Long(ref v) if v.len() == 1 => Some(v[0]),\n\n &Value::Short(ref v) if v.len() == 1 => Some(u32::from(v[0])),\n\n v => {\n\n println!(\"ERROR: Unsuppored value for {}: {:?}\", tag, v);\n\n None\n\n }\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/myexif.rs", "rank": 96, "score": 47630.95472212325 }, { "content": "fn sanitize_next(next: Option<&str>) -> Option<&str> {\n\n if let Some(next) = next {\n\n use regex::Regex;\n\n let re = Regex::new(r\"^/([a-z0-9._-]+/?)*$\").unwrap();\n\n if re.is_match(next) {\n\n return Some(next);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/server/login.rs", "rank": 97, "score": 47029.63890509808 }, { "content": "fn rotate(context: Context, form: RotateForm) -> Response {\n\n if !context.is_authorized() {\n\n return permission_denied().unwrap();\n\n }\n\n info!(\"Should rotate #{} by {}\", form.image, form.angle);\n\n use crate::schema::photos::dsl::photos;\n\n let c = context.db().unwrap();\n\n let c: &PgConnection = &c;\n\n if let Ok(mut image) = photos.find(form.image).first::<Photo>(c) {\n\n let newvalue = (360 + image.rotation + form.angle) % 360;\n\n info!(\"Rotation was {}, setting to {}\", image.rotation, newvalue);\n\n image.rotation = newvalue;\n\n match image.save_changes::<Photo>(c) {\n\n Ok(image) => {\n\n context.clear_cache(&image.cache_key(SizeTag::Small));\n\n context.clear_cache(&image.cache_key(SizeTag::Medium));\n\n return Builder::new().body(\"ok\".into()).unwrap();\n\n }\n\n Err(error) => {\n\n warn!(\"Failed to save image #{}: {}\", image.id, error);\n\n }\n\n }\n\n }\n\n not_found(&context)\n\n}\n\n\n", "file_path": "src/server/admin.rs", "rank": 98, "score": 47029.63890509808 }, { "content": "fn name_and_level(obj: &Value) -> Option<(&str, i16)> {\n\n let tags = obj.get(\"tags\")?;\n\n let name = tags\n\n .get(\"name:sv\")\n\n // .or_else(|| tags.get(\"name:en\"))\n\n .or_else(|| tags.get(\"name\"))\n\n .and_then(Value::as_str)?;\n\n let level = tags\n\n .get(\"admin_level\")\n\n .and_then(Value::as_str)\n\n .and_then(|l| l.parse().ok())\n\n .or_else(|| {\n\n KNOWN\n\n .iter()\n\n .find_map(|(name, values)| tag_level(tags, name, values))\n\n })?;\n\n\n\n debug!(\"{} is level {}\", name, level);\n\n Some((name, level))\n\n}\n\n\n", "file_path": "src/fetch_places.rs", "rank": 99, "score": 47029.63890509808 } ]
Rust
src/main.rs
Masorubka1/rs_graph_system
ac51d9ccdbd7f60996804287e527d9633fa5d4e9
use std::collections::HashSet;
use std::thread::sleep_ms;
use std::collections::HashMap;
use rs_graph_system::ThreadPool;
use petgraph::Graph;
use petgraph::adj::NodeIndex;
use petgraph::adj::IndexType;
use std::collections::VecDeque;

#[derive(Copy, Clone)]
pub struct Xz {
    Xz: usize
}

impl Xz {
    fn new(num: usize) -> Xz {
        Xz {Xz: num}
    }
}

#[derive(Clone)]
pub struct InfoNode<'a, T, V> {
    func: fn(&HashMap<&'a str, T>) -> V,
    args: HashMap<&'a str, T>,
    res: Box<V>
}

impl<'a> InfoNode<'a, usize, Xz> {
    fn new(name: fn(&HashMap<&'a str, usize>) -> Xz, Args: HashMap<&'a str, usize>) -> InfoNode<'a, usize, Xz> {
        InfoNode {func: name, args: Args, res: Box::new(Xz::new(0))}
    }
    fn execute(helper: InfoNode<'a, usize, Xz>) -> Box<Xz> {
        Box::new((helper.func)(&helper.args))
    }
    fn execute_self(mut self) {
        self.res = Box::new((self.func)(&self.args));
    }
}

impl<'a> Default for InfoNode<'a, usize, Xz> {
    fn default() -> Self {
        InfoNode {
            func: do_smth_2,
            args: HashMap::<&'a str, usize>::new(),
            res: Box::<Xz>::new(Xz::new(0))
        }
    }
}

fn test_build_graph(deps: &mut Graph<InfoNode<usize, Xz>, &str>) -> HashMap<usize, NodeIndex> {
    let mut first_h = HashMap::new();
    first_h.insert("name", 1);
    let mut second_h = HashMap::new();
    second_h.insert("name", 2);
    let mut third_h = HashMap::new();
    third_h.insert("name", 3);
    let mut fourth_h = HashMap::new();
    fourth_h.insert("name", 4);
    let mut thith_h = HashMap::new();
    thith_h.insert("name", 5);
    let first = InfoNode::new(do_smth_2, first_h);
    let second = InfoNode::new(do_smth_2, second_h);
    let third = InfoNode::new(do_smth_2, third_h);
    let fourth = InfoNode::new(do_smth_2, fourth_h);
    let thith = InfoNode::new(do_smth_2, thith_h);
    let arr = vec![first, second, third, fourth, thith];
    let mut list_nodes = HashMap::<usize, NodeIndex>::new();
    let mut tmp_cnt = 0;
    for i in arr {
        list_nodes.insert(tmp_cnt, deps.add_node(i).index().try_into().unwrap());
        tmp_cnt += 1;
    }
    let pg = list_nodes[&0];
    let fb = list_nodes[&1];
    let qc = list_nodes[&2];
    let rand = list_nodes[&3];
    let libc = list_nodes[&4];
    deps.extend_with_edges(&[
        (pg, fb),
        (pg, qc),
        (qc, rand),
        (rand, libc),
        (qc, libc),
    ]);
    list_nodes
}

fn do_smth_2(tmp: &HashMap<&str, usize>) -> Xz {
    println!("{}", tmp["name"]);
    sleep_ms(400);
    Xz::new(5)
}

fn timesort(deps: &Graph::<InfoNode<usize, Xz>, &str>, ind: NodeIndex) -> Vec<isize> {
    let mut hash_nodes = HashSet::<usize>::new();
    let mut queue_nodes = VecDeque::<usize>::new();
    let mut ans = Vec::<isize>::new();
    for _ in 0..deps.node_count() {
        ans.push(-1);
    }
    let mut cnt = 0;
    queue_nodes.push_back(ind.index().try_into().unwrap());
    while queue_nodes.len() != 0 {
        let node = queue_nodes.pop_front().unwrap();
        let tmp_node_index = NodeIndex::new(node);
        hash_nodes.insert(node);
        for i in deps.neighbors_directed(tmp_node_index, petgraph::EdgeDirection::Outgoing){
            if ans[i.index()] == -1 {
                queue_nodes.push_back(i.index());
                hash_nodes.insert(i.index());
                ans[i.index()] = -2;
            } else {
                hash_nodes.remove(&i.index());
            }
        }
        let mut f = 0;
        for i in deps.neighbors_directed(tmp_node_index, petgraph::EdgeDirection::Incoming) {
            if hash_nodes.contains(&i.index()) {
                f = 1;
                break;
            }
        }
        if f == 1{
            queue_nodes.push_back(node);
        } else {
            ans[node] = cnt;
            cnt += 1;
            hash_nodes.remove(&node);
        }
    }
    ans
}

fn main() {
    let mut deps = Graph::<InfoNode<usize, Xz>, &str>::new();
    let pool = ThreadPool::new(4);
    let list_nodes;
    {
        list_nodes = test_build_graph(&mut deps);
    }
    let sorted_nodes = timesort(&deps, NodeIndex::new(0));
    println!("{:?}", sorted_nodes);
    for i in sorted_nodes {
        let id = list_nodes[&i.try_into().unwrap()];
        let node_id = NodeIndex::new(id.try_into().unwrap());
        {
            let info_node = deps.node_weight(node_id).unwrap().to_owned();
            pool.execute(move || {
                info_node.execute_self();
            });
        }
    }
}
use std::collections::HashSet;
use std::thread::sleep_ms;
use std::collections::HashMap;
use rs_graph_system::ThreadPool;
use petgraph::Graph;
use petgraph::adj::NodeIndex;
use petgraph::adj::IndexType;
use std::collections::VecDeque;

#[derive(Copy, Clone)]
pub struct Xz {
    Xz: usize
}

impl Xz {
    fn new(num: usize) -> Xz {
        Xz {Xz: num}
    }
}

#[derive(Clone)]
pub struct InfoNode<'a, T, V> {
    func: fn(&HashMap<&'a str, T>) -> V,
    args: HashMap<&'a str, T>,
    res: Box<V>
}

impl<'a> InfoNode<'a, usize, Xz> {
    fn new(name: fn(&HashMap<&'a str, usize>) -> Xz, Args: HashMap<&'a str, usize>) -> InfoNode<'a, usize, Xz> {
        InfoNode {func: name, args: Args, res: Box::new(Xz::new(0))}
    }
    fn execute(helper: InfoNode<'a, usize, Xz>) -> Box<Xz> {
        Box::new((helper.func)(&helper.args))
    }
    fn execute_self(mut self) {
        self.res = Box::new((self.func)(&self.args));
    }
}

impl<'a> Default for InfoNode<'a, usize, Xz> {
    fn default() -> Self {
        InfoNode {
            func: do_smth_2,
            args: HashMap::<&'a str, usize>::new(),
            res: Box::<Xz>::new(Xz::new(0))
        }
    }
}

fn test_build_graph(deps: &mut Graph<InfoNode<usize, Xz>, &str>) -> HashMap<usize, NodeIndex>
);
    let third = InfoNode::new(do_smth_2, third_h);
    let fourth = InfoNode::new(do_smth_2, fourth_h);
    let thith = InfoNode::new(do_smth_2, thith_h);
    let arr = vec![first, second, third, fourth, thith];
    let mut list_nodes = HashMap::<usize, NodeIndex>::new();
    let mut tmp_cnt = 0;
    for i in arr {
        list_nodes.insert(tmp_cnt, deps.add_node(i).index().try_into().unwrap());
        tmp_cnt += 1;
    }
    let pg = list_nodes[&0];
    let fb = list_nodes[&1];
    let qc = list_nodes[&2];
    let rand = list_nodes[&3];
    let libc = list_nodes[&4];
    deps.extend_with_edges(&[
        (pg, fb),
        (pg, qc),
        (qc, rand),
        (rand, libc),
        (qc, libc),
    ]);
    list_nodes
}

fn do_smth_2(tmp: &HashMap<&str, usize>) -> Xz {
    println!("{}", tmp["name"]);
    sleep_ms(400);
    Xz::new(5)
}

fn timesort(deps: &Graph::<InfoNode<usize, Xz>, &str>, ind: NodeIndex) -> Vec<isize> {
    let mut hash_nodes = HashSet::<usize>::new();
    let mut queue_nodes = VecDeque::<usize>::new();
    let mut ans = Vec::<isize>::new();
    for _ in 0..deps.node_count() {
        ans.push(-1);
    }
    let mut cnt = 0;
    queue_nodes.push_back(ind.index().try_into().unwrap());
    while queue_nodes.len() != 0 {
        let node = queue_nodes.pop_front().unwrap();
        let tmp_node_index = NodeIndex::new(node);
        hash_nodes.insert(node);
        for i in deps.neighbors_directed(tmp_node_index, petgraph::EdgeDirection::Outgoing){
            if ans[i.index()] == -1 {
                queue_nodes.push_back(i.index());
                hash_nodes.insert(i.index());
                ans[i.index()] = -2;
            } else {
                hash_nodes.remove(&i.index());
            }
        }
        let mut f = 0;
        for i in deps.neighbors_directed(tmp_node_index, petgraph::EdgeDirection::Incoming) {
            if hash_nodes.contains(&i.index()) {
                f = 1;
                break;
            }
        }
        if f == 1{
            queue_nodes.push_back(node);
        } else {
            ans[node] = cnt;
            cnt += 1;
            hash_nodes.remove(&node);
        }
    }
    ans
}

fn main() {
    let mut deps = Graph::<InfoNode<usize, Xz>, &str>::new();
    let pool = ThreadPool::new(4);
    let list_nodes;
    {
        list_nodes = test_build_graph(&mut deps);
    }
    let sorted_nodes = timesort(&deps, NodeIndex::new(0));
    println!("{:?}", sorted_nodes);
    for i in sorted_nodes {
        let id = list_nodes[&i.try_into().unwrap()];
        let node_id = NodeIndex::new(id.try_into().unwrap());
        {
            let info_node = deps.node_weight(node_id).unwrap().to_owned();
            pool.execute(move || {
                info_node.execute_self();
            });
        }
    }
}
{
    let mut first_h = HashMap::new();
    first_h.insert("name", 1);
    let mut second_h = HashMap::new();
    second_h.insert("name", 2);
    let mut third_h = HashMap::new();
    third_h.insert("name", 3);
    let mut fourth_h = HashMap::new();
    fourth_h.insert("name", 4);
    let mut thith_h = HashMap::new();
    thith_h.insert("name", 5);
    let first = InfoNode::new(do_smth_2, first_h);
    let second = InfoNode::new(do_smth_2, second_h
random
[ { "content": "struct Worker {\n\n id: usize,\n\n thread: Option<thread::JoinHandle<()>>,\n\n}\n\n\n\nimpl Worker {\n\n fn new(id: usize, receiver: Arc<Mutex<mpsc::Receiver<Message>>>) -> Worker {\n\n let thread = thread::spawn(move || loop {\n\n let message = receiver.lock().unwrap().recv().unwrap();\n\n\n\n match message {\n\n Message::NewJob(job) => {\n\n println!(\"Worker {} got a job; executing.\", id);\n\n\n\n job();\n\n }\n\n Message::Terminate => {\n\n println!(\"Worker {} was told to terminate.\", id);\n\n\n\n break;\n\n }\n\n }\n\n });\n\n\n\n Worker {\n\n id,\n\n thread: Some(thread),\n\n }\n\n }\n\n}", "file_path": "src/lib.rs", "rank": 3, "score": 27866.66717351393 }, { "content": "type Job = Box<dyn FnOnce() + Send + 'static>;\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 10109.772150986919 }, { "content": " pub fn execute<F>(&self, f: F)\n\n where\n\n F: FnOnce() + Send + 'static,\n\n {\n\n let job = Box::new(f);\n\n self.sender.send(Message::NewJob(job)).unwrap();\n\n }\n\n}\n\n\n\nimpl Drop for ThreadPool {\n\n fn drop(&mut self) {\n\n println!(\"Sending terminate message to all workers.\");\n\n\n\n for _ in &self.workers {\n\n self.sender.send(Message::Terminate).unwrap();\n\n }\n\n\n\n println!(\"Shutting down all workers.\");\n\n\n\n for worker in &mut self.workers {\n\n println!(\"Shutting down worker {}\", worker.id);\n\n\n\n if let Some(thread) = worker.thread.take() {\n\n thread.join().unwrap();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 8, "score": 2.367480604092026 }, { "content": "use std::sync::mpsc;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse std::thread;\n\n\n\npub struct ThreadPool {\n\n workers: Vec<Worker>,\n\n sender: mpsc::Sender<Message>,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 2.22151698599895 } ]
Rust
src/colln_paint/mod.rs
pwil3058/rs_epaint
abde42728f65fc166df6416df8b3c3067faf51ed
use std::cell::RefCell; use std::cmp::Ordering; use std::fmt; use std::fmt::Debug; use std::fs::File; use std::hash::*; use std::io::Read; use std::marker::PhantomData; use std::path::Path; use std::rc::Rc; use std::str::FromStr; use pw_gix::{ gtk::{self, prelude::*}, wrapper::*, }; pub mod binder; pub mod collection; pub mod display; pub mod editor; use crate::basic_paint::*; use crate::colour::*; use crate::error::*; pub use crate::struct_traits::SimpleCreation; pub trait CollnIdInterface: Debug + PartialEq + PartialOrd + Eq + Ord + Clone + Default + Hash { fn new(colln_name: &str, colln_owner: &str) -> Self; fn colln_name_label() -> String; fn colln_owner_label() -> String; fn paint_select_label() -> String; fn paint_select_tooltip_text() -> String; fn recollection_name_for(item_name: &str) -> String; fn colln_load_image(size: i32) -> gtk::Image; fn display_current_target() -> bool { true } fn colln_name(&self) -> String; fn colln_owner(&self) -> String; fn tooltip_text(&self) -> String { format!("{}\n({})", self.colln_name(), self.colln_owner()) } fn rc_new(colln_name: &str, colln_owner: &str) -> Rc<Self> { Rc::new(Self::new(colln_name, colln_owner)) } } #[derive(PWO, Wrapper)] pub struct CollnIdEntryData<CID> where CID: CollnIdInterface, { grid: gtk::Grid, colln_name_entry: gtk::Entry, colln_owner_entry: gtk::Entry, changed_callbacks: RefCell<Vec<Box<dyn Fn()>>>, phantom_data: PhantomData<CID>, } pub type CollnIdEntry<CID> = Rc<CollnIdEntryData<CID>>; impl<CID> SimpleCreation for CollnIdEntry<CID> where CID: CollnIdInterface + 'static, { fn create() -> CollnIdEntry<CID> { let psie = Rc::new(CollnIdEntryData { grid: gtk::Grid::new(), colln_owner_entry: gtk::Entry::new(), colln_name_entry: gtk::Entry::new(), changed_callbacks: RefCell::new(Vec::new()), phantom_data: PhantomData, }); let label = gtk::Label::new(Some(CID::colln_name_label().as_str())); label.set_halign(gtk::Align::End); psie.grid.attach(&label, 0, 0, 1, 1); psie.colln_name_entry.set_hexpand(true); psie.grid.attach_next_to( &psie.colln_name_entry.clone(), Some(&label), gtk::PositionType::Right, 1, 1, ); let label = gtk::Label::new(Some(CID::colln_owner_label().as_str())); label.set_halign(gtk::Align::End); psie.grid.attach(&label, 0, 1, 1, 1); psie.colln_owner_entry.set_hexpand(true); psie.grid.attach_next_to( &psie.colln_owner_entry.clone(), Some(&label), gtk::PositionType::Right, 1, 1, ); let psie_c = psie.clone(); psie.colln_name_entry .connect_changed(move |_| psie_c.inform_changed()); let psie_c = psie.clone(); psie.colln_owner_entry .connect_changed(move |_| psie_c.inform_changed()); psie } } impl<CID> CollnIdEntryData<CID> where CID: CollnIdInterface, { pub fn get_colln_id(&self) -> Option<Rc<CID>> { let colln_name = self.colln_name_entry.get_text(); if colln_name.len() > 0 { let colln_owner = self.colln_owner_entry.get_text(); if colln_owner.len() > 0 { return Some(CID::rc_new(&colln_name, &colln_owner)); } }; None } pub fn set_colln_id(&self, o_cid: Option<&Rc<CID>>) { if let Some(cid) = o_cid { self.colln_name_entry.set_text(&cid.colln_name()); self.colln_owner_entry.set_text(&cid.colln_owner()); } else { self.colln_name_entry.set_text(""); self.colln_owner_entry.set_text(""); } } pub fn connect_changed<F: 'static + Fn()>(&self, callback: F) { self.changed_callbacks.borrow_mut().push(Box::new(callback)); } fn inform_changed(&self) { for callback in self.changed_callbacks.borrow().iter() { callback() } } } #[derive(Debug, Clone)] pub struct CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: 
CollnIdInterface, { colln_id: Rc<CID>, paint: BasicPaint<C>, } impl<C, CID> PartialEq for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn eq(&self, other: &CollnPaintCore<C, CID>) -> bool { if self.colln_id != other.colln_id { false } else { self.paint == other.paint } } } impl<C, CID> Eq for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { } impl<C, CID> PartialOrd for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn partial_cmp(&self, other: &CollnPaintCore<C, CID>) -> Option<Ordering> { if let Some(ordering) = self.colln_id.partial_cmp(&other.colln_id) { if ordering == Ordering::Equal { self.paint.partial_cmp(&other.paint) } else { Some(ordering) } } else { None } } } impl<C, CID> Ord for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn cmp(&self, other: &CollnPaintCore<C, CID>) -> Ordering { let ordering = self.colln_id.cmp(&other.colln_id); if ordering == Ordering::Equal { self.paint.cmp(&other.paint) } else { ordering } } } pub type CollnPaint<C, CID> = Rc<CollnPaintCore<C, CID>>; impl<C, CID> ColouredItemInterface for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn colour(&self) -> Colour { self.paint.colour() } } impl<C, CID> BasicPaintInterface<C> for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn name(&self) -> String { self.paint.name() } fn notes(&self) -> String { self.paint.notes() } fn tooltip_text(&self) -> String { format!( "{}\n{}", self.paint.tooltip_text(), self.colln_id.tooltip_text() ) } fn characteristics(&self) -> C { self.paint.characteristics() } } pub trait CollnPaintInterface<C, CID>: BasicPaintInterface<C> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn create(paint: &BasicPaint<C>, cid: &Rc<CID>) -> Self; fn colln_id(&self) -> Rc<CID>; } impl<C, CID> CollnPaintInterface<C, CID> for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn create(paint: &BasicPaint<C>, cid: &Rc<CID>) -> CollnPaint<C, CID> { Rc::new(CollnPaintCore::<C, CID> { colln_id: cid.clone(), paint: paint.clone(), }) } fn colln_id(&self) -> Rc<CID> { self.colln_id.clone() } } #[derive(Debug)] pub struct PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { pub colln_id: Rc<CID>, pub paint_specs: Vec<BasicPaintSpec<C>>, } impl<C, CID> PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { pub fn from_file(path: &Path) -> Result<PaintCollnSpec<C, CID>, PaintError<C>> { let mut file = File::open(path)?; let mut string = String::new(); file.read_to_string(&mut string)?; PaintCollnSpec::<C, CID>::from_str(string.as_str()) } pub fn get_index_for_name(&self, name: &str) -> Option<usize> { match self .paint_specs .binary_search_by_key(&name.to_string(), |spec| spec.name.clone()) { Ok(index) => Some(index), Err(_) => None, } } } impl<C, CID> FromStr for PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { type Err = PaintError<C>; fn from_str(string: &str) -> Result<PaintCollnSpec<C, CID>, PaintError<C>> { let mut lines = string.lines(); let mut colln_name = ""; let mut colln_owner = ""; for _ in 0..2 { if let Some(line) = lines.next() { if line.starts_with(&CID::colln_name_label()) { if let Some(tail) = line.get(CID::colln_name_label().len()..) 
{ colln_name = tail.trim(); } } else if line.starts_with(&CID::colln_owner_label()) { if let Some(tail) = line.get(CID::colln_owner_label().len()..) { colln_owner = tail.trim(); } } else { return Err(PaintErrorType::MalformedText(line.to_string()).into()); } } else { return Err(PaintErrorType::MalformedText(string.to_string()).into()); } } if colln_name.len() == 0 || colln_owner.len() == 0 { return Err(PaintErrorType::MalformedText(string.to_string()).into()); }; let colln_id = Rc::new(CID::new(colln_name, colln_owner)); let mut paint_specs: Vec<BasicPaintSpec<C>> = Vec::new(); for line in lines { let spec = BasicPaintSpec::<C>::from_str(line)?; match paint_specs.binary_search_by_key(&spec.name, |bps| bps.name.clone()) { Ok(_) => return Err(PaintErrorType::AlreadyExists(spec.name).into()), Err(index) => paint_specs.insert(index, spec), } } let psc = PaintCollnSpec::<C, CID> { colln_id, paint_specs, }; Ok(psc) } } impl<C, CID> fmt::Display for PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "{} {}\n", CID::colln_name_label(), self.colln_id.colln_name() )?; write!( f, "{} {}\n", CID::colln_owner_label(), self.colln_id.colln_owner() )?; for paint_spec in self.paint_specs.iter() { write!(f, "{}\n", paint_spec)?; } Ok(()) } } #[cfg(test)] mod tests { }
use std::cell::RefCell; use std::cmp::Ordering; use std::fmt; use std::fmt::Debug; use std::fs::File; use std::hash::*; use std::io::Read; use std::marker::PhantomData; use std::path::Path; use std::rc::Rc; use std::str::FromStr; use pw_gix::{ gtk::{self, prelude::*}, wrapper::*, }; pub mod binder; pub mod collection; pub mod display; pub mod editor; use crate::basic_paint::*; use crate::colour::*; use crate::error::*; pub use crate::struct_traits::SimpleCreation; pub trait CollnIdInterface: Debug + PartialEq + PartialOrd + Eq + Ord + Clone + Default + Hash { fn new(colln_name: &str, colln_owner: &str) -> Self; fn colln_name_label() -> String; fn colln_owner_label() -> String; fn paint_select_label() -> String; fn paint_select_tooltip_text() -> String; fn recollection_name_for(item_name: &str) -> String; fn colln_load_image(size: i32) -> gtk::Image; fn display_current_target() -> bool { true } fn colln_name(&self) -> String; fn colln_owner(&self) -> String; fn tooltip_text(&self) -> String { format!("{}\n({})", self.colln_name(), self.colln_owner()) } fn rc_new(colln_name: &str, colln_owner: &str) -> Rc<Self> { Rc::new(Self::new(colln_name, colln_owner)) } } #[derive(PWO, Wrapper)] pub struct CollnIdEntryData<CID> where CID: CollnIdInterface, { grid: gtk::Grid, colln_name_entry: gtk::Entry, colln_owner_entry: gtk::Entry, changed_callbacks: RefCell<Vec<Box<dyn Fn()>>>, phantom_data: PhantomData<CID>, } pub type CollnIdEntry<CID> = Rc<CollnIdEntryData<CID>>; impl<CID> SimpleCreation for CollnIdEntry<CID> where CID: CollnIdInterface + 'static, { fn create() -> CollnIdEntry<CID> { let psie = Rc::new(CollnIdEntryData { grid: gtk::Grid::new(), colln_owner_entry: gtk::Entry::new(), colln_name_entry: gtk::Entry::new(), changed_callbacks: RefCell::new(Vec::new()), phantom_data: PhantomData, }); let label = gtk::Label::new(Some(CID::colln_name_label().as_str())); label.set_halign(gtk::Align::End); psie.grid.attach(&label, 0, 0, 1, 1); psie.colln_name_entry.set_hexpand(true); psie.grid.attach_next_to( &psie.colln_name_entry.clone(), Some(&label), gtk::PositionType::Right, 1, 1, ); let label = gtk::Label::new(Some(CID::colln_owner_label().as_str())); label.set_halign(gtk::Align::End); psie.grid.attach(&label, 0, 1, 1, 1); psie.colln_owner_entry.set_hexpand(true); psie.grid.attach_next_to( &psie.colln_owner_entry.clone(), Some(&label), gtk::PositionType::Right, 1, 1, ); let psie_c = psie.clone(); psie.colln_name_entry .connect_changed(move |_| psie_c.inform_changed()); let psie_c = psie.clone(); psie.colln_owner_entry .connect_changed(move |_| psie_c.inform_changed()); psie } } impl<CID> CollnIdEntryData<CID> where CID: CollnIdInterface, { pub fn get_colln_id(&self) -> Option<Rc<CID>> { let colln_name = self.colln_name_entry.get_text(); if colln_name.len() > 0 { let colln_owner = self.colln_owner_entry.get_text(); if colln_owner.len() > 0 { return Some(CID::rc_new(&colln_name, &colln_owner)); } }; None } pub fn set_colln_id(&self, o_cid: Option<&Rc<CID>>) { if let Some(cid) = o_cid { self.colln_name_entry.set_text(&cid.colln_name()); self.colln_owner_entry.set_text(&cid.colln_owner()); } else { self.colln_name_entry.set_text(""); self.colln_owner_entry.set_text(""); } } pub fn connect_changed<F: 'static + Fn()>(&self, callback: F) { self.changed_callbacks.borrow_mut().push(Box::new(callback)); } fn inform_changed(&self) { for callback in self.changed_callbacks.borrow().iter() { callback() } } } #[derive(Debug, Clone)] pub struct CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: 
CollnIdInterface, { colln_id: Rc<CID>, paint: BasicPaint<C>, } impl<C, CID> PartialEq for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn eq(&self, other: &CollnPaintCore<C, CID>) -> bool { if self.colln_id != other.colln_id { false } else { self.paint == other.paint } } } impl<C, CID> Eq for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { } impl<C, CID> PartialOrd for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn partial_cmp(&self, other: &CollnPaintCore<C, CID>) -> Option<Ordering> { if let Some(ordering) = self.colln_id.partial_cmp(&other.colln_id) { if ordering == Ordering::Equal { self.paint.partial_cmp(&other.paint) } else { Some(ordering) } } else { None } } } impl<C, CID> Ord for CollnPaintCore<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn cmp(&self, other: &CollnPaintCore<C, CID>) -> Ordering { let ordering = self.colln_id.cmp(&other.colln_id); if ordering == Ordering::Equal { self.paint.cmp(&other.paint) } else { ordering } } } pub type CollnPaint<C, CID> = Rc<CollnPaintCore<C, CID>>; impl<C, CID> ColouredItemInterface for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn colour(&self) -> Colour { self.paint.colour() } } impl<C, CID> BasicPaintInterface<C> for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn name(&self) -> String { self.paint.name() } fn notes(&self) -> String { self.paint.notes() } fn tooltip_text(&self) -> String { format!( "{}\n{}", self.paint.tooltip_text(), self.colln_id.tooltip_text() ) } fn characteristics(&self) -> C { self.paint.characteristics() } } pub trait CollnPaintInterface<C, CID>: BasicPaintInterface<C> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn create(paint: &BasicPaint<C>, cid: &Rc<CID>) -> Self; fn colln_id(&self) -> Rc<CID>; } impl<C, CID> CollnPaintInterface<C, CID> for CollnPaint<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn create(paint: &BasicPaint<C>, cid: &Rc<CID>) -> CollnPaint<C, CID> { Rc::new(CollnPaintCore::<C, CID> { colln_id: cid.clone(), paint: paint.clone(), }) } fn colln_id(&self) -> Rc<CID> { self.colln_id.clone() } } #[derive(Debug)] pub struct PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { pub colln_id: Rc<CID>, pub paint_specs: Vec<BasicPaintSpec<C>>, } impl<C, CID> PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { pub fn from_file(path: &Path) -> Result<PaintCollnSpec<C, CID>, PaintError<C>> { let mut file = File::open(path)?; let mut string = String::new(); file.read_to_string(&mut string)?; PaintCollnSpec::<C, CID>::from_str(string.as_str()) } pub fn get_index_for_name(&self, name: &str) -> Option<usize> { match self .paint_specs .binary_search_by_key(&name.to_string(), |spec| spec.name.clone()) { Ok(index) => Some(index), Err(_) => None, } } } impl<C, CID> FromStr for PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { type Err = PaintError<C>; fn from_str(string: &str) -> Result<PaintCollnSpec<C, CID>, PaintError<C>> { let mut lines = string.lines(); let mut colln_name = ""; let mut colln_owner = ""; for _ in 0..2 { if let Some(line) = lines.next() { if line.starts_with(&CID::colln_name_label()) { if let Some(tail) = line.get(CID::colln_name_label().len()..) 
{ colln_name = tail.trim(); } } else if line.starts_with(&CID::colln_owner_label()) { if let Some(tail) = line.get(CID::colln_owner_label().len()..) { colln_owner = tail.trim(); } } else { return Err(PaintErrorType::MalformedText(line.to_string()).into()); } } else { return Err(PaintErrorType::MalformedText(string.to_string()).into()); } }
} impl<C, CID> fmt::Display for PaintCollnSpec<C, CID> where C: CharacteristicsInterface, CID: CollnIdInterface, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "{} {}\n", CID::colln_name_label(), self.colln_id.colln_name() )?; write!( f, "{} {}\n", CID::colln_owner_label(), self.colln_id.colln_owner() )?; for paint_spec in self.paint_specs.iter() { write!(f, "{}\n", paint_spec)?; } Ok(()) } } #[cfg(test)] mod tests { }
if colln_name.len() == 0 || colln_owner.len() == 0 { return Err(PaintErrorType::MalformedText(string.to_string()).into()); }; let colln_id = Rc::new(CID::new(colln_name, colln_owner)); let mut paint_specs: Vec<BasicPaintSpec<C>> = Vec::new(); for line in lines { let spec = BasicPaintSpec::<C>::from_str(line)?; match paint_specs.binary_search_by_key(&spec.name, |bps| bps.name.clone()) { Ok(_) => return Err(PaintErrorType::AlreadyExists(spec.name).into()), Err(index) => paint_specs.insert(index, spec), } } let psc = PaintCollnSpec::<C, CID> { colln_id, paint_specs, }; Ok(psc) }
function_block-function_prefix_line
[ { "content": "pub trait BasicPaintInterface<C>: Clone + PartialEq + Ord + Debug + ColouredItemInterface\n\nwhere\n\n C: CharacteristicsInterface,\n\n{\n\n fn name(&self) -> String;\n\n fn notes(&self) -> String;\n\n fn tooltip_text(&self) -> String;\n\n fn characteristics(&self) -> C;\n\n\n\n fn get_spec(&self) -> BasicPaintSpec<C> {\n\n BasicPaintSpec::<C> {\n\n rgb: self.rgb(),\n\n name: self.name(),\n\n notes: self.notes(),\n\n characteristics: self.characteristics(),\n\n }\n\n }\n\n\n\n fn matches_spec(&self, spec: &BasicPaintSpec<C>) -> bool {\n\n if self.rgb() != spec.rgb {\n", "file_path": "src/basic_paint/mod.rs", "rank": 0, "score": 344566.2281021781 }, { "content": "pub trait CharacteristicsInterface: Debug + Hash + PartialEq + Clone + Copy + ToString {\n\n type Entry: CharacteristicsEntryInterface<Self>;\n\n\n\n fn tv_row_len() -> usize;\n\n fn tv_columns(start_col_id: i32) -> Vec<gtk::TreeViewColumn>;\n\n fn from_floats(floats: &Vec<f64>) -> Self;\n\n fn from_str(string: &str) -> Result<Self, PaintError<Self>>;\n\n\n\n fn tv_rows(&self) -> Vec<glib::Value>;\n\n fn gui_display_widget(&self) -> gtk::Box;\n\n fn to_floats(&self) -> Vec<f64>;\n\n}\n\n\n", "file_path": "src/basic_paint/mod.rs", "rank": 1, "score": 314895.9821377121 }, { "content": "#[derive(Debug)]\n\nstruct FileData<C, CID>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n pub path: PathBuf,\n\n pub spec: PaintCollnSpec<C, CID>,\n\n}\n\n\n\n#[derive(PWO, Wrapper)]\n\npub struct CollnPaintEditorCore<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n vbox: gtk::Box,\n\n h_paned: gtk::Paned,\n\n basic_paint_factory: BasicPaintFactoryDisplay<A, C>,\n\n paint_spec_entry: BasicPaintSpecEntry<A, C>,\n", "file_path": "src/colln_paint/editor.rs", "rank": 2, "score": 249274.8291929998 }, { "content": "pub trait CollnPaintCollnBinderInterface<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n fn create(data_path: &Path) -> CollnPaintCollnBinder<A, C, CID>;\n\n fn _insert_paint_colln(&self, spec: &PaintCollnSpec<C, CID>, path: &Path, index: usize);\n\n fn _add_paint_colln_from_file(&self, path: &Path);\n\n fn load_paint_colln_from_file(&self);\n\n}\n\n\n\nimpl<A, C, CID> CollnPaintCollnBinderInterface<A, C, CID> for CollnPaintCollnBinder<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n fn create(data_path: &Path) -> CollnPaintCollnBinder<A, C, CID> {\n\n let cpcb = Rc::new(CollnPaintCollnBinderCore::<A, C, CID> {\n", "file_path": "src/colln_paint/binder.rs", "rank": 3, "score": 241077.65685997682 }, { "content": "pub trait CollnPaintDisplayDialogInterface<A, C, CID>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n fn create<W: WidgetWrapper>(\n\n paint: &CollnPaint<C, CID>,\n\n current_target: Option<&Colour>,\n\n caller: &Rc<W>,\n\n button_specs: Vec<PaintDisplayButtonSpec>,\n\n ) -> CollnPaintDisplayDialog<A, C, CID>;\n\n}\n\n\n\nimpl<A, C, CID> PaintDisplayWithCurrentTarget<A, C, CollnPaint<C, CID>>\n\n for CollnPaintDisplayDialog<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n", 
"file_path": "src/colln_paint/display.rs", "rank": 4, "score": 240837.48359784155 }, { "content": "pub trait CollnPaintCollnInterface<C, CID>\n\nwhere\n\n C: CharacteristicsInterface,\n\n CID: CollnIdInterface,\n\n{\n\n fn from_spec(colln_spec: &PaintCollnSpec<C, CID>) -> CollnPaintColln<C, CID>;\n\n}\n\n\n\nimpl<C, CID> CollnPaintCollnInterface<C, CID> for CollnPaintColln<C, CID>\n\nwhere\n\n C: CharacteristicsInterface,\n\n CID: CollnIdInterface,\n\n{\n\n fn from_spec(colln_spec: &PaintCollnSpec<C, CID>) -> CollnPaintColln<C, CID> {\n\n let colln_id = colln_spec.colln_id.clone();\n\n let mut paints: Vec<CollnPaint<C, CID>> = Vec::new();\n\n for paint_spec in colln_spec.paint_specs.iter() {\n\n // Assume that the spec list is ordered and names are unique\n\n let basic_paint = BasicPaint::<C>::from_spec(paint_spec);\n\n let colln_paint = CollnPaint::<C, CID>::create(&basic_paint, &colln_id);\n", "file_path": "src/colln_paint/collection.rs", "rank": 6, "score": 239179.5111124133 }, { "content": "pub trait CollnPaintCollnWidgetInterface<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n fn create(colln_spec: &PaintCollnSpec<C, CID>) -> CollnPaintCollnWidget<A, C, CID>;\n\n}\n\n\n\nimpl<A, C, CID> CollnPaintCollnWidgetCore<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n pub fn colln_id(&self) -> Rc<CID> {\n\n self.paint_colln_view.colln_id()\n\n }\n\n\n\n fn inform_paint_selected(&self, paint: &CollnPaint<C, CID>) {\n", "file_path": "src/colln_paint/collection.rs", "rank": 7, "score": 229655.4102208983 }, { "content": "pub trait CollnPaintCollnViewInterface<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface,\n\n{\n\n fn create(colln: &CollnPaintColln<C, CID>) -> CollnPaintCollnView<A, C, CID>;\n\n}\n\n\n\nimpl<A, C, CID> CollnPaintCollnViewInterface<A, C, CID> for CollnPaintCollnView<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface,\n\n{\n\n fn create(colln: &CollnPaintColln<C, CID>) -> CollnPaintCollnView<A, C, CID> {\n\n let len = CollnPaint::<C, CID>::tv_row_len();\n\n let list_store = gtk::ListStore::new(&STANDARD_PAINT_ROW_SPEC[0..len]);\n\n for paint in colln.get_paints().iter() {\n\n list_store.append_row(&paint.tv_rows());\n", "file_path": "src/colln_paint/collection.rs", "rank": 8, "score": 229655.41022089834 }, { "content": "pub trait CollnPaintHueAttrWheelInterface<C, CID>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n fn create(\n\n attr: ScalarAttribute,\n\n paints: Rc<Vec<CollnPaint<C, CID>>>,\n\n ) -> CollnPaintHueAttrWheel<C, CID>;\n\n}\n\n\n\nimpl<C, CID> CollnPaintHueAttrWheelInterface<C, CID> for CollnPaintHueAttrWheel<C, CID>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n fn create(\n\n attr: ScalarAttribute,\n\n paints: Rc<Vec<CollnPaint<C, CID>>>,\n\n ) -> CollnPaintHueAttrWheel<C, CID> {\n", "file_path": "src/colln_paint/collection.rs", "rank": 9, "score": 228888.968683186 }, { "content": "pub trait FromSpec<C: CharacteristicsInterface> {\n\n fn from_spec(spec: &BasicPaintSpec<C>) -> Self;\n\n}\n\n\n\nimpl<C: CharacteristicsInterface> FromSpec<C> for BasicPaint<C> {\n\n fn from_spec(spec: 
&BasicPaintSpec<C>) -> BasicPaint<C> {\n\n Rc::new(BasicPaintCore::<C> {\n\n colour: Colour::from(spec.rgb),\n\n name: spec.name.clone(),\n\n notes: spec.notes.clone(),\n\n characteristics: spec.characteristics,\n\n })\n\n }\n\n}\n\n\n\nimpl<C: CharacteristicsInterface> ColouredItemInterface for BasicPaint<C> {\n\n fn colour(&self) -> Colour {\n\n self.colour.clone()\n\n }\n\n}\n", "file_path": "src/basic_paint/mod.rs", "rank": 10, "score": 219062.34540443108 }, { "content": "pub trait MixedPaintCollectionWidgetInterface<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(mixing_mode: MixingMode) -> MixedPaintCollectionWidget<A, C>;\n\n}\n\n\n\nimpl<A, C> MixedPaintCollectionWidgetInterface<A, C> for MixedPaintCollectionWidget<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(mixing_mode: MixingMode) -> MixedPaintCollectionWidget<A, C> {\n\n let len = MixedPaint::<C>::tv_row_len();\n\n let list_store = gtk::ListStore::new(&MIXED_PAINT_ROW_SPEC[0..len]);\n\n let view = gtk::TreeView::with_model(&list_store.clone());\n\n view.set_headers_visible(true);\n\n view.get_selection().set_mode(gtk::SelectionMode::Single);\n\n\n", "file_path": "src/mixed_paint/collection.rs", "rank": 11, "score": 199840.36136489038 }, { "content": " pub trait PaintDisplay<A, C, P>: DialogWrapper + TrackedDialog\n\n where\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n P: BasicPaintInterface<C> + 'static,\n\n {\n\n fn create<W: WidgetWrapper>(\n\n paint: &P,\n\n caller: &Rc<W>,\n\n button_specs: Vec<PaintDisplayButtonSpec>,\n\n ) -> Self;\n\n\n\n fn paint(&self) -> P;\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 12, "score": 199608.43705904676 }, { "content": "pub trait CharacteristicInterface: FromStr + PartialEq {\n\n fn name() -> &'static str;\n\n fn abbrev(&self) -> &'static str;\n\n fn description(&self) -> &'static str;\n\n fn values() -> &'static [Self];\n\n\n\n fn prompt() -> String {\n\n Self::name().to_string() + \":\"\n\n }\n\n}\n\n\n", "file_path": "src/characteristics.rs", "rank": 13, "score": 196527.8112200799 }, { "content": "pub trait ColourAttributesInterface: WidgetWrapper {\n\n fn create() -> Rc<Self>;\n\n fn tv_columns() -> Vec<gtk::TreeViewColumn>;\n\n fn scalar_attributes() -> Vec<ScalarAttribute>;\n\n\n\n fn set_colour(&self, colour: Option<&Colour>);\n\n fn set_target_colour(&self, target_colour: Option<&Colour>);\n\n}\n\n\n", "file_path": "src/basic_paint/mod.rs", "rank": 14, "score": 193013.99442567132 }, { "content": "pub fn app_name() -> String {\n\n if let Some(ref text) = env::args().next() {\n\n pw_pathux::split_path_text(text).1.to_string()\n\n } else {\n\n \"unknown\".to_string()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(2 + 2, 4);\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 15, "score": 189996.1564141938 }, { "content": " pub trait PaintDisplayWithCurrentTarget<A, C, P>: DialogWrapper + TrackedDialog\n\n where\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n P: BasicPaintInterface<C> + 'static,\n\n {\n\n fn create<W: WidgetWrapper>(\n\n paint: &P,\n\n current_target: Option<&Colour>,\n\n caller: &Rc<W>,\n\n button_specs: Vec<PaintDisplayButtonSpec>,\n\n ) -> Self;\n\n\n\n fn paint(&self) -> P;\n\n fn set_current_target(&self, new_current_target: Option<&Colour>);\n\n }\n\n}\n\n\n\npub mod 
art_paint;\n\npub mod basic_paint;\n", "file_path": "src/lib.rs", "rank": 16, "score": 189325.2116732269 }, { "content": "pub trait PaintComponentListViewInterface<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(\n\n components: &Rc<Vec<PaintComponent<C>>>,\n\n current_target: Option<&Colour>,\n\n ) -> PaintComponentListView<A, C>;\n\n}\n\n\n\nimpl<A, C> PaintComponentListViewInterface<A, C> for PaintComponentListView<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(\n\n components: &Rc<Vec<PaintComponent<C>>>,\n\n current_target: Option<&Colour>,\n\n ) -> PaintComponentListView<A, C> {\n", "file_path": "src/mixed_paint/display.rs", "rank": 17, "score": 188926.72881388466 }, { "content": "pub trait MixedPaintFactoryInterface<C: CharacteristicsInterface> {\n\n fn create() -> MixedPaintFactory<C>;\n\n}\n\n\n\nimpl<C> MixedPaintFactoryInterface<C> for MixedPaintFactory<C>\n\nwhere\n\n C: CharacteristicsInterface,\n\n{\n\n fn create() -> MixedPaintFactory<C> {\n\n let last_mixture_id: Cell<u32> = Cell::new(0);\n\n let paints: RefCell<Vec<MixedPaint<C>>> = RefCell::new(Vec::new());\n\n Rc::new(MixedPaintFactoryCore::<C> {\n\n last_mixture_id,\n\n paints,\n\n })\n\n }\n\n}\n\n\n\npub type MixedPaintComponentBox<A, C> =\n\n PaintComponentsBox<A, C, MixedPaint<C>, MixedPaintDisplayDialog<A, C>>;\n", "file_path": "src/mixed_paint/collection.rs", "rank": 18, "score": 184423.54410421915 }, { "content": "pub trait CharacteristicsEntryInterface<C: CharacteristicsInterface> {\n\n fn create() -> Rc<Self>;\n\n fn pwo(&self) -> gtk::Grid;\n\n fn get_characteristics(&self) -> Option<C>;\n\n fn set_characteristics(&self, o_characteristics: Option<&C>);\n\n fn connect_changed<F: 'static + Fn()>(&self, callback: F);\n\n}\n\n\n", "file_path": "src/basic_paint/mod.rs", "rank": 19, "score": 179871.86928031564 }, { "content": "pub trait ColourMatchAreaInterface {\n\n type ColourMatchAreaType;\n\n\n\n fn create(mixing_mode: MixingMode) -> Self::ColourMatchAreaType;\n\n}\n\n\n\nimpl ColourMatchAreaInterface for ColourMatchArea {\n\n type ColourMatchAreaType = ColourMatchArea;\n\n\n\n fn create(mixing_mode: MixingMode) -> ColourMatchArea {\n\n let colour_match_area = Rc::new(ColourMatchAreaCore {\n\n drawing_area: gtk::DrawingArea::new(),\n\n mixed_colour: RefCell::new(None),\n\n target_colour: RefCell::new(None),\n\n popup_menu: WrappedMenu::new(&vec![]),\n\n samples: RefCell::new(Vec::new()),\n\n popup_menu_position: Cell::new(Point(0.0, 0.0)),\n\n mixing_mode: mixing_mode,\n\n });\n\n\n", "file_path": "src/mixed_paint/match_area.rs", "rank": 20, "score": 178261.01905026234 }, { "content": "pub trait ColouredItemInterface {\n\n fn colour(&self) -> Colour;\n\n\n\n fn rgb(&self) -> RGB {\n\n self.colour().rgb()\n\n }\n\n\n\n fn hue(&self) -> Option<Hue> {\n\n self.colour().hue()\n\n }\n\n\n\n fn is_grey(&self) -> bool {\n\n self.colour().is_grey()\n\n }\n\n\n\n fn chroma(&self) -> f64 {\n\n self.colour().chroma()\n\n }\n\n\n\n fn greyness(&self) -> f64 {\n", "file_path": "src/basic_paint/mod.rs", "rank": 21, "score": 177270.56510007157 }, { "content": "pub trait PaintTreeViewColumnSpec {\n\n fn tv_columns() -> Vec<gtk::TreeViewColumn>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn basic_paint_basic_paint_regex() {\n\n let test_str = r#\"ModelPaint(name=\"71.001 White\", rgb=RGB16(red=0xF800, green=0xFA00, blue=0xF600), transparency=\"O\", 
finish=\"F\", metallic=\"NM\", fluorescence=\"NF\", notes=\"FS37925 RAL9016 RLM21\")\"#.to_string();\n\n assert!(BASIC_PAINT_RE.is_match(&test_str));\n\n let captures = BASIC_PAINT_RE.captures(&test_str).unwrap();\n\n assert_eq!(captures.name(\"ptype\").unwrap().as_str(), \"ModelPaint\");\n\n assert_eq!(\n\n captures.name(\"rgb\").unwrap().as_str(),\n\n \"RGB16(red=0xF800, green=0xFA00, blue=0xF600)\"\n\n );\n\n assert_eq!(\n\n captures.name(\"characteristics\").unwrap().as_str(),\n", "file_path": "src/basic_paint/mod.rs", "rank": 22, "score": 175924.7954838702 }, { "content": "pub trait SeriesPaintManagerInterface<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(data_path: &Path) -> SeriesPaintManager<A, C>;\n\n fn button(&self) -> gtk::Button;\n\n fn tool_button(&self) -> gtk::ToolButton;\n\n}\n\n\n\nimpl<A, C> SeriesPaintManagerInterface<A, C> for SeriesPaintManager<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(data_path: &Path) -> SeriesPaintManager<A, C> {\n\n let window = gtk::Window::new(gtk::WindowType::Toplevel);\n\n window.set_geometry_from_recollections(\"series_paint_manager\", (600, 200));\n\n window.set_destroy_with_parent(true);\n\n window.set_title(\"Series Paint Manager\");\n", "file_path": "src/series_paint.rs", "rank": 23, "score": 169507.65408215902 }, { "content": "pub trait ColourEditorInterface {\n\n fn create(extra_buttons: &Vec<gtk::Button>) -> Self;\n\n}\n\n\n\n#[derive(PWO, Wrapper)]\n\npub struct ColourEditorCore<A>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n{\n\n vbox: gtk::Box,\n\n rgb_manipulator: RefCell<RGBManipulator>,\n\n cads: Rc<A>,\n\n rgb_entry: Rc<RGBHexEntry<u16>>,\n\n drawing_area: gtk::DrawingArea,\n\n incr_value_btn: gtk::Button,\n\n decr_value_btn: gtk::Button,\n\n hue_left_btn: gtk::Button,\n\n hue_right_btn: gtk::Button,\n\n decr_greyness_btn: gtk::Button,\n\n incr_greyness_btn: gtk::Button,\n", "file_path": "src/colour_edit.rs", "rank": 24, "score": 168832.97438702738 }, { "content": "pub trait PaintStandardManagerInterface<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(data_path: &Path) -> PaintStandardManager<A, C>;\n\n fn button(&self) -> gtk::Button;\n\n fn tool_button(&self) -> gtk::ToolButton;\n\n}\n\n\n\nimpl<A, C> PaintStandardManagerInterface<A, C> for PaintStandardManager<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(data_path: &Path) -> PaintStandardManager<A, C> {\n\n let window = gtk::Window::new(gtk::WindowType::Toplevel);\n\n window.set_geometry_from_recollections(\"paint_standards_manager\", (600, 200));\n\n window.set_destroy_with_parent(true);\n\n window.set_title(\"Paint Standards Manager\");\n", "file_path": "src/standards.rs", "rank": 25, "score": 164950.179434928 }, { "content": "pub trait TargetColourDisplayDialogInterface<A>\n\nwhere\n\n A: ColourAttributesInterface,\n\n{\n\n fn create<W: WidgetWrapper>(\n\n colour: &TargetColour,\n\n caller: &Rc<W>,\n\n ) -> TargetColourDisplayDialog<A>;\n\n}\n\n\n\nimpl<A> TargetColourDisplayDialogInterface<A> for TargetColourDisplayDialog<A>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n{\n\n fn create<W: WidgetWrapper>(\n\n colour: &TargetColour,\n\n caller: &Rc<W>,\n\n ) -> TargetColourDisplayDialog<A> {\n\n let dialog = new_display_dialog(&colour.name(), caller, 
&[]);\n\n dialog.set_size_from_recollections(\"target_colour_display\", (60, 180));\n", "file_path": "src/mixed_paint/target.rs", "rank": 26, "score": 164797.5874733994 }, { "content": "pub trait PaintMixerInterface<A, C, MC>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n MC: MixerConfig + 'static,\n\n{\n\n fn create(\n\n series_paint_data_path: &Path,\n\n paint_standards_data_path: Option<&Path>,\n\n ) -> PaintMixer<A, C, MC>;\n\n}\n\n\n\npub type SeriesPaintComponentBox<A, C> =\n\n PaintComponentsBox<A, C, SeriesPaint<C>, SeriesPaintDisplayDialog<A, C>>;\n\n\n\n#[derive(PWO, Wrapper)]\n\npub struct PaintMixerCore<A, C, MC>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n", "file_path": "src/mixed_paint/mixer.rs", "rank": 27, "score": 162651.4906232339 }, { "content": "pub trait BasicPaintFactoryViewInterface<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create() -> BasicPaintFactoryView<A, C>;\n\n}\n\n\n\nimpl<A, C> BasicPaintFactoryViewInterface<A, C> for BasicPaintFactoryView<A, C>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create() -> BasicPaintFactoryView<A, C> {\n\n let len = BasicPaint::<C>::tv_row_len();\n\n let list_store = gtk::ListStore::new(&STANDARD_PAINT_ROW_SPEC[0..len]);\n\n let view = gtk::TreeView::with_model(&list_store.clone());\n\n view.set_headers_visible(true);\n\n view.get_selection().set_mode(gtk::SelectionMode::None);\n\n\n", "file_path": "src/basic_paint/factory.rs", "rank": 28, "score": 161819.06098272098 }, { "content": "pub trait BasicPaintHueAttrWheelInterface<C>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(attr: ScalarAttribute) -> BasicPaintHueAttrWheel<C>;\n\n}\n\n\n\nimpl<C> BasicPaintHueAttrWheelInterface<C> for BasicPaintHueAttrWheel<C>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n{\n\n fn create(attr: ScalarAttribute) -> BasicPaintHueAttrWheel<C> {\n\n let wheel = Rc::new(BasicPaintHueAttrWheelCore::<C> {\n\n paints: BasicPaintShapeList::<C>::new(attr),\n\n graticule: Graticule::create(attr),\n\n chosen_paint: RefCell::new(None),\n\n });\n\n let wheel_c = wheel.clone();\n\n wheel\n\n .paints\n", "file_path": "src/basic_paint/hue_wheel.rs", "rank": 29, "score": 158333.8390840061 }, { "content": "pub fn series_paint_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n series_paint_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n\n\n\n/* XPM */\n\nstatic SERIES_PAINT_LOAD_XPM: &[&str] = &[\n\n\"128 128 256 2\",\n\n\" \tc None\",\n\n\"BA\tc #FF0000\",\n\n\"CA\tc #00FF00\",\n\n\"DA\tc #FFFF00\",\n\n\"EA\tc #0000FF\",\n\n\"FA\tc #FF00FF\",\n\n\"GA\tc #00FFFF\",\n", "file_path": "src/icons/series_paint_xpm.rs", "rank": 30, "score": 153320.94146587147 }, { "content": "pub fn paint_standard_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n paint_standard_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n\n\n\n/* XPM */\n\nstatic PAINT_STANDARD_LOAD_XPM: &[&str] = &[\n\n \"64 64 12 1\",\n\n \"0\tc #242424\",\n\n \" \tc None\",\n\n \"2\tc #DB0000\",\n\n \"3\tc #004900\",\n\n \"4\tc 
#00DB00\",\n\n \"5\tc #000024\",\n\n \"6\tc #DBDB00\",\n", "file_path": "src/icons/paint_standard_xpms.rs", "rank": 31, "score": 153320.94146587147 }, { "content": "pub fn up_to_date_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n up_to_date_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n", "file_path": "src/icons/file_status_xpms.rs", "rank": 32, "score": 152906.66658492124 }, { "content": "pub trait PaintComponentsBoxInterface<A, C, P, D>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n P: BasicPaintInterface<C> + 'static,\n\n D: PaintDisplayWithCurrentTarget<A, C, P> + 'static,\n\n{\n\n fn create_with(n_cols: u32, sensitive: bool) -> PaintComponentsBox<A, C, P, D>;\n\n fn add_paint(&self, paint: &P);\n\n fn iter_colour_components(&self) -> ColourPartsIter<A, C, P, D>;\n\n}\n\n\n\n#[derive(PWO, Wrapper)]\n\npub struct PaintComponentsBoxCore<A, C, P, D>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n P: BasicPaintInterface<C> + 'static,\n\n D: PaintDisplayWithCurrentTarget<A, C, P> + 'static,\n\n{\n", "file_path": "src/mixed_paint/components.rs", "rank": 33, "score": 152892.56145015906 }, { "content": "pub trait CreateInterface {\n\n fn create(extra_buttons: &Vec<gtk::Button>) -> Self;\n\n}\n\n\n\n#[derive(PWO, Wrapper)]\n\npub struct BasicPaintSpecEntryCore<A, C>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n{\n\n vbox: gtk::Box,\n\n edited_spec: RefCell<Option<BasicPaintSpec<C>>>,\n\n characteristics_entry: Rc<C::Entry>,\n\n name_entry: gtk::Entry,\n\n notes_entry: gtk::Entry,\n\n colour_editor: ColourEditor<A>,\n\n status_changed_callbacks: RefCell<Vec<Box<dyn Fn(EntryStatus)>>>,\n\n}\n\n\n\nimpl<A, C> BasicPaintSpecEntryCore<A, C>\n", "file_path": "src/basic_paint/entry.rs", "rank": 34, "score": 152068.39303267648 }, { "content": "pub fn series_paint_load_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n series_paint_load_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n", "file_path": "src/icons/series_paint_xpm.rs", "rank": 35, "score": 150243.41087980478 }, { "content": "pub fn paint_standard_load_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n paint_standard_load_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n", "file_path": "src/icons/paint_standard_xpms.rs", "rank": 36, "score": 150243.41087980478 }, { "content": "pub trait PaintPartsSpinButtonInterface<A, C, P, D>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n P: BasicPaintInterface<C> + 'static,\n\n D: PaintDisplayWithCurrentTarget<A, C, P> + 'static,\n\n{\n\n fn create_with(\n\n paint: &P,\n\n current_target: Option<&Colour>,\n\n sensitive: bool,\n\n ) -> PaintPartsSpinButton<A, C, P, D>;\n\n fn inform_remove_me(&self);\n\n}\n\n\n\n#[derive(PWO, Wrapper)]\n\npub struct PaintPartsSpinButtonCore<A, C, P, D>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n", "file_path": 
"src/mixed_paint/components.rs", "rank": 37, "score": 149627.75849120732 }, { "content": "pub trait MixerHueAttrWheelInterface<A, C>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n{\n\n fn create(attr: ScalarAttribute) -> MixerHueAttrWheel<A, C>;\n\n}\n\n\n\nimpl<A, C> MixerHueAttrWheelInterface<A, C> for MixerHueAttrWheel<A, C>\n\nwhere\n\n C: CharacteristicsInterface + 'static,\n\n A: ColourAttributesInterface + 'static,\n\n{\n\n fn create(attr: ScalarAttribute) -> MixerHueAttrWheel<A, C> {\n\n let wheel = Rc::new(MixerHueAttrWheelCore::<A, C> {\n\n popup_menu: WrappedMenu::new(&vec![]),\n\n series_paints: SeriesPaintShapeList::<C>::new(attr),\n\n mixed_paints: MixedPaintShapeList::<C>::new(attr),\n\n target_colours: TargetColourShapeList::new(attr),\n\n graticule: Graticule::create(attr),\n", "file_path": "src/mixed_paint/hue_wheel.rs", "rank": 38, "score": 149414.7048900884 }, { "content": "pub trait TargetColourInterface {\n\n fn create(colour: &Colour, name: &str, notes: &str) -> TargetColour;\n\n}\n\n\n\nimpl TargetColourInterface for TargetColour {\n\n fn create(colour: &Colour, name: &str, notes: &str) -> TargetColour {\n\n Rc::new(TargetColourCore {\n\n colour: colour.clone(),\n\n name: name.to_string(),\n\n notes: notes.to_string(),\n\n })\n\n }\n\n}\n\n\n\npub struct TargetColourDisplayDialogCore<A>\n\nwhere\n\n A: ColourAttributesInterface,\n\n{\n\n dialog: gtk::Dialog,\n\n cads: PhantomData<A>,\n", "file_path": "src/mixed_paint/target.rs", "rank": 39, "score": 147132.1054503109 }, { "content": "pub fn create_ideal_model_paint_series() -> ModelPaintSeries {\n\n let spec = ModelPaintSeriesSpec::from_str(IDEAL_PAINT_STR).unwrap();\n\n ModelPaintSeries::from_spec(&spec)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const OBSOLETE_PAINT_STR: &str =\n\n\"Manufacturer: Tamiya\n\nSeries: Flat Acrylic (Peter Williams Digital Samples #3)\n\nNamedColour(name=\\\"XF 1: Flat Black *\\\", rgb=RGB(0x2D00, 0x2B00, 0x3000), transparency=\\\"O\\\", finish=\\\"F\\\")\n\nNamedColour(name=\\\"XF 2: Flat White *\\\", rgb=RGB(0xFE00, 0xFE00, 0xFE00), transparency=\\\"O\\\", finish=\\\"F\\\")\n\nNamedColour(name=\\\"XF 3: Flat Yellow *\\\", rgb=RGB(0xF800, 0xCD00, 0x2900), transparency=\\\"O\\\", finish=\\\"F\\\")\n\nNamedColour(name=\\\"XF 4: Yellow Green *\\\", rgb=RGB(0xAA00, 0xAE00, 0x4000), transparency=\\\"O\\\", finish=\\\"F\\\")\n\n\";\n\n\n\n #[test]\n\n fn paint_model_paint() {\n", "file_path": "src/model_paint.rs", "rank": 41, "score": 146736.94679387927 }, { "content": "pub fn create_ideal_art_paint_series() -> ArtPaintSeries {\n\n let spec = ArtPaintSeriesSpec::from_str(IDEAL_PAINT_STR).unwrap();\n\n ArtPaintSeries::from_spec(&spec)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // const OBSOLETE_PAINT_STR: &str =\n\n //\"Manufacturer: Tamiya\n\n //Series: Flat Acrylic (Peter Williams Digital Samples #3)\n\n //NamedColour(name=\\\"XF 1: Flat Black *\\\", rgb=RGB(0x2D00, 0x2B00, 0x3000), transparency=\\\"O\\\", permanence=\\\"C\\\")\n\n //NamedColour(name=\\\"XF 2: Flat White *\\\", rgb=RGB(0xFE00, 0xFE00, 0xFE00), transparency=\\\"O\\\", permanence=\\\"C\\\")\n\n //NamedColour(name=\\\"XF 3: Flat Yellow *\\\", rgb=RGB(0xF800, 0xCD00, 0x2900), transparency=\\\"O\\\", permanence=\\\"C\\\")\n\n //NamedColour(name=\\\"XF 4: Yellow Green *\\\", rgb=RGB(0xAA00, 0xAE00, 0x4000), transparency=\\\"O\\\", permanence=\\\"C\\\")\n\n //\";\n\n\n\n #[test]\n\n fn art_paint() {\n", "file_path": 
"src/art_paint.rs", "rank": 42, "score": 146736.94679387927 }, { "content": "pub fn needs_save_ready_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n needs_save_ready_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n\n\n\nstatic UP_TO_DATE_XPM: &[&str] = &[\n\n \"64 64 2 1\",\n\n \" \tc None\",\n\n \"1\tc #00FF00\",\n\n \" 111111111111 \",\n\n \" 11111111111111111111 \",\n\n \" 111111111111111111111111 \",\n\n \" 1111111111111111111111111111 \",\n\n \" 11111111111111111111111111111111 \",\n\n \" 111111111111111111111111111111111111 \",\n", "file_path": "src/icons/file_status_xpms.rs", "rank": 43, "score": 146266.87445247342 }, { "content": "pub fn needs_save_not_ready_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n needs_save_not_ready_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n\n\n\nstatic NEEDS_SAVE_READY_XPM: &[&str] = &[\n\n \"64 64 2 1\",\n\n \" \tc None\",\n\n \"1\tc #FFAF00\",\n\n \" 111111111111 \",\n\n \" 11111111111111111111 \",\n\n \" 111111111111111111111111 \",\n\n \" 1111111111111111111111111111 \",\n\n \" 11111111111111111111111111111111 \",\n\n \" 111111111111111111111111111111111111 \",\n", "file_path": "src/icons/file_status_xpms.rs", "rank": 44, "score": 146266.87445247342 }, { "content": " pub trait TripleArgCreation<A, B, C> {\n\n fn create(a: &A, b: &B, c: &C) -> Self;\n\n }\n\n}\n\n\n\npub mod colour {\n\n use std::cmp::Ordering;\n\n\n\n use serde_derive::*;\n\n\n\n use normalised_angles::Degrees;\n\n\n\n pub use colour_math::{\n\n urgb::{URGBError, RGB16, RGB8},\n\n ColourInterface, HueConstants, RGBConstants, ScalarAttribute, CCI,\n\n };\n\n use colour_math::{HCV, RGBA};\n\n use pw_gix::gdk;\n\n\n\n pub type Hue = colour_math::hue::Hue<f64>;\n", "file_path": "src/lib.rs", "rank": 45, "score": 137657.22787743437 }, { "content": "pub trait NewTargetColourDialogInterface<A>\n\nwhere\n\n A: ColourAttributesInterface,\n\n{\n\n fn create<W: WidgetWrapper>(caller: &Rc<W>) -> NewTargetColourDialog<A>;\n\n}\n\n\n\nimpl<A> NewTargetColourDialogInterface<A> for NewTargetColourDialog<A>\n\nwhere\n\n A: ColourAttributesInterface,\n\n{\n\n fn create<W: WidgetWrapper>(caller: &Rc<W>) -> NewTargetColourDialog<A> {\n\n let title = format!(\"{}: New Mixed Paint Target Colour\", app_name());\n\n let dialog = caller.new_dialog_with_buttons(\n\n Some(&title),\n\n gtk::DialogFlags::DESTROY_WITH_PARENT,\n\n CANCEL_OK_BUTTONS,\n\n );\n\n let colour_editor = ColourEditor::<A>::create(&vec![]);\n\n let notes = gtk::Entry::new();\n", "file_path": "src/mixed_paint/target.rs", "rank": 46, "score": 136708.7877432977 }, { "content": " pub trait DialogWrapper {\n\n fn dialog(&self) -> gtk::Dialog;\n\n\n\n fn show(&self) {\n\n self.dialog().show()\n\n }\n\n\n\n fn present(&self) {\n\n self.dialog().present()\n\n }\n\n\n\n fn close(&self) {\n\n self.dialog().close()\n\n }\n\n\n\n fn set_response_sensitive(&self, response_id: gtk::ResponseType, setting: bool) {\n\n self.dialog().set_response_sensitive(response_id, setting);\n\n }\n\n\n\n fn connect_close<F: Fn(&gtk::Dialog) + 'static>(&self, f: F) -> SignalHandlerId {\n", "file_path": "src/lib.rs", "rank": 47, "score": 135010.37874527508 }, { "content": " pub trait DestroyedCallbacksIfce {\n\n fn create() -> 
DestroyedCallbacks;\n\n }\n\n\n\n impl DestroyedCallbacksIfce for DestroyedCallbacks {\n\n fn create() -> DestroyedCallbacks {\n\n RefCell::new(Vec::new())\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 48, "score": 130681.82359468394 }, { "content": "pub trait ColourShapeInterface {\n\n fn xy(&self) -> Point;\n\n fn fill_rgb(&self) -> RGB;\n\n fn shape_type(&self) -> ShapeType;\n\n\n\n fn encloses(&self, xy: Point) -> bool {\n\n match self.shape_type() {\n\n ShapeType::Square => {\n\n let delta_xy = self.xy() - xy;\n\n delta_xy.x().abs() < SHAPE_RADIUS && delta_xy.y().abs() < SHAPE_RADIUS\n\n }\n\n ShapeType::Diamond => {\n\n let delta_xy = (self.xy() - xy).rotate_45_deg();\n\n delta_xy.x().abs() < SHAPE_RADIUS && delta_xy.y().abs() < SHAPE_RADIUS\n\n }\n\n _ => (self.xy() - xy).hypot() < SHAPE_RADIUS,\n\n }\n\n }\n\n\n\n fn distance_to(&self, xy: Point) -> f64 {\n", "file_path": "src/shape.rs", "rank": 49, "score": 130385.95535055044 }, { "content": "pub fn colln_open_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n colln_open_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n\n\n\nstatic COLLN_SAVE_XPM: &[&str] = &[\n\n \"64 64 7 1\",\n\n \" \tc None\",\n\n \".\tc #000000\",\n\n \"+\tc #5AFF00\",\n\n \"@\tc #FB0707\",\n\n \"#\tc #FBF207\",\n\n \"$\tc #0796FB\",\n\n \"%\tc #29E75E\",\n\n \" \",\n", "file_path": "src/icons/colln_xpms.rs", "rank": 50, "score": 126551.22469027984 }, { "content": "pub fn colln_new_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n colln_new_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n\n\n\nstatic COLLN_OPEN_XPM: &[&str] = &[\n\n \"64 64 9 1\",\n\n \" \tc None\",\n\n \".\tc #000000\",\n\n \"+\tc #F8F0F5\",\n\n \"@\tc #26EC0C\",\n\n \"#\tc #14F5F8\",\n\n \"$\tc #F82F14\",\n\n \"%\tc #142FF8\",\n\n \"&\tc #FAFD00\",\n", "file_path": "src/icons/colln_xpms.rs", "rank": 51, "score": 126551.22469027984 }, { "content": "pub fn colln_save_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n colln_save_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n\n\n\nstatic COLLN_SAVE_AS_XPM: &[&str] = &[\n\n \"64 64 7 1\",\n\n \" \tc None\",\n\n \".\tc #000000\",\n\n \"+\tc #5AFF00\",\n\n \"@\tc #29E75E\",\n\n \"#\tc #E73629\",\n\n \"$\tc #29C5E7\",\n\n \"%\tc #FFFF00\",\n\n \" ............................................ 
\",\n", "file_path": "src/icons/colln_xpms.rs", "rank": 52, "score": 126551.22469027984 }, { "content": "pub fn colln_save_as_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n colln_save_as_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n", "file_path": "src/icons/colln_xpms.rs", "rank": 53, "score": 126551.22469027984 }, { "content": "pub fn mixtures_print_image(size: i32) -> gtk::Image {\n\n if let Some(pixbuf) =\n\n mixtures_print_pixbuf().scale_simple(size, size, gdk_pixbuf::InterpType::Bilinear)\n\n {\n\n gtk::Image::from_pixbuf(Some(&pixbuf))\n\n } else {\n\n panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n}\n", "file_path": "src/icons/mixtures_print_xpm.rs", "rank": 54, "score": 123983.41211689697 }, { "content": "struct Sample {\n\n pix_buf: Pixbuf,\n\n position: Point,\n\n}\n\n\n\n#[derive(PWO, Wrapper)]\n\npub struct ColourMatchAreaCore {\n\n drawing_area: gtk::DrawingArea,\n\n mixed_colour: RefCell<Option<Colour>>,\n\n target_colour: RefCell<Option<Colour>>,\n\n popup_menu: WrappedMenu,\n\n samples: RefCell<Vec<Sample>>,\n\n popup_menu_position: Cell<Point>,\n\n mixing_mode: MixingMode,\n\n}\n\n\n\nimpl ColourMatchAreaCore {\n\n pub fn mixing_mode(&self) -> MixingMode {\n\n self.mixing_mode\n\n }\n", "file_path": "src/mixed_paint/match_area.rs", "rank": 55, "score": 121388.343689129 }, { "content": "pub trait ColouredItemShapeInterface<CI>: ColourShapeInterface\n\nwhere\n\n CI: ColouredItemInterface + Ord,\n\n{\n\n fn new(paint: &CI, attr: ScalarAttribute) -> Self;\n\n fn coloured_item(&self) -> CI;\n\n\n\n fn colour_xy(colour: Colour, attr: ScalarAttribute) -> Point {\n\n if let Some(hue) = colour.hue() {\n\n let radius = colour.scalar_attribute(attr);\n\n let angle: normalised_angles::Angle<f64> = hue.angle().into();\n\n Point::from((angle, radius))\n\n } else {\n\n let value = colour.value();\n\n Point(-1.05, 1.0 - 2.0 * value)\n\n }\n\n }\n\n}\n\n\n\npub struct ColouredItemSpapeList<CI, PS>\n", "file_path": "src/shape.rs", "rank": 56, "score": 120371.71596337414 }, { "content": "pub trait MixerConfig {\n\n fn mixing_mode() -> MixingMode;\n\n}\n\n\n", "file_path": "src/mixed_paint/mixer.rs", "rank": 57, "score": 120259.48033427315 }, { "content": "pub fn series_paint_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(SERIES_PAINT_XPM)\n\n}\n\n\n", "file_path": "src/icons/series_paint_xpm.rs", "rank": 58, "score": 113341.23698943316 }, { "content": "pub fn paint_standard_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(PAINT_STANDARD_XPM)\n\n}\n\n\n", "file_path": "src/icons/paint_standard_xpms.rs", "rank": 59, "score": 113341.23698943316 }, { "content": "pub fn up_to_date_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(UP_TO_DATE_XPM)\n\n}\n\n\n", "file_path": "src/icons/file_status_xpms.rs", "rank": 60, "score": 112400.9177364596 }, { "content": "pub fn series_paint_load_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(SERIES_PAINT_LOAD_XPM)\n\n}\n\n\n", "file_path": "src/icons/series_paint_xpm.rs", "rank": 61, "score": 110930.53653894391 }, { "content": "pub fn paint_standard_load_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(PAINT_STANDARD_LOAD_XPM)\n\n}\n\n\n", "file_path": "src/icons/paint_standard_xpms.rs", "rank": 62, "score": 110930.53653894391 }, { "content": "pub fn 
needs_save_ready_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(NEEDS_SAVE_READY_XPM)\n\n}\n\n\n", "file_path": "src/icons/file_status_xpms.rs", "rank": 63, "score": 107087.97488777024 }, { "content": "pub fn needs_save_not_ready_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(NEEDS_SAVE_NOT_READY_XPM)\n\n}\n\n\n", "file_path": "src/icons/file_status_xpms.rs", "rank": 64, "score": 107087.97488777024 }, { "content": "pub trait Draw {\n\n fn draw_circle(&self, centre: Point, radius: f64, fill: bool);\n\n fn draw_diamond(&self, centre: Point, side_length: f64, filled: bool);\n\n fn draw_line(&self, start: Point, end: Point);\n\n fn draw_polygon(&self, polygon: Points, fill: bool);\n\n fn draw_square(&self, centre: Point, side_length: f64, filled: bool);\n\n fn draw_indicator(&self, position: Point, dirn: Dirn, size: f64);\n\n fn move_to_point(&self, point: Point);\n\n fn line_to_point(&self, point: Point);\n\n fn set_source_surface_at(&self, surface: &cairo::Surface, position: Point);\n\n fn set_source_pixbuf_at(&self, pixbuf: &Pixbuf, position: Point);\n\n}\n\n\n\nimpl Draw for cairo::Context {\n\n fn draw_circle(&self, centre: Point, radius: f64, fill: bool) {\n\n self.arc(centre.0, centre.1, radius, 0.0, 2.0 * consts::PI);\n\n if fill {\n\n self.fill();\n\n } else {\n\n self.stroke();\n", "file_path": "src/cairox.rs", "rank": 65, "score": 103553.24487417591 }, { "content": "struct Sample {\n\n pix_buf: gdk_pixbuf::Pixbuf,\n\n position: Point,\n\n}\n\n\n", "file_path": "src/colour_edit.rs", "rank": 66, "score": 100790.65779055466 }, { "content": "pub trait GeometryInterface {\n\n fn transform(&self, point: Point) -> Point;\n\n fn reverse_transform(&self, point: Point) -> Point;\n\n fn scaled(&self, value: f64) -> f64;\n\n}\n\n\n\nconst SHAPE_SIDE: f64 = 0.06;\n\nconst SHAPE_RADIUS: f64 = SHAPE_SIDE / 2.0;\n\n\n", "file_path": "src/shape.rs", "rank": 67, "score": 100398.23540082789 }, { "content": " pub trait GdkConvert {\n\n fn into_gdk_rgba(&self) -> gdk::RGBA;\n\n }\n\n\n\n impl GdkConvert for RGB {\n\n fn into_gdk_rgba(&self) -> gdk::RGBA {\n\n gdk::RGBA {\n\n red: self[CCI::Red],\n\n green: self[CCI::Green],\n\n blue: self[CCI::Blue],\n\n alpha: 1.0,\n\n }\n\n }\n\n }\n\n}\n\n\n\npub mod error {\n\n use std::convert::From;\n\n use std::error::Error;\n\n use std::fmt;\n", "file_path": "src/lib.rs", "rank": 68, "score": 100398.23540082789 }, { "content": " pub trait TrackedDialog {\n\n fn id_no(&self) -> u32;\n\n fn destroyed_callbacks(&self) -> &DestroyedCallbacks;\n\n\n\n fn connect_destroyed<F: 'static + Fn(u32)>(&self, callback: F) {\n\n self.destroyed_callbacks()\n\n .borrow_mut()\n\n .push(Box::new(callback))\n\n }\n\n\n\n fn inform_destroyed(&self) {\n\n for callback in self.destroyed_callbacks().borrow().iter() {\n\n callback(self.id_no());\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 69, "score": 100398.23540082789 }, { "content": "pub trait GraticuleInterface {\n\n fn create(attr: ScalarAttribute) -> Rc<GraticuleCore>;\n\n}\n\n\n\nimpl GraticuleCore {\n\n pub fn attr(&self) -> ScalarAttribute {\n\n self.attr\n\n }\n\n\n\n pub fn drawing_area(&self) -> gtk::DrawingArea {\n\n self.drawing_area.clone()\n\n }\n\n\n\n fn update_drawing_area(&self) {\n\n let dw = self.drawing_area.get_allocated_width() as f64;\n\n let dh = self.drawing_area.get_allocated_height() as f64;\n\n\n\n self.raw_centre.set(Point(dw, dh) / 2.0);\n\n self.centre.set(self.raw_centre.get() + self.offset.get());\n\n self.scaled_one.set(dw.min(dh) / 
2.2);\n", "file_path": "src/graticule.rs", "rank": 70, "score": 100398.23540082789 }, { "content": " pub trait SimpleCreation {\n\n fn create() -> Self;\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 71, "score": 100398.23540082789 }, { "content": "pub trait CharacteristicEntryInterface {\n\n type Characteristic: CharacteristicInterface + 'static;\n\n\n\n fn combo_box_text(&self) -> gtk::ComboBoxText;\n\n fn create() -> Self;\n\n\n\n fn get_value(&self) -> Option<Self::Characteristic> {\n\n if let Some(text) = self.combo_box_text().get_active_text() {\n\n match Self::Characteristic::from_str(&text) {\n\n Ok(value) => Some(value),\n\n Err(_) => panic!(\n\n \"File: {:?} Line: {:?} illegal value: {:?}\",\n\n file!(),\n\n line!(),\n\n text\n\n ),\n\n }\n\n } else {\n\n None\n\n }\n", "file_path": "src/characteristics.rs", "rank": 72, "score": 97510.27175512187 }, { "content": " pub trait SingleArgCreation<A> {\n\n fn create(a: &A) -> Self;\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 73, "score": 95409.62920866693 }, { "content": " pub trait DoubleArgCreation<A, B> {\n\n fn create(a: &A, b: &B) -> Self;\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 74, "score": 90913.14680644716 }, { "content": "pub fn colln_open_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(COLLN_OPEN_XPM)\n\n}\n\n\n", "file_path": "src/icons/colln_xpms.rs", "rank": 75, "score": 85124.88952264813 }, { "content": "pub fn colln_new_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(COLLN_NEW_XPM)\n\n}\n\n\n", "file_path": "src/icons/colln_xpms.rs", "rank": 76, "score": 85124.88952264813 }, { "content": "pub fn colln_save_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(COLLN_SAVE_XPM)\n\n}\n\n\n", "file_path": "src/icons/colln_xpms.rs", "rank": 77, "score": 85124.88952264813 }, { "content": "pub fn colln_save_as_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(COLLN_SAVE_AS_XPM)\n\n}\n\n\n", "file_path": "src/icons/colln_xpms.rs", "rank": 78, "score": 85124.88952264813 }, { "content": "pub fn mixtures_print_pixbuf() -> gdk_pixbuf::Pixbuf {\n\n gdk_pixbuf::Pixbuf::from_xpm_data(MIXTURES_PRINT_XPM)\n\n}\n\n\n", "file_path": "src/icons/mixtures_print_xpm.rs", "rank": 79, "score": 83299.34652332019 }, { "content": " for line in string.lines() {\n\n vpb.push(PathBuf::from(line));\n\n }\n\n\n\n vpb\n\n }\n\n\n\n fn write_colln_file_paths(&self) {\n\n let mut text = String::new();\n\n for colln_data in self.paint_collns.borrow().iter() {\n\n text += (pw_pathux::path_to_string(&colln_data.1) + \"\\n\").as_str();\n\n }\n\n match File::create(&self.paint_colln_files_data_path) {\n\n Ok(mut file) => file.write(&text.into_bytes()).unwrap_or_else(|err| {\n\n panic!(\"File: {:?} Line: {:?} : {:?}\", file!(), line!(), err)\n\n }),\n\n Err(err) => panic!(\"File: {:?} Line: {:?} : {:?}\", file!(), line!(), err),\n\n };\n\n }\n\n\n\n pub fn connect_paint_selected<F: 'static + Fn(&CollnPaint<C, CID>)>(&self, callback: F) {\n\n self.paint_selected_callbacks\n\n .borrow_mut()\n\n .push(Box::new(callback))\n\n }\n\n}\n\n\n\npub type CollnPaintCollnBinder<A, C, CID> = Rc<CollnPaintCollnBinderCore<A, C, CID>>;\n\n\n", "file_path": "src/colln_paint/binder.rs", "rank": 80, "score": 65094.51436684544 }, { "content": "#[derive(PWO, Wrapper)]\n\npub struct CollnPaintCollnBinderCore<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n vbox: gtk::Box,\n\n 
notebook: gtk::Notebook,\n\n load_colln_button: gtk::Button,\n\n initiate_select_ok: Cell<bool>,\n\n paint_selected_callbacks: RefCell<Vec<Box<dyn Fn(&CollnPaint<C, CID>)>>>,\n\n paint_collns: RefCell<Vec<(CollnPaintCollnWidget<A, C, CID>, PathBuf)>>,\n\n paint_colln_files_data_path: PathBuf,\n\n}\n\n\n\nimpl<A, C, CID> CollnPaintCollnBinderCore<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n", "file_path": "src/colln_paint/binder.rs", "rank": 81, "score": 65077.074115896554 }, { "content": " panic!(\"File: {:?} Line: {:?}\", file!(), line!())\n\n }\n\n }\n\n\n\n pub fn set_target_colour(&self, ocolour: Option<&Colour>) {\n\n for selector in self.paint_collns.borrow().iter() {\n\n selector.0.set_target_colour(ocolour);\n\n }\n\n }\n\n\n\n fn read_colln_file_paths(&self) -> Vec<PathBuf> {\n\n let mut vpb = Vec::new();\n\n if !self.paint_colln_files_data_path.exists() {\n\n return vpb;\n\n };\n\n let mut file = File::open(&self.paint_colln_files_data_path)\n\n .unwrap_or_else(|err| panic!(\"File: {:?} Line: {:?} : {:?}\", file!(), line!(), err));\n\n let mut string = String::new();\n\n file.read_to_string(&mut string)\n\n .unwrap_or_else(|err| panic!(\"File: {:?} Line: {:?} : {:?}\", file!(), line!(), err));\n", "file_path": "src/colln_paint/binder.rs", "rank": 82, "score": 65073.92582774364 }, { "content": " let buttons = [\n\n (\"Cancel\", gtk::ResponseType::Other(0)),\n\n (\"Reload\", gtk::ResponseType::Other(1)),\n\n ];\n\n if self.ask_question(\"Duplicate Collection\", Some(expln.as_str()), &buttons)\n\n == gtk::ResponseType::Other(1)\n\n {\n\n self.remove_paint_colln_at_index(index);\n\n } else {\n\n return;\n\n }\n\n };\n\n self._add_paint_colln_from_file(&abs_file_path);\n\n let path_text = pw_pathux::path_to_string(&abs_file_path);\n\n remember(\n\n &CID::recollection_name_for(\"last_colln_loaded_file\"),\n\n &path_text,\n\n );\n\n }\n\n Err(err) => {\n", "file_path": "src/colln_paint/binder.rs", "rank": 83, "score": 65072.08657681108 }, { "content": " let cpcb_c = self.clone();\n\n let ps_id = colln_spec.colln_id.clone();\n\n label.connect_remove_page(move || cpcb_c.remove_paint_colln(&ps_id));\n\n self.notebook.insert_page_menu(\n\n &paint_colln.pwo(),\n\n Some(&label.pwo()),\n\n Some(&menu_label),\n\n Some(index as u32),\n\n );\n\n }\n\n\n\n fn _add_paint_colln_from_file(&self, path: &Path) {\n\n match PaintCollnSpec::<C, CID>::from_file(path) {\n\n Ok(colln_spec) => match self.find_cid(&colln_spec.colln_id) {\n\n Ok(index) => {\n\n let other_file_path = &self.paint_collns.borrow()[index].1;\n\n let expln = format!(\n\n \"\\\"{}\\\" ({}): already included in the tool box.\\nLoaded from file \\\"{:?}\\\".\",\n\n colln_spec.colln_id.colln_name(),\n\n colln_spec.colln_id.colln_owner(),\n", "file_path": "src/colln_paint/binder.rs", "rank": 84, "score": 65071.27016728932 }, { "content": "\n\n let colln_file_paths = cpcb.read_colln_file_paths();\n\n for colln_file_path in colln_file_paths.iter() {\n\n cpcb._add_paint_colln_from_file(colln_file_path);\n\n }\n\n cpcb.write_colln_file_paths();\n\n cpcb.notebook.show_all();\n\n\n\n let cpcb_c = cpcb.clone();\n\n cpcb.load_colln_button\n\n .connect_clicked(move |_| cpcb_c.load_paint_colln_from_file());\n\n cpcb.vbox.show_all();\n\n\n\n cpcb\n\n }\n\n\n\n fn _insert_paint_colln(&self, colln_spec: &PaintCollnSpec<C, CID>, path: &Path, index: usize) {\n\n let mut paint_collns = self.paint_collns.borrow_mut();\n\n let paint_colln = CollnPaintCollnWidget::<A, C, 
CID>::create(&colln_spec);\n\n paint_colln.set_initiate_select_ok(self.initiate_select_ok.get());\n", "file_path": "src/colln_paint/binder.rs", "rank": 85, "score": 65068.511606745305 }, { "content": " }\n\n\n\n fn load_paint_colln_from_file(&self) {\n\n let o_last_file = recall(&CID::recollection_name_for(\"last_colln_loaded_file\"));\n\n let last_file = if let Some(ref text) = o_last_file {\n\n Some(text.as_str())\n\n } else {\n\n None\n\n };\n\n if let Some(path) = self.ask_file_path(Some(\"Collection File Name:\"), last_file, true) {\n\n match pw_pathux::expand_home_dir_or_mine(&path).canonicalize() {\n\n Ok(abs_file_path) => {\n\n if let Some(index) = self.find_file_path(&abs_file_path) {\n\n let colln_id = &self.paint_collns.borrow()[index].0.colln_id();\n\n let expln = format!(\n\n \"\\\"{:?}\\\": already loaded providing \\\"{}\\\" ({}).\",\n\n path,\n\n colln_id.colln_name(),\n\n colln_id.colln_owner(),\n\n );\n", "file_path": "src/colln_paint/binder.rs", "rank": 86, "score": 65068.19079505377 }, { "content": " },\n\n Err(err) => match err.error_type() {\n\n &PaintErrorType::IOError(ref io_error) => {\n\n let expln = format!(\"\\\"{:?}\\\" \\\"{}\\\"\\n\", path, io_error.to_string());\n\n let msg = \"I/O Error\";\n\n self.warn_user(msg, Some(expln.as_str()));\n\n }\n\n &PaintErrorType::MalformedText(_) => {\n\n let expln = format!(\"Error parsing \\\"{:?}\\\"\\n\", path);\n\n let msg = \"Malformed Collection Specification Text\";\n\n self.warn_user(msg, Some(expln.as_str()));\n\n }\n\n &PaintErrorType::AlreadyExists(ref text) => {\n\n let expln = format!(\"\\\"{:?}\\\" contains two paints named\\\"{}\\\"\\n\", path, text);\n\n let msg = \"Malformed Collection (Duplicate Paints)\";\n\n self.warn_user(msg, Some(expln.as_str()));\n\n }\n\n _ => panic!(\"Unexpected error.\"),\n\n },\n\n }\n", "file_path": "src/colln_paint/binder.rs", "rank": 87, "score": 65067.64585781969 }, { "content": " other_file_path,\n\n );\n\n let buttons = [\n\n (\"Skip\", gtk::ResponseType::Other(0)),\n\n (\"Replace\", gtk::ResponseType::Other(1)),\n\n ];\n\n if self.ask_question(\"Duplicate Collection\", Some(expln.as_str()), &buttons)\n\n == gtk::ResponseType::Other(1)\n\n {\n\n self.remove_paint_colln_at_index(index);\n\n self._insert_paint_colln(&colln_spec, path, index);\n\n self.notebook.show_all();\n\n self.write_colln_file_paths();\n\n }\n\n }\n\n Err(index) => {\n\n self._insert_paint_colln(&colln_spec, path, index);\n\n self.notebook.show_all();\n\n self.write_colln_file_paths();\n\n }\n", "file_path": "src/colln_paint/binder.rs", "rank": 88, "score": 65066.63953858974 }, { "content": " paint_collns.insert(index, (paint_colln.clone(), path.to_path_buf()));\n\n let cpcb_c = self.clone();\n\n paint_colln.connect_paint_selected(move |paint| cpcb_c.inform_paint_selected(paint));\n\n let l_text = format!(\n\n \"{}\\n{}\",\n\n colln_spec.colln_id.colln_name(),\n\n colln_spec.colln_id.colln_owner()\n\n );\n\n let tt_text = format!(\n\n \"Remove {} ({}) from the tool kit\",\n\n colln_spec.colln_id.colln_name(),\n\n colln_spec.colln_id.colln_owner()\n\n );\n\n let label = TabRemoveLabel::create(Some(l_text.as_str()), Some(&tt_text.as_str()));\n\n let l_text = format!(\n\n \"{} ({})\",\n\n colln_spec.colln_id.colln_name(),\n\n colln_spec.colln_id.colln_owner()\n\n );\n\n let menu_label = gtk::Label::new(Some(l_text.as_str()));\n", "file_path": "src/colln_paint/binder.rs", "rank": 89, "score": 65066.290445168095 }, { "content": " CID: CollnIdInterface + 'static,\n\n{\n\n fn find_cid(&self, cid: &Rc<CID>) -> 
Result<usize, usize> {\n\n self.paint_collns\n\n .borrow()\n\n .binary_search_by_key(cid, |colln_data| colln_data.0.colln_id())\n\n }\n\n\n\n fn find_file_path(&self, path: &Path) -> Option<usize> {\n\n for (index, colln_data) in self.paint_collns.borrow().iter().enumerate() {\n\n if path == colln_data.1 {\n\n return Some(index);\n\n }\n\n }\n\n None\n\n }\n\n\n\n pub fn set_initiate_select_ok(&self, value: bool) {\n\n self.initiate_select_ok.set(value);\n\n for selector in self.paint_collns.borrow().iter() {\n", "file_path": "src/colln_paint/binder.rs", "rank": 90, "score": 65062.78794568658 }, { "content": " let expln = format!(\"\\\"{:?}\\\" \\\"{}\\\"\\n\", path, err.to_string());\n\n let msg = \"I/O Error\";\n\n self.warn_user(msg, Some(expln.as_str()));\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n //use super::*;\n\n\n\n #[test]\n\n fn it_works() {}\n\n}\n", "file_path": "src/colln_paint/binder.rs", "rank": 91, "score": 65059.76529763378 }, { "content": "// Copyright 2017 Peter Williams <pwil3058@gmail.com> <pwil3058@bigpond.net.au>\n\n\n\nuse std::cell::{Cell, RefCell};\n\nuse std::fs::File;\n\nuse std::io::{Read, Write};\n\nuse std::path::{Path, PathBuf};\n\nuse std::rc::Rc;\n\n\n\nuse pw_gix::{\n\n gtkx::notebook::*,\n\n recollections::{recall, remember},\n\n wrapper::*,\n\n};\n\n\n\nuse pw_pathux;\n\n\n\nuse super::collection::*;\n\nuse super::*;\n\nuse crate::basic_paint::*;\n\n\n", "file_path": "src/colln_paint/binder.rs", "rank": 92, "score": 65057.68177795237 }, { "content": " vbox: gtk::Box::new(gtk::Orientation::Vertical, 0),\n\n notebook: gtk::Notebook::new(),\n\n load_colln_button: gtk::Button::new(),\n\n initiate_select_ok: Cell::new(false),\n\n paint_selected_callbacks: RefCell::new(Vec::new()),\n\n paint_collns: RefCell::new(Vec::new()),\n\n paint_colln_files_data_path: data_path.to_path_buf(),\n\n });\n\n cpcb.notebook.set_scrollable(true);\n\n cpcb.notebook.popup_enable();\n\n\n\n cpcb.load_colln_button\n\n .set_tooltip_text(Some(\"Load collection from file.\"));\n\n cpcb.load_colln_button\n\n .set_image(Some(&CID::colln_load_image(24)));\n\n\n\n let hbox = gtk::Box::new(gtk::Orientation::Horizontal, 0);\n\n hbox.pack_start(&cpcb.load_colln_button, false, true, 2);\n\n cpcb.vbox.pack_start(&hbox, false, false, 2);\n\n cpcb.vbox.pack_start(&cpcb.notebook, true, true, 0);\n", "file_path": "src/colln_paint/binder.rs", "rank": 93, "score": 65055.528835926925 }, { "content": " selector.0.set_initiate_select_ok(value);\n\n }\n\n }\n\n\n\n fn inform_paint_selected(&self, paint: &CollnPaint<C, CID>) {\n\n for callback in self.paint_selected_callbacks.borrow().iter() {\n\n callback(&paint);\n\n }\n\n }\n\n\n\n fn remove_paint_colln_at_index(&self, index: usize) {\n\n let selector = self.paint_collns.borrow_mut().remove(index);\n\n let page_num = self.notebook.page_num(&selector.0.pwo());\n\n self.notebook.remove_page(page_num);\n\n }\n\n\n\n fn remove_paint_colln(&self, ps_id: &Rc<CID>) {\n\n if let Ok(index) = self.find_cid(ps_id) {\n\n self.remove_paint_colln_at_index(index);\n\n } else {\n", "file_path": "src/colln_paint/binder.rs", "rank": 94, "score": 65055.35929075059 }, { "content": " if let Some(ref file_path) = self.saved_file_path() {\n\n let path_text = pw_pathux::path_to_string(file_path);\n\n self.file_path_text.set_label(&path_text);\n\n remember(\n\n &CID::recollection_name_for(\"last_colln_edited_file\"),\n\n &path_text,\n\n );\n\n } else {\n\n self.file_path_text.set_label(\"\");\n\n };\n\n self.update_file_button_sensitivities();\n\n 
}\n\n\n\n fn write_to_file(&self, path: &Path) -> Result<(), PaintError<C>> {\n\n if let Some(colln_id) = self.cid_entry.get_colln_id() {\n\n let spec = PaintCollnSpec::<C, CID> {\n\n colln_id: colln_id,\n\n paint_specs: self.basic_paint_factory.get_paint_specs(),\n\n };\n\n let mut file = File::create(path)?;\n", "file_path": "src/colln_paint/editor.rs", "rank": 95, "score": 64930.32242569286 }, { "content": " panic!(\"File: {} Line: {}\", file!(), line!())\n\n }\n\n }\n\n\n\n fn set_edited_paint(&self, o_paint: Option<&BasicPaint<C>>) {\n\n if let Some(paint) = o_paint {\n\n // TODO: check for unsaved changes before setting edited spec\n\n *self.edited_paint.borrow_mut() = Some(paint.clone());\n\n self.paint_spec_entry\n\n .set_edited_spec(Some(paint.get_spec()))\n\n } else {\n\n *self.edited_paint.borrow_mut() = None;\n\n self.paint_spec_entry.set_edited_spec(None)\n\n };\n\n self.update_button_sensitivities();\n\n }\n\n\n\n fn set_file_data(&self, o_file_data: Option<FileData<C, CID>>) {\n\n // TODO: update displayed file path\n\n *self.file_data.borrow_mut() = o_file_data;\n", "file_path": "src/colln_paint/editor.rs", "rank": 96, "score": 64930.00906543638 }, { "content": " }\n\n } else {\n\n panic!(\"cannot save without collection id\")\n\n }\n\n }\n\n\n\n fn save_as(&self) -> Result<(), PaintError<C>> {\n\n let o_last_file = recall(&CID::recollection_name_for(\"last_colln_edited_file\"));\n\n let last_file = if let Some(ref text) = o_last_file {\n\n Some(text.as_str())\n\n } else {\n\n None\n\n };\n\n if let Some(path) = self.ask_file_path(Some(\"Save as:\"), last_file, false) {\n\n self.write_to_file(&path)\n\n } else {\n\n Err(PaintErrorType::UserCancelled.into())\n\n }\n\n }\n\n\n", "file_path": "src/colln_paint/editor.rs", "rank": 97, "score": 64928.764748509435 }, { "content": " } else {\n\n None\n\n };\n\n if let Some(path) = self.ask_file_path(Some(\"Load from:\"), last_file, true) {\n\n match PaintCollnSpec::from_file(&path) {\n\n Ok(spec) => {\n\n self.paint_spec_entry.set_edited_spec(None);\n\n self.cid_entry.set_colln_id(Some(&spec.colln_id));\n\n self.basic_paint_factory.clear();\n\n for paint_spec in spec.paint_specs.iter() {\n\n if let Err(err) = self.basic_paint_factory.add_paint(paint_spec) {\n\n self.report_error(\"Error\", &err)\n\n }\n\n }\n\n self.set_file_data(Some(FileData { path, spec }));\n\n }\n\n Err(err) => {\n\n let msg = format!(\"{:?}: Failed to load\", path);\n\n self.report_error(&msg, &err)\n\n }\n", "file_path": "src/colln_paint/editor.rs", "rank": 98, "score": 64927.20607845391 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\npub type CollnPaintEditor<A, C, CID> = Rc<CollnPaintEditorCore<A, C, CID>>;\n\n\n\nimpl<A, C, CID> SimpleCreation for CollnPaintEditor<A, C, CID>\n\nwhere\n\n A: ColourAttributesInterface + 'static,\n\n C: CharacteristicsInterface + 'static,\n\n CID: CollnIdInterface + 'static,\n\n{\n\n fn create() -> CollnPaintEditor<A, C, CID> {\n\n let add_paint_btn = gtk::Button::with_label(\"Add\");\n\n add_paint_btn.set_tooltip_text(Some(\n\n \"Add the paint defined by this specification to the collection\",\n\n ));\n\n let accept_changes_btn = gtk::Button::with_label(\"Accept\");\n\n accept_changes_btn.set_tooltip_text(Some(\"Accept the changes to the paint being edited\"));\n", "file_path": "src/colln_paint/editor.rs", "rank": 99, "score": 64927.10660511088 } ]
Rust
rounded-svg/src/main.rs
Ar37-rs/demos
b77283496f4076863cc16c059f1e1721932d3ea1
use fltk::{enums::*, prelude::*, *};
use std::cell::RefCell;
use std::ops::{Deref, DerefMut};
use std::rc::Rc;
use svg::node::element::Rectangle;
use svg::Document;

struct RoundedImageDisplay {
    frame_: frame::Frame,
    bordercolor_: Rc<RefCell<[u8; 3]>>,
    radius_: Rc<RefCell<i32>>,
}

impl RoundedImageDisplay {
    pub fn new(x: i32, y: i32, w: i32, h: i32, title: Option<&'static str>) -> Self {
        let mut frame_ = frame::Frame::new(x, y, w, h, title);
        let radius_ = 20;
        let bordercolor_ = [0x80, 0x80, 0x80];
        frame_.set_frame(FrameType::BorderBox);
        let radius_ = Rc::from(RefCell::from(radius_));
        let bordercolor_ = Rc::from(RefCell::from(bordercolor_));
        frame_.draw({
            let radius_ = radius_.clone();
            let bordercolor_ = bordercolor_.clone();
            move |f| {
                let radius_ = radius_.borrow();
                let bordercolor_ = bordercolor_.borrow();
                let rect = Rectangle::new()
                    .set("x", 0 - *radius_ / 2)
                    .set("y", 0 - *radius_ / 2)
                    .set("rx", *radius_)
                    .set("ry", *radius_)
                    .set("width", f.w() + *radius_)
                    .set("height", f.h() + *radius_)
                    .set("fill", "none")
                    .set(
                        "stroke",
                        format!(
                            "rgb({},{},{})",
                            bordercolor_[0], bordercolor_[1], bordercolor_[2],
                        ),
                    )
                    .set("stroke-width", *radius_);
                let document = Document::new()
                    .set("viewBox", (0, 0, f.w(), f.h()))
                    .add(rect);
                let mut svg = image::SvgImage::from_data(&document.to_string()).unwrap();
                svg.draw(f.x(), f.y(), f.w(), f.h())
            }
        });
        Self {
            frame_,
            radius_,
            bordercolor_,
        }
    }

    pub fn bordercolor(&mut self, r: u8, g: u8, b: u8) {
        let mut bordercolor = self.bordercolor_.borrow_mut();
        bordercolor[0] = r;
        bordercolor[1] = g;
        bordercolor[2] = b;
        self.frame_.parent().unwrap().redraw();
    }

    pub fn radius(&mut self, val: i32) {
        *self.radius_.borrow_mut() = val;
        self.frame_.parent().unwrap().redraw();
    }
}

impl Deref for RoundedImageDisplay {
    type Target = frame::Frame;

    fn deref(&self) -> &Self::Target {
        &self.frame_
    }
}

impl DerefMut for RoundedImageDisplay {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.frame_
    }
}

fn main() {
    let a = app::App::default().with_scheme(app::Scheme::Gtk);
    let border = [0x80, 0xa0, 0x80];
    let mut win = window::Window::default()
        .with_size(1000, 800)
        .with_label("Rounded Corners");
    win.set_color(Color::from_rgb(border[0], border[1], border[2]));
    let jpg = image::JpegImage::load("../opengl/ex.jpg").expect("Failed to open jpg file");
    let mut rimage = RoundedImageDisplay::new(10, 10, jpg.w(), jpg.h(), None);
    rimage.bordercolor(border[0], border[1], border[2]);
    rimage.radius(50);
    rimage.set_image(Some(jpg));
    let mut slider = valuator::Slider::new(1000 - 50, 10, 20, 200, "border\nradius");
    slider.set_align(Align::Bottom);
    slider.set_bounds(0., 200.);
    slider.set_value(20.);
    slider.do_callback();
    slider.set_color(Color::from_rgb(
        (border[0] as f64 / 1.5) as u8,
        (border[1] as f64 / 1.5) as u8,
        (border[2] as f64 / 1.5) as u8,
    ));
    slider.set_callback(move |s| {
        rimage.radius(s.value() as i32);
    });
    win.end();
    win.show();
    a.run().unwrap();
}
use fltk::{enums::*, prelude::*, *};
use std::cell::RefCell;
use std::ops::{Deref, DerefMut};
use std::rc::Rc;
use svg::node::element::Rectangle;
use svg::Document;

struct RoundedImageDisplay {
    frame_: frame::Frame,
    bordercolor_: Rc<RefCell<[u8; 3]>>,
    radius_: Rc<RefCell<i32>>,
}

impl RoundedImageDisplay {
    pub fn new(x: i32, y: i32, w: i32, h: i32, title: Option<&'static str>) -> Self {
        let mut frame_ = frame::Frame::new(x, y, w, h, title);
        let radius_ = 20;
        let bordercolor_ = [0x80, 0x80, 0x80];
        frame_.set_frame(FrameType::BorderBox);
        let radius_ = Rc::from(RefCell::from(radius_));
        let bordercolor_ = Rc::from(RefCell::from(bordercolor_));
        frame_.draw({
            let radius_ = radius_.clone();
            let bordercolor_ = bordercolor_.clone();
            move |f| {
                let radius_ = radius_.borrow();
                let bordercolor_ = bordercolor_.borrow();
                let rect = Rectangle::new()
                    .set("x", 0 - *radius_ / 2)
                    .set("y", 0 - *radius_ / 2)
                    .set("rx", *radius_)
                    .set("ry", *radius_)
                    .set("width", f.w() + *radius_)
                    .set("height", f.h() + *radius_)
                    .set("fill", "none")
                    .set(
                        "stroke",
                        format!(
                            "rgb({},{},{})",
                            bordercolor_[0], bordercolor_[1], bordercolor_[2],
                        ),
                    )
                    .set("stroke-width", *radius_);
                let document = Document::new()
                    .set("viewBox", (0, 0, f.w(), f.h()))
                    .add(rect);
                let mut svg = image::SvgImage::from_data(&document.to_string()).unwrap();
                svg.draw(f.x(), f.y(), f.w(), f.h())
            }
        });
        Self {
            frame_,
            radius_,
            bordercolor_,
        }
    }

    pub fn bordercolor(&mut self, r: u8, g: u8, b: u8) {
        let mut bordercolor = self.bordercolor_.borrow_mut();
        bordercolor[0] = r;
        bordercolor[1] = g;
        bordercolor[2] = b;
        self.frame_.parent().unwrap().redraw();
    }

    pub fn radius(&mut self, val: i32) {
        *self.radius_.borrow_mut() = val;
        self.frame_.parent().unwrap().redraw();
    }
}

impl Deref for RoundedImageDisplay {
    type Target = frame::Frame;

    fn deref(&self) -> &Self::Target {
        &self.frame_
    }
}

impl DerefMut for RoundedImageDisplay {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.frame_
    }
}

fn main() {
    let a = app::App::default().with_scheme(app::Scheme::Gtk);
    let border = [0x80, 0xa0, 0x80];
    let mut win = window::Window::default()
        .with_size(1000, 800)
        .with_label("Rounded Corners");
    win.set_color(Color::from_rgb(border[0], border[1], border[2]));
    let jpg = image::JpegImage::load("../opengl/ex.jpg").expect("Failed to open jpg file");
    let mut rimage = RoundedImageDisplay::new(10, 10, jpg.w(), jpg.h(), None);
    rimage.bordercolor(border[0], border[1], border[2]);
    rimage.radius(50);
    rimage.set_image(Some(jpg));
    let mut slider = valuator::Slider::new(1000 - 50, 10, 20, 200, "border\nradius");
    slider.set_align(Align::Bottom);
    slider.set_bounds(0., 200.);
    slider.set_value(20.);
    slider.do_callback();
    slider.set_color(Color::from_rgb(
        (border[0] as f64 / 1.5) as u8,
        (border[1] as f64 /
1.5) as u8,
        (border[2] as f64 / 1.5) as u8,
    ));
    slider.set_callback(move |s| {
        rimage.radius(s.value() as i32);
    });
    win.end();
    win.show();
    a.run().unwrap();
}
function_block-function_prefixed
[ { "content": "pub fn get_proc_address(win: &window::GlWindow, name: &str) -> *mut c_void {\n\n win.get_proc_address(name) as _\n\n}\n\n\n", "file_path": "libmpv/src/main.rs", "rank": 0, "score": 225561.0756709798 }, { "content": "// draw header with day names\n\nfn draw_header(txt: &str, x: i32, y: i32, w: i32, h: i32) {\n\n draw::push_clip(x, y, w, h);\n\n draw::draw_box(FrameType::ThinUpBox, x, y, w, h, Color::FrameDefault);\n\n draw::set_draw_color(Color::Black);\n\n draw::draw_text2(txt, x, y, w, h, Align::Center);\n\n draw::pop_clip();\n\n}\n\n\n", "file_path": "calendar/src/calendar.rs", "rank": 1, "score": 179066.379355228 }, { "content": "pub fn draw_line(x: i32, y: i32, x2: i32, y2: i32) -> Path {\n\n let mut pb = PathBuilder::new();\n\n pb.move_to(x as f32, y as f32);\n\n pb.line_to(x2 as f32, y2 as f32);\n\n let path = pb.finish();\n\n path\n\n}", "file_path": "raqote/src/main.rs", "rank": 2, "score": 170762.4028065584 }, { "content": "pub fn main() {\n\n let app = app::App::default();\n\n let mut wind = window::GlWindow::new(100, 100, W, H, \"Rotate me!\");\n\n\n\n wind.end();\n\n wind.show();\n\n\n\n let rotangle = Rc::from(RefCell::from(0.0));\n\n let rotangle_rc = rotangle.clone();\n\n\n\n wind.draw(move |_| draw_triangle(&rotangle_rc.borrow()));\n\n\n\n let (s, r) = app::channel::<(i32, i32)>();\n\n\n\n wind.handle(move |_, ev| match ev {\n\n enums::Event::Drag => {\n\n s.send(app::event_coords());\n\n true\n\n }\n\n _ => false,\n", "file_path": "opengl/src/main.rs", "rank": 3, "score": 158041.3867730993 }, { "content": "fn compile_shader(src: &str, ty: GLenum) -> GLuint {\n\n let shader;\n\n unsafe {\n\n shader = gl::CreateShader(ty);\n\n // Attempt to compile the shader\n\n let c_str = CString::new(src.as_bytes()).unwrap();\n\n gl::ShaderSource(shader, 1, &c_str.as_ptr(), ptr::null());\n\n gl::CompileShader(shader);\n\n\n\n // Get the compile status\n\n let mut status = gl::FALSE as GLint;\n\n gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);\n\n\n\n // Fail on error\n\n if status != (gl::TRUE as GLint) {\n\n let mut len = 0;\n\n gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buf = Vec::with_capacity(len as usize);\n\n buf.set_len((len as usize) - 1); // subtract 1 to skip the trailing null character\n\n gl::GetShaderInfoLog(\n", "file_path": "glut/src/main.rs", "rank": 4, "score": 146666.7124879836 }, { "content": "// draw the numbers\n\nfn draw_data(day: i32, x: i32, y: i32, w: i32, h: i32, selected: bool) {\n\n draw::push_clip(x, y, w, h);\n\n if selected {\n\n draw::set_draw_color(Color::from_u32(0xbcd9ea));\n\n } else {\n\n draw::set_draw_color(Color::White);\n\n }\n\n draw::draw_rectf(x, y, w, h);\n\n draw::set_draw_color(Color::Gray0);\n\n draw::draw_text2(&format!(\"{}\", day), x, y, w, h, Align::Center);\n\n draw::pop_clip();\n\n}\n", "file_path": "calendar/src/calendar.rs", "rank": 7, "score": 133607.90667382086 }, { "content": "fn convert_to_rgba(arr: &[u8]) -> Vec<u8> {\n\n let mut v = vec![];\n\n for (_, pixel) in arr.chunks_exact(4).enumerate() {\n\n v.push(pixel[2]);\n\n v.push(pixel[1]);\n\n v.push(pixel[0]);\n\n v.push(pixel[3]);\n\n }\n\n v\n\n}\n\n\n", "file_path": "cairo-demo/src/main.rs", "rank": 8, "score": 130184.2268705599 }, { "content": "type HWND = *mut std::os::raw::c_void;\n\npub static mut WINDOW: HWND = std::ptr::null_mut();\n\n\n", "file_path": "systray/src/main.rs", "rank": 9, "score": 120584.37095033744 }, { "content": "fn main() {\n\n let mut fb: Vec<u8> = vec![0u8; 300 * 300 * 3];\n\n for (iter, pixel) in 
fb.chunks_exact_mut(3).enumerate() {\n\n let x = iter % 300;\n\n let y = iter / 300;\n\n let (red, green, blue) = utils::hex2rgb((x ^ y) as u32);\n\n pixel.copy_from_slice(&[red, green, blue]);\n\n }\n\n\n\n let app = app::App::default();\n\n let mut main_win = Window::default().with_size(800, 600);\n\n let mut win = GlWindow::default().with_size(300, 300).center_of(&main_win);\n\n win.end();\n\n main_win.end();\n\n main_win.show();\n\n win.make_current();\n\n\n\n win.handle(|_, ev| match ev {\n\n Event::Push => {\n\n println!(\"Pushed\");\n", "file_path": "speedy2d/src/main.rs", "rank": 10, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default();\n\n let mut win = window::Window::default()\n\n .with_size(800, 600)\n\n .with_label(\"Webview\");\n\n let mut wv_win = window::Window::default()\n\n .with_size(790, 590)\n\n .center_of_parent();\n\n win.end();\n\n win.show();\n\n\n\n let mut wv = fltk_webview::Webview::create(false, &mut wv_win);\n\n wv.navigate(\"https://google.com\");\n\n \n\n // the webview handles the main loop\n\n app.run().unwrap();\n\n}\n", "file_path": "webview/src/main.rs", "rank": 11, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default();\n\n let mut win = window::Window::default().with_size(400, 300);\n\n let mut frame = frame::Frame::new(10, 10, 380, 200, \"\");\n\n frame.set_frame(FrameType::EngravedBox);\n\n let mut but = button::Button::new(160, 220, 80, 40, \"Click me!\");\n\n win.end();\n\n win.show();\n\n\n\n but.set_callback(move |_| frame.set_label(\"Hello world!\"));\n\n\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n unsafe {\n\n WINDOW = win.raw_handle();\n\n }\n\n win.set_callback(|w| {\n\n // We intercept the closing of the window here\n\n unsafe {\n\n w.platform_hide();\n", "file_path": "systray/src/main.rs", "rank": 12, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let a = app::App::default();\n\n let mut win = window::Window::default().with_size(400, 300);\n\n win.end();\n\n win.show();\n\n let state: State = pollster::block_on(State::new(&win));\n\n while a.wait() {\n\n let frame = state.surface\n\n .get_current_frame()\n\n .expect(\"Failed to acquire next swap chain texture\")\n\n .output;\n\n let view = frame\n\n .texture\n\n .create_view(&wgpu::TextureViewDescriptor::default());\n\n let mut encoder =\n\n state.device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });\n\n {\n\n let mut rpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {\n\n label: None,\n\n color_attachments: &[wgpu::RenderPassColorAttachment {\n", "file_path": "wgpu/src/main.rs", "rank": 13, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let dt = DrawTarget::new(WIDTH, HEIGHT);\n\n let dt = Rc::from(RefCell::from(dt));\n\n let dt_c = dt.clone();\n\n\n\n let app = app::App::default().with_scheme(app::Scheme::Gtk);\n\n let mut win = window::Window::default().with_size(WIDTH + 10, HEIGHT + 10);\n\n let mut frame = frame::Frame::new(5, 5, WIDTH, HEIGHT, \"\");\n\n frame.set_frame(enums::FrameType::DownBox);\n\n frame.set_color(enums::Color::White);\n\n win.end();\n\n win.show();\n\n\n\n let mut x = 0;\n\n let mut y = 0;\n\n frame.handle(move |f, ev| match ev {\n\n enums::Event::Push => {\n\n let coords = app::event_coords();\n\n let path = draw_line(coords.0, coords.1, coords.0, coords.1);\n\n dt.borrow_mut().stroke(\n", "file_path": "raqote/src/main.rs", "rank": 14, "score": 105481.13247820188 }, { "content": "fn main() {\n\n\n\n let app = 
app::App::default();\n\n let mut win = window::GlWindow::default().with_size(800, 600);\n\n win.set_mode(enums::Mode::Opengl3);\n\n win.end();\n\n win.show();\n\n\n\n unsafe {\n\n let gl = glow::Context::from_loader_function(|s| {\n\n win.get_proc_address(s) as *const _\n\n });\n\n\n\n let vertex_array = gl\n\n .create_vertex_array()\n\n .expect(\"Cannot create vertex array\");\n\n gl.bind_vertex_array(Some(vertex_array));\n\n\n\n let program = gl.create_program().expect(\"Cannot create program\");\n\n\n", "file_path": "glow/src/main.rs", "rank": 15, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default();\n\n let mut win = window::GlWindow::default().with_size(500, 400);\n\n win.set_mode(enums::Mode::Opengl3);\n\n win.end();\n\n win.show();\n\n let gl_window = Rc::new(RefCell::new(win.clone()));\n\n \n\n struct Backend {\n\n gl_window: Rc<RefCell<window::GlWindow>>,\n\n }\n\n\n\n unsafe impl glium::backend::Backend for Backend {\n\n fn swap_buffers(&self) -> Result<(), glium::SwapBuffersError> {\n\n Ok(self.gl_window.borrow_mut().swap_buffers())\n\n }\n\n\n\n unsafe fn get_proc_address(&self, symbol: &str) -> *const c_void {\n\n self.gl_window.borrow().get_proc_address(symbol) as *const _\n\n }\n", "file_path": "glium/src/main.rs", "rank": 16, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default();\n\n let mut win = window::GlWindow::new(100, 100, 500, 400, \"\");\n\n win.set_mode(enums::Mode::Opengl3);\n\n win.end();\n\n win.show();\n\n win.make_current();\n\n\t\n\n gl::load_with(|s| win.get_proc_address(s));\n\n\n\n let vs = compile_shader(VS_SRC, gl::VERTEX_SHADER);\n\n let fs = compile_shader(FS_SRC, gl::FRAGMENT_SHADER);\n\n let program = link_program(vs, fs);\n\n\n\n let mut vao = 0;\n\n let mut vbo = 0;\n\n\n\n unsafe {\n\n // Create Vertex Array Object\n\n gl::GenVertexArrays(1, &mut vao);\n", "file_path": "glut/src/main.rs", "rank": 17, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default();\n\n let mut wind = DoubleWindow::default()\n\n .with_size(400, 300)\n\n .center_screen()\n\n .with_label(\"Music Player\");\n\n\n\n let mut frm = Frame::new(160, 80, 80, 40, TRACK);\n\n frm.set_label_size(20);\n\n frm.set_label_color(Color::White);\n\n let mut slider = FancySlider::new(50, 150);\n\n let mut but = PowerButton::new(160, 210);\n\n\n\n let sl = Soloud::default().unwrap();\n\n\n\n wind.set_color(Color::Black);\n\n wind.end();\n\n wind.show();\n\n\n\n let sl = Rc::from(RefCell::from(sl));\n", "file_path": "musicplayer/src/main.rs", "rank": 18, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default();\n\n let mut win = GlWindow::default()\n\n .with_size(640, 480)\n\n .with_label(\"femtovg example\");\n\n win.set_mode(enums::Mode::Opengl3);\n\n win.end();\n\n win.show();\n\n win.make_current();\n\n let renderer =\n\n OpenGl::new(|s| win.get_proc_address(s) as *const _).expect(\"Cannot create renderer\");\n\n let mut canvas = Canvas::new(renderer).expect(\"Cannot create canvas\");\n\n\n\n win.draw(move |w| {\n\n canvas.set_size(w.width() as u32, w.height() as u32, 1.);\n\n canvas.clear_rect(\n\n 0,\n\n 0,\n\n w.width() as u32,\n\n w.height() as u32,\n", "file_path": "femtovg/src/main.rs", "rank": 19, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default().with_scheme(app::AppScheme::Gtk);\n\n let mut win = window::Window::new(100, 100, 800, 600, \"Terminal\");\n\n\n\n // Create inner window to act as embedded 
terminal\n\n let mut xterm_win = window::Window::new(10, 10, 780, 520, \"\");\n\n xterm_win.end();\n\n xterm_win.set_color(Color::Black);\n\n\n\n win.end();\n\n win.show();\n\n win.make_resizable(true);\n\n\n\n let handle = xterm_win.raw_handle();\n\n std::process::Command::new(\"xterm\")\n\n .args(&[\"-into\", &format!(\"{}\", handle), \"-bg\", \"black\", \"-fg\", \"white\"])\n\n .spawn()\n\n .unwrap();\n\n\n\n app.run().unwrap();\n\n}\n\n\n", "file_path": "xterm/src/main.rs", "rank": 20, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let triangle = create_triangle();\n\n\n\n let mut paint = Paint::default();\n\n paint.anti_alias = true;\n\n paint.shader = Pattern::new(\n\n triangle.as_ref(),\n\n SpreadMode::Repeat,\n\n FilterQuality::Bicubic,\n\n 1.0,\n\n Transform::from_row(1.5, -0.4, 0.0, -0.8, 5.0, 1.0),\n\n );\n\n\n\n let path = PathBuilder::from_circle(200.0, 200.0, 180.0).unwrap();\n\n\n\n let mut pixmap = Pixmap::new(400, 400).unwrap();\n\n pixmap.fill_path(\n\n &path,\n\n &paint,\n\n FillRule::Winding,\n", "file_path": "tinyskia/src/main.rs", "rank": 21, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n if args.len() < 2 {\n\n println!(\"Usage: mpv <video file>\");\n\n std::process::exit(1);\n\n }\n\n let a = app::App::default().with_scheme(app::Scheme::Gleam);\n\n app::get_system_colors();\n\n let mut win = window::Window::default().with_size(800, 600);\n\n let mut mpv_win = window::GlWindow::new(5, 5, 790, 530, None);\n\n mpv_win.set_mode(Mode::Opengl3);\n\n let mut btn = button::Button::new(360, 545, 80, 40, \"@||\");\n\n win.end();\n\n win.make_resizable(true);\n\n win.show();\n\n mpv_win.make_current();\n\n\n\n let mut mpv = Mpv::new().expect(\"Error while creating MPV\");\n\n let render_context = RenderContext::new(\n\n unsafe { mpv.ctx.as_mut() },\n", "file_path": "libmpv/src/main.rs", "rank": 22, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default();\n\n app::set_background_color(170, 189, 206);\n\n dialog::message_title_default(\"Glyph Map\");\n\n let mut wind = window::Window::default()\n\n .with_size(250, 300)\n\n .with_label(\"Glyph Map\");\n\n wind.set_xclass(\"glyphmap\");\n\n let mut menu = menu::MenuBar::new(0, 0, 250, 30, None);\n\n menu.add_choice(\"File/Load font...|File/Quit|Help/About\");\n\n menu.set_color(Color::Background.lighter());\n\n menu.set_frame(FrameType::FlatBox);\n\n menu.set_text_size(14);\n\n let mut scroll =\n\n group::Scroll::new(0, 30, 250, 200, None).with_type(group::ScrollType::Vertical);\n\n let mut scrollbar = scroll.scrollbar();\n\n scrollbar.set_type(valuator::ScrollbarType::VerticalNice);\n\n let mut pack = group::Pack::default_fill();\n\n pack.end();\n\n scroll.end();\n", "file_path": "glyphmap/src/main.rs", "rank": 23, "score": 105481.13247820188 }, { "content": "fn main() {\n\n gstreamer::init().unwrap();\n\n let app = app::App::default().with_scheme(app::AppScheme::Gtk);\n\n let mut win = window::Window::new(100, 100, 800, 600, \"Media Player\");\n\n\n\n // Create inner window to act as embedded media player\n\n let mut gst_win = window::Window::new(10, 10, 780, 520, \"\");\n\n gst_win.end();\n\n gst_win.set_color(Color::Black);\n\n\n\n let mut but_play = button::Button::new(320, 545, 80, 40, \"@>\");\n\n let mut but_stop = button::Button::new(400, 545, 80, 40, \"@||\");\n\n\n\n win.end();\n\n win.show();\n\n win.make_resizable(true);\n\n\n\n let handle = gst_win.raw_handle();\n\n\n\n // gstreamer requires a 
uri\n", "file_path": "gst/src/main.rs", "rank": 24, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let files = std::fs::read_dir(\"historical_data\").unwrap();\n\n let app = app::App::default().with_scheme(app::Scheme::Gtk);\n\n app::background(79, 79, 79);\n\n app::background2(41, 41, 41);\n\n app::foreground(255, 255, 255);\n\n let mut wind = window::Window::default().with_size(800, 600);\n\n let mut browser = browser::Browser::new(5, 10, 100, 580, \"\");\n\n let mut frame = frame::Frame::default()\n\n .with_size(680, 580)\n\n .right_of(&browser, 10);\n\n wind.make_resizable(true);\n\n wind.show();\n\n\n\n browser.set_type(browser::BrowserType::Hold);\n\n for file in files {\n\n let entry = file.unwrap().file_name().into_string().unwrap();\n\n if entry.ends_with(\".csv\") {\n\n browser.add(&entry.strip_suffix(\".csv\").unwrap());\n\n }\n", "file_path": "csv/src/main.rs", "rank": 25, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default().with_scheme(app::AppScheme::Gtk);\n\n let mut win = window::Window::new(100, 100, 800, 600, \"Media Player\");\n\n\n\n // Create inner window to act as embedded media player\n\n let mut vlc_win = window::Window::new(10, 10, 780, 520, \"\");\n\n vlc_win.end();\n\n vlc_win.set_color(Color::Black);\n\n\n\n let mut but_play = button::Button::new(320, 545, 80, 40, \"Play\");\n\n let mut but_stop = button::Button::new(400, 545, 80, 40, \"Stop\");\n\n\n\n win.end();\n\n win.show();\n\n win.make_resizable(true);\n\n\n\n // Take in same args as vlc\n\n let args: Vec<String> = std::env::args().collect();\n\n\n\n // Instantiate vlc instance and media player\n", "file_path": "libvlc/src/main.rs", "rank": 26, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default().with_scheme(app::AppScheme::Gtk);\n\n let mut win = window::Window::new(100, 100, 800, 600, \"Media Player\");\n\n\n\n // Create inner window to act as embedded media player\n\n let mut mpv_win = window::Window::new(10, 10, 780, 520, \"\");\n\n mpv_win.end();\n\n mpv_win.set_color(Color::Black);\n\n\n\n win.end();\n\n win.show();\n\n win.make_resizable(true);\n\n\n\n let handle = mpv_win.raw_handle();\n\n std::process::Command::new(\"mpv\")\n\n .args(&[&format!(\"--wid={}\", handle as u64), \"../libvlc/video.mp4\"])\n\n .spawn()\n\n .unwrap();\n\n\n\n app.run().unwrap();\n\n}\n", "file_path": "mpv/src/main.rs", "rank": 27, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let app = app::App::default().with_scheme(app::Scheme::Gtk);\n\n app::background(0xd3, 0xd3, 0xd3);\n\n let mut win = window::Window::new(100, 100, 400, 300, \"\");\n\n let mut but = button::Button::new(160, 200, 80, 40, \"Click\");\n\n win.end();\n\n win.show();\n\n but.set_callback(move |_| {\n\n let cal = calendar::Calendar::default(); // or calendar::Calendar::new(200, 100);\n\n let date = cal.get_date();\n\n println!(\"{:?}\", date);\n\n if let Some(date) = date {\n\n println!(\"{:?}\", date.year());\n\n println!(\"{:?}\", date.month());\n\n println!(\"{:?}\", date.day());\n\n }\n\n });\n\n app.run().unwrap();\n\n}", "file_path": "calendar/src/main.rs", "rank": 28, "score": 105481.13247820188 }, { "content": "fn main() {\n\n let a = app::App::default().with_scheme(app::Scheme::Gtk);\n\n app::get_system_colors();\n\n app::set_font_size(20);\n\n let mut main_win = window::Window::new(100, 100, SCREEN_WIDTH as _, SCREEN_HEIGHT as _, None);\n\n let mut glut_win = window::GlWindow::new(5, 5, main_win.w() - 200, main_win.h() - 10, None);\n\n 
glut_win.set_mode(Mode::Opengl3);\n\n glut_win.end();\n\n let mut col = group::Flex::default()\n\n .column()\n\n .with_size(185, 590)\n\n .right_of(&glut_win, 5);\n\n col.set_frame(FrameType::DownBox);\n\n let mut frm = frame::Frame::default();\n\n frm.set_color(Color::Red.inactive());\n\n frm.set_frame(FrameType::FlatBox);\n\n let mut slider = valuator::Slider::default().with_type(valuator::SliderType::HorizontalFill);\n\n slider.set_slider_frame(FrameType::RFlatBox);\n\n slider.set_slider_size(0.20);\n\n slider.set_color(Color::Blue.inactive());\n", "file_path": "egui-demo/src/main.rs", "rank": 29, "score": 104146.38719154336 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n if args.len() < 2 {\n\n println!(\"Usage: mpv <video file>\");\n\n std::process::exit(1);\n\n }\n\n let a = app::App::default();\n\n let mut win = window::Window::default().with_size(400, 300);\n\n let mut mpv_win = window::GlWindow::default()\n\n .with_size(390, 290)\n\n .center_of_parent();\n\n mpv_win.set_mode(Mode::Opengl3);\n\n win.end();\n\n win.make_resizable(true);\n\n win.show();\n\n mpv_win.make_current();\n\n\n\n let mut mpv_gl: *mut mpv_render_context = null_mut();\n\n\n\n unsafe {\n", "file_path": "libmpv/src/sys_main.rs", "rank": 30, "score": 104146.38719154336 }, { "content": "fn main() {\n\n let app = app::App::default().with_scheme(app::AppScheme::Gtk);\n\n let mut win = window::Window::new(100, 100, 600, 600, \"Cairo\");\n\n\n\n let mut wid = CairoWidget::new(0, 0, 600, 600, \"Label\");\n\n\n\n wid.draw(move |w| {\n\n let ctx = &w.ctx;\n\n ctx.set_source_rgb(0.0, 0.0, 1.0);\n\n ctx.paint().unwrap();\n\n let surface = ImageSurface::try_from(ctx.target()).unwrap();\n\n surface\n\n .with_data(|s| {\n\n let temp = convert_to_rgba(s);\n\n draw::draw_image(&temp, w.x(), w.y(), 600, 600, ColorDepth::Rgba8).unwrap();\n\n })\n\n .unwrap();\n\n });\n\n\n\n win.end();\n\n win.show();\n\n\n\n app.run().unwrap();\n\n}\n", "file_path": "cairo-demo/src/main.rs", "rank": 31, "score": 104146.38719154336 }, { "content": "fn link_program(vs: GLuint, fs: GLuint) -> GLuint {\n\n unsafe {\n\n let program = gl::CreateProgram();\n\n gl::AttachShader(program, vs);\n\n gl::AttachShader(program, fs);\n\n gl::LinkProgram(program);\n\n // Get the link status\n\n let mut status = gl::FALSE as GLint;\n\n gl::GetProgramiv(program, gl::LINK_STATUS, &mut status);\n\n\n\n // Fail on error\n\n if status != (gl::TRUE as GLint) {\n\n let mut len: GLint = 0;\n\n gl::GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut len);\n\n let mut buf = Vec::with_capacity(len as usize);\n\n buf.set_len((len as usize) - 1); // subtract 1 to skip the trailing null character\n\n gl::GetProgramInfoLog(\n\n program,\n\n len,\n\n ptr::null_mut(),\n", "file_path": "glut/src/main.rs", "rank": 32, "score": 103009.81575746962 }, { "content": "fn main() {\n\n let target_os = env::var(\"CARGO_CFG_TARGET_OS\").unwrap();\n\n if target_os.as_str() == \"macos\" {\n\n env::set_var(\"PIXELS_HIGH_PERF\", \"1\");\n\n }\n\n}\n", "file_path": "pixels/build.rs", "rank": 33, "score": 101430.54804585219 }, { "content": "fn main() -> Result<(), Error> {\n\n let app = app::App::default();\n\n let mut win = Window::default()\n\n .with_size(WIDTH as i32, HEIGHT as i32)\n\n .with_label(\"Hello Pixels\");\n\n win.make_resizable(true);\n\n win.end();\n\n win.show();\n\n\n\n let mut pixels = {\n\n let pixel_width = win.w() as u32;\n\n let pixel_height = win.h() as u32;\n\n let surface_texture = SurfaceTexture::new(pixel_width, pixel_height, 
&win);\n\n Pixels::new(WIDTH, HEIGHT, surface_texture)?\n\n };\n\n\n\n let mut world = World::new();\n\n\n\n while app.wait() {\n\n // Handle window events\n", "file_path": "pixels/src/main.rs", "rank": 34, "score": 96839.69303643511 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let img = Asset::get(\"ex.jpg\").ok_or_else(|| \"\")?;\n\n let img = ImageReader::new(Cursor::new(img))\n\n .with_guessed_format()?\n\n .decode()?;\n\n let (w, h) = img.dimensions();\n\n\n\n let app = app::App::default();\n\n let mut wind = window::Window::default().with_size(w as i32, h as i32);\n\n let mut frame = frame::Frame::default().size_of(&wind);\n\n wind.end();\n\n wind.show();\n\n\n\n frame.draw(move |_| {\n\n draw::draw_image(&img.to_rgb8(), 0, 0, w as i32, h as i32, ColorDepth::Rgb8).unwrap();\n\n });\n\n\n\n // // Or just convert to fltk::image::RgbImage\n\n // let rgb = fl_image::RgbImage::new(&img.to_rgb8(), w, h, ColorDepth::Rgb8)?;\n\n // frame.set_image(Some(rgb));\n\n\n\n app.run()?;\n\n Ok(())\n\n}\n", "file_path": "image/src/main.rs", "rank": 35, "score": 89999.71939371146 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let mut buf = vec![0u8; W * H * 3];\n\n\n\n let fx: f64 = 1.0;\n\n let fy: f64 = 1.1;\n\n let xphase: f64 = 0.0;\n\n let yphase: f64 = 0.1;\n\n\n\n let app = app::App::default();\n\n let mut win = window::Window::default().with_size(W as i32, H as i32);\n\n let mut frame = frame::Frame::default().size_of(&win);\n\n win.end();\n\n win.show();\n\n let root =\n\n BitMapBackend::<RGBPixel>::with_buffer_and_format(&mut buf, (W as u32, H as u32))?\n\n .into_drawing_area();\n\n root.fill(&BLACK)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .margin(10)\n", "file_path": "plotters/src/main.rs", "rank": 36, "score": 89999.71939371146 }, { "content": "fn main() -> Result<(), Box<dyn error::Error>> {\n\n fs::create_dir(&*VIDEO_TEMP_DIR).ok();\n\n\n\n process::Command::new(\"ffmpeg\")\n\n .args(&[\n\n \"-i\",\n\n \"../libvlc/video.mp4\",\n\n &format!(\"{}/%d.bmp\", &*VIDEO_TEMP_DIR),\n\n \"-y\",\n\n ])\n\n .status()\n\n .unwrap();\n\n\n\n let mut signals = Signals::new(&[SIGINT])?;\n\n thread::spawn(move || {\n\n for _sig in signals.forever() {\n\n fs::remove_dir_all(&*VIDEO_TEMP_DIR).unwrap();\n\n }\n\n });\n\n\n", "file_path": "ffmpeg/src/main.rs", "rank": 37, "score": 87062.40552642383 }, { "content": "struct MyApp {}\n\n\n\nimpl Drop for MyApp {\n\n fn drop(&mut self) {\n\n fs::remove_dir_all(&*VIDEO_TEMP_DIR).unwrap();\n\n }\n\n}\n\n\n", "file_path": "ffmpeg/src/main.rs", "rank": 38, "score": 85496.90836973091 }, { "content": "/// Representation of the application state. 
In this example, a circle will bounce around the screen.\n\nstruct World {\n\n circle_x: i16,\n\n circle_y: i16,\n\n velocity_x: i16,\n\n velocity_y: i16,\n\n}\n\n\n", "file_path": "pixels/src/main.rs", "rank": 39, "score": 85496.90836973091 }, { "content": "#[derive(RustEmbed)]\n\n#[folder = \"../glow/\"]\n\nstruct Asset;\n\n\n", "file_path": "image/src/main.rs", "rank": 40, "score": 85496.90836973091 }, { "content": "struct State {\n\n device: wgpu::Device,\n\n surface: wgpu::Surface,\n\n queue: wgpu::Queue,\n\n render_pipeline: wgpu::RenderPipeline,\n\n}\n\n\n\nimpl State {\n\n pub async fn new(win: &window::Window) -> State {\n\n let size = (win.w() as _, win.h() as _);\n\n let instance = wgpu::Instance::new(wgpu::Backends::all());\n\n let surface = unsafe { instance.create_surface(win) };\n\n let adapter = instance\n\n .request_adapter(&wgpu::RequestAdapterOptions {\n\n power_preference: wgpu::PowerPreference::default(),\n\n compatible_surface: Some(&surface),\n\n })\n\n .await\n\n .expect(\"Failed to find an appropriate adapter\");\n\n\n", "file_path": "wgpu/src/main.rs", "rank": 41, "score": 85496.90836973091 }, { "content": "struct World {\n\n circle_x: i16,\n\n circle_y: i16,\n\n velocity_x: i16,\n\n velocity_y: i16,\n\n}\n\n\n", "file_path": "framebuffer/src/main.rs", "rank": 42, "score": 85496.90836973091 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let app = app::App::default();\n\n let mut win = Window::default()\n\n .with_size(WIDTH as i32, HEIGHT as i32)\n\n .with_label(\"Framebuffer\");\n\n let mut frame = frame::Frame::default().size_of(&win);\n\n win.end();\n\n win.make_resizable(true);\n\n win.show();\n\n\n\n let mut framebuf: Vec<u8> = vec![0; (WIDTH * HEIGHT * 4) as usize];\n\n let mut world = World::new();\n\n unsafe { draw::draw_rgba_nocopy(&mut frame, &framebuf); }\n\n\n\n while app.wait() {\n\n world.update();\n\n world.draw(&mut framebuf);\n\n // draw::draw_rgba(&mut frame, &framebuf).unwrap(); // A safe variant of draw_rgba_nocopy\n\n win.redraw();\n\n app::sleep(0.016);\n", "file_path": "framebuffer/src/main.rs", "rank": 43, "score": 84379.02891330275 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct Item {\n\n #[serde(rename = \"userId\")]\n\n user_id: i32,\n\n id: i32,\n\n title: String,\n\n completed: bool,\n\n}\n\n\n", "file_path": "web-todo/src/main.rs", "rank": 44, "score": 83965.28643941443 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct Item {\n\n #[serde(rename = \"userId\")]\n\n user_id: i32,\n\n id: i32,\n\n title: String,\n\n completed: bool,\n\n}\n\n\n", "file_path": "web-todo2/src/main.rs", "rank": 45, "score": 83965.28643941443 }, { "content": "struct FlatButton {\n\n frm: frame::Frame,\n\n}\n\n\n\nconst RED: u32 = 0xe51c23;\n\nconst GREEN: u32 = 0x8bc34a;\n\n\n\nimpl FlatButton {\n\n pub fn new(w: i32, h: i32, title: &str) -> FlatButton {\n\n let mut w = FlatButton {\n\n frm: frame::Frame::new(0, 0, w, h, None),\n\n };\n\n w.frm.set_label(title);\n\n w.frm.set_frame(FrameType::RFlatBox);\n\n w.frm.set_color(Color::Red);\n\n w.frm.handle(move |w, ev| match ev {\n\n Event::Push => {\n\n if w.color() == Color::from_u32(GREEN) {\n\n w.set_color(Color::from_u32(RED));\n\n } else {\n", "file_path": "web-todo2/src/main.rs", "rank": 46, "score": 82508.46957342609 }, { "content": "struct FlatButton {\n\n frm: frame::Frame,\n\n}\n\n\n\nconst RED: u32 = 0xe51c23;\n\nconst GREEN: u32 = 0x8bc34a;\n\n\n\nimpl FlatButton {\n\n pub fn new(w: i32, h: i32, title: &str) -> FlatButton {\n\n let mut 
w = FlatButton {\n\n frm: frame::Frame::new(0, 0, w, h, None),\n\n };\n\n w.frm.set_label(title);\n\n w.frm.set_frame(FrameType::RFlatBox);\n\n w.frm.set_color(Color::Red);\n\n w.frm.handle(move |w, ev| match ev {\n\n Event::Push => {\n\n if w.color() == Color::from_u32(GREEN) {\n\n w.set_color(Color::from_u32(RED));\n\n } else {\n", "file_path": "web-todo/src/main.rs", "rank": 47, "score": 82508.46957342609 }, { "content": "#[derive(Clone)]\n\nstruct CairoWidget {\n\n frm: frame::Frame,\n\n ctx: Context,\n\n}\n\n\n\nimpl CairoWidget {\n\n pub fn new(x: i32, y: i32, w: i32, h: i32, label: &str) -> Self {\n\n let frm = frame::Frame::new(x, y, w, h, None).with_label(label);\n\n let surface = ImageSurface::create(Format::ARgb32, w, h).expect(\"Couldn’t create surface\");\n\n let ctx = Context::new(&surface).unwrap();\n\n Self { frm, ctx }\n\n }\n\n\n\n pub fn draw<F: FnMut(&mut Self) + 'static>(&mut self, mut cb: F) {\n\n let mut frm = self.clone();\n\n self.frm.draw(move |_| {\n\n cb(&mut frm);\n\n });\n\n }\n\n}\n\n\n\nfltk::widget_extends!(CairoWidget, frame::Frame, frm);\n\n\n", "file_path": "cairo-demo/src/main.rs", "rank": 48, "score": 82508.46957342609 }, { "content": "fn create_triangle() -> Pixmap {\n\n let mut paint = Paint::default();\n\n paint.set_color_rgba8(50, 127, 150, 200);\n\n paint.anti_alias = true;\n\n\n\n let mut pb = PathBuilder::new();\n\n pb.move_to(0.0, 20.0);\n\n pb.line_to(20.0, 20.0);\n\n pb.line_to(10.0, 0.0);\n\n pb.close();\n\n let path = pb.finish().unwrap();\n\n\n\n let mut pixmap = Pixmap::new(20, 20).unwrap();\n\n pixmap.fill_path(\n\n &path,\n\n &paint,\n\n FillRule::Winding,\n\n Transform::identity(),\n\n None,\n\n );\n\n pixmap\n\n}\n", "file_path": "tinyskia/src/main.rs", "rank": 49, "score": 74334.99925945683 }, { "content": "fn draw_triangle(rotangle: &f32) {\n\n unsafe {\n\n glEnable(GL_DEPTH_TEST);\n\n glDepthFunc(GL_ALWAYS); \n\n glDepthFunc(GL_LEQUAL);\n\n glDepthRange(0.0, 1.0);\n\n glDepthMask(1);\n\n glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);\n\n glMatrixMode(GL_PROJECTION);\n\n glLoadIdentity();\n\n glViewport(0, 0, W, H);\n\n gluPerspective(45.0, (W as f32 / H as f32).into(), 1.0, 10.0);\n\n glTranslatef(0.0, 0.0, -5.0);\n\n glMatrixMode(GL_MODELVIEW);\n\n glLoadIdentity();\n\n glRotatef(*rotangle, 1.0, 1.0, 0.0);\n\n glColor3f(1.0, 0.0, 0.0);\n\n glBegin(GL_POLYGON);\n\n glVertex3f(0.0, 1.0, 0.0);\n\n glVertex3f(1.0, -1.0, 1.0);\n", "file_path": "opengl/src/main.rs", "rank": 50, "score": 71672.14896504673 }, { "content": "use fltk::{enums::*, prelude::*, *};\n\nuse std::ops::{Deref, DerefMut};\n\n\n\npub struct FancySlider {\n\n s: valuator::Slider,\n\n}\n\n\n\nimpl FancySlider {\n\n pub fn new(x: i32, y: i32) -> Self {\n\n let mut s = valuator::Slider::new(x, y, 300, 10, \"\");\n\n s.set_type(valuator::SliderType::Horizontal);\n\n s.set_frame(FrameType::RFlatBox);\n\n s.set_color(Color::from_u32(0x868db1));\n\n s.draw(|s| {\n\n draw::set_draw_color(Color::Blue);\n\n draw::draw_pie(\n\n s.x() - 10 + (s.w() as f64 * s.value()) as i32,\n\n s.y() - 10,\n\n 30,\n\n 30,\n", "file_path": "musicplayer/src/fancy_slider.rs", "rank": 56, "score": 34868.11866900833 }, { "content": " 0.,\n\n 360.,\n\n );\n\n });\n\n Self { s }\n\n }\n\n}\n\n\n\nimpl Deref for FancySlider {\n\n type Target = valuator::Slider;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.s\n\n }\n\n}\n\n\n\nimpl DerefMut for FancySlider {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.s\n\n }\n\n}", "file_path": "musicplayer/src/fancy_slider.rs", "rank": 
57, "score": 34856.53290711051 }, { "content": " vec![\n\n RenderParam::ApiType(RenderParamApiType::OpenGl),\n\n RenderParam::InitParams(OpenGLInitParams {\n\n get_proc_address,\n\n ctx: mpv_win.clone(),\n\n }),\n\n ],\n\n )\n\n .expect(\"Failed creating render context\");\n\n mpv.event_context_mut().disable_deprecated_events().unwrap();\n\n mpv.playlist_load_files(&[(&args[1], FileState::AppendPlay, None)])\n\n .unwrap();\n\n\n\n btn.set_callback(move |b| {\n\n let prop: bool = mpv.get_property(\"pause\").unwrap();\n\n mpv.set_property(\"pause\", !prop).unwrap();\n\n if prop {\n\n b.set_label(\"@||\");\n\n } else {\n\n b.set_label(\"@>\");\n", "file_path": "libmpv/src/main.rs", "rank": 58, "score": 27364.165056158105 }, { "content": "use fltk::{prelude::*, *};\n\nuse plotters::prelude::*;\n\nuse plotters::style::Color;\n\nuse plotters_bitmap::bitmap_pixel::RGBPixel;\n\nuse plotters_bitmap::BitMapBackend;\n\nuse std::collections::VecDeque;\n\nuse std::error::Error;\n\nuse std::time::SystemTime;\n\nconst W: usize = 480;\n\nconst H: usize = 320;\n\n\n\nconst SAMPLE_RATE: f64 = 10_000.0;\n\nconst FREAME_RATE: f64 = 30.0;\n\n\n", "file_path": "plotters/src/main.rs", "rank": 59, "score": 27362.62356025568 }, { "content": "use glu_sys::*;\n\nuse fltk::{prelude::*, *};\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\nconst W: i32 = 600;\n\nconst H: i32 = 400;\n\n\n", "file_path": "opengl/src/main.rs", "rank": 60, "score": 27362.440477767588 }, { "content": " let mut inp = input::MultilineInput::default()\n\n .with_size(250, 67)\n\n .below_of(&scroll, 2);\n\n inp.set_text_size(22);\n\n inp.set_frame(FrameType::FlatBox);\n\n wind.end();\n\n wind.show();\n\n\n\n menu.set_callback(move |m| {\n\n if let Some(choice) = m.choice() {\n\n match choice.as_str() {\n\n \"Load font...\" => {\n\n let mut dlg = dialog::FileDialog::new(dialog::FileDialogType::BrowseFile);\n\n dlg.set_option(dialog::FileDialogOptions::NoOptions);\n\n dlg.set_filter(\"*.{ttf,otf}\");\n\n dlg.show();\n\n let filename = dlg.filename();\n\n if !filename.to_string_lossy().to_string().is_empty() && filename.exists() {\n\n let font_data = std::fs::read(&filename).unwrap();\n\n let face = ttf_parser::Face::from_slice(&font_data, 0).unwrap();\n", "file_path": "glyphmap/src/main.rs", "rank": 61, "score": 27360.62663774336 }, { "content": "use fltk::{enums::*, prelude::*, *};\n\nuse serde::Deserialize;\n\nuse std::sync::Mutex;\n\n\n\n#[macro_use]\n\nextern crate lazy_static;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Price {\n\n #[serde(rename = \"Date\")]\n\n date: String,\n\n #[serde(rename = \"Open\")]\n\n open: f64,\n\n #[serde(rename = \"High\")]\n\n high: f64,\n\n #[serde(rename = \"Low\")]\n\n low: f64,\n\n #[serde(rename = \"Close\")]\n\n close: f64,\n\n #[serde(rename = \"Volume\")]\n\n volume: usize,\n\n}\n\n\n\nlazy_static! 
{\n\n pub static ref PRICES: Mutex<Vec<Price>> = Mutex::new(vec![]);\n\n}\n\n\n", "file_path": "csv/src/main.rs", "rank": 62, "score": 27360.214302774624 }, { "content": "use fltk::{enums::Mode, prelude::*, *};\n\nuse libmpv::{\n\n render::{OpenGLInitParams, RenderContext, RenderParam, RenderParamApiType},\n\n FileState, Mpv,\n\n};\n\nuse std::os::raw::c_void;\n\n\n", "file_path": "libmpv/src/main.rs", "rank": 63, "score": 27360.195658731966 }, { "content": "use fltk::{prelude::*, *};\n\n\n\nuse gl::types::*;\n\nuse std::ffi::CString;\n\nuse std::mem;\n\nuse std::ptr;\n\nuse std::str;\n\n\n\n// Vertex data\n\nstatic VERTEX_DATA: [GLfloat; 6] = [0.0, 0.5, 0.5, -0.5, -0.5, -0.5];\n\n\n\n// Shader sources\n\nstatic VS_SRC: &'static str = \"\n\n#version 150\n\nin vec2 position;\n\nvoid main() {\n\n gl_Position = vec4(position, 0.0, 1.0);\n\n}\";\n\n\n\nstatic FS_SRC: &'static str = \"\n\n#version 150\n\nout vec4 out_color;\n\nvoid main() {\n\n out_color = vec4(1.0, 1.0, 1.0, 1.0);\n\n}\";\n\n\n", "file_path": "glut/src/main.rs", "rank": 64, "score": 27360.08971021112 }, { "content": " let _a = MyApp {};\n\n let app = app::App::default();\n\n let mut win = window::Window::default().with_size(600, 400);\n\n let mut frame = frame::Frame::default()\n\n .with_size(400, 300)\n\n .center_of_parent();\n\n frame.set_frame(FrameType::FlatBox);\n\n frame.set_color(Color::Black);\n\n let mut but = button::Button::new(260, 355, 80, 40, \"@+6>\");\n\n win.end();\n\n win.show();\n\n\n\n let i = rc::Rc::from(cell::RefCell::from(0));\n\n\n\n frame.draw({\n\n let i = i.clone();\n\n move |f| {\n\n if *i.borrow() == 0 {\n\n return;\n\n }\n", "file_path": "ffmpeg/src/main.rs", "rank": 65, "score": 27359.80063762786 }, { "content": " }\n\n\n\n frame.set_frame(FrameType::DownBox);\n\n frame.set_color(Color::Black);\n\n\n\n frame.draw(|f| {\n\n let data = PRICES.lock().unwrap();\n\n let mut highest = data\n\n .iter()\n\n .map(|elem| elem.low)\n\n .collect::<Vec<f64>>()\n\n .iter()\n\n .cloned()\n\n .fold(0. 
/ 0., f64::max);\n\n highest += (highest.to_string().len() * 10) as f64 / 3.;\n\n let factor = f.h() as f64 / highest;\n\n if data.len() != 0 {\n\n let step = f.w() / data.len() as i32;\n\n let mut idx = f.x() + step;\n\n for elem in &*data {\n", "file_path": "csv/src/main.rs", "rank": 66, "score": 27359.42817982511 }, { "content": " let open = f.h() - (elem.open * factor) as i32;\n\n let high = f.h() - (elem.high * factor) as i32;\n\n let low = f.h() - (elem.low * factor) as i32;\n\n let close = f.h() - (elem.close * factor) as i32;\n\n draw::set_draw_color(Color::White);\n\n draw::draw_line(idx, high, idx, low);\n\n let col = if close > open {\n\n Color::Red\n\n } else {\n\n Color::Green\n\n };\n\n draw::set_draw_color(col);\n\n draw::draw_rectf(idx - 2, open, 4, i32::abs(close - open));\n\n draw::set_draw_color(Color::White);\n\n idx += step;\n\n }\n\n }\n\n });\n\n\n\n browser.set_callback(move |t| {\n", "file_path": "csv/src/main.rs", "rank": 67, "score": 27358.69455280739 }, { "content": "use fltk::{app, enums::FrameType, prelude::*, *};\n\n\n\n#[cfg(target_os = \"windows\")]\n\nmod systray;\n\n\n", "file_path": "systray/src/main.rs", "rank": 68, "score": 27358.45131941454 }, { "content": "use fltk::{app, enums::*, frame::*, prelude::*, window::*};\n\nuse soloud::*;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\nmod power_button;\n\nuse power_button::PowerButton;\n\n\n\nmod fancy_slider;\n\nuse fancy_slider::FancySlider;\n\n\n\nconst TRACK: &str = \"Alarm.mp3\";\n\n\n", "file_path": "musicplayer/src/main.rs", "rank": 69, "score": 27358.310514339468 }, { "content": " Transform::identity(),\n\n None,\n\n );\n\n\n\n let app = app::App::default();\n\n let mut win = window::Window::default()\n\n .with_size(600, 400)\n\n .with_label(\"tiny-skia\");\n\n win.set_color(fltk::enums::Color::White);\n\n let mut frame = frame::Frame::default().with_size(400, 400).center_of(&win);\n\n win.end();\n\n win.show();\n\n draw::draw_rgba(&mut frame, pixmap.data()).unwrap();\n\n app.run().unwrap();\n\n}\n\n\n", "file_path": "tinyskia/src/main.rs", "rank": 70, "score": 27357.55209966218 }, { "content": " });\n\n\n\n while app.wait() {\n\n match r.recv() {\n\n Some(coords) => {\n\n let rand: f32 = ((coords.0 - W / 2) * (coords.1 - H / 2) / 360) as f32;\n\n *rotangle.borrow_mut() += rand;\n\n wind.redraw();\n\n }\n\n None => (),\n\n }\n\n }\n\n}\n\n\n", "file_path": "opengl/src/main.rs", "rank": 71, "score": 27357.511242167144 }, { "content": " let sl = sl.clone();\n\n move |s, ev| match ev {\n\n Event::Push => true,\n\n Event::Drag => {\n\n let slider_x = s.x() as f32 / 50.0;\n\n let (x, _y) = app::event_coords();\n\n if x > 45 && x < 350 {\n\n s.set_pos(x - 15, 150);\n\n sl.borrow_mut().set_global_volume(slider_x);\n\n }\n\n app::redraw();\n\n true\n\n }\n\n _ => false,\n\n }\n\n });\n\n\n\n wind.set_callback(move |_| {\n\n // Triggered when the window closes\n\n sl.borrow().stop_all(); // Stop any playing audio before quitting\n\n app.quit();\n\n });\n\n\n\n app.run().unwrap();\n\n}\n", "file_path": "musicplayer/src/main.rs", "rank": 72, "score": 27357.25408618659 }, { "content": " self.velocity_y *= -1;\n\n }\n\n\n\n self.circle_x += self.velocity_x;\n\n self.circle_y += self.velocity_y;\n\n }\n\n\n\n fn draw(&self, frame: &mut [u8]) {\n\n for (i, pixel) in frame.chunks_exact_mut(4).enumerate() {\n\n let x = (i % WIDTH as usize) as i16;\n\n let y = (i / WIDTH as usize) as i16;\n\n let d = {\n\n let xd = x as i32 - self.circle_x as i32;\n\n let yd = y as i32 - self.circle_y as i32;\n\n ((xd.pow(2) 
+ yd.pow(2)) as f64).sqrt().powi(2)\n\n };\n\n let inside_the_circle = d < (CIRCLE_RADIUS as f64).powi(2);\n\n\n\n let rgba = if inside_the_circle {\n\n [0xac, 0x00, 0xe6, 0xff]\n\n } else {\n\n [0x26, 0x00, 0x33, 0xff]\n\n };\n\n\n\n pixel.copy_from_slice(&rgba);\n\n }\n\n }\n\n}\n", "file_path": "framebuffer/src/main.rs", "rank": 73, "score": 27357.087447241505 }, { "content": " push_constant_ranges: &[],\n\n });\n\n\n\n let swapchain_format = surface.get_preferred_format(&adapter).unwrap();\n\n\n\n let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {\n\n label: None,\n\n layout: Some(&pipeline_layout),\n\n vertex: wgpu::VertexState {\n\n module: &shader,\n\n entry_point: \"vs_main\",\n\n buffers: &[],\n\n },\n\n fragment: Some(wgpu::FragmentState {\n\n module: &shader,\n\n entry_point: \"fs_main\",\n\n targets: &[swapchain_format.into()],\n\n }),\n\n primitive: wgpu::PrimitiveState::default(),\n\n depth_stencil: None,\n", "file_path": "wgpu/src/main.rs", "rank": 74, "score": 27357.07048081005 }, { "content": " let uri = \"../libvlc/video.mp4\".to_owned();\n\n let mut path = String::from(\"file:///\");\n\n let current_dir = std::env::current_dir().unwrap();\n\n let video_file = current_dir.join(uri);\n\n path += &video_file.to_str().unwrap();\n\n\n\n let playbin = gstreamer::ElementFactory::make(\"playbin\", None).unwrap();\n\n playbin.set_property(\"uri\", &path).unwrap();\n\n let video_overlay = playbin\n\n .clone()\n\n .dynamic_cast::<gstreamer_video::VideoOverlay>()\n\n .unwrap();\n\n\n\n unsafe {\n\n video_overlay.set_window_handle(handle as _);\n\n }\n\n\n\n let (s, r) = app::channel::<Message>();\n\n\n\n but_play.emit(s, Message::Play);\n", "file_path": "gst/src/main.rs", "rank": 75, "score": 27356.80524335762 }, { "content": " self.velocity_x *= -1;\n\n }\n\n if self.circle_y - CIRCLE_RADIUS <= 0 || self.circle_y + CIRCLE_RADIUS > HEIGHT as i16 {\n\n self.velocity_y *= -1;\n\n }\n\n\n\n self.circle_x += self.velocity_x;\n\n self.circle_y += self.velocity_y;\n\n }\n\n\n\n /// Draw the `World` state to the frame buffer.\n\n ///\n\n /// Assumes the default texture format: `wgpu::TextureFormat::Rgba8UnormSrgb`\n\n fn draw(&self, frame: &mut [u8]) {\n\n for (i, pixel) in frame.chunks_exact_mut(4).enumerate() {\n\n let x = (i % WIDTH as usize) as i16;\n\n let y = (i / WIDTH as usize) as i16;\n\n let d = {\n\n let xd = x as i32 - self.circle_x as i32;\n\n let yd = y as i32 - self.circle_y as i32;\n", "file_path": "pixels/src/main.rs", "rank": 76, "score": 27356.487318073006 }, { "content": "use fltk::{enums::Color, prelude::*, *};\n\nuse gstreamer_video::prelude::*;\n\n\n\n#[derive(Copy, Clone)]\n\npub enum Message {\n\n Play,\n\n Stop,\n\n}\n\n\n", "file_path": "gst/src/main.rs", "rank": 77, "score": 27356.241501395118 }, { "content": " app::awake();\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl World {\n\n /// Create a new `World` instance that can draw a moving circle.\n\n fn new() -> Self {\n\n Self {\n\n circle_x: 300,\n\n circle_y: 200,\n\n velocity_x: 5,\n\n velocity_y: 5,\n\n }\n\n }\n\n\n\n /// Update the `World` internal state; bounce the circle around the screen.\n\n fn update(&mut self) {\n\n if self.circle_x - CIRCLE_RADIUS <= 0 || self.circle_x + CIRCLE_RADIUS > WIDTH as i16 {\n", "file_path": "pixels/src/main.rs", "rank": 78, "score": 27356.056870857818 }, { "content": " .with_size(100, 0);\n\n out.set_value(&codepoint.to_string());\n\n out.set_frame(FrameType::FlatBox);\n\n out.set_color(Color::Background);\n\n let mut space = 
frame::Frame::default().with_size(50, 0);\n\n space.set_frame(FrameType::FlatBox);\n\n space.set_color(Color::White);\n\n let mut out = output::Output::default().with_size(150, 0);\n\n out.set_frame(FrameType::FlatBox);\n\n out.set_text_font(Font::Zapfdingbats);\n\n out.set_text_size(22);\n\n out.set_value(&txt);\n\n hpack.end();\n\n }\n\n }\n\n });\n\n }\n\n pack.end();\n\n scroll.redraw();\n\n }\n", "file_path": "glyphmap/src/main.rs", "rank": 79, "score": 27355.928812369824 }, { "content": "use fltk::{enums::Color, prelude::*, *};\n\nuse vlc::*;\n\n\n\n#[derive(Copy, Clone)]\n\npub enum Message {\n\n Play,\n\n Stop,\n\n}\n\n\n", "file_path": "libvlc/src/main.rs", "rank": 80, "score": 27355.748368016106 }, { "content": "use raqote::*;\n\nuse fltk::{\n\n app, enums, frame, draw,\n\n prelude::{WidgetBase, GroupExt, WidgetExt},\n\n window,\n\n};\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\n\n\nconst WIDTH: i32 = 500;\n\nconst HEIGHT: i32 = 400;\n\n\n", "file_path": "raqote/src/main.rs", "rank": 81, "score": 27355.718145513503 }, { "content": "use fltk::{\n\n app, button,\n\n enums::{Color, FrameType},\n\n frame, image,\n\n prelude::*,\n\n window,\n\n};\n\nuse signal_hook::{consts::signal::SIGINT, iterator::Signals};\n\nuse std::{cell, env, error, fs, process, rc, thread};\n\n\n\nlazy_static::lazy_static! {\n\n pub static ref VIDEO_TEMP_DIR: String = env::temp_dir().join(\"video_mp4\").to_string_lossy().to_string();\n\n}\n\n\n", "file_path": "ffmpeg/src/main.rs", "rank": 82, "score": 27355.61452175975 }, { "content": " if epoch - last_flushed > 1.0 / FREAME_RATE {\n\n let root = BitMapBackend::<RGBPixel>::with_buffer_and_format(\n\n &mut buf,\n\n (W as u32, H as u32),\n\n )?\n\n .into_drawing_area();\n\n let mut chart = cs.clone().restore(&root);\n\n chart.plotting_area().fill(&BLACK)?;\n\n\n\n chart\n\n .configure_mesh()\n\n .bold_line_style(&GREEN.mix(0.2))\n\n .light_line_style(&TRANSPARENT)\n\n .draw()?;\n\n\n\n chart.draw_series(data.iter().zip(data.iter().skip(1)).map(\n\n |(&(e, x0, y0), &(_, x1, y1))| {\n\n PathElement::new(\n\n vec![(x0, y0), (x1, y1)],\n\n &GREEN.mix(((e - epoch) * 20.0).exp()),\n", "file_path": "plotters/src/main.rs", "rank": 83, "score": 27355.570308137114 }, { "content": " let file = format!(\"{}/{}.bmp\", &*VIDEO_TEMP_DIR, *i.borrow());\n\n if std::path::Path::new(&file).exists() {\n\n let bmp = image::BmpImage::load(&file);\n\n if let Ok(mut bmp) = bmp {\n\n bmp.draw(f.x(), f.y(), f.w(), f.h());\n\n }\n\n fs::remove_file(file).unwrap();\n\n }\n\n }\n\n });\n\n\n\n but.set_callback(move |_| {\n\n while app::wait() {\n\n *i.borrow_mut() += 1;\n\n frame.redraw();\n\n app::sleep(0.001);\n\n }\n\n });\n\n\n\n Ok(app.run()?)\n\n}\n", "file_path": "ffmpeg/src/main.rs", "rank": 84, "score": 27355.424549544154 }, { "content": " buf.as_mut_ptr() as *mut GLchar,\n\n );\n\n panic!(\n\n \"{}\",\n\n str::from_utf8(&buf)\n\n .ok()\n\n .expect(\"ProgramInfoLog not valid utf8\")\n\n );\n\n }\n\n program\n\n }\n\n}\n\n\n", "file_path": "glut/src/main.rs", "rank": 85, "score": 27355.324087844816 }, { "content": " but_stop.emit(s, Message::Stop);\n\n\n\n while app.wait() {\n\n match r.recv() {\n\n Some(val) => match val {\n\n Message::Play => {\n\n playbin.set_state(gstreamer::State::Playing).ok();\n\n }\n\n Message::Stop => {\n\n playbin.set_state(gstreamer::State::Paused).ok();\n\n }\n\n },\n\n None => (),\n\n }\n\n }\n\n}\n", "file_path": "gst/src/main.rs", "rank": 86, "score": 27355.244138868606 }, { "content": " shader,\n\n len,\n\n ptr::null_mut(),\n\n buf.as_mut_ptr() 
as *mut GLchar,\n\n );\n\n panic!(\n\n \"{}\",\n\n str::from_utf8(&buf)\n\n .ok()\n\n .expect(\"ShaderInfoLog not valid utf8\")\n\n );\n\n }\n\n }\n\n shader\n\n}\n\n\n", "file_path": "glut/src/main.rs", "rank": 87, "score": 27355.182882654695 }, { "content": "\n\n but.set_callback({\n\n let sl = sl.clone();\n\n move |_| {\n\n if sl.borrow().active_voice_count() > 0 {\n\n // Checks that no active audio is playing\n\n sl.borrow().stop_all();\n\n return;\n\n }\n\n let mut wav = audio::Wav::default();\n\n wav.load(&std::path::Path::new(TRACK)).unwrap();\n\n wav.set_looping(true);\n\n sl.borrow().play(&wav);\n\n while sl.borrow().active_voice_count() > 0 {\n\n app.wait();\n\n }\n\n }\n\n });\n\n\n\n slider.handle({\n", "file_path": "musicplayer/src/main.rs", "rank": 88, "score": 27355.1482804229 }, { "content": " let instance = Instance::with_args(Some(args)).unwrap();\n\n let md = Media::new_path(&instance, \"video.mp4\").unwrap();\n\n let mdp = MediaPlayer::new(&instance).unwrap();\n\n mdp.set_media(&md);\n\n\n\n // Get vlc_win handle that we'll pass to libvlc\n\n // Linux u32, windows HWND, Mac NSWindow\n\n let handle = vlc_win.raw_handle();\n\n\n\n // Pass the handle to vlc\n\n // Method depends on the platform\n\n // For Linux\n\n #[cfg(target_os = \"linux\")]\n\n mdp.set_xwindow(handle as u32);\n\n // For Windows\n\n #[cfg(target_os = \"windows\")]\n\n mdp.set_hwnd(handle);\n\n // For MacOS\n\n #[cfg(target_os = \"macos\")]\n\n mdp.set_nsobject(utils::content_view(&vlc_win) as _);\n", "file_path": "libvlc/src/main.rs", "rank": 89, "score": 27355.064446609158 }, { "content": "use femtovg::{renderer::OpenGl, Canvas, Color, Paint, Path};\n\nuse fltk::{\n\n app, enums,\n\n prelude::{GroupExt, WidgetBase, WidgetExt, WindowExt},\n\n window::GlWindow,\n\n};\n\n\n", "file_path": "femtovg/src/main.rs", "rank": 90, "score": 27354.661981519934 }, { "content": " }\n\n });\n\n use crate::systray::NativeUi;\n\n systray::init().expect(\"Failed to init Native Windows GUI\");\n\n let _ui = systray::SystemTray::build_ui(Default::default()).expect(\"Failed to build UI\");\n\n systray::dispatch_thread_events_with_callback(move || {\n\n if win.shown() {\n\n app.run().unwrap();\n\n } else {\n\n app::sleep(0.030);\n\n }\n\n });\n\n }\n\n\n\n #[cfg(not(target_os = \"windows\"))]\n\n app.run().unwrap();\n\n}\n", "file_path": "systray/src/main.rs", "rank": 91, "score": 27354.554488218186 }, { "content": "use fltk::{\n\n app,\n\n enums::Event,\n\n prelude::{WidgetBase, WidgetExt, GroupExt, WindowExt},\n\n window::{GlWindow, Window},\n\n utils\n\n};\n\nuse speedy2d::GLRenderer;\n\nuse speedy2d::color::Color;\n\nuse speedy2d::dimen::Vector2;\n\nuse speedy2d::image::{ImageDataType, ImageSmoothingMode};\n\n\n", "file_path": "speedy2d/src/main.rs", "rank": 92, "score": 27354.29251241757 }, { "content": "\n\n // Disable event handling on vlc's side\n\n // Do it thru fltk\n\n mdp.set_key_input(false);\n\n mdp.set_mouse_input(false);\n\n\n\n let (s, r) = app::channel::<Message>();\n\n\n\n but_play.emit(s, Message::Play);\n\n but_stop.emit(s, Message::Stop);\n\n\n\n while app.wait() {\n\n match r.recv() {\n\n Some(val) => match val {\n\n Message::Play => mdp.play().unwrap(),\n\n Message::Stop => mdp.stop(),\n\n },\n\n None => (),\n\n }\n\n }\n\n}\n", "file_path": "libvlc/src/main.rs", "rank": 93, "score": 27354.21436044329 }, { "content": " .as_secs_f64();\n\n\n\n if let Some((ts, _, _)) = data.back() {\n\n if epoch - ts < 1.0 / SAMPLE_RATE {\n\n std::thread::sleep(std::time::Duration::from_secs_f64(epoch - ts));\n\n 
continue;\n\n }\n\n let mut ts = *ts;\n\n while ts < epoch {\n\n ts += 1.0 / SAMPLE_RATE;\n\n let phase_x: f64 = 2.0 * ts * std::f64::consts::PI * fx + xphase;\n\n let phase_y: f64 = 2.0 * ts * std::f64::consts::PI * fy + yphase;\n\n data.push_back((ts, phase_x.sin(), phase_y.sin()));\n\n }\n\n }\n\n\n\n let phase_x = 2.0 * epoch * std::f64::consts::PI * fx + xphase;\n\n let phase_y = 2.0 * epoch * std::f64::consts::PI * fy + yphase;\n\n data.push_back((epoch, phase_x.sin(), phase_y.sin()));\n\n\n", "file_path": "plotters/src/main.rs", "rank": 94, "score": 27354.202208552542 }, { "content": " app::awake();\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl World {\n\n fn new() -> Self {\n\n Self {\n\n circle_x: 300,\n\n circle_y: 200,\n\n velocity_x: 5,\n\n velocity_y: 5,\n\n }\n\n }\n\n\n\n fn update(&mut self) {\n\n if self.circle_x - CIRCLE_RADIUS <= 0 || self.circle_x + CIRCLE_RADIUS > WIDTH as i16 {\n\n self.velocity_x *= -1;\n\n }\n\n if self.circle_y - CIRCLE_RADIUS <= 0 || self.circle_y + CIRCLE_RADIUS > HEIGHT as i16 {\n", "file_path": "framebuffer/src/main.rs", "rank": 95, "score": 27354.178944636697 }, { "content": " view: &view,\n\n resolve_target: None,\n\n ops: wgpu::Operations {\n\n load: wgpu::LoadOp::Clear(wgpu::Color::GREEN),\n\n store: true,\n\n },\n\n }],\n\n depth_stencil_attachment: None,\n\n });\n\n rpass.set_pipeline(&state.render_pipeline);\n\n rpass.draw(0..3, 0..1);\n\n }\n\n\n\n state.queue.submit(Some(encoder.finish()));\n\n }\n\n}\n", "file_path": "wgpu/src/main.rs", "rank": 96, "score": 27354.00075467717 }, { "content": " true\n\n },\n\n _ => false,\n\n });\n\n\n\n gl::load_with(|s| win.get_proc_address(s));\n\n\n\n let mut renderer = unsafe { GLRenderer::new_for_current_context((300, 300)) }.unwrap();\n\n\n\n renderer.draw_frame(|graphics| {\n\n graphics.clear_screen(Color::WHITE);\n\n let handle = graphics\n\n .create_image_from_raw_pixels(\n\n ImageDataType::RGB,\n\n ImageSmoothingMode::Linear,\n\n Vector2::new(300, 300),\n\n &fb,\n\n )\n\n .unwrap();\n\n graphics.draw_image(Vector2::new(0., 0.), &handle);\n\n });\n\n\n\n app.run().unwrap();\n\n}\n", "file_path": "speedy2d/src/main.rs", "rank": 97, "score": 27353.956417175697 }, { "content": " }\n\n \"#;\n\n\n\n let program = glium::Program::from_source(&context, vertex_shader_src, fragment_shader_src, None).unwrap();\n\n\n\n let mut target = glium::Frame::new(context.clone(), context.get_framebuffer_dimensions());\n\n target.clear_color(0.0, 0.0, 1.0, 1.0);\n\n target.draw(&vertex_buffer, &indices, &program, &glium::uniforms::EmptyUniforms,\n\n &Default::default()).unwrap();\n\n target.finish().unwrap();\n\n\n\n app.run().unwrap();\n\n}", "file_path": "glium/src/main.rs", "rank": 98, "score": 27353.89539815875 }, { "content": "mod calendar;\n\n\n\nuse fltk::{prelude::*, *};\n\nuse chrono::prelude::*;\n\n\n", "file_path": "calendar/src/main.rs", "rank": 99, "score": 27353.545618113825 } ]
Rust
build/sdk/meta/src/product_bundle_container.rs
allansrc/fuchsia
a2c235b33fc4305044d496354a08775f30cdcf37
use { crate::{ common::{ElementType, Envelope}, json::{schema, JsonObject}, metadata::Metadata, ProductBundleV1, }, serde::{Deserialize, Serialize}, }; #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct WorkaroundProductBundleWrapper { pub data: ProductBundleV1, pub schema_id: String, } #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct ProductBundleContainerV1 { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "type")] pub kind: ElementType, pub bundles: Vec<WorkaroundProductBundleWrapper>, } impl JsonObject for Envelope<ProductBundleContainerV1> { fn get_schema() -> &'static str { include_str!("../product_bundle_container-76a5c104.json") } fn get_referenced_schemata() -> &'static [&'static str] { &[ schema::COMMON, schema::HARDWARE_V1, schema::EMU_MANIFEST, schema::FLASH_MANIFEST_V1, schema::PRODUCT_BUNDLE_V1, ] } } #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct ProductBundleContainerV2 { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "type")] pub kind: ElementType, pub fms_entries: Vec<Metadata>, } impl JsonObject for Envelope<ProductBundleContainerV2> { fn get_schema() -> &'static str { include_str!("../product_bundle_container-32z5e391.json") } fn get_referenced_schemata() -> &'static [&'static str] { &[ schema::COMMON, schema::HARDWARE_V1, schema::EMU_MANIFEST, schema::FLASH_MANIFEST_V1, schema::PRODUCT_BUNDLE_V1, schema::PHYSICAL_DEVICE_V1, schema::VIRTUAL_DEVICE_V1, ] } } #[cfg(test)] mod tests { use super::*; test_validation! { name = test_validation_v1, kind = Envelope::<ProductBundleContainerV1>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-76a5c104.json", "data": { "name": "Fuchsia F1", "type": "product_bundle_container", "bundles": [ { "data": { "name": "generic-x64", "type": "product_bundle", "device_refs": ["generic-x64"], "images": [{ "base_uri": "gs://fuchsia/development/0.20201216.2.1/images/generic-x64.tgz", "format": "tgz" }], "packages": [{ "format": "tgz", "repo_uri": "gs://fuchsia/development/0.20201216.2.1/packages/generic-x64.tar.gz" }] }, "schema_id": "product_bundle-6320eef1.json#/definitions/product_bundle" } ] } } "#, valid = true, } test_validation! { name = test_validation_v1_invalid, kind = Envelope::<ProductBundleContainerV1>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-76a5c104.json", "data": { "name": "Fuchsia F1", "type": "cc_prebuilt_library", "bundles": [] } } "#, valid = false, } test_validation! { name = test_validation_v2_pbm, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "PBM container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "product_bundle", "device_refs": ["generic-x64"], "images": [{ "base_uri": "gs://fuchsia/development/0.20201216.2.1/images/generic-x64.tgz", "format": "tgz" }], "packages": [{ "format": "tgz", "repo_uri": "gs://fuchsia/development/0.20201216.2.1/packages/generic-x64.tar.gz" }] } ] } } "#, valid = true, } test_validation! 
{ name = test_validation_v2_virt_device, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "Virtual device container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "virtual_device", "hardware": { "audio": { "model": "hda" }, "cpu": { "arch": "x64" }, "inputs": { "pointing_device": "touch" }, "window_size": { "width": 640, "height": 480, "units": "pixels" }, "memory": { "quantity": 1, "units": "gigabytes" }, "storage": { "quantity": 1, "units": "gigabytes" } } } ] } } "#, valid = true, } test_validation! { name = test_validation_v2_phys_device, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "Virtual device container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "physical_device", "hardware": { "cpu": { "arch": "x64" } } } ] } } "#, valid = true, } }
use { crate::{ common::{ElementType, Envelope}, json::{schema, JsonObject}, metadata::Metadata, ProductBundleV1, }, serde::{Deserialize, Serialize}, }; #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct WorkaroundProductBundleWrapper { pub data: ProductBundleV1, pub schema_id: String, } #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct ProductBundleContainerV1 { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>,
opment/0.20201216.2.1/packages/generic-x64.tar.gz" }] }, "schema_id": "product_bundle-6320eef1.json#/definitions/product_bundle" } ] } } "#, valid = true, } test_validation! { name = test_validation_v1_invalid, kind = Envelope::<ProductBundleContainerV1>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-76a5c104.json", "data": { "name": "Fuchsia F1", "type": "cc_prebuilt_library", "bundles": [] } } "#, valid = false, } test_validation! { name = test_validation_v2_pbm, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "PBM container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "product_bundle", "device_refs": ["generic-x64"], "images": [{ "base_uri": "gs://fuchsia/development/0.20201216.2.1/images/generic-x64.tgz", "format": "tgz" }], "packages": [{ "format": "tgz", "repo_uri": "gs://fuchsia/development/0.20201216.2.1/packages/generic-x64.tar.gz" }] } ] } } "#, valid = true, } test_validation! { name = test_validation_v2_virt_device, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "Virtual device container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "virtual_device", "hardware": { "audio": { "model": "hda" }, "cpu": { "arch": "x64" }, "inputs": { "pointing_device": "touch" }, "window_size": { "width": 640, "height": 480, "units": "pixels" }, "memory": { "quantity": 1, "units": "gigabytes" }, "storage": { "quantity": 1, "units": "gigabytes" } } } ] } } "#, valid = true, } test_validation! { name = test_validation_v2_phys_device, kind = Envelope::<ProductBundleContainerV2>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-32z5e391.json", "data": { "name": "Virtual device container", "type": "product_bundle_container", "fms_entries": [ { "name": "generic-x64", "type": "physical_device", "hardware": { "cpu": { "arch": "x64" } } } ] } } "#, valid = true, } }
#[serde(rename = "type")] pub kind: ElementType, pub bundles: Vec<WorkaroundProductBundleWrapper>, } impl JsonObject for Envelope<ProductBundleContainerV1> { fn get_schema() -> &'static str { include_str!("../product_bundle_container-76a5c104.json") } fn get_referenced_schemata() -> &'static [&'static str] { &[ schema::COMMON, schema::HARDWARE_V1, schema::EMU_MANIFEST, schema::FLASH_MANIFEST_V1, schema::PRODUCT_BUNDLE_V1, ] } } #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct ProductBundleContainerV2 { pub name: String, #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "type")] pub kind: ElementType, pub fms_entries: Vec<Metadata>, } impl JsonObject for Envelope<ProductBundleContainerV2> { fn get_schema() -> &'static str { include_str!("../product_bundle_container-32z5e391.json") } fn get_referenced_schemata() -> &'static [&'static str] { &[ schema::COMMON, schema::HARDWARE_V1, schema::EMU_MANIFEST, schema::FLASH_MANIFEST_V1, schema::PRODUCT_BUNDLE_V1, schema::PHYSICAL_DEVICE_V1, schema::VIRTUAL_DEVICE_V1, ] } } #[cfg(test)] mod tests { use super::*; test_validation! { name = test_validation_v1, kind = Envelope::<ProductBundleContainerV1>, data = r#" { "schema_id": "http://fuchsia.com/schemas/sdk/product_bundle_container-76a5c104.json", "data": { "name": "Fuchsia F1", "type": "product_bundle_container", "bundles": [ { "data": { "name": "generic-x64", "type": "product_bundle", "device_refs": ["generic-x64"], "images": [{ "base_uri": "gs://fuchsia/development/0.20201216.2.1/images/generic-x64.tgz", "format": "tgz" }], "packages": [{ "format": "tgz", "repo_uri": "gs://fuchsia/devel
random
[]
Rust
src/lib.rs
Uriopass/inline_tweak
8ef340ea259854e21edfe2374c1237d1bf07a5e5
pub trait Tweakable: Sized { fn parse(x: &str) -> Option<Self>; } #[cfg(any(debug_assertions, feature = "release_tweak"))] mod itweak { use super::Tweakable; use lazy_static::*; use std::any::Any; use std::collections::{HashMap, HashSet}; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; use std::sync::Mutex; use std::time::{Instant, SystemTime}; macro_rules! impl_tweakable { ($($t: ty) +) => { $( impl Tweakable for $t { fn parse(x: &str) -> Option<$t> { x.parse().ok() } } )+ }; } impl_tweakable!(u8 u16 u32 u64 u128 i8 i16 i32 i64 i128 usize isize bool f32 f64); impl Tweakable for &'static str { fn parse(x: &str) -> Option<Self> { Some(Box::leak(Box::new(String::from( x.trim_start_matches('"').trim_end_matches('"'), )))) } } struct TweakValue { position: usize, value: Option<Box<dyn Any + Send>>, initialized: bool, last_checked: Instant, file_modified: SystemTime, } struct FileWatcher { last_checked: Instant, file_modified: SystemTime, } lazy_static! { static ref VALUES: Mutex<HashMap<(&'static str, u32, u32), TweakValue>> = Default::default(); static ref PARSED_FILES: Mutex<HashSet<&'static str>> = Default::default(); static ref WATCHERS: Mutex<HashMap<&'static str, FileWatcher>> = Default::default(); } fn try_open(file: &'static str) -> std::io::Result<File> { let p: &Path = file.as_ref(); if let Some(x) = File::open(p).ok() { return Ok(x); } let p: &Path = p .strip_prefix(p.iter().next().ok_or(std::io::Error::new( std::io::ErrorKind::Other, "path is empty", ))?) .map_err(|e| std::io::Error::new( std::io::ErrorKind::Other, e, ))?; File::open(&p) } fn last_modified(file: &File) -> Option<SystemTime> { file.metadata().ok()?.modified().ok() } fn parse_tweaks(fpath: &'static str) -> Option<()> { let mut fileinfos = PARSED_FILES.lock().unwrap(); if !fileinfos.contains(&fpath) { let mut values = VALUES.lock().unwrap(); let file = match try_open(fpath) { Ok(x) => x, Err(e) => { eprintln!("[inline-tweak] couldn't open file for tweaking: {}\n do you have the access rights? are you running this from the workspace root?", e); return None; } }; let file_modified = last_modified(&file).unwrap_or_else(SystemTime::now); let now = Instant::now(); let mut tweaks_seen = 0; for (line_n, line) in BufReader::new(file) .lines() .filter_map(|line| line.ok()) .enumerate() { for (column, _) in line.match_indices("tweak!(") { let path_corrected_column = line[..column] .rfind(|c: char| !(c.is_ascii_alphanumeric() || c == ':' || c == '_')) .map(|x| x + 1) .unwrap_or(0); values.insert( (fpath, line_n as u32 + 1, path_corrected_column as u32 + 1), TweakValue { position: tweaks_seen, value: None, initialized: false, last_checked: now, file_modified, }, ); tweaks_seen += 1; } } fileinfos.insert(fpath); } Some(()) } fn update_tweak<T: 'static + Tweakable + Clone + Send>( tweak: &mut TweakValue, fpath: &'static str, ) -> Option<()> { let file = try_open(fpath).ok()?; let last_modified = last_modified(&file)?; if tweak.value.is_none() || last_modified .duration_since(tweak.file_modified) .ok()? 
.as_secs_f32() > 0.5 { let mut tweaks_seen = 0; let line_str = BufReader::new(&file) .lines() .filter_map(|line| line.ok()) .find(|line| { tweaks_seen += line.matches("tweak!(").count(); tweaks_seen > tweak.position })?; let val_str = line_str .rsplit("tweak!(") .nth(tweaks_seen - tweak.position - 1)?; let mut prec = 1; let (end, _) = val_str.char_indices().find(|(_, c)| { match c { ';' | ')' if prec == 1 => { return true; } ')' => prec -= 1, '(' => prec += 1, _ => {} } false })?; let parsed: Option<T> = Tweakable::parse(&val_str[..end]); tweak.file_modified = last_modified; tweak.last_checked = Instant::now(); tweak.value = parsed.map(|inner| Box::new(inner) as Box<dyn Any + Send>); } Some(()) } pub(crate) fn get_value<T: 'static + Tweakable + Clone + Send>( initial_value: Option<T>, file: &'static str, line: u32, column: u32, ) -> Option<T> { parse_tweaks(file); let mut lock = VALUES.lock().unwrap(); let mut tweak = lock.get_mut(&(file, line, column))?; if !tweak.initialized { tweak.value = initial_value.map(|inner| Box::new(inner) as Box<dyn Any + Send>); tweak.initialized = true; } if tweak.last_checked.elapsed().as_secs_f32() > 0.5 { update_tweak::<T>(&mut tweak, file)?; } tweak.value.as_ref()?.downcast_ref().cloned() } pub fn watch_modified(file: &'static str) -> bool { let mut lock = WATCHERS.lock().unwrap(); let entry = lock.entry(file); let now = Instant::now(); let last_modified = try_open(file) .ok() .and_then(|f| last_modified(&f)) .unwrap_or_else(SystemTime::now); let watcher = entry.or_insert_with(|| FileWatcher { last_checked: now, file_modified: last_modified, }); watcher.last_checked = now; last_modified .duration_since(watcher.file_modified) .map(|time| { watcher.file_modified = last_modified; time.as_secs_f32() > 0.5 }) .unwrap_or(true) } } #[cfg(any(debug_assertions, feature = "release_tweak"))] pub fn inline_tweak<T: 'static + Tweakable + Clone + Send>( initial_value: Option<T>, file: &'static str, line: u32, column: u32, ) -> Option<T> { itweak::get_value(initial_value, file, line, column) } #[cfg(feature = "release_tweak")] #[macro_export] macro_rules! release_tweak { ($default:expr) => { inline_tweak::inline_tweak(None, file!(), line!(), column!()).unwrap_or_else(|| $default) }; ($value:literal; $default:expr) => { inline_tweak::inline_tweak(Some($value), file!(), line!(), column!()) .unwrap_or_else(|| $default) }; } #[cfg(debug_assertions)] #[macro_export] macro_rules! tweak { ($default:expr) => { inline_tweak::inline_tweak(None, file!(), line!(), column!()).unwrap_or_else(|| $default) }; ($value:literal; $default:expr) => { inline_tweak::inline_tweak(Some($value), file!(), line!(), column!()) .unwrap_or_else(|| $default) }; } #[cfg(not(debug_assertions))] #[macro_export] macro_rules! tweak { ($default:expr) => { $default }; ($value:literal; $default:expr) => { $default }; } #[cfg(debug_assertions)] pub fn watch_file(file: &'static str) { while !itweak::watch_modified(file) { std::thread::sleep(std::time::Duration::from_millis(500)); } } #[cfg(not(debug_assertions))] pub fn watch_file(_file: &'static str) {} #[macro_export] macro_rules! watch { () => { inline_tweak::watch_file(file!()); }; }
pub trait Tweakable: Sized { fn parse(x: &str) -> Option<Self>; } #[cfg(any(debug_assertions, feature = "release_tweak"))] mod itweak { use super::Tweakable; use lazy_static::*; use std::any::Any; use std::collections::{HashMap, HashSet}; use std::fs::File; use std::io::{BufRead, BufReader}; use std::path::Path; use std::sync::Mutex; use std::time::{Instant, SystemTime}; macro_rules! impl_tweakable { ($($t: ty) +) => { $( impl Tweakable for $t { fn parse(x: &str) -> Option<$t> { x.parse().ok() } } )+ }; } impl_tweakable!(u8 u16 u32 u64 u128 i8 i16 i32 i64 i128 usize isize bool f32 f64); impl Tweakable for &'static str { fn parse(x: &str) -> Option<Self> { Some(Box::leak(Box::new(String::from( x.trim_start_matches('"').trim_end_matches('"'), )))) } } struct TweakValue { position: usize, value: Option<Box<dyn Any + Send>>, initialized: bool, last_checked: Instant, file_modified: SystemTime, } struct FileWatcher { last_checked: Instant, file_modified: SystemTime, } lazy_static! { static ref VALUES: Mutex<HashMap<(&'static str, u32, u32), TweakValue>> = Default::default(); static ref PARSED_FILES: Mutex<HashSet<&'static str>> = Default::default(); static ref WATCHERS: Mutex<HashMap<&'static str, FileWatcher>> = Default::default(); } fn try_open(file: &'static str) -> std::io::Result<File> { let p: &Path = file.as_ref(); if let Some(x) = File::open(p).ok() { return Ok(x); } let p: &Path = p .strip_prefix(p.iter().next().ok_or(std::io::Error::new( std::io::ErrorKind::Other, "path is empty", ))?) .map_err(|e| std::io::Error::new( std::io::ErrorKind::Other, e, ))?; File::open(&p) } fn last_modified(file: &File) -> Option<SystemTime> { file.metadata().ok()?.modified().ok() } fn parse_tweaks(fpath: &'static str) -> Option<()> { let mut fileinfos = PARSED_FILES.lock().unwrap(); if !fileinfos.contains(&fpath) { let mut values = VALUES.lock().unwrap(); let file = match try_open(fpath) { Ok(x) => x, Err(e) => { eprintln!("[inline-tweak] couldn't open file for tweaking: {}\n do you have the access rights? are you running this from the workspace root?", e); return None; } }; let file_modified = last_modified(&file).unwrap_or_else(SystemTime::now); let now = Instant::now(); let mut tweaks_seen = 0; for (line_n, line) in BufReader::new(file) .lines() .filter_map(|line| line.ok()) .enumerate() { for (column, _) in line.match_indices("tweak!(") { let path_corrected_column = line[..column] .rfind(|c: char| !(c.is_ascii_alphanumeric() || c == ':' || c == '_')) .map(|x| x + 1) .unwrap_or(0); values.insert( (fpath, line_n as u32 + 1, path_corrected_column as u32 + 1), TweakValue { position: tweaks_seen, value: None, initialized: false, last_checked: now, file_modified, }, );
{ let mut tweaks_seen = 0; let line_str = BufReader::new(&file) .lines() .filter_map(|line| line.ok()) .find(|line| { tweaks_seen += line.matches("tweak!(").count(); tweaks_seen > tweak.position })?; let val_str = line_str .rsplit("tweak!(") .nth(tweaks_seen - tweak.position - 1)?; let mut prec = 1; let (end, _) = val_str.char_indices().find(|(_, c)| { match c { ';' | ')' if prec == 1 => { return true; } ')' => prec -= 1, '(' => prec += 1, _ => {} } false })?; let parsed: Option<T> = Tweakable::parse(&val_str[..end]); tweak.file_modified = last_modified; tweak.last_checked = Instant::now(); tweak.value = parsed.map(|inner| Box::new(inner) as Box<dyn Any + Send>); } Some(()) } pub(crate) fn get_value<T: 'static + Tweakable + Clone + Send>( initial_value: Option<T>, file: &'static str, line: u32, column: u32, ) -> Option<T> { parse_tweaks(file); let mut lock = VALUES.lock().unwrap(); let mut tweak = lock.get_mut(&(file, line, column))?; if !tweak.initialized { tweak.value = initial_value.map(|inner| Box::new(inner) as Box<dyn Any + Send>); tweak.initialized = true; } if tweak.last_checked.elapsed().as_secs_f32() > 0.5 { update_tweak::<T>(&mut tweak, file)?; } tweak.value.as_ref()?.downcast_ref().cloned() } pub fn watch_modified(file: &'static str) -> bool { let mut lock = WATCHERS.lock().unwrap(); let entry = lock.entry(file); let now = Instant::now(); let last_modified = try_open(file) .ok() .and_then(|f| last_modified(&f)) .unwrap_or_else(SystemTime::now); let watcher = entry.or_insert_with(|| FileWatcher { last_checked: now, file_modified: last_modified, }); watcher.last_checked = now; last_modified .duration_since(watcher.file_modified) .map(|time| { watcher.file_modified = last_modified; time.as_secs_f32() > 0.5 }) .unwrap_or(true) } } #[cfg(any(debug_assertions, feature = "release_tweak"))] pub fn inline_tweak<T: 'static + Tweakable + Clone + Send>( initial_value: Option<T>, file: &'static str, line: u32, column: u32, ) -> Option<T> { itweak::get_value(initial_value, file, line, column) } #[cfg(feature = "release_tweak")] #[macro_export] macro_rules! release_tweak { ($default:expr) => { inline_tweak::inline_tweak(None, file!(), line!(), column!()).unwrap_or_else(|| $default) }; ($value:literal; $default:expr) => { inline_tweak::inline_tweak(Some($value), file!(), line!(), column!()) .unwrap_or_else(|| $default) }; } #[cfg(debug_assertions)] #[macro_export] macro_rules! tweak { ($default:expr) => { inline_tweak::inline_tweak(None, file!(), line!(), column!()).unwrap_or_else(|| $default) }; ($value:literal; $default:expr) => { inline_tweak::inline_tweak(Some($value), file!(), line!(), column!()) .unwrap_or_else(|| $default) }; } #[cfg(not(debug_assertions))] #[macro_export] macro_rules! tweak { ($default:expr) => { $default }; ($value:literal; $default:expr) => { $default }; } #[cfg(debug_assertions)] pub fn watch_file(file: &'static str) { while !itweak::watch_modified(file) { std::thread::sleep(std::time::Duration::from_millis(500)); } } #[cfg(not(debug_assertions))] pub fn watch_file(_file: &'static str) {} #[macro_export] macro_rules! watch { () => { inline_tweak::watch_file(file!()); }; }
tweaks_seen += 1; } } fileinfos.insert(fpath); } Some(()) } fn update_tweak<T: 'static + Tweakable + Clone + Send>( tweak: &mut TweakValue, fpath: &'static str, ) -> Option<()> { let file = try_open(fpath).ok()?; let last_modified = last_modified(&file)?; if tweak.value.is_none() || last_modified .duration_since(tweak.file_modified) .ok()? .as_secs_f32() > 0.5
random
[ { "content": "fn do_fn(item: TokenStream, release_tweak: bool) -> TokenStream {\n\n let mut v: syn::ItemFn = parse_macro_input!(item as syn::ItemFn);\n\n\n\n let fname = v.sig.ident.clone();\n\n\n\n LiteralReplacer {\n\n nth: 0,\n\n fname,\n\n release_tweak,\n\n }\n\n .visit_item_fn_mut(&mut v);\n\n\n\n v.into_token_stream().into()\n\n}\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 3, "score": 71451.9127627853 }, { "content": "fn b() -> f32 {\n\n tweak!(2.5)\n\n}\n\n\n", "file_path": "examples/order.rs", "rank": 4, "score": 67963.0039401465 }, { "content": "fn a() -> f32 {\n\n tweak!(1.5)\n\n}\n\n\n", "file_path": "examples/order.rs", "rank": 5, "score": 67963.0039401465 }, { "content": "fn counter() -> i32 {\n\n static mut N: i32 = 0;\n\n unsafe {\n\n N += 1;\n\n N\n\n }\n\n}\n\n\n", "file_path": "examples/expression.rs", "rank": 6, "score": 65364.52573136047 }, { "content": "#[proc_macro_attribute]\n\npub fn tweak_fn(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n do_fn(item, false)\n\n}\n\n\n\n/// Makes all the number/bool/char literals in a function tweakable. \n\n/// Doesn't apply to literals in macros as they cannot be replaced by expressions reliably. (e.g in calls to println!)\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// # use std::time::Duration;\n\n/// #[inline_tweak::tweak_fn]\n\n/// fn main() {\n\n/// loop {\n\n/// let v = 1.0; // Try changing this value!\n\n/// println!(\"{}\", v);\n\n/// std::thread::sleep(Duration::from_millis(200)); // or even this value :)\n\n/// }\n\n/// }\n\n/// ```#[cfg(feature = \"release_tweak\")]\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 7, "score": 59161.567527769206 }, { "content": "#[proc_macro_attribute]\n\npub fn release_tweak_fn(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n do_fn(item, true)\n\n}\n\n\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 8, "score": 57792.54302670593 }, { "content": "struct LiteralReplacer {\n\n nth: usize,\n\n fname: Ident,\n\n release_tweak: bool,\n\n}\n\n\n\nimpl VisitMut for LiteralReplacer {\n\n fn visit_expr_mut(&mut self, i: &mut Expr) {\n\n match *i {\n\n Expr::Lit(ref l) => {\n\n match l.lit {\n\n Lit::Char(_) | Lit::Int(_) | Lit::Float(_) | Lit::Bool(_) | Lit::Str(_) => {}\n\n _ => return,\n\n }\n\n\n\n let lit = std::mem::replace(\n\n i,\n\n Expr::Break(ExprBreak {\n\n attrs: vec![],\n\n break_token: Default::default(),\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 9, "score": 50344.25932179425 }, { "content": "#[inline_tweak::tweak_fn]\n\nfn main() {\n\n loop {\n\n let char = 'c';\n\n let bool = true;\n\n let v = 1.0 + 5.0;\n\n println!(\"{} {} {} {}\", v, char, bool, s);\n\n inline_tweak::watch!();\n\n }\n\n}\n", "file_path": "examples/derive.rs", "rank": 10, "score": 35520.95455770853 }, { "content": "fn main() {\n\n loop {\n\n println!(\"{}\", tweak!(5.5)); // Try changing the value while the application is running\n\n std::thread::sleep(Duration::from_millis(500))\n\n }\n\n}\n", "file_path": "examples/loop.rs", "rank": 11, "score": 35515.709712622454 }, { "content": "fn main() {\n\n loop {\n\n // Try removing or changing the value while the application is running\n\n println!(\"{}\", tweak!(200; counter()));\n\n std::thread::sleep(Duration::from_millis(1000));\n\n }\n\n}\n", "file_path": "examples/expression.rs", "rank": 12, "score": 35515.709712622454 }, { "content": "fn main() {\n\n loop {\n\n println!(\"{}\", inline_tweak::tweak!(11.5)); // Try changing the value while the application is running\n\n 
inline_tweak::watch!();\n\n }\n\n}\n", "file_path": "examples/fullname.rs", "rank": 13, "score": 35515.709712622454 }, { "content": "fn main() {\n\n loop {\n\n println!(\"{}\", tweak!(4.5)); // Try changing the value while the application is running\n\n watch!(); // The thread will sleep here until anything in the file changes\n\n }\n\n}\n", "file_path": "examples/watch.rs", "rank": 14, "score": 35515.709712622454 }, { "content": "fn main() {\n\n loop {\n\n println!(\"{}\", tweak!(\"Lorem ipsum\")); // Try changing the text while the application is running\n\n println!(\"{}\", tweak!(\"габарит не выбран\")); // Supports non ascii text\n\n watch!(); // The thread will sleep here until anything in the file changes\n\n }\n\n}\n", "file_path": "examples/string.rs", "rank": 15, "score": 35515.709712622454 }, { "content": "fn main() {\n\n loop {\n\n // Try changing the value while the application is running (even in release mode)\n\n println!(\"{}\", release_tweak!(1.5));\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(200));\n\n }\n\n}\n", "file_path": "examples/release.rs", "rank": 16, "score": 35515.709712622454 }, { "content": "fn main() {\n\n loop {\n\n let b = b();\n\n let a = a();\n\n\n\n println!(\"a:{} b:{}\", a, b);\n\n watch!(); // The thread will sleep here until anything in the file changes\n\n }\n\n}\n", "file_path": "examples/order.rs", "rank": 17, "score": 35515.709712622454 }, { "content": "fn main() {\n\n loop {\n\n let v = tweak!(16.5);\n\n let v2 = inline_tweak::tweak!(16.5);\n\n println!(\"{} {}\", v, v2); // Try changing the value while the application is running\n\n inline_tweak::watch!();\n\n }\n\n}\n", "file_path": "examples/start.rs", "rank": 18, "score": 35515.709712622454 }, { "content": "fn main() {\n\n loop {\n\n println!(\"{} {}\", tweak!(2.5), tweak!(35));\n\n std::thread::sleep(Duration::from_millis(500))\n\n }\n\n}\n", "file_path": "examples/multiple.rs", "rank": 19, "score": 35515.709712622454 }, { "content": "#[inline_tweak::release_tweak_fn]\n\nfn main() {\n\n loop {\n\n let v = 3;\n\n println!(\"{}\", v);\n\n std::thread::sleep(std::time::Duration::from_millis(500));\n\n }\n\n}\n", "file_path": "examples/derive_release.rs", "rank": 20, "score": 34198.41876005342 }, { "content": "/// test\n\nfn main() {\n\n loop {\n\n const C: i32 = 3;\n\n static V: i32 = 3;\n\n let v: [f32; 1] = [1.0];\n\n let test: &str = \"hmm\";\n\n let ok: f32 = 5.0f32;\n\n let underscores: i32 = 1_000;\n\n let radix: i32 = 0xFF;\n\n\n\n let s = \"mui\n\n linea\n\n strings!\";\n\n println!(\"{} {} {} {}\", s, ok, underscores, radix);\n\n std::thread::sleep(std::time::Duration::from_millis(20));\n\n }\n\n}\n", "file_path": "examples/derive_edge_cases.rs", "rank": 21, "score": 33030.18599038629 }, { "content": "extern crate proc_macro;\n\nuse proc_macro::TokenStream;\n\nuse proc_macro2::{Ident, Span};\n\nuse quote::ToTokens;\n\nuse syn::punctuated::Punctuated;\n\nuse syn::visit_mut::VisitMut;\n\nuse syn::{\n\n parse_macro_input, Attribute, Expr, ExprBreak, ExprConst, ExprMacro, ItemConst, ItemStatic,\n\n Lit, LitInt, LitStr, Macro, MacroDelimiter, Path, PathSegment, Token, Type,\n\n};\n\n\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 22, "score": 18507.6283042473 }, { "content": "\n\n self.nth += 1;\n\n }\n\n _ => syn::visit_mut::visit_expr_mut(self, i),\n\n }\n\n }\n\n\n\n fn visit_expr_const_mut(&mut self, _: &mut ExprConst) {}\n\n\n\n fn visit_item_const_mut(&mut self, _: &mut ItemConst) {}\n\n\n\n fn visit_item_static_mut(&mut self, _: &mut ItemStatic) 
{}\n\n\n\n fn visit_type_mut(&mut self, _: &mut Type) {}\n\n\n\n fn visit_attribute_mut(&mut self, _: &mut Attribute) {}\n\n}\n\n\n\n/// Makes all the number/bool/char literals in a function tweakable. \n\n/// Doesn't apply to literals in macros as they cannot be replaced by expressions reliably. (e.g in calls to println!)\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 23, "score": 18507.35999067551 }, { "content": "///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// # use std::time::Duration;\n\n/// #[inline_tweak::tweak_fn]\n\n/// fn main() {\n\n/// loop {\n\n/// let v = 1.0; // Try changing this value!\n\n/// println!(\"{}\", v);\n\n/// std::thread::sleep(Duration::from_millis(200)); // or even this value :)\n\n/// }\n\n/// }\n\n/// ```\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 24, "score": 18506.128089315178 }, { "content": " label: None,\n\n expr: None,\n\n }),\n\n );\n\n\n\n let Expr::Lit(lit) = lit else {\n\n unreachable!();\n\n };\n\n\n\n *i = Expr::Macro(ExprMacro {\n\n attrs: vec![],\n\n mac: Macro {\n\n path: Path {\n\n segments: [\n\n PathSegment::from(Ident::new(\"inline_tweak\", Span::call_site())),\n\n PathSegment::from(Ident::new(\n\n if self.release_tweak {\n\n \"derive_release_tweak\"\n\n } else {\n\n \"derive_tweak\"\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 25, "score": 18505.120448508515 }, { "content": " },\n\n Span::call_site(),\n\n )),\n\n ]\n\n .into_iter()\n\n .collect(),\n\n leading_colon: Some(Default::default()),\n\n },\n\n bang_token: Default::default(),\n\n delimiter: MacroDelimiter::Paren(Default::default()),\n\n tokens: [\n\n lit.lit,\n\n Lit::Str(LitStr::new(&self.fname.to_string(), Span::call_site())),\n\n Lit::Int(LitInt::new(&self.nth.to_string(), Span::call_site())),\n\n ]\n\n .into_iter()\n\n .collect::<Punctuated<Lit, Token![,]>>()\n\n .into_token_stream(),\n\n },\n\n });\n", "file_path": "inline_tweak_derive/src/lib.rs", "rank": 26, "score": 18499.520654994158 }, { "content": "## inline_tweak_derive\n\n\n", "file_path": "inline_tweak_derive/README.md", "rank": 27, "score": 17557.353438455066 }, { "content": "# inline_tweak\n\n\n\n![](https://i.imgur.com/DZrg910.gif)\n\n\n\n[![Crates.io](https://img.shields.io/crates/v/inline_tweak.svg)](https://crates.io/crates/inline_tweak)\n\n\n\n**inline_tweak** is based on [this blogpost](http://blog.tuxedolabs.com/2018/03/13/hot-reloading-hardcoded-parameters.html)\n\nby tuxedo labs. \n\n\n\nTweak any literal directly from your code, changes to the source appear while running the program. \n\nIt works by parsing the file when a change occurs. \n\n\n\nThe library is minimal, only requiring the `lazy_static` dependency to hold modified values. 
\n\nIn release mode, the tweaking code is disabled and compiled away.\n\n\n\nThe `derive` feature exposes a proc macro to turn all literals from a function body into tweakable values.\n\n\n\n## Usage\n\n\n\n```rust\n\nuse inline_tweak::*;\n\n\n\nfn main() {\n\n loop {\n\n println!(\"{}\", tweak!(3.14)); // Try changing the value while the application is running\n\n }\n\n}\n\n```\n\n\n\n## Extra features\n\n\n\n### derive\n\n\n\nThe `derive` feature allows to tweak any number/bool/char literal in a function.\n\nIt avoids cluttering the code with `inline_tweak::tweak!` calls.\n\n\n\n```rust\n\n#[inline_tweak::tweak_fn]\n\nfn main() {\n\n loop {\n\n let v = 1.0; // Try changing this value!\n\n println!(\"{}\", v);\n\n std::thread::sleep(Duration::from_millis(200)); // or even this value :)\n\n }\n\n}\n\n```\n\n\n\nNote that it requires `syn`/`quote`/`proc_macro2` dependencies which makes the crate slower to compile. \n\nContrary to `tweak!`, it does not allow tweaking literals in macro calls (like `println!`), as it cannot reliably replace literals by a function call since macros can have custom syntax.\n\n\n\n#### watch!\n\n\n\n`inline_tweak` provides a `watch!()` macro that sleeps until the file is modified, akin to a breakpoint:\n\n```rust\n\nuse inline_tweak::*;\n\n\n\nfn main() {\n\n loop {\n\n println!(\"{}\", tweak!(\"hello world\"));\n\n watch!(); // The thread will sleep here until anything in the file changes\n\n }\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 28, "score": 12494.604309335331 }, { "content": "# Changelog\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\n## [1.1.1]\n\n - Fix `tweak_fn` not compiling if function has doc comments\n\n\n\n## [1.1.0]\n\n\n\n - Introduce `derive` feature to allow `#[tweak_fn]` on functions.\n\n - All calls to tweak are now almost always at most one hashmap lookup.\n\n - Cache `stat` syscalls.\n\n - Only read file from disk once per file change instead of per tweak in that file.\n\n - Add dependency on `rustc-hash` for faster hashing (no need for DOS protection).\n\n - Support multiline strings in `tweak_fn` proc-macro\n\n - Support more syntax for integers (e.g `0xFF_FF_u64`) and floats (e.g `1_000.0f64`)\n\n\n\nThese optimizations heavily reduce the overhead of using tweak! \n\nThis allows the proc-macro adding tweaks to all literals of not being too heavy.\n\n\n\n## [1.0.11]\n\n\n\n - Fix release_tweak! feature was broken\n\n\n\n## [1.0.10]\n\n\n\n - Add explicit `wasm32` compile conditions to avoid runtime panics\n\n\n\n## [1.0.9]\n\n\n\n - Performance improvement when using lots of `tweak!`s on unchanged files\n\n\n\n## [1.0.8]\n\n\n\n - Support non ascii text literals\n\n - Allow implementing custom Tweakable types\n\n\n\n## [1.0.7]\n\n\n\n - Add release_tweak! macro and feature\n\n - Support text literals\n\n\n\n## [1.0.6]\n\n\n\n - Support expressions by providing a constant value if desired\n\n\n\n## [1.0.5]\n\n\n\n - Allow full path to be used, `inline_tweak::tweak!` for example\n\n\n\n## [1.0.4]\n\n\n\n - Fix multiple `tweak!` not working if not called in order\n\n\n\n## [1.0.3]\n\n\n\n - Allow `tweak!`s to move to a different line at runtime while still being correctly parsed.\n\n\n\n## [1.0.2]\n\n\n\n - Add the `watch!` macro\n\n \n\n## [1.0.1]\n\n\n\n - Allow multiple `tweak!` on the same line\n\n\n\n## [1.0.0]\n\n - Add the tweak! 
macro to change number/boolean literals from source at runtime.\n", "file_path": "CHANGELOG.md", "rank": 29, "score": 12494.49408922463 }, { "content": "#### Expressions\n\n\n\n`inline_tweak` allows to tweak expressions by providing a value later.\n\nFor example:\n\n```rust\n\ntweak!(rng.gen_range(0.0, 1.0))\n\n``` \n\n\n\ncan then be replaced by a constant value by modifying the file (even while the application is running) to\n\n```rust\n\ntweak!(5.0; rng.gen_range(0.0, 1.0)) // will always return 5.0\n\n```\n\n\n\n[See the \"expression\" example in action](https://i.imgur.com/pSvLNlI.mp4)\n\n\n\nNote that this works only for expressions that return a tweakable type. (number/boolean literals)\n\n\n\n#### release_tweak!\n\n\n\nThe `release_tweak!` macro acts exactly like `tweak!` except that it also works in release mode. \n\nIt is accessible behind the feature flag `\"release_tweak\"` which is not enabled by default. \n\n\n\n## Installation\n\n\n\nSimply add this line to your Cargo.toml\n\n\n\n```toml\n\ninline_tweak = \"1.0.10\"\n\n```\n", "file_path": "README.md", "rank": 30, "score": 12493.32919973734 }, { "content": "use inline_tweak::tweak;\n\nuse std::time::Duration;\n\n\n", "file_path": "examples/expression.rs", "rank": 65, "score": 5.501263182275977 }, { "content": "use inline_tweak::tweak;\n\n\n", "file_path": "examples/start.rs", "rank": 66, "score": 5.259330231674214 }, { "content": "#[inline_tweak::tweak_fn]\n", "file_path": "examples/derive.rs", "rank": 67, "score": 5.244845086070981 }, { "content": "use inline_tweak::{tweak, watch};\n\n\n", "file_path": "examples/order.rs", "rank": 68, "score": 5.187214663430215 }, { "content": "use inline_tweak::{tweak, watch};\n\n\n", "file_path": "examples/string.rs", "rank": 69, "score": 5.187214663430215 }, { "content": "use inline_tweak::release_tweak;\n\n\n", "file_path": "examples/release.rs", "rank": 70, "score": 5.187214663430215 }, { "content": "use inline_tweak::{tweak, watch};\n\n\n", "file_path": "examples/watch.rs", "rank": 71, "score": 5.187214663430215 }, { "content": "use inline_tweak::*;\n\nuse std::time::Duration;\n\n\n", "file_path": "examples/multiple.rs", "rank": 72, "score": 5.174241484923308 }, { "content": "use inline_tweak::*;\n\nuse std::time::Duration;\n\n\n", "file_path": "examples/loop.rs", "rank": 73, "score": 5.174241484923308 }, { "content": "#[inline_tweak::tweak_fn]\n\n/// test\n", "file_path": "examples/derive_edge_cases.rs", "rank": 74, "score": 5.172975651636276 }, { "content": "#[inline_tweak::release_tweak_fn]\n", "file_path": "examples/derive_release.rs", "rank": 75, "score": 5.172975651636276 } ]
Rust
contract/ft-transfer-receiver-mock/src/lib.rs
evgenykuzyakov/oysterpack-near-stake-token
86a01e80f57780fa755bbc09e55b91714c0751d4
use near_sdk::{
    borsh::{self, BorshDeserialize, BorshSerialize},
    env,
    json_types::{ValidAccountId, U128},
    log, near_bindgen,
    serde::{Deserialize, Serialize},
    serde_json::{self, json},
    wee_alloc, AccountId, Promise, PromiseOrValue,
};
use std::{
    cmp::Ordering,
    convert::TryFrom,
    fmt::{self, Display, Formatter},
};

#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;

/// Mock fungible-token transfer receiver used to exercise `ft_transfer_call` flows.
#[near_bindgen]
#[derive(BorshDeserialize, BorshSerialize, Default)]
pub struct TransferReceiverMock {}

const TGAS: u64 = 1_000_000_000_000;
const YOCTO: u128 = 1_000_000_000_000_000_000_000_000;

#[near_bindgen]
impl TransferReceiver for TransferReceiverMock {
    fn ft_on_transfer(
        &mut self,
        #[allow(unused_variables)] sender_id: ValidAccountId,
        amount: TokenAmount,
        msg: String,
    ) -> PromiseOrValue<TokenAmount> {
        log!("{:#?}", msg);
        let msg = Message::try_from(msg.as_str()).expect("invalid msg");
        match msg {
            Message::Panic => panic!("BOOM!"),
            Message::Accept {
                transfer_relay,
                refund_percent,
            } => {
                if let Some(relay) = transfer_relay {
                    // relay a percentage of the transfer to another account, then
                    // resolve the refund amount in a callback
                    let transfer_relay_amount = amount.value() * relay.percent as u128 / 100;
                    self.invoke_ft_transfer(
                        &env::predecessor_account_id(),
                        &relay.account_id,
                        transfer_relay_amount.into(),
                    )
                    .then(self.invoke_resolve_ft_on_transfer(amount, refund_percent))
                    .into()
                } else {
                    // no relay - return the requested refund percentage directly
                    let refund_amount = amount.value() * refund_percent as u128 / 100;
                    PromiseOrValue::Value(refund_amount.into())
                }
            }
        }
    }
}

#[near_bindgen]
impl TransferReceiverMock {
    #[private]
    pub fn resolve_ft_on_transfer(&self, amount: TokenAmount, refund_percent: u8) -> TokenAmount {
        let refund_amount = amount.value() * refund_percent as u128 / 100;
        refund_amount.into()
    }

    pub fn register_account(&self, contract_id: ValidAccountId) -> Promise {
        Promise::new(contract_id.as_ref().to_string()).function_call(
            b"register_account".to_vec(),
            vec![],
            YOCTO,
            5 * TGAS,
        )
    }

    pub fn unregister_account(&self, contract_id: ValidAccountId) -> Promise {
        Promise::new(contract_id.as_ref().to_string()).function_call(
            b"unregister_account".to_vec(),
            vec![],
            YOCTO,
            10 * TGAS,
        )
    }

    pub fn ft_transfer(
        &self,
        token_contract: ValidAccountId,
        receiver_id: ValidAccountId,
        amount: TokenAmount,
    ) -> Promise {
        self.invoke_ft_transfer(token_contract.as_ref(), receiver_id.as_ref(), amount)
    }

    fn invoke_ft_transfer(
        &self,
        token_contract: &str,
        receiver_id: &str,
        amount: TokenAmount,
    ) -> Promise {
        Promise::new(token_contract.to_string()).function_call(
            b"ft_transfer".to_vec(),
            json!({ "receiver_id": receiver_id, "amount": amount })
                .to_string()
                .into_bytes(),
            1,
            10 * TGAS,
        )
    }

    fn invoke_resolve_ft_on_transfer(&self, amount: TokenAmount, refund_percent: u8) -> Promise {
        Promise::new(env::current_account_id()).function_call(
            b"resolve_ft_on_transfer".to_vec(),
            json!({ "amount": amount, "refund_percent": refund_percent })
                .to_string()
                .into_bytes(),
            0,
            5 * TGAS,
        )
    }
}

pub trait TransferReceiver {
    fn ft_on_transfer(
        &mut self,
        sender_id: ValidAccountId,
        amount: TokenAmount,
        msg: String,
    ) -> PromiseOrValue<TokenAmount>;
}

#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[serde(crate = "near_sdk::serde")]
pub struct TokenAmount(pub U128);

impl From<u128> for TokenAmount {
    fn from(value: u128) -> Self {
        Self(U128::from(value))
    }
}

impl TokenAmount {
    pub fn value(&self) -> u128 {
        self.0 .0
    }
}

impl Display for TokenAmount {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        self.0 .0.fmt(f)
    }
}

impl PartialOrd for TokenAmount {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.value().partial_cmp(&other.value())
    }
}

impl Default for TokenAmount {
    fn default() -> Self {
        Self(U128(0))
    }
}

#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
#[serde(crate = "near_sdk::serde")]
pub enum Message {
    Accept {
        refund_percent: u8,
        transfer_relay: Option<TransferRelay>,
    },
    Panic,
}

impl TryFrom<&str> for Message {
    type Error = serde_json::Error;

    fn try_from(json: &str) -> Result<Self, Self::Error> {
        serde_json::from_str(json)
    }
}

#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
#[serde(crate = "near_sdk::serde")]
pub struct TransferRelay {
    account_id: AccountId,
    percent: u8,
}

#[cfg(test)]
mod tests {
    use super::*;
    use near_sdk::serde_json::{self, json};

    #[test]
    fn message() {
        let msg = Message::Accept {
            refund_percent: 0,
            transfer_relay: None,
        };
        let json = serde_json::to_string_pretty(&msg).unwrap();
        println!("{}", json);

        let json = json!({
            "Accept": {
                "refund_percent": 0,
                "transfer_relay": {"account_id": "account.near", "percent": 50}
            }
        });
        let json = serde_json::to_string(&json).unwrap();
        println!("{}", json);
        let msg: Message = serde_json::from_str(&json).unwrap();
        match msg {
            Message::Accept {
                refund_percent,
                transfer_relay,
            } => {
                println!(
                    "refund_percent={}% transfer_relay={:?}",
                    refund_percent, transfer_relay
                )
            }
            Message::Panic => panic!("expected Accept message type"),
        }

        let msg = Message::Panic;
        let json = serde_json::to_string_pretty(&msg).unwrap();
        println!("{}", json);
        let msg: Message = serde_json::from_str(&json).unwrap();
        println!("{:?}", msg);
    }
}
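The following is a small illustrative sketch, not part of the original mock contract, showing how a caller might build the `msg` payload that `ft_on_transfer` parses. It assumes only the `Message`/`TransferRelay` JSON shape defined above; the account id `relay.near` and the percentage values are made-up examples.

use near_sdk::serde_json::json;

fn example_accept_msg() -> String {
    // `Message::Accept` uses serde's default externally tagged representation,
    // so the variant name wraps the payload; a `Message::Panic` value would
    // serialize to the plain JSON string "Panic".
    json!({
        "Accept": {
            "refund_percent": 10,
            "transfer_relay": { "account_id": "relay.near", "percent": 25 }
        }
    })
    .to_string()
}

fn main() {
    // This string is what would be passed as the `msg` argument of
    // `ft_transfer_call` on the token contract, which forwards it to
    // `ft_on_transfer` on the receiver mock.
    println!("{}", example_accept_msg());
}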
[ { "content": "pub fn deserialize_receipts() -> Vec<Receipt> {\n\n get_created_receipts()\n\n .iter()\n\n .map(|receipt| {\n\n let json = serde_json::to_string_pretty(receipt).unwrap();\n\n println!(\"{}\", json);\n\n let receipt: Receipt = serde_json::from_str(&json).unwrap();\n\n receipt\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "contract/src/test_utils.rs", "rank": 0, "score": 261854.0783660374 }, { "content": "pub fn to_valid_account_id(account_id: &str) -> ValidAccountId {\n\n account_id.try_into().unwrap()\n\n}\n\n\n", "file_path": "contract/tests/test_utils.rs", "rank": 1, "score": 246664.33885658835 }, { "content": "pub fn assert_private_func_call(result: ExecutionResult, func_name: &str) {\n\n if let ExecutionStatus::Failure(TxExecutionError::ActionError(err)) = result.status() {\n\n assert!(err\n\n .to_string()\n\n .contains(&format!(\"Method {} is private\", func_name)));\n\n } else {\n\n panic!(\"expected failure\");\n\n }\n\n}\n", "file_path": "contract/tests/test_utils.rs", "rank": 2, "score": 243949.00416149135 }, { "content": "pub fn set_env_with_failed_promise_result(contract: &mut Contract) {\n\n pub fn promise_result(_result_index: u64) -> PromiseResult {\n\n PromiseResult::Failed\n\n }\n\n\n\n pub fn promise_results_count() -> u64 {\n\n 1\n\n }\n\n\n\n contract.set_env(Env {\n\n promise_results_count_: promise_results_count,\n\n promise_result_: promise_result,\n\n });\n\n}\n", "file_path": "contract/src/test_utils.rs", "rank": 3, "score": 243558.45356963767 }, { "content": "pub fn set_env_with_success_promise_result(contract: &mut Contract) {\n\n pub fn promise_result(_result_index: u64) -> PromiseResult {\n\n PromiseResult::Successful(vec![])\n\n }\n\n\n\n pub fn promise_results_count() -> u64 {\n\n 1\n\n }\n\n\n\n contract.set_env(Env {\n\n promise_results_count_: promise_results_count,\n\n promise_result_: promise_result,\n\n });\n\n}\n\n\n", "file_path": "contract/src/test_utils.rs", "rank": 4, "score": 243558.45356963767 }, { "content": "pub fn to_valid_account_id(account_id: &str) -> ValidAccountId {\n\n account_id.try_into().unwrap()\n\n}\n\n\n\nconst TEST_ACCOUNT_ID: &str = \"oysterpack.near\";\n\nconst TEST_STAKING_POOL_ID: &str = \"staking-pool.near\";\n\npub const TEST_OWNER_ID: &str = \"owner.stake.oysterpack.near\";\n\npub const TEST_OPERATOR_ID: &str = \"operator.stake.oysterpack.near\";\n\n\n\nimpl<'a> TestContext<'a> {\n\n pub fn with_vm_context(context: VMContext) -> Self {\n\n let mut context = context.clone();\n\n context.is_view = false;\n\n testing_env!(context.clone());\n\n\n\n let contract = Contract::new(\n\n to_valid_account_id(TEST_STAKING_POOL_ID),\n\n to_valid_account_id(TEST_OWNER_ID),\n\n to_valid_account_id(TEST_OPERATOR_ID),\n\n );\n", "file_path": "contract/src/test_utils.rs", "rank": 5, "score": 236237.5401865482 }, { "content": "/// wrapper around `near_sdk::env::log()` which supports structured logging\n\npub fn log<T: Debug>(event: T) {\n\n env::log(format!(\"{:#?}\", event).as_bytes());\n\n}\n", "file_path": "contract/src/near.rs", "rank": 6, "score": 228618.58058003627 }, { "content": "type Balance = near_sdk::json_types::U128;\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\n\n#[serde(crate = \"near_sdk::serde\")]\n\npub struct StakingPoolAccount {\n\n pub account_id: AccountId,\n\n /// The unstaked balance that can be withdrawn or staked.\n\n pub unstaked_balance: Balance,\n\n /// The amount balance staked at the current \"stake\" share price.\n\n pub staked_balance: Balance,\n\n /// Whether the unstaked balance is 
available for withdrawal now.\n\n pub can_withdraw: bool,\n\n}\n\n\n\nimpl StakingPoolAccount {\n\n pub fn total_balance(&self) -> u128 {\n\n self.staked_balance.0 + self.unstaked_balance.0\n\n }\n\n}\n", "file_path": "contract/tests/staking_pool_client.rs", "rank": 7, "score": 228221.0472723176 }, { "content": "pub fn set_env_with_promise_result(\n\n contract: &mut Contract,\n\n promise_result: fn(u64) -> PromiseResult,\n\n) {\n\n pub fn promise_results_count() -> u64 {\n\n 1\n\n }\n\n\n\n contract.set_env(Env {\n\n promise_results_count_: promise_results_count,\n\n promise_result_: promise_result,\n\n });\n\n}\n\n\n", "file_path": "contract/src/test_utils.rs", "rank": 8, "score": 208337.5424981934 }, { "content": "pub fn new_context(predecessor_account_id: &str) -> VMContext {\n\n VMContextBuilder::new()\n\n .current_account_id(\"stake.oysterpack.near\".to_string())\n\n .signer_account_id(predecessor_account_id.to_string())\n\n .predecessor_account_id(predecessor_account_id.to_string())\n\n .account_balance(10000 * YOCTO)\n\n .build()\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(crate = \"near_sdk::serde\")]\n\npub struct Receipt {\n\n pub receiver_id: String,\n\n pub receipt_indices: Vec<usize>,\n\n pub actions: Vec<Action>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\n#[serde(crate = \"near_sdk::serde\")]\n\npub enum Action {\n", "file_path": "contract/src/test_utils.rs", "rank": 9, "score": 204146.96998297944 }, { "content": "pub fn owner_id(contract_account_id: &str, user: &UserAccount) -> AccountId {\n\n let result = user.view(PendingContractTx::new(\n\n contract_account_id,\n\n \"owner_id\",\n\n json!({}),\n\n true,\n\n ));\n\n\n\n result.unwrap_json()\n\n}\n", "file_path": "contract/tests/contract_owner_client.rs", "rank": 10, "score": 202554.66410305575 }, { "content": "type Balance = near_sdk::json_types::U128;\n\n\n\n#[derive(Serialize, Deserialize, Clone)]\n\n#[serde(crate = \"near_sdk::serde\")]\n\npub struct StakingPoolAccount {\n\n pub account_id: AccountId,\n\n /// The unstaked balance that can be withdrawn or staked.\n\n pub unstaked_balance: Balance,\n\n /// The amount balance staked at the current \"stake\" share price.\n\n pub staked_balance: Balance,\n\n /// Whether the unstaked balance is available for withdrawal now.\n\n pub can_withdraw: bool,\n\n}\n\n\n", "file_path": "contract/src/contract/staking_service.rs", "rank": 11, "score": 188420.68363936697 }, { "content": "pub fn create_context() -> TestContext {\n\n let (runtime, signer, ..) 
= init_runtime(None);\n\n let runtime = Rc::new(RefCell::new(runtime));\n\n let master_account = UserAccount::new(&runtime, signer); // init_simulator(None);\n\n let contract_owner = master_account.create_user(\"oysterpack\".to_string(), 1000 * YOCTO);\n\n let contract_operator = contract_owner.create_user(\"operator\".to_string(), 10 * YOCTO);\n\n\n\n const STAKING_POOL_ID: &str = \"astro-stakers-poolv1\";\n\n let staking_pool_id = to_valid_account_id(STAKING_POOL_ID);\n\n let contract_owner_id = to_valid_account_id(&contract_owner.account_id());\n\n let contract_operator_id = to_valid_account_id(&contract_operator.account_id());\n\n\n\n let contract = deploy!(\n\n // Contract Proxy\n\n contract: StakeTokenContractContract,\n\n // Contract account id\n\n contract_id: \"astro-stakers-poolv1-stake-oysterpack\",\n\n // Bytes of contract\n\n bytes: &WASM_BYTES,\n\n // User deploying the contract,\n", "file_path": "contract/tests/test_utils.rs", "rank": 12, "score": 176349.38869780098 }, { "content": "fn assert_gas_range(gas: Gas, min: u8, max: u8, field: &str) {\n\n assert!(\n\n gas >= TGAS * min as u64 && gas <= TGAS * max as u64,\n\n \"{} must be within {} - {} TGas\",\n\n field,\n\n min,\n\n max\n\n );\n\n}\n\n\n\n#[derive(Debug, BorshSerialize, BorshDeserialize, Clone, Copy)]\n\npub struct GasConfig {\n\n staking_pool: StakingPoolGasConfig,\n\n callbacks: CallBacksGasConfig,\n\n\n\n function_call_promise: Gas,\n\n function_call_promise_data_dependency: Gas,\n\n}\n\n\n\nimpl GasConfig {\n", "file_path": "contract/src/config.rs", "rank": 13, "score": 170762.32633933533 }, { "content": "fn assert_token_amount_not_zero(amount: &TokenAmount) {\n\n assert!(amount.value() > 0, \"amount must not be zero\")\n\n}\n\n\n", "file_path": "contract/src/contract/fungible_token.rs", "rank": 14, "score": 154584.58451916656 }, { "content": "/// provides functions to support DevOps\n\npub trait Operator {\n\n fn operator_id(&self) -> AccountId;\n\n\n\n /// returns the contract's state\n\n /// - useful for monitoring and debugging\n\n fn contract_state(&self) -> ContractState;\n\n\n\n fn config(&self) -> Config;\n\n\n\n /// resets the config to default settings\n\n ///\n\n /// ## Panics\n\n /// if not invoked by the operator account\n\n fn reset_config_default(&mut self) -> Config;\n\n\n\n /// merges in config changes\n\n /// - performs basic validation to prevent mis-configurations\n\n ///\n\n /// NOTE: you can [force a config change](Operator::force_update_config) if the validation logic\n\n /// is flawed or becomes invalidated because of NEAR platform changes in the future.\n", "file_path": "contract/src/interface/operator.rs", "rank": 15, "score": 131647.7221261966 }, { "content": "#[ext_contract(ext_callbacks)]\n\npub trait Callbacks {\n\n fn on_refresh_stake_token_value(\n\n &mut self,\n\n #[callback] staking_pool_account: StakingPoolAccount,\n\n );\n\n}\n\n\n\n#[near_bindgen]\n\nimpl Contract {\n\n #[private]\n\n pub fn on_refresh_stake_token_value(\n\n &mut self,\n\n #[callback] staking_pool_account: StakingPoolAccount,\n\n ) -> interface::StakeTokenValue {\n\n let staked_balance = self.staked_near_balance(\n\n staking_pool_account.staked_balance.into(),\n\n staking_pool_account.unstaked_balance.into(),\n\n );\n\n self.update_stake_token_value(staked_balance);\n\n self.clear_stake_lock();\n", "file_path": "contract/src/contract/staking_service.rs", "rank": 16, "score": 130015.35592473962 }, { "content": "pub trait MetaData {\n\n /// returns None if the contract does not support the requested 
metadata\n\n fn metadata(uri: String) -> Option<Value>;\n\n\n\n /// returns the metadata that this contract exposes\n\n fn metadata_uris() -> Vec<String>;\n\n}\n", "file_path": "contract/src/interface/metadata.rs", "rank": 17, "score": 130015.35592473962 }, { "content": "pub trait ContractFinancials {\n\n /// returns consolidated view of contract balances\n\n fn balances(&self) -> ContractBalances;\n\n\n\n /// NEAR funds that are deposited are added to the contract's STAKE fund, which will be staked\n\n /// to boost STAKE token value by increasing the staked NEAR balance.\n\n ///\n\n /// Returns the updated STAKE fund balance.\n\n ///\n\n /// NOTE: The STAKE funds will be staked the next time the [StakeBatch](crate::domain::StakeBatch) is run.\n\n ///\n\n /// #\\[payable\\]\n\n fn deposit_earnings(&mut self) -> YoctoNear;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EarningsDistribution {\n\n pub contract_owner_earnings: u128,\n\n pub user_accounts_earnings: u128,\n\n}\n", "file_path": "contract/src/interface/financials.rs", "rank": 18, "score": 130015.35592473962 }, { "content": "/// Defines the standard interface for the core Fungible Token contract\n\n/// - [NEP-141](https://github.com/near/NEPs/issues/141)\n\n/// - [NEP-141 Standard discussion](https://github.com/near/NEPs/discussions/146)\n\n///\n\n/// The core standard supports the following features:\n\n/// - [simple token transfers](FungibleToken::ft_transfer)\n\n/// - [token transfers between contracts](FungibleToken::ft_transfer_call)\n\n/// - accounting for [total token supply](FungibleToken::ft_total_supply) and\n\n/// [account balances](FungibleToken::ft_balance_of)\n\n///\n\n/// ## Notes\n\n/// - it doesn't include token metadata standard that will be covered by a separate NEP, because the\n\n/// metadata may evolve.\n\n/// - it also doesn't include account registration standard that also must be covered by a separate\n\n/// NEP because it can be reused for other contract.\n\n///\n\n/// ### Security\n\n/// Requirement for accept attached deposits (#\\[payable\\])\n\n/// Due to the nature of function-call permission access keys on NEAR protocol, the method that\n\n/// requires an attached deposit can't be called by the restricted access key. If the token contract\n\n/// requires an attached deposit of at least 1 yoctoNEAR on transfer methods, then the function-call\n\n/// restricted access key will not be able to call them without going through the wallet confirmation.\n\n/// This prevents some attacks like fishing through an authorization to a token contract.\n\n///\n\n/// This 1 yoctoNEAR is enforced by this standard.\n\n///\n\n/// ### Transfer Call Refunds\n\n/// If the receiver contract is malicious or incorrectly implemented, then the receiver's promise\n\n/// result may be invalid and the required balance may not be available on the receiver's account.\n\n/// In this case the refund can't be provided provided to the sender. This is prevented by #122\n\n/// standard that locks funds into a temporary vault and prevents receiver from overspending the\n\n/// funds and later retuning invalid value. But if this flaw exist in this standard, it's not an\n\n/// issue for the sender account. It only affects the transfer amount and the receiver's account\n\n/// balance. 
The receiver can't overspend tokens from the sender outside of sent amount, so this\n\n/// standard must be considered as safe as #122\n\n///\n\npub trait FungibleToken {\n\n /// Enables simple transfer between accounts.\n\n ///\n\n /// - Transfers positive `amount` of tokens from the `env::predecessor_account_id` to `receiver_id`.\n\n /// - Both accounts must be registered with the contract for transfer to succeed.\n\n /// - Sender account is required to attach exactly 1 yoctoNEAR to the function call - see security\n\n /// section of the standard.\n\n /// - the yoctoNEAR will be credited to the sender account's NEAR balance\n\n ///\n\n /// Arguments:\n\n /// - `receiver_id` - the account ID of the receiver.\n\n /// - `amount` - the amount of tokens to transfer - unsigned integer in string representation.\n\n /// - `memo` - an optional string field in a free form to associate a memo with this transfer.\n\n ///\n\n /// ## Panics\n\n /// - if the attached deposit does not equal 1 yoctoNEAR\n\n /// - if either sender or receiver accounts are not registered\n\n /// - if amount is zero\n\n /// - if the sender account has insufficient funds to fulfill the request\n\n ///\n", "file_path": "contract/src/interface/fungible_token.rs", "rank": 19, "score": 128450.56599237234 }, { "content": "/// Integrates with the staking pool contract and manages STAKE token assets. The main use\n\n/// cases supported by this interface are:\n\n/// 1. Users can [deposit](StakingService::deposit) NEAR funds to stake.\n\n/// 2. Users can withdraw NEAR funds from [StakeBatch](crate::interface::StakeBatch) that has not yet run.\n\n/// 3. Once the NEAR funds are staked, the account is issued STAKE tokens based on the STAKE token\n\n/// value computed when the [StakeBatch](crate::interface::StakeBatch) is run.\n\n/// 4. Users can [redeem](StakingService::redeem) STAKE tokens for NEAR.\n\n/// 5. Users can cancel their requests to redeem STAKE, i.e., the [RedeemStakeBatch](crate::interface::RedeemStakeBatch)\n\n/// is cancelled.\n\n/// 6. [StakeAccount](crate::interface::StakeAccount) info can be looked up\n\n/// 7. [RedeemStakeBatchReceipt](crate::interface::RedeemStakeBatchReceipt) information for pending staking pool\n\n/// withdrawals for unstaked NEAR can be looked up\n\n/// 8. Batch receipts can be looked for any active receipts which contain unclaimed funds.\n\n///\n\n/// ## How Staking NEAR Works\n\n/// Users [deposit](StakingService::deposit) NEAR funds into a [StakeBatch](crate::interface::StakeBatch).\n\n/// The staking batch workflow is run via [stake()](StakingService::stake), which deposits and stakes\n\n/// the batched NEAR funds with the staking pool. Users have the option to combine the deposit and\n\n/// stake operations via [deposit_and_stake()](StakingService::deposit_and_stake).\n\n///\n\n/// When the batch is run, the STAKE token value at that point in time is computed and recorded into a\n\n/// [StakeBatchReceipt](crate::interface::StakeBatchReceipt). The STAKE tokens are issued on demand when\n\n/// the user accounts access the contract for actions that involve STAKE tokens - when staking NEAR,\n\n/// redeeming STAKE, or withdrawing unstaked NEAR.\n\n///\n\n/// ## How Redeeming STAKE Works\n\n/// Users [redeem](StakingService::redeem) STAKE tokens, which are collected into a\n\n/// [RedeemStakeBatch](crate::interface::RedeemStakeBatch). 
The redeemed STAKE tokens will need to be\n\n/// unstaked from the staking pool via [unstake()](StakingService::unstake), which processes the\n\n/// [RedeemStakeBatch](crate::interface::RedeemStakeBatch). Users have the option to combine the\n\n/// redeem and unstake operations via [redeem_and_stake()](StakingService::redeem_and_unstake). For\n\n/// convenience users can also simply redeem all STAKE via [redeem_all()](StakingService::redeem) and\n\n/// [redeem_all_and_stake()](StakingService::redeem_all_and_unstake).\n\n///\n\n/// Redeeming STAKE tokens requires NEAR to be unstaked and withdrawn from the staking pool.\n\n/// When NEAR is unstaked, the unstaked NEAR funds are not available for withdrawal until 4 epochs\n\n/// later (~2 days). While waiting for the unstaked NEAR funds to be released and withdrawn,\n\n/// [unstake()](StakingService::unstake) requests will fail.\n\n/// When a [RedeemStakeBatch](crate::interface::RedeemStakeBatch) is run, the STAKE\n\n/// token value is computed at that point in time, which is used to compute the corresponding amount\n\n/// of NEAR tokens to unstake from the staking pool. This information is recorded in a\n\n/// [RedeemStakeBatchReceipt](crate::interface::RedeemStakeBatchReceipt), which is later used by user\n\n/// accounts to claim NEAR tokens from the processed batch.\n\n///\n\n/// ## Notes\n\n/// - Batches are processed serially\n\n/// - Users can continue to submit requests to [deposit](StakingService::deposit) and [redeem](StakingService::redeem)\n\n/// funds and they will be queued into the next batch\n\n/// - batch receipts will be deleted from storage once all funds on the receipt are claimed\n\npub trait StakingService {\n\n /// returns the staking pool account ID used for the STAKE token\n\n /// - this is the staking pool that this contract is linked to\n\n fn staking_pool_id(&self) -> AccountId;\n\n\n\n /// looks up the receipt for the specified batch ID\n\n /// - when a batch is successfully processed a receipt is created, meaning the NEAR funds have\n\n /// been successfully deposited and staked with the staking pool\n\n /// - the receipt is used by customer accounts to claim STAKE tokens for their staked NEAR based\n\n /// on the STAKE token value at the point in time when the batch was run.\n\n /// - once all funds have been claimed from the receipt, then the receipt will be automatically\n\n /// deleted from storage, i.e., if no receipt exists for the batch ID, then it means all funds\n\n /// have been claimed (for valid batch IDs)\n\n fn stake_batch_receipt(&self, batch_id: BatchId) -> Option<StakeBatchReceipt>;\n\n\n\n /// looks up the receipt for the specified batch ID\n\n /// - when a batch is successfully processed a receipt is created, meaning the unstaked NEAR\n\n /// has been withdrawn from the staking pool contract\n\n /// - the receipt is used by customer accounts to claim the unstaked NEAR tokens for their\n\n /// redeemed STAKE tokens based on the STAKE token value at the point in time when the batch\n", "file_path": "contract/src/interface/staking_service.rs", "rank": 20, "score": 128448.17379113127 }, { "content": "/// Used to manage user accounts. The main use cases supported by this interface are:\n\n/// 1. Users can register with the contract. Users are required to pay for account storage usage at\n\n/// time of registration. Accounts are required to register in order to use the contract.\n\n/// 2. Users can unregister with the contract. 
When a user unregisters, the account storage usage fee\n\n/// will be refunded.\n\n/// 3. The total number of registered users is tracked.\n\n/// 4. Users can withdraw unstaked NEAR from STAKE that has been redeemed.\n\n/// 5. User account info can be looked up.\n\npub trait AccountManagement {\n\n /// Creates and registers a new account for the predecessor account ID.\n\n /// - the account is required to pay for its storage. Storage fees will be escrowed and then refunded\n\n /// when the account is unregistered - use [account_storage_escrow_fee](crate::interface::AccountManagement::account_storage_fee)\n\n /// to lookup the required storage fee amount. Overpayment of storage fee is refunded.\n\n ///\n\n /// Gas Requirements: 4.5 TGas\n\n ///\n\n /// ## Panics\n\n /// - if deposit is not enough to cover storage usage fees\n\n /// - if account is already registered\n\n fn register_account(&mut self);\n\n\n\n /// In order to unregister the account all NEAR must be unstaked and withdrawn from the account.\n\n /// The escrowed storage fee will be refunded to the account.\n\n ///\n\n /// Gas Requirements: 8 TGas\n\n ///\n\n /// ## Panics\n\n /// - if account is not registered\n", "file_path": "contract/src/interface/account_management.rs", "rank": 21, "score": 128447.3483785716 }, { "content": "pub trait ContractOwner {\n\n fn owner_id(&self) -> AccountId;\n\n\n\n /// The new owner must have a registered account to protect against accounts that do not exist.\n\n /// When the ownership is transferred, the new owner becomes the operator.\n\n ///\n\n /// ## Panics\n\n /// - if the predecessor account is not the owner account\n\n /// - new owner account must be registered\n\n fn transfer_ownership(&mut self, new_owner: ValidAccountId);\n\n\n\n /// Assigns the operator role to the specified account.\n\n /// The new operator must have a registered account to protect against accounts that do not exist.\n\n ///\n\n /// ## Panics\n\n /// - if the predecessor account is not the owner account\n\n /// - new operator account must be registered\n\n fn set_operator_id(&mut self, account_id: ValidAccountId);\n\n\n\n /// Deposits the owner's balance into the owners STAKE account\n", "file_path": "contract/src/interface/contract_owner.rs", "rank": 22, "score": 128442.16533308697 }, { "content": "/// Receiver of the Fungible Token for [`FungibleToken::ft_transfer_call`] calls.\n\npub trait TransferReceiver {\n\n /// Callback to receive tokens.\n\n ///\n\n /// Called by fungible token contract `env::predecessor_account_id` after `transfer_call` was initiated by\n\n /// `sender_id` of the given `amount` with the transfer message given in `msg` field.\n\n /// The `amount` of tokens were already transferred to this contract account and ready to be used.\n\n ///\n\n /// The method must return the amount of tokens that are not used/accepted by this contract from\n\n /// the transferred amount, e.g.:\n\n /// - The transferred amount was `500`, the contract completely takes it and must return `0`.\n\n /// - The transferred amount was `500`, but this transfer call only needs `450` for the action passed in the `msg`\n\n /// field, then the method must return `50`.\n\n /// - The transferred amount was `500`, but the action in `msg` field has expired and the transfer must be\n\n /// cancelled. 
The method must return `500` or panic.\n\n ///\n\n /// Arguments:\n\n /// - `sender_id` - the account ID that initiated the transfer.\n\n /// - `amount` - the amount of tokens that were transferred to this account.\n\n /// - `msg` - a string message that was passed with this transfer call.\n\n ///\n\n /// Returns the amount of tokens that are used/accepted by this contract from the transferred amount.\n\n fn ft_on_transfer(\n\n &mut self,\n\n sender_id: ValidAccountId,\n\n amount: TokenAmount,\n\n msg: TransferCallMessage,\n\n ) -> PromiseOrValue<TokenAmount>;\n\n}\n\n\n", "file_path": "contract/src/interface/fungible_token.rs", "rank": 23, "score": 128442.16533308697 }, { "content": "#[ext_contract(ext_transfer_receiver)]\n\npub trait ExtTransferReceiver {\n\n fn ft_on_transfer(\n\n &mut self,\n\n sender_id: AccountId,\n\n amount: TokenAmount,\n\n msg: TransferCallMessage,\n\n ) -> PromiseOrValue<TokenAmount>;\n\n}\n\n\n", "file_path": "contract/src/contract/fungible_token.rs", "rank": 24, "score": 126924.9898370647 }, { "content": "/// Callback on fungible token contract to resolve transfer.\n\npub trait ResolveTransferCall {\n\n /// Callback to resolve transfer.\n\n /// Private method (`env::predecessor_account_id == env::current_account_id`).\n\n ///\n\n /// Called after the receiver handles the transfer call and returns unused token amount.\n\n ///\n\n /// This method must get `unused_amount` from the receiver's promise result and refund the\n\n /// `unused_amount` from the receiver's account back to the `sender_id` account.\n\n ///\n\n /// Arguments:\n\n /// - `sender_id` - the account ID that initiated the transfer.\n\n /// - `receiver_id` - the account ID of the receiver contract.\n\n /// - `amount` - the amount of tokens that were transferred to receiver's account.\n\n ///\n\n /// Promise result data dependency (`unused_amount`):\n\n /// - the amount of tokens that were unused by receiver's contract.\n\n /// - Received from `on_ft_receive`\n\n /// - `unused_amount` must be `U128` in range from `0` to `amount`. All other invalid values\n\n /// are considered to be equal to be the total transfer amount.\n\n ///\n", "file_path": "contract/src/interface/fungible_token.rs", "rank": 25, "score": 126924.9898370647 }, { "content": "#[ext_contract(ext_redeeming_workflow_callbacks)]\n\npub trait ExtRedeemingWorkflowCallbacks {\n\n fn on_run_redeem_stake_batch(\n\n &mut self,\n\n #[callback] staked_balance: near_sdk::json_types::U128,\n\n ) -> Promise;\n\n\n\n /// ## Success Workflow\n\n /// 1. store the redeem stake batch receipt\n\n /// 2. set the redeem stake batch lock state to pending withdrawal\n\n fn on_unstake(&mut self);\n\n\n\n fn clear_redeem_lock(&mut self);\n\n\n\n /// batch ID is returned when all unstaked NEAR has been withdrawn\n\n fn on_redeeming_stake_pending_withdrawal(\n\n &mut self,\n\n #[callback] staking_pool_account: StakingPoolAccount,\n\n ) -> near_sdk::PromiseOrValue<BatchId>;\n\n\n\n fn on_redeeming_stake_post_withdrawal(&mut self) -> BatchId;\n\n}\n\n\n", "file_path": "contract/src/contract/staking_service.rs", "rank": 26, "score": 125460.89005253804 }, { "content": "#[ext_contract(ext_staking_workflow_callbacks)]\n\npub trait ExtStakingWorkflowCallbacks {\n\n /// callback for getting staked balance from staking pool as part of stake batch processing workflow\n\n ///\n\n /// ## Success Workflow\n\n /// 1. Check if liquidity is needed\n\n /// 2. deposit and stake funds with staking pool\n\n /// 3. then get account from staking pool\n\n /// 4. 
then invoke [on_deposit_and_stake] callback\n\n fn on_run_stake_batch(\n\n &mut self,\n\n #[callback] staking_pool_account: StakingPoolAccount,\n\n ) -> Promise;\n\n\n\n /// ## Success Workflow\n\n /// 1. update the StateLock to Staked\n\n /// 2. invoke [`process_staked_batch`]\n\n fn on_deposit_and_stake(\n\n &mut self,\n\n near_liquidity: Option<interface::YoctoNear>,\n\n #[callback] staking_pool_account: StakingPoolAccount,\n", "file_path": "contract/src/contract/staking_service.rs", "rank": 27, "score": 125460.89005253804 }, { "content": "#[ext_contract(ext_resolve_transfer_call)]\n\npub trait ExtResolveTransferCall {\n\n fn ft_resolve_transfer_call(\n\n &mut self,\n\n sender_id: AccountId,\n\n receiver_id: AccountId,\n\n amount: TokenAmount,\n\n ) -> PromiseOrValue<TokenAmount>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_transfer {\n\n\n\n use super::*;\n\n use crate::interface::StakingService;\n\n use crate::near::YOCTO;\n\n use crate::test_utils::*;\n\n use near_sdk::{testing_env, MockedBlockchain};\n\n\n\n #[test]\n\n pub fn transfer_ok() {\n", "file_path": "contract/src/contract/fungible_token.rs", "rank": 28, "score": 125460.89005253804 }, { "content": "#[test]\n\nfn quick_test() {\n\n let balance_history: Vec<u128> = vec![\n\n 72145722678713040200000000,\n\n 72145780435590802700000000, // register account\n\n 72145841274282485800000000, // register account\n\n ];\n\n\n\n let mut i = 0;\n\n while i < balance_history.len() - 1 {\n\n let balance_1 = &balance_history[i];\n\n let balance_2 = &balance_history[i + 1];\n\n\n\n if balance_2 > balance_1 {\n\n println!(\n\n \"{} | {} | {}\",\n\n balance_2 - balance_1,\n\n (balance_2 - balance_1) as f64 / YOCTO as f64,\n\n YOCTO / (balance_2 - balance_1)\n\n );\n\n } else {\n", "file_path": "contract/tests/quick_test.rs", "rank": 30, "score": 123630.48212974847 }, { "content": "#[test]\n\nfn sim_test() {\n\n let ctx = test_utils::create_context();\n\n let user = &ctx.contract_operator;\n\n\n\n let (_initial_contract_state, _initial_config, _initial_contract_balances) =\n\n check_initial_state(&ctx, user);\n\n check_no_accounts_registered(&ctx, user);\n\n\n\n register_contract_owner_account(&ctx);\n\n register_user_accounts(&ctx);\n\n\n\n // simulates the entire work from depositing to unstaking and withdrawing\n\n deposit_funds_for_each_user_account(&ctx);\n\n let stake_accounts = ctx.registered_stake_accounts();\n\n stake(&ctx);\n\n check_user_accounts_after_deposits_are_staked(&ctx, &stake_accounts);\n\n\n\n redeem_all_stake_for_each_user_account(&ctx);\n\n // check_user_accounts_after_redeeming_all_stake(&ctx);\n\n //\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 31, "score": 122358.30521859185 }, { "content": "fn check_initial_state(\n\n ctx: &TestContext,\n\n user: &UserAccount,\n\n) -> (ContractState, Config, ContractBalances) {\n\n let initial_contract_state = ctx.operator.contract_state(user);\n\n check_contract_state_after_deployment(&initial_contract_state);\n\n\n\n let initial_config = ctx.operator.config(user);\n\n check_config_after_deployment(&initial_config);\n\n\n\n let initial_contract_balances = ctx.financials.balances(user);\n\n assert_eq!(initial_contract_balances, initial_contract_state.balances,\n\n \"the balances returned via `contract_state()` should be the same as the balances retrieved directly\");\n\n check_contract_balances_after_deployment(&initial_contract_balances);\n\n\n\n (\n\n initial_contract_state,\n\n initial_config,\n\n initial_contract_balances,\n\n )\n\n}\n\n\n", "file_path": 
"contract/tests/contract_sim_test.rs", "rank": 32, "score": 116899.85666020273 }, { "content": "fn unstake(ctx: &TestContext) {\n\n println!(\"################\");\n\n println!(\"### unstake ####\");\n\n\n\n let initial_contract_state: ContractState = ctx.operator.contract_state(&ctx.master_account);\n\n let account_before_unstaking = ctx.contract().user_account.account();\n\n ctx.operator.contract_state(&ctx.master_account);\n\n match initial_contract_state.redeem_stake_batch_lock {\n\n None => {\n\n if let Some(batch) = initial_contract_state.redeem_stake_batch {\n\n let result: ExecutionResult = ctx.staking_service.unstake(&ctx.contract_operator);\n\n result.assert_success();\n\n println!(\"*** unstaked\");\n\n ctx.process_all_transactions();\n\n let account_after_unstaking = ctx.contract().user_account.account();\n\n let gas_rewards = account_after_unstaking.amount - account_before_unstaking.amount;\n\n println!(\"gas_rewards = {}\", gas_rewards);\n\n\n\n ctx.operator.contract_state(&ctx.master_account);\n\n }\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 33, "score": 115440.36760145906 }, { "content": "fn stake(ctx: &TestContext) {\n\n println!(\"##############\");\n\n println!(\"### stake ####\");\n\n\n\n let initial_contract_state = ctx.operator.contract_state(&ctx.master_account);\n\n match initial_contract_state.stake_batch {\n\n None => println!(\"there is no stake batch to stake\"),\n\n Some(batch) => {\n\n let result: ExecutionResult = ctx.staking_service.stake(&ctx.contract_operator);\n\n result.assert_success();\n\n\n\n ctx.process_all_transactions();\n\n {\n\n println!(\"stake receipt results: {:#?}\", result.get_receipt_results());\n\n println!(\"stake promise results: {:#?}\", result.promise_results());\n\n }\n\n\n\n let contract_state: ContractState = ctx.operator.contract_state(&ctx.master_account);\n\n assert!(\n\n contract_state.stake_batch.is_none(),\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 34, "score": 115440.36760145906 }, { "content": "fn check_user_accounts_after_deposits_are_staked(\n\n ctx: &TestContext,\n\n accounts_before_staking: &HashMap<String, StakeAccount>,\n\n) {\n\n println!(\"######################################################\");\n\n println!(\"### check_user_accounts_after_deposits_are_staked ####\");\n\n\n\n fn stake_balance(stake_accounts: &HashMap<String, StakeAccount>, user: &UserAccount) -> u128 {\n\n stake_accounts\n\n .get(&user.account_id())\n\n .as_ref()\n\n .unwrap()\n\n .stake\n\n .as_ref()\n\n .map_or(0, |balance| balance.amount.value())\n\n }\n\n\n\n let contract_state = ctx.operator.contract_state(&ctx.master_account);\n\n for user in ctx.users.values() {\n\n let receipt_before_claimed = ctx\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 35, "score": 114292.16670393987 }, { "content": "fn register_user_accounts(ctx: &TestContext) {\n\n println!(\"##############################\");\n\n println!(\"### register_user_accounts ###\");\n\n\n\n let account_storage_fee = ctx\n\n .account_management\n\n .account_storage_fee(ctx.master_account());\n\n println!(\"account_storage_fee = {}\", account_storage_fee);\n\n let gas = TGAS * 10;\n\n\n\n for user_account in ctx.users.values() {\n\n println!(\"registered user account: {}\", user_account.account_id());\n\n let result =\n\n ctx.account_management\n\n .register_account(user_account, account_storage_fee.into(), gas);\n\n result.assert_success();\n\n }\n\n\n\n println!(\"=== register_user_accounts === PASSED\");\n\n 
println!(\"=====================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 36, "score": 113138.87211565502 }, { "content": "fn check_pending_withdrawal(ctx: &TestContext) {\n\n println!(\"#################################\");\n\n println!(\"### check_pending_withdrawal ####\");\n\n\n\n unimplemented!();\n\n\n\n println!(\"=== check_pending_withdrawal ===\");\n\n println!(\"================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 37, "score": 113138.87211565502 }, { "content": "fn register_contract_owner_account(ctx: &TestContext) {\n\n println!(\"###########################################\");\n\n println!(\"### register_account_for_contract_owner ###\");\n\n\n\n let account_storage_fee = ctx\n\n .account_management\n\n .account_storage_fee(ctx.master_account());\n\n println!(\"account_storage_fee = {}\", account_storage_fee);\n\n let gas = TGAS * 10;\n\n let result = ctx.account_management.register_account(\n\n ctx.contract_owner(),\n\n account_storage_fee.into(),\n\n gas,\n\n );\n\n result.assert_success();\n\n\n\n println!(\"=== register_account_for_contract_owner === PASSED\");\n\n println!(\"==================================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 38, "score": 112034.61255187212 }, { "content": "fn redeem_all_stake_for_each_user_account(ctx: &TestContext) {\n\n println!(\"###############################################\");\n\n println!(\"### redeem_all_stake_for_each_user_account ####\");\n\n\n\n for user in ctx.users.values() {\n\n let account: StakeAccount = ctx\n\n .account_management\n\n .lookup_account(ctx.master_account(), &user.account_id())\n\n .unwrap();\n\n let batch_id: Option<BatchId> = ctx.staking_service.redeem_all(user);\n\n match account.stake.as_ref() {\n\n Some(stake) => {\n\n let batch_id = batch_id.unwrap();\n\n let account: StakeAccount = ctx\n\n .account_management\n\n .lookup_account(ctx.master_account(), &user.account_id())\n\n .unwrap();\n\n let batch = account.redeem_stake_batch.unwrap();\n\n assert_eq!(batch.id, batch_id);\n\n assert_eq!(batch.balance.amount, stake.amount);\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 39, "score": 112034.61255187212 }, { "content": "fn unlock_funds_in_staking_pool(ctx: &TestContext) {\n\n println!(\"#####################################\");\n\n println!(\"### unlock_funds_in_staking_pool ####\");\n\n\n\n let mut account = ctx.staking_pool.get_account(ctx.master_account());\n\n account.can_withdraw = true;\n\n ctx.staking_pool\n\n .update_account(ctx.master_account(), account);\n\n ctx.process_all_transactions();\n\n let account = ctx.staking_pool.get_account(ctx.master_account());\n\n assert!(account.can_withdraw);\n\n\n\n println!(\"=== unlock_funds_in_staking_pool ===\");\n\n println!(\"====================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 40, "score": 112034.61255187212 }, { "content": "fn deposit_funds_for_each_user_account(ctx: &TestContext) {\n\n println!(\"############################################\");\n\n println!(\"### deposit_funds_for_each_user_account ####\");\n\n\n\n let initial_contract_state = ctx.operator.contract_state(&ctx.master_account);\n\n let initial_batch_amount: YoctoNear = initial_contract_state\n\n .stake_batch\n\n .map_or(0.into(), |batch| batch.balance.amount);\n\n\n\n let mut amount = 0_u128;\n\n let mut total_deposit_amount = 0_u128;\n\n for 
user in ctx.users.values() {\n\n amount += 1;\n\n let deposit_amount: interface::YoctoNear = (YOCTO * amount).into();\n\n total_deposit_amount += deposit_amount.value();\n\n let batch_id: BatchId = ctx.staking_service.deposit(user, deposit_amount.clone());\n\n\n\n let stake_account: StakeAccount = ctx\n\n .account_management\n\n .lookup_account(&ctx.master_account, &user.account_id())\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 41, "score": 112034.61255187212 }, { "content": "fn check_state_after_all_redeemed_and_withdrawn(ctx: &TestContext) {\n\n println!(\"#####################################################\");\n\n println!(\"### check_state_after_all_redeemed_and_withdrawn ####\");\n\n\n\n unimplemented!();\n\n\n\n println!(\"=== check_state_after_all_redeemed_and_withdrawn ===\");\n\n println!(\"====================================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 42, "score": 112034.61255187212 }, { "content": "fn check_user_accounts_after_redeeming_all_stake(ctx: &TestContext) {\n\n println!(\"######################################################\");\n\n println!(\"### check_user_accounts_after_redeeming_all_stake ####\");\n\n\n\n unimplemented!();\n\n\n\n println!(\"=== check_user_accounts_after_redeeming_all_stake ===\");\n\n println!(\"=====================================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 43, "score": 110959.52459321052 }, { "content": "fn check_user_accounts_after_redeemed_stake_is_unstaked(ctx: &TestContext) {\n\n println!(\"#############################################################\");\n\n println!(\"### check_user_accounts_after_redeemed_stake_is_unstaked ####\");\n\n\n\n unimplemented!();\n\n\n\n println!(\"=== check_user_accounts_after_redeemed_stake_is_unstaked ===\");\n\n println!(\"============================================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 44, "score": 109912.3476901711 }, { "content": "fn check_config_after_deployment(config: &Config) {\n\n println!(\"#####################################\");\n\n println!(\"### check_config_after_deployment ###\");\n\n\n\n println!(\"{}\", serde_json::to_string_pretty(config).unwrap());\n\n // TODO\n\n\n\n println!(\"=== check_config_after_deployment === PASSED\");\n\n println!(\"============================================\")\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 45, "score": 109814.45856314592 }, { "content": "const debug = process.argv.pop() === '--debug'\n", "file_path": "contract-test/contract/compile.js", "rank": 46, "score": 109657.82173388064 }, { "content": "fn check_user_accounts_after_redeem_stake_batch_completed(ctx: &TestContext) {\n\n println!(\"###############################################################\");\n\n println!(\"### check_user_accounts_after_redeem_stake_batch_completed ####\");\n\n\n\n unimplemented!();\n\n\n\n println!(\"=== check_user_accounts_after_redeem_stake_batch_completed ===\");\n\n println!(\"==============================================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 47, "score": 108891.897310915 }, { "content": "fn check_contract_balances_after_deployment(balances: &ContractBalances) {\n\n println!(\"#####################################\");\n\n println!(\"### check_contract_balances_after_deployment ###\");\n\n\n\n println!(\"{}\", 
serde_json::to_string_pretty(balances).unwrap());\n\n assert_eq!(\n\n balances.total_contract_storage_usage_cost.value()\n\n + balances.total_available_balance.value(),\n\n balances.total_contract_balance.value(),\n\n \"total available balance = total contract balance minus contract's storage usage cost\"\n\n );\n\n assert_eq!(\n\n balances.contract_required_operational_balance.value(),\n\n CONTRACT_MIN_OPERATIONAL_BALANCE.value(),\n\n \"contract min operational balance did not match\"\n\n );\n\n assert_eq!(\n\n balances.contract_owner_available_balance.value(),\n\n balances.total_available_balance.value() - CONTRACT_MIN_OPERATIONAL_BALANCE.value(),\n\n \"contract owner available balance should be the entire contract available balance minus the min operational balance\"\n\n );\n\n\n\n println!(\"=== check_contract_balances_after_deployment === PASSED\");\n\n println!(\"=======================================================\")\n\n}\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 48, "score": 107355.02800962463 }, { "content": "fn check_no_accounts_registered(ctx: &TestContext, user: &UserAccount) {\n\n println!(\"####################################\");\n\n println!(\"### check_no_accounts_registered ###\");\n\n\n\n assert_eq!(ctx.account_management.total_registered_accounts(user), 0);\n\n assert!(ctx\n\n .account_management\n\n .lookup_account(user, &user.account_id())\n\n .is_none());\n\n\n\n println!(\"=== check_no_accounts_registered === PASSED\");\n\n println!(\"===========================================\")\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 49, "score": 106899.7541801671 }, { "content": "fn assert_yocto_near_attached() {\n\n assert_eq!(\n\n env::attached_deposit(),\n\n 1,\n\n \"exactly 1 yoctoNEAR must be attached\"\n\n )\n\n}\n\n\n", "file_path": "contract/src/contract/fungible_token.rs", "rank": 50, "score": 106537.2098250991 }, { "content": "fn check_contract_state_after_deployment(contract_state: &ContractState) {\n\n println!(\"#############################################\");\n\n println!(\"### check_contract_state_after_deployment ###\");\n\n\n\n assert_eq!(\n\n contract_state.storage_usage_growth.0 .0, 0,\n\n \"after deployment the contract storage usage should be baselined at zero\"\n\n );\n\n\n\n println!(\"=== check_contract_state_after_deployment === PASSED\");\n\n println!(\"=====================================================\")\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 51, "score": 106176.83441134857 }, { "content": " interface::StakeAccount, near::YOCTO, StakeTokenContractContract,\n\n};\n\nuse staking_pool_mock::StakingPoolContract;\n\nuse std::{cell::RefCell, collections::HashMap, convert::TryInto, rc::Rc};\n\n\n\nlazy_static! 
{\n\n static ref WASM_BYTES: &'static [u8] =\n\n include_bytes!(\"../res/oysterpack_near_stake_token.wasm\").as_ref();\n\n static ref STAKING_POOL_WASM_BYTES: &'static [u8] =\n\n include_bytes!(\"../res/staking_pool_mock.wasm\").as_ref();\n\n}\n\n\n\npub struct TestContext {\n\n pub runtime: Rc<RefCell<RuntimeStandalone>>,\n\n pub master_account: UserAccount,\n\n pub contract: ContractAccount<StakeTokenContractContract>,\n\n pub contract_owner: UserAccount,\n\n pub contract_account_id: AccountId,\n\n pub contract_operator: UserAccount,\n\n\n", "file_path": "contract/tests/test_utils.rs", "rank": 52, "score": 53922.512191165 }, { "content": " }\n\n\n\n pub fn contract(&self) -> &ContractAccount<StakeTokenContractContract> {\n\n &self.contract\n\n }\n\n\n\n pub fn contract_account_id(&self) -> &str {\n\n &self.contract_account_id\n\n }\n\n\n\n pub fn registered_stake_accounts(&self) -> HashMap<String, StakeAccount> {\n\n self.users\n\n .values()\n\n .fold(HashMap::new(), |mut accounts, user| {\n\n if let Some(account) = self\n\n .account_management\n\n .lookup_account(self.master_account(), &user.account_id())\n\n {\n\n accounts.insert(user.account_id().clone(), account);\n\n }\n\n accounts\n\n })\n\n }\n\n\n\n pub fn process_all_transactions(&self) {\n\n self.runtime.borrow_mut().process_all().unwrap();\n\n }\n\n}\n\n\n", "file_path": "contract/tests/test_utils.rs", "rank": 53, "score": 53914.71230201478 }, { "content": "#![allow(unused_imports)]\n\n\n\nuse oysterpack_near_stake_token::domain::{BlockTimeHeight, StakeTokenValue};\n\nuse oysterpack_near_stake_token::near::YOCTO;\n\nuse primitive_types::U256;\n\n\n\n#[test]\n", "file_path": "contract/tests/quick_test.rs", "rank": 54, "score": 53912.01373449972 }, { "content": " pub users: HashMap<String, UserAccount>,\n\n\n\n pub staking_pool: StakingPoolClient,\n\n pub staking_service: StakingServiceClient,\n\n pub account_management: AccountManagementClient,\n\n pub operator: OperatorClient,\n\n pub financials: FinancialsClient,\n\n}\n\n\n\nimpl TestContext {\n\n pub fn master_account(&self) -> &UserAccount {\n\n &self.master_account\n\n }\n\n\n\n pub fn contract_owner(&self) -> &UserAccount {\n\n &self.contract_owner\n\n }\n\n\n\n pub fn contract_operator(&self) -> &UserAccount {\n\n &self.contract_operator\n", "file_path": "contract/tests/test_utils.rs", "rank": 55, "score": 53910.91604805796 }, { "content": "#![allow(dead_code)]\n\n\n\nextern crate oysterpack_near_stake_token;\n\nextern crate staking_pool_mock;\n\n\n\nuse near_sdk_sim::*;\n\n\n\nuse crate::account_management_client::AccountManagementClient;\n\nuse crate::financials_client::FinancialsClient;\n\nuse crate::operator_client::OperatorClient;\n\nuse crate::staking_pool_client::StakingPoolClient;\n\nuse crate::staking_service_client::StakingServiceClient;\n\nuse near_sdk::json_types::ValidAccountId;\n\nuse near_sdk::AccountId;\n\nuse near_sdk_sim::errors::TxExecutionError;\n\nuse near_sdk_sim::{\n\n runtime::{init_runtime, RuntimeStandalone},\n\n transaction::ExecutionStatus,\n\n};\n\nuse oysterpack_near_stake_token::{\n", "file_path": "contract/tests/test_utils.rs", "rank": 56, "score": 53910.8555383836 }, { "content": " signer_account: master_account,\n\n // init method\n\n init_method: new(staking_pool_id, contract_owner_id, contract_operator_id)\n\n );\n\n let contract_account_id = contract.user_account.account_id();\n\n\n\n // create 3 user accounts with 1000 NEAR\n\n let mut users = HashMap::new();\n\n for i in 1..=3 {\n\n let account_id = format!(\"user-{}\", i);\n\n let 
user_account = master_account.create_user(account_id.clone(), 1000 * YOCTO);\n\n users.insert(account_id, user_account);\n\n }\n\n\n\n // deploy staking pool contract mock\n\n deploy!(\n\n // Contract Proxy\n\n contract: StakingPoolContract,\n\n // Contract account id\n\n contract_id: \"astro-stakers-poolv1\",\n", "file_path": "contract/tests/test_utils.rs", "rank": 57, "score": 53903.75224572476 }, { "content": " println!(\n\n \"balance went down by: {} | {}\",\n\n balance_1 - balance_2,\n\n (balance_1 - balance_2) as f64 / YOCTO as f64\n\n );\n\n }\n\n\n\n i += 1;\n\n }\n\n}\n", "file_path": "contract/tests/quick_test.rs", "rank": 58, "score": 53903.20412749113 }, { "content": " // Bytes of contract\n\n bytes: &STAKING_POOL_WASM_BYTES,\n\n // User deploying the contract,\n\n signer_account: master_account,\n\n // init method\n\n init_method: new()\n\n );\n\n\n\n let staking_service = StakingServiceClient::new(&contract_account_id);\n\n let staking_pool = StakingPoolClient::new(STAKING_POOL_ID, &contract_account_id);\n\n let account_management = AccountManagementClient::new(&contract_account_id);\n\n let operator = OperatorClient::new(&contract_account_id);\n\n let financials = FinancialsClient::new(&contract_account_id);\n\n\n\n TestContext {\n\n runtime,\n\n\n\n master_account,\n\n contract,\n\n contract_account_id,\n", "file_path": "contract/tests/test_utils.rs", "rank": 59, "score": 53897.32970481433 }, { "content": " contract_owner,\n\n contract_operator,\n\n\n\n users,\n\n staking_pool,\n\n staking_service,\n\n account_management,\n\n operator,\n\n financials,\n\n }\n\n}\n\n\n", "file_path": "contract/tests/test_utils.rs", "rank": 60, "score": 53894.06262779117 }, { "content": " / U256::from(YOCTO))\n\n .as_u128();\n\n\n\n let stake_account = ctx\n\n .account_management\n\n .lookup_account(&ctx.master_account, &user.account_id())\n\n .unwrap();\n\n assert_eq!(\n\n stake_account.stake.unwrap().amount.value(),\n\n stake_value + stake_balance(accounts_before_staking, user)\n\n );\n\n }\n\n\n\n assert!(ctx\n\n .staking_service\n\n .stake_batch_receipt(\n\n &ctx.master_account,\n\n contract_state.batch_id_sequence.clone(),\n\n ).is_none(), \"after all funds have been claimed from the receipt, then it should be deleted from storage\");\n\n\n\n println!(\"=== check_user_accounts_after_deposits_are_staked ===\");\n\n println!(\"=====================================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 61, "score": 53296.444100789115 }, { "content": "\n\nuse near_sdk::{\n\n serde_json::{self, json},\n\n PendingContractTx,\n\n};\n\nuse near_sdk_sim::*;\n\n\n\nuse oysterpack_near_stake_token::{\n\n config::CONTRACT_MIN_OPERATIONAL_BALANCE,\n\n domain::TGAS,\n\n interface::{\n\n self, contract_state::ContractState, BatchId, Config, ContractBalances, StakeAccount,\n\n StakeBatch, StakingService, YoctoNear,\n\n },\n\n near::{NO_DEPOSIT, YOCTO},\n\n};\n\n\n\nuse account_management_client::*;\n\nuse financials_client::*;\n\nuse oysterpack_near_stake_token::core::U256;\n\nuse staking_service_client::*;\n\n\n\nuse std::{collections::HashMap, convert::TryInto};\n\n\n\nuse near_sdk_sim::transaction::ExecutionStatus;\n\nuse test_utils::*;\n\n\n\n#[test]\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 62, "score": 53296.02804838267 }, { "content": " .unwrap();\n\n let user_stake_batch = stake_account.stake_batch.unwrap();\n\n assert_eq!(user_stake_batch.id, batch_id);\n\n assert_eq!(user_stake_batch.balance.amount, 
deposit_amount);\n\n }\n\n println!(\"total_deposit_amount = {}\", total_deposit_amount);\n\n\n\n // check that the StakeBatch amount matches\n\n let contract_state = ctx.operator.contract_state(&ctx.master_account);\n\n let batch: StakeBatch = contract_state.stake_batch.unwrap();\n\n assert_eq!(\n\n batch.balance.amount.value(),\n\n total_deposit_amount + initial_batch_amount.value()\n\n );\n\n\n\n println!(\"=== deposit_funds_for_each_user_account ===\");\n\n println!(\"===========================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 63, "score": 53288.213076665816 }, { "content": " }\n\n Some(_) => {\n\n let staking_pool_account = ctx.staking_pool.get_account(ctx.master_account());\n\n let result: ExecutionResult = ctx.staking_service.unstake(&ctx.contract_operator);\n\n if staking_pool_account.can_withdraw {\n\n result.assert_success();\n\n } else {\n\n assert!(!result.is_ok());\n\n if let ExecutionStatus::Failure(err) = &result.outcome().status {\n\n let err_msg = format!(\"{:?}\", err);\n\n assert!(err_msg\n\n .contains(\"unstaked NEAR funds are not yet available for withdrawal\"));\n\n } else {\n\n panic!(\"expected unstake to fail because the unstaked NEAR is not yet available for withdrawal\")\n\n }\n\n }\n\n }\n\n }\n\n\n\n println!(\"=== unstake ===\");\n\n println!(\"===============\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 64, "score": 53286.965224821965 }, { "content": " assert!(\n\n account.stake.is_none(),\n\n \"after redeeming all STAKE, then all STAKE should be moved into batch\"\n\n );\n\n }\n\n None => assert!(batch_id.is_none()),\n\n }\n\n }\n\n\n\n println!(\"=== redeem_all_stake_for_each_user_account ===\");\n\n println!(\"==============================================\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 65, "score": 53285.05062851385 }, { "content": "#![allow(unused_imports, unreachable_code, unused_variables, dead_code)]\n\n\n\n//! before running the simulation test, make sure the wasm files are built for the STAKE token contrac\n\n//! and the mock staking pool contract\n\n//! ```shell\n\n//! cd contract\n\n//! ./build.sh\n\n//!\n\n//! cd staking-pool-mock\n\n//! ./build.sh\n\n//! 
```\n\n\n\nextern crate oysterpack_near_stake_token;\n\n\n\nmod account_management_client;\n\nmod financials_client;\n\nmod operator_client;\n\nmod staking_pool_client;\n\nmod staking_service_client;\n\nmod test_utils;\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 66, "score": 53284.28609915665 }, { "content": " \"stake batch should have been cleared\"\n\n );\n\n\n\n let receipt = ctx\n\n .staking_service\n\n .stake_batch_receipt(&ctx.master_account, contract_state.batch_id_sequence)\n\n .unwrap();\n\n assert_eq!(batch.balance.amount, receipt.staked_near);\n\n assert_eq!(\n\n contract_state.total_stake_supply.amount.value(),\n\n initial_contract_state.total_stake_supply.amount.value()\n\n + receipt.stake_minted.value()\n\n );\n\n\n\n let staking_pool_account = ctx.staking_pool.get_account(&ctx.master_account);\n\n assert_eq!(\n\n staking_pool_account.total_balance(),\n\n contract_state\n\n .stake_token_value\n\n .total_staked_near_balance\n\n .value()\n\n )\n\n }\n\n }\n\n\n\n println!(\"=== stake ===\");\n\n println!(\"=============\");\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 67, "score": 53283.29038440405 }, { "content": " .staking_service\n\n .stake_batch_receipt(\n\n &ctx.master_account,\n\n contract_state.batch_id_sequence.clone(),\n\n )\n\n .unwrap();\n\n ctx.staking_service.claim_receipts(user);\n\n\n\n let amount_claimed = if let Some(receipt_after_claimed) =\n\n ctx.staking_service.stake_batch_receipt(\n\n &ctx.master_account,\n\n contract_state.batch_id_sequence.clone(),\n\n ) {\n\n receipt_before_claimed.staked_near.value() - receipt_after_claimed.staked_near.value()\n\n } else {\n\n receipt_before_claimed.staked_near.value()\n\n };\n\n\n\n let stake_value = (U256::from(amount_claimed)\n\n * U256::from(contract_state.stake_token_value.value.value())\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 68, "score": 53282.853646272204 }, { "content": " unstake(&ctx);\n\n // check_pending_withdrawal(&ctx);\n\n // check_user_accounts_after_redeemed_stake_is_unstaked(&ctx);\n\n\n\n unstake(&ctx); // while pending withdrawal\n\n // check_pending_withdrawal(&ctx);\n\n unlock_funds_in_staking_pool(&ctx);\n\n\n\n unstake(&ctx); // unstaked NEAR should be withdrawn\n\n\n\n // check_state_after_all_redeemed_and_withdrawn(&ctx);\n\n // check_user_accounts_after_redeem_stake_batch_completed(&ctx);\n\n}\n\n\n", "file_path": "contract/tests/contract_sim_test.rs", "rank": 69, "score": 53273.99604045281 }, { "content": " signOut() {\n", "file_path": "contract-test/src/main.test.js", "rank": 70, "score": 49303.25944418887 }, { "content": " isSignedIn() {\n\n return true\n", "file_path": "contract-test/src/main.test.js", "rank": 71, "score": 49303.25944418887 }, { "content": " requestSignIn() {\n", "file_path": "contract-test/src/main.test.js", "rank": 72, "score": 48783.821604906814 }, { "content": " getAccountId() {\n\n return window.accountId\n", "file_path": "contract-test/src/main.test.js", "rank": 73, "score": 48275.21479981558 }, { "content": "//! 
centralizes all error messages\n\n\n\npub mod asserts {\n\n pub const PREDECESSOR_MUST_NE_SELF_OR_OPERATOR: &str =\n\n \"contract call is only allowed internally or by an operator account\";\n\n pub const PREDECESSOR_MUST_BE_OPERATOR: &str =\n\n \"contract call is only allowed by an operator account\";\n\n pub const OPERATOR_ID_MUST_NOT_BE_CONTRACT_ID: &str =\n\n \"operator account ID must not be the contract account ID\";\n\n pub const PREDECESSOR_MUST_BE_OWNER: &str =\n\n \"contract call is only allowed by the contract owner\";\n\n}\n\n\n\npub mod staking_pool_failures {\n\n\n\n pub const UNSTAKE_FAILURE: &str = \"failed to unstake NEAR with staking pool\";\n\n\n\n pub const GET_ACCOUNT_FAILURE: &str = \"failed to get account info from staking pool\";\n\n\n\n pub const WITHDRAW_ALL_FAILURE: &str =\n", "file_path": "contract/src/errors.rs", "rank": 74, "score": 47014.1495341449 }, { "content": " \"failed to withdraw all unstaked funds from staking pool\";\n\n}\n\n\n\npub mod staking_errors {\n\n pub const BLOCKED_BY_BATCH_RUNNING: &str = \"action is blocked because a batch is running\";\n\n\n\n pub const BLOCKED_BY_STAKE_TOKEN_VALUE_REFRESH: &str =\n\n \"action is blocked because STAKE token value is being refreshed\";\n\n\n\n pub const NO_FUNDS_IN_STAKE_BATCH_TO_WITHDRAW: &str = \"there are no funds in stake batch\";\n\n}\n\n\n\npub mod redeeming_stake_errors {\n\n pub const NO_REDEEM_STAKE_BATCH_TO_RUN: &str = \"there is no redeem stake batch\";\n\n\n\n pub const UNSTAKING_BLOCKED_BY_PENDING_WITHDRAWAL: &str =\n\n \"unstaking is blocked until all unstaked NEAR can be withdrawn\";\n\n\n\n pub const UNSTAKED_FUNDS_NOT_AVAILABLE_FOR_WITHDRAWAL: &str =\n\n \"unstaked NEAR funds are not yet available for withdrawal\";\n", "file_path": "contract/src/errors.rs", "rank": 75, "score": 47008.448350370585 }, { "content": "}\n\n\n\npub mod staking_service {\n\n pub const DEPOSIT_REQUIRED_FOR_STAKE: &str = \"deposit is required in order to stake\";\n\n\n\n pub const ZERO_REDEEM_AMOUNT: &str = \"redeem amount must not be zero\";\n\n\n\n pub const INSUFFICIENT_STAKE_FOR_REDEEM_REQUEST: &str =\n\n \"account STAKE balance is insufficient to fulfill request\";\n\n\n\n pub const BATCH_BALANCE_INSUFFICIENT: &str = \"batch balance is insufficient to fulfill request\";\n\n}\n\n\n\npub mod illegal_state {\n\n pub const STAKE_BATCH_SHOULD_EXIST: &str = \"ILLEGAL STATE : stake batch should exist\";\n\n\n\n pub const REDEEM_STAKE_BATCH_SHOULD_EXIST: &str =\n\n \"ILLEGAL STATE : redeem stake batch should exist\";\n\n\n\n pub const REDEEM_STAKE_BATCH_RECEIPT_SHOULD_EXIST: &str =\n", "file_path": "contract/src/errors.rs", "rank": 76, "score": 47008.40654990406 }, { "content": " \"ILLEGAL STATE : redeem stake batch receipt should exist\";\n\n\n\n pub const ILLEGAL_REDEEM_LOCK_STATE: &str = \"ILLEGAL STATE : illegal redeem lock state\";\n\n}\n\n\n\npub mod account_management {\n\n pub const INSUFFICIENT_STORAGE_FEE: &str =\n\n \"sufficient deposit is required to pay for account storage fees\";\n\n\n\n pub const ACCOUNT_ALREADY_REGISTERED: &str = \"account is already registered\";\n\n\n\n pub const UNREGISTER_REQUIRES_ZERO_BALANCES: &str =\n\n \"all funds must be withdrawn from the account in order to unregister\";\n\n\n\n pub const ACCOUNT_NOT_REGISTERED: &str = \"account is not registered\";\n\n}\n\n\n\npub mod contract_owner {\n\n\n\n pub const INSUFFICIENT_FUNDS_FOR_OWNER_WITHDRAWAL: &str =\n\n \"owner balance is too low to fulfill withdrawal request\";\n\n\n\n pub const INSUFFICIENT_FUNDS_FOR_OWNER_STAKING: &str 
=\n\n \"owner balance is too low to fulfill stake request\";\n\n\n\n pub const TRANSFER_TO_NON_REGISTERED_ACCOUNT: &str =\n\n \"contract ownership can only be transferred to a registered account\";\n\n}\n", "file_path": "contract/src/errors.rs", "rank": 77, "score": 47003.751579272575 }, { "content": "beforeAll(async function () {\n\n // NOTE: nearlib and nearConfig are made available by near-cli/test_environment\n\n const near = await nearlib.connect(nearConfig)\n\n window.accountId = nearConfig.contractName\n\n window.contract = await near.loadContract(nearConfig.contractName, {\n\n viewMethods: ['get_greeting'],\n\n changeMethods: [],\n\n sender: window.accountId\n\n })\n\n\n\n window.walletConnection = {\n\n requestSignIn() {\n\n },\n\n signOut() {\n\n },\n\n isSignedIn() {\n\n return true\n\n },\n\n getAccountId() {\n\n return window.accountId\n\n }\n\n }\n\n})\n\n\n\ntest('get_greeting', async () => {\n\n const message = await window.contract.get_greeting({ account_id: window.accountId })\n\n expect(message).toEqual('Hello')\n\n})\n", "file_path": "contract-test/src/main.test.js", "rank": 78, "score": 45883.37699260118 }, { "content": "#![allow(dead_code)]\n\n\n\nuse crate::interface::AccountManagement;\n\nuse crate::near_env::Env;\n\nuse crate::{near::*, Contract};\n\nuse near_sdk::test_utils::VMContextBuilder;\n\nuse near_sdk::{\n\n json_types::ValidAccountId,\n\n serde::{Deserialize, Serialize},\n\n serde_json,\n\n test_utils::get_created_receipts,\n\n testing_env, MockedBlockchain, PromiseResult, VMContext,\n\n};\n\nuse std::convert::TryInto;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\npub struct TestContext<'a> {\n\n pub contract: Contract,\n\n pub account_id: &'a str,\n\n pub context: VMContext,\n\n}\n\n\n", "file_path": "contract/src/test_utils.rs", "rank": 79, "score": 45292.14887780559 }, { "content": "#![allow(dead_code)]\n\n\n\nuse near_sdk::{serde_json::json, AccountId, PendingContractTx};\n\nuse near_sdk_sim::*;\n\nuse oysterpack_near_stake_token::{\n\n domain::{Gas, YoctoNear},\n\n interface::{Config, ContractBalances},\n\n};\n\n\n\npub struct FinancialsClient {\n\n contract_account_id: AccountId,\n\n}\n\n\n\nimpl FinancialsClient {\n\n pub fn new(contract_account_id: &str) -> Self {\n\n Self {\n\n contract_account_id: contract_account_id.to_string(),\n\n }\n\n }\n\n\n", "file_path": "contract/tests/financials_client.rs", "rank": 80, "score": 45279.09479070184 }, { "content": " pub fn register_owner(&mut self) {\n\n self.register_account(TEST_OWNER_ID);\n\n }\n\n\n\n pub fn register_operator(&mut self) {\n\n self.register_account(TEST_OPERATOR_ID);\n\n }\n\n\n\n pub fn register_account(&mut self, account_id: &str) {\n\n let mut context = self.set_predecessor_account_id(account_id);\n\n context.attached_deposit = YOCTO;\n\n testing_env!(context.clone());\n\n self.contract.register_account();\n\n\n\n context.attached_deposit = 0;\n\n testing_env!(context);\n\n }\n\n\n\n pub fn set_predecessor_account_id(&mut self, account_id: &str) -> VMContext {\n\n let mut context = self.context.clone();\n", "file_path": "contract/src/test_utils.rs", "rank": 81, "score": 45277.75220409336 }, { "content": "\n\n Self {\n\n contract,\n\n account_id: TEST_ACCOUNT_ID,\n\n context,\n\n }\n\n }\n\n\n\n /// uses [`TEST_ACCOUNT_ID`] as the predecessor account ID\n\n pub fn new() -> Self {\n\n TestContext::with_vm_context(new_context(TEST_ACCOUNT_ID))\n\n }\n\n\n\n /// uses [`TEST_ACCOUNT_ID`] as the predecessor account ID and registers the account with the contract\n\n pub fn 
with_registered_account() -> Self {\n\n let mut context = new_context(TEST_ACCOUNT_ID);\n\n testing_env!(context.clone());\n\n\n\n let mut contract = Contract::new(\n\n to_valid_account_id(TEST_STAKING_POOL_ID),\n", "file_path": "contract/src/test_utils.rs", "rank": 82, "score": 45276.267953560426 }, { "content": "use near_sdk::{serde_json::json, AccountId, PendingContractTx};\n\nuse near_sdk_sim::*;\n\nuse oysterpack_near_stake_token::interface::contract_state::ContractState;\n\nuse oysterpack_near_stake_token::interface::Config;\n\n\n\npub struct OperatorClient {\n\n contract_account_id: AccountId,\n\n}\n\n\n\nimpl OperatorClient {\n\n pub fn new(contract_account_id: &str) -> Self {\n\n Self {\n\n contract_account_id: contract_account_id.to_string(),\n\n }\n\n }\n\n\n\n pub fn contract_state(&self, user: &UserAccount) -> ContractState {\n\n let result = user.view(PendingContractTx::new(\n\n &self.contract_account_id,\n\n \"contract_state\",\n", "file_path": "contract/tests/operator_client.rs", "rank": 83, "score": 45274.513065274616 }, { "content": " context.predecessor_account_id = account_id.to_string();\n\n context\n\n }\n\n}\n\n\n\nimpl<'a> Deref for TestContext<'a> {\n\n type Target = Contract;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.contract\n\n }\n\n}\n\n\n\nimpl<'a> DerefMut for TestContext<'a> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.contract\n\n }\n\n}\n\n\n", "file_path": "contract/src/test_utils.rs", "rank": 84, "score": 45269.8956436255 }, { "content": " to_valid_account_id(TEST_OWNER_ID),\n\n to_valid_account_id(TEST_OPERATOR_ID),\n\n );\n\n\n\n context.attached_deposit = YOCTO;\n\n testing_env!(context.clone());\n\n contract.register_account();\n\n context.account_balance += contract.account_storage_fee().value();\n\n\n\n context.attached_deposit = 0;\n\n context.storage_usage = contract.contract_initial_storage_usage.value();\n\n testing_env!(context.clone());\n\n\n\n Self {\n\n contract,\n\n account_id: TEST_ACCOUNT_ID,\n\n context,\n\n }\n\n }\n\n\n", "file_path": "contract/src/test_utils.rs", "rank": 85, "score": 45266.80242309258 }, { "content": " json!({}),\n\n true,\n\n ));\n\n\n\n let state: ContractState = result.unwrap_json();\n\n println!(\n\n \"{}\",\n\n near_sdk::serde_json::to_string_pretty(&state).unwrap()\n\n );\n\n state\n\n }\n\n\n\n pub fn config(&self, user: &UserAccount) -> Config {\n\n let result = user.view(PendingContractTx::new(\n\n &self.contract_account_id,\n\n \"config\",\n\n json!({}),\n\n true,\n\n ));\n\n\n\n result.unwrap_json()\n\n }\n\n}\n", "file_path": "contract/tests/operator_client.rs", "rank": 86, "score": 45262.967310839646 }, { "content": " Transfer {\n\n deposit: u128,\n\n },\n\n FunctionCall {\n\n method_name: String,\n\n args: String,\n\n gas: u64,\n\n deposit: u128,\n\n },\n\n}\n\n\n", "file_path": "contract/src/test_utils.rs", "rank": 87, "score": 45260.69059287383 }, { "content": " pub fn balances(&self, user: &UserAccount) -> ContractBalances {\n\n let result = user.view(PendingContractTx::new(\n\n &self.contract_account_id,\n\n \"balances\",\n\n json!({}),\n\n true,\n\n ));\n\n\n\n result.unwrap_json()\n\n }\n\n\n\n pub fn deposit_earnings(\n\n &self,\n\n user: &UserAccount,\n\n deposit: YoctoNear,\n\n gas: Gas,\n\n ) -> ContractBalances {\n\n let result = user.call(\n\n PendingContractTx::new(\n\n &self.contract_account_id,\n", "file_path": "contract/tests/financials_client.rs", "rank": 88, "score": 45258.304876737406 }, { "content": " \"deposit_earnings\",\n\n json!({}),\n\n 
false,\n\n ),\n\n deposit.value(),\n\n gas.value(),\n\n );\n\n\n\n result.unwrap_json()\n\n }\n\n}\n", "file_path": "contract/tests/financials_client.rs", "rank": 89, "score": 45250.51975659279 }, { "content": "use crate::core::U256;\n\nuse crate::interface;\n\nuse near_sdk::{\n\n borsh::{self, BorshDeserialize, BorshSerialize},\n\n json_types::U128,\n\n};\n\nuse std::{\n\n fmt::{self, Display, Formatter},\n\n ops::{Add, AddAssign, Deref, DerefMut, Sub, SubAssign},\n\n};\n\n\n\n#[derive(\n\n BorshSerialize, BorshDeserialize, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Default,\n\n)]\n\npub struct YoctoNear(pub u128);\n\n\n\nimpl From<u128> for YoctoNear {\n\n fn from(value: u128) -> Self {\n\n Self(value)\n\n }\n", "file_path": "contract/src/domain/yocto_near.rs", "rank": 90, "score": 45173.084607659745 }, { "content": "use crate::core::U256;\n\nuse crate::interface;\n\nuse near_sdk::{\n\n borsh::{self, BorshDeserialize, BorshSerialize},\n\n json_types::U128,\n\n};\n\nuse std::{\n\n fmt::{self, Display, Formatter},\n\n ops::{Add, AddAssign, Deref, DerefMut, Sub, SubAssign},\n\n};\n\n\n\n#[derive(\n\n BorshSerialize, BorshDeserialize, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Default,\n\n)]\n\npub struct YoctoStake(pub u128);\n\n\n\nimpl From<u128> for YoctoStake {\n\n fn from(value: u128) -> Self {\n\n Self(value)\n\n }\n", "file_path": "contract/src/domain/yocto_stake.rs", "rank": 91, "score": 45173.084607659745 }, { "content": "impl DerefMut for YoctoStake {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl Display for YoctoStake {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl From<YoctoStake> for U256 {\n\n fn from(value: YoctoStake) -> Self {\n\n U256::from(value.value())\n\n }\n\n}\n\n\n\nimpl Sub for YoctoStake {\n\n type Output = YoctoStake;\n", "file_path": "contract/src/domain/yocto_stake.rs", "rank": 92, "score": 45132.516144644935 }, { "content": "impl Display for YoctoNear {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl Sub for YoctoNear {\n\n type Output = YoctoNear;\n\n\n\n fn sub(self, rhs: Self) -> Self::Output {\n\n YoctoNear(\n\n self.0\n\n .checked_sub(rhs.0)\n\n .expect(\"attempt to subtract with overflow\"),\n\n )\n\n }\n\n}\n\n\n\nimpl SubAssign for YoctoNear {\n\n fn sub_assign(&mut self, rhs: Self) {\n", "file_path": "contract/src/domain/yocto_near.rs", "rank": 93, "score": 45131.630795910074 }, { "content": "impl From<YoctoNear> for U128 {\n\n fn from(value: YoctoNear) -> Self {\n\n value.0.into()\n\n }\n\n}\n\n\n\nimpl Deref for YoctoNear {\n\n type Target = u128;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl DerefMut for YoctoNear {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n", "file_path": "contract/src/domain/yocto_near.rs", "rank": 94, "score": 45129.7347709718 }, { "content": "}\n\n\n\nimpl From<U128> for YoctoNear {\n\n fn from(value: U128) -> Self {\n\n Self(value.0)\n\n }\n\n}\n\n\n\nimpl YoctoNear {\n\n pub fn value(&self) -> u128 {\n\n self.0\n\n }\n\n}\n\n\n\nimpl From<YoctoNear> for u128 {\n\n fn from(value: YoctoNear) -> Self {\n\n value.0\n\n }\n\n}\n\n\n", "file_path": "contract/src/domain/yocto_near.rs", "rank": 95, "score": 45129.382559743186 }, { "content": "}\n\n\n\nimpl YoctoStake {\n\n pub fn value(&self) -> u128 {\n\n self.0\n\n }\n\n}\n\n\n\nimpl From<YoctoStake> for u128 {\n\n fn from(value: YoctoStake) -> 
Self {\n\n value.0\n\n }\n\n}\n\n\n\nimpl From<YoctoStake> for U128 {\n\n fn from(value: YoctoStake) -> Self {\n\n value.0.into()\n\n }\n\n}\n\n\n", "file_path": "contract/src/domain/yocto_stake.rs", "rank": 96, "score": 45128.92043867663 }, { "content": "impl From<U128> for YoctoStake {\n\n fn from(value: U128) -> Self {\n\n Self(value.0)\n\n }\n\n}\n\n\n\nimpl From<interface::YoctoStake> for YoctoStake {\n\n fn from(value: interface::YoctoStake) -> Self {\n\n Self(value.0 .0)\n\n }\n\n}\n\n\n\nimpl Deref for YoctoStake {\n\n type Target = u128;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "contract/src/domain/yocto_stake.rs", "rank": 97, "score": 45125.145643414355 }, { "content": " fn add_assign(&mut self, rhs: Self) {\n\n self.0 = self\n\n .0\n\n .checked_add(rhs.0)\n\n .expect(\"attempt to add with overflow\")\n\n }\n\n}\n\n\n\nimpl From<interface::YoctoNear> for YoctoNear {\n\n fn from(value: interface::YoctoNear) -> Self {\n\n YoctoNear(value.value())\n\n }\n\n}\n\n\n\nimpl From<YoctoNear> for U256 {\n\n fn from(value: YoctoNear) -> Self {\n\n U256::from(value.value())\n\n }\n\n}\n", "file_path": "contract/src/domain/yocto_near.rs", "rank": 98, "score": 45122.45944747938 }, { "content": "\n\n fn sub(self, rhs: Self) -> Self::Output {\n\n YoctoStake(\n\n self.0\n\n .checked_sub(rhs.0)\n\n .expect(\"attempt to subtract with overflow\"),\n\n )\n\n }\n\n}\n\n\n\nimpl SubAssign for YoctoStake {\n\n fn sub_assign(&mut self, rhs: Self) {\n\n self.0 = self\n\n .0\n\n .checked_sub(rhs.0)\n\n .expect(\"attempt to subtract with overflow\")\n\n }\n\n}\n\n\n\nimpl Add for YoctoStake {\n", "file_path": "contract/src/domain/yocto_stake.rs", "rank": 99, "score": 45122.0650174093 } ]
Rust
src/articles/library.rs
tiagoamaro/pickpocket-rust
fc95d1152da1e6526e4d357896a323b3294293d8
use crate::articles::api::API;
use crate::articles::article::Article;
use crate::articles::inventory::Inventory;
use crate::configuration::Configuration;
use crate::logger;
use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize};
use serde_yaml;
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;

#[derive(Serialize, Deserialize, Debug)]
pub struct Library {
    read: Inventory,
    unread: Inventory,
}

impl Library {
    pub fn new() -> Library {
        Library {
            read: Inventory::new(),
            unread: Inventory::new(),
        }
    }

    pub fn guarantee_home_folder() {
        let config = Configuration::default();
        match std::fs::create_dir_all(config.home_folder) {
            Ok(_) => {}
            Err(error) => {
                let message = format!("Could not create home folder. Motive: {}", error);
                logger::log(&message);
            }
        };
    }

    fn write_inventory(library: &Library) {
        let config = Configuration::default();
        let library_string = serde_yaml::to_string(library).unwrap();
        std::fs::write(config.library_file, library_string).ok();
    }

    fn load() -> Library {
        let config = Configuration::default();
        if !Path::new(&config.library_file).exists() {
            logger::log("Inventory file not found. Creating...");
            Library::write_inventory(&Library::new());
            File::open(&config.library_file).unwrap();
        }
        let content = std::fs::read_to_string(config.library_file).unwrap();
        serde_yaml::from_str::<Library>(&content).unwrap()
    }

    fn random_unread_article() -> Option<Article> {
        let library = Library::load();
        let article_ids: Vec<&String> = library.unread.articles.keys().collect();
        let mut rng = rand::thread_rng();
        let choice = article_ids.choose(&mut rng);
        match choice {
            Some(article_id) => {
                let id = article_id.to_string();
                let article = &library.unread.articles[&id];
                Some(article.to_owned())
            }
            None => None,
        }
    }

    fn move_to_read(article_id: String) {
        let mut library = Library::load();
        match library.unread.articles.remove(&article_id) {
            Some(read_article) => {
                library
                    .read
                    .articles
                    .insert(read_article.id.to_owned(), read_article.to_owned());
            }
            None => {}
        };
        Library::write_inventory(&library);
    }

    pub fn status() {
        let library = Library::load();
        logger::log(&format!(
            "You have {} read articles",
            &library.read.articles.len()
        ));
        logger::log(&format!(
            "You have {} unread articles",
            &library.unread.articles.len()
        ));
    }

    pub fn pick(quantity: Option<usize>) {
        let quantity = quantity.unwrap_or(1);
        for _ in 0..quantity {
            match Library::random_unread_article() {
                Some(article) => {
                    Library::move_to_read(article.id);
                    open::that(article.url).ok();
                }
                None => {
                    logger::log("You have read all articles!");
                }
            };
        }
    }

    pub fn renew() {
        let api = API::new();
        let library = Library::load();
        let read_articles: Vec<&Article> = library.read.articles.values().collect();
        api.delete(read_articles);

        let api_list = api.retrieve()["list"].to_owned();
        let api_articles =
            match serde_json::from_value::<HashMap<String, serde_json::Value>>(api_list) {
                Ok(articles) => articles,
                Err(_) => HashMap::new(),
            };

        let new_inventory: HashMap<String, Article> = api_articles
            .into_iter()
            .map(|(id, data)| {
                (
                    id.to_string(),
                    Article {
                        id: id.to_owned(),
                        url: data["given_url"].as_str().unwrap().to_owned(),
                        title: data["resolved_title"].as_str().unwrap().to_owned(),
                    },
                )
            })
            .collect();

        let new_library = Library {
            read: Inventory::new(),
            unread: Inventory {
                articles: new_inventory,
            },
        };
        Library::write_inventory(&new_library);
        logger::log("Refreshed library");
    }
}
use crate::articles::api::API;
use crate::articles::article::Article;
use crate::articles::inventory::Inventory;
use crate::configuration::Configuration;
use crate::logger;
use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize};
use serde_yaml;
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;

#[derive(Serialize, Deserialize, Debug)]
pub struct Library {
    read: Inventory,
    unread: Inventory,
}

impl Library {
    pub fn new() -> Library {
        Library {
            read: Inventory::new(),
            unread: Inventory::new(),
        }
    }

    pub fn guarantee_home_folder() {
        let config = Configuration::default();
        match std::fs::create_dir_all(config.home_folder) {
            Ok(_) => {}
            Err(error) => {
                let message = format!("Could not create home folder. Motive: {}", error);
                logger::log(&message);
            }
        };
    }

    fn write_inventory(library: &Library) {
        let config = Configuration::default();
        let library_string = serde_yaml::to_string(library).unwrap();
        std::fs::write(config.library_file, library_string).ok();
    }

    fn load() -> Library {
        let config = Configuration::default();
        if !Path::new(&config.library_file).exists() {
            logger::log("Inventory file not found. Creating...");
            Library::write_inventory(&Library::new());
            File::open(&config.library_file).unwrap();
        }
        let content = std::fs::read_to_string(config.library_file).unwrap();
        serde_yaml::from_str::<Library>(&content).unwrap()
    }

    fn random_unread_article() -> Option<Article> {
        let library = Library::load();
        let article_ids: Vec<&String> = library.unread.articles.keys().collect();
        let mut rng = rand::thread_rng();
        let choice = article_ids.choose(&mut rng);
        match choice {
            Some(article_id) => {
                let id = article_id.to_string();
                let article = &library.unread.articles[&id];
                Some(article.to_owned())
            }
            None => None,
        }
    }
    pub fn status() {
        let library = Library::load();
        logger::log(&format!(
            "You have {} read articles",
            &library.read.articles.len()
        ));
        logger::log(&format!(
            "You have {} unread articles",
            &library.unread.articles.len()
        ));
    }

    pub fn pick(quantity: Option<usize>) {
        let quantity = quantity.unwrap_or(1);
        for _ in 0..quantity {
            match Library::random_unread_article() {
                Some(article) => {
                    Library::move_to_read(article.id);
                    open::that(article.url).ok();
                }
                None => {
                    logger::log("You have read all articles!");
                }
            };
        }
    }

    pub fn renew() {
        let api = API::new();
        let library = Library::load();
        let read_articles: Vec<&Article> = library.read.articles.values().collect();
        api.delete(read_articles);

        let api_list = api.retrieve()["list"].to_owned();
        let api_articles =
            match serde_json::from_value::<HashMap<String, serde_json::Value>>(api_list) {
                Ok(articles) => articles,
                Err(_) => HashMap::new(),
            };

        let new_inventory: HashMap<String, Article> = api_articles
            .into_iter()
            .map(|(id, data)| {
                (
                    id.to_string(),
                    Article {
                        id: id.to_owned(),
                        url: data["given_url"].as_str().unwrap().to_owned(),
                        title: data["resolved_title"].as_str().unwrap().to_owned(),
                    },
                )
            })
            .collect();

        let new_library = Library {
            read: Inventory::new(),
            unread: Inventory {
                articles: new_inventory,
            },
        };
        Library::write_inventory(&new_library);
        logger::log("Refreshed library");
    }
}
    fn move_to_read(article_id: String) {
        let mut library = Library::load();
        match library.unread.articles.remove(&article_id) {
            Some(read_article) => {
                library
                    .read
                    .articles
                    .insert(read_article.id.to_owned(), read_article.to_owned());
            }
            None => {}
        };
        Library::write_inventory(&library);
    }
function_block-full_function
[ { "content": "pub fn log(message: &str) -> &str {\n\n println!(\"[Pickpocket] {}\", message);\n\n message\n\n}\n", "file_path": "src/logger.rs", "rank": 0, "score": 50771.27213072561 }, { "content": "use crate::articles::article::Article;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Inventory {\n\n pub articles: HashMap<String, Article>,\n\n}\n\n\n\nimpl Inventory {\n\n pub fn new() -> Self {\n\n Self {\n\n articles: HashMap::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/articles/inventory.rs", "rank": 1, "score": 34954.55516173861 }, { "content": "fn main() {\n\n let matches =\n\n App::new(\"Pickpocket\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .author(\"Tiago Amaro <tiagopadrela@gmail.com>\")\n\n .about(\"Selects a random article from your Pocket (former Read It Later)\")\n\n .subcommand(\n\n SubCommand::with_name(\"oauth\")\n\n .about(\"1st authorization step: ask Pocket to allow Pickpocket app\"),\n\n )\n\n .subcommand(SubCommand::with_name(\"authorize\").about(\n\n \"2nd authorization step: allow Pickpocket read/write access to your library\",\n\n ))\n\n .subcommand(SubCommand::with_name(\"pick\").about(\n\n \"Picks a random article from your library (marking it as read)\",\n\n ).arg(\n\n Arg::with_name(\"quantity\").short(\"q\").help(\"Quantity of articles to open\").required(true).takes_value(true)\n\n ))\n\n .subcommand(SubCommand::with_name(\"renew\").about(\n\n \"Syncs your local library with your Pocket. It will delete read articles and download new articles from your library\",\n", "file_path": "src/main.rs", "rank": 10, "score": 22672.861993103692 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct Article {\n\n pub id: String,\n\n pub url: String,\n\n pub title: String,\n\n}\n", "file_path": "src/articles/article.rs", "rank": 11, "score": 21186.018180289637 }, { "content": "use crate::articles::article::Article;\n\nuse crate::authentication::token_handler::TokenHandler;\n\nuse crate::configuration::Configuration;\n\nuse crate::logger;\n\nuse serde_json::json;\n\n\n\nstatic ACTION_DELETE: &str = \"delete\";\n\nstatic STATE_UNREAD: &str = \"unread\";\n\n\n\npub struct API {\n\n configuration: Configuration,\n\n}\n\n\n\nimpl API {\n\n pub fn new() -> Self {\n\n Self {\n\n configuration: Default::default(),\n\n }\n\n }\n\n\n", "file_path": "src/articles/api.rs", "rank": 12, "score": 16105.944157370115 }, { "content": "pub mod api;\n\npub mod article;\n\npub mod inventory;\n\npub mod library;\n", "file_path": "src/articles/mod.rs", "rank": 13, "score": 16103.945916594634 }, { "content": " pub fn retrieve(&self) -> serde_json::Value {\n\n let token_handler = TokenHandler::new();\n\n let (consumer_key, pocket_retrieve_url, access_token) = (\n\n &self.configuration.consumer_key,\n\n &self.configuration.pocket_retrieve_url,\n\n &token_handler.read_auth(),\n\n );\n\n\n\n let params = [\n\n (\"consumer_key\", consumer_key),\n\n (\"access_token\", access_token),\n\n (\"state\", &STATE_UNREAD.to_owned()),\n\n ];\n\n let response = reqwest::Client::new()\n\n .post(pocket_retrieve_url)\n\n .form(&params)\n\n .send();\n\n\n\n match response {\n\n Ok(mut response) => {\n", "file_path": "src/articles/api.rs", "rank": 14, "score": 16102.778569312386 }, { "content": "\n\n let actions: serde_json::Value = articles\n\n .into_iter()\n\n .map(|article| {\n\n json!({\n\n \"action\": ACTION_DELETE,\n\n \"item_id\": article.id,\n\n 
})\n\n })\n\n .collect();\n\n\n\n let params = [\n\n (\"consumer_key\", consumer_key),\n\n (\"access_token\", access_token),\n\n (\"actions\", &actions.to_string()),\n\n ];\n\n let response = reqwest::Client::new()\n\n .post(pocket_send_url)\n\n .form(&params)\n\n .send();\n", "file_path": "src/articles/api.rs", "rank": 15, "score": 16100.95256022778 }, { "content": "\n\n match response {\n\n Ok(_) => {}\n\n Err(error) => {\n\n logger::log(&error.to_string());\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/articles/api.rs", "rank": 16, "score": 16100.502754709592 }, { "content": " let response_text = response.text().unwrap();\n\n let json: serde_json::Value = serde_json::from_str(&response_text).unwrap();\n\n\n\n json.to_owned()\n\n }\n\n Err(_) => {\n\n logger::log(\"Could not retrieve Pocket's data\");\n\n\n\n serde_json::Value::Null\n\n }\n\n }\n\n }\n\n\n\n pub fn delete(&self, articles: Vec<&Article>) {\n\n let token_handler = TokenHandler::new();\n\n let (consumer_key, pocket_send_url, access_token) = (\n\n &self.configuration.consumer_key,\n\n &self.configuration.pocket_send_url,\n\n &token_handler.read_auth(),\n\n );\n", "file_path": "src/articles/api.rs", "rank": 17, "score": 16100.396070881761 }, { "content": "# Pickpocket\n\n\n\n[![Build Status](https://travis-ci.com/tiagoamaro/pickpocket-rust.svg?branch=master)](https://travis-ci.com/tiagoamaro/pickpocket-rust)\n\n\n\nPickpocket is a command line tool which will help you with your [Pocket](http://getpocket.com/) library. It selects a random article for you to read, opening your browser and marking it is deleted. \n\n\n\n## Installation\n\n\n\nPickpocket is distributed as a Rust crate, also having its Linux, MacOS and Windows binaries released on this repository.\n\n\n\n### Binaries\n\n\n\nBinaries for all OSes are available at the \"releases page\": https://github.com/tiagoamaro/pickpocket-rust/releases.\n\n\n\n### cargo\n\n\n\nIf you wish to run this CLI using Rust's `cargo`:\n\n - Run `cargo install pickpocket`\n\n - Execute the binary through `cargo`'s default install path. Example: `~/.cargo/bin/pickpocket status`\n\n\n\n### Authentication\n\n\n\nTo use Pickpocket, you first need to go through Pocket's OAuth authentication process.\n\n\n\n1. Execute the `pickpocket oauth` command\n\n 1. This will open your web browser, asking you to approve Pickpocket's OAuth token\n\n2. Execute the `pickpocket authorize` command\n\n 1. This will authorize your OAuth token against Pocket, creating an authorization token\n\n \n\n### Usage\n\n\n\n- `pickpocket pick`\n\n - Selects a random article from your list, and open your browser with its resolved URL\n\n - Options: `-q`: quantity of articles to open. Examples:\n\n - `pickpocket pick -q 10` (open 10 articles)\n\n- `pickpocket renew`\n\n - This will synchronize your local library with your remote. 
Keep in mind: any article marked as read **WILL BE DELETED** from your remote library\n\n- `pickpocket status`\n\n - Show the number of read/unread articles you have on your local library\n\n\n\n## Pickpocket Files\n\n\n\nAll Pickpocket files are stored at the `~/.pickpocket` folder.\n\n\n\n- `library_file`\n\n - YAML file which stores your local library, marking articles as unread or read\n\n- `authorization_token`\n\n - File which stores your authorization token\n\n- `oauth_token`\n\n - File which stores your OAuth token\n\n\n", "file_path": "README.md", "rank": 18, "score": 11259.001715030381 }, { "content": "## Don't Trust Me?\n\n\n\nPickpocket ships with its own consumer key, which will ask for access to modify/retrieve your articles.\n\n \n\nIf you don't like this idea, you can use your own consumer key, setting up the `POCKET_CONSUMER_KEY` environment variable before calling it.\n\n\n\nExample:\n\n\n\n`POCKET_CONSUMER_KEY=\"my-consumer-key\" pickpocket oauth`\n\n \n\n> To know more about consumer keys and how Pocket deals with third party applications, read more on [Pocket's Authentication API documentation](https://getpocket.com/developer/docs/authentication). \n\n\n\n## License\n\n\n\nMIT\n", "file_path": "README.md", "rank": 19, "score": 11253.64132748559 }, { "content": "Copyright 2019 Tiago Padrela Amaro (tiagopadrela@gmail.com)\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "LICENSE.md", "rank": 20, "score": 11252.624676617645 }, { "content": " ))\n\n .subcommand(SubCommand::with_name(\"status\").about(\n\n \"Show the number of read/unread articles you have on your local library\",\n\n ))\n\n .get_matches();\n\n\n\n Library::guarantee_home_folder();\n\n\n\n match matches.subcommand() {\n\n (\"oauth\", _) => {\n\n OAuth::request_authorization();\n\n }\n\n (\"authorize\", _) => {\n\n OAuth::authorize();\n\n }\n\n (\"pick\", Some(pick_matches)) => {\n\n let quantity = pick_matches.value_of(\"quantity\").unwrap();\n\n\n\n match quantity.parse::<usize>() {\n\n Ok(quantity) => {\n", "file_path": "src/main.rs", "rank": 21, "score": 10.489723888204852 }, { "content": "use dirs;\n\nuse std::env;\n\nuse std::path::PathBuf;\n\n\n\npub struct Configuration {\n\n // Files\n\n pub authorization_token_file: PathBuf,\n\n pub home_folder: PathBuf,\n\n pub library_file: PathBuf,\n\n pub oauth_token_file: PathBuf,\n\n\n\n // Pocket\n\n pub consumer_key: String,\n\n pub pocket_homepage: String,\n\n pub pocket_oauth_authorize_url: String,\n\n pub pocket_oauth_request_url: String,\n\n pub pocket_retrieve_url: String,\n\n pub pocket_send_url: String,\n\n pub pocket_user_authorize_url: String,\n\n}\n", "file_path": "src/configuration.rs", "rank": 22, "score": 9.331867026896726 }, { "content": "}\n\n\n\nimpl Configuration {\n\n pub fn home_folder() -> PathBuf {\n\n dirs::home_dir().unwrap().join(\".pickpocket\")\n\n }\n\n\n\n pub fn default() -> Self {\n\n Self {\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::configuration::Configuration;\n\n\n\n #[test]\n\n fn allows_consumer_key_configuration_through_env() {\n", "file_path": "src/configuration.rs", "rank": 23, "score": 8.413501563298214 }, { "content": " match fs::read_to_string(path) {\n\n Ok(content) => content,\n\n Err(_) => {\n\n logger::log(message);\n\n \"no-token\".to_owned()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/authentication/token_handler.rs", "rank": 24, "score": 8.355894756588777 }, { "content": "use crate::configuration::Configuration;\n\nuse crate::logger;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n\npub struct TokenHandler {\n\n configuration: Configuration,\n\n}\n\n\n\nimpl TokenHandler {\n\n pub fn new() -> Self {\n\n Self {\n\n configuration: Default::default(),\n\n }\n\n }\n\n\n\n pub fn save_oauth(&self, token: &str) {\n\n self.save_token(&self.configuration.oauth_token_file, token)\n\n }\n\n\n", "file_path": "src/authentication/token_handler.rs", "rank": 25, "score": 8.214308575642836 }, { "content": "\n\nimpl Default for Configuration {\n\n fn default() -> Self {\n\n Configuration {\n\n // Files\n\n authorization_token_file: Self::home_folder().join(\"authorization_token\"),\n\n home_folder: Self::home_folder(),\n\n library_file: Self::home_folder().join(\"library_file\"),\n\n oauth_token_file: Self::home_folder().join(\"oauth_token\"),\n\n // Pocket\n\n consumer_key: env::var(\"POCKET_CONSUMER_KEY\")\n\n .unwrap_or(\"58132-f824d5fbf935681e22e86a3c\".to_owned()),\n\n pocket_homepage: \"https://getpocket.com\".to_owned(),\n\n pocket_oauth_authorize_url: \"https://getpocket.com/v3/oauth/authorize\".to_owned(),\n\n pocket_oauth_request_url: 
\"https://getpocket.com/v3/oauth/request\".to_owned(),\n\n pocket_retrieve_url: \"https://getpocket.com/v3/get\".to_owned(),\n\n pocket_send_url: \"https://getpocket.com/v3/send\".to_owned(),\n\n pocket_user_authorize_url: \"https://getpocket.com/auth/authorize\".to_owned(),\n\n }\n\n }\n", "file_path": "src/configuration.rs", "rank": 26, "score": 8.13975506001296 }, { "content": " pub fn save_auth(&self, token: &str) {\n\n self.save_token(&self.configuration.authorization_token_file, token)\n\n }\n\n\n\n pub fn read_auth(&self) -> String {\n\n self.read_token(&self.configuration.authorization_token_file, \"Authorization Token file does not exist. Make sure you request authorization before proceeding.\")\n\n }\n\n\n\n pub fn read_oauth(&self) -> String {\n\n self.read_token(&self.configuration.oauth_token_file, \"OAuth Token file does not exist. Make sure you request authorization before proceeding.\")\n\n }\n\n\n\n fn save_token(&self, path: &PathBuf, token: &str) {\n\n match fs::write(path, token) {\n\n Ok(_) => \"ok\",\n\n Err(_) => logger::log(\"Could not write to token file\"),\n\n };\n\n }\n\n\n\n fn read_token(&self, path: &PathBuf, message: &str) -> String {\n", "file_path": "src/authentication/token_handler.rs", "rank": 27, "score": 7.225153599967848 }, { "content": "mod articles;\n\nmod authentication;\n\nmod configuration;\n\nmod logger;\n\n\n\nuse articles::library::Library;\n\nuse authentication::oauth::OAuth;\n\nuse clap::{App, Arg, SubCommand};\n\n\n", "file_path": "src/main.rs", "rank": 28, "score": 6.4801091970910125 }, { "content": "use crate::authentication::token_handler::TokenHandler;\n\nuse crate::configuration::Configuration;\n\nuse crate::logger;\n\n\n\npub struct OAuth {}\n\n\n\nimpl OAuth {\n\n pub fn request_authorization() {\n\n let token_handler = TokenHandler::new();\n\n let configuration = Configuration::default();\n\n let (auth_url, oauth_url, consumer_key, pocket_homepage) = (\n\n &configuration.pocket_user_authorize_url,\n\n &configuration.pocket_oauth_request_url,\n\n &configuration.consumer_key,\n\n &configuration.pocket_homepage,\n\n );\n\n\n\n // Fetch Pocket OAuth token\n\n let params = [\n\n (\"consumer_key\", consumer_key),\n", "file_path": "src/authentication/oauth.rs", "rank": 29, "score": 5.3316866834459855 }, { "content": " (\"redirect_uri\", pocket_homepage),\n\n ];\n\n let response = reqwest::Client::new().post(oauth_url).form(&params).send();\n\n\n\n let response_token = match response {\n\n Ok(mut response) => {\n\n let response_text = response.text().unwrap();\n\n let mut parse = url::form_urlencoded::parse(response_text.as_bytes());\n\n\n\n let (_code, response_token) = parse.next().unwrap();\n\n response_token.to_string()\n\n }\n\n Err(_) => {\n\n logger::log(\"Could not connect to Pocket\");\n\n \"Error\".to_owned()\n\n }\n\n };\n\n\n\n // Open auth on browser\n\n let query_string = format!(\n", "file_path": "src/authentication/oauth.rs", "rank": 30, "score": 5.199795477699096 }, { "content": " // Request authorization token (with OAuth token + consumer key)\n\n let params = [(\"consumer_key\", consumer_key), (\"code\", &response_token)];\n\n let response = reqwest::Client::new().post(uri).form(&params).send();\n\n\n\n let response_token = match response {\n\n Ok(mut response) => {\n\n let response_text = response.text().unwrap();\n\n let mut parse = url::form_urlencoded::parse(response_text.as_bytes());\n\n\n\n let (_code, response_token) = parse.next().unwrap();\n\n response_token.to_string()\n\n }\n\n Err(_) => {\n\n 
logger::log(\"Could not connect to Pocket\");\n\n \"Error\".to_owned()\n\n }\n\n };\n\n\n\n // Save authentication token\n\n token_handler.save_auth(&response_token);\n\n }\n\n}\n", "file_path": "src/authentication/oauth.rs", "rank": 31, "score": 4.618453043767122 }, { "content": " Library::pick(Some(quantity));\n\n }\n\n Err(_) => {\n\n logger::log(\"You must provide a valid quantity\");\n\n }\n\n };\n\n }\n\n (\"renew\", _) => {\n\n Library::renew();\n\n Library::status();\n\n }\n\n (\"status\", _) => {\n\n Library::status();\n\n }\n\n _ => {\n\n logger::log(\"Option not found\");\n\n }\n\n };\n\n}\n", "file_path": "src/main.rs", "rank": 32, "score": 4.315568473180967 }, { "content": " \"request_token={}&redirect_uri={}\",\n\n response_token, pocket_homepage\n\n );\n\n let mut open_on_browser_url = url::Url::parse(auth_url).unwrap();\n\n open_on_browser_url.set_query(Some(&query_string));\n\n open::that(open_on_browser_url.into_string()).ok();\n\n\n\n // Save OAuth token on file\n\n token_handler.save_oauth(&response_token);\n\n }\n\n\n\n pub fn authorize() {\n\n let token_handler = TokenHandler::new();\n\n let configuration = Configuration::default();\n\n let (uri, consumer_key, response_token) = (\n\n &configuration.pocket_oauth_authorize_url,\n\n &configuration.consumer_key,\n\n &token_handler.read_oauth(),\n\n );\n\n\n", "file_path": "src/authentication/oauth.rs", "rank": 33, "score": 3.8283377957059606 }, { "content": " std::env::set_var(\"POCKET_CONSUMER_KEY\", \"my-super-pocket-consumer-key\");\n\n let config = Configuration::default();\n\n assert_eq!(\"my-super-pocket-consumer-key\", config.consumer_key);\n\n\n\n // Without the env var\n\n std::env::remove_var(\"POCKET_CONSUMER_KEY\");\n\n let config = Configuration::default();\n\n assert_eq!(\"58132-f824d5fbf935681e22e86a3c\", config.consumer_key);\n\n }\n\n}\n", "file_path": "src/configuration.rs", "rank": 34, "score": 2.5664844338637374 }, { "content": "pub mod oauth;\n\npub mod token_handler;\n", "file_path": "src/authentication/mod.rs", "rank": 35, "score": 2.3592033103742036 } ]
Rust
src/event.rs
vstojkovic/sapi-lite
202e96fd1cca47863f5eca2c9b5b82b7ea390d88
use windows as Windows;
use Windows::core::{implement, IUnknown};
use Windows::Win32::Foundation::PWSTR;
use Windows::Win32::Media::Speech::{
    ISpEventSource, ISpNotifySink, ISpObjectToken, ISpRecoResult, SPEI_END_INPUT_STREAM,
    SPEI_RECOGNITION, SPEI_RESERVED1, SPEI_RESERVED2, SPET_LPARAM_IS_OBJECT,
    SPET_LPARAM_IS_POINTER, SPET_LPARAM_IS_STRING, SPET_LPARAM_IS_TOKEN,
    SPET_LPARAM_IS_UNDEFINED, SPEVENT, SPEVENTENUM, SPEVENTLPARAMTYPE,
};

use crate::com_util::{next_elem, ComBox, MaybeWeak};
use crate::token::Token;
use crate::Result;

#[derive(Debug)]
pub(crate) enum Event {
    Recognition(ISpRecoResult),
    SpeechFinished(u32),
    OtherObject(IUnknown),
    OtherToken(Token),
    OtherString(ComBox<PWSTR>),
    OtherValue(ComBox<*const std::ffi::c_void>),
    Other,
}

impl Event {
    pub fn from_sapi(sapi_event: SPEVENT) -> Result<Self> {
        use Windows::core::{Abi, Interface};

        let id = SPEVENTENUM(sapi_event._bitfield & 0xffff);
        let lparam = sapi_event.lParam.0;
        match SPEVENTLPARAMTYPE(sapi_event._bitfield >> 16) {
            SPET_LPARAM_IS_OBJECT => {
                let intf = unsafe { IUnknown::from_abi(lparam as _) }?;
                match id {
                    SPEI_RECOGNITION => Ok(Self::Recognition(intf.cast()?)),
                    _ => Ok(Self::OtherObject(intf)),
                }
            }
            SPET_LPARAM_IS_POINTER => {
                Ok(Self::OtherValue(unsafe { ComBox::from_raw(lparam as _) }))
            }
            SPET_LPARAM_IS_STRING => Ok(Self::OtherString(unsafe {
                ComBox::from_raw(PWSTR(lparam as _))
            })),
            SPET_LPARAM_IS_TOKEN => Ok(Self::OtherToken(Token::from_sapi(unsafe {
                ISpObjectToken::from_abi(lparam as _)
            }?))),
            SPET_LPARAM_IS_UNDEFINED => match id {
                SPEI_END_INPUT_STREAM => Ok(Self::SpeechFinished(sapi_event.ulStreamNum)),
                _ => Ok(Self::Other),
            },
            _ => panic!("Unrecognized SPEVENTLPARAMTYPE value"),
        }
    }
}

pub(crate) struct EventSource {
    intf: MaybeWeak<ISpEventSource>,
}

impl EventSource {
    pub(crate) fn from_sapi(intf: ISpEventSource) -> Self {
        Self {
            intf: MaybeWeak::new(intf),
        }
    }

    pub(crate) fn next_event(&self) -> Result<Option<Event>> {
        Ok(
            match unsafe { next_elem(&*self.intf, ISpEventSource::GetEvents) }? {
                Some(sapi_event) => Some(Event::from_sapi(sapi_event)?),
                None => None,
            },
        )
    }

    fn downgrade(&mut self) {
        self.intf.set_weak(true);
    }
}

#[implement(Windows::Win32::Media::Speech::ISpNotifySink)]
pub(crate) struct EventSink {
    source: EventSource,
    handler: Box<dyn Fn(Event) -> Result<()>>,
}

#[allow(non_snake_case)]
impl EventSink {
    pub(crate) fn new<F: Fn(Event) -> Result<()> + 'static>(
        source: EventSource,
        handler: F,
    ) -> Self {
        Self {
            source,
            handler: Box::new(handler),
        }
    }

    pub(crate) fn install(self, interest: Option<&[SPEVENTENUM]>) -> Result<()> {
        use windows::core::ToImpl;

        let src_intf = self.source.intf.clone();
        let sink_intf: ISpNotifySink = self.into();
        unsafe { src_intf.SetNotifySink(&sink_intf) }?;
        unsafe { Self::to_impl(&sink_intf) }.source.downgrade();
        if let Some(flags) = interest {
            let mut flags_arg = (1u64 << SPEI_RESERVED1.0) | (1u64 << SPEI_RESERVED2.0);
            for flag in flags {
                flags_arg |= 1u64 << flag.0;
            }
            unsafe { src_intf.SetInterest(flags_arg, flags_arg) }?;
        }
        Ok(())
    }

    fn Notify(&self) -> Result<()> {
        while let Some(event) = self.source.next_event()? {
            (*self.handler)(event)?
        }
        Ok(())
    }
}
use windows as Windows;
use Windows::core::{implement, IUnknown};
use Windows::Win32::Fo
{
            source,
            handler: Box::new(handler),
        }
    }

    pub(crate) fn install(self, interest: Option<&[SPEVENTENUM]>) -> Result<()> {
        use windows::core::ToImpl;

        let src_intf = self.source.intf.clone();
        let sink_intf: ISpNotifySink = self.into();
        unsafe { src_intf.SetNotifySink(&sink_intf) }?;
        unsafe { Self::to_impl(&sink_intf) }.source.downgrade();
        if let Some(flags) = interest {
            let mut flags_arg = (1u64 << SPEI_RESERVED1.0) | (1u64 << SPEI_RESERVED2.0);
            for flag in flags {
                flags_arg |= 1u64 << flag.0;
            }
            unsafe { src_intf.SetInterest(flags_arg, flags_arg) }?;
        }
        Ok(())
    }

    fn Notify(&self) -> Result<()> {
        while let Some(event) = self.source.next_event()? {
            (*self.handler)(event)?
        }
        Ok(())
    }
}
undation::PWSTR;
use Windows::Win32::Media::Speech::{
    ISpEventSource, ISpNotifySink, ISpObjectToken, ISpRecoResult, SPEI_END_INPUT_STREAM,
    SPEI_RECOGNITION, SPEI_RESERVED1, SPEI_RESERVED2, SPET_LPARAM_IS_OBJECT,
    SPET_LPARAM_IS_POINTER, SPET_LPARAM_IS_STRING, SPET_LPARAM_IS_TOKEN,
    SPET_LPARAM_IS_UNDEFINED, SPEVENT, SPEVENTENUM, SPEVENTLPARAMTYPE,
};

use crate::com_util::{next_elem, ComBox, MaybeWeak};
use crate::token::Token;
use crate::Result;

#[derive(Debug)]
pub(crate) enum Event {
    Recognition(ISpRecoResult),
    SpeechFinished(u32),
    OtherObject(IUnknown),
    OtherToken(Token),
    OtherString(ComBox<PWSTR>),
    OtherValue(ComBox<*const std::ffi::c_void>),
    Other,
}

impl Event {
    pub fn from_sapi(sapi_event: SPEVENT) -> Result<Self> {
        use Windows::core::{Abi, Interface};

        let id = SPEVENTENUM(sapi_event._bitfield & 0xffff);
        let lparam = sapi_event.lParam.0;
        match SPEVENTLPARAMTYPE(sapi_event._bitfield >> 16) {
            SPET_LPARAM_IS_OBJECT => {
                let intf = unsafe { IUnknown::from_abi(lparam as _) }?;
                match id {
                    SPEI_RECOGNITION => Ok(Self::Recognition(intf.cast()?)),
                    _ => Ok(Self::OtherObject(intf)),
                }
            }
            SPET_LPARAM_IS_POINTER => {
                Ok(Self::OtherValue(unsafe { ComBox::from_raw(lparam as _) }))
            }
            SPET_LPARAM_IS_STRING => Ok(Self::OtherString(unsafe {
                ComBox::from_raw(PWSTR(lparam as _))
            })),
            SPET_LPARAM_IS_TOKEN => Ok(Self::OtherToken(Token::from_sapi(unsafe {
                ISpObjectToken::from_abi(lparam as _)
            }?))),
            SPET_LPARAM_IS_UNDEFINED => match id {
                SPEI_END_INPUT_STREAM => Ok(Self::SpeechFinished(sapi_event.ulStreamNum)),
                _ => Ok(Self::Other),
            },
            _ => panic!("Unrecognized SPEVENTLPARAMTYPE value"),
        }
    }
}

pub(crate) struct EventSource {
    intf: MaybeWeak<ISpEventSource>,
}

impl EventSource {
    pub(crate) fn from_sapi(intf: ISpEventSource) -> Self {
        Self {
            intf: MaybeWeak::new(intf),
        }
    }

    pub(crate) fn next_event(&self) -> Result<Option<Event>> {
        Ok(
            match unsafe { next_elem(&*self.intf, ISpEventSource::GetEvents) }? {
                Some(sapi_event) => Some(Event::from_sapi(sapi_event)?),
                None => None,
            },
        )
    }

    fn downgrade(&mut self) {
        self.intf.set_weak(true);
    }
}

#[implement(Windows::Win32::Media::Speech::ISpNotifySink)]
pub(crate) struct EventSink {
    source: EventSource,
    handler: Box<dyn Fn(Event) -> Result<()>>,
}

#[allow(non_snake_case)]
impl EventSink {
    pub(crate) fn new<F: Fn(Event) -> Result<()> + 'static>(
        source: EventSource,
        handler: F,
    ) -> Self {
        Self
random
[ { "content": "use std::ops::Deref;\n\n\n\nuse windows as Windows;\n\nuse Windows::core::Interface;\n\nuse Windows::Win32::Media::Speech::{SPEI_END_INPUT_STREAM, SPF_ASYNC};\n\n\n\nuse crate::event::{Event, EventSink, EventSource};\n\nuse crate::tts::Speech;\n\nuse crate::Result;\n\n\n\nuse super::Synthesizer;\n\n\n\n/// The handler [`EventfulSynthesizer`] will call.\n", "file_path": "src/tts/synthesizer/event.rs", "rank": 0, "score": 8.083915703577997 }, { "content": "use std::path::Path;\n\nuse std::ptr::null;\n\n\n\nuse windows as Windows;\n\nuse Windows::core::{GUID, HRESULT};\n\nuse Windows::Win32::Foundation::E_OUTOFMEMORY;\n\nuse Windows::Win32::Media::Speech::{\n\n ISpStream, SpStream, SPFILEMODE, SPFM_CREATE_ALWAYS, SPFM_OPEN_READONLY,\n\n};\n\nuse Windows::Win32::System::Com::{CoCreateInstance, IStream, CLSCTX_ALL};\n\nuse Windows::Win32::UI::Shell::SHCreateMemStream;\n\n\n\nuse crate::com_util::Intf;\n\nuse crate::Result;\n\n\n\nuse super::AudioFormat;\n\n\n\n/// An audio stream to read from or write to.\n\npub struct AudioStream {\n\n intf: Intf<ISpStream>,\n", "file_path": "src/audio/stream.rs", "rank": 1, "score": 8.067811064869508 }, { "content": "use std::ops::Deref;\n\n\n\nuse windows as Windows;\n\nuse Windows::core::Interface;\n\n\n\nuse crate::event::{Event, EventSink, EventSource};\n\nuse crate::stt::{Phrase, Recognizer};\n\nuse crate::Result;\n\n\n\nuse super::Context;\n\n\n\n/// The handler [`EventfulContext`] will call.\n", "file_path": "src/stt/context/event.rs", "rank": 2, "score": 8.019603872311611 }, { "content": "use std::ffi::OsString;\n\nuse std::os::windows::prelude::OsStringExt;\n\n\n\nuse windows as Windows;\n\nuse Windows::core::{IntoParam, Param};\n\nuse Windows::Win32::Foundation::PWSTR;\n\n\n\npub unsafe fn from_wide(s: &PWSTR) -> OsString {\n\n let len = (0..).take_while(|&i| *s.0.offset(i) != 0).count();\n\n let slice = std::slice::from_raw_parts(s.0, len);\n\n OsString::from_wide(slice)\n\n}\n\n\n", "file_path": "src/com_util/str.rs", "rank": 3, "score": 7.923757257676223 }, { "content": "use windows as Windows;\n\nuse Windows::core::IUnknown;\n\nuse Windows::Win32::Media::Speech::{ISpVoice, SpVoice};\n\nuse Windows::Win32::System::Com::{CoCreateInstance, CLSCTX_ALL};\n\n\n\nuse crate::audio::AudioStream;\n\nuse crate::com_util::{out_to_ret, Intf};\n\nuse crate::token::Token;\n\nuse crate::Result;\n\n\n\nuse super::{Rate, Speech, Voice, Volume};\n\n\n\nmod event;\n\nmod sync;\n\n\n\npub use event::{EventHandler, EventfulSynthesizer};\n\npub use sync::SyncSynthesizer;\n\n\n\n/// Specifies where the output of speech synthesis should go.\n\npub enum SpeechOutput {\n", "file_path": "src/tts/synthesizer/mod.rs", "rank": 4, "score": 7.863921990798287 }, { "content": "use std::ops::Deref;\n\nuse std::time::Duration;\n\n\n\nuse windows as Windows;\n\nuse Windows::Win32::Media::Speech::SPF_ASYNC;\n\nuse Windows::Win32::System::WindowsProgramming::INFINITE;\n\n\n\nuse crate::tts::Speech;\n\nuse crate::Result;\n\n\n\nuse super::Synthesizer;\n\n\n\n/// A speech synthesizer that blocks the current thread while rendering speech.\n\npub struct SyncSynthesizer {\n\n base: Synthesizer,\n\n}\n\n\n\nimpl SyncSynthesizer {\n\n /// Creates a new synthesizer, configured to output its speech to the default audio device.\n\n pub fn new() -> Result<Self> {\n", "file_path": "src/tts/synthesizer/sync.rs", "rank": 5, "score": 7.856932128152381 }, { "content": "use std::ffi::{OsStr, OsString};\n\nuse std::mem::ManuallyDrop;\n\n\n\nuse windows as Windows;\n\nuse 
Windows::core::{IntoParam, Param};\n\nuse Windows::Win32::Foundation::PWSTR;\n\nuse Windows::Win32::Media::Speech::{SPPHRASEPROPERTY, SPPROPERTYINFO};\n\nuse Windows::Win32::System::Com::{VARIANT, VARIANT_0, VARIANT_0_0, VARIANT_0_0_0};\n\nuse Windows::Win32::System::Ole::{VARENUM, VT_BOOL, VT_EMPTY, VT_I4, VT_R4, VT_R8};\n\n\n\nuse crate::com_util::from_wide;\n\n\n\nuse super::SemanticString;\n\n\n\n/// A value that forms part of the semantic information for a recognized phrase.\n\n#[derive(Debug, PartialEq, Clone)]\n\n#[allow(missing_docs)]\n\npub enum SemanticValue<S: SemanticString> {\n\n Bool(bool),\n\n Int(i32),\n", "file_path": "src/stt/semantics/value.rs", "rank": 6, "score": 7.811212411555769 }, { "content": "use std::ffi::OsString;\n\n\n\nuse windows as Windows;\n\nuse Windows::core::{IUnknown, IntoParam, Param};\n\nuse Windows::Win32::Foundation::PWSTR;\n\nuse Windows::Win32::Media::Speech::{\n\n IEnumSpObjectTokens, ISpObjectToken, ISpObjectTokenCategory, SpObjectToken,\n\n SpObjectTokenCategory,\n\n};\n\nuse Windows::Win32::System::Com::{CoCreateInstance, CLSCTX_ALL};\n\n\n\nuse crate::com_util::{from_wide, next_obj, opt_str_param, ComBox, Intf};\n\nuse crate::Result;\n\n\n\n#[derive(Debug)]\n\npub(crate) struct Token {\n\n intf: Intf<ISpObjectToken>,\n\n}\n\n\n\nimpl Token {\n", "file_path": "src/token.rs", "rank": 7, "score": 7.7001346854245885 }, { "content": "use std::ffi::c_void;\n\nuse std::ops::Deref;\n\n\n\nuse windows as Windows;\n\nuse Windows::Win32::Foundation::PWSTR;\n\nuse Windows::Win32::System::Com::CoTaskMemFree;\n\n\n\npub unsafe trait ComBuffer {\n\n fn as_ptr(&self) -> *const c_void;\n\n}\n\n\n\nunsafe impl<T> ComBuffer for *const T {\n\n fn as_ptr(&self) -> *const c_void {\n\n *self as _\n\n }\n\n}\n\n\n\nunsafe impl ComBuffer for PWSTR {\n\n fn as_ptr(&self) -> *const c_void {\n\n self.0 as _\n", "file_path": "src/com_util/mem.rs", "rank": 8, "score": 7.687143171878295 }, { "content": "//! Each context can have one or more grammars loaded into it. A grammar consists of one or more\n\n//! rules that define what phrases the engine can recognize. You can enable or disable the whole\n\n//! 
grammar, or individual rules in it by their name.\n\n\n\nuse std::sync::{Arc, Mutex};\n\n\n\nuse windows as Windows;\n\nuse Windows::core::IUnknown;\n\nuse Windows::Win32::Media::Speech::{\n\n ISpRecognizer, SpInprocRecognizer, SPRECOSTATE, SPRST_ACTIVE, SPRST_INACTIVE,\n\n};\n\nuse Windows::Win32::System::Com::{CoCreateInstance, CLSCTX_ALL};\n\n\n\nuse crate::audio::AudioStream;\n\nuse crate::com_util::Intf;\n\nuse crate::token::Category;\n\nuse crate::Result;\n\n\n\nmod context;\n\nmod grammar;\n", "file_path": "src/stt/mod.rs", "rank": 9, "score": 7.603994311667489 }, { "content": "use std::mem::ManuallyDrop;\n\nuse std::ptr::null_mut;\n\n\n\nuse windows as Windows;\n\nuse Windows::Win32::Media::Speech::{\n\n ISpRecoGrammar, SPGRAMMARSTATE, SPGS_DISABLED, SPGS_ENABLED, SPRS_ACTIVE, SPRS_INACTIVE,\n\n SPRULESTATE,\n\n};\n\n\n\nuse crate::com_util::Intf;\n\nuse crate::Result;\n\n\n\nuse super::RecognitionPauser;\n\n\n\nmod builder;\n\nmod rule;\n\n\n\npub use builder::GrammarBuilder;\n\npub use rule::{RepeatRange, Rule, RuleArena};\n\n\n", "file_path": "src/stt/grammar/mod.rs", "rank": 10, "score": 7.450904895636278 }, { "content": "use std::ops::Deref;\n\nuse std::time::Duration;\n\n\n\nuse windows as Windows;\n\nuse Windows::core::Interface;\n\n\n\nuse crate::event::{Event, EventSource};\n\nuse crate::stt::{Phrase, Recognizer};\n\nuse crate::Result;\n\n\n\nuse super::Context;\n\n\n\n/// A recognition context that blocks the current thread until the engine recognizes a phrase.\n\npub struct SyncContext {\n\n base: Context,\n\n event_src: EventSource,\n\n}\n\n\n\nimpl SyncContext {\n\n /// Creates a new recognition context for the given recognizer.\n", "file_path": "src/stt/context/sync.rs", "rank": 11, "score": 7.422863179999304 }, { "content": "use std::ffi::OsString;\n\nuse std::str::FromStr;\n\n\n\nuse windows::Win32::Foundation::PWSTR;\n\nuse windows::Win32::Globalization::{LCIDToLocaleName, LocaleNameToLCID};\n\nuse windows::Win32::System::SystemServices::LOCALE_NAME_MAX_LENGTH;\n\n\n\nuse super::from_wide;\n\n\n\npub struct Locale {\n\n lcid: u32,\n\n}\n\n\n\nimpl Locale {\n\n pub fn new(lcid: u32) -> Self {\n\n Self { lcid }\n\n }\n\n\n\n pub fn lcid(&self) -> u32 {\n\n self.lcid\n", "file_path": "src/com_util/locale.rs", "rank": 12, "score": 7.31812897723376 }, { "content": "use windows as Windows;\n\nuse Windows::Win32::Media::Speech::{ISpRecoContext, SPCS_DISABLED, SPCS_ENABLED};\n\n\n\nuse crate::com_util::Intf;\n\nuse crate::Result;\n\n\n\nuse super::{GrammarBuilder, RecognitionPauser};\n\n\n\nmod event;\n\nmod sync;\n\n\n\npub use event::{EventHandler, EventfulContext};\n\npub use sync::SyncContext;\n\n\n\n/// Provides the common API shared across different kinds of contexts.\n\npub struct Context {\n\n intf: Intf<ISpRecoContext>,\n\n pauser: RecognitionPauser,\n\n}\n\n\n", "file_path": "src/stt/context/mod.rs", "rank": 13, "score": 7.19169659534656 }, { "content": "use std::ffi::OsString;\n\nuse std::ptr::null_mut;\n\n\n\nuse windows as Windows;\n\nuse Windows::Win32::Media::Speech::{ISpRecoResult, SPPHRASE_50, SPPR_ALL_ELEMENTS};\n\n\n\nuse crate::com_util::{from_wide, out_to_ret, ComBox};\n\nuse crate::Result;\n\n\n\nuse super::SemanticTree;\n\n\n\n/// A successfully recognized phrase.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Phrase {\n\n /// The text of the recognized phrase.\n\n pub text: OsString,\n\n /// The semantic information associated with the phrase.\n\n pub semantics: Vec<SemanticTree>,\n\n}\n\n\n", "file_path": "src/stt/phrase.rs", "rank": 15, 
"score": 7.139632429202498 }, { "content": "use std::borrow::{Borrow, Cow};\n\n\n\nuse windows as Windows;\n\nuse Windows::Win32::Media::Speech::{SPF_IS_NOT_XML, SPF_IS_XML, SPF_PARSE_SAPI};\n\n\n\nmod builder;\n\nmod types;\n\n\n\npub use builder::SpeechBuilder;\n\npub use types::{Pitch, Rate, SayAs, Volume};\n\n\n\n/// A speech to be rendered by a synthesizer.\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub enum Speech<'s> {\n\n /// Plain text\n\n Text(Cow<'s, str>),\n\n /// XML-encoded speech\n\n Xml(Cow<'s, str>),\n\n}\n\n\n", "file_path": "src/tts/speech/mod.rs", "rank": 16, "score": 7.102726628044879 }, { "content": "use std::mem::MaybeUninit;\n\n\n\nuse windows as Windows;\n\nuse Windows::core::Interface;\n\n\n\nuse crate::Result;\n\n\n\npub unsafe fn next_elem<I, R>(\n\n intf: &I,\n\n f: unsafe fn(&I, u32, *mut R, *mut u32) -> Result<()>,\n\n) -> Result<Option<R>> {\n\n let mut result = MaybeUninit::uninit();\n\n let mut fetched = MaybeUninit::uninit();\n\n f(intf, 1, result.as_mut_ptr(), fetched.as_mut_ptr())?;\n\n Ok(if fetched.assume_init() > 0 {\n\n Some(result.assume_init())\n\n } else {\n\n None\n\n })\n\n}\n", "file_path": "src/com_util/iter.rs", "rank": 17, "score": 6.889282618891841 }, { "content": "use std::mem::{transmute_copy, ManuallyDrop};\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse windows as Windows;\n\nuse Windows::core::{IUnknown, Interface, IntoParam, Param};\n\n\n\n/// An incredibly dangerous wrapper around COM interface that should be used to avoid circular\n\n/// references, and only when you're sure that the target will be valid for at least as long as\n\n/// the wrapper lives.\n\npub struct MaybeWeak<I: Interface> {\n\n intf: ManuallyDrop<I>,\n\n is_weak: bool,\n\n}\n\n\n\nimpl<I: Interface> MaybeWeak<I> {\n\n pub fn new(intf: I) -> Self {\n\n Self {\n\n intf: ManuallyDrop::new(intf),\n\n is_weak: false,\n\n }\n", "file_path": "src/com_util/weak.rs", "rank": 18, "score": 6.814534813847543 }, { "content": "use std::ops::{Deref, DerefMut};\n\n\n\nuse windows as Windows;\n\nuse Windows::core::{Interface, IntoParam, Param};\n\n\n\n/// A zero-cost wrapper that makes a COM interface Send and Sync.\n\n#[derive(Debug)]\n\npub struct Intf<I: Interface>(pub I);\n\n\n\nunsafe impl<I: Interface> Send for Intf<I> {}\n\nunsafe impl<I: Interface> Sync for Intf<I> {}\n\n\n\nimpl<I: Interface> Deref for Intf<I> {\n\n type Target = I;\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<I: Interface> DerefMut for Intf<I> {\n", "file_path": "src/com_util/intf.rs", "rank": 19, "score": 6.788216044904362 }, { "content": "use std::ffi::OsString;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse windows as Windows;\n\nuse Windows::Win32::Media::Speech::SPPHRASEPROPERTY;\n\n\n\nuse super::SemanticValue;\n\n\n\n/// A tree of values that forms part of the semantic information for a recognized phrase.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct SemanticTree {\n\n /// The value at the root of this tree.\n\n pub value: SemanticValue<OsString>,\n\n /// The sub-trees that form this tree.\n\n pub children: Vec<SemanticTree>,\n\n}\n\n\n\nimpl SemanticTree {\n\n pub(crate) fn from_sapi(sapi_prop: Option<&SPPHRASEPROPERTY>) -> Vec<Self> {\n\n let mut result = Vec::new();\n", "file_path": "src/stt/semantics/tree.rs", "rank": 20, "score": 6.626713532545807 }, { "content": "use std::borrow::Cow;\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::hash::Hash;\n\nuse std::mem::ManuallyDrop;\n\nuse std::ptr::{null, null_mut};\n\n\n\nuse 
windows::core::HRESULT;\n\nuse windows::Win32::Media::Speech::{\n\n ISpRecoContext, ISpRecoGrammar, SPRAF_Active, SPRAF_TopLevel, SPSTATEHANDLE__, SPWT_LEXICAL,\n\n};\n\n\n\nuse crate::com_util::{opt_str_param, out_to_ret, Intf};\n\nuse crate::stt::semantics::SemanticProperty;\n\nuse crate::stt::{RecognitionPauser, SemanticValue};\n\nuse crate::Result;\n\n\n\nuse super::{grammar_state, rule_state, Grammar, RepeatRange, Rule};\n\n\n\n/// Helper type that constructs a grammar from a set of top-level rules.\n\n///\n", "file_path": "src/stt/grammar/builder.rs", "rank": 21, "score": 6.447968958521501 }, { "content": "//!\n\n//! This crate does not currently represent these COM references using Rust lifetimes. This was a\n\n//! deliberate design decision to keep the API and the code as simple as possible.\n\n\n\nuse std::ptr::null;\n\n\n\nuse windows::Win32::System::Com::{CoInitialize, CoUninitialize};\n\n\n\npub mod audio;\n\nmod com_util;\n\nmod event;\n\npub mod stt;\n\nmod token;\n\npub mod tts;\n\n\n\n#[cfg(feature = \"tokio\")]\n\npub mod tokio;\n\n\n\n/// The error type returned by SAPI functions and methods.\n\npub type Error = windows::core::Error;\n\n\n\n/// The type returned by SAPI functions and methods.\n\npub type Result<T> = windows::core::Result<T>;\n\n\n\n/// Initializes SAPI on the current thread. This function must be called for every thread that\n\n/// intends to use SAPI.\n", "file_path": "src/lib.rs", "rank": 22, "score": 6.082624634029416 }, { "content": "use windows as Windows;\n\nuse Windows::Win32::Media::Audio::{WAVEFORMATEX, WAVE_FORMAT_PCM};\n\n\n\n/// Sample rate, in samples per second, at which to play or record.\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]\n\n#[allow(missing_docs)]\n\npub enum SampleRate {\n\n Hz8000 = 8000,\n\n Hz11025 = 11025,\n\n Hz12000 = 12000,\n\n Hz16000 = 16000,\n\n Hz22050 = 22050,\n\n Hz24000 = 24000,\n\n Hz32000 = 32000,\n\n Hz44100 = 44100,\n\n Hz48000 = 48000,\n\n}\n\n\n\n/// How many bits each sample should have.\n\n#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]\n", "file_path": "src/audio/format.rs", "rank": 23, "score": 6.004307498808608 }, { "content": "use std::collections::HashMap;\n\nuse std::ops::Deref;\n\nuse std::sync::{Arc, Mutex};\n\n\n\nuse tokio::sync::oneshot::{channel, Receiver, Sender};\n\n\n\nuse crate::tts::{EventfulSynthesizer, Speech, Synthesizer};\n\nuse crate::Result;\n\n\n", "file_path": "src/tokio/tts.rs", "rank": 24, "score": 4.218669523254669 }, { "content": "use std::sync::{Arc, Mutex, MutexGuard};\n\n\n\nuse futures::{SinkExt, StreamExt};\n\nuse sapi_lite::stt::{Grammar, Recognizer, RuleArena};\n\nuse sapi_lite::tokio::{AsyncSynthesizer, UnicastContext};\n\nuse sapi_lite::tts::SpeechBuilder;\n\nuse tokio::net::{TcpListener, TcpStream};\n\nuse tokio::sync::mpsc;\n\nuse tokio_util::codec::{Framed, LinesCodec};\n\n\n", "file_path": "examples/milliways.rs", "rank": 25, "score": 4.143280248202797 }, { "content": "mod string;\n\nmod tree;\n\nmod value;\n\n\n\npub use string::SemanticString;\n\npub use tree::SemanticTree;\n\npub(crate) use value::SemanticProperty;\n\npub use value::SemanticValue;\n", "file_path": "src/stt/semantics/mod.rs", "rank": 26, "score": 4.00735764681491 }, { "content": "use std::ffi::OsString;\n\nuse std::str::FromStr;\n\n\n\nuse strum_macros::{EnumString, IntoStaticStr};\n\n\n\nuse crate::com_util::Locale;\n\nuse crate::token::{Category, Token};\n\nuse crate::Result;\n\n\n\n/// Specifies the age of a voice.\n\n#[derive(Debug, EnumString, 
IntoStaticStr)]\n\n#[strum(ascii_case_insensitive)]\n\n#[allow(missing_docs)]\n\npub enum VoiceAge {\n\n Adult,\n\n Child,\n\n Senior,\n\n Teen,\n\n}\n\n\n", "file_path": "src/tts/voice.rs", "rank": 27, "score": 3.9451994463498394 }, { "content": "use std::mem::MaybeUninit;\n\n\n\nuse crate::Result;\n\n\n\nmod intf;\n\nmod iter;\n\nmod locale;\n\nmod mem;\n\nmod str;\n\nmod weak;\n\n\n\npub use self::intf::Intf;\n\npub use self::iter::{next_elem, next_obj};\n\npub use self::locale::Locale;\n\npub use self::mem::ComBox;\n\npub use self::str::{from_wide, opt_str_param};\n\npub use self::weak::MaybeWeak;\n\n\n\npub unsafe fn out_to_ret<T, F: FnOnce(*mut T) -> Result<()>>(f: F) -> Result<T> {\n\n let mut result = MaybeUninit::uninit();\n\n f(result.as_mut_ptr())?;\n\n Ok(result.assume_init())\n\n}\n", "file_path": "src/com_util/mod.rs", "rank": 28, "score": 3.923852219921105 }, { "content": "//! This showcases all the different rules you can define in a speech recognition grammar.\n\n\n\nuse std::collections::HashMap;\n\nuse std::time::Duration;\n\n\n\nuse sapi_lite::stt::{Recognizer, Rule, SyncContext};\n\n\n", "file_path": "examples/grammar.rs", "rank": 29, "score": 3.8162050916434405 }, { "content": "use std::ffi::OsString;\n\nuse std::time::{Duration, Instant};\n\n\n\nuse sapi_lite::audio::{AudioFormat, AudioStream, BitRate, Channels, MemoryStream, SampleRate};\n\nuse sapi_lite::stt::{\n\n Context, Grammar, Phrase, RecognitionInput, Recognizer, Rule, SemanticTree, SemanticValue,\n\n SyncContext,\n\n};\n\nuse sapi_lite::tts::{Speech, SpeechOutput, SyncSynthesizer};\n\n\n\n#[test]\n", "file_path": "tests/round_trip.rs", "rank": 30, "score": 3.79310253003381 }, { "content": "//! This example shows how to synthesize speech and write the audio to a file instead of playing it\n\n//! on the default audio device.\n\n\n\nuse std::env;\n\nuse std::io::{self, Write};\n\n\n\nuse sapi_lite::audio::{AudioFormat, AudioStream, BitRate, Channels, SampleRate};\n\nuse sapi_lite::tts::{SpeechOutput, SyncSynthesizer};\n\n\n", "file_path": "examples/stream_tts.rs", "rank": 31, "score": 3.776457922499538 }, { "content": "//! A bare-bones speech recognition example.\n\n\n\nuse std::time::Duration;\n\n\n\nuse sapi_lite::stt::{Recognizer, Rule, SyncContext};\n\n\n", "file_path": "examples/hello_stt.rs", "rank": 32, "score": 3.663600427111663 }, { "content": "//! This example shows how to recognize speech in an audio file instead of listening to the default\n\n//! recording device.\n\n//!\n\n//! Note that an easy alternative to recording your own voice to an audio file is to run the\n\n//! 
`stream_tts` example and then use the file it produced.\n\n\n\nuse std::env;\n\nuse std::time::Duration;\n\n\n\nuse sapi_lite::audio::{AudioFormat, AudioStream, BitRate, Channels, SampleRate};\n\nuse sapi_lite::stt::{Context, Grammar, RecognitionInput, Recognizer, Rule, SyncContext};\n\n\n\n// Create a grammar with the phrases for the engine to recognize.\n", "file_path": "examples/stream_stt.rs", "rank": 33, "score": 3.6626602119973453 }, { "content": "use std::ops::Deref;\n\n\n\nuse tokio::sync::broadcast::error::RecvError;\n\nuse tokio::sync::{broadcast, mpsc};\n\n\n\nuse crate::stt::{Context, EventfulContext, Phrase, Recognizer};\n\nuse crate::Result;\n\n\n\n/// A subscriber that can be awaited for recognized phrases.\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"tokio-stt\")))]\n\npub struct UnicastSubscriber {\n\n rx: mpsc::Receiver<Phrase>,\n\n}\n\n\n\nimpl UnicastSubscriber {\n\n /// Completes when the engine recognizes a phrase.\n\n pub async fn recognize(&mut self) -> Phrase {\n\n self.rx.recv().await.unwrap()\n\n }\n\n}\n", "file_path": "src/tokio/stt.rs", "rank": 34, "score": 3.6626602119973453 }, { "content": "//! This examples shows how to iterate over the available TTS voices.\n\n\n\nuse std::io::{self, Write};\n\n\n\nuse sapi_lite::tts::{installed_voices, SyncSynthesizer, Voice};\n\n\n", "file_path": "examples/voices.rs", "rank": 36, "score": 3.6016552442176737 }, { "content": "use std::borrow::Cow;\n\n\n\nuse typed_arena::Arena;\n\n\n\nuse crate::stt::SemanticValue;\n\n\n\nuse super::{RepeatRange, Rule};\n\n\n\n/// Allocation arena for grammar rules.\n\n///\n\n/// Provides an easy way to allocate [`Rule`] instances while building a grammar. All of the rules\n\n/// owned by the arena will be dropped when the arena itself is dropped.\n\npub struct RuleArena<'a> {\n\n arena: Arena<Rule<'a>>,\n\n}\n\n\n\nimpl<'a> RuleArena<'a> {\n\n /// Construct a new arena.\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "src/stt/grammar/rule/arena.rs", "rank": 37, "score": 3.5857439641144544 }, { "content": "//! A more comprehensive example of how to use `sapi-lite`.\n\n//!\n\n//! This example listens for TCP connections on a given address and port. Every peer that connects\n\n//! is treated like a guest at Milliways, the fictional \"Restaurant at the End of the Universe\" from\n\n//! the \"Hitchhiker's Guide to the Galaxy\" by Douglas Adams.\n\n//!\n\n//! Each guest has to identify themselves by their name, which has to be unique across all currently\n\n//! connected peers. The guest can then order items from the restaurant's menu, or leave when they\n\n//! are done.\n\n//!\n\n//! Each arrival, departure, and order are announced on the computer running the Milliways server,\n\n//! using `sapi-lite` TTS features. The Milliways server listens to the default audio device on the\n\n//! computer, and can recognize voice commands phrased as \"serve <item> to <guest>\", where <item>\n\n//! is one of the dishes or beverages on the menu and <guest> is the name of a guest. The outcome of\n\n//! each command is also announced using TTS.\n\n\n\nuse std::collections::HashMap;\n\nuse std::env;\n\nuse std::error::Error;\n\nuse std::fmt::Write;\n", "file_path": "examples/milliways.rs", "rank": 38, "score": 3.5746777331149757 }, { "content": "use std::borrow::Cow;\n\nuse std::ffi::{OsStr, OsString};\n\n\n\n/// A string type that can be borrowed as an [`OsStr`](std::ffi::OsStr).\n", "file_path": "src/stt/semantics/string.rs", "rank": 39, "score": 3.5714616072664684 }, { "content": "//! 
An example that shows how to use `SpeechBuilder` to create a speech that can be rendered\n\n//! multiple times. It also showcases some of the commands that control the speech rendering, as\n\n//! well as their interactions with the current configuration of the speech synthesizer.\n\n\n\nuse std::time::Duration;\n\n\n\nuse sapi_lite::tts::{SpeechBuilder, SyncSynthesizer, VoiceGender, VoiceSelector};\n\n\n", "file_path": "examples/speech.rs", "rank": 40, "score": 3.52176549063761 }, { "content": "mod speech;\n\nmod synthesizer;\n\nmod voice;\n\n\n\npub use self::speech::{Pitch, Rate, SayAs, Speech, SpeechBuilder, Volume};\n\npub use self::synthesizer::{\n\n EventHandler, EventfulSynthesizer, SpeechOutput, SyncSynthesizer, Synthesizer,\n\n};\n\npub use self::voice::{installed_voices, Voice, VoiceAge, VoiceGender, VoiceSelector};\n", "file_path": "src/tts/mod.rs", "rank": 41, "score": 3.5024631465801788 }, { "content": "use std::fmt;\n\nuse std::time::Duration;\n\n\n\nuse xml::writer::XmlEvent;\n\nuse xml::{EmitterConfig, EventWriter};\n\n\n\nuse crate::tts::{Voice, VoiceSelector};\n\n\n\nuse super::{Pitch, Rate, SayAs, Speech, Volume};\n\n\n\n/// Helper type that can construct a [`Speech`] from a sequence of rendering instructions.\n\n///\n\n/// It's important to understand that the instructions do not override the configuration of the\n\n/// synthesizer, but adjust them instead. For example, if you call `set_volume(80)` on the\n\n/// synthesizer, and your speech starts with the instruction `start_volume(50)`, the volume at that\n\n/// point will be set to 40 (i.e. 50% of 80%).\n\n///\n\n/// NOTE: Although any complex speech is encoded as XML, the builder performs no validation. This is\n\n/// because SAPI itself is very lax when processing speech. For example, SAPI will be perfectly\n\n/// happy to render the following XML:\n\n/// ```xml\n\n/// <emph><volume level=\"50\">Hello</emph>world</volume>\n\n/// ```\n\npub struct SpeechBuilder {\n\n state: SpeechBuilderState,\n\n}\n\n\n", "file_path": "src/tts/speech/builder.rs", "rank": 42, "score": 3.4734334151228357 }, { "content": "use std::borrow::Cow;\n\nuse std::ops::{RangeInclusive, RangeToInclusive};\n\n\n\nuse crate::stt::SemanticValue;\n\n\n\nmod arena;\n\n\n\npub use arena::RuleArena;\n\n\n\n/// A rule that defines one or more phrases or fragments that can be recognized by the engine.\n\n#[derive(Debug)]\n\npub enum Rule<'a> {\n\n /// A sequence of words\n\n Text(Cow<'a, str>),\n\n /// A set of rules to choose from\n\n Choice(Cow<'a, [&'a Rule<'a>]>),\n\n /// A sequence of rules that must be recognized in order\n\n Sequence(Cow<'a, [&'a Rule<'a>]>),\n\n /// A rule repeated a certain number of times\n\n Repeat(RepeatRange, &'a Rule<'a>),\n", "file_path": "src/stt/grammar/rule/mod.rs", "rank": 43, "score": 3.4270956382698516 }, { "content": "//! 
Support for streaming audio to and from files and memory buffers.\n\n\n\nmod format;\n\nmod stream;\n\n\n\npub use format::{AudioFormat, BitRate, Channels, SampleRate};\n\npub use stream::{AudioStream, MemoryStream};\n", "file_path": "src/audio/mod.rs", "rank": 45, "score": 3.373495232423766 }, { "content": "mod phrase;\n\nmod semantics;\n\n\n\npub use context::{Context, EventHandler, EventfulContext, SyncContext};\n\npub use grammar::{Grammar, GrammarBuilder, RepeatRange, Rule, RuleArena};\n\npub use phrase::Phrase;\n\npub use semantics::{SemanticString, SemanticTree, SemanticValue};\n\n\n\n/// Specifies where the input for speech recognition should come from.\n\npub enum RecognitionInput {\n\n /// Listen to the default recording device on the system\n\n Default,\n\n /// Read from the given stream\n\n Stream(AudioStream),\n\n}\n\n\n\nimpl RecognitionInput {\n\n fn to_sapi(self) -> Result<IUnknown> {\n\n Ok(match self {\n\n Self::Default => {\n", "file_path": "src/stt/mod.rs", "rank": 46, "score": 3.3595321081659266 }, { "content": "//! This example showcases how to use event-based speech recognition. The speech recognition engine\n\n//! is configured to recognize the word \"half\". Every time the word is recognized, the engine will\n\n//! call the given callback, which will then increment a counter. When the user signals they are\n\n//! finished reading, the program will print out the value of the counter.\n\n\n\nuse std::io::{stdin, BufRead};\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::sync::Arc;\n\n\n\nuse sapi_lite::stt::{EventfulContext, Recognizer, Rule};\n\n\n\nconst INSTRUCTIONS: &'static str = r#\"\n\nChoose a text and read it out loud. For example:\n\n\n\n\"I don't know half of you half as well as I should like;\n\nand I like less than half of you half as well as you deserve.\"\n\n\n\nWhen you're done, press ENTER to see how many times you said the word \"half\".\n\n\"#;\n\n\n", "file_path": "examples/events.rs", "rank": 47, "score": 3.3514719029394473 }, { "content": "/// The transmitter end of the channel used to serve food and drink to a specific guest.\n\ntype GuestTx = mpsc::UnboundedSender<&'static str>;\n\n\n", "file_path": "examples/milliways.rs", "rank": 48, "score": 3.27117177646144 }, { "content": "//! A bare-bones TTS example.\n\n\n\nuse sapi_lite::tts::SyncSynthesizer;\n\n\n", "file_path": "examples/hello_tts.rs", "rank": 49, "score": 3.221696831486491 }, { "content": "/// Initializes SAPI on the current thread. This function must be called for every thread that\n\n/// intends to use SAPI.\n\npub fn initialize() -> Result<()> {\n\n unsafe { CoInitialize(null()) }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 50, "score": 3.1271048320032846 }, { "content": "// #![cfg_attr(docsrs, doc(cfg(any(feature = \"tokio-rt\", feature = \"tokio-stt\", feature = \"tokio-tts\"))))]\n\n//! 
Support for async operations running on Tokio.\n\n\n\n#[cfg(feature = \"tokio-rt\")]\n\nmod rt;\n\n#[cfg(feature = \"tokio-stt\")]\n\nmod stt;\n\n#[cfg(feature = \"tokio-tts\")]\n\nmod tts;\n\n\n\n#[cfg(feature = \"tokio-rt\")]\n\npub use rt::BuilderExt;\n\n#[cfg(feature = \"tokio-stt\")]\n\npub use stt::{\n\n BroadcastContext, BroadcastResult, BroadcastSubscriber, UnicastContext, UnicastSubscriber,\n\n};\n\n#[cfg(feature = \"tokio-tts\")]\n\npub use tts::AsyncSynthesizer;\n", "file_path": "src/tokio/mod.rs", "rank": 51, "score": 2.9937961722574795 }, { "content": "# `sapi-lite`\n\n\n\n**A simplified wrapper around Microsoft's Speech API (SAPI) library.**\n\n\n\nThe goal of this crate is to expose a subset of SAPI features in a way that is easy to use in Rust.\n\nIt does not aim to provide the full set of features SAPI offers. Since the focus is on relative ease\n\nof use (compared to dealing with COM directly) and simplicity of API, many SAPI features are missing\n\nin this crate.\n\n\n\n## Example\n\n\n\n```rust\n\nuse sapi_lite::stt::{Recognizer, Rule, SyncContext};\n\nuse sapi_lite::tts::{SyncSynthesizer};\n\nuse std::time::Duration;\n\n\n\nsapi_lite::initialize().unwrap();\n\n\n\nlet synth = SyncSynthesizer::new().unwrap();\n\nsynth\n\n .speak(\"The Doors of Durin, Lord of Moria. Speak, friend, and enter.\", None)\n\n .unwrap();\n\n\n\nlet recog = Recognizer::new().unwrap();\n\nlet ctx = SyncContext::new(&recog).unwrap();\n\n\n\nlet grammar = ctx\n\n .grammar_builder()\n\n .add_rule(&Rule::text(\"friend\"))\n\n .build()\n\n .unwrap();\n\ngrammar.set_enabled(true).unwrap();\n\n\n\nif let Some(phrase) = ctx.recognize(Duration::from_secs(5)).unwrap() {\n\n println!(\n\n \"The gate swings open. Welcome to Moria, {}.\",\n\n phrase.text.to_string_lossy()\n\n );\n\n} else {\n\n println!(\"The gate to Moria remains shut.\")\n\n}\n\n\n\nsapi_lite::finalize();\n\n```\n", "file_path": "README.md", "rank": 52, "score": 2.8958460558070955 }, { "content": "#![warn(missing_docs)]\n\n#![cfg_attr(docsrs, feature(doc_cfg))]\n\n\n\n//! A simplified wrapper around Microsoft's Speech API (SAPI) library.\n\n//!\n\n//! # Features\n\n//!\n\n//! The goal of this crate is to expose a subset of SAPI features in a way that is easy to use in\n\n//! Rust. It does not aim to provide the full set of features SAPI offers. Since the focus is on\n\n//! relative ease of use (compared to dealing with COM directly) and simplicity of API, many\n\n//! SAPI features are missing in this crate.\n\n//!\n\n//! ## Text-to-speech\n\n//!\n\n//! The [tts] module provides the API to render text as a speech using one or more SAPI voices\n\n//! installed on the system.\n\n//!\n\n//! To generate speech, you first need to create an instance of one of the\n\n//! available synthesizer types. Which synthesizer you choose will depend on whether you want to\n\n//! block the execution while the speech is synthesized or not.\n", "file_path": "src/lib.rs", "rank": 53, "score": 2.8219309483167807 }, { "content": "use std::fmt::Display;\n\nuse std::hash::Hash;\n\n\n\n/// Provides a hint about how to pronounce the associated content.\n\n#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)]\n\npub enum SayAs<'s> {\n\n /// Pronounce a sequence of numbers as a date, e.g. \"03/08/2000\" as \"march eighth two thousand\".\n\n DateMDY,\n\n /// Pronounce a sequence of numbers as a date, e.g. \"03/08/2000\" as \"august third two thousand\".\n\n DateDMY,\n\n /// Pronounce a sequence of numbers as a date, e.g. 
\"2000/08/03\" as \"march eighth two thousand\".\n\n DateYMD,\n\n /// Pronounce a sequence of numbers as a year and month, e.g. \"2000/03\" as \"march two thousand\".\n\n DateYM,\n\n /// Pronounce a sequence of numbers as a month and year, e.g. \"03/2000\" as \"march two thousand\".\n\n DateMY,\n\n /// Pronounce a sequence of numbers as a day and month, e.g. \"03/08\" as \"march eighth\".\n\n DateDM,\n\n /// Pronounce a sequence of numbers as a month and day, e.g. \"03/08\" as \"august third\".\n\n DateMD,\n", "file_path": "src/tts/speech/types.rs", "rank": 54, "score": 2.2506913670968713 }, { "content": " &Rule::text(\"set\"),\n\n // The word \"the\" is optional, so we'll say it can be repeated 0..=1 times.\n\n &Rule::repeat(..=1, &Rule::text(\"the\")),\n\n // Which color does the user want to change, foreground or background? We'll use\n\n // the associated semantic value as a key in a hash map.\n\n &Rule::choice(vec![\n\n &Rule::semantic(\"bg\", &Rule::text(\"background\")),\n\n &Rule::semantic(\"fg\", &Rule::text(\"foreground\")),\n\n ]),\n\n // The word \"color\" is also optional.\n\n &Rule::repeat(..=1, &Rule::text(\"color\")),\n\n // But the word \"to\" is mandatory.\n\n &Rule::text(\"to\"),\n\n // We want to offer 8 possible colors, so we'll use a choice to represent that.\n\n // Each alternative will be a semantic rule that maps an RGB integer value to\n\n // a text rule with the name of that color.\n\n &Rule::choice(vec![\n\n &Rule::semantic(0x000000, &Rule::text(\"black\")),\n\n &Rule::semantic(0x0000ff, &Rule::text(\"blue\")),\n\n &Rule::semantic(0x00ff00, &Rule::text(\"green\")),\n", "file_path": "examples/grammar.rs", "rank": 55, "score": 2.15942487957659 }, { "content": " }\n\n\n\n pub fn name(&self) -> OsString {\n\n let mut buffer: [u16; LOCALE_NAME_MAX_LENGTH as _] = [0; LOCALE_NAME_MAX_LENGTH as _];\n\n unsafe {\n\n LCIDToLocaleName(\n\n self.lcid,\n\n PWSTR(&mut buffer[0]),\n\n LOCALE_NAME_MAX_LENGTH as _,\n\n 0,\n\n );\n\n from_wide(&PWSTR(&mut buffer[0]))\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Locale {\n\n type Err = windows::core::Error;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let lcid = unsafe { LocaleNameToLCID(s, 0) };\n\n if lcid != 0 {\n\n Ok(Self::new(lcid))\n\n } else {\n\n Err(Self::Err::from_win32())\n\n }\n\n }\n\n}\n", "file_path": "src/com_util/locale.rs", "rank": 57, "score": 1.9493806445075856 }, { "content": " .0 as _\n\n }\n\n\n\n fn to_variant_union(&self) -> VARIANT_0_0_0 {\n\n match self {\n\n SemanticValue::Bool(b) => VARIANT_0_0_0 {\n\n // per https://docs.microsoft.com/en-us/windows/win32/api/oaidl/ns-oaidl-variant\n\n // 0 is false and 0xffff is true\n\n boolVal: -(*b as i16),\n\n },\n\n SemanticValue::Int(i) => VARIANT_0_0_0 { lVal: *i },\n\n SemanticValue::Float(f) => VARIANT_0_0_0 { fltVal: *f },\n\n SemanticValue::Double(d) => VARIANT_0_0_0 { dblVal: *d },\n\n SemanticValue::String(_) => Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl SemanticValue<OsString> {\n\n pub(super) fn from_sapi(property: &SPPHRASEPROPERTY) -> Result<Self, VARENUM> {\n", "file_path": "src/stt/semantics/value.rs", "rank": 58, "score": 1.8675548573049574 }, { "content": " self.synthesizer.speak_and_forget(speech).unwrap();\n\n }\n\n\n\n /// Update the recognition grammar to reflect the list of guests.\n\n fn update_grammar(&self, mut guests: MutexGuard<Guests>) {\n\n // If we had an active grammar, deactivate it.\n\n if let Some(old_grammar) = guests.grammar.take() {\n\n old_grammar.set_enabled(false).unwrap();\n\n }\n\n\n\n // If we don't have 
any guests right now, we don't need a grammar\n\n if guests.map.is_empty() {\n\n return;\n\n }\n\n\n\n // Since the grammar is a graph of rules referring to each other, we need to allocate the\n\n // rules somewhere. We can use a `RuleArena` for that.\n\n let arena = RuleArena::new();\n\n\n\n let mut name_choices = Vec::new();\n", "file_path": "examples/milliways.rs", "rank": 59, "score": 1.8033812702637402 }, { "content": " }\n\n\n\n /// Queues up the rendering of the given speech and forgets about it.\n\n ///\n\n /// Note that this function can be used from both async and synchronous code. The speech will\n\n /// be rendered, but there is no way to await its completion.\n\n pub fn speak_and_forget<'s, S: Into<Speech<'s>>>(&self, speech: S) -> Result<()> {\n\n let id = self.base.speak(speech)?;\n\n let _ = self.awaiter_for_speech_id(id);\n\n Ok(())\n\n }\n\n\n\n fn awaiter_for_speech_id(&self, id: u32) -> Option<Receiver<()>> {\n\n let mut map = self.pending_speeches.lock().unwrap();\n\n if let Some(PendingSpeech::Finished) = map.remove(&id) {\n\n return None;\n\n }\n\n let (tx, rx) = channel();\n\n map.insert(id, PendingSpeech::Waiting(tx));\n\n Some(rx)\n", "file_path": "src/tokio/tts.rs", "rank": 60, "score": 1.7443110253613656 }, { "content": " })\n\n }\n\n\n\n /// Returns the default speech volume for this synthesizer.\n\n pub fn volume(&self) -> Result<Volume> {\n\n unsafe { out_to_ret(|out| self.intf.GetVolume(out)) }.map(Volume::from_sapi)\n\n }\n\n\n\n /// Sets the default rate of speech for this synthesizer.\n\n pub fn set_rate<R: Into<Rate>>(&self, rate: R) -> Result<()> {\n\n unsafe { self.intf.SetRate(rate.into().value()) }\n\n }\n\n\n\n /// Sets the default voice this synthesizer will use to render speech.\n\n pub fn set_voice(&self, voice: &Voice) -> Result<()> {\n\n unsafe { self.intf.SetVoice(&voice.token) }\n\n }\n\n\n\n /// Sets the default speech volume for this synthesizer.\n\n pub fn set_volume<V: Into<Volume>>(&self, volume: V) -> Result<()> {\n", "file_path": "src/tts/synthesizer/mod.rs", "rank": 61, "score": 1.7024869572215215 }, { "content": " /// Adds an unnamed top-level rule to the grammar.\n\n pub fn add_rule(&mut self, rule: &'a Rule<'a>) -> &mut Self {\n\n self.top_rules.insert(RuleRef(rule));\n\n self\n\n }\n\n\n\n /// Adds a top-level rule with the given name to the grammar. The name can be used to enable\n\n /// or disable the rule.\n\n pub fn add_named_rule<S: Into<Cow<'a, str>>>(\n\n &mut self,\n\n name: S,\n\n rule: &'a Rule<'a>,\n\n ) -> &mut Self {\n\n self.add_rule(rule);\n\n self.rule_names.insert(RuleRef(rule), name.into());\n\n self\n\n }\n\n\n\n /// Builds the grammar from the given rules and loads it into the recognition context. The\n\n /// newly loaded grammar must be enabled before the engine will start recognizing phrases from\n", "file_path": "src/stt/grammar/builder.rs", "rank": 62, "score": 1.6370659432652515 }, { "content": "/// A top-level rule defines one or more phrases that can be recognized. The rules contained in or\n\n/// referenced by a top-level rule will not be recognized as phrases, unless they are also added as\n\n/// top-level rules. 
For example, consider the following top-level rule definition:\n\n/// ```\n\n/// # use sapi_lite::stt::Rule;\n\n/// Rule::sequence(vec![\n\n/// &Rule::text(\"good\"),\n\n/// &Rule::choice(vec![\n\n/// &Rule::text(\"morning\"),\n\n/// &Rule::text(\"evening\"),\n\n/// ])\n\n/// ])\n\n/// # ;\n\n/// ```\n\n/// Given this rule, the engine will only recognize the phrases \"good morning\" and \"good evening\",\n\n/// but not \"good\", \"morning\", or \"evening\".\n\npub struct GrammarBuilder<'a> {\n\n intf: Intf<ISpRecoContext>,\n\n pauser: RecognitionPauser,\n\n top_rules: HashSet<RuleRef<'a>>,\n", "file_path": "src/stt/grammar/builder.rs", "rank": 63, "score": 1.5649049205586678 }, { "content": "impl Synthesizer {\n\n fn new() -> Result<Self> {\n\n unsafe { CoCreateInstance(&SpVoice, None, CLSCTX_ALL) }\n\n .map(|intf| Self { intf: Intf(intf) })\n\n }\n\n\n\n /// Configures the synthesizer to render its speech to the given output destination.\n\n pub fn set_output(&self, output: SpeechOutput, allow_fmt_changes: bool) -> Result<()> {\n\n unsafe { self.intf.SetOutput(output.to_sapi(), allow_fmt_changes) }\n\n }\n\n\n\n /// Returns the default rate of speech for this synthesizer.\n\n pub fn rate(&self) -> Result<Rate> {\n\n unsafe { out_to_ret(|out| self.intf.GetRate(out)) }.map(Rate::new)\n\n }\n\n\n\n /// Returns the default voice this synthesizer will use to render speech.\n\n pub fn voice(&self) -> Result<Voice> {\n\n unsafe { self.intf.GetVoice() }.map(|intf| Voice {\n\n token: Token::from_sapi(intf),\n", "file_path": "src/tts/synthesizer/mod.rs", "rank": 64, "score": 1.4988369601565386 }, { "content": "//!\n\n//! To configure which spoken phrases the engine can recognize, you need to define one or more\n\n//! grammars. A grammar is a set of rules that define what word structures form recognizable\n\n//! phrases.\n\n//!\n\n//! A grammar must be loaded into a recognition context before the engine can recognize the phrases\n\n//! in it. Which context type you choose will depend on whether you want to block the execution\n\n//! while waiting for a phrase to be recognized or not.\n\n//!\n\n//! # COM and Lifetime of SAPI Types\n\n//!\n\n//! Microsoft SAPI is a COM library. All COM objects and interfaces use reference counting to\n\n//! control their lifetime. Many types in this crate wrap these COM objects. As such, when you drop\n\n//! an instance of one of these types, it doesn't mean that the underlying COM object will be\n\n//! destroyed.\n\n//!\n\n//! For example, if you have a [`Recognizer`](stt::Recognizer), a [`SyncContext`](stt::SyncContext),\n\n//! and a [`Grammar`](stt::Grammar), dropping the `Recognizer` while the `SyncContext` or the\n\n//! `Grammar` are still alive will *not* destroy the underlying recognition engine with its\n\n//! associated contexts and rules.\n", "file_path": "src/lib.rs", "rank": 65, "score": 1.4381216030289972 } ]
Rust
src/udp_mux/mod.rs
webrtc-rs/ice
ebdf3e3b6f431f0e5e59ca0be9f61d563743fb45
use std::{collections::HashMap, io::ErrorKind, net::SocketAddr, sync::Arc}; use util::{sync::RwLock, Conn, Error}; use async_trait::async_trait; use tokio::sync::{watch, Mutex}; mod udp_mux_conn; use udp_mux_conn::{UDPMuxConn, UDPMuxConnParams}; #[cfg(test)] mod udp_mux_test; mod socket_addr_ext; use stun::{ attributes::ATTR_USERNAME, message::{is_message as is_stun_message, Message as STUNMessage}, }; use crate::candidate::RECEIVE_MTU; fn normalize_socket_addr(target: &SocketAddr, socket_addr: &SocketAddr) -> SocketAddr { match (target, socket_addr) { (SocketAddr::V4(target_ipv4), SocketAddr::V6(_)) => { let ipv6_mapped = target_ipv4.ip().to_ipv6_mapped(); SocketAddr::new(std::net::IpAddr::V6(ipv6_mapped), target_ipv4.port()) } (_, _) => *target, } } #[async_trait] pub trait UDPMux { async fn close(&self) -> Result<(), Error>; async fn get_conn(self: Arc<Self>, ufrag: &str) -> Result<Arc<dyn Conn + Send + Sync>, Error>; async fn remove_conn_by_ufrag(&self, ufrag: &str); } pub struct UDPMuxParams { conn: Box<dyn Conn + Send + Sync>, } impl UDPMuxParams { pub fn new<C>(conn: C) -> Self where C: Conn + Send + Sync + 'static, { Self { conn: Box::new(conn), } } } pub struct UDPMuxDefault { params: UDPMuxParams, conns: Mutex<HashMap<String, UDPMuxConn>>, address_map: RwLock<HashMap<SocketAddr, UDPMuxConn>>, closed_watch_tx: Mutex<Option<watch::Sender<()>>>, closed_watch_rx: watch::Receiver<()>, } impl UDPMuxDefault { pub fn new(params: UDPMuxParams) -> Arc<Self> { let (closed_watch_tx, closed_watch_rx) = watch::channel(()); let mux = Arc::new(Self { params, conns: Mutex::default(), address_map: RwLock::default(), closed_watch_tx: Mutex::new(Some(closed_watch_tx)), closed_watch_rx: closed_watch_rx.clone(), }); let cloned_mux = Arc::clone(&mux); cloned_mux.start_conn_worker(closed_watch_rx); mux } pub async fn is_closed(&self) -> bool { self.closed_watch_tx.lock().await.is_none() } async fn send_to(&self, buf: &[u8], target: &SocketAddr) -> Result<usize, Error> { self.params .conn .send_to(buf, *target) .await .map_err(Into::into) } async fn create_muxed_conn(self: &Arc<Self>, ufrag: &str) -> Result<UDPMuxConn, Error> { let local_addr = self.params.conn.local_addr().await?; let params = UDPMuxConnParams { local_addr, key: ufrag.into(), udp_mux: Arc::clone(self), }; Ok(UDPMuxConn::new(params)) } async fn register_conn_for_address(&self, conn: &UDPMuxConn, addr: SocketAddr) { if self.is_closed().await { return; } let key = conn.key(); { let mut addresses = self.address_map.write(); addresses .entry(addr) .and_modify(|e| { if e.key() != key { e.remove_address(&addr); *e = conn.clone() } }) .or_insert_with(|| conn.clone()); } log::debug!("Registered {} for {}", addr, key); } async fn conn_from_stun_message(&self, buffer: &[u8], addr: &SocketAddr) -> Option<UDPMuxConn> { let (result, message) = { let mut m = STUNMessage::new(); (m.unmarshal_binary(buffer), m) }; match result { Err(err) => { log::warn!("Failed to handle decode ICE from {}: {}", addr, err); None } Ok(_) => { let (attr, found) = message.attributes.get(ATTR_USERNAME); if !found { log::warn!("No username attribute in STUN message from {}", &addr); return None; } let s = match String::from_utf8(attr.value) { Err(err) => { log::warn!( "Failed to decode USERNAME from STUN message as UTF-8: {}", err ); return None; } Ok(s) => s, }; let conns = self.conns.lock().await; let conn = s .split(':') .next() .and_then(|ufrag| conns.get(ufrag)) .map(Clone::clone); conn } } } fn start_conn_worker(self: Arc<Self>, mut closed_watch_rx: 
watch::Receiver<()>) { tokio::spawn(async move { let mut buffer = [0u8; RECEIVE_MTU]; loop { let loop_self = Arc::clone(&self); let conn = &loop_self.params.conn; tokio::select! { res = conn.recv_from(&mut buffer) => { match res { Ok((len, addr)) => { let conn = { let address_map = loop_self .address_map .read(); address_map.get(&addr).map(Clone::clone) }; let conn = match conn { None if is_stun_message(&buffer) => { loop_self.conn_from_stun_message(&buffer, &addr).await } s @ Some(_) => s, _ => None, }; match conn { None => { log::trace!("Dropping packet from {}", &addr); } Some(conn) => { if let Err(err) = conn.write_packet(&buffer[..len], addr).await { log::error!("Failed to write packet: {}", err); } } } } Err(Error::Io(err)) if err.0.kind() == ErrorKind::TimedOut => continue, Err(err) => { log::error!("Could not read udp packet: {}", err); break; } } } _ = closed_watch_rx.changed() => { return; } } } }); } } #[async_trait] impl UDPMux for UDPMuxDefault { async fn close(&self) -> Result<(), Error> { if self.is_closed().await { return Err(Error::ErrAlreadyClosed); } let mut closed_tx = self.closed_watch_tx.lock().await; if let Some(tx) = closed_tx.take() { let _ = tx.send(()); drop(closed_tx); let old_conns = { let mut conns = self.conns.lock().await; std::mem::take(&mut (*conns)) }; for (_, conn) in old_conns.into_iter() { conn.close(); } { let mut address_map = self.address_map.write(); let _ = std::mem::take(&mut (*address_map)); } } Ok(()) } async fn get_conn(self: Arc<Self>, ufrag: &str) -> Result<Arc<dyn Conn + Send + Sync>, Error> { if self.is_closed().await { return Err(Error::ErrUseClosedNetworkConn); } { let mut conns = self.conns.lock().await; if let Some(conn) = conns.get(ufrag) { return Ok(Arc::new(conn.clone()) as Arc<dyn Conn + Send + Sync>); } let muxed_conn = self.create_muxed_conn(ufrag).await?; let mut close_rx = muxed_conn.close_rx(); let cloned_self = Arc::clone(&self); let cloned_ufrag = ufrag.to_string(); tokio::spawn(async move { let _ = close_rx.changed().await; cloned_self.remove_conn_by_ufrag(&cloned_ufrag).await; }); conns.insert(ufrag.into(), muxed_conn.clone()); Ok(Arc::new(muxed_conn) as Arc<dyn Conn + Send + Sync>) } } async fn remove_conn_by_ufrag(&self, ufrag: &str) { let removed_conn = { let mut conns = self.conns.lock().await; conns.remove(ufrag) }; if let Some(conn) = removed_conn { let mut address_map = self.address_map.write(); for address in conn.get_addresses() { address_map.remove(&address); } } } }
use std::{collections::HashMap, io::ErrorKind, net::SocketAddr, sync::Arc}; use util::{sync::RwLock, Conn, Error}; use async_trait::async_trait; use tokio::sync::{watch, Mutex}; mod udp_mux_conn; use udp_mux_conn::{UDPMuxConn, UDPMuxConnParams}; #[cfg(test)] mod udp_mux_test; mod socket_addr_ext; use stun::{ attributes::ATTR_USERNAME, message::{is_message as is_stun_message, Message as STUNMessage}, }; use crate::candidate::RECEIVE_MTU; fn normalize_socket_addr(target: &SocketAddr, socket_addr: &SocketAddr) -> SocketAddr { match (target, socket_addr) { (SocketAddr::V4(target_ipv4), SocketAddr::V6(_)) => { let ipv6_mapped = target_ipv4.ip().to_ipv6_mapped(); SocketAddr::new(std::net::IpAddr::V6(ipv6_mapped), target_ipv4.port()) } (_, _) => *target, } } #[async_trait] pub trait UDPMux { async fn close(&self) -> Result<(), Error>; async fn get_conn(self: Arc<Self>, ufrag: &str) -> Result<Arc<dyn Conn + Send + Sync>, Error>; async fn remove_conn_by_ufrag(&self, ufrag: &str); } pub struct UDPMuxParams { conn: Box<dyn Conn + Send + Sync>, } impl UDPMuxParams { pub fn new<C>(conn: C) -> Self where C: Conn + Send + Sync + 'static, { Self { conn: Box::new(conn), } } } pub struct UDPMuxDefault { params: UDPMuxParams, conns: Mutex<HashMap<String, UDPMuxConn>>, address_map: RwLock<HashMap<SocketAddr, UDPMuxConn>>, closed_watch_tx: Mutex<Option<watch::Sender<()>>>, closed_watch_rx: watch::Receiver<()>, } impl UDPMuxDefault { pub fn new(params: UDPMuxParams) -> Arc<Self> { let (closed_watch_tx, closed_watch_rx) = watch::channel(()); let mux = Arc::new(Self { params, conns: Mutex::default(), address_map: RwLock::default(), closed_watch_tx: Mutex::new(Some(closed_watch_tx)), closed_watch_rx: closed_watch_rx.clone(), }); let cloned_mux = Arc::clone(&mux); cloned_mux.start_conn_worker(closed_watch_rx); mux } pub async fn is_closed(&self) -> bool { self.closed_watch_tx.lock().await.is_none() } async fn send_to(&self, buf: &[u8], target: &SocketAddr) -> Result<usize, Error> { self.params .conn .send_to(buf, *target) .await .map_err(Into::into) } async fn create_muxed_conn(self: &Arc<Self>, ufrag: &str) -> Result<UDPMuxConn, Error> { let local_addr = self.params.conn.local_addr().await?; let params = UDPMuxConnParams { local_addr, key: ufrag.into(), udp_mux: Arc::clone(self), }; Ok(UDPMuxConn::new(params)) } async fn register_conn_for_address(&self, conn: &UDPMuxConn, addr: SocketAddr) { if self.is_closed().await { return; } let key = conn.key(); { let mut addresses = self.address_map.write(); addresses .entry(addr) .and_modify(|e| { if e.key() != key { e.remove_address(&addr); *e = conn.clone() } }) .or_insert_with(|| conn.clone()); } log::debug!("Registered {} for {}", addr, key); } async fn conn_from_stun_message(&self, buffer: &[u8], addr: &SocketAddr) -> Option<UDPMuxConn> { let (result, message) = { let mut m = STUNMessage::new(); (m.unmarshal_binary(buffer), m) }; match result { Err(err) => { log::warn!("Failed to handle decode ICE from {}: {}", addr, err); None } Ok(_) => { let (attr, found) = message.attributes.get(ATTR_USERNAME); if !found { log::warn!("No username attribute in STUN message from {}", &addr); return None; } let s = match String::from_utf8(attr.value) { Err(err) => { log::warn!( "Failed to decode USERNAME from STUN message as UTF-8: {}", err ); return None; } Ok(s) => s, }; let conns = self.conns.lock().await; let conn = s .split(':') .next() .and_then(|ufrag| conns.get(ufrag)) .map(Clone::clone); conn } } } fn start_conn_worker(self: Arc<Self>, mut closed_watch_rx: 
watch::Receiver<()>) { tokio::spawn(async move { let mut buffer = [0u8; RECEIVE_MTU]; loop { let loop_self = Arc::clone(&self); let conn = &loop_self.params.conn; tokio::select! { res = conn.recv_from(&mut buffer) => { match res { Ok((len, addr)) => { let conn = { let address_map = loop_self .address_map .read(); address_map.get(&addr).map(Clone::clone) }; let conn = match conn { None if is_stun_message(&buffer) => { loop_self.conn_from_stun_message(&buffer, &addr).await } s @ Some(_) => s, _ => None, }; match conn { None => { log::trace!("Dropping packet from {}", &addr); } Some(conn) => { if let Err(err) = conn.write_packet(&buffer[..len], addr).await { log::error!("Failed to write packet: {}", err); } } } } Err(Error::Io(err)) if err.0.kind() == ErrorKind::TimedOut => continue, Err(err) => { log::error!("Could not read udp packet: {}", err); break; } } } _ = closed_watch_rx.changed() => { return; } } } }); } } #[async_trait] impl UDPMux for UDPMuxDefault { async fn close(&self) -> Result<(), Error> { if self.is_closed().await { return Err(Error::ErrAlreadyClosed); } let mut closed_tx = self.closed_watch_tx.lock().await; if let Some(tx) = closed_tx.take() { let _ = tx.send(()); drop(closed_tx); let old_conns = { let mut conns = self.conns.lock().await; std::mem::take(&mut (*conns)) };
async fn get_conn(self: Arc<Self>, ufrag: &str) -> Result<Arc<dyn Conn + Send + Sync>, Error> { if self.is_closed().await { return Err(Error::ErrUseClosedNetworkConn); } { let mut conns = self.conns.lock().await; if let Some(conn) = conns.get(ufrag) { return Ok(Arc::new(conn.clone()) as Arc<dyn Conn + Send + Sync>); } let muxed_conn = self.create_muxed_conn(ufrag).await?; let mut close_rx = muxed_conn.close_rx(); let cloned_self = Arc::clone(&self); let cloned_ufrag = ufrag.to_string(); tokio::spawn(async move { let _ = close_rx.changed().await; cloned_self.remove_conn_by_ufrag(&cloned_ufrag).await; }); conns.insert(ufrag.into(), muxed_conn.clone()); Ok(Arc::new(muxed_conn) as Arc<dyn Conn + Send + Sync>) } } async fn remove_conn_by_ufrag(&self, ufrag: &str) { let removed_conn = { let mut conns = self.conns.lock().await; conns.remove(ufrag) }; if let Some(conn) = removed_conn { let mut address_map = self.address_map.write(); for address in conn.get_addresses() { address_map.remove(&address); } } } }
for (_, conn) in old_conns.into_iter() { conn.close(); } { let mut address_map = self.address_map.write(); let _ = std::mem::take(&mut (*address_map)); } } Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn assert_inbound_message_integrity(m: &mut Message, key: &[u8]) -> Result<()> {\n\n let message_integrity_attr = MessageIntegrity(key.to_vec());\n\n Ok(message_integrity_attr.check(m)?)\n\n}\n\n\n\n/// Initiates a stun requests to `server_addr` using conn, reads the response and returns the\n\n/// `XORMappedAddress` returned by the stun server.\n\n/// Adapted from stun v0.2.\n\npub async fn get_xormapped_addr(\n\n conn: &Arc<dyn Conn + Send + Sync>,\n\n server_addr: SocketAddr,\n\n deadline: Duration,\n\n) -> Result<XorMappedAddress> {\n\n let resp = stun_request(conn, server_addr, deadline).await?;\n\n let mut addr = XorMappedAddress::default();\n\n addr.get_from(&resp)?;\n\n Ok(addr)\n\n}\n\n\n\nconst MAX_MESSAGE_SIZE: usize = 1280;\n", "file_path": "src/util/mod.rs", "rank": 0, "score": 359971.1622121617 }, { "content": "pub fn assert_inbound_username(m: &Message, expected_username: &str) -> Result<()> {\n\n let mut username = Username::new(ATTR_USERNAME, String::new());\n\n username.get_from(m)?;\n\n\n\n if username.to_string() != expected_username {\n\n return Err(Error::Other(format!(\n\n \"{:?} expected({}) actual({})\",\n\n Error::ErrMismatchUsername,\n\n expected_username,\n\n username,\n\n )));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 1, "score": 305372.90866712184 }, { "content": "fn verify_packet(buffer: &[u8], next_sequence: u32) {\n\n let read_sequence = u32::from_le_bytes(buffer[0..4].try_into().unwrap());\n\n assert_eq!(read_sequence, next_sequence);\n\n\n\n let hash = sha1_hash(&buffer[24..]);\n\n assert_eq!(hash, buffer[4..24]);\n\n}\n\n\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 2, "score": 304227.81640810106 }, { "content": "/// Create a buffer of appropriate size to fit both a packet with max RECEIVE_MTU and the\n\n/// additional metadata used for muxing.\n\nfn make_buffer() -> Vec<u8> {\n\n // The 4 extra bytes are used to encode the length of the data and address respectively.\n\n // See [`write_packet`] for details.\n\n vec![0u8; RECEIVE_MTU + MAX_ADDR_SIZE + 2 + 2]\n\n}\n\n\n\npub(crate) struct UDPMuxConnParams {\n\n pub(super) local_addr: SocketAddr,\n\n\n\n pub(super) key: String,\n\n\n\n // NOTE: This Arc exists in both directions which is liable to cause a retain cycle. 
This is\n\n // accounted for in [`UDPMuxDefault::close`], which makes sure to drop all Arcs referencing any\n\n // `UDPMuxConn`.\n\n pub(super) udp_mux: Arc<UDPMuxDefault>,\n\n}\n\n\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 5, "score": 271368.47979343944 }, { "content": "fn sha1_hash(buffer: &[u8]) -> Vec<u8> {\n\n let mut hasher = Sha1::new();\n\n hasher.update(&buffer[24..]);\n\n\n\n hasher.finalize().to_vec()\n\n}\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 6, "score": 258306.00514297004 }, { "content": "type ConnResult<T> = Result<T, util::Error>;\n\n\n\n#[async_trait]\n\nimpl Conn for UDPMuxConn {\n\n async fn connect(&self, _addr: SocketAddr) -> ConnResult<()> {\n\n Err(io::Error::new(io::ErrorKind::Other, \"Not applicable\").into())\n\n }\n\n\n\n async fn recv(&self, _buf: &mut [u8]) -> ConnResult<usize> {\n\n Err(io::Error::new(io::ErrorKind::Other, \"Not applicable\").into())\n\n }\n\n\n\n async fn recv_from(&self, buf: &mut [u8]) -> ConnResult<(usize, SocketAddr)> {\n\n self.inner.recv_from(buf).await\n\n }\n\n\n\n async fn send(&self, _buf: &[u8]) -> ConnResult<usize> {\n\n Err(io::Error::new(io::ErrorKind::Other, \"Not applicable\").into())\n\n }\n\n\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 7, "score": 240461.59389813856 }, { "content": "fn build_msg(c: MessageClass, username: String, key: String) -> Result<Message> {\n\n let mut msg = Message::new();\n\n msg.build(&[\n\n Box::new(MessageType::new(METHOD_BINDING, c)),\n\n Box::new(TransactionId::new()),\n\n Box::new(Username::new(ATTR_USERNAME, username)),\n\n Box::new(MessageIntegrity::new_short_term_integrity(key)),\n\n Box::new(FINGERPRINT),\n\n ])?;\n\n Ok(msg)\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_inbound_validity() -> Result<()> {\n\n /*env_logger::Builder::new()\n\n .format(|buf, record| {\n\n writeln!(\n\n buf,\n\n \"{}:{} [{}] {} - {}\",\n\n record.file().unwrap_or(\"unknown\"),\n", "file_path": "src/agent/agent_test.rs", "rank": 8, "score": 224908.84198137012 }, { "content": "struct UDPMuxConnInner {\n\n pub(super) params: UDPMuxConnParams,\n\n\n\n /// Close Sender. 
We'll send a value on this channel when we close\n\n closed_watch_tx: Mutex<Option<watch::Sender<bool>>>,\n\n\n\n /// Remote addresses we've seen on this connection.\n\n addresses: Mutex<HashSet<SocketAddr>>,\n\n\n\n buffer: Buffer,\n\n}\n\n\n\nimpl UDPMuxConnInner {\n\n // Sending/Recieving\n\n async fn recv_from(&self, buf: &mut [u8]) -> ConnResult<(usize, SocketAddr)> {\n\n // NOTE: Pion/ice uses Sync.Pool to optimise this.\n\n let mut buffer = make_buffer();\n\n let mut offset = 0;\n\n\n\n let len = self.buffer.read(&mut buffer, None).await?;\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 9, "score": 208354.33480042132 }, { "content": "/// ICE user fragment.\n\n/// This internally uses `generate_crypto_random_string`.\n\npub fn generate_ufrag() -> String {\n\n generate_crypto_random_string(LEN_UFRAG, RUNES_ALPHA)\n\n}\n", "file_path": "src/rand/mod.rs", "rank": 10, "score": 189209.45998903606 }, { "content": "#[test]\n\nfn test_use_candidate_attr_add_to() -> Result<()> {\n\n let mut m = Message::new();\n\n assert!(!UseCandidateAttr::is_set(&m), \"should not be set\");\n\n\n\n m.build(&[Box::new(BINDING_REQUEST), Box::new(UseCandidateAttr::new())])?;\n\n\n\n let mut m1 = Message::new();\n\n m1.write(&m.raw)?;\n\n\n\n assert!(UseCandidateAttr::is_set(&m1), \"should be set\");\n\n\n\n Ok(())\n\n}\n", "file_path": "src/use_candidate/use_candidate_test.rs", "rank": 11, "score": 188733.51112438366 }, { "content": "pub fn create_addr(_network: NetworkType, ip: IpAddr, port: u16) -> SocketAddr {\n\n /*if network.is_tcp(){\n\n return &net.TCPAddr{IP: ip, Port: port}\n\n default:\n\n return &net.UDPAddr{IP: ip, Port: port}\n\n }*/\n\n SocketAddr::new(ip, port)\n\n}\n\n\n", "file_path": "src/util/mod.rs", "rank": 12, "score": 179144.7509497508 }, { "content": "//TODO: generates a random string for cryptographic usage.\n\npub fn generate_crypto_random_string(n: usize, runes: &[u8]) -> String {\n\n let mut rng = thread_rng();\n\n\n\n let rand_string: String = (0..n)\n\n .map(|_| {\n\n let idx = rng.gen_range(0..runes.len());\n\n runes[idx] as char\n\n })\n\n .collect();\n\n\n\n rand_string\n\n}\n\n\n", "file_path": "src/rand/mod.rs", "rank": 13, "score": 178367.24417748774 }, { "content": "struct GatherCandidatesLocalUDPMuxParams {\n\n network_types: Vec<NetworkType>,\n\n interface_filter: Arc<Option<InterfaceFilterFn>>,\n\n ext_ip_mapper: Arc<Option<ExternalIpMapper>>,\n\n net: Arc<Net>,\n\n agent_internal: Arc<AgentInternal>,\n\n udp_mux: Arc<dyn UDPMux + Send + Sync>,\n\n}\n\n\n", "file_path": "src/agent/agent_gather.rs", "rank": 14, "score": 167319.22835315546 }, { "content": "#[async_trait]\n\npub trait Candidate: fmt::Display {\n\n /// An arbitrary string used in the freezing algorithm to\n\n /// group similar candidates. 
It is the same for two candidates that\n\n /// have the same type, base IP address, protocol (UDP, TCP, etc.),\n\n /// and STUN or TURN server.\n\n fn foundation(&self) -> String;\n\n\n\n /// A unique identifier for just this candidate\n\n /// Unlike the foundation this is different for each candidate.\n\n fn id(&self) -> String;\n\n\n\n /// A component is a piece of a data stream.\n\n /// An example is one for RTP, and one for RTCP\n\n fn component(&self) -> u16;\n\n fn set_component(&self, c: u16);\n\n\n\n /// The last time this candidate received traffic\n\n fn last_received(&self) -> SystemTime;\n\n\n\n /// The last time this candidate sent traffic\n", "file_path": "src/candidate/mod.rs", "rank": 15, "score": 152884.40249607983 }, { "content": "/// Generates ICE pwd.\n\n/// This internally uses `generate_crypto_random_string`.\n\npub fn generate_pwd() -> String {\n\n generate_crypto_random_string(LEN_PWD, RUNES_ALPHA)\n\n}\n\n\n", "file_path": "src/rand/mod.rs", "rank": 16, "score": 150429.73271619974 }, { "content": "/// https://tools.ietf.org/html/rfc5245#section-15.1\n\n/// candidate-id = \"candidate\" \":\" foundation\n\n/// foundation = 1*32ice-char\n\n/// ice-char = ALPHA / DIGIT / \"+\" / \"/\"\n\npub fn generate_cand_id() -> String {\n\n format!(\n\n \"candidate:{}\",\n\n generate_crypto_random_string(32, RUNES_CANDIDATE_ID_FOUNDATION)\n\n )\n\n}\n\n\n", "file_path": "src/rand/mod.rs", "rank": 17, "score": 147530.29259434025 }, { "content": "struct MockPacketConn;\n\n\n\n#[async_trait]\n\nimpl Conn for MockPacketConn {\n\n async fn connect(&self, _addr: SocketAddr) -> std::result::Result<(), util::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn recv(&self, _buf: &mut [u8]) -> std::result::Result<usize, util::Error> {\n\n Ok(0)\n\n }\n\n\n\n async fn recv_from(\n\n &self,\n\n _buf: &mut [u8],\n\n ) -> std::result::Result<(usize, SocketAddr), util::Error> {\n\n Ok((0, SocketAddr::new(Ipv4Addr::new(0, 0, 0, 0).into(), 0)))\n\n }\n\n\n\n async fn send(&self, _buf: &[u8]) -> std::result::Result<usize, util::Error> {\n", "file_path": "src/agent/agent_test.rs", "rank": 18, "score": 140617.9817299074 }, { "content": "#[test]\n\nfn test_network_type_is_udp() -> Result<()> {\n\n assert!(NetworkType::Udp4.is_udp());\n\n assert!(NetworkType::Udp6.is_udp());\n\n assert!(!NetworkType::Udp4.is_tcp());\n\n assert!(!NetworkType::Udp6.is_tcp());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/network_type/network_type_test.rs", "rank": 19, "score": 140268.95900737512 }, { "content": "\n\n pub(crate) fn key(&self) -> &str {\n\n &self.inner.params.key\n\n }\n\n\n\n pub(crate) async fn write_packet(&self, data: &[u8], addr: SocketAddr) -> ConnResult<()> {\n\n // NOTE: Pion/ice uses Sync.Pool to optimise this.\n\n let mut buffer = make_buffer();\n\n let mut offset = 0;\n\n\n\n if (data.len() + MAX_ADDR_SIZE) > (RECEIVE_MTU + MAX_ADDR_SIZE) {\n\n return Err(Error::ErrBufferShort);\n\n }\n\n\n\n // Format of buffer: | data len(2) | data bytes(dn) | addr len(2) | addr bytes(an) |\n\n // Where the number in parenthesis indicate the number of bytes used\n\n // `dn` and `an` are the length in bytes of data and addr respectively.\n\n\n\n // SAFETY: `data.len()` is at most RECEIVE_MTU(8192) - MAX_ADDR_SIZE(27)\n\n buffer[0..2].copy_from_slice(&(data.len() as u16).to_le_bytes()[..]);\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 20, "score": 137507.33851209455 }, { "content": "\n\n buf[..data_len].copy_from_slice(&buffer[offset..offset + data_len]);\n\n offset += data_len;\n\n\n\n let address_len: usize = 
buffer[offset..offset + 2]\n\n .try_into()\n\n .map(u16::from_le_bytes)\n\n .map(From::from)\n\n .unwrap();\n\n offset += 2;\n\n\n\n let addr = SocketAddr::decode(&buffer[offset..offset + address_len])?;\n\n\n\n Ok((data_len, addr))\n\n }\n\n\n\n async fn send_to(&self, buf: &[u8], target: &SocketAddr) -> ConnResult<usize> {\n\n self.params.udp_mux.send_to(buf, target).await\n\n }\n\n\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 21, "score": 137506.0724258308 }, { "content": " async fn send_to(&self, buf: &[u8], target: SocketAddr) -> ConnResult<usize> {\n\n let normalized_target = normalize_socket_addr(&target, &self.inner.params.local_addr);\n\n\n\n if !self.contains_address(&normalized_target) {\n\n self.add_address(normalized_target).await;\n\n }\n\n\n\n self.inner.send_to(buf, &normalized_target).await\n\n }\n\n\n\n async fn local_addr(&self) -> ConnResult<SocketAddr> {\n\n Ok(self.inner.local_addr())\n\n }\n\n\n\n async fn remote_addr(&self) -> Option<SocketAddr> {\n\n None\n\n }\n\n async fn close(&self) -> ConnResult<()> {\n\n self.inner.close();\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 22, "score": 137500.84811502777 }, { "content": "use std::convert::TryInto;\n\nuse std::{collections::HashSet, io, net::SocketAddr, sync::Arc};\n\n\n\nuse async_trait::async_trait;\n\nuse tokio::sync::watch;\n\n\n\nuse util::{sync::Mutex, Buffer, Conn, Error};\n\n\n\nuse super::socket_addr_ext::{SocketAddrExt, MAX_ADDR_SIZE};\n\nuse super::{normalize_socket_addr, UDPMuxDefault, RECEIVE_MTU};\n\n\n\n#[inline(always)]\n\n/// Create a buffer of appropriate size to fit both a packet with max RECEIVE_MTU and the\n\n/// additional metadata used for muxing.\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 23, "score": 137492.57379977775 }, { "content": " /// Close Receiver. 
A copy of this can be obtained via [`close_tx`].\n\n closed_watch_rx: watch::Receiver<bool>,\n\n\n\n inner: Arc<UDPMuxConnInner>,\n\n}\n\n\n\nimpl UDPMuxConn {\n\n pub(crate) fn new(params: UDPMuxConnParams) -> Self {\n\n let (closed_watch_tx, closed_watch_rx) = watch::channel(false);\n\n\n\n Self {\n\n closed_watch_rx,\n\n inner: Arc::new(UDPMuxConnInner {\n\n params,\n\n closed_watch_tx: Mutex::new(Some(closed_watch_tx)),\n\n addresses: Default::default(),\n\n buffer: Buffer::new(0, 0),\n\n }),\n\n }\n\n }\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 24, "score": 137488.24080703538 }, { "content": " tokio::spawn(async move {\n\n cloned_self.buffer.close().await;\n\n });\n\n }\n\n }\n\n\n\n fn local_addr(&self) -> SocketAddr {\n\n self.params.local_addr\n\n }\n\n\n\n // Address related methods\n\n pub(super) fn get_addresses(&self) -> Vec<SocketAddr> {\n\n let addresses = self.addresses.lock();\n\n\n\n addresses.iter().cloned().collect()\n\n }\n\n\n\n pub(super) fn add_address(self: &Arc<Self>, addr: SocketAddr) {\n\n {\n\n let mut addresses = self.addresses.lock();\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 25, "score": 137484.87520235285 }, { "content": " pub(crate) fn close_rx(&self) -> watch::Receiver<bool> {\n\n self.closed_watch_rx.clone()\n\n }\n\n\n\n /// Close this connection\n\n pub(crate) fn close(&self) {\n\n self.inner.close();\n\n }\n\n\n\n pub(super) fn get_addresses(&self) -> Vec<SocketAddr> {\n\n self.inner.get_addresses()\n\n }\n\n\n\n pub(super) async fn add_address(&self, addr: SocketAddr) {\n\n self.inner.add_address(addr);\n\n self.inner\n\n .params\n\n .udp_mux\n\n .register_conn_for_address(self, addr)\n\n .await;\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 26, "score": 137483.7533138295 }, { "content": " addresses.insert(addr);\n\n }\n\n }\n\n\n\n pub(super) fn remove_address(&self, addr: &SocketAddr) {\n\n {\n\n let mut addresses = self.addresses.lock();\n\n addresses.remove(addr);\n\n }\n\n }\n\n\n\n pub(super) fn contains_address(&self, addr: &SocketAddr) -> bool {\n\n let addresses = self.addresses.lock();\n\n\n\n addresses.contains(addr)\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub(crate) struct UDPMuxConn {\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 27, "score": 137478.37178144048 }, { "content": " offset += 2;\n\n\n\n buffer[offset..offset + data.len()].copy_from_slice(data);\n\n offset += data.len();\n\n\n\n let len = addr.encode(&mut buffer[offset + 2..])?;\n\n buffer[offset..offset + 2].copy_from_slice(&(len as u16).to_le_bytes()[..]);\n\n offset += 2 + len;\n\n\n\n self.inner.buffer.write(&buffer[..offset]).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn is_closed(&self) -> bool {\n\n self.inner.is_closed()\n\n }\n\n\n\n /// Get a copy of the close [`tokio::sync::watch::Receiver`] that fires when this\n\n /// connection is closed.\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 28, "score": 137476.06015556437 }, { "content": " fn is_closed(&self) -> bool {\n\n self.closed_watch_tx.lock().is_none()\n\n }\n\n\n\n fn close(self: &Arc<Self>) {\n\n let mut closed_tx = self.closed_watch_tx.lock();\n\n\n\n if let Some(tx) = closed_tx.take() {\n\n let _ = tx.send(true);\n\n drop(closed_tx);\n\n\n\n let cloned_self = Arc::clone(self);\n\n\n\n {\n\n let mut addresses = self.addresses.lock();\n\n *addresses = Default::default();\n\n }\n\n\n\n // NOTE: Alternatively we could wait on the buffer closing here so that\n\n // our caller can wait for things to fully settle down\n", "file_path": 
"src/udp_mux/udp_mux_conn.rs", "rank": 29, "score": 137471.0558834698 }, { "content": " // We always have at least.\n\n //\n\n // * 2 bytes for data len\n\n // * 2 bytes for addr len\n\n // * 7 bytes for an Ipv4 addr\n\n if len < 11 {\n\n return Err(Error::ErrBufferShort);\n\n }\n\n\n\n let data_len: usize = buffer[..2]\n\n .try_into()\n\n .map(u16::from_le_bytes)\n\n .map(From::from)\n\n .unwrap();\n\n offset += 2;\n\n\n\n let total = 2 + data_len + 2 + 7;\n\n if data_len > buf.len() || total > len {\n\n return Err(Error::ErrBufferShort);\n\n }\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 30, "score": 137466.11223599315 }, { "content": " }\n\n\n\n pub(super) fn remove_address(&self, addr: &SocketAddr) {\n\n self.inner.remove_address(addr)\n\n }\n\n\n\n pub(super) fn contains_address(&self, addr: &SocketAddr) -> bool {\n\n self.inner.contains_address(addr)\n\n }\n\n}\n\n\n", "file_path": "src/udp_mux/udp_mux_conn.rs", "rank": 31, "score": 137462.96834172035 }, { "content": "#[must_use]\n\npub fn supported_network_types() -> Vec<NetworkType> {\n\n vec![\n\n NetworkType::Udp4,\n\n NetworkType::Udp6,\n\n //NetworkType::TCP4,\n\n //NetworkType::TCP6,\n\n ]\n\n}\n\n\n\n/// Represents the type of network.\n\n#[derive(PartialEq, Debug, Copy, Clone, Eq, Hash)]\n\npub enum NetworkType {\n\n Unspecified,\n\n\n\n /// Indicates UDP over IPv4.\n\n Udp4,\n\n\n\n /// Indicates UDP over IPv6.\n\n Udp6,\n\n\n", "file_path": "src/network_type/mod.rs", "rank": 32, "score": 136964.5060743151 }, { "content": " }\n\n }\n\n\n\n fn decode(buffer: &[u8]) -> Result<SocketAddr, Error> {\n\n use std::net::*;\n\n\n\n match buffer[0] {\n\n IPV4_MARKER => {\n\n if buffer.len() < IPV4_ADDRESS_SIZE {\n\n return Err(Error::ErrBufferShort);\n\n }\n\n\n\n let ip_parts = &buffer[1..5];\n\n let port = match &buffer[5..7].try_into() {\n\n Err(_) => return Err(Error::ErrFailedToParseIpaddr),\n\n Ok(input) => u16::from_le_bytes(*input),\n\n };\n\n\n\n let ip = Ipv4Addr::new(ip_parts[0], ip_parts[1], ip_parts[2], ip_parts[3]);\n\n\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 48, "score": 118234.74985603047 }, { "content": "const IPV6_MARKER: u8 = 6;\n\nconst IPV6_ADDRESS_SIZE: usize = 27;\n\n\n\npub(super) const MAX_ADDR_SIZE: usize = IPV6_ADDRESS_SIZE;\n\n\n\nimpl SocketAddrExt for SocketAddr {\n\n fn encode(&self, buffer: &mut [u8]) -> Result<usize, Error> {\n\n use std::net::SocketAddr::*;\n\n\n\n if buffer.len() < MAX_ADDR_SIZE {\n\n return Err(Error::ErrBufferShort);\n\n }\n\n\n\n match self {\n\n V4(addr) => {\n\n let marker = IPV4_MARKER;\n\n let ip: [u8; 4] = addr.ip().octets();\n\n let port: u16 = addr.port();\n\n\n\n buffer[0] = marker;\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 49, "score": 118234.5532913557 }, { "content": "use std::array::TryFromSliceError;\n\nuse std::convert::TryInto;\n\nuse std::net::SocketAddr;\n\n\n\nuse util::Error;\n\n\n\npub(super) trait SocketAddrExt {\n\n ///Encode a representation of `self` into the buffer and return the length of this encoded\n\n ///version.\n\n ///\n\n /// The buffer needs to be at least 27 bytes in length.\n\n fn encode(&self, buffer: &mut [u8]) -> Result<usize, Error>;\n\n\n\n /// Decode a `SocketAddr` from a buffer. 
The encoding should have previously been done with\n\n /// [`SocketAddrExt::encode`].\n\n fn decode(buffer: &[u8]) -> Result<SocketAddr, Error>;\n\n}\n\n\n\nconst IPV4_MARKER: u8 = 4;\n\nconst IPV4_ADDRESS_SIZE: usize = 7;\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 50, "score": 118232.90133284501 }, { "content": " Ok(SocketAddr::V4(SocketAddrV4::new(ip, port)))\n\n }\n\n IPV6_MARKER => {\n\n if buffer.len() < IPV6_ADDRESS_SIZE {\n\n return Err(Error::ErrBufferShort);\n\n }\n\n\n\n // Just to help the type system infer correctly\n\n fn helper(b: &[u8]) -> Result<&[u8; 16], TryFromSliceError> {\n\n b.try_into()\n\n }\n\n\n\n let ip = match helper(&buffer[1..17]) {\n\n Err(_) => return Err(Error::ErrFailedToParseIpaddr),\n\n Ok(input) => Ipv6Addr::from(*input),\n\n };\n\n let port = match &buffer[17..19].try_into() {\n\n Err(_) => return Err(Error::ErrFailedToParseIpaddr),\n\n Ok(input) => u16::from_le_bytes(*input),\n\n };\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 51, "score": 118230.46450505301 }, { "content": " #[test]\n\n fn test_encode_ipv4_with_short_buffer() {\n\n let mut buffer = vec![0u8; IPV4_ADDRESS_SIZE - 1];\n\n let ip = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::from([56, 128, 35, 5]), 0x1234));\n\n\n\n let result = ip.encode(&mut buffer);\n\n\n\n assert_eq!(result, Err(Error::ErrBufferShort));\n\n }\n\n\n\n #[test]\n\n fn test_encode_ipv6_with_short_buffer() {\n\n let mut buffer = vec![0u8; MAX_ADDR_SIZE - 1];\n\n let ip = SocketAddr::V6(SocketAddrV6::new(\n\n Ipv6Addr::from([\n\n 92, 114, 235, 3, 244, 64, 38, 111, 20, 100, 199, 241, 19, 174, 220, 123,\n\n ]),\n\n 0x1234,\n\n 0x12345678,\n\n 0x87654321,\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 52, "score": 118220.85899789154 }, { "content": "#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use std::net::*;\n\n\n\n #[test]\n\n fn test_ipv4() {\n\n let ip = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::from([56, 128, 35, 5]), 0x1234));\n\n\n\n let mut buffer = [0_u8; MAX_ADDR_SIZE];\n\n let encoded_len = ip.encode(&mut buffer);\n\n\n\n assert_eq!(encoded_len, Ok(7));\n\n assert_eq!(\n\n &buffer[0..7],\n\n &[IPV4_MARKER, 56, 128, 35, 5, 0x34, 0x12][..]\n\n );\n\n\n\n let decoded = SocketAddr::decode(&buffer);\n\n\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 53, "score": 118220.36189126836 }, { "content": " ));\n\n\n\n let result = ip.encode(&mut buffer);\n\n\n\n assert_eq!(result, Err(Error::ErrBufferShort));\n\n }\n\n\n\n #[test]\n\n fn test_decode_ipv4_with_short_buffer() {\n\n let buffer = vec![IPV4_MARKER, 0];\n\n\n\n let result = SocketAddr::decode(&buffer);\n\n\n\n assert_eq!(result, Err(Error::ErrBufferShort));\n\n }\n\n\n\n #[test]\n\n fn test_decode_ipv6_with_short_buffer() {\n\n let buffer = vec![IPV6_MARKER, 0];\n\n\n\n let result = SocketAddr::decode(&buffer);\n\n\n\n assert_eq!(result, Err(Error::ErrBufferShort));\n\n }\n\n}\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 54, "score": 118219.99297578276 }, { "content": "\n\n let flowinfo = match &buffer[19..23].try_into() {\n\n Err(_) => return Err(Error::ErrFailedToParseIpaddr),\n\n Ok(input) => u32::from_le_bytes(*input),\n\n };\n\n\n\n let scope_id = match &buffer[23..27].try_into() {\n\n Err(_) => return Err(Error::ErrFailedToParseIpaddr),\n\n Ok(input) => u32::from_le_bytes(*input),\n\n };\n\n\n\n Ok(SocketAddr::V6(SocketAddrV6::new(\n\n ip, port, flowinfo, scope_id,\n\n )))\n\n }\n\n _ => Err(Error::ErrFailedToParseIpaddr),\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"src/udp_mux/socket_addr_ext.rs", "rank": 55, "score": 118219.55806352424 }, { "content": " assert_eq!(decoded, Ok(ip));\n\n }\n\n\n\n #[test]\n\n fn test_ipv6() {\n\n let ip = SocketAddr::V6(SocketAddrV6::new(\n\n Ipv6Addr::from([\n\n 92, 114, 235, 3, 244, 64, 38, 111, 20, 100, 199, 241, 19, 174, 220, 123,\n\n ]),\n\n 0x1234,\n\n 0x12345678,\n\n 0x87654321,\n\n ));\n\n\n\n let mut buffer = [0_u8; MAX_ADDR_SIZE];\n\n let encoded_len = ip.encode(&mut buffer);\n\n\n\n assert_eq!(encoded_len, Ok(27));\n\n assert_eq!(\n\n &buffer[0..27],\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 56, "score": 118212.65196674068 }, { "content": " buffer[1..5].copy_from_slice(&ip);\n\n buffer[5..7].copy_from_slice(&port.to_le_bytes());\n\n\n\n Ok(7)\n\n }\n\n V6(addr) => {\n\n let marker = IPV6_MARKER;\n\n let ip: [u8; 16] = addr.ip().octets();\n\n let port: u16 = addr.port();\n\n let flowinfo = addr.flowinfo();\n\n let scope_id = addr.scope_id();\n\n\n\n buffer[0] = marker;\n\n buffer[1..17].copy_from_slice(&ip);\n\n buffer[17..19].copy_from_slice(&port.to_le_bytes());\n\n buffer[19..23].copy_from_slice(&flowinfo.to_le_bytes());\n\n buffer[23..27].copy_from_slice(&scope_id.to_le_bytes());\n\n\n\n Ok(MAX_ADDR_SIZE)\n\n }\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 57, "score": 118204.04848382715 }, { "content": " 0x34,\n\n 0x12,\n\n // LE flowinfo\n\n 0x78,\n\n 0x56,\n\n 0x34,\n\n 0x12,\n\n // LE scope_id\n\n 0x21,\n\n 0x43,\n\n 0x65,\n\n 0x87,\n\n ][..]\n\n );\n\n\n\n let decoded = SocketAddr::decode(&buffer);\n\n\n\n assert_eq!(decoded, Ok(ip));\n\n }\n\n\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 58, "score": 118202.97996056468 }, { "content": " &[\n\n IPV6_MARKER, // marker\n\n // Start of ipv6 address\n\n 92,\n\n 114,\n\n 235,\n\n 3,\n\n 244,\n\n 64,\n\n 38,\n\n 111,\n\n 20,\n\n 100,\n\n 199,\n\n 241,\n\n 19,\n\n 174,\n\n 220,\n\n 123,\n\n // LE port\n", "file_path": "src/udp_mux/socket_addr_ext.rs", "rank": 59, "score": 118189.61862224813 }, { "content": "#[test]\n\nfn test_candidate_foundation() -> Result<()> {\n\n // All fields are the same\n\n assert_eq!(\n\n (CandidateBase {\n\n candidate_type: CandidateType::Host,\n\n network_type: AtomicU8::new(NetworkType::Udp4 as u8),\n\n address: \"A\".to_owned(),\n\n ..Default::default()\n\n })\n\n .foundation(),\n\n (CandidateBase {\n\n candidate_type: CandidateType::Host,\n\n network_type: AtomicU8::new(NetworkType::Udp4 as u8),\n\n address: \"A\".to_owned(),\n\n ..Default::default()\n\n })\n\n .foundation()\n\n );\n\n\n\n // Different Address\n", "file_path": "src/candidate/candidate_test.rs", "rank": 60, "score": 110937.93303076536 }, { "content": "#[test]\n\nfn test_controlling_get_from() -> Result<()> {\n\n let mut m = Message::new();\n\n let mut c = AttrControlling(4321);\n\n let result = c.get_from(&m);\n\n if let Err(err) = result {\n\n assert_eq!(stun::Error::ErrAttributeNotFound, err, \"unexpected error\");\n\n } else {\n\n panic!(\"expected error, but got ok\");\n\n }\n\n\n\n m.build(&[Box::new(BINDING_REQUEST), Box::new(c)])?;\n\n\n\n let mut m1 = Message::new();\n\n m1.write(&m.raw)?;\n\n\n\n let mut c1 = AttrControlling::default();\n\n c1.get_from(&m1)?;\n\n\n\n assert_eq!(c1, c, \"not equal\");\n\n\n", "file_path": "src/control/control_test.rs", "rank": 61, "score": 110937.93303076536 }, { "content": "#[test]\n\nfn test_control_get_from() -> Result<()> {\n\n //\"Blank\"\n\n {\n\n let m = Message::new();\n\n let mut c = AttrControl::default();\n\n let result = c.get_from(&m);\n\n if let Err(err) = 
result {\n\n assert_eq!(stun::Error::ErrAttributeNotFound, err, \"unexpected error\");\n\n } else {\n\n panic!(\"expected error, but got ok\");\n\n }\n\n }\n\n //\"Controlling\"\n\n {\n\n let mut m = Message::new();\n\n let mut c = AttrControl::default();\n\n let result = c.get_from(&m);\n\n if let Err(err) = result {\n\n assert_eq!(stun::Error::ErrAttributeNotFound, err, \"unexpected error\");\n\n } else {\n", "file_path": "src/control/control_test.rs", "rank": 62, "score": 110937.93303076536 }, { "content": "#[test]\n\nfn test_priority_get_from() -> Result<()> {\n\n let mut m = Message::new();\n\n let mut p = PriorityAttr::default();\n\n let result = p.get_from(&m);\n\n if let Err(err) = result {\n\n assert_eq!(stun::Error::ErrAttributeNotFound, err, \"unexpected error\");\n\n } else {\n\n panic!(\"expected error, but got ok\");\n\n }\n\n\n\n m.build(&[Box::new(BINDING_REQUEST), Box::new(p)])?;\n\n\n\n let mut m1 = Message::new();\n\n m1.write(&m.raw)?;\n\n\n\n let mut p1 = PriorityAttr::default();\n\n p1.get_from(&m1)?;\n\n\n\n assert_eq!(p1, p, \"not equal\");\n\n\n", "file_path": "src/priority/priority_test.rs", "rank": 63, "score": 110937.93303076536 }, { "content": "#[test]\n\nfn test_candidate_priority() -> Result<()> {\n\n let tests = vec![\n\n (\n\n CandidateBase {\n\n candidate_type: CandidateType::Host,\n\n component: AtomicU16::new(COMPONENT_RTP as u16),\n\n ..Default::default()\n\n },\n\n 2130706431,\n\n ),\n\n (\n\n CandidateBase {\n\n candidate_type: CandidateType::Host,\n\n component: AtomicU16::new(COMPONENT_RTP as u16),\n\n network_type: AtomicU8::new(NetworkType::Tcp4 as u8),\n\n tcp_type: TcpType::Active,\n\n ..Default::default()\n\n },\n\n 2128609279,\n\n ),\n", "file_path": "src/candidate/candidate_test.rs", "rank": 64, "score": 110937.93303076536 }, { "content": "#[test]\n\nfn test_controlled_get_from() -> Result<()> {\n\n let mut m = Message::new();\n\n let mut c = AttrControlled(4321);\n\n let result = c.get_from(&m);\n\n if let Err(err) = result {\n\n assert_eq!(stun::Error::ErrAttributeNotFound, err, \"unexpected error\");\n\n } else {\n\n panic!(\"expected error, but got ok\");\n\n }\n\n\n\n m.build(&[Box::new(BINDING_REQUEST), Box::new(c)])?;\n\n\n\n let mut m1 = Message::new();\n\n m1.write(&m.raw)?;\n\n\n\n let mut c1 = AttrControlled::default();\n\n c1.get_from(&m1)?;\n\n\n\n assert_eq!(c1, c, \"not equal\");\n\n\n", "file_path": "src/control/control_test.rs", "rank": 65, "score": 110937.93303076536 }, { "content": "#[test]\n\nfn test_gathering_state_string() -> Result<()> {\n\n let tests = vec![\n\n (GatheringState::Unspecified, \"unspecified\"),\n\n (GatheringState::New, \"new\"),\n\n (GatheringState::Gathering, \"gathering\"),\n\n (GatheringState::Complete, \"complete\"),\n\n ];\n\n\n\n for (gathering_state, expected_string) in tests {\n\n assert_eq!(\n\n expected_string,\n\n gathering_state.to_string(),\n\n \"testCase: {} vs {}\",\n\n expected_string,\n\n gathering_state.to_string(),\n\n )\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/state/state_test.rs", "rank": 66, "score": 109090.82182273129 }, { "content": "#[test]\n\nfn test_parse_url_success() -> Result<()> {\n\n let tests = vec![\n\n (\n\n \"stun:google.de\",\n\n \"stun:google.de:3478\",\n\n SchemeType::Stun,\n\n false,\n\n \"google.de\",\n\n 3478,\n\n ProtoType::Udp,\n\n ),\n\n (\n\n \"stun:google.de:1234\",\n\n \"stun:google.de:1234\",\n\n SchemeType::Stun,\n\n false,\n\n \"google.de\",\n\n 1234,\n\n ProtoType::Udp,\n\n ),\n", "file_path": "src/url/url_test.rs", "rank": 67, "score": 
109090.82182273129 }, { "content": "#[test]\n\nfn test_candidate_last_sent() -> Result<()> {\n\n let candidate = CandidateBase::default();\n\n assert_eq!(candidate.last_sent(), UNIX_EPOCH);\n\n\n\n let now = SystemTime::now();\n\n let d = now.duration_since(UNIX_EPOCH)?;\n\n candidate.set_last_sent(d);\n\n assert_eq!(candidate.last_sent(), now);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/candidate/candidate_test.rs", "rank": 68, "score": 109090.82182273129 }, { "content": "#[test]\n\nfn test_generate_multicast_dnsname() -> Result<()> {\n\n let name = generate_multicast_dns_name();\n\n\n\n let re = Regex::new(\n\n r\"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}.local+$\",\n\n );\n\n\n\n if let Ok(re) = re {\n\n assert!(\n\n re.is_match(&name),\n\n \"mDNS name must be UUID v4 + \\\".local\\\" suffix, got {}\",\n\n name\n\n );\n\n } else {\n\n panic!(\"expected ok, but got err\");\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/mdns/mdns_test.rs", "rank": 69, "score": 109090.82182273129 }, { "content": "#[test]\n\nfn test_candidate_last_received() -> Result<()> {\n\n let candidate = CandidateBase::default();\n\n assert_eq!(candidate.last_received(), UNIX_EPOCH);\n\n\n\n let now = SystemTime::now();\n\n let d = now.duration_since(UNIX_EPOCH)?;\n\n candidate.set_last_received(d);\n\n assert_eq!(candidate.last_received(), now);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/candidate/candidate_test.rs", "rank": 70, "score": 109090.82182273129 }, { "content": "#[test]\n\nfn test_parse_url_failure() -> Result<()> {\n\n let tests = vec![\n\n (\"\", Error::ErrSchemeType),\n\n (\":::\", Error::ErrUrlParse),\n\n (\"stun:[::1]:123:\", Error::ErrPort),\n\n (\"stun:[::1]:123a\", Error::ErrPort),\n\n (\"google.de\", Error::ErrSchemeType),\n\n (\"stun:\", Error::ErrHost),\n\n (\"stun:google.de:abc\", Error::ErrPort),\n\n (\"stun:google.de?transport=udp\", Error::ErrStunQuery),\n\n (\"stuns:google.de?transport=udp\", Error::ErrStunQuery),\n\n (\"turn:google.de?trans=udp\", Error::ErrInvalidQuery),\n\n (\"turns:google.de?trans=udp\", Error::ErrInvalidQuery),\n\n (\n\n \"turns:google.de?transport=udp&another=1\",\n\n Error::ErrInvalidQuery,\n\n ),\n\n (\"turn:google.de?transport=ip\", Error::ErrProtoType),\n\n ];\n\n\n", "file_path": "src/url/url_test.rs", "rank": 71, "score": 109090.82182273129 }, { "content": "#[test]\n\nfn test_connected_state_string() -> Result<()> {\n\n let tests = vec![\n\n (ConnectionState::Unspecified, \"Unspecified\"),\n\n (ConnectionState::New, \"New\"),\n\n (ConnectionState::Checking, \"Checking\"),\n\n (ConnectionState::Connected, \"Connected\"),\n\n (ConnectionState::Completed, \"Completed\"),\n\n (ConnectionState::Failed, \"Failed\"),\n\n (ConnectionState::Disconnected, \"Disconnected\"),\n\n (ConnectionState::Closed, \"Closed\"),\n\n ];\n\n\n\n for (connection_state, expected_string) in tests {\n\n assert_eq!(\n\n expected_string,\n\n connection_state.to_string(),\n\n \"testCase: {} vs {}\",\n\n expected_string,\n\n connection_state.to_string(),\n\n )\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/state/state_test.rs", "rank": 72, "score": 109090.82182273129 }, { "content": "#[test]\n\nfn test_tcp_type() -> Result<()> {\n\n //assert_eq!(TCPType::Unspecified, tcpType)\n\n assert_eq!(TcpType::Active, TcpType::from(\"active\"));\n\n assert_eq!(TcpType::Passive, TcpType::from(\"passive\"));\n\n assert_eq!(TcpType::SimultaneousOpen, TcpType::from(\"so\"));\n\n assert_eq!(TcpType::Unspecified, TcpType::from(\"something else\"));\n\n\n\n 
assert_eq!(\"unspecified\", TcpType::Unspecified.to_string());\n\n assert_eq!(\"active\", TcpType::Active.to_string());\n\n assert_eq!(\"passive\", TcpType::Passive.to_string());\n\n assert_eq!(\"so\", TcpType::SimultaneousOpen.to_string());\n\n\n\n Ok(())\n\n}\n", "file_path": "src/tcp_type/tcp_type_test.rs", "rank": 73, "score": 107332.45370389233 }, { "content": "#[test]\n\nfn test_network_type_is_tcp() -> Result<()> {\n\n assert!(NetworkType::Tcp4.is_tcp());\n\n assert!(NetworkType::Tcp6.is_tcp());\n\n assert!(!NetworkType::Tcp4.is_udp());\n\n assert!(!NetworkType::Tcp6.is_udp());\n\n\n\n Ok(())\n\n}\n", "file_path": "src/network_type/network_type_test.rs", "rank": 74, "score": 105656.58330732482 }, { "content": "struct ChanReceivers {\n\n chan_state_rx: mpsc::Receiver<ConnectionState>,\n\n chan_candidate_rx: mpsc::Receiver<Option<Arc<dyn Candidate + Send + Sync>>>,\n\n chan_candidate_pair_rx: mpsc::Receiver<()>,\n\n}\n\n\n\n/// Represents the ICE agent.\n\npub struct Agent {\n\n pub(crate) internal: Arc<AgentInternal>,\n\n\n\n pub(crate) udp_network: UDPNetwork,\n\n pub(crate) interface_filter: Arc<Option<InterfaceFilterFn>>,\n\n pub(crate) mdns_mode: MulticastDnsMode,\n\n pub(crate) mdns_name: String,\n\n pub(crate) mdns_conn: Option<Arc<DnsConn>>,\n\n pub(crate) net: Arc<Net>,\n\n\n\n // 1:1 D-NAT IP address mapping\n\n pub(crate) ext_ip_mapper: Arc<Option<ExternalIpMapper>>,\n\n pub(crate) gathering_state: Arc<AtomicU8>, //GatheringState,\n", "file_path": "src/agent/mod.rs", "rank": 75, "score": 104945.18710892124 }, { "content": "#[test]\n\nfn test_network_type_parsing_success() -> Result<()> {\n\n let ipv4: IpAddr = \"192.168.0.1\".parse().unwrap();\n\n let ipv6: IpAddr = \"fe80::a3:6ff:fec4:5454\".parse().unwrap();\n\n\n\n let tests = vec![\n\n (\"lowercase UDP4\", \"udp\", ipv4, NetworkType::Udp4),\n\n (\"uppercase UDP4\", \"UDP\", ipv4, NetworkType::Udp4),\n\n (\"lowercase UDP6\", \"udp\", ipv6, NetworkType::Udp6),\n\n (\"uppercase UDP6\", \"UDP\", ipv6, NetworkType::Udp6),\n\n ];\n\n\n\n for (name, in_network, in_ip, expected) in tests {\n\n let actual = determine_network_type(in_network, &in_ip)?;\n\n\n\n assert_eq!(\n\n actual, expected,\n\n \"NetworkTypeParsing: '{}' -- input:{} expected:{} actual:{}\",\n\n name, in_network, expected, actual\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/network_type/network_type_test.rs", "rank": 76, "score": 104057.53786410991 }, { "content": "#[test]\n\nfn test_network_type_parsing_failure() -> Result<()> {\n\n let ipv6: IpAddr = \"fe80::a3:6ff:fec4:5454\".parse().unwrap();\n\n\n\n let tests = vec![(\"invalid network\", \"junkNetwork\", ipv6)];\n\n for (name, in_network, in_ip) in tests {\n\n let result = determine_network_type(in_network, &in_ip);\n\n assert!(\n\n result.is_err(),\n\n \"NetworkTypeParsing should fail: '{}' -- input:{}\",\n\n name,\n\n in_network,\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/network_type/network_type_test.rs", "rank": 77, "score": 104057.53786410991 }, { "content": "struct GatherCandidatesSrflxParams {\n\n urls: Vec<Url>,\n\n network_types: Vec<NetworkType>,\n\n port_max: u16,\n\n port_min: u16,\n\n net: Arc<Net>,\n\n agent_internal: Arc<AgentInternal>,\n\n}\n\n\n\nimpl Agent {\n\n pub(crate) async fn gather_candidates_internal(params: GatherCandidatesInternalParams) {\n\n Self::set_gathering_state(\n\n &params.chan_candidate_tx,\n\n &params.gathering_state,\n\n GatheringState::Gathering,\n\n )\n\n .await;\n\n\n\n let wg = WaitGroup::new();\n\n\n", "file_path": 
"src/agent/agent_gather.rs", "rank": 78, "score": 100001.69648838472 }, { "content": "struct GatherCandidatesLocalParams {\n\n udp_network: UDPNetwork,\n\n network_types: Vec<NetworkType>,\n\n mdns_mode: MulticastDnsMode,\n\n mdns_name: String,\n\n interface_filter: Arc<Option<InterfaceFilterFn>>,\n\n ext_ip_mapper: Arc<Option<ExternalIpMapper>>,\n\n net: Arc<Net>,\n\n agent_internal: Arc<AgentInternal>,\n\n}\n\n\n", "file_path": "src/agent/agent_gather.rs", "rank": 79, "score": 100001.69648838472 }, { "content": "#[test]\n\nfn test_external_ip_mapper_validate_ip_string() -> Result<()> {\n\n let ip = validate_ip_string(\"1.2.3.4\")?;\n\n assert!(ip.is_ipv4(), \"should be true\");\n\n assert_eq!(\"1.2.3.4\", ip.to_string(), \"should be true\");\n\n\n\n let ip = validate_ip_string(\"2601:4567::5678\")?;\n\n assert!(!ip.is_ipv4(), \"should be false\");\n\n assert_eq!(\"2601:4567::5678\", ip.to_string(), \"should be true\");\n\n\n\n let result = validate_ip_string(\"bad.6.6.6\");\n\n assert!(result.is_err(), \"should fail\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/external_ip_mapper/external_ip_mapper_test.rs", "rank": 80, "score": 98332.82495313633 }, { "content": " verify_packet(&buffer[..n], next_sequence);\n\n\n\n conn.send_to(&buffer[..n], remote_connection_addr)\n\n .await\n\n .expect(\"Failed to write to muxxed connection\");\n\n\n\n read += n;\n\n log::debug!(\"Muxxed read {}, sequence: {}\", read, next_sequence);\n\n next_sequence += 1;\n\n }\n\n });\n\n\n\n let remote_connection_2 = Arc::clone(&remote_connection);\n\n let remote_handle = tokio::spawn(async move {\n\n let remote_connection = remote_connection_2;\n\n let mut buffer = vec![0u8; RECEIVE_MTU];\n\n let mut next_sequence = 0;\n\n let mut read = 0;\n\n\n\n while read < TARGET_SIZE {\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 81, "score": 97825.41139181501 }, { "content": " assert_eq!(buffer[..len], stun_msg);\n\n\n\n const TARGET_SIZE: usize = 1 * 1024 * 1024;\n\n\n\n // Read on the muxed side\n\n let conn_2 = Arc::clone(&conn);\n\n let mux_handle = tokio::spawn(async move {\n\n let conn = conn_2;\n\n\n\n let mut buffer = vec![0u8; RECEIVE_MTU];\n\n let mut next_sequence = 0;\n\n let mut read = 0;\n\n\n\n while read < TARGET_SIZE {\n\n let (n, _) = conn\n\n .recv_from(&mut buffer)\n\n .await\n\n .expect(\"recv_from should not error\");\n\n assert_eq!(n, RECEIVE_MTU);\n\n\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 82, "score": 97822.1220120773 }, { "content": " timeout.is_none(),\n\n \"At least one of the muxed tasks timedout {:?}\",\n\n all_results\n\n );\n\n\n\n let res = udp_mux.close().await;\n\n assert!(res.is_ok());\n\n let res = udp_mux.get_conn(\"failurefrag\").await;\n\n\n\n assert!(\n\n res.is_err(),\n\n \"Getting connections after UDPMuxDefault is closed should fail\"\n\n );\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn test_mux_connection(\n\n mux: Arc<dyn UDPMux + Send + Sync>,\n\n ufrag: &str,\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 83, "score": 97819.577388792 }, { "content": "\n\n // These bytes should be dropped\n\n remote_connection.send(\"Droppped bytes\".as_bytes()).await?;\n\n\n\n sleep(Duration::from_millis(1)).await;\n\n\n\n let stun_msg = {\n\n let mut m = Message::default();\n\n m.typ = BINDING_REQUEST;\n\n m.add(ATTR_USERNAME, format!(\"{}:otherufrag\", ufrag).as_bytes());\n\n\n\n m.marshal_binary().unwrap()\n\n };\n\n\n\n let remote_connection_addr = remote_connection.local_addr()?;\n\n\n\n conn.send_to(&stun_msg, remote_connection_addr).await?;\n\n\n\n let mut 
buffer = vec![0u8; RECEIVE_MTU];\n\n let len = remote_connection.recv(&mut buffer).await?;\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 84, "score": 97811.95760963828 }, { "content": " let n = remote_connection\n\n .recv(&mut buffer)\n\n .await\n\n .expect(\"recv_from should not error\");\n\n assert_eq!(n, RECEIVE_MTU);\n\n\n\n verify_packet(&buffer[..n], next_sequence);\n\n read += n;\n\n log::debug!(\"Remote read {}, sequence: {}\", read, next_sequence);\n\n next_sequence += 1;\n\n }\n\n });\n\n\n\n let mut sequence: u32 = 0;\n\n let mut written = 0;\n\n let mut buffer = vec![0u8; RECEIVE_MTU];\n\n while written < TARGET_SIZE {\n\n thread_rng().fill(&mut buffer[24..]);\n\n\n\n let hash = sha1_hash(&buffer[24..]);\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 85, "score": 97801.95580376062 }, { "content": " .init();\n\n\n\n // TODO: Support IPv6 dual stack. This works Linux and macOS, but not Windows.\n\n #[cfg(all(unix, target_pointer_width = \"64\"))]\n\n let udp_socket = UdpSocket::bind((std::net::Ipv6Addr::UNSPECIFIED, 0)).await?;\n\n\n\n #[cfg(any(not(unix), not(target_pointer_width = \"64\")))]\n\n let udp_socket = UdpSocket::bind((std::net::Ipv4Addr::UNSPECIFIED, 0)).await?;\n\n\n\n let addr = udp_socket.local_addr()?;\n\n log::info!(\"Listening on {}\", addr);\n\n\n\n let udp_mux = UDPMuxDefault::new(UDPMuxParams::new(udp_socket));\n\n let udp_mux_dyn = Arc::clone(&udp_mux) as Arc<dyn UDPMux + Send + Sync>;\n\n\n\n let udp_mux_dyn_1 = Arc::clone(&udp_mux_dyn);\n\n let h1 = tokio::spawn(async move {\n\n timeout(\n\n TIMEOUT,\n\n test_mux_connection(Arc::clone(&udp_mux_dyn_1), \"ufrag1\", addr, Network::Ipv4),\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 86, "score": 97801.45526817138 }, { "content": " Err(timeout_err) => {\n\n assert!(false, \"Mux test timedout: {:?}\", timeout_err)\n\n }\n\n\n\n // Join error\n\n Ok(join_result) => match join_result {\n\n Err(err) => {\n\n assert!(false, \"Mux test failed with join error: {:?}\", err)\n\n }\n\n // Actual error\n\n Ok(mux_result) => match mux_result {\n\n Err(err) => assert!(false, \"Mux test failed with error: {:?}\", err),\n\n _ => (),\n\n },\n\n },\n\n }\n\n }\n\n\n\n let timeout = all_results.iter().find_map(|r| r.as_ref().err());\n\n assert!(\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 87, "score": 97798.55756170592 }, { "content": " buffer[4..24].copy_from_slice(&hash);\n\n buffer[0..4].copy_from_slice(&sequence.to_le_bytes());\n\n\n\n let len = remote_connection.send(&buffer).await?;\n\n\n\n written += len;\n\n log::debug!(\"Data written {}, sequence: {}\", written, sequence);\n\n sequence += 1;\n\n\n\n sleep(Duration::from_millis(1)).await;\n\n }\n\n\n\n let (r1, r2) = tokio::join!(mux_handle, remote_handle);\n\n assert!(r1.is_ok() && r2.is_ok());\n\n\n\n let res = conn.close().await;\n\n assert!(res.is_ok(), \"Failed to close Conn: {:?}\", res);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 88, "score": 97797.56556368951 }, { "content": " )\n\n .await\n\n });\n\n\n\n let udp_mux_dyn_2 = Arc::clone(&udp_mux_dyn);\n\n let h2 = tokio::spawn(async move {\n\n timeout(\n\n TIMEOUT,\n\n test_mux_connection(Arc::clone(&udp_mux_dyn_2), \"ufrag2\", addr, Network::Ipv4),\n\n )\n\n .await\n\n });\n\n\n\n let all_results;\n\n\n\n #[cfg(all(unix, target_pointer_width = \"64\"))]\n\n {\n\n // TODO: Support IPv6 dual stack. 
This works Linux and macOS, but not Windows.\n\n let udp_mux_dyn_3 = Arc::clone(&udp_mux_dyn);\n\n let h3 = tokio::spawn(async move {\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 89, "score": 97797.45976475358 }, { "content": " timeout(\n\n TIMEOUT,\n\n test_mux_connection(Arc::clone(&udp_mux_dyn_3), \"ufrag3\", addr, Network::Ipv6),\n\n )\n\n .await\n\n });\n\n\n\n let (r1, r2, r3) = tokio::join!(h1, h2, h3);\n\n all_results = [r1, r2, r3];\n\n }\n\n\n\n #[cfg(any(not(unix), not(target_pointer_width = \"64\")))]\n\n {\n\n let (r1, r2) = tokio::join!(h1, h2);\n\n all_results = [r1, r2];\n\n }\n\n\n\n for timeout_result in &all_results {\n\n // Timeout error\n\n match timeout_result {\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 90, "score": 97794.53131510287 }, { "content": " listener_addr: SocketAddr,\n\n network: Network,\n\n) -> Result<()> {\n\n let conn = mux.get_conn(ufrag).await?;\n\n // FIXME: Cleanup\n\n\n\n let connect_addr = network\n\n .connect_ip(listener_addr.port())\n\n .parse::<SocketAddr>()\n\n .unwrap();\n\n\n\n let remote_connection = Arc::new(network.bind().await?);\n\n log::info!(\"Bound for ufrag: {}\", ufrag);\n\n remote_connection.connect(connect_addr).await?;\n\n log::info!(\"Connected to {} for ufrag: {}\", connect_addr, ufrag);\n\n log::info!(\n\n \"Testing muxing from {} over {}\",\n\n remote_connection.local_addr().unwrap(),\n\n listener_addr\n\n );\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 91, "score": 97790.82625196924 }, { "content": " }\n\n}\n\n\n\nconst TIMEOUT: Duration = Duration::from_secs(60);\n\n\n\n#[tokio::test]\n\nasync fn test_udp_mux() -> Result<()> {\n\n use std::io::Write;\n\n env_logger::Builder::from_default_env()\n\n .format(|buf, record| {\n\n writeln!(\n\n buf,\n\n \"{}:{} [{}] {} - {}\",\n\n record.file().unwrap_or(\"unknown\"),\n\n record.line().unwrap_or(0),\n\n record.level(),\n\n chrono::Local::now().format(\"%H:%M:%S.%6f\"),\n\n record.args()\n\n )\n\n })\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 92, "score": 97790.1549805554 }, { "content": "use std::convert::TryInto;\n\nuse std::io;\n\nuse std::time::Duration;\n\n\n\nuse super::*;\n\nuse crate::error::Result;\n\nuse stun::message::*;\n\n\n\nuse tokio::net::UdpSocket;\n\nuse tokio::time::{sleep, timeout};\n\n\n\nuse rand::{thread_rng, Rng};\n\nuse sha1::{Digest, Sha1};\n\n\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 93, "score": 97787.5048266765 }, { "content": "#[test]\n\nfn test_external_ip_mapper_new_external_ip_mapper() -> Result<()> {\n\n // ips being empty should succeed but mapper will still be nil\n\n let m = ExternalIpMapper::new(CandidateType::Unspecified, &[])?;\n\n assert!(m.is_none(), \"should be none\");\n\n\n\n // IPv4 with no explicit local IP, defaults to CandidateTypeHost\n\n let m = ExternalIpMapper::new(CandidateType::Unspecified, &[\"1.2.3.4\".to_owned()])?.unwrap();\n\n assert_eq!(m.candidate_type, CandidateType::Host, \"should match\");\n\n assert!(m.ipv4_mapping.ip_sole.is_some());\n\n assert!(m.ipv6_mapping.ip_sole.is_none());\n\n assert_eq!(0, m.ipv4_mapping.ip_map.len(), \"should match\");\n\n assert_eq!(0, m.ipv6_mapping.ip_map.len(), \"should match\");\n\n\n\n // IPv4 with no explicit local IP, using CandidateTypeServerReflexive\n\n let m =\n\n ExternalIpMapper::new(CandidateType::ServerReflexive, &[\"1.2.3.4\".to_owned()])?.unwrap();\n\n assert_eq!(\n\n CandidateType::ServerReflexive,\n\n m.candidate_type,\n\n \"should match\"\n", "file_path": 
"src/external_ip_mapper/external_ip_mapper_test.rs", "rank": 94, "score": 97048.80164681975 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nenum Network {\n\n Ipv4,\n\n Ipv6,\n\n}\n\n\n\nimpl Network {\n\n /// Bind the UDP socket for the \"remote\".\n\n async fn bind(&self) -> io::Result<UdpSocket> {\n\n match self {\n\n Network::Ipv4 => UdpSocket::bind(\"0.0.0.0:0\").await,\n\n Network::Ipv6 => UdpSocket::bind(\"[::]:0\").await,\n\n }\n\n }\n\n\n\n /// Connnect ip from the \"remote\".\n\n fn connect_ip(&self, port: u16) -> String {\n\n match self {\n\n Network::Ipv4 => format!(\"127.0.0.1:{}\", port),\n\n Network::Ipv6 => format!(\"[::1]:{}\", port),\n\n }\n", "file_path": "src/udp_mux/udp_mux_test.rs", "rank": 95, "score": 96171.16412755154 }, { "content": "#[test]\n\nfn test_external_ip_mapper_find_external_ip_with_empty_map() -> Result<()> {\n\n let m = ExternalIpMapper::new(CandidateType::Unspecified, &[\"1.2.3.4\".to_owned()])?.unwrap();\n\n\n\n // attempt to find IPv6 that does not exist in the map\n\n let result = m.find_external_ip(\"fe80::1\");\n\n assert!(result.is_err(), \"should fail\");\n\n\n\n let m = ExternalIpMapper::new(CandidateType::Unspecified, &[\"2200::1\".to_owned()])?.unwrap();\n\n\n\n // attempt to find IPv4 that does not exist in the map\n\n let result = m.find_external_ip(\"10.0.0.1\");\n\n assert!(result.is_err(), \"should fail\");\n\n\n\n Ok(())\n\n}\n", "file_path": "src/external_ip_mapper/external_ip_mapper_test.rs", "rank": 96, "score": 95816.52396751827 }, { "content": "#[test]\n\nfn test_external_ip_mapper_find_external_ip_with_explicit_local_ip() -> Result<()> {\n\n // IPv4 with explicit local IP, defaults to CandidateTypeHost\n\n let m = ExternalIpMapper::new(\n\n CandidateType::Unspecified,\n\n &[\n\n \"1.2.3.4/10.0.0.1\".to_owned(),\n\n \"1.2.3.5/10.0.0.2\".to_owned(),\n\n \"2200::1/fe80::1\".to_owned(),\n\n \"2200::2/fe80::2\".to_owned(),\n\n ],\n\n )?\n\n .unwrap();\n\n\n\n // find external IPv4\n\n let ext_ip = m.find_external_ip(\"10.0.0.1\")?;\n\n assert_eq!(\"1.2.3.4\", ext_ip.to_string(), \"should match\");\n\n\n\n let ext_ip = m.find_external_ip(\"10.0.0.2\")?;\n\n assert_eq!(\"1.2.3.5\", ext_ip.to_string(), \"should match\");\n\n\n", "file_path": "src/external_ip_mapper/external_ip_mapper_test.rs", "rank": 97, "score": 94632.92570653645 }, { "content": "#[test]\n\nfn test_external_ip_mapper_new_external_ip_mapper_with_explicit_local_ip() -> Result<()> {\n\n // IPv4 with explicit local IP, defaults to CandidateTypeHost\n\n let m = ExternalIpMapper::new(CandidateType::Unspecified, &[\"1.2.3.4/10.0.0.1\".to_owned()])?\n\n .unwrap();\n\n assert_eq!(CandidateType::Host, m.candidate_type, \"should match\");\n\n assert!(m.ipv4_mapping.ip_sole.is_none());\n\n assert!(m.ipv6_mapping.ip_sole.is_none());\n\n assert_eq!(1, m.ipv4_mapping.ip_map.len(), \"should match\");\n\n assert_eq!(0, m.ipv6_mapping.ip_map.len(), \"should match\");\n\n\n\n // Cannot assign two ext IPs for one local IPv4\n\n let result = ExternalIpMapper::new(\n\n CandidateType::Unspecified,\n\n &[\"1.2.3.4/10.0.0.1\".to_owned(), \"1.2.3.5/10.0.0.1\".to_owned()],\n\n );\n\n assert!(result.is_err(), \"should fail\");\n\n\n\n // Cannot assign two ext IPs for one local IPv6\n\n let result = ExternalIpMapper::new(\n\n CandidateType::Unspecified,\n", "file_path": "src/external_ip_mapper/external_ip_mapper_test.rs", "rank": 98, "score": 93495.17821556449 }, { "content": "#[test]\n\nfn test_external_ip_mapper_new_external_ip_mapper_with_implicit_local_ip() -> Result<()> {\n\n // Mixing 
inpicit and explicit local IPs not allowed\n\n let result = ExternalIpMapper::new(\n\n CandidateType::Unspecified,\n\n &[\"1.2.3.4\".to_owned(), \"1.2.3.5/10.0.0.1\".to_owned()],\n\n );\n\n assert!(result.is_err(), \"should fail\");\n\n\n\n // Mixing inpicit and explicit local IPs not allowed\n\n let result = ExternalIpMapper::new(\n\n CandidateType::Unspecified,\n\n &[\"1.2.3.5/10.0.0.1\".to_owned(), \"1.2.3.4\".to_owned()],\n\n );\n\n assert!(result.is_err(), \"should fail\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/external_ip_mapper/external_ip_mapper_test.rs", "rank": 99, "score": 93495.17821556449 } ]
Rust
src/header.rs
ssands1/elf2tab
9ba7a8dd3832d4edd1e323ee62a660313ccdf8e0
use std::fmt;
use std::io;
use std::io::{Read, Seek, SeekFrom, Write};
use std::mem;
use std::vec;

use util;

#[repr(u16)]
#[derive(Clone, Copy, Debug)]
#[allow(dead_code)]
enum TbfHeaderTypes {
    Main = 1,
    WriteableFlashRegions = 2,
    PackageName = 3,
    PicOption1 = 4,
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct TbfHeaderTlv {
    tipe: TbfHeaderTypes,
    length: u16,
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct TbfHeaderBase {
    version: u16,
    header_size: u16,
    total_size: u32,
    flags: u32,
    checksum: u32,
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct TbfHeaderMain {
    base: TbfHeaderTlv,
    init_fn_offset: u32,
    protected_size: u32,
    minimum_ram_size: u32,
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct TbfHeaderWriteableFlashRegion {
    base: TbfHeaderTlv,
    offset: u32,
    size: u32,
}

impl fmt::Display for TbfHeaderBase {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            " version: {:>8} {:>#10X} header_size: {:>8} {:>#10X} total_size: {:>8} {:>#10X} flags: {:>8} {:>#10X} ",
            self.version,
            self.version,
            self.header_size,
            self.header_size,
            self.total_size,
            self.total_size,
            self.flags,
            self.flags,
        )
    }
}

impl fmt::Display for TbfHeaderMain {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            " init_fn_offset: {:>8} {:>#10X} protected_size: {:>8} {:>#10X} minimum_ram_size: {:>8} {:>#10X} ",
            self.init_fn_offset,
            self.init_fn_offset,
            self.protected_size,
            self.protected_size,
            self.minimum_ram_size,
            self.minimum_ram_size,
        )
    }
}

impl fmt::Display for TbfHeaderWriteableFlashRegion {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            " flash region: offset: {:>8} {:>#10X} size: {:>8} {:>#10X} ",
            self.offset,
            self.offset,
            self.size,
            self.size,
        )
    }
}

pub struct TbfHeader {
    hdr_base: TbfHeaderBase,
    hdr_main: TbfHeaderMain,
    hdr_pkg_name_tlv: Option<TbfHeaderTlv>,
    hdr_wfr: Vec<TbfHeaderWriteableFlashRegion>,
    package_name: String,
    package_name_pad: usize,
}

impl TbfHeader {
    pub fn new() -> Self {
        Self {
            hdr_base: TbfHeaderBase {
                version: 2,
                header_size: 0,
                total_size: 0,
                flags: 0,
                checksum: 0,
            },
            hdr_main: TbfHeaderMain {
                base: TbfHeaderTlv {
                    tipe: TbfHeaderTypes::Main,
                    length: (mem::size_of::<TbfHeaderMain>() - mem::size_of::<TbfHeaderTlv>())
                        as u16,
                },
                init_fn_offset: 0,
                protected_size: 0,
                minimum_ram_size: 0,
            },
            hdr_pkg_name_tlv: None,
            hdr_wfr: Vec::new(),
            package_name: String::new(),
            package_name_pad: 0,
        }
    }

    pub fn create(
        &mut self,
        minimum_ram_size: u32,
        writeable_flash_regions: usize,
        package_name: String,
    ) -> usize {
        let mut header_length = mem::size_of::<TbfHeaderBase>() + mem::size_of::<TbfHeaderMain>();

        self.package_name_pad = if !package_name.is_empty() {
            header_length += mem::size_of::<TbfHeaderTlv>() + package_name.len();
            let pad = align4needed!(header_length);
            header_length += pad;
            pad
        } else {
            0
        };

        header_length += mem::size_of::<TbfHeaderWriteableFlashRegion>() * writeable_flash_regions;

        let flags = 0x0000_0001;

        self.hdr_base.header_size = header_length as u16;
        self.hdr_base.flags = flags;
        self.hdr_main.minimum_ram_size = minimum_ram_size;
        self.package_name = package_name;

        if !self.package_name.is_empty() {
            self.hdr_pkg_name_tlv = Some(TbfHeaderTlv {
                tipe: TbfHeaderTypes::PackageName,
                length: self.package_name.len() as u16,
            });
        }

        for _ in 0..writeable_flash_regions {
            self.hdr_wfr.push(TbfHeaderWriteableFlashRegion {
                base: TbfHeaderTlv {
                    tipe: TbfHeaderTypes::WriteableFlashRegions,
                    length: 8,
                },
                offset: 0,
                size: 0,
            });
        }

        self.generate().expect("No header was generated").get_ref().len()
    }

    pub fn set_protected_size(&mut self, protected_size: u32) {
        self.hdr_main.protected_size = protected_size;
    }

    pub fn set_total_size(&mut self, total_size: u32) {
        self.hdr_base.total_size = total_size;
    }

    pub fn set_init_fn_offset(&mut self, init_fn_offset: u32) {
        self.hdr_main.init_fn_offset = init_fn_offset;
    }

    pub fn set_writeable_flash_region_values(&mut self, offset: u32, size: u32) {
        for wfr in &mut self.hdr_wfr {
            if wfr.size == 0 {
                wfr.offset = offset;
                wfr.size = size;
                break;
            }
        }
    }

    pub fn generate(&self) -> io::Result<(io::Cursor<vec::Vec<u8>>)> {
        let mut header_buf = io::Cursor::new(Vec::new());

        header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_base) })?;
        header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_main) })?;
        if !self.package_name.is_empty() {
            header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_pkg_name_tlv) })?;
            header_buf.write_all(self.package_name.as_ref())?;
            util::do_pad(&mut header_buf, self.package_name_pad)?;
        }
        for wfr in &self.hdr_wfr {
            header_buf.write_all(unsafe { util::as_byte_slice(wfr) })?;
        }

        let current_length = header_buf.get_ref().len();
        util::do_pad(&mut header_buf, align4needed!(current_length))?;

        self.inject_checksum(header_buf)
    }

    fn inject_checksum(
        &self,
        mut header_buf: io::Cursor<vec::Vec<u8>>,
    ) -> io::Result<(io::Cursor<vec::Vec<u8>>)> {
        header_buf.seek(SeekFrom::Start(0))?;

        let mut wordbuf = [0_u8; 4];
        let mut checksum: u32 = 0;
        loop {
            let count = header_buf.read(&mut wordbuf)?;
            let mut word = 0;
            for (i, c) in wordbuf.iter().enumerate().take(count) {
                word |= u32::from(*c) << (8 * i);
            }
            checksum ^= word;
            if count != 4 {
                break;
            }
        }

        header_buf.seek(io::SeekFrom::Start(12))?;
        wordbuf[0] = (checksum & 0xFF) as u8;
        wordbuf[1] = ((checksum >> 8) & 0xFF) as u8;
        wordbuf[2] = ((checksum >> 16) & 0xFF) as u8;
        wordbuf[3] = ((checksum >> 24) & 0xFF) as u8;
        header_buf.write(&wordbuf)?;

        header_buf.seek(io::SeekFrom::Start(0))?;

        Ok(header_buf)
    }
}

impl fmt::Display for TbfHeader {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TBF Header:")?;
        write!(f, "{}", self.hdr_base)?;
        write!(f, "{}", self.hdr_main)?;
        for wfr in &self.hdr_wfr {
            write!(f, "{}", wfr)?;
        }
        Ok(())
    }
}
use std::fmt;
use std::io;
use std::io::{Read, Seek, SeekFrom, Write};
use std::mem;
use std::vec;

use util;

#[repr(u16)]
#[derive(Clone, Copy, Debug)]
#[allow(dead_code)]
enum TbfHeaderTypes {
    Main = 1,
    WriteableFlashRegions = 2,
    PackageName = 3,
    PicOption1 = 4,
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct TbfHeaderTlv {
    tipe: TbfHeaderTypes,
    length: u16,
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct TbfHeaderBase {
    version: u16,
    header_size: u16,
    total_size: u32,
    flags: u32,
    checksum: u32,
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct TbfHeaderMain {
    base: TbfHeaderTlv,
    init_fn_offset: u32,
    protected_size: u32,
    minimum_ram_size: u32,
}

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct TbfHeaderWriteableFlashRegion {
    base: TbfHeaderTlv,
    offset: u32,
    size: u32,
}

impl fmt::Display for TbfHeaderBase {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            " version: {:>8} {:>#10X} header_size: {:>8} {:>#10X} total_size: {:>8} {:>#10X} flags: {:>8} {:>#10X} ",
            self.version,
            self.version,
            self.header_size,
            self.header_size,
            self.total_size,
            self.total_size,
            self.flags,
            self.flags,
        )
    }
}

impl fmt::Display for TbfHeaderMain {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            " init_fn_offset: {:>8} {:>#10X} protected_size: {:>8} {:>#10X} minimum_ram_size: {:>8} {:>#10X} ",
            self.init_fn_offset,
            self.init_fn_offset,
            self.protected_size,
            self.protected_size,
            self.minimum_ram_size,
            self.minimum_ram_size,
        )
    }
}

impl fmt::Display for TbfHeaderWriteableFlashRegion {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            " flash region: offset: {:>8} {:>#10X} size: {:>8} {:>#10X} ",
            self.offset,
            self.offset,
            self.size,
            self.size,
        )
    }
}

pub struct TbfHeader {
    hdr_base: TbfHeaderBase,
    hdr_main: TbfHeaderMain,
    hdr_pkg_name_tlv: Option<TbfHeaderTlv>,
    hdr_wfr: Vec<TbfHeaderWriteableFlashRegion>,
    package_name: String,
    package_name_pad: usize,
}

impl TbfHeader {
    pub fn new() -> Self {
        Self {
            hdr_base: TbfHeaderBase {
                version: 2,
                header_size: 0,
                total_size: 0,
                flags: 0,
                checksum: 0,
            },
            hdr_main: TbfHeaderMain {
                base: TbfHeaderTlv {
                    tipe: TbfHeaderTypes::Main,
                    length: (mem::size_of::<TbfHeaderMain>() - mem::size_of::<TbfHeaderTlv>())
                        as u16,
                },
                init_fn_offset: 0,
                protected_size: 0,
                minimum_ram_size: 0,
            },
            hdr_pkg_name_tlv: None,
            hdr_wfr: Vec::new(),
            package_name: String::new(),
            package_name_pad: 0,
        }
    }

    pub fn create(
        &mut self,
        minimum_ram_size: u32,
        writeable_flash_regions: usize,
        package_name: String,
    ) -> usize {
        let mut header_length = mem::size_of::<TbfHeaderBase>() + mem::size_of::<TbfHeaderMain>();

        self.package_name_pad = if !package_name.is_empty() {
            header_length += mem::size_of::<TbfHeaderTlv>() + package_name.len();
            let pad = align4needed!(header_length);
            header_length += pad;
            pad
        } else {
            0
        };
    pub fn set_protected_size(&mut self, protected_size: u32) {
        self.hdr_main.protected_size = protected_size;
    }

    pub fn set_total_size(&mut self, total_size: u32) {
        self.hdr_base.total_size = total_size;
    }

    pub fn set_init_fn_offset(&mut self, init_fn_offset: u32) {
        self.hdr_main.init_fn_offset = init_fn_offset;
    }

    pub fn set_writeable_flash_region_values(&mut self, offset: u32, size: u32) {
        for wfr in &mut self.hdr_wfr {
            if wfr.size == 0 {
                wfr.offset = offset;
                wfr.size = size;
                break;
            }
        }
    }

    pub fn generate(&self) -> io::Result<(io::Cursor<vec::Vec<u8>>)> {
        let mut header_buf = io::Cursor::new(Vec::new());

        header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_base) })?;
        header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_main) })?;
        if !self.package_name.is_empty() {
            header_buf.write_all(unsafe { util::as_byte_slice(&self.hdr_pkg_name_tlv) })?;
            header_buf.write_all(self.package_name.as_ref())?;
            util::do_pad(&mut header_buf, self.package_name_pad)?;
        }
        for wfr in &self.hdr_wfr {
            header_buf.write_all(unsafe { util::as_byte_slice(wfr) })?;
        }

        let current_length = header_buf.get_ref().len();
        util::do_pad(&mut header_buf, align4needed!(current_length))?;

        self.inject_checksum(header_buf)
    }

    fn inject_checksum(
        &self,
        mut header_buf: io::Cursor<vec::Vec<u8>>,
    ) -> io::Result<(io::Cursor<vec::Vec<u8>>)> {
        header_buf.seek(SeekFrom::Start(0))?;

        let mut wordbuf = [0_u8; 4];
        let mut checksum: u32 = 0;
        loop {
            let count = header_buf.read(&mut wordbuf)?;
            let mut word = 0;
            for (i, c) in wordbuf.iter().enumerate().take(count) {
                word |= u32::from(*c) << (8 * i);
            }
            checksum ^= word;
            if count != 4 {
                break;
            }
        }

        header_buf.seek(io::SeekFrom::Start(12))?;
        wordbuf[0] = (checksum & 0xFF) as u8;
        wordbuf[1] = ((checksum >> 8) & 0xFF) as u8;
        wordbuf[2] = ((checksum >> 16) & 0xFF) as u8;
        wordbuf[3] = ((checksum >> 24) & 0xFF) as u8;
        header_buf.write(&wordbuf)?;

        header_buf.seek(io::SeekFrom::Start(0))?;

        Ok(header_buf)
    }
}

impl fmt::Display for TbfHeader {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "TBF Header:")?;
        write!(f, "{}", self.hdr_base)?;
        write!(f, "{}", self.hdr_main)?;
        for wfr in &self.hdr_wfr {
            write!(f, "{}", wfr)?;
        }
        Ok(())
    }
}
        header_length += mem::size_of::<TbfHeaderWriteableFlashRegion>() * writeable_flash_regions;

        let flags = 0x0000_0001;

        self.hdr_base.header_size = header_length as u16;
        self.hdr_base.flags = flags;
        self.hdr_main.minimum_ram_size = minimum_ram_size;
        self.package_name = package_name;

        if !self.package_name.is_empty() {
            self.hdr_pkg_name_tlv = Some(TbfHeaderTlv {
                tipe: TbfHeaderTypes::PackageName,
                length: self.package_name.len() as u16,
            });
        }

        for _ in 0..writeable_flash_regions {
            self.hdr_wfr.push(TbfHeaderWriteableFlashRegion {
                base: TbfHeaderTlv {
                    tipe: TbfHeaderTypes::WriteableFlashRegions,
                    length: 8,
                },
                offset: 0,
                size: 0,
            });
        }

        self.generate().expect("No header was generated").get_ref().len()
    }
function_block-function_prefix_line
[ { "content": "pub fn do_pad(output: &mut io::Write, length: usize) -> io::Result<()> {\n\n let mut pad = length;\n\n let zero_buf = [0_u8; 512];\n\n while pad > 0 {\n\n let amount_to_write = cmp::min(zero_buf.len(), pad);\n\n pad -= output.write(&zero_buf[..amount_to_write])?;\n\n }\n\n Ok(())\n\n}\n\n\n\npub unsafe fn as_byte_slice<T: Copy>(input: &T) -> &[u8] {\n\n slice::from_raw_parts(input as *const T as *const u8, mem::size_of::<T>())\n\n}\n", "file_path": "src/util.rs", "rank": 0, "score": 130672.91318318337 }, { "content": "fn main() {\n\n let opt = cmdline::Opt::from_args();\n\n\n\n // Create the metadata.toml file needed for the TAB file.\n\n let mut metadata_toml = String::new();\n\n write!(\n\n &mut metadata_toml,\n\n \"tab-version = 1\n\nname = \\\"{}\\\"\n\nonly-for-boards = \\\"\\\"\n\nbuild-date = {}\",\n\n opt.package_name.as_ref().map_or(\"\", |package_name| package_name.as_str()),\n\n chrono::prelude::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true)\n\n ).unwrap();\n\n\n\n // Start creating a tar archive which will be the .tab file.\n\n let tab_name = fs::File::create(&opt.output).expect(\"Could not create the output file.\");\n\n let mut tab = tar::Builder::new(tab_name);\n\n\n\n // Add the metadata file without creating a real file on the filesystem.\n", "file_path": "src/main.rs", "rank": 2, "score": 69662.83701680116 }, { "content": "/// Convert an ELF file to a TBF (Tock Binary Format) binary file.\n\n///\n\n/// This will place all writeable and executable sections from the ELF file\n\n/// into a binary and prepend a TBF header to it. For all writeable sections,\n\n/// if there is a .rel.X section it will be included at the end with a 32 bit\n\n/// length parameter first.\n\n///\n\n/// Assumptions:\n\n/// - Sections in a segment that is RW and set to be loaded will be in RAM and\n\n/// should count towards minimum required RAM.\n\n/// - Sections that are writeable flash regions include .wfr in their name.\n\nfn elf_to_tbf(\n\n input: &elf::File,\n\n output: &mut Write,\n\n package_name: Option<String>,\n\n verbose: bool,\n\n stack_len: u32,\n\n app_heap_len: u32,\n\n kernel_heap_len: u32,\n\n protected_region_size_arg: Option<u32>,\n\n) -> io::Result<()> {\n\n let package_name = package_name.unwrap_or_default();\n\n\n\n // Get an array of the sections sorted so we place them in the proper order\n\n // in the binary.\n\n let mut sections_sort: Vec<(usize, usize)> = Vec::new();\n\n for (i, section) in input.sections.iter().enumerate() {\n\n sections_sort.push((i, section.shdr.offset as usize));\n\n }\n\n sections_sort.sort_by_key(|s| s.1);\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 48508.69034755623 }, { "content": "fn usage() -> &'static str {\n\n \"elf2tab [FLAGS] [OPTIONS] ELF...\n\nConverts Tock userspace programs from .elf files to Tock Application Bundles.\n\n\n\nFLAGS:\n\n --help, -h print help information\n\n --version, -V print version information\n\nOPTIONS:\n\n --verbose, -v be verbose\n\n --protected-region-size=SIZE size of protected region including headers\n\n --package-name=NAME name of package [default: empty]\n\n --output-file=FILE, -o FILE [default: TockApp.tab]\n\n --minimum-ram-size=SIZE in bytes [default: from RAM sections in ELF]\n\n --app-heap=SIZE in bytes [default: 1024]\n\n --kernel-heap=SIZE in bytes [default: 1024]\n\n --stack=SIZE in bytes [default: 2048]\"\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(\n", "file_path": "src/cmdline.rs", "rank": 8, "score": 26579.02884089679 }, { "content": "use 
std::cmp;\n\nuse std::io;\n\nuse std::mem;\n\nuse std::slice;\n\n\n\n/// Takes a value and rounds it up to be aligned % 4\n\n#[macro_export]\n\nmacro_rules! align4 {\n\n ($e:expr) => {\n\n ($e) + ((4 - (($e) % 4)) % 4)\n\n };\n\n}\n\n\n\n/// Takes a value and rounds it up to be aligned % 8\n\n#[macro_export]\n\nmacro_rules! align8 {\n\n ($e:expr) => {\n\n ($e) + ((8 - (($e) % 8)) % 8)\n\n };\n\n}\n\n\n\n/// How much needs to be added to get a value aligned % 4\n\n#[macro_export]\n\nmacro_rules! align4needed {\n\n ($e:expr) => {\n\n (4 - (($e) % 4)) % 4\n\n };\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 9, "score": 22556.57385387291 }, { "content": " tbfheader.set_protected_size(fixed_protected_region_size - header_length as u32);\n\n\n\n fixed_protected_region_size\n\n } else {\n\n header_length as u32\n\n };\n\n binary_index += protected_region_size as usize;\n\n\n\n // The init function is where the app will start executing, defined as an\n\n // offset from the end of protected region at the beginning of the app in\n\n // flash. Typically the protected region only includes the TBF header. To\n\n // calculate the offset we need to find which section includes the entry\n\n // function and then determine its offset relative to the end of the\n\n // protected region.\n\n let mut init_fn_offset: u32 = 0;\n\n\n\n // Need a place to put the app sections before we know the true TBF header.\n\n let mut binary: Vec<u8> = vec![0; protected_region_size as usize - header_length];\n\n\n\n let mut entry_point_found = false;\n", "file_path": "src/main.rs", "rank": 10, "score": 20680.329816499536 }, { "content": " if section.shdr.flags.0 == elf::types::SHF_WRITE.0 + elf::types::SHF_ALLOC.0 {\n\n // This section is also one we might need to include relocation\n\n // data for.\n\n rel_sections.push(section.shdr.name.clone());\n\n }\n\n }\n\n if verbose {\n\n println!(\n\n \"Number of writeable flash regions: {}\",\n\n writeable_flash_regions_count\n\n );\n\n }\n\n\n\n // Keep track of an index of where we are in creating the app binary.\n\n let mut binary_index = 0;\n\n\n\n // Now we can create the first pass TBF header. This is mostly to get the\n\n // size of the header since we have to fill in some of the offsets later.\n\n let mut tbfheader = header::TbfHeader::new();\n\n let header_length = tbfheader.create(\n", "file_path": "src/main.rs", "rank": 11, "score": 20678.7515272799 }, { "content": " println!(\n\n \"Warning! Placing section {} at 0x{:x}, which is not 4-byte aligned.\",\n\n section.shdr.name, binary_index\n\n );\n\n }\n\n binary.extend(&section.data);\n\n\n\n // Check if this is a writeable flash region. 
If so, we need to\n\n // set the offset and size in the header.\n\n if section.shdr.name.contains(\".wfr\") && section.shdr.size > 0 {\n\n tbfheader.set_writeable_flash_region_values(\n\n binary_index as u32,\n\n section.shdr.size as u32,\n\n );\n\n }\n\n\n\n // Now increment where we are in the binary.\n\n binary_index += section.shdr.size as usize;\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 12, "score": 20673.657474959073 }, { "content": " minimum_ram_size,\n\n writeable_flash_regions_count,\n\n package_name,\n\n );\n\n // If a protected region size was passed, confirm the header will fit.\n\n // Otherwise, use the header size as the protected region size.\n\n let protected_region_size =\n\n if let Some(fixed_protected_region_size) = protected_region_size_arg {\n\n if fixed_protected_region_size < header_length as u32 {\n\n // The header doesn't fit in the provided protected region size;\n\n // throw an error.\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n format!(\n\n \"protected_region_size = {} is too small for the TBF headers. Header size: {}\",\n\n fixed_protected_region_size, header_length),\n\n ));\n\n }\n\n // Update the header's protected size, as the protected region may\n\n // be larger than the header size.\n", "file_path": "src/main.rs", "rank": 13, "score": 20673.35258250701 }, { "content": "\n\n if verbose {\n\n print!(\"{}\", tbfheader);\n\n }\n\n\n\n // Write the header and actual app to a binary file.\n\n output.write_all(tbfheader.generate().unwrap().get_ref())?;\n\n output.write_all(binary.as_ref())?;\n\n\n\n let rel_data_len: [u8; 4] = [\n\n (relocation_binary.len() & 0xff) as u8,\n\n (relocation_binary.len() >> 8 & 0xff) as u8,\n\n (relocation_binary.len() >> 16 & 0xff) as u8,\n\n (relocation_binary.len() >> 24 & 0xff) as u8,\n\n ];\n\n output.write_all(&rel_data_len)?;\n\n output.write_all(relocation_binary.as_ref())?;\n\n\n\n // Pad to get a power of 2 sized flash app.\n\n util::do_pad(output, post_content_pad as usize)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 14, "score": 20672.855153222357 }, { "content": " // init_fn_offset is specified relative to the end of the TBF\n\n // header.\n\n init_fn_offset = (input.ehdr.entry - section.shdr.addr) as u32\n\n + (binary_index - header_length) as u32\n\n }\n\n\n\n // If this is writeable, executable, or allocated, is nonzero length,\n\n // and is type `PROGBITS` we want to add it to the binary.\n\n if (section.shdr.flags.0\n\n & (elf::types::SHF_WRITE.0 + elf::types::SHF_EXECINSTR.0 + elf::types::SHF_ALLOC.0)\n\n != 0) && section.shdr.shtype == elf::types::SHT_PROGBITS\n\n && section.shdr.size > 0\n\n {\n\n if verbose {\n\n println!(\n\n \"Including the {} section at offset {} (0x{:x})\",\n\n section.shdr.name, binary_index, binary_index\n\n );\n\n }\n\n if align4needed!(binary_index) != 0 {\n", "file_path": "src/main.rs", "rank": 15, "score": 20672.591261832597 }, { "content": " // Add in room the app is asking us to reserve for the stack and heaps to\n\n // the minimum required RAM size.\n\n minimum_ram_size += align8!(stack_len) + align4!(app_heap_len) + align4!(kernel_heap_len);\n\n\n\n // Need an array of sections to look for relocation data to include.\n\n let mut rel_sections: Vec<String> = Vec::new();\n\n\n\n // Iterate the sections in the ELF file to find properties of the app that\n\n // are required to go in the TBF header.\n\n let mut writeable_flash_regions_count = 0;\n\n\n\n for s in &sections_sort {\n\n let section = &input.sections[s.0];\n\n\n\n // Count 
write only sections as writeable flash regions.\n\n if section.shdr.name.contains(\".wfr\") && section.shdr.size > 0 {\n\n writeable_flash_regions_count += 1;\n\n }\n\n\n\n // Check write+alloc sections for possible .rel.X sections.\n", "file_path": "src/main.rs", "rank": 16, "score": 20672.330347327454 }, { "content": "\n\n // Now that we have checked all of the sections, we can set the\n\n // init_fn_offset.\n\n tbfheader.set_init_fn_offset(init_fn_offset);\n\n\n\n // Next we have to add in any relocation data.\n\n let mut relocation_binary: Vec<u8> = Vec::new();\n\n\n\n // For each section that might have relocation data, check if a .rel.X\n\n // section exists and if so include it.\n\n if verbose {\n\n println!(\"Searching for .rel.X sections to add.\");\n\n }\n\n for relocation_section_name in &rel_sections {\n\n let mut name: String = \".rel\".to_owned();\n\n name.push_str(relocation_section_name);\n\n\n\n let rel_data = input\n\n .sections\n\n .iter()\n", "file_path": "src/main.rs", "rank": 17, "score": 20671.390354595853 }, { "content": "extern crate chrono;\n\nextern crate elf;\n\nextern crate tar;\n\n#[macro_use]\n\nextern crate structopt;\n\n\n\nuse std::cmp;\n\nuse std::fmt::Write as fmtwrite;\n\nuse std::fs;\n\nuse std::io;\n\nuse std::io::{Seek, Write};\n\nuse std::mem;\n\n\n\n#[macro_use]\n\nmod util;\n\nmod cmdline;\n\nmod header;\n\nuse structopt::StructOpt;\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 20670.863380438983 }, { "content": " // adding it to the TAB tar file.\n\n let mut outfile: fs::File = fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(true)\n\n .truncate(true)\n\n .open(tbf_path.clone())\n\n .unwrap();\n\n\n\n // Do the conversion to a tock binary.\n\n elf_to_tbf(\n\n &elffile,\n\n &mut outfile,\n\n opt.package_name.clone(),\n\n opt.verbose,\n\n opt.stack_size,\n\n opt.app_heap_size,\n\n opt.kernel_heap_size,\n\n opt.protected_region_size,\n\n ).unwrap();\n", "file_path": "src/main.rs", "rank": 19, "score": 20670.85609678025 }, { "content": " }\n\n\n\n // Add the relocation data to our total length. Also include the 4 bytes for\n\n // the relocation data length.\n\n binary_index += relocation_binary.len() + mem::size_of::<u32>();\n\n\n\n // That is everything that we are going to include in our app binary. Now\n\n // we need to pad the binary to a power of 2 in size, and make sure it is\n\n // at least 512 bytes in size.\n\n let post_content_pad = if binary_index.count_ones() > 1 {\n\n let power2len = cmp::max(1 << (32 - (binary_index as u32).leading_zeros()), 512);\n\n power2len - binary_index\n\n } else {\n\n 0\n\n };\n\n binary_index += post_content_pad;\n\n let total_size = binary_index;\n\n\n\n // Now set the total size of the app in the header.\n\n tbfheader.set_total_size(total_size as u32);\n", "file_path": "src/main.rs", "rank": 20, "score": 20670.606750126488 }, { "content": " // Keep track of how much RAM this app will need.\n\n let mut minimum_ram_size: u32 = 0;\n\n\n\n // Find the ELF segment for the RAM segment. 
That will tell us how much\n\n // RAM we need to reserve for when those are copied into memory.\n\n for segment in &input.phdrs {\n\n if segment.progtype == elf::types::PT_LOAD\n\n && segment.flags.0 == elf::types::PF_W.0 + elf::types::PF_R.0\n\n {\n\n minimum_ram_size = segment.memsz as u32;\n\n break;\n\n }\n\n }\n\n if verbose {\n\n println!(\n\n \"Min RAM size from sections in ELF: {} bytes\",\n\n minimum_ram_size\n\n );\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 21, "score": 20668.854182378178 }, { "content": "\n\n // Iterate the sections in the ELF file and add them to the binary as needed\n\n for s in &sections_sort {\n\n let section = &input.sections[s.0];\n\n\n\n // Determine if this is the section where the entry point is in. If it\n\n // is, then we need to calculate the correct init_fn_offset.\n\n if input.ehdr.entry >= section.shdr.addr\n\n && input.ehdr.entry < (section.shdr.addr + section.shdr.size)\n\n && (section.shdr.name.find(\"debug\")).is_none()\n\n {\n\n // panic in case we detect entry point in multiple sections.\n\n if entry_point_found {\n\n panic!(\"Duplicate entry point in {} section\", section.shdr.name);\n\n }\n\n entry_point_found = true;\n\n\n\n if verbose {\n\n println!(\"Entry point is in {} section\", section.shdr.name);\n\n }\n", "file_path": "src/main.rs", "rank": 22, "score": 20666.60297032139 }, { "content": " let mut header = tar::Header::new_gnu();\n\n header.set_size(metadata_toml.as_bytes().len() as u64);\n\n header.set_mode(0o644);\n\n header.set_cksum();\n\n tab.append_data(&mut header, \"metadata.toml\", metadata_toml.as_bytes())\n\n .unwrap();\n\n\n\n // Iterate all input elfs. Convert them to Tock friendly binaries and then\n\n // add them to the TAB file.\n\n for elf_path in opt.input {\n\n let tbf_path = elf_path.with_extension(\"tbf\");\n\n\n\n let elffile = elf::File::open_path(&elf_path).expect(\"Could not open the .elf file.\");\n\n\n\n if opt.output.clone() == tbf_path.clone() {\n\n panic!(\"tab file {} and output file {} cannot be the same file\",\n\n opt.output.clone().to_str().unwrap(), tbf_path.clone().to_str().unwrap());\n\n }\n\n\n\n // Get output file as both read/write for creating the binary and\n", "file_path": "src/main.rs", "rank": 23, "score": 20666.292469221247 }, { "content": "\n\n // Add the file to the TAB tar file.\n\n outfile.seek(io::SeekFrom::Start(0)).unwrap();\n\n tab.append_file(tbf_path.file_name().unwrap(), &mut outfile)\n\n .unwrap();\n\n outfile.seek(io::SeekFrom::Start(0)).unwrap();\n\n tab.append_file(\n\n tbf_path.with_extension(\"bin\").file_name().unwrap(),\n\n &mut outfile,\n\n ).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 20665.406975887043 }, { "content": " .find(|section| section.shdr.name == name)\n\n .map_or(&[] as &[u8], |section| section.data.as_ref());\n\n\n\n relocation_binary.extend(rel_data);\n\n\n\n if verbose && !rel_data.is_empty() {\n\n println!(\n\n \" Adding {} section. Length: {} bytes at {} (0x{:x})\",\n\n name,\n\n rel_data.len(),\n\n binary_index + mem::size_of::<u32>() + rel_data.len(),\n\n binary_index + mem::size_of::<u32>() + rel_data.len()\n\n );\n\n }\n\n if !rel_data.is_empty() && align4needed!(binary_index) != 0 {\n\n println!(\n\n \"Warning! 
Placing section {} at 0x{:x}, which is not 4-byte aligned.\",\n\n name, binary_index\n\n );\n\n }\n", "file_path": "src/main.rs", "rank": 25, "score": 20665.31315197954 }, { "content": " #[structopt(\n\n name = \"elf\",\n\n help = \"application file(s) to package\",\n\n parse(from_os_str)\n\n )]\n\n #[structopt(raw(required = \"true\"))]\n\n pub input: Vec<PathBuf>,\n\n\n\n #[structopt(\n\n long = \"protected-region-size\",\n\n name = \"protected-region-size\",\n\n help = \"Size of the protected region (including headers)\"\n\n )]\n\n pub protected_region_size: Option<u32>,\n\n}\n\n\n\nmod test {\n\n\n\n #[cfg(test)]\n\n use super::Opt;\n", "file_path": "src/cmdline.rs", "rank": 38, "score": 11.330326689737161 }, { "content": "### Creating the TBF Header\n\n\n\nAll Tock apps must start with a Tock Binary Format header so that the kernel\n\nknows how big the app is, how much memory it requires, and other important\n\nproperties. elf2tab handles creating this header automatically, and mostly\n\njust requires the `--stack`, `--app-heap`, and `--kernel-heap` flags so it\n\nknows the memory requirements.\n\n\n\nHowever, the TBF header also contains information about \"writeable flash\n\nregions\", or portions of the application's address space in flash that the app\n\nintends to use to store persistent data. This information is added to the header\n\nso that the kernel and other tools know that there is persistent that should be\n\nmaintained intact. To specify to elf2tab that a linker section is one of these\n\nwriteable flash regions, the name of the section should include the string\n\n`.wfr`. Any sections in the .elf that include `.wfr` in their name will have\n\ntheir relative address offset included in the TBF header via the\n\n`TbfHeaderWriteableFlashRegions` TLV.\n\n\n\n### Creating the TAB file\n\n\n\nAfter generating the program binary and TBF header for each .elf file specified\n\nin the command line, elf2tab will store those files along side the .elf files\n\n(using the `.tbf` extension), and create a [TAB\n\nfile](https://github.com/tock/tock/blob/master/doc/Compilation.md#tock-application-bundle)\n\ncontaining each .tbf file. These .tab files are used by tools like Tockloader to\n\nload Tock apps on to boards.\n\n\n\n\n\nInspecting TABs\n\n---------------\n\n\n\nTockloader can show some details of a .tab file. Simply:\n\n\n\n $ tockloader inspect-tab <tab file name>\n\n\n\n\n", "file_path": "README.md", "rank": 39, "score": 9.676182330921336 }, { "content": " help = \"in bytes\"\n\n )]\n\n pub stack_size: u32,\n\n\n\n #[structopt(\n\n long = \"app-heap\",\n\n name = \"heap-size\",\n\n default_value = \"1024\",\n\n help = \"in bytes\"\n\n )]\n\n pub app_heap_size: u32,\n\n\n\n #[structopt(\n\n long = \"kernel-heap\",\n\n name = \"kernel-heap-size\",\n\n default_value = \"1024\",\n\n help = \"in bytes\"\n\n )]\n\n pub kernel_heap_size: u32,\n\n\n", "file_path": "src/cmdline.rs", "rank": 41, "score": 8.225607480445902 }, { "content": "Compiling elf2tab\n\n-----------------\n\n\n\nWith rustup installed, simply run:\n\n\n\n cargo build\n\n\n\n\n\nelf2tab Details\n\n---------------\n\n\n\nelf2tab tries to be as generic as possible for creating apps that can be\n\nflashed onto a Tock board. It does three main things:\n\n\n\n1. Extracts the various sections in each .elf file and creates a binary file\n\n per .elf from the sections.\n\n2. Prepends a\n\n [Tock Binary Format](https://github.com/tock/tock/blob/master/doc/Compilation.md#tock-binary-format)\n\n header to each binary.\n\n3. 
Creates the TAB file by creating a tar file with each of the Tock binaries.\n\n\n\n\n\n### Creating binary files from .elf files\n\n\n\nelf2tab tries to process .elf files in as generic of a way as possible. To\n\ncreate the binary file, elf2tab iterates through the sections in the .elf file\n\nin their offset order that are writeable, executable, or allocated, have nonzero\n\nlength, and are of type PROGBITS. The binary data for each of these sections\n\nare concatenated into the output file.\n\n\n\nNext, elf2tab appends to the binary all writeable or allocated sections that\n\ncontain the string `.rel` in their name. Because of how these sections are\n\ncreated for PIC code by the linker, it seems these sections have to be special\n\ncased and not grouped into the first step.\n\n\n", "file_path": "README.md", "rank": 42, "score": 7.776242444964899 }, { "content": "# ![elf2tab](http://www.tockos.org/assets/img/elf2tab.svg \"elf2tab Logo\")\n\n\n\n`elf2tab` is a tool that converts [Tock](https://github.com/tock/tock) userland\n\napps from `.elf` files to Tock Application Bundles (TABs or `.tab` files). TABs\n\nare Tock apps that have been compiled for the various architectures that Tock\n\nruns on.\n\n\n\n\n\nUsage\n\n-----\n\n\n\n```\n\nUSAGE:\n\n elf2tab [FLAGS] [--protected-region-size=<protected-region-size>]\n\n [--package-name=<pkg-name>] [--output-file=<filename>] <elf>...\n\n elf2tab [FLAGS] [--protected-region-size=<protected-region-size>] [--package-name=<pkg-name>]\n\n [--output-file=<filename>] [--minimum-ram-size=<min-ram-size>] <elf>...\n\n elf2tab [FLAGS] [--protected-region-size=<protected-region-size>]\n\n [--package-name=<pkg-name>] [--output-file=<filename>]\n\n [--app-heap=<heap-size>] [--kernel-heap=<kernel-heap-size>] [--stack=<stack-size>] <elf>...\n\n\n\nFLAGS:\n\n -h, --help Prints help information\n\n -V, --version Prints version information\n\n -v, --verbose Be verbose\n\n\n\nOPTIONS:\n\n -o, --output-file <filename> output file name [default: TockApp.tab]\n\n --app-heap <heap-size> in bytes [default: 1024]\n\n --kernel-heap <kernel-heap-size> in bytes [default: 1024]\n\n --minimum-ram-size <min-ram-size> in bytes\n\n -p, --package-name <pkg-name> package name\n\n --protected-region-size <protected-region-size> Size of the protected region (including headers)\n\n --stack <stack-size> in bytes [default: 2048]\n\n\n\nARGS:\n\n <elf>... application file(s) to package\n\n```\n\n\n\nFor example, converting a \"blink\" app from a compiled .elf file (for a Cortex-M4\n\ndevice) with this tool would look like:\n\n\n\n $ elf2tab -o blink.tab -n blink --stack 1024 --app-heap 1024 --kernel-heap 1024 cortex-m4.elf\n\n\n\nIt also supports (and encourages!) 
combing .elf files for multiple architectures\n\ninto a single tab:\n\n\n\n $ elf2tab -o blink.tab -n blink --stack 1024 --app-heap 1024 --kernel-heap 1024 cortex-m0.elf cortex-m3.elf cortex-m4.elf\n\n\n\n\n", "file_path": "README.md", "rank": 43, "score": 7.665052592694753 }, { "content": " about = \"Convert Tock userland apps from .elf files to Tock Application Bundles (TABs or .tab files).\",\n\n raw(usage = \"usage()\")\n\n)]\n\n#[structopt(raw(setting = \"structopt::clap::AppSettings::ColoredHelp\"))]\n\npub struct Opt {\n\n #[structopt(short = \"v\", long = \"verbose\", help = \"Be verbose\")]\n\n pub verbose: bool,\n\n\n\n #[structopt(\n\n long = \"minimum-ram-size\",\n\n name = \"min-ram-size\",\n\n help = \"in bytes\",\n\n conflicts_with = \"stack-size\",\n\n conflicts_with = \"heap-size\",\n\n conflicts_with = \"kernel-heap-size\"\n\n )]\n\n pub minimum_stack_size: Option<u32>,\n\n\n\n #[structopt(\n\n long = \"output-file\",\n", "file_path": "src/cmdline.rs", "rank": 44, "score": 6.37553916155842 }, { "content": " short = \"o\",\n\n name = \"filename\",\n\n default_value = \"TockApp.tab\",\n\n parse(from_os_str),\n\n help = \"output file name\"\n\n )]\n\n pub output: PathBuf,\n\n\n\n #[structopt(\n\n long = \"package-name\",\n\n short = \"p\",\n\n name = \"pkg-name\",\n\n help = \"package name\"\n\n )]\n\n pub package_name: Option<String>,\n\n\n\n #[structopt(\n\n long = \"stack\",\n\n name = \"stack-size\",\n\n default_value = \"2048\",\n", "file_path": "src/cmdline.rs", "rank": 45, "score": 5.8157336965366255 }, { "content": " #[cfg(test)]\n\n use structopt::StructOpt;\n\n\n\n #[test]\n\n // elf2tab [FLAGS] [--package-name=<pkg-name>] [--output-file=[<filename>]] <elf>...\n\n fn simple_invocations_succeed() {\n\n {\n\n let args = vec![\"elf2tab\", \"app.elf\"];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_ok());\n\n }\n\n {\n\n let args = vec![\"elf2tab\", \"--package-name\", \"my-pkg\", \"app.elf\"];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_ok());\n\n }\n\n {\n\n let args = vec![\"elf2tab\", \"--output-file\", \"out.tab\", \"app.elf\"];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_ok());\n", "file_path": "src/cmdline.rs", "rank": 46, "score": 4.3747206757548724 }, { "content": "\n\n #[test]\n\n // elf2tab [FLAGS] [--package-name=<pkg-name>] [--output-file=[<filename>]] <elf>...\n\n fn simple_invocations_fail() {\n\n {\n\n let args = vec![\"elf2tab\", \"app.elf\", \"--package-name\"];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_err());\n\n }\n\n }\n\n\n\n #[test]\n\n // elf2tab [FLAGS] [--package-name=<pkg-name>] [--output-file=[<filename>]] [--minimum-stack-size=<min-stack-size>] <elf>...\n\n fn advanced_invocations_succeed() {\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n\n \"my-pkg\",\n\n \"--minimum-ram-size\",\n", "file_path": "src/cmdline.rs", "rank": 47, "score": 4.3490253798367435 }, { "content": " assert!(result.is_ok());\n\n }\n\n }\n\n\n\n #[test]\n\n // elf2tab [FLAGS] [--package-name=<pkg-name>] [--output-file=[<filename>]] [--app-heap[=<heap-size>]]\n\n // [--kernel-heap[=<kernel-heap-size>]] [--stack[=<stack-size>]] <elf>...\"\n\n fn expert_invocations_fail() {\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n\n \"my-pkg\",\n\n \"--kernel-heap\",\n\n \"10\",\n\n \"--minimum-ram-size\",\n\n \"10\",\n\n \"app.elf\",\n\n ];\n\n let result = Opt::from_iter_safe(args.iter());\n", "file_path": 
"src/cmdline.rs", "rank": 48, "score": 4.09703601624422 }, { "content": " \"10\",\n\n \"app.elf\",\n\n ];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_ok());\n\n }\n\n }\n\n\n\n #[test]\n\n // elf2tab [FLAGS] [--package-name=<pkg-name>] [--output-file=[<filename>]] [--minimum-stack-size=<min-stack-size>] <elf>...\n\n fn advanced_invocations_fail() {\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n\n \"my-pkg\",\n\n \"--minimum-ram-size\",\n\n \"app.elf\",\n\n ];\n\n let result = Opt::from_iter_safe(args.iter());\n", "file_path": "src/cmdline.rs", "rank": 49, "score": 3.9175027976719607 }, { "content": " ];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_err());\n\n }\n\n }\n\n\n\n #[test]\n\n // elf2tab [FLAGS] [--package-name=<pkg-name>] [--output-file=[<filename>]] [--app-heap[=<heap-size>]]\n\n // [--kernel-heap[=<kernel-heap-size>]] [--stack[=<stack-size>]] <elf>...\"\n\n fn expert_invocations_succeed() {\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n\n \"my-pkg\",\n\n \"--kernel-heap\",\n\n \"10\",\n\n \"app.elf\",\n\n ];\n\n let result = Opt::from_iter_safe(args.iter());\n", "file_path": "src/cmdline.rs", "rank": 50, "score": 3.8391099127101715 }, { "content": "use std::path::PathBuf;\n\nuse structopt;\n\n\n", "file_path": "src/cmdline.rs", "rank": 51, "score": 3.197041365422733 }, { "content": " \"my-pkg\",\n\n \"--minimum-ram-size\",\n\n \"10\",\n\n \"--stack\",\n\n \"10\",\n\n \"app.elf\",\n\n ];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_err());\n\n }\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n\n \"my-pkg\",\n\n \"--minimum-ram-size\",\n\n \"10\",\n\n \"--kernel-heap\",\n\n \"10\",\n\n \"app.elf\",\n", "file_path": "src/cmdline.rs", "rank": 52, "score": 2.19842692193612 }, { "content": " \"my-pkg\",\n\n \"--stack\",\n\n \"10\",\n\n \"--minimum-ram-size\",\n\n \"10\",\n\n \"app.elf\",\n\n ];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_err());\n\n }\n\n }\n\n}\n", "file_path": "src/cmdline.rs", "rank": 53, "score": 1.940398909176626 }, { "content": " assert!(result.is_err());\n\n }\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n\n \"my-pkg\",\n\n \"--minimum-ram-size\",\n\n \"10\",\n\n \"--app-heap\",\n\n \"10\",\n\n \"app.elf\",\n\n ];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_err());\n\n }\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n", "file_path": "src/cmdline.rs", "rank": 54, "score": 1.6802726675320945 }, { "content": " assert!(result.is_err());\n\n }\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n\n \"my-pkg\",\n\n \"--app-heap\",\n\n \"10\",\n\n \"--minimum-ram-size\",\n\n \"10\",\n\n \"app.elf\",\n\n ];\n\n let result = Opt::from_iter_safe(args.iter());\n\n assert!(result.is_err());\n\n }\n\n {\n\n let args = vec![\n\n \"elf2tab\",\n\n \"--package-name\",\n", "file_path": "src/cmdline.rs", "rank": 55, "score": 1.6802726675320945 } ]
Rust
src/solution/string/str_str.rs
smallswan/leetcode-rust
9b8bb3f91bec613de61f1cfdd203dd9eeda23ebe
pub fn str_str(haystack: String, needle: String) -> i32 { let source = haystack.as_bytes(); let target = needle.as_bytes(); let source_offset = 0usize; let source_count = source.len(); let target_offset = 0usize; let target_count = target.len(); let from_index = 0usize; if target_count == 0usize { return 0; } if target_count > source_count { return -1; } let first = target[target_offset]; let max = source_offset + (source_count - target_count); let mut i = source_offset + from_index; while i <= max { while source[i] != first { i += 1; if i <= max { continue; } else { break; } } if i <= max { let mut j = i + 1; let end = j + target_count - 1; let mut k = target_offset + 1; while j < end && source[j] == target[k] { j += 1; k += 1; } if j == end { return (i - source_offset) as i32; } } i += 1; } -1 } pub fn str_str_v2(haystack: String, needle: String) -> i32 { match haystack.find(&needle) { Some(index) => index as i32, None => -1, } } pub fn str_str_v3(haystack: String, needle: String) -> i32 { let (m, n) = (needle.len(), haystack.len()); if m == 0 { return 0; } let haystack_chars = haystack.chars().collect::<Vec<char>>(); let needle_chars = needle.chars().collect::<Vec<char>>(); let mut pi = vec![0; m]; let (mut i, mut j) = (1, 0); while i < m { while j > 0 && (needle_chars[i] != needle_chars[j]) { j = pi[j - 1]; } if needle_chars[i] == needle_chars[j] { j += 1; } pi[i] = j; i += 1; } let (mut i, mut j) = (0, 0); while i < n { while j > 0 && (haystack_chars[i] != needle_chars[j]) { j = pi[j - 1]; } if haystack_chars[i] == needle_chars[j] { j += 1; } if (j == m) { return (i - m + 1) as i32; } i += 1; } -1 } pub fn find_substring(s: String, words: Vec<String>) -> Vec<i32> { use std::collections::HashMap; let mut bytes = s.chars().collect::<Vec<char>>(); let mut result: Vec<i32> = Vec::new(); if s.is_empty() || words.is_empty() { return result; } let mut map: HashMap<String, i32> = HashMap::new(); let one_word = words[0].len(); let words_len = words.len(); for word in words { let counter = map.entry(word).or_insert(0); *counter += 1; } for i in 0..one_word { let (mut left, mut right, mut count) = (i, i, 0); let mut tmp_map: HashMap<String, i32> = HashMap::new(); while right + one_word <= s.len() { let w: String = bytes.iter().skip(right).take(one_word).collect(); right += one_word; if !map.contains_key(&w.clone()) { count = 0; left = right; tmp_map.clear(); } else { let w_str = w.clone(); let mut counter = tmp_map.entry(w_str).or_insert(0); *counter += 1; count += 1; while tmp_map.get(&w.clone()).unwrap_or(&0) > map.get(&w.clone()).unwrap_or(&0) { let t_w: String = bytes.iter().skip(left).take(one_word).collect(); count -= 1; let t_w_str = t_w.clone(); let mut counter = tmp_map.entry(t_w_str).or_insert(0); *counter -= 1; left += one_word; } if count == words_len { result.push(left as i32); } } } } result } pub fn knuth_morris_pratt(st: String, pat: String) -> Vec<usize> { if st.is_empty() || pat.is_empty() { return vec![]; } let string = st.into_bytes(); let pattern = pat.into_bytes(); let mut partial = vec![0]; for i in 1..pattern.len() { let mut j = partial[i - 1]; while j > 0 && pattern[j] != pattern[i] { j = partial[j - 1]; } partial.push(if pattern[j] == pattern[i] { j + 1 } else { j }); } let mut ret = vec![]; let mut j = 0; for (i, &c) in string.iter().enumerate() { while j > 0 && c != pattern[j] { j = partial[j - 1]; } if c == pattern[j] { j += 1; } if j == pattern.len() { ret.push(i + 1 - j); j = partial[j - 1]; } } ret } pub fn rabin_karp(target: String, pattern: String) -> Vec<usize> 
{ if target.is_empty() || pattern.is_empty() || pattern.len() > target.len() { return vec![]; } let string: String = (&pattern[0..pattern.len()]).to_string(); let hash_pattern = hash(string.clone()); let mut ret = vec![]; for i in 0..(target.len() - pattern.len() + 1) { let s = (&target[i..(i + pattern.len())]).to_string(); let string_hash = hash(s.clone()); if string_hash == hash_pattern && s == string { ret.push(i); } } ret } fn hash(mut s: String) -> u16 { let prime: u16 = 101; let last_char = s .drain(s.len() - 1..) .next() .expect("Failed to get the last char of the string"); let mut res: u16 = 0; for (i, &c) in s.as_bytes().iter().enumerate() { if i == 0 { res = (c as u16 * 256) % prime; } else { res = (((res + c as u16) % 101) * 256) % 101; } } (res + last_char as u16) % prime } use std::cmp::Ordering; pub fn is_subsequence(s: String, t: String) -> bool { let mut s_chars: Vec<char> = s.chars().collect::<Vec<char>>(); let mut t_chars: Vec<char> = t.chars().collect::<Vec<char>>(); let (s_len, t_len) = (s_chars.len(), t_chars.len()); match s_len.cmp(&t_len) { Ordering::Greater => return false, Ordering::Equal => return s == t, Ordering::Less => (), } let (mut i, mut j) = (0, 0); while i < s_len && j < t_len { if s_chars[i] == t_chars[j] { i += 1; } j += 1; } i == s_len } use std::mem; pub fn num_distinct(s: String, t: String) -> i32 { let mut cache = vec![0; t.len() + 1]; let mut temp = vec![0; t.len() + 1]; cache[0] = 1; temp[0] = 1; for c_1 in s.into_bytes() { for (i, c_2) in t.bytes().enumerate() { temp[i + 1] = if c_2 == c_1 { cache[i] + cache[i + 1] } else { cache[i + 1] }; } mem::swap(&mut cache, &mut temp); } cache[t.len()] } pub fn repeated_substring_pattern(s: String) -> bool { let mut t = String::with_capacity(s.len() * 2 - 2); t.push_str(&s[1..]); t.push_str(&s[..s.len() - 1]); t.contains(&s) } #[cfg(test)] mod tests { use super::*; #[test] fn sub_string() { let haystack = String::from("aaacaaab"); let needle = String::from("aaab"); dbg!(str_str(haystack, needle)); let index = knuth_morris_pratt("Rust is a programming language empowering everyone to build reliable and efficient software".to_string(),"everyone".to_string()); println!("{:?}", index); dbg!(is_subsequence("acb".to_string(), "ahbgdc".to_string())); } mod kmp { use super::*; #[test] fn each_letter_matches() { let index = knuth_morris_pratt("aaa".to_string(), "a".to_string()); assert_eq!(index, vec![0, 1, 2]); } #[test] fn a_few_separate_matches() { let index = knuth_morris_pratt("abababa".to_string(), "ab".to_string()); assert_eq!(index, vec![0, 2, 4]); } #[test] fn one_match() { let index = knuth_morris_pratt("ABC ABCDAB ABCDABCDABDE".to_string(), "ABCDABD".to_string()); assert_eq!(index, vec![15]); } #[test] fn lots_of_matches() { let index = knuth_morris_pratt("aaabaabaaaaa".to_string(), "aa".to_string()); assert_eq!(index, vec![0, 1, 4, 7, 8, 9, 10]); } #[test] fn lots_of_intricate_matches() { let index = knuth_morris_pratt("ababababa".to_string(), "aba".to_string()); assert_eq!(index, vec![0, 2, 4, 6]); } #[test] fn not_found0() { let index = knuth_morris_pratt("abcde".to_string(), "f".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found1() { let index = knuth_morris_pratt("abcde".to_string(), "ac".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found2() { let index = knuth_morris_pratt("ababab".to_string(), "bababa".to_string()); assert_eq!(index, vec![]); } #[test] fn empty_string() { let index = knuth_morris_pratt("".to_string(), "abcdef".to_string()); assert_eq!(index, vec![]); } } 
mod rabin_karp { use super::*; #[test] fn hi_hash() { let hash_result = hash("hi".to_string()); assert_eq!(hash_result, 65); } #[test] fn abr_hash() { let hash_result = hash("abr".to_string()); assert_eq!(hash_result, 4); } #[test] fn bra_hash() { let hash_result = hash("bra".to_string()); assert_eq!(hash_result, 30); } #[test] fn each_letter_matches() { let index = rabin_karp("aaa".to_string(), "a".to_string()); assert_eq!(index, vec![0, 1, 2]); } #[test] fn a_few_separate_matches() { let index = rabin_karp("abababa".to_string(), "ab".to_string()); assert_eq!(index, vec![0, 2, 4]); } #[test] fn one_match() { let index = rabin_karp("ABC ABCDAB ABCDABCDABDE".to_string(), "ABCDABD".to_string()); assert_eq!(index, vec![15]); } #[test] fn lots_of_matches() { let index = rabin_karp("aaabaabaaaaa".to_string(), "aa".to_string()); assert_eq!(index, vec![0, 1, 4, 7, 8, 9, 10]); } #[test] fn lots_of_intricate_matches() { let index = rabin_karp("ababababa".to_string(), "aba".to_string()); assert_eq!(index, vec![0, 2, 4, 6]); } #[test] fn not_found0() { let index = rabin_karp("abcde".to_string(), "f".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found1() { let index = rabin_karp("abcde".to_string(), "ac".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found2() { let index = rabin_karp("ababab".to_string(), "bababa".to_string()); assert_eq!(index, vec![]); } #[test] fn empty_string() { let index = rabin_karp("".to_string(), "abcdef".to_string()); assert_eq!(index, vec![]); } } }
pub fn str_str(haystack: String, needle: String) -> i32 { let source = haystack.as_bytes(); let target = needle.as_bytes(); let source_offset = 0usize; let source_count = source.len(); let target_offset = 0usize; let target_count = target.len(); let from_index = 0usize; if target_count == 0usize { return 0; } if target_count > source_count { return -1; } let first = target[target_offset]; let max = source_offset + (source_count - target_count); let mut i = source_offset + from_index; while i <= max { while source[i] != first { i += 1; if i <= max { continue; } else { break; } } if i <= max { let mut j = i + 1; let end = j + target_count - 1; let mut k = target_offset + 1; while j < end && source[j] == target[k] { j += 1; k += 1; } if j == end { return (i - source_offset) as i32; } } i += 1; } -1 } pub fn str_str_v2(haystack: String, needle: String) -> i32 { match haystack.find(&needle) { Some(index) => index as i32, None => -1, } } pub fn str_str_v3(haystack: String, needle: String) -> i32 { let (m, n) = (needle.len(), haystack.len()); if m == 0 { return 0; } let haystack_chars = haystack.chars().collect::<Vec<char>>(); let needle_chars = needle.chars().collect::<Vec<char>>(); let mut pi = vec![0; m]; let (mut i, mut j) = (1, 0); while i < m { while j > 0 && (needle_chars[i] != needle_chars[j]) { j = pi[j - 1]; } if needle_chars[i] == needle_chars[j] { j += 1; } pi[i] = j; i += 1; } let (mut i, mut j) = (0, 0); while i < n { while j > 0 && (haystack_chars[i] != needle_chars[j]) { j = pi[j - 1]; } if haystack_chars[i] == needle_chars[j] { j += 1; } if (j == m) { return (i - m + 1) as i32; } i += 1; } -1 } pub fn find_substring(s: String, words: Vec<String>) -> Vec<i32> { use std::collections::HashMap; let mut bytes = s.chars().collect::<Vec<char>>(); let mut result: Vec<i32> = Vec::new(); if s.is_empty() || words.is_empty() { return result; } let mut map: HashMap<String, i32> = HashMap::new(); let one_word = words[0].len(); let words_len = words.len(); for word in words { let counter = map.entry(word).or_insert(0); *counter += 1; } for i in 0..one_word { let (mut left, mut right, mut count) = (i, i, 0); let mut tmp_map: HashMap<String, i32> = HashMap::new(); while right + one_word <= s.len() { let w: String = bytes.iter().skip(right).take(one_word).collect(); right += one_word; if !map.contains_key(&w.clone()) { count = 0; left = right; tmp_map.clear(); } else { let w_str = w.clone(); let mut counter = tmp_map.entry(w_str).or_insert(0); *counter += 1; count += 1; while tmp_map.get(&w.clone()).unwrap_or(&0) > map.get(&w.clone()).unwrap_or(&0) { let t_w: String = bytes.iter().skip(left).take(one_word).collect(); count -= 1; let t_w_str = t_w.clone(); let mut counter = tmp_map.entry(t_w_str).or_insert(0); *counter -= 1; left += one_word; } if count == words_len { result.push(left as i32); } } } } result } pub fn knuth_morris_pratt(st: String, pat: String) -> Vec<usize> { if st.is_empty() || pat.
pub fn rabin_karp(target: String, pattern: String) -> Vec<usize> { if target.is_empty() || pattern.is_empty() || pattern.len() > target.len() { return vec![]; } let string: String = (&pattern[0..pattern.len()]).to_string(); let hash_pattern = hash(string.clone()); let mut ret = vec![]; for i in 0..(target.len() - pattern.len() + 1) { let s = (&target[i..(i + pattern.len())]).to_string(); let string_hash = hash(s.clone()); if string_hash == hash_pattern && s == string { ret.push(i); } } ret } fn hash(mut s: String) -> u16 { let prime: u16 = 101; let last_char = s .drain(s.len() - 1..) .next() .expect("Failed to get the last char of the string"); let mut res: u16 = 0; for (i, &c) in s.as_bytes().iter().enumerate() { if i == 0 { res = (c as u16 * 256) % prime; } else { res = (((res + c as u16) % 101) * 256) % 101; } } (res + last_char as u16) % prime } use std::cmp::Ordering; pub fn is_subsequence(s: String, t: String) -> bool { let mut s_chars: Vec<char> = s.chars().collect::<Vec<char>>(); let mut t_chars: Vec<char> = t.chars().collect::<Vec<char>>(); let (s_len, t_len) = (s_chars.len(), t_chars.len()); match s_len.cmp(&t_len) { Ordering::Greater => return false, Ordering::Equal => return s == t, Ordering::Less => (), } let (mut i, mut j) = (0, 0); while i < s_len && j < t_len { if s_chars[i] == t_chars[j] { i += 1; } j += 1; } i == s_len } use std::mem; pub fn num_distinct(s: String, t: String) -> i32 { let mut cache = vec![0; t.len() + 1]; let mut temp = vec![0; t.len() + 1]; cache[0] = 1; temp[0] = 1; for c_1 in s.into_bytes() { for (i, c_2) in t.bytes().enumerate() { temp[i + 1] = if c_2 == c_1 { cache[i] + cache[i + 1] } else { cache[i + 1] }; } mem::swap(&mut cache, &mut temp); } cache[t.len()] } pub fn repeated_substring_pattern(s: String) -> bool { let mut t = String::with_capacity(s.len() * 2 - 2); t.push_str(&s[1..]); t.push_str(&s[..s.len() - 1]); t.contains(&s) } #[cfg(test)] mod tests { use super::*; #[test] fn sub_string() { let haystack = String::from("aaacaaab"); let needle = String::from("aaab"); dbg!(str_str(haystack, needle)); let index = knuth_morris_pratt("Rust is a programming language empowering everyone to build reliable and efficient software".to_string(),"everyone".to_string()); println!("{:?}", index); dbg!(is_subsequence("acb".to_string(), "ahbgdc".to_string())); } mod kmp { use super::*; #[test] fn each_letter_matches() { let index = knuth_morris_pratt("aaa".to_string(), "a".to_string()); assert_eq!(index, vec![0, 1, 2]); } #[test] fn a_few_separate_matches() { let index = knuth_morris_pratt("abababa".to_string(), "ab".to_string()); assert_eq!(index, vec![0, 2, 4]); } #[test] fn one_match() { let index = knuth_morris_pratt("ABC ABCDAB ABCDABCDABDE".to_string(), "ABCDABD".to_string()); assert_eq!(index, vec![15]); } #[test] fn lots_of_matches() { let index = knuth_morris_pratt("aaabaabaaaaa".to_string(), "aa".to_string()); assert_eq!(index, vec![0, 1, 4, 7, 8, 9, 10]); } #[test] fn lots_of_intricate_matches() { let index = knuth_morris_pratt("ababababa".to_string(), "aba".to_string()); assert_eq!(index, vec![0, 2, 4, 6]); } #[test] fn not_found0() { let index = knuth_morris_pratt("abcde".to_string(), "f".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found1() { let index = knuth_morris_pratt("abcde".to_string(), "ac".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found2() { let index = knuth_morris_pratt("ababab".to_string(), "bababa".to_string()); assert_eq!(index, vec![]); } #[test] fn empty_string() { let index = 
knuth_morris_pratt("".to_string(), "abcdef".to_string()); assert_eq!(index, vec![]); } } mod rabin_karp { use super::*; #[test] fn hi_hash() { let hash_result = hash("hi".to_string()); assert_eq!(hash_result, 65); } #[test] fn abr_hash() { let hash_result = hash("abr".to_string()); assert_eq!(hash_result, 4); } #[test] fn bra_hash() { let hash_result = hash("bra".to_string()); assert_eq!(hash_result, 30); } #[test] fn each_letter_matches() { let index = rabin_karp("aaa".to_string(), "a".to_string()); assert_eq!(index, vec![0, 1, 2]); } #[test] fn a_few_separate_matches() { let index = rabin_karp("abababa".to_string(), "ab".to_string()); assert_eq!(index, vec![0, 2, 4]); } #[test] fn one_match() { let index = rabin_karp("ABC ABCDAB ABCDABCDABDE".to_string(), "ABCDABD".to_string()); assert_eq!(index, vec![15]); } #[test] fn lots_of_matches() { let index = rabin_karp("aaabaabaaaaa".to_string(), "aa".to_string()); assert_eq!(index, vec![0, 1, 4, 7, 8, 9, 10]); } #[test] fn lots_of_intricate_matches() { let index = rabin_karp("ababababa".to_string(), "aba".to_string()); assert_eq!(index, vec![0, 2, 4, 6]); } #[test] fn not_found0() { let index = rabin_karp("abcde".to_string(), "f".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found1() { let index = rabin_karp("abcde".to_string(), "ac".to_string()); assert_eq!(index, vec![]); } #[test] fn not_found2() { let index = rabin_karp("ababab".to_string(), "bababa".to_string()); assert_eq!(index, vec![]); } #[test] fn empty_string() { let index = rabin_karp("".to_string(), "abcdef".to_string()); assert_eq!(index, vec![]); } } }
is_empty() { return vec![]; } let string = st.into_bytes(); let pattern = pat.into_bytes(); let mut partial = vec![0]; for i in 1..pattern.len() { let mut j = partial[i - 1]; while j > 0 && pattern[j] != pattern[i] { j = partial[j - 1]; } partial.push(if pattern[j] == pattern[i] { j + 1 } else { j }); } let mut ret = vec![]; let mut j = 0; for (i, &c) in string.iter().enumerate() { while j > 0 && c != pattern[j] { j = partial[j - 1]; } if c == pattern[j] { j += 1; } if j == pattern.len() { ret.push(i + 1 - j); j = partial[j - 1]; } } ret }
function_block-function_prefixed
[ { "content": "/// 剑指 Offer 58 - II. 左旋转字符串 https://leetcode-cn.com/problems/zuo-xuan-zhuan-zi-fu-chuan-lcof/\n\npub fn reverse_left_words(s: String, n: i32) -> String {\n\n let mut chars: Vec<char> = s.chars().collect();\n\n chars.rotate_left(n as usize);\n\n chars.iter().collect()\n\n}\n\n\n\nuse std::collections::HashSet;\n", "file_path": "src/lcof.rs", "rank": 0, "score": 377802.815445433 }, { "content": "/// 762. 二进制表示中质数个计算置位 https://leetcode-cn.com/problems/prime-number-of-set-bits-in-binary-representation/\n\npub fn count_prime_set_bits(left: i32, right: i32) -> i32 {\n\n let mut count = 0;\n\n for num in left..=right {\n\n match num.count_ones() {\n\n 2 | 3 | 5 | 7 | 11 | 13 | 17 | 19 | 23 | 29 | 31 => {\n\n count += 1;\n\n }\n\n _ => (),\n\n }\n\n }\n\n count\n\n}\n\n\n", "file_path": "src/solution/math/operations.rs", "rank": 1, "score": 366005.7013626207 }, { "content": "/// 力扣(201. 数字范围按位与) https://leetcode-cn.com/problems/bitwise-and-of-numbers-range/\n\n/// 我们可以将问题重新表述为:给定两个整数,我们要找到它们对应的二进制字符串的公共前缀。\n\n/// 方法1:位移\n\npub fn range_bitwise_and(left: i32, right: i32) -> i32 {\n\n let mut left = left;\n\n let mut right = right;\n\n let mut shift = 0;\n\n while left < right {\n\n left >>= 1;\n\n right >>= 1;\n\n shift += 1;\n\n }\n\n left << shift\n\n}\n\n\n", "file_path": "src/solution/math/bitwise/and.rs", "rank": 2, "score": 336744.44641208026 }, { "content": "/// 力扣(38. 外观数列) https://leetcode-cn.com/problems/count-and-say/\n\npub fn count_and_say(n: i32) -> String {\n\n let mut s = \"1\".to_string();\n\n for _ in 0..n - 1 {\n\n let mut ret = \"\".to_string();\n\n let mut count = 0;\n\n // use peekable to check next char\n\n let mut it = s.chars().peekable();\n\n while let Some(c) = it.next() {\n\n match it.peek() {\n\n Some(next) if next == &c => count += 1,\n\n _ => {\n\n ret.push_str(&(count + 1).to_string());\n\n ret.push(c);\n\n count = 0;\n\n }\n\n }\n\n }\n\n s = ret;\n\n }\n\n s\n\n}\n\n\n", "file_path": "src/medium.rs", "rank": 3, "score": 336366.1758364931 }, { "content": "fn going_up(first: i32, rest: &[i32], up_length: i32, result: &mut i32) {\n\n if let Some((&second, rest)) = rest.split_first() {\n\n match second.cmp(&first) {\n\n Ordering::Less => going_down(second, rest, up_length, 1, result),\n\n Ordering::Equal => {\n\n *result += triangular(up_length) + up_length + 1;\n\n\n\n going_up(second, rest, 0, result);\n\n }\n\n Ordering::Greater => going_up(second, rest, up_length + 1, result),\n\n }\n\n } else {\n\n *result += triangular(up_length) + up_length + 1;\n\n }\n\n}\n\n\n", "file_path": "src/solution/algorithms/greedy/greedy.rs", "rank": 4, "score": 335615.8764068553 }, { "content": "/// 力扣(201. 数字范围按位与)\n\n/// 方法2:Brian Kernighan 算法\n\npub fn range_bitwise_and_v2(left: i32, right: i32) -> i32 {\n\n let mut right = right;\n\n while left < right {\n\n right &= (right - 1);\n\n }\n\n right\n\n}\n\n\n", "file_path": "src/solution/math/bitwise/and.rs", "rank": 5, "score": 333538.55881376984 }, { "content": "fn less_than_thousand(num: i32, result: &mut String) {\n\n const SINGLES: [&str; 19] = [\n\n \"One\",\n\n \"Two\",\n\n \"Three\",\n\n \"Four\",\n\n \"Five\",\n\n \"Six\",\n\n \"Seven\",\n\n \"Eight\",\n\n \"Nine\",\n\n \"Ten\",\n\n \"Eleven\",\n\n \"Twelve\",\n\n \"Thirteen\",\n\n \"Fourteen\",\n\n \"Fifteen\",\n\n \"Sixteen\",\n\n \"Seventeen\",\n\n \"Eighteen\",\n", "file_path": "src/solution/math/operations.rs", "rank": 6, "score": 332677.0319454641 }, { "content": "/// 招商银行-01. 
文本编辑程序设计 https://leetcode-cn.com/contest/cmbchina-2022spring/problems/fWcPGC/\n\npub fn delete_text(article: String, index: i32) -> String {\n\n let mut bytes: Vec<u8> = article.bytes().collect();\n\n let len = bytes.len();\n\n let index = index as usize;\n\n if bytes[index] != b' ' {\n\n let (mut left, mut right) = (index, index);\n\n while left > 0 && bytes[left] != b' ' {\n\n bytes[left] = b'-';\n\n left -= 1;\n\n }\n\n\n\n if bytes[left] != b' ' {\n\n bytes[left] = b'-';\n\n }\n\n\n\n while right < len && bytes[right] != b' ' {\n\n bytes[right] = b'-';\n\n right += 1;\n\n }\n\n\n\n let new_bytes: Vec<u8> = bytes.into_iter().filter(|&c| c != b'-').collect();\n\n let new_article = String::from_utf8(new_bytes).unwrap();\n\n\n\n return new_article.split_whitespace().collect::<Vec<_>>().join(\" \");\n\n }\n\n\n\n article\n\n}\n\n\n", "file_path": "src/contest/spring2022.rs", "rank": 7, "score": 332215.69345909345 }, { "content": "/// 6051. 统计是给定字符串前缀的字符串数目 https://leetcode-cn.com/problems/count-prefixes-of-a-given-string/\n\npub fn count_prefixes(words: Vec<String>, s: String) -> i32 {\n\n let s_bytes = s.as_bytes();\n\n words\n\n .iter()\n\n .filter(|word| s_bytes.starts_with(word.as_bytes()))\n\n .count() as i32\n\n}\n\n\n", "file_path": "src/contest/spring2022.rs", "rank": 8, "score": 331811.7642935744 }, { "content": "fn going_down(first: i32, rest: &[i32], up_length: i32, down_length: i32, result: &mut i32) {\n\n if let Some((&second, rest)) = rest.split_first() {\n\n match second.cmp(&first) {\n\n Ordering::Less => going_down(second, rest, up_length, down_length + 1, result),\n\n Ordering::Equal => {\n\n *result += triangular_2(up_length, down_length) + up_length.max(down_length) + 1;\n\n\n\n going_up(second, rest, 0, result);\n\n }\n\n Ordering::Greater => {\n\n *result += triangular_2(up_length, down_length) + up_length.max(down_length);\n\n\n\n going_up(second, rest, 1, result);\n\n }\n\n }\n\n } else {\n\n *result += triangular_2(up_length, down_length) + up_length.max(down_length) + 1;\n\n }\n\n}\n\n\n", "file_path": "src/solution/algorithms/greedy/greedy.rs", "rank": 9, "score": 326811.8820685822 }, { "content": "/// 728. 自除数 https://leetcode-cn.com/problems/self-dividing-numbers/\n\npub fn self_dividing_numbers(left: i32, right: i32) -> Vec<i32> {\n\n let mut result: Vec<i32> = vec![];\n\n fn is_self_dividing_number(num: i32) -> bool {\n\n let mut mut_num = num;\n\n while mut_num > 0 {\n\n let rem = mut_num % 10;\n\n if rem == 0 || num % rem != 0 {\n\n return false;\n\n }\n\n mut_num /= 10;\n\n }\n\n true\n\n }\n\n\n\n for num in left..=right {\n\n if is_self_dividing_number(num) {\n\n result.push(num);\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/solution/math/numbers.rs", "rank": 10, "score": 326647.52283720917 }, { "content": "// TODO 600\n\n/// 1208. 尽可能使字符串相等 https://leetcode-cn.com/problems/get-equal-substrings-within-budget/\n\npub fn equal_substring(s: String, t: String, max_cost: i32) -> i32 {\n\n use std::cmp::max;\n\n let (mut left, mut right, mut cost, mut result) = (0, 0, 0, 0);\n\n let len = s.len();\n\n let s_bytes = s.as_bytes();\n\n let t_bytes = t.as_bytes();\n\n while right < len {\n\n cost += (s_bytes[right] as i32 - t_bytes[right] as i32).abs();\n\n right += 1;\n\n while cost > max_cost {\n\n cost -= (s_bytes[left] as i32 - t_bytes[left] as i32).abs();\n\n left += 1;\n\n }\n\n result = max(result, right - left);\n\n }\n\n result as i32\n\n}\n\n\n", "file_path": "src/medium.rs", "rank": 11, "score": 308533.9027455775 }, { "content": "/// 1208. 
尽可能使字符串相等\n\npub fn equal_substring_v2(s: String, t: String, max_cost: i32) -> i32 {\n\n use std::cmp::max;\n\n let (mut left, mut right, mut cost, mut result) = (0, 0, 0, 0);\n\n let len = s.len();\n\n\n\n let s_bytes = s.as_bytes();\n\n let t_bytes = t.as_bytes();\n\n let mut diff: Vec<i32> = vec![0; len];\n\n for idx in 0..len {\n\n diff[idx] = (s_bytes[idx] as i32 - t_bytes[idx] as i32).abs();\n\n }\n\n while right < len {\n\n cost += diff[right];\n\n right += 1;\n\n while cost > max_cost {\n\n cost -= diff[left];\n\n left += 1;\n\n }\n\n result = max(result, right - left);\n\n }\n", "file_path": "src/medium.rs", "rank": 12, "score": 305254.2292865172 }, { "content": "/// 520. 检测大写字母 https://leetcode-cn.com/problems/detect-capital/\n\npub fn detect_capital_use(word: String) -> bool {\n\n let mut word = word.chars();\n\n let first = word.next();\n\n if first.is_none() {\n\n return true;\n\n }\n\n let first = first.unwrap();\n\n\n\n if let Some(second) = word.next() {\n\n let res = word.try_fold(second, move |sd, x| {\n\n if sd.is_lowercase() && x.is_lowercase() {\n\n return Ok(sd);\n\n }\n\n\n\n if sd.is_uppercase() && x.is_uppercase() {\n\n return Ok(sd);\n\n }\n\n\n\n Err(())\n\n });\n", "file_path": "src/simple.rs", "rank": 13, "score": 303551.48076307576 }, { "content": "/// 541. 反转字符串 II https://leetcode-cn.com/problems/reverse-string-ii/\n\npub fn reverse_str(s: String, k: i32) -> String {\n\n let mut s = s.into_bytes();\n\n s.chunks_mut(k as usize).step_by(2).for_each(<[_]>::reverse);\n\n String::from_utf8(s).unwrap()\n\n}\n\n\n", "file_path": "src/solution/string/reverse.rs", "rank": 14, "score": 301605.9849382882 }, { "content": "/// 434. 字符串中的单词数 https://leetcode-cn.com/problems/number-of-segments-in-a-string/\n\npub fn count_segments(s: String) -> i32 {\n\n let mut result = 0;\n\n let mut iter = s.bytes();\n\n\n\n while let Some(c) = iter.next() {\n\n if c != b' ' {\n\n result += 1;\n\n\n\n loop {\n\n if let Some(c) = iter.next() {\n\n if c == b' ' {\n\n break;\n\n }\n\n } else {\n\n return result;\n\n }\n\n }\n\n }\n\n }\n\n\n\n result\n\n}\n", "file_path": "src/solution/string/split.rs", "rank": 15, "score": 301547.8413927234 }, { "content": "/// 剑指 Offer 57 - II. 和为s的连续正数序列 https://leetcode-cn.com/problems/he-wei-sde-lian-xu-zheng-shu-xu-lie-lcof/\n\n/// 方法1:数学公式法,算法来源:https://leetcode-cn.com/problems/he-wei-sde-lian-xu-zheng-shu-xu-lie-lcof/solution/shu-ju-jie-gou-he-suan-fa-hua-dong-chuan-74eb/\n\n/// start,start+1,start+2,...,start+(n-1) = n * start + n*(n-1)/2 = target\n\n/// 令 n*start = total,则 total = target - n * (n - 1) / 2\n\npub fn find_continuous_sequence(target: i32) -> Vec<Vec<i32>> {\n\n let mut result = vec![];\n\n let mut n = 2;\n\n loop {\n\n let total = target - n * (n - 1) / 2;\n\n if total <= 0 {\n\n break;\n\n }\n\n if total % n == 0 {\n\n let mut arr = vec![];\n\n let start = total / n;\n\n for i in 0..n {\n\n arr.push(start + i);\n\n }\n\n result.push(arr);\n\n }\n\n n += 1;\n\n }\n\n\n\n result.reverse();\n\n result\n\n}\n\n\n", "file_path": "src/lcof.rs", "rank": 16, "score": 296730.47332524456 }, { "content": "/// 力扣(387. 
字符串中的第一个唯一字符) https://leetcode-cn.com/problems/first-unique-character-in-a-string/\n\n/// 方法一:使用哈希表存储频数\n\npub fn first_uniq_char(s: String) -> i32 {\n\n let chars: Vec<char> = s.chars().collect();\n\n let len = chars.len();\n\n // 统计各个字符出现的次数\n\n use std::collections::HashMap;\n\n let mut counts_map = HashMap::<char, i32>::new();\n\n for ch in &chars {\n\n match counts_map.get_mut(ch) {\n\n Some(count) => *count += 1,\n\n None => {\n\n counts_map.insert(*ch, 1);\n\n }\n\n };\n\n }\n\n\n\n for (i, &ch) in chars.iter().enumerate() {\n\n if let Some(&count) = counts_map.get(&ch) {\n\n if count == 1 {\n\n return i as i32;\n\n }\n\n }\n\n }\n\n\n\n -1\n\n}\n\n\n", "file_path": "src/simple.rs", "rank": 17, "score": 295761.9434569088 }, { "content": "/// 力扣(58. 最后一个单词的长度) https://leetcode-cn.com/problems/length-of-last-word/submissions/\n\n/// 方法1:rsplitn()\n\npub fn length_of_last_word(s: String) -> i32 {\n\n let s_trim = s.trim_end();\n\n let words: Vec<&str> = s_trim.rsplitn(2, ' ').collect();\n\n words[0].len() as i32\n\n}\n\n\n", "file_path": "src/simple.rs", "rank": 18, "score": 295750.24371417164 }, { "content": "/// 力扣(189. 旋转数组) https://leetcode-cn.com/problems/rotate-array/\n\npub fn rotate(nums: &mut Vec<i32>, k: i32) {\n\n let len = nums.len();\n\n if len <= 1 {\n\n return;\n\n }\n\n let offset = (k as usize) % len;\n\n if offset == 0 {\n\n return;\n\n }\n\n\n\n //三次翻转\n\n nums.reverse();\n\n\n\n for i in 0..offset / 2 {\n\n nums.swap(i, offset - i - 1);\n\n }\n\n\n\n for j in 0..(len - offset) / 2 {\n\n nums.swap(j + offset, len - j - 1);\n\n }\n\n}\n\n\n", "file_path": "src/medium.rs", "rank": 19, "score": 295613.7768232415 }, { "content": "/// 力扣(387. 字符串中的第一个唯一字符)\n\n/// 方法二:使用哈希表存储索引\n\npub fn first_uniq_char_v2(s: String) -> i32 {\n\n let chars: Vec<char> = s.chars().collect();\n\n let len = chars.len();\n\n // 存储首次出现的下标\n\n use std::collections::HashMap;\n\n let mut indexs_map = HashMap::<char, i32>::new();\n\n for (i, &ch) in chars.iter().enumerate() {\n\n match indexs_map.get_mut(&ch) {\n\n Some(index) => {\n\n // 再次出现则将下标设置为-1\n\n *index = -1;\n\n }\n\n None => {\n\n // 首次出现存储下标\n\n indexs_map.insert(ch, i as i32);\n\n }\n\n };\n\n }\n\n\n\n // 下标不为-1且最小的值即为答案\n", "file_path": "src/simple.rs", "rank": 23, "score": 291748.65877127514 }, { "content": "/// 力扣(387. 字符串中的第一个唯一字符)\n\n/// 方法三:队列\n\npub fn first_uniq_char_v3(s: String) -> i32 {\n\n let chars: Vec<char> = s.chars().collect();\n\n let len = chars.len();\n\n // 存储首次出现的下标\n\n use std::collections::HashMap;\n\n use std::collections::VecDeque;\n\n // 存放各个字符及其首次出现的位置下标的元组\n\n let mut queue = VecDeque::<(char, i32)>::new();\n\n let mut indexs_map = HashMap::<char, i32>::new();\n\n for (i, &ch) in chars.iter().enumerate() {\n\n match indexs_map.get_mut(&ch) {\n\n Some(index) => {\n\n // 再次出现则将下标设置为-1\n\n *index = -1;\n\n // 只保留第一次出现的\n\n queue.retain(|x| x.0 != ch);\n\n }\n\n None => {\n\n // 首次出现存储下标\n\n indexs_map.insert(ch, i as i32);\n", "file_path": "src/simple.rs", "rank": 24, "score": 291748.65877127514 }, { "content": "/// 力扣(58. 
最后一个单词的长度)\n\n/// 方法2:双指针\n\npub fn length_of_last_word_v2(s: String) -> i32 {\n\n let chars: Vec<char> = s.chars().collect();\n\n let mut end = (chars.len() - 1) as i32;\n\n while end >= 0 && chars[end as usize] == ' ' {\n\n end -= 1;\n\n }\n\n if end < 0 {\n\n return 0;\n\n }\n\n\n\n let mut start = end;\n\n while start >= 0 && chars[start as usize] != ' ' {\n\n start -= 1;\n\n }\n\n\n\n end - start\n\n}\n\n\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n", "file_path": "src/simple.rs", "rank": 25, "score": 291741.2422628336 }, { "content": "/// 598. 范围求和 II https://leetcode-cn.com/problems/range-addition-ii/\n\npub fn max_count(m: i32, n: i32, ops: Vec<Vec<i32>>) -> i32 {\n\n let (mut min_a, mut min_b) = (m, n);\n\n\n\n for op in ops.iter() {\n\n min_a = min(op[0], min_a);\n\n min_b = min(op[1], min_b);\n\n }\n\n\n\n min_a * min_b\n\n}\n\n\n", "file_path": "src/solution/math/operations.rs", "rank": 26, "score": 289055.95693141053 }, { "content": "/// 273. 整数转换英文表示 https://leetcode-cn.com/problems/integer-to-english-words/\n\npub fn number_to_words(num: i32) -> String {\n\n let mut result = String::new();\n\n\n\n if num == 0 {\n\n result.push_str(\"Zero\");\n\n } else {\n\n let mut num = num;\n\n\n\n for (name, base) in [\n\n (\"Billion\", 1_000_000_000),\n\n (\"Million\", 1_000_000),\n\n (\"Thousand\", 1_000),\n\n ] {\n\n if num >= base {\n\n less_than_thousand(num / base, &mut result);\n\n result.push(' ');\n\n result.push_str(name);\n\n\n\n num %= base;\n\n\n", "file_path": "src/solution/math/operations.rs", "rank": 27, "score": 287906.7514737475 }, { "content": "/// 力扣(88. 合并两个有序数组) https://leetcode-cn.com/problems/merge-sorted-array/\n\n/// 面试题 10.01. 合并排序的数组 https://leetcode-cn.com/problems/sorted-merge-lcci/\n\npub fn merge(nums1: &mut Vec<i32>, m: i32, nums2: &mut Vec<i32>, n: i32) {\n\n let mut m = m;\n\n let mut index: usize = 0;\n\n for &item in nums2.iter().take(n as usize) {\n\n while (index < m as usize) && nums1[index] <= item {\n\n index += 1;\n\n }\n\n\n\n if index < (m as usize) {\n\n for j in (index + 1..nums1.len()).rev() {\n\n nums1[j] = nums1[j - 1];\n\n }\n\n m += 1;\n\n }\n\n nums1[index] = item;\n\n index += 1;\n\n }\n\n}\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 28, "score": 287035.51331941376 }, { "content": "/// 力扣(88. 合并两个有序数组)\n\n/// 双指针/从后往前\n\npub fn merge_v2(nums1: &mut Vec<i32>, m: i32, nums2: &mut Vec<i32>, n: i32) {\n\n let mut p1 = m - 1;\n\n let mut p2 = n - 1;\n\n let mut p = m + n - 1;\n\n while p1 >= 0 && p2 >= 0 {\n\n if nums1[p1 as usize] < nums2[p2 as usize] {\n\n nums1[p as usize] = nums2[p2 as usize];\n\n p2 -= 1;\n\n } else {\n\n nums1[p as usize] = nums1[p1 as usize];\n\n p1 -= 1;\n\n }\n\n p -= 1;\n\n }\n\n nums1[..((p2 + 1) as usize)].clone_from_slice(&nums2[..((p2 + 1) as usize)]);\n\n}\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 30, "score": 285179.2252255564 }, { "content": "/// 189. 轮转数组 https://leetcode-cn.com/problems/rotate-array/\n\npub fn rotate(nums: &mut Vec<i32>, k: i32) {\n\n let len = nums.len();\n\n nums.rotate_right(k as usize % len)\n\n}\n\n\n", "file_path": "src/solution/data_structures/array/operations.rs", "rank": 31, "score": 284688.085558168 }, { "content": "/// 力扣(412. 
Fizz Buzz) https://leetcode-cn.com/problems/fizz-buzz/\n\npub fn fizz_buzz(n: i32) -> Vec<String> {\n\n let len = n as usize;\n\n let mut result = Vec::<String>::with_capacity(len);\n\n for i in 1..=len {\n\n let divisible_by_3 = i % 3 == 0;\n\n let divisible_by_5 = i % 5 == 0;\n\n if divisible_by_3 && divisible_by_5 {\n\n result.push(\"FizzBuzz\".to_string());\n\n } else if divisible_by_3 {\n\n result.push(\"Fizz\".to_string());\n\n } else if divisible_by_5 {\n\n result.push(\"Buzz\".to_string());\n\n } else {\n\n result.push(i.to_string());\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/solution/math/operations.rs", "rank": 32, "score": 281282.3190458439 }, { "content": "/// 力扣(412. Fizz Buzz)\n\npub fn fizz_buzz_v2(n: i32) -> Vec<String> {\n\n let len = n as usize;\n\n let mut result = Vec::<String>::with_capacity(len);\n\n for i in 1..=len {\n\n if i % 3 == 0 {\n\n if i % 5 == 0 {\n\n result.push(\"FizzBuzz\".to_string());\n\n } else {\n\n result.push(\"Fizz\".to_string());\n\n }\n\n } else if i % 5 == 0 {\n\n result.push(\"Buzz\".to_string());\n\n } else {\n\n result.push(i.to_string());\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/solution/math/operations.rs", "rank": 33, "score": 278384.91982523736 }, { "content": "fn max_path_sum_helper(root: Option<&RefCell<TreeNode>>, result: &mut i32) -> i32 {\n\n root.map(RefCell::borrow)\n\n .as_deref()\n\n .map_or(i32::MIN, |root| {\n\n let line_sum_1 = max_path_sum_helper(root.left.as_deref(), result);\n\n let line_sum_2 = max_path_sum_helper(root.right.as_deref(), result);\n\n\n\n *result = (*result).max(root.val + line_sum_1.max(0) + line_sum_2.max(0));\n\n\n\n line_sum_1.max(line_sum_2).max(0) + root.val\n\n })\n\n}\n\n\n", "file_path": "src/solution/data_structures/trees/binary_tree.rs", "rank": 34, "score": 275677.84980647947 }, { "content": "/// 22. 括号生成 https://leetcode-cn.com/problems/generate-parentheses/\n\npub fn generate_parenthesis(n: i32) -> Vec<String> {\n\n if n < 1 {\n\n return vec![];\n\n }\n\n fn dfs(n: i32, left: i32, right: i32, result: &mut Vec<String>, mut path: String) {\n\n if left == n && right == n {\n\n result.push(path);\n\n return;\n\n }\n\n if left < n {\n\n let mut new_path = path.clone();\n\n new_path.push('(');\n\n dfs(n, left + 1, right, result, new_path);\n\n }\n\n if right < left {\n\n // reuse path to avoid clone overhead\n\n path.push(')');\n\n dfs(n, left, right + 1, result, path);\n\n }\n\n }\n\n let mut result = Vec::new();\n\n dfs(n, 0, 0, &mut result, String::new());\n\n result\n\n}\n\n\n", "file_path": "src/solution/data_structures/stacks/stack.rs", "rank": 35, "score": 275603.05243846716 }, { "content": "/// 力扣(278. 第一个错误的版本) https://leetcode-cn.com/problems/first-bad-version/\n\n// The API isBadVersion is defined for you.\n\n// isBadVersion(versions:i32)-> bool;\n\n// to call it use self.isBadVersion(versions)\n\npub fn first_bad_version(n: i32) -> i32 {\n\n let mut left = 1;\n\n let mut right = n;\n\n while left < right {\n\n let middle = left + (right - left) / 2;\n\n if is_bad_version(middle) {\n\n right = middle;\n\n } else {\n\n left = middle + 1;\n\n }\n\n }\n\n left\n\n}\n\n\n", "file_path": "src/simple.rs", "rank": 36, "score": 272977.668868378 }, { "content": "/// 力扣(204. 
计数质数) https://leetcode-cn.com/problems/count-primes/\n\npub fn count_primes(n: i32) -> i32 {\n\n let mut ans = 0;\n\n let mut i = 2;\n\n while i < n {\n\n if is_prime(i) {\n\n ans += 1;\n\n }\n\n i += 1;\n\n }\n\n ans\n\n}\n\n\n", "file_path": "src/solution/math/numbers.rs", "rank": 37, "score": 269877.180351474 }, { "content": "/// 数组最大连续子序列和\n\npub fn max_continue_array_sum(array: &[i32]) -> i32 {\n\n let len = array.len();\n\n let mut max = array[0];\n\n let mut sum = array[0];\n\n for &item in array.iter().take(len).skip(1) {\n\n sum = if sum + item > item { sum + item } else { item };\n\n\n\n if sum >= max {\n\n max = sum;\n\n }\n\n }\n\n max\n\n}\n\n\n", "file_path": "src/dp.rs", "rank": 38, "score": 269138.99053873273 }, { "content": "/// 233. 数字 1 的个数 https://leetcode-cn.com/problems/number-of-digit-one/\n\npub fn count_digit_one(n: i32) -> i32 {\n\n let mut n = i64::from(n);\n\n let mut result = 0;\n\n let mut ten_to_the_power = 1;\n\n let mut base = 0;\n\n let mut processed = 0;\n\n\n\n while n > 0 {\n\n let digit = n % 10;\n\n\n\n match digit {\n\n 0 => {}\n\n 1 => result += base + processed + 1,\n\n _ => result += base * digit + ten_to_the_power,\n\n }\n\n\n\n processed += ten_to_the_power * digit;\n\n base = ten_to_the_power + base * 10;\n\n ten_to_the_power *= 10;\n\n n /= 10;\n\n }\n\n\n\n result as _\n\n}\n\n\n", "file_path": "src/solution/math/operations.rs", "rank": 39, "score": 266984.406777648 }, { "content": "/// 力扣(204. 计数质数)\n\n/// 方法二:厄拉多塞筛法(埃氏筛)\n\npub fn count_primes_v2(n: i32) -> i32 {\n\n let n = n as usize;\n\n let mut primes = vec![1; n];\n\n let mut ans = 0;\n\n let mut i = 2_usize;\n\n\n\n while i < n {\n\n if primes[i] == 1 {\n\n ans += 1;\n\n }\n\n if let Some(squar) = i.checked_mul(i) {\n\n if squar < n {\n\n let mut j = squar;\n\n while j < n {\n\n primes[j] = 0;\n\n j += i;\n\n }\n\n }\n\n }\n\n\n\n i += 1;\n\n }\n\n ans\n\n}\n\n\n", "file_path": "src/solution/math/numbers.rs", "rank": 40, "score": 266984.40677764796 }, { "content": "/// 剑指 Offer 53 - I. 在排序数组中查找数字 I https://leetcode-cn.com/problems/zai-pai-xu-shu-zu-zhong-cha-zhao-shu-zi-lcof/\n\n/// 注意:本题与主站 34 题相同(仅返回值不同):https://leetcode-cn.com/problems/find-first-and-last-position-of-element-in-sorted-array/\n\npub fn search(nums: Vec<i32>, target: i32) -> i32 {\n\n use std::cmp::Ordering;\n\n let mut range = vec![-1, -1];\n\n let mut left = 0;\n\n let mut right = nums.len();\n\n while left < right {\n\n let mut middle = (left + right) / 2;\n\n match nums[middle].cmp(&target) {\n\n Ordering::Greater => {\n\n right = middle;\n\n }\n\n Ordering::Less => {\n\n left = middle + 1;\n\n }\n\n Ordering::Equal => {\n\n // 找到target的第一个位置后则向左右两边拓展查找\n\n range[0] = middle as i32;\n\n range[1] = middle as i32;\n\n let mut l = middle;\n\n let mut r = middle;\n", "file_path": "src/lcof.rs", "rank": 41, "score": 266660.98248786404 }, { "content": "/// 力扣(338. 比特位计数) https://leetcode-cn.com/problems/counting-bits/\n\n/// 与 力扣(191. 位1的个数) 类似\n\npub fn count_bits(n: i32) -> Vec<i32> {\n\n let n = n as usize;\n\n let mut result = vec![0; n + 1];\n\n\n\n for (num, item) in result.iter_mut().enumerate().take(n + 1) {\n\n *item = num.count_ones() as i32;\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/solution/math/bitwise/and.rs", "rank": 44, "score": 263798.44355893787 }, { "content": "/// 力扣(338. 
比特位计数)\n\npub fn count_bits_v2(n: i32) -> Vec<i32> {\n\n let n = n as usize;\n\n let mut result = vec![0; n + 1];\n\n for (num, item) in result.iter_mut().enumerate().take(n + 1).skip(1) {\n\n *item = count_ones(num as u32);\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/solution/math/bitwise/and.rs", "rank": 46, "score": 261014.33823432907 }, { "content": "/// 72. 编辑距离 https://leetcode-cn.com/problems/edit-distance/\n\npub fn min_distance(word1: String, word2: String) -> i32 {\n\n let (word1, word2) = if word2.len() < word1.len() {\n\n (word2, word1)\n\n } else {\n\n (word1, word2)\n\n };\n\n\n\n let mut cache = (0..=word1.len() as _).rev().collect::<Box<_>>();\n\n\n\n for (prev_base, c2) in word2.as_bytes().iter().rev().enumerate() {\n\n let mut prev = prev_base as _;\n\n\n\n cache[word1.len()] = prev + 1;\n\n\n\n for (i, c1) in word1.as_bytes().iter().enumerate().rev() {\n\n let distance = if c1 == c2 {\n\n prev\n\n } else {\n\n cache[i].min(cache[i + 1]).min(prev) + 1\n\n };\n", "file_path": "src/solution/string/edit_distance.rs", "rank": 47, "score": 258858.42175377946 }, { "content": "/// 剑指 Offer 57. 和为s的两个数字 https://leetcode-cn.com/problems/he-wei-sde-liang-ge-shu-zi-lcof/\n\npub fn two_sum(nums: Vec<i32>, target: i32) -> Vec<i32> {\n\n let mut result = vec![];\n\n let (mut i, mut j) = (0, nums.len() - 1);\n\n while i < j {\n\n match (nums[i] + nums[j]).cmp(&target) {\n\n Ordering::Equal => {\n\n result.push(nums[i]);\n\n result.push(nums[j]);\n\n break;\n\n }\n\n Ordering::Greater => {\n\n j -= 1;\n\n }\n\n Ordering::Less => {\n\n i += 1;\n\n }\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/lcof.rs", "rank": 48, "score": 258820.28056722094 }, { "content": "/// 力扣(27. 移除元素)https://leetcode-cn.com/problems/remove-element/\n\npub fn remove_element(nums: &mut Vec<i32>, val: i32) -> i32 {\n\n let mut k = 0;\n\n for i in 0..nums.len() {\n\n if nums[i] != val {\n\n nums[k] = nums[i];\n\n k += 1;\n\n }\n\n }\n\n k as i32\n\n}\n\n\n", "file_path": "src/simple.rs", "rank": 49, "score": 258707.09496119036 }, { "content": "/// 力扣(209. 长度最小的子数组)\n\n/// 滑动窗口\n\npub fn min_sub_array_len_v2(target: i32, nums: Vec<i32>) -> i32 {\n\n use std::cmp::min;\n\n let mut result = i32::MAX;\n\n let mut sum = 0;\n\n let mut i = 0;\n\n let mut sub_length = 0;\n\n let len = nums.len();\n\n for j in 0..len {\n\n sum += nums[j];\n\n while sum >= target {\n\n sub_length = (j - i + 1);\n\n result = min(result, sub_length as i32);\n\n sum -= nums[i];\n\n i += 1;\n\n }\n\n }\n\n if result == i32::MAX {\n\n 0\n\n } else {\n\n result\n\n }\n\n}\n\n\n", "file_path": "src/medium.rs", "rank": 50, "score": 256547.4972673019 }, { "content": "/// 力扣(704. 
二分查找) https://leetcode-cn.com/problems/binary-search/\n\npub fn search(nums: Vec<i32>, target: i32) -> i32 {\n\n // target在[left,right]中查找\n\n let len = nums.len();\n\n let mut left = 0;\n\n let mut right = len - 1;\n\n let mut pivot;\n\n while left <= right {\n\n pivot = left + (right - left) / 2;\n\n // 注意usize的范围和nums的下标范围\n\n if nums[pivot] == target {\n\n return pivot as i32;\n\n }\n\n if target < nums[pivot] {\n\n if pivot == 0 {\n\n break;\n\n }\n\n right = pivot - 1;\n\n } else {\n\n if pivot == len - 1 {\n\n break;\n\n }\n\n left = pivot + 1;\n\n }\n\n }\n\n -1\n\n}\n\n\n", "file_path": "src/solution/algorithms/searches/binary_search.rs", "rank": 51, "score": 256547.4972673019 }, { "content": "pub fn min_distance_v2(word1: String, word2: String) -> i32 {\n\n let bytes1 = word1.as_bytes();\n\n let bytes2 = word2.as_bytes();\n\n let len1 = word1.len();\n\n let len2 = word2.len();\n\n let mut dp = vec![vec![0; len2 + 1]; len1 + 1];\n\n for i in 1..=len2 {\n\n dp[0][i] = dp[0][i - 1] + 1;\n\n }\n\n\n\n for i in 1..=len1 {\n\n dp[i][0] = dp[i - 1][0] + 1;\n\n }\n\n\n\n for i in 1..=len1 {\n\n for j in 1..=len2 {\n\n if bytes1[i - 1] == bytes2[j - 1] {\n\n dp[i][j] = dp[i - 1][j - 1];\n\n } else {\n\n dp[i][j] = min(min(dp[i - 1][j - 1], dp[i][j - 1]), dp[i - 1][j]) + 1;\n", "file_path": "src/solution/string/edit_distance.rs", "rank": 52, "score": 256493.99179895967 }, { "content": "/// 力扣(338. 比特位计数) https://leetcode-cn.com/problems/counting-bits/\n\n/// 动态规划\n\npub fn count_bits_v3(n: i32) -> Vec<i32> {\n\n let n = n as usize;\n\n let mut result = vec![0; n + 1];\n\n let mut high_bit = 0;\n\n for num in 1..=n {\n\n if num & (num - 1) == 0 {\n\n high_bit = num;\n\n }\n\n result[num] = result[num - high_bit] + 1;\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/solution/algorithms/dp/dynamic_programming.rs", "rank": 53, "score": 255760.687912784 }, { "content": "/// 力扣(338. 比特位计数)\n\n/// 动态规划——最低有效位\n\npub fn count_bits_v4(n: i32) -> Vec<i32> {\n\n let n = n as usize;\n\n let mut result = vec![0i32; n + 1];\n\n for num in 1..=n {\n\n result[num] = result[num >> 1] + ((num as i32) & 1);\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/solution/algorithms/dp/dynamic_programming.rs", "rank": 54, "score": 255756.43684454 }, { "content": "/// 224. 基本计算器 https://leetcode-cn.com/problems/basic-calculator/\n\npub fn calculate(s: String) -> i32 {\n\n let mut stack = Vec::new();\n\n let mut lhs = 0;\n\n let mut rhs = 0;\n\n let mut sign = 1;\n\n\n\n for c in s.bytes() {\n\n match c {\n\n b'+' => {\n\n lhs += rhs * sign;\n\n rhs = 0;\n\n sign = 1;\n\n }\n\n b'-' => {\n\n lhs += rhs * sign;\n\n rhs = 0;\n\n sign = -1;\n\n }\n\n b'(' => {\n\n stack.push((lhs, sign));\n", "file_path": "src/solution/math/operations.rs", "rank": 55, "score": 255481.015629861 }, { "content": "/// 力扣(3. 
无重复的字符串的最长子串)https://leetcode-cn.com/problems/longest-substring-without-repeating-characters/submissions/\n\n/// 方法一:滑动窗口\n\npub fn length_of_longest_substring(s: String) -> i32 {\n\n if s.is_empty() {\n\n return 0;\n\n }\n\n\n\n let s: &[u8] = s.as_bytes();\n\n\n\n // 查找以第i个字符为起始的最长不重复的字符串,返回值:(不重复字符串长度,下一次查询的起始位置)\n\n fn get_len(i: usize, s: &[u8]) -> (i32, usize) {\n\n let mut len = 0;\n\n //字符 0-z(包含了数字、符号、空格) 对应的u8 范围为[48,122],这里分配长度为128的数组绰绰有余\n\n // 例如:bits[48] 存储字符0出现的位置\n\n let mut bits = [0usize; 128]; // 用数组记录每个字符是否出现过\n\n let mut to = s.len() - 1;\n\n for (j, &item) in s.iter().enumerate().skip(i) {\n\n let index = item as usize;\n\n if bits[index] == 0 {\n\n bits[index] = j + 1;\n\n len += 1;\n\n } else {\n", "file_path": "src/simple.rs", "rank": 56, "score": 255481.015629861 }, { "content": "/// 力扣(704. 二分查找)\n\npub fn search_v2(nums: Vec<i32>, target: i32) -> i32 {\n\n use std::cmp::Ordering;\n\n // target在[left,right]中查找\n\n let mut left = 0;\n\n let mut right = (nums.len() - 1) as i32;\n\n while left <= right {\n\n let middle = (left + right) as usize / 2;\n\n match nums[middle].cmp(&target) {\n\n Ordering::Greater => {\n\n right = middle as i32 - 1;\n\n }\n\n Ordering::Less => {\n\n left = middle as i32 + 1;\n\n }\n\n Ordering::Equal => {\n\n return middle as i32;\n\n }\n\n }\n\n }\n\n -1\n\n}\n\n\n", "file_path": "src/solution/algorithms/searches/binary_search.rs", "rank": 57, "score": 254235.48836249608 }, { "content": "/// 力扣(35. 搜索插入位置) https://leetcode-cn.com/problems/search-insert-position/\n\n/// 提示:nums 为无重复元素的升序排列数组\n\npub fn search_insert(nums: Vec<i32>, target: i32) -> i32 {\n\n let len = nums.len();\n\n let mut idx = 0;\n\n while idx < len {\n\n if target <= nums[idx] {\n\n return idx as i32;\n\n }\n\n\n\n if target > nums[idx] {\n\n if idx != len - 1 {\n\n if target < nums[idx + 1] {\n\n return (idx + 1) as i32;\n\n } else {\n\n idx += 1;\n\n continue;\n\n }\n\n } else {\n\n return len as i32;\n\n }\n\n }\n\n idx += 1;\n\n }\n\n\n\n idx as i32\n\n}\n\n\n", "file_path": "src/solution/algorithms/searches/binary_search.rs", "rank": 58, "score": 254235.48836249608 }, { "content": "/// 力扣(704. 二分查找)\n\npub fn search_v3(nums: Vec<i32>, target: i32) -> i32 {\n\n // target在[left,right)中查找,由于rust下标usize的限制,推荐使用这种方式\n\n let mut left = 0;\n\n let mut right = nums.len();\n\n while left < right {\n\n let middle = left + (right - left) / 2;\n\n match nums[middle].cmp(&target) {\n\n Ordering::Greater => {\n\n right = middle;\n\n }\n\n Ordering::Less => {\n\n left = middle + 1;\n\n }\n\n Ordering::Equal => {\n\n return middle as i32;\n\n }\n\n }\n\n }\n\n -1\n\n}\n", "file_path": "src/solution/algorithms/searches/binary_search.rs", "rank": 59, "score": 254235.48836249608 }, { "content": "/// 166. 分数到小数 https://leetcode-cn.com/problems/fraction-to-recurring-decimal/\n\npub fn fraction_to_decimal(numerator: i32, denominator: i32) -> String {\n\n let numerator = i64::from(numerator);\n\n let denominator = i64::from(denominator);\n\n let integer_part = numerator / denominator;\n\n let mut remainder = numerator % denominator;\n\n\n\n let mut result = if integer_part == 0\n\n && if denominator < 0 {\n\n numerator > 0\n\n } else {\n\n numerator < 0\n\n } {\n\n String::from(\"-0\")\n\n } else {\n\n integer_part.to_string()\n\n };\n\n\n\n if remainder != 0 {\n\n let denominator = denominator.abs();\n\n let mut remainder_to_index = HashMap::new();\n", "file_path": "src/solution/math/operations.rs", "rank": 60, "score": 254076.6334737887 }, { "content": "/// 力扣(13. 
罗马数字转整数) https://leetcode-cn.com/problems/roman-to-integer/\n\n/// 以下解法不正确\n\npub fn roman_to_int(s: String) -> i32 {\n\n let len = s.len();\n\n let mut sum = 0;\n\n\n\n let mut map = HashMap::<&str, i32>::new();\n\n map.insert(\"I\", 1);\n\n map.insert(\"V\", 5);\n\n map.insert(\"X\", 10);\n\n map.insert(\"L\", 50);\n\n map.insert(\"C\", 100);\n\n map.insert(\"D\", 500);\n\n map.insert(\"M\", 1000);\n\n map.insert(\"IV\", 4);\n\n map.insert(\"IX\", 9);\n\n map.insert(\"XL\", 40);\n\n map.insert(\"XC\", 90);\n\n map.insert(\"CD\", 400);\n\n map.insert(\"CM\", 900);\n\n\n\n let mut i = 1;\n", "file_path": "src/solution/math/numbers.rs", "rank": 61, "score": 252329.8666304549 }, { "content": "/// 91. 解码方法 https://leetcode-cn.com/problems/decode-ways/\n\n/// 动态规划\n\npub fn num_decodings(s: String) -> i32 {\n\n let s = s.into_bytes();\n\n\n\n if let Some(last) = s.last() {\n\n let mut cache_2 = 1;\n\n let mut cache_1 = if *last == b'0' { 0 } else { 1 };\n\n\n\n for window in s.windows(2).rev() {\n\n cache_2 = match window {\n\n //如果包含前导0,则无法转换\n\n [b'0', _] => mem::replace(&mut cache_1, 0),\n\n [b'1', _] | [b'2', b'0'..=b'6'] => {\n\n let new_cache_1 = cache_1 + cache_2;\n\n\n\n mem::replace(&mut cache_1, new_cache_1)\n\n }\n\n _ => cache_1,\n\n };\n\n }\n\n\n\n cache_1\n\n } else {\n\n 1\n\n }\n\n}\n\n\n\nuse std::collections::HashSet;\n", "file_path": "src/solution/math/numbers.rs", "rank": 62, "score": 252329.8666304549 }, { "content": "/// 力扣(405. 数字转换为十六进制数) https://leetcode-cn.com/problems/convert-a-number-to-hexadecimal/\n\npub fn to_hex(num: i32) -> String {\n\n match num.cmp(&0) {\n\n Ordering::Greater | Ordering::Less => {\n\n let mut ret = String::new();\n\n let mut num = num;\n\n let mut i = 7;\n\n while i >= 0 {\n\n let val = (num >> (4 * i)) & 0xf;\n\n if !ret.is_empty() || val > 0 {\n\n ret.push_str(HEX_CHARS[val as usize]);\n\n }\n\n i -= 1;\n\n }\n\n ret\n\n }\n\n\n\n Ordering::Equal => \"0\".to_owned(),\n\n }\n\n}\n\n\n", "file_path": "src/solution/math/numbers.rs", "rank": 63, "score": 252329.8666304549 }, { "content": "/// 力扣(35. 搜索插入位置)\n\n/// 二分查找\n\npub fn search_insert_v2(nums: Vec<i32>, target: i32) -> i32 {\n\n use std::cmp::Ordering;\n\n let mut left = 0;\n\n let mut right = (nums.len() - 1) as i32;\n\n while left <= right {\n\n let middle = (left + (right - left) / 2) as usize;\n\n match nums[middle].cmp(&target) {\n\n Ordering::Greater => {\n\n right = (middle as i32) - 1;\n\n }\n\n Ordering::Less => {\n\n left = (middle + 1) as i32;\n\n }\n\n Ordering::Equal => {\n\n return middle as i32;\n\n }\n\n }\n\n }\n\n (right + 1) as i32\n\n}\n\n\n", "file_path": "src/solution/algorithms/searches/binary_search.rs", "rank": 64, "score": 252000.65048152965 }, { "content": "/// 剑指 Offer 58 - I. 翻转单词顺序 https://leetcode-cn.com/problems/fan-zhuan-dan-ci-shun-xu-lcof/\n\npub fn reverse_words(s: String) -> String {\n\n let mut words: Vec<&str> = s.split(' ').collect();\n\n let mut result = String::new();\n\n words.reverse();\n\n for word in words {\n\n // 注意:按照\" \"分割,结果中空字符串为\"\"而不是\" \"\n\n if !word.is_empty() {\n\n result = format!(\"{} {}\", result, word);\n\n }\n\n }\n\n result.trim().to_string()\n\n}\n\n\n", "file_path": "src/lcof.rs", "rank": 65, "score": 251067.53545603904 }, { "content": "/// 力扣(283. 
移动零) https://leetcode-cn.com/problems/move-zeroes/\n\npub fn move_zeroes(nums: &mut Vec<i32>) {\n\n let mut slow_index = 0;\n\n let len = nums.len();\n\n\n\n for fast_index in 0..len {\n\n if nums[fast_index] != 0 {\n\n nums[slow_index] = nums[fast_index];\n\n slow_index += 1;\n\n }\n\n }\n\n\n\n for num in nums.iter_mut().take(len).skip(slow_index) {\n\n *num = 0;\n\n }\n\n}\n\n\n", "file_path": "src/simple.rs", "rank": 66, "score": 251017.03884742004 }, { "content": "/// 力扣(LCP 07. 传递信息) https://leetcode-cn.com/problems/chuan-di-xin-xi/submissions/\n\n/// 方法一:深度优先搜索\n\npub fn num_ways(n: i32, relation: Vec<Vec<i32>>, k: i32) -> i32 {\n\n let mut map = HashMap::<i32, HashSet<i32>>::new();\n\n for re in relation {\n\n let key = re[0];\n\n\n\n match map.get_mut(&key) {\n\n Some(set) => {\n\n set.insert(re[1]);\n\n }\n\n None => {\n\n let mut set = HashSet::<i32>::new();\n\n set.insert(re[1]);\n\n map.insert(re[0], set);\n\n }\n\n }\n\n }\n\n println!(\"map:{:?}\", map);\n\n let mut account = 0;\n\n search_recurse(&map, 0, n - 1, 1, k, &mut account);\n\n account\n\n}\n\n\n", "file_path": "src/contest/spring2020.rs", "rank": 67, "score": 250830.12051924362 }, { "content": "/// 33. 搜索旋转排序数组 https://leetcode-cn.com/problems/search-in-rotated-sorted-array/\n\n/// 方法一:二分查找\n\npub fn search_in_rotated_sorted_array(nums: Vec<i32>, target: i32) -> i32 {\n\n let len = nums.len();\n\n if len == 0 {\n\n return -1;\n\n }\n\n if len == 1 {\n\n if nums[0] == target {\n\n return 0;\n\n } else {\n\n return -1;\n\n }\n\n }\n\n\n\n let (mut left, mut right) = (0, len - 1);\n\n while left <= right {\n\n let mut middle = (left + right) / 2;\n\n if nums[middle] == target {\n\n return middle as i32;\n\n }\n\n if nums[0] <= nums[middle] {\n", "file_path": "src/solution/algorithms/searches/binary_search.rs", "rank": 68, "score": 249839.00460856984 }, { "content": "/// 力扣(16. 最接近的三数之和) https://leetcode-cn.com/problems/3sum-closest/\n\n/// 方法1:排序 + 双指针\n\npub fn three_sum_closest(nums: Vec<i32>, target: i32) -> i32 {\n\n let len = nums.len();\n\n let mut new_nums = nums;\n\n new_nums.sort_unstable();\n\n // -10^4 <= target <= 10^4\n\n let mut best = 10000;\n\n // 枚举 a\n\n for (first, &a) in new_nums.iter().enumerate() {\n\n // 需要和上一次枚举的数不相同\n\n if first > 0 && a == new_nums[first - 1] {\n\n continue;\n\n }\n\n let mut second = first + 1;\n\n let mut third = len - 1;\n\n while second < third {\n\n let sum = a + new_nums[second] + new_nums[third];\n\n if sum == target {\n\n return target;\n\n }\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 69, "score": 249839.0046085698 }, { "content": "/// 557. 反转字符串中的单词 III https://leetcode-cn.com/problems/reverse-words-in-a-string-iii/\n\npub fn reverse_words(s: String) -> String {\n\n let mut chars: Vec<char> = s.chars().collect();\n\n let len = chars.len();\n\n let (mut i, mut j, mut k) = (0, 0, 0);\n\n while k < len {\n\n if chars[k] == ' ' {\n\n j = k - 1;\n\n while i < j {\n\n chars.swap(i, j);\n\n i += 1;\n\n j -= 1;\n\n }\n\n i = k + 1;\n\n j = i;\n\n }\n\n k += 1;\n\n }\n\n if j != len - 1 {\n\n j = len - 1;\n\n while i < j {\n\n chars.swap(i, j);\n\n i += 1;\n\n j -= 1;\n\n }\n\n }\n\n\n\n chars.iter().collect()\n\n}\n\n\n", "file_path": "src/solution/string/reverse.rs", "rank": 70, "score": 249409.4477194329 }, { "content": "/// 力扣(34. 
在排序数组中查找元素的第一个和最后一个位置) https://leetcode-cn.com/problems/find-first-and-last-position-of-element-in-sorted-array/\n\n/// 先用二分查找算法找到target的下标,然后向左右两边继续查找\n\npub fn search_range(nums: Vec<i32>, target: i32) -> Vec<i32> {\n\n use std::cmp::Ordering;\n\n let mut range = vec![-1, -1];\n\n let mut left = 0;\n\n let mut right = nums.len();\n\n while left < right {\n\n let mut middle = (left + right) / 2;\n\n match nums[middle].cmp(&target) {\n\n Ordering::Greater => {\n\n right = middle;\n\n }\n\n Ordering::Less => {\n\n left = middle + 1;\n\n }\n\n Ordering::Equal => {\n\n // 找到target的第一个位置后则向左右两边拓展查找\n\n range[0] = middle as i32;\n\n range[1] = middle as i32;\n\n let mut l = middle;\n\n let mut r = middle;\n", "file_path": "src/solution/algorithms/searches/binary_search.rs", "rank": 71, "score": 249406.46547969646 }, { "content": "/// 力扣(1. 两数之和) https://leetcode-cn.com/problems/two-sum\n\npub fn two_sum(nums: Vec<i32>, target: i32) -> Vec<i32> {\n\n let mut nums_map = HashMap::<i32, i32>::new();\n\n for (idx, num) in nums.into_iter().enumerate() {\n\n let complement = target - num;\n\n\n\n let j = idx as i32;\n\n if let Some(idx) = nums_map.get(&complement) {\n\n return vec![*idx, j];\n\n }\n\n nums_map.insert(num, j);\n\n }\n\n vec![]\n\n}\n\n\n", "file_path": "src/solution/data_structures/array/operations.rs", "rank": 72, "score": 249400.44561529998 }, { "content": "/// 504. 七进制数 https://leetcode-cn.com/problems/base-7/\n\npub fn convert_to_base7(num: i32) -> String {\n\n if num == 0 {\n\n return \"0\".to_owned();\n\n }\n\n\n\n let negative = num < 0;\n\n let mut num = num.abs();\n\n let mut digits = String::new();\n\n while num > 0 {\n\n digits.push_str(&format!(\"{}\", num % 7));\n\n num /= 7;\n\n }\n\n\n\n if negative {\n\n digits.push('-');\n\n }\n\n digits.chars().rev().collect()\n\n}\n\n\n", "file_path": "src/solution/math/numbers.rs", "rank": 73, "score": 249309.58584724218 }, { "content": "/// 力扣(13. 罗马数字转整数)\n\npub fn roman_to_int_v3(s: String) -> i32 {\n\n let mut sum = 0;\n\n let chars_vec: Vec<char> = s.chars().collect();\n\n\n\n let chars = s.char_indices();\n\n let len = s.len();\n\n\n\n let mut split_idx = 0;\n\n for (idx, ch) in chars {\n\n if idx != 0 && idx == split_idx {\n\n continue;\n\n }\n\n let num = match ch {\n\n 'I' => {\n\n if idx + 1 < len {\n\n let next_char = chars_vec[idx + 1];\n\n if next_char == 'V' {\n\n split_idx = idx + 1;\n\n 4\n\n } else if next_char == 'X' {\n", "file_path": "src/solution/math/numbers.rs", "rank": 74, "score": 249309.58584724218 }, { "content": "/// 力扣(13. 罗马数字转整数)\n\npub fn roman_to_int_v2(s: String) -> i32 {\n\n use std::collections::HashMap;\n\n // [(a,b)] convert to HashMap<a,b>\n\n let map: HashMap<char, i32> = ROMAN_NUMBERS.iter().cloned().collect();\n\n let mut ret = 0;\n\n let mut it = s.chars().peekable();\n\n while let Some(c) = it.next() {\n\n let v = map.get(&c).unwrap();\n\n match it.peek() {\n\n Some(n) if v < map.get(n).unwrap() => ret -= v,\n\n _ => ret += v,\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "src/solution/math/numbers.rs", "rank": 75, "score": 249309.58584724218 }, { "content": "/// 剑指 Offer 45. 
把数组排成最小的数 https://leetcode-cn.com/problems/ba-shu-zu-pai-cheng-zui-xiao-de-shu-lcof/\n\npub fn min_number(nums: Vec<i32>) -> String {\n\n let mut nums_str: Vec<String> = nums.iter().map(|&num| format!(\"{}\", num)).collect();\n\n nums_str.sort_by(|x, y| {\n\n let xy = format!(\"{}{}\", x, y).parse::<u64>().unwrap();\n\n let yx = format!(\"{}{}\", y, x).parse::<u64>().unwrap();\n\n\n\n xy.cmp(&yx)\n\n });\n\n\n\n nums_str.iter().fold(\"\".to_string(), |acc, num| acc + num)\n\n}\n\n\n\n/**\n\n * Your MedianFinder object will be instantiated and called as such:\n\n * let obj = MedianFinder::new();\n\n * obj.add_num(num);\n\n * let ret_2: f64 = obj.find_median();\n\n */\n\nuse std::collections::HashMap;\n", "file_path": "src/lcof.rs", "rank": 76, "score": 248941.40099212757 }, { "content": "/// 力扣(26. 删除有序数组中的重复项) https://leetcode-cn.com/problems/remove-duplicates-from-sorted-array/\n\npub fn remove_duplicates(nums: &mut Vec<i32>) -> i32 {\n\n let len = nums.len();\n\n if len <= 1 {\n\n return len as i32;\n\n }\n\n let mut slow_index = 0;\n\n let mut fast_index = 1;\n\n while fast_index < len {\n\n if nums[slow_index] != nums[fast_index] {\n\n nums[slow_index + 1] = nums[fast_index];\n\n slow_index += 1;\n\n }\n\n fast_index += 1;\n\n }\n\n\n\n (slow_index + 1) as i32\n\n}\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 77, "score": 248587.46106717104 }, { "content": "/// 力扣(283. 移动零) https://leetcode-cn.com/problems/move-zeroes/\n\npub fn move_zeroes_v2(nums: &mut Vec<i32>) {\n\n let mut slow_index = 0;\n\n let mut fast_index = 0;\n\n let len = nums.len();\n\n while fast_index < len {\n\n if nums[fast_index] != 0 {\n\n nums.swap(slow_index, fast_index);\n\n slow_index += 1;\n\n }\n\n fast_index += 1;\n\n }\n\n}\n\n\n", "file_path": "src/simple.rs", "rank": 78, "score": 247975.72786360147 }, { "content": "/// 剑指 Offer 58 - I. 翻转单词顺序\n\npub fn reverse_words_v2(s: String) -> String {\n\n s.split_whitespace().rev().collect::<Vec<_>>().join(\" \")\n\n}\n\n\n", "file_path": "src/lcof.rs", "rank": 79, "score": 247905.21600079912 }, { "content": "/// 面试题 01.07. 旋转矩阵 https://leetcode-cn.com/problems/rotate-matrix-lcci/\n\npub fn rotate(matrix: &mut Vec<Vec<i32>>) {\n\n let n = matrix.len();\n\n\n\n for first in 0..n / 2 {\n\n let last = n - 1 - first;\n\n\n\n for i in first..last {\n\n let j = n - 1 - i;\n\n\n\n let temp = matrix[first][i];\n\n\n\n matrix[first][i] = matrix[j][first];\n\n matrix[j][first] = matrix[last][j];\n\n matrix[last][j] = matrix[i][last];\n\n matrix[i][last] = temp;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/interview.rs", "rank": 80, "score": 247804.1611484901 }, { "content": "/// 力扣(167. 两数之和 II - 输入有序数组)https://leetcode-cn.com/problems/two-sum-ii-input-array-is-sorted/\n\npub fn two_sum2(numbers: Vec<i32>, target: i32) -> Vec<i32> {\n\n let mut result = Vec::<i32>::with_capacity(2);\n\n\n\n let mut index1 = 0;\n\n let mut index2 = numbers.len() - 1;\n\n while index2 >= 1 {\n\n let sum = numbers[index1] + numbers[index2];\n\n match sum.cmp(&target) {\n\n Ordering::Less => {\n\n index1 += 1;\n\n continue;\n\n }\n\n Ordering::Greater => {\n\n index2 -= 1;\n\n continue;\n\n }\n\n Ordering::Equal => {\n\n result.push((index1 + 1) as i32);\n\n result.push((index2 + 1) as i32);\n\n break;\n\n }\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 81, "score": 247238.79974234014 }, { "content": "/// 821. 
字符的最短距离 https://leetcode-cn.com/problems/shortest-distance-to-a-character/\n\npub fn shortest_to_char(s: String, c: char) -> Vec<i32> {\n\n let c = c as u8;\n\n let mut result = vec![0; s.len()];\n\n let mut prev_position = i32::MIN;\n\n\n\n for (i, (distance, x)) in (0..).zip(result.iter_mut().zip(s.bytes())) {\n\n if x == c {\n\n prev_position = i;\n\n } else {\n\n *distance = i.saturating_sub(prev_position);\n\n }\n\n }\n\n\n\n prev_position = i32::MIN;\n\n\n\n for (i, (distance, x)) in (0..).zip(result.iter_mut().zip(s.bytes()).rev()) {\n\n if x == c {\n\n prev_position = i;\n\n } else {\n\n *distance = (*distance).min(i.saturating_sub(prev_position));\n", "file_path": "src/solution/string/converts.rs", "rank": 82, "score": 247043.79882134142 }, { "content": "/// 力扣(171. Excel 表列序号) https://leetcode-cn.com/problems/excel-sheet-column-number/submissions/\n\npub fn title_to_number(column_title: String) -> i32 {\n\n let mut sum = 0;\n\n let mut hex_base = 1;\n\n for ch in column_title.chars().rev() {\n\n sum += (hex_base * (ch as u8 - b'A' + 1) as i32);\n\n hex_base *= 26;\n\n }\n\n\n\n sum\n\n}\n\n\n", "file_path": "src/solution/math/operations.rs", "rank": 83, "score": 246412.18662663558 }, { "content": "/// 力扣(168. Excel表列名称) https://leetcode-cn.com/problems/excel-sheet-column-title/\n\npub fn convert_to_title(column_number: i32) -> String {\n\n let mut ret = String::new();\n\n let mut column_number = column_number;\n\n while column_number > 0 {\n\n let a0 = (column_number - 1) % 26 + 1;\n\n let ch = b'A' + (a0 - 1) as u8;\n\n ret.push(ch as char);\n\n column_number = (column_number - a0) / 26;\n\n }\n\n\n\n ret.chars().rev().collect()\n\n}\n\n\n", "file_path": "src/solution/math/operations.rs", "rank": 84, "score": 246412.18662663558 }, { "content": "/// 力扣(26. 删除有序数组中的重复项)\n\npub fn remove_duplicates_v2(nums: &mut Vec<i32>) -> i32 {\n\n let len = nums.len();\n\n if len == 0 {\n\n return 0;\n\n }\n\n //慢指针表示下一个不同元素要填入的下标位置\n\n let mut slow_index = 1;\n\n //快指针表示遍历数组到达的下标位置\n\n let mut fast_index = 1;\n\n while fast_index < len {\n\n if nums[fast_index] != nums[fast_index - 1] {\n\n nums[slow_index] = nums[fast_index];\n\n slow_index += 1;\n\n }\n\n\n\n fast_index += 1;\n\n }\n\n\n\n slow_index as i32\n\n}\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 85, "score": 246267.26899019303 }, { "content": "/// 80. 删除有序数组中的重复项 II https://leetcode-cn.com/problems/remove-duplicates-from-sorted-array-ii/\n\npub fn remove_duplicates_ii(nums: &mut Vec<i32>) -> i32 {\n\n let len = nums.len();\n\n if len <= 2 {\n\n return len as i32;\n\n }\n\n let (mut slow, mut fast) = (2, 2);\n\n while fast < len {\n\n if nums[slow - 2] != nums[fast] {\n\n nums[slow] = nums[fast];\n\n slow += 1;\n\n }\n\n fast += 1;\n\n }\n\n slow as i32\n\n}\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 86, "score": 246267.26899019303 }, { "content": "/// 力扣(26. 删除有序数组中的重复项)\n\npub fn remove_duplicates_v3(nums: &mut Vec<i32>) -> i32 {\n\n nums.dedup();\n\n nums.len() as i32\n\n}\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 87, "score": 246267.26899019303 }, { "content": "/// 力扣(401. 
二进制手表) https://leetcode-cn.com/problems/binary-watch/\n\n/// 方法1:空间换时间避免重复计算\n\npub fn read_binary_watch(turned_on: i32) -> Vec<String> {\n\n if !(0..=8).contains(&turned_on) {\n\n return vec![];\n\n }\n\n //小时最多亮3盏灯[0,3]\n\n let mut hour_turn_on: Vec<Vec<String>> = vec![vec![]; 4];\n\n for hour in (0i32..=11i32) {\n\n hour_turn_on[hour.count_ones() as usize].push(format!(\"{}\", hour));\n\n }\n\n //dbg!(hour_turn_on);\n\n //分钟最多亮5盏灯[0,5]\n\n let mut minute_turn_on: Vec<Vec<String>> = vec![vec![]; 6];\n\n for minute in (0i32..=59i32) {\n\n minute_turn_on[minute.count_ones() as usize].push(format!(\"{:02}\", minute));\n\n }\n\n //dbg!(minute_turn_on);\n\n let mut result = Vec::new();\n\n\n\n // turned_on = hour + minute;\n\n for hour in (0i32..=3i32) {\n", "file_path": "src/simple.rs", "rank": 88, "score": 245921.1202089149 }, { "content": "/// 剑指 Offer 04. 二维数组中的查找 https://leetcode-cn.com/problems/er-wei-shu-zu-zhong-de-cha-zhao-lcof/\n\npub fn find_number_in2_d_array(matrix: Vec<Vec<i32>>, target: i32) -> bool {\n\n if matrix.is_empty() || matrix[0].is_empty() {\n\n return false;\n\n }\n\n let (m, n) = (matrix.len(), matrix[0].len());\n\n let mut row = 0;\n\n let mut col = n - 1;\n\n let mut max_in_row = matrix[row][col];\n\n\n\n while row < m {\n\n match max_in_row.cmp(&target) {\n\n Ordering::Equal => return true,\n\n Ordering::Greater => {\n\n if col > 0 {\n\n col -= 1;\n\n } else {\n\n break;\n\n }\n\n max_in_row = matrix[row][col];\n\n }\n", "file_path": "src/lcof.rs", "rank": 89, "score": 245519.4557642365 }, { "content": "/// 32. 最长有效括号 https://leetcode-cn.com/problems/longest-valid-parentheses/\n\npub fn longest_valid_parentheses(s: String) -> i32 {\n\n let mut seq: Vec<char> = s.chars().collect();\n\n let forward_max = longest(&seq, '(');\n\n seq.reverse();\n\n let backward_max = longest(&seq, ')');\n\n i32::max(forward_max, backward_max)\n\n}\n\n\n", "file_path": "src/solution/data_structures/stacks/stack.rs", "rank": 90, "score": 243630.3192398654 }, { "content": "/// 6053. 统计网格图中没有被保卫的格子数 https://leetcode-cn.com/problems/count-unguarded-cells-in-the-grid/submissions/\n\npub fn count_unguarded(m: i32, n: i32, guards: Vec<Vec<i32>>, walls: Vec<Vec<i32>>) -> i32 {\n\n let (m, n) = (m as usize, n as usize);\n\n let mut grid = vec![vec![b'0'; n]; m];\n\n for wall in walls {\n\n grid[wall[0] as usize][wall[1] as usize] = b'W';\n\n }\n\n for guard in &guards {\n\n grid[guard[0] as usize][guard[1] as usize] = b'G';\n\n }\n\n\n\n let mut guard_cnt = 0;\n\n for guard in &guards {\n\n //行\n\n let row = guard[0] as usize;\n\n let col = guard[1] as usize;\n\n let (mut row_t, mut col_t) = (row, col);\n\n\n\n while row_t > 0 {\n\n row_t -= 1;\n\n if grid[row_t][col] == b'0' {\n", "file_path": "src/contest/spring2022.rs", "rank": 91, "score": 242694.18854795402 }, { "content": "/// 力扣(18. 
四数之和) https://leetcode-cn.com/problems/4sum/\n\n/// 方法1:排序 + 双指针\n\npub fn four_sum(nums: Vec<i32>, target: i32) -> Vec<Vec<i32>> {\n\n use std::cmp::Ordering;\n\n\n\n let mut result = Vec::<Vec<i32>>::new();\n\n let len = nums.len();\n\n\n\n if len < 4 {\n\n return result;\n\n }\n\n let mut new_nums = nums;\n\n new_nums.sort_unstable();\n\n\n\n // 枚举 a\n\n for (first, &a) in new_nums.iter().take(len - 3).enumerate() {\n\n // 需要和上一次枚举的数不相同\n\n if first > 0 && a == new_nums[first - 1] {\n\n continue;\n\n }\n\n let min_fours = a + new_nums[first + 1] + new_nums[first + 2] + new_nums[first + 3];\n\n if min_fours > target {\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 92, "score": 242677.84268963424 }, { "content": "/// 力扣(290. 单词规律) https://leetcode-cn.com/problems/word-pattern/\n\n/// 与 力扣(205. 同构字符串)类似\n\npub fn word_pattern(pattern: String, s: String) -> bool {\n\n let pattern_chars = pattern.chars().collect::<Vec<_>>();\n\n let words = s.split(' ').collect::<Vec<_>>();\n\n let len = words.len();\n\n if pattern_chars.len() != len {\n\n return false;\n\n }\n\n let mut pattern_map = HashMap::<char, String>::new();\n\n let mut word_map = HashMap::<String, char>::new();\n\n for i in 0..len {\n\n match (pattern_map.get(&pattern_chars[i]), word_map.get(words[i])) {\n\n (Some(word), Some(ch)) => {\n\n if word != words[i] || *ch != pattern_chars[i] {\n\n return false;\n\n }\n\n }\n\n (None, None) => {\n\n pattern_map.insert(pattern_chars[i], String::from(words[i]));\n\n word_map.insert(String::from(words[i]), pattern_chars[i]);\n\n }\n\n _ => {\n\n return false;\n\n }\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "src/simple.rs", "rank": 93, "score": 240565.37060684865 }, { "content": "/// 682. 棒球比赛 https://leetcode-cn.com/problems/baseball-game/\n\npub fn cal_points(ops: Vec<String>) -> i32 {\n\n let mut stack: Vec<i32> = Vec::with_capacity(ops.len());\n\n\n\n for op in ops {\n\n match op.parse().map_err(|_| op.as_str()) {\n\n Ok(value) => stack.push(value),\n\n Err(\"C\") => {\n\n stack.pop();\n\n }\n\n Err(\"D\") => stack.push(stack.last().unwrap() * 2),\n\n Err(_) => {\n\n let len = stack.len();\n\n stack.push(stack[len - 2] + stack[len - 1]);\n\n }\n\n }\n\n }\n\n\n\n stack.iter().sum()\n\n}\n\n\n", "file_path": "src/solution/data_structures/stacks/stack.rs", "rank": 94, "score": 237568.74299340008 }, { "content": "/// 179. 最大数 https://leetcode-cn.com/problems/largest-number/\n\n/// 拓展:剑指 Offer 45. 把数组排成最小的数 https://leetcode-cn.com/problems/ba-shu-zu-pai-cheng-zui-xiao-de-shu-lcof/\n\npub fn largest_number(nums: Vec<i32>) -> String {\n\n let mut nums_str: Vec<String> = nums.iter().map(|&num| format!(\"{}\", num)).collect();\n\n nums_str.sort_by(|x, y| {\n\n let xy = format!(\"{}{}\", x, y).parse::<u64>().unwrap();\n\n let yx = format!(\"{}{}\", y, x).parse::<u64>().unwrap();\n\n\n\n yx.cmp(&xy)\n\n });\n\n\n\n if nums_str[0] == \"0\" {\n\n \"0\".to_string()\n\n } else {\n\n nums_str.iter().fold(\"\".to_string(), |acc, num| acc + num)\n\n }\n\n}\n\n\n", "file_path": "src/solution/data_structures/array/operations.rs", "rank": 95, "score": 237568.74299340008 }, { "content": "/// 力扣(150. 
逆波兰表达式求值) https://leetcode-cn.com/problems/evaluate-reverse-polish-notation/\n\n/// 方法二:数组模拟栈\n\npub fn eval_rpn(tokens: Vec<String>) -> i32 {\n\n let len = (tokens.len() + 1) / 2;\n\n let mut stack = vec![0; len];\n\n let mut index = -1;\n\n for token in &tokens {\n\n match (token.as_str()) {\n\n \"+\" => {\n\n index -= 1;\n\n stack[index as usize] += stack[(index + 1) as usize];\n\n }\n\n \"-\" => {\n\n index -= 1;\n\n stack[index as usize] -= stack[(index + 1) as usize];\n\n }\n\n \"*\" => {\n\n index -= 1;\n\n stack[index as usize] *= stack[(index + 1) as usize];\n\n }\n\n \"/\" => {\n\n index -= 1;\n", "file_path": "src/solution/data_structures/stacks/stack.rs", "rank": 96, "score": 237568.74299340008 }, { "content": "/// 31. 下一个排列 https://leetcode-cn.com/problems/next-permutation/\n\npub fn next_permutation(nums: &mut Vec<i32>) {\n\n let n = nums.len();\n\n let mut i = n - 1;\n\n while i > 0 && nums[i - 1] >= nums[i] {\n\n i -= 1;\n\n }\n\n if i > 0 {\n\n let mut j = n - 1;\n\n while nums[i - 1] >= nums[j] {\n\n j -= 1;\n\n }\n\n // 较小数nums[i-i]与较大数nums[j]交换位置\n\n nums.swap(i - 1, j);\n\n }\n\n nums[i..].reverse();\n\n}\n\n\n", "file_path": "src/solution/algorithms/two_pointers/two_pointers.rs", "rank": 97, "score": 236976.68464410282 }, { "content": "/// 163. 缺失的区间 https://leetcode-cn.com/problems/missing-ranges/\n\npub fn find_missing_ranges(nums: Vec<i32>, lower: i32, upper: i32) -> Vec<String> {\n\n fn find(a: i32, b: i32) -> Option<String> {\n\n match b - a {\n\n 0 | 1 => None,\n\n 2 => Some((a + 1).to_string()),\n\n _ => Some(format!(\"{}->{}\", a + 1, b - 1)),\n\n }\n\n }\n\n\n\n let mut ans = vec![];\n\n //预处理,简化边界判断\n\n for window in [vec![lower - 1], nums, vec![upper + 1]].concat().windows(2) {\n\n if let Some(s) = find(window[0], window[1]) {\n\n ans.push(s);\n\n }\n\n }\n\n ans\n\n}\n\n\n", "file_path": "src/solution/data_structures/array/operations.rs", "rank": 98, "score": 236446.49967084714 }, { "content": "/// 2022招银网络笔试(第二题)\n\n/// 题目大意:求闭区间[l,r]中,数字x(范围[0,9])在各个数字中出现的次数的总和\n\npub fn count_num(l: i32, r: i32, x: i32) -> i32 {\n\n let mut count = 0;\n\n\n\n // 将数字num转为字符串,再统计数字x出现的次数\n\n fn count_x(num: i32, x: i32) -> i32 {\n\n let mut count = 0;\n\n let x_byte = (x as u8) + b'0';\n\n let num_str = format!(\"{}\", num);\n\n num_str\n\n .into_bytes()\n\n .iter()\n\n .filter(|&byte| *byte == x_byte)\n\n .count() as i32\n\n }\n\n\n\n // 采用除10求余的方式得到数字num各个位上的数字,然后进行统计\n\n fn count_x_v2(num: i32, x: i32) -> i32 {\n\n if num == 0 && x == 0 {\n\n return 1;\n\n }\n", "file_path": "src/contest/spring2022.rs", "rank": 99, "score": 235618.60297399212 } ]
Rust
src/enclave_proc/socket.rs
bercarug/aws-nitro-enclaves-cli-1
ab4e03bc37fc7fcf6f98c42d416612976299989a
#![deny(missing_docs)]
#![deny(warnings)]

use inotify::{EventMask, Inotify, WatchMask};
use log::{debug, warn};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread::{self, JoinHandle};

use crate::common::get_socket_path;
use crate::common::{ExitGracefully, NitroCliErrorEnum, NitroCliFailure, NitroCliResult};
use crate::new_nitro_cli_failure;

#[derive(Default)]
pub struct EnclaveProcSock {
    socket_path: PathBuf,
    remove_listener_thread: Option<JoinHandle<()>>,
    requested_remove: Arc<AtomicBool>,
}

impl Clone for EnclaveProcSock {
    fn clone(&self) -> Self {
        EnclaveProcSock {
            socket_path: self.socket_path.clone(),
            remove_listener_thread: None,
            requested_remove: self.requested_remove.clone(),
        }
    }
}

impl Drop for EnclaveProcSock {
    fn drop(&mut self) {
        self.close_mut()
            .ok_or_exit_with_errno(Some("Failed to drop socket"));
    }
}

impl EnclaveProcSock {
    pub fn new(enclave_id: &str) -> NitroCliResult<Self> {
        let socket_path = get_socket_path(enclave_id).map_err(|_| {
            new_nitro_cli_failure!(
                "Failed to create enclave process socket",
                NitroCliErrorEnum::SocketPathNotFound
            )
        })?;

        Ok(EnclaveProcSock {
            socket_path,
            remove_listener_thread: None,
            requested_remove: Arc::new(AtomicBool::new(false)),
        })
    }

    pub fn get_path(&self) -> &Path {
        &self.socket_path.as_path()
    }

    pub fn set_path(&mut self, socket_path: PathBuf) {
        self.socket_path = socket_path;
    }

    pub fn start_monitoring(&mut self, exit_on_delete: bool) -> NitroCliResult<()> {
        let path_clone = self.socket_path.clone();
        let requested_remove_clone = self.requested_remove.clone();
        let mut socket_inotify = Inotify::init().map_err(|e| {
            new_nitro_cli_failure!(
                &format!("Failed to initialize socket notifications: {:?}", e),
                NitroCliErrorEnum::InotifyError
            )
        })?;

        socket_inotify
            .add_watch(
                self.socket_path.as_path(),
                WatchMask::ATTRIB | WatchMask::DELETE_SELF,
            )
            .map_err(|e| {
                new_nitro_cli_failure!(
                    &format!("Failed to add watch to inotify: {:?}", e),
                    NitroCliErrorEnum::InotifyError
                )
            })?;

        self.remove_listener_thread = Some(thread::spawn(move || {
            socket_removal_listener(
                path_clone,
                requested_remove_clone,
                socket_inotify,
                exit_on_delete,
            )
        }));

        Ok(())
    }

    fn close_mut(&mut self) -> NitroCliResult<()> {
        self.requested_remove.store(true, Ordering::SeqCst);

        if self.socket_path.exists() {
            std::fs::remove_file(&self.socket_path).map_err(|e| {
                new_nitro_cli_failure!(
                    &format!(
                        "Failed to remove socket file {:?} from disk: {:?}",
                        self.socket_path, e
                    ),
                    NitroCliErrorEnum::FileOperationFailure
                )
            })?;
        }

        if self.remove_listener_thread.is_some() {
            self.remove_listener_thread
                .take()
                .unwrap()
                .join()
                .map_err(|e| {
                    new_nitro_cli_failure!(
                        &format!("Failed to join socket notification thread: {:?}", e),
                        NitroCliErrorEnum::ThreadJoinFailure
                    )
                })?;
        }

        Ok(())
    }

    pub fn close(mut self) -> NitroCliResult<()> {
        self.close_mut()
            .map_err(|e| e.add_subaction("Close socket".to_string()))
    }
}

fn socket_removal_listener(
    socket_path: PathBuf,
    requested_remove: Arc<AtomicBool>,
    mut socket_inotify: Inotify,
    exit_on_delete: bool,
) {
    let mut buffer = [0u8; 4096];
    let mut done = false;

    debug!("Socket file event listener started for {:?}.", socket_path);

    while !done {
        let events = socket_inotify
            .read_events_blocking(&mut buffer)
            .map_err(|e| {
                new_nitro_cli_failure!(
                    &format!("Socket removal listener error: {:?}", e),
                    NitroCliErrorEnum::InotifyError
                )
                .set_action("Run Enclave".to_string())
            })
            .ok_or_exit_with_errno(Some("Failed to read inotify events"));

        for event in events {
            if (event.mask.contains(EventMask::ATTRIB)
                || event.mask.contains(EventMask::DELETE_SELF))
                && !socket_path.exists()
            {
                if requested_remove.load(Ordering::SeqCst) {
                    debug!("The enclave process socket has deleted itself.");
                    done = true;
                } else {
                    warn!("The enclave process socket has been deleted!");
                    if exit_on_delete {
                        std::process::exit(1);
                    }
                    done = true;
                }
            }
        }
    }

    debug!("Enclave process socket monitoring is done.");
}

#[cfg(test)]
mod tests {
    use super::*;

    use std::os::unix::net::UnixListener;
    use std::process::Command;

    const DUMMY_ENCLAVE_ID: &str = "i-0000000000000000-enc0123456789012345";
    const THREADS_STR: &str = "Threads:";
    const WAIT_REMOVE_MILLIS: u64 = 10;

    fn get_num_threads_from_status_output(status_str: String) -> u32 {
        let start_idx = status_str.find(THREADS_STR);
        let mut iter = status_str.chars();

        iter.by_ref().nth(start_idx.unwrap() + THREADS_STR.len());

        let slice = iter.as_str();
        let new_str = slice.to_string();
        let end_idx = new_str.find('\n');
        let substr = &slice[..end_idx.unwrap()];

        substr.parse().unwrap()
    }

    #[test]
    fn test_enclaveprocsock_init() {
        let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string());
        assert!(socket.is_ok());

        if let Ok(socket) = socket {
            assert!(socket
                .socket_path
                .as_path()
                .to_str()
                .unwrap()
                .contains("0123456789012345"));
            assert!(socket.remove_listener_thread.is_none());
            assert!(!socket.requested_remove.load(Ordering::SeqCst));
        }
    }

    #[test]
    fn test_start_monitoring() {
        let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string());
        assert!(socket.is_ok());

        if let Ok(mut socket) = socket {
            UnixListener::bind(socket.get_path())
                .map_err(|e| {
                    new_nitro_cli_failure!(
                        &format!("Failed to bind to socket: {:?}", e),
                        NitroCliErrorEnum::SocketError
                    )
                })
                .ok_or_exit_with_errno(Some("Error binding"));

            let result = socket.start_monitoring(false);
            assert!(result.is_ok());

            std::fs::remove_file(&socket.socket_path).unwrap();
            std::thread::sleep(std::time::Duration::from_millis(WAIT_REMOVE_MILLIS));

            assert!(!socket.requested_remove.load(Ordering::SeqCst));
        }
    }

    #[test]
    fn test_close() {
        let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string());
        assert!(socket.is_ok());

        let out_cmd0 = Command::new("cat")
            .arg(format!("/proc/{}/status", std::process::id()))
            .output()
            .expect("Failed to run cat");
        let out0 = std::str::from_utf8(&out_cmd0.stdout).unwrap();
        let crt_num_threads0 = get_num_threads_from_status_output(out0.to_string());

        if let Ok(mut socket) = socket {
            let _ = UnixListener::bind(socket.get_path())
                .map_err(|e| {
                    new_nitro_cli_failure!(
                        &format!("Failed to bind to socket: {:?}", e),
                        NitroCliErrorEnum::SocketError
                    )
                })
                .ok_or_exit_with_errno(Some("Error binding"));

            let result = socket.start_monitoring(true);
            assert!(result.is_ok());

            let result = socket.close_mut();
            assert!(result.is_ok());
            assert!(socket.requested_remove.load(Ordering::SeqCst));
        }

        let out_cmd1 = Command::new("cat")
            .arg(format!("/proc/{}/status", std::process::id()))
            .output()
            .expect("Failed to run cat");
        let out1 = std::str::from_utf8(&out_cmd1.stdout).unwrap();
        let crt_num_threads1 = get_num_threads_from_status_output(out1.to_string());

        assert_eq!(crt_num_threads0, crt_num_threads1);
    }
}
#![deny(missing_docs)]
#![deny(warnings)]

use inotify::{EventMask, Inotify, WatchMask};
use log::{debug, warn};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread::{self, JoinHandle};

use crate::common::get_socket_path;
use crate::common::{ExitGracefully, NitroCliErrorEnum, NitroCliFailure, NitroCliResult};
use crate::new_nitro_cli_failure;

#[derive(Default)]
pub struct EnclaveProcSock {
    socket_path: PathBuf,
    remove_listener_thread: Option<JoinHandle<()>>,
    requested_remove: Arc<AtomicBool>,
}

impl Clone for EnclaveProcSock {
    fn clone(&self) -> Self {
        EnclaveProcSock {
            socket_path: self.socket_path.clone(),
            remove_listener_thread: None,
            requested_remove: self.requested_remove.clone(),
        }
    }
}

impl Drop for EnclaveProcSock {
    fn drop(&mut self) {
        self.close_mut()
            .ok_or_exit_with_errno(Some("Failed to drop socket"));
    }
}

impl EnclaveProcSock {
    pub fn new(enclave_id: &str) -> NitroCliResult<Self> {
        let socket_path = get_socket_path(enclave_id).map_err(|_| {
            new_nitro_cli_failure!(
                "Failed to create enclave process socket",
                NitroCliErrorEnum::SocketPathNotFound
            )
        })?;

        Ok(EnclaveProcSock {
            socket_path,
            remove_listener_thread: None,
            requested_remove: Arc::new(AtomicBool::new(false)),
        })
    }

    pub fn get_path(&self) -> &Path {
        &self.socket_path.as_path()
    }

    pub fn set_path(&mut self, socket_path: PathBuf) {
        self.socket_path = socket_path;
    }

    pub fn start_monitoring(&mut self, exit_on_delete: bool) -> NitroCliResult<()> {
        let path_clone = self.socket_path.clone();
        let requested_remove_clone = self.requested_remove.clone();
        let mut socket_inotify = Inotify::init().map_err(|e| {
            new_nitro_cli_failure!(
                &format!("Failed to initialize socket notifications: {:?}", e),
                NitroCliErrorEnum::InotifyError
            )
        })?;

        socket_inotify
            .add_watch(
                self.socket_path.as_path(),
                WatchMask::ATTRIB | WatchMask::DELETE_SELF,
            )
            .map_err(|e| {
                new_nitro_cli_failure!(
                    &format!("Failed to add watch to inotify: {:?}", e),
                    NitroCliErrorEnum::InotifyError
                )
            })?;

        self.remove_listener_thread = Some(thread::spawn(move || {
            socket_removal_listener(
                path_clone,
                requested_remove_clone,
                socket_inotify,
                exit_on_delete,
            )
        }));

        Ok(())
    }

    fn close_mut(&mut self) -> NitroCliResult<()> {
        self.requested_remove.store(true, Ordering::SeqCst);

        if self.socket_path.exists() {
            std::fs::remove_file(&self.socket_path).map_err(|e| {
                new_nitro_cli_failure!(
                    &format!(
                        "Failed to remove socket file {:?} from disk: {:?}",
                        self.socket_path, e
                    ),
                    NitroCliErrorEnum::FileOperationFailure
                )
            })?;
        }

        if self.remove_listener_thread.is_some() {
            self.remove_listener_thread
                .take()
                .unwrap()
                .join()
                .map_err(|e| {
                    new_nitro_cli_failure!(
                        &format!("Failed to join socket notification thread: {:?}", e),
                        NitroCliErrorEnum::ThreadJoinFailure
                    )
                })?;
        }

        Ok(())
    }

    pub fn close(mut self) -> NitroCliResult<()> {
        self.close_mut()
            .map_err(|e| e.add_subaction("Close socket".to_string()))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    use std::os::unix::net::UnixListener;
    use std::process::Command;

    const DUMMY_ENCLAVE_ID: &str = "i-0000000000000000-enc0123456789012345";
    const THREADS_STR: &str = "Threads:";
    const WAIT_REMOVE_MILLIS: u64 = 10;

    fn get_num_threads_from_status_output(status_str: String) -> u32 {
        let start_idx = status_str.find(THREADS_STR);
        let mut iter = status_str.chars();

        iter.by_ref().nth(start_idx.unwrap() + THREADS_STR.len());

        let slice = iter.as_str();
        let new_str = slice.to_string();
        let end_idx = new_str.find('\n');
        let substr = &slice[..end_idx.unwrap()];

        substr.parse().unwrap()
    }

    #[test]
    fn test_enclaveprocsock_init() {
        let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string());
        assert!(socket.is_ok());

        if let Ok(socket) = socket {
            assert!(socket
                .socket_path
                .as_path()
                .to_str()
                .unwrap()
                .contains("0123456789012345"));
            assert!(socket.remove_listener_thread.is_none());
            assert!(!socket.requested_remove.load(Ordering::SeqCst));
        }
    }

    #[test]
    fn test_start_monitoring() {
        let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string());
        assert!(socket.is_ok());

        if let Ok(mut socket) = socket {
            UnixListener::bind(socket.get_path())
                .map_err(|e| {
                    new_nitro_cli_failure!(
                        &format!("Failed to bind to socket: {:?}", e),
                        NitroCliErrorEnum::SocketError
                    )
                })
                .ok_or_exit_with_errno(Some("Error binding"));

            let result = socket.start_monitoring(false);
            assert!(result.is_ok());

            std::fs::remove_file(&socket.socket_path).unwrap();
            std::thread::sleep(std::time::Duration::from_millis(WAIT_REMOVE_MILLIS));

            assert!(!socket.requested_remove.load(Ordering::SeqCst));
        }
    }

    #[test]
    fn test_close() {
        let socket = EnclaveProcSock::new(&DUMMY_ENCLAVE_ID.to_string());
        assert!(socket.is_ok());

        let out_cmd0 = Command::new("cat")
            .arg(format!("/proc/{}/status", std::process::id()))
            .output()
            .expect("Failed to run cat");
        let out0 = std::str::from_utf8(&out_cmd0.stdout).unwrap();
        let crt_num_threads0 = get_num_threads_from_status_output(out0.to_string());

        if let Ok(mut socket) = socket {
            let _ = UnixListener::bind(socket.get_path())
                .map_err(|e| {
                    new_nitro_cli_failure!(
                        &format!("Failed to bind to socket: {:?}", e),
                        NitroCliErrorEnum::SocketError
                    )
                })
                .ok_or_exit_with_errno(Some("Error binding"));

            let result = socket.start_monitoring(true);
            assert!(result.is_ok());

            let result = socket.close_mut();
            assert!(result.is_ok());
            assert!(socket.requested_remove.load(Ordering::SeqCst));
        }

        let out_cmd1 = Command::new("cat")
            .arg(format!("/proc/{}/status", std::process::id()))
            .output()
            .expect("Failed to run cat");
        let out1 = std::str::from_utf8(&out_cmd1.stdout).unwrap();
        let crt_num_threads1 = get_num_threads_from_status_output(out1.to_string());

        assert_eq!(crt_num_threads0, crt_num_threads1);
    }
}
fn socket_removal_listener(
    socket_path: PathBuf,
    requested_remove: Arc<AtomicBool>,
    mut socket_inotify: Inotify,
    exit_on_delete: bool,
) {
    let mut buffer = [0u8; 4096];
    let mut done = false;

    debug!("Socket file event listener started for {:?}.", socket_path);

    while !done {
        let events = socket_inotify
            .read_events_blocking(&mut buffer)
            .map_err(|e| {
                new_nitro_cli_failure!(
                    &format!("Socket removal listener error: {:?}", e),
                    NitroCliErrorEnum::InotifyError
                )
                .set_action("Run Enclave".to_string())
            })
            .ok_or_exit_with_errno(Some("Failed to read inotify events"));

        for event in events {
            if (event.mask.contains(EventMask::ATTRIB)
                || event.mask.contains(EventMask::DELETE_SELF))
                && !socket_path.exists()
            {
                if requested_remove.load(Ordering::SeqCst) {
                    debug!("The enclave process socket has deleted itself.");
                    done = true;
                } else {
                    warn!("The enclave process socket has been deleted!");
                    if exit_on_delete {
                        std::process::exit(1);
                    }
                    done = true;
                }
            }
        }
    }

    debug!("Enclave process socket monitoring is done.");
}
function_block-full_function
[ { "content": "/// Get the path to the Unix socket owned by an enclave process which also owns the enclave with the given ID.\n\npub fn get_socket_path(enclave_id: &str) -> NitroCliResult<PathBuf> {\n\n // The full enclave ID is \"i-(...)-enc<enc_id>\" and we want to extract only <enc_id>.\n\n let tokens: Vec<_> = enclave_id.rsplit(\"-enc\").collect();\n\n let sockets_path = get_sockets_dir_path();\n\n Ok(sockets_path.join(tokens[0]).with_extension(\"sock\"))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[allow(unused_imports)]\n\n use super::*;\n\n\n\n use crate::common::commands_parser::EmptyArgs;\n\n\n\n const TMP_DIR_STR: &str = \"./tmp_sock_dir\";\n\n\n\n fn unset_envvar(varname: &String) {\n\n let _ = unsafe {\n\n libc::unsetenv(varname.as_ptr() as *const i8);\n\n };\n", "file_path": "src/common/mod.rs", "rank": 0, "score": 283690.6552085695 }, { "content": "/// Get the path to the directory containing the Unix sockets owned by all enclave processes.\n\npub fn get_sockets_dir_path() -> PathBuf {\n\n let log_path = match env::var(SOCKETS_DIR_PATH_ENV_VAR) {\n\n Ok(env_path) => env_path,\n\n Err(_) => SOCKETS_DIR_PATH.to_string(),\n\n };\n\n Path::new(&log_path).to_path_buf()\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 1, "score": 223286.01128656862 }, { "content": "/// Open a file at a given location for writing and appending.\n\nfn open_log_file(file_path: &Path) -> NitroCliResult<File> {\n\n let file = OpenOptions::new()\n\n .create(true)\n\n .append(true)\n\n .read(false)\n\n .open(file_path)\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to open log file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?;\n\n\n\n let log_file_uid = Uid::from_raw(\n\n file.metadata()\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to get log file metadata: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n", "file_path": "src/common/logger.rs", "rank": 2, "score": 206801.0595080473 }, { "content": "/// Obtain an enclave's CID given its full ID.\n\npub fn enclave_proc_get_cid(enclave_id: &str) -> NitroCliResult<u64> {\n\n let mut comm = enclave_proc_connect_to_single(enclave_id)\n\n .map_err(|e| e.add_subaction(\"Failed to connect to enclave process\".to_string()))?;\n\n // TODO: Replicate output of old CLI on invalid enclave IDs.\n\n enclave_proc_command_send_single::<EmptyArgs>(\n\n EnclaveProcessCommandType::GetEnclaveCID,\n\n None,\n\n &mut comm,\n\n )\n\n .map_err(|e| e.add_subaction(\"Failed to send CID request to enclave process\".to_string()))?;\n\n\n\n info!(\"Sent command: GetEnclaveCID\");\n\n let enclave_cid = read_u64_le(&mut comm)\n\n .map_err(|e| e.add_subaction(String::from(\"Failed to read CID from enclave process\")))?;\n\n\n\n // We got the CID, so shut the connection down.\n\n comm.shutdown(std::net::Shutdown::Both).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\n\n \"Failed to shut down connection after obtaining CID: {:?}\",\n\n e\n\n ),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })?;\n\n\n\n Ok(enclave_cid)\n\n}\n", "file_path": "src/enclave_proc_comm.rs", "rank": 3, "score": 205411.52140448883 }, { "content": "/// Open a connection to an enclave-specific socket.\n\npub fn enclave_proc_connect_to_single(enclave_id: &str) -> NitroCliResult<UnixStream> {\n\n let socket_path = get_socket_path(enclave_id).map_err(|e| {\n\n e.add_subaction(\"Connect to specific enclave process\".to_string())\n\n .set_error_code(NitroCliErrorEnum::SocketError)\n\n })?;\n\n 
UnixStream::connect(socket_path).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to connect to specific enclave process: {:?}\", e),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/enclave_proc_comm.rs", "rank": 4, "score": 202773.24729989652 }, { "content": "/// Read a LE-encoded 64-bit unsigned value from a socket.\n\npub fn read_u64_le(socket: &mut dyn Read) -> NitroCliResult<u64> {\n\n let mut bytes = [0u8; std::mem::size_of::<u64>()];\n\n socket.read_exact(&mut bytes).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\n\n \"Failed to read {} bytes from the given socket: {:?}\",\n\n std::mem::size_of::<u64>(),\n\n e\n\n ),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })?;\n\n\n\n Ok(u64::from_le_bytes(bytes))\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 5, "score": 201561.32342453173 }, { "content": "/// Write a LE-encoded 64-bit unsigned value to a socket.\n\npub fn write_u64_le(socket: &mut dyn Write, value: u64) -> NitroCliResult<()> {\n\n let bytes = value.to_le_bytes();\n\n socket.write_all(&bytes).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\n\n \"Failed to write {} bytes to the given socket: {:?}\",\n\n std::mem::size_of::<u64>(),\n\n e\n\n ),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 6, "score": 193305.86323572346 }, { "content": "/// Process reply messages from all connected enclave processes.\n\npub fn enclave_process_handle_all_replies<T>(\n\n replies: &mut [UnixStream],\n\n prev_failed_conns: usize,\n\n print_as_vec: bool,\n\n allowed_return_codes: Vec<i32>,\n\n) -> NitroCliResult<()>\n\nwhere\n\n T: Clone + DeserializeOwned + Serialize,\n\n{\n\n let objects = enclave_proc_handle_outputs::<T>(replies);\n\n let failed_conns = prev_failed_conns + replies.len() - objects.len();\n\n\n\n // Print a message if we have any connections that have failed.\n\n if failed_conns > 0 {\n\n eprintln!(\"Failed connections: {}\", failed_conns);\n\n }\n\n\n\n // Output the received objects either individually or as an array.\n\n if print_as_vec {\n\n let obj_vec: Vec<T> = objects.iter().map(|v| v.0.clone()).collect();\n", "file_path": "src/enclave_proc_comm.rs", "rank": 7, "score": 192805.76646955786 }, { "content": "/// Get the directory containing Nitro CLI related log files.\n\npub fn get_log_file_base_path() -> String {\n\n match env::var(LOGS_DIR_PATH_ENV_VAR) {\n\n Ok(env_path) => env_path,\n\n Err(_) => LOGS_DIR_PATH.to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/common/logger.rs", "rank": 9, "score": 186441.26218700237 }, { "content": "/// Terminates all enclave instances belonging to the current user (or all\n\n/// instances, if the current user has `root` permissions).\n\npub fn terminate_all_enclaves() -> NitroCliResult<()> {\n\n let sockets_dir = get_sockets_dir_path();\n\n let mut replies: Vec<UnixStream> = vec![];\n\n let sockets = std::fs::read_dir(sockets_dir.as_path()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Error while accessing sockets directory: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?;\n\n\n\n let mut err_socket_files: usize = 0;\n\n let mut failed_connections: Vec<PathBuf> = Vec::new();\n\n for socket in sockets {\n\n let entry = match socket {\n\n Ok(value) => value,\n\n Err(_) => {\n\n err_socket_files += 1;\n\n continue;\n\n }\n\n };\n", "file_path": "src/lib.rs", "rank": 10, "score": 179243.31619821617 }, { "content": "/// Print a message to a connection's standard error, if the 
connection is available.\n\npub fn safe_conn_eprintln(conn: Option<&Connection>, msg: &str) -> NitroCliResult<()> {\n\n if conn.is_none() {\n\n return Ok(());\n\n }\n\n\n\n conn.unwrap().eprintln(msg)\n\n}\n", "file_path": "src/enclave_proc/connection.rs", "rank": 11, "score": 178817.0685738715 }, { "content": "/// Print a message to a connection's standard output, if the connection is available.\n\npub fn safe_conn_println(conn: Option<&Connection>, msg: &str) -> NitroCliResult<()> {\n\n if conn.is_none() {\n\n return Ok(());\n\n }\n\n\n\n conn.unwrap().println(msg)\n\n}\n\n\n", "file_path": "src/enclave_proc/connection.rs", "rank": 12, "score": 178817.0685738715 }, { "content": "/// Create the enclave process.\n\nfn create_enclave_process(logger: &EnclaveProcLogWriter) -> NitroCliResult<()> {\n\n // To get a detached process, we first:\n\n // (1) Temporarily ignore specific signals (SIGHUP).\n\n // (2) Daemonize the current process.\n\n // (3) Wait until the detached process is orphaned.\n\n // (4) Restore signal handlers.\n\n let signal_handler = SignalHandler::new(&[SIGHUP])\n\n .mask_all()\n\n .map_err(|e| e.add_subaction(\"Failed to mask signals\".to_string()))?;\n\n let ppid = getpid();\n\n\n\n // Daemonize the current process. The working directory remains\n\n // unchanged and the standard descriptors are routed to '/dev/null'.\n\n daemon(true, false).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to daemonize enclave process: {:?}\", e),\n\n NitroCliErrorEnum::DaemonizeProcessFailure\n\n )\n\n })?;\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 13, "score": 177657.75238468434 }, { "content": "/// Notify both the user and the logger of an error.\n\npub fn notify_error(err_msg: &str) {\n\n eprintln!(\"{}\", err_msg);\n\n error!(\"{}\", err_msg);\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 14, "score": 173088.17134838912 }, { "content": "/// Get the path to the log file.\n\nfn get_log_file_path() -> PathBuf {\n\n Path::new(&get_log_file_base_path()).join(LOG_FILE_NAME)\n\n}\n\n\n", "file_path": "src/common/logger.rs", "rank": 15, "score": 173019.37978662175 }, { "content": "/// Obtain an enclave's description and provide it through the given connection.\n\npub fn describe_enclaves(\n\n enclave_manager: &EnclaveManager,\n\n connection: &Connection,\n\n) -> NitroCliResult<()> {\n\n debug!(\"describe_enclaves\");\n\n\n\n let info = get_enclave_describe_info(enclave_manager)\n\n .map_err(|e| e.add_subaction(String::from(\"Execute Describe Enclave command\")))?;\n\n connection.println(\n\n serde_json::to_string_pretty(&info)\n\n .map_err(|err| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to display enclave describe data: {:?}\", err),\n\n NitroCliErrorEnum::SerdeError\n\n )\n\n })?\n\n .as_str(),\n\n )\n\n}\n", "file_path": "src/enclave_proc/commands.rs", "rank": 16, "score": 172990.38250282087 }, { "content": "/// Terminate an enclave and provide the termination status through the given connection.\n\npub fn terminate_enclaves(\n\n enclave_manager: &mut EnclaveManager,\n\n connection: Option<&Connection>,\n\n) -> NitroCliResult<()> {\n\n let enclave_id = enclave_manager.enclave_id.clone();\n\n\n\n debug!(\"terminate_enclaves\");\n\n enclave_manager\n\n .update_state(EnclaveState::Terminating)\n\n .map_err(|e| e.add_subaction(\"Failed to update enclave state\".to_string()))?;\n\n if let Err(error_info) = enclave_manager.terminate_enclave() {\n\n safe_conn_eprintln(\n\n connection,\n\n format!(\n\n \"Warning: Failed to stop enclave {}\\nError 
message: {:?}\",\n\n enclave_manager.enclave_id,\n\n construct_error_message(&error_info).as_str()\n\n )\n\n .as_str(),\n\n )?;\n", "file_path": "src/enclave_proc/commands.rs", "rank": 17, "score": 172990.3361959448 }, { "content": "/// Launch an enclave with the specified arguments and provide the launch status through the given connection.\n\npub fn run_enclaves(\n\n args: &RunEnclavesArgs,\n\n connection: Option<&Connection>,\n\n) -> NitroCliResult<EnclaveManager> {\n\n debug!(\"run_enclaves\");\n\n\n\n let eif_file = File::open(&args.eif_path).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to open the EIF file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?;\n\n\n\n let cpu_ids = CpuInfo::new()\n\n .map_err(|e| e.add_subaction(\"Failed to construct CPU information\".to_string()))?\n\n .get_cpu_config(args)\n\n .map_err(|e| e.add_subaction(\"Failed to get CPU configuration\".to_string()))?;\n\n let mut enclave_manager = EnclaveManager::new(\n\n args.enclave_cid,\n\n args.memory_mib,\n", "file_path": "src/enclave_proc/commands.rs", "rank": 18, "score": 172990.24655930986 }, { "content": "/// Connects to the enclave console and prints it continously.\n\npub fn enclave_console(enclave_cid: u64) -> NitroCliResult<()> {\n\n let console = Console::new(\n\n VMADDR_CID_HYPERVISOR,\n\n u32::try_from(enclave_cid).map_err(|err| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to parse enclave CID: {:?}\", err),\n\n NitroCliErrorEnum::IntegerParsingError\n\n )\n\n })? + CID_TO_CONSOLE_PORT_OFFSET,\n\n )\n\n .map_err(|e| e.add_subaction(\"Connect to enclave console\".to_string()))?;\n\n println!(\"Successfully connected to the console.\");\n\n console\n\n .read_to(io::stdout().by_ref())\n\n .map_err(|e| e.add_subaction(\"Connect to enclave console\".to_string()))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 167556.01540593526 }, { "content": "/// Wrapper over the console connection function.\n\npub fn console_enclaves(enclave_cid: u64) -> NitroCliResult<()> {\n\n debug!(\"console_enclaves\");\n\n println!(\"Connecting to the console for enclave {}...\", enclave_cid);\n\n enclave_console(enclave_cid)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 20, "score": 167551.73905877193 }, { "content": "/// Fetch JSON objects and statuses from all connected enclave processes.\n\npub fn enclave_proc_handle_outputs<T>(conns: &mut [UnixStream]) -> Vec<(T, i32)>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n let mut objects: Vec<(T, i32)> = Vec::new();\n\n\n\n for conn in conns.iter_mut() {\n\n // We only count connections that have yielded a valid JSON object and a status\n\n let (object, status) = enclave_proc_handle_output::<T>(conn);\n\n if let Some(object) = object {\n\n if let Some(status) = status {\n\n objects.push((object, status));\n\n }\n\n }\n\n }\n\n\n\n objects\n\n}\n\n\n", "file_path": "src/enclave_proc_comm.rs", "rank": 21, "score": 167090.75571097352 }, { "content": "/// Obtain the enclave information requested by the `run-enclaves` command.\n\npub fn get_run_enclaves_info(\n\n enclave_cid: u64,\n\n slot_id: u64,\n\n cpu_ids: Vec<u32>,\n\n memory: u64,\n\n) -> NitroCliResult<EnclaveRunInfo> {\n\n let info = EnclaveRunInfo::new(\n\n generate_enclave_id(slot_id)?,\n\n enclave_cid,\n\n cpu_ids.len(),\n\n cpu_ids,\n\n memory,\n\n );\n\n Ok(info)\n\n}\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 22, "score": 166902.31377354177 }, { "content": "/// Obtain the enclave information requested by the 
`describe-enclaves` command.\n\npub fn get_enclave_describe_info(\n\n enclave_manager: &EnclaveManager,\n\n) -> NitroCliResult<EnclaveDescribeInfo> {\n\n let (slot_uid, enclave_cid, cpus_count, cpu_ids, memory_mib, flags, state) =\n\n enclave_manager.get_description_resources()?;\n\n let info = EnclaveDescribeInfo::new(\n\n generate_enclave_id(slot_uid)?,\n\n enclave_cid,\n\n cpus_count,\n\n cpu_ids,\n\n memory_mib,\n\n state.to_string(),\n\n flags_to_string(flags),\n\n );\n\n Ok(info)\n\n}\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 23, "score": 166902.31377354177 }, { "content": "/// Build an enclave image file with the provided arguments.\n\npub fn build_enclaves(args: BuildEnclavesArgs) -> NitroCliResult<()> {\n\n debug!(\"build_enclaves\");\n\n eprintln!(\"Start building the Enclave Image...\");\n\n build_from_docker(\n\n &args.docker_uri,\n\n &args.docker_dir,\n\n &args.output,\n\n &args.signing_certificate,\n\n &args.private_key,\n\n )\n\n .map_err(|e| e.add_subaction(\"Failed to build EIF from docker\".to_string()))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 24, "score": 165025.6795456463 }, { "content": "/// Launch the enclave process.\n\n///\n\n/// * `comm_fd` - A descriptor used for initial communication with the parent Nitro CLI instance.\n\n/// * `logger` - The current log writer, whose ID gets updated when an enclave is launched.\n\npub fn enclave_process_run(comm_stream: UnixStream, logger: &EnclaveProcLogWriter) {\n\n create_enclave_process(logger)\n\n .map_err(|e| e.set_action(\"Run Enclave\".to_string()))\n\n .ok_or_exit_with_errno(None);\n\n let res = process_event_loop(comm_stream, logger);\n\n if let Err(mut error_info) = res {\n\n error_info = error_info.set_action(\"Run Enclave\".to_string());\n\n notify_error(construct_error_message(&error_info).as_str());\n\n process::exit(error_info.error_code as i32);\n\n }\n\n process::exit(0);\n\n}\n", "file_path": "src/enclave_proc/mod.rs", "rank": 25, "score": 164562.8157272892 }, { "content": "pub fn enclave_ready(\n\n listener: VsockListener,\n\n poll_timeout_ms: c_int,\n\n) -> Result<(), EifLoaderError> {\n\n let mut poll_fds = [PollFd::new(listener.as_raw_fd(), PollFlags::POLLIN)];\n\n let result = poll(&mut poll_fds, poll_timeout_ms);\n\n if result == Ok(0) {\n\n return Err(EifLoaderError::VsockTimeoutError);\n\n } else if result != Ok(1) {\n\n return Err(EifLoaderError::SocketPollingError);\n\n }\n\n\n\n let mut stream = listener\n\n .accept()\n\n .map_err(|_err| EifLoaderError::VsockAcceptingError)?;\n\n\n\n // Wait until the other end is closed\n\n let mut buf = [0u8];\n\n let bytes = stream\n\n .0\n", "file_path": "eif_loader/src/lib.rs", "rank": 26, "score": 163276.20271320757 }, { "content": "/// Send the given command, then close the channel that was used for sending it.\n\nfn send_command_and_close(cmd: EnclaveProcessCommandType, stream: &mut UnixStream) {\n\n let action_str = &get_command_action(cmd);\n\n\n\n enclave_proc_command_send_single::<EmptyArgs>(cmd, None, stream)\n\n .ok_or_exit_with_errno(Some(\"Failed to send command\"));\n\n stream\n\n .shutdown(std::net::Shutdown::Both)\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to close stream after sending command: {:?}\", e),\n\n NitroCliErrorEnum::SocketCloseError\n\n )\n\n .set_action(action_str.to_string())\n\n })\n\n .ok_or_exit_with_errno(Some(\"Failed to shut down stream\"));\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 27, "score": 162251.139226223 }, { "content": "/// Connect to 
all existing enclave processes, returning a connection to each.\n\npub fn enclave_proc_connect_to_all() -> NitroCliResult<Vec<UnixStream>> {\n\n let paths = fs::read_dir(get_sockets_dir_path()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to access sockets directory: {:?}\", e),\n\n NitroCliErrorEnum::ReadFromDiskFailure\n\n )\n\n })?;\n\n Ok(paths\n\n .filter_map(|path| path.ok())\n\n .map(|path| path.path())\n\n .filter(|path| !path.is_dir())\n\n .filter_map(|path| {\n\n // Get the file path string.\n\n if let Some(path_str) = path.to_str() {\n\n // Enclave process sockets are named \"<enclave_id>.sock\".\n\n if !path_str.ends_with(\".sock\") {\n\n return None;\n\n }\n\n\n\n // At this point we have found a potential socket.\n", "file_path": "src/enclave_proc_comm.rs", "rank": 28, "score": 160310.86889019771 }, { "content": "/// Broadcast a command to all available enclave processes.\n\npub fn enclave_proc_command_send_all<T>(\n\n cmd: EnclaveProcessCommandType,\n\n args: Option<&T>,\n\n) -> NitroCliResult<(Vec<UnixStream>, usize)>\n\nwhere\n\n T: Serialize,\n\n{\n\n // Open a connection to each valid socket.\n\n let mut replies: Vec<UnixStream> = vec![];\n\n let epoll_fd = epoll::epoll_create().map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to create epoll: {:?}\", e),\n\n NitroCliErrorEnum::EpollError\n\n )\n\n })?;\n\n let comms: Vec<NitroCliResult<()>> = enclave_proc_connect_to_all()\n\n .map_err(|e| {\n\n e.add_subaction(\"Failed to send command to all enclave processes\".to_string())\n\n })?\n\n .iter_mut()\n", "file_path": "src/enclave_proc_comm.rs", "rank": 29, "score": 158195.71311394055 }, { "content": "/// Initialize logging.\n\npub fn init_logger() -> NitroCliResult<EnclaveProcLogWriter> {\n\n // The log file is \"nitro-cli.log\" and is stored in the NPE resources directory.\n\n let log_writer = EnclaveProcLogWriter::new()?;\n\n\n\n // Initialize logging with the new log writer.\n\n flexi_logger::Logger::with_env_or_str(DEFAULT_LOG_LEVEL)\n\n .log_target(LogTarget::Writer(Box::new(log_writer.clone())))\n\n .start()\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to initialize enclave process logger: {:?}\", e),\n\n NitroCliErrorEnum::LoggerError\n\n )\n\n })?;\n\n\n\n // The log writer is provided for sharing between CLI-related processes.\n\n Ok(log_writer)\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/common/logger.rs", "rank": 30, "score": 158179.2308054662 }, { "content": "/// Notify that an error has occurred, also forwarding the error message to a connection.\n\nfn notify_error_with_conn(err_msg: &str, conn: &Connection, action: EnclaveProcessCommandType) {\n\n let action_str = &get_command_action(action);\n\n\n\n notify_error(err_msg);\n\n conn.eprintln(err_msg)\n\n .map_err(|e| e.set_action(action_str.to_string()))\n\n .ok_or_exit_with_errno(Some(\"Failed to forward error message to connection\"));\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 31, "score": 156683.81023968148 }, { "content": "/// Transfers a chunck of maximum 4KB from src to dst\n\n/// If no error occurs, returns true if the source disconnects and false otherwise\n\nfn transfer(src: &mut dyn Read, dst: &mut dyn Write) -> bool {\n\n let mut buffer = [0u8; BUFF_SIZE];\n\n\n\n let nbytes = src.read(&mut buffer);\n\n let nbytes = match nbytes {\n\n Err(_) => 0,\n\n Ok(n) => n,\n\n };\n\n\n\n if nbytes == 0 {\n\n return true;\n\n }\n\n\n\n dst.write_all(&buffer[..nbytes]).is_err()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use 
rand;\n\n use std::fs;\n", "file_path": "vsock_proxy/src/starter.rs", "rank": 32, "score": 155792.24937396182 }, { "content": "/// Generate a unique ID for a new enclave with the specified slot ID.\n\npub fn generate_enclave_id(slot_id: u64) -> NitroCliResult<String> {\n\n let file_path = \"/sys/devices/virtual/dmi/id/board_asset_tag\";\n\n if metadata(file_path).is_ok() {\n\n let mut file = File::open(file_path).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to open file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to read from file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?;\n\n contents.retain(|c| !c.is_whitespace());\n\n return Ok(format!(\"{}-enc{:x}\", contents, slot_id));\n\n }\n\n Ok(format!(\"i-0000000000000000-enc{:x}\", slot_id))\n\n}\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 33, "score": 153150.30513983782 }, { "content": "/// Spawn an enclave process and wait until it has detached and has\n\n/// taken ownership of its communication socket.\n\npub fn enclave_proc_spawn(logger: &EnclaveProcLogWriter) -> NitroCliResult<UnixStream> {\n\n let (cli_socket, enclave_proc_socket) = UnixStream::pair().map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not create a socket pair: {:?}\", e),\n\n NitroCliErrorEnum::SocketPairCreationFailure\n\n )\n\n })?;\n\n\n\n // Prevent the descriptor from being closed when calling exec().\n\n let enclave_proc_fd = enclave_proc_socket.as_raw_fd();\n\n unsafe {\n\n let flags = libc::fcntl(enclave_proc_fd, libc::F_GETFD);\n\n libc::fcntl(enclave_proc_fd, libc::F_SETFD, flags & !libc::FD_CLOEXEC);\n\n }\n\n\n\n // Spawn an intermediate child process. 
This will fork again in order to\n\n // create the detached enclave process.\n\n let fork_status = fork();\n\n\n\n if let Ok(ForkResult::Child) = fork_status {\n", "file_path": "src/enclave_proc_comm.rs", "rank": 34, "score": 152870.74324077816 }, { "content": "/// Obtain the logger ID from the full enclave ID.\n\nfn get_logger_id(enclave_id: &str) -> String {\n\n // The full enclave ID is \"i-(...)-enc<enc_id>\" and we want to extract only <enc_id>.\n\n let tokens: Vec<_> = enclave_id.rsplit(\"-enc\").collect();\n\n format!(\"enc-{}:{}\", tokens[0], std::process::id())\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 35, "score": 151519.31061412932 }, { "content": "/// Receive an object of a specified type from an input stream.\n\npub fn receive_from_stream<T>(input_stream: &mut dyn Read) -> NitroCliResult<T>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n let size = read_u64_le(input_stream)\n\n .map_err(|e| e.add_subaction(\"Failed to receive data size\".to_string()))?\n\n as usize;\n\n let mut raw_data: Vec<u8> = vec![0; size];\n\n input_stream.read_exact(&mut raw_data[..]).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to receive data: {:?}\", e),\n\n NitroCliErrorEnum::SocketError\n\n )\n\n })?;\n\n let data: T = serde_cbor::from_slice(&raw_data[..]).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to decode received data: {:?}\", e),\n\n NitroCliErrorEnum::SerdeError\n\n )\n\n })?;\n\n Ok(data)\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 36, "score": 148320.76089728 }, { "content": "/// Send a command to a single socket.\n\npub fn enclave_proc_command_send_single<T>(\n\n cmd: EnclaveProcessCommandType,\n\n args: Option<&T>,\n\n mut socket: &mut UnixStream,\n\n) -> NitroCliResult<()>\n\nwhere\n\n T: Serialize,\n\n{\n\n // Serialize the command type.\n\n let cmd_bytes = serde_cbor::to_vec(&cmd).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Invalid command format: {:?}\", e),\n\n NitroCliErrorEnum::InvalidCommand\n\n )\n\n })?;\n\n\n\n // The command is written twice. The first read is done by the connection listener to check if this is\n\n // a shut-down command. The second read is done by the enclave process for all non-shut-down commands.\n\n for _ in 0..2 {\n\n write_u64_le(&mut socket, cmd_bytes.len() as u64)\n", "file_path": "src/common/mod.rs", "rank": 37, "score": 148064.5302358623 }, { "content": "/// Detailed information based on user-provided error code.\n\npub fn explain_error(error_code_str: String) {\n\n match error_code_str.as_str() {\n\n \"E00\" => {\n\n eprintln!(\"Unspecified error. This is used as a catch-all error and should not be used in the code.\");\n\n },\n\n \"E01\" => {\n\n eprintln!(\"Missing mandatory argument. Such error appears when the Nitro CLI is requested to perform an operation, but not all of the mandatory arguments were supplied.\\n\\tExample: `nitro-cli run-enclave --cpu-count 2 --eif-path /path/to/my/eif`. Note that in this case, the mandatory parameter `--memory` is missing a value.\");\n\n },\n\n \"E02\" => {\n\n eprintln!(\"CLI conflicting arguments. Such error appears when the Nitro CLI is supplied two contradicting arguments at the same time, such as `--cpu-count` and `--cpu-ids`.\\nIn this case, only one of the parameters should be supplied.\");\n\n },\n\n \"E03\" => {\n\n eprintln!(\"Invalid argument provided. 
Such error appears when the type of at least one of the arguments provided to the Nitro CLI does not match the expected type of that parameter.\\n\\tExample: `nitro-cli run-enclave --cpu-count 1z --memory 80 --eif-path /path/to/my/eif`. In this case, `cpu-count` is not a valid integer value.\" );\n\n },\n\n \"E04\" => {\n\n eprintln!(\"Socket pair creation failure. Such error apears when the Nitro CLI process attempts to open a stream pair in order to send a command to the enclave process, but the stream initialization fails.\");\n\n },\n\n \"E05\" => {\n\n eprintln!(\"Process spawn failure. Such error appears when the main Nitro CLI process failed to spawn the enclave process, in order to complete a `run-enclave` command.\");\n\n },\n", "file_path": "src/common/document_errors.rs", "rank": 38, "score": 144214.91359242194 }, { "content": "pub fn send_file(args: FileArgs) -> Result<(), String> {\n\n let mut file =\n\n File::open(&args.localfile).map_err(|err| format!(\"Could not open localfile {:?}\", err))?;\n\n let vsocket = vsock_connect(args.cid, args.port)?;\n\n let socket_fd = vsocket.as_raw_fd();\n\n\n\n // Send command id\n\n send_u64(socket_fd, CmdId::SendFile as u64)?;\n\n\n\n // send remotefile path\n\n let buf = args.remotefile.as_bytes();\n\n let len: u64 = buf.len().try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n send_u64(socket_fd, len)?;\n\n send_loop(socket_fd, &buf, len)?;\n\n\n\n let filesize = file\n\n .metadata()\n\n .map_err(|err| format!(\"Could not get file metadate {:?}\", err))?\n\n .len();\n\n\n", "file_path": "samples/command_executer/src/lib.rs", "rank": 39, "score": 143398.56111409637 }, { "content": "pub fn recv_file(args: FileArgs) -> Result<(), String> {\n\n let mut file = File::create(&args.localfile)\n\n .map_err(|err| format!(\"Could not open localfile {:?}\", err))?;\n\n let vsocket = vsock_connect(args.cid, args.port)?;\n\n let socket_fd = vsocket.as_raw_fd();\n\n\n\n // Send command id\n\n send_u64(socket_fd, CmdId::RecvFile as u64)?;\n\n\n\n // send remotefile path\n\n let buf = args.remotefile.as_bytes();\n\n let len: u64 = buf.len().try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n send_u64(socket_fd, len)?;\n\n send_loop(socket_fd, &buf, len)?;\n\n\n\n // Receive filesize\n\n let filesize = recv_u64(socket_fd)?;\n\n println!(\n\n \"Receiving file {}(saving to {}) - size {}\",\n\n &args.remotefile,\n", "file_path": "samples/command_executer/src/lib.rs", "rank": 40, "score": 143398.56111409637 }, { "content": "/// Check if the `NITRO_BETWEEN_PACKETS_MILLIS` environment variable is set.\n\n/// If it is, return a `Duration` representing its value.\n\npub fn between_packets_delay() -> Option<Duration> {\n\n if let Ok(value) = std::env::var(\"NITRO_BETWEEN_PACKETS_MILLIS\") {\n\n if let Ok(value) = value.parse::<u64>() {\n\n return Some(Duration::from_millis(value));\n\n }\n\n }\n\n\n\n None\n\n}\n", "file_path": "src/enclave_proc/resource_manager.rs", "rank": 41, "score": 143305.97634422116 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct FileTemplate {\n\n path: String,\n\n source: String,\n\n mode: String,\n\n}\n\n\n", "file_path": "enclave_build/src/yaml_generator.rs", "rank": 42, "score": 141854.82385484898 }, { "content": "/// Get a string representation of the bit-mask which holds the enclave launch flags.\n\npub fn flags_to_string(flags: u64) -> String {\n\n if flags & NE_ENCLAVE_DEBUG_MODE == NE_ENCLAVE_DEBUG_MODE {\n\n \"DEBUG_MODE\"\n\n } else {\n\n \"NONE\"\n\n }\n\n .to_string()\n\n}\n\n\n", "file_path": 
"src/enclave_proc/utils.rs", "rank": 43, "score": 139399.81406360166 }, { "content": "/// The main event loop of the enclave process.\n\nfn process_event_loop(\n\n comm_stream: UnixStream,\n\n logger: &EnclaveProcLogWriter,\n\n) -> NitroCliResult<()> {\n\n let mut conn_listener = ConnectionListener::new()?;\n\n let mut enclave_manager = EnclaveManager::default();\n\n let mut terminate_thread: Option<std::thread::JoinHandle<()>> = None;\n\n let mut done = false;\n\n let mut ret_value = Ok(());\n\n\n\n // Start the signal handler before spawning any other threads. This is done since the\n\n // handler will mask all relevant signals from the current thread and this setting will\n\n // be automatically inherited by all threads spawned from this point on; we want this\n\n // because only the dedicated thread spawned by the handler should listen for signals.\n\n enclave_proc_configure_signal_handler(&conn_listener)\n\n .map_err(|e| e.add_subaction(\"Failed to configure signal handler\".to_string()))?;\n\n\n\n // Add the CLI communication channel to epoll.\n\n conn_listener\n\n .handle_new_connection(comm_stream)\n", "file_path": "src/enclave_proc/mod.rs", "rank": 44, "score": 139381.71867927333 }, { "content": "/// Returns a link with more detailed information regarding a specific error.\n\npub fn construct_help_link(error_code_str: String) -> String {\n\n format!(\n\n \"https://docs.aws.amazon.com/enclaves/latest/user/cli-errors.html#{}\",\n\n error_code_str\n\n )\n\n}\n\n\n", "file_path": "src/common/document_errors.rs", "rank": 45, "score": 135029.2230444417 }, { "content": "/// The default POSIX signal handling function, which notifies the enclave process to shut down gracefully.\n\nfn enclave_proc_handle_signals(comm_fd: RawFd, signal: Signal) -> bool {\n\n let mut stream = unsafe { UnixStream::from_raw_fd(comm_fd) };\n\n\n\n warn!(\n\n \"Received signal {:?}. 
The enclave process will now close.\",\n\n signal\n\n );\n\n send_command_and_close(\n\n EnclaveProcessCommandType::ConnectionListenerStop,\n\n &mut stream,\n\n );\n\n\n\n true\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 46, "score": 134862.4961441949 }, { "content": "/// Build an enclave image file from a Docker image.\n\npub fn build_from_docker(\n\n docker_uri: &str,\n\n docker_dir: &Option<String>,\n\n output_path: &str,\n\n signing_certificate: &Option<String>,\n\n private_key: &Option<String>,\n\n) -> NitroCliResult<(File, BTreeMap<String, String>)> {\n\n let blobs_path =\n\n blobs_path().map_err(|e| e.add_subaction(\"Failed to retrieve blobs path\".to_string()))?;\n\n let mut cmdline_file = File::open(format!(\"{}/cmdline\", blobs_path)).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not open kernel command line file: {:?}\", e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?;\n\n\n\n let mut cmdline = String::new();\n\n cmdline_file.read_to_string(&mut cmdline).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to read kernel command line: {:?}\", e),\n", "file_path": "src/lib.rs", "rank": 47, "score": 134579.48581569042 }, { "content": "/// Obtain an enclave's slot ID from its full ID.\n\npub fn get_slot_id(enclave_id: String) -> Result<u64, String> {\n\n let tokens: Vec<&str> = enclave_id.split(\"-enc\").collect();\n\n\n\n match tokens.get(1) {\n\n Some(slot_id) => u64::from_str_radix(*slot_id, 16)\n\n .map_err(|_err| \"Invalid enclave id format\".to_string()),\n\n None => Err(\"Invalid enclave_id.\".to_string()),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_generate_enclave_id() {\n\n let slot_id: u64 = 7;\n\n let enc_id = generate_enclave_id(slot_id);\n\n let file_path = \"/sys/devices/virtual/dmi/id/board_asset_tag\";\n\n\n", "file_path": "src/enclave_proc/utils.rs", "rank": 48, "score": 133558.82934880117 }, { "content": "/// Checks if the forwarded server is allowed\n\npub fn check_allowlist(\n\n remote_addr: IpAddr,\n\n remote_port: u16,\n\n config_file: Option<&str>,\n\n only_4: bool,\n\n only_6: bool,\n\n) -> VsockProxyResult<()> {\n\n if let Some(config_file) = config_file {\n\n let mut f = File::open(config_file).map_err(|_| \"Could not open the file\")?;\n\n\n\n let mut content = String::new();\n\n f.read_to_string(&mut content)\n\n .map_err(|_| \"Could not read the file\")?;\n\n\n\n let docs = YamlLoader::load_from_str(&content).map_err(|_| \"Bad yaml format\")?;\n\n let services = (&docs[0])[\"allowlist\"]\n\n .as_vec()\n\n .ok_or_else(|| \"No allowlist field\")?;\n\n\n\n for raw_service in services {\n", "file_path": "vsock_proxy/src/starter.rs", "rank": 49, "score": 129575.19770145147 }, { "content": "/// Get the action associated with `cmd` as a String.\n\nfn get_command_action(cmd: EnclaveProcessCommandType) -> String {\n\n match cmd {\n\n EnclaveProcessCommandType::Run => \"Run Enclave\".to_string(),\n\n EnclaveProcessCommandType::Terminate | EnclaveProcessCommandType::TerminateComplete => {\n\n \"Terminate Enclave\".to_string()\n\n }\n\n EnclaveProcessCommandType::Describe => \"Describe Enclaves\".to_string(),\n\n EnclaveProcessCommandType::GetEnclaveCID => \"Get Enclave CID\".to_string(),\n\n EnclaveProcessCommandType::ConnectionListenerStop => \"Stop Connection Listener\".to_string(),\n\n _ => \"Unknown Command\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 50, "score": 123603.79317263336 }, { "content": "/// Print the 
output from a single enclave process.\n\nfn enclave_proc_handle_output<T>(conn: &mut UnixStream) -> (Option<T>, Option<i32>)\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n let mut stdout_str = String::new();\n\n let mut status: Option<i32> = None;\n\n\n\n // The contents meant for standard output must always form a valid JSON object.\n\n while let Ok(reply) = receive_from_stream::<EnclaveProcessReply>(conn) {\n\n match reply {\n\n EnclaveProcessReply::StdOutMessage(msg) => stdout_str.push_str(&msg),\n\n EnclaveProcessReply::StdErrMessage(msg) => eprint!(\"{}\", msg),\n\n EnclaveProcessReply::Status(status_code) => status = Some(status_code),\n\n }\n\n }\n\n\n\n // Shut the connection down.\n\n match conn.shutdown(std::net::Shutdown::Both) {\n\n Ok(()) => (),\n\n Err(e) => {\n\n notify_error(&format!(\"Failed to shut connection down: {}\", e));\n\n status = Some(-1);\n\n }\n\n }\n\n\n\n // Decode the JSON object.\n\n let json_obj = serde_json::from_str::<T>(&stdout_str).ok();\n\n (json_obj, status)\n\n}\n\n\n", "file_path": "src/enclave_proc_comm.rs", "rank": 51, "score": 123364.77066932726 }, { "content": "/// Returns detailed error information based on supplied arguments.\n\npub fn get_detailed_info(error_code_str: String, additional_info: &[String]) -> String {\n\n let mut ret = format!(\"[ {} ] \", error_code_str);\n\n let info_placeholder = \"MISSING_INFO\".to_string();\n\n\n\n match error_code_str.as_str() {\n\n \"E00\" => {\n\n ret.push_str(\"Unspecified error. This is used as a catch-all error and should not be used in the code.\");\n\n }\n\n \"E01\" => {\n\n ret.push_str(\n\n format!(\n\n \"Missing mandatory argument. User did not provide the `{}` argument.\",\n\n additional_info.get(0).unwrap_or(&info_placeholder)\n\n )\n\n .as_str(),\n\n );\n\n }\n\n \"E02\" => {\n\n ret.push_str(\n\n format!(\n", "file_path": "src/common/document_errors.rs", "rank": 52, "score": 122029.25198145959 }, { "content": "/// Returns the value of the `NITRO_CLI_BLOBS` environment variable.\n\n///\n\n/// This variable specifies where all the blobs necessary for building\n\n/// an enclave image are stored. 
As of now the blobs are:\n\n/// - *bzImage*: A kernel image.\n\n/// - *init*: The initial init process that is bootstraping the environment.\n\n/// - *linuxkit*: A slightly modified version of linuxkit.\n\n/// - *cmdline*: A file containing the kernel commandline.\n\nfn blobs_path() -> NitroCliResult<String> {\n\n // TODO Improve error message with a suggestion to the user\n\n // consider using the default path used by rpm install\n\n let blobs_res = std::env::var(\"NITRO_CLI_BLOBS\");\n\n\n\n Ok(blobs_res.unwrap_or_else(|_| DEFAULT_BLOBS_PATH.to_string()))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 53, "score": 121950.07840612435 }, { "content": "/// Returns the value of the `NITRO_CLI_ARTIFACTS` environment variable.\n\n///\n\n/// This variable configures the path where the build artifacts should be saved.\n\nfn artifacts_path() -> NitroCliResult<String> {\n\n if let Ok(artifacts) = std::env::var(\"NITRO_CLI_ARTIFACTS\") {\n\n std::fs::create_dir_all(artifacts.clone()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not create artifacts path {}: {:?}\", artifacts, e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?;\n\n Ok(artifacts)\n\n } else if let Ok(home) = std::env::var(\"HOME\") {\n\n let artifacts = format!(\"{}/.nitro_cli/\", home);\n\n std::fs::create_dir_all(artifacts.clone()).map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not create artifacts path {}: {:?}\", artifacts, e),\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?;\n\n Ok(artifacts)\n\n } else {\n\n Err(new_nitro_cli_failure!(\n\n \"Could not find a folder for the CLI artifacts, set either HOME or NITRO_CLI_ARTIFACTS\",\n\n NitroCliErrorEnum::ArtifactsPathNotSet\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 121940.79675246714 }, { "content": "pub fn recv_loop(fd: RawFd, buf: &mut [u8], len: u64) -> Result<(), String> {\n\n let len: usize = len.try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n let mut recv_bytes = 0;\n\n\n\n while recv_bytes < len {\n\n let size = match recv(fd, &mut buf[recv_bytes..len], MsgFlags::empty()) {\n\n Ok(size) => size,\n\n Err(nix::Error::Sys(EINTR)) => 0,\n\n Err(err) => return Err(format!(\"{:?}\", err)),\n\n };\n\n recv_bytes += size;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 55, "score": 121822.70778145074 }, { "content": "pub trait ExitGracefully<T, E> {\n\n fn ok_or_exit(self, message: &str) -> T;\n\n}\n\n\n\nimpl<T, E: std::fmt::Debug> ExitGracefully<T, E> for Result<T, E> {\n\n fn ok_or_exit(self, message: &str) -> T {\n\n match self {\n\n Ok(val) => val,\n\n Err(err) => {\n\n error!(\"{:?}: {}\", err, message);\n\n std::process::exit(1);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
create_app {\n\n () => {\n\n App::new(\"Vsock Tool\")\n", "file_path": "samples/command_executer/src/utils.rs", "rank": 56, "score": 114525.71038474288 }, { "content": "#[derive(Default)]\n\nstruct EnclaveHandle {\n\n /// The CPU configuration as requested by the user.\n\n #[allow(dead_code)]\n\n cpu_config: EnclaveCpuConfig,\n\n /// List of CPU IDs provided to the enclave.\n\n cpu_ids: Vec<u32>,\n\n /// Amount of memory allocated for the enclave, in MB.\n\n allocated_memory_mib: u64,\n\n /// The enclave slot ID.\n\n slot_uid: u64,\n\n /// The enclave CID.\n\n enclave_cid: Option<u64>,\n\n /// Enclave flags (including the enclave debug mode flag).\n\n flags: u64,\n\n /// The driver-provided enclave descriptor.\n\n enc_fd: RawFd,\n\n /// The allocator used to manage enclave memory.\n\n resource_allocator: ResourceAllocator,\n\n /// The enclave image file.\n\n eif_file: Option<File>,\n", "file_path": "src/enclave_proc/resource_manager.rs", "rank": 57, "score": 113719.01068548777 }, { "content": "/// Handle an event coming from an enclave.\n\nfn try_handle_enclave_event(connection: &Connection) -> NitroCliResult<HandledEnclaveEvent> {\n\n // Check if this is an enclave connection.\n\n if let Some(mut enc_events) = connection\n\n .get_enclave_event_flags()\n\n .map_err(|e| e.add_subaction(\"Failed to get enclave events flag\".to_string()))?\n\n {\n\n let enc_hup = enc_events.contains(EpollFlags::EPOLLHUP);\n\n\n\n // Check if non-hang-up events have occurred.\n\n enc_events.remove(EpollFlags::EPOLLHUP);\n\n if !enc_events.is_empty() {\n\n warn!(\"Received unexpected enclave event(s): {:?}\", enc_events);\n\n }\n\n\n\n // If we received the hang-up event we need to terminate cleanly.\n\n if enc_hup {\n\n warn!(\"Received hang-up event from the enclave. Enclave process will shut down.\");\n\n return Ok(HandledEnclaveEvent::HangUp);\n\n }\n\n\n\n // Non-hang-up enclave events are not fatal.\n\n return Ok(HandledEnclaveEvent::Unexpected);\n\n }\n\n\n\n Ok(HandledEnclaveEvent::None)\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 58, "score": 111841.17656727899 }, { "content": "/// Release the enclave descriptor.\n\nfn release_enclave_descriptor(enc_fd: RawFd) -> NitroCliResult<()> {\n\n // Close enclave descriptor.\n\n let rc = unsafe { libc::close(enc_fd) };\n\n if rc < 0 {\n\n return Err(new_nitro_cli_failure!(\n\n \"Failed to close enclave descriptor\",\n\n NitroCliErrorEnum::FileOperationFailure\n\n ));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/enclave_proc/resource_manager.rs", "rank": 59, "score": 111529.60021916706 }, { "content": "/// Launch the POSIX signal handler on a dedicated thread and ensure its events are accessible.\n\nfn enclave_proc_configure_signal_handler(conn_listener: &ConnectionListener) -> NitroCliResult<()> {\n\n let mut signal_handler = SignalHandler::new_with_defaults()\n\n .mask_all()\n\n .map_err(|e| e.add_subaction(\"Failed to configure signal handler\".to_string()))?;\n\n let (local_stream, thread_stream) = UnixStream::pair()\n\n .map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Failed to create stream pair: {:?}\", e),\n\n NitroCliErrorEnum::SocketPairCreationFailure\n\n )\n\n .set_action(\"Run Enclave\".to_string())\n\n })\n\n .ok_or_exit_with_errno(Some(\"Failed to create stream pair\"));\n\n\n\n conn_listener\n\n .add_stream_to_epoll(local_stream)\n\n .map_err(|e| {\n\n e.add_subaction(\n\n \"Failed to add stream to epoll when configuring signal handler\".to_string(),\n\n )\n\n })?;\n\n 
signal_handler.start_handler(thread_stream.into_raw_fd(), enclave_proc_handle_signals);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 60, "score": 110164.83695754188 }, { "content": "fn parse_no_wait(args: &ArgMatches) -> bool {\n\n if args.is_present(\"no-wait\") {\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "samples/command_executer/src/command_parser.rs", "rank": 61, "score": 109624.52319256408 }, { "content": "void warn(const char *msg) {\n\n int error = errno;\n\n perror(msg);\n\n errno = error;\n", "file_path": "init.c", "rank": 62, "score": 109389.79267942713 }, { "content": " const char *path;\n", "file_path": "init.c", "rank": 63, "score": 109282.65176604892 }, { "content": "struct VsockSocket {\n\n socket_fd: RawFd,\n\n}\n\n\n\nimpl VsockSocket {\n\n fn new(socket_fd: RawFd) -> Self {\n\n VsockSocket { socket_fd }\n\n }\n\n}\n\n\n\nimpl Drop for VsockSocket {\n\n fn drop(&mut self) {\n\n shutdown(self.socket_fd, Shutdown::Both)\n\n .unwrap_or_else(|e| eprintln!(\"Failed to shut socket down: {:?}\", e));\n\n close(self.socket_fd).unwrap_or_else(|e| eprintln!(\"Failed to close socket: {:?}\", e));\n\n }\n\n}\n\n\n\nimpl AsRawFd for VsockSocket {\n\n fn as_raw_fd(&self) -> RawFd {\n\n self.socket_fd\n\n }\n\n}\n\n\n", "file_path": "samples/command_executer/src/lib.rs", "rank": 64, "score": 108076.86687904176 }, { "content": "/// Parse the debug-mode flag from the command-line arguments.\n\nfn debug_mode(args: &ArgMatches) -> Option<bool> {\n\n let val = args.is_present(\"debug-mode\");\n\n if val {\n\n Some(val)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 65, "score": 107468.56683013303 }, { "content": "/// Data held by a connection.\n\nstruct ConnectionData {\n\n /// Flags received from `epoll` if this was an event-triggered connection.\n\n epoll_flags: EpollFlags,\n\n /// A communication stream with the peer, if this was a socket-triggered connection.\n\n input_stream: Option<UnixStream>,\n\n}\n\n\n\n/// An enclave process connection to a CLI instance, an enclave or itself.\n\n#[derive(Clone)]\n\npub struct Connection {\n\n /// The thread-safe data used internally by the connection.\n\n data: Arc<Mutex<ConnectionData>>,\n\n}\n\n\n\nimpl Drop for ConnectionData {\n\n fn drop(&mut self) {\n\n if let Some(input_stream) = &self.input_stream {\n\n // Close the stream.\n\n input_stream\n\n .shutdown(std::net::Shutdown::Both)\n", "file_path": "src/enclave_proc/connection.rs", "rank": 66, "score": 106319.79683498657 }, { "content": "fn main() {\n\n let matches = App::new(\"Docker2Eif builder\")\n\n .about(\"Generate consistent EIF image from a Docker image\")\n\n .setting(AppSettings::DisableVersion)\n\n .arg(\n\n Arg::with_name(\"docker_image\")\n\n .short(\"t\")\n\n .long(\"tag\")\n\n .help(\"Docker image tag\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"init_path\")\n\n .short(\"i\")\n\n .long(\"init\")\n\n .help(\"Path to a binary representing the init process for the enclave\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n", "file_path": "enclave_build/src/main.rs", "rank": 67, "score": 106250.8804347357 }, { "content": "/// Assembles the error message which gets displayed to the user.\n\npub fn construct_error_message(failure: &NitroCliFailure) -> String {\n\n // Suggestive error description comes first.\n\n let error_info: String = document_errors::get_detailed_info(\n\n 
(*ERROR_CODES.get(&failure.error_code).unwrap_or(&\"E00\")).to_string(),\n\n &failure.additional_info,\n\n );\n\n\n\n // Include a link to the documentation page.\n\n let help_link: String = document_errors::construct_help_link(\n\n (*ERROR_CODES.get(&failure.error_code).unwrap_or(&\"E00\")).to_string(),\n\n );\n\n let backtrace: String = document_errors::construct_backtrace(&failure);\n\n\n\n // Write backtrace to a log file.\n\n let log_path = log_backtrace(backtrace.clone());\n\n\n\n // Return final output, depending on whether the user requested the backtrace or not.\n\n match std::env::var(BACKTRACE_VAR) {\n\n Ok(display_backtrace) => match display_backtrace.as_str() {\n\n \"1\" => {\n", "file_path": "src/common/mod.rs", "rank": 68, "score": 104884.21048223598 }, { "content": "#[derive(Clone, Default)]\n\nstruct ResourceAllocator {\n\n /// The requested memory size in bytes.\n\n requested_mem: u64,\n\n /// The memory regions that have actually been allocated.\n\n mem_regions: Vec<MemoryRegion>,\n\n}\n\n\n\n/// Helper structure for managing an enclave's resources.\n", "file_path": "src/enclave_proc/resource_manager.rs", "rank": 69, "score": 104013.90563722263 }, { "content": "/// The policy used to filter received commands based on the requester's type.\n\nstruct CommandRequesterPolicy {\n\n /// A mapping between a requester's type and all of its allowed commands.\n\n policy: HashMap<CommandRequesterType, Vec<EnclaveProcessCommandType>>,\n\n}\n\n\n", "file_path": "src/enclave_proc/connection.rs", "rank": 70, "score": 104013.5704441927 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct DirTemplate {\n\n path: String,\n\n directory: bool,\n\n mode: String,\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum YamlGeneratorError {\n\n TempfileError,\n\n}\n\n\n\npub struct YamlGenerator {\n\n docker_image: String,\n\n init_path: String,\n\n nsm_path: String,\n\n cmd_path: String,\n\n env_path: String,\n\n}\n\n\n\nimpl YamlGenerator {\n", "file_path": "enclave_build/src/yaml_generator.rs", "rank": 71, "score": 104009.05989935096 }, { "content": "/// Start enclave termination.\n\nfn notify_terminate(\n\n connection: Connection,\n\n conn_listener: &ConnectionListener,\n\n enclave_manager: EnclaveManager,\n\n) -> NitroCliResult<JoinHandle<()>> {\n\n let (local_stream, thread_stream) = UnixStream::pair().map_err(|e| {\n\n new_nitro_cli_failure!(\n\n &format!(\"Could not create stream pair: {:?}\", e),\n\n NitroCliErrorEnum::SocketPairCreationFailure\n\n )\n\n })?;\n\n\n\n conn_listener.add_stream_to_epoll(local_stream)?;\n\n Ok(thread::spawn(move || {\n\n run_terminate(connection, thread_stream, enclave_manager)\n\n }))\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 72, "score": 103810.96564219824 }, { "content": "/// Perform enclave termination.\n\nfn run_terminate(\n\n connection: Connection,\n\n mut thread_stream: UnixStream,\n\n mut enclave_manager: EnclaveManager,\n\n) {\n\n terminate_enclaves(&mut enclave_manager, Some(&connection)).unwrap_or_else(|e| {\n\n notify_error_with_conn(\n\n construct_error_message(&e).as_str(),\n\n &connection,\n\n EnclaveProcessCommandType::Terminate,\n\n );\n\n });\n\n\n\n // Notify the main thread that enclave termination has completed.\n\n send_command_and_close(\n\n EnclaveProcessCommandType::TerminateComplete,\n\n &mut thread_stream,\n\n );\n\n}\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 73, "score": 103810.96564219824 }, { "content": "/// Handle a single command, returning whenever an error occurs.\n\nfn 
handle_command(\n\n cmd: EnclaveProcessCommandType,\n\n logger: &EnclaveProcLogWriter,\n\n connection: &Connection,\n\n conn_listener: &mut ConnectionListener,\n\n enclave_manager: &mut EnclaveManager,\n\n terminate_thread: &mut Option<std::thread::JoinHandle<()>>,\n\n) -> NitroCliResult<(i32, bool)> {\n\n Ok(match cmd {\n\n EnclaveProcessCommandType::Run => {\n\n // We should never receive a Run command if we are already running.\n\n if !enclave_manager.enclave_id.is_empty() {\n\n (libc::EEXIST, false)\n\n } else {\n\n let run_args = connection.read::<RunEnclavesArgs>().map_err(|e| {\n\n e.add_subaction(\"Failed to get run arguments\".to_string())\n\n .set_action(\"Run Enclave\".to_string())\n\n })?;\n\n info!(\"Run args = {:?}\", run_args);\n\n\n", "file_path": "src/enclave_proc/mod.rs", "rank": 74, "score": 103806.59135475363 }, { "content": "fn write_config(config: Vec<String>) -> Result<NamedTempFile, DockerError> {\n\n let mut file = NamedTempFile::new().map_err(|_| DockerError::TempfileError)?;\n\n\n\n for line in config {\n\n file.write_fmt(format_args!(\"{}\\n\", line))\n\n .map_err(|_| DockerError::TempfileError)?;\n\n }\n\n\n\n Ok(file)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::Read;\n\n\n\n /// Test extracted configuration is as expected\n\n #[test]\n\n fn test_config() {\n\n let docker = DockerUtil::new(String::from(\n", "file_path": "enclave_build/src/docker.rs", "rank": 75, "score": 103764.80730901976 }, { "content": "/// Returns a string containing the backtrace recorded during propagating an error message\n\npub fn construct_backtrace(failure_info: &NitroCliFailure) -> String {\n\n let mut ret = String::new();\n\n let commit_id = env!(\"COMMIT_ID\");\n\n\n\n ret.push_str(&format!(\" Action: {}\\n Subactions:\", failure_info.action));\n\n for subaction in failure_info.subactions.iter().rev() {\n\n ret.push_str(&format!(\"\\n {}\", subaction));\n\n }\n\n ret.push_str(&format!(\"\\n Root error file: {}\", failure_info.file));\n\n ret.push_str(&format!(\"\\n Root error line: {}\", failure_info.line));\n\n\n\n ret.push_str(&format!(\n\n \"\\n Build commit: {}\",\n\n match commit_id.len() {\n\n 0 => \"not available\",\n\n _ => commit_id,\n\n }\n\n ));\n\n\n\n ret\n\n}\n\n\n", "file_path": "src/common/document_errors.rs", "rank": 76, "score": 103179.46188086185 }, { "content": "/// Parse the enclave image file path from the command-line arguments.\n\nfn parse_eif_path(args: &ArgMatches) -> NitroCliResult<String> {\n\n let eif_path = args.value_of(\"eif-path\").ok_or_else(|| {\n\n new_nitro_cli_failure!(\n\n \"`eif-path` argument not found\",\n\n NitroCliErrorEnum::MissingArgument\n\n )\n\n })?;\n\n Ok(eif_path.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 77, "score": 102020.91664188931 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct BootstrapRamfsTemplate {\n\n files: (DirTemplate, FileTemplate, FileTemplate),\n\n}\n\n\n", "file_path": "enclave_build/src/yaml_generator.rs", "rank": 78, "score": 101838.51504191566 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct CustomerRamfsTemplate {\n\n init: Vec<String>,\n\n files: (\n\n DirTemplate,\n\n DirTemplate,\n\n DirTemplate,\n\n DirTemplate,\n\n DirTemplate,\n\n DirTemplate,\n\n FileTemplate,\n\n FileTemplate,\n\n ),\n\n}\n\n\n", "file_path": "enclave_build/src/yaml_generator.rs", "rank": 79, "score": 101838.51504191566 }, { "content": "pub fn listen(args: ListenArgs) -> Result<(), String> {\n\n let socket_fd = socket(\n\n 
AddressFamily::Vsock,\n\n SockType::Stream,\n\n SockFlag::empty(),\n\n None,\n\n )\n\n .map_err(|err| format!(\"Create socket failed: {:?}\", err))?;\n\n\n\n let sockaddr = SockAddr::new_vsock(VMADDR_CID_ANY, args.port);\n\n\n\n bind(socket_fd, &sockaddr).map_err(|err| format!(\"Bind failed: {:?}\", err))?;\n\n\n\n listen_vsock(socket_fd, BACKLOG).map_err(|err| format!(\"Listen failed: {:?}\", err))?;\n\n\n\n loop {\n\n let fd = accept(socket_fd).map_err(|err| format!(\"Accept failed: {:?}\", err))?;\n\n\n\n //cmd id\n\n let cmdid = match recv_u64(fd) {\n", "file_path": "samples/command_executer/src/lib.rs", "rank": 80, "score": 101611.26251829877 }, { "content": "/// Logs the given backtrace string to a separate, backtrace-specific file.\n\n/// Returns a string denoting the path to the corresponding log file.\n\nfn log_backtrace(backtrace: String) -> Result<String, &'static str> {\n\n let log_path_base = get_log_file_base_path();\n\n\n\n // Check if backtrace logs location exists and create it if necessary.\n\n if !Path::new(&log_path_base).exists() {\n\n let create_logs_dir = std::fs::create_dir_all(&log_path_base);\n\n if create_logs_dir.is_err() {\n\n return Err(\"Could not create backtrace logs directory\");\n\n }\n\n }\n\n\n\n let utc_time_now = Utc::now().to_rfc3339();\n\n let log_path_str = format!(\"{}/err{}.log\", &log_path_base, utc_time_now);\n\n let log_path = Path::new(&log_path_str);\n\n let log_file = std::fs::File::create(log_path);\n\n if log_file.is_err() {\n\n return Err(\"Could not create backtrace log file\");\n\n }\n\n\n\n let write_result = log_file.unwrap().write_all(&backtrace.as_bytes());\n\n if write_result.is_err() {\n\n return Err(\"Could not write to backtrace log file\");\n\n }\n\n\n\n match log_path.to_str() {\n\n Some(log_path) => Ok(log_path.to_string()),\n\n None => Err(\"Could not return log file path\"),\n\n }\n\n}\n\n\n", "file_path": "src/common/mod.rs", "rank": 81, "score": 101266.30296764872 }, { "content": "fn parse_error_code_str(args: &ArgMatches) -> NitroCliResult<String> {\n\n let error_code_str = args.value_of(\"error-code\").ok_or_else(|| {\n\n new_nitro_cli_failure!(\n\n \"`error-code` argument not found\",\n\n NitroCliErrorEnum::MissingArgument\n\n )\n\n })?;\n\n Ok(error_code_str.to_string())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use crate::common::construct_error_message;\n\n use crate::create_app;\n\n\n\n use clap::{App, AppSettings, Arg, SubCommand};\n\n\n\n /// Parse the path of the JSON config file\n", "file_path": "src/common/commands_parser.rs", "rank": 82, "score": 100461.79946752734 }, { "content": "/// Write an enclave image file to the specified list of memory regions.\n\nfn write_eif_to_regions(\n\n eif_file: &mut File,\n\n regions: &[MemoryRegion],\n\n image_write_offset: usize,\n\n) -> NitroCliResult<()> {\n\n let file_size = eif_file\n\n .metadata()\n\n .map_err(|_| {\n\n new_nitro_cli_failure!(\n\n \"Failed to obtain EIF file metadata\",\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n\n })?\n\n .len() as usize;\n\n\n\n eif_file.seek(SeekFrom::Start(0)).map_err(|_| {\n\n new_nitro_cli_failure!(\n\n \"Failed to seek to the beginning of the EIF file\",\n\n NitroCliErrorEnum::FileOperationFailure\n\n )\n", "file_path": "src/enclave_proc/resource_manager.rs", "rank": 83, "score": 99371.55083360172 }, { "content": "/// Parse the enclave's ID from the command-line arguments.\n\nfn parse_enclave_id(args: &ArgMatches) -> NitroCliResult<String> {\n\n let enclave_id = 
args.value_of(\"enclave-id\").ok_or_else(|| {\n\n new_nitro_cli_failure!(\n\n \"`enclave-id` argument not found\",\n\n NitroCliErrorEnum::MissingArgument\n\n )\n\n })?;\n\n Ok(enclave_id.to_string())\n\n}\n\n\n", "file_path": "src/common/commands_parser.rs", "rank": 84, "score": 98621.50562959746 }, { "content": "pub fn run(args: RunArgs) -> Result<i32, String> {\n\n let vsocket = vsock_connect(args.cid, args.port)?;\n\n let socket_fd = vsocket.as_raw_fd();\n\n\n\n // Send command id\n\n if args.no_wait {\n\n send_u64(socket_fd, CmdId::RunCmdNoWait as u64)?;\n\n } else {\n\n send_u64(socket_fd, CmdId::RunCmd as u64)?;\n\n }\n\n\n\n // send command\n\n let buf = args.command.as_bytes();\n\n let len: u64 = buf.len().try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n send_u64(socket_fd, len)?;\n\n send_loop(socket_fd, &buf, len)?;\n\n\n\n // recv output\n\n let mut buf = [0u8; BUF_MAX_LEN];\n\n let len = recv_u64(socket_fd)?;\n", "file_path": "samples/command_executer/src/lib.rs", "rank": 85, "score": 97126.80755892937 }, { "content": "fn run_server(fd: RawFd, no_wait: bool) -> Result<(), String> {\n\n // recv command\n\n let len = recv_u64(fd)?;\n\n let mut buf = [0u8; BUF_MAX_LEN];\n\n recv_loop(fd, &mut buf, len)?;\n\n\n\n let len_usize = len.try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n let command = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!(\"{:?}\", err))?;\n\n\n\n // execute command\n\n let command_output = if no_wait {\n\n #[rustfmt::skip]\n\n let output = Command::new(\"sh\")\n\n .arg(\"-c\")\n\n .arg(command)\n\n .spawn();\n\n if output.is_err() {\n\n CommandOutput::new(\n\n String::new(),\n\n format!(\"Could not execute the command {}\", command),\n", "file_path": "samples/command_executer/src/lib.rs", "rank": 86, "score": 96194.55654956357 }, { "content": "pub fn recv_u64(fd: RawFd) -> Result<u64, String> {\n\n let mut buf = [0u8; 9];\n\n recv_loop(fd, &mut buf, 9)?;\n\n let val = LittleEndian::read_u64(&buf);\n\n Ok(val)\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 87, "score": 93973.24467142412 }, { "content": "pub fn recv_i32(fd: RawFd) -> Result<i32, String> {\n\n let mut buf = [0u8; 4];\n\n recv_loop(fd, &mut buf, 4)?;\n\n let val = LittleEndian::read_i32(&buf);\n\n Ok(val)\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 88, "score": 93973.24467142412 }, { "content": "/// Parse the enclave's required CID from the command-line arguments.\n\nfn parse_enclave_cid(args: &ArgMatches) -> NitroCliResult<Option<u64>> {\n\n let enclave_cid = if let Some(enclave_cid) = args.value_of(\"enclave-cid\") {\n\n let enclave_cid: u64 = enclave_cid.parse().map_err(|_| {\n\n new_nitro_cli_failure!(\n\n \"`enclave-cid` is not a number\",\n\n NitroCliErrorEnum::InvalidArgument\n\n )\n\n .add_info(vec![\"enclave-cid\", enclave_cid])\n\n })?;\n\n\n\n // Do not use well-known CID values - 0, 1, 2 - as the enclave CID.\n\n // VMADDR_CID_ANY = -1U\n\n // VMADDR_CID_HYPERVISOR = 0\n\n // VMADDR_CID_LOCAL = 1\n\n // VMADDR_CID_HOST = 2\n\n // Note: 0 is used as a placeholder to auto-generate a CID.\n\n // <http://man7.org/linux/man-pages/man7/vsock.7.html>\n\n if enclave_cid == 0 {\n\n eprintln!(\"The enclave CID will be auto-generated as the provided CID is 0\");\n\n }\n", "file_path": "src/common/commands_parser.rs", "rank": 89, "score": 93871.86413156596 }, { "content": "pub fn send_u64(fd: RawFd, val: u64) -> Result<(), String> {\n\n let mut buf = [0u8; 9];\n\n LittleEndian::write_u64(&mut 
buf, val);\n\n send_loop(fd, &mut buf, 9)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 90, "score": 90121.78419707387 }, { "content": "pub fn send_i32(fd: RawFd, val: i32) -> Result<(), String> {\n\n let mut buf = [0u8; 4];\n\n LittleEndian::write_i32(&mut buf, val);\n\n send_loop(fd, &mut buf, 4)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 91, "score": 90121.78419707387 }, { "content": "fn initial_digest(len: usize) -> Vec<u8> {\n\n vec![0; len]\n\n}\n\n\n\nimpl<T: Digest + Debug + Write + Clone> EifHasher<T> {\n\n pub fn new(block_size: usize, mut hasher: T) -> Result<Self, String> {\n\n let output_size = hasher.result_reset().len();\n\n if block_size > 0 && output_size * 2 > block_size {\n\n return Err(\"Invalid block_size\".to_string());\n\n }\n\n\n\n Ok(EifHasher {\n\n block: Vec::with_capacity(block_size),\n\n digest: initial_digest(output_size),\n\n block_size,\n\n output_size,\n\n hasher,\n\n })\n\n }\n\n\n", "file_path": "eif_defs/src/eif_hasher.rs", "rank": 92, "score": 86509.64835716393 }, { "content": "pub fn send_loop(fd: RawFd, buf: &[u8], len: u64) -> Result<(), String> {\n\n let len: usize = len.try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n let mut send_bytes = 0;\n\n\n\n while send_bytes < len {\n\n let size = match send(fd, &buf[send_bytes..len], MsgFlags::empty()) {\n\n Ok(size) => size,\n\n Err(nix::Error::Sys(EINTR)) => 0,\n\n Err(err) => return Err(format!(\"{:?}\", err)),\n\n };\n\n send_bytes += size;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "samples/command_executer/src/protocol_helpers.rs", "rank": 93, "score": 83504.30035125623 }, { "content": "fn send_file_server(fd: RawFd) -> Result<(), String> {\n\n // recv file path\n\n let len = recv_u64(fd)?;\n\n let mut buf = [0u8; BUF_MAX_LEN];\n\n recv_loop(fd, &mut buf, len)?;\n\n let len_usize = len.try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n let path = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!(\"{:?}\", err))?;\n\n\n\n let mut file = File::create(path).map_err(|err| format!(\"Could not open file {:?}\", err))?;\n\n\n\n // Receive filesize\n\n let filesize = recv_u64(fd)?;\n\n println!(\"Receiving file {} - size {}\", path, filesize);\n\n\n\n let mut progress: u64 = 0;\n\n let mut tmpsize: u64;\n\n\n\n while progress < filesize {\n\n tmpsize = buf.len().try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n tmpsize = min(tmpsize, filesize - progress);\n\n\n\n recv_loop(fd, &mut buf, tmpsize)?;\n\n file.write_all(&buf[..tmpsize.try_into().map_err(|err| format!(\"{:?}\", err))?])\n\n .map_err(|err| format!(\"Could not write {:?}\", err))?;\n\n progress += tmpsize\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "samples/command_executer/src/lib.rs", "rank": 94, "score": 82945.36045756114 }, { "content": "fn recv_file_server(fd: RawFd) -> Result<(), String> {\n\n // recv file path\n\n let len = recv_u64(fd)?;\n\n let mut buf = [0u8; BUF_MAX_LEN];\n\n recv_loop(fd, &mut buf, len)?;\n\n let len_usize = len.try_into().map_err(|err| format!(\"{:?}\", err))?;\n\n let path = std::str::from_utf8(&buf[0..len_usize]).map_err(|err| format!(\"{:?}\", err))?;\n\n\n\n let mut file = File::open(path).map_err(|err| format!(\"Could not open file {:?}\", err))?;\n\n\n\n let filesize = file\n\n .metadata()\n\n .map_err(|err| format!(\"Could not get file metadata {:?}\", err))?\n\n .len();\n\n\n\n send_u64(fd, filesize)?;\n\n println!(\"Sending file {} - size {}\", path, filesize);\n\n\n\n 
let mut progress: u64 = 0;\n\n let mut tmpsize: u64;\n", "file_path": "samples/command_executer/src/lib.rs", "rank": 95, "score": 82945.36045756114 } ]
Rust
src/component/sum_of_best.rs
AntyMew/livesplit-core
b59a45ddd85c914121d279df38ad5b0e581bd512
use Timer;
use time::formatter::{Accuracy, Regular, TimeFormatter};
use serde_json::{to_writer, Result};
use analysis::sum_of_segments::calculate_best;
use std::io::Write;
use std::borrow::Cow;
use settings::{Color, Field, Gradient, SettingsDescription, Value};
use super::DEFAULT_INFO_TEXT_GRADIENT;

#[derive(Default, Clone)]
pub struct Component {
    settings: Settings,
}

#[derive(Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct Settings {
    pub background: Gradient,
    pub label_color: Option<Color>,
    pub value_color: Option<Color>,
    pub accuracy: Accuracy,
}

impl Default for Settings {
    fn default() -> Self {
        Self {
            background: DEFAULT_INFO_TEXT_GRADIENT,
            label_color: None,
            value_color: None,
            accuracy: Accuracy::Seconds,
        }
    }
}

#[derive(Serialize, Deserialize)]
pub struct State {
    pub background: Gradient,
    pub label_color: Option<Color>,
    pub value_color: Option<Color>,
    pub text: String,
    pub time: String,
}

impl State {
    pub fn write_json<W>(&self, writer: W) -> Result<()>
    where
        W: Write,
    {
        to_writer(writer, self)
    }
}

impl Component {
    pub fn new() -> Self {
        Default::default()
    }

    pub fn with_settings(settings: Settings) -> Self {
        Self {
            settings,
            ..Default::default()
        }
    }

    pub fn settings(&self) -> &Settings {
        &self.settings
    }

    pub fn settings_mut(&mut self) -> &mut Settings {
        &mut self.settings
    }

    pub fn name(&self) -> Cow<str> {
        "Sum of Best Segments".into()
    }

    pub fn state(&self, timer: &Timer) -> State {
        let time = calculate_best(
            timer.run().segments(),
            false,
            true,
            timer.current_timing_method(),
        );

        State {
            background: self.settings.background,
            label_color: self.settings.label_color,
            value_color: self.settings.value_color,
            text: String::from("Sum of Best Segments"),
            time: Regular::with_accuracy(self.settings.accuracy)
                .format(time)
                .to_string(),
        }
    }

    pub fn settings_description(&self) -> SettingsDescription {
        SettingsDescription::with_fields(vec![
            Field::new("Background".into(), self.settings.background.into()),
            Field::new("Label Color".into(), self.settings.label_color.into()),
            Field::new("Value Color".into(), self.settings.value_color.into()),
            Field::new("Accuracy".into(), self.settings.accuracy.into()),
        ])
    }

    pub fn set_value(&mut self, index: usize, value: Value) {
        match index {
            0 => self.settings.background = value.into(),
            1 => self.settings.label_color = value.into(),
            2 => self.settings.value_color = value.into(),
            3 => self.settings.accuracy = value.into(),
            _ => panic!("Unsupported Setting Index"),
        }
    }
}
use Timer; use time::formatter::{Accuracy, Regular, TimeFormatter}; use serde_json::{to_writer, Result}; use analysis::sum_of_segments::calculate_best; use std::io::Write; use std::borrow::Cow; use settings::{Color, Field, Gradient, SettingsDescription, Value}; use super::DEFAULT_INFO_TEXT_GRADIENT; #[derive(Default, Clone)] pub struct Component { settings: Settings, } #[derive(Clone, Serialize, Deserialize)] #[serde(default)] pub struct Settings { pub background: Gradient, pub label_color: Option<Color>, pub value_color: Option<Color>, pub accuracy: Accuracy, } impl Default for Settings { fn default() -> Self { Self { background: DEFAULT_INFO_TEXT_GRADIENT, label_color: None, value_color: None, accuracy: Accuracy::Seconds, } } } #[derive(Serialize, Deserialize)] pub struct State { pub background: Gradient, pub label_color: Option<Color>, pub value_color: Option<Color>, pub text: String, pub time: String, } impl State { pub fn write_json<W>(&self, writer: W) -> Result<()> where W: Write, { to_writer(writer, self) } } impl Component { pub fn new() -> Self { Default::default() } pub fn with_settings(settings: Settings) -> Self { Self { settings, ..Default::default() } } pub fn settings(&self) -> &Settings { &self.settings } pub fn settings_mut(&mut self) -> &mut Settings { &mut self.settings } pub fn name(&self) -> Cow<str> { "Sum of Best Segments".into() } pub fn state(&self, timer: &Timer) -> State { let time =
; State { background: self.settings.background, label_color: self.settings.label_color, value_color: self.settings.value_color, text: String::from("Sum of Best Segments"), time: Regular::with_accuracy(self.settings.accuracy) .format(time) .to_string(), } } pub fn settings_description(&self) -> SettingsDescription { SettingsDescription::with_fields(vec![ Field::new("Background".into(), self.settings.background.into()), Field::new("Label Color".into(), self.settings.label_color.into()), Field::new("Value Color".into(), self.settings.value_color.into()), Field::new("Accuracy".into(), self.settings.accuracy.into()), ]) } pub fn set_value(&mut self, index: usize, value: Value) { match index { 0 => self.settings.background = value.into(), 1 => self.settings.label_color = value.into(), 2 => self.settings.value_color = value.into(), 3 => self.settings.accuracy = value.into(), _ => panic!("Unsupported Setting Index"), } } }
calculate_best( timer.run().segments(), false, true, timer.current_timing_method(), )
call_expression
[ { "content": "pub fn write<W: Write>(mut writer: W, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n write!(\n\n writer,\n\n \"{}\",\n\n r#\"#ifndef LIVESPLIT_CORE_H\n\n#define LIVESPLIT_CORE_H\n\n\n\n#ifdef __cplusplus\n\n#define restrict __restrict\n\nnamespace LiveSplit {\n\nextern \"C\" {\n\n#endif\n\n\n\n#include <stdint.h>\n\n#include <stddef.h>\n\n#include <stdbool.h>\n\n\n\n\"#\n\n )?;\n\n\n", "file_path": "capi/bind_gen/src/c.rs", "rank": 0, "score": 528799.9558206268 }, { "content": "pub fn write<W: Write>(mut writer: W, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n write!(\n\n writer,\n\n \"{}\",\n\n r#\"using System;\n\nusing System.Runtime.InteropServices;\n\nusing System.Text;\n\nusing System.IO;\n\n\n\nnamespace LiveSplitCore\n\n{\"#\n\n )?;\n\n\n\n for (class_name, class) in classes {\n\n let class_name_ref = format!(\"{}Ref\", class_name);\n\n let class_name_ref_mut = format!(\"{}RefMut\", class_name);\n\n\n\n write_class_comments(&mut writer, &class.comments)?;\n\n\n\n write!(\n", "file_path": "capi/bind_gen/src/csharp.rs", "rank": 1, "score": 523958.2846108381 }, { "content": "pub fn write<W: Write>(mut writer: W, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n write!(writer,\n\n \"{}\",\n\n r#\"#!/usr/bin/env python3\n\n# coding: utf-8\n\n\n\nimport sys, ctypes\n\nfrom ctypes import c_char_p, c_void_p, c_int8, c_int16, c_int32, c_int64, c_uint8, c_uint16, c_uint32, c_uint64, c_size_t, c_float, c_double, c_bool, c_char, c_byte\n\n\n\nprefix = {'win32': ''}.get(sys.platform, './lib')\n\nextension = {'darwin': '.dylib', 'win32': '.dll'}.get(sys.platform, '.so')\n\nlivesplit_core_native = ctypes.cdll.LoadLibrary(prefix + \"livesplit_core\" + extension)\n\n\"#)?;\n\n\n\n for class in classes.values() {\n\n for function in class\n\n .static_fns\n\n .iter()\n\n .chain(class.own_fns.iter())\n\n .chain(class.shared_fns.iter())\n", "file_path": "capi/bind_gen/src/python.rs", "rank": 2, "score": 523958.28461083805 }, { "content": "pub fn write<W: Write>(mut writer: W, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"import LiveSplitCoreNative\n\n\"#\n\n )?;\n\n\n\n for (class_name, class) in classes {\n\n let class_name_ref = format!(\"{}Ref\", class_name);\n\n let class_name_ref_mut = format!(\"{}RefMut\", class_name);\n\n\n\n write_class_comments(&mut writer, &class.comments)?;\n\n\n\n write!(\n\n writer,\n\n r#\"\n\npublic class {class} {{\n\n var ptr: UnsafeMutableRawPointer?\"#,\n\n class = class_name_ref\n\n )?;\n", "file_path": "capi/bind_gen/src/swift/code.rs", "rank": 3, "score": 519262.11067814106 }, { "content": "pub fn write<W: Write>(mut writer: W, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n write!(\n\n writer,\n\n \"{}\",\n\n r#\"#ifndef LIVESPLIT_CORE_H\n\n#define LIVESPLIT_CORE_H\n\n\n\n#ifdef __cplusplus\n\nnamespace LiveSplit {\n\nextern \"C\" {\n\n#endif\n\n\n\n#include <stdint.h>\n\n#include <stddef.h>\n\n#include <stdbool.h>\n\n\"#\n\n )?;\n\n\n\n for class in classes.values() {\n\n writeln!(writer, \"\")?;\n", "file_path": "capi/bind_gen/src/swift/header.rs", "rank": 4, "score": 519262.11067814106 }, { "content": "pub fn write<W: Write>(mut writer: W, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n write!(writer,\n\n \"{}\",\n\n r#\"#include <jni.h>\n\n#include <string>\n\n#include \"livesplit_core.h\"\n\n\n\nusing namespace LiveSplit;\n\n\n\nextern \"C\" JNIEXPORT jlong Java_livesplitcore_LiveSplitCoreNative_Run_1parseString(JNIEnv* jni_env, jobject, jstring data, jstring path, 
jboolean load_files) {\n\n auto cstr_data = jni_env->GetStringUTFChars(data, nullptr);\n\n auto cstr_path = jni_env->GetStringUTFChars(path, nullptr);\n\n auto result = (jlong)Run_parse(cstr_data, strlen(cstr_data), cstr_path, (uint8_t)load_files);\n\n jni_env->ReleaseStringUTFChars(path, cstr_path);\n\n jni_env->ReleaseStringUTFChars(data, cstr_data);\n\n return result;\n\n}\n\n\"#)?;\n\n\n\n for (class_name, class) in classes {\n", "file_path": "capi/bind_gen/src/jni_cpp.rs", "rank": 5, "score": 519262.1106781411 }, { "content": "/// Saves the Run in use by the Timer provided as a LiveSplit splits file\n\n/// (*.lss).\n\npub fn save_timer<W: Write>(timer: &Timer, writer: W) -> Result<()> {\n\n let run;\n\n let run = if timer.current_phase() == TimerPhase::NotRunning {\n\n timer.run()\n\n } else {\n\n run = timer.clone().into_run(true);\n\n &run\n\n };\n\n save_run(run, writer)\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 6, "score": 510698.0090379764 }, { "content": "fn time_inner<W: Write>(writer: &mut Writer<W>, time: Time, buf: &mut Vec<u8>) -> Result<()> {\n\n if let Some(time) = time.real_time {\n\n time_span(writer, new_tag(b\"RealTime\"), time, buf)?;\n\n }\n\n\n\n if let Some(time) = time.game_time {\n\n time_span(writer, new_tag(b\"GameTime\"), time, buf)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 7, "score": 502895.89573001594 }, { "content": "pub fn write<W: Write>(mut writer: W, classes: &BTreeMap<String, Class>, opt: &Opt) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"# coding: utf-8\n\nrequire 'ffi'\n\n\n", "file_path": "capi/bind_gen/src/ruby.rs", "rank": 8, "score": 502465.0611639227 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n /// <summary>\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"\n\n /// {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"null\")\n\n .replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n\n\n\n write!(\n\n writer,\n\n r#\"\n\n /// </summary>\"#\n\n )\n\n}\n\n\n", "file_path": "capi/bind_gen/src/csharp.rs", "rank": 9, "score": 485779.96286509396 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n/**\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"\n\n * {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"null\")\n\n .replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n\n\n\n write!(\n\n writer,\n\n r#\"\n\n */\"#\n\n )\n\n}\n\n\n", "file_path": "capi/bind_gen/src/node.rs", "rank": 10, "score": 485779.96286509396 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n/**\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"\n\n * {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"null\")\n\n .replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n\n\n\n write!(\n\n writer,\n\n r#\"\n\n */\"#\n\n )\n\n}\n\n\n", "file_path": "capi/bind_gen/src/wasm.rs", "rank": 11, "score": 485779.96286509396 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"\n\n # {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"nil\")\n\n 
.replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "capi/bind_gen/src/ruby.rs", "rank": 12, "score": 485779.96286509396 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n/**\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"\n\n * {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"null\")\n\n .replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n\n\n\n write!(\n\n writer,\n\n r#\"\n\n */\"#\n\n )\n\n}\n\n\n", "file_path": "capi/bind_gen/src/emscripten.rs", "rank": 13, "score": 485779.96286509396 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n \"\"\"\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"{}\n\n \"#,\n\n comment\n\n .replace(\"<NULL>\", \"None\")\n\n .replace(\"<TRUE>\", \"True\")\n\n .replace(\"<FALSE>\", \"False\")\n\n )?;\n\n }\n\n\n\n write!(\n\n writer,\n\n r#\"\"\"\"\n\n\"#\n\n )\n\n}\n\n\n", "file_path": "capi/bind_gen/src/python.rs", "rank": 14, "score": 485779.96286509396 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n/**\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"\n\n {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"nil\")\n\n .replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n\n\n\n write!(\n\n writer,\n\n r#\"\n\n*/\"#\n\n )\n\n}\n\n\n", "file_path": "capi/bind_gen/src/swift/code.rs", "rank": 15, "score": 481294.1367896891 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n/**\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"\n\n * {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"null\")\n\n .replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n\n\n\n write!(\n\n writer,\n\n r#\"\n\n */\"#\n\n )\n\n}\n", "file_path": "capi/bind_gen/src/java/mod.rs", "rank": 16, "score": 481294.1367896891 }, { "content": "fn write_class_comments<W: Write>(mut writer: W, comments: &[String]) -> Result<()> {\n\n write!(\n\n writer,\n\n r#\"\n\n/**\"#\n\n )?;\n\n\n\n for comment in comments {\n\n write!(\n\n writer,\n\n r#\"\n\n * {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"null\")\n\n .replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n\n\n\n write!(\n\n writer,\n\n r#\"\n\n */\"#\n\n )\n\n}\n\n\n", "file_path": "capi/bind_gen/src/kotlin/jni.rs", "rank": 17, "score": 481294.1367896891 }, { "content": "fn text<W: Write, T: AsRef<[u8]>>(writer: &mut Writer<W>, tag: BytesStart, text: T) -> Result<()> {\n\n let text = text.as_ref();\n\n scoped(writer, tag, text.is_empty(), |writer| {\n\n writer.write_event(Event::Text(BytesText::from_plain(text)))?;\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 18, "score": 466863.4972800533 }, { "content": "fn write_end<W: Write>(writer: &mut Writer<W>, tag: &[u8]) -> Result<()> {\n\n writer.write_event(Event::End(BytesEnd::borrowed(tag)))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 19, "score": 440100.28645225463 }, { "content": "fn write_start<W: Write>(writer: &mut Writer<W>, tag: BytesStart) -> Result<()> {\n\n 
writer.write_event(Event::Start(tag))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 20, "score": 436267.21472135314 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type = get_hl_type(&function.output);\n\n\n\n if is_static {\n\n write!(\n\n writer,\n\n r#\"\n\n @staticmethod\n\n def {}(\"#,\n\n function.method\n\n )?;\n\n } else {\n\n write!(\n\n writer,\n\n r#\"\n\n def {}(\"#,\n\n function.method\n\n )?;\n", "file_path": "capi/bind_gen/src/python.rs", "rank": 21, "score": 435779.5363988963 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type = get_hl_type(&function.output);\n\n let mut method: &str = &function.method;\n\n if method == \"new\" {\n\n method = \"create\";\n\n }\n\n\n\n for comment in &function.comments {\n\n write!(\n\n writer,\n\n r#\"\n\n # {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"nil\")\n\n .replace(\"<TRUE>\", \"true\")\n\n .replace(\"<FALSE>\", \"false\")\n\n )?;\n\n }\n", "file_path": "capi/bind_gen/src/ruby.rs", "rank": 22, "score": 435779.53639889625 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type = get_hl_type(&function.output);\n\n let is_constructor = function.method == \"new\";\n\n\n\n if !function.comments.is_empty() {\n\n write!(\n\n writer,\n\n r#\"\n\n /**\"#\n\n )?;\n\n\n\n for comment in &function.comments {\n\n write!(\n\n writer,\n\n r#\"\n\n {}\"#,\n\n comment\n\n .replace(\"<NULL>\", \"nil\")\n", "file_path": "capi/bind_gen/src/swift/code.rs", "rank": 23, "score": 431944.3715200492 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type = get_hl_type(&function.output);\n\n let is_constructor = function.method == \"new\" && !function.output.is_nullable;\n\n let mut method = function.method.to_mixed_case();\n\n if method == \"clone\" {\n\n method = \"copy\".into();\n\n } else if method == \"close\" {\n\n method = \"finish\".into();\n\n } else if method == \"new\" {\n\n method = \"create\".into();\n\n } else if method == \"default\" {\n\n method = \"createDefault\".into();\n\n }\n\n\n\n if !function.comments.is_empty() {\n\n write!(\n\n writer,\n\n r#\"\n", "file_path": "capi/bind_gen/src/kotlin/jni.rs", "rank": 24, "score": 431944.3715200492 }, { "content": "/// Saves a Run as a LiveSplit splits file (*.lss). 
Use the `save_timer`\n\n/// function if the Run is in use by a timer in order to properly save the\n\n/// current attempt as well.\n\npub fn save_run<W: Write>(run: &Run, writer: W) -> Result<()> {\n\n let writer = &mut Writer::new(writer);\n\n\n\n let buf = &mut Vec::new();\n\n let image_buf = &mut Cow::Borrowed(&LSS_IMAGE_HEADER[..]);\n\n\n\n writer.write_event(Event::Decl(BytesDecl::new(b\"1.0\", Some(b\"UTF-8\"), None)))?;\n\n writer.write_event(Event::Start(BytesStart::borrowed(\n\n br#\"Run version=\"1.7.0\"\"#,\n\n 3,\n\n )))?;\n\n\n\n image(\n\n writer,\n\n new_tag(b\"GameIcon\"),\n\n run.game_icon(),\n\n buf,\n\n image_buf,\n\n )?;\n\n text(writer, new_tag(b\"GameName\"), run.game_name())?;\n", "file_path": "src/run/saver/livesplit.rs", "rank": 25, "score": 416261.79786148574 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function, type_script: bool) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type_with_null = get_hl_type_with_null(&function.output);\n\n let return_type_without_null = get_hl_type_without_null(&function.output);\n\n let method = function.method.to_mixed_case();\n\n let is_json = has_return_type && function.output.name == \"Json\";\n\n\n\n if !function.comments.is_empty() || !type_script {\n\n write!(\n\n writer,\n\n r#\"\n\n /**\"#\n\n )?;\n\n\n\n for comment in &function.comments {\n\n write!(\n\n writer,\n\n r#\"\n\n * {}\"#,\n", "file_path": "capi/bind_gen/src/emscripten.rs", "rank": 26, "score": 413429.5756629243 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function, type_script: bool) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type_with_null = get_hl_type_with_null(&function.output);\n\n let return_type_without_null = get_hl_type_without_null(&function.output);\n\n let method = function.method.to_mixed_case();\n\n let is_json = has_return_type && function.output.name == \"Json\";\n\n\n\n if !function.comments.is_empty() || !type_script {\n\n write!(\n\n writer,\n\n r#\"\n\n /**\"#\n\n )?;\n\n\n\n for comment in &function.comments {\n\n write!(\n\n writer,\n\n r#\"\n\n * {}\"#,\n", "file_path": "capi/bind_gen/src/wasm.rs", "rank": 27, "score": 413429.57566292444 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function, type_script: bool) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type_with_null = get_hl_type_with_null(&function.output);\n\n let return_type_without_null = get_hl_type_without_null(&function.output);\n\n let method = function.method.to_mixed_case();\n\n let is_json = has_return_type && function.output.name == \"Json\";\n\n\n\n if !function.comments.is_empty() || !type_script {\n\n write!(\n\n writer,\n\n r#\"\n\n /**\"#\n\n )?;\n\n\n\n for comment in &function.comments {\n\n write!(\n\n writer,\n\n r#\"\n\n * {}\"#,\n", "file_path": "capi/bind_gen/src/node.rs", "rank": 28, "score": 413429.57566292444 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function, class_name: &str) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type = get_hl_type(&function.output);\n\n let return_type_ll = get_ll_type(&function.output, true);\n\n let is_constructor = function.method == \"new\" && !function.output.is_nullable;\n\n\n\n if !function.comments.is_empty() {\n\n 
write!(\n\n writer,\n\n r#\"\n\n /// <summary>\"#\n\n )?;\n\n\n\n for comment in &function.comments {\n\n write!(\n\n writer,\n\n r#\"\n\n /// {}\"#,\n\n comment\n", "file_path": "capi/bind_gen/src/csharp.rs", "rank": 29, "score": 413429.5756629244 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function, class_name: &str) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type = get_hl_type(&function.output);\n\n let return_type_ll = get_ll_type(&function.output);\n\n let is_constructor = function.method == \"new\" && !function.output.is_nullable;\n\n let mut method = function.method.to_mixed_case();\n\n if method == \"clone\" {\n\n method = \"copy\".into();\n\n } else if method == \"close\" {\n\n method = \"finish\".into();\n\n } else if method == \"new\" {\n\n method = \"create\".into();\n\n } else if method == \"default\" {\n\n method = \"createDefault\".into();\n\n }\n\n\n\n if !function.comments.is_empty() {\n\n write!(\n\n writer,\n", "file_path": "capi/bind_gen/src/java/jna.rs", "rank": 30, "score": 409911.4840870364 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function, class_name: &str) -> Result<()> {\n\n let has_return_type = function.has_return_type();\n\n let return_type = get_jni_type(&function.output);\n\n\n\n write!(\n\n writer,\n\n r#\"\n\nextern \"C\" JNIEXPORT {} Java_livesplitcore_LiveSplitCoreNative_{}_1{}(JNIEnv* jni_env, jobject\"#,\n\n return_type,\n\n class_name,\n\n function.method.to_mixed_case()\n\n )?;\n\n\n\n for &(ref name, ref typ) in &function.inputs {\n\n write!(\n\n writer,\n\n \", {} {}\",\n\n get_jni_type(typ),\n\n if name == \"this\" { \"self\" } else { name }\n\n )?;\n", "file_path": "capi/bind_gen/src/jni_cpp.rs", "rank": 31, "score": 409911.48408703634 }, { "content": "fn write_fn<W: Write>(mut writer: W, function: &Function, class_name: &str) -> Result<()> {\n\n let is_static = function.is_static();\n\n let has_return_type = function.has_return_type();\n\n let return_type = get_hl_type(&function.output);\n\n let is_constructor = function.method == \"new\" && !function.output.is_nullable;\n\n let mut method = function.method.to_mixed_case();\n\n if method == \"clone\" {\n\n method = \"copy\".into();\n\n } else if method == \"close\" {\n\n method = \"finish\".into();\n\n } else if method == \"new\" {\n\n method = \"create\".into();\n\n } else if method == \"default\" {\n\n method = \"createDefault\".into();\n\n }\n\n\n\n if !function.comments.is_empty() {\n\n write!(\n\n writer,\n\n r#\"\n", "file_path": "capi/bind_gen/src/java/jni.rs", "rank": 32, "score": 409911.48408703634 }, { "content": "fn scoped<W, F>(writer: &mut Writer<W>, tag: BytesStart, is_empty: bool, scope: F) -> Result<()>\n\nwhere\n\n W: Write,\n\n F: FnOnce(&mut Writer<W>) -> Result<()>,\n\n{\n\n if is_empty {\n\n writer.write_event(Event::Empty(tag))?;\n\n } else {\n\n let (start, end) = split_tag(&tag);\n\n writer.write_event(Event::Start(start))?;\n\n scope(writer)?;\n\n writer.write_event(Event::End(end))?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 33, "score": 334327.61330547865 }, { "content": "/// Calculates whether or not the Split Times for the indicated split qualify as\n\n/// a Best Segment.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `split_number`: The split to check.\n\n/// - `method`: The timing method to use.\n\n///\n\n/// Returns whether or not the indicated split is a Best Segment.\n\npub fn 
check_best_segment(timer: &Timer, split_number: usize, method: TimingMethod) -> bool {\n\n if timer.run().segment(split_number).split_time()[method].is_none() {\n\n return false;\n\n }\n\n\n\n let delta = previous_segment_delta(timer, split_number, best_segments::NAME, method);\n\n let current_segment = previous_segment_time(timer, split_number, method);\n\n let best_segment = timer.run().segment(split_number).best_segment_time()[method];\n\n best_segment.map_or(true, |b| {\n\n current_segment.map_or(false, |c| c < b) || delta.map_or(false, |d| d < TimeSpan::zero())\n\n })\n\n}\n", "file_path": "src/analysis/state_helper.rs", "rank": 34, "score": 324914.10027766076 }, { "content": "pub fn write<W: Write>(\n\n mut writer: W,\n\n classes: &BTreeMap<String, Class>,\n\n type_script: bool,\n\n) -> Result<()> {\n\n if type_script {\n\n writeln!(\n\n writer,\n\n \"{}{}\",\n\n r#\"// tslint:disable\n\nlet wasm: WebAssembly.ResultObject | null = null;\n\n\n\ndeclare namespace WebAssembly {\n", "file_path": "capi/bind_gen/src/wasm.rs", "rank": 35, "score": 322065.99534966727 }, { "content": "pub fn write<W: Write>(\n\n mut writer: W,\n\n classes: &BTreeMap<String, Class>,\n\n type_script: bool,\n\n) -> Result<()> {\n\n if type_script {\n\n writeln!(\n\n writer,\n\n \"{}{}\",\n\n r#\"// tslint:disable\n\nconst LiveSplitCore = require('./livesplit_core');\n\nconst emscriptenModule = LiveSplitCore({});\n\nconst liveSplitCoreNative: any = {};\n\n\n\n\"#,\n\n typescript::HEADER\n\n )?;\n\n } else {\n\n writeln!(\n\n writer,\n", "file_path": "capi/bind_gen/src/emscripten.rs", "rank": 36, "score": 322065.99534966727 }, { "content": "pub fn write<W: Write>(\n\n mut writer: W,\n\n classes: &BTreeMap<String, Class>,\n\n type_script: bool,\n\n) -> Result<()> {\n\n if type_script {\n\n write!(\n\n writer,\n\n r#\"\"use strict\";\n\n// tslint:disable\n\nimport ffi = require('ffi');\n\nimport fs = require('fs');\n\nimport ref = require('ref');\n\n\n\n{}\n\n\n\nconst liveSplitCoreNative = ffi.Library('livesplit_core', {{\"#,\n\n typescript::HEADER\n\n )?;\n\n } else {\n", "file_path": "capi/bind_gen/src/node.rs", "rank": 37, "score": 322065.99534966727 }, { "content": "fn time<W: Write>(\n\n writer: &mut Writer<W>,\n\n tag: BytesStart,\n\n time: Time,\n\n buf: &mut Vec<u8>,\n\n) -> Result<()> {\n\n scoped(\n\n writer,\n\n tag,\n\n time.real_time.is_none() && time.game_time.is_none(),\n\n |writer| time_inner(writer, time, buf),\n\n )\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 38, "score": 316573.72688736115 }, { "content": "pub fn text<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(Cow<str>),\n\n{\n\n text_err(reader, buf, |t| {\n\n f(t);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 39, "score": 308297.6823111059 }, { "content": "pub fn write<P: AsRef<Path>>(path: P, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n let mut path = path.as_ref().to_owned();\n\n\n\n path.push(\"LiveSplitCoreNative\");\n\n create_dir_all(&path)?;\n\n\n\n path.push(\"livesplit_core.h\");\n\n header::write(BufWriter::new(File::create(&path)?), classes)?;\n\n path.pop();\n\n\n\n path.push(\"module.map\");\n\n write!(BufWriter::new(File::create(&path)?), \"{}\", MODULE_MAP)?;\n\n path.pop();\n\n\n\n path.pop();\n\n\n\n path.push(\"LiveSplitCore.swift\");\n\n code::write(BufWriter::new(File::create(&path)?), classes)?;\n\n path.pop();\n\n\n\n Ok(())\n\n}\n", "file_path": "capi/bind_gen/src/swift/mod.rs", "rank": 40, 
"score": 294130.60852264194 }, { "content": "pub fn write<P: AsRef<Path>>(path: P, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n let mut path = path.as_ref().to_owned();\n\n\n\n path.push(\"LiveSplitCoreNative.kt\");\n\n write_native_class(&path, classes)?;\n\n path.pop();\n\n\n\n for (class_name, class) in classes {\n\n path.push(format!(\"{}Ref\", class_name));\n\n path.set_extension(\"kt\");\n\n write_class_ref(&path, class_name, class)?;\n\n path.pop();\n\n\n\n path.push(format!(\"{}RefMut\", class_name));\n\n path.set_extension(\"kt\");\n\n write_class_ref_mut(&path, class_name, class)?;\n\n path.pop();\n\n\n\n path.push(class_name);\n\n path.set_extension(\"kt\");\n\n write_class(&path, class_name, class)?;\n\n path.pop();\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "capi/bind_gen/src/kotlin/jni.rs", "rank": 41, "score": 294130.60852264194 }, { "content": "pub fn write<P: AsRef<Path>>(path: P, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n let mut path = path.as_ref().to_owned();\n\n\n\n path.push(\"jni\");\n\n create_dir_all(&path)?;\n\n jni::write(&path, classes)?;\n\n path.pop();\n\n\n\n path.push(\"LiveSplitCoreJNI.cpp\");\n\n jni_cpp::write(BufWriter::new(File::create(&path)?), classes)?;\n\n path.pop();\n\n\n\n Ok(())\n\n}\n", "file_path": "capi/bind_gen/src/kotlin/mod.rs", "rank": 42, "score": 294130.6085226419 }, { "content": "pub fn write<P: AsRef<Path>>(path: P, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n let mut path = path.as_ref().to_owned();\n\n\n\n path.push(\"LiveSplitCoreNative.java\");\n\n write_native_class(&path, classes)?;\n\n path.pop();\n\n\n\n for (class_name, class) in classes {\n\n path.push(format!(\"{}Ref\", class_name));\n\n path.set_extension(\"java\");\n\n write_class_ref(&path, class_name, class)?;\n\n path.pop();\n\n\n\n path.push(format!(\"{}RefMut\", class_name));\n\n path.set_extension(\"java\");\n\n write_class_ref_mut(&path, class_name, class)?;\n\n path.pop();\n\n\n\n path.push(class_name);\n\n path.set_extension(\"java\");\n\n write_class(&path, class_name, class)?;\n\n path.pop();\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "capi/bind_gen/src/java/jna.rs", "rank": 43, "score": 294130.60852264194 }, { "content": "pub fn write<P: AsRef<Path>>(path: P, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n let mut path = path.as_ref().to_owned();\n\n\n\n path.push(\"LiveSplitCoreNative.java\");\n\n write_native_class(&path, classes)?;\n\n path.pop();\n\n\n\n for (class_name, class) in classes {\n\n path.push(format!(\"{}Ref\", class_name));\n\n path.set_extension(\"java\");\n\n write_class_ref(&path, class_name, class)?;\n\n path.pop();\n\n\n\n path.push(format!(\"{}RefMut\", class_name));\n\n path.set_extension(\"java\");\n\n write_class_ref_mut(&path, class_name, class)?;\n\n path.pop();\n\n\n\n path.push(class_name);\n\n path.set_extension(\"java\");\n\n write_class(&path, class_name, class)?;\n\n path.pop();\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "capi/bind_gen/src/java/jni.rs", "rank": 44, "score": 294130.60852264194 }, { "content": "pub fn write<P: AsRef<Path>>(path: P, classes: &BTreeMap<String, Class>) -> Result<()> {\n\n let mut path = path.as_ref().to_owned();\n\n\n\n path.push(\"jna\");\n\n create_dir_all(&path)?;\n\n jna::write(&path, classes)?;\n\n path.pop();\n\n\n\n path.push(\"jni\");\n\n create_dir_all(&path)?;\n\n jni::write(&path, classes)?;\n\n path.pop();\n\n\n\n path.push(\"LiveSplitCoreJNI.cpp\");\n\n jni_cpp::write(BufWriter::new(File::create(&path)?), classes)?;\n\n path.pop();\n\n\n\n 
Ok(())\n\n}\n\n\n", "file_path": "capi/bind_gen/src/java/mod.rs", "rank": 45, "score": 294130.60852264194 }, { "content": "pub fn text_err<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(Cow<str>) -> Result<()>,\n\n{\n\n text_as_bytes_err(reader, buf, |b| f(decode_cow_text(b)?))\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 46, "score": 283564.2459725736 }, { "content": "pub fn text_parsed<R, F, T>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(T),\n\n T: str::FromStr,\n\n <T as str::FromStr>::Err: Into<Error>,\n\n{\n\n text_err(reader, buf, |t| {\n\n f(t.parse().map_err(Into::into)?);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 47, "score": 278182.9135228344 }, { "content": "/// Tries to resolve the given comparison based on a Timer object. If either\n\n/// `None` is given or the comparison doesn't exist, `None` is returned.\n\n/// Otherwise the comparison name stored in the Timer is returned by reference.\n\npub fn resolve<'a>(comparison: &Option<String>, timer: &'a Timer) -> Option<&'a str> {\n\n let comparison = comparison.as_ref()?;\n\n timer.run().comparisons().find(|&rc| comparison == rc)\n\n}\n", "file_path": "src/comparison/mod.rs", "rank": 48, "score": 278083.81094526255 }, { "content": "fn time_span<W: Write>(\n\n writer: &mut Writer<W>,\n\n tag: BytesStart,\n\n time: TimeSpan,\n\n buf: &mut Vec<u8>,\n\n) -> Result<()> {\n\n write_display(writer, tag, Complete.format(time), buf)\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 49, "score": 277440.1778804919 }, { "content": "pub fn text_as_bytes_err<R, F, T>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<T>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(Cow<[u8]>) -> Result<T>,\n\n{\n\n let val;\n\n loop {\n\n buf.clear();\n\n match reader.read_event(buf)? {\n\n Event::Start(_) => return Err(Error::UnexpectedElement),\n\n Event::End(_) => {\n\n return f(Cow::Borrowed(&[]));\n\n }\n\n Event::Text(text) | Event::CData(text) => {\n\n let text = text.unescaped()?;\n\n val = f(text)?;\n\n break;\n\n }\n\n Event::Eof => return Err(Error::UnexpectedEndOfFile),\n\n _ => {}\n\n }\n\n }\n\n end_tag_immediately(reader, buf)?;\n\n Ok(val)\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 50, "score": 270335.9740733 }, { "content": "fn time<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, mut f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnMut(Time),\n\n{\n\n time_span(reader, buf, |t| f(RealTime(Some(t)).into()))\n\n}\n\n\n", "file_path": "src/run/parser/llanfair2.rs", "rank": 51, "score": 263399.04434452805 }, { "content": "/// Calculates the current pace of the active attempt based on the comparison\n\n/// provided. If there's no active attempt, the final time of the comparison is\n\n/// returned instead.\n\npub fn calculate(timer: &Timer, comparison: &str) -> Option<TimeSpan> {\n\n let timing_method = timer.current_timing_method();\n\n let last_segment = timer.run().segments().last().unwrap();\n\n\n\n match timer.current_phase() {\n\n TimerPhase::Running | TimerPhase::Paused => {\n\n let mut delta = analysis::last_delta(\n\n timer.run(),\n\n timer.current_split_index().unwrap(),\n\n comparison,\n\n timing_method,\n\n ).unwrap_or_default();\n\n\n\n catch! 
{\n\n let live_delta = timer.current_time()[timing_method]?\n\n - timer.current_split().unwrap().comparison(comparison)[timing_method]?;\n\n\n\n if live_delta > delta {\n\n delta = live_delta;\n\n }\n", "file_path": "src/analysis/current_pace.rs", "rank": 52, "score": 261677.27697058502 }, { "content": "/// Calculates how much time could be saved on the remainder of the run with the\n\n/// given comparison. This information is based on the best segments.\n\n/// Considering the best segments don't represent theoretically perfect segment\n\n/// times, this information is only an approximation of how much time can\n\n/// actually be saved. This information is always live, so the total possible\n\n/// time save will shrink towards zero throughout the run and when time is lost\n\n/// on a segment. The time returned by this function can never be below zero.\n\npub fn calculate_total(timer: &Timer, segment_index: usize, comparison: &str) -> TimeSpan {\n\n let mut total = TimeSpan::zero();\n\n\n\n for index in segment_index..timer.run().len() {\n\n if let Some(time_save) = calculate(timer, index, comparison, true) {\n\n total += time_save;\n\n }\n\n }\n\n\n\n total\n\n}\n", "file_path": "src/analysis/possible_time_save.rs", "rank": 53, "score": 259967.44550318047 }, { "content": "/// Calculates the delta of the current attempt to the comparison provided.\n\n/// Additionally a value is returned that indicates whether the delta value is a\n\n/// live delta. A live delta indicates that the value is actively changing at\n\n/// the moment. This may be the case when the current attempt is slower than the\n\n/// comparison at the current split.\n\npub fn calculate(timer: &Timer, comparison: &str) -> (Option<TimeSpan>, bool) {\n\n let timing_method = timer.current_timing_method();\n\n let last_segment = timer.run().segments().last().unwrap();\n\n\n\n let mut use_live_delta = false;\n\n\n\n let time = match timer.current_phase() {\n\n TimerPhase::Running | TimerPhase::Paused => {\n\n let mut delta = analysis::last_delta(\n\n timer.run(),\n\n timer.current_split_index().unwrap(),\n\n comparison,\n\n timing_method,\n\n );\n\n\n\n catch! 
{\n\n let live_delta = timer.current_time()[timing_method]?\n\n - timer.current_split().unwrap().comparison(comparison)[timing_method]?;\n\n\n\n if live_delta > delta.unwrap_or_default() {\n", "file_path": "src/analysis/delta.rs", "rank": 54, "score": 258696.84324581485 }, { "content": "fn populate_prediction(prediction: &mut Option<TimeSpan>, predicted_time: Option<TimeSpan>) {\n\n if let Some(predicted_time) = predicted_time {\n\n if prediction.map_or(true, |t| predicted_time < t) {\n\n *prediction = Some(predicted_time);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/analysis/sum_of_segments/best.rs", "rank": 55, "score": 257818.37620323 }, { "content": "fn time<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(Time),\n\n{\n\n let mut time = Time::new();\n\n\n\n parse_children(reader, buf, |reader, tag| {\n\n if tag.name() == b\"RealTime\" {\n\n time_span_opt(reader, tag.into_buf(), |t| {\n\n time.real_time = t;\n\n })\n\n } else if tag.name() == b\"GameTime\" {\n\n time_span_opt(reader, tag.into_buf(), |t| {\n\n time.game_time = t;\n\n })\n\n } else {\n\n end_tag(reader, tag.into_buf())\n\n }\n\n })?;\n\n\n\n f(time);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/run/parser/livesplit.rs", "rank": 56, "score": 257027.07854919287 }, { "content": "/// Attempts to parse a Llanfair splits file.\n\npub fn parse<R: Read + Seek>(mut source: R) -> Result<Run> {\n\n let mut buf = Vec::new();\n\n let mut buf2 = Vec::new();\n\n\n\n // The protocol is documented here:\n\n // https://docs.oracle.com/javase/7/docs/platform/serialization/spec/protocol.html\n\n\n\n const HEADER: [u8; 30] = [\n\n 0xAC, 0xED, // Magic\n\n 0x00, 0x05, // Version\n\n 0x73, // New Object\n\n 0x72, // New Class Declaration\n\n 0x00, 0x16, // Length of Class Name\n\n // org.fenix.llanfair.Run\n\n 0x6F, 0x72, 0x67, 0x2E, 0x66, 0x65, 0x6E, 0x69, 0x78, 0x2E, 0x6C,\n\n 0x6C, 0x61, 0x6E, 0x66, 0x61, 0x69, 0x72, 0x2E, 0x52, 0x75, 0x6E,\n\n ];\n\n let mut header_buf = [0; 30];\n\n source.read_exact(&mut header_buf)?;\n\n if HEADER != header_buf {\n", "file_path": "src/run/parser/llanfair.rs", "rank": 57, "score": 255846.9078932562 }, { "content": "fn fix_history_from_none_best_segments(segment: &mut Segment, method: TimingMethod) {\n\n // Only do anything if the Best Segment Time is gone for the Segment in question\n\n if segment.best_segment_time()[method].is_none() {\n\n // Keep only the skipped segments\n\n segment\n\n .segment_history_mut()\n\n .retain(|&(_, time)| time[method].is_none());\n\n }\n\n}\n\n\n", "file_path": "src/run/run.rs", "rank": 58, "score": 254771.51020668916 }, { "content": "fn time<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(Time),\n\n{\n\n time_span(reader, buf, |t| f(RealTime(Some(t)).into()))\n\n}\n\n\n", "file_path": "src/run/parser/llanfair_gered.rs", "rank": 59, "score": 254547.09681206994 }, { "content": "pub fn parse_children<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, mut f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnMut(&mut Reader<R>, Tag) -> Result<()>,\n\n{\n\n unsafe {\n\n let ptr_buf: *mut Vec<u8> = buf;\n\n loop {\n\n buf.clear();\n\n match reader.read_event(buf)? 
{\n\n Event::Start(start) => {\n\n let tag = Tag::new(start, ptr_buf);\n\n f(reader, tag)?;\n\n }\n\n Event::End(_) => return Ok(()),\n\n Event::Eof => return Err(Error::UnexpectedEndOfFile),\n\n _ => {}\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 60, "score": 253903.777176007 }, { "content": "pub fn end_tag<R: BufRead>(reader: &mut Reader<R>, buf: &mut Vec<u8>) -> Result<()> {\n\n let mut depth = 0;\n\n loop {\n\n buf.clear();\n\n match reader.read_event(buf)? {\n\n Event::Start(_) => {\n\n depth += 1;\n\n }\n\n Event::End(_) => {\n\n if depth == 0 {\n\n return Ok(());\n\n }\n\n depth -= 1;\n\n }\n\n Event::Eof => return Err(Error::UnexpectedEndOfFile),\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 61, "score": 252373.3438972265 }, { "content": "#[allow(needless_range_loop)]\n\npub fn calculate(\n\n segments: &[Segment],\n\n predictions: &mut [Option<TimeSpan>],\n\n simple_calculation: bool,\n\n use_current_run: bool,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n predictions[0] = Some(TimeSpan::zero());\n\n let end_index = segments.len();\n\n for segment_index in 0..end_index {\n\n let current_time = predictions[segment_index];\n\n populate_predictions(\n\n segments,\n\n current_time,\n\n segment_index,\n\n predictions,\n\n simple_calculation,\n\n use_current_run,\n\n method,\n\n );\n\n }\n\n predictions[end_index]\n\n}\n", "file_path": "src/analysis/sum_of_segments/best.rs", "rank": 62, "score": 249773.1236947949 }, { "content": "pub fn parse_attributes<'a, F>(tag: &BytesStart<'a>, mut f: F) -> Result<()>\n\nwhere\n\n F: FnMut(&[u8], AttributeValue) -> Result<bool>,\n\n{\n\n for attribute in tag.attributes() {\n\n let attribute = attribute?;\n\n let key = attribute.key;\n\n if !f(key, AttributeValue(&attribute))? {\n\n return Ok(());\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 63, "score": 249492.35866390186 }, { "content": "/// Calculates the Sum of Best Segments for the timing method provided. This is\n\n/// the fastest time possible to complete a run of a category, based on\n\n/// information collected from all the previous attempts. This often matches up\n\n/// with the sum of the best segment times of all the segments, but that may not\n\n/// always be the case, as skipped segments may introduce combined segments that\n\n/// may be faster than the actual sum of their best segment times. The name is\n\n/// therefore a bit misleading, but sticks around for historical reasons. You\n\n/// can choose to do a simple calculation instead, which excludes the Segment\n\n/// History from the calculation process. 
If there's an active attempt, you can\n\n/// choose to take it into account as well.\n\npub fn calculate_best(\n\n segments: &[Segment],\n\n simple_calculation: bool,\n\n use_current_run: bool,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n let mut predictions = Vec::with_capacity(segments.len() + 1);\n\n predictions.resize(segments.len() + 1, None);\n\n best::calculate(\n\n segments,\n\n &mut predictions,\n\n simple_calculation,\n\n use_current_run,\n\n method,\n\n )\n\n}\n\n\n", "file_path": "src/analysis/sum_of_segments/mod.rs", "rank": 64, "score": 246054.63593385788 }, { "content": "/// Gets the length of the last segment that leads up to a certain split.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `split_number`: The index of the split that represents the end of the\n\n/// segment.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the length of the segment leading up to `split_number`, returning\n\n/// None if the split is not completed yet.\n\npub fn previous_segment_time(\n\n timer: &Timer,\n\n split_number: usize,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_time_or_segment_delta(\n\n timer,\n\n split_number,\n\n false,\n\n true,\n\n personal_best::NAME,\n\n method,\n\n )\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 65, "score": 245141.94621588002 }, { "content": "/// Gets the length of the last segment that leads up to a certain split, using\n\n/// the live segment time if the split is not completed yet.\n\n///\n\n/// - `timer`: The current timer.\n\n/// - `split_number`: The index of the split that represents the end of the\n\n/// segment.\n\n/// - `method`: The timing method that you are using.\n\n///\n\n/// Returns the length of the segment leading up to `split_number`, returning\n\n/// the live segment time if the split is not completed yet.\n\npub fn live_segment_time(\n\n timer: &Timer,\n\n split_number: usize,\n\n method: TimingMethod,\n\n) -> Option<TimeSpan> {\n\n segment_time_or_segment_delta(timer, split_number, true, true, personal_best::NAME, method)\n\n}\n\n\n", "file_path": "src/analysis/state_helper.rs", "rank": 66, "score": 245139.65600194657 }, { "content": "fn parse_time(time: &str) -> Result<Time> {\n\n let real_time = time.parse::<TimeSpan>()?;\n\n\n\n // Empty Time is stored as zero\n\n let real_time = if real_time != TimeSpan::zero() {\n\n Some(real_time)\n\n } else {\n\n None\n\n };\n\n\n\n Ok(Time::new().with_real_time(real_time))\n\n}\n\n\n", "file_path": "src/run/parser/urn.rs", "rank": 67, "score": 242306.02121263507 }, { "content": "fn parse_date_time<S: AsRef<str>>(text: S) -> Result<DateTime<Utc>> {\n\n Utc.datetime_from_str(text.as_ref(), \"%m/%d/%Y %T\")\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "src/run/parser/livesplit.rs", "rank": 68, "score": 240601.9468605742 }, { "content": "pub fn attribute<'a, F>(tag: &BytesStart<'a>, key: &[u8], mut f: F) -> Result<()>\n\nwhere\n\n F: FnMut(Cow<str>),\n\n{\n\n attribute_err(tag, key, |t| {\n\n f(t);\n\n Ok(())\n\n })\n\n}\n", "file_path": "src/run/parser/xml_util.rs", "rank": 69, "score": 240460.89441257023 }, { "content": "fn parse_time(time: &str) -> Result<Time> {\n\n // Replace \",\" by \".\" as \",\" wouldn't parse\n\n let time: TimeSpan = replace(time, \",\", \".\").parse()?;\n\n // Skipped is stored as a zero time in FaceSplit Splits\n\n if time == TimeSpan::zero() {\n\n Ok(Time::default())\n\n } else {\n\n Ok(RealTime(Some(time)).into())\n\n }\n\n}\n\n\n", "file_path": "src/run/parser/face_split.rs", 
"rank": 70, "score": 240135.48725514082 }, { "content": "fn time_span<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, mut f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnMut(TimeSpan),\n\n{\n\n single_child(reader, buf, b\"value\", |reader, tag| {\n\n text_err(reader, tag.into_buf(), |text| {\n\n let milliseconds = text.parse::<i64>()?;\n\n f(TimeSpan::from_milliseconds(milliseconds as f64));\n\n Ok(())\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/run/parser/llanfair2.rs", "rank": 71, "score": 239382.20160883287 }, { "content": "pub fn attribute_err<'a, F>(tag: &BytesStart<'a>, key: &[u8], mut f: F) -> Result<()>\n\nwhere\n\n F: FnMut(Cow<str>) -> Result<()>,\n\n{\n\n let mut called = false;\n\n parse_attributes(tag, |k, v| {\n\n if k == key {\n\n f(v.get()?)?;\n\n called = true;\n\n Ok(false)\n\n } else {\n\n Ok(true)\n\n }\n\n })?;\n\n if called {\n\n Ok(())\n\n } else {\n\n Err(Error::AttributeNotFound)\n\n }\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 72, "score": 237800.2557371728 }, { "content": "/// Attempts to parse a SourceLiveTimer splits file.\n\npub fn parse<R: Read>(source: R) -> Result<Run> {\n\n let mut run = Run::new();\n\n let splits: Splits = from_reader(source)?;\n\n\n\n if splits.Category.starts_with(\"Portal 2\") {\n\n run.set_game_name(\"Portal 2\");\n\n } else if splits.Category.starts_with(\"Portal\") {\n\n run.set_game_name(\"Portal\");\n\n } else if splits.Category.starts_with(\"Half Life 2\") {\n\n run.set_game_name(\"Half Life 2\");\n\n }\n\n\n\n if let Some(run_name) = splits.RunName {\n\n if run_name != splits.Category {\n\n run.set_category_name(run_name);\n\n } else {\n\n run.set_category_name(splits.Category);\n\n }\n\n } else {\n\n run.set_category_name(splits.Category);\n", "file_path": "src/run/parser/source_live_timer.rs", "rank": 73, "score": 236316.5826118879 }, { "content": "fn image<W: Write>(\n\n writer: &mut Writer<W>,\n\n tag: BytesStart,\n\n image: &Image,\n\n buf: &mut Vec<u8>,\n\n image_buf: &mut Cow<[u8]>,\n\n) -> Result<()> {\n\n let url = image.url();\n\n if url.starts_with(\"data:;base64,\") {\n\n let src = &url[\"data:;base64,\".len()..];\n\n buf.clear();\n\n if base64::decode_config_buf(src, base64::STANDARD, buf).is_ok() {\n\n let len = buf.len();\n\n let image_buf = image_buf.to_mut();\n\n image_buf.truncate(LSS_IMAGE_HEADER.len());\n\n image_buf.reserve(len + 6);\n\n image_buf.write_u32::<LE>(len as u32).unwrap();\n\n image_buf.push(0x2);\n\n image_buf.append(buf);\n\n image_buf.push(0xB);\n", "file_path": "src/run/saver/livesplit.rs", "rank": 74, "score": 235357.20603931317 }, { "content": "pub fn optional_attribute_err<'a, F>(tag: &BytesStart<'a>, key: &[u8], mut f: F) -> Result<()>\n\nwhere\n\n F: FnMut(Cow<str>) -> Result<()>,\n\n{\n\n parse_attributes(tag, |k, v| {\n\n if k == key {\n\n f(v.get()?)?;\n\n Ok(false)\n\n } else {\n\n Ok(true)\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/run/parser/xml_util.rs", "rank": 75, "score": 235228.9907161203 }, { "content": "fn run_with_splits(timer: &mut Timer, splits: &[f64]) {\n\n timer.start();\n\n timer.initialize_game_time();\n\n timer.pause_game_time();\n\n\n\n for &split in splits {\n\n timer.set_game_time(TimeSpan::from_seconds(split));\n\n timer.split();\n\n }\n\n\n\n timer.reset(true);\n\n}\n\n\n", "file_path": "benches/balanced_pb.rs", "rank": 76, "score": 234242.7481000363 }, { "content": "/// Attempts to parse a Portal 2 Live Timer splits file.\n\npub fn parse<R: BufRead>(source: R) -> Result<Run> {\n\n let mut run = Run::new();\n\n\n\n 
run.set_game_name(\"Portal 2\");\n\n run.set_category_name(\"Any%\");\n\n\n\n let mut lines = source.lines().peekable();\n\n lines.next(); // Skip the header\n\n\n\n let mut aggregate_ticks = 0.0;\n\n\n\n for &(chapter_name, maps) in &CHAPTERS {\n\n for &map in maps {\n\n let line = lines.next().ok_or(Error::ExpectedMap)??;\n\n let mut splits = line.split(',');\n\n let map_name = splits.next().ok_or(Error::ExpectedMapName)?;\n\n if map_name != map {\n\n return Err(Error::ExpectedDifferentMapName);\n\n }\n\n let start_ticks: f64 = splits.next().ok_or(Error::ExpectedStartTicks)?.parse()?;\n", "file_path": "src/run/parser/portal2_live_timer.rs", "rank": 77, "score": 233468.42303946312 }, { "content": "fn time_old<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(Time),\n\n{\n\n time_span_opt(reader, buf, |t| f(Time::new().with_real_time(t)))\n\n}\n\n\n", "file_path": "src/run/parser/livesplit.rs", "rank": 78, "score": 232122.9636419933 }, { "content": "fn time_span<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(TimeSpan),\n\n{\n\n text_err(reader, buf, |text| {\n\n let time_span = || -> Result<TimeSpan> {\n\n if let (Some(dot_index), Some(colon_index)) = (text.find('.'), text.find(':')) {\n\n if dot_index < colon_index {\n\n let days = TimeSpan::from_days(text[..dot_index].parse()?);\n\n let time = text[dot_index + 1..].parse()?;\n\n return Ok(days + time);\n\n }\n\n }\n\n text.parse().map_err(Into::into)\n\n }()?;\n\n f(time_span);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/run/parser/livesplit.rs", "rank": 79, "score": 232122.9636419933 }, { "content": "fn write_files(classes: &BTreeMap<String, Class>, opt: &Opt) -> Result<()> {\n\n let mut path = PathBuf::from(\"..\");\n\n path.push(\"bindings\");\n\n\n\n remove_dir_all(&path).ok();\n\n create_dir_all(&path)?;\n\n\n\n path.push(\"emscripten\");\n\n create_dir_all(&path)?;\n\n {\n\n path.push(\"livesplit_core.js\");\n\n emscripten::write(BufWriter::new(File::create(&path)?), classes, false)?;\n\n path.pop();\n\n\n\n path.push(\"livesplit_core.ts\");\n\n emscripten::write(BufWriter::new(File::create(&path)?), classes, true)?;\n\n path.pop();\n\n }\n\n path.pop();\n\n\n", "file_path": "capi/bind_gen/src/main.rs", "rank": 80, "score": 231655.80503742833 }, { "content": "/// Attempts to parse a splits file by invoking the corresponding parser for the\n\n/// file format detected. A path to the splits file can be provided, which helps\n\n/// saving the splits file again later. Additionally you need to specify if\n\n/// additional files, like external images are allowed to be loaded. If you are\n\n/// using livesplit-core in a server-like environment, set this to `false`. 
Only\n\n/// client-side applications should set this to `true`.\n\npub fn parse<R>(mut source: R, path: Option<PathBuf>, load_files: bool) -> Result<ParsedRun>\n\nwhere\n\n R: BufRead + Seek,\n\n{\n\n let files_path =\n\n if load_files { path.clone() } else { None };\n\n\n\n source.seek(SeekFrom::Start(0))?;\n\n if let Ok(run) = livesplit::parse(&mut source, path) {\n\n return Ok(parsed(run, TimerKind::LiveSplit));\n\n }\n\n\n\n source.seek(SeekFrom::Start(0))?;\n\n if let Ok(run) = wsplit::parse(&mut source, load_files) {\n\n return Ok(parsed(run, TimerKind::WSplit));\n\n }\n\n\n\n source.seek(SeekFrom::Start(0))?;\n\n if let Ok(run) = splitterz::parse(&mut source, load_files) {\n\n return Ok(parsed(run, TimerKind::SplitterZ));\n", "file_path": "src/run/parser/composite.rs", "rank": 81, "score": 230346.0627187082 }, { "content": "fn time_span<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(TimeSpan),\n\n{\n\n text_err(reader, buf, |text| {\n\n let milliseconds = text.parse::<i64>()?;\n\n f(TimeSpan::from_milliseconds(milliseconds as f64));\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/run/parser/llanfair_gered.rs", "rank": 82, "score": 229718.69646855403 }, { "content": "fn time_span_opt<R, F>(reader: &mut Reader<R>, buf: &mut Vec<u8>, f: F) -> Result<()>\n\nwhere\n\n R: BufRead,\n\n F: FnOnce(Option<TimeSpan>),\n\n{\n\n text_err(reader, buf, |text| {\n\n let time_span = || -> Result<Option<TimeSpan>> {\n\n if text.is_empty() {\n\n return Ok(None);\n\n }\n\n if let (Some(dot_index), Some(colon_index)) = (text.find('.'), text.find(':')) {\n\n if dot_index < colon_index {\n\n let days = TimeSpan::from_days(text[..dot_index].parse()?);\n\n let time = text[dot_index + 1..].parse()?;\n\n return Ok(Some(days + time));\n\n }\n\n }\n\n Ok(Some(text.parse()?))\n\n }()?;\n\n f(time_span);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "src/run/parser/livesplit.rs", "rank": 83, "score": 229718.69646855403 }, { "content": "fn run_with_splits(timer: &mut Timer, splits: &[f64]) {\n\n timer.start();\n\n timer.initialize_game_time();\n\n timer.pause_game_time();\n\n\n\n for &split in splits {\n\n timer.set_game_time(TimeSpan::from_seconds(split));\n\n timer.split();\n\n }\n\n\n\n timer.reset(true);\n\n}\n\n\n", "file_path": "src/comparison/tests/balanced_pb.rs", "rank": 84, "score": 229709.46803913685 }, { "content": "fn timer() -> Timer {\n\n let mut timer = Timer::new(run()).unwrap();\n\n\n\n timer.start();\n\n timer.initialize_game_time();\n\n timer.pause_game_time();\n\n\n\n timer\n\n}\n\n\n", "file_path": "src/time/timer/tests.rs", "rank": 85, "score": 229377.95638829673 }, { "content": "fn write_display<W: Write, D: Display>(\n\n writer: &mut Writer<W>,\n\n tag: BytesStart,\n\n value: D,\n\n buf: &mut Vec<u8>,\n\n) -> Result<()> {\n\n text(writer, tag, fmt_buf(value, buf))\n\n}\n\n\n", "file_path": "src/run/saver/livesplit.rs", "rank": 86, "score": 229340.31907687488 }, { "content": "fn read_string<R: Read>(mut source: R, buf: &mut Vec<u8>, max_length: u64) -> Result<&str> {\n\n let str_length = source.read_u16::<BE>()? 
as usize;\n\n if str_length as u64 > max_length {\n\n return Err(Error::LengthOutOfBounds);\n\n }\n\n buf.clear();\n\n buf.reserve(str_length);\n\n unsafe { buf.set_len(str_length) };\n\n source.read_exact(buf)?;\n\n from_utf8(buf).map_err(Into::into)\n\n}\n\n\n", "file_path": "src/run/parser/llanfair.rs", "rank": 87, "score": 227921.92981875216 }, { "content": "fn fix_history_from_best_segment_times(segment: &mut Segment, method: TimingMethod) {\n\n if let Some(best_segment) = segment.best_segment_time()[method] {\n\n for &mut (_, ref mut time) in segment.segment_history_mut().iter_mut() {\n\n // Make sure no times in the history are lower than the Best Segment\n\n if let Some(ref mut time) = time[method] {\n\n if *time < best_segment {\n\n *time = best_segment;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Iterator that iterates over all the comparisons. This includes both the\n\n/// custom comparisons defined by the user and the Comparison Generators.\n\npub struct ComparisonsIter<'a> {\n\n custom: &'a [String],\n\n generators: &'a [Box<ComparisonGenerator>],\n\n}\n\n\n", "file_path": "src/run/run.rs", "rank": 88, "score": 225715.10289107804 }, { "content": "/// Calculates the top and bottom color the Timer Component would use for the\n\n/// gradient of the times it is showing.\n\npub fn top_and_bottom_color(color: Color) -> (Color, Color) {\n\n let hsv: Hsv = color.rgba.into();\n\n\n\n let h = hsv.hue.to_degrees() as f64;\n\n let s = hsv.saturation as f64;\n\n let v = hsv.value as f64;\n\n let a = color.rgba.alpha;\n\n\n\n let top_color = Rgb::from(Hsv::new(h.into(), 0.5 * s, (1.5 * v + 0.1).min(1.0)));\n\n let bottom_color = Rgb::from(Hsv::new(h.into(), s, 0.8 * v));\n\n\n\n let top_color = Color::from((\n\n top_color.red as f32,\n\n top_color.green as f32,\n\n top_color.blue as f32,\n\n a,\n\n ));\n\n\n\n let bottom_color = Color::from((\n\n bottom_color.red as f32,\n\n bottom_color.green as f32,\n\n bottom_color.blue as f32,\n\n a,\n\n ));\n\n\n\n (top_color, bottom_color)\n\n}\n", "file_path": "src/component/timer.rs", "rank": 89, "score": 222373.72037513021 }, { "content": "fn parse_time_optional(time: &str) -> Result<Option<TimeSpan>> {\n\n let time: TimeSpan = time.parse()?;\n\n if time == TimeSpan::zero() {\n\n Ok(None)\n\n } else {\n\n Ok(Some(time))\n\n }\n\n}\n\n\n", "file_path": "src/run/parser/time_split_tracker.rs", "rank": 90, "score": 222194.4762719435 }, { "content": "fn run_with_splits_opt(timer: &mut Timer, splits: &[Option<f64>]) {\n\n timer.start();\n\n timer.initialize_game_time();\n\n timer.pause_game_time();\n\n\n\n for &split in splits {\n\n if let Some(split) = split {\n\n timer.set_game_time(TimeSpan::from_seconds(split));\n\n timer.split();\n\n } else {\n\n timer.skip_split();\n\n }\n\n }\n\n\n\n timer.reset(true);\n\n}\n\n\n", "file_path": "src/comparison/tests/balanced_pb.rs", "rank": 91, "score": 222001.8734055251 }, { "content": "fn generate(segments: &mut [Segment], attempts: &[Attempt], method: TimingMethod) {\n\n for attempt in attempts {\n\n let id = attempt.index();\n\n let mut total_time = TimeSpan::zero();\n\n\n\n for segment in segments.iter_mut() {\n\n if let Some(time) = segment.segment_history().get(id) {\n\n if let Some(time) = time[method] {\n\n total_time += time;\n\n\n\n let comp = &mut segment.comparison_mut(NAME)[method];\n\n if comp.map_or(true, |c| total_time < c) {\n\n *comp = Some(total_time);\n\n }\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n", "file_path": "src/comparison/best_split_times.rs", "rank": 92, "score": 221299.64902858937 
}, { "content": "fn populate_prediction(prediction: &mut Option<TimeSpan>, predicted_time: Option<TimeSpan>) {\n\n if let Some(predicted_time) = predicted_time {\n\n if prediction.map_or(true, |t| predicted_time > t) {\n\n *prediction = Some(predicted_time);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/analysis/sum_of_segments/worst.rs", "rank": 93, "score": 219257.43381856795 }, { "content": "#[test]\n\nfn import_best_segment_with_game_time_usage() {\n\n let mut timer = timer();\n\n\n\n let first = TimeSpan::from_seconds(5.0);\n\n timer.set_game_time(first);\n\n timer.split();\n\n\n\n let run = timer.into_run(true);\n\n let mut editor = Editor::new(run).unwrap();\n\n\n\n editor.select_timing_method(TimingMethod::GameTime);\n\n\n\n editor.select_only(0);\n\n let best = Some(TimeSpan::from_seconds(4.0));\n\n editor.active_segment().set_best_segment_time(best);\n\n\n\n editor.insert_segment_above();\n\n\n\n let history = editor.run().segment(0).segment_history();\n\n // The newly inserted segment's history should have a null time with a\n", "file_path": "src/time/timer/tests.rs", "rank": 94, "score": 214622.71516188356 }, { "content": "/// Helper function for accessing either the given comparison or a Timer's\n\n/// current comparison if the given comparison is `None`.\n\npub fn or_current<'a>(comparison: Option<&'a str>, timer: &'a Timer) -> &'a str {\n\n comparison.unwrap_or_else(|| timer.current_comparison())\n\n}\n\n\n", "file_path": "src/comparison/mod.rs", "rank": 95, "score": 214522.47105585856 }, { "content": "fn calculate_comparison_time(\n\n timer: &Timer,\n\n timing_method: TimingMethod,\n\n comparison: &str,\n\n last_split_index: usize,\n\n) -> Option<TimeSpan> {\n\n if comparison == best_segments::NAME {\n\n timer.run().segment(last_split_index).best_segment_time()[timing_method]\n\n } else if last_split_index == 0 {\n\n timer\n\n .run()\n\n .segment(0)\n\n .comparison_timing_method(comparison, timing_method)\n\n } else if timer.current_split_index() > Some(0) {\n\n Some(\n\n timer\n\n .run()\n\n .segment(last_split_index)\n\n .comparison_timing_method(comparison, timing_method)?\n\n - timer\n\n .run()\n\n .segment(last_split_index - 1)\n\n .comparison_timing_method(comparison, timing_method)?,\n\n )\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/component/detailed_timer.rs", "rank": 96, "score": 213050.3646373459 }, { "content": "fn calculate_segment_time(\n\n timer: &Timer,\n\n timing_method: TimingMethod,\n\n last_split_index: usize,\n\n) -> Option<TimeSpan> {\n\n let last_split = if last_split_index > 0 {\n\n timer.run().segment(last_split_index - 1).split_time()[timing_method]\n\n } else {\n\n Some(TimeSpan::zero())\n\n };\n\n\n\n if timer.current_phase() == TimerPhase::NotRunning {\n\n Some(timer.run().offset())\n\n } else {\n\n Some(timer.current_time()[timing_method]? 
- last_split?)\n\n }\n\n}\n", "file_path": "src/component/detailed_timer.rs", "rank": 97, "score": 213050.3646373459 }, { "content": "#[test]\n\nfn deleting_best_segment_time_clears_segment_history() {\n\n let mut timer = timer();\n\n\n\n let first = TimeSpan::from_seconds(5.0);\n\n timer.set_game_time(first);\n\n timer.split();\n\n\n\n let second = TimeSpan::from_seconds(10.0);\n\n timer.set_game_time(second);\n\n timer.split();\n\n\n\n let third = TimeSpan::from_seconds(15.0);\n\n timer.set_game_time(third);\n\n timer.split();\n\n\n\n let run = timer.into_run(true);\n\n let run2 = run.clone();\n\n\n\n // =============================================\n\n\n", "file_path": "src/time/timer/tests.rs", "rank": 98, "score": 211493.27179287936 }, { "content": "#[test]\n\nfn modifying_best_segment_time_fixes_segment_history() {\n\n let mut timer = timer();\n\n\n\n let first = TimeSpan::from_seconds(5.0);\n\n timer.set_game_time(first);\n\n timer.split();\n\n\n\n let second = TimeSpan::from_seconds(10.0);\n\n timer.set_game_time(second);\n\n timer.split();\n\n\n\n let third = TimeSpan::from_seconds(15.0);\n\n timer.set_game_time(third);\n\n timer.split();\n\n\n\n let run = timer.into_run(true);\n\n let run2 = run.clone();\n\n\n\n // =============================================\n\n\n", "file_path": "src/time/timer/tests.rs", "rank": 99, "score": 211493.27179287936 } ]
Rust
weight-gen/src/main.rs
ImbueNetwork/open-runtime-module-library
c439a50e01944aedeef33231e0824a17ed1813bc
use clap::{App, Arg}; use serde::{Deserialize, Serialize}; use std::io::Read; #[derive(Serialize, Deserialize, Default, Debug, Clone)] pub struct BenchData { pub name: String, pub weight: u64, pub reads: u32, pub writes: u32, pub comments: Vec<String>, } #[derive(Serialize, Default, Debug, Clone)] struct TemplateData { pub header: String, pub benchmarks: Vec<BenchData>, } #[derive(Clone, Copy)] struct UnderscoreHelper; impl handlebars::HelperDef for UnderscoreHelper { fn call<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).expect("Unable to retrieve param from handlebars helper"); let underscore_param = underscore(param.value().render()); out.write(&underscore_param)?; Ok(()) } } fn underscore<Number>(i: Number) -> String where Number: std::string::ToString, { let mut s = String::new(); let i_str = i.to_string(); let a = i_str.chars().rev().enumerate(); for (idx, val) in a { if idx != 0 && idx % 3 == 0 { s.insert(0, '_'); } s.insert(0, val); } s } #[derive(Clone, Copy)] struct JoinHelper; impl handlebars::HelperDef for JoinHelper { fn call<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).expect("Unable to retrieve param from handlebars helper"); let value = param.value(); let joined = if value.is_array() { value .as_array() .unwrap() .iter() .map(|v| v.render()) .collect::<Vec<String>>() .join(" ") } else { value.render() }; out.write(&joined)?; Ok(()) } } fn parse_stdio() -> Option<Vec<BenchData>> { let mut buffer = String::new(); std::io::stdin() .read_to_string(&mut buffer) .expect("Unable to read from stdin"); let file_path = buffer .split_ascii_whitespace() .last() .expect("Last line must be JOSN file path."); let reader = std::fs::File::open(std::path::Path::new(file_path)).unwrap(); serde_json::from_reader(&reader).ok() } fn main() { let matches = App::new("Weight Generator") .version("1.0") .author("Laminar Developers <hello@laminar.one>") .about("Generate rust weight info source file from JSON data generated by ORML bencher") .arg( Arg::with_name("input") .short("i") .long("input") .value_name("PATH") .help("Input JSON data file") .takes_value(true), ) .arg( Arg::with_name("template") .short("t") .long("template") .value_name("PATH") .help("Handlebars template file") .takes_value(true), ) .arg( Arg::with_name("header") .long("header") .value_name("PATH") .help("Header file path") .takes_value(true), ) .arg( Arg::with_name("output") .short("o") .long("output") .value_name("PATH") .help("Output file path") .takes_value(true), ) .get_matches(); let mut benchmarks: Vec<BenchData> = { if let Some(input_path) = matches.value_of("input") { let reader = std::fs::File::open(std::path::Path::new(&input_path.trim())).unwrap(); serde_json::from_reader(&reader).expect("Could not parse JSON data") } else { parse_stdio().expect("Could not parse JSON data") } }; benchmarks.iter_mut().for_each(|x| { x.comments.sort(); }); let mut handlebars = handlebars::Handlebars::new(); handlebars.register_helper("underscore", Box::new(UnderscoreHelper)); handlebars.register_helper("join", Box::new(JoinHelper)); handlebars.register_escape_fn(|s| -> String { s.to_string() }); let header = { if let 
Some(path) = matches.value_of("header") { ::std::fs::read_to_string(&path).expect("Header file not found") } else { String::from("") } }; let hbs_data = TemplateData { header, benchmarks }; const DEFAULT_TEMPLATE: &str = include_str!("./template.hbs"); let template = { if let Some(path) = matches.value_of("template") { ::std::fs::read_to_string(&path).expect("Template file not found") } else { String::from(DEFAULT_TEMPLATE) } }; if let Some(path) = matches.value_of("output") { let mut output_file = ::std::fs::File::create(&path).expect("Could not create output file"); handlebars .render_template_to_write(&template, &hbs_data, &mut output_file) .expect("Unable to render template"); println!(); println!("Weights file `{}` was generated.", path); } else { let template_string = handlebars .render_template(&template, &hbs_data) .expect("Unable to render template"); println!("{}", template_string); } }
use clap::{App, Arg}; use serde::{Deserialize, Serialize}; use std::io::Read; #[derive(Serialize, Deserialize, Default, Debug, Clone)] pub struct BenchData { pub name: String, pub weight: u64, pub reads: u32, pub writes: u32, pub comments: Vec<String>, } #[derive(Serialize, Default, Debug, Clone)] struct TemplateData { pub header: String, pub benchmarks: Vec<BenchData>, } #[derive(Clone, Copy)] struct UnderscoreHelper; impl handlebars::HelperDef for UnderscoreHelper { fn call<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).expect("Unable to retrieve param from handlebars helper"); let underscore_param = underscore(param.value().render()); out.write(&underscore_param)?; Ok(()) } } fn underscore<Number>(i: Number) -> String where Number: std::string::ToString, { let mut s = String::new(); let i_str = i.to_string(); let a = i_str.chars().rev().enumerate(); for (idx, val) in a { if idx != 0 && idx % 3 == 0 { s.insert(0, '_'); } s.insert(0, val); } s } #[derive(Clone, Copy)] struct JoinHelper; impl handlebars::HelperDef for JoinHelper {
} fn parse_stdio() -> Option<Vec<BenchData>> { let mut buffer = String::new(); std::io::stdin() .read_to_string(&mut buffer) .expect("Unable to read from stdin"); let file_path = buffer .split_ascii_whitespace() .last() .expect("Last line must be JOSN file path."); let reader = std::fs::File::open(std::path::Path::new(file_path)).unwrap(); serde_json::from_reader(&reader).ok() } fn main() { let matches = App::new("Weight Generator") .version("1.0") .author("Laminar Developers <hello@laminar.one>") .about("Generate rust weight info source file from JSON data generated by ORML bencher") .arg( Arg::with_name("input") .short("i") .long("input") .value_name("PATH") .help("Input JSON data file") .takes_value(true), ) .arg( Arg::with_name("template") .short("t") .long("template") .value_name("PATH") .help("Handlebars template file") .takes_value(true), ) .arg( Arg::with_name("header") .long("header") .value_name("PATH") .help("Header file path") .takes_value(true), ) .arg( Arg::with_name("output") .short("o") .long("output") .value_name("PATH") .help("Output file path") .takes_value(true), ) .get_matches(); let mut benchmarks: Vec<BenchData> = { if let Some(input_path) = matches.value_of("input") { let reader = std::fs::File::open(std::path::Path::new(&input_path.trim())).unwrap(); serde_json::from_reader(&reader).expect("Could not parse JSON data") } else { parse_stdio().expect("Could not parse JSON data") } }; benchmarks.iter_mut().for_each(|x| { x.comments.sort(); }); let mut handlebars = handlebars::Handlebars::new(); handlebars.register_helper("underscore", Box::new(UnderscoreHelper)); handlebars.register_helper("join", Box::new(JoinHelper)); handlebars.register_escape_fn(|s| -> String { s.to_string() }); let header = { if let Some(path) = matches.value_of("header") { ::std::fs::read_to_string(&path).expect("Header file not found") } else { String::from("") } }; let hbs_data = TemplateData { header, benchmarks }; const DEFAULT_TEMPLATE: &str = include_str!("./template.hbs"); let template = { if let Some(path) = matches.value_of("template") { ::std::fs::read_to_string(&path).expect("Template file not found") } else { String::from(DEFAULT_TEMPLATE) } }; if let Some(path) = matches.value_of("output") { let mut output_file = ::std::fs::File::create(&path).expect("Could not create output file"); handlebars .render_template_to_write(&template, &hbs_data, &mut output_file) .expect("Unable to render template"); println!(); println!("Weights file `{}` was generated.", path); } else { let template_string = handlebars .render_template(&template, &hbs_data) .expect("Unable to render template"); println!("{}", template_string); } }
fn call<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).expect("Unable to retrieve param from handlebars helper"); let value = param.value(); let joined = if value.is_array() { value .as_array() .unwrap() .iter() .map(|v| v.render()) .collect::<Vec<String>>() .join(" ") } else { value.render() }; out.write(&joined)?; Ok(()) }
function_block-full_function
[ { "content": "/// Increment used weight\n\npub fn using(weight: Weight) {\n\n\tMETER.with(|v| {\n\n\t\tlet mut meter = v.borrow_mut();\n\n\t\tmeter.used_weight = meter.used_weight.saturating_add(weight);\n\n\t})\n\n}\n\n\n", "file_path": "weight-meter/src/meter_std.rs", "rank": 0, "score": 271039.88446049247 }, { "content": "pub fn using(weight: Weight) {\n\n\tunsafe {\n\n\t\tMETER.used_weight = METER.used_weight.saturating_add(weight);\n\n\t}\n\n}\n\n\n", "file_path": "weight-meter/src/meter_no_std.rs", "rank": 1, "score": 271029.61506961705 }, { "content": "/// Get used weight\n\npub fn used_weight() -> Weight {\n\n\tMETER.with(|v| v.borrow().used_weight)\n\n}\n", "file_path": "weight-meter/src/meter_std.rs", "rank": 2, "score": 267154.76242525986 }, { "content": "pub fn used_weight() -> Weight {\n\n\tunsafe { METER.used_weight }\n\n}\n", "file_path": "weight-meter/src/meter_no_std.rs", "rank": 3, "score": 267144.4930343845 }, { "content": "/// show error message and debugging info for the case of an error happening\n\n/// during a benchmark\n\npub fn show_benchmark_debug_info(\n\n\tinstance_string: &[u8],\n\n\tbenchmark: &[u8],\n\n\tcomponents: &[(BenchmarkParameter, u32)],\n\n\tverify: &bool,\n\n\terror_message: &str,\n\n) -> sp_runtime::RuntimeString {\n\n\tsp_runtime::format_runtime_string!(\n\n\t\t\"\\n* Pallet: {}\\n\\\n\n\t\t* Benchmark: {}\\n\\\n\n\t\t* Components: {:?}\\n\\\n\n\t\t* Verify: {:?}\\n\\\n\n\t\t* Error message: {}\",\n\n\t\tsp_std::str::from_utf8(instance_string).expect(\"it's all just strings ran through the wasm interface. qed\"),\n\n\t\tsp_std::str::from_utf8(benchmark).expect(\"it's all just strings ran through the wasm interface. qed\"),\n\n\t\tcomponents,\n\n\t\tverify,\n\n\t\terror_message,\n\n\t)\n\n}\n", "file_path": "benchmarking/src/lib.rs", "rank": 5, "score": 236905.27956212504 }, { "content": "/// Start weight meter with base weight\n\npub fn start(weight: Weight) {\n\n\tMETER.with(|v| {\n\n\t\tlet mut meter = v.borrow_mut();\n\n\t\tif meter.depth == 0 {\n\n\t\t\tmeter.used_weight = weight;\n\n\t\t}\n\n\t\tmeter.depth = meter.depth.saturating_add(1);\n\n\t});\n\n}\n\n\n", "file_path": "weight-meter/src/meter_std.rs", "rank": 6, "score": 230779.68355882587 }, { "content": "pub fn start(weight: Weight) {\n\n\tunsafe {\n\n\t\tif METER.depth == 0 {\n\n\t\t\tMETER.used_weight = weight;\n\n\t\t}\n\n\t\tMETER.depth = METER.depth.saturating_add(1);\n\n\t}\n\n}\n\n\n", "file_path": "weight-meter/src/meter_no_std.rs", "rank": 7, "score": 230773.87197564478 }, { "content": "#[allow(dead_code)] // rust cannot detect usage in macro_rules\n\npub fn median<T: Ord + Clone>(mut items: Vec<T>) -> Option<T> {\n\n\tif items.is_empty() {\n\n\t\treturn None;\n\n\t}\n\n\n\n\tlet mid_index = items.len() / 2;\n\n\n\n\t// Won't panic as `items` ensured not empty.\n\n\tlet (_, item, _) = items.select_nth_unstable(mid_index);\n\n\tSome(item.clone())\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
create_median_value_data_provider {\n\n\t($name:ident, $key:ty, $value:ty, $timestamped_value:ty, [$( $provider:ty ),*]) => {\n\n\t\tpub struct $name;\n\n\t\timpl $crate::DataProvider<$key, $value> for $name {\n\n\t\t\tfn get(key: &$key) -> Option<$value> {\n\n\t\t\t\tlet mut values = vec![];\n\n\t\t\t\t$(\n", "file_path": "traits/src/data_provider.rs", "rank": 8, "score": 218269.94725845428 }, { "content": "/// Write to the given `file` if the `content` is different.\n\npub fn write_file_if_changed(file: impl AsRef<Path>, content: impl AsRef<str>) {\n\n\tif fs::read_to_string(file.as_ref()).ok().as_deref() != Some(content.as_ref()) {\n\n\t\tfs::write(file.as_ref(), content.as_ref())\n\n\t\t\t.unwrap_or_else(|_| panic!(\"Writing `{}` can not fail!\", file.as_ref().display()));\n\n\t}\n\n}\n\n\n", "file_path": "bencher/src/build_wasm/prerequisites.rs", "rank": 9, "score": 216649.87503381597 }, { "content": "pub fn run_to_block(n: u64) {\n\n\twhile System::block_number() < n {\n\n\t\tScheduler::on_finalize(System::block_number());\n\n\t\tSystem::set_block_number(System::block_number() + 1);\n\n\t\tScheduler::on_initialize(System::block_number());\n\n\t}\n\n}\n", "file_path": "authority/src/mock.rs", "rank": 10, "score": 210751.88498263224 }, { "content": "pub fn cyan(message: &str) -> String {\n\n\tif crate::build_wasm::color_output_enabled() {\n\n\t\tansi_term::Color::Cyan.paint(message).to_string()\n\n\t} else {\n\n\t\tmessage.into()\n\n\t}\n\n}\n\n\n", "file_path": "bencher/src/colorize.rs", "rank": 11, "score": 202047.52188238877 }, { "content": "pub fn green_bold(message: &str) -> String {\n\n\tif crate::build_wasm::color_output_enabled() {\n\n\t\tansi_term::Color::Green.bold().paint(message).to_string()\n\n\t} else {\n\n\t\tmessage.into()\n\n\t}\n\n}\n", "file_path": "bencher/src/colorize.rs", "rank": 12, "score": 199062.56768721057 }, { "content": "pub fn yellow_bold(message: &str) -> String {\n\n\tif color_output_enabled() {\n\n\t\tansi_term::Color::Yellow.bold().paint(message).to_string()\n\n\t} else {\n\n\t\tmessage.into()\n\n\t}\n\n}\n\n\n", "file_path": "bencher/src/colorize.rs", "rank": 13, "score": 199062.56768721057 }, { "content": "pub fn red_bold(message: &str) -> String {\n\n\tif color_output_enabled() {\n\n\t\tansi_term::Color::Red.bold().paint(message).to_string()\n\n\t} else {\n\n\t\tmessage.into()\n\n\t}\n\n}\n\n\n", "file_path": "bencher/src/colorize.rs", "rank": 14, "score": 199062.56768721057 }, { "content": "pub fn para_ext(para_id: u32) -> TestExternalities {\n\n\tuse para::{Runtime, System};\n\n\n\n\tlet mut t = frame_system::GenesisConfig::default()\n\n\t\t.build_storage::<Runtime>()\n\n\t\t.unwrap();\n\n\n\n\tlet parachain_info_config = parachain_info::GenesisConfig {\n\n\t\tparachain_id: para_id.into(),\n\n\t};\n\n\t<parachain_info::GenesisConfig as GenesisBuild<Runtime, _>>::assimilate_storage(&parachain_info_config, &mut t)\n\n\t\t.unwrap();\n\n\n\n\torml_tokens::GenesisConfig::<Runtime> {\n\n\t\tbalances: vec![(ALICE, CurrencyId::R, 1_000)],\n\n\t}\n\n\t.assimilate_storage(&mut t)\n\n\t.unwrap();\n\n\n\n\tlet mut ext = TestExternalities::new(t);\n\n\text.execute_with(|| System::set_block_number(1));\n\n\text\n\n}\n\n\n", "file_path": "xtokens/src/mock/mod.rs", "rank": 15, "score": 191073.35026456212 }, { "content": "fn get_version(impl_commit: String) -> String {\n\n\tlet commit_dash = if impl_commit.is_empty() { \"\" } else { \"-\" 
};\n\n\n\n\tformat!(\n\n\t\t\"{}{}{}-{}\",\n\n\t\tstd::env::var(\"CARGO_PKG_VERSION\").unwrap_or_default(),\n\n\t\tcommit_dash,\n\n\t\timpl_commit,\n\n\t\tget_platform(),\n\n\t)\n\n}\n\n\n", "file_path": "build-script-utils/src/version.rs", "rank": 16, "score": 190982.13133240168 }, { "content": "/// Checks that all prerequisites are installed.\n\n///\n\n/// Returns the versioned cargo command on success.\n\npub fn check() -> Result<CargoCommandVersioned, String> {\n\n\tlet cargo_command = get_nightly_cargo();\n\n\n\n\tif !cargo_command.is_nightly() {\n\n\t\treturn Err(red_bold(\"Rust nightly not installed, please install it!\"));\n\n\t}\n\n\n\n\tcheck_wasm_toolchain_installed(cargo_command)\n\n}\n\n\n", "file_path": "bencher/src/build_wasm/prerequisites.rs", "rank": 17, "score": 190925.43853921053 }, { "content": "#[proc_macro_attribute]\n\npub fn weight(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n\tlet weight: Expr = parse(attr).unwrap();\n\n\tlet ItemFn { attrs, vis, sig, block } = parse(item).unwrap();\n\n\t(quote! {\n\n\t\t#(#attrs)*\n\n\t\t#[cfg_attr(feature = \"bench\", ::orml_bencher::benchmarkable)]\n\n\t\t#vis #sig {\n\n\t\t\t::orml_weight_meter::using(#weight);\n\n\t\t\t#block\n\n\t\t}\n\n\t})\n\n\t.into()\n\n}\n", "file_path": "weight-meter/weight-meter-procedural/src/lib.rs", "rank": 18, "score": 188712.01409824772 }, { "content": "/// Finish weight meter\n\npub fn finish() {\n\n\tMETER.with(|v| {\n\n\t\tlet mut meter = v.borrow_mut();\n\n\t\tmeter.depth = meter.depth.saturating_sub(1);\n\n\t})\n\n}\n\n\n", "file_path": "weight-meter/src/meter_std.rs", "rank": 19, "score": 187757.469776386 }, { "content": "pub fn finish() {\n\n\tunsafe {\n\n\t\tMETER.depth.checked_sub(1).map_or_else(\n\n\t\t\t|| {\n\n\t\t\t\tdebug_assert!(false);\n\n\t\t\t\t0\n\n\t\t\t},\n\n\t\t\t|v| v,\n\n\t\t);\n\n\t}\n\n}\n\n\n", "file_path": "weight-meter/src/meter_no_std.rs", "rank": 20, "score": 187752.33879763397 }, { "content": "#[test]\n\nfn used_weight_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_100(Origin::signed(100)).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(100), result.actual_weight);\n\n\t\t// Check that the method ran correctly\n\n\t\tassert_eq!(Some(100), TestModule::something());\n\n\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_500(Origin::signed(100)).unwrap();\n\n\t\tassert_eq!(Some(500), result.actual_weight);\n\n\t\tassert_eq!(Some(600), TestModule::something());\n\n\t});\n\n}\n\n\n", "file_path": "weight-meter/src/tests.rs", "rank": 23, "score": 166861.58441777102 }, { "content": "/// Execute the supplied function in a new storage transaction.\n\n///\n\n/// All changes to storage performed by the supplied function are discarded if\n\n/// the returned outcome is `Result::Err`.\n\n///\n\n/// Transactions can be nested to any depth. 
Commits happen to the parent\n\n/// transaction.\n\npub fn with_transaction_result<R>(f: impl FnOnce() -> Result<R, DispatchError>) -> Result<R, DispatchError> {\n\n\twith_transaction(|| {\n\n\t\tlet res = f();\n\n\t\tif res.is_ok() {\n\n\t\t\tTransactionOutcome::Commit(res)\n\n\t\t} else {\n\n\t\t\tTransactionOutcome::Rollback(res)\n\n\t\t}\n\n\t})\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n\n\tuse frame_support::{assert_noop, assert_ok, decl_module, decl_storage};\n\n\tuse sp_io::TestExternalities;\n\n\tuse sp_runtime::{DispatchError, DispatchResult};\n\n\n", "file_path": "utilities/src/lib.rs", "rank": 24, "score": 166831.8425010991 }, { "content": "pub fn new_test_ext() -> sp_io::TestExternalities {\n\n\tExtBuilder::default().build()\n\n}\n", "file_path": "weight-meter/src/mock.rs", "rank": 25, "score": 165056.49572560168 }, { "content": "/// Extract the crate name from the given `Cargo.toml`.\n\nfn get_crate_name(cargo_manifest: &Path) -> String {\n\n\tlet cargo_toml: Table =\n\n\t\ttoml::from_str(&fs::read_to_string(cargo_manifest).expect(\"File exists as checked before; qed\"))\n\n\t\t\t.expect(\"Cargo manifest is a valid toml file; qed\");\n\n\n\n\tlet package = cargo_toml\n\n\t\t.get(\"package\")\n\n\t\t.and_then(|t| t.as_table())\n\n\t\t.expect(\"`package` key exists in valid `Cargo.toml`; qed\");\n\n\n\n\tpackage\n\n\t\t.get(\"name\")\n\n\t\t.and_then(|p| p.as_str())\n\n\t\t.map(ToOwned::to_owned)\n\n\t\t.expect(\"Package name exists; qed\")\n\n}\n\n\n", "file_path": "bencher/src/build_wasm/wasm_project.rs", "rank": 27, "score": 164343.101996643 }, { "content": "#[test]\n\nfn used_weight_branch_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_100_or_200(Origin::signed(100), false).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(100), result.actual_weight);\n\n\t\t// Check that the method ran correctly\n\n\t\tassert_eq!(Some(100), TestModule::something());\n\n\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_100_or_200(Origin::signed(100), true).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(200), result.actual_weight);\n\n\t\t// Check that the method ran correctly\n\n\t\tassert_eq!(Some(300), TestModule::something());\n\n\t});\n\n}\n\n\n", "file_path": "weight-meter/src/tests.rs", "rank": 28, "score": 163933.19277274265 }, { "content": "/// Returns the name for the wasm binary.\n\nfn get_wasm_binary_name(cargo_manifest: &Path) -> String {\n\n\tget_crate_name(cargo_manifest).replace('-', \"_\")\n\n}\n\n\n", "file_path": "bencher/src/build_wasm/wasm_project.rs", "rank": 29, "score": 161972.4481700528 }, { "content": "#[test]\n\nfn used_weight_nested_calls_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet result: PostDispatchInfo = TestModule::nested_inner_methods(Origin::signed(100)).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(300), result.actual_weight);\n\n\t});\n\n}\n\n\n", "file_path": "weight-meter/src/tests.rs", "rank": 30, "score": 161133.49487118755 }, { "content": "#[proc_macro_attribute]\n\npub fn start(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n\tlet weight: Expr = if attr.is_empty() {\n\n\t\tparse((quote! { 0 }).into()).unwrap()\n\n\t} else {\n\n\t\tparse(attr).unwrap()\n\n\t};\n\n\tlet ItemFn { attrs, vis, sig, block } = parse(item).unwrap();\n\n\t(quote! 
{\n\n\t\t#(#attrs)*\n\n\t\t#[cfg_attr(feature = \"bench\", ::orml_bencher::benchmarkable)]\n\n\t\t#vis #sig {\n\n\t\t\t::orml_weight_meter::start(#weight);\n\n\t\t\tlet result = #block;\n\n\t\t\t::orml_weight_meter::finish();\n\n\t\t\tresult\n\n\t\t}\n\n\t})\n\n\t.into()\n\n}\n\n\n", "file_path": "weight-meter/weight-meter-procedural/src/lib.rs", "rank": 31, "score": 156636.62873212754 }, { "content": "/// Copy `src` to `dst` if the `dst` does not exist or is different.\n\npub fn copy_file_if_changed(src: PathBuf, dst: PathBuf) {\n\n\tlet src_file = fs::read_to_string(&src).ok();\n\n\tlet dst_file = fs::read_to_string(&dst).ok();\n\n\n\n\tif src_file != dst_file {\n\n\t\tfs::copy(&src, &dst)\n\n\t\t\t.unwrap_or_else(|_| panic!(\"Copying `{}` to `{}` can not fail; qed\", src.display(), dst.display()));\n\n\t}\n\n}\n\n\n", "file_path": "bencher/src/build_wasm/prerequisites.rs", "rank": 32, "score": 151815.35531811728 }, { "content": "#[proc_macro_attribute]\n\npub fn benchmarkable(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n\tlet syn::ItemFn { attrs, vis, sig, block } = syn::parse(item).unwrap();\n\n\t(quote::quote! {\n\n\t\t#(#attrs)*\n\n\t\t#vis #sig {\n\n\t\t\t#[cfg(not(feature = \"std\"))]\n\n\t\t\t::orml_bencher::bench::before_block();\n\n\t\t\tlet result = #block;\n\n\t\t\t#[cfg(not(feature = \"std\"))]\n\n\t\t\t::orml_bencher::bench::after_block();\n\n\t\t\tresult\n\n\t\t}\n\n\t})\n\n\t.into()\n\n}\n", "file_path": "bencher/bencher-procedural/src/lib.rs", "rank": 33, "score": 148795.04310024698 }, { "content": "fn get_platform() -> String {\n\n\tlet env_dash = if TARGET_ENV.is_some() { \"-\" } else { \"\" };\n\n\n\n\tformat!(\n\n\t\t\"{}-{}{}{}\",\n\n\t\tTARGET_ARCH.as_str(),\n\n\t\tTARGET_OS.as_str(),\n\n\t\tenv_dash,\n\n\t\tTARGET_ENV.map(|x| x.as_str()).unwrap_or(\"\"),\n\n\t)\n\n}\n\n\n", "file_path": "build-script-utils/src/version.rs", "rank": 34, "score": 145328.78825922305 }, { "content": "fn get_commit() -> String {\n\n\tlet commit = std::env::var(\"GIT_COMMIT\").unwrap_or_default();\n\n\tif !commit.is_empty() {\n\n\t\treturn commit;\n\n\t}\n\n\n\n\tlet output = Command::new(\"git\").args(&[\"rev-parse\", \"--short\", \"HEAD\"]).output();\n\n\n\n\tmatch output {\n\n\t\tOk(o) if o.status.success() => String::from_utf8_lossy(&o.stdout).trim().into(),\n\n\t\tOk(o) => {\n\n\t\t\tprintln!(\"cargo:warning=Git command failed with status: {}\", o.status);\n\n\t\t\t\"unknown\".into()\n\n\t\t}\n\n\t\tErr(err) => {\n\n\t\t\tprintln!(\"cargo:warning=Failed to execute git command: {}\", err);\n\n\t\t\t\"unknown\".into()\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "build-script-utils/src/version.rs", "rank": 35, "score": 145328.78825922305 }, { "content": "/// Generate the `cargo:` key output\n\npub fn generate_cargo_keys() {\n\n\tprintln!(\n\n\t\t\"cargo:rustc-env=SUBSTRATE_CLI_IMPL_VERSION={}\",\n\n\t\tget_version(get_commit())\n\n\t)\n\n}\n\n\n", "file_path": "build-script-utils/src/version.rs", "rank": 36, "score": 141700.5605242476 }, { "content": "/// Creates the WASM project, compiles the WASM binary and compacts the WASM\n\n/// binary.\n\n///\n\n/// # Returns\n\n///\n\n/// The path to the compact WASM binary and the bloaty WASM binary.\n\npub fn create_and_compile(\n\n\tproject_cargo_toml: &Path,\n\n\tdefault_rustflags: &str,\n\n\tcargo_cmd: CargoCommandVersioned,\n\n\tfeatures_to_enable: Vec<String>,\n\n\twasm_binary_name: Option<String>,\n\n) -> (Option<WasmBinary>, WasmBinaryBloaty) {\n\n\tlet wasm_workspace_root = get_wasm_workspace_root();\n\n\tlet wasm_workspace = 
wasm_workspace_root.join(\"wbuild\");\n\n\n\n\tlet crate_metadata = crate_metadata(project_cargo_toml);\n\n\n\n\tlet project = create_project(\n\n\t\tproject_cargo_toml,\n\n\t\t&wasm_workspace,\n\n\t\t&crate_metadata,\n\n\t\tcrate_metadata.workspace_root.as_ref(),\n\n\t\tfeatures_to_enable,\n\n\t);\n\n\n", "file_path": "bencher/src/build_wasm/wasm_project.rs", "rank": 37, "score": 141700.5605242476 }, { "content": "/// Returns `true` when color output is enabled.\n\npub fn color_output_enabled() -> bool {\n\n\tstd::env::var(WASM_BUILD_NO_COLOR).is_err()\n\n}\n\n\n", "file_path": "bencher/src/colorize.rs", "rank": 38, "score": 139135.58445202856 }, { "content": "fn whitelist(b: &mut Bencher) {\n\n\tb.whitelist(Bar::<Runtime>::hashed_key_for(1), true, true);\n\n\tb.whitelist(Bar::<Runtime>::hashed_key_for(2), true, false);\n\n\tb.whitelist(Foo::<Runtime>::hashed_key().to_vec(), true, true);\n\n\tb.whitelist(Value::<Runtime>::hashed_key().to_vec(), true, true);\n\n\tb.bench(|| {\n\n\t\tlet _ = Test::set_foo();\n\n\t});\n\n}\n\n\n\nbenches!(whitelist, set_value, set_foo, remove_all_bar);\n", "file_path": "bencher/test/src/benches.rs", "rank": 39, "score": 138923.1394814608 }, { "content": "fn set_foo(b: &mut Bencher) {\n\n\tb.bench(|| {\n\n\t\tlet _ = Test::set_foo();\n\n\t});\n\n}\n\n\n", "file_path": "bencher/test/src/benches.rs", "rank": 40, "score": 136836.19619423497 }, { "content": "fn remove_all_bar(b: &mut Bencher) {\n\n\tBar::<Runtime>::insert(1, 1);\n\n\tb.bench(|| {\n\n\t\tTest::remove_all_bar();\n\n\t});\n\n}\n\n\n", "file_path": "bencher/test/src/benches.rs", "rank": 41, "score": 136836.19619423497 }, { "content": "fn set_value(b: &mut Bencher) {\n\n\tlet result = b.bench(|| Test::set_value(Origin::signed(1), 1));\n\n\tassert_ok!(result);\n\n\tassert_eq!(Test::value(), Some(1 + 1));\n\n}\n\n\n", "file_path": "bencher/test/src/benches.rs", "rank": 42, "score": 136836.19619423497 }, { "content": "/// Returns `true` when color output is enabled.\n\npub fn color_output_enabled() -> bool {\n\n\tstd::env::var(WASM_BUILD_NO_COLOR).is_err()\n\n}\n\n\n", "file_path": "bencher/src/build_wasm/mod.rs", "rank": 43, "score": 135281.54399362538 }, { "content": "/// Weight functions needed for orml_vesting.\n\npub trait WeightInfo {\n\n\tfn vested_transfer() -> Weight;\n\n\tfn claim(i: u32, ) -> Weight;\n\n\tfn update_vesting_schedules(i: u32, ) -> Weight;\n\n}\n\n\n\n/// Default weights.\n\nimpl WeightInfo for () {\n\n\tfn vested_transfer() -> Weight {\n\n\t\t(69_000_000 as Weight)\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(4 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(4 as Weight))\n\n\t}\n\n\tfn claim(i: u32, ) -> Weight {\n\n\t\t(31_747_000 as Weight)\n\n\t\t\t// Standard Error: 4_000\n\n\t\t\t.saturating_add((63_000 as Weight).saturating_mul(i as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(2 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(2 as Weight))\n\n\t}\n\n\tfn update_vesting_schedules(i: u32, ) -> Weight {\n\n\t\t(29_457_000 as Weight)\n\n\t\t\t// Standard Error: 4_000\n\n\t\t\t.saturating_add((117_000 as Weight).saturating_mul(i as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(2 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(3 as Weight))\n\n\t}\n\n}\n", "file_path": "vesting/src/weights.rs", "rank": 45, "score": 131215.3440168333 }, { "content": "/// Weight functions needed for orml_tokens.\n\npub trait WeightInfo {\n\n\tfn transfer() -> Weight;\n\n\tfn transfer_all() -> Weight;\n\n\tfn 
transfer_keep_alive() -> Weight;\n\n\tfn force_transfer() -> Weight;\n\n\tfn set_balance() -> Weight;\n\n}\n\n\n\n/// Default weights.\n\nimpl WeightInfo for () {\n\n\tfn transfer() -> Weight {\n\n\t\t(69_000_000 as Weight)\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(5 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(4 as Weight))\n\n\t}\n\n\tfn transfer_all() -> Weight {\n\n\t\t(69_000_000 as Weight)\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(5 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(4 as Weight))\n\n\t}\n", "file_path": "tokens/src/weights.rs", "rank": 46, "score": 131215.3440168333 }, { "content": "/// Weight functions needed for orml_authority.\n\npub trait WeightInfo {\n\n\tfn dispatch_as() -> Weight;\n\n\tfn schedule_dispatch_without_delay() -> Weight;\n\n\tfn schedule_dispatch_with_delay() -> Weight;\n\n\tfn fast_track_scheduled_dispatch() -> Weight;\n\n\tfn delay_scheduled_dispatch() -> Weight;\n\n\tfn cancel_scheduled_dispatch() -> Weight;\n\n\tfn authorize_call() -> Weight;\n\n\tfn remove_authorized_call() -> Weight;\n\n\tfn trigger_call() -> Weight;\n\n}\n\n\n\n/// Default weights.\n\nimpl WeightInfo for () {\n\n\tfn dispatch_as() -> Weight {\n\n\t\t(12_000_000 as Weight)\n\n\t}\n\n\tfn schedule_dispatch_without_delay() -> Weight {\n\n\t\t(30_000_000 as Weight)\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(3 as Weight))\n", "file_path": "authority/src/weights.rs", "rank": 47, "score": 131215.3440168333 }, { "content": "/// Weight functions needed for orml_oracle.\n\npub trait WeightInfo {\n\n\tfn feed_values(c: u32, ) -> Weight;\n\n\tfn on_finalize() -> Weight;\n\n}\n\n\n\n/// Default weights.\n\nimpl WeightInfo for () {\n\n\tfn feed_values(c: u32, ) -> Weight {\n\n\t\t(16_800_000 as Weight)\n\n\t\t\t// Standard Error: 84_000\n\n\t\t\t.saturating_add((3_600_000 as Weight).saturating_mul(c as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(3 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(1 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes((2 as Weight).saturating_mul(c as Weight)))\n\n\t}\n\n\tfn on_finalize() -> Weight {\n\n\t\t(3_000_000 as Weight)\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(1 as Weight))\n\n\t}\n\n}\n", "file_path": "oracle/src/weights.rs", "rank": 48, "score": 131215.3440168333 }, { "content": "/// Weight functions needed for orml_auction.\n\npub trait WeightInfo {\n\n\tfn bid_collateral_auction() -> Weight;\n\n\tfn on_finalize(c: u32, ) -> Weight;\n\n}\n\n\n\n/// Default weights.\n\nimpl WeightInfo for () {\n\n\tfn bid_collateral_auction() -> Weight {\n\n\t\t(108_000_000 as Weight)\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(8 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(9 as Weight))\n\n\t}\n\n\tfn on_finalize(c: u32, ) -> Weight {\n\n\t\t(9_779_000 as Weight)\n\n\t\t\t// Standard Error: 13_000\n\n\t\t\t.saturating_add((57_962_000 as Weight).saturating_mul(c as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(10 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads((3 as Weight).saturating_mul(c as Weight)))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(7 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes((3 as Weight).saturating_mul(c as Weight)))\n\n\t}\n\n}\n", "file_path": "auction/src/weights.rs", "rank": 49, "score": 131215.3440168333 }, { "content": "/// Weight functions needed for module_currencies.\n\npub trait WeightInfo {\n\n\tfn transfer_non_native_currency() 
-> Weight;\n\n\tfn transfer_native_currency() -> Weight;\n\n\tfn update_balance_non_native_currency() -> Weight;\n\n\tfn update_balance_native_currency_creating() -> Weight;\n\n\tfn update_balance_native_currency_killing() -> Weight;\n\n}\n\n\n\n/// Default weights.\n\nimpl WeightInfo for () {\n\n\tfn transfer_non_native_currency() -> Weight {\n\n\t\t(60_000_000 as Weight)\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(5 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(4 as Weight))\n\n\t}\n\n\tfn transfer_native_currency() -> Weight {\n\n\t\t(60_000_000 as Weight)\n\n\t\t\t.saturating_add(RocksDbWeight::get().reads(3 as Weight))\n\n\t\t\t.saturating_add(RocksDbWeight::get().writes(2 as Weight))\n\n\t}\n", "file_path": "currencies/src/weights.rs", "rank": 50, "score": 131215.3440168333 }, { "content": "// This function basically just builds a genesis storage key/value store\n\n// according to our desired mockup.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n\tlet storage = frame_system::GenesisConfig::default().build_storage::<Test>().unwrap();\n\n\n\n\tlet mut t: sp_io::TestExternalities = storage.into();\n\n\n\n\tt.execute_with(|| {\n\n\t\tTimestamp::set_timestamp(12345);\n\n\t});\n\n\n\n\tt\n\n}\n", "file_path": "oracle/src/mock.rs", "rank": 51, "score": 129559.43940865569 }, { "content": "pub fn relay_ext() -> sp_io::TestExternalities {\n\n\tuse relay::{Runtime, System};\n\n\n\n\tlet mut t = frame_system::GenesisConfig::default()\n\n\t\t.build_storage::<Runtime>()\n\n\t\t.unwrap();\n\n\n\n\tpallet_balances::GenesisConfig::<Runtime> {\n\n\t\tbalances: vec![(ALICE, 1_000)],\n\n\t}\n\n\t.assimilate_storage(&mut t)\n\n\t.unwrap();\n\n\n\n\tlet mut ext = sp_io::TestExternalities::new(t);\n\n\text.execute_with(|| System::set_block_number(1));\n\n\text\n\n}\n", "file_path": "xtokens/src/mock/mod.rs", "rank": 52, "score": 129559.43940865569 }, { "content": "struct Meter {\n\n\tused_weight: Weight,\n\n\t// Depth gets incremented when entering call or a sub-call\n\n\t// This is used to avoid miscalculation during sub-calls\n\n\tdepth: u8,\n\n}\n\n\n\nmod meter_no_std;\n\nmod meter_std;\n\n\n\nextern crate self as orml_weight_meter;\n\n\n\n#[cfg(test)]\n\nmod mock;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\n#[cfg(feature = \"std\")]\n\npub use meter_std::*;\n\n\n\n#[cfg(not(feature = \"std\"))]\n\npub use meter_no_std::*;\n\n\n\n/// Start weight meter\n\npub use weight_meter_procedural::start;\n\n\n\n/// Measure each methods weight\n\npub use weight_meter_procedural::weight;\n", "file_path": "weight-meter/src/lib.rs", "rank": 53, "score": 127798.37103485785 }, { "content": "#[test]\n\nfn benchmarks_macro_works() {\n\n\t// Check benchmark creation for `set_value`.\n\n\tlet selected_benchmark = SelectedBenchmark::set_value;\n\n\n\n\tlet components = <SelectedBenchmark as BenchmarkingSetup<Test>>::components(&selected_benchmark);\n\n\tassert_eq!(components, vec![(BenchmarkParameter::b, 1, 1000)]);\n\n\n\n\tlet closure = <SelectedBenchmark as BenchmarkingSetup<Test>>::instance(\n\n\t\t&selected_benchmark,\n\n\t\t&[(BenchmarkParameter::b, 1)],\n\n\t\ttrue,\n\n\t)\n\n\t.expect(\"failed to create closure\");\n\n\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_eq!(closure(), Ok(()));\n\n\t});\n\n}\n\n\n", "file_path": "benchmarking/src/tests.rs", "rank": 54, "score": 126434.38820962446 }, { "content": "#[test]\n\nfn benchmarks_generate_unit_tests() {\n\n\tnew_test_ext().execute_with(|| 
{\n\n\t\tassert_ok!(Benchmark::test_benchmark_set_value());\n\n\t\tassert_ok!(Benchmark::test_benchmark_other_name());\n\n\t\tassert_ok!(Benchmark::test_benchmark_sort_vector());\n\n\t\tassert_err!(Benchmark::test_benchmark_bad_origin(), \"Bad origin\");\n\n\t\tassert_err!(Benchmark::test_benchmark_bad_verify(), \"You forgot to sort!\");\n\n\t});\n\n}\n", "file_path": "benchmarking/src/tests.rs", "rank": 55, "score": 124488.34197643142 }, { "content": "#[test]\n\nfn benchmarks_macro_verify_works() {\n\n\t// Check postcondition for benchmark `set_value` is valid.\n\n\tlet selected_benchmark = SelectedBenchmark::set_value;\n\n\n\n\tlet closure = <SelectedBenchmark as BenchmarkingSetup<Test>>::instance(\n\n\t\t&selected_benchmark,\n\n\t\t&[(BenchmarkParameter::b, 1)],\n\n\t\ttrue,\n\n\t)\n\n\t.expect(\"failed to create closure\");\n\n\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_ok!(closure());\n\n\t});\n\n\n\n\t// Check postcondition for benchmark `bad_verify` is invalid.\n\n\tlet selected = SelectedBenchmark::bad_verify;\n\n\n\n\tlet closure =\n\n\t\t<SelectedBenchmark as BenchmarkingSetup<Test>>::instance(&selected, &[(BenchmarkParameter::x, 10000)], true)\n\n\t\t\t.expect(\"failed to create closure\");\n\n\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_err!(closure(), \"You forgot to sort!\");\n\n\t});\n\n}\n\n\n", "file_path": "benchmarking/src/tests.rs", "rank": 56, "score": 124488.34197643142 }, { "content": "#[test]\n\nfn benchmarks_macro_rename_works() {\n\n\t// Check benchmark creation for `other_dummy`.\n\n\tlet selected_benchmark = SelectedBenchmark::other_name;\n\n\tlet components = <SelectedBenchmark as BenchmarkingSetup<Test>>::components(&selected_benchmark);\n\n\tassert_eq!(components, vec![(BenchmarkParameter::b, 1, 1000)]);\n\n\n\n\tlet closure = <SelectedBenchmark as BenchmarkingSetup<Test>>::instance(\n\n\t\t&selected_benchmark,\n\n\t\t&[(BenchmarkParameter::b, 1)],\n\n\t\ttrue,\n\n\t)\n\n\t.expect(\"failed to create closure\");\n\n\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tassert_ok!(closure());\n\n\t});\n\n}\n\n\n", "file_path": "benchmarking/src/tests.rs", "rank": 57, "score": 124488.34197643142 }, { "content": "#[test]\n\nfn exceed_max_weight_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet result: PostDispatchInfo = TestModule::expect_max_weight(Origin::signed(100)).unwrap();\n\n\t\t// Check used weight is correct\n\n\t\tassert_eq!(Some(u64::MAX), result.actual_weight);\n\n\t});\n\n}\n\n\n", "file_path": "weight-meter/src/tests.rs", "rank": 58, "score": 122666.08355129437 }, { "content": "#[test]\n\nfn benchmarks_macro_works_for_non_dispatchable() {\n\n\tlet selected_benchmark = SelectedBenchmark::sort_vector;\n\n\n\n\tlet components = <SelectedBenchmark as BenchmarkingSetup<Test>>::components(&selected_benchmark);\n\n\tassert_eq!(components, vec![(BenchmarkParameter::x, 1, 10000)]);\n\n\n\n\tlet closure = <SelectedBenchmark as BenchmarkingSetup<Test>>::instance(\n\n\t\t&selected_benchmark,\n\n\t\t&[(BenchmarkParameter::x, 1)],\n\n\t\ttrue,\n\n\t)\n\n\t.expect(\"failed to create closure\");\n\n\n\n\tassert_eq!(closure(), Ok(()));\n\n}\n\n\n", "file_path": "benchmarking/src/tests.rs", "rank": 59, "score": 122624.12399361366 }, { "content": "#[derive(Serialize, Deserialize, Default, Debug, Clone)]\n\nstruct BenchData {\n\n\tpub name: String,\n\n\tpub weight: u64,\n\n\tpub reads: u32,\n\n\tpub writes: u32,\n\n\tpub comments: Vec<String>,\n\n}\n\n\n", "file_path": "bencher/src/handler.rs", "rank": 60, "score": 122512.82592463333 }, { "content": 
"#[allow(dead_code)]\n\nfn print_events<Runtime: frame_system::Config>(name: &'static str) {\n\n\tprintln!(\"------ {:?} events -------\", name);\n\n\tframe_system::Pallet::<Runtime>::events()\n\n\t\t.iter()\n\n\t\t.for_each(|r| println!(\"> {:?}\", r.event));\n\n}\n\n\n", "file_path": "xtokens/src/tests.rs", "rank": 61, "score": 120132.00251483146 }, { "content": "/// Get a nightly from rustup. If `selected` is `Some(_)`, a `CargoCommand`\n\n/// using the given nightly is returned.\n\nfn get_rustup_nightly(selected: Option<String>) -> Option<CargoCommand> {\n\n\tlet host = format!(\"-{}\", env::var(\"HOST\").expect(\"`HOST` is always set by cargo\"));\n\n\n\n\tlet version = match selected {\n\n\t\tSome(selected) => selected,\n\n\t\tNone => {\n\n\t\t\tlet output = Command::new(\"rustup\")\n\n\t\t\t\t.args(&[\"toolchain\", \"list\"])\n\n\t\t\t\t.output()\n\n\t\t\t\t.ok()?\n\n\t\t\t\t.stdout;\n\n\t\t\tlet lines = output.as_slice().lines();\n\n\n\n\t\t\tlet mut latest_nightly = None;\n\n\t\t\tfor line in lines.filter_map(|l| l.ok()) {\n\n\t\t\t\tif line.starts_with(\"nightly-\") && line.ends_with(&host) {\n\n\t\t\t\t\t// Rustup prints them sorted\n\n\t\t\t\t\tlatest_nightly = Some(line.clone());\n\n\t\t\t\t}\n\n\t\t\t}\n", "file_path": "bencher/src/build_wasm/prerequisites.rs", "rank": 62, "score": 119495.59101795532 }, { "content": "pub fn build() -> std::io::Result<Vec<u8>> {\n\n\tlet manifest_dir = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n\tlet pkg_name = std::env::var(\"CARGO_PKG_NAME\").unwrap();\n\n\n\n\tlet random = thread_rng()\n\n\t\t.sample_iter(&Alphanumeric)\n\n\t\t.take(16)\n\n\t\t.map(char::from)\n\n\t\t.collect::<String>();\n\n\n\n\tlet mut out_dir = std::path::PathBuf::from(manifest_dir);\n\n\tout_dir.push(format!(\"target/release/build/{}-{}/out\", pkg_name, random));\n\n\n\n\tstd::env::set_var(\"OUT_DIR\", out_dir.display().to_string());\n\n\n\n\tlet mut project_cargo_toml = std::env::current_dir()?;\n\n\tproject_cargo_toml.push(\"Cargo.toml\");\n\n\n\n\tlet default_rustflags = \"-Clink-arg=--export=__heap_base -C link-arg=--import-memory\";\n\n\tlet cargo_cmd = match prerequisites::check() {\n", "file_path": "bencher/src/build_wasm/mod.rs", "rank": 63, "score": 118692.82742878665 }, { "content": "\tpub trait WeightInfo {\n\n\t\tfn gradually_update() -> Weight;\n\n\t\tfn cancel_gradually_update() -> Weight;\n\n\t\tfn on_finalize(u: u32) -> Weight;\n\n\t}\n\n\n\n\tpub(crate) type StorageKeyBytes<T> = BoundedVec<u8, <T as Config>::MaxStorageKeyBytes>;\n\n\tpub(crate) type StorageValueBytes<T> = BoundedVec<u8, <T as Config>::MaxStorageValueBytes>;\n\n\n\n\ttype GraduallyUpdateOf<T> = GraduallyUpdate<StorageKeyBytes<T>, StorageValueBytes<T>>;\n\n\n", "file_path": "gradually-update/src/lib.rs", "rank": 64, "score": 118428.72705117597 }, { "content": "#[test]\n\nfn should_read_raw_values() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet key: u32 = 50;\n\n\n\n\t\tlet raw_values = ModuleOracle::read_raw_values(&key);\n\n\t\tassert_eq!(raw_values, vec![]);\n\n\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(1), vec![(key, 1000)]));\n\n\t\tassert_ok!(ModuleOracle::feed_values(Origin::signed(2), vec![(key, 1200)]));\n\n\n\n\t\tlet raw_values = ModuleOracle::read_raw_values(&key);\n\n\t\tassert_eq!(\n\n\t\t\traw_values,\n\n\t\t\tvec![\n\n\t\t\t\tTimestampedValue {\n\n\t\t\t\t\tvalue: 1000,\n\n\t\t\t\t\ttimestamp: 12345,\n\n\t\t\t\t},\n\n\t\t\t\tTimestampedValue {\n\n\t\t\t\t\tvalue: 1200,\n\n\t\t\t\t\ttimestamp: 
12345,\n\n\t\t\t\t},\n\n\t\t\t]\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "oracle/src/tests.rs", "rank": 66, "score": 116287.59360963773 }, { "content": "#[test]\n\nfn u32_should_work() {\n\n\tExtBuilder::default().build().execute_with(|| {\n\n\t\tlet update: GraduallyUpdate<StorageKeyBytes<Runtime>, StorageValueBytes<Runtime>> = GraduallyUpdate {\n\n\t\t\tkey: vec![1].try_into().unwrap(),\n\n\t\t\ttarget_value: 30u32.encode().try_into().unwrap(),\n\n\t\t\tper_block: 1u32.encode().try_into().unwrap(),\n\n\t\t};\n\n\t\tassert_ok!(GraduallyUpdateModule::gradually_update(Origin::root(), update.clone()));\n\n\t\tassert_eq!(storage_get(&update.key), Vec::<u8>::new());\n\n\t\tGraduallyUpdateModule::on_finalize(10);\n\n\t\tassert_eq!(storage_get(&update.key), vec![10, 0, 0, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(15);\n\n\t\tassert_eq!(storage_get(&update.key), vec![10, 0, 0, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(20);\n\n\t\tassert_eq!(storage_get(&update.key), vec![20, 0, 0, 0]);\n\n\t\tGraduallyUpdateModule::on_finalize(40);\n\n\t\tassert_eq!(storage_get(&update.key), vec![30, 0, 0, 0]);\n\n\t});\n\n}\n\n\n", "file_path": "gradually-update/src/tests.rs", "rank": 67, "score": 116267.77559181396 }, { "content": "#[test]\n\nfn cannot_use_fund_if_not_claimed() {\n\n\tExtBuilder::build().execute_with(|| {\n\n\t\tlet schedule = VestingSchedule {\n\n\t\t\tstart: 10u64,\n\n\t\t\tperiod: 10u64,\n\n\t\t\tperiod_count: 1u32,\n\n\t\t\tper_period: 50u64,\n\n\t\t};\n\n\t\tassert_ok!(Vesting::vested_transfer(Origin::signed(ALICE), BOB, schedule));\n\n\t\tassert!(PalletBalances::ensure_can_withdraw(&BOB, 1, WithdrawReasons::TRANSFER, 49).is_err());\n\n\t});\n\n}\n\n\n", "file_path": "vesting/src/tests.rs", "rank": 68, "score": 116253.57174853024 }, { "content": "/// Handle bench results\n\npub fn handle(output: Vec<u8>, storage_infos: Vec<StorageInfo>) {\n\n\tprintln!();\n\n\n\n\tlet pkg_name = std::env::var(\"CARGO_PKG_NAME\").unwrap_or_default().replace(\"-\", \"_\");\n\n\n\n\tlet results = <Vec<BenchResult> as Decode>::decode(&mut &output[..]).unwrap();\n\n\tlet data: Vec<BenchData> = results\n\n\t\t.into_iter()\n\n\t\t.map(|result| {\n\n\t\t\tlet name = String::from_utf8_lossy(&result.method).to_string();\n\n\n\n\t\t\tlet y: Vec<f64> = result.elapses.into_iter().map(|x| x as f64).collect();\n\n\t\t\tlet x: Vec<f64> = (0..y.len()).into_iter().map(|x| x as f64).collect();\n\n\t\t\tlet data = vec![(\"Y\", y), (\"X\", x)];\n\n\t\t\tlet data = RegressionDataBuilder::new().build_from(data).unwrap();\n\n\t\t\tlet formula = \"Y ~ X\";\n\n\n\n\t\t\tlet model = FormulaRegressionBuilder::new()\n\n\t\t\t\t.data(&data)\n\n\t\t\t\t.formula(formula)\n", "file_path": "bencher/src/handler.rs", "rank": 69, "score": 114652.6499405615 }, { "content": "pub trait ConvertBalance<A, B> {\n\n\ttype AssetId;\n\n\tfn convert_balance(amount: A, asset_id: Self::AssetId) -> B;\n\n\tfn convert_balance_back(amount: B, asset_id: Self::AssetId) -> A;\n\n}\n\n\n\npub struct Mapper<AccountId, T, C, B, GetCurrencyId>(sp_std::marker::PhantomData<(AccountId, T, C, B, GetCurrencyId)>);\n\nimpl<AccountId, T, C, B, GetCurrencyId> fungible::Inspect<AccountId> for Mapper<AccountId, T, C, B, GetCurrencyId>\n\nwhere\n\n\tT: fungibles::Inspect<AccountId>,\n\n\tC: ConvertBalance<\n\n\t\t<T as fungibles::Inspect<AccountId>>::Balance,\n\n\t\tB,\n\n\t\tAssetId = <T as fungibles::Inspect<AccountId>>::AssetId,\n\n\t>,\n\n\tB: BalanceT,\n\n\tGetCurrencyId: Get<<T as fungibles::Inspect<AccountId>>::AssetId>,\n\n{\n\n\ttype Balance = 
B;\n\n\n", "file_path": "tokens/src/impls.rs", "rank": 70, "score": 114093.92202100357 }, { "content": "#[test]\n\nfn transfer_to_self_chain_fails() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tassert_noop!(\n\n\t\t\tParaXTokens::transfer_multiasset(\n\n\t\t\t\tSome(ALICE).into(),\n\n\t\t\t\tBox::new(MultiAsset::sibling_parachain_asset(1, \"A\".into(), 100).into()),\n\n\t\t\t\tBox::new(\n\n\t\t\t\t\tMultiLocation::new(\n\n\t\t\t\t\t\t1,\n\n\t\t\t\t\t\tX2(\n\n\t\t\t\t\t\t\tParachain(1),\n\n\t\t\t\t\t\t\tJunction::AccountId32 {\n\n\t\t\t\t\t\t\t\tnetwork: NetworkId::Any,\n\n\t\t\t\t\t\t\t\tid: BOB.into()\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t)\n\n\t\t\t\t\t)\n\n\t\t\t\t\t.into()\n\n\t\t\t\t),\n\n\t\t\t\t50,\n\n\t\t\t),\n\n\t\t\tError::<para::Runtime>::NotCrossChainTransfer\n\n\t\t);\n\n\t});\n\n}\n\n\n", "file_path": "xtokens/src/tests.rs", "rank": 71, "score": 114070.42176122025 }, { "content": "fn check_wasm_toolchain_installed(cargo_command: CargoCommand) -> Result<CargoCommandVersioned, String> {\n\n\tlet temp = tempdir().expect(\"Creating temp dir does not fail; qed\");\n\n\tfs::create_dir_all(temp.path().join(\"src\")).expect(\"Creating src dir does not fail; qed\");\n\n\tcreate_check_toolchain_project(temp.path());\n\n\n\n\tlet err_msg = red_bold(\"Rust WASM toolchain not installed, please install it!\");\n\n\tlet manifest_path = temp.path().join(\"Cargo.toml\").display().to_string();\n\n\n\n\tlet mut build_cmd = cargo_command.command();\n\n\tbuild_cmd.args(&[\n\n\t\t\"build\",\n\n\t\t\"--target=wasm32-unknown-unknown\",\n\n\t\t\"--manifest-path\",\n\n\t\t&manifest_path,\n\n\t]);\n\n\n\n\tif color_output_enabled() {\n\n\t\tbuild_cmd.arg(\"--color=always\");\n\n\t}\n\n\n", "file_path": "bencher/src/build_wasm/prerequisites.rs", "rank": 72, "score": 113841.62670862154 }, { "content": "#[test]\n\nfn send_self_parachain_asset_to_sibling() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tassert_ok!(ParaTokens::deposit(CurrencyId::A, &ALICE, 1_000));\n\n\n\n\t\tassert_ok!(ParaXTokens::transfer(\n\n\t\t\tSome(ALICE).into(),\n\n\t\t\tCurrencyId::A,\n\n\t\t\t500,\n\n\t\t\tBox::new(\n\n\t\t\t\tMultiLocation::new(\n\n\t\t\t\t\t1,\n\n\t\t\t\t\tX2(\n\n\t\t\t\t\t\tParachain(2),\n\n\t\t\t\t\t\tJunction::AccountId32 {\n\n\t\t\t\t\t\t\tnetwork: NetworkId::Any,\n\n\t\t\t\t\t\t\tid: BOB.into(),\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t)\n", "file_path": "xtokens/src/tests.rs", "rank": 73, "score": 111983.47847399443 }, { "content": "\t#[pallet::config]\n\n\tpub trait Config: frame_system::Config {}\n\n\n\n\t#[pallet::storage]\n\n\t#[pallet::getter(fn value)]\n\n\tpub(crate) type Value<T: Config> = StorageValue<_, u32, OptionQuery>;\n\n\n\n\t#[pallet::pallet]\n\n\t#[pallet::generate_store(pub(super) trait Store)]\n\n\tpub struct Pallet<T>(_);\n\n\n\n\t#[pallet::call]\n\n\timpl<T: Config> Pallet<T> {\n\n\t\t#[pallet::weight(0)]\n\n\t\tpub fn set_value(origin: OriginFor<T>, n: u32) -> DispatchResult {\n\n\t\t\tlet _sender = frame_system::ensure_signed(origin)?;\n\n\t\t\tValue::<T>::put(n);\n\n\t\t\tOk(())\n\n\t\t}\n\n\n\n\t\t#[pallet::weight(0)]\n\n\t\tpub fn dummy(origin: OriginFor<T>, _n: u32) -> DispatchResult {\n\n\t\t\tlet _sender = frame_system::ensure_none(origin)?;\n\n\t\t\tOk(())\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "benchmarking/src/tests.rs", "rank": 74, "score": 111655.84687058257 }, { "content": "#[test]\n\nfn nested_module_calls_works() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\tlet result = 
TestModule::nested_extrinsic(Origin::signed(0)).unwrap();\n\n\t\tassert_eq!(result.actual_weight, Some(700));\n\n\t});\n\n}\n", "file_path": "weight-meter/src/tests.rs", "rank": 75, "score": 111649.17990996421 }, { "content": "/// Abstraction over a simple auction system.\n\npub trait Auction<AccountId, BlockNumber> {\n\n\t/// The id of an AuctionInfo\n\n\ttype AuctionId: FullCodec + Default + Copy + Eq + PartialEq + MaybeSerializeDeserialize + Bounded + Debug;\n\n\t/// The price to bid.\n\n\ttype Balance: AtLeast32Bit + FullCodec + Copy + MaybeSerializeDeserialize + Debug + Default;\n\n\n\n\t/// The auction info of `id`\n\n\tfn auction_info(id: Self::AuctionId) -> Option<AuctionInfo<AccountId, Self::Balance, BlockNumber>>;\n\n\t/// Update the auction info of `id` with `info`\n\n\tfn update_auction(id: Self::AuctionId, info: AuctionInfo<AccountId, Self::Balance, BlockNumber>) -> DispatchResult;\n\n\t/// Create new auction with specific startblock and endblock, return the id\n\n\t/// of the auction\n\n\tfn new_auction(start: BlockNumber, end: Option<BlockNumber>) -> result::Result<Self::AuctionId, DispatchError>;\n\n\t/// Remove auction by `id`\n\n\tfn remove_auction(id: Self::AuctionId);\n\n}\n\n\n\n/// The result of bid handling.\n\npub struct OnNewBidResult<BlockNumber> {\n\n\t/// Indicates if the bid was accepted\n\n\tpub accept_bid: bool,\n\n\t/// The auction end change.\n\n\tpub auction_end_change: Change<Option<BlockNumber>>,\n\n}\n\n\n", "file_path": "traits/src/auction.rs", "rank": 76, "score": 110010.21875914172 }, { "content": "#[test]\n\nfn send_self_parachain_asset_to_sibling_with_fee() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tassert_ok!(ParaTokens::deposit(CurrencyId::A, &ALICE, 1_000));\n\n\n\n\t\tassert_ok!(ParaXTokens::transfer_with_fee(\n\n\t\t\tSome(ALICE).into(),\n\n\t\t\tCurrencyId::A,\n\n\t\t\t450,\n\n\t\t\t50,\n\n\t\t\tBox::new(\n\n\t\t\t\tMultiLocation::new(\n\n\t\t\t\t\t1,\n\n\t\t\t\t\tX2(\n\n\t\t\t\t\t\tParachain(2),\n\n\t\t\t\t\t\tJunction::AccountId32 {\n\n\t\t\t\t\t\t\tnetwork: NetworkId::Any,\n\n\t\t\t\t\t\t\tid: BOB.into(),\n\n\t\t\t\t\t\t}\n", "file_path": "xtokens/src/tests.rs", "rank": 77, "score": 109998.8279710457 }, { "content": "\t#[pallet::config]\n\n\tpub trait Config: frame_system::Config {}\n\n\n\n\t#[pallet::pallet]\n\n\t#[pallet::generate_store(pub(super) trait Store)]\n\n\tpub struct Pallet<T>(PhantomData<T>);\n\n\n\n\t#[pallet::hooks]\n\n\timpl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {}\n\n\n\n\t#[pallet::storage]\n\n\t#[pallet::getter(fn something)]\n\n\tpub type Something<T> = StorageValue<_, u32>;\n\n\n\n\t#[pallet::call]\n\n\timpl<T: Config> Pallet<T> {\n\n\t\t#[pallet::weight(50_000)]\n\n\t\t#[orml_weight_meter::start]\n\n\t\tpub fn expect_100(origin: OriginFor<T>) -> DispatchResultWithPostInfo {\n\n\t\t\tensure_signed(origin)?;\n\n\n", "file_path": "weight-meter/src/mock.rs", "rank": 78, "score": 109684.11311801069 }, { "content": "#[test]\n\nfn currency_adapter_lock_block_number_extension_should_work() {\n\n\tExtBuilder::default()\n\n\t\t.balances(vec![(TREASURY_ACCOUNT, DOT, 100)])\n\n\t\t.build()\n\n\t\t.execute_with(|| {\n\n\t\t\tTreasuryCurrencyAdapter::set_lock(ID_1, &TREASURY_ACCOUNT, 200, WithdrawReasons::all());\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTreasuryCurrencyAdapter::transfer(&TREASURY_ACCOUNT, &ALICE, 6, ExistenceRequirement::AllowDeath),\n\n\t\t\t\tError::<Runtime>::LiquidityRestrictions\n\n\t\t\t);\n\n\t\t\tTreasuryCurrencyAdapter::extend_lock(ID_1, &TREASURY_ACCOUNT, 90, 
WithdrawReasons::all());\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTreasuryCurrencyAdapter::transfer(&TREASURY_ACCOUNT, &ALICE, 6, ExistenceRequirement::AllowDeath),\n\n\t\t\t\tError::<Runtime>::LiquidityRestrictions\n\n\t\t\t);\n\n\t\t\tSystem::set_block_number(2);\n\n\t\t\tTreasuryCurrencyAdapter::extend_lock(ID_1, &TREASURY_ACCOUNT, 90, WithdrawReasons::all());\n\n\t\t\tassert_noop!(\n\n\t\t\t\tTreasuryCurrencyAdapter::transfer(&TREASURY_ACCOUNT, &ALICE, 3, ExistenceRequirement::AllowDeath),\n\n\t\t\t\tError::<Runtime>::LiquidityRestrictions\n\n\t\t\t);\n\n\t\t});\n\n}\n\n\n", "file_path": "tokens/src/tests.rs", "rank": 79, "score": 108109.12925039255 }, { "content": "#[test]\n\nfn send_self_parachain_asset_to_sibling_with_distinct_fee() {\n\n\tTestNet::reset();\n\n\n\n\tParaA::execute_with(|| {\n\n\t\tassert_ok!(ParaTokens::deposit(CurrencyId::A, &ALICE, 1_000));\n\n\t\tassert_ok!(ParaTokens::deposit(CurrencyId::A1, &ALICE, 1_000));\n\n\n\n\t\tassert_ok!(ParaXTokens::transfer_multicurrencies(\n\n\t\t\tSome(ALICE).into(),\n\n\t\t\tvec![(CurrencyId::A, 450), (CurrencyId::A1, 50)],\n\n\t\t\t1,\n\n\t\t\tBox::new(\n\n\t\t\t\tMultiLocation::new(\n\n\t\t\t\t\t1,\n\n\t\t\t\t\tX2(\n\n\t\t\t\t\t\tParachain(2),\n\n\t\t\t\t\t\tJunction::AccountId32 {\n\n\t\t\t\t\t\t\tnetwork: NetworkId::Any,\n\n\t\t\t\t\t\t\tid: BOB.into(),\n\n\t\t\t\t\t\t}\n", "file_path": "xtokens/src/tests.rs", "rank": 80, "score": 108109.12925039255 }, { "content": "#[test]\n\nfn send_self_parachain_asset_to_sibling_relative_parachain() {\n\n\tTestNet::reset();\n\n\n\n\tParaD::execute_with(|| {\n\n\t\tassert_ok!(ParaRelativeTokens::deposit(CurrencyId::D, &ALICE, 1_000));\n\n\n\n\t\tassert_ok!(ParaRelativeXTokens::transfer(\n\n\t\t\tSome(ALICE).into(),\n\n\t\t\tCurrencyId::D,\n\n\t\t\t500,\n\n\t\t\tBox::new(\n\n\t\t\t\tMultiLocation::new(\n\n\t\t\t\t\t1,\n\n\t\t\t\t\tX2(\n\n\t\t\t\t\t\tParachain(2),\n\n\t\t\t\t\t\tJunction::AccountId32 {\n\n\t\t\t\t\t\t\tnetwork: NetworkId::Any,\n\n\t\t\t\t\t\t\tid: BOB.into(),\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t)\n", "file_path": "xtokens/src/tests.rs", "rank": 81, "score": 108109.12925039255 }, { "content": "/// Get a list of enabled features for the project.\n\nfn project_enabled_features(cargo_manifest: &Path, crate_metadata: &cargo_metadata::Metadata) -> Vec<String> {\n\n\tlet package = find_package_by_manifest_path(cargo_manifest, crate_metadata);\n\n\n\n\tlet mut enabled_features = package\n\n\t\t.features\n\n\t\t.keys()\n\n\t\t.filter(|f| {\n\n\t\t\tlet mut feature_env = f.replace(\"-\", \"_\");\n\n\t\t\tfeature_env.make_ascii_uppercase();\n\n\n\n\t\t\t// We don't want to enable the `std`/`default` feature for the wasm build and\n\n\t\t\t// we need to check if the feature is enabled by checking the env variable.\n\n\t\t\t*f != \"std\"\n\n\t\t\t\t&& *f != \"default\"\n\n\t\t\t\t&& env::var(format!(\"CARGO_FEATURE_{}\", feature_env))\n\n\t\t\t\t\t.map(|v| v == \"1\")\n\n\t\t\t\t\t.unwrap_or_default()\n\n\t\t})\n\n\t\t.cloned()\n\n\t\t.collect::<Vec<_>>();\n\n\n\n\tenabled_features.sort();\n\n\tenabled_features\n\n}\n\n\n", "file_path": "bencher/src/build_wasm/wasm_project.rs", "rank": 82, "score": 107368.32542221845 }, { "content": "/// Config for orml-authority\n\npub trait AuthorityConfig<Origin, PalletsOrigin, BlockNumber> {\n\n\t/// Check if the `origin` is allowed to schedule a dispatchable call\n\n\t/// with a given `priority`.\n\n\tfn check_schedule_dispatch(origin: Origin, priority: Priority) -> DispatchResult;\n\n\t/// Check if the `origin` is allow to fast track a scheduled task 
that\n\n\t/// initially created by `initial_origin`. `new_delay` is number of\n\n\t/// blocks this dispatchable will be dispatched from now after fast\n\n\t/// track.\n\n\tfn check_fast_track_schedule(\n\n\t\torigin: Origin,\n\n\t\tinitial_origin: &PalletsOrigin,\n\n\t\tnew_delay: BlockNumber,\n\n\t) -> DispatchResult;\n\n\t/// Check if the `origin` is allow to delay a scheduled task that\n\n\t/// initially created by `inital_origin`.\n\n\tfn check_delay_schedule(origin: Origin, initial_origin: &PalletsOrigin) -> DispatchResult;\n\n\t/// Check if the `origin` is allow to cancel a scheduled task that\n\n\t/// initially created by `inital_origin`.\n\n\tfn check_cancel_schedule(origin: Origin, initial_origin: &PalletsOrigin) -> DispatchResult;\n\n}\n\n\n", "file_path": "authority/src/lib.rs", "rank": 83, "score": 104387.37520700764 }, { "content": "// This function basically just builds a genesis storage key/value store\n\n// according to our desired mockup.\n\nfn new_test_ext() -> sp_io::TestExternalities {\n\n\tframe_system::GenesisConfig::default()\n\n\t\t.build_storage::<Test>()\n\n\t\t.unwrap()\n\n\t\t.into()\n\n}\n\n\n\nruntime_benchmarks! {\n\n\t{ Test, test }\n\n\n\n\tset_value {\n\n\t\tlet b in 1 .. 1000;\n\n\t\tlet caller = account::<AccountId>(\"caller\", 0, 0);\n\n\t}: _ (RawOrigin::Signed(caller), b)\n\n\tverify {\n\n\t\tassert_eq!(Pallet::value(), Some(b));\n\n\t}\n\n\n\n\tother_name {\n\n\t\tlet b in 1 .. 1000;\n", "file_path": "benchmarking/src/tests.rs", "rank": 84, "score": 103300.13934598942 }, { "content": "/// Hooks for auction to handle bids.\n\npub trait AuctionHandler<AccountId, Balance, BlockNumber, AuctionId> {\n\n\t/// Called when new bid is received.\n\n\t/// The return value determines if the bid should be accepted and update\n\n\t/// auction end time. Implementation should reserve money from current\n\n\t/// winner and refund previous winner.\n\n\tfn on_new_bid(\n\n\t\tnow: BlockNumber,\n\n\t\tid: AuctionId,\n\n\t\tnew_bid: (AccountId, Balance),\n\n\t\tlast_bid: Option<(AccountId, Balance)>,\n\n\t) -> OnNewBidResult<BlockNumber>;\n\n\t/// End an auction with `winner`\n\n\tfn on_auction_ended(id: AuctionId, winner: Option<(AccountId, Balance)>);\n\n}\n", "file_path": "traits/src/auction.rs", "rank": 85, "score": 99321.07766204025 }, { "content": "//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 2.0.0\n\n\n\n#![allow(unused_parens)]\n\n#![allow(unused_imports)]\n\n#![allow(clippy::unnecessary_cast)]\n\n\n\nuse frame_support::weights::{constants::RocksDbWeight as DbWeight, Weight};\n\n\n\nimpl crate::WeightInfo for () {\n\n\tfn vested_transfer() -> Weight {\n\n\t\t(310_862_000 as Weight)\n\n\t\t\t.saturating_add(DbWeight::get().reads(4 as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().writes(4 as Weight))\n\n\t}\n\n\tfn claim(i: u32) -> Weight {\n\n\t\t(158_614_000 as Weight)\n\n\t\t\t.saturating_add((958_000 as Weight).saturating_mul(i as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().reads(3 as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().writes(3 as Weight))\n\n\t}\n\n\tfn update_vesting_schedules(i: u32) -> Weight {\n\n\t\t(119_811_000 as Weight)\n\n\t\t\t.saturating_add((2_320_000 as Weight).saturating_mul(i as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().reads(2 as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().writes(3 as Weight))\n\n\t}\n\n}\n", "file_path": "vesting/src/default_weight.rs", "rank": 86, "score": 91430.84895589715 }, { "content": "//! 
THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 2.0.0\n\n\n\n#![allow(unused_parens)]\n\n#![allow(unused_imports)]\n\n#![allow(clippy::unnecessary_cast)]\n\n\n\nuse frame_support::weights::{constants::RocksDbWeight as DbWeight, Weight};\n\n\n\nimpl crate::WeightInfo for () {\n\n\tfn transfer_non_native_currency() -> Weight {\n\n\t\t(172_011_000 as Weight)\n\n\t\t\t.saturating_add(DbWeight::get().reads(5 as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().writes(2 as Weight))\n\n\t}\n\n\tfn transfer_native_currency() -> Weight {\n\n\t\t(43_023_000 as Weight)\n\n\t}\n\n\tfn update_balance_non_native_currency() -> Weight {\n\n\t\t(137_440_000 as Weight)\n\n\t\t\t.saturating_add(DbWeight::get().reads(5 as Weight))\n", "file_path": "currencies/src/default_weight.rs", "rank": 87, "score": 91426.81643679763 }, { "content": "\t\t\t.saturating_add(DbWeight::get().writes(2 as Weight))\n\n\t}\n\n\tfn update_balance_native_currency_creating() -> Weight {\n\n\t\t(64_432_000 as Weight)\n\n\t}\n\n\tfn update_balance_native_currency_killing() -> Weight {\n\n\t\t(62_595_000 as Weight)\n\n\t}\n\n}\n", "file_path": "currencies/src/default_weight.rs", "rank": 88, "score": 91414.02397980605 }, { "content": "/// Run benches\n\npub fn run<B: Block>(wasm_code: Vec<u8>) -> std::result::Result<Vec<u8>, sc_executor_common::error::Error> {\n\n\tlet mut overlay = OverlayedChanges::default();\n\n\tlet mut cache = StorageTransactionCache::default();\n\n\tlet state = sc_client_db::BenchmarkingState::<B>::new(Default::default(), Default::default(), false, true).unwrap();\n\n\n\n\tlet tracker = Arc::new(BenchTracker::new());\n\n\tlet tracker_ext = BenchTrackerExt(Arc::clone(&tracker));\n\n\n\n\tlet mut extensions = Extensions::default();\n\n\textensions.register(tracker_ext);\n\n\n\n\tlet ext = Ext::<_, _>::new(&mut overlay, &mut cache, &state, Some(&mut extensions));\n\n\tlet mut bench_ext = BenchExt::new(ext, tracker);\n\n\n\n\tlet executor =\n\n\t\tWasmExecutor::<ComposeHostFunctions>::new(WasmExecutionMethod::Compiled, Default::default(), 1, None, 1);\n\n\n\n\tlet blob = RuntimeBlob::uncompress_if_needed(&wasm_code[..]).unwrap();\n\n\n\n\texecutor.uncached_call(blob, &mut bench_ext, false, \"run_benches\", &[])\n\n}\n", "file_path": "bencher/src/bench_runner.rs", "rank": 89, "score": 91038.68668796941 }, { "content": "/// Copy the WASM binary to the target directory set in `WASM_TARGET_DIRECTORY`\n\n/// environment variable. If the variable is not set, this is a no-op.\n\nfn copy_wasm_to_target_directory(cargo_manifest: &Path, wasm_binary: &WasmBinary) {\n\n\tlet target_dir = match env::var(WASM_TARGET_DIRECTORY) {\n\n\t\tOk(path) => PathBuf::from(path),\n\n\t\tErr(_) => return,\n\n\t};\n\n\n\n\tassert!(\n\n\t\ttarget_dir.is_absolute(),\n\n\t\t\"Environment variable `{}` with `{}` is not an absolute path!\",\n\n\t\tWASM_TARGET_DIRECTORY,\n\n\t\ttarget_dir.display()\n\n\t);\n\n\n\n\tfs::create_dir_all(&target_dir).expect(\"Creates `WASM_TARGET_DIRECTORY`.\");\n\n\n\n\tfs::copy(\n\n\t\twasm_binary.wasm_binary_path(),\n\n\t\ttarget_dir.join(format!(\"{}.wasm\", get_wasm_binary_name(cargo_manifest))),\n\n\t)\n\n\t.expect(\"Copies WASM binary to `WASM_TARGET_DIRECTORY`.\");\n\n}\n", "file_path": "bencher/src/build_wasm/wasm_project.rs", "rank": 90, "score": 89268.57207947555 }, { "content": "//! 
THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 2.0.0\n\n\n\n#![allow(unused_parens)]\n\n#![allow(unused_imports)]\n\n#![allow(clippy::unnecessary_cast)]\n\n\n\nuse frame_support::weights::{constants::RocksDbWeight as DbWeight, Weight};\n\n\n\nimpl crate::WeightInfo for () {\n\n\tfn gradually_update() -> Weight {\n\n\t\t(57_922_000 as Weight)\n\n\t\t\t.saturating_add(DbWeight::get().reads(2 as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().writes(1 as Weight))\n\n\t}\n\n\tfn cancel_gradually_update() -> Weight {\n\n\t\t(66_687_000 as Weight)\n\n\t\t\t.saturating_add(DbWeight::get().reads(1 as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().writes(1 as Weight))\n\n\t}\n\n\tfn on_finalize(u: u32) -> Weight {\n\n\t\t(37_067_000 as Weight)\n\n\t\t\t.saturating_add((20_890_000 as Weight).saturating_mul(u as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().reads(3 as Weight))\n\n\t\t\t.saturating_add(DbWeight::get().writes(3 as Weight))\n\n\t}\n\n}\n", "file_path": "gradually-update/src/default_weight.rs", "rank": 91, "score": 88946.68170961417 }, { "content": "/// Build the project to create the WASM binary.\n\nfn build_project(project: &Path, default_rustflags: &str, cargo_cmd: CargoCommandVersioned) {\n\n\tlet manifest_path = project.join(\"Cargo.toml\");\n\n\tlet mut build_cmd = cargo_cmd.command();\n\n\n\n\tlet rustflags = format!(\n\n\t\t\"-C link-arg=--export-table {} {}\",\n\n\t\tdefault_rustflags,\n\n\t\tenv::var(WASM_BUILD_RUSTFLAGS_ENV).unwrap_or_default(),\n\n\t);\n\n\n\n\tbuild_cmd\n\n\t\t.args(&[\"rustc\", \"--target=wasm32-unknown-unknown\"])\n\n\t\t.arg(format!(\"--manifest-path={}\", manifest_path.display()))\n\n\t\t.env(\"RUSTFLAGS\", rustflags)\n\n\t\t// Unset the `CARGO_TARGET_DIR` to prevent a cargo deadlock (cargo locks a target dir exclusive).\n\n\t\t// The runner project is created in `CARGO_TARGET_DIR` and executing it will create a sub target\n\n\t\t// directory inside of `CARGO_TARGET_DIR`.\n\n\t\t.env_remove(\"CARGO_TARGET_DIR\")\n\n\t\t// We don't want to call ourselves recursively\n\n\t\t.env(SKIP_BUILD_ENV, \"\");\n", "file_path": "bencher/src/build_wasm/wasm_project.rs", "rank": 92, "score": 85327.18284517205 }, { "content": "#[derive(Default, Debug)]\n\nstruct AccessReport {\n\n\tpub read: u32,\n\n\tpub written: u32,\n\n}\n\n\n\npub struct BenchTracker {\n\n\tinstant: RwLock<Instant>,\n\n\tdepth: RwLock<u32>,\n\n\tredundant: RwLock<Instant>,\n\n\tresults: RwLock<Vec<u128>>,\n\n\tmain_keys: RwLock<HashMap<StorageKey, AccessInfo>>,\n\n\tchild_keys: RwLock<HashMap<StorageKey, HashMap<StorageKey, AccessInfo>>>,\n\n\twarn_child_prefix_remove: RwLock<bool>,\n\n\twhitelisted_keys: RwLock<HashMap<StorageKey, (bool, bool)>>,\n\n}\n\n\n\nimpl BenchTracker {\n\n\tpub fn new() -> Self {\n\n\t\tBenchTracker {\n\n\t\t\tinstant: RwLock::new(Instant::now()),\n", "file_path": "bencher/src/tracker.rs", "rank": 93, "score": 83524.85472945703 }, { "content": "#[derive(Default)]\n\nstruct AccessInfo {\n\n\tpub read: AccessType,\n\n\tpub written: AccessType,\n\n}\n\n\n\nimpl AccessInfo {\n\n\tfn read(redundant: bool) -> Self {\n\n\t\tlet read = if redundant {\n\n\t\t\tAccessType::Redundant\n\n\t\t} else {\n\n\t\t\tAccessType::Important\n\n\t\t};\n\n\t\tSelf {\n\n\t\t\tread,\n\n\t\t\twritten: AccessType::None,\n\n\t\t}\n\n\t}\n\n\n\n\tfn written(redundant: bool) -> Self {\n\n\t\tlet written = if redundant {\n", "file_path": "bencher/src/tracker.rs", "rank": 94, "score": 83519.66414322724 }, { "content": "type Balance = u64;\n\n\n\nparameter_types! 
{\n\n\tpub const ExistentialDeposit: u64 = 2;\n\n}\n\n\n\nimpl pallet_balances::Config for Runtime {\n\n\ttype Balance = Balance;\n\n\ttype DustRemoval = ();\n\n\ttype Event = Event;\n\n\ttype ExistentialDeposit = ExistentialDeposit;\n\n\ttype AccountStore = frame_system::Pallet<Runtime>;\n\n\ttype MaxLocks = ();\n\n\ttype MaxReserves = ();\n\n\ttype ReserveIdentifier = [u8; 8];\n\n\ttype WeightInfo = ();\n\n}\n\n\n\nparameter_type_with_key! {\n\n\tpub ExistentialDeposits: |_currency_id: CurrencyId| -> Balance {\n", "file_path": "currencies/src/mock.rs", "rank": 95, "score": 80594.40275073393 }, { "content": "type Balance = u64;\n\n\n\nparameter_types! {\n\n\tpub const ExistentialDeposit: u64 = 1;\n\n}\n\n\n\nimpl pallet_balances::Config for Runtime {\n\n\ttype Balance = Balance;\n\n\ttype DustRemoval = ();\n\n\ttype Event = Event;\n\n\ttype ExistentialDeposit = ExistentialDeposit;\n\n\ttype AccountStore = frame_system::Pallet<Runtime>;\n\n\ttype MaxLocks = ();\n\n\ttype MaxReserves = ();\n\n\ttype ReserveIdentifier = [u8; 8];\n\n\ttype WeightInfo = ();\n\n}\n\n\n\npub struct EnsureAliceOrBob;\n\nimpl EnsureOrigin<Origin> for EnsureAliceOrBob {\n", "file_path": "vesting/src/mock.rs", "rank": 96, "score": 80594.40275073393 }, { "content": "type Value = u32;\n\n\n\nparameter_types! {\n\n\tpub const BlockHashCount: u64 = 250;\n\n}\n\nimpl frame_system::Config for Test {\n\n\ttype Origin = Origin;\n\n\ttype Call = Call;\n\n\ttype Index = u64;\n\n\ttype BlockNumber = u64;\n\n\ttype Hash = H256;\n\n\ttype Hashing = BlakeTwo256;\n\n\ttype AccountId = AccountId;\n\n\ttype Lookup = IdentityLookup<Self::AccountId>;\n\n\ttype Header = Header;\n\n\ttype Event = Event;\n\n\ttype BlockHashCount = BlockHashCount;\n\n\ttype BlockWeights = ();\n\n\ttype BlockLength = ();\n\n\ttype Version = ();\n", "file_path": "oracle/src/mock.rs", "rank": 97, "score": 80570.69234480389 }, { "content": "type Key = u32;\n", "file_path": "oracle/src/mock.rs", "rank": 98, "score": 80570.69234480389 }, { "content": "type CurrencyId = u32;\n", "file_path": "currencies/src/mock.rs", "rank": 99, "score": 79389.69699645456 } ]
Rust
src/lib.rs
kneasle/goldilocks-json-fmt
41a84437e933c67365e874b73405d4d1fc935849
/*! [![crates.io](https://img.shields.io/crates/v/goldilocks-json-fmt.svg)](https://crates.io/crates/goldilocks-json-fmt) A simple, portable, fast, pretty JSON formatter. No dependencies or unsafe code. The resulting JSON strikes a balance between 'too wide' (i.e. minified, all on one line) and 'too tall' (e.g. `serde_json`'s `pretty_print`). You give the formatter a line limit (defaults to 100 chars), and it keeps things as wide as possible whilst preserving that limit. Sometimes the limit is impossible to achieve (e.g. you have a string that's longer than the line limit), in which case the formatter will break the limit by as little as possible. The throughput of the Goldilocks formatter is about 300MB/s, which should be enough for most situations. It's about as fast as you can get without cracking out the big guns and using SIMD, which would break both simplicity and portability. # Example: ``` // Ewww so horrible let json = r#"{"test/cases/87s-at-back.toml":{"comps":[{"length":32, "string":"sHsH","avg_score":-0.45625},{"length":64,"string":"sHWsMH", "avg_score":-0.44062495},{"length":96,"string":"WMsWMHsH","avg_score": -0.33124998},{"length":96,"string":"WsMHWsMH","avg_score":-0.33124998}, {"length":96,"string":"sHWMsWMH","avg_score":-0.33124995},{"length":64, "string":"WsMHsH","avg_score":-0.284375}]}}"#; let perfect_json = goldilocks_json_fmt::format(&json).expect("Invalid JSON"); assert_eq!( &perfect_json, // So perfect! r#"{ "test/cases/87s-at-back.toml": { "comps": [ { "length": 32, "string": "sHsH", "avg_score": -0.45625 }, { "length": 64, "string": "sHWsMH", "avg_score": -0.44062495 }, { "length": 96, "string": "WMsWMHsH", "avg_score": -0.33124998 }, { "length": 96, "string": "WsMHWsMH", "avg_score": -0.33124998 }, { "length": 96, "string": "sHWMsWMH", "avg_score": -0.33124995 }, { "length": 64, "string": "WsMHsH", "avg_score": -0.284375 } ] } }"#, ); ``` */ mod formatting; mod parsing; pub fn format(s: &str) -> Result<String> { format_with_config(s, &Config::default()) } pub fn format_within_width(s: &str, width_limit: usize) -> Result<String> { let config = Config { width_limit, ..Config::default() }; format_with_config(s, &config) } pub fn format_with_config(s: &str, config: &Config) -> Result<String> { Node::parse(s).map(|ast| ast.format(config)) } #[derive(Debug, Clone)] pub struct Config { pub width_limit: usize, pub indent_width: usize, } impl Default for Config { fn default() -> Self { Self { width_limit: 100, indent_width: 2, } } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Error { /* Misc parsing */ ExpectedXsFoundY(Item, usize, &'static [Expected], char), ExpectedXsFoundEof(Item, &'static [Expected]), InvalidTrailingWhitespace(usize, char), /* String parsing */ EofDuringString(usize), InvalidEscape(usize, char), InvalidHexEscape(usize, usize, char), ControlCharInString(usize, char), /* Number parsing */ LeadingZero(usize), SecondDecimalPoint(usize), InvalidCharInExponent(usize, char), EmptyExponent(usize), } pub type Result<T> = std::result::Result<T, Error>; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Item { TopLevelValue, Literal(&'static str), Number, Array(usize), Object(usize), } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Expected { Key, Value, Char(char), Digit, } impl Error { fn expected_xs_found( item: Item, expected: &'static [Expected], v: Option<(usize, char)>, ) -> Self { match v { Some((idx, c)) => Error::ExpectedXsFoundY(item, idx, expected, c), None => Error::ExpectedXsFoundEof(item, expected), } } } #[derive(Debug, Clone, 
PartialEq, Eq)] struct Node<'source> { unsplit_width: usize, kind: NodeKind<'source>, } #[derive(Debug, Clone, PartialEq, Eq)] enum NodeKind<'source> { Atom(&'source str), Array(Vec<Node<'source>>), Object(Vec<(&'source str, Node<'source>)>), } impl<'source> Node<'source> { fn new_atom(s: &'source str) -> Self { Self { unsplit_width: s.len(), kind: NodeKind::Atom(s), } } }
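The `src/lib.rs` listing above defines the crate's public surface: `format`, `format_within_width`, `format_with_config`, and the `Config` struct with `width_limit` and `indent_width` fields (defaults 100 and 2). Below is a minimal usage sketch based only on those signatures; the JSON input literal and the widths 60, 40, and 4 are made-up illustration values, not taken from the repository.

```rust
// Minimal sketch of the public API defined in src/lib.rs above.
// The input JSON and the chosen widths are arbitrary illustration values.
fn main() {
    let json = r#"{"key": [1, 2, 3], "nested": {"flag": true}}"#;

    // Default settings: 100-character line limit, 2-space indent.
    let pretty = goldilocks_json_fmt::format(json).expect("invalid JSON");
    println!("{}", pretty);

    // Same input, but keep lines within 60 columns where possible.
    let narrow = goldilocks_json_fmt::format_within_width(json, 60).expect("invalid JSON");
    println!("{}", narrow);

    // Full control over both knobs via `Config`.
    let config = goldilocks_json_fmt::Config {
        width_limit: 40,
        indent_width: 4,
    };
    let custom = goldilocks_json_fmt::format_with_config(json, &config).expect("invalid JSON");
    println!("{}", custom);
}
```

As the crate docs quoted above note, `width_limit` is best-effort: values that cannot fit on one line are allowed to exceed the limit by as little as possible.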
/*! [![crates.io](https://img.shields.io/crates/v/goldilocks-json-fmt.svg)](https://crates.io/crates/goldilocks-json-fmt) A simple, portable, fast, pretty JSON formatter. No dependencies or unsafe code. The resulting JSON strikes a balance between 'too wide' (i.e. minified, all on one line) and 'too tall' (e.g. `serde_json`'s `pretty_print`). You give the formatter a line limit (defaults to 100 chars), and it keeps things as wide as possible whilst preserving that limit. Sometimes the limit is impossible to achieve (e.g. you have a string that's longer than the line limit), in which case the formatter will break the limit by as little as possible. The throughput of the Goldilocks formatter is about 300MB/s, which should be enough for most situations. It's about as fast as you can get without cracking out the big guns and using SIMD, which would break both simplicity and portability. # Example: ``` // Ewww so horrible let json = r#"{"test/cases/87s-at-back.toml":{"comps":[{"length":32, "string":"sHsH","avg_score":-0.45625},{"length":64,"string":"sHWsMH", "avg_score":-0.44062495},{"length":96,"string":"WMsWMHsH","avg_score": -0.33124998},{"length":96,"string":"WsMHWsMH","avg_score":-0.33124998}, {"length":96,"string":"sHWMsWMH","avg_score":-0.33124995},{"length":64, "string":"WsMHsH","avg_score":-0.284375}]}}"#; let perfect_json = goldilocks_json_fmt::format(&json).expect("Invalid JSON"); assert_eq!( &perfect_json, // So perfect! r#"{ "test/cases/87s-at-back.toml": { "comps": [ { "length": 32, "string": "sHsH", "avg_score": -0.45625 }, { "length": 64, "string": "sHWsMH", "avg_score": -0.44062495 }, { "length": 96, "string": "WMsWMHsH", "avg_score": -0.33124998 }, { "length": 96, "string": "WsMHWsMH", "avg_score": -0.33124998 }, { "length": 96, "string": "sHWMsWMH", "avg_score": -0.33124995 }, { "length": 64, "string": "WsMHsH", "avg_score": -0.284375 } ] } }"#, ); ``` */ mod formatting; mod parsing; pub fn format(s: &str) -> Result<String> { format_with_config(s, &Config::default()) } pub fn format_within_width(s: &str, width_limit: usize) -> Result<String> { let config = Config { width_limit, ..Config::default() }; format_with_config(s, &config) } pub fn format_with_config(s: &str, config: &Config) -> Result<String> { Node::parse(s).map(|ast| ast.format(config)) } #[derive(Debug, Clone)] pub struct Config { pub width_limit: usize, pub indent_width: usize, } impl Default for Config { fn default() -> Self { Self { width_limit: 100, indent_width: 2, } } } #[derive(Debug, Clone, PartialEq, Eq)] pub enum Error { /* Misc parsing */ ExpectedXsFoundY(Item, usize, &'static [Expected], char), ExpectedXsFoundEof(Item, &'static [Expected]), InvalidTrailingWhitespace(usize, char), /* String parsing */ EofDuringString(usize), InvalidEscape(usize, char), InvalidHexEscape(usize, usize, char), ControlCharInStr
py, PartialEq, Eq)] pub enum Expected { Key, Value, Char(char), Digit, } impl Error { fn expected_xs_found( item: Item, expected: &'static [Expected], v: Option<(usize, char)>, ) -> Self { match v { Some((idx, c)) => Error::ExpectedXsFoundY(item, idx, expected, c), None => Error::ExpectedXsFoundEof(item, expected), } } } #[derive(Debug, Clone, PartialEq, Eq)] struct Node<'source> { unsplit_width: usize, kind: NodeKind<'source>, } #[derive(Debug, Clone, PartialEq, Eq)] enum NodeKind<'source> { Atom(&'source str), Array(Vec<Node<'source>>), Object(Vec<(&'source str, Node<'source>)>), } impl<'source> Node<'source> { fn new_atom(s: &'source str) -> Self { Self { unsplit_width: s.len(), kind: NodeKind::Atom(s), } } }
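The suffix cell above ends with the crate's internal AST types: `Node` (carrying `unsplit_width`, the width the value takes when rendered on a single line) and `NodeKind` with its `Atom`, `Array`, and `Object` variants. A small sketch of the tree `Node::parse` is expected to build, written as if it lived in the crate's own test module since these types are crate-private; it mirrors the unit tests quoted later in this row.

```rust
// Sketch only: assumes placement inside the crate (e.g. a test module in
// src/parsing.rs), because `Node` and `NodeKind` are not public.
#[cfg(test)]
mod ast_shape_example {
    use super::*;

    #[test]
    fn tiny_documents() {
        // A bare literal parses to an atom whose width is its own length.
        assert_eq!(
            Node::parse("null"),
            Ok(Node {
                unsplit_width: 4,
                kind: NodeKind::Atom("null"),
            }),
        );
        // An empty array is an `Array` node of width 2, i.e. the two chars "[]".
        assert_eq!(
            Node::parse("[]"),
            Ok(Node {
                unsplit_width: 2,
                kind: NodeKind::Array(vec![]),
            }),
        );
    }
}
```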
ing(usize, char), /* Number parsing */ LeadingZero(usize), SecondDecimalPoint(usize), InvalidCharInExponent(usize, char), EmptyExponent(usize), } pub type Result<T> = std::result::Result<T, Error>; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Item { TopLevelValue, Literal(&'static str), Number, Array(usize), Object(usize), } #[derive(Debug, Clone, Co
random
[ { "content": "/// Attempt to parse the chars in `iter` as an string, **assuming that the initial `\"` has\n\n/// been consumed**. This returns a string slice **from the JSON source code**, i.e. the fully\n\n/// escaped string complete with the enclosing `\"`s. We do not attempt to decode the string, we\n\n/// simply verify that it conforms to the JSON standard.\n\nfn parse_string<'source>(start_idx: usize, iter: &mut Iter<'source>) -> Result<&'source str> {\n\n while let Some((idx, c)) = iter.next() {\n\n match c {\n\n // If we find an unescaped quote, then terminate the string.\n\n // `+ 1` is OK because '\"' has UTF-8 length of 1 byte\n\n '\"' => return Ok(&iter.source[start_idx..idx + 1]),\n\n '\\\\' => match iter.next() {\n\n Some((_, '\"' | '\\\\' | '/' | 'b' | 'f' | 'n' | 'r' | 't')) => {} // Valid escape chars\n\n Some((_, 'u')) => {\n\n // `\\u` should be followed by 4 hex chars\n\n for _ in 0..4 {\n\n match iter.next() {\n\n Some((_, '0'..='9' | 'a'..='f' | 'A'..='F')) => {} // Valid hex char\n\n Some((bad_idx, bad_char)) => {\n\n return Err(Error::InvalidHexEscape(idx, bad_idx, bad_char));\n\n }\n\n None => return Err(Error::EofDuringString(start_idx)),\n\n }\n\n }\n\n }\n", "file_path": "src/parsing.rs", "rank": 3, "score": 76830.77451670829 }, { "content": "/// The result generated when a JSON value is parsed. Special cases are made for ']' and '}' to\n\n/// prevent lookahead for empty arrays/objects.\n\nenum ValueParseResult<'source> {\n\n /// The value parsed to a node\n\n Node(Node<'source>),\n\n /// The first non-whitespace char was `']'`\n\n CloseSquare(usize),\n\n /// The first non-whitespace char was `'}'`\n\n CloseBrace(usize),\n\n}\n\n\n", "file_path": "src/parsing.rs", "rank": 4, "score": 51106.39686014566 }, { "content": "/// Attempt to parse the chars in `iter` as an array, **assuming that the initial `[` has\n\n/// been consumed**.\n\nfn parse_array<'source>(start_idx: usize, iter: &mut Iter<'source>) -> Result<Node<'source>> {\n\n // Parse the first element\n\n let first_value = match parse_value_or_closing_bracket(\n\n Item::Array(start_idx),\n\n &[Expected::Value, Expected::Char(']')],\n\n iter,\n\n )? 
{\n\n // Array is `[]`, and therefore empty\n\n ValueParseResult::CloseSquare(_) => {\n\n return Ok(Node {\n\n unsplit_width: \"[]\".len(),\n\n kind: NodeKind::Array(vec![]),\n\n })\n\n }\n\n // Can't end an array with `}`\n\n ValueParseResult::CloseBrace(idx) => {\n\n return Err(Error::ExpectedXsFoundY(\n\n Item::Array(start_idx),\n\n idx,\n\n &[Expected::Value, Expected::Char(']')],\n", "file_path": "src/parsing.rs", "rank": 5, "score": 46869.64897556745 }, { "content": "/// Attempt to parse an object, **assuming that the initial `{` has been consumed**.\n\nfn parse_object<'source>(start_idx: usize, iter: &mut Iter<'source>) -> Result<Node<'source>> {\n\n let mut fields = Vec::<(&str, Node)>::new();\n\n let mut unsplit_width = \"{ \".len() + \" }\".len();\n\n loop {\n\n // Parse ws until we get to a '\"' for the key (returning if we see '}' and this is\n\n // the first field)\n\n let key = loop {\n\n match iter.next() {\n\n Some((_, ' ' | '\\t' | '\\n' | '\\r')) => continue, // Ignore whitespace\n\n Some((start_idx, '\"')) => break parse_string(start_idx, iter)?, // Parse key\n\n Some((_, '}')) if fields.is_empty() => {\n\n // '}' before the first key is an empty object\n\n // TODO: Report idempotence bug in rustfmt\n\n return Ok(Node {\n\n unsplit_width: 2, // \"{}\"\n\n kind: NodeKind::Object(vec![]),\n\n });\n\n }\n\n // Any other char is an error\n\n v => {\n", "file_path": "src/parsing.rs", "rank": 6, "score": 46866.983775264685 }, { "content": "fn main() {\n\n for path in std::env::args().skip(1) {\n\n let raw_json =\n\n std::fs::read_to_string(&path).unwrap_or_else(|_| panic!(\"Couldn't read {:?}\", path));\n\n let pretty_json = goldilocks_json_fmt::format(&raw_json)\n\n .unwrap_or_else(|_| panic!(\"Failed to parse JSON in {:?}\", path));\n\n println!(\"{}\", pretty_json);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 7, "score": 38062.37054890391 }, { "content": "/// Attempt to parse a JSON value.\n\n///\n\n/// NOTE: This does not parse trailing whitespace\n\nfn parse_value<'source>(item: Item, iter: &mut Iter<'source>) -> Result<Node<'source>> {\n\n let (idx, unexpected_char) =\n\n match parse_value_or_closing_bracket(item, &[Expected::Value], iter)? {\n\n ValueParseResult::Node(n) => return Ok(n),\n\n ValueParseResult::CloseBrace(idx) => (idx, '}'),\n\n ValueParseResult::CloseSquare(idx) => (idx, ']'),\n\n };\n\n Err(Error::ExpectedXsFoundY(\n\n item,\n\n idx,\n\n &[Expected::Value],\n\n unexpected_char,\n\n ))\n\n}\n\n\n", "file_path": "src/parsing.rs", "rank": 8, "score": 35626.0410983717 }, { "content": "/// Char iterator that always lets you peek indices (even if the whole input has been consumed)\n\n// TODO: Consume the string byte-wise, since we're only interested in ASCII. 
This'll probably\n\n// have a massive performance benefit\n\nstruct Iter<'source> {\n\n inner: std::iter::Peekable<std::str::CharIndices<'source>>,\n\n source: &'source str,\n\n}\n\n\n\nimpl<'source> Iter<'source> {\n\n fn new(source: &'source str) -> Self {\n\n Self {\n\n inner: source.char_indices().peekable(),\n\n source,\n\n }\n\n }\n\n\n\n fn peek_char(&mut self) -> Option<char> {\n\n self.inner.peek().map(|(_idx, c)| *c)\n\n }\n\n\n\n fn new_atom_starting_from(&mut self, start_idx: usize) -> Node<'source> {\n\n Node::new_atom(&self.source[start_idx..self.peek_idx()])\n\n }\n", "file_path": "src/parsing.rs", "rank": 9, "score": 35374.08897427508 }, { "content": "/// Parse a number, assuming that everything up to **and including** the 'e' or 'E' has been\n\n/// consumed\n\nfn parse_number_after_exponent<'source>(\n\n start_idx: usize,\n\n iter: &mut Iter<'source>,\n\n) -> Result<Node<'source>> {\n\n // 'E' or 'e' is the last char popped, and it has UTF-8 length of 1 byte\n\n let exponent_idx = iter.peek_idx() - 1;\n\n // An exponent can optionally start with a '+' or '-'\n\n if let Some('+' | '-') = iter.peek_char() {\n\n iter.next(); // Consume the '+/-' if it exists, otherwise start parsing digits\n\n }\n\n\n\n let mut has_at_least_one_digit = false;\n\n loop {\n\n match iter.peek_char() {\n\n Some('0'..='9') => iter.next(), // Numbers are always valid exponents\n\n Some(c @ ('.' | 'e' | 'E' | '+' | '-')) => {\n\n return Err(Error::InvalidCharInExponent(iter.peek_idx(), c));\n\n }\n\n // Anything that isn't part of a number belongs to the next token (e.g. ',' to\n\n // move onto the next array element)\n", "file_path": "src/parsing.rs", "rank": 12, "score": 31549.625004736034 }, { "content": "/// Parse a number, assuming that the leading 0 has been consumed (i.e. the number so far\n\n/// is `0` or `-0`)\n\nfn parse_number_after_leading_0<'source>(\n\n start_idx: usize,\n\n iter: &mut Iter<'source>,\n\n) -> Result<Node<'source>> {\n\n Ok(match iter.peek_char() {\n\n // `iter.peek_idx() - 1` is the index of the last ASCII value consumed (in this case, the\n\n // leading '0')\n\n Some('1'..='9') => return Err(Error::LeadingZero(iter.peek_idx() - 1)),\n\n Some('.') => {\n\n iter.next();\n\n parse_number_after_decimal_point(start_idx, iter)?\n\n }\n\n Some('e' | 'E') => {\n\n iter.next();\n\n parse_number_after_exponent(start_idx, iter)?\n\n }\n\n // Number is '0' or '-0'. Therefore, the peeked char is part of the next token\n\n // (e.g. it could be a ',')\n\n _ => iter.new_atom_starting_from(start_idx),\n\n })\n\n}\n\n\n", "file_path": "src/parsing.rs", "rank": 13, "score": 31549.625004736034 }, { "content": "/// Attempt to parse a single JSON value, or a closing bracket (']' or '}'). Special cases:\n\n/// - If the first non-whitespace char is `]`, then `Some(CloseSquare)` is returned\n\n/// - If the first non-whitespace char is `}`, then `Some(CloseBrace)` is returned\n\n///\n\n/// NOTE: This does not parse trailing whitespace\n\nfn parse_value_or_closing_bracket<'source>(\n\n item: Item,\n\n expected: &'static [Expected],\n\n iter: &mut Iter<'source>,\n\n) -> Result<ValueParseResult<'source>> {\n\n while let Some((start_idx, c)) = iter.next() {\n\n /// Consume a sequence of chars, erroring if it's not as expected\n\n macro_rules! 
expect_ident {\n\n ($ident_name: literal => $( $chars: literal ),*) => {{\n\n // Consume each char in turn, erroring if any of them aren't expected\n\n $(\n\n match iter.next() {\n\n Some((_, $chars)) => (),\n\n v => return Err(Error::expected_xs_found(\n\n Item::Literal($ident_name),\n\n &[Expected::Char($chars)],\n\n v\n\n )),\n\n }\n\n )*\n", "file_path": "src/parsing.rs", "rank": 14, "score": 30616.520029213505 }, { "content": "/// Parse a number, assuming that everything up to **and including** the decimal point has\n\n/// been consumed.\n\nfn parse_number_after_decimal_point<'source>(\n\n start_idx: usize,\n\n iter: &mut Iter<'source>,\n\n) -> Result<Node<'source>> {\n\n loop {\n\n match iter.peek_char() {\n\n Some('0'..='9') => {\n\n iter.next(); // Keep consuming numbers\n\n }\n\n // Can't have multiple decimal points\n\n Some('.') => return Err(Error::SecondDecimalPoint(iter.peek_idx())),\n\n Some('e' | 'E') => {\n\n iter.next(); // Consume the 'e'/'E'\n\n return parse_number_after_exponent(start_idx, iter);\n\n }\n\n // Anything that isn't part of a number belongs to the next token (e.g. ',' to\n\n // move onto the next array element)\n\n _ => return Ok(iter.new_atom_starting_from(start_idx)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parsing.rs", "rank": 15, "score": 30609.60200690388 }, { "content": "/// Parse a number, assuming that a single non-zero digit has been consumed (i.e. the\n\n/// number so far matches `[1-9]` or `-[1-9]`\n\nfn parse_number_after_first_non_zero<'source>(\n\n start_idx: usize,\n\n iter: &mut Iter<'source>,\n\n) -> Result<Node<'source>> {\n\n // TODO: Refactor all these functions into one loop?\n\n loop {\n\n match iter.peek_char() {\n\n Some('0'..='9') => {\n\n iter.next(); // Keep consuming numbers\n\n }\n\n Some('.') => {\n\n iter.next();\n\n return parse_number_after_decimal_point(start_idx, iter);\n\n }\n\n Some('e' | 'E') => {\n\n iter.next();\n\n return parse_number_after_exponent(start_idx, iter);\n\n }\n\n // Anything that isn't part of a number belongs to the next token (e.g. ',' to\n\n // move onto the next array element)\n\n _ => return Ok(iter.new_atom_starting_from(start_idx)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parsing.rs", "rank": 16, "score": 29764.20944740666 }, { "content": "# goldilocks-json-fmt\n\n\n\n[![crates.io](https://img.shields.io/crates/v/goldilocks-json-fmt.svg)](https://crates.io/crates/goldilocks-json-fmt)\n\n\n\nA simple, portable, fast, pretty JSON formatter. No dependencies or unsafe code.\n\n\n\nThe resulting JSON strikes a balance between 'too wide' (i.e. minified, all on one line) and 'too\n\ntall' (e.g. `serde_json`'s `pretty_print`). You give the formatter a line limit (defaults to 100\n\nchars), and it keeps things as wide as possible whilst preserving that limit. Sometimes the limit\n\nis impossible to achieve (e.g. you have a string that's longer than the line limit), in which case\n\nthe formatter will break the limit by as little as possible.\n\n\n\nThe throughput of the Goldilocks formatter is about 300MB/s, which should be enough for most\n\nsituations. 
It's about as fast as you can get without cracking out the big guns and using SIMD,\n\nwhich would break both simplicity and portability.\n\n\n\n## Example:\n\n\n\n```rust\n\n// Ewww so horrible\n\nlet json = r#\"{\"test/cases/87s-at-back.toml\":{\"comps\":[{\"length\":32,\n\n \"string\":\"sHsH\",\"avg_score\":-0.45625},{\"length\":64,\"string\":\"sHWsMH\",\n\n \"avg_score\":-0.44062495},{\"length\":96,\"string\":\"WMsWMHsH\",\"avg_score\":\n\n -0.33124998},{\"length\":96,\"string\":\"WsMHWsMH\",\"avg_score\":-0.33124998},\n\n {\"length\":96,\"string\":\"sHWMsWMH\",\"avg_score\":-0.33124995},{\"length\":64,\n\n \"string\":\"WsMHsH\",\"avg_score\":-0.284375}]}}\"#;\n\n\n\nlet perfect_json = goldilocks_json_fmt::format(&json).expect(\"Invalid JSON\");\n\n\n\nassert_eq!(\n\n &perfect_json,\n\n // So perfect!\n\n r#\"{\n\n \"test/cases/87s-at-back.toml\": {\n\n \"comps\": [\n\n { \"length\": 32, \"string\": \"sHsH\", \"avg_score\": -0.45625 },\n\n { \"length\": 64, \"string\": \"sHWsMH\", \"avg_score\": -0.44062495 },\n\n { \"length\": 96, \"string\": \"WMsWMHsH\", \"avg_score\": -0.33124998 },\n\n { \"length\": 96, \"string\": \"WsMHWsMH\", \"avg_score\": -0.33124998 },\n\n { \"length\": 96, \"string\": \"sHWMsWMH\", \"avg_score\": -0.33124995 },\n\n { \"length\": 64, \"string\": \"WsMHsH\", \"avg_score\": -0.284375 }\n\n ]\n\n }\n\n}\"#,\n\n);\n\n```\n\n\n\nLicense: MIT\n", "file_path": "README.md", "rank": 17, "score": 25487.87522762947 }, { "content": "# Benchmarks for JSON formatting\n\n\n\n| Size | File Name | Source |\n\n|------:|---------------------------:|--------|\n\n| 1.3MB | cccbr-methods.json | [GitHub](https://raw.githubusercontent.com/kneasle/cc-method-lib/0b57bd03e6f08f20d2fdf0cb9e688bbd01b62e0a/cccbr-methods.json) |\n\n| 10kB | earthquakes.json | [US Government](https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_hour.geojson) |\n\n| 891kB | historical-events.json | [vizgr.org](https://www.vizgr.org/historical-events/search.php?format=json&begin_date=-3000000&end_date=20151231&lang=en) |\n\n| 145kB | monument-test-results.json | [Monument's GitHub Repo](https://github.com/kneasle/ringing/blob/4bec69f28e6bcae76476a4054d9d3ccee635d6cb/monument/test/results.json) |\n\n| 237kB | meteorites.json | [NASA](https://data.nasa.gov/resource/y77d-th95.json) (modified) |\n\n\n\nSome data files (e.g. `meteorites.json`) contain numbers inside strings (e.g. `\"41\"` instead of just\n\n`41`). I've pre-processed these so that the numbers are outside the strings using the command `rg\n\n--passthru -N \"\\\"(?P<num>(-?)[0-9]*\\.?[0-9]*)\\\"\" benches/meteorites.json --replace \"\\$num\" >\n\n/tmp/json && mv /tmp/json benches/meteorites.json` (requires\n\n[ripgrep](https://github.com/BurntSushi/ripgrep) and a Unix-like OS).\n\n\n\nAlso, the original `historical-events.json` is missing two trailing `}`s, which I've added.\n", "file_path": "benches/README.md", "rank": 18, "score": 24492.46647268488 }, { "content": "//! 
Code to convert an AST into a pretty-formatted string\n\n\n\n// `str::push` is non-const, so pushing single-char strings gets compiled down to plain data writes\n\n// and is therefore faster.\n\n#![allow(clippy::single_char_add_str)]\n\n\n\nuse crate::{Config, Node, NodeKind};\n\n\n\nimpl Node<'_> {\n\n /// Convert the AST into a pretty-formatted string\n\n pub(crate) fn format(&self, config: &Config) -> String {\n\n let mut s = String::new();\n\n let mut indentation_str = String::new();\n\n self.write_to_string(0, &mut indentation_str, &mut s, config);\n\n s\n\n }\n\n\n\n fn write_to_string(\n\n &self,\n\n indentation: usize,\n", "file_path": "src/formatting.rs", "rank": 19, "score": 23040.62517525477 }, { "content": " indentation_str: &mut String,\n\n out: &mut String,\n\n config: &Config,\n\n ) {\n\n if self.unsplit_width + indentation < config.width_limit {\n\n self.fmt_wide(out, config); // If this can fit in one line, then make it 'wide'\n\n } else {\n\n self.fmt_tall(indentation_str, out, config);\n\n }\n\n }\n\n\n\n fn fmt_wide(&self, out: &mut String, config: &Config) {\n\n match &self.kind {\n\n NodeKind::Atom(s) => out.push_str(s), // Atoms are always formatted as-is\n\n NodeKind::Array(values) => {\n\n out.push_str(\"[\");\n\n let mut is_first_time = true;\n\n for v in values {\n\n if !is_first_time {\n\n out.push_str(\", \");\n", "file_path": "src/formatting.rs", "rank": 20, "score": 23036.45042975613 }, { "content": " }\n\n v.fmt_wide(out, config); // Format all children as also wide\n\n is_first_time = false;\n\n }\n\n out.push_str(\"]\");\n\n }\n\n NodeKind::Object(fields) => {\n\n out.push_str(\"{ \");\n\n let mut is_first_time = true;\n\n for (key, value) in fields {\n\n // Comma for the last value (if it exists)\n\n if !is_first_time {\n\n out.push_str(\", \");\n\n }\n\n is_first_time = false;\n\n // `key: value`\n\n out.push_str(key);\n\n out.push_str(\": \");\n\n value.fmt_wide(out, config); // Format all children as also wide\n\n }\n", "file_path": "src/formatting.rs", "rank": 21, "score": 23029.109306718063 }, { "content": " out.push_str(\": \");\n\n value.write_to_string(\n\n indentation_str.len() + key.len() + 2, // '{indentation}{key}: '\n\n indentation_str,\n\n out,\n\n config,\n\n );\n\n }\n\n // Remove indent level before '}'\n\n for _ in 0..config.indent_width {\n\n assert!(indentation_str.pop().is_some());\n\n }\n\n // Final `]` on a its own line\n\n out.push_str(\"\\n\");\n\n out.push_str(indentation_str);\n\n out.push_str(\"}\");\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/formatting.rs", "rank": 22, "score": 23028.10195005255 }, { "content": " out.push_str(indentation_str);\n\n first_val.write_to_string(indentation_str.len(), indentation_str, out, config);\n\n // Other values\n\n for val in val_iter {\n\n // Comma for the previous value\n\n out.push_str(\",\\n\");\n\n out.push_str(indentation_str);\n\n // Next value\n\n val.write_to_string(indentation_str.len(), indentation_str, out, config)\n\n }\n\n // Remove indent level before ']'\n\n for _ in 0..config.indent_width {\n\n assert!(indentation_str.pop().is_some());\n\n }\n\n }\n\n // Final `]` on a its own line\n\n out.push_str(\"\\n\");\n\n out.push_str(indentation_str);\n\n out.push_str(\"]\");\n\n }\n", "file_path": "src/formatting.rs", "rank": 23, "score": 23028.085844420006 }, { "content": " out.push_str(\" }\");\n\n }\n\n }\n\n }\n\n\n\n fn fmt_tall(&self, indentation_str: &mut String, out: &mut String, config: &Config) {\n\n match &self.kind {\n\n NodeKind::Atom(s) => out.push_str(s), // Atoms 
are always formatted as-is\n\n NodeKind::Array(values) => {\n\n // Leading '['\n\n out.push_str(\"[\");\n\n // First value\n\n let mut val_iter = values.iter();\n\n if let Some(first_val) = val_iter.next() {\n\n // Add an extra indent level\n\n for _ in 0..config.indent_width {\n\n indentation_str.push_str(\" \");\n\n }\n\n // Add the first value\n\n out.push_str(\"\\n\");\n", "file_path": "src/formatting.rs", "rank": 24, "score": 23027.814891462323 }, { "content": "use criterion::{black_box, criterion_group, criterion_main, Criterion};\n\n\n\n/// Define a set of benchmark functions, and combine them all into a single `criterion_group!`.\n\nmacro_rules! define_group {\n\n ($grp_name: ident, $( $fn_name: ident => $file_name: literal ),*) => {\n\n criterion_group!($grp_name, $( $fn_name ),*);\n\n\n\n $(\n\n fn $fn_name(c: &mut Criterion) {\n\n c.bench_function($file_name, |b| {\n\n b.iter(|| goldilocks_json_fmt::format(black_box(include_str!($file_name))))\n\n });\n\n }\n\n )*\n\n };\n\n}\n\n\n\ndefine_group!(\n\n benches,\n\n cccbr_methods => \"cccbr-methods.json\",\n\n earthquakes => \"earthquakes.json\",\n\n historical_events => \"historical-events.json\",\n\n meteorites => \"meteorites.json\",\n\n monument_test_results => \"monument-test-results.json\"\n\n);\n\n\n\ncriterion_main!(benches);\n", "file_path": "benches/format.rs", "rank": 25, "score": 23026.748130108965 }, { "content": " NodeKind::Object(fields) => {\n\n // Add an extra indent level\n\n for _ in 0..config.indent_width {\n\n indentation_str.push_str(\" \");\n\n }\n\n // Leading '{'\n\n out.push_str(\"{\");\n\n // Other values\n\n let mut is_first_field = true;\n\n for (key, value) in fields {\n\n // Comma for previous value\n\n if !is_first_field {\n\n out.push_str(\",\");\n\n }\n\n is_first_field = false;\n\n // New line for the next key\n\n out.push_str(\"\\n\");\n\n out.push_str(indentation_str);\n\n // `key: value`\n\n out.push_str(key);\n", "file_path": "src/formatting.rs", "rank": 26, "score": 23024.787392863032 }, { "content": "//! 
Code to parse JSON string into an AST\n\n\n\nuse crate::{Error, Expected, Item, Node, NodeKind, Result};\n\n\n\nimpl<'source> Node<'source> {\n\n /// Parse a [`str`]ing into a JSON node\n\n pub(crate) fn parse(s: &'source str) -> Result<Self> {\n\n let mut iter = Iter::new(s);\n\n // Parse the JSON value as the root node\n\n let ast_root = parse_value(Item::TopLevelValue, &mut iter)?;\n\n // Assert that there's only whitespace until the end of the file\n\n loop {\n\n match iter.next() {\n\n Some((_, ' ' | '\\r' | '\\n' | '\\t')) => continue, // Consume any whitespace\n\n // Anything other than whitespace is an error\n\n Some((idx, c)) => return Err(Error::InvalidTrailingWhitespace(idx, c)),\n\n None => return Ok(ast_root), // EOF with only whitespace is fine\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Attempt to parse a JSON value.\n\n///\n\n/// NOTE: This does not parse trailing whitespace\n", "file_path": "src/parsing.rs", "rank": 35, "score": 17.88437326213429 }, { "content": " Error::InvalidEscape(6, ' '),\n\n ); // fail26.json\n\n check_fail(\n\n r#\"[\"line\n\nbreak\"]\"#,\n\n Error::ControlCharInString(6, '\\n'),\n\n ); // fail27.json\n\n check_fail(\n\n r#\"[\"line\\\n\nbreak\"]\"#,\n\n Error::InvalidEscape(7, '\\n'),\n\n ); // fail28.json\n\n check_fail(r#\"[0e]\"#, Error::EmptyExponent(2)); // fail29.json\n\n\n\n check_fail(r#\"[0e+]\"#, Error::EmptyExponent(2)); // fail30.json\n\n check_fail(r#\"[0e+-1]\"#, Error::InvalidCharInExponent(4, '-')); // fail31.json\n\n check_fail(\n\n r#\"{\"Comma instead if closing brace\": true,\"#,\n\n Error::ExpectedXsFoundEof(Item::Object(0), &[Expected::Key]),\n\n ); // fail32.json\n", "file_path": "src/parsing.rs", "rank": 38, "score": 16.34608880719051 }, { "content": "\n\n /// Gets the byte index of the next char to be popped (or the source's length if no chars\n\n /// are left)\n\n fn peek_idx(&mut self) -> usize {\n\n self.inner\n\n .peek()\n\n .map_or(self.source.len(), |(idx, _c)| *idx)\n\n }\n\n}\n\n\n\nimpl<'source> Iterator for Iter<'source> {\n\n type Item = (usize, char);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.inner.next()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "src/parsing.rs", "rank": 39, "score": 15.317114331962237 }, { "content": " ); // fail22.json\n\n check_fail(\n\n r#\"[\"Bad value\", truth]\"#,\n\n Error::ExpectedXsFoundY(Item::Literal(\"true\"), 17, &[Expected::Char('e')], 't'),\n\n ); // fail23.json\n\n check_fail(\n\n r#\"['single quote']\"#,\n\n Error::ExpectedXsFoundY(\n\n Item::Array(0),\n\n 1,\n\n &[Expected::Value, Expected::Char(']')],\n\n '\\'',\n\n ),\n\n ); // fail24.json\n\n check_fail(\n\n r#\"[\"\ttab\tcharacter\tin\tstring\t\"]\"#,\n\n Error::ControlCharInString(2, '\\t'),\n\n ); // fail25.json\n\n check_fail(\n\n r#\"[\"tab\\ character\\ in\\ string\\ \"]\"#,\n", "file_path": "src/parsing.rs", "rank": 40, "score": 13.641944255475494 }, { "content": " // http://www.json.org/JSON_checker/test.zip).\n\n //\n\n // Note:\n\n // - fail1.json was removed because top-level types other than object or array were allowed\n\n // after RFC7159 (https://www.ietf.org/rfc/rfc7159.txt)\n\n // - fail18.json was removed because JSON doesn't have a depth limit\n\n\n\n check_fail(\n\n r#\"[\"Unclosed array\"\"#,\n\n Error::ExpectedXsFoundEof(Item::Array(0), &[Expected::Char(','), Expected::Char(']')]),\n\n ); // fail2.json\n\n check_fail(\n\n r#\"{unquoted_key: \"keys must be quoted\"}\"#,\n\n Error::ExpectedXsFoundY(\n\n Item::Object(0),\n\n 1,\n\n &[Expected::Key, Expected::Char('}')],\n\n 
'u',\n\n ),\n\n ); // fail3.json\n", "file_path": "src/parsing.rs", "rank": 41, "score": 11.958191042934079 }, { "content": " r#\"{\"Missing colon\" null}\"#,\n\n Error::ExpectedXsFoundY(Item::Object(0), 17, &[Expected::Char(':')], 'n'),\n\n ); // fail19.json\n\n\n\n check_fail(\n\n r#\"{\"Double colon\":: null}\"#,\n\n Error::ExpectedXsFoundY(Item::Object(0), 16, &[Expected::Value], ':'),\n\n ); // fail20.json\n\n check_fail(\n\n r#\"{\"Comma instead of colon\", null}\"#,\n\n Error::ExpectedXsFoundY(Item::Object(0), 25, &[Expected::Char(':')], ','),\n\n ); // fail21.json\n\n check_fail(\n\n r#\"[\"Colon instead of comma\": false]\"#,\n\n Error::ExpectedXsFoundY(\n\n Item::Array(0),\n\n 25,\n\n &[Expected::Char(','), Expected::Char(']')],\n\n ':',\n\n ),\n", "file_path": "src/parsing.rs", "rank": 42, "score": 11.083415762723654 }, { "content": " use super::*;\n\n\n\n #[track_caller]\n\n fn check_fail(s: &str, err: Error) {\n\n assert_eq!(Node::parse(s), Err(err));\n\n }\n\n\n\n #[track_caller]\n\n fn check_atom(s: &str, literal: &str) {\n\n assert_eq!(\n\n Node::parse(s).expect(\"Parsing atom unexpectedly failed\"),\n\n Node {\n\n unsplit_width: literal.len(),\n\n kind: NodeKind::Atom(literal),\n\n }\n\n );\n\n }\n\n\n\n #[track_caller]\n\n fn check_atom_no_ws(s: &str) {\n", "file_path": "src/parsing.rs", "rank": 43, "score": 10.75942234333891 }, { "content": " check_atom(\"\\r \\\"string\\\" \\r\\t \\n \", r#\"\"string\"\"#);\n\n // Control chars in a string aren't allowed\n\n check_fail(\" \\\"\\0\\\" x\", Error::ControlCharInString(5, '\\0'));\n\n check_fail(\" \\\"\\n\\\" x\", Error::ControlCharInString(5, '\\n'));\n\n check_fail(\" \\\"\\t\\\" x\", Error::ControlCharInString(5, '\\t'));\n\n // Check for things in trailing whitespace\n\n check_fail(\n\n r#\" \"string\" x\"#,\n\n Error::InvalidTrailingWhitespace(14, 'x'),\n\n );\n\n }\n\n\n\n #[test]\n\n fn number() {\n\n check_atom_no_ws(\"0\");\n\n check_atom_no_ws(\"-0\");\n\n check_atom(\" 0 \\t\\n \", \"0\");\n\n check_fail(\"02\", Error::LeadingZero(0));\n\n check_fail(\"-02\", Error::LeadingZero(1));\n\n check_atom_no_ws(\"10233415216992347901\");\n", "file_path": "src/parsing.rs", "rank": 44, "score": 10.591067628291858 }, { "content": " ); // fail7.json\n\n check_fail(\n\n r#\"[\"Extra close\"]]\"#,\n\n Error::InvalidTrailingWhitespace(15, ']'),\n\n ); // fail8.json\n\n check_fail(\n\n r#\"{\"Extra comma\": true,}\"#,\n\n Error::ExpectedXsFoundY(Item::Object(0), 21, &[Expected::Key], '}'),\n\n ); // fail9.json\n\n\n\n check_fail(\n\n r#\"{\"Extra value after close\": true} \"misplaced quoted value\"\"#,\n\n Error::InvalidTrailingWhitespace(34, '\"'),\n\n ); // fail10.json\n\n check_fail(\n\n r#\"{\"Illegal expression\": 1 + 2}\"#,\n\n Error::ExpectedXsFoundY(\n\n Item::Object(0),\n\n 25,\n\n &[Expected::Char(','), Expected::Char('}')],\n", "file_path": "src/parsing.rs", "rank": 45, "score": 10.58026469041224 }, { "content": " '+',\n\n ),\n\n ); // fail11.json\n\n check_fail(\n\n r#\"{\"Illegal invocation\": alert()}\"#,\n\n Error::ExpectedXsFoundY(Item::Object(0), 23, &[Expected::Value], 'a'),\n\n ); // fail12.json\n\n check_fail(\n\n r#\"{\"Numbers cannot have leading zeroes\": 013}\"#,\n\n Error::LeadingZero(39),\n\n ); // fail13.json\n\n check_fail(\n\n // TODO: Handle this better. 
We should probably handle this differently because it's\n\n // just after a number\n\n r#\"{\"Numbers cannot be hex\": 0x14}\"#,\n\n Error::ExpectedXsFoundY(\n\n Item::Object(0),\n\n 27,\n\n &[Expected::Char(','), Expected::Char('}')],\n\n 'x',\n", "file_path": "src/parsing.rs", "rank": 46, "score": 10.404857104616566 }, { "content": " check_fail(\n\n r#\"[\"mismatch\"}\"#,\n\n Error::ExpectedXsFoundY(\n\n Item::Array(0),\n\n 11,\n\n &[Expected::Char(','), Expected::Char(']')],\n\n '}',\n\n ),\n\n ); // fail33.json\n\n }\n\n\n\n #[test]\n\n fn json_check_ok() {\n\n check_ok(\n\n r#\"{\n\n \"JSON Test Pattern pass3\": {\n\n \"The outermost value\": \"must be an object or array.\",\n\n \"In this test\": \"It is an object.\"\n\n }\n\n}\n", "file_path": "src/parsing.rs", "rank": 47, "score": 10.31000810417049 }, { "content": " ),\n\n ); // fail14.json\n\n check_fail(\n\n r#\"[\"Illegal backslash escape: \\x15\"]\"#,\n\n Error::InvalidEscape(29, 'x'),\n\n ); // fail15.json\n\n check_fail(\n\n r#\"[\\naked]\"#,\n\n Error::ExpectedXsFoundY(\n\n Item::Array(0),\n\n 1,\n\n &[Expected::Value, Expected::Char(']')],\n\n '\\\\',\n\n ),\n\n ); // fail16.json\n\n check_fail(\n\n r#\"[\"Illegal backslash escape: \\017\"]\"#,\n\n Error::InvalidEscape(29, '0'),\n\n ); // fail17.json\n\n check_fail(\n", "file_path": "src/parsing.rs", "rank": 48, "score": 10.166223634979012 }, { "content": " check_fail(\n\n r#\"[\"extra comma\",]\"#,\n\n Error::ExpectedXsFoundY(Item::Array(0), 15, &[Expected::Value], ']'),\n\n ); // fail4.json\n\n check_fail(\n\n r#\"[\"double extra comma\",,]\"#,\n\n Error::ExpectedXsFoundY(Item::Array(0), 22, &[Expected::Value], ','),\n\n ); // fail5.json\n\n check_fail(\n\n r#\"[ , \"<-- missing value\"]\"#,\n\n Error::ExpectedXsFoundY(\n\n Item::Array(0),\n\n 4,\n\n &[Expected::Value, Expected::Char(']')],\n\n ',',\n\n ),\n\n ); // fail6.json\n\n check_fail(\n\n r#\"[\"Comma after the close\"],\"#,\n\n Error::InvalidTrailingWhitespace(25, ','),\n", "file_path": "src/parsing.rs", "rank": 49, "score": 9.981159783285566 }, { "content": " v => {\n\n return Err(Error::expected_xs_found(\n\n Item::Number,\n\n &[Expected::Digit],\n\n v,\n\n ));\n\n }\n\n },\n\n '0' => parse_number_after_leading_0(start_idx, iter)?,\n\n '1'..='9' => parse_number_after_first_non_zero(start_idx, iter)?,\n\n 'n' => expect_ident!(\"null\" => 'u', 'l', 'l'),\n\n 't' => expect_ident!(\"true\" => 'r', 'u', 'e'),\n\n 'f' => expect_ident!(\"false\" => 'a', 'l', 's', 'e'),\n\n ']' => return Ok(ValueParseResult::CloseSquare(start_idx)),\n\n '}' => return Ok(ValueParseResult::CloseBrace(start_idx)),\n\n _ => return Err(Error::ExpectedXsFoundY(item, start_idx, expected, c)),\n\n };\n\n // If a JSON value was successfully parsed, return that value\n\n return Ok(ValueParseResult::Node(value_node));\n\n }\n\n // If a JSON object was missing when the file ended, then that's an error\n\n Err(Error::ExpectedXsFoundEof(item, &[Expected::Value]))\n\n}\n\n\n", "file_path": "src/parsing.rs", "rank": 50, "score": 9.888814241963756 }, { "content": " let expected: &[Expected] = match fields.len() {\n\n 0 => &[Expected::Key, Expected::Char('}')],\n\n _ => &[Expected::Key],\n\n };\n\n return Err(Error::expected_xs_found(\n\n Item::Object(start_idx),\n\n expected,\n\n v,\n\n ));\n\n }\n\n }\n\n };\n\n // Read whitespace until we find a ':'\n\n loop {\n\n match iter.next() {\n\n Some((_, ' ' | '\\t' | '\\n' | '\\r')) => continue,\n\n Some((_, ':')) => break, // Found ':', parse the value\n\n // Anything other than ':' or whitespace is an 
error\n\n v => {\n\n return Err(Error::expected_xs_found(\n", "file_path": "src/parsing.rs", "rank": 51, "score": 9.83656550019652 }, { "content": " // Invalid escape sequence\n\n Some((bad_escape_idx, bad_escape_char)) => {\n\n return Err(Error::InvalidEscape(bad_escape_idx, bad_escape_char));\n\n }\n\n None => return Err(Error::EofDuringString(start_idx)),\n\n },\n\n // Control chars aren't allowed in strings\n\n '\\0'..='\\x19' => return Err(Error::ControlCharInString(idx, c)),\n\n _ => {} // Any other char is just part of the string\n\n }\n\n }\n\n // If a file ended during a string, then that's an error\n\n Err(Error::EofDuringString(start_idx))\n\n}\n\n\n\n////////////////////\n\n// NUMBER PARSING //\n\n////////////////////\n\n\n", "file_path": "src/parsing.rs", "rank": 53, "score": 9.121679097065005 }, { "content": " check_atom(s, s);\n\n }\n\n\n\n #[track_caller]\n\n fn check_ok(s: &str, exp_node: Node) {\n\n assert_eq!(Node::parse(s), Ok(exp_node));\n\n }\n\n\n\n #[test]\n\n fn unmatched_closing_bracket() {\n\n check_fail(\n\n \"]\",\n\n Error::ExpectedXsFoundY(Item::TopLevelValue, 0, &[Expected::Value], ']'),\n\n );\n\n check_fail(\n\n \"}\",\n\n Error::ExpectedXsFoundY(Item::TopLevelValue, 0, &[Expected::Value], '}'),\n\n );\n\n check_fail(\n\n \"} \",\n", "file_path": "src/parsing.rs", "rank": 54, "score": 8.66794890349522 }, { "content": " unsplit_width,\n\n kind: NodeKind::Object(fields),\n\n });\n\n }\n\n v => {\n\n // Anything except whitespace, ',' or '}' is an error\n\n return Err(Error::expected_xs_found(\n\n Item::Object(start_idx),\n\n &[Expected::Char(','), Expected::Char('}')],\n\n v,\n\n ));\n\n }\n\n }\n\n }\n\n unsplit_width += \", \".len(); // Add space required by the comma\n\n }\n\n}\n\n\n", "file_path": "src/parsing.rs", "rank": 55, "score": 8.128058164854934 }, { "content": " });\n\n }\n\n // If ',', parse another element and repeat\n\n Some((_, ',')) => {\n\n let n = parse_value(Item::Array(start_idx), iter)?;\n\n unsplit_width += \", \".len() + n.unsplit_width;\n\n contents.push(n);\n\n }\n\n // Anything except whitespace, ',' or ']' is an error\n\n v => {\n\n return Err(Error::expected_xs_found(\n\n Item::Array(start_idx),\n\n &[Expected::Char(','), Expected::Char(']')],\n\n v,\n\n ));\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/parsing.rs", "rank": 56, "score": 7.91619691165255 }, { "content": " fn string() {\n\n check_atom_no_ws(r#\"\"\"\"#);\n\n // Escape sequences\n\n check_atom_no_ws(r#\"\"thing \\\" thing\"\"#);\n\n check_atom_no_ws(r#\"\"thing \\\\ thing\"\"#);\n\n check_atom_no_ws(r#\"\"thing \\\\\"\"#);\n\n check_atom_no_ws(r#\"\"thing \\\" \\\\\"\"#);\n\n check_atom_no_ws(r#\"\"thing \\/ thing\"\"#);\n\n check_atom_no_ws(r#\"\"thing \\uAFFF thing\"\"#);\n\n check_atom_no_ws(r#\"\"thing \\u01aF thing\"\"#);\n\n // Invalid escape\n\n check_fail(r#\"\"thing \\x thing\"\"#, Error::InvalidEscape(8, 'x'));\n\n check_fail(\n\n r#\"\"thing \\uAFXF thing\"\"#,\n\n Error::InvalidHexEscape(7, 11, 'X'),\n\n );\n\n // Leading/trailing whitespace\n\n check_atom(\"\\r \\\"\\\"\", r#\"\"\"\"#);\n\n check_atom(\"\\\"\\\" \\r\\t \\n \", r#\"\"\"\"#);\n\n check_atom(\"\\r \\\"\\\" \\r\\t \\n \", r#\"\"\"\"#);\n", "file_path": "src/parsing.rs", "rank": 57, "score": 7.215146966114013 }, { "content": " Error::ExpectedXsFoundY(Item::TopLevelValue, 0, &[Expected::Value], '}'),\n\n );\n\n check_fail(\n\n \" \\t\\n} \",\n\n Error::ExpectedXsFoundY(Item::TopLevelValue, 4, &[Expected::Value], '}'),\n\n );\n\n }\n\n\n\n #[test]\n\n fn literal() {\n\n 
check_atom_no_ws(\"true\");\n\n check_atom_no_ws(\"false\");\n\n check_atom_no_ws(\"null\");\n\n check_atom(\" null\", \"null\");\n\n check_atom(\" null\\t\\n \", \"null\");\n\n // Check for things in trailing whitespace\n\n check_fail(\" null x\", Error::InvalidTrailingWhitespace(10, 'x'));\n\n }\n\n\n\n #[test]\n", "file_path": "src/parsing.rs", "rank": 58, "score": 6.95580192706305 }, { "content": " kind: NodeKind::Array(vec![]),\n\n },\n\n );\n\n check_ok(\n\n \" [ ]\\r\\t \\n\",\n\n Node {\n\n unsplit_width: 2,\n\n kind: NodeKind::Array(vec![]),\n\n },\n\n );\n\n check_fail(\n\n \" [ }\\r\\t \\n\",\n\n Error::ExpectedXsFoundY(\n\n Item::Array(4),\n\n 7,\n\n &[Expected::Value, Expected::Char(']')],\n\n '}',\n\n ),\n\n );\n\n check_ok(\n", "file_path": "src/parsing.rs", "rank": 59, "score": 6.899160120365945 }, { "content": " // Construct and return an atom node\n\n let len = $ident_name.len();\n\n let sub_str = &iter.source[start_idx..start_idx + len];\n\n Node {\n\n unsplit_width: len,\n\n kind: NodeKind::Atom(sub_str),\n\n }\n\n }};\n\n }\n\n\n\n let value_node = match c {\n\n ' ' | '\\t' | '\\n' | '\\r' => continue, // Ignore whitespace\n\n '[' => parse_array(start_idx, iter)?,\n\n '{' => parse_object(start_idx, iter)?,\n\n '\"' => Node::new_atom(parse_string(start_idx, iter)?),\n\n // If a '-' is reached, it must be followed by a digit then a number without\n\n // the leading digit\n\n '-' => match iter.next() {\n\n Some((_, '0')) => parse_number_after_leading_0(start_idx, iter)?,\n\n Some((_, '1'..='9')) => parse_number_after_first_non_zero(start_idx, iter)?,\n", "file_path": "src/parsing.rs", "rank": 60, "score": 4.900487642620805 }, { "content": " Item::Object(start_idx),\n\n &[Expected::Char(':')],\n\n v,\n\n ));\n\n }\n\n }\n\n }\n\n // Parse the contained value\n\n let value = parse_value(Item::Object(start_idx), iter)?;\n\n // Add the field we just parsed\n\n unsplit_width += key.len() + \": \".len() + value.unsplit_width;\n\n fields.push((key, value));\n\n // Consume either ',' (parse next key/value pair) or '}' (end object)\n\n loop {\n\n match iter.next() {\n\n Some((_, ' ' | '\\t' | '\\n' | '\\r')) => continue,\n\n Some((_, ',')) => break, // if ',', parse the next key/value pair\n\n Some((_, '}')) => {\n\n // if '}', the object is finished\n\n return Ok(Node {\n", "file_path": "src/parsing.rs", "rank": 61, "score": 4.864100584355408 }, { "content": " _ => match has_at_least_one_digit {\n\n true => return Ok(iter.new_atom_starting_from(start_idx)),\n\n // `- 1` steps backwards over the `E` or `e` (both of which occupy 1 byte in UTF-8)\n\n false => return Err(Error::EmptyExponent(exponent_idx)),\n\n },\n\n };\n\n has_at_least_one_digit = true;\n\n }\n\n}\n\n\n\n//////////////\n\n// ITERATOR //\n\n//////////////\n\n\n", "file_path": "src/parsing.rs", "rank": 62, "score": 4.848614435147608 }, { "content": " ),\n\n ]),\n\n };\n\n check_ok(\n\n r#\"{\"key\": \"value\", \"key2\": [{ \"is_open\": true }, { \"is_open\": false }, null ] }\"#,\n\n phat_node.clone(),\n\n );\n\n check_ok(\n\n \" {\\t \\t \\n\\n \\\"key\\\" \\t:\\n\\n \\\"value\\\" \\t\\n\n\n \\t\\t\\r,\n\n \\\"key2\\\": [ { \\\"is_open\\\": true }, { \\\"is_open\\\" \\t: false }, null ]\n\n } \\t\\t\\n \",\n\n phat_node,\n\n );\n\n }\n\n\n\n #[test]\n\n fn json_check_fail() {\n\n // These tests are taken from JSON_checker's test suite, found here:\n\n // http://www.json.org/JSON_checker/ (the tests themselves are here:\n", "file_path": "src/parsing.rs", "rank": 63, "score": 4.759470117709526 }, { "content": " 
r#\"[\n\n \"JSON Test Pattern pass1\",\n\n {\"object with 1 member\":[\"array with 1 element\"]},\n\n {},\n\n [],\n\n -42,\n\n true,\n\n false,\n\n null,\n\n {\n\n \"integer\": 1234567890,\n\n \"real\": -9876.543210,\n\n \"e\": 0.123456789e-12,\n\n \"E\": 1.234567890E+34,\n\n \"\": 23456789012E66,\n\n \"zero\": 0,\n\n \"one\": 1,\n\n \"space\": \" \",\n\n \"quote\": \"\\\"\",\n\n \"backslash\": \"\\\\\",\n", "file_path": "src/parsing.rs", "rank": 64, "score": 4.610934828850834 }, { "content": "\n\n4 , 5 , 6 ,7 ],\"compact\":[1,2,3,4,5,6,7],\n\n \"jsontext\": \"{\\\"object with 1 member\\\":[\\\"array with 1 element\\\"]}\",\n\n \"quotes\": \"&#34; \\u0022 %22 0x22 034 &#x22;\",\n\n \"\\/\\\\\\\"\\uCAFE\\uBABE\\uAB98\\uFCDE\\ubcda\\uef4A\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?\"\n\n: \"A key can be any string\"\n\n },\n\n 0.5 ,98.6\n\n,\n\n99.44\n\n,\n\n\n\n1066,\n\n1e1,\n\n0.1e1,\n\n1e-1,\n\n1e00,2e+00,2e-00\n\n,\"rosebud\"]\"#,\n\n )\n\n .is_ok()); // pass1.json\n\n }\n\n}\n", "file_path": "src/parsing.rs", "rank": 65, "score": 4.13964991805003 }, { "content": " \" [ true ]\\r\\t \\n\",\n\n Node {\n\n unsplit_width: 6,\n\n kind: NodeKind::Array(vec![Node::new_atom(\"true\")]),\n\n },\n\n );\n\n check_fail(\n\n \" [ true, ]\\r\\t \\n\",\n\n Error::ExpectedXsFoundY(Item::Array(4), 12, &[Expected::Value], ']'),\n\n );\n\n check_ok(\n\n \" [ true, false ]\\r\\t \\n\",\n\n Node {\n\n unsplit_width: 13,\n\n kind: NodeKind::Array(vec![Node::new_atom(\"true\"), Node::new_atom(\"false\")]),\n\n },\n\n );\n\n check_ok(\n\n \" [ true, [\\n\\nfalse, []] ]\\r\\t \\n\",\n\n Node {\n", "file_path": "src/parsing.rs", "rank": 66, "score": 3.8779357368271796 }, { "content": " '}',\n\n ))\n\n }\n\n // We parsed a value, so parse the rest of the array\n\n ValueParseResult::Node(n) => n,\n\n };\n\n\n\n let mut unsplit_width = \"[\".len() + first_value.unsplit_width + \"]\".len();\n\n let mut contents = vec![first_value];\n\n // Repeatedly expect either:\n\n // - ',' followed by another element, or\n\n // - ']', finishing the array\n\n loop {\n\n match iter.next() {\n\n Some((_, ' ' | '\\t' | '\\n' | '\\r')) => continue, // Ignore whitespace\n\n // If ']', finish the array\n\n Some((_, ']')) => {\n\n return Ok(Node {\n\n unsplit_width,\n\n kind: NodeKind::Array(contents),\n", "file_path": "src/parsing.rs", "rank": 67, "score": 3.4081924808648236 }, { "content": " check_atom_no_ws(\"0.2\");\n\n check_fail(\"0.2.3\", Error::SecondDecimalPoint(3));\n\n check_atom_no_ws(\"-0.00002\");\n\n check_atom_no_ws(\"0.0200000\");\n\n check_atom_no_ws(\"0.02e1\");\n\n check_atom_no_ws(\"0.02E-1201\");\n\n check_atom_no_ws(\"0.02e-1201\");\n\n check_atom_no_ws(\"0.02e+01201\");\n\n check_fail(\"0.02e\", Error::EmptyExponent(4));\n\n check_fail(\"0.02e-\", Error::EmptyExponent(4));\n\n check_atom_no_ws(\"0e-01\"); // Leading 0s in exponents is apparently allowed?\n\n check_atom_no_ws(\"0e01\"); // Leading 0s in exponents is apparently allowed?\n\n }\n\n\n\n #[test]\n\n fn array() {\n\n check_ok(\n\n \"[]\",\n\n Node {\n\n unsplit_width: 2,\n", "file_path": "src/parsing.rs", "rank": 68, "score": 2.4770750551326994 }, { "content": "\"#,\n\n Node {\n\n unsplit_width: 123,\n\n kind: NodeKind::Object(vec![(\n\n \"\\\"JSON Test Pattern pass3\\\"\",\n\n Node {\n\n unsplit_width: 92,\n\n kind: NodeKind::Object(vec![\n\n (\n\n \"\\\"The outermost value\\\"\",\n\n Node::new_atom(\"\\\"must be an object or array.\\\"\"),\n\n ),\n\n (\"\\\"In this test\\\"\", Node::new_atom(\"\\\"It is an object.\\\"\")),\n\n ]),\n\n },\n\n 
)]),\n\n },\n\n ); // pass3.json\n\n\n\n assert!(Node::parse(\n", "file_path": "src/parsing.rs", "rank": 69, "score": 2.42002309746786 }, { "content": " \"controls\": \"\\b\\f\\n\\r\\t\",\n\n \"slash\": \"/ & \\/\",\n\n \"alpha\": \"abcdefghijklmnopqrstuvwyz\",\n\n \"ALPHA\": \"ABCDEFGHIJKLMNOPQRSTUVWYZ\",\n\n \"digit\": \"0123456789\",\n\n \"0123456789\": \"digit\",\n\n \"special\": \"`1~!@#$%^&*()_+-={':[,]}|;.</>?\",\n\n \"hex\": \"\\u0123\\u4567\\u89AB\\uCDEF\\uabcd\\uef4A\",\n\n \"true\": true,\n\n \"false\": false,\n\n \"null\": null,\n\n \"array\":[ ],\n\n \"object\":{ },\n\n \"address\": \"50 St. James Street\",\n\n \"url\": \"http://www.JSON.org/\",\n\n \"comment\": \"// /* <!-- --\",\n\n \"\\\\# -- --> */\\n \": \" \",\n\n \" s p a c e d \" :[1,2 , 3\n\n\n\n,\n", "file_path": "src/parsing.rs", "rank": 70, "score": 1.827531545586861 } ]
Rust
crates/wasi-common/cap-std-sync/src/dir.rs
dheaton-arm/wasmtime
86611d3bbc92b781ed136dcda7cdba9ec2c1cbee
use crate::file::{filetype_from, File}; use cap_fs_ext::{DirEntryExt, DirExt, MetadataExt, SystemTimeSpec}; use std::any::Any; use std::path::{Path, PathBuf}; use system_interface::fs::GetSetFdFlags; use wasi_common::{ dir::{ReaddirCursor, ReaddirEntity, WasiDir}, file::{FdFlags, FileType, Filestat, OFlags, WasiFile}, Error, ErrorExt, }; pub struct Dir(cap_std::fs::Dir); impl Dir { pub fn from_cap_std(dir: cap_std::fs::Dir) -> Self { Dir(dir) } pub fn open_file_( &self, symlink_follow: bool, path: &str, oflags: OFlags, read: bool, write: bool, fdflags: FdFlags, ) -> Result<File, Error> { use cap_fs_ext::{FollowSymlinks, OpenOptionsFollowExt}; let mut opts = cap_std::fs::OpenOptions::new(); if oflags.contains(OFlags::CREATE | OFlags::EXCLUSIVE) { opts.create_new(true); opts.write(true); } else if oflags.contains(OFlags::CREATE) { opts.create(true); opts.write(true); } if oflags.contains(OFlags::TRUNCATE) { opts.truncate(true); } if read { opts.read(true); } if write { opts.write(true); } else { opts.read(true); } if fdflags.contains(FdFlags::APPEND) { opts.append(true); } if symlink_follow { opts.follow(FollowSymlinks::Yes); } else { opts.follow(FollowSymlinks::No); } if fdflags.intersects( wasi_common::file::FdFlags::DSYNC | wasi_common::file::FdFlags::SYNC | wasi_common::file::FdFlags::RSYNC, ) { return Err(Error::not_supported().context("SYNC family of FdFlags")); } let mut f = self.0.open_with(Path::new(path), &opts)?; if fdflags.contains(wasi_common::file::FdFlags::NONBLOCK) { let set_fd_flags = f.new_set_fd_flags(system_interface::fs::FdFlags::NONBLOCK)?; f.set_fd_flags(set_fd_flags)?; } Ok(File::from_cap_std(f)) } pub fn open_dir_(&self, symlink_follow: bool, path: &str) -> Result<Self, Error> { let d = if symlink_follow { self.0.open_dir(Path::new(path))? } else { self.0.open_dir_nofollow(Path::new(path))? 
}; Ok(Dir::from_cap_std(d)) } pub fn rename_(&self, src_path: &str, dest_dir: &Self, dest_path: &str) -> Result<(), Error> { self.0 .rename(Path::new(src_path), &dest_dir.0, Path::new(dest_path))?; Ok(()) } pub fn hard_link_( &self, src_path: &str, target_dir: &Self, target_path: &str, ) -> Result<(), Error> { let src_path = Path::new(src_path); let target_path = Path::new(target_path); self.0.hard_link(src_path, &target_dir.0, target_path)?; Ok(()) } } #[async_trait::async_trait] impl WasiDir for Dir { fn as_any(&self) -> &dyn Any { self } async fn open_file( &self, symlink_follow: bool, path: &str, oflags: OFlags, read: bool, write: bool, fdflags: FdFlags, ) -> Result<Box<dyn WasiFile>, Error> { let f = self.open_file_(symlink_follow, path, oflags, read, write, fdflags)?; Ok(Box::new(f)) } async fn open_dir(&self, symlink_follow: bool, path: &str) -> Result<Box<dyn WasiDir>, Error> { let d = self.open_dir_(symlink_follow, path)?; Ok(Box::new(d)) } async fn create_dir(&self, path: &str) -> Result<(), Error> { self.0.create_dir(Path::new(path))?; Ok(()) } async fn readdir( &self, cursor: ReaddirCursor, ) -> Result<Box<dyn Iterator<Item = Result<ReaddirEntity, Error>> + Send>, Error> { let dir_meta = self.0.dir_metadata()?; let rd = vec![ { let name = ".".to_owned(); Ok((FileType::Directory, dir_meta.ino(), name)) }, { let name = "..".to_owned(); Ok((FileType::Directory, dir_meta.ino(), name)) }, ] .into_iter() .chain({ let entries = self.0.entries()?.map(|entry| { let entry = entry?; let meta = entry.full_metadata()?; let inode = meta.ino(); let filetype = filetype_from(&meta.file_type()); let name = entry .file_name() .into_string() .map_err(|_| Error::illegal_byte_sequence().context("filename"))?; Ok((filetype, inode, name)) }); #[cfg(windows)] let entries = entries.filter(|entry: &Result<_, wasi_common::Error>| { use winapi::shared::winerror::{ERROR_ACCESS_DENIED, ERROR_SHARING_VIOLATION}; if let Err(err) = entry { if let Some(err) = err.downcast_ref::<std::io::Error>() { if err.raw_os_error() == Some(ERROR_SHARING_VIOLATION as i32) || err.raw_os_error() == Some(ERROR_ACCESS_DENIED as i32) { return false; } } } true }); entries }) .enumerate() .map(|(ix, r)| match r { Ok((filetype, inode, name)) => Ok(ReaddirEntity { next: ReaddirCursor::from(ix as u64 + 1), filetype, inode, name, }), Err(e) => Err(e), }) .skip(u64::from(cursor) as usize); Ok(Box::new(rd)) } async fn symlink(&self, src_path: &str, dest_path: &str) -> Result<(), Error> { self.0.symlink(src_path, dest_path)?; Ok(()) } async fn remove_dir(&self, path: &str) -> Result<(), Error> { self.0.remove_dir(Path::new(path))?; Ok(()) } async fn unlink_file(&self, path: &str) -> Result<(), Error> { self.0.remove_file_or_symlink(Path::new(path))?; Ok(()) } async fn read_link(&self, path: &str) -> Result<PathBuf, Error> { let link = self.0.read_link(Path::new(path))?; Ok(link) } async fn get_filestat(&self) -> Result<Filestat, Error> { let meta = self.0.dir_metadata()?; Ok(Filestat { device_id: meta.dev(), inode: meta.ino(), filetype: filetype_from(&meta.file_type()), nlink: meta.nlink(), size: meta.len(), atim: meta.accessed().map(|t| Some(t.into_std())).unwrap_or(None), mtim: meta.modified().map(|t| Some(t.into_std())).unwrap_or(None), ctim: meta.created().map(|t| Some(t.into_std())).unwrap_or(None), }) } async fn get_path_filestat( &self, path: &str, follow_symlinks: bool, ) -> Result<Filestat, Error> { let meta = if follow_symlinks { self.0.metadata(Path::new(path))? } else { self.0.symlink_metadata(Path::new(path))? 
}; Ok(Filestat { device_id: meta.dev(), inode: meta.ino(), filetype: filetype_from(&meta.file_type()), nlink: meta.nlink(), size: meta.len(), atim: meta.accessed().map(|t| Some(t.into_std())).unwrap_or(None), mtim: meta.modified().map(|t| Some(t.into_std())).unwrap_or(None), ctim: meta.created().map(|t| Some(t.into_std())).unwrap_or(None), }) } async fn rename( &self, src_path: &str, dest_dir: &dyn WasiDir, dest_path: &str, ) -> Result<(), Error> { let dest_dir = dest_dir .as_any() .downcast_ref::<Self>() .ok_or(Error::badf().context("failed downcast to cap-std Dir"))?; self.rename_(src_path, dest_dir, dest_path) } async fn hard_link( &self, src_path: &str, target_dir: &dyn WasiDir, target_path: &str, ) -> Result<(), Error> { let target_dir = target_dir .as_any() .downcast_ref::<Self>() .ok_or(Error::badf().context("failed downcast to cap-std Dir"))?; self.hard_link_(src_path, target_dir, target_path) } async fn set_times( &self, path: &str, atime: Option<wasi_common::SystemTimeSpec>, mtime: Option<wasi_common::SystemTimeSpec>, follow_symlinks: bool, ) -> Result<(), Error> { if follow_symlinks { self.0.set_times( Path::new(path), convert_systimespec(atime), convert_systimespec(mtime), )?; } else { self.0.set_symlink_times( Path::new(path), convert_systimespec(atime), convert_systimespec(mtime), )?; } Ok(()) } } fn convert_systimespec(t: Option<wasi_common::SystemTimeSpec>) -> Option<SystemTimeSpec> { match t { Some(wasi_common::SystemTimeSpec::Absolute(t)) => Some(SystemTimeSpec::Absolute(t)), Some(wasi_common::SystemTimeSpec::SymbolicNow) => Some(SystemTimeSpec::SymbolicNow), None => None, } } #[cfg(test)] mod test { use super::Dir; use cap_std::ambient_authority; #[test] fn scratch_dir() { let tempdir = tempfile::Builder::new() .prefix("cap-std-sync") .tempdir() .expect("create temporary dir"); let preopen_dir = cap_std::fs::Dir::open_ambient_dir(tempdir.path(), ambient_authority()) .expect("open ambient temporary dir"); let preopen_dir = Dir::from_cap_std(preopen_dir); run(wasi_common::WasiDir::open_dir(&preopen_dir, false, ".")) .expect("open the same directory via WasiDir abstraction"); } #[cfg(not(windows))] #[test] fn readdir() { use std::collections::HashMap; use wasi_common::dir::{ReaddirCursor, ReaddirEntity, WasiDir}; use wasi_common::file::{FdFlags, FileType, OFlags}; fn readdir_into_map(dir: &dyn WasiDir) -> HashMap<String, ReaddirEntity> { let mut out = HashMap::new(); for readdir_result in run(dir.readdir(ReaddirCursor::from(0))).expect("readdir succeeds") { let entity = readdir_result.expect("readdir entry is valid"); out.insert(entity.name.clone(), entity); } out } let tempdir = tempfile::Builder::new() .prefix("cap-std-sync") .tempdir() .expect("create temporary dir"); let preopen_dir = cap_std::fs::Dir::open_ambient_dir(tempdir.path(), ambient_authority()) .expect("open ambient temporary dir"); let preopen_dir = Dir::from_cap_std(preopen_dir); let entities = readdir_into_map(&preopen_dir); assert_eq!( entities.len(), 2, "should just be . and .. in empty dir: {:?}", entities ); assert!(entities.get(".").is_some()); assert!(entities.get("..").is_some()); run(preopen_dir.open_file( false, "file1", OFlags::CREATE, true, false, FdFlags::empty(), )) .expect("create file1"); let entities = readdir_into_map(&preopen_dir); assert_eq!(entities.len(), 3, "should be ., .., file1 {:?}", entities); assert_eq!( entities.get(".").expect(". entry").filetype, FileType::Directory ); assert_eq!( entities.get("..").expect(".. 
entry").filetype, FileType::Directory ); assert_eq!( entities.get("file1").expect("file1 entry").filetype, FileType::RegularFile ); } fn run<F: std::future::Future>(future: F) -> F::Output { use std::pin::Pin; use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker}; let mut f = Pin::from(Box::new(future)); let waker = dummy_waker(); let mut cx = Context::from_waker(&waker); match f.as_mut().poll(&mut cx) { Poll::Ready(val) => return val, Poll::Pending => { panic!("Cannot wait on pending future: must enable wiggle \"async\" future and execute on an async Store") } } fn dummy_waker() -> Waker { return unsafe { Waker::from_raw(clone(5 as *const _)) }; unsafe fn clone(ptr: *const ()) -> RawWaker { assert_eq!(ptr as usize, 5); const VTABLE: RawWakerVTable = RawWakerVTable::new(clone, wake, wake_by_ref, drop); RawWaker::new(ptr, &VTABLE) } unsafe fn wake(ptr: *const ()) { assert_eq!(ptr as usize, 5); } unsafe fn wake_by_ref(ptr: *const ()) { assert_eq!(ptr as usize, 5); } unsafe fn drop(ptr: *const ()) { assert_eq!(ptr as usize, 5); } } } }
use crate::file::{filetype_from, File}; use cap_fs_ext::{DirEntryExt, DirExt, MetadataExt, SystemTimeSpec}; use std::any::Any; use std::path::{Path, PathBuf}; use system_interface::fs::GetSetFdFlags; use wasi_common::{ dir::{ReaddirCursor, ReaddirEntity, WasiDir}, file::{FdFlags, FileType, Filestat, OFlags, WasiFile}, Error, ErrorExt, }; pub struct Dir(cap_std::fs::Dir); impl Dir { pub fn from_cap_std(dir: cap_std::fs::Dir) -> Self { Dir(dir) } pub fn open_file_( &self, symlink_follow: bool, path: &str, oflags: OFlags, read: bool, write: bool, fdflags: FdFlags, ) -> Result<File, Error> { use cap_fs_ext::{FollowSymlinks, OpenOptionsFollowExt}; let mut opts = cap_std::fs::OpenOptions::new(); if oflags.contains(OFlags::CREATE | OFlags::EXCLUSIVE) { opts.create_new(true); opts.write(true); } else if oflags.contains(OFlags::CREATE) { opts.create(true); opts.write(true); } if oflags.contains(OFlags::TRUNCATE) { opts.truncate(true); } if read { opts.read(true); } if write { opts.write(true); } else { opts.read(true); } if fdflags.contains(FdFlags::APPEND) { opts.append(true); } if symlink_follow { opts.follow(FollowSymlinks::Yes); } else { opts.follow(FollowSymlinks::No); } if fdflags.intersects( wasi_common::file::FdFlags::DSYNC | wasi_common::file::FdFlags::SYNC | wasi_common::file::FdFlags::RSYNC, ) { return Err(Error::not_supported().context("SYNC family of FdFlags")); } let mut f = self.0.open_with(Path::new(path), &opts)?; if fdflags.contains(wasi_common::file::FdFlags::NONBLOCK) { let set_fd_flags = f.new_set_fd_flags(system_interface::fs::FdFlags::NONBLOCK)?; f.set_fd_flags(set_fd_flags)?; } Ok(File::from_cap_std(f)) } pub fn open_dir_(&self, symlink_follow: bool, path: &str) -> Result<Self, Error> { let d = if symlink_follow { self.0.open_dir(Path::new(path))? } else { self.0.open_dir_nofollow(Path::new(path))? 
}; Ok(Dir::from_cap_std(d)) } pub fn rename_(&self, src_path: &str, dest_dir: &Self, dest_path: &str) -> Result<(), Error> { self.0 .rename(Path::new(src_path), &dest_dir.0, Path::new(dest_path))?; Ok(()) } pub fn hard_link_( &self, src_path: &str, target_dir: &Self, target_path: &str, ) -> Result<(), Error> { let src_path = Path::new(src_path); let target_path = Path::new(target_path); self.0.hard_link(src_path, &target_dir.0, target_path)?; Ok(()) } } #[async_trait::async_trait] impl WasiDir for Dir { fn as_any(&self) -> &dyn Any { self } async fn open_file( &self, symlink_follow: bool, path: &str, oflags: OFlags, read: bool, write: bool, fdflags: FdFlags, ) -> Result<Box<dyn WasiFile>, Error> { let f = self.open_file_(symlink_follow, path, oflags, read, write, fdflags)?; Ok(Box::new(f)) } async fn open_dir(&self, symlink_follow: bool, path: &str) -> Result<Box<dyn WasiDir>, Error> { let d = self.open_dir_(symlink_follow, path)?; Ok(Box::new(d)) } async fn create_dir(&self, path: &str) -> Result<(), Error> { self.0.create_dir(Path::new(path))?; Ok(()) } async fn readdir( &self, cursor: ReaddirCursor, ) -> Result<Box<dyn Iterator<Item = Result<ReaddirEntity, Error>> + Send>, Error> { let dir_meta = self.0.dir_metadata()?; let rd = vec![ { let name = ".".to_owned(); Ok((FileType::Directory, dir_meta.ino(), name)) }, { let name = "..".to_owned(); Ok((FileType::Directory, dir_meta.ino(), name)) }, ] .into_iter() .chain({ let entries = self.0.entries()?.map(|entry| { let entry = entry?; let meta = entry.full_metadata()?; let inode = meta.ino(); let filetype = filetype_from(&meta.file_type()); let name = entry .file_name() .into_string() .map_err(|_| Error::illegal_byte_sequence().context("filename"))?; Ok((filetype, inode, name)) }); #[cfg(windows)] let entries = entries.filter(|entry: &Result<_, wasi_common::Error>| { use winapi::shared::winerror::{ERROR_ACCESS_DENIED, ERROR_SHARING_VIOLATION}; if let Err(err) = entry { if let Some(err) = err.downcast_ref::<std::io::Error>() { if err.raw_os_error() == Some(ERROR_SHARING_VIOLATION as i32) || err.raw_os_error() == Some(ERROR_ACCESS_DENIED as i32) { return false; } } } true }); entries }) .enumerate() .map(|(ix, r)| match r { Ok((filetype, inode, name)) => Ok(ReaddirEntity { next: ReaddirCursor::from(ix as u64 + 1), filetype, inode, name, }), Err(e) => Err(e), }) .skip(u64::from(cursor) as usize); Ok(Box::new(rd)) } async fn symlink(&self, src_path: &str, dest_path: &str) -> Result<(), Error> { self.0.symlink(src_path, dest_path)?; Ok(()) } async fn remove_dir(&self, path: &str) -> Result<(), Error> { self.0.remove_dir(Path::new(path))?; Ok(()) } async fn unlink_file(&self, path: &str) -> Result<(), Error> { self.0.remove_file_or_symlink(Path::new(path))?; Ok(()) } async fn read_link(&self, path: &str) -> Result<PathBuf, Error> { let link = self.0.read_link(Path::new(path))?; Ok(link) } async fn get_filestat(&self) -> Result<Filestat, Error> { let meta = self.0.dir_metadata()?; Ok(Filestat { device_id: meta.dev(), inode: meta.ino(), filetype: filetype_from(&meta.file_type()), nlink: meta.nlink(), size: meta.len(), atim: meta.accessed().map(|t| Some(t.into_std())).unwrap_or(None), mtim: meta.modified().map(|t| Some(t.into_std())).unwrap_or(None), ctim: meta.created().map(|t| Some(t.into_std())).unwrap_or(None), }) } async fn get_path_filestat( &self, path: &str, follow_symlinks: bool, ) -> Result<Filestat, Error> { let meta = if follow_symlinks { self.0.metadata(Path::new(path))? } else { self.0.symlink_metadata(Path::new(path))? };
} async fn rename( &self, src_path: &str, dest_dir: &dyn WasiDir, dest_path: &str, ) -> Result<(), Error> { let dest_dir = dest_dir .as_any() .downcast_ref::<Self>() .ok_or(Error::badf().context("failed downcast to cap-std Dir"))?; self.rename_(src_path, dest_dir, dest_path) } async fn hard_link( &self, src_path: &str, target_dir: &dyn WasiDir, target_path: &str, ) -> Result<(), Error> { let target_dir = target_dir .as_any() .downcast_ref::<Self>() .ok_or(Error::badf().context("failed downcast to cap-std Dir"))?; self.hard_link_(src_path, target_dir, target_path) } async fn set_times( &self, path: &str, atime: Option<wasi_common::SystemTimeSpec>, mtime: Option<wasi_common::SystemTimeSpec>, follow_symlinks: bool, ) -> Result<(), Error> { if follow_symlinks { self.0.set_times( Path::new(path), convert_systimespec(atime), convert_systimespec(mtime), )?; } else { self.0.set_symlink_times( Path::new(path), convert_systimespec(atime), convert_systimespec(mtime), )?; } Ok(()) } } fn convert_systimespec(t: Option<wasi_common::SystemTimeSpec>) -> Option<SystemTimeSpec> { match t { Some(wasi_common::SystemTimeSpec::Absolute(t)) => Some(SystemTimeSpec::Absolute(t)), Some(wasi_common::SystemTimeSpec::SymbolicNow) => Some(SystemTimeSpec::SymbolicNow), None => None, } } #[cfg(test)] mod test { use super::Dir; use cap_std::ambient_authority; #[test] fn scratch_dir() { let tempdir = tempfile::Builder::new() .prefix("cap-std-sync") .tempdir() .expect("create temporary dir"); let preopen_dir = cap_std::fs::Dir::open_ambient_dir(tempdir.path(), ambient_authority()) .expect("open ambient temporary dir"); let preopen_dir = Dir::from_cap_std(preopen_dir); run(wasi_common::WasiDir::open_dir(&preopen_dir, false, ".")) .expect("open the same directory via WasiDir abstraction"); } #[cfg(not(windows))] #[test] fn readdir() { use std::collections::HashMap; use wasi_common::dir::{ReaddirCursor, ReaddirEntity, WasiDir}; use wasi_common::file::{FdFlags, FileType, OFlags}; fn readdir_into_map(dir: &dyn WasiDir) -> HashMap<String, ReaddirEntity> { let mut out = HashMap::new(); for readdir_result in run(dir.readdir(ReaddirCursor::from(0))).expect("readdir succeeds") { let entity = readdir_result.expect("readdir entry is valid"); out.insert(entity.name.clone(), entity); } out } let tempdir = tempfile::Builder::new() .prefix("cap-std-sync") .tempdir() .expect("create temporary dir"); let preopen_dir = cap_std::fs::Dir::open_ambient_dir(tempdir.path(), ambient_authority()) .expect("open ambient temporary dir"); let preopen_dir = Dir::from_cap_std(preopen_dir); let entities = readdir_into_map(&preopen_dir); assert_eq!( entities.len(), 2, "should just be . and .. in empty dir: {:?}", entities ); assert!(entities.get(".").is_some()); assert!(entities.get("..").is_some()); run(preopen_dir.open_file( false, "file1", OFlags::CREATE, true, false, FdFlags::empty(), )) .expect("create file1"); let entities = readdir_into_map(&preopen_dir); assert_eq!(entities.len(), 3, "should be ., .., file1 {:?}", entities); assert_eq!( entities.get(".").expect(". entry").filetype, FileType::Directory ); assert_eq!( entities.get("..").expect(".. 
entry").filetype, FileType::Directory ); assert_eq!( entities.get("file1").expect("file1 entry").filetype, FileType::RegularFile ); } fn run<F: std::future::Future>(future: F) -> F::Output { use std::pin::Pin; use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker}; let mut f = Pin::from(Box::new(future)); let waker = dummy_waker(); let mut cx = Context::from_waker(&waker); match f.as_mut().poll(&mut cx) { Poll::Ready(val) => return val, Poll::Pending => { panic!("Cannot wait on pending future: must enable wiggle \"async\" future and execute on an async Store") } } fn dummy_waker() -> Waker { return unsafe { Waker::from_raw(clone(5 as *const _)) }; unsafe fn clone(ptr: *const ()) -> RawWaker { assert_eq!(ptr as usize, 5); const VTABLE: RawWakerVTable = RawWakerVTable::new(clone, wake, wake_by_ref, drop); RawWaker::new(ptr, &VTABLE) } unsafe fn wake(ptr: *const ()) { assert_eq!(ptr as usize, 5); } unsafe fn wake_by_ref(ptr: *const ()) { assert_eq!(ptr as usize, 5); } unsafe fn drop(ptr: *const ()) { assert_eq!(ptr as usize, 5); } } } }
Ok(Filestat { device_id: meta.dev(), inode: meta.ino(), filetype: filetype_from(&meta.file_type()), nlink: meta.nlink(), size: meta.len(), atim: meta.accessed().map(|t| Some(t.into_std())).unwrap_or(None), mtim: meta.modified().map(|t| Some(t.into_std())).unwrap_or(None), ctim: meta.created().map(|t| Some(t.into_std())).unwrap_or(None), })
call_expression
[ { "content": "/// Generates all the Rust source files used in Cranelift from the meta-language.\n\npub fn generate(isas: &[isa::Isa], out_dir: &str, crate_dir: &Path) -> Result<(), error::Error> {\n\n // Create all the definitions:\n\n // - common definitions.\n\n let mut shared_defs = shared::define();\n\n\n\n gen_settings::generate(\n\n &shared_defs.settings,\n\n gen_settings::ParentGroup::None,\n\n \"settings.rs\",\n\n &out_dir,\n\n )?;\n\n gen_types::generate(\"types.rs\", &out_dir)?;\n\n\n\n // - per ISA definitions.\n\n let target_isas = isa::define(isas, &mut shared_defs);\n\n\n\n // At this point, all definitions are done.\n\n let all_formats = shared_defs.verify_instruction_formats();\n\n\n\n // Generate all the code.\n", "file_path": "cranelift/codegen/meta/src/lib.rs", "rank": 0, "score": 554163.4138906241 }, { "content": "/// Prints:\n\n/// ; error: [ERROR BODY]\n\nfn print_error(w: &mut dyn Write, err: VerifierError) -> fmt::Result {\n\n writeln!(w, \"; error: {}\", err.to_string())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/print_errors.rs", "rank": 1, "score": 532805.9328765842 }, { "content": "/// Prints:\n\n/// ; ^~~~~~\n\nfn print_arrow(w: &mut dyn Write, entity: &str) -> fmt::Result {\n\n write!(w, \";\")?;\n\n\n\n let indent = entity.len() - entity.trim_start().len();\n\n if indent != 0 {\n\n write!(w, \"{1:0$}^\", indent - 1, \"\")?;\n\n }\n\n\n\n for _ in 0..entity.trim().len() - 1 {\n\n write!(w, \"~\")?;\n\n }\n\n\n\n writeln!(w)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/print_errors.rs", "rank": 2, "score": 515607.1264283594 }, { "content": "#[cfg(feature = \"rebuild-isle\")]\n\nfn gen_isle_enum(name: &str, mut variants: Vec<&str>, fmt: &mut Formatter) {\n\n variants.sort();\n\n let prefix = format!(\";;;; Enumerated Immediate: {} \", name);\n\n fmtln!(fmt, \"{:;<80}\", prefix);\n\n fmt.empty_line();\n\n fmtln!(fmt, \"(type {} extern\", name);\n\n fmt.indent(|fmt| {\n\n fmt.line(\"(enum\");\n\n fmt.indent(|fmt| {\n\n for variant in variants {\n\n fmtln!(fmt, \"{}\", variant);\n\n }\n\n });\n\n fmt.line(\")\");\n\n });\n\n fmt.line(\")\");\n\n fmt.empty_line();\n\n}\n\n\n", "file_path": "cranelift/codegen/meta/src/gen_inst.rs", "rank": 3, "score": 508493.45446110656 }, { "content": "/// Write `func` to `w` as equivalent text.\n\n/// Use `isa` to emit ISA-dependent annotations.\n\npub fn write_function(w: &mut dyn Write, func: &Function) -> fmt::Result {\n\n decorate_function(&mut PlainWriter, w, func)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/write.rs", "rank": 4, "score": 483353.2187072753 }, { "content": "fn feature_found(path: &Path, name: &str) -> bool {\n\n path.iter().any(|part| match part.to_str() {\n\n Some(s) => s.contains(name),\n\n None => false,\n\n })\n\n}\n\n\n", "file_path": "tests/all/wast.rs", "rank": 5, "score": 472041.10967824404 }, { "content": "/// Look for a supported ISA with the given `name`.\n\n/// Return a builder that can create a corresponding `TargetIsa`.\n\npub fn lookup_by_name(name: &str) -> Result<Builder, LookupError> {\n\n use alloc::str::FromStr;\n\n lookup(triple!(name))\n\n}\n\n\n\n/// Describes reason for target lookup failure\n\n#[derive(PartialEq, Eq, Copy, Clone, Debug)]\n\npub enum LookupError {\n\n /// Support for this target was disabled in the current build.\n\n SupportDisabled,\n\n\n\n /// Support for this target has not yet been implemented.\n\n Unsupported,\n\n}\n\n\n\n// This is manually implementing Error and Display instead of using thiserror to reduce the amount\n\n// of 
dependencies used by Cranelift.\n\nimpl std::error::Error for LookupError {}\n\n\n\nimpl fmt::Display for LookupError {\n", "file_path": "cranelift/codegen/src/isa/mod.rs", "rank": 6, "score": 466255.37581423915 }, { "content": "/// Write the operands of `inst` to `w` with a prepended space.\n\npub fn write_operands(w: &mut dyn Write, dfg: &DataFlowGraph, inst: Inst) -> fmt::Result {\n\n let pool = &dfg.value_lists;\n\n use crate::ir::instructions::InstructionData::*;\n\n match dfg[inst] {\n\n AtomicRmw { op, args, .. } => write!(w, \" {}, {}, {}\", op, args[0], args[1]),\n\n AtomicCas { args, .. } => write!(w, \" {}, {}, {}\", args[0], args[1], args[2]),\n\n LoadNoOffset { flags, arg, .. } => write!(w, \"{} {}\", flags, arg),\n\n StoreNoOffset { flags, args, .. } => write!(w, \"{} {}, {}\", flags, args[0], args[1]),\n\n Unary { arg, .. } => write!(w, \" {}\", arg),\n\n UnaryImm { imm, .. } => write!(w, \" {}\", imm),\n\n UnaryIeee32 { imm, .. } => write!(w, \" {}\", imm),\n\n UnaryIeee64 { imm, .. } => write!(w, \" {}\", imm),\n\n UnaryBool { imm, .. } => write!(w, \" {}\", imm),\n\n UnaryGlobalValue { global_value, .. } => write!(w, \" {}\", global_value),\n\n UnaryConst {\n\n constant_handle, ..\n\n } => write!(w, \" {}\", constant_handle),\n\n Binary { args, .. } => write!(w, \" {}, {}\", args[0], args[1]),\n\n BinaryImm8 { arg, imm, .. } => write!(w, \" {}, {}\", arg, imm),\n\n BinaryImm64 { arg, imm, .. } => write!(w, \" {}, {}\", arg, imm),\n", "file_path": "cranelift/codegen/src/write.rs", "rank": 7, "score": 447088.16374115914 }, { "content": "/// Applies the relocation `r` at `offset` within `code`, according to the\n\n/// symbols found in `obj`.\n\n///\n\n/// This method is used at runtime to resolve relocations in ELF images,\n\n/// typically with respect to where the memory was placed in the final address\n\n/// in memory.\n\npub fn apply_reloc(obj: &File, code: &mut [u8], offset: u64, r: Relocation) {\n\n let target_func_address: usize = match r.target() {\n\n RelocationTarget::Symbol(i) => {\n\n // Processing relocation target is a named symbols that is compiled\n\n // wasm function or runtime libcall.\n\n let sym = obj.symbol_by_index(i).unwrap();\n\n if sym.is_local() {\n\n &code[sym.address() as usize] as *const u8 as usize\n\n } else {\n\n match sym.name() {\n\n Ok(name) => {\n\n if let Some(addr) = to_libcall_address(name) {\n\n addr\n\n } else {\n\n panic!(\"unknown function to link: {}\", name);\n\n }\n\n }\n\n Err(_) => panic!(\"unexpected relocation target: not a symbol\"),\n\n }\n\n }\n", "file_path": "crates/jit/src/link.rs", "rank": 8, "score": 446970.4628845579 }, { "content": "fn build_wasm(wat_path: impl AsRef<Path>) -> Result<NamedTempFile> {\n\n let mut wasm_file = NamedTempFile::new()?;\n\n let wasm = wat::parse_file(wat_path)?;\n\n wasm_file.write(&wasm)?;\n\n Ok(wasm_file)\n\n}\n\n\n\n// Very basic use case: compile binary wasm file and run specific function with arguments.\n", "file_path": "tests/all/cli_tests.rs", "rank": 9, "score": 432329.20161066484 }, { "content": "fn find_crates(dir: &Path, dst: &mut Vec<Crate>) {\n\n if dir.join(\"Cargo.toml\").exists() {\n\n let krate = read_crate(&dir.join(\"Cargo.toml\"));\n\n if !krate.publish || CRATES_TO_PUBLISH.iter().any(|c| krate.name == *c) {\n\n dst.push(krate);\n\n } else {\n\n panic!(\"failed to find {:?} in whitelist or blacklist\", krate.name);\n\n }\n\n }\n\n\n\n for entry in dir.read_dir().unwrap() {\n\n let entry = entry.unwrap();\n\n if entry.file_type().unwrap().is_dir() {\n\n 
find_crates(&entry.path(), dst);\n\n }\n\n }\n\n}\n\n\n", "file_path": "scripts/publish.rs", "rank": 10, "score": 428642.8034166887 }, { "content": "pub fn wasi_file_is_stdin(f: &dyn WasiFile) -> bool {\n\n f.as_any().is::<crate::stdio::Stdin>()\n\n}\n\n\n", "file_path": "crates/wasi-common/tokio/src/sched/windows.rs", "rank": 11, "score": 424018.68798070087 }, { "content": "fn write_spec(w: &mut dyn Write, func: &Function) -> fmt::Result {\n\n write!(w, \"{}{}\", func.name, func.signature)\n\n}\n\n\n\n//----------------------------------------------------------------------\n\n//\n\n// Basic blocks\n\n\n", "file_path": "cranelift/codegen/src/write.rs", "rank": 12, "score": 418384.87358382693 }, { "content": "pub fn wasi_file_is_stdin(f: &dyn WasiFile) -> bool {\n\n f.as_any().is::<crate::stdio::Stdin>()\n\n}\n\n\n", "file_path": "crates/wasi-common/cap-std-sync/src/sched/windows.rs", "rank": 13, "score": 414569.8071738359 }, { "content": "/// Write block args using optional parantheses.\n\nfn write_block_args(w: &mut dyn Write, args: &[Value]) -> fmt::Result {\n\n if args.is_empty() {\n\n Ok(())\n\n } else {\n\n write!(w, \"({})\", DisplayValues(args))\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/write.rs", "rank": 14, "score": 413309.87893792544 }, { "content": "/// A helper to extract all the `Type` listings of each variable in `params`\n\n/// for only parameters the return true for `is_wasm`, typically paired with\n\n/// `is_wasm_return` or `is_wasm_parameter`.\n\npub fn wasm_param_types(params: &[ir::AbiParam], is_wasm: impl Fn(usize) -> bool) -> Vec<Type> {\n\n let mut ret = Vec::with_capacity(params.len());\n\n for (i, param) in params.iter().enumerate() {\n\n if is_wasm(i) {\n\n ret.push(param.value_type);\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "cranelift/wasm/src/code_translator.rs", "rank": 15, "score": 411504.37286089535 }, { "content": "fn call_ser(file: &str, pretty: bool) -> Result<(), String> {\n\n let ret_of_parse = parse_functions(file);\n\n match ret_of_parse {\n\n Ok(funcs) => {\n\n let ser_str = if pretty {\n\n serde_json::to_string_pretty(&funcs).unwrap()\n\n } else {\n\n serde_json::to_string(&funcs).unwrap()\n\n };\n\n println!(\"{}\", ser_str);\n\n Ok(())\n\n }\n\n Err(_pe) => Err(\"There was a parsing error\".to_string()),\n\n }\n\n}\n\n\n", "file_path": "cranelift/serde/src/clif-json.rs", "rank": 16, "score": 410294.1409479605 }, { "content": "/// Hexadecimal with a multiple of 4 digits and group separators:\n\n///\n\n/// 0xfff0\n\n/// 0x0001_ffff\n\n/// 0xffff_ffff_fff8_4400\n\n///\n\nfn write_hex(x: u64, f: &mut Formatter) -> fmt::Result {\n\n let mut pos = (64 - x.leading_zeros() - 1) & 0xf0;\n\n write!(f, \"0x{:04x}\", (x >> pos) & 0xffff)?;\n\n while pos > 0 {\n\n pos -= 16;\n\n write!(f, \"_{:04x}\", (x >> pos) & 0xffff)?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl Display for Uimm64 {\n\n fn fmt(&self, f: &mut Formatter) -> fmt::Result {\n\n let x = self.0;\n\n if x < 10_000 {\n\n // Use decimal for small numbers.\n\n write!(f, \"{}\", x)\n\n } else {\n\n write_hex(x, f)\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/ir/immediates.rs", "rank": 17, "score": 407589.14309415396 }, { "content": "fn handle_module(options: &Options, path: &Path, name: &str, fisa: FlagsOrIsa) -> Result<()> {\n\n let buffer = read_to_string(&path)?;\n\n let test_file = parse_test(&buffer, ParseOptions::default())\n\n .with_context(|| format!(\"failed to parse {}\", name))?;\n\n\n\n // If we have an isa from the command-line, use that. 
Otherwise if the\n\n // file contains a unique isa, use that.\n\n let isa = fisa.isa.or(test_file.isa_spec.unique_isa());\n\n\n\n if isa.is_none() {\n\n anyhow::bail!(\"compilation requires a target isa\");\n\n };\n\n\n\n for (func, _) in test_file.functions {\n\n let mut relocs = PrintRelocs::new(options.print);\n\n let mut traps = PrintTraps::new(options.print);\n\n let mut stack_maps = PrintStackMaps::new(options.print);\n\n\n\n if let Some(isa) = isa {\n\n let mut context = Context::new();\n", "file_path": "cranelift/src/compile.rs", "rank": 18, "score": 407179.52955607895 }, { "content": "fn handle_module(options: &Options, path: &Path, name: &str, fisa: FlagsOrIsa) -> Result<()> {\n\n let color_choice = match options.color {\n\n ColorOpt::Auto => ColorChoice::Auto,\n\n ColorOpt::Always => ColorChoice::Always,\n\n ColorOpt::Never => ColorChoice::Never,\n\n };\n\n let mut terminal = StandardStream::stdout(color_choice);\n\n let use_color = terminal.supports_color() && options.color == ColorOpt::Auto\n\n || options.color == ColorOpt::Always;\n\n vcprint!(\n\n options.verbose,\n\n use_color,\n\n terminal,\n\n Color::Yellow,\n\n \"Handling: \"\n\n );\n\n vprintln!(options.verbose, \"\\\"{}\\\"\", name);\n\n vcprint!(\n\n options.verbose,\n\n use_color,\n", "file_path": "cranelift/src/wasm.rs", "rank": 19, "score": 407179.52955607895 }, { "content": "fn has_die_back_edge<R: Reader<Offset = usize>>(die: &read::DebuggingInformationEntry<R>) -> bool {\n\n match die.tag() {\n\n constants::DW_TAG_variable\n\n | constants::DW_TAG_constant\n\n | constants::DW_TAG_inlined_subroutine\n\n | constants::DW_TAG_lexical_block\n\n | constants::DW_TAG_label\n\n | constants::DW_TAG_with_stmt\n\n | constants::DW_TAG_try_block\n\n | constants::DW_TAG_catch_block\n\n | constants::DW_TAG_template_type_parameter\n\n | constants::DW_TAG_enumerator\n\n | constants::DW_TAG_member\n\n | constants::DW_TAG_variant_part\n\n | constants::DW_TAG_variant\n\n | constants::DW_TAG_formal_parameter => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "crates/cranelift/src/debug/gc.rs", "rank": 20, "score": 401055.8550072319 }, { "content": "pub fn prepare_workspace(exe_name: &str) -> anyhow::Result<TempDir> {\n\n let prefix = format!(\"wasi_common_{}\", exe_name);\n\n let tempdir = Builder::new().prefix(&prefix).tempdir()?;\n\n Ok(tempdir)\n\n}\n\n\n", "file_path": "crates/test-programs/tests/wasm_tests/utils.rs", "rank": 21, "score": 396962.0716592945 }, { "content": "fn write_arg(w: &mut dyn Write, func: &Function, arg: Value) -> fmt::Result {\n\n write!(w, \"{}: {}\", arg, func.dfg.value_type(arg))\n\n}\n\n\n", "file_path": "cranelift/codegen/src/write.rs", "rank": 22, "score": 395745.0707251487 }, { "content": "// Assumption: path inside cache directory.\n\n// Then, we don't have to use sound OS-specific exclusive file access.\n\n// Note: there's no need to remove temporary file here - cleanup task will do it later.\n\nfn fs_write_atomic(path: &Path, reason: &str, contents: &[u8]) -> bool {\n\n let lock_path = path.with_extension(format!(\"wip-atomic-write-{}\", reason));\n\n fs::OpenOptions::new()\n\n .create_new(true) // atomic file creation (assumption: no one will open it without this flag)\n\n .write(true)\n\n .open(&lock_path)\n\n .and_then(|mut file| file.write_all(contents))\n\n // file should go out of scope and be closed at this point\n\n .and_then(|()| fs::rename(&lock_path, &path)) // atomic file rename\n\n .map_err(|err| {\n\n warn!(\n\n \"Failed to write file with rename, lock path: {}, target path: {}, 
err: {}\",\n\n lock_path.display(),\n\n path.display(),\n\n err\n\n )\n\n })\n\n .is_ok()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "crates/cache/src/lib.rs", "rank": 23, "score": 392806.0531425682 }, { "content": "/// Iterate over all of the files passed as arguments, recursively iterating through directories.\n\npub fn iterate_files<'a>(files: &'a [PathBuf]) -> impl Iterator<Item = PathBuf> + 'a {\n\n files\n\n .iter()\n\n .flat_map(WalkDir::new)\n\n .filter(|f| match f {\n\n Ok(d) => {\n\n // Filter out hidden files (starting with .).\n\n !d.file_name().to_str().map_or(false, |s| s.starts_with('.'))\n\n // Filter out directories.\n\n && !d.file_type().is_dir()\n\n }\n\n Err(e) => {\n\n println!(\"Unable to read file: {}\", e);\n\n false\n\n }\n\n })\n\n .map(|f| {\n\n f.expect(\"this should not happen: we have already filtered out the errors\")\n\n .into_path()\n\n })\n\n}\n", "file_path": "cranelift/src/utils.rs", "rank": 24, "score": 391791.1143968502 }, { "content": "pub fn extract_exec_name_from_path(path: &Path) -> anyhow::Result<String> {\n\n path.file_stem()\n\n .and_then(|s| s.to_str())\n\n .map(String::from)\n\n .ok_or_else(|| {\n\n anyhow::anyhow!(\n\n \"couldn't extract the file stem from path {}\",\n\n path.display()\n\n )\n\n })\n\n}\n", "file_path": "crates/test-programs/tests/wasm_tests/utils.rs", "rank": 25, "score": 390582.65273659804 }, { "content": "pub fn instantiate(data: &[u8], bin_name: &str, workspace: Option<&Path>) -> anyhow::Result<()> {\n\n run(data, bin_name, workspace, false)\n\n}\n", "file_path": "crates/test-programs/tests/wasm_tests/runtime/tokio.rs", "rank": 26, "score": 389453.8245630793 }, { "content": "/// Emit Display and FromStr implementations for enum settings.\n\nfn gen_to_and_from_str(name: &str, values: &[&'static str], fmt: &mut Formatter) {\n\n fmtln!(fmt, \"impl fmt::Display for {} {{\", name);\n\n fmt.indent(|fmt| {\n\n fmtln!(\n\n fmt,\n\n \"fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\"\n\n );\n\n fmt.indent(|fmt| {\n\n fmtln!(fmt, \"f.write_str(match *self {\");\n\n fmt.indent(|fmt| {\n\n for v in values.iter() {\n\n fmtln!(fmt, \"Self::{} => \\\"{}\\\",\", camel_case(v), v);\n\n }\n\n });\n\n fmtln!(fmt, \"})\");\n\n });\n\n fmtln!(fmt, \"}\");\n\n });\n\n fmtln!(fmt, \"}\");\n\n\n", "file_path": "cranelift/codegen/meta/src/gen_settings.rs", "rank": 27, "score": 384799.02555493015 }, { "content": "/// Opens a fresh file descriptor for `path` where `path` should be a preopened\n\n/// directory.\n\npub fn open_scratch_directory(path: &str) -> Result<wasi::Fd, String> {\n\n unsafe {\n\n for i in 3.. 
{\n\n let stat = match wasi::fd_prestat_get(i) {\n\n Ok(s) => s,\n\n Err(_) => break,\n\n };\n\n if stat.tag != wasi::PREOPENTYPE_DIR {\n\n continue;\n\n }\n\n let mut dst = Vec::with_capacity(stat.u.dir.pr_name_len);\n\n if wasi::fd_prestat_dir_name(i, dst.as_mut_ptr(), dst.capacity()).is_err() {\n\n continue;\n\n }\n\n dst.set_len(stat.u.dir.pr_name_len);\n\n if dst == path.as_bytes() {\n\n let (base, inherit) = fd_get_rights(i);\n\n return Ok(\n\n wasi::path_open(i, 0, \".\", wasi::OFLAGS_DIRECTORY, base, inherit, 0)\n\n .expect(\"failed to open dir\"),\n", "file_path": "crates/test-programs/wasi-tests/src/lib.rs", "rank": 28, "score": 380842.33476934384 }, { "content": "pub fn instantiate(data: &[u8], bin_name: &str, workspace: Option<&Path>) -> anyhow::Result<()> {\n\n run(data, bin_name, workspace, false)\n\n}\n", "file_path": "crates/test-programs/tests/wasm_tests/runtime/cap_std_sync.rs", "rank": 29, "score": 380057.6122785623 }, { "content": "/// Read an entire file into a string.\n\npub fn read_to_string<P: AsRef<Path>>(path: P) -> anyhow::Result<String> {\n\n let mut buffer = String::new();\n\n let path = path.as_ref();\n\n if path == Path::new(\"-\") {\n\n let stdin = io::stdin();\n\n let mut stdin = stdin.lock();\n\n stdin\n\n .read_to_string(&mut buffer)\n\n .context(\"failed to read stdin to string\")?;\n\n } else {\n\n let mut file = File::open(path)?;\n\n file.read_to_string(&mut buffer)\n\n .with_context(|| format!(\"failed to read {} to string\", path.display()))?;\n\n }\n\n Ok(buffer)\n\n}\n\n\n\n/// Like `FlagsOrIsa`, but holds ownership.\n\npub enum OwnedFlagsOrIsa {\n\n Flags(settings::Flags),\n", "file_path": "cranelift/src/utils.rs", "rank": 30, "score": 379763.662859069 }, { "content": "/// Run all functions in a file that are succeeded by \"run:\" comments\n\nfn run_single_file(path: &PathBuf) -> Result<()> {\n\n let file_contents = read_to_string(&path)?;\n\n run_file_contents(file_contents)\n\n}\n\n\n", "file_path": "cranelift/src/run.rs", "rank": 31, "score": 379478.2120119219 }, { "content": "/// Return an `isa` builder configured for the current host\n\n/// machine, or `Err(())` if the host machine is not supported\n\n/// in the current configuration.\n\n///\n\n/// Selects the given backend variant specifically; this is\n\n/// useful when more than oen backend exists for a given target\n\n/// (e.g., on x86-64).\n\npub fn builder_with_options(infer_native_flags: bool) -> Result<isa::Builder, &'static str> {\n\n let mut isa_builder = isa::lookup(Triple::host()).map_err(|err| match err {\n\n isa::LookupError::SupportDisabled => \"support for architecture disabled at compile time\",\n\n isa::LookupError::Unsupported => \"unsupported architecture\",\n\n })?;\n\n\n\n #[cfg(any(target_arch = \"x86\", target_arch = \"x86_64\"))]\n\n {\n\n use cranelift_codegen::settings::Configurable;\n\n\n\n if !std::is_x86_feature_detected!(\"sse2\") {\n\n return Err(\"x86 support requires SSE2\");\n\n }\n\n\n\n if !infer_native_flags {\n\n return Ok(isa_builder);\n\n }\n\n\n\n if std::is_x86_feature_detected!(\"sse3\") {\n\n isa_builder.enable(\"has_sse3\").unwrap();\n", "file_path": "cranelift/native/src/lib.rs", "rank": 32, "score": 378229.8374002647 }, { "content": "/// Parse a 64-bit unsigned number.\n\nfn parse_u64(s: &str) -> Result<u64, &'static str> {\n\n let mut value: u64 = 0;\n\n let mut digits = 0;\n\n\n\n if s.starts_with(\"-0x\") {\n\n return Err(\"Invalid character in hexadecimal number\");\n\n } else if s.starts_with(\"0x\") {\n\n // Hexadecimal.\n\n for ch in 
s[2..].chars() {\n\n match ch.to_digit(16) {\n\n Some(digit) => {\n\n digits += 1;\n\n if digits > 16 {\n\n return Err(\"Too many hexadecimal digits\");\n\n }\n\n // This can't overflow given the digit limit.\n\n value = (value << 4) | u64::from(digit);\n\n }\n\n None => {\n\n // Allow embedded underscores, but fail on anything else.\n", "file_path": "cranelift/codegen/src/ir/immediates.rs", "rank": 33, "score": 375302.07700398576 }, { "content": "/// Parse the entire `text` into a list of functions.\n\n///\n\n/// Any test commands or target declarations are ignored.\n\npub fn parse_functions(text: &str) -> ParseResult<Vec<Function>> {\n\n let _tt = timing::parse_text();\n\n parse_test(text, ParseOptions::default())\n\n .map(|file| file.functions.into_iter().map(|(func, _)| func).collect())\n\n}\n\n\n\n/// Options for configuring the parsing of filetests.\n\npub struct ParseOptions<'a> {\n\n /// Compiler passes to run on the parsed functions.\n\n pub passes: Option<&'a [String]>,\n\n /// Target ISA for compiling the parsed functions, e.g. \"x86_64 skylake\".\n\n pub target: Option<&'a str>,\n\n /// Default calling convention used when none is specified for a parsed function.\n\n pub default_calling_convention: CallConv,\n\n /// Default for unwind-info setting (enabled or disabled).\n\n pub unwind_info: bool,\n\n}\n\n\n\nimpl Default for ParseOptions<'_> {\n\n fn default() -> Self {\n\n Self {\n\n passes: None,\n\n target: None,\n\n default_calling_convention: CallConv::Fast,\n\n unwind_info: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/reader/src/parser.rs", "rank": 34, "score": 373950.1626380494 }, { "content": "struct PrettyVerifierError<'a>(Box<dyn FuncWriter + 'a>, &'a mut Vec<VerifierError>);\n\n\n\nimpl<'a> FuncWriter for PrettyVerifierError<'a> {\n\n fn write_block_header(\n\n &mut self,\n\n w: &mut dyn Write,\n\n func: &Function,\n\n block: Block,\n\n indent: usize,\n\n ) -> fmt::Result {\n\n pretty_block_header_error(w, func, block, indent, &mut *self.0, self.1)\n\n }\n\n\n\n fn write_instruction(\n\n &mut self,\n\n w: &mut dyn Write,\n\n func: &Function,\n\n aliases: &SecondaryMap<Value, Vec<Value>>,\n\n inst: Inst,\n\n indent: usize,\n", "file_path": "cranelift/codegen/src/print_errors.rs", "rank": 35, "score": 372452.7837818187 }, { "content": "/// Pre-parse a supposed entity name by splitting it into two parts: A head of lowercase ASCII\n\n/// letters and numeric tail.\n\npub fn split_entity_name(name: &str) -> Option<(&str, u32)> {\n\n let (head, tail) = name.split_at(name.len() - trailing_digits(name));\n\n if tail.len() > 1 && tail.starts_with('0') {\n\n None\n\n } else {\n\n tail.parse().ok().map(|n| (head, n))\n\n }\n\n}\n\n\n\n/// Lexical analysis.\n\n///\n\n/// A `Lexer` reads text from a `&str` and provides a sequence of tokens.\n\n///\n\n/// Also keep track of a line number for error reporting.\n\n///\n\npub struct Lexer<'a> {\n\n // Complete source being processed.\n\n source: &'a str,\n\n\n\n // Iterator into `source`.\n", "file_path": "cranelift/reader/src/lexer.rs", "rank": 36, "score": 370518.62480465346 }, { "content": "/// Creates a new configuration file at specified path, or default path if None is passed.\n\n/// Fails if file already exists.\n\npub fn create_new_config<P: AsRef<Path> + Debug>(config_file: Option<P>) -> Result<PathBuf> {\n\n trace!(\"Creating new config file, path: {:?}\", config_file);\n\n\n\n let config_file = match config_file {\n\n Some(path) => path.as_ref().to_path_buf(),\n\n None => default_config_path()?,\n\n };\n\n\n\n if 
config_file.exists() {\n\n bail!(\n\n \"Configuration file '{}' already exists.\",\n\n config_file.display()\n\n );\n\n }\n\n\n\n let parent_dir = config_file\n\n .parent()\n\n .ok_or_else(|| anyhow!(\"Invalid cache config path: {}\", config_file.display()))?;\n\n\n\n fs::create_dir_all(parent_dir).with_context(|| {\n", "file_path": "crates/cache/src/config.rs", "rank": 37, "score": 368672.407687117 }, { "content": "fn to_libcall_address(name: &str) -> Option<usize> {\n\n use self::libcalls::*;\n\n use wasmtime_environ::for_each_libcall;\n\n macro_rules! add_libcall_symbol {\n\n [$(($libcall:ident, $export:ident)),*] => {\n\n Some(match name {\n\n $(\n\n stringify!($export) => $export as usize,\n\n )+\n\n _ => {\n\n return None;\n\n }\n\n })\n\n };\n\n }\n\n for_each_libcall!(add_libcall_symbol)\n\n}\n", "file_path": "crates/jit/src/link.rs", "rank": 38, "score": 368540.92244909133 }, { "content": "fn write_stats_file(path: &Path, stats: &ModuleCacheStatistics) -> bool {\n\n toml::to_string_pretty(&stats)\n\n .map_err(|err| {\n\n warn!(\n\n \"Failed to serialize stats file, path: {}, err: {}\",\n\n path.display(),\n\n err\n\n )\n\n })\n\n .and_then(|serialized| {\n\n if fs_write_atomic(path, \"stats\", serialized.as_bytes()) {\n\n Ok(())\n\n } else {\n\n Err(())\n\n }\n\n })\n\n .is_ok()\n\n}\n\n\n", "file_path": "crates/cache/src/worker.rs", "rank": 39, "score": 368442.5624259034 }, { "content": "fn is_matching_assert_invalid_error_message(expected: &str, actual: &str) -> bool {\n\n actual.contains(expected)\n\n // `elem.wast` and `proposals/bulk-memory-operations/elem.wast` disagree\n\n // on the expected error message for the same error.\n\n || (expected.contains(\"out of bounds\") && actual.contains(\"does not fit\"))\n\n // slight difference in error messages\n\n || (expected.contains(\"unknown elem segment\") && actual.contains(\"unknown element segment\"))\n\n}\n\n\n", "file_path": "crates/wast/src/wast.rs", "rank": 40, "score": 368190.9046516309 }, { "content": "/// Construct the list of compilations (transformations from ISLE\n\n/// source to generated Rust source) that exist in the repository.\n\nfn get_isle_compilations(crate_dir: &std::path::Path) -> Result<IsleCompilations, std::io::Error> {\n\n let cur_dir = std::env::current_dir()?;\n\n\n\n let clif_isle =\n\n make_isle_source_path_relative(&cur_dir, crate_dir.join(\"src\").join(\"clif.isle\"));\n\n let prelude_isle =\n\n make_isle_source_path_relative(&cur_dir, crate_dir.join(\"src\").join(\"prelude.isle\"));\n\n let src_isa_x64 =\n\n make_isle_source_path_relative(&cur_dir, crate_dir.join(\"src\").join(\"isa\").join(\"x64\"));\n\n let src_isa_aarch64 =\n\n make_isle_source_path_relative(&cur_dir, crate_dir.join(\"src\").join(\"isa\").join(\"aarch64\"));\n\n\n\n // This is a set of ISLE compilation units.\n\n //\n\n // The format of each entry is:\n\n //\n\n // (output Rust code file, input ISLE source files)\n\n //\n\n // There should be one entry for each backend that uses ISLE for lowering,\n\n // and if/when we replace our peephole optimization passes with ISLE, there\n", "file_path": "cranelift/codegen/build.rs", "rank": 41, "score": 366707.36553825473 }, { "content": "/// Helper function for displaying `Vec<DataValue>`.\n\npub fn write_data_value_list(f: &mut Formatter<'_>, list: &[DataValue]) -> fmt::Result {\n\n match list.len() {\n\n 0 => Ok(()),\n\n 1 => write!(f, \"{}\", list[0]),\n\n _ => {\n\n write!(f, \"{}\", list[0])?;\n\n for dv in list.iter().skip(1) {\n\n write!(f, \", {}\", dv)?;\n\n }\n\n Ok(())\n\n 
}\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn type_conversions() {\n", "file_path": "cranelift/codegen/src/data_value.rs", "rank": 42, "score": 366652.45864994545 }, { "content": "/// Default names for `ir::LibCall`s. A function by this name is imported into the object as\n\n/// part of the translation of a `ir::ExternalName::LibCall` variant.\n\npub fn default_libcall_names() -> Box<dyn Fn(ir::LibCall) -> String + Send + Sync> {\n\n Box::new(move |libcall| match libcall {\n\n ir::LibCall::Probestack => \"__cranelift_probestack\".to_owned(),\n\n ir::LibCall::UdivI64 => \"__udivdi3\".to_owned(),\n\n ir::LibCall::SdivI64 => \"__divdi3\".to_owned(),\n\n ir::LibCall::UremI64 => \"__umoddi3\".to_owned(),\n\n ir::LibCall::SremI64 => \"__moddi3\".to_owned(),\n\n ir::LibCall::IshlI64 => \"__ashldi3\".to_owned(),\n\n ir::LibCall::UshrI64 => \"__lshrdi3\".to_owned(),\n\n ir::LibCall::SshrI64 => \"__ashrdi3\".to_owned(),\n\n ir::LibCall::CeilF32 => \"ceilf\".to_owned(),\n\n ir::LibCall::CeilF64 => \"ceil\".to_owned(),\n\n ir::LibCall::FloorF32 => \"floorf\".to_owned(),\n\n ir::LibCall::FloorF64 => \"floor\".to_owned(),\n\n ir::LibCall::TruncF32 => \"truncf\".to_owned(),\n\n ir::LibCall::TruncF64 => \"trunc\".to_owned(),\n\n ir::LibCall::NearestF32 => \"nearbyintf\".to_owned(),\n\n ir::LibCall::NearestF64 => \"nearbyint\".to_owned(),\n\n ir::LibCall::Memcpy => \"memcpy\".to_owned(),\n\n ir::LibCall::Memset => \"memset\".to_owned(),\n\n ir::LibCall::Memmove => \"memmove\".to_owned(),\n\n ir::LibCall::Memcmp => \"memcmp\".to_owned(),\n\n\n\n ir::LibCall::ElfTlsGetAddr => \"__tls_get_addr\".to_owned(),\n\n })\n\n}\n", "file_path": "cranelift/module/src/lib.rs", "rank": 43, "score": 363649.08336085215 }, { "content": "/// Generate an ISA from an architecture string (e.g. 
\"x86_64\").\n\npub fn isa_from_arch(arch: &str) -> Result<isa::Isa, String> {\n\n isa::Isa::from_arch(arch).ok_or_else(|| format!(\"no supported isa found for arch `{}`\", arch))\n\n}\n\n\n", "file_path": "cranelift/codegen/meta/src/lib.rs", "rank": 44, "score": 361285.97543433355 }, { "content": "fn reduce(isa: &dyn TargetIsa, mut func: Function, verbose: bool) -> Result<(Function, String)> {\n\n let mut context = CrashCheckContext::new(isa);\n\n\n\n if let CheckResult::Succeed = context.check_for_crash(&func) {\n\n anyhow::bail!(\"Given function compiled successfully or gave a verifier error.\");\n\n }\n\n\n\n try_resolve_aliases(&mut context, &mut func);\n\n\n\n let progress_bar = ProgressBar::with_draw_target(0, ProgressDrawTarget::stdout());\n\n progress_bar.set_style(\n\n ProgressStyle::default_bar().template(\"{bar:60} {prefix:40} {pos:>4}/{len:>4} {msg}\"),\n\n );\n\n\n\n for pass_idx in 0..100 {\n\n let mut should_keep_reducing = false;\n\n let mut phase = 0;\n\n\n\n loop {\n\n let mut mutator: Box<dyn Mutator> = match phase {\n", "file_path": "cranelift/src/bugpoint.rs", "rank": 45, "score": 360947.5114138711 }, { "content": "fn run_test(name: &str, stack_overflow: bool) {\n\n let me = env::current_exe().unwrap();\n\n let mut cmd = Command::new(me);\n\n cmd.env(VAR_NAME, name);\n\n let output = cmd.output().expect(\"failed to spawn subprocess\");\n\n let stdout = String::from_utf8_lossy(&output.stdout);\n\n let stderr = String::from_utf8_lossy(&output.stderr);\n\n let mut desc = format!(\"got status: {}\", output.status);\n\n\n\n if !stdout.trim().is_empty() {\n\n desc.push_str(\"\\nstdout: ----\\n\");\n\n desc.push_str(\" \");\n\n desc.push_str(&stdout.replace(\"\\n\", \"\\n \"));\n\n }\n\n\n\n if !stderr.trim().is_empty() {\n\n desc.push_str(\"\\nstderr: ----\\n\");\n\n desc.push_str(\" \");\n\n desc.push_str(&stderr.replace(\"\\n\", \"\\n \"));\n\n }\n", "file_path": "tests/host_segfault.rs", "rank": 46, "score": 356396.9252930364 }, { "content": "pub fn commit(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // Memory needs to be committed, so don't use the `region` crate\n\n if unsafe { VirtualAlloc(addr as _, len, MEM_COMMIT, PAGE_READWRITE).is_null() } {\n\n bail!(\"failed to commit memory as read/write\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 47, "score": 355878.56939398794 }, { "content": "pub fn decommit(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n if unsafe { VirtualFree(addr as _, len, MEM_DECOMMIT) } == 0 {\n\n bail!(\n\n \"failed to decommit memory pages: {}\",\n\n std::io::Error::last_os_error()\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 48, "score": 355878.56939398794 }, { "content": "/// Main entry point for `clif-util test`.\n\n///\n\n/// Take a list of filenames which can be either `.clif` files or directories.\n\n///\n\n/// Files are interpreted as test cases and executed immediately.\n\n///\n\n/// Directories are scanned recursively for test cases ending in `.clif`. 
These test cases are\n\n/// executed on background threads.\n\n///\n\npub fn run(verbose: bool, report_times: bool, files: &[String]) -> anyhow::Result<time::Duration> {\n\n let mut runner = TestRunner::new(verbose, report_times);\n\n\n\n for path in files.iter().map(Path::new) {\n\n if path.is_file() {\n\n runner.push_test(path);\n\n } else {\n\n runner.push_dir(path);\n\n }\n\n }\n\n\n\n runner.start_threads();\n\n runner.run()\n\n}\n\n\n", "file_path": "cranelift/filetests/src/lib.rs", "rank": 49, "score": 355874.7677900784 }, { "content": "/// Return an instance implementing the \"spectest\" interface used in the\n\n/// spec testsuite.\n\npub fn link_spectest<T>(linker: &mut Linker<T>, store: &mut Store<T>) -> Result<()> {\n\n linker.func_wrap(\"spectest\", \"print\", || {})?;\n\n linker.func_wrap(\"spectest\", \"print_i32\", |val: i32| println!(\"{}: i32\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_i64\", |val: i64| println!(\"{}: i64\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_f32\", |val: f32| println!(\"{}: f32\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_f64\", |val: f64| println!(\"{}: f64\", val))?;\n\n linker.func_wrap(\"spectest\", \"print_i32_f32\", |i: i32, f: f32| {\n\n println!(\"{}: i32\", i);\n\n println!(\"{}: f32\", f);\n\n })?;\n\n linker.func_wrap(\"spectest\", \"print_f64_f64\", |f1: f64, f2: f64| {\n\n println!(\"{}: f64\", f1);\n\n println!(\"{}: f64\", f2);\n\n })?;\n\n\n\n let ty = GlobalType::new(ValType::I32, Mutability::Const);\n\n let g = Global::new(&mut *store, ty, Val::I32(666))?;\n\n linker.define(\"spectest\", \"global_i32\", g)?;\n\n\n\n let ty = GlobalType::new(ValType::I64, Mutability::Const);\n", "file_path": "crates/wast/src/spectest.rs", "rank": 50, "score": 355265.4580699224 }, { "content": "/// Optimize the function with available optimizations.\n\n///\n\n/// Since this can be resource intensive (and code-size inflating),\n\n/// it is separated from `Context::compile` to allow DCE to remove it\n\n/// if it's not used.\n\npub fn optimize(ctx: &mut Context, isa: &dyn TargetIsa) -> CodegenResult<()> {\n\n ctx.verify_if(isa)?;\n\n fold_constants(ctx, isa)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift/preopt/src/lib.rs", "rank": 51, "score": 351024.96966607356 }, { "content": "/// Extract a valid Rust identifier from the stem of a path.\n\nfn extract_name(path: impl AsRef<Path>) -> String {\n\n path.as_ref()\n\n .file_stem()\n\n .expect(\"filename should have a stem\")\n\n .to_str()\n\n .expect(\"filename should be representable as a string\")\n\n .replace(\"-\", \"_\")\n\n .replace(\"/\", \"_\")\n\n}\n\n\n", "file_path": "build.rs", "rank": 52, "score": 349656.28471790557 }, { "content": "fn parse_preloads(s: &str) -> Result<(String, PathBuf)> {\n\n let parts: Vec<&str> = s.splitn(2, '=').collect();\n\n if parts.len() != 2 {\n\n bail!(\"must contain exactly one equals character ('=')\");\n\n }\n\n Ok((parts[0].into(), parts[1].into()))\n\n}\n\n\n\nlazy_static::lazy_static! 
{\n\n static ref AFTER_HELP: String = {\n\n crate::FLAG_EXPLANATIONS.to_string()\n\n };\n\n}\n\n\n\n/// Runs a WebAssembly module\n\n#[derive(StructOpt)]\n\n#[structopt(name = \"run\", setting = AppSettings::TrailingVarArg, after_help = AFTER_HELP.as_str())]\n\npub struct RunCommand {\n\n #[structopt(flatten)]\n\n common: CommonOptions,\n", "file_path": "src/commands/run.rs", "rank": 53, "score": 347753.7938665842 }, { "content": "fn decommit(addr: *mut u8, len: usize, protect: bool) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // By creating a new mapping at the same location, this will discard the\n\n // mapping for the pages in the given range.\n\n // The new mapping will be to the CoW zero page, so this effectively\n\n // zeroes the pages.\n\n unsafe {\n\n rustix::io::mmap_anonymous(\n\n addr as _,\n\n len,\n\n if protect {\n\n rustix::io::ProtFlags::empty()\n\n } else {\n\n rustix::io::ProtFlags::READ | rustix::io::ProtFlags::WRITE\n\n },\n\n rustix::io::MapFlags::PRIVATE | rustix::io::MapFlags::FIXED,\n\n )\n\n .context(\"mmap failed to remap pages: {}\")?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 54, "score": 346688.5419627469 }, { "content": "fn decommit(addr: *mut u8, len: usize, protect: bool) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n unsafe {\n\n if protect {\n\n region::protect(addr, len, region::Protection::NONE)\n\n .context(\"failed to protect memory pages\")?;\n\n }\n\n\n\n // On Linux, this is enough to cause the kernel to initialize the pages to 0 on next access\n\n rustix::io::madvise(addr as _, len, rustix::io::Advice::LinuxDontNeed)\n\n .context(\"madvise failed to decommit: {}\")?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 55, "score": 346688.5419627469 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn decommit_stack_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n\n\n\n/// This is used to initialize the memory pool when uffd is enabled.\n\n///\n\n/// Without uffd, all of the memory pool's pages are initially protected with `NONE` to treat the entire\n\n/// range as guard pages. 
When an instance is created, the initial pages of the memory are\n\n/// changed to `READ_WRITE`.\n\n///\n\n/// With uffd, however, the potentially accessible pages of the each linear memory are made `READ_WRITE` and\n\n/// the page fault handler will detect an out of bounds access and treat the page, temporarily,\n\n/// as a guard page.\n\npub(super) fn initialize_memory_pool(pool: &MemoryPool) -> Result<()> {\n\n if pool.memory_size == 0 || pool.max_wasm_pages == 0 {\n\n return Ok(());\n\n }\n\n\n\n for i in 0..pool.max_instances {\n\n for base in pool.get(i) {\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 56, "score": 346295.37187528954 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn commit_stack_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as stack pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 57, "score": 346295.3718752895 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn commit_stack_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as stack pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 58, "score": 346295.37187528954 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn commit_stack_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as stack pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 59, "score": 346295.37187528954 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn decommit_stack_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, false)\n\n}\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 60, "score": 346295.37187528954 }, { "content": "#[cfg(feature = \"async\")]\n\npub fn decommit_stack_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, false)\n\n}\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 61, "score": 346295.37187528954 }, { "content": "pub fn commit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n commit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 62, "score": 346288.3219427755 }, { "content": "pub fn decommit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 63, "score": 346288.32194277545 }, { "content": "pub fn commit_table_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as table pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 64, "score": 346288.3219427755 }, { "content": "pub fn decommit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 65, "score": 346288.3219427755 }, { "content": "pub fn decommit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 66, "score": 346288.3219427755 }, { "content": "pub fn decommit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 67, "score": 
346288.32194277545 }, { "content": "pub fn commit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // Just change the protection level to READ|WRITE\n\n unsafe {\n\n region::protect(addr, len, region::Protection::READ_WRITE)\n\n .context(\"failed to make linear memory pages read/write\")\n\n }\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 68, "score": 346288.32194277545 }, { "content": "pub fn decommit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, false)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 69, "score": 346288.32194277545 }, { "content": "pub fn commit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n if len == 0 {\n\n return Ok(());\n\n }\n\n\n\n // Just change the protection level to READ|WRITE\n\n unsafe {\n\n region::protect(addr, len, region::Protection::READ_WRITE)\n\n .context(\"failed to make linear memory pages read/write\")\n\n }\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 70, "score": 346288.3219427755 }, { "content": "pub fn decommit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, true)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 71, "score": 346288.3219427755 }, { "content": "pub fn commit_table_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as table pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 72, "score": 346288.32194277545 }, { "content": "pub fn decommit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, false)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/unix.rs", "rank": 73, "score": 346288.32194277545 }, { "content": "pub fn commit_memory_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as memory pages remain READ|WRITE with uffd\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 74, "score": 346288.3219427755 }, { "content": "pub fn commit_table_pages(_addr: *mut u8, _len: usize) -> Result<()> {\n\n // A no-op as table pages remain READ|WRITE\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/uffd.rs", "rank": 75, "score": 346288.3219427755 }, { "content": "pub fn commit_table_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n commit(addr, len)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/windows.rs", "rank": 76, "score": 346288.3219427755 }, { "content": "pub fn decommit_memory_pages(addr: *mut u8, len: usize) -> Result<()> {\n\n decommit(addr, len, true)\n\n}\n\n\n", "file_path": "crates/runtime/src/instance/allocator/pooling/linux.rs", "rank": 77, "score": 346288.3219427755 }, { "content": "/// Attempts to extract the corresponding function index from a symbol possibly produced by\n\n/// `func_symbol_name`.\n\npub fn try_parse_func_name(name: &str) -> Option<FuncIndex> {\n\n let n = name.strip_prefix(FUNCTION_PREFIX)?.parse().ok()?;\n\n Some(FuncIndex::new(n))\n\n}\n\n\n", "file_path": "crates/environ/src/obj.rs", "rank": 78, "score": 344644.414754934 }, { "content": "/// Attempts to extract the corresponding signature index from a symbol\n\n/// possibly produced by `trampoline_symbol_name`.\n\npub fn try_parse_trampoline_name(name: &str) -> 
Option<SignatureIndex> {\n\n let n = name.strip_prefix(TRAMPOLINE_PREFIX)?.parse().ok()?;\n\n Some(SignatureIndex::new(n))\n\n}\n", "file_path": "crates/environ/src/obj.rs", "rank": 79, "score": 344644.414754934 }, { "content": "fn read_module(path: &Path) -> Vec<u8> {\n\n match path.extension() {\n\n None => {\n\n panic!(\"the file extension is not wasm or wat\");\n\n }\n\n Some(ext) => match ext.to_str() {\n\n Some(\"wasm\") => std::fs::read(path).expect(\"error reading wasm file\"),\n\n Some(\"wat\") => wat::parse_file(path)\n\n .map_err(|e| e.to_string())\n\n .expect(\"failed to parse wat\"),\n\n None | Some(&_) => panic!(\"the file extension for {:?} is not wasm or wat\", path),\n\n },\n\n }\n\n}\n\n\n", "file_path": "cranelift/wasm/tests/wasm_testsuite.rs", "rank": 80, "score": 340204.1907649324 }, { "content": "#[test]\n\nfn call_linked_func() -> Result<(), Error> {\n\n let engine = Engine::default();\n\n let mut store = Store::new(&engine, State::default());\n\n store.call_hook(State::call_hook);\n\n let mut linker = Linker::new(&engine);\n\n\n\n linker.func_wrap(\n\n \"host\",\n\n \"f\",\n\n |caller: Caller<State>, a: i32, b: i64, c: f32, d: f64| {\n\n // Calling this func will switch context into wasm, then back to host:\n\n assert_eq!(caller.data().context, vec![Context::Wasm, Context::Host]);\n\n\n\n assert_eq!(\n\n caller.data().calls_into_host,\n\n caller.data().returns_from_host + 1\n\n );\n\n assert_eq!(\n\n caller.data().calls_into_wasm,\n\n caller.data().returns_from_wasm + 1\n", "file_path": "tests/all/call_hook.rs", "rank": 81, "score": 340024.07378568314 }, { "content": "/// Parses the Start section of the wasm module.\n\npub fn parse_start_section(index: u32, environ: &mut dyn ModuleEnvironment) -> WasmResult<()> {\n\n environ.declare_start_func(FuncIndex::from_u32(index))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cranelift/wasm/src/sections_translator.rs", "rank": 82, "score": 336922.4561814802 }, { "content": "fn relocate_dwarf_sections(bytes: &mut [u8], code_region: (*const u8, usize)) -> Result<(), Error> {\n\n let mut relocations = Vec::new();\n\n let obj = File::parse(&bytes[..])?;\n\n for section in obj.sections() {\n\n let section_start = match section.file_range() {\n\n Some((start, _)) => start,\n\n None => continue,\n\n };\n\n for (off, r) in section.relocations() {\n\n if r.kind() != RelocationKind::Absolute\n\n || r.encoding() != RelocationEncoding::Generic\n\n || r.size() != 64\n\n {\n\n continue;\n\n }\n\n\n\n let sym = match r.target() {\n\n RelocationTarget::Symbol(index) => match obj.symbol_by_index(index) {\n\n Ok(sym) => sym,\n\n Err(_) => continue,\n", "file_path": "crates/jit/src/debug.rs", "rank": 83, "score": 336911.2022620014 }, { "content": "/// Return an `isa` builder configured for the current host\n\n/// machine, or `Err(())` if the host machine is not supported\n\n/// in the current configuration.\n\npub fn builder() -> Result<isa::Builder, &'static str> {\n\n builder_with_options(true)\n\n}\n\n\n", "file_path": "cranelift/native/src/lib.rs", "rank": 84, "score": 334240.5368546139 }, { "content": "/// Preserve instructions with used result values.\n\npub fn any_inst_results_used(inst: Inst, live: &[bool], dfg: &DataFlowGraph) -> bool {\n\n dfg.inst_results(inst).iter().any(|v| live[v.index()])\n\n}\n\n\n", "file_path": "cranelift/codegen/src/inst_predicates.rs", "rank": 85, "score": 334125.7225739978 }, { "content": "pub fn from_sysif_fdflags(f: system_interface::fs::FdFlags) -> wasi_common::file::FdFlags {\n\n let mut out = 
wasi_common::file::FdFlags::empty();\n\n if f.contains(system_interface::fs::FdFlags::APPEND) {\n\n out |= wasi_common::file::FdFlags::APPEND;\n\n }\n\n if f.contains(system_interface::fs::FdFlags::DSYNC) {\n\n out |= wasi_common::file::FdFlags::DSYNC;\n\n }\n\n if f.contains(system_interface::fs::FdFlags::NONBLOCK) {\n\n out |= wasi_common::file::FdFlags::NONBLOCK;\n\n }\n\n if f.contains(system_interface::fs::FdFlags::RSYNC) {\n\n out |= wasi_common::file::FdFlags::RSYNC;\n\n }\n\n if f.contains(system_interface::fs::FdFlags::SYNC) {\n\n out |= wasi_common::file::FdFlags::SYNC;\n\n }\n\n out\n\n}\n", "file_path": "crates/wasi-common/cap-std-sync/src/file.rs", "rank": 86, "score": 333628.36388733605 }, { "content": "pub fn to_sysif_fdflags(f: wasi_common::file::FdFlags) -> system_interface::fs::FdFlags {\n\n let mut out = system_interface::fs::FdFlags::empty();\n\n if f.contains(wasi_common::file::FdFlags::APPEND) {\n\n out |= system_interface::fs::FdFlags::APPEND;\n\n }\n\n if f.contains(wasi_common::file::FdFlags::DSYNC) {\n\n out |= system_interface::fs::FdFlags::DSYNC;\n\n }\n\n if f.contains(wasi_common::file::FdFlags::NONBLOCK) {\n\n out |= system_interface::fs::FdFlags::NONBLOCK;\n\n }\n\n if f.contains(wasi_common::file::FdFlags::RSYNC) {\n\n out |= system_interface::fs::FdFlags::RSYNC;\n\n }\n\n if f.contains(wasi_common::file::FdFlags::SYNC) {\n\n out |= system_interface::fs::FdFlags::SYNC;\n\n }\n\n out\n\n}\n", "file_path": "crates/wasi-common/cap-std-sync/src/file.rs", "rank": 87, "score": 333628.36388733605 }, { "content": "#[allow(dead_code)]\n\nfn check_wasm(wasm_path: &str, directives: &str) -> Result<()> {\n\n let wasm = read(wasm_path)?;\n\n let obj_file = NamedTempFile::new()?;\n\n let obj_path = obj_file.path().to_str().unwrap();\n\n compile_cranelift(&wasm, None, obj_path)?;\n\n let dump = get_dwarfdump(obj_path, DwarfDumpSection::DebugInfo)?;\n\n let mut builder = CheckerBuilder::new();\n\n builder\n\n .text(directives)\n\n .map_err(|e| format_err!(\"unable to build checker: {:?}\", e))?;\n\n let checker = builder.finish();\n\n let check = checker\n\n .explain(&dump, NO_VARIABLES)\n\n .map_err(|e| format_err!(\"{:?}\", e))?;\n\n assert!(check.0, \"didn't pass check {}\", check.1);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/all/debug/translate.rs", "rank": 88, "score": 333392.8558675855 }, { "content": "/// Look for a directive in a comment string.\n\n/// The directive is of the form \"foo:\" and should follow the leading `;` in the comment:\n\n///\n\n/// ; dominates: block3 block4\n\n///\n\n/// Return the comment text following the directive.\n\npub fn match_directive<'a>(comment: &'a str, directive: &str) -> Option<&'a str> {\n\n assert!(\n\n directive.ends_with(':'),\n\n \"Directive must include trailing colon\"\n\n );\n\n let text = comment.trim_start_matches(';').trim_start();\n\n if text.starts_with(directive) {\n\n Some(text[directive.len()..].trim())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "cranelift/filetests/src/match_directive.rs", "rank": 89, "score": 329983.2939347696 }, { "content": "fn serialize(engine: &Engine, wat: &str) -> Result<Vec<u8>> {\n\n let module = Module::new(&engine, wat)?;\n\n Ok(module.serialize()?)\n\n}\n\n\n\nunsafe fn deserialize_and_instantiate(store: &mut Store<()>, buffer: &[u8]) -> Result<Instance> {\n\n let module = Module::deserialize(store.engine(), buffer)?;\n\n Ok(Instance::new(store, &module, &[])?)\n\n}\n\n\n", "file_path": "tests/all/module_serialize.rs", "rank": 90, "score": 329572.59484146506 }, { 
"content": "/// Parse a float using the same format as `format_float` above.\n\n///\n\n/// The encoding parameters are:\n\n///\n\n/// w - exponent field width in bits\n\n/// t - trailing significand field width in bits\n\n///\n\nfn parse_float(s: &str, w: u8, t: u8) -> Result<u64, &'static str> {\n\n debug_assert!(w > 0 && w <= 16, \"Invalid exponent range\");\n\n debug_assert!(1 + w + t <= 64, \"Too large IEEE format for u64\");\n\n debug_assert!((t + w + 1).is_power_of_two(), \"Unexpected IEEE format size\");\n\n\n\n let (sign_bit, s2) = if s.starts_with('-') {\n\n (1u64 << (t + w), &s[1..])\n\n } else if s.starts_with('+') {\n\n (0, &s[1..])\n\n } else {\n\n (0, s)\n\n };\n\n\n\n if !s2.starts_with(\"0x\") {\n\n let max_e_bits = ((1u64 << w) - 1) << t;\n\n let quiet_bit = 1u64 << (t - 1);\n\n\n\n // The only decimal encoding allowed is 0.\n\n if s2 == \"0.0\" {\n\n return Ok(sign_bit);\n", "file_path": "cranelift/codegen/src/ir/immediates.rs", "rank": 91, "score": 329222.33245826943 }, { "content": "#[allow(dead_code)]\n\nfn check_line_program(wasm_path: &str, directives: &str) -> Result<()> {\n\n let wasm = read(wasm_path)?;\n\n let obj_file = NamedTempFile::new()?;\n\n let obj_path = obj_file.path().to_str().unwrap();\n\n compile_cranelift(&wasm, None, obj_path)?;\n\n let dump = get_dwarfdump(obj_path, DwarfDumpSection::DebugLine)?;\n\n let mut builder = CheckerBuilder::new();\n\n builder\n\n .text(directives)\n\n .map_err(|e| format_err!(\"unable to build checker: {:?}\", e))?;\n\n let checker = builder.finish();\n\n let check = checker\n\n .explain(&dump, NO_VARIABLES)\n\n .map_err(|e| format_err!(\"{:?}\", e))?;\n\n assert!(check.0, \"didn't pass check {}\", check.1);\n\n Ok(())\n\n}\n\n\n\n#[test]\n\n#[ignore]\n\n#[cfg(all(\n\n any(target_os = \"linux\", target_os = \"macos\"),\n\n target_pointer_width = \"64\"\n\n))]\n", "file_path": "tests/all/debug/translate.rs", "rank": 92, "score": 328856.6442525284 }, { "content": "/// This function is required to be called before any WebAssembly is entered.\n\n/// This will configure global state such as signal handlers to prepare the\n\n/// process to receive wasm traps.\n\n///\n\n/// This function must not only be called globally once before entering\n\n/// WebAssembly but it must also be called once-per-thread that enters\n\n/// WebAssembly. Currently in wasmtime's integration this function is called on\n\n/// creation of a `Engine`.\n\n///\n\n/// The `is_wasm_pc` argument is used when a trap happens to determine if a\n\n/// program counter is the pc of an actual wasm trap or not. This is then used\n\n/// to disambiguate faults that happen due to wasm and faults that happen due to\n\n/// bugs in Rust or elsewhere.\n\npub fn init_traps(is_wasm_pc: fn(usize) -> bool) {\n\n static INIT: Once = Once::new();\n\n INIT.call_once(|| unsafe {\n\n IS_WASM_PC = is_wasm_pc;\n\n sys::platform_init();\n\n });\n\n}\n\n\n\n/// Raises a user-defined trap immediately.\n\n///\n\n/// This function performs as-if a wasm trap was just executed, only the trap\n\n/// has a dynamic payload associated with it which is user-provided. This trap\n\n/// payload is then returned from `catch_traps` below.\n\n///\n\n/// # Safety\n\n///\n\n/// Only safe to call when wasm code is on the stack, aka `catch_traps` must\n\n/// have been previously called. Additionally no Rust destructors can be on the\n\n/// stack. They will be skipped and not executed.\n\npub unsafe fn raise_user_trap(data: Error) -> ! 
{\n", "file_path": "crates/runtime/src/traphandlers.rs", "rank": 93, "score": 325868.34911872965 }, { "content": "// note: config loading during validation creates cache directory to canonicalize its path,\n\n// that's why these function and macro always use custom cache directory\n\n// note: tempdir removes directory when being dropped, so we need to return it to the caller,\n\n// so the paths are valid\n\npub fn test_prolog() -> (TempDir, PathBuf, PathBuf) {\n\n let _ = pretty_env_logger::try_init();\n\n let temp_dir = tempfile::tempdir().expect(\"Can't create temporary directory\");\n\n let cache_dir = temp_dir.path().join(\"cache-dir\");\n\n let config_path = temp_dir.path().join(\"cache-config.toml\");\n\n (temp_dir, cache_dir, config_path)\n\n}\n\n\n\nmacro_rules! load_config {\n\n ($config_path:ident, $content_fmt:expr, $cache_dir:ident) => {{\n\n let config_path = &$config_path;\n\n let content = format!(\n\n $content_fmt,\n\n cache_dir = toml::to_string_pretty(&format!(\"{}\", $cache_dir.display())).unwrap()\n\n );\n\n fs::write(config_path, content).expect(\"Failed to write test config file\");\n\n CacheConfig::from_file(Some(config_path)).unwrap()\n\n }};\n\n}\n\n\n", "file_path": "crates/cache/src/config/tests.rs", "rank": 94, "score": 325431.80890964624 }, { "content": "fn execute_across_threads<F: Future + Send + 'static>(future: F) {\n\n let mut future = Pin::from(Box::new(future));\n\n let poll = future\n\n .as_mut()\n\n .poll(&mut Context::from_waker(&dummy_waker()));\n\n assert!(poll.is_pending());\n\n\n\n std::thread::spawn(move || {\n\n let poll = future\n\n .as_mut()\n\n .poll(&mut Context::from_waker(&dummy_waker()));\n\n assert!(!poll.is_pending());\n\n })\n\n .join()\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/all/async_functions.rs", "rank": 95, "score": 325259.34050188685 }, { "content": "fn is_dead_code<R: Reader>(entry: &DebuggingInformationEntry<R>) -> bool {\n\n const TOMBSTONE: u64 = u32::MAX as u64;\n\n\n\n match entry.attr_value(gimli::DW_AT_low_pc) {\n\n Ok(Some(AttributeValue::Addr(addr))) => addr == TOMBSTONE,\n\n _ => false,\n\n }\n\n}\n\n\n\npub(crate) fn clone_unit<'a, R>(\n\n dwarf: &gimli::Dwarf<R>,\n\n unit: Unit<R, R::Offset>,\n\n context: &DebugInputContext<R>,\n\n addr_tr: &'a AddressTransform,\n\n funcs: &'a CompiledFunctions,\n\n memory_offset: &ModuleMemoryOffset,\n\n out_encoding: gimli::Encoding,\n\n out_units: &mut write::UnitTable,\n\n out_strings: &mut write::StringTable,\n\n translated: &mut HashSet<DefinedFuncIndex>,\n", "file_path": "crates/cranelift/src/debug/transform/unit.rs", "rank": 96, "score": 324842.89027315954 }, { "content": "fn parse_opt_level(opt_level: &str) -> Result<wasmtime::OptLevel> {\n\n match opt_level {\n\n \"s\" => Ok(wasmtime::OptLevel::SpeedAndSize),\n\n \"0\" => Ok(wasmtime::OptLevel::None),\n\n \"1\" => Ok(wasmtime::OptLevel::Speed),\n\n \"2\" => Ok(wasmtime::OptLevel::Speed),\n\n other => bail!(\n\n \"unknown optimization level `{}`, only 0,1,2,s accepted\",\n\n other\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 97, "score": 323931.2979860619 }, { "content": "fn validate_symbol(name: &str) -> ModuleResult<()> {\n\n // null bytes are not allowed in symbol names and will cause the `object`\n\n // crate to panic. 
Let's return a clean error instead.\n\n if name.contains(\"\\0\") {\n\n return Err(ModuleError::Backend(anyhow::anyhow!(\n\n \"Symbol {:?} has a null byte, which is disallowed\",\n\n name\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl Module for ObjectModule {\n\n fn isa(&self) -> &dyn TargetIsa {\n\n &*self.isa\n\n }\n\n\n\n fn declarations(&self) -> &ModuleDeclarations {\n\n &self.declarations\n\n }\n", "file_path": "cranelift/object/src/backend.rs", "rank": 98, "score": 323203.52863945346 }, { "content": "fn parse_bool_value(value: &str) -> SetResult<bool> {\n\n match value {\n\n \"true\" | \"on\" | \"yes\" | \"1\" => Ok(true),\n\n \"false\" | \"off\" | \"no\" | \"0\" => Ok(false),\n\n _ => Err(SetError::BadValue(\"bool\".to_string())),\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/settings.rs", "rank": 99, "score": 322689.15693783225 } ]
Rust
src/format/stm/load.rs
cmatsuoka/oxdz
0b7371bf63967819315316a58629881b2169f570
use format::{ProbeInfo, Format, Loader};
use format::stm::{StmData, StmPatterns, StmInstrument};
use module::{Module, Sample};
use module::sample::SampleType;
use util::BinaryRead;
use ::*;

pub struct StmLoader;

impl Loader for StmLoader {
    fn name(&self) -> &'static str {
        "Scream Tracker 2"
    }

    fn probe(&self, b: &[u8], player_id: &str) -> Result<ProbeInfo, Error> {
        if b.len() < 1084 {
            return Err(Error::Format(format!("file too short ({})", b.len())));
        }

        player::check_accepted(player_id, "stm")?;

        let magic = b.read_string(20, 10)?;
        if magic == "!Scream!\x1a\x02" || magic == "BMOD2STM\x1a\x02" ||
           magic == "WUZAMOD!\x1a\x02" || magic == "SWavePro\x1a\x02" {
            Ok(ProbeInfo{format: Format::Stm, title: b.read_string(0, 20)?})
        } else {
            Err(Error::Format(format!("bad magic {:?}", magic)))
        }
    }

    fn load(self: Box<Self>, b: &[u8], info: ProbeInfo) -> Result<Module, Error> {
        if info.format != Format::Stm {
            return Err(Error::Format("unsupported format".to_owned()));
        }

        let name = b.read_string(0, 20)?;
        let version_major = b.read8(30)?;
        let version_minor = b.read8(31)?;

        if version_major != 2 || version_minor < 21 {
            return Err(Error::Format(format!("unsupported version {}.{}", version_major, version_minor)));
        }

        let speed = b.read8(32)?;
        let num_patterns = b.read8(33)?;
        let global_vol = b.read8(34)?;
        let origin = b.read_string(20, 8)?;

        let mut instruments = Vec::<StmInstrument>::new();
        let mut samples = Vec::<Sample>::new();

        for i in 0..31 {
            let ins = load_instrument(b, i)?;
            instruments.push(ins);
        }

        let orders = b.slice(1040, 128)?;
        let patterns = StmPatterns::from_slice(num_patterns as usize, b.slice(1168, 1024*num_patterns as usize)?)?;

        let mut ofs = 1168 + 1024*num_patterns as usize;
        for i in 0..31 {
            let size = instruments[i].size as usize;
            let smp = load_sample(b.slice(ofs, size)?, ofs, i, &instruments[i]);
            samples.push(smp);
            ofs += size;
        }

        let mut data = StmData{
            name,
            speed,
            num_patterns,
            global_vol,
            instruments,
            orders: [0; 128],
            patterns,
            samples,
        };

        data.orders.copy_from_slice(orders);

        let m = Module {
            format_id  : "stm",
            description: format!("Scream Tracker 2 STM"),
            creator    : match origin.as_ref() {
                "!Scream!" => format!("Scream Tracker {}.{}", version_major, version_minor),
                "BMOD2STM" => "BMOD2STM".to_owned(),
                "WUZAMOD!" => "WUZAMOD".to_owned(),
                "SWavePro" => "SWavePro".to_owned(),
                _ => "unknown".to_owned(),
            },
            channels   : 4,
            player     : "st2",
            data       : Box::new(data),
        };

        Ok(m)
    }
}

fn load_instrument(b: &[u8], i: usize) -> Result<StmInstrument, Error> {
    let mut ins = StmInstrument::new();

    let ofs = 48 + i * 32;
    ins.name = b.read_string(ofs, 12)?;
    ins.size = b.read16l(ofs + 16)?;
    ins.loop_start = b.read16l(ofs + 18)?;
    ins.loop_end = b.read16l(ofs + 20)?;
    ins.volume = b.read8(ofs + 22)?;
    ins.c2spd = b.read16l(ofs + 24)?;

    Ok(ins)
}

fn load_sample(b: &[u8], ofs: usize, i: usize, ins: &StmInstrument) -> Sample {
    let mut smp = Sample::new();

    smp.num = i + 1;
    smp.address = ofs as u32;
    smp.name = ins.name.to_owned();
    smp.rate = ins.c2spd as f64 / 8448.0;
    smp.size = ins.size as u32;
    if smp.size > 0 {
        smp.sample_type = SampleType::Sample8;
    }
    smp.store(b);

    smp
}
use format::{ProbeInfo, Format, Loader};
use format::stm::{StmData, StmPatterns, StmInstrument};
use module::{Module, Sample};
use module::sample::SampleType;
use util::BinaryRead;
use ::*;

pub struct StmLoader;

impl Loader for StmLoader {
    fn name(&self) -> &'static str {
        "Scream Tracker 2"
    }

    fn probe(&self, b: &[u8], player_id: &str) -> Result<ProbeInfo, Error> {
        if b.len() < 1084 {
            return Err(Error::Format(format!("file too short ({})", b.len())));
        }

        player::check_accepted(player_id, "stm")?;

        let magic = b.read_string(20, 10)?;
        if magic == "!Scream!\x1a\x02" || magic == "BMOD2STM\x1a\x02" ||
           magic == "WUZAMOD!\x1a\x02" || magic == "SWavePro\x1a\x02" {
            Ok(ProbeInfo{format: Format::Stm, title: b.read_string(0, 20)?})
        } else {
            Err(Error::Format(format!("bad magic {:?}", magic)))
        }
    }

    fn load(self: Box<Self>, b: &[u8], info: ProbeInfo) -> Result<Module, Error> {
        if info.format != Format::Stm {
            return Err(Error::Format("unsupported format".to_owned()));
        }

        let name = b.read_string(0, 20)?;
        let version_major = b.read8(30)?;
mPatterns::from_slice(num_patterns as usize, b.slice(1168, 1024*num_patterns as usize)?)?;

        let mut ofs = 1168 + 1024*num_patterns as usize;
        for i in 0..31 {
            let size = instruments[i].size as usize;
            let smp = load_sample(b.slice(ofs, size)?, ofs, i, &instruments[i]);
            samples.push(smp);
            ofs += size;
        }

        let mut data = StmData{
            name,
            speed,
            num_patterns,
            global_vol,
            instruments,
            orders: [0; 128],
            patterns,
            samples,
        };

        data.orders.copy_from_slice(orders);

        let m = Module {
            format_id  : "stm",
            description: format!("Scream Tracker 2 STM"),
            creator    : match origin.as_ref() {
                "!Scream!" => format!("Scream Tracker {}.{}", version_major, version_minor),
                "BMOD2STM" => "BMOD2STM".to_owned(),
                "WUZAMOD!" => "WUZAMOD".to_owned(),
                "SWavePro" => "SWavePro".to_owned(),
                _ => "unknown".to_owned(),
            },
            channels   : 4,
            player     : "st2",
            data       : Box::new(data),
        };

        Ok(m)
    }
}

fn load_instrument(b: &[u8], i: usize) -> Result<StmInstrument, Error> {
    let mut ins = StmInstrument::new();

    let ofs = 48 + i * 32;
    ins.name = b.read_string(ofs, 12)?;
    ins.size = b.read16l(ofs + 16)?;
    ins.loop_start = b.read16l(ofs + 18)?;
    ins.loop_end = b.read16l(ofs + 20)?;
    ins.volume = b.read8(ofs + 22)?;
    ins.c2spd = b.read16l(ofs + 24)?;

    Ok(ins)
}

fn load_sample(b: &[u8], ofs: usize, i: usize, ins: &StmInstrument) -> Sample {
    let mut smp = Sample::new();

    smp.num = i + 1;
    smp.address = ofs as u32;
    smp.name = ins.name.to_owned();
    smp.rate = ins.c2spd as f64 / 8448.0;
    smp.size = ins.size as u32;
    if smp.size > 0 {
        smp.sample_type = SampleType::Sample8;
    }
    smp.store(b);

    smp
}
        let version_minor = b.read8(31)?;

        if version_major != 2 || version_minor < 21 {
            return Err(Error::Format(format!("unsupported version {}.{}", version_major, version_minor)));
        }

        let speed = b.read8(32)?;
        let num_patterns = b.read8(33)?;
        let global_vol = b.read8(34)?;
        let origin = b.read_string(20, 8)?;

        let mut instruments = Vec::<StmInstrument>::new();
        let mut samples = Vec::<Sample>::new();

        for i in 0..31 {
            let ins = load_instrument(b, i)?;
            instruments.push(ins);
        }

        let orders = b.slice(1040, 128)?;
        let patterns = St
function_block-random_span
[ { "content": "pub fn load(b: &[u8], player_id: &str) -> Result<Module, Error> {\n\n\n\n for f in loader_list() {\n\n debug!(\"Probing format: {}\", f.name());\n\n\n\n let info = match f.probe(b, player_id) {\n\n Ok(val) => val,\n\n Err(_) => continue,\n\n };\n\n\n\n debug!(\"Probe ok, load format {:?}\", info.format);\n\n return f.load(b, info)\n\n }\n\n\n\n Err(Error::Format(\"unsupported module format\".to_owned()))\n\n}\n", "file_path": "src/format/mod.rs", "rank": 0, "score": 299664.43347864883 }, { "content": "pub fn format(b: &[u8]) -> String {\n\n let note = if b[0] & HAS_NOTE != 0 {\n\n format!(\"{}{}\", NOTES[b[1] as usize % 12], b[1] / 12)\n\n } else {\n\n \"---\".to_owned()\n\n };\n\n\n\n let ins = if b[0] & HAS_INS != 0 {\n\n format!(\"{:02x}\", b[2])\n\n } else {\n\n \"--\".to_owned()\n\n };\n\n\n\n let vol = if b[0] & HAS_VOL != 0 {\n\n format!(\"{:02x}\", b[3])\n\n } else {\n\n \"--\".to_owned()\n\n };\n\n\n\n format!(\"{} {} {} {:02X}{:02X}\", note, ins, vol, b[4], b[5])\n\n}\n\n\n", "file_path": "src/module/event.rs", "rank": 2, "score": 248519.3487274567 }, { "content": "pub fn check_accepted(player_id: &str, my_fmt: &str) -> Result<bool, Error> {\n\n let accepted = if player_id.is_empty() {\n\n &[] // accept all\n\n } else {\n\n accepted(player_id)\n\n };\n\n\n\n if accepted.is_empty() {\n\n return Ok(false)\n\n } else {\n\n if !accepted.contains(&my_fmt) {\n\n return Err(Error::Format(format!(\"format {:?} not accepted by player {:?}\", my_fmt, player_id)))\n\n }\n\n }\n\n\n\n Ok(true)\n\n}\n\n\n\n\n\n\n\n// For the player list\n\n\n\npub struct PlayerInfo {\n\n pub id : &'static str,\n\n pub name : &'static str,\n\n pub description: &'static str,\n\n pub author : &'static str,\n\n pub accepts : &'static [&'static str],\n\n}\n\n\n", "file_path": "src/player/mod.rs", "rank": 4, "score": 218682.24877863762 }, { "content": "fn accepted(player_id: &str) -> &'static [&'static str] {\n\n let list_entry = match list_by_id(player_id) {\n\n Ok(val) => val,\n\n Err(_) => return &[],\n\n };\n\n\n\n list_entry.info().accepts\n\n}\n\n\n", "file_path": "src/player/mod.rs", "rank": 5, "score": 217437.0839595645 }, { "content": "fn channels_from_magic(magic: &str) -> usize {\n\n if magic == \"FLT8\" {\n\n 8\n\n } else {\n\n let m: Vec<char> = magic.chars().collect();\n\n if m[0].is_digit(10) && m[1].is_digit(10) && &magic[2..] == \"CH\" {\n\n ((m[0] as u8 - '0' as u8) * 10 + m[1] as u8 - '0' as u8) as usize\n\n } else if m[0].is_digit(10) && &magic[1..] 
== \"CHN\" {\n\n (m[0] as u8 - '0' as u8) as usize\n\n } else {\n\n 4\n\n }\n\n }\n\n}\n", "file_path": "src/format/mk/load.rs", "rank": 6, "score": 217029.57134392875 }, { "content": "fn load_sample(b: &[u8], i: usize, ins: &S3mInstrument, cvt: bool) -> Result<Sample, Error> {\n\n let mut smp = Sample::new();\n\n\n\n smp.num = i + 1;\n\n smp.address = (ins.memseg as u32) << 4;\n\n smp.name = ins.name.to_owned();\n\n smp.size = ins.length;\n\n\n\n if smp.size > 0 {\n\n smp.sample_type = if ins.flags & 0x04 != 0 { SampleType::Sample16 } else { SampleType::Sample8 };\n\n }\n\n\n\n let sample_size = if ins.flags & 0x04 != 0 { smp.size*2 } else { smp.size };\n\n smp.store(b.slice((ins.memseg as usize) << 4, sample_size as usize)?);\n\n if cvt {\n\n smp.to_signed();\n\n }\n\n\n\n Ok(smp)\n\n}\n", "file_path": "src/format/s3m/load.rs", "rank": 7, "score": 208329.54737754626 }, { "content": "pub fn list_by_id(player_id: &str) -> Result<Box<PlayerListEntry>, Error> {\n\n for p in all() {\n\n if player_id == p.info().id {\n\n return Ok(p)\n\n }\n\n }\n\n Err(Error::Format(format!(\"player {:?} not found\", player_id)))\n\n}\n\n\n", "file_path": "src/player/mod.rs", "rank": 8, "score": 201029.45534537535 }, { "content": "fn load_sample(samp: &SampleHeaderTyp, smp_num: usize, b: &[u8], offset: &mut usize) -> Result<Sample, Error> {\n\n let mut smp = Sample::new();\n\n smp.num = smp_num;\n\n smp.name = samp.name.to_owned();\n\n smp.size = samp.len as u32;\n\n let byte_size = samp.len as usize;\n\n smp.sample_type = if samp.typ & 16 != 0 {\n\n let buf = diff_decode_16l(b.slice(*offset, byte_size)?);\n\n smp.store(&buf[..].as_slice_u8());\n\n SampleType::Sample16\n\n } else {\n\n let buf = diff_decode_8(b.slice(*offset, byte_size)?);\n\n smp.store(&buf[..]);\n\n SampleType::Sample8\n\n };\n\n *offset += byte_size;\n\n\n\n Ok(smp)\n\n}\n\n\n", "file_path": "src/format/xm/load.rs", "rank": 9, "score": 199579.23865207226 }, { "content": "/// Retrieve the list of supported module formats.\n\npub fn format_list() -> Vec<FormatInfo> {\n\n format::list()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 197883.3305369653 }, { "content": "pub fn list() -> Vec<FormatInfo> {\n\n loader_list().iter().map(|x| FormatInfo{name: x.name()}).collect()\n\n}\n\n\n", "file_path": "src/format/mod.rs", "rank": 11, "score": 197878.68931190105 }, { "content": "pub fn period_to_note(period: u16) -> u8 {\n\n if period == 0 {\n\n return 0\n\n }\n\n\n\n (12.0_f64 * (PERIOD_BASE / period as f64).log(2.0)).round() as u8\n\n}\n\n\n", "file_path": "src/format/mk/mod.rs", "rank": 12, "score": 195933.9226933989 }, { "content": "fn load_instrument(b: &[u8], i: usize) -> Result<ModInstrument, Error> {\n\n let mut ins = ModInstrument::new();\n\n\n\n let ofs = 20 + i * 30;\n\n ins.name = b.read_string(ofs, 22)?;\n\n ins.size = b.read16b(ofs + 22)?;\n\n ins.finetune = b.read8(ofs + 24)?;\n\n ins.volume = b.read8(ofs + 25)?;\n\n ins.repeat = b.read16b(ofs + 26)?;\n\n ins.replen = b.read16b(ofs + 28)?;\n\n\n\n Ok(ins)\n\n}\n\n\n", "file_path": "src/format/st/load.rs", "rank": 13, "score": 192946.92362900343 }, { "content": "fn load_instrument(b: &[u8], i: usize) -> Result<ModInstrument, Error> {\n\n let mut ins = ModInstrument::new();\n\n\n\n let ofs = 20 + i * 30;\n\n ins.name = b.read_string(ofs, 22)?;\n\n ins.size = b.read16b(ofs + 22)?;\n\n ins.finetune = b.read8(ofs + 24)?;\n\n ins.volume = b.read8(ofs + 25)?;\n\n ins.repeat = b.read16b(ofs + 26)?;\n\n ins.replen = b.read16b(ofs + 28)?;\n\n\n\n Ok(ins)\n\n}\n\n\n", "file_path": 
"src/format/mk/load.rs", "rank": 14, "score": 192946.92362900343 }, { "content": "fn load_instrument(b: &[u8], i: usize) -> Result<ModInstrument, Error> {\n\n let mut ins = ModInstrument::new();\n\n\n\n let ofs = 20 + i * 30;\n\n ins.name = b.read_string(ofs, 22)?;\n\n ins.size = b.read16b(ofs + 22)?;\n\n ins.finetune = b.read8(ofs + 24)?;\n\n ins.volume = b.read8(ofs + 25)?;\n\n ins.repeat = b.read16b(ofs + 26)?;\n\n ins.replen = b.read16b(ofs + 28)?;\n\n\n\n Ok(ins)\n\n}\n\n\n", "file_path": "src/format/fest/load.rs", "rank": 15, "score": 192946.92362900343 }, { "content": "fn load_instrument(b: &[u8], ofs: usize) -> Result<S3mInstrument, Error> {\n\n let mut ins = S3mInstrument::new();\n\n\n\n ins.typ = b.read8(ofs)?;\n\n ins.memseg = (b.read16l(ofs + 0x0e)? as u32) | ((b.read8(ofs + 0x0d)? as u32) << 16);\n\n ins.length = b.read16l_lo_hi(ofs + 0x10)?;\n\n ins.loop_beg = b.read16l_lo_hi(ofs + 0x14)?;\n\n ins.loop_end = b.read16l_lo_hi(ofs + 0x18)?;\n\n ins.vol = b.read8i(ofs + 0x1c)?;\n\n ins.flags = b.read8i(ofs + 0x1f)?;\n\n ins.c2spd = b.read16l_lo_hi(ofs + 0x20)?;\n\n ins.name = b.read_string(ofs + 0x30, 28)?;\n\n\n\n Ok(ins)\n\n}\n\n\n", "file_path": "src/format/s3m/load.rs", "rank": 16, "score": 189703.0400036771 }, { "content": "fn convert_cmd(cmd: u8, info: u8) -> (u8, u8) {\n\n let new_cmd: u8;\n\n let mut new_info = info;\n\n\n\n let x = match cmd {\n\n 0 => { // Normal play or Arpeggio\n\n if info != 0{\n\n 'J'\n\n } else {\n\n '@'\n\n }\n\n },\n\n 1 => { // Slide Up\n\n 'F'\n\n },\n\n 2 => { // Slide Down\n\n 'E'\n\n },\n\n 3 => { // Tone Portamento\n\n 'G'\n", "file_path": "src/player/st3/import.rs", "rank": 17, "score": 186586.52793438704 }, { "content": "fn diff_decode_8(b: &[u8]) -> Vec<u8> {\n\n let mut buf: Vec<u8> = vec![0; b.len()];\n\n let mut old = 0_u8;\n\n for i in 0..b.len() {\n\n let new = b[i].wrapping_add(old);\n\n buf[i] = new;\n\n old = new;\n\n }\n\n buf\n\n}\n\n\n", "file_path": "src/format/xm/load.rs", "rank": 18, "score": 184163.66998370265 }, { "content": "struct Magic {\n\n magic: &'static str,\n\n flag : bool,\n\n id : TrackerID,\n\n ch : u8,\n\n}\n\n\n\nlazy_static! 
{\n\n static ref MAGIC: Box<[Magic; 13]> = Box::new([\n\n Magic{magic:\"M.K.\", flag:false, id:TrackerID::Protracker, ch:4},\n\n Magic{magic:\"M!K!\", flag:true, id:TrackerID::Protracker, ch:4},\n\n Magic{magic:\"M&K!\", flag:true, id:TrackerID::Noisetracker, ch:4},\n\n Magic{magic:\"N.T.\", flag:true, id:TrackerID::Noisetracker, ch:4},\n\n Magic{magic:\"6CHN\", flag:false, id:TrackerID::FastTracker, ch:6},\n\n Magic{magic:\"8CHN\", flag:false, id:TrackerID::FastTracker, ch:8},\n\n Magic{magic:\"CD61\", flag:true, id:TrackerID::Octalyser, ch:6}, // Atari STe/Falcon\n\n Magic{magic:\"CD81\", flag:true, id:TrackerID::Octalyser, ch:8}, // Atari STe/Falcon\n\n Magic{magic:\"TDZ4\", flag:true, id:TrackerID::TakeTracker, ch:4}, // see XModule SaveTracker.c\n\n Magic{magic:\"FA04\", flag:true, id:TrackerID::DigitalTracker, ch:4}, // Atari Falcon\n\n Magic{magic:\"FA06\", flag:true, id:TrackerID::DigitalTracker, ch:6}, // Atari Falcon\n", "file_path": "src/format/mk/fingerprint.rs", "rank": 19, "score": 179823.2341009879 }, { "content": "fn test_name(b: &[u8], ofs: usize, size: usize) -> bool {\n\n for x in b[ofs..ofs+size].iter() {\n\n if *x > 0x7f { return false }\n\n if *x > 0 && *x < 32 { return false }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/format/st/load.rs", "rank": 20, "score": 177052.797250083 }, { "content": "fn load_sample(b: &[u8], ofs: usize, i: usize, ins: &ModInstrument) -> Sample {\n\n let mut smp = Sample::new();\n\n\n\n smp.num = i + 1;\n\n smp.name = ins.name.to_owned();\n\n smp.address = ofs as u32;\n\n smp.size = ins.size as u32 * 2;\n\n if smp.size > 0 {\n\n smp.sample_type = SampleType::Sample8;\n\n }\n\n smp.store(b);\n\n\n\n smp\n\n}\n\n\n", "file_path": "src/format/mk/load.rs", "rank": 21, "score": 176208.12820452225 }, { "content": "fn load_sample(b: &[u8], ofs: usize, i: usize, ins: &ModInstrument) -> Sample {\n\n let mut smp = Sample::new();\n\n\n\n smp.num = i + 1;\n\n smp.name = ins.name.to_owned();\n\n smp.address = ofs as u32;\n\n smp.size = ins.size as u32 * 2;\n\n if smp.size > 0 {\n\n smp.sample_type = SampleType::Sample8;\n\n }\n\n smp.store(b);\n\n\n\n smp\n\n}\n\n\n", "file_path": "src/format/fest/load.rs", "rank": 22, "score": 176208.12820452225 }, { "content": "fn load_sample(b: &[u8], ofs: usize, i: usize, ins: &ModInstrument) -> Sample {\n\n let mut smp = Sample::new();\n\n\n\n smp.num = i + 1;\n\n smp.name = ins.name.to_owned();\n\n smp.address = ofs as u32;\n\n smp.size = ins.size as u32 * 2;\n\n if smp.size > 0 {\n\n smp.sample_type = SampleType::Sample8;\n\n }\n\n smp.store(b);\n\n\n\n smp\n\n}\n\n\n\nlazy_static! 
{\n\n static ref NOTE_TABLE: Box<[u16; 37]> = Box::new([\n\n 856, 808, 762, 720, 678, 640, 604, 570,\n\n 538, 508, 480, 453, 428, 404, 381, 360,\n\n 339, 320, 302, 285, 269, 254, 240, 226,\n\n 214, 202, 190, 180, 170, 160, 151, 143,\n\n 135, 127, 120, 113, 000\n\n ]);\n\n}\n", "file_path": "src/format/st/load.rs", "rank": 23, "score": 176208.12820452225 }, { "content": "fn check_buffer_size(b: &[u8], end: usize) -> Result<(), Error> {\n\n if end > b.len() {\n\n return Err(Error::Load(format!(\"short read (want {} bytes, have {})\", end, b.len())))\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 24, "score": 171032.1260557053 }, { "content": "fn load_instruments(header: &SongHeaderTyp, b: &[u8], mut offset: &mut usize) -> Result<(Vec<InstrHeaderTyp>, Vec<Sample>), Error> {\n\n let mut instruments: Vec<InstrHeaderTyp> = Vec::with_capacity(header.ant_instrs as usize);\n\n let mut samples: Vec<Sample> = Vec::new();\n\n let mut smp_num = 1;\n\n\n\n for _i in 0..header.ant_instrs as usize {\n\n let ins = InstrHeaderTyp::from_slice(smp_num, b.slice(*offset, b.len() - *offset)?)?;\n\n let ant_samp = ins.ant_samp;\n\n\n\n *offset += ins.instr_size as usize + 40 * ant_samp as usize;\n\n if ant_samp > 0 {\n\n\n\n for j in 0..ant_samp as usize {\n\n if header.ver >= 0x0104 {\n\n let smp = load_sample(&ins.samp[j], smp_num, b, &mut offset)?;\n\n samples.push(smp);\n\n }\n\n smp_num += 1;\n\n }\n\n }\n\n\n\n instruments.push(ins);\n\n }\n\n\n\n Ok((instruments, samples))\n\n}\n\n\n", "file_path": "src/format/xm/load.rs", "rank": 25, "score": 169511.20846757927 }, { "content": "fn load_mupp(b: &[u8], ofs: usize, i: usize, pat_num: usize) -> Sample {\n\n let mut smp = Sample::new();\n\n\n\n smp.num = i + 1;\n\n smp.name = format!(\"Mupp @{}\", pat_num);\n\n smp.address = ofs as u32;\n\n smp.size = 28*32;\n\n smp.sample_type = SampleType::Sample8;\n\n smp.store(b);\n\n\n\n smp\n\n}\n\n\n", "file_path": "src/format/fest/load.rs", "rank": 26, "score": 164964.35458581493 }, { "content": "fn diff_decode_16l(b: &[u8]) -> Vec<u16> {\n\n let mut buf: Vec<u16> = vec![0; b.len()];\n\n let mut old = 0_u16;\n\n for i in 0..b.len() / 2 {\n\n let val = ((b[i*2+1] as u16) << 8) + b[i*2] as u16;\n\n let new = val.wrapping_add(old);\n\n buf[i] = new;\n\n old = new;\n\n }\n\n buf\n\n}\n", "file_path": "src/format/xm/load.rs", "rank": 27, "score": 164409.13845879489 }, { "content": "pub trait Loader: Sync {\n\n fn name(&self) -> &'static str;\n\n fn probe(&self, &[u8], &str) -> Result<ProbeInfo, Error>;\n\n fn load(self: Box<Self>, &[u8], ProbeInfo) -> Result<Module, Error>;\n\n}\n\n\n", "file_path": "src/format/mod.rs", "rank": 28, "score": 163618.04702596134 }, { "content": "fn loader_list() -> Vec<Box<Loader>> {\n\n vec![\n\n Box::new(xm::XmLoader),\n\n Box::new(s3m::S3mLoader),\n\n Box::new(stm::StmLoader),\n\n Box::new(mk::ModLoader),\n\n Box::new(st::StLoader),\n\n Box::new(fest::FestLoader),\n\n ]\n\n}\n\n\n", "file_path": "src/format/mod.rs", "rank": 29, "score": 162238.1559857783 }, { "content": "fn load_patterns(header: &SongHeaderTyp, b: &[u8], offset: &mut usize) -> Result<Vec<PatternHeaderTyp>, Error> {\n\n let mut patterns = Vec::with_capacity(header.ant_ptn as usize);\n\n for i in 0..header.ant_ptn as usize - 1 {\n\n let ptn = PatternHeaderTyp::from_slice(b.slice(*offset, b.len() - *offset)?, header.ver, header.ant_chn as usize)?;\n\n debug!(\"pattern {}: {} rows\", i, ptn.patt_len);\n\n *offset += ptn.pattern_header_size as usize + ptn.data_len as usize;\n\n patterns.push(ptn);\n\n }\n\n // 
alloc one extra pattern\n\n patterns.push(PatternHeaderTyp::new_empty(header.ant_chn as usize));\n\n\n\n Ok(patterns)\n\n}\n\n\n", "file_path": "src/format/xm/load.rs", "rank": 30, "score": 157377.56197817554 }, { "content": "fn parse_num(s: &str) -> Result<usize, std::num::ParseIntError> {\n\n if s.starts_with(\"0x\") {\n\n usize::from_str_radix(&s[2..], 16)\n\n } else {\n\n s.parse()\n\n }\n\n}\n", "file_path": "examples/save-sample/src/main.rs", "rank": 31, "score": 152781.71707090328 }, { "content": "pub fn list() -> Vec<PlayerInfo> {\n\n all().iter().map(|p| p.info()).collect()\n\n}\n\n\n", "file_path": "src/player/mod.rs", "rank": 32, "score": 152114.18066118474 }, { "content": "/// Retrieve the list of available players.\n\npub fn player_list() -> Vec<PlayerInfo> {\n\n player::list()\n\n}\n\n\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Format(String),\n\n Player(String),\n\n Load(String),\n\n Io(io::Error),\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n &Error::Format(ref descr) => write!(f, \"{}\", descr),\n\n &Error::Player(ref descr) => write!(f, \"{}\", descr),\n\n &Error::Load(ref descr) => write!(f, \"{}\", descr),\n\n &Error::Io(ref err) => write!(f, \"{}\", err),\n", "file_path": "src/lib.rs", "rank": 33, "score": 152114.18066118474 }, { "content": "pub fn from_mod(module: Module) -> Result<Module, Error> {\n\n\n\n let data = module.data.as_any().downcast_ref::<ModData>().unwrap();\n\n\n\n let mut ins_num = 0;\n\n for i in 0..31 {\n\n if data.instruments[i].size > 0 {\n\n ins_num = i + 1\n\n }\n\n }\n\n\n\n let mut instruments = Vec::<S3mInstrument>::new();\n\n for i in 0..ins_num {\n\n instruments.push(S3mInstrument{\n\n typ : 1,\n\n memseg : 0,\n\n length : data.instruments[i].size as u32 * 2,\n\n loop_beg: data.instruments[i].repeat as u32 * 2,\n\n loop_end: (data.instruments[i].repeat as u32 + data.instruments[i].replen as u32) * 2,\n\n vol : data.instruments[i].volume as i8,\n", "file_path": "src/player/st3/import.rs", "rank": 34, "score": 151210.25459558953 }, { "content": "pub fn get_mod_pattern(data: &Vec<ModEvent>, pat: usize, chn: usize, num: usize, buffer: &mut [u8]) -> usize {\n\n let mut i = 0;\n\n for _ in 0..num {\n\n let (row, ch) = (i / chn, i % chn);\n\n let ofs = i * 6;\n\n let e = &data[pat*64*chn + row*chn + ch];\n\n\n\n let mut flags = 0;\n\n let note = e.note & 0xfff;\n\n let ins = (((e.note & 0xf000) >> 8) | ((e.cmd as u16 & 0xf0) >> 4)) as u8;\n\n\n\n if note != 0 { flags |= event::HAS_NOTE; buffer[ofs+1] = period_to_note(note) }\n\n if ins != 0 { flags |= event::HAS_INS ; buffer[ofs+2] = ins }\n\n if e.cmd != 0 || e.cmdlo != 0 { flags |= event::HAS_CMD; buffer[ofs+4] = e.cmd; buffer[ofs+5] = e.cmdlo }\n\n buffer[ofs] = flags;\n\n\n\n i += 1;\n\n }\n\n i\n\n}\n", "file_path": "src/format/mk/mod.rs", "rank": 35, "score": 135206.59640274834 }, { "content": "fn run(args: Vec<String>) -> Result<(), Box<Error>> {\n\n\n\n let filename = &args[1];\n\n let num = parse_num(&args[2])?;\n\n\n\n let file = File::open(filename)?;\n\n let mmap = unsafe { Mmap::map(&file).expect(\"failed to map the file\") };\n\n\n\n let mut module = oxdz::format::load(&mmap[..], \"\")?;\n\n let samples = module.data.samples();\n\n\n\n if num >= samples.len() {\n\n return Err(Box::new(MyError::InvalidSample(num)));\n\n }\n\n\n\n let out_filename = format!(\"sample_{}.raw\", num);\n\n let file = File::create(out_filename)?;\n\n let mut writer = BufWriter::new(file);\n\n 
writer.write_all(samples[num].data_u8())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/save-sample/src/main.rs", "rank": 36, "score": 131510.03498233316 }, { "content": "fn period_to_note(period: u16, fine: u8) -> u8 {\n\n let ofs = fine as usize * 36;\n\n for i in 0..36 {\n\n if period >= FT_PERIOD_TABLE[ofs + i] {\n\n return i as u8\n\n } \n\n }\n\n 35\n\n}\n\n\n\nimpl FtPlayer {\n\n pub fn new(module: &Module, options: Options) -> Self {\n\n\n\n FtPlayer {\n\n options,\n\n\n\n ft_speed : 6,\n\n ft_counter : 0,\n\n ft_song_pos : 0,\n\n ft_pbreak_pos : 0,\n", "file_path": "src/player/fasttracker/player.rs", "rank": 37, "score": 122966.48188081439 }, { "content": "fn note_to_period(note: u8, fine: u8) -> u16 {\n\n FT_PERIOD_TABLE[fine as usize*36 + note as usize]\n\n}\n\n\n", "file_path": "src/player/fasttracker/player.rs", "rank": 38, "score": 122966.48188081439 }, { "content": "fn parse_num(s: &str) -> Result<usize, std::num::ParseIntError> {\n\n if s.starts_with(\"0x\") {\n\n usize::from_str_radix(&s[2..], 16)\n\n } else {\n\n s.parse()\n\n }\n\n}\n\n\n", "file_path": "examples/show-pattern/src/main.rs", "rank": 39, "score": 122462.33419168668 }, { "content": "fn parse_num(s: &str) -> Result<usize, std::num::ParseIntError> {\n\n if s.starts_with(\"0x\") {\n\n usize::from_str_radix(&s[2..], 16)\n\n } else {\n\n s.parse()\n\n }\n\n}\n", "file_path": "examples/wav-writer/src/main.rs", "rank": 40, "score": 122462.33419168668 }, { "content": "#[derive(Default)]\n\nstruct SampleTyp {\n\n len : i32,\n\n rep_s : i32,\n\n rep_l : i32,\n\n vol : u8,\n\n fine : i8,\n\n typ : u8,\n\n pan : u8,\n\n rel_ton : i8,\n\n skrap : u8,\n\n name : String,\n\n //pek : i8,\n\n //gus_base : i32,\n\n //gus_len : i32,\n\n fixed : u8,\n\n fix_spar : u16,\n\n //res1 : u8,\n\n fixed_pos: i32,\n\n}\n\n\n", "file_path": "src/player/ft2/ft2play.rs", "rank": 41, "score": 107641.06748479235 }, { "content": "#[derive(Default)]\n\nstruct StmTyp {\n\n out_vol : i8,\n\n real_vol : i8,\n\n rel_ton_nr : i8,\n\n fine_tune : i8,\n\n old_vol : i8, //u8,\n\n old_pan : u8,\n\n out_pan : u8,\n\n final_pan : u8,\n\n env_sustain_active : bool,\n\n eff_typ : u8,\n\n eff : u8,\n\n smp_offset : u8,\n\n wave_ctrl : u8,\n\n status : u8,\n\n porta_dir : u8,\n\n gliss_funk : u8,\n\n vib_pos : u8,\n\n trem_pos : u8,\n\n vib_speed : u8,\n", "file_path": "src/player/ft2/ft2play.rs", "rank": 42, "score": 107641.06748479235 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 3 {\n\n println!(\"usage: {} <filename> <num>\", Path::new(&args[0]).file_name().unwrap().to_str().unwrap());\n\n return;\n\n }\n\n\n\n match run(args) {\n\n Ok(_) => {},\n\n Err(e) => eprintln!(\"error: {}\", e),\n\n }\n\n}\n\n\n\n\n", "file_path": "examples/save-sample/src/main.rs", "rank": 43, "score": 106282.32652985246 }, { "content": "#[proc_macro_derive(SaveRestore)]\n\npub fn save_restore(input: TokenStream) -> TokenStream {\n\n // Construct a string representation of the type definition\n\n let s = input.to_string();\n\n \n\n // Parse the string representation\n\n let ast = syn::parse_derive_input(&s).unwrap();\n\n\n\n // Build the impl\n\n let gen = impl_save_restore(&ast);\n\n \n\n // Return the generated impl\n\n gen.parse().unwrap()\n\n}\n\n\n", "file_path": "save-restore-derive/src/lib.rs", "rank": 44, "score": 102884.68039176772 }, { "content": "fn run(args: Vec<String>) -> Result<(), Box<Error>> {\n\n\n\n let filename = &args[1];\n\n let num = parse_num(&args[2])?;\n\n\n\n let file = 
File::open(filename)?;\n\n let mmap = unsafe { Mmap::map(&file).expect(\"failed to map the file\") };\n\n\n\n let module = oxdz::format::load(&mmap[..], \"\")?;\n\n show_pattern(&module, num);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/show-pattern/src/main.rs", "rank": 45, "score": 98853.39774840177 }, { "content": "fn run(args: Vec<String>) -> Result<(), Box<Error>> {\n\n\n\n let filename = &args[1];\n\n let replay_time = parse_num(&args[2])? as f32 * 1000.0;\n\n\n\n let file = File::open(filename)?;\n\n let mmap = unsafe { Mmap::map(&file).expect(\"failed to map the file\") };\n\n\n\n let mut oxdz = oxdz::Oxdz::new(&mmap[..], 44100, \"\")?;\n\n\n\n // Display basic module information\n\n let mut mi = oxdz::ModuleInfo::new();\n\n oxdz.module_info(&mut mi);\n\n println!(\"Title : {}\", mi.title);\n\n println!(\"Format: {}\", mi.description);\n\n\n\n let mut fi = oxdz::FrameInfo::new();\n\n\n\n // Prepare to write a wav file\n\n let out_filename = \"out.wav\";\n", "file_path": "examples/wav-writer/src/main.rs", "rank": 46, "score": 98853.39774840177 }, { "content": "fn run(args: Vec<String>) -> Result<(), Box<Error>> {\n\n\n\n let filename = &args[1];\n\n let file = File::open(filename)?;\n\n\n\n let oxdz = {\n\n let mmap = unsafe { Mmap::map(&file).expect(\"failed to map the file\") };\n\n oxdz::Oxdz::new(&mmap[..], 44100, \"\")?\n\n };\n\n\n\n // Display basic module information\n\n let mut mi = oxdz::ModuleInfo::new();\n\n oxdz.module_info(&mut mi);\n\n println!(\"Title : {}\", mi.title);\n\n println!(\"Format: {}\", mi.description);\n\n\n\n // From Rust-SDL2 SquareWave example\n\n let sdl_context = sdl2::init().unwrap();\n\n let audio_subsystem = sdl_context.audio().unwrap();\n\n\n", "file_path": "examples/player-sdl/src/main.rs", "rank": 47, "score": 98853.39774840177 }, { "content": "fn run(args: Vec<String>) -> Result<(), Box<Error>> {\n\n\n\n // Set up our audio output\n\n let device = cpal::default_output_device().expect(\"Failed to get default output device\");\n\n\n\n // Create event loop\n\n let format = cpal::Format{\n\n channels : 2,\n\n sample_rate: cpal::SampleRate(44100),\n\n data_type : cpal::SampleFormat::I16,\n\n };\n\n let event_loop = cpal::EventLoop::new();\n\n let stream_id = event_loop.build_output_stream(&device, &format)?;\n\n event_loop.play_stream(stream_id);\n\n\n\n\n\n let info = Arc::new(Mutex::new(oxdz::FrameInfo::new()));\n\n \n\n {\n\n let info = info.clone();\n", "file_path": "examples/player-cpal/src/main.rs", "rank": 48, "score": 98853.39774840177 }, { "content": "pub trait BinaryReadExt {\n\n fn read16l_lo_hi(&self, ofs: usize) -> Result<u32, Error>;\n\n}\n\n\n\nimpl<'a> BinaryReadExt for &'a [u8] {\n\n fn read16l_lo_hi(&self, ofs: usize) -> Result<u32, Error> {\n\n let lo = self.read16l(ofs)? as u32;\n\n let hi = self.read16l(ofs + 2)? 
as u32;\n\n Ok((hi << 16) | lo)\n\n }\n\n\n\n}\n\n\n\n/// Scream Tracker 2 module loader\n\npub struct S3mLoader;\n\n\n\nimpl Loader for S3mLoader {\n\n fn name(&self) -> &'static str {\n\n \"Scream Tracker 3\"\n\n }\n", "file_path": "src/format/s3m/load.rs", "rank": 49, "score": 98683.14393921065 }, { "content": "pub trait FormatPlayer: Send + Sync {\n\n fn start(&mut self, &mut PlayerData, &ModuleData, &mut Mixer);\n\n fn play(&mut self, &mut PlayerData, &ModuleData, &mut Mixer);\n\n fn reset(&mut self);\n\n unsafe fn save_state(&self) -> State;\n\n unsafe fn restore_state(&mut self, &State);\n\n}\n\n\n\n#[derive(Default)]\n\npub struct PlayerData {\n\n pub pos : usize,\n\n pub row : usize,\n\n pub frame: usize,\n\n pub song : usize,\n\n pub speed: usize,\n\n pub tempo: f32,\n\n pub time : f32,\n\n\n\n initial_speed: usize,\n\n initial_tempo: f32,\n", "file_path": "src/player/mod.rs", "rank": 50, "score": 93716.1735583467 }, { "content": " Default::default()\n\n }\n\n}\n\n\n\n\n\n/// StmEvent defines the event format used in Scream Tracker 2 patterns.\n\n#[derive(Default)]\n\npub struct StmEvent {\n\n pub note : u8,\n\n pub volume : u8,\n\n pub smp : u8,\n\n pub cmd : u8,\n\n pub infobyte: u8,\n\n}\n\n\n\nimpl StmEvent {\n\n fn new() -> Self {\n\n Default::default()\n\n }\n\n\n", "file_path": "src/format/stm/mod.rs", "rank": 53, "score": 81164.35217399812 }, { "content": "pub mod load;\n\n\n\npub use self::load::*;\n\n\n\nuse std::any::Any;\n\nuse std::fmt;\n\nuse module::{event, ModuleData, Sample};\n\nuse util::{NOTES, BinaryRead};\n\nuse ::*;\n\n\n\n\n\npub struct StmData {\n\n pub name: String,\n\n pub speed: u8,\n\n pub num_patterns: u8,\n\n pub global_vol: u8,\n\n pub instruments: Vec<StmInstrument>,\n\n pub orders: [u8; 128],\n\n pub patterns: StmPatterns,\n\n pub samples: Vec<Sample>,\n", "file_path": "src/format/stm/mod.rs", "rank": 54, "score": 81163.18166473752 }, { "content": " }\n\n\n\n fn samples(&self) -> Vec<Sample> {\n\n self.samples.to_owned()\n\n }\n\n}\n\n\n\n/// StmInstrument defines extra instrument fields used in Protracker instruments.\n\n#[derive(Debug,Default)]\n\npub struct StmInstrument {\n\n pub name : String,\n\n pub volume : u8,\n\n pub size : u16,\n\n pub loop_start: u16,\n\n pub loop_end : u16,\n\n pub c2spd : u16,\n\n}\n\n\n\nimpl StmInstrument {\n\n pub fn new() -> Self {\n", "file_path": "src/format/stm/mod.rs", "rank": 55, "score": 81162.70160624819 }, { "content": "}\n\n\n\nimpl ModuleData for StmData {\n\n fn as_any(&self) -> &Any {\n\n self\n\n }\n\n\n\n fn title(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn patterns(&self) -> usize {\n\n self.num_patterns as usize\n\n }\n\n\n\n fn len(&self) -> usize {\n\n for i in 0..128 {\n\n if self.orders[i] >= self.num_patterns {\n\n return i\n\n }\n", "file_path": "src/format/stm/mod.rs", "rank": 56, "score": 81155.52671120965 }, { "content": "}\n\n\n\n\n\npub struct StmPatterns {\n\n data: Vec<StmEvent>,\n\n}\n\n\n\nimpl StmPatterns {\n\n fn from_slice(num: usize, b: &[u8]) -> Result<Self, Error> {\n\n let mut pat = StmPatterns{\n\n data: Vec::new(),\n\n };\n\n\n\n for p in 0..num {\n\n for r in 0..64 {\n\n for c in 0..4 {\n\n let ofs = p * 1024 + r * 16 + c * 4;\n\n let e = StmEvent::from_slice(b.slice(ofs, 4)?);\n\n pat.data.push(e);\n\n }\n", "file_path": "src/format/stm/mod.rs", "rank": 57, "score": 81154.50360705338 }, { "content": " }\n\n }\n\n\n\n Ok(pat)\n\n }\n\n\n\n pub fn event(&self, pat: u16, row: u16, chn: usize) -> &StmEvent {\n\n &self.data[pat as usize * 256 + row as usize * 4 + chn]\n\n 
}\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_event() {\n\n let e = StmEvent::from_slice(&[255, 1, 128, 0]);\n\n assert_eq!(format!(\"{}\", e), \"--- -- -- .00\");\n", "file_path": "src/format/stm/mod.rs", "rank": 61, "score": 81146.77542090922 }, { "content": " fn from_slice(b: &[u8]) -> Self {\n\n let mut e = StmEvent::new();\n\n e.note = b[0];\n\n e.volume = (b[1] & 0x07) | (b[2] & 0xf0) >> 1;\n\n e.smp = (b[1] & 0xf8) >> 3;\n\n e.cmd = b[2] & 0x0f;\n\n e.infobyte = b[3];\n\n e\n\n }\n\n}\n\n\n\nimpl fmt::Display for StmEvent {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let note = if self.note > 250 {\n\n \"---\".to_owned()\n\n } else {\n\n let n = ((self.note&0xf) + 12*(3+(self.note>>4))) as usize;\n\n format!(\"{}{}\", NOTES[n%12], n/12)\n\n };\n\n\n", "file_path": "src/format/stm/mod.rs", "rank": 62, "score": 81146.51289856415 }, { "content": "\n\n let e = StmEvent::from_slice(&[34, 113, 128, 0]);\n\n assert_eq!(format!(\"{}\", e), \"D 5 0E -- .00\");\n\n\n\n let e = StmEvent::from_slice(&[52, 50, 100, 204]);\n\n assert_eq!(format!(\"{}\", e), \"E 6 06 32 DCC\");\n\n\n\n let e = StmEvent::from_slice(&[50, 49, 128, 0]);\n\n assert_eq!(format!(\"{}\", e), \"D 6 06 -- .00\");\n\n }\n\n}\n", "file_path": "src/format/stm/mod.rs", "rank": 63, "score": 81140.63174414483 }, { "content": " let smp = if self.smp == 0 {\n\n \"--\".to_owned()\n\n } else {\n\n format!(\"{:02X}\", self.smp)\n\n };\n\n\n\n let vol = if self.volume == 65 {\n\n \"--\".to_owned()\n\n } else {\n\n format!(\"{:02X}\", self.volume)\n\n };\n\n\n\n let cmd = if self.cmd == 0 {\n\n '.'\n\n } else {\n\n (64_u8 + self.cmd) as char\n\n };\n\n\n\n write!(f, \"{} {} {} {}{:02X}\", note, smp, vol, cmd, self.infobyte)\n\n }\n", "file_path": "src/format/stm/mod.rs", "rank": 64, "score": 81139.1014449007 }, { "content": " }\n\n 128\n\n }\n\n\n\n fn pattern_in_position(&self, pos: usize) -> Option<usize> {\n\n if pos >= self.orders.len() {\n\n None\n\n } else {\n\n Some(self.orders[pos] as usize)\n\n }\n\n }\n\n\n\n fn instruments(&self) -> Vec<String> {\n\n self.instruments.iter().map(|x| x.name.to_owned()).collect::<Vec<String>>()\n\n }\n\n\n\n fn rows(&self, pat: usize) -> usize {\n\n if pat >= self.num_patterns as usize {\n\n 0\n\n } else {\n", "file_path": "src/format/stm/mod.rs", "rank": 65, "score": 81138.892874649 }, { "content": " 64\n\n }\n\n }\n\n\n\n fn pattern_data(&self, pat: usize, num: usize, buffer: &mut [u8]) -> usize {\n\n let mut i = 0;\n\n for _ in 0..num {\n\n let (row, ch) = (i / 4, i % 4);\n\n let ofs = i * 6;\n\n let e = &self.patterns.data[pat*256 + row*4 + ch];\n\n\n\n let mut flags = 0;\n\n if e.note != 255 { flags |= event::HAS_NOTE; buffer[ofs+1] = e.note }\n\n if e.smp != 0 { flags |= event::HAS_INS ; buffer[ofs+2] = e.smp }\n\n if e.cmd != 0 { flags |= event::HAS_CMD ; buffer[ofs+4] = e.cmd; buffer[ofs+5] = e.infobyte }\n\n buffer[ofs] = flags;\n\n\n\n i += 1;\n\n }\n\n i\n", "file_path": "src/format/stm/mod.rs", "rank": 66, "score": 81138.3709409371 }, { "content": "fn impl_save_restore(ast: &syn::DeriveInput) -> quote::Tokens {\n\n let name = &ast.ident;\n\n quote! 
{\n\n use std::ptr;\n\n use std::mem;\n\n\n\n impl SaveRestore for #name {\n\n unsafe fn save(&self) -> Vec<u8> {\n\n let size = mem::size_of::<Self>();\n\n let mut dst: Vec<u8> = Vec::with_capacity(size);\n\n dst.set_len(size);\n\n ptr::copy(self, mem::transmute(dst.as_mut_ptr()), 1);\n\n dst\n\n }\n\n\n\n unsafe fn restore(&mut self, buffer: &Vec<u8>) {\n\n let size = buffer.len();\n\n ptr::copy(buffer.as_ptr(), mem::transmute(self), size)\n\n }\n\n }\n", "file_path": "save-restore-derive/src/lib.rs", "rank": 67, "score": 79906.53490577426 }, { "content": "#[derive(Copy,Clone,Default)]\n\nstruct Blep {\n\n level: i16,\n\n age : i16,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Paula {\n\n // the instantenous value of Paula output\n\n global_output_level: i16,\n\n\n\n // count of simultaneous bleps to keep track of\n\n active_bleps: usize,\n\n\n\n // place to keep our bleps in. MAX_BLEPS should be\n\n // defined as a BLEP_SIZE / MINIMUM_EVENT_INTERVAL.\n\n // For Paula, minimum event interval could be even 1, but it makes\n\n // sense to limit it to some higher value such as 16.\n\n bleps: Box<[Blep; MAX_BLEPS]>,\n\n\n\n pub remainder: f64,\n", "file_path": "src/mixer/paula.rs", "rank": 68, "score": 71529.9580401846 }, { "content": "#[derive(Clone,Default)]\n\nstruct Voice {\n\n num : usize,\n\n pos : f64,\n\n period : f64,\n\n note : usize,\n\n pan : isize,\n\n vol : usize,\n\n ins : usize,\n\n smp : usize,\n\n end : u32,\n\n loop_start: u32,\n\n loop_end : u32,\n\n has_loop : bool,\n\n sample_end: bool,\n\n mute : bool,\n\n active : bool,\n\n\n\n i_buffer : [i32; 4],\n\n\n\n paula : Option<Paula>,\n", "file_path": "src/mixer/mod.rs", "rank": 69, "score": 71529.9580401846 }, { "content": "struct Sampler;\n\n\n", "file_path": "src/mixer/mod.rs", "rank": 70, "score": 71529.9580401846 }, { "content": "#[derive(Default)]\n\nstruct Chn {\n\n aorgvol : i8,\n\n avol : i8,\n\n channelnum : u8,\n\n achannelused : u8,\n\n aglis : bool, // u8,\n\n atremor : u8,\n\n atreon : bool, // u8,\n\n atrigcnt : u8,\n\n anotecutcnt : u8,\n\n anotedelaycnt : u8,\n\n avibtretype : u8,\n\n note : u8,\n\n ins : u8,\n\n vol : u8,\n\n cmd : u8,\n\n info : u8,\n\n lastins : u8,\n\n lastnote : u8,\n\n alastnfo : u8,\n", "file_path": "src/player/st3/st3play.rs", "rank": 71, "score": 70241.75783440666 }, { "content": "struct MixerData {\n\n pub pos : f64,\n\n pub buf_pos: usize,\n\n pub step : usize,\n\n pub size : isize,\n\n pub vol_l : usize,\n\n pub vol_r : usize,\n\n}\n\n\n\nimpl MixerData {\n\n fn mix<T>(&mut self, interp: &Interpolator, data: &[T], buf32: &mut [i32], ibuf: &mut [i32])\n\n where Sampler: SamplerOperations<T>\n\n {\n\n let mut pos = self.pos as usize;\n\n let mut frac = ((1 << SMIX_SHIFT) as f64 * (self.pos - pos as f64)) as usize;\n\n let mut bpos = self.buf_pos;\n\n\n\n let bmax = interp.bsize() - 1;\n\n\n\n for _ in 0..self.size {\n", "file_path": "src/mixer/mod.rs", "rank": 72, "score": 70241.75783440666 }, { "content": "#[derive(Clone,Copy,Default)]\n\nstruct ChannelData {\n\n n_length : u16,\n\n n_loopstart : u16,\n\n n_replen : u16,\n\n output_volume : u8,\n\n n_finetune : u8,\n\n output_period : u16,\n\n n_insnum : u8,\n\n n_wavecontrol : u8,\n\n n_vibratopos : u8,\n\n n_tremolopos : u8,\n\n n_command : u16,\n\n n_offset : u8,\n\n n_period : u16,\n\n n_wantedperiod : u16,\n\n n_toneportdirec: u8,\n\n n_gliss : bool,\n\n n_toneportspeed: u16,\n\n n_vibratospeed : u8,\n\n n_vibratodepth : u8,\n", "file_path": "src/player/fasttracker/player.rs", "rank": 73, "score": 69031.82802087853 }, { 
"content": "#[derive(Clone,Copy,Default)]\n\nstruct ChannelData {\n\n n_0_note : u16,\n\n n_2_cmd : u8,\n\n n_3_cmdlo : u8,\n\n n_4_samplestart : u32,\n\n n_8_length : u16,\n\n n_a_loopstart : u32,\n\n n_e_replen : u16,\n\n n_10_period : i16,\n\n n_12_volume : u8,\n\n //n_14_dma_control: u16,\n\n n_16_portdir : bool,\n\n n_17_toneportspd : u8,\n\n n_18_wantperiod : i16,\n\n n_1a_vibrato : u8,\n\n n_1b_vibpos : u8,\n\n n_1c_prog_on : bool,\n\n\n\n // progdata\n\n n_8_dataloopstart: u8,\n", "file_path": "src/player/hmn/player.rs", "rank": 74, "score": 69031.82802087853 }, { "content": "struct InstrTyp {\n\n ta : [u8; 96],\n\n env_vp : [[i16; 2]; 12],\n\n env_pp : [[i16; 2]; 12],\n\n env_vp_ant : u8,\n\n env_pp_ant : u8,\n\n env_v_sust : u8,\n\n env_v_rep_s: u8,\n\n env_v_rep_e: u8,\n\n env_p_sust : u8,\n\n env_p_rep_s: u8,\n\n env_p_rep_e: u8,\n\n env_v_typ : u8,\n\n env_p_typ : u8,\n\n vib_typ : u8,\n\n vib_sweep : u8,\n\n vib_depth : u8,\n\n vib_rate : u8,\n\n fade_out : u16,\n\n //midi_on : bool,\n\n //midi_channel: u8,\n\n //midi_program: i16,\n\n //midi_bend : i16,\n\n //mute : bool,\n\n samp : [SampleTyp; 32],\n\n}\n\n*/\n\n\n", "file_path": "src/player/ft2/ft2play.rs", "rank": 75, "score": 69031.82802087853 }, { "content": "#[derive(Clone,Copy,Default)]\n\nstruct DataChnx {\n\n n_0_note : i16,\n\n n_2_sound_number : u8,\n\n n_3_effect_number : u8,\n\n n_4_soundstart : u32,\n\n n_8_soundlength : u16,\n\n n_10_repeatstart : u32,\n\n n_14_repeatlength : u16,\n\n n_16_last_saved_note: i16,\n\n n_18_volume : i16,\n\n n_20_volume_trigger : i16,\n\n //n_22_dma_bit : u16,\n\n}\n\n\n\nimpl DataChnx {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/player/ust/player.rs", "rank": 76, "score": 69031.82802087853 }, { "content": "#[derive(Clone,Copy,Default)]\n\nstruct ChannelData {\n\n n_0_note : u16,\n\n n_2_cmd : u8,\n\n n_3_cmdlo : u8,\n\n n_4_samplestart : u32,\n\n n_8_length : u16,\n\n n_a_loopstart : u32,\n\n n_e_replen : u16,\n\n n_10_period : i16,\n\n n_12_volume : u8,\n\n //n_14_dma_control: u16,\n\n n_16_portdir : bool,\n\n n_17_toneportspd: u8,\n\n n_18_wantperiod : i16,\n\n n_1a_vibrato : u8,\n\n n_1b_vibpos : u8,\n\n}\n\n\n\nimpl ChannelData {\n\n pub fn new() -> Self {\n", "file_path": "src/player/noisetracker/player.rs", "rank": 77, "score": 69031.82802087853 }, { "content": "#[derive(Clone,Copy,Default)]\n\nstruct ChannelData {\n\n n_note : u16,\n\n n_cmd : u8,\n\n n_cmdlo : u8,\n\n n_start : u32,\n\n n_length : u16,\n\n n_loopstart : u32,\n\n n_replen : u16,\n\n n_period : u16,\n\n n_finetune : u8,\n\n n_volume : u8,\n\n n_toneportdirec: bool,\n\n n_toneportspeed: u8,\n\n n_wantedperiod : u16,\n\n n_vibratocmd : u8,\n\n n_vibratopos : u8,\n\n n_tremolocmd : u8,\n\n n_tremolopos : u8,\n\n n_wavecontrol : u8,\n\n n_glissfunk : u8,\n", "file_path": "src/player/protracker/player.rs", "rank": 78, "score": 69031.82802087853 }, { "content": "#[derive(Default)]\n\nstruct SongTyp {\n\n len : u16,\n\n rep_s : u16,\n\n ant_chn : u8,\n\n ant_ptn : u16,\n\n ant_instrs : u16,\n\n song_pos : i16,\n\n patt_nr : i16,\n\n patt_pos : i16,\n\n patt_len : i16,\n\n speed : u16,\n\n tempo : u16,\n\n glob_vol : u16,\n\n timer : u16,\n\n patt_del_time : u8,\n\n patt_del_time_2: u8,\n\n p_break_flag : bool,\n\n p_break_pos : u8,\n\n pos_jump_flag : bool,\n\n //song_tab : Vec<u8>,\n\n ver : u16,\n\n //name : String,\n\n}\n\n\n\n/*\n", "file_path": "src/player/ft2/ft2play.rs", "rank": 79, "score": 69031.82802087853 }, { "content": 
"#[derive(Default,Copy,Clone)]\n\nstruct St2Channel {\n\n //on : bool,\n\n //empty : bool,\n\n row : u16,\n\n //pattern_data_offs: usize,\n\n event_note : u16,\n\n event_volume : u8,\n\n event_smp : u16,\n\n event_cmd : u16,\n\n event_infobyte : u16,\n\n //last_note : u16,\n\n period_current : i16,\n\n period_target : i16,\n\n vibrato_current : u16,\n\n tremor_counter : u16,\n\n tremor_state : u16,\n\n //uint8_t *smp_name;\n\n //uint8_t *smp_data_ptr;\n\n //uint16_t smp_loop_end;\n\n //uint16_t smp_loop_start;\n", "file_path": "src/player/st2/st2play.rs", "rank": 80, "score": 69031.82802087853 }, { "content": "#[derive(Clone,Copy,Default)]\n\nstruct AudTemp {\n\n n_0_note : u16,\n\n n_2_cmd : u8,\n\n n_3_cmdlo : u8,\n\n n_4_samplestart : u32,\n\n n_8_length : u16,\n\n n_10_loopstart : u32,\n\n n_14_replen : u16,\n\n n_16_period : i16,\n\n n_18_volume : u8,\n\n n_22_last_note : i16,\n\n}\n\n\n\nimpl AudTemp {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n}\n\n\n\nlazy_static! {\n", "file_path": "src/player/soundtracker/player.rs", "rank": 81, "score": 69031.82802087853 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 3 {\n\n println!(\"usage: {} <filename> <num>\", Path::new(&args[0]).file_name().unwrap().to_str().unwrap());\n\n return;\n\n }\n\n\n\n match run(args) {\n\n Ok(_) => {},\n\n Err(e) => eprintln!(\"error: {}\", e),\n\n }\n\n}\n\n\n\n\n", "file_path": "examples/show-pattern/src/main.rs", "rank": 82, "score": 67673.08706593863 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n println!(\"usage: {} <filename>\", Path::new(&args[0]).file_name().unwrap().to_str().unwrap());\n\n return;\n\n }\n\n\n\n match run(args) {\n\n Ok(_) => {},\n\n Err(e) => eprintln!(\"error: {}\", e),\n\n }\n\n}\n\n\n", "file_path": "examples/player-sdl/src/main.rs", "rank": 83, "score": 67673.08706593863 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 2 {\n\n println!(\"usage: {} <filename>\", Path::new(&args[0]).file_name().unwrap().to_str().unwrap());\n\n return;\n\n }\n\n\n\n match run(args) {\n\n Ok(_) => {},\n\n Err(e) => eprintln!(\"error: {}\", e),\n\n }\n\n}\n\n\n", "file_path": "examples/player-cpal/src/main.rs", "rank": 84, "score": 67673.08706593863 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() != 3 {\n\n println!(\"usage: {} <filename> <secs>\", Path::new(&args[0]).file_name().unwrap().to_str().unwrap());\n\n return;\n\n }\n\n\n\n match run(args) {\n\n Ok(_) => {},\n\n Err(e) => eprintln!(\"error: {}\", e),\n\n }\n\n}\n\n\n\n\n", "file_path": "examples/wav-writer/src/main.rs", "rank": 85, "score": 67673.08706593863 }, { "content": "pub trait BinaryRead {\n\n fn read_string(&self, ofs: usize, size: usize) -> Result<String, Error>;\n\n fn read32b(&self, ofs: usize) -> Result<u32, Error>;\n\n fn read16b(&self, ofs: usize) -> Result<u16, Error>;\n\n fn read32l(&self, ofs: usize) -> Result<u32, Error>;\n\n fn read16l(&self, ofs: usize) -> Result<u16, Error>;\n\n fn read8(&self, ofs: usize) -> Result<u8, Error>;\n\n fn read8i(&self, ofs: usize) -> Result<i8, Error>;\n\n fn slice(&self, start: usize, size: usize) -> Result<&[u8], Error>;\n\n}\n\n\n\nimpl<'a> BinaryRead for &'a [u8] {\n\n fn read_string(&self, ofs: usize, size: usize) -> Result<String, Error> {\n\n try!(check_buffer_size(&self, ofs + size));\n\n 
Ok(String::from_utf8_lossy(&self[ofs..ofs+size]).to_string().replace(\"\\x00\", \" \"))\n\n }\n\n\n\n fn read32b(&self, ofs: usize) -> Result<u32, Error> {\n\n try!(check_buffer_size(&self, ofs + 4));\n\n Ok(BigEndian::read_u32(&self[ofs..ofs+4]))\n", "file_path": "src/util.rs", "rank": 86, "score": 66659.37327922042 }, { "content": "pub trait SaveRestore {\n\n unsafe fn save(&self) -> Vec<u8>;\n\n unsafe fn restore(&mut self, &Vec<u8>);\n\n}\n\n\n\n#[derive(Default, Clone)]\n\npub struct ScanData {\n\n pub ord : usize,\n\n pub row : usize,\n\n pub frame: usize,\n\n pub num : usize,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct OrdData {\n\n pub state: State,\n\n pub time : f32,\n\n pub used : bool,\n\n}\n\n\n\nimpl OrdData {\n\n pub fn new() -> Self {\n\n OrdData{\n\n state: vec![0; 0],\n\n time : 0.0,\n\n used : false,\n\n }\n\n }\n\n}\n", "file_path": "src/player/scan.rs", "rank": 87, "score": 65450.40859515162 }, { "content": "struct ModPlayer<'a> {\n\n oxdz: oxdz::Oxdz<'a>,\n\n data: Arc<Mutex<oxdz::FrameInfo>>,\n\n}\n\n\n\nimpl<'a> AudioCallback for ModPlayer<'a> {\n\n type Channel = i16;\n\n\n\n fn callback(&mut self, mut out: &mut [i16]) {\n\n {\n\n let mut fi = self.data.lock().unwrap();\n\n self.oxdz.frame_info(&mut fi);\n\n }\n\n self.oxdz.fill_buffer(&mut out, 0);\n\n }\n\n}\n\n\n", "file_path": "examples/player-sdl/src/main.rs", "rank": 88, "score": 64443.73774922179 }, { "content": "pub trait PlayerListEntry {\n\n fn info(&self) -> PlayerInfo;\n\n fn player(&self, module: &Module, options: Options) -> Box<FormatPlayer>;\n\n fn import(&self, module: Module) -> Result<Module, Error>;\n\n}\n\n\n\n\n\n// Trait for format-specific players\n\n\n\npub type State = Vec<u8>;\n\n\n", "file_path": "src/player/mod.rs", "rank": 89, "score": 64312.734211863295 }, { "content": "pub trait SliceConvert<'a> {\n\n fn as_slice_u8(&'a self) -> &'a [u8];\n\n}\n\n\n\nimpl<'a> SliceConvert<'a> for [u16] {\n\n fn as_slice_u8(&'a self) -> &'a [u8] {\n\n unsafe {\n\n slice::from_raw_parts(self.as_ptr() as *const u8, self.len() as usize * 2)\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/util.rs", "rank": 90, "score": 63922.914497006605 }, { "content": "pub trait MemOpExt<T> {\n\n fn fill(&mut self, u8, usize);\n\n}\n\n\n\nimpl<'a, T> MemOpExt<T> for [T] {\n\n fn fill(&mut self, val: u8, amt: usize) {\n\n unsafe { ptr::write_bytes(self.as_mut_ptr(), val, amt * std::mem::size_of::<T>() - 1); }\n\n }\n\n}\n\n\n\n\n", "file_path": "src/util.rs", "rank": 91, "score": 61576.27542964947 }, { "content": "pub trait Interpolator: Send + Sync {\n\n fn name(&self) -> &'static str;\n\n fn bsize(&self) -> usize;\n\n fn get_sample(&self, &[i32], i32) -> i32;\n\n}\n\n\n\n// Nearest neighbor interpolator\n\npub struct Nearest;\n\n\n\nimpl Interpolator for Nearest {\n\n fn name(&self) -> &'static str {\n\n \"nearest neighbor\"\n\n }\n\n\n\n fn bsize(&self) -> usize {\n\n 2\n\n }\n\n\n\n fn get_sample(&self, i: &[i32], _frac: i32) -> i32 {\n\n i[1]\n", "file_path": "src/mixer/interpolator.rs", "rank": 92, "score": 59345.76383099932 }, { "content": "pub trait ModuleData: Send + Sync {\n\n fn as_any(&self) -> &Any;\n\n fn title(&self) -> &str; // module title\n\n fn patterns(&self) -> usize; // number of patterns\n\n fn len(&self) -> usize; // module length\n\n fn pattern_in_position(&self, usize) -> Option<usize>;\n\n fn instruments(&self) -> Vec<String>;\n\n fn rows(&self, pat: usize) -> usize; // number of rows in pattern\n\n fn pattern_data(&self, pat: usize, num: usize, buffer: &mut [u8]) -> usize;\n\n fn samples(&self) 
-> Vec<Sample>;\n\n}\n", "file_path": "src/module/mod.rs", "rank": 93, "score": 58273.25456751748 }, { "content": "fn all() -> Vec<Box<PlayerListEntry>> {\n\n vec![\n\n Box::new(protracker::Pt21a),\n\n Box::new(noisetracker::Nt11),\n\n Box::new(fasttracker::Ft101),\n\n Box::new(st2::St2),\n\n Box::new(st3::St3),\n\n Box::new(soundtracker::DocSt2),\n\n Box::new(ust::Ust27),\n\n Box::new(st2::St2),\n\n Box::new(st3::St3),\n\n Box::new(ft2::Ft2),\n\n Box::new(hmn::Hmn),\n\n ]\n\n}\n\n\n", "file_path": "src/player/mod.rs", "rank": 94, "score": 57342.27384484205 }, { "content": "fn show_pattern(module: &module::Module, num: usize) {\n\n println!(\"Pattern {}:\", num);\n\n let rows = module.rows(num);\n\n let ch = module.channels;\n\n let mut buffer = vec![0_u8; 6 * rows * ch];\n\n\n\n module.pattern_data(num, &mut buffer);\n\n\n\n let mut ofs = 0;\n\n for r in 0..rows {\n\n print!(\"{:3}: \", r);\n\n for _ in 0..ch {\n\n print!(\"{} \", event::format(&buffer[ofs..ofs+6]));\n\n ofs += 6;\n\n }\n\n println!();\n\n }\n\n}\n", "file_path": "examples/show-pattern/src/main.rs", "rank": 95, "score": 50319.5568440596 }, { "content": "fn encode_pattern(patterns: &ModPatterns, num: usize, ch: usize) -> S3mPattern {\n\n let mut size = 2;\n\n let mut data = Vec::<u8>::new();\n\n\n\n data.push(0); // make room for pattern size\n\n data.push(0);\n\n\n\n for r in 0..64 {\n\n for c in 0..ch {\n\n let e = patterns.event(num, r, c);\n\n let mut b = 0_u8;\n\n if e.note != 0 {\n\n b |= 0x20; // note and instrument follow\n\n }\n\n if e.cmd&0x0f == 0x0c {\n\n b |= 0x40; // volume follows\n\n }\n\n if (e.cmd&0x0f != 0 || e.cmdlo != 0) && e.cmd&0x0f != 0x0c {\n\n b |= 0x80; // command and info follow\n\n }\n", "file_path": "src/player/st3/import.rs", "rank": 96, "score": 45583.84960252923 }, { "content": "use std::slice;\n\nuse std::ops::{Index, IndexMut};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct SampleData {\n\n raw: Vec<u8>\n\n}\n\n\n\nimpl<'a> SampleData {\n\n pub fn new() -> Self {\n\n SampleData {\n\n raw: Vec::new()\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.raw.len()\n\n }\n\n\n\n pub fn as_slice_u8(&'a self) -> &'a [u8] {\n", "file_path": "src/module/sample.rs", "rank": 97, "score": 42503.32928742637 }, { "content": " /// The raw PCM-encoded sample data.\n\n pub data : SampleData,\n\n}\n\n\n\nimpl Sample {\n\n pub fn new() -> Sample {\n\n Sample {\n\n sample_type : SampleType::Empty,\n\n num : 0,\n\n address : 0,\n\n size : 0,\n\n rate : 1.0,\n\n name : \"\".to_owned(),\n\n data : SampleData::new(),\n\n }\n\n }\n\n\n\n pub fn store(&mut self, b: &[u8]) {\n\n self.data.raw.extend(b);\n\n self.data.raw.extend([0; 2].iter());\n", "file_path": "src/module/sample.rs", "rank": 98, "score": 42500.92583340256 }, { "content": " }\n\n}\n\n\n\n\n\n#[derive(Debug, Clone)]\n\npub enum SampleType {\n\n Sample8,\n\n Sample16,\n\n Empty,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Sample {\n\n pub sample_type : SampleType,\n\n pub num : usize,\n\n pub address : u32,\n\n pub size : u32,\n\n pub rate : f64,\n\n /// The normalized rate used to play this sample.\n\n pub name : String,\n", "file_path": "src/module/sample.rs", "rank": 99, "score": 42498.43107620153 } ]
Rust
common/rs/src/mtc/battle/organizer.rs
OpenEmojiBattler/open-emoji-battler
c5054753525d2880602cd406837f01a8a82c7577
use crate::{
    codec_types::*,
    mtc::battle::{common::BattleEmo, march::march},
};
use anyhow::{anyhow, ensure, Result};
use rand::{seq::SliceRandom, SeedableRng};
use rand_pcg::Pcg64Mcg;
use sp_std::{cmp, prelude::*};

pub fn battle_all(
    board: &mtc::Board,
    health: &mut u8,
    ghost_states: &mut [mtc::GhostState],
    grade: u8,
    ghosts: &[mtc::Ghost],
    battle_ghost_index: u8,
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<Option<u8>> {
    let pre_health = *health;
    let pre_ghost_states = ghost_states.to_vec();

    if ghost_states
        .iter()
        .filter(|s| matches!(s, mtc::GhostState::Active { health: _ }))
        .count()
        > 1
    {
        battle_pvg_and_gvg(
            board,
            grade,
            health,
            ghosts,
            ghost_states,
            battle_ghost_index,
            turn,
            seed,
            emo_bases,
        )?;
    } else {
        let (ghost_index, ghost_state) = ghost_states
            .iter_mut()
            .enumerate()
            .find(|(_, s)| matches!(s, mtc::GhostState::Active { health: _ }))
            .ok_or_else(|| anyhow!("battle_all: invalid"))?;
        battle_pvg(
            grade,
            health,
            board,
            ghost_state,
            &ghosts[ghost_index].history,
            turn,
            seed,
            emo_bases,
        )?;
    };

    let final_place = calc_final_place(*health, pre_health, ghost_states, &pre_ghost_states);

    Ok(final_place)
}

pub fn march_pvg(
    board: &mtc::Board,
    ghost_board: &mtc::GhostBoard,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<(u8, u8, mtc::battle::Logs)> {
    march(
        build_battle_emos_from_board(board, emo_bases)?,
        build_battle_emos_from_ghost_board(ghost_board, emo_bases)?,
        seed,
        emo_bases,
    )
}

pub fn march_gvg(
    ghost_board0: &mtc::GhostBoard,
    ghost_board1: &mtc::GhostBoard,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<(u8, u8, mtc::battle::Logs)> {
    march(
        build_battle_emos_from_ghost_board(ghost_board0, emo_bases)?,
        build_battle_emos_from_ghost_board(ghost_board1, emo_bases)?,
        seed,
        emo_bases,
    )
}

static EMPTY_GRADE_AND_GHOST_BOARD: mtc::GradeAndGhostBoard = mtc::GradeAndGhostBoard {
    grade: 1,
    board: mtc::GhostBoard(vec![]),
};

pub fn get_grade_and_ghost_board<'a>(
    grade_and_ghost_boards: &'a [mtc::GradeAndGhostBoard],
    state: &mtc::GhostState,
    turn: u8,
) -> &'a mtc::GradeAndGhostBoard {
    let hist_len = grade_and_ghost_boards.len() as u8;
    if hist_len == 0 {
        &EMPTY_GRADE_AND_GHOST_BOARD
    } else {
        let effective_turn = if let mtc::GhostState::Retired { final_turn } = state {
            cmp::min(turn, *final_turn)
        } else {
            turn
        };
        if hist_len >= effective_turn {
            &grade_and_ghost_boards[effective_turn as usize - 1]
        } else {
            &grade_and_ghost_boards[hist_len as usize - 1]
        }
    }
}

pub fn select_battle_ghost_index(
    states: &[mtc::GhostState],
    previous_index: u8,
    seed: u64,
) -> Result<u8> {
    let live_indexes = states
        .iter()
        .zip(0u8..)
        .filter(|(s, _)| matches!(s, mtc::GhostState::Active { health: _ }))
        .map(|(_, i)| i)
        .collect::<Vec<_>>();

    let len = live_indexes.len();

    ensure!(len != 0, "select_battle_ghost_index: live zero");

    if len == 1 {
        return Ok(live_indexes[0]);
    }

    let mut rng = Pcg64Mcg::seed_from_u64(seed);

    live_indexes
        .into_iter()
        .filter(|&i| i != previous_index)
        .collect::<Vec<_>>()
        .choose(&mut rng)
        .copied()
        .ok_or_else(|| anyhow!("choose failed"))
}

fn build_battle_emos_from_board(
    board: &mtc::Board,
    emo_bases: &emo::Bases,
) -> Result<Vec<BattleEmo>> {
    let mut emos = Vec::with_capacity(board.0.len());
    for emo in board.0.iter() {
        emos.push(BattleEmo::new_with_attributes(
            emo_bases.find(emo.base_id)?,
            emo.attributes.clone(),
        ));
    }
    Ok(emos)
}

fn build_battle_emos_from_ghost_board(
    ghost_board: &mtc::GhostBoard,
    emo_bases: &emo::Bases,
) -> Result<Vec<BattleEmo>> {
    let mut emos = Vec::with_capacity(ghost_board.0.len());
    for emo in ghost_board.0.iter() {
        emos.push(BattleEmo::new_with_attributes(
            emo_bases.find(emo.base_id)?,
            emo.attributes.clone(),
        ));
    }
    Ok(emos)
}

fn calc_final_place(
    health: u8,
    pre_health: u8,
    ghost_states: &[mtc::GhostState],
    pre_ghost_states: &[mtc::GhostState],
) -> Option<u8> {
    let are_all_ghosts_retired = ghost_states
        .iter()
        .all(|s| matches!(s, mtc::GhostState::Retired { final_turn: _ }));

    if health == 0 || are_all_ghosts_retired {
        if are_all_ghosts_retired {
            Some(1)
        } else {
            let mut place = 1;
            for (i, ghost_state) in ghost_states.iter().enumerate() {
                if let mtc::GhostState::Active { health: _ } = ghost_state {
                    place += 1;
                    continue;
                }
                if let mtc::GhostState::Active { health: g_health } = pre_ghost_states[i] {
                    if g_health > pre_health {
                        place += 1;
                        continue;
                    }
                }
            }
            Some(place)
        }
    } else {
        None
    }
}

struct GhostSet<'a> {
    index: u8,
    ghost: mtc::Ghost,
    state: &'a mut mtc::GhostState,
}

fn battle_pvg_and_gvg(
    board: &mtc::Board,
    grade: u8,
    health: &mut u8,
    ghosts: &[mtc::Ghost],
    ghost_states: &mut [mtc::GhostState],
    battle_ghost_index: u8,
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<()> {
    let mut ghost_sets = ghosts
        .iter()
        .zip(ghost_states.iter_mut())
        .enumerate()
        .map(|(i, (g, gs))| GhostSet {
            index: i as u8,
            ghost: g.clone(),
            state: gs,
        })
        .collect::<Vec<GhostSet>>();

    if battle_ghost_index != 0 {
        ghost_sets.swap(0, battle_ghost_index as usize);
    }

    let (ghost_set0, gs) = ghost_sets
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;
    let (ghost_set1, gs) = gs
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;
    let (ghost_set2, _) = gs
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;

    battle_pvg(
        grade,
        health,
        board,
        ghost_set0.state,
        &ghost_set0.ghost.history,
        turn,
        seed,
        emo_bases,
    )?;
    battle_gvg(
        ghost_set1.state,
        &ghost_set1.ghost.history,
        ghost_set2.state,
        &ghost_set2.ghost.history,
        turn,
        seed,
        emo_bases,
    )?;

    ghost_sets.sort_unstable_by_key(|g| g.index);

    Ok(())
}

fn damage_ghost_health(board_grade: u8, grade: u8, ghost_state: &mut mtc::GhostState, turn: u8) {
    if let mtc::GhostState::Active { ref mut health } = ghost_state {
        damage_health(board_grade, grade, health);
        if *health == 0 {
            *ghost_state = mtc::GhostState::Retired { final_turn: turn };
        }
    }
}

fn damage_player_health(board_grade: u8, grade: u8, health: &mut u8) {
    damage_health(board_grade, grade, health)
}

fn damage_health(board_grade: u8, grade: u8, health: &mut u8) {
    if board_grade > 0 {
        *health = health.saturating_sub(board_grade + grade);
    }
}

fn battle_pvg(
    grade: u8,
    health: &mut u8,
    board: &mtc::Board,
    ghost_state: &mut mtc::GhostState,
    ghost_history: &[mtc::GradeAndGhostBoard],
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<()> {
    let ghost_grade_and_ghost_board = get_grade_and_ghost_board(ghost_history, ghost_state, turn);
    let (player_board_grade, ghost_board_grade, _) =
        march_pvg(board, &ghost_grade_and_ghost_board.board, seed, emo_bases)?;

    damage_ghost_health(player_board_grade, grade, ghost_state, turn);
    damage_player_health(ghost_board_grade, ghost_grade_and_ghost_board.grade, health);

    Ok(())
}

fn battle_gvg(
    ghost0_state: &mut mtc::GhostState,
    ghost0_history: &[mtc::GradeAndGhostBoard],
    ghost1_state: &mut mtc::GhostState,
    ghost1_history: &[mtc::GradeAndGhostBoard],
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<()> {
    let ghost0_grade_and_ghost_board =
        get_grade_and_ghost_board(ghost0_history, ghost0_state, turn);
    let ghost1_grade_and_ghost_board =
        get_grade_and_ghost_board(ghost1_history, ghost1_state, turn);

    let (ghost0_board_grade, ghost1_board_grade, _) = march_gvg(
        &ghost0_grade_and_ghost_board.board,
        &ghost1_grade_and_ghost_board.board,
        seed,
        emo_bases,
    )?;

    damage_ghost_health(
        ghost1_board_grade,
        ghost1_grade_and_ghost_board.grade,
        ghost0_state,
        turn,
    );
    damage_ghost_health(
        ghost0_board_grade,
        ghost0_grade_and_ghost_board.grade,
        ghost1_state,
        turn,
    );

    Ok(())
}
use crate::{
    codec_types::*,
    mtc::battle::{common::BattleEmo, march::march},
};
use anyhow::{anyhow, ensure, Result};
use rand::{seq::SliceRandom, SeedableRng};
use rand_pcg::Pcg64Mcg;
use sp_std::{cmp, prelude::*};

pub fn battle_all(
    board: &mtc::Board,
    health: &mut u8,
    ghost_states: &mut [mtc::GhostState],
    grade: u8,
    ghosts: &[mtc::Ghost],
    battle_ghost_index: u8,
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<Option<u8>> {
    let pre_health = *health;
    let pre_ghost_states = ghost_states.to_vec();

    if ghost_states
        .iter()
        .filter(|s| matches!(s, mtc::GhostState::Active { health: _ }))
        .count()
        > 1
    {
        battle_pvg_and_gvg(
            board,
            grade,
            health,
            ghosts,
            ghost_states,
            battle_ghost_index,
            turn,
            seed,
            emo_bases,
        )?;
    } else {
        let (ghost_index, ghost_state) = ghost_states
            .iter_mut()
            .enumerate()
            .find(|(_, s)| matches!(s, mtc::GhostState::Active { health: _ }))
            .ok_or_else(|| anyhow!("battle_all: invalid"))?;
        battle_pvg(
            grade,
            health,
            board,
            ghost_state,
            &ghosts[ghost_index].history,
            turn,
            seed,
            emo_bases,
        )?;
    };

    let final_place = calc_final_place(*health, pre_health, ghost_states, &pre_ghost_states);

    Ok(final_place)
}

pub fn march_pvg(
    board: &mtc::Board,
    ghost_board: &mtc::GhostBoard,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<(u8, u8, mtc::battle::Logs)> {
    march(
        build_battle_emos_from_board(board, emo_bases)?,
        build_battle_emos_from_ghost_board(ghost_board, emo_bases)?,
        seed,
        emo_bases,
    )
}

pub fn march_gvg(
    ghost_board0: &mtc::GhostBoard,
    ghost_board1: &mtc::GhostBoard,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<(u8, u8, mtc::battle::Logs)> {
    march(
        build_battle_emos_from_ghost_board(ghost_board0, emo_bases)?,
        build_battle_emos_from_ghost_board(ghost_board1, emo_bases)?,
        seed,
        emo_bases,
    )
}

static EMPTY_GRADE_AND_GHOST_BOARD: mtc::GradeAndGhostBoard = mtc::GradeAndGhostBoard {
    grade: 1,
    board: mtc::GhostBoard(vec![]),
};

pub fn get_grade_and_ghost_board<'a>(
    grade_and_ghost_boards: &'a [mtc::GradeAndGhostBoard],
    state: &mtc::GhostState,
    turn: u8,
) -> &'a mtc::GradeAndGhostBoard {
    let hist_len = grade_and_ghost_boards.len() as u8;
    if hist_len == 0 {
        &EMPTY_GRADE_AND_GHOST_BOARD
    } else {
        let effective_turn = if let mtc::GhostState::Retired { final_turn } = state {
            cmp::min(turn, *final_turn)
        } else {
            turn
        };
        if hist_len >= effective_turn {
            &grade_and_ghost_boards[effective_turn as usize - 1]
        } else {
            &grade_and_ghost_boards[hist_len as usize - 1]
        }
    }
}

pub fn select_battle_ghost_index(
    states: &[mtc::GhostState],
    previous_index: u8,
    seed: u64,
) -> Result<u8> {
    let live_indexes = states
        .iter()
        .zip(0u8..)
        .filter(|(s, _)| matches!(s, mtc::GhostState::Active { health: _ }))
        .map(|(_, i)| i)
        .collect::<Vec<_>>();

    let len = live_indexes.len();

    ensure!(len != 0, "select_battle_ghost_index: live zero");

    if len == 1 {
        return Ok(live_indexes[0]);
    }

    let mut rng = Pcg64Mcg::seed_from_u64(seed);

    live_indexes
        .into_iter()
        .filter(|&i| i != previous_index)
        .collect::<Vec<_>>()
        .choose(&mut rng)
        .copied()
        .ok_or_else(|| anyhow!("choose failed"))
}

fn build_battle_emos_from_board(
    board: &mtc::Board,
    emo_bases: &emo::Bases,
) -> Result<Vec<BattleEmo>> {
    let mut emos = Vec::with_capacity(board.0.len());
    for emo in board.0.iter() {
        emos.push(BattleEmo::new_with_attributes(
            emo_bases.find(emo.base_id)?,
            emo.attributes.clone(),
        ));
    }
    Ok(emos)
}

fn build_battle_emos_from_ghost_board(
    ghost_board: &mtc::GhostBoard,
    emo_bases: &emo::Bases,
) -> Result<Vec<BattleEmo>> {
    let mut emos = Vec::with_capacity(ghost_board.0.len());
    for emo in ghost_board.0.iter() {
        emos.push(BattleEmo::new_with_attributes(
            emo_bases.find(emo.base_id)?,
            emo.attributes.clone(),
        ));
    }
    Ok(emos)
}

fn calc_final_place(
    health: u8,
    pre_health: u8,
    ghost_states: &[mtc::GhostState],
    pre_ghost_states: &[mtc::GhostState],
) -> Option<u8> {
    let are_all_ghosts_retired = ghost_states
        .iter()
        .all(|s| matches!(s, mtc::GhostState::Retired { final_turn: _ }));

    if health == 0 || are_all_ghosts_retired {
        if are_all_ghosts_retired {
            Some(1)
        } else {
            let mut place = 1;
            for (i, ghost_state) in ghost_states.iter().enumerate() {
                if let mtc::GhostState::Active { health: _ } = ghost_state {
                    place += 1;
                    continue;
                }
                if let mtc::GhostState::Active { health: g_health } = pre_ghost_states[i] {
                    if g_health > pre_health {
                        place += 1;
                        continue;
                    }
                }
            }
            Some(place)
        }
    } else {
        None
    }
}

struct GhostSet<'a> {
    index: u8,
    ghost: mtc::Ghost,
    state: &'a mut mtc::GhostState,
}

fn battle_pvg_and_gvg(
    board: &mtc::Board,
    grade: u8,
    health: &mut u8,
    ghosts: &[mtc::Ghost],
    ghost_states: &mut [mtc::GhostState],
    battle_ghost_index: u8,
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<()> {
    let mut ghost_sets = ghosts
        .iter()
        .zip(ghost_states.iter_mut())
        .enumerate()
        .map(|(i, (g, gs))| GhostSet {
            index: i as u8,
            ghost: g.clone(),
            state: gs,
        })
        .collect::<Vec<GhostSet>>();

    if battle_ghost_index != 0 {
        ghost_sets.swap(0, battle_ghost_index as usize);
    }

    let (ghost_set0, gs) = ghost_sets
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;
    let (ghost_set1, gs) = gs
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;
    let (ghost_set2, _) = gs
        .split_first_mut()
        .ok_or_else(|| anyhow!("failed to split ghost_sets"))?;

    battle_pvg(
        grade,
        health,
        board,
        ghost_set0.state,
        &ghost_set0.ghost.history,
        turn,
        seed,
        emo_bases,
    )?;
    battle_gvg(
        ghost_set1.state,
        &ghost_set1.ghost.history,
        ghost_set2.state,
        &ghost_set2.ghost.history,
        turn,
        seed,
        emo_bases,
    )?;

    ghost_sets.sort_unstable_by_key(|g| g.index);

    Ok(())
}

fn damage_ghost_health(board_grade: u8, grade: u8, ghost_state: &mut mtc::GhostState, turn: u8) {
    if let mtc::GhostState::Active { ref mut health } = ghost_state {
        damage_health(board_grade, grade, health);
        if *health == 0 {
            *ghost_state = mtc::GhostState::Retired { final_turn: turn };
        }
    }
}

fn damage_player_health(board_grade: u8, grade: u8, health: &mut u8) {
    damage_health(board_grade, grade, health)
}

fn damage_health(board_grade: u8, grade: u8, health: &mut u8) {
    if board_grade > 0 {
        *health = health.saturating_sub(board_grade + grade);
    }
}

fn battle_pvg(
    grade: u8,
    health: &mut u8,
    board: &mtc::Board,
    ghost_state: &mut mtc::GhostState,
    ghost_history: &[mtc::GradeAndGhostBoard],
fn battle_gvg(
    ghost0_state: &mut mtc::GhostState,
    ghost0_history: &[mtc::GradeAndGhostBoard],
    ghost1_state: &mut mtc::GhostState,
    ghost1_history: &[mtc::GradeAndGhostBoard],
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<()> {
    let ghost0_grade_and_ghost_board =
        get_grade_and_ghost_board(ghost0_history, ghost0_state, turn);
    let ghost1_grade_and_ghost_board =
        get_grade_and_ghost_board(ghost1_history, ghost1_state, turn);

    let (ghost0_board_grade, ghost1_board_grade, _) = march_gvg(
        &ghost0_grade_and_ghost_board.board,
        &ghost1_grade_and_ghost_board.board,
        seed,
        emo_bases,
    )?;

    damage_ghost_health(
        ghost1_board_grade,
        ghost1_grade_and_ghost_board.grade,
        ghost0_state,
        turn,
    );
    damage_ghost_health(
        ghost0_board_grade,
        ghost0_grade_and_ghost_board.grade,
        ghost1_state,
        turn,
    );

    Ok(())
}
    turn: u8,
    seed: u64,
    emo_bases: &emo::Bases,
) -> Result<()> {
    let ghost_grade_and_ghost_board = get_grade_and_ghost_board(ghost_history, ghost_state, turn);
    let (player_board_grade, ghost_board_grade, _) =
        march_pvg(board, &ghost_grade_and_ghost_board.board, seed, emo_bases)?;

    damage_ghost_health(player_board_grade, grade, ghost_state, turn);
    damage_player_health(ghost_board_grade, ghost_grade_and_ghost_board.grade, health);

    Ok(())
}
function_block-function_prefix_line
[ { "content": "#[wasm_bindgen]\n\npub fn march_pvg(board: &[u8], ghost_board: &[u8], seed: &str, emo_bases: &[u8]) -> Vec<u8> {\n\n mtc::battle::organizer::march_pvg(\n\n &mtc::decoders::decode_board(board),\n\n &mtc::decoders::decode_ghost_board(ghost_board),\n\n seed.parse().unwrap(),\n\n &mtc::decoders::decode_emo_bases(emo_bases),\n\n )\n\n .map_err(|e| {\n\n console_log!(\n\n \"march_pvg: {}, {}, {}, {}\",\n\n hex::encode(board),\n\n hex::encode(ghost_board),\n\n seed,\n\n hex::encode(emo_bases)\n\n );\n\n e\n\n })\n\n .unwrap()\n\n .encode()\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 1, "score": 356748.01548478333 }, { "content": "pub fn get_pool_emo_count_by_grade(grade: u8) -> Result<u8> {\n\n Ok(match grade {\n\n 1 => 7,\n\n 2 => 6,\n\n 3 => 6,\n\n 4 => 5,\n\n 5 => 5,\n\n 6 => 4,\n\n _ => bail!(\"invalid grade: {}\", grade),\n\n })\n\n}\n\n\n", "file_path": "common/rs/src/mtc/utils.rs", "rank": 2, "score": 336887.30149116466 }, { "content": "#[wasm_bindgen]\n\npub fn select_battle_ghost_index(states: &[u8], previous_index: u8, seed: &str) -> u8 {\n\n mtc::battle::organizer::select_battle_ghost_index(\n\n &mtc::decoders::decode_ghost_states(states),\n\n previous_index,\n\n seed.parse().unwrap(),\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 3, "score": 333588.1197569482 }, { "content": "fn get_lowest_attack_emo_index(emos: &[BattleEmo], rng: &mut Pcg64Mcg) -> Result<u8> {\n\n let mut lowest_attack_emo_indexes = vec![0u8];\n\n let mut lowest_attack = emos\n\n .get(0)\n\n .ok_or_else(|| anyhow!(\"emos[0] not found\"))?\n\n .attributes\n\n .attack;\n\n\n\n for (emo, i) in emos.iter().zip(0u8..).skip(1) {\n\n let attack = emo.attributes.attack;\n\n if lowest_attack > attack {\n\n lowest_attack_emo_indexes = vec![i];\n\n lowest_attack = attack;\n\n }\n\n if lowest_attack == attack {\n\n lowest_attack_emo_indexes.push(i);\n\n }\n\n }\n\n\n\n let index = lowest_attack_emo_indexes\n\n .choose(rng)\n\n .ok_or_else(|| anyhow!(\"choose none for lowest\"))?;\n\n\n\n Ok(*index)\n\n}\n\n\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 4, "score": 317224.1199239977 }, { "content": "fn get_first_attack_player_index(boards: &BattleBoards, rng: &mut Pcg64Mcg) -> Result<u8> {\n\n let len0 = boards.count_board_emos(0)?;\n\n let len1 = boards.count_board_emos(1)?;\n\n\n\n Ok(match len0.cmp(&len1) {\n\n cmp::Ordering::Greater => 0,\n\n cmp::Ordering::Less => 1,\n\n cmp::Ordering::Equal => {\n\n let b: bool = rng.gen();\n\n if b {\n\n 0\n\n } else {\n\n 1\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "common/rs/src/mtc/battle/march.rs", "rank": 5, "score": 312209.84562441835 }, { "content": "#[wasm_bindgen]\n\npub fn get_pool_emo_count_by_grade(grade: u8) -> u8 {\n\n mtc::utils::get_pool_emo_count_by_grade(grade).unwrap()\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 8, "score": 304093.8394915126 }, { "content": "#[wasm_bindgen]\n\npub fn sell_emo(board: &[u8], emo_index: u8, emo_bases: &[u8]) -> Vec<u8> {\n\n decode_and_encode_for_shop(board, |board, logs| {\n\n mtc::shop::board::sell_emo(\n\n board,\n\n logs,\n\n emo_index,\n\n &mtc::decoders::decode_emo_bases(emo_bases),\n\n )\n\n })\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 9, "score": 286218.9470252554 }, { "content": "fn mov(board: &mut ShopBoard, result_indexes: &[u8]) -> Result<()> {\n\n let mut current_indexes = board.emo_indexes();\n\n let result_indexes_len = result_indexes.len();\n\n\n\n ensure!(\n\n current_indexes.len() == result_indexes_len,\n\n 
\"invalid indexes len\"\n\n );\n\n\n\n for (&result_index, idx) in result_indexes.iter().zip(0u8..) {\n\n let actual_index = current_indexes\n\n .iter()\n\n .zip(0u8..)\n\n .find(|&(&current_index, _)| current_index == result_index)\n\n .map(|(_, i)| i)\n\n .ok_or_else(|| anyhow!(\"invalid index\"))?;\n\n\n\n current_indexes.swap(idx.into(), actual_index.into());\n\n board.swap_emos(idx, actual_index);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/player_operation.rs", "rank": 10, "score": 283671.77460324264 }, { "content": "pub fn get_catalog(pool: &[mtc::Emo], board: &mtc::Board, seed: u64) -> Result<mtc::shop::Catalog> {\n\n let onboard_deck_emo_ids: Vec<u16> =\n\n board.0.iter().flat_map(|e| e.mtc_emo_ids.clone()).collect();\n\n let mut rng = Pcg64Mcg::seed_from_u64(seed);\n\n let mut emos = pool\n\n .iter()\n\n .filter(|e| !onboard_deck_emo_ids.contains(&e.id))\n\n .choose_multiple(&mut rng, (CATALOG_COUNT * CATALOG_LINE_EMO_COUNT).into())\n\n .into_iter()\n\n .cloned()\n\n .collect::<Vec<_>>();\n\n emos.shuffle(&mut rng);\n\n\n\n let mut catalog = mtc::shop::Catalog(Vec::with_capacity(CATALOG_COUNT.into()));\n\n for _i in 0..CATALOG_COUNT {\n\n let mut catalog_line =\n\n mtc::shop::CatalogLine(Vec::with_capacity(CATALOG_LINE_EMO_COUNT.into()));\n\n for _j in 0..CATALOG_LINE_EMO_COUNT {\n\n catalog_line\n\n .0\n\n .push(emos.pop().ok_or_else(|| anyhow!(\"catalog failed\"))?);\n\n }\n\n catalog.0.push(catalog_line);\n\n }\n\n Ok(catalog)\n\n}\n", "file_path": "common/rs/src/mtc/shop/catalog.rs", "rank": 11, "score": 279460.8240739512 }, { "content": "fn get_attractive_emo_index(emos: &[BattleEmo], rng: &mut Pcg64Mcg) -> Option<u8> {\n\n let attractive_emo_indexes = emos\n\n .iter()\n\n .zip(0u8..)\n\n .filter(|(e, _)| {\n\n let mut is_attractive = false;\n\n for ability in e.attributes.abilities.iter() {\n\n if let emo::ability::Ability::Battle(emo::ability::battle::Battle::Special(\n\n emo::ability::battle::Special::Attractive,\n\n )) = ability\n\n {\n\n is_attractive = true;\n\n break;\n\n }\n\n }\n\n is_attractive\n\n })\n\n .map(|(_, i)| i)\n\n .collect::<Vec<u8>>();\n\n\n\n attractive_emo_indexes.choose(rng).copied()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 12, "score": 278638.7269195057 }, { "content": "#[wasm_bindgen]\n\npub fn move_emo(board: &[u8], emo_index: u8, is_right: bool) -> Vec<u8> {\n\n decode_and_encode_for_shop(board, |board, logs| {\n\n mtc::shop::board::move_emo(board, logs, emo_index, is_right)\n\n })\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 13, "score": 276135.7376734821 }, { "content": "#[wasm_bindgen]\n\npub fn start_shop(board: &[u8], seed: &str, emo_bases: &[u8]) -> Vec<u8> {\n\n decode_and_encode_for_shop(board, |board, logs| {\n\n mtc::shop::board::start_shop(\n\n board,\n\n logs,\n\n seed.parse().unwrap(),\n\n &mtc::decoders::decode_emo_bases(emo_bases),\n\n )\n\n })\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 14, "score": 275815.75663416093 }, { "content": "pub fn build_ghost_from_history(grade_and_board_history: &[mtc::GradeAndBoard]) -> mtc::Ghost {\n\n let history = grade_and_board_history\n\n .iter()\n\n .map(|h| mtc::GradeAndGhostBoard {\n\n grade: h.grade,\n\n board: build_ghost_board_from_board(&h.board),\n\n })\n\n .collect();\n\n mtc::Ghost { history }\n\n}\n\n\n", "file_path": "common/rs/src/mtc/result.rs", "rank": 15, "score": 263904.78382276517 }, { "content": "fn sum_grades(emos: &[BattleEmo], emo_bases: &emo::Bases) -> Result<u8> {\n\n let mut sum = 
0u8;\n\n for emo in emos.iter() {\n\n sum = sum.saturating_add(emo_bases.find(emo.base_id)?.grade);\n\n }\n\n Ok(sum)\n\n}\n", "file_path": "common/rs/src/mtc/battle/march.rs", "rank": 16, "score": 259651.7169628517 }, { "content": "pub fn decode_ghost_board(ghost_board: &[u8]) -> mtc::GhostBoard {\n\n let mut ghost_board: &[u8] = ghost_board;\n\n mtc::GhostBoard::decode(&mut ghost_board).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 17, "score": 256898.02128438995 }, { "content": "fn upgrade(grade: &mut u8, upgrade_coin: &mut Option<u8>, coin: &mut u8) -> Result<()> {\n\n match *upgrade_coin {\n\n Some(c) => {\n\n *coin = coin\n\n .checked_sub(c)\n\n .ok_or_else(|| anyhow!(\"Not enough coin for upgrade\"))?;\n\n ensure!(*grade < LAST_GRADE, \"already last grade\");\n\n *grade += 1;\n\n *upgrade_coin = get_upgrade_coin(*grade);\n\n }\n\n None => {\n\n bail!(\"no upgrade\");\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "common/rs/src/mtc/shop/player_operation.rs", "rank": 18, "score": 256310.62720886577 }, { "content": "fn remove_triple_emos(board: &mut ShopBoard, triple_source_indexes: &[u8]) -> Vec<ShopBoardEmo> {\n\n let mut removed = Vec::<ShopBoardEmo>::new();\n\n\n\n let mut indexes = triple_source_indexes.to_vec();\n\n indexes.sort_unstable();\n\n indexes.reverse();\n\n\n\n for &i in indexes.iter() {\n\n removed.push(board.remove_emo(i));\n\n }\n\n\n\n removed.reverse();\n\n\n\n removed\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 19, "score": 256132.26631721162 }, { "content": "#[wasm_bindgen]\n\npub fn get_catalog(pool: &[u8], board: &[u8], seed: &str) -> Vec<u8> {\n\n mtc::shop::catalog::get_catalog(\n\n &mtc::decoders::decode_mtc_emos(pool),\n\n &mtc::decoders::decode_board(board),\n\n seed.parse().unwrap(),\n\n )\n\n .unwrap()\n\n .encode()\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 20, "score": 247446.7421159161 }, { "content": "pub fn decode_ghost_state(ghost_state: &[u8]) -> mtc::GhostState {\n\n let mut ghost_state: &[u8] = ghost_state;\n\n mtc::GhostState::decode(&mut ghost_state).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 21, "score": 243606.82093751174 }, { "content": "fn get_turn_and_grade_and_board(history: &[mtc::GradeAndBoard]) -> (u8, u8, mtc::Board) {\n\n let (turn, previous_grade_and_board) = get_turn_and_previous_grade_and_board(history);\n\n let grade = previous_grade_and_board.grade;\n\n let board = previous_grade_and_board.board;\n\n (turn, grade, board)\n\n}\n", "file_path": "chain/pallets/game/src/lib.rs", "rank": 22, "score": 243052.4256258898 }, { "content": "// works, but poor performance\n\npub fn solve(account: &[u8; 32], count: u32) -> u64 {\n\n let mut input = [0u8; 128];\n\n\n\n input[0..32].copy_from_slice(account);\n\n input[32..36].copy_from_slice(&count.to_le_bytes()[..]);\n\n input[36] = 123;\n\n\n\n let mut solution = 0u64;\n\n\n\n loop {\n\n input[120..].copy_from_slice(&solution.to_le_bytes()[..]);\n\n\n\n let result = blake2b(32, &[], &input);\n\n let full_hash = result.as_bytes();\n\n\n\n let n = u32::from_le_bytes([full_hash[0], full_hash[1], full_hash[2], full_hash[3]]);\n\n\n\n if n < THRESHOLD {\n\n return solution;\n\n }\n", "file_path": "common/rs/src/pow.rs", "rank": 23, "score": 242482.87399394828 }, { "content": "pub fn decode_board_emo(board_emo: &[u8]) -> mtc::BoardEmo {\n\n let mut board_emo: &[u8] = board_emo;\n\n mtc::BoardEmo::decode(&mut board_emo).unwrap()\n\n}\n\n\n", 
"file_path": "common/rs/src/mtc/decoders.rs", "rank": 24, "score": 240408.13320503 }, { "content": "#[wasm_bindgen]\n\npub fn get_initial_coin_by_turn(turn: u8) -> u8 {\n\n mtc::shop::coin::get_initial_coin_by_turn(turn)\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 25, "score": 238518.01825119095 }, { "content": "#[wasm_bindgen]\n\npub fn get_grade_and_ghost_board(\n\n grade_and_ghost_boards: &[u8],\n\n ghost_state: &[u8],\n\n turn: u8,\n\n) -> Vec<u8> {\n\n mtc::battle::organizer::get_grade_and_ghost_board(\n\n &mtc::decoders::decode_grade_and_ghost_boards(grade_and_ghost_boards),\n\n &mtc::decoders::decode_ghost_state(ghost_state),\n\n turn,\n\n )\n\n .encode()\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 26, "score": 236897.45833012962 }, { "content": "pub fn decode_ghost_states(ghost_states: &[u8]) -> Vec<mtc::GhostState> {\n\n let mut ghost_states: &[u8] = ghost_states;\n\n Vec::decode(&mut ghost_states).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 27, "score": 236482.51102130057 }, { "content": "pub fn switch_player_index(index: u8) -> u8 {\n\n if index == 0 {\n\n 1\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "common/rs/src/mtc/battle/common.rs", "rank": 28, "score": 235327.6884751167 }, { "content": "pub fn decode_board_emos(board_emos: &[u8]) -> Vec<mtc::BoardEmo> {\n\n let mut board_emos: &[u8] = board_emos;\n\n Vec::decode(&mut board_emos).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 29, "score": 233318.00325862702 }, { "content": "pub fn get_initial_coin_by_turn(turn: u8) -> u8 {\n\n let coin = turn + 2;\n\n if coin > MAX_COIN {\n\n MAX_COIN\n\n } else {\n\n coin\n\n }\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/coin.rs", "rank": 30, "score": 232735.29657346272 }, { "content": "pub fn decode_grade_and_ghost_boards(\n\n grade_and_ghost_boards: &[u8],\n\n) -> Vec<mtc::GradeAndGhostBoard> {\n\n let mut grade_and_ghost_boards: &[u8] = grade_and_ghost_boards;\n\n Vec::decode(&mut grade_and_ghost_boards).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 31, "score": 232381.00011953144 }, { "content": "pub fn get_turn_and_previous_grade_and_board(\n\n history: &[mtc::GradeAndBoard],\n\n) -> (u8, mtc::GradeAndBoard) {\n\n let history_len = history.len();\n\n let grade_and_board = if history_len > 0 {\n\n history[history_len - 1].clone()\n\n } else {\n\n mtc::GradeAndBoard {\n\n grade: 1,\n\n board: mtc::Board(vec![]),\n\n }\n\n };\n\n let turn = history_len as u8 + 1;\n\n\n\n (turn, grade_and_board)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use sp_std::collections::btree_map::BTreeMap;\n\n\n\n #[test]\n\n fn test_emo_bases_find() {\n\n let bases = emo::Bases(BTreeMap::new());\n\n let r = bases.find(1);\n\n assert_eq!(r.is_err(), true);\n\n }\n\n}\n", "file_path": "common/rs/src/mtc/utils.rs", "rank": 32, "score": 228210.51461579234 }, { "content": "pub fn decode_board(board: &[u8]) -> mtc::Board {\n\n let mut board: &[u8] = board;\n\n mtc::Board::decode(&mut board).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 33, "score": 224076.5814781019 }, { "content": "pub fn check_solution(account: &[u8; 32], count: u32, solution: u64) -> bool {\n\n let mut input = [0u8; 128];\n\n\n\n input[0..32].copy_from_slice(account);\n\n input[32..36].copy_from_slice(&count.to_le_bytes()[..]);\n\n input[36] = 123;\n\n input[120..].copy_from_slice(&solution.to_le_bytes()[..]);\n\n\n\n let result = blake2b(32, &[], &input);\n\n let 
full_hash = result.as_bytes();\n\n\n\n let n = u32::from_le_bytes([full_hash[0], full_hash[1], full_hash[2], full_hash[3]]);\n\n\n\n n < THRESHOLD\n\n}\n\n\n", "file_path": "common/rs/src/pow.rs", "rank": 34, "score": 221843.24514503038 }, { "content": "pub fn partial_bytes_to_u64(array: &[u8]) -> u64 {\n\n ((array[0] as u64) << 56)\n\n + ((array[1] as u64) << 48)\n\n + ((array[2] as u64) << 40)\n\n + ((array[3] as u64) << 32)\n\n + ((array[4] as u64) << 24)\n\n + ((array[5] as u64) << 16)\n\n + ((array[6] as u64) << 8)\n\n + (array[7] as u64)\n\n}\n", "file_path": "common/rs/src/utils.rs", "rank": 36, "score": 220844.90645987165 }, { "content": "#[wasm_bindgen]\n\npub fn get_upgrade_coin(grade: u8) -> Option<u8> {\n\n mtc::shop::coin::get_upgrade_coin(grade)\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 37, "score": 220187.20581382306 }, { "content": "fn get_attack_emo_index(emos: &[BattleEmo]) -> u8 {\n\n let mut index: u8 = 0;\n\n let mut min_attack_and_survived_count = emos[index as usize].attack_and_survived_count;\n\n\n\n for (_, i) in emos.iter().zip(0u8..).skip(1) {\n\n let attack_and_survived_count = emos[i as usize].attack_and_survived_count;\n\n if min_attack_and_survived_count > attack_and_survived_count {\n\n min_attack_and_survived_count = attack_and_survived_count;\n\n index = i;\n\n }\n\n }\n\n\n\n index\n\n}\n\n\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 38, "score": 220003.90012893418 }, { "content": "pub fn get_upgrade_coin(grade: u8) -> Option<u8> {\n\n match grade {\n\n 2 => Some(5),\n\n 3 => Some(7),\n\n 4 => Some(8),\n\n 5 => Some(9),\n\n 6 => Some(9),\n\n _ => None,\n\n }\n\n}\n", "file_path": "common/rs/src/mtc/shop/coin.rs", "rank": 40, "score": 214214.42467146798 }, { "content": "pub fn decode_emo_bases(emo_bases: &[u8]) -> emo::Bases {\n\n let mut emo_bases: &[u8] = emo_bases;\n\n emo::Bases::decode(&mut emo_bases).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 41, "score": 209102.77448586604 }, { "content": "pub fn decode_ghosts(ghosts: &[u8]) -> Vec<mtc::Ghost> {\n\n let mut ghosts: &[u8] = ghosts;\n\n Vec::decode(&mut ghosts).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 42, "score": 208144.34931944637 }, { "content": "fn build_ghost_board_from_board(board: &mtc::Board) -> mtc::GhostBoard {\n\n mtc::GhostBoard(\n\n board\n\n .0\n\n .iter()\n\n .map(|be| mtc::GhostBoardEmo {\n\n base_id: be.base_id,\n\n attributes: be.attributes.clone(),\n\n })\n\n .collect(),\n\n )\n\n}\n", "file_path": "common/rs/src/mtc/result.rs", "rank": 43, "score": 203695.3858778978 }, { "content": "pub fn decode_mtc_emos(mtc_emos: &[u8]) -> Vec<mtc::Emo> {\n\n let mut mtc_emos: &[u8] = mtc_emos;\n\n Vec::decode(&mut mtc_emos).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 45, "score": 201074.84643617563 }, { "content": "// return remaining board grades and logs\n\n// logs is empty if not \"view-logs\"\n\npub fn march(\n\n battle_emos0: Vec<BattleEmo>,\n\n battle_emos1: Vec<BattleEmo>,\n\n seed: u64,\n\n emo_bases: &emo::Bases,\n\n) -> Result<(u8, u8, mtc::battle::Logs)> {\n\n let mut boards = BattleBoards([battle_emos0, battle_emos1]);\n\n let mut logs = mtc::battle::Logs::new();\n\n\n\n let mut rng = Pcg64Mcg::seed_from_u64(seed);\n\n let first_attack_player_index = get_first_attack_player_index(&boards, &mut rng)?;\n\n let mut tick = Tick::new(first_attack_player_index);\n\n\n\n call_pre_abilities(\n\n &mut boards,\n\n first_attack_player_index,\n\n &mut rng,\n\n 
&mut logs,\n\n emo_bases,\n\n )?;\n", "file_path": "common/rs/src/mtc/battle/march.rs", "rank": 46, "score": 200100.521455269 }, { "content": "// return coin\n\npub fn move_emo(\n\n board: &mut ShopBoard,\n\n logs: &mut mtc::shop::BoardLogs,\n\n emo_index: u8,\n\n is_right: bool,\n\n) -> Result<u8> {\n\n let len = board.count_emos();\n\n\n\n ensure!(emo_index < len, \"invalid index for move\");\n\n if emo_index == 0 && !is_right {\n\n bail!(\"move_emo: cannot move left\");\n\n }\n\n if emo_index + 1 == len && is_right {\n\n bail!(\"move_emo: cannot move right\");\n\n }\n\n\n\n let from_index = emo_index;\n\n let to_index = if is_right {\n\n from_index + 1\n\n } else {\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 47, "score": 197488.32304503696 }, { "content": "// return coin\n\npub fn sell_emo(\n\n board: &mut ShopBoard,\n\n logs: &mut mtc::shop::BoardLogs,\n\n emo_index: u8,\n\n emo_bases: &emo::Bases,\n\n) -> Result<u8> {\n\n ensure!(emo_index < board.count_emos(), \"invalid index for sell\");\n\n\n\n logs.add(&|| mtc::shop::BoardLog::Remove { index: emo_index });\n\n\n\n let sold = board.remove_emo(emo_index);\n\n\n\n let mut gotten_coin = EMO_SELL_COIN;\n\n\n\n for ability in sold.get_peri_abilities().into_iter() {\n\n if let emo::ability::shop::Peri::AsOneself {\n\n trigger: emo::ability::shop::PeriAsOneselfTrigger::Sell,\n\n action,\n\n } = ability\n\n {\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 48, "score": 197488.32304503696 }, { "content": "// return coin\n\npub fn add_emo(\n\n board: &mut ShopBoard,\n\n logs: &mut mtc::shop::BoardLogs,\n\n mtc_emo_ids: &[u16],\n\n base_id: u16,\n\n is_triple: bool,\n\n emo_index: u8,\n\n emo_bases: &emo::Bases,\n\n) -> Result<u8> {\n\n let board_emo =\n\n ShopBoardEmo::new_with_base(mtc_emo_ids.to_vec(), emo_bases.find(base_id)?, is_triple);\n\n add_emo_with_board_emo(board, logs, board_emo, emo_index, emo_bases)\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 49, "score": 197488.32304503696 }, { "content": "pub fn build_initial_ghost_states() -> Vec<mtc::GhostState> {\n\n vec![\n\n mtc::GhostState::Active {\n\n health: utils::PLAYER_INITIAL_HEALTH,\n\n },\n\n mtc::GhostState::Active {\n\n health: utils::PLAYER_INITIAL_HEALTH,\n\n },\n\n mtc::GhostState::Active {\n\n health: utils::PLAYER_INITIAL_HEALTH,\n\n },\n\n ]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n // use super::*;\n\n\n\n // #[test]\n\n // fn test_build_deck() {\n\n // }\n\n}\n", "file_path": "common/rs/src/mtc/setup.rs", "rank": 50, "score": 189964.05329143515 }, { "content": "pub fn calculate_new_ep(player_ep: u16, player_place: u8, sorted_ghost_eps: &[u16]) -> u16 {\n\n let player_ep: f32 = player_ep.into();\n\n let mut diff = 0f32;\n\n\n\n for (&ep, i) in sorted_ghost_eps.iter().zip(0u8..) 
{\n\n let s = if player_place - 1 <= i { 1f32 } else { 0f32 };\n\n diff += EP_K * (s - calculate_expected(player_ep, ep.into()));\n\n }\n\n\n\n let new = player_ep + diff;\n\n\n\n if new >= MAX_EP as f32 {\n\n return MAX_EP;\n\n }\n\n if new <= MIN_EP as f32 {\n\n return MIN_EP;\n\n }\n\n\n\n roundf(new) as u16\n\n}\n\n\n", "file_path": "common/rs/src/mtc/ep.rs", "rank": 53, "score": 183194.50033984223 }, { "content": "fn get_coin(gotten_coin: &mut u8, is_triple_action: bool, coin: u8) {\n\n *gotten_coin = gotten_coin.saturating_add(if is_triple_action {\n\n coin.saturating_mul(2)\n\n } else {\n\n coin\n\n });\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 54, "score": 182539.42962600704 }, { "content": "fn get_matched_emo_indexs_from_board(\n\n board: &[BattleEmo],\n\n emo_index: u8,\n\n target: emo::ability::Target,\n\n is_action_emo_retired: bool,\n\n) -> Result<Vec<u8>> {\n\n let indexes = match target {\n\n emo::ability::Target::Oneself => {\n\n if is_action_emo_retired {\n\n vec![]\n\n } else {\n\n vec![emo_index]\n\n }\n\n }\n\n emo::ability::Target::Others {\n\n destination,\n\n typ_and_triple,\n\n } => {\n\n let emos_with_index = match destination {\n\n emo::ability::Destination::Right => {\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 55, "score": 181758.87180856246 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let sc_service::PartialComponents {\n\n client,\n\n backend,\n\n mut task_manager,\n\n import_queue,\n\n mut keystore_container,\n\n select_chain,\n\n transaction_pool,\n\n inherent_data_providers,\n\n other: (block_import, grandpa_link),\n\n } = new_partial(&config)?;\n\n\n\n if let Some(url) = &config.keystore_remote {\n\n match remote_keystore(url) {\n\n Ok(k) => keystore_container.set_remote_keystore(k),\n\n Err(e) => {\n\n return Err(ServiceError::Other(format!(\n\n \"Error hooking up remote keystore for {}: {}\",\n\n url, e\n", "file_path": "chain/node/src/service.rs", "rank": 56, "score": 181269.53324828378 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n let (client, backend, keystore_container, mut task_manager, on_demand) =\n\n sc_service::new_light_parts::<Block, RuntimeApi, Executor>(&config)?;\n\n\n\n config\n\n .network\n\n .extra_sets\n\n .push(sc_finality_grandpa::grandpa_peers_set_config());\n\n\n\n let select_chain = sc_consensus::LongestChain::new(backend.clone());\n\n\n\n let transaction_pool = Arc::new(sc_transaction_pool::BasicPool::new_light(\n\n config.transaction_pool.clone(),\n\n config.prometheus_registry(),\n\n task_manager.spawn_handle(),\n\n client.clone(),\n\n on_demand.clone(),\n\n ));\n\n\n\n let (grandpa_block_import, _) = sc_finality_grandpa::block_import(\n", "file_path": "chain/node/src/service.rs", "rank": 57, "score": 181269.53324828378 }, { "content": "pub fn decode_typ_opts(typ_opts: &[u8]) -> Vec<Option<emo::Typ>> {\n\n let mut typ_opts: &[u8] = typ_opts;\n\n Vec::decode(&mut typ_opts).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 58, "score": 177213.3931027617 }, { "content": "fn get_matched_emo_indexs_from_board_by_target_or_random(\n\n board: &[BattleEmo],\n\n emo_index: u8,\n\n target_or_random: emo::ability::TargetOrRandom,\n\n is_action_emo_retired: bool,\n\n rng: &mut Pcg64Mcg,\n\n) -> Result<Vec<u8>> {\n\n let indexes = match target_or_random {\n\n 
emo::ability::TargetOrRandom::Target(t) => {\n\n get_matched_emo_indexs_from_board(board, emo_index, t, is_action_emo_retired)?\n\n }\n\n emo::ability::TargetOrRandom::Random {\n\n typ_and_triple,\n\n count,\n\n } => {\n\n let mut v = board\n\n .iter()\n\n .zip(0u8..)\n\n .filter(|(_, i)| {\n\n if is_action_emo_retired {\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 59, "score": 175609.6712928899 }, { "content": "pub fn decode_vec_u8(v: &[u8]) -> Vec<u8> {\n\n let mut v: &[u8] = v;\n\n Vec::decode(&mut v).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 60, "score": 175037.31870151937 }, { "content": "fn add_attack_and_health_to_emos(\n\n board: &mut ShopBoard,\n\n logs: &mut mtc::shop::BoardLogs,\n\n emo_index: u8,\n\n is_emo_removed: bool,\n\n target: emo::ability::Target,\n\n attack: u16,\n\n health: u16,\n\n emo_bases: &emo::Bases,\n\n) -> Result<()> {\n\n for (board_emo, index) in\n\n get_emos_by_target(board, emo_index, is_emo_removed, target, emo_bases)?.into_iter()\n\n {\n\n add_attack_and_health_to_emo(board_emo, logs, index, attack, health);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 61, "score": 174675.3369546443 }, { "content": "fn add_attack_and_health_to_emo(\n\n board_emo: &mut ShopBoardEmo,\n\n logs: &mut mtc::shop::BoardLogs,\n\n board_emo_index: u8,\n\n attack: u16,\n\n health: u16,\n\n) {\n\n let calculated_attack = board_emo.attributes.attack.saturating_add(attack);\n\n let calculated_health = board_emo.attributes.health.saturating_add(health);\n\n\n\n logs.add(&|| mtc::shop::BoardLog::IncreaseStats {\n\n index: board_emo_index,\n\n attack,\n\n health,\n\n calculated_attack,\n\n calculated_health,\n\n });\n\n\n\n board_emo.attributes.attack = calculated_attack;\n\n board_emo.attributes.health = calculated_health;\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 62, "score": 174675.3369546443 }, { "content": "fn increase_stats_by_emo_count(\n\n board: &mut ShopBoard,\n\n logs: &mut mtc::shop::BoardLogs,\n\n action_emo_index: u8,\n\n is_triple_action: bool,\n\n is_action_emo_removed: bool,\n\n emo_bases: &emo::Bases,\n\n target: emo::ability::Target,\n\n count_condition: emo::ability::TypOptAndIsTripleOpt,\n\n attack: u16,\n\n health: u16,\n\n) -> Result<()> {\n\n let (mut attack, mut health) = double_attack_and_health_if(is_triple_action, attack, health);\n\n let count: u16 = count_emos_by_typ_and_triple(\n\n board,\n\n action_emo_index,\n\n is_action_emo_removed,\n\n &count_condition,\n\n emo_bases,\n\n )?\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 63, "score": 174651.46608285955 }, { "content": "fn increase_stats_by_emo_count(\n\n player_index: u8,\n\n action_emo_index: u8,\n\n is_action_emo_retired: bool,\n\n is_triple_action: bool,\n\n boards: &mut BattleBoards,\n\n logs: &mut mtc::battle::Logs,\n\n rng: &mut Pcg64Mcg,\n\n target_or_random: emo::ability::TargetOrRandom,\n\n side: emo::ability::Side,\n\n count_condition: emo::ability::TypOptAndIsTripleOpt,\n\n attack: u16,\n\n health: u16,\n\n) -> Result<()> {\n\n let count = match side {\n\n emo::ability::Side::Ally => boards\n\n .get_board(player_index)?\n\n .iter()\n\n .zip(0u8..)\n\n .filter(|&(emo, emo_index)| {\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 64, "score": 174651.46608285955 }, { "content": "fn count_emos_by_typ_and_triple(\n\n board: &ShopBoard,\n\n emo_index: u8,\n\n is_emo_removed: bool,\n\n typ_and_triple: &emo::ability::TypOptAndIsTripleOpt,\n\n 
emo_bases: &emo::Bases,\n\n) -> Result<u8> {\n\n let mut count = 0u8;\n\n for (e, i) in board.emos_with_indexes().into_iter() {\n\n if !is_emo_removed && i == emo_index {\n\n continue;\n\n }\n\n if !is_matched_typ_and_triple_board_emo(typ_and_triple, e, emo_bases)? {\n\n continue;\n\n }\n\n count += 1;\n\n }\n\n Ok(count)\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 65, "score": 174651.46608285955 }, { "content": "fn get_defense_emo_index(\n\n emos: &[BattleEmo],\n\n is_lowest_attack: bool,\n\n rng: &mut Pcg64Mcg,\n\n) -> Result<u8> {\n\n if is_lowest_attack {\n\n return get_lowest_attack_emo_index(emos, rng);\n\n }\n\n\n\n Ok(\n\n if let Some(attractive_emo_index) = get_attractive_emo_index(emos, rng) {\n\n attractive_emo_index\n\n } else {\n\n rng.gen_range(0u8..(emos.len() as u8))\n\n },\n\n )\n\n}\n\n\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 66, "score": 174488.17224000095 }, { "content": "pub fn decode_option_u8(option_u8: &[u8]) -> Option<u8> {\n\n let mut option_u8: &[u8] = option_u8;\n\n Option::decode(&mut option_u8).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 67, "score": 174233.8428430351 }, { "content": "fn get_coin_by_emo_count_div(\n\n board: &mut ShopBoard,\n\n action_emo_index: u8,\n\n is_action_emo_removed: bool,\n\n emo_bases: &emo::Bases,\n\n gotten_coin: &mut u8,\n\n is_triple_action: bool,\n\n count_condition: emo::ability::TypOptAndIsTripleOpt,\n\n divisor: u8,\n\n) -> Result<()> {\n\n let count = count_emos_by_typ_and_triple(\n\n board,\n\n action_emo_index,\n\n is_action_emo_removed,\n\n &count_condition,\n\n emo_bases,\n\n )?;\n\n let base = count / divisor;\n\n *gotten_coin = gotten_coin.saturating_add(if is_triple_action {\n\n base.saturating_mul(2)\n\n } else {\n\n base\n\n });\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 68, "score": 171235.39088557957 }, { "content": "fn get_attack_and_defense_emo_indexes(\n\n boards: &mut BattleBoards,\n\n attack_player_index: u8,\n\n rng: &mut Pcg64Mcg,\n\n) -> Result<(u8, u8)> {\n\n let defense_player_index = switch_player_index(attack_player_index);\n\n\n\n let attack_emo_index = get_attack_emo_index(boards.get_board(attack_player_index)?);\n\n\n\n let mut is_lowest_attack = false;\n\n for (_, ability) in boards\n\n .get_emo(attack_player_index, attack_emo_index)?\n\n .get_abilities()\n\n .into_iter()\n\n {\n\n if let emo::ability::battle::Battle::Special(\n\n emo::ability::battle::Special::AttackLowestAttack,\n\n ) = ability\n\n {\n\n is_lowest_attack = true;\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 69, "score": 171075.91451416444 }, { "content": "/// Parse and run command line arguments\n\npub fn run() -> sc_cli::Result<()> {\n\n let cli = Cli::from_args();\n\n\n\n match &cli.subcommand {\n\n Some(Subcommand::Key(cmd)) => cmd.run(&cli),\n\n Some(Subcommand::BuildSpec(cmd)) => {\n\n let runner = cli.create_runner(cmd)?;\n\n runner.sync_run(|config| cmd.run(config.chain_spec, config.network))\n\n }\n\n Some(Subcommand::CheckBlock(cmd)) => {\n\n let runner = cli.create_runner(cmd)?;\n\n runner.async_run(|config| {\n\n let PartialComponents {\n\n client,\n\n task_manager,\n\n import_queue,\n\n ..\n\n } = service::new_partial(&config)?;\n\n Ok((cmd.run(client, import_queue), task_manager))\n\n })\n", "file_path": "chain/node/src/command.rs", "rank": 70, "score": 168241.21505244458 }, { "content": "#[wasm_bindgen]\n\npub fn add_emo(\n\n board: &[u8],\n\n mtc_emo_ids: &[u16],\n\n base_id: 
u16,\n\n is_triple: bool,\n\n emo_index: u8,\n\n emo_bases: &[u8],\n\n) -> Vec<u8> {\n\n decode_and_encode_for_shop(board, |board, logs| {\n\n mtc::shop::board::add_emo(\n\n board,\n\n logs,\n\n mtc_emo_ids,\n\n base_id,\n\n is_triple,\n\n emo_index,\n\n &mtc::decoders::decode_emo_bases(emo_bases),\n\n )\n\n })\n\n}\n\n\n", "file_path": "front/wasm/src/lib.rs", "rank": 72, "score": 166230.46307813082 }, { "content": "pub fn check_and_build_emo_bases(\n\n mut bases: emo::Bases,\n\n new_bases: emo::Bases,\n\n fixed_base_ids: &[u16],\n\n built_base_ids: &[u16],\n\n force_bases_update: bool,\n\n) -> Result<emo::Bases, &'static str> {\n\n if force_bases_update {\n\n bases = new_bases;\n\n } else {\n\n for (id, value) in new_bases.0.into_iter() {\n\n if bases.0.contains_key(&id) {\n\n continue;\n\n }\n\n bases.0.insert(id, value);\n\n }\n\n }\n\n\n\n let base_keys = bases.0.keys().cloned().collect::<Vec<_>>();\n\n\n", "file_path": "common/rs/src/mtc/emo_bases.rs", "rank": 73, "score": 165891.3632069861 }, { "content": "pub fn attack(\n\n boards: &mut BattleBoards,\n\n attack_player_index: u8,\n\n logs: &mut mtc::battle::Logs,\n\n rng: &mut Pcg64Mcg,\n\n emo_bases: &emo::Bases,\n\n) -> Result<()> {\n\n let (attack_emo_index, defense_emo_index) =\n\n get_attack_and_defense_emo_indexes(boards, attack_player_index, rng)?;\n\n\n\n logs.add(&|| mtc::battle::Log::Attack {\n\n attack_player_index,\n\n attack_emo_index,\n\n defense_emo_index,\n\n });\n\n\n\n attack_damage(\n\n boards,\n\n attack_player_index,\n\n attack_emo_index,\n\n defense_emo_index,\n\n emo_bases,\n\n logs,\n\n rng,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 74, "score": 163216.4859844574 }, { "content": "// return coin\n\npub fn start_shop(\n\n board: &mut ShopBoard,\n\n logs: &mut mtc::shop::BoardLogs,\n\n seed: u64,\n\n emo_bases: &emo::Bases,\n\n) -> Result<u8> {\n\n let mut rng = Pcg64Mcg::seed_from_u64(seed.reverse_bits());\n\n\n\n let mut gotten_coin = 0;\n\n\n\n for (action_emo_index, ability) in board.get_board_pre_abilities().into_iter() {\n\n call_pre_ability(\n\n board,\n\n &mut gotten_coin,\n\n &mut rng,\n\n logs,\n\n ability,\n\n action_emo_index,\n\n emo_bases,\n\n )?;\n\n }\n\n\n\n Ok(gotten_coin)\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 75, "score": 160400.5792273472 }, { "content": "fn decode_and_encode_for_shop<F>(board: &[u8], f: F) -> Vec<u8>\n\nwhere\n\n F: Fn(&mut mtc::shop::common::ShopBoard, &mut codec_types::mtc::shop::BoardLogs) -> Result<u8>,\n\n{\n\n let mut shop_board =\n\n mtc::shop::common::ShopBoard::from_board(mtc::decoders::decode_board(board));\n\n let mut logs = codec_types::mtc::shop::BoardLogs::new();\n\n\n\n let coin = f(&mut shop_board, &mut logs).unwrap();\n\n\n\n (shop_board.into_board(), logs, coin).encode()\n\n}\n", "file_path": "front/wasm/src/lib.rs", "rank": 76, "score": 158607.241748583 }, { "content": "fn add_emo_with_board_emo(\n\n board: &mut ShopBoard,\n\n logs: &mut mtc::shop::BoardLogs,\n\n new_board_emo: ShopBoardEmo,\n\n emo_index: u8,\n\n emo_bases: &emo::Bases,\n\n) -> Result<u8> {\n\n let len = board.count_emos();\n\n\n\n ensure!(len < BOARD_EMO_MAX_COUNT, \"board max capacity\");\n\n ensure!(len >= emo_index, \"Invalid emo_index\");\n\n\n\n let mut gotten_coin = 0;\n\n\n\n let is_new_emo_triple = new_board_emo.attributes.is_triple;\n\n let new_emo_id = new_board_emo.id;\n\n\n\n logs.add(&|| mtc::shop::BoardLog::Add {\n\n index: emo_index,\n\n board_emo: new_board_emo.clone_as_board_emo(),\n", 
"file_path": "common/rs/src/mtc/shop/board.rs", "rank": 77, "score": 158340.05689973844 }, { "content": "pub fn call_pre_abilities(\n\n boards: &mut BattleBoards,\n\n first_attack_player_index: u8,\n\n rng: &mut Pcg64Mcg,\n\n logs: &mut mtc::battle::Logs,\n\n emo_bases: &emo::Bases,\n\n) -> Result<()> {\n\n let first_defense_player_index = switch_player_index(first_attack_player_index);\n\n\n\n for &player_index in [first_attack_player_index, first_defense_player_index].iter() {\n\n for (emo_index, ability) in boards.get_board_abilities(player_index)?.into_iter() {\n\n if let emo::ability::battle::Battle::General(\n\n emo::ability::battle::General::AsOneself {\n\n trigger: emo::ability::battle::GeneralAsOneselfTrigger::Pre,\n\n action,\n\n },\n\n ) = ability\n\n {\n\n call_ability_normal_action_as_oneself(\n\n player_index,\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 78, "score": 157711.05147618343 }, { "content": "pub fn decrease_upgrade_coin(upgrade_coin: Option<u8>) -> Option<u8> {\n\n match upgrade_coin {\n\n Some(c) => {\n\n if c > 0 {\n\n Some(c - 1)\n\n } else {\n\n Some(0)\n\n }\n\n }\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/coin.rs", "rank": 79, "score": 156666.96509263167 }, { "content": "pub fn is_matched_typ_and_triple_for_emo(\n\n typ_and_triple: &emo::ability::TypOptAndIsTripleOpt,\n\n emo: &BattleEmo,\n\n) -> bool {\n\n is_matched_typ_and_triple(typ_and_triple, &emo.typ, emo.attributes.is_triple)\n\n}\n", "file_path": "common/rs/src/mtc/battle/common.rs", "rank": 80, "score": 155195.16320836075 }, { "content": "pub fn build_emo_attributes(base: &emo::Base, is_triple: bool) -> emo::Attributes {\n\n emo::Attributes {\n\n attack: if is_triple {\n\n base.attack.saturating_mul(2)\n\n } else {\n\n base.attack\n\n },\n\n health: if is_triple {\n\n base.health.saturating_mul(2)\n\n } else {\n\n base.health\n\n },\n\n abilities: base.abilities.clone(),\n\n is_triple,\n\n }\n\n}\n\n\n\nimpl emo::Bases {\n\n pub fn new() -> Self {\n\n Self(Default::default())\n", "file_path": "common/rs/src/mtc/utils.rs", "rank": 81, "score": 154377.99785340496 }, { "content": "pub fn production_config() -> Result<ChainSpec, String> {\n\n ChainSpec::from_json_bytes(&include_bytes!(\"../specs/production-raw.json\")[..])\n\n}\n\n\n", "file_path": "chain/node/src/chain_spec.rs", "rank": 82, "score": 154374.94449155813 }, { "content": "pub fn staging_config() -> Result<ChainSpec, String> {\n\n ChainSpec::from_json_bytes(&include_bytes!(\"../specs/staging-raw.json\")[..])\n\n}\n\n\n", "file_path": "chain/node/src/chain_spec.rs", "rank": 83, "score": 154374.94449155813 }, { "content": "pub fn development_config() -> Result<ChainSpec, String> {\n\n let wasm_binary =\n\n WASM_BINARY.ok_or_else(|| \"Development wasm binary not available\".to_string())?;\n\n\n\n Ok(ChainSpec::from_genesis(\n\n // Name\n\n \"Development\",\n\n // ID\n\n \"dev\",\n\n ChainType::Development,\n\n move || {\n\n testnet_genesis(\n\n wasm_binary,\n\n // Initial PoA authorities\n\n vec![authority_keys_from_seed(\"Alice\")],\n\n // Sudo account\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n // Pre-funded accounts\n\n vec![],\n\n true,\n", "file_path": "chain/node/src/chain_spec.rs", "rank": 84, "score": 154374.94449155813 }, { "content": "pub fn decode_vec_u16(v: &[u8]) -> Vec<u16> {\n\n let mut v: &[u8] = v;\n\n Vec::decode(&mut v).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 85, "score": 149230.8439660467 }, { "content": "fn 
build_triple_emo_nums(base: &emo::Base, source_board_emos: &[ShopBoardEmo]) -> (u16, u16) {\n\n let attack_diff = source_board_emos\n\n .iter()\n\n .map(|eb| eb.attributes.attack.saturating_sub(base.attack))\n\n .sum::<u16>();\n\n let health_diff = source_board_emos\n\n .iter()\n\n .map(|eb| eb.attributes.health.saturating_sub(base.health))\n\n .sum::<u16>();\n\n\n\n let attack = base.attack.saturating_mul(2).saturating_add(attack_diff);\n\n let health = base.health.saturating_mul(2).saturating_add(health_diff);\n\n\n\n (attack, health)\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 86, "score": 148859.13914600923 }, { "content": "/// Generate an account ID from seed.\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n AccountPublic: From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "chain/node/src/chain_spec.rs", "rank": 87, "score": 146317.17540615355 }, { "content": "fn is_matched_typ_and_triple_board_emo(\n\n typ_and_triple: &emo::ability::TypOptAndIsTripleOpt,\n\n board_emo: &ShopBoardEmo,\n\n emo_bases: &emo::Bases,\n\n) -> Result<bool> {\n\n Ok(if typ_and_triple.typ_opt.is_none() {\n\n is_matched_triple(typ_and_triple.is_triple_opt, board_emo.attributes.is_triple)\n\n } else {\n\n is_matched_typ_and_triple(\n\n typ_and_triple,\n\n &emo_bases.find(board_emo.base_id)?.typ,\n\n board_emo.attributes.is_triple,\n\n )\n\n })\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 88, "score": 145371.08714435546 }, { "content": "pub fn decode_catalog(catalog: &[u8]) -> mtc::shop::Catalog {\n\n let mut catalog: &[u8] = catalog;\n\n mtc::shop::Catalog::decode(&mut catalog).unwrap()\n\n}\n\n\n", "file_path": "common/rs/src/mtc/decoders.rs", "rank": 89, "score": 142897.7186911432 }, { "content": "// return if survived\n\nfn damage_emo(\n\n damage: u16,\n\n player_index: u8,\n\n emo_index: u8,\n\n boards: &mut BattleBoards,\n\n emo_bases: &emo::Bases,\n\n rng: &mut Pcg64Mcg,\n\n logs: &mut mtc::battle::Logs,\n\n) -> Result<bool> {\n\n let emo_shielded =\n\n remove_shield_if_exist(boards, player_index, emo_index, logs, emo_bases, rng)?;\n\n\n\n if emo_shielded || damage == 0 {\n\n return Ok(true);\n\n }\n\n\n\n let emo = boards.get_emo_mut(player_index, emo_index)?;\n\n\n\n let health = emo.attributes.health.saturating_sub(damage);\n\n\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 90, "score": 142699.45710751077 }, { "content": "fn retire_emo(\n\n player_index: u8,\n\n emo_index: u8,\n\n boards: &mut BattleBoards,\n\n emo_bases: &emo::Bases,\n\n rng: &mut Pcg64Mcg,\n\n logs: &mut mtc::battle::Logs,\n\n) -> Result<()> {\n\n let removed_emo = boards\n\n .get_board_mut(player_index)?\n\n .remove(emo_index as usize);\n\n\n\n logs.add(&|| mtc::battle::Log::Remove {\n\n player_index,\n\n emo_index,\n\n });\n\n\n\n call_emo_retire_player_abilities(\n\n player_index,\n\n emo_index,\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 91, "score": 142695.13733532288 }, { "content": "fn add_emo(\n\n player_index: u8,\n\n emo_index: u8,\n\n boards: &mut BattleBoards,\n\n emo: BattleEmo,\n\n emo_bases: &emo::Bases,\n\n rng: &mut Pcg64Mcg,\n\n logs: &mut mtc::battle::Logs,\n\n) -> Result<()> {\n\n let len = boards.count_board_emos(player_index)?;\n\n\n\n if len < BOARD_EMO_MAX_COUNT {\n\n ensure!(len >= emo_index, \"invalid emo_index\");\n\n\n\n logs.add(&|| mtc::battle::Log::Add {\n\n player_index,\n\n emo_index,\n\n 
base_id: emo.base_id,\n\n attributes: emo.attributes.clone(),\n\n });\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 92, "score": 142695.13733532288 }, { "content": "fn set_emo(\n\n board: &mut ShopBoard,\n\n gotten_coin: &mut u8,\n\n logs: &mut mtc::shop::BoardLogs,\n\n action_emo_index: u8,\n\n is_triple_action: bool,\n\n is_action_emo_removed: bool,\n\n emo_bases: &emo::Bases,\n\n base_id: u16,\n\n) -> Result<()> {\n\n if board.count_emos() < BOARD_EMO_MAX_COUNT {\n\n let index = if is_action_emo_removed {\n\n action_emo_index\n\n } else {\n\n action_emo_index + 1\n\n };\n\n *gotten_coin = gotten_coin.saturating_add(add_emo(\n\n board,\n\n logs,\n\n &[],\n\n base_id,\n\n is_triple_action,\n\n index,\n\n emo_bases,\n\n )?);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 93, "score": 142695.13733532288 }, { "content": "fn set_emo(\n\n player_index: u8,\n\n emo_index: u8,\n\n base_id: u16,\n\n is_triple: bool,\n\n attack_and_survived_count: u8,\n\n boards: &mut BattleBoards,\n\n logs: &mut mtc::battle::Logs,\n\n emo_bases: &emo::Bases,\n\n rng: &mut Pcg64Mcg,\n\n side: emo::ability::Side,\n\n) -> Result<()> {\n\n let emo_base = emo_bases.find(base_id)?;\n\n let battle_emo = BattleEmo::new_with_base(emo_base, is_triple);\n\n\n\n match side {\n\n emo::ability::Side::Ally => {\n\n set_emo_ally(\n\n player_index,\n\n emo_index,\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 94, "score": 142695.13733532288 }, { "content": "fn damage_all_emos(\n\n boards: &mut BattleBoards,\n\n player_index: u8,\n\n damage: u16,\n\n emo_bases: &emo::Bases,\n\n logs: &mut mtc::battle::Logs,\n\n rng: &mut Pcg64Mcg,\n\n) -> Result<()> {\n\n for emo_id in boards\n\n .get_board(player_index)?\n\n .iter()\n\n .map(|e| e.id)\n\n .collect::<Vec<_>>()\n\n .into_iter()\n\n {\n\n let emo_index = if let Some(index) = boards.find_emo_index_by_id(player_index, emo_id)? 
{\n\n index\n\n } else {\n\n continue;\n\n };\n", "file_path": "common/rs/src/mtc/battle/board.rs", "rank": 95, "score": 142695.13733532288 }, { "content": "pub fn is_matched_typ(required_typ: &Option<emo::Typ>, test_typ: &emo::Typ) -> bool {\n\n matches!((required_typ.as_ref(), test_typ), (Some(typ), _) | (None, typ) if typ == test_typ)\n\n}\n\n\n", "file_path": "common/rs/src/mtc/utils.rs", "rank": 96, "score": 142150.07712841232 }, { "content": "fn increase_stats_by_grade(\n\n board: &mut ShopBoard,\n\n logs: &mut mtc::shop::BoardLogs,\n\n action_emo_index: u8,\n\n is_triple_action: bool,\n\n is_action_emo_removed: bool,\n\n emo_bases: &emo::Bases,\n\n base_id: u16,\n\n target: emo::ability::Target,\n\n attack: u16,\n\n health: u16,\n\n) -> Result<()> {\n\n let (mut attack, mut health) = double_attack_and_health_if(is_triple_action, attack, health);\n\n let grade: u16 = emo_bases.find(base_id)?.grade.into();\n\n attack = attack.saturating_mul(grade);\n\n health = health.saturating_mul(grade);\n\n\n\n add_attack_and_health_to_emos(\n\n board,\n\n logs,\n\n action_emo_index,\n\n is_action_emo_removed,\n\n target,\n\n attack,\n\n health,\n\n emo_bases,\n\n )\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 97, "score": 141067.37905052883 }, { "content": "fn get_right_emo(\n\n board: &mut ShopBoard,\n\n origin_index: u8,\n\n is_removed: bool,\n\n) -> Option<(&mut ShopBoardEmo, u8)> {\n\n let target_index = if is_removed {\n\n origin_index\n\n } else {\n\n origin_index + 1\n\n } as usize;\n\n board\n\n .0\n\n .get_mut(target_index)\n\n .map(|e| (e, target_index as u8))\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 98, "score": 139895.2224309603 }, { "content": "fn get_left_emo(\n\n board: &mut ShopBoard,\n\n origin_index: u8,\n\n) -> Result<Option<(&mut ShopBoardEmo, u8)>> {\n\n Ok(if let Some(target_index) = origin_index.checked_sub(1) {\n\n let board_emo = board.get_emo_mut(target_index)?;\n\n Some((board_emo, target_index))\n\n } else {\n\n None\n\n })\n\n}\n\n\n", "file_path": "common/rs/src/mtc/shop/board.rs", "rank": 99, "score": 139895.2224309603 } ]