hexsha
stringlengths
40
40
size
int64
2
1.05M
content
stringlengths
2
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
1c5f8996e1b454ecf419ab27f9f315c67a26470e
136,862
// ignore-tidy-filelength //! This crate is responsible for the part of name resolution that doesn't require type checker. //! //! Module structure of the crate is built here. //! Paths in macros, imports, expressions, types, patterns are resolved here. //! Label and lifetime names are resolved here as well. //! //! Type-relative name resolution (methods, fields, associated items) happens in `librustc_typeck`. #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![feature(box_patterns)] #![feature(bool_to_option)] #![feature(control_flow_enum)] #![feature(crate_visibility_modifier)] #![feature(format_args_capture)] #![feature(iter_zip)] #![feature(nll)] #![cfg_attr(bootstrap, feature(or_patterns))] #![recursion_limit = "256"] #![allow(rustdoc::private_intra_doc_links)] pub use rustc_hir::def::{Namespace, PerNS}; use Determinacy::*; use rustc_arena::{DroplessArena, TypedArena}; use rustc_ast::node_id::NodeMap; use rustc_ast::ptr::P; use rustc_ast::visit::{self, Visitor}; use rustc_ast::{self as ast, NodeId}; use rustc_ast::{Crate, CRATE_NODE_ID}; use rustc_ast::{Expr, ExprKind, LitKind}; use rustc_ast::{ItemKind, ModKind, Path}; use rustc_ast_lowering::ResolverAstLowering; use rustc_ast_pretty::pprust; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap}; use rustc_data_structures::ptr_key::PtrKey; use rustc_data_structures::sync::Lrc; use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder}; use rustc_expand::base::{DeriveResolutions, SyntaxExtension, SyntaxExtensionKind}; use rustc_hir::def::Namespace::*; use rustc_hir::def::{self, CtorOf, DefKind, NonMacroAttrKind, PartialRes}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, CRATE_DEF_INDEX}; use rustc_hir::definitions::{DefKey, DefPathData, Definitions}; use rustc_hir::TraitCandidate; use rustc_index::vec::IndexVec; use rustc_metadata::creader::{CStore, CrateLoader}; use rustc_middle::hir::exports::ExportMap; use 
rustc_middle::middle::cstore::{CrateStore, MetadataLoaderDyn};
use rustc_middle::span_bug;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, DefIdTree, ResolverOutputs};
use rustc_session::lint;
use rustc_session::lint::{BuiltinLintDiagnostics, LintBuffer};
use rustc_session::Session;
use rustc_span::edition::Edition;
use rustc_span::hygiene::{ExpnId, ExpnKind, MacroKind, SyntaxContext, Transparency};
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};

use std::cell::{Cell, RefCell};
use std::collections::BTreeSet;
use std::ops::ControlFlow;
use std::{cmp, fmt, iter, ptr};
use tracing::debug;

use diagnostics::{extend_span_to_previous_binding, find_span_of_binding_until_next_binding};
use diagnostics::{ImportSuggestion, LabelSuggestion, Suggestion};
use imports::{Import, ImportKind, ImportResolver, NameResolution};
use late::{ConstantItemKind, HasGenericParams, PathSource, Rib, RibKind::*};
use macros::{MacroRulesBinding, MacroRulesScope, MacroRulesScopeRef};

// Resolutions in this crate are keyed by AST `NodeId`s (pre-lowering), not HIR ids.
type Res = def::Res<NodeId>;

mod build_reduced_graph;
mod check_unused;
mod def_collector;
mod diagnostics;
mod imports;
mod late;
mod macros;

enum Weak {
    Yes,
    No,
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Determinacy {
    Determined,
    Undetermined,
}

impl Determinacy {
    fn determined(determined: bool) -> Determinacy {
        if determined { Determinacy::Determined } else { Determinacy::Undetermined }
    }
}

/// A specific scope in which a name can be looked up.
/// This enum is currently used only for early resolution (imports and macros),
/// but not for late resolution yet.
#[derive(Clone, Copy)]
enum Scope<'a> {
    DeriveHelpers(ExpnId),
    DeriveHelpersCompat,
    MacroRules(MacroRulesScopeRef<'a>),
    CrateRoot,
    // The node ID is for reporting the `PROC_MACRO_DERIVE_RESOLUTION_FALLBACK`
    // lint if it should be reported.
    Module(Module<'a>, Option<NodeId>),
    RegisteredAttrs,
    MacroUsePrelude,
    BuiltinAttrs,
    ExternPrelude,
    ToolPrelude,
    StdLibPrelude,
    BuiltinTypes,
}

/// Names from different contexts may want to visit different subsets of all specific scopes
/// with different restrictions when looking up the resolution.
/// This enum is currently used only for early resolution (imports and macros),
/// but not for late resolution yet.
#[derive(Clone, Copy)]
enum ScopeSet<'a> {
    /// All scopes with the given namespace.
    All(Namespace, /*is_import*/ bool),
    /// Crate root, then extern prelude (used for mixed 2015-2018 mode in macros).
    AbsolutePath(Namespace),
    /// All scopes with macro namespace and the given macro kind restriction.
    Macro(MacroKind),
    /// All scopes with the given namespace, used for partially performing late resolution.
    /// The node id enables lints and is used for reporting them.
    Late(Namespace, Module<'a>, Option<NodeId>),
}

/// Everything you need to know about a name's location to resolve it.
/// Serves as a starting point for the scope visitor.
/// This struct is currently used only for early resolution (imports and macros),
/// but not for late resolution yet.
#[derive(Clone, Copy, Debug)]
pub struct ParentScope<'a> {
    module: Module<'a>,
    expansion: ExpnId,
    macro_rules: MacroRulesScopeRef<'a>,
    derives: &'a [ast::Path],
}

impl<'a> ParentScope<'a> {
    /// Creates a parent scope with the passed argument used as the module scope component,
    /// and other scope components set to default empty values.
    pub fn module(module: Module<'a>, resolver: &Resolver<'a>) -> ParentScope<'a> {
        ParentScope {
            module,
            expansion: ExpnId::root(),
            macro_rules: resolver.arenas.alloc_macro_rules_scope(MacroRulesScope::Empty),
            derives: &[],
        }
    }
}

#[derive(Copy, Debug, Clone)]
enum ImplTraitContext {
    Existential,
    Universal(LocalDefId),
}

// NOTE: ordering and equality are based on `name` only; the span sets do not
// participate, so two errors for the same name compare equal.
#[derive(Eq)]
struct BindingError {
    name: Symbol,
    origin: BTreeSet<Span>,
    target: BTreeSet<Span>,
    could_be_path: bool,
}

impl PartialOrd for BindingError {
    fn partial_cmp(&self, other: &BindingError) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl PartialEq for BindingError {
    fn eq(&self, other: &BindingError) -> bool {
        self.name == other.name
    }
}

impl Ord for BindingError {
    fn cmp(&self, other: &BindingError) -> cmp::Ordering {
        self.name.cmp(&other.name)
    }
}

enum ResolutionError<'a> {
    /// Error E0401: can't use type or const parameters from outer function.
    GenericParamsFromOuterFunction(Res, HasGenericParams),
    /// Error E0403: the name is already used for a type or const parameter in this generic
    /// parameter list.
    NameAlreadyUsedInParameterList(Symbol, Span),
    /// Error E0407: method is not a member of trait.
    MethodNotMemberOfTrait(Symbol, &'a str),
    /// Error E0437: type is not a member of trait.
    TypeNotMemberOfTrait(Symbol, &'a str),
    /// Error E0438: const is not a member of trait.
    ConstNotMemberOfTrait(Symbol, &'a str),
    /// Error E0408: variable `{}` is not bound in all patterns.
    VariableNotBoundInPattern(&'a BindingError),
    /// Error E0409: variable `{}` is bound in inconsistent ways within the same match arm.
    VariableBoundWithDifferentMode(Symbol, Span),
    /// Error E0415: identifier is bound more than once in this parameter list.
    IdentifierBoundMoreThanOnceInParameterList(Symbol),
    /// Error E0416: identifier is bound more than once in the same pattern.
    IdentifierBoundMoreThanOnceInSamePattern(Symbol),
    /// Error E0426: use of undeclared label.
    UndeclaredLabel { name: Symbol, suggestion: Option<LabelSuggestion> },
    /// Error E0429: `self` imports are only allowed within a `{ }` list.
    SelfImportsOnlyAllowedWithin { root: bool, span_with_rename: Span },
    /// Error E0430: `self` import can only appear once in the list.
    SelfImportCanOnlyAppearOnceInTheList,
    /// Error E0431: `self` import can only appear in an import list with a non-empty prefix.
    SelfImportOnlyInImportListWithNonEmptyPrefix,
    /// Error E0433: failed to resolve.
    FailedToResolve { label: String, suggestion: Option<Suggestion> },
    /// Error E0434: can't capture dynamic environment in a fn item.
    CannotCaptureDynamicEnvironmentInFnItem,
    /// Error E0435: attempt to use a non-constant value in a constant.
    AttemptToUseNonConstantValueInConstant(
        Ident,
        /* suggestion */ &'static str,
        /* current */ &'static str,
    ),
    /// Error E0530: `X` bindings cannot shadow `Y`s.
    BindingShadowsSomethingUnacceptable(&'static str, Symbol, &'a NameBinding<'a>),
    /// Error E0128: generic parameters with a default cannot use forward-declared identifiers.
    ForwardDeclaredTyParam, // FIXME(const_generics_defaults)
    /// ERROR E0770: the type of const parameters must not depend on other generic parameters.
    ParamInTyOfConstParam(Symbol),
    /// constant values inside of type parameter defaults must not depend on generic parameters.
    ParamInAnonConstInTyDefault(Symbol),
    /// generic parameters must not be used inside const evaluations.
    ///
    /// This error is only emitted when using `min_const_generics`.
    ParamInNonTrivialAnonConst { name: Symbol, is_type: bool },
    /// Error E0735: generic parameters with a default cannot use `Self`
    SelfInTyParamDefault,
    /// Error E0767: use of unreachable label
    UnreachableLabel { name: Symbol, definition_span: Span, suggestion: Option<LabelSuggestion> },
}

enum VisResolutionError<'a> {
    Relative2018(Span, &'a ast::Path),
    AncestorOnly(Span),
    FailedToResolve(Span, String, Option<Suggestion>),
    ExpectedFound(Span, String, Res),
    Indeterminate(Span),
    ModuleOnly(Span),
}

/// A minimal representation of a path segment. We use this in resolve because we synthesize 'path
/// segments' which don't have the rest of an AST or HIR `PathSegment`.
#[derive(Clone, Copy, Debug)]
pub struct Segment {
    ident: Ident,
    id: Option<NodeId>,
    /// Signals whether this `PathSegment` has generic arguments. Used to avoid providing
    /// nonsensical suggestions.
    has_generic_args: bool,
}

impl Segment {
    fn from_path(path: &Path) -> Vec<Segment> {
        path.segments.iter().map(|s| s.into()).collect()
    }

    fn from_ident(ident: Ident) -> Segment {
        Segment { ident, id: None, has_generic_args: false }
    }

    fn names_to_string(segments: &[Segment]) -> String {
        names_to_string(&segments.iter().map(|seg| seg.ident.name).collect::<Vec<_>>())
    }
}

impl<'a> From<&'a ast::PathSegment> for Segment {
    fn from(seg: &'a ast::PathSegment) -> Segment {
        Segment { ident: seg.ident, id: Some(seg.id), has_generic_args: seg.args.is_some() }
    }
}

/// Walks the crate looking for the best place to insert a suggested `use` statement
/// inside the module identified by `target_module`.
struct UsePlacementFinder {
    target_module: NodeId,
    // Best insertion point found so far (shrunk to a zero-width span at its start).
    span: Option<Span>,
    // Whether an existing non-expansion `use` item was found at the target.
    found_use: bool,
}

impl UsePlacementFinder {
    /// Returns `(insertion_span, found_use)` for `target_module` within `krate`.
    fn check(krate: &Crate, target_module: NodeId) -> (Option<Span>, bool) {
        let mut finder = UsePlacementFinder { target_module, span: None, found_use: false };
        if let ControlFlow::Continue(..) = finder.check_mod(&krate.items, CRATE_NODE_ID) {
            visit::walk_crate(&mut finder, krate);
        }
        (finder.span, finder.found_use)
    }

    fn check_mod(&mut self, items: &[P<ast::Item>], node_id: NodeId) -> ControlFlow<()> {
        if self.span.is_some() {
            return ControlFlow::Break(());
        }
        if node_id != self.target_module {
            return ControlFlow::Continue(());
        }
        // find a use statement
        for item in items {
            match item.kind {
                ItemKind::Use(..) => {
                    // don't suggest placing a use before the prelude
                    // import or other generated ones
                    if !item.span.from_expansion() {
                        self.span = Some(item.span.shrink_to_lo());
                        self.found_use = true;
                        return ControlFlow::Break(());
                    }
                }
                // don't place use before extern crate
                ItemKind::ExternCrate(_) => {}
                // but place them before the first other item
                _ => {
                    if self.span.map_or(true, |span| item.span < span)
                        && !item.span.from_expansion()
                    {
                        // don't insert between attributes and an item
                        if item.attrs.is_empty() {
                            self.span = Some(item.span.shrink_to_lo());
                        } else {
                            // find the first attribute on the item
                            for attr in &item.attrs {
                                if self.span.map_or(true, |span| attr.span < span) {
                                    self.span = Some(attr.span.shrink_to_lo());
                                }
                            }
                        }
                    }
                }
            }
        }
        ControlFlow::Continue(())
    }
}

impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
    fn visit_item(&mut self, item: &'tcx ast::Item) {
        if let ItemKind::Mod(_, ModKind::Loaded(items, ..)) = &item.kind {
            if let ControlFlow::Break(..) = self.check_mod(items, item.id) {
                return;
            }
        }
        visit::walk_item(self, item);
    }
}

/// An intermediate resolution result.
///
/// This refers to the thing referred by a name. The difference between `Res` and `Item` is that
/// items are visible in their whole block, while `Res`es only from the place they are defined
/// forward.
#[derive(Debug)]
enum LexicalScopeBinding<'a> {
    Item(&'a NameBinding<'a>),
    Res(Res),
}

impl<'a> LexicalScopeBinding<'a> {
    fn res(self) -> Res {
        match self {
            LexicalScopeBinding::Item(binding) => binding.res(),
            LexicalScopeBinding::Res(res) => res,
        }
    }
}

#[derive(Copy, Clone, Debug)]
enum ModuleOrUniformRoot<'a> {
    /// Regular module.
    Module(Module<'a>),
    /// Virtual module that denotes resolution in crate root with fallback to extern prelude.
    CrateRootAndExternPrelude,
    /// Virtual module that denotes resolution in extern prelude.
    /// Used for paths starting with `::` on 2018 edition.
    ExternPrelude,
    /// Virtual module that denotes resolution in current scope.
    /// Used only for resolving single-segment imports. The reason it exists is that import paths
    /// are always split into two parts, the first of which should be some kind of module.
    CurrentScope,
}

impl ModuleOrUniformRoot<'_> {
    /// True when both sides denote the same module (compared by `DefId`) or the
    /// same virtual-root variant.
    fn same_def(lhs: Self, rhs: Self) -> bool {
        match (lhs, rhs) {
            (ModuleOrUniformRoot::Module(lhs), ModuleOrUniformRoot::Module(rhs)) => {
                lhs.def_id() == rhs.def_id()
            }
            (
                ModuleOrUniformRoot::CrateRootAndExternPrelude,
                ModuleOrUniformRoot::CrateRootAndExternPrelude,
            )
            | (ModuleOrUniformRoot::ExternPrelude, ModuleOrUniformRoot::ExternPrelude)
            | (ModuleOrUniformRoot::CurrentScope, ModuleOrUniformRoot::CurrentScope) => true,
            _ => false,
        }
    }
}

#[derive(Clone, Debug)]
enum PathResult<'a> {
    Module(ModuleOrUniformRoot<'a>),
    NonModule(PartialRes),
    Indeterminate,
    Failed {
        span: Span,
        label: String,
        suggestion: Option<Suggestion>,
        is_error_from_last_segment: bool,
    },
}

#[derive(Debug)]
enum ModuleKind {
    /// An anonymous module; e.g., just a block.
    ///
    /// ```
    /// fn main() {
    ///     fn f() {} // (1)
    ///     { // This is an anonymous module
    ///         f(); // This resolves to (2) as we are inside the block.
    ///         fn f() {} // (2)
    ///     }
    ///     f(); // Resolves to (1)
    /// }
    /// ```
    Block(NodeId),
    /// Any module with a name.
    ///
    /// This could be:
    ///
    /// * A normal module – either `mod from_file;` or `mod from_block { }` –
    ///   or the crate root (which is conceptually a top-level module).
    ///   Note that the crate root's [name][Self::name] will be [`kw::Empty`].
    /// * A trait or an enum (it implicitly contains associated types, methods and variant
    ///   constructors).
    Def(DefKind, DefId, Symbol),
}

impl ModuleKind {
    /// Get name of the module.
    pub fn name(&self) -> Option<Symbol> {
        match self {
            ModuleKind::Block(..) => None,
            ModuleKind::Def(.., name) => Some(*name),
        }
    }
}

/// A key that identifies a binding in a given `Module`.
///
/// Multiple bindings in the same module can have the same key (in a valid
/// program) if all but one of them come from glob imports.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct BindingKey {
    /// The identifier for the binding, always the `normalize_to_macros_2_0` version of the
    /// identifier.
    ident: Ident,
    ns: Namespace,
    /// 0 if ident is not `_`, otherwise a value that's unique to the specific
    /// `_` in the expanded AST that introduced this binding.
    disambiguator: u32,
}

type Resolutions<'a> = RefCell<FxIndexMap<BindingKey, &'a RefCell<NameResolution<'a>>>>;

/// One node in the tree of modules.
///
/// Note that a "module" in resolve is broader than a `mod` that you declare in Rust code. It may
/// be one of these:
///
/// * `mod`
/// * crate root (aka, top-level anonymous module)
/// * `enum`
/// * `trait`
/// * curly-braced block with statements
///
/// You can use [`ModuleData::kind`] to determine the kind of module this is.
pub struct ModuleData<'a> {
    /// The direct parent module (it may not be a `mod`, however).
    parent: Option<Module<'a>>,
    /// What kind of module this is, because this may not be a `mod`.
    kind: ModuleKind,
    /// The [`DefId`] of the nearest `mod` item ancestor (which may be this module).
    /// This may be the crate root.
    nearest_parent_mod: DefId,
    /// Mapping between names and their (possibly in-progress) resolutions in this module.
    /// Resolutions in modules from other crates are not populated until accessed.
    lazy_resolutions: Resolutions<'a>,
    /// True if this is a module from other crate that needs to be populated on access.
    populate_on_access: Cell<bool>,
    /// Macro invocations that can expand into items in this module.
    unexpanded_invocations: RefCell<FxHashSet<ExpnId>>,
    /// Whether `#[no_implicit_prelude]` is active.
    no_implicit_prelude: bool,
    glob_importers: RefCell<Vec<&'a Import<'a>>>,
    globs: RefCell<Vec<&'a Import<'a>>>,
    /// Used to memoize the traits in this module for faster searches through all traits in scope.
    traits: RefCell<Option<Box<[(Ident, &'a NameBinding<'a>)]>>>,
    /// Span of the module itself. Used for error reporting.
    span: Span,
    expansion: ExpnId,
}

type Module<'a> = &'a ModuleData<'a>;

impl<'a> ModuleData<'a> {
    fn new(
        parent: Option<Module<'a>>,
        kind: ModuleKind,
        nearest_parent_mod: DefId,
        expansion: ExpnId,
        span: Span,
    ) -> Self {
        ModuleData {
            parent,
            kind,
            nearest_parent_mod,
            lazy_resolutions: Default::default(),
            // Modules whose nearest `mod` ancestor is in another crate are filled lazily.
            populate_on_access: Cell::new(!nearest_parent_mod.is_local()),
            unexpanded_invocations: Default::default(),
            no_implicit_prelude: false,
            glob_importers: RefCell::new(Vec::new()),
            globs: RefCell::new(Vec::new()),
            traits: RefCell::new(None),
            span,
            expansion,
        }
    }

    /// Invokes `f` once per (ident, namespace, binding) that already has a
    /// determined binding in this module.
    fn for_each_child<R, F>(&'a self, resolver: &mut R, mut f: F)
    where
        R: AsMut<Resolver<'a>>,
        F: FnMut(&mut R, Ident, Namespace, &'a NameBinding<'a>),
    {
        for (key, name_resolution) in resolver.as_mut().resolutions(self).borrow().iter() {
            if let Some(binding) = name_resolution.borrow().binding {
                f(resolver, key.ident, key.ns, binding);
            }
        }
    }

    /// This modifies `self` in place. The traits will be stored in `self.traits`.
    fn ensure_traits<R>(&'a self, resolver: &mut R)
    where
        R: AsMut<Resolver<'a>>,
    {
        let mut traits = self.traits.borrow_mut();
        if traits.is_none() {
            let mut collected_traits = Vec::new();
            self.for_each_child(resolver, |_, name, ns, binding| {
                if ns != TypeNS {
                    return;
                }
                if let Res::Def(DefKind::Trait | DefKind::TraitAlias, _) = binding.res() {
                    collected_traits.push((name, binding))
                }
            });
            *traits = Some(collected_traits.into_boxed_slice());
        }
    }

    fn res(&self) -> Option<Res> {
        match self.kind {
            ModuleKind::Def(kind, def_id, _) => Some(Res::Def(kind, def_id)),
            _ => None,
        }
    }

    fn def_id(&self) -> Option<DefId> {
        match self.kind {
            ModuleKind::Def(_, def_id, _) => Some(def_id),
            _ => None,
        }
    }

    // `self` resolves to the first module ancestor that `is_normal`.
    fn is_normal(&self) -> bool {
        matches!(self.kind, ModuleKind::Def(DefKind::Mod, _, _))
    }

    fn is_trait(&self) -> bool {
        matches!(self.kind, ModuleKind::Def(DefKind::Trait, _, _))
    }

    /// For enum/trait "modules", items actually live in the parent scope.
    fn nearest_item_scope(&'a self) -> Module<'a> {
        match self.kind {
            ModuleKind::Def(DefKind::Enum | DefKind::Trait, ..) => {
                self.parent.expect("enum or trait module without a parent")
            }
            _ => self,
        }
    }

    /// Walks the `parent` chain of `other` looking for `self` by pointer identity.
    fn is_ancestor_of(&self, mut other: &Self) -> bool {
        while !ptr::eq(self, other) {
            if let Some(parent) = other.parent {
                other = parent;
            } else {
                return false;
            }
        }
        true
    }
}

impl<'a> fmt::Debug for ModuleData<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self.res())
    }
}

/// Records a possibly-private value, type, or module definition.
#[derive(Clone, Debug)]
pub struct NameBinding<'a> {
    kind: NameBindingKind<'a>,
    ambiguity: Option<(&'a NameBinding<'a>, AmbiguityKind)>,
    expansion: ExpnId,
    span: Span,
    vis: ty::Visibility,
}

pub trait ToNameBinding<'a> {
    fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a>;
}

impl<'a> ToNameBinding<'a> for &'a NameBinding<'a> {
    fn to_name_binding(self, _: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
        self
    }
}

#[derive(Clone, Debug)]
enum NameBindingKind<'a> {
    Res(Res, /* is_macro_export */ bool),
    Module(Module<'a>),
    Import { binding: &'a NameBinding<'a>, import: &'a Import<'a>, used: Cell<bool> },
}

impl<'a> NameBindingKind<'a> {
    /// Is this a name binding of an import?
    fn is_import(&self) -> bool {
        matches!(*self, NameBindingKind::Import { .. })
    }
}

struct PrivacyError<'a> {
    ident: Ident,
    binding: &'a NameBinding<'a>,
    dedup_span: Span,
}

struct UseError<'a> {
    err: DiagnosticBuilder<'a>,
    /// Candidates which user could `use` to access the missing type.
    candidates: Vec<ImportSuggestion>,
    /// The `DefId` of the module to place the use-statements in.
    def_id: DefId,
    /// Whether the diagnostic should say "instead" (as in `consider importing ... instead`).
    instead: bool,
    /// Extra free-form suggestion.
    suggestion: Option<(Span, &'static str, String, Applicability)>,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum AmbiguityKind {
    Import,
    BuiltinAttr,
    DeriveHelper,
    MacroRulesVsModularized,
    GlobVsOuter,
    GlobVsGlob,
    GlobVsExpanded,
    MoreExpandedVsOuter,
}

impl AmbiguityKind {
    /// Human-readable description of the ambiguity, used in diagnostics.
    fn descr(self) -> &'static str {
        match self {
            AmbiguityKind::Import => "name vs any other name during import resolution",
            AmbiguityKind::BuiltinAttr => "built-in attribute vs any other name",
            AmbiguityKind::DeriveHelper => "derive helper attribute vs any other name",
            AmbiguityKind::MacroRulesVsModularized => {
                "`macro_rules` vs non-`macro_rules` from other module"
            }
            AmbiguityKind::GlobVsOuter => {
                "glob import vs any other name from outer scope during import/macro resolution"
            }
            AmbiguityKind::GlobVsGlob => "glob import vs glob import in the same module",
            AmbiguityKind::GlobVsExpanded => {
                "glob import vs macro-expanded name in the same \
                 module during import/macro resolution"
            }
            AmbiguityKind::MoreExpandedVsOuter => {
                "macro-expanded name vs less macro-expanded name \
                 from outer scope during import/macro resolution"
            }
        }
    }
}

/// Miscellaneous bits of metadata for better ambiguity error reporting.
#[derive(Clone, Copy, PartialEq)]
enum AmbiguityErrorMisc {
    SuggestCrate,
    SuggestSelf,
    FromPrelude,
    None,
}

struct AmbiguityError<'a> {
    kind: AmbiguityKind,
    ident: Ident,
    b1: &'a NameBinding<'a>,
    b2: &'a NameBinding<'a>,
    misc1: AmbiguityErrorMisc,
    misc2: AmbiguityErrorMisc,
}

impl<'a> NameBinding<'a> {
    fn module(&self) -> Option<Module<'a>> {
        match self.kind {
            NameBindingKind::Module(module) => Some(module),
            // Imports delegate to the binding they point at.
            NameBindingKind::Import { binding, .. } => binding.module(),
            _ => None,
        }
    }

    fn res(&self) -> Res {
        match self.kind {
            NameBindingKind::Res(res, _) => res,
            NameBindingKind::Module(module) => module.res().unwrap(),
            NameBindingKind::Import { binding, .. } => binding.res(),
        }
    }

    fn is_ambiguity(&self) -> bool {
        self.ambiguity.is_some()
            || match self.kind {
                NameBindingKind::Import { binding, .. } => binding.is_ambiguity(),
                _ => false,
            }
    }

    fn is_possibly_imported_variant(&self) -> bool {
        match self.kind {
            NameBindingKind::Import { binding, .. } => binding.is_possibly_imported_variant(),
            NameBindingKind::Res(
                Res::Def(DefKind::Variant | DefKind::Ctor(CtorOf::Variant, ..), _),
                _,
            ) => true,
            NameBindingKind::Res(..) | NameBindingKind::Module(..) => false,
        }
    }

    fn is_extern_crate(&self) -> bool {
        match self.kind {
            NameBindingKind::Import {
                import: &Import { kind: ImportKind::ExternCrate { .. }, .. },
                ..
            } => true,
            NameBindingKind::Module(&ModuleData {
                kind: ModuleKind::Def(DefKind::Mod, def_id, _),
                ..
            }) => def_id.index == CRATE_DEF_INDEX,
            _ => false,
        }
    }

    fn is_import(&self) -> bool {
        matches!(self.kind, NameBindingKind::Import { .. })
    }

    fn is_glob_import(&self) -> bool {
        match self.kind {
            NameBindingKind::Import { import, .. } => import.is_glob(),
            _ => false,
        }
    }

    fn is_importable(&self) -> bool {
        !matches!(
            self.res(),
            Res::Def(DefKind::AssocConst | DefKind::AssocFn | DefKind::AssocTy, _)
        )
    }

    fn is_macro_def(&self) -> bool {
        matches!(self.kind, NameBindingKind::Res(Res::Def(DefKind::Macro(..), _), _))
    }

    fn macro_kind(&self) -> Option<MacroKind> {
        self.res().macro_kind()
    }

    // Suppose that we resolved macro invocation with `invoc_parent_expansion` to binding `binding`
    // at some expansion round `max(invoc, binding)` when they both emerged from macros.
    // Then this function returns `true` if `self` may emerge from a macro *after* that
    // in some later round and screw up our previously found resolution.
    // See more detailed explanation in
    // https://github.com/rust-lang/rust/pull/53778#issuecomment-419224049
    fn may_appear_after(&self, invoc_parent_expansion: ExpnId, binding: &NameBinding<'_>) -> bool {
        // self > max(invoc, binding) => !(self <= invoc || self <= binding)
        // Expansions are partially ordered, so "may appear after" is an inversion of
        // "certainly appears before or simultaneously" and includes unordered cases.
        let self_parent_expansion = self.expansion;
        let other_parent_expansion = binding.expansion;
        let certainly_before_other_or_simultaneously =
            other_parent_expansion.is_descendant_of(self_parent_expansion);
        let certainly_before_invoc_or_simultaneously =
            invoc_parent_expansion.is_descendant_of(self_parent_expansion);
        !(certainly_before_other_or_simultaneously || certainly_before_invoc_or_simultaneously)
    }
}

#[derive(Debug, Default, Clone)]
pub struct ExternPreludeEntry<'a> {
    extern_crate_item: Option<&'a NameBinding<'a>>,
    pub introduced_by_item: bool,
}

/// Used for better errors for E0773
enum BuiltinMacroState {
    NotYetSeen(SyntaxExtensionKind),
    AlreadySeen(Span),
}

struct DeriveData {
    resolutions: DeriveResolutions,
    helper_attrs: Vec<(usize, Ident)>,
    has_derive_copy: bool,
}

/// The main resolver class.
///
/// This is the visitor that walks the whole crate.
pub struct Resolver<'a> {
    session: &'a Session,

    definitions: Definitions,

    graph_root: Module<'a>,

    prelude: Option<Module<'a>>,
    extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'a>>,

    /// N.B., this is used only for better diagnostics, not name resolution itself.
    has_self: FxHashSet<DefId>,

    /// Names of fields of an item `DefId` accessible with dot syntax.
    /// Used for hints during error reporting.
    field_names: FxHashMap<DefId, Vec<Spanned<Symbol>>>,

    /// All imports known to succeed or fail.
    determined_imports: Vec<&'a Import<'a>>,

    /// All non-determined imports.
    indeterminate_imports: Vec<&'a Import<'a>>,

    /// FIXME: Refactor things so that these fields are passed through arguments and not resolver.
    /// We are resolving a last import segment during import validation.
    last_import_segment: bool,
    /// This binding should be ignored during in-module resolution, so that we don't get
    /// "self-confirming" import resolutions during import validation.
    unusable_binding: Option<&'a NameBinding<'a>>,

    /// Resolutions for nodes that have a single resolution.
    partial_res_map: NodeMap<PartialRes>,
    /// Resolutions for import nodes, which have multiple resolutions in different namespaces.
    import_res_map: NodeMap<PerNS<Option<Res>>>,
    /// Resolutions for labels (node IDs of their corresponding blocks or loops).
    label_res_map: NodeMap<NodeId>,

    /// `CrateNum` resolutions of `extern crate` items.
    extern_crate_map: FxHashMap<LocalDefId, CrateNum>,
    export_map: ExportMap<LocalDefId>,
    trait_map: NodeMap<Vec<TraitCandidate>>,

    /// A map from nodes to anonymous modules.
    /// Anonymous modules are pseudo-modules that are implicitly created around items
    /// contained within blocks.
    ///
    /// For example, if we have this:
    ///
    ///  fn f() {
    ///      fn g() {
    ///          ...
    ///      }
    ///  }
    ///
    /// There will be an anonymous module created around `g` with the ID of the
    /// entry block for `f`.
    block_map: NodeMap<Module<'a>>,
    /// A fake module that contains no definition and no prelude. Used so that
    /// some AST passes can generate identifiers that only resolve to local or
    /// language items.
    empty_module: Module<'a>,
    module_map: FxHashMap<LocalDefId, Module<'a>>,
    extern_module_map: FxHashMap<DefId, Module<'a>>,
    binding_parent_modules: FxHashMap<PtrKey<'a, NameBinding<'a>>, Module<'a>>,
    underscore_disambiguator: u32,

    /// Maps glob imports to the names of items actually imported.
    glob_map: FxHashMap<LocalDefId, FxHashSet<Symbol>>,
    /// Visibilities in "lowered" form, for all entities that have them.
    visibilities: FxHashMap<LocalDefId, ty::Visibility>,
    used_imports: FxHashSet<(NodeId, Namespace)>,
    maybe_unused_trait_imports: FxHashSet<LocalDefId>,
    maybe_unused_extern_crates: Vec<(LocalDefId, Span)>,

    /// Privacy errors are delayed until the end in order to deduplicate them.
    privacy_errors: Vec<PrivacyError<'a>>,
    /// Ambiguity errors are delayed for deduplication.
    ambiguity_errors: Vec<AmbiguityError<'a>>,
    /// `use` injections are delayed for better placement and deduplication.
    use_injections: Vec<UseError<'a>>,
    /// Crate-local macro expanded `macro_export` referred to by a module-relative path.
    macro_expanded_macro_export_errors: BTreeSet<(Span, Span)>,

    arenas: &'a ResolverArenas<'a>,
    dummy_binding: &'a NameBinding<'a>,

    crate_loader: CrateLoader<'a>,
    macro_names: FxHashSet<Ident>,
    builtin_macros: FxHashMap<Symbol, BuiltinMacroState>,
    registered_attrs: FxHashSet<Ident>,
    registered_tools: FxHashSet<Ident>,
    macro_use_prelude: FxHashMap<Symbol, &'a NameBinding<'a>>,
    all_macros: FxHashMap<Symbol, Res>,
    macro_map: FxHashMap<DefId, Lrc<SyntaxExtension>>,
    dummy_ext_bang: Lrc<SyntaxExtension>,
    dummy_ext_derive: Lrc<SyntaxExtension>,
    non_macro_attrs: [Lrc<SyntaxExtension>; 2],
    local_macro_def_scopes: FxHashMap<LocalDefId, Module<'a>>,
    ast_transform_scopes: FxHashMap<ExpnId, Module<'a>>,
    unused_macros: FxHashMap<LocalDefId, (NodeId, Span)>,
    proc_macro_stubs: FxHashSet<LocalDefId>,
    /// Traces collected during macro resolution and validated when it's complete.
    single_segment_macro_resolutions:
        Vec<(Ident, MacroKind, ParentScope<'a>, Option<&'a NameBinding<'a>>)>,
    multi_segment_macro_resolutions:
        Vec<(Vec<Segment>, Span, MacroKind, ParentScope<'a>, Option<Res>)>,
    builtin_attrs: Vec<(Ident, ParentScope<'a>)>,
    /// `derive(Copy)` marks items they are applied to so they are treated specially later.
    /// Derive macros cannot modify the item themselves and have to store the markers in the global
    /// context, so they attach the markers to derive container IDs using this resolver table.
    containers_deriving_copy: FxHashSet<ExpnId>,
    /// Parent scopes in which the macros were invoked.
    /// FIXME: `derives` are missing in these parent scopes and need to be taken from elsewhere.
    invocation_parent_scopes: FxHashMap<ExpnId, ParentScope<'a>>,
    /// `macro_rules` scopes *produced* by expanding the macro invocations,
    /// include all the `macro_rules` items and other invocations generated by them.
    output_macro_rules_scopes: FxHashMap<ExpnId, MacroRulesScopeRef<'a>>,
    /// Helper attributes that are in scope for the given expansion.
    helper_attrs: FxHashMap<ExpnId, Vec<Ident>>,
    /// Ready or in-progress results of resolving paths inside the `#[derive(...)]` attribute
    /// with the given `ExpnId`.
    derive_data: FxHashMap<ExpnId, DeriveData>,

    /// Avoid duplicated errors for "name already defined".
    name_already_seen: FxHashMap<Symbol, Span>,

    potentially_unused_imports: Vec<&'a Import<'a>>,

    /// Table for mapping struct IDs into struct constructor IDs,
    /// it's not used during normal resolution, only for better error reporting.
    /// Also includes a list of each field's visibility.
    struct_constructors: DefIdMap<(Res, ty::Visibility, Vec<ty::Visibility>)>,

    /// Features enabled for this crate.
    active_features: FxHashSet<Symbol>,

    lint_buffer: LintBuffer,

    next_node_id: NodeId,

    def_id_to_span: IndexVec<LocalDefId, Span>,

    node_id_to_def_id: FxHashMap<ast::NodeId, LocalDefId>,
    def_id_to_node_id: IndexVec<LocalDefId, ast::NodeId>,

    /// Indices of unnamed struct or variant fields with unresolved attributes.
    placeholder_field_indices: FxHashMap<NodeId, usize>,
    /// When collecting definitions from an AST fragment produced by a macro invocation `ExpnId`
    /// we know what parent node that fragment should be attached to thanks to this table,
    /// and how the `impl Trait` fragments were introduced.
    invocation_parents: FxHashMap<ExpnId, (LocalDefId, ImplTraitContext)>,

    next_disambiguator: FxHashMap<(LocalDefId, DefPathData), u32>,
    /// Some way to know that we are in a *trait* impl in `visit_assoc_item`.
    /// FIXME: Replace with a more general AST map (together with some other fields).
    trait_impl_items: FxHashSet<LocalDefId>,

    legacy_const_generic_args: FxHashMap<DefId, Option<Vec<usize>>>,
}

/// Nothing really interesting here; it just provides memory for the rest of the crate.
#[derive(Default)]
pub struct ResolverArenas<'a> {
    // Backing storage for module data; handed out as `Module<'a>` references.
    modules: TypedArena<ModuleData<'a>>,
    // Every allocated module whose `DefId` is local (or absent entirely).
    local_modules: RefCell<Vec<Module<'a>>>,
    imports: TypedArena<Import<'a>>,
    name_resolutions: TypedArena<RefCell<NameResolution<'a>>>,
    ast_paths: TypedArena<ast::Path>,
    // Arena for allocations that need no `Drop` glue.
    dropless: DroplessArena,
}

impl<'a> ResolverArenas<'a> {
    /// Allocates a module; when the module is local (or has no `DefId` at all)
    /// it is additionally recorded in the `local_modules` list.
    fn alloc_module(&'a self, module: ModuleData<'a>) -> Module<'a> {
        let allocated = self.modules.alloc(module);
        let is_local = match allocated.def_id() {
            Some(def_id) => def_id.is_local(),
            None => true,
        };
        if is_local {
            self.local_modules.borrow_mut().push(allocated);
        }
        allocated
    }

    /// Borrows the list of locally-defined modules allocated so far.
    fn local_modules(&'a self) -> std::cell::Ref<'a, Vec<Module<'a>>> {
        self.local_modules.borrow()
    }

    fn alloc_name_binding(&'a self, name_binding: NameBinding<'a>) -> &'a NameBinding<'a> {
        self.dropless.alloc(name_binding)
    }

    fn alloc_import(&'a self, import: Import<'a>) -> &'a Import<'_> {
        self.imports.alloc(import)
    }

    /// Allocates a fresh, empty `NameResolution`.
    fn alloc_name_resolution(&'a self) -> &'a RefCell<NameResolution<'a>> {
        self.name_resolutions.alloc(Default::default())
    }

    /// Allocates a `macro_rules` scope cell and wraps it in a pointer-identity key.
    fn alloc_macro_rules_scope(&'a self, scope: MacroRulesScope<'a>) -> MacroRulesScopeRef<'a> {
        let cell = self.dropless.alloc(Cell::new(scope));
        PtrKey(cell)
    }

    fn alloc_macro_rules_binding(
        &'a self,
        binding: MacroRulesBinding<'a>,
    ) -> &'a MacroRulesBinding<'a> {
        self.dropless.alloc(binding)
    }

    /// Clones the given paths into the arena and returns the arena-backed slice.
    fn alloc_ast_paths(&'a self, paths: &[ast::Path]) -> &'a [ast::Path] {
        let cloned = paths.iter().cloned();
        self.ast_paths.alloc_from_iter(cloned)
    }

    fn alloc_pattern_spans(&'a self, spans: impl Iterator<Item = Span>) -> &'a [Span] {
        self.dropless.alloc_from_iter(spans)
    }
}

impl<'a> AsMut<Resolver<'a>> for Resolver<'a> {
    fn as_mut(&mut self) -> &mut Resolver<'a> {
        self
    }
}

impl<'a, 'b> DefIdTree for &'a Resolver<'b> {
    /// Looks up the parent of `id` in the local definition table or in extern
    /// crate metadata, keeping the crate part of `id` unchanged.
    fn parent(self, id: DefId) -> Option<DefId> {
        let parent_index = if let Some(local_id) = id.as_local() {
            self.definitions.def_key(local_id).parent
        } else {
            self.cstore().def_key(id).parent
        };
        parent_index.map(|index| DefId { index, ..id })
    }
}

/// This interface is used through the AST→HIR step, to embed full paths into the HIR.
/// After that
/// the resolver is no longer needed as all the relevant information is inline.
impl ResolverAstLowering for Resolver<'_> {
    /// Returns the `DefKey` for `id`, consulting local definitions or extern metadata.
    fn def_key(&mut self, id: DefId) -> DefKey {
        if let Some(id) = id.as_local() {
            self.definitions().def_key(id)
        } else {
            self.cstore().def_key(id)
        }
    }

    // NOTE(review): only queries extern metadata; presumably callers never pass
    // local `def_id`s here — confirm at call sites.
    fn item_generics_num_lifetimes(&self, def_id: DefId, sess: &Session) -> usize {
        self.cstore().item_generics_num_lifetimes(def_id, sess)
    }

    // Forwards to the inherent method of the same name.
    fn legacy_const_generic_args(&mut self, expr: &Expr) -> Option<Vec<usize>> {
        self.legacy_const_generic_args(expr)
    }

    fn get_partial_res(&mut self, id: NodeId) -> Option<PartialRes> {
        self.partial_res_map.get(&id).cloned()
    }

    fn get_import_res(&mut self, id: NodeId) -> PerNS<Option<Res>> {
        self.import_res_map.get(&id).cloned().unwrap_or_default()
    }

    fn get_label_res(&mut self, id: NodeId) -> Option<NodeId> {
        self.label_res_map.get(&id).cloned()
    }

    fn definitions(&mut self) -> &mut Definitions {
        &mut self.definitions
    }

    fn lint_buffer(&mut self) -> &mut LintBuffer {
        &mut self.lint_buffer
    }

    fn next_node_id(&mut self) -> NodeId {
        self.next_node_id()
    }

    fn trait_map(&self) -> &NodeMap<Vec<TraitCandidate>> {
        &self.trait_map
    }

    /// Looks up the `LocalDefId` for `node`, if one was created for it.
    fn opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
        self.node_id_to_def_id.get(&node).copied()
    }

    /// Like `opt_local_def_id`, but panics when no entry exists.
    fn local_def_id(&self, node: NodeId) -> LocalDefId {
        self.opt_local_def_id(node).unwrap_or_else(|| panic!("no entry for node id: `{:?}`", node))
    }

    /// Adds a definition with a parent definition.
    fn create_def(
        &mut self,
        parent: LocalDefId,
        node_id: ast::NodeId,
        data: DefPathData,
        expn_id: ExpnId,
        span: Span,
    ) -> LocalDefId {
        // Creating the same definition twice is a bug; surface the earlier def key.
        assert!(
            !self.node_id_to_def_id.contains_key(&node_id),
            "adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}",
            node_id,
            data,
            self.definitions.def_key(self.node_id_to_def_id[&node_id]),
        );

        // Find the next free disambiguator for this key.
        let next_disambiguator = &mut self.next_disambiguator;
        let next_disambiguator = |parent, data| {
            let next_disamb = next_disambiguator.entry((parent, data)).or_insert(0);
            let disambiguator = *next_disamb;
            *next_disamb = next_disamb.checked_add(1).expect("disambiguator overflow");
            disambiguator
        };
        let def_id = self.definitions.create_def(parent, data, expn_id, next_disambiguator);

        // `def_id_to_span` grows in lockstep with the definitions table.
        assert_eq!(self.def_id_to_span.push(span), def_id);

        // Some things for which we allocate `LocalDefId`s don't correspond to
        // anything in the AST, so they don't have a `NodeId`. For these cases
        // we don't need a mapping from `NodeId` to `LocalDefId`.
        if node_id != ast::DUMMY_NODE_ID {
            debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
            self.node_id_to_def_id.insert(node_id, def_id);
        }
        // The reverse mapping is recorded even for dummy node ids.
        assert_eq!(self.def_id_to_node_id.push(node_id), def_id);

        def_id
    }
}

impl<'a> Resolver<'a> {
    /// Builds a fresh `Resolver` for `krate`: sets up the crate-root module,
    /// the extern prelude (from `--extern` flags plus `core`/`std` unless
    /// `#![no_core]`/`#![no_std]`), the root definition tables, and default
    /// values for all the resolution side tables.
    pub fn new(
        session: &'a Session,
        krate: &Crate,
        crate_name: &str,
        metadata_loader: &'a MetadataLoaderDyn,
        arenas: &'a ResolverArenas<'a>,
    ) -> Resolver<'a> {
        let root_local_def_id = LocalDefId { local_def_index: CRATE_DEF_INDEX };
        let root_def_id = root_local_def_id.to_def_id();
        let root_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Empty);
        // The crate root module; `#![no_implicit_prelude]` is honored here.
        let graph_root = arenas.alloc_module(ModuleData {
            no_implicit_prelude: session.contains_name(&krate.attrs, sym::no_implicit_prelude),
            ..ModuleData::new(None, root_module_kind, root_def_id, ExpnId::root(), krate.span)
        });
        let empty_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Empty);
        // A dummy module with no prelude, parented to the crate root.
        let empty_module = arenas.alloc_module(ModuleData {
            no_implicit_prelude: true,
            ..ModuleData::new(
                Some(graph_root),
                empty_module_kind,
                root_def_id,
                ExpnId::root(),
                DUMMY_SP,
            )
        });
        let mut module_map = FxHashMap::default();
        module_map.insert(root_local_def_id, graph_root);

        let definitions = Definitions::new(crate_name, session.local_crate_disambiguator());
        let root = definitions.get_root_def();

        // The crate root is public.
        let mut visibilities = FxHashMap::default();
        visibilities.insert(root_local_def_id, ty::Visibility::Public);

        // Seed the per-def tables with the root definition's entries.
        let mut def_id_to_span = IndexVec::default();
        assert_eq!(def_id_to_span.push(rustc_span::DUMMY_SP), root);
        let mut def_id_to_node_id = IndexVec::default();
        assert_eq!(def_id_to_node_id.push(CRATE_NODE_ID), root);
        let mut node_id_to_def_id = FxHashMap::default();
        node_id_to_def_id.insert(CRATE_NODE_ID, root);

        let mut invocation_parents = FxHashMap::default();
        invocation_parents.insert(ExpnId::root(), (root, ImplTraitContext::Existential));

        // Extern prelude entries come from `--extern` options marked `add_prelude`.
        let mut extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'_>> = session
            .opts
            .externs
            .iter()
            .filter(|(_, entry)| entry.add_prelude)
            .map(|(name, _)| (Ident::from_str(name), Default::default()))
            .collect();

        // `core` (and `std` unless `#![no_std]`) join the extern prelude,
        // unless `#![no_core]` suppresses both.
        if !session.contains_name(&krate.attrs, sym::no_core) {
            extern_prelude.insert(Ident::with_dummy_span(sym::core), Default::default());
            if !session.contains_name(&krate.attrs, sym::no_std) {
                extern_prelude.insert(Ident::with_dummy_span(sym::std), Default::default());
            }
        }

        let (registered_attrs, registered_tools) =
            macros::registered_attrs_and_tools(session, &krate.attrs);

        let features = session.features_untracked();
        let non_macro_attr =
            |mark_used| Lrc::new(SyntaxExtension::non_macro_attr(mark_used, session.edition()));

        let mut resolver = Resolver {
            session,

            definitions,

            // The outermost module has def ID 0; this is not reflected in the
            // AST.
            graph_root,
            prelude: None,
            extern_prelude,

            has_self: FxHashSet::default(),
            field_names: FxHashMap::default(),

            determined_imports: Vec::new(),
            indeterminate_imports: Vec::new(),

            last_import_segment: false,
            unusable_binding: None,

            partial_res_map: Default::default(),
            import_res_map: Default::default(),
            label_res_map: Default::default(),
            extern_crate_map: Default::default(),
            export_map: FxHashMap::default(),
            trait_map: Default::default(),
            underscore_disambiguator: 0,
            empty_module,
            module_map,
            block_map: Default::default(),
            extern_module_map: FxHashMap::default(),
            binding_parent_modules: FxHashMap::default(),
            ast_transform_scopes: FxHashMap::default(),

            glob_map: Default::default(),
            visibilities,
            used_imports: FxHashSet::default(),
            maybe_unused_trait_imports: Default::default(),
            maybe_unused_extern_crates: Vec::new(),

            privacy_errors: Vec::new(),
            ambiguity_errors: Vec::new(),
            use_injections: Vec::new(),
            macro_expanded_macro_export_errors: BTreeSet::new(),

            arenas,
            // A binding carrying `Res::Err`, substituted when resolution fails.
            dummy_binding: arenas.alloc_name_binding(NameBinding {
                kind: NameBindingKind::Res(Res::Err, false),
                ambiguity: None,
                expansion: ExpnId::root(),
                span: DUMMY_SP,
                vis: ty::Visibility::Public,
            }),

            crate_loader: CrateLoader::new(session, metadata_loader, crate_name),
            macro_names: FxHashSet::default(),
            builtin_macros: Default::default(),
            registered_attrs,
            registered_tools,
            macro_use_prelude: FxHashMap::default(),
            all_macros: FxHashMap::default(),
            macro_map: FxHashMap::default(),
            dummy_ext_bang: Lrc::new(SyntaxExtension::dummy_bang(session.edition())),
            dummy_ext_derive: Lrc::new(SyntaxExtension::dummy_derive(session.edition())),
            // Indexed by `mark_used as usize` in `non_macro_attr`.
            non_macro_attrs: [non_macro_attr(false), non_macro_attr(true)],
            invocation_parent_scopes: Default::default(),
            output_macro_rules_scopes: Default::default(),
            helper_attrs: Default::default(),
            derive_data: Default::default(),
            local_macro_def_scopes: FxHashMap::default(),
            name_already_seen: FxHashMap::default(),
            potentially_unused_imports: Vec::new(),
            struct_constructors: Default::default(),
            unused_macros: Default::default(),
            proc_macro_stubs: Default::default(),
            single_segment_macro_resolutions: Default::default(),
            multi_segment_macro_resolutions: Default::default(),
            builtin_attrs: Default::default(),
            containers_deriving_copy: Default::default(),
            // Union of declared lib and lang features enabled for this crate.
            active_features: features
                .declared_lib_features
                .iter()
                .map(|(feat, ..)| *feat)
                .chain(features.declared_lang_features.iter().map(|(feat, ..)| *feat))
                .collect(),
            lint_buffer: LintBuffer::default(),
            next_node_id: NodeId::from_u32(1),
            def_id_to_span,
            node_id_to_def_id,
            def_id_to_node_id,
            placeholder_field_indices: Default::default(),
            invocation_parents,
            next_disambiguator: Default::default(),
            trait_impl_items: Default::default(),
            legacy_const_generic_args: Default::default(),
        };

        // The root expansion resolves names in the crate-root module.
        let root_parent_scope = ParentScope::module(graph_root, &resolver);
        resolver.invocation_parent_scopes.insert(ExpnId::root(), root_parent_scope);

        resolver
    }

    /// Hands out the next fresh `NodeId`, panicking on overflow.
    pub fn next_node_id(&mut self) -> NodeId {
        let next = self
            .next_node_id
            .as_usize()
            .checked_add(1)
            .expect("input too large; ran out of NodeIds");
        self.next_node_id = ast::NodeId::from_usize(next);
        self.next_node_id
    }

    pub fn lint_buffer(&mut self) -> &mut LintBuffer {
        &mut self.lint_buffer
    }

    /// Creates the arenas a `Resolver` borrows from; kept separate so they can
    /// outlive the resolver itself.
    pub fn arenas() -> ResolverArenas<'a> {
        Default::default()
    }

    /// Consumes the resolver, extracting the tables the rest of the compiler needs.
    pub fn into_outputs(self) -> ResolverOutputs {
        let definitions = self.definitions;
        let visibilities = self.visibilities;
        let extern_crate_map = self.extern_crate_map;
        let export_map = self.export_map;
        let maybe_unused_trait_imports = self.maybe_unused_trait_imports;
        let maybe_unused_extern_crates = self.maybe_unused_extern_crates;
        let glob_map = self.glob_map;
        ResolverOutputs {
            definitions,
            cstore: Box::new(self.crate_loader.into_cstore()),
            visibilities,
            extern_crate_map,
            export_map,
            glob_map,
            maybe_unused_trait_imports,
            maybe_unused_extern_crates,
            extern_prelude: self
                .extern_prelude
                .iter()
                .map(|(ident, entry)| (ident.name, entry.introduced_by_item))
                .collect(),
        }
    }

    /// Like `into_outputs`, but clones the tables so the resolver stays usable.
    pub fn clone_outputs(&self) -> ResolverOutputs {
        ResolverOutputs {
            definitions: self.definitions.clone(),
            cstore: Box::new(self.cstore().clone()),
            visibilities: self.visibilities.clone(),
            extern_crate_map: self.extern_crate_map.clone(),
            export_map: self.export_map.clone(),
            glob_map: self.glob_map.clone(),
            maybe_unused_trait_imports: self.maybe_unused_trait_imports.clone(),
            maybe_unused_extern_crates: self.maybe_unused_extern_crates.clone(),
            extern_prelude: self
                .extern_prelude
                .iter()
                .map(|(ident, entry)| (ident.name, entry.introduced_by_item))
                .collect(),
        }
    }

    pub fn cstore(&self) -> &CStore {
        self.crate_loader.cstore()
    }

    // Picks the used/unused flavor of the non-macro-attribute extension.
    fn non_macro_attr(&self, mark_used: bool) -> Lrc<SyntaxExtension> {
        self.non_macro_attrs[mark_used as usize].clone()
    }

    /// Returns a placeholder extension of the given kind, used on resolution errors.
    fn dummy_ext(&self, macro_kind: MacroKind) -> Lrc<SyntaxExtension> {
        match macro_kind {
            MacroKind::Bang => self.dummy_ext_bang.clone(),
            MacroKind::Derive => self.dummy_ext_derive.clone(),
            MacroKind::Attr => self.non_macro_attr(true),
        }
    }

    /// Runs the function on each namespace.
    fn per_ns<F: FnMut(&mut Self, Namespace)>(&mut self, mut f: F) {
        f(self, TypeNS);
        f(self, ValueNS);
        f(self, MacroNS);
    }

    fn is_builtin_macro(&mut self, res: Res) -> bool {
        self.get_macro(res).map_or(false, |ext| ext.builtin_name.is_some())
    }

    /// Walks outward through expansions of `ctxt` until one with a macro
    /// `DefId` is found, and returns that id.
    fn macro_def(&self, mut ctxt: SyntaxContext) -> DefId {
        loop {
            match ctxt.outer_expn_data().macro_def_id {
                Some(def_id) => return def_id,
                None => ctxt.remove_mark(),
            };
        }
    }

    /// Entry point to crate resolution.
    /// Runs the post-expansion resolution passes in order, each under its own
    /// self-profiling timer: import finalization, macro-resolution validation,
    /// late (body) resolution, unused checks, error reporting, and crate-loader
    /// post-processing.
    pub fn resolve_crate(&mut self, krate: &Crate) {
        self.session.time("resolve_crate", || {
            self.session.time("finalize_imports", || ImportResolver { r: self }.finalize_imports());
            self.session.time("finalize_macro_resolutions", || self.finalize_macro_resolutions());
            self.session.time("late_resolve_crate", || self.late_resolve_crate(krate));
            self.session.time("resolve_check_unused", || self.check_unused(krate));
            self.session.time("resolve_report_errors", || self.report_errors(krate));
            self.session.time("resolve_postprocess", || self.crate_loader.postprocess(krate));
        });
    }

    /// Collects trait candidates visible from the given position: the current
    /// trait (if any), then every module scope and the stdlib prelude reached
    /// by `visit_scopes`. Candidates are pre-filtered by `trait_may_have_item`.
    pub fn traits_in_scope(
        &mut self,
        current_trait: Option<Module<'a>>,
        parent_scope: &ParentScope<'a>,
        ctxt: SyntaxContext,
        assoc_item: Option<(Symbol, Namespace)>,
    ) -> Vec<TraitCandidate> {
        let mut found_traits = Vec::new();

        // The enclosing trait (when resolving inside a trait body) is always a candidate.
        if let Some(module) = current_trait {
            if self.trait_may_have_item(Some(module), assoc_item) {
                let def_id = module.def_id().unwrap();
                found_traits.push(TraitCandidate { def_id, import_ids: smallvec![] });
            }
        }

        self.visit_scopes(ScopeSet::All(TypeNS, false), parent_scope, ctxt, |this, scope, _, _| {
            match scope {
                Scope::Module(module, _) => {
                    this.traits_in_module(module, assoc_item, &mut found_traits);
                }
                Scope::StdLibPrelude => {
                    if let Some(module) = this.prelude {
                        this.traits_in_module(module, assoc_item, &mut found_traits);
                    }
                }
                // These scopes cannot contain trait items.
                Scope::ExternPrelude | Scope::ToolPrelude | Scope::BuiltinTypes => {}
                _ => unreachable!(),
            }
            // Returning `None` keeps the scope walk going to the end.
            None::<()>
        });

        found_traits
    }

    /// Appends to `found_traits` every trait in `module` that may provide
    /// `assoc_item`, together with the import chain that brought it in scope.
    fn traits_in_module(
        &mut self,
        module: Module<'a>,
        assoc_item: Option<(Symbol, Namespace)>,
        found_traits: &mut Vec<TraitCandidate>,
    ) {
        module.ensure_traits(self);
        let traits = module.traits.borrow();
        for (trait_name, trait_binding) in traits.as_ref().unwrap().iter() {
            if self.trait_may_have_item(trait_binding.module(), assoc_item) {
                let def_id = trait_binding.res().def_id();
                let import_ids = self.find_transitive_imports(&trait_binding.kind, *trait_name);
                found_traits.push(TraitCandidate { def_id, import_ids });
            }
        }
    }

    // List of
    // traits in scope is pruned on best effort basis. We reject traits not having an
    // associated item with the given name and namespace (if specified). This is a conservative
    // optimization, proper hygienic type-based resolution of associated items is done in typeck.
    // We don't reject trait aliases (`trait_module == None`) because we don't have access to their
    // associated items.
    fn trait_may_have_item(
        &mut self,
        trait_module: Option<Module<'a>>,
        assoc_item: Option<(Symbol, Namespace)>,
    ) -> bool {
        match (trait_module, assoc_item) {
            (Some(trait_module), Some((name, ns))) => {
                // Check the trait's own resolutions for a matching name + namespace.
                self.resolutions(trait_module).borrow().iter().any(|resolution| {
                    let (&BindingKey { ident: assoc_ident, ns: assoc_ns, .. }, _) = resolution;
                    assoc_ns == ns && assoc_ident.name == name
                })
            }
            // No module (trait alias) or no item filter: keep the candidate.
            _ => true,
        }
    }

    /// Follows the chain of `Import` bindings behind `kind`, marking each
    /// import as a maybe-unused trait import and collecting its `LocalDefId`s.
    fn find_transitive_imports(
        &mut self,
        mut kind: &NameBindingKind<'_>,
        trait_name: Ident,
    ) -> SmallVec<[LocalDefId; 1]> {
        let mut import_ids = smallvec![];
        while let NameBindingKind::Import { import, binding, .. } = kind {
            let id = self.local_def_id(import.id);
            self.maybe_unused_trait_imports.insert(id);
            self.add_to_glob_map(&import, trait_name);
            import_ids.push(id);
            kind = &binding.kind;
        }
        import_ids
    }

    /// Allocates a new module with the given parent and kind in the arenas.
    fn new_module(
        &self,
        parent: Module<'a>,
        kind: ModuleKind,
        nearest_parent_mod: DefId,
        expn_id: ExpnId,
        span: Span,
    ) -> Module<'a> {
        let module = ModuleData::new(Some(parent), kind, nearest_parent_mod, expn_id, span);
        self.arenas.alloc_module(module)
    }

    /// Builds a `BindingKey` for `ident` in `ns`. Each `_` binding gets a fresh
    /// disambiguator so multiple underscores don't collide.
    fn new_key(&mut self, ident: Ident, ns: Namespace) -> BindingKey {
        let ident = ident.normalize_to_macros_2_0();
        let disambiguator = if ident.name == kw::Underscore {
            self.underscore_disambiguator += 1;
            self.underscore_disambiguator
        } else {
            0
        };
        BindingKey { ident, ns, disambiguator }
    }

    /// Returns the module's resolutions, building them on first access for
    /// modules loaded from external crates.
    fn resolutions(&mut self, module: Module<'a>) -> &'a Resolutions<'a> {
        if module.populate_on_access.get() {
            module.populate_on_access.set(false);
            self.build_reduced_graph_external(module);
        }
        &module.lazy_resolutions
    }

    /// Returns (allocating on first use) the `NameResolution` slot for `key`
    /// in `module`.
    fn resolution(
        &mut self,
        module: Module<'a>,
        key: BindingKey,
    ) -> &'a RefCell<NameResolution<'a>> {
        *self
            .resolutions(module)
            .borrow_mut()
            .entry(key)
            .or_insert_with(|| self.arenas.alloc_name_resolution())
    }

    /// Records a use of `used_binding` for `ident` in `ns`: reports ambiguity
    /// if the binding is ambiguous, and marks the underlying import chain as
    /// used (recursing through re-exports).
    fn record_use(
        &mut self,
        ident: Ident,
        ns: Namespace,
        used_binding: &'a NameBinding<'a>,
        is_lexical_scope: bool,
    ) {
        if let Some((b2, kind)) = used_binding.ambiguity {
            self.ambiguity_errors.push(AmbiguityError {
                kind,
                ident,
                b1: used_binding,
                b2,
                misc1: AmbiguityErrorMisc::None,
                misc2: AmbiguityErrorMisc::None,
            });
        }
        if let NameBindingKind::Import { import, binding, ref used } = used_binding.kind {
            // Avoid marking `extern crate` items that refer to a name from extern prelude,
            // but not introduce it, as used if they are accessed from lexical scope.
            if is_lexical_scope {
                if let Some(entry) = self.extern_prelude.get(&ident.normalize_to_macros_2_0()) {
                    if let Some(crate_item) = entry.extern_crate_item {
                        if ptr::eq(used_binding, crate_item) && !entry.introduced_by_item {
                            return;
                        }
                    }
                }
            }
            used.set(true);
            import.used.set(true);
            self.used_imports.insert((import.id, ns));
            self.add_to_glob_map(&import, ident);
            // Re-exports: mark the binding this import points at as used too.
            self.record_use(ident, ns, binding, false);
        }
    }

    /// For glob imports, records that `ident` was imported through the glob.
    #[inline]
    fn add_to_glob_map(&mut self, import: &Import<'_>, ident: Ident) {
        if import.is_glob() {
            let def_id = self.local_def_id(import.id);
            self.glob_map.entry(def_id).or_default().insert(ident.name);
        }
    }

    /// A generic scope visitor.
    /// Visits scopes in order to resolve some identifier in them or perform other actions.
    /// If the callback returns `Some` result, we stop visiting scopes and return it.
    fn visit_scopes<T>(
        &mut self,
        scope_set: ScopeSet<'a>,
        parent_scope: &ParentScope<'a>,
        ctxt: SyntaxContext,
        mut visitor: impl FnMut(
            &mut Self,
            Scope<'a>,
            /*use_prelude*/ bool,
            SyntaxContext,
        ) -> Option<T>,
    ) -> Option<T> {
        // General principles:
        // 1. Not controlled (user-defined) names should have higher priority than controlled names
        //    built into the language or standard library. This way we can add new names into the
        //    language or standard library without breaking user code.
        // 2. "Closed set" below means new names cannot appear after the current resolution attempt.
        // Places to search (in order of decreasing priority):
        // (Type NS)
        // 1. FIXME: Ribs (type parameters), there's no necessary infrastructure yet
        //    (open set, not controlled).
        // 2. Names in modules (both normal `mod`ules and blocks), loop through hygienic parents
        //    (open, not controlled).
        // 3. Extern prelude (open, the open part is from macro expansions, not controlled).
        // 4. Tool modules (closed, controlled right now, but not in the future).
        // 5. Standard library prelude (de-facto closed, controlled).
        // 6. Language prelude (closed, controlled).
        // (Value NS)
        // 1. FIXME: Ribs (local variables), there's no necessary infrastructure yet
        //    (open set, not controlled).
        // 2. Names in modules (both normal `mod`ules and blocks), loop through hygienic parents
        //    (open, not controlled).
        // 3. Standard library prelude (de-facto closed, controlled).
        // (Macro NS)
        // 1-3. Derive helpers (open, not controlled). All ambiguities with other names
        //    are currently reported as errors. They should be higher in priority than preludes
        //    and probably even names in modules according to the "general principles" above. They
        //    also should be subject to restricted shadowing because are effectively produced by
        //    derives (you need to resolve the derive first to add helpers into scope), but they
        //    should be available before the derive is expanded for compatibility.
        //    It's mess in general, so we are being conservative for now.
        // 1-3. `macro_rules` (open, not controlled), loop through `macro_rules` scopes. Have higher
        //    priority than prelude macros, but create ambiguities with macros in modules.
        // 1-3. Names in modules (both normal `mod`ules and blocks), loop through hygienic parents
        //    (open, not controlled). Have higher priority than prelude macros, but create
        //    ambiguities with `macro_rules`.
        // 4. `macro_use` prelude (open, the open part is from macro expansions, not controlled).
        // 4a. User-defined prelude from macro-use
        //    (open, the open part is from macro expansions, not controlled).
        // 4b. "Standard library prelude" part implemented through `macro-use` (closed, controlled).
        // 4c. Standard library prelude (de-facto closed, controlled).
        // 6. Language prelude: builtin attributes (closed, controlled).

        let rust_2015 = ctxt.edition() == Edition::Edition2015;
        let (ns, macro_kind, is_absolute_path) = match scope_set {
            ScopeSet::All(ns, _) => (ns, None, false),
            ScopeSet::AbsolutePath(ns) => (ns, None, true),
            ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind), false),
            ScopeSet::Late(ns, ..) => (ns, None, false),
        };
        let module = match scope_set {
            // Start with the specified module.
            ScopeSet::Late(_, module, _) => module,
            // Jump out of trait or enum modules, they do not act as scopes.
            _ => parent_scope.module.nearest_item_scope(),
        };
        // Pick the first scope to visit based on namespace / absolute-path mode.
        let mut scope = match ns {
            _ if is_absolute_path => Scope::CrateRoot,
            TypeNS | ValueNS => Scope::Module(module, None),
            MacroNS => Scope::DeriveHelpers(parent_scope.expansion),
        };
        let mut ctxt = ctxt.normalize_to_macros_2_0();
        let mut use_prelude = !module.no_implicit_prelude;

        loop {
            // First decide whether the current scope should be shown to the visitor.
            let visit = match scope {
                // Derive helpers are not in scope when resolving derives in the same container.
                Scope::DeriveHelpers(expn_id) => {
                    !(expn_id == parent_scope.expansion && macro_kind == Some(MacroKind::Derive))
                }
                Scope::DeriveHelpersCompat => true,
                Scope::MacroRules(macro_rules_scope) => {
                    // Use "path compression" on `macro_rules` scope chains. This is an optimization
                    // used to avoid long scope chains, see the comments on `MacroRulesScopeRef`.
                    // As another consequence of this optimization visitors never observe invocation
                    // scopes for macros that were already expanded.
                    while let MacroRulesScope::Invocation(invoc_id) = macro_rules_scope.get() {
                        if let Some(next_scope) = self.output_macro_rules_scopes.get(&invoc_id) {
                            macro_rules_scope.set(next_scope.get());
                        } else {
                            break;
                        }
                    }
                    true
                }
                Scope::CrateRoot => true,
                Scope::Module(..) => true,
                Scope::RegisteredAttrs => use_prelude,
                Scope::MacroUsePrelude => use_prelude || rust_2015,
                Scope::BuiltinAttrs => true,
                Scope::ExternPrelude => use_prelude || is_absolute_path,
                Scope::ToolPrelude => use_prelude,
                Scope::StdLibPrelude => use_prelude || ns == MacroNS,
                Scope::BuiltinTypes => true,
            };

            if visit {
                if let break_result @ Some(..) = visitor(self, scope, use_prelude, ctxt) {
                    return break_result;
                }
            }

            // Then step to the next scope in priority order (see the comment above).
            scope = match scope {
                Scope::DeriveHelpers(expn_id) if expn_id != ExpnId::root() => {
                    // Derive helpers are not visible to code generated by bang or derive macros.
                    let expn_data = expn_id.expn_data();
                    match expn_data.kind {
                        ExpnKind::Root
                        | ExpnKind::Macro(MacroKind::Bang | MacroKind::Derive, _) => {
                            Scope::DeriveHelpersCompat
                        }
                        _ => Scope::DeriveHelpers(expn_data.parent),
                    }
                }
                Scope::DeriveHelpers(..) => Scope::DeriveHelpersCompat,
                Scope::DeriveHelpersCompat => Scope::MacroRules(parent_scope.macro_rules),
                Scope::MacroRules(macro_rules_scope) => match macro_rules_scope.get() {
                    MacroRulesScope::Binding(binding) => {
                        Scope::MacroRules(binding.parent_macro_rules_scope)
                    }
                    MacroRulesScope::Invocation(invoc_id) => {
                        Scope::MacroRules(self.invocation_parent_scopes[&invoc_id].macro_rules)
                    }
                    MacroRulesScope::Empty => Scope::Module(module, None),
                },
                Scope::CrateRoot => match ns {
                    TypeNS => {
                        ctxt.adjust(ExpnId::root());
                        Scope::ExternPrelude
                    }
                    ValueNS | MacroNS => break,
                },
                Scope::Module(module, prev_lint_id) => {
                    use_prelude = !module.no_implicit_prelude;
                    let derive_fallback_lint_id = match scope_set {
                        ScopeSet::Late(.., lint_id) => lint_id,
                        _ => None,
                    };
                    match self.hygienic_lexical_parent(module, &mut ctxt, derive_fallback_lint_id) {
                        Some((parent_module, lint_id)) => {
                            Scope::Module(parent_module, lint_id.or(prev_lint_id))
                        }
                        None => {
                            // Ran out of module parents; continue with the preludes.
                            ctxt.adjust(ExpnId::root());
                            match ns {
                                TypeNS => Scope::ExternPrelude,
                                ValueNS => Scope::StdLibPrelude,
                                MacroNS => Scope::RegisteredAttrs,
                            }
                        }
                    }
                }
                Scope::RegisteredAttrs => Scope::MacroUsePrelude,
                Scope::MacroUsePrelude => Scope::StdLibPrelude,
                Scope::BuiltinAttrs => break, // nowhere else to search
                Scope::ExternPrelude if is_absolute_path => break,
                Scope::ExternPrelude => Scope::ToolPrelude,
                Scope::ToolPrelude => Scope::StdLibPrelude,
                Scope::StdLibPrelude => match ns {
                    TypeNS => Scope::BuiltinTypes,
                    ValueNS => break, // nowhere else to search
                    MacroNS => Scope::BuiltinAttrs,
                },
                Scope::BuiltinTypes => break, // nowhere else to search
            };
        }

        None
    }

    /// This resolves the identifier `ident` in the namespace `ns` in the current lexical scope.
    /// More specifically, we proceed up the hierarchy of scopes and return the binding for
    /// `ident` in the first scope that defines it (or None if no scopes define it).
    ///
    /// A block's items are above its local variables in the scope hierarchy, regardless of where
    /// the items are defined in the block.
    /// For example,
    /// ```rust
    /// fn f() {
    ///    g(); // Since there are no local variables in scope yet, this resolves to the item.
    ///    let g = || {};
    ///    fn g() {}
    ///    g(); // This resolves to the local variable `g` since it shadows the item.
    /// }
    /// ```
    ///
    /// Invariant: This must only be called during main resolution, not during
    /// import resolution.
    fn resolve_ident_in_lexical_scope(
        &mut self,
        mut ident: Ident,
        ns: Namespace,
        parent_scope: &ParentScope<'a>,
        record_used_id: Option<NodeId>,
        path_span: Span,
        ribs: &[Rib<'a>],
    ) -> Option<LexicalScopeBinding<'a>> {
        assert!(ns == TypeNS || ns == ValueNS);
        let orig_ident = ident;
        if ident.name == kw::Empty {
            return Some(LexicalScopeBinding::Res(Res::Err));
        }
        // Pick hygiene normalization per namespace: values distinguish
        // `macro_rules` hygiene from macros-2.0 hygiene; types do not.
        let (general_span, normalized_span) = if ident.name == kw::SelfUpper {
            // FIXME(jseyfried) improve `Self` hygiene
            let empty_span = ident.span.with_ctxt(SyntaxContext::root());
            (empty_span, empty_span)
        } else if ns == TypeNS {
            let normalized_span = ident.span.normalize_to_macros_2_0();
            (normalized_span, normalized_span)
        } else {
            (ident.span.normalize_to_macro_rules(), ident.span.normalize_to_macros_2_0())
        };
        ident.span = general_span;
        let normalized_ident = Ident { span: normalized_span, ..ident };

        // Walk backwards up the ribs in scope.
        let record_used = record_used_id.is_some();
        let mut module = self.graph_root;
        for i in (0..ribs.len()).rev() {
            debug!("walk rib\n{:?}", ribs[i].bindings);
            // Use the rib kind to determine whether we are resolving parameters
            // (macro 2.0 hygiene) or local variables (`macro_rules` hygiene).
            let rib_ident = if ribs[i].kind.contains_params() { normalized_ident } else { ident };
            if let Some((original_rib_ident_def, res)) = ribs[i].bindings.get_key_value(&rib_ident)
            {
                // The ident resolves to a type parameter or local variable.
                return Some(LexicalScopeBinding::Res(self.validate_res_from_ribs(
                    i,
                    rib_ident,
                    *res,
                    record_used,
                    path_span,
                    *original_rib_ident_def,
                    ribs,
                )));
            }

            module = match ribs[i].kind {
                ModuleRibKind(module) => module,
                MacroDefinition(def) if def == self.macro_def(ident.span.ctxt()) => {
                    // If an invocation of this macro created `ident`, give up on `ident`
                    // and switch to `ident`'s source from the macro definition.
                    ident.span.remove_mark();
                    continue;
                }
                _ => continue,
            };

            match module.kind {
                ModuleKind::Block(..) => {} // We can see through blocks
                _ => break,
            }

            // Try items of the enclosing block module.
            let item = self.resolve_ident_in_module_unadjusted(
                ModuleOrUniformRoot::Module(module),
                ident,
                ns,
                parent_scope,
                record_used,
                path_span,
            );
            if let Ok(binding) = item {
                // The ident resolves to an item.
                return Some(LexicalScopeBinding::Item(binding));
            }
        }
        // Not found in any rib: fall back to the early (scope-visiting) resolution,
        // starting from the last module reached above.
        self.early_resolve_ident_in_lexical_scope(
            orig_ident,
            ScopeSet::Late(ns, module, record_used_id),
            parent_scope,
            record_used,
            record_used,
            path_span,
        )
        .ok()
        .map(LexicalScopeBinding::Item)
    }

    /// Returns the lexical parent of `module` taking hygiene into account:
    /// either a macro-definition scope (when `ctxt` has marks to strip),
    /// the parent of a block module, or — under a deprecation lint — the
    /// parent of a module produced by a proc-macro derive.
    fn hygienic_lexical_parent(
        &mut self,
        module: Module<'a>,
        ctxt: &mut SyntaxContext,
        derive_fallback_lint_id: Option<NodeId>,
    ) -> Option<(Module<'a>, Option<NodeId>)> {
        if !module.expansion.outer_expn_is_descendant_of(*ctxt) {
            return Some((self.macro_def_scope(ctxt.remove_mark()), None));
        }

        if let ModuleKind::Block(..) = module.kind {
            return Some((module.parent.unwrap().nearest_item_scope(), None));
        }

        // We need to support the next case under a deprecation warning
        // ```
        // struct MyStruct;
        // ---- begin: this comes from a proc macro derive
        // mod implementation_details {
        //     // Note that `MyStruct` is not in scope here.
        //     impl SomeTrait for MyStruct { ... }
        // }
        // ---- end
        // ```
        // So we have to fall back to the module's parent during lexical resolution in this case.
        if derive_fallback_lint_id.is_some() {
            if let Some(parent) = module.parent {
                // Inner module is inside the macro, parent module is outside of the macro.
                if module.expansion != parent.expansion
                    && module.expansion.is_descendant_of(parent.expansion)
                {
                    // The macro is a proc macro derive
                    if let Some(def_id) = module.expansion.expn_data().macro_def_id {
                        let ext = self.get_macro_by_def_id(def_id);
                        if ext.builtin_name.is_none()
                            && ext.macro_kind() == MacroKind::Derive
                            && parent.expansion.outer_expn_is_descendant_of(*ctxt)
                        {
                            return Some((parent, derive_fallback_lint_id));
                        }
                    }
                }
            }
        }

        None
    }

    /// Convenience wrapper over `resolve_ident_in_module_ext` that discards
    /// the `Weak` part of the error.
    fn resolve_ident_in_module(
        &mut self,
        module: ModuleOrUniformRoot<'a>,
        ident: Ident,
        ns: Namespace,
        parent_scope: &ParentScope<'a>,
        record_used: bool,
        path_span: Span,
    ) -> Result<&'a NameBinding<'a>, Determinacy> {
        self.resolve_ident_in_module_ext(module, ident, ns, parent_scope, record_used, path_span)
            .map_err(|(determinacy, _)| determinacy)
    }

    /// Resolves `ident` in `module` after adjusting the identifier's hygiene
    /// context (and possibly the parent scope) to the module's expansion.
    fn resolve_ident_in_module_ext(
        &mut self,
        module: ModuleOrUniformRoot<'a>,
        mut ident: Ident,
        ns: Namespace,
        parent_scope: &ParentScope<'a>,
        record_used: bool,
        path_span: Span,
    ) -> Result<&'a NameBinding<'a>, (Determinacy, Weak)> {
        let tmp_parent_scope;
        let mut adjusted_parent_scope = parent_scope;
        match module {
            ModuleOrUniformRoot::Module(m) => {
                // If the span adjustment strips a mark, continue resolving
                // from that macro's definition scope instead.
                if let Some(def) = ident.span.normalize_to_macros_2_0_and_adjust(m.expansion) {
                    tmp_parent_scope =
                        ParentScope { module: self.macro_def_scope(def), ..*parent_scope };
                    adjusted_parent_scope = &tmp_parent_scope;
                }
            }
            ModuleOrUniformRoot::ExternPrelude => {
                ident.span.normalize_to_macros_2_0_and_adjust(ExpnId::root());
            }
            ModuleOrUniformRoot::CrateRootAndExternPrelude | ModuleOrUniformRoot::CurrentScope => {
                // No adjustments
            }
        }
        self.resolve_ident_in_module_unadjusted_ext(
            module,
            ident,
            ns,
            adjusted_parent_scope,
            false,
            record_used,
            path_span,
        )
    }

    /// Resolves the crate root that a `crate`/`$crate`/`::` path segment refers
    /// to. For `$crate` this inspects the hygiene marks of the identifier to
    /// find the defining crate of the originating macro.
    fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
        debug!("resolve_crate_root({:?})", ident);
        let mut ctxt = ident.span.ctxt();
        let mark = if ident.name == kw::DollarCrate {
            // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
            // we don't want to pretend that the `macro_rules!` definition is in the `macro`
            // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
            // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
            // definitions actually produced by `macro` and `macro` definitions produced by
            // `macro_rules!`, but at least such configurations are not stable yet.
            ctxt = ctxt.normalize_to_macro_rules();
            debug!(
                "resolve_crate_root: marks={:?}",
                ctxt.marks().into_iter().map(|(i, t)| (i.expn_data(), t)).collect::<Vec<_>>()
            );
            let mut iter = ctxt.marks().into_iter().rev().peekable();
            let mut result = None;
            // Find the last opaque mark from the end if it exists.
            while let Some(&(mark, transparency)) = iter.peek() {
                if transparency == Transparency::Opaque {
                    result = Some(mark);
                    iter.next();
                } else {
                    break;
                }
            }
            debug!(
                "resolve_crate_root: found opaque mark {:?} {:?}",
                result,
                result.map(|r| r.expn_data())
            );
            // Then find the last semi-transparent mark from the end if it exists.
            for (mark, transparency) in iter {
                if transparency == Transparency::SemiTransparent {
                    result = Some(mark);
                } else {
                    break;
                }
            }
            debug!(
                "resolve_crate_root: found semi-transparent mark {:?} {:?}",
                result,
                result.map(|r| r.expn_data())
            );
            result
        } else {
            debug!("resolve_crate_root: not DollarCrate");
            ctxt = ctxt.normalize_to_macros_2_0();
            ctxt.adjust(ExpnId::root())
        };
        let module = match mark {
            Some(def) => self.macro_def_scope(def),
            None => {
                debug!(
                    "resolve_crate_root({:?}): found no mark (ident.span = {:?})",
                    ident, ident.span
                );
                // No mark means the local crate root.
                return self.graph_root;
            }
        };
        // Jump from the macro's definition scope to that crate's root module.
        let module = self.get_module(DefId { index: CRATE_DEF_INDEX, ..module.nearest_parent_mod });
        debug!(
            "resolve_crate_root({:?}): got module {:?} ({:?}) (ident.span = {:?})",
            ident,
            module,
            module.kind.name(),
            ident.span
        );
        module
    }

    /// Resolves what `self` (in a path) refers to: walks up module parents
    /// (or macro definition scopes) until the module's context matches `ctxt`.
    fn resolve_self(&mut self, ctxt: &mut SyntaxContext, module: Module<'a>) -> Module<'a> {
        let mut module = self.get_module(module.nearest_parent_mod);
        while module.span.ctxt().normalize_to_macros_2_0() != *ctxt {
            let parent = module.parent.unwrap_or_else(|| self.macro_def_scope(ctxt.remove_mark()));
            module = self.get_module(parent.nearest_parent_mod);
        }
        module
    }

    /// Resolves `path` without rib information; see `resolve_path_with_ribs`.
    fn resolve_path(
        &mut self,
        path: &[Segment],
        opt_ns: Option<Namespace>, // `None` indicates a module path in import
        parent_scope: &ParentScope<'a>,
        record_used: bool,
        path_span: Span,
        crate_lint: CrateLint,
    ) -> PathResult<'a> {
        self.resolve_path_with_ribs(
            path,
            opt_ns,
            parent_scope,
            record_used,
            path_span,
            crate_lint,
            None,
        )
    }

    /// Resolves a multi-segment path: each segment except possibly the last is
    /// resolved as a module in the type namespace, handling `super`/`self`/
    /// `crate`/`$crate`/`::` prefixes along the way.
    fn resolve_path_with_ribs(
        &mut self,
        path: &[Segment],
        opt_ns: Option<Namespace>, // `None` indicates a module path in import
        parent_scope: &ParentScope<'a>,
        record_used: bool,
        path_span: Span,
        crate_lint: CrateLint,
        ribs: Option<&PerNS<Vec<Rib<'a>>>>,
    ) -> PathResult<'a> {
        let mut module = None;
        let mut allow_super = true;
        let mut second_binding = None;

        debug!(
            "resolve_path(path={:?}, opt_ns={:?}, record_used={:?}, \
             path_span={:?}, crate_lint={:?})",
            path, opt_ns, record_used, path_span, crate_lint,
        );

        for (i, &Segment { ident, id, has_generic_args: _ }) in path.iter().enumerate() {
            debug!("resolve_path ident {} {:?} {:?}", i, ident, id);
            // Records the segment's resolution once, when recording is enabled.
            let record_segment_res = |this: &mut Self, res| {
                if record_used {
                    if let Some(id) = id {
                        if !this.partial_res_map.contains_key(&id) {
                            assert!(id != ast::DUMMY_NODE_ID, "Trying to resolve dummy id");
                            this.record_partial_res(id, PartialRes::new(res));
                        }
                    }
                }
            };

            let is_last = i == path.len() - 1;
            // Intermediate segments are always resolved in the type namespace.
            let ns = if is_last { opt_ns.unwrap_or(TypeNS) } else { TypeNS };
            let name = ident.name;

            // `super` is only allowed in an unbroken leading run of `self`/`super`.
            allow_super &= ns == TypeNS && (name == kw::SelfLower || name == kw::Super);

            if ns == TypeNS {
                if allow_super && name == kw::Super {
                    let mut ctxt = ident.span.ctxt().normalize_to_macros_2_0();
                    let self_module = match i {
                        0 => Some(self.resolve_self(&mut ctxt, parent_scope.module)),
                        _ => match module {
                            Some(ModuleOrUniformRoot::Module(module)) => Some(module),
                            _ => None,
                        },
                    };
                    if let Some(self_module) = self_module {
                        if let Some(parent) = self_module.parent {
                            module =
Some(ModuleOrUniformRoot::Module( self.resolve_self(&mut ctxt, parent), )); continue; } } let msg = "there are too many leading `super` keywords".to_string(); return PathResult::Failed { span: ident.span, label: msg, suggestion: None, is_error_from_last_segment: false, }; } if i == 0 { if name == kw::SelfLower { let mut ctxt = ident.span.ctxt().normalize_to_macros_2_0(); module = Some(ModuleOrUniformRoot::Module( self.resolve_self(&mut ctxt, parent_scope.module), )); continue; } if name == kw::PathRoot && ident.span.rust_2018() { module = Some(ModuleOrUniformRoot::ExternPrelude); continue; } if name == kw::PathRoot && ident.span.rust_2015() && self.session.rust_2018() { // `::a::b` from 2015 macro on 2018 global edition module = Some(ModuleOrUniformRoot::CrateRootAndExternPrelude); continue; } if name == kw::PathRoot || name == kw::Crate || name == kw::DollarCrate { // `::a::b`, `crate::a::b` or `$crate::a::b` module = Some(ModuleOrUniformRoot::Module(self.resolve_crate_root(ident))); continue; } } } // Report special messages for path segment keywords in wrong positions. 
if ident.is_path_segment_keyword() && i != 0 { let name_str = if name == kw::PathRoot { "crate root".to_string() } else { format!("`{}`", name) }; let label = if i == 1 && path[0].ident.name == kw::PathRoot { format!("global paths cannot start with {}", name_str) } else { format!("{} in paths can only be used in start position", name_str) }; return PathResult::Failed { span: ident.span, label, suggestion: None, is_error_from_last_segment: false, }; } enum FindBindingResult<'a> { Binding(Result<&'a NameBinding<'a>, Determinacy>), PathResult(PathResult<'a>), } let find_binding_in_ns = |this: &mut Self, ns| { let binding = if let Some(module) = module { this.resolve_ident_in_module( module, ident, ns, parent_scope, record_used, path_span, ) } else if ribs.is_none() || opt_ns.is_none() || opt_ns == Some(MacroNS) { let scopes = ScopeSet::All(ns, opt_ns.is_none()); this.early_resolve_ident_in_lexical_scope( ident, scopes, parent_scope, record_used, record_used, path_span, ) } else { let record_used_id = if record_used { crate_lint.node_id().or(Some(CRATE_NODE_ID)) } else { None }; match this.resolve_ident_in_lexical_scope( ident, ns, parent_scope, record_used_id, path_span, &ribs.unwrap()[ns], ) { // we found a locally-imported or available item/module Some(LexicalScopeBinding::Item(binding)) => Ok(binding), // we found a local variable or type param Some(LexicalScopeBinding::Res(res)) if opt_ns == Some(TypeNS) || opt_ns == Some(ValueNS) => { record_segment_res(this, res); return FindBindingResult::PathResult(PathResult::NonModule( PartialRes::with_unresolved_segments(res, path.len() - 1), )); } _ => Err(Determinacy::determined(record_used)), } }; FindBindingResult::Binding(binding) }; let binding = match find_binding_in_ns(self, ns) { FindBindingResult::PathResult(x) => return x, FindBindingResult::Binding(binding) => binding, }; match binding { Ok(binding) => { if i == 1 { second_binding = Some(binding); } let res = binding.res(); let maybe_assoc = opt_ns != 
Some(MacroNS) && PathSource::Type.is_expected(res); if let Some(next_module) = binding.module() { module = Some(ModuleOrUniformRoot::Module(next_module)); record_segment_res(self, res); } else if res == Res::ToolMod && i + 1 != path.len() { if binding.is_import() { self.session .struct_span_err( ident.span, "cannot use a tool module through an import", ) .span_note(binding.span, "the tool module imported here") .emit(); } let res = Res::NonMacroAttr(NonMacroAttrKind::Tool); return PathResult::NonModule(PartialRes::new(res)); } else if res == Res::Err { return PathResult::NonModule(PartialRes::new(Res::Err)); } else if opt_ns.is_some() && (is_last || maybe_assoc) { self.lint_if_path_starts_with_module( crate_lint, path, path_span, second_binding, ); return PathResult::NonModule(PartialRes::with_unresolved_segments( res, path.len() - i - 1, )); } else { let label = format!( "`{}` is {} {}, not a module", ident, res.article(), res.descr(), ); return PathResult::Failed { span: ident.span, label, suggestion: None, is_error_from_last_segment: is_last, }; } } Err(Undetermined) => return PathResult::Indeterminate, Err(Determined) => { if let Some(ModuleOrUniformRoot::Module(module)) = module { if opt_ns.is_some() && !module.is_normal() { return PathResult::NonModule(PartialRes::with_unresolved_segments( module.res().unwrap(), path.len() - i, )); } } let module_res = match module { Some(ModuleOrUniformRoot::Module(module)) => module.res(), _ => None, }; let (label, suggestion) = if module_res == self.graph_root.res() { let is_mod = |res| matches!(res, Res::Def(DefKind::Mod, _)); // Don't look up import candidates if this is a speculative resolve let mut candidates = if record_used { self.lookup_import_candidates(ident, TypeNS, parent_scope, is_mod) } else { Vec::new() }; candidates.sort_by_cached_key(|c| { (c.path.segments.len(), pprust::path_to_string(&c.path)) }); if let Some(candidate) = candidates.get(0) { ( String::from("unresolved import"), Some(( vec![(ident.span, 
pprust::path_to_string(&candidate.path))], String::from("a similar path exists"), Applicability::MaybeIncorrect, )), ) } else if self.session.edition() == Edition::Edition2015 { (format!("maybe a missing crate `{}`?", ident), None) } else { (format!("could not find `{}` in the crate root", ident), None) } } else if i == 0 { if ident .name .as_str() .chars() .next() .map_or(false, |c| c.is_ascii_uppercase()) { (format!("use of undeclared type `{}`", ident), None) } else { (format!("use of undeclared crate or module `{}`", ident), None) } } else { let parent = path[i - 1].ident.name; let parent = match parent { // ::foo is mounted at the crate root for 2015, and is the extern // prelude for 2018+ kw::PathRoot if self.session.edition() > Edition::Edition2015 => { "the list of imported crates".to_owned() } kw::PathRoot | kw::Crate => "the crate root".to_owned(), _ => { format!("`{}`", parent) } }; let mut msg = format!("could not find `{}` in {}", ident, parent); if ns == TypeNS || ns == ValueNS { let ns_to_try = if ns == TypeNS { ValueNS } else { TypeNS }; if let FindBindingResult::Binding(Ok(binding)) = find_binding_in_ns(self, ns_to_try) { let mut found = |what| { msg = format!( "expected {}, found {} `{}` in {}", ns.descr(), what, ident, parent ) }; if binding.module().is_some() { found("module") } else { match binding.res() { def::Res::<NodeId>::Def(kind, id) => found(kind.descr(id)), _ => found(ns_to_try.descr()), } } }; } (msg, None) }; return PathResult::Failed { span: ident.span, label, suggestion, is_error_from_last_segment: is_last, }; } } } self.lint_if_path_starts_with_module(crate_lint, path, path_span, second_binding); PathResult::Module(match module { Some(module) => module, None if path.is_empty() => ModuleOrUniformRoot::CurrentScope, _ => span_bug!(path_span, "resolve_path: non-empty path `{:?}` has no module", path), }) } fn lint_if_path_starts_with_module( &mut self, crate_lint: CrateLint, path: &[Segment], path_span: Span, second_binding: 
Option<&NameBinding<'_>>, ) { let (diag_id, diag_span) = match crate_lint { CrateLint::No => return, CrateLint::SimplePath(id) => (id, path_span), CrateLint::UsePath { root_id, root_span } => (root_id, root_span), CrateLint::QPathTrait { qpath_id, qpath_span } => (qpath_id, qpath_span), }; let first_name = match path.get(0) { // In the 2018 edition this lint is a hard error, so nothing to do Some(seg) if seg.ident.span.rust_2015() && self.session.rust_2015() => seg.ident.name, _ => return, }; // We're only interested in `use` paths which should start with // `{{root}}` currently. if first_name != kw::PathRoot { return; } match path.get(1) { // If this import looks like `crate::...` it's already good Some(Segment { ident, .. }) if ident.name == kw::Crate => return, // Otherwise go below to see if it's an extern crate Some(_) => {} // If the path has length one (and it's `PathRoot` most likely) // then we don't know whether we're gonna be importing a crate or an // item in our crate. Defer this lint to elsewhere None => return, } // If the first element of our path was actually resolved to an // `ExternCrate` (also used for `crate::...`) then no need to issue a // warning, this looks all good! if let Some(binding) = second_binding { if let NameBindingKind::Import { import, .. } = binding.kind { // Careful: we still want to rewrite paths from renamed extern crates. if let ImportKind::ExternCrate { source: None, .. } = import.kind { return; } } } let diag = BuiltinLintDiagnostics::AbsPathWithModule(diag_span); self.lint_buffer.buffer_lint_with_diagnostic( lint::builtin::ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, diag_id, diag_span, "absolute paths must start with `self`, `super`, \ `crate`, or an external crate name in the 2018 edition", diag, ); } // Validate a local resolution (from ribs). 
    /// Validates a resolution found in the ribs at `rib_index`: walks the ribs
    /// between the definition and the use site and rejects uses that cross a
    /// forbidden boundary (item boundaries for locals, anonymous constants for
    /// generic parameters, const-param types, ...). Returns `Res::Err` (after
    /// reporting, when `record_used`) if the use is invalid, otherwise `res`
    /// (possibly adjusted for the `Self`-in-anon-const hack).
    fn validate_res_from_ribs(
        &mut self,
        rib_index: usize,
        rib_ident: Ident,
        mut res: Res,
        record_used: bool,
        span: Span,
        original_rib_ident_def: Ident,
        all_ribs: &[Rib<'a>],
    ) -> Res {
        const CG_BUG_STR: &str = "min_const_generics resolve check didn't stop compilation";
        debug!("validate_res_from_ribs({:?})", res);
        // Only the ribs *between* the definition and the use site matter.
        let ribs = &all_ribs[rib_index + 1..];

        // An invalid forward use of a generic parameter from a previous default.
        if let ForwardGenericParamBanRibKind = all_ribs[rib_index].kind {
            if record_used {
                let res_error = if rib_ident.name == kw::SelfUpper {
                    ResolutionError::SelfInTyParamDefault
                } else {
                    ResolutionError::ForwardDeclaredTyParam
                };
                self.report_error(span, res_error);
            }
            assert_eq!(res, Res::Err);
            return Res::Err;
        }

        match res {
            Res::Local(_) => {
                use ResolutionError::*;
                let mut res_err = None;

                for rib in ribs {
                    match rib.kind {
                        NormalRibKind
                        | ClosureOrAsyncRibKind
                        | ModuleRibKind(..)
                        | MacroDefinition(..)
                        | ForwardGenericParamBanRibKind => {
                            // Nothing to do. Continue.
                        }
                        ItemRibKind(_) | FnItemRibKind | AssocItemRibKind => {
                            // This was an attempt to access an upvar inside a
                            // named function item. This is not allowed, so we
                            // report an error.
                            if record_used {
                                // We don't immediately trigger a resolve error, because
                                // we want certain other resolution errors (namely those
                                // emitted for `ConstantItemRibKind` below) to take
                                // precedence.
                                res_err = Some(CannotCaptureDynamicEnvironmentInFnItem);
                            }
                        }
                        ConstantItemRibKind(_, item) => {
                            // Still doesn't deal with upvars
                            if record_used {
                                let (span, resolution_error) =
                                    if let Some((ident, constant_item_kind)) = item {
                                        let kind_str = match constant_item_kind {
                                            ConstantItemKind::Const => "const",
                                            ConstantItemKind::Static => "static",
                                        };
                                        (
                                            span,
                                            AttemptToUseNonConstantValueInConstant(
                                                ident, "let", kind_str,
                                            ),
                                        )
                                    } else {
                                        (
                                            rib_ident.span,
                                            AttemptToUseNonConstantValueInConstant(
                                                original_rib_ident_def,
                                                "const",
                                                "let",
                                            ),
                                        )
                                    };
                                self.report_error(span, resolution_error);
                            }
                            return Res::Err;
                        }
                        ConstParamTyRibKind => {
                            if record_used {
                                self.report_error(span, ParamInTyOfConstParam(rib_ident.name));
                            }
                            return Res::Err;
                        }
                    }
                }

                if let Some(res_err) = res_err {
                    self.report_error(span, res_err);
                    return Res::Err;
                }
            }
            Res::Def(DefKind::TyParam, _) | Res::SelfTy(..) => {
                let mut in_ty_param_default = false;
                for rib in ribs {
                    let has_generic_params = match rib.kind {
                        NormalRibKind
                        | ClosureOrAsyncRibKind
                        | AssocItemRibKind
                        | ModuleRibKind(..)
                        | MacroDefinition(..) => {
                            // Nothing to do. Continue.
                            continue;
                        }
                        // We only forbid constant items if we are inside of type defaults,
                        // for example `struct Foo<T, U = [u8; std::mem::size_of::<T>()]>`
                        ForwardGenericParamBanRibKind => {
                            // FIXME(const_generic_defaults): we may need to distinguish between
                            // being in type parameter defaults and const parameter defaults
                            in_ty_param_default = true;
                            continue;
                        }
                        ConstantItemRibKind(trivial, _) => {
                            let features = self.session.features_untracked();
                            // HACK(min_const_generics): We currently only allow `N` or `{ N }`.
                            if !(trivial
                                || features.const_generics
                                || features.lazy_normalization_consts)
                            {
                                // HACK(min_const_generics): If we encounter `Self` in an anonymous constant
                                // we can't easily tell if it's generic at this stage, so we instead remember
                                // this and then enforce the self type to be concrete later on.
                                if let Res::SelfTy(trait_def, Some((impl_def, _))) = res {
                                    res = Res::SelfTy(trait_def, Some((impl_def, true)));
                                } else {
                                    if record_used {
                                        self.report_error(
                                            span,
                                            ResolutionError::ParamInNonTrivialAnonConst {
                                                name: rib_ident.name,
                                                is_type: true,
                                            },
                                        );
                                    }

                                    self.session.delay_span_bug(span, CG_BUG_STR);
                                    return Res::Err;
                                }
                            }

                            if in_ty_param_default {
                                if record_used {
                                    self.report_error(
                                        span,
                                        ResolutionError::ParamInAnonConstInTyDefault(
                                            rib_ident.name,
                                        ),
                                    );
                                }
                                return Res::Err;
                            } else {
                                continue;
                            }
                        }

                        // This was an attempt to use a type parameter outside its scope.
                        ItemRibKind(has_generic_params) => has_generic_params,
                        FnItemRibKind => HasGenericParams::Yes,
                        ConstParamTyRibKind => {
                            if record_used {
                                self.report_error(
                                    span,
                                    ResolutionError::ParamInTyOfConstParam(rib_ident.name),
                                );
                            }
                            return Res::Err;
                        }
                    };

                    if record_used {
                        self.report_error(
                            span,
                            ResolutionError::GenericParamsFromOuterFunction(
                                res,
                                has_generic_params,
                            ),
                        );
                    }
                    return Res::Err;
                }
            }
            Res::Def(DefKind::ConstParam, _) => {
                let mut ribs = ribs.iter().peekable();
                if let Some(Rib { kind: FnItemRibKind, .. }) = ribs.peek() {
                    // When declaring const parameters inside function signatures, the first rib
                    // is always a `FnItemRibKind`. In this case, we can skip it, to avoid it
                    // (spuriously) conflicting with the const param.
                    ribs.next();
                }

                let mut in_ty_param_default = false;
                for rib in ribs {
                    let has_generic_params = match rib.kind {
                        NormalRibKind
                        | ClosureOrAsyncRibKind
                        | AssocItemRibKind
                        | ModuleRibKind(..)
                        | MacroDefinition(..) => continue,

                        // We only forbid constant items if we are inside of type defaults,
                        // for example `struct Foo<T, U = [u8; std::mem::size_of::<T>()]>`
                        ForwardGenericParamBanRibKind => {
                            // FIXME(const_generic_defaults): we may need to distinguish between
                            // being in type parameter defaults and const parameter defaults
                            in_ty_param_default = true;
                            continue;
                        }
                        ConstantItemRibKind(trivial, _) => {
                            let features = self.session.features_untracked();
                            // HACK(min_const_generics): We currently only allow `N` or `{ N }`.
                            if !(trivial
                                || features.const_generics
                                || features.lazy_normalization_consts)
                            {
                                if record_used {
                                    self.report_error(
                                        span,
                                        ResolutionError::ParamInNonTrivialAnonConst {
                                            name: rib_ident.name,
                                            is_type: false,
                                        },
                                    );
                                }

                                self.session.delay_span_bug(span, CG_BUG_STR);
                                return Res::Err;
                            }

                            if in_ty_param_default {
                                if record_used {
                                    self.report_error(
                                        span,
                                        ResolutionError::ParamInAnonConstInTyDefault(
                                            rib_ident.name,
                                        ),
                                    );
                                }
                                return Res::Err;
                            } else {
                                continue;
                            }
                        }

                        ItemRibKind(has_generic_params) => has_generic_params,
                        FnItemRibKind => HasGenericParams::Yes,
                        ConstParamTyRibKind => {
                            if record_used {
                                self.report_error(
                                    span,
                                    ResolutionError::ParamInTyOfConstParam(rib_ident.name),
                                );
                            }
                            return Res::Err;
                        }
                    };

                    // This was an attempt to use a const parameter outside its scope.
                    if record_used {
                        self.report_error(
                            span,
                            ResolutionError::GenericParamsFromOuterFunction(
                                res,
                                has_generic_params,
                            ),
                        );
                    }
                    return Res::Err;
                }
            }
            _ => {}
        }
        res
    }

    /// Records the partial resolution for `node_id`, panicking if the node was
    /// already resolved to something else (a path must resolve exactly once).
    fn record_partial_res(&mut self, node_id: NodeId, resolution: PartialRes) {
        debug!("(recording res) recording {:?} for {}", resolution, node_id);
        if let Some(prev_res) = self.partial_res_map.insert(node_id, resolution) {
            panic!("path resolved multiple times ({:?} before, {:?} now)", prev_res, resolution);
        }
    }

    /// Whether `vis` is accessible from the given module.
    fn is_accessible_from(&self, vis: ty::Visibility, module: Module<'a>) -> bool {
        vis.is_accessible_from(module.nearest_parent_mod, self)
    }

    /// Associates `binding` with its parent `module`; the association must never
    /// change once set (enforced by `span_bug!`).
    fn set_binding_parent_module(&mut self, binding: &'a NameBinding<'a>, module: Module<'a>) {
        if let Some(old_module) = self.binding_parent_modules.insert(PtrKey(binding), module) {
            if !ptr::eq(module, old_module) {
                span_bug!(binding.span, "parent module is reset for binding");
            }
        }
    }

    /// Returns `true` if the `macro_rules` binding should win over the
    /// modularized macro binding for ambiguity purposes: both live in the same
    /// nearest parent module and the modularized item is textually "above" the
    /// `macro_rules` item.
    fn disambiguate_macro_rules_vs_modularized(
        &self,
        macro_rules: &'a NameBinding<'a>,
        modularized: &'a NameBinding<'a>,
    ) -> bool {
        // Some non-controversial subset of ambiguities "modularized macro name" vs "macro_rules"
        // is disambiguated to mitigate regressions from macro modularization.
        // Scoping for `macro_rules` behaves like scoping for `let` at module level, in general.
        match (
            self.binding_parent_modules.get(&PtrKey(macro_rules)),
            self.binding_parent_modules.get(&PtrKey(modularized)),
        ) {
            (Some(macro_rules), Some(modularized)) => {
                macro_rules.nearest_parent_mod == modularized.nearest_parent_mod
                    && modularized.is_ancestor_of(macro_rules)
            }
            _ => false,
        }
    }

    /// Emits all buffered resolver diagnostics: use-injection suggestions,
    /// macro-export-by-absolute-path lints, ambiguity errors, and deduplicated
    /// privacy errors.
    fn report_errors(&mut self, krate: &Crate) {
        self.report_with_use_injections(krate);

        for &(span_use, span_def) in &self.macro_expanded_macro_export_errors {
            let msg = "macro-expanded `macro_export` macros from the current crate \
                       cannot be referred to by absolute paths";
            self.lint_buffer.buffer_lint_with_diagnostic(
                lint::builtin::MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS,
                CRATE_NODE_ID,
                span_use,
                msg,
                BuiltinLintDiagnostics::MacroExpandedMacroExportsAccessedByAbsolutePaths(span_def),
            );
        }

        for ambiguity_error in &self.ambiguity_errors {
            self.report_ambiguity_error(ambiguity_error);
        }

        let mut reported_spans = FxHashSet::default();
        for error in &self.privacy_errors {
            if reported_spans.insert(error.dedup_span) {
                self.report_privacy_error(error);
            }
        }
    }

    /// Emits the queued `UseError`s, attaching either import candidates (with a
    /// placement computed by `UsePlacementFinder`) or a direct suggestion.
    fn report_with_use_injections(&mut self, krate: &Crate) {
        for UseError { mut err, candidates, def_id, instead, suggestion } in
            self.use_injections.drain(..)
        {
            let (span, found_use) = if let Some(def_id) = def_id.as_local() {
                UsePlacementFinder::check(krate, self.def_id_to_node_id[def_id])
            } else {
                (None, false)
            };
            if !candidates.is_empty() {
                diagnostics::show_candidates(&mut err, span, &candidates, instead, found_use);
            } else if let Some((span, msg, sugg, appl)) = suggestion {
                err.span_suggestion(span, msg, sugg, appl);
            }
            err.emit();
        }
    }

    /// Reports a duplicate-name conflict between two bindings in `parent`,
    /// choosing the appropriate error code (E0252/E0254/E0255/E0259/E0260/E0428)
    /// and, where possible, a suggestion to remove or rename one of the imports.
    fn report_conflict<'b>(
        &mut self,
        parent: Module<'_>,
        ident: Ident,
        ns: Namespace,
        new_binding: &NameBinding<'b>,
        old_binding: &NameBinding<'b>,
    ) {
        // Error on the second of two conflicting names
        if old_binding.span.lo() > new_binding.span.lo() {
            return self.report_conflict(parent, ident, ns, old_binding, new_binding);
        }

        let container = match parent.kind {
            ModuleKind::Def(kind, _, _) => kind.descr(parent.def_id().unwrap()),
            ModuleKind::Block(..) => "block",
        };

        let old_noun = match old_binding.is_import() {
            true => "import",
            false => "definition",
        };

        let new_participle = match new_binding.is_import() {
            true => "imported",
            false => "defined",
        };

        let (name, span) =
            (ident.name, self.session.source_map().guess_head_span(new_binding.span));

        // Avoid reporting the same conflict twice for the same span.
        if let Some(s) = self.name_already_seen.get(&name) {
            if s == &span {
                return;
            }
        }

        let old_kind = match (ns, old_binding.module()) {
            (ValueNS, _) => "value",
            (MacroNS, _) => "macro",
            (TypeNS, _) if old_binding.is_extern_crate() => "extern crate",
            (TypeNS, Some(module)) if module.is_normal() => "module",
            (TypeNS, Some(module)) if module.is_trait() => "trait",
            (TypeNS, _) => "type",
        };

        let msg = format!("the name `{}` is defined multiple times", name);

        let mut err = match (old_binding.is_extern_crate(), new_binding.is_extern_crate()) {
            (true, true) => struct_span_err!(self.session, span, E0259, "{}", msg),
            (true, _) | (_, true) => match new_binding.is_import() && old_binding.is_import() {
                true => struct_span_err!(self.session, span, E0254, "{}", msg),
                false => struct_span_err!(self.session, span, E0260, "{}", msg),
            },
            _ => match (old_binding.is_import(), new_binding.is_import()) {
                (false, false) => struct_span_err!(self.session, span, E0428, "{}", msg),
                (true, true) => struct_span_err!(self.session, span, E0252, "{}", msg),
                _ => struct_span_err!(self.session, span, E0255, "{}", msg),
            },
        };

        err.note(&format!(
            "`{}` must be defined only once in the {} namespace of this {}",
            name,
            ns.descr(),
            container
        ));

        err.span_label(span, format!("`{}` re{} here", name, new_participle));
        err.span_label(
            self.session.source_map().guess_head_span(old_binding.span),
            format!("previous {} of the {} `{}` here", old_noun, old_kind, name),
        );

        // See https://github.com/rust-lang/rust/issues/32354
        use NameBindingKind::Import;
        let import = match (&new_binding.kind, &old_binding.kind) {
            // If there are two imports where one or both have attributes then prefer removing the
            // import without attributes.
            (Import { import: new, .. }, Import { import: old, .. })
                if {
                    !new_binding.span.is_dummy()
                        && !old_binding.span.is_dummy()
                        && (new.has_attributes || old.has_attributes)
                } =>
            {
                if old.has_attributes {
                    Some((new, new_binding.span, true))
                } else {
                    Some((old, old_binding.span, true))
                }
            }
            // Otherwise prioritize the new binding.
            (Import { import, .. }, other) if !new_binding.span.is_dummy() => {
                Some((import, new_binding.span, other.is_import()))
            }
            (other, Import { import, .. }) if !old_binding.span.is_dummy() => {
                Some((import, old_binding.span, other.is_import()))
            }
            _ => None,
        };

        // Check if the target of the use for both bindings is the same.
        let duplicate = new_binding.res().opt_def_id() == old_binding.res().opt_def_id();
        let has_dummy_span = new_binding.span.is_dummy() || old_binding.span.is_dummy();
        let from_item =
            self.extern_prelude.get(&ident).map_or(true, |entry| entry.introduced_by_item);
        // Only suggest removing an import if both bindings are to the same def, if both spans
        // aren't dummy spans. Further, if both bindings are imports, then the ident must have
        // been introduced by an item.
        let should_remove_import = duplicate
            && !has_dummy_span
            && ((new_binding.is_extern_crate() || old_binding.is_extern_crate()) || from_item);

        match import {
            Some((import, span, true)) if should_remove_import && import.is_nested() => {
                self.add_suggestion_for_duplicate_nested_use(&mut err, import, span)
            }
            Some((import, _, true)) if should_remove_import && !import.is_glob() => {
                // Simple case - remove the entire import. Due to the above match arm, this can
                // only be a single use so just remove it entirely.
                err.tool_only_span_suggestion(
                    import.use_span_with_attributes,
                    "remove unnecessary import",
                    String::new(),
                    Applicability::MaybeIncorrect,
                );
            }
            Some((import, span, _)) => {
                self.add_suggestion_for_rename_of_use(&mut err, name, import, span)
            }
            _ => {}
        }

        err.emit();
        self.name_already_seen.insert(name, span);
    }

    /// This function adds a suggestion to change the binding name of a new import that conflicts
    /// with an existing import.
    ///
    /// ```text,ignore (diagnostic)
    /// help: you can use `as` to change the binding name of the import
    ///    |
    /// LL | use foo::bar as other_bar;
    ///    |     ^^^^^^^^^^^^^^^^^^^^^
    /// ```
    fn add_suggestion_for_rename_of_use(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        name: Symbol,
        import: &Import<'_>,
        binding_span: Span,
    ) {
        // Match the case convention of the original name in the suggested rename.
        let suggested_name = if name.as_str().chars().next().unwrap().is_uppercase() {
            format!("Other{}", name)
        } else {
            format!("other_{}", name)
        };

        let mut suggestion = None;
        match import.kind {
            ImportKind::Single { type_ns_only: true, .. } => {
                suggestion = Some(format!("self as {}", suggested_name))
            }
            ImportKind::Single { source, .. } => {
                if let Some(pos) =
                    source.span.hi().0.checked_sub(binding_span.lo().0).map(|pos| pos as usize)
                {
                    if let Ok(snippet) = self.session.source_map().span_to_snippet(binding_span) {
                        if pos <= snippet.len() {
                            suggestion = Some(format!(
                                "{} as {}{}",
                                &snippet[..pos],
                                suggested_name,
                                if snippet.ends_with(';') { ";" } else { "" }
                            ))
                        }
                    }
                }
            }
            ImportKind::ExternCrate { source, target, .. } => {
                suggestion = Some(format!(
                    "extern crate {} as {};",
                    source.unwrap_or(target.name),
                    suggested_name,
                ))
            }
            _ => unreachable!(),
        }

        let rename_msg = "you can use `as` to change the binding name of the import";
        if let Some(suggestion) = suggestion {
            err.span_suggestion(
                binding_span,
                rename_msg,
                suggestion,
                Applicability::MaybeIncorrect,
            );
        } else {
            err.span_label(binding_span, rename_msg);
        }
    }

    /// This function adds a suggestion to remove an unnecessary binding from an import that is
    /// nested. In the following example, this function will be invoked to remove the `a` binding
    /// in the second use statement:
    ///
    /// ```ignore (diagnostic)
    /// use issue_52891::a;
    /// use issue_52891::{d, a, e};
    /// ```
    ///
    /// The following suggestion will be added:
    ///
    /// ```ignore (diagnostic)
    /// use issue_52891::{d, a, e};
    ///                      ^-- help: remove unnecessary import
    /// ```
    ///
    /// If the nested use contains only one import then the suggestion will remove the entire
    /// line.
    ///
    /// It is expected that the provided import is nested - this isn't checked by the
    /// function. If this invariant is not upheld, this function's behaviour will be unexpected
    /// as characters expected by span manipulations won't be present.
    fn add_suggestion_for_duplicate_nested_use(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        import: &Import<'_>,
        binding_span: Span,
    ) {
        assert!(import.is_nested());
        let message = "remove unnecessary import";

        // Two examples will be used to illustrate the span manipulations we're doing:
        //
        // - Given `use issue_52891::{d, a, e};` where `a` is a duplicate then `binding_span` is
        //   `a` and `import.use_span` is `issue_52891::{d, a, e};`.
        // - Given `use issue_52891::{d, e, a};` where `a` is a duplicate then `binding_span` is
        //   `a` and `import.use_span` is `issue_52891::{d, e, a};`.

        let (found_closing_brace, span) =
            find_span_of_binding_until_next_binding(self.session, binding_span, import.use_span);

        // If there was a closing brace then identify the span to remove any trailing commas from
        // previous imports.
        if found_closing_brace {
            if let Some(span) = extend_span_to_previous_binding(self.session, span) {
                err.tool_only_span_suggestion(
                    span,
                    message,
                    String::new(),
                    Applicability::MaybeIncorrect,
                );
            } else {
                // Remove the entire line if we cannot extend the span back, this indicates a
                // `issue_52891::{self}` case.
                err.span_suggestion(
                    import.use_span_with_attributes,
                    message,
                    String::new(),
                    Applicability::MaybeIncorrect,
                );
            }

            return;
        }

        err.span_suggestion(span, message, String::new(), Applicability::MachineApplicable);
    }

    /// Looks up `ident` in the extern prelude, loading the crate on demand.
    /// When `speculative`, no uses are recorded and no crate-loading errors are
    /// emitted.
    fn extern_prelude_get(
        &mut self,
        ident: Ident,
        speculative: bool,
    ) -> Option<&'a NameBinding<'a>> {
        if ident.is_path_segment_keyword() {
            // Make sure `self`, `super` etc produce an error when passed to here.
            return None;
        }
        self.extern_prelude.get(&ident.normalize_to_macros_2_0()).cloned().and_then(|entry| {
            if let Some(binding) = entry.extern_crate_item {
                if !speculative && entry.introduced_by_item {
                    self.record_use(ident, TypeNS, binding, false);
                }
                Some(binding)
            } else {
                let crate_id = if !speculative {
                    self.crate_loader.process_path_extern(ident.name, ident.span)
                } else {
                    self.crate_loader.maybe_process_path_extern(ident.name)?
                };
                let crate_root = self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
                Some(
                    (crate_root, ty::Visibility::Public, DUMMY_SP, ExpnId::root())
                        .to_name_binding(self.arenas),
                )
            }
        })
    }

    /// Rustdoc uses this to resolve things in a recoverable way. `ResolutionError<'a>`
    /// isn't something that can be returned because it can't be made to live that long,
    /// and also it's a private type. Fortunately rustdoc doesn't need to know the error,
    /// just that an error occurred.
    // FIXME(Manishearth): intra-doc links won't get warned of epoch changes.
    pub fn resolve_str_path_error(
        &mut self,
        span: Span,
        path_str: &str,
        ns: Namespace,
        module_id: DefId,
    ) -> Result<(ast::Path, Res), ()> {
        // Build a synthetic `ast::Path` from the string; a leading `::` becomes
        // an explicit `PathRoot` segment.
        let path = if path_str.starts_with("::") {
            ast::Path {
                span,
                segments: iter::once(Ident::with_dummy_span(kw::PathRoot))
                    .chain(path_str.split("::").skip(1).map(Ident::from_str))
                    .map(|i| self.new_ast_path_segment(i))
                    .collect(),
                tokens: None,
            }
        } else {
            ast::Path {
                span,
                segments: path_str
                    .split("::")
                    .map(Ident::from_str)
                    .map(|i| self.new_ast_path_segment(i))
                    .collect(),
                tokens: None,
            }
        };
        let module = self.get_module(module_id);
        let parent_scope = &ParentScope::module(module, self);
        // Collapse the rich resolution error into `()` — see doc comment above.
        let res = self.resolve_ast_path(&path, ns, parent_scope).map_err(|_| ())?;
        Ok((path, res))
    }

    // Resolve a path passed from rustdoc or HIR lowering.
    fn resolve_ast_path(
        &mut self,
        path: &ast::Path,
        ns: Namespace,
        parent_scope: &ParentScope<'a>,
    ) -> Result<Res, (Span, ResolutionError<'a>)> {
        match self.resolve_path(
            &Segment::from_path(path),
            Some(ns),
            parent_scope,
            false,
            path.span,
            CrateLint::No,
        ) {
            PathResult::Module(ModuleOrUniformRoot::Module(module)) => Ok(module.res().unwrap()),
            PathResult::NonModule(path_res) if path_res.unresolved_segments() == 0 => {
                Ok(path_res.base_res())
            }
            PathResult::NonModule(..) => Err((
                path.span,
                ResolutionError::FailedToResolve {
                    label: String::from("type-relative paths are not supported in this context"),
                    suggestion: None,
                },
            )),
            PathResult::Module(..) | PathResult::Indeterminate => unreachable!(),
            PathResult::Failed { span, label, suggestion, .. } => {
                Err((span, ResolutionError::FailedToResolve { label, suggestion }))
            }
        }
    }

    /// Creates a fresh `ast::PathSegment` for `ident` with a newly allocated
    /// `NodeId`.
    fn new_ast_path_segment(&mut self, ident: Ident) -> ast::PathSegment {
        let mut seg = ast::PathSegment::from_ident(ident);
        seg.id = self.next_node_id();
        seg
    }

    // For rustdoc.
    pub fn graph_root(&self) -> Module<'a> {
        self.graph_root
    }

    // For rustdoc.
pub fn all_macros(&self) -> &FxHashMap<Symbol, Res> {
    &self.all_macros
}

/// Retrieves the span of the given `DefId` if `DefId` is in the local crate.
#[inline]
pub fn opt_span(&self, def_id: DefId) -> Option<Span> {
    if let Some(def_id) = def_id.as_local() {
        Some(self.def_id_to_span[def_id])
    } else {
        None
    }
}

/// Checks if an expression refers to a function marked with
/// `#[rustc_legacy_const_generics]` and returns the argument index list
/// from the attribute.
pub fn legacy_const_generic_args(&mut self, expr: &Expr) -> Option<Vec<usize>> {
    if let ExprKind::Path(None, path) = &expr.kind {
        // Don't perform legacy const generics rewriting if the path already
        // has generic arguments.
        if path.segments.last().unwrap().args.is_some() {
            return None;
        }
        let partial_res = self.partial_res_map.get(&expr.id)?;
        if partial_res.unresolved_segments() != 0 {
            return None;
        }
        if let Res::Def(def::DefKind::Fn, def_id) = partial_res.base_res() {
            // We only support cross-crate argument rewriting. Uses
            // within the same crate should be updated to use the new
            // const generics style.
            if def_id.is_local() {
                return None;
            }
            // Fast path: result (including a negative one, `None`) was cached.
            if let Some(v) = self.legacy_const_generic_args.get(&def_id) {
                return v.clone();
            }
            // Slow path: read the attribute from the foreign crate's metadata
            // and collect the literal integer indices it lists.
            let parse_attrs = || {
                let attrs = self.cstore().item_attrs(def_id, self.session);
                let attr = attrs
                    .iter()
                    .find(|a| self.session.check_name(a, sym::rustc_legacy_const_generics))?;
                let mut ret = vec![];
                for meta in attr.meta_item_list()? {
                    match meta.literal()?.kind {
                        LitKind::Int(a, _) => {
                            ret.push(a as usize);
                        }
                        _ => panic!("invalid arg index"),
                    }
                }
                Some(ret)
            };
            // Cache the lookup to avoid parsing attributes for an item
            // multiple times.
            let ret = parse_attrs();
            self.legacy_const_generic_args.insert(def_id, ret.clone());
            return ret;
        }
    }
    None
}
} // end of the enclosing `impl` (opened before this chunk)

/// Joins `names` with `::`, skipping the `{{root}}` segment and restoring the
/// `r#` prefix on identifiers that look like raw identifiers.
fn names_to_string(names: &[Symbol]) -> String {
    let mut result = String::new();
    for (i, name) in names.iter().filter(|name| **name != kw::PathRoot).enumerate() {
        if i > 0 {
            result.push_str("::");
        }
        if Ident::with_dummy_span(*name).is_raw_guess() {
            result.push_str("r#");
        }
        result.push_str(&name.as_str());
    }
    result
}

fn path_names_to_string(path: &Path) -> String {
    names_to_string(&path.segments.iter().map(|seg| seg.ident.name).collect::<Vec<_>>())
}

/// A somewhat inefficient routine to obtain the name of a module.
fn module_to_string(module: Module<'_>) -> Option<String> {
    let mut names = Vec::new();

    // Walk parents collecting names innermost-first; unnamed (block/anon)
    // modules contribute `<opaque>`.
    fn collect_mod(names: &mut Vec<Symbol>, module: Module<'_>) {
        if let ModuleKind::Def(.., name) = module.kind {
            if let Some(parent) = module.parent {
                names.push(name);
                collect_mod(names, parent);
            }
        } else {
            names.push(Symbol::intern("<opaque>"));
            collect_mod(names, module.parent.unwrap());
        }
    }
    collect_mod(&mut names, module);

    if names.is_empty() {
        return None;
    }
    names.reverse(); // collected innermost-first; display outermost-first
    Some(names_to_string(&names))
}

#[derive(Copy, Clone, Debug)]
enum CrateLint {
    /// Do not issue the lint.
    No,

    /// This lint applies to some arbitrary path; e.g., `impl ::foo::Bar`.
    /// In this case, we can take the span of that path.
    SimplePath(NodeId),

    /// This lint comes from a `use` statement. In this case, what we
    /// care about really is the *root* `use` statement; e.g., if we
    /// have nested things like `use a::{b, c}`, we care about the
    /// `use a` part.
    UsePath { root_id: NodeId, root_span: Span },

    /// This is the "trait item" from a fully qualified path. For example,
    /// we might be resolving `X::Y::Z` from a path like `<T as X::Y>::Z`.
    /// The `path_span` is the span of the trait itself (`X::Y`).
    QPathTrait { qpath_id: NodeId, qpath_span: Span },
}

impl CrateLint {
    /// The `NodeId` the lint should be attached to, if any.
    fn node_id(&self) -> Option<NodeId> {
        match *self {
            CrateLint::No => None,
            CrateLint::SimplePath(id)
            | CrateLint::UsePath { root_id: id, .. }
            | CrateLint::QPathTrait { qpath_id: id, .. } => Some(id),
        }
    }
}

pub fn provide(providers: &mut Providers) {
    late::lifetimes::provide(providers);
}
40.100205
114
0.535912
9b6302752880f1a3e86b3bc6705c4f3821e7d435
14,907
//! Mailbox property interface //! //! (ref: https://github.com/raspberrypi/firmware/wiki/Mailbox-property-interface) use super::fb::FramebufferInfo; use bcm2837::mailbox::{Mailbox, MailboxChannel}; use lazy_static::lazy_static; use alloc::string::String; use core::mem; use spin::Mutex; use aarch64::asm; lazy_static! { static ref MAILBOX: Mutex<Mailbox> = Mutex::new(Mailbox::new()); } #[derive(Debug)] pub struct PropertyMailboxError(u32); pub type PropertyMailboxResult<T> = Result<T, PropertyMailboxError>; impl From<PropertyMailboxError> for String { fn from(error: PropertyMailboxError) -> Self { format!("{:x?}", error) } } /// Buffer request/response code. /// Copied from `linux/include/soc/bcm2835/raspberrypi-firmware.h` #[repr(u32)] #[allow(dead_code)] #[derive(Copy, Clone, Debug)] #[allow(non_camel_case_types)] enum PropertyMailboxStatus { RPI_FIRMWARE_STATUS_REQUEST = 0, RPI_FIRMWARE_STATUS_SUCCESS = 0x80000000, RPI_FIRMWARE_STATUS_ERROR = 0x80000001, } use self::PropertyMailboxStatus::*; /// Tag identifier. 
/// Copied from `linux/include/soc/bcm2835/raspberrypi-firmware.h` #[repr(u32)] #[allow(dead_code)] #[derive(Copy, Clone, Debug)] #[allow(non_camel_case_types)] enum PropertyMailboxTagId { RPI_FIRMWARE_PROPERTY_END = 0, RPI_FIRMWARE_GET_FIRMWARE_REVISION = 0x00000001, RPI_FIRMWARE_SET_CURSOR_INFO = 0x00008010, RPI_FIRMWARE_SET_CURSOR_STATE = 0x00008011, RPI_FIRMWARE_GET_BOARD_MODEL = 0x00010001, RPI_FIRMWARE_GET_BOARD_REVISION = 0x00010002, RPI_FIRMWARE_GET_BOARD_MAC_ADDRESS = 0x00010003, RPI_FIRMWARE_GET_BOARD_SERIAL = 0x00010004, RPI_FIRMWARE_GET_ARM_MEMORY = 0x00010005, RPI_FIRMWARE_GET_VC_MEMORY = 0x00010006, RPI_FIRMWARE_GET_CLOCKS = 0x00010007, RPI_FIRMWARE_GET_POWER_STATE = 0x00020001, RPI_FIRMWARE_GET_TIMING = 0x00020002, RPI_FIRMWARE_SET_POWER_STATE = 0x00028001, RPI_FIRMWARE_GET_CLOCK_STATE = 0x00030001, RPI_FIRMWARE_GET_CLOCK_RATE = 0x00030002, RPI_FIRMWARE_GET_VOLTAGE = 0x00030003, RPI_FIRMWARE_GET_MAX_CLOCK_RATE = 0x00030004, RPI_FIRMWARE_GET_MAX_VOLTAGE = 0x00030005, RPI_FIRMWARE_GET_TEMPERATURE = 0x00030006, RPI_FIRMWARE_GET_MIN_CLOCK_RATE = 0x00030007, RPI_FIRMWARE_GET_MIN_VOLTAGE = 0x00030008, RPI_FIRMWARE_GET_TURBO = 0x00030009, RPI_FIRMWARE_GET_MAX_TEMPERATURE = 0x0003000a, RPI_FIRMWARE_GET_STC = 0x0003000b, RPI_FIRMWARE_ALLOCATE_MEMORY = 0x0003000c, RPI_FIRMWARE_LOCK_MEMORY = 0x0003000d, RPI_FIRMWARE_UNLOCK_MEMORY = 0x0003000e, RPI_FIRMWARE_RELEASE_MEMORY = 0x0003000f, RPI_FIRMWARE_EXECUTE_CODE = 0x00030010, RPI_FIRMWARE_EXECUTE_QPU = 0x00030011, RPI_FIRMWARE_SET_ENABLE_QPU = 0x00030012, RPI_FIRMWARE_GET_DISPMANX_RESOURCE_MEM_HANDLE = 0x00030014, RPI_FIRMWARE_GET_EDID_BLOCK = 0x00030020, RPI_FIRMWARE_GET_CUSTOMER_OTP = 0x00030021, RPI_FIRMWARE_GET_DOMAIN_STATE = 0x00030030, RPI_FIRMWARE_GET_THROTTLED = 0x00030046, RPI_FIRMWARE_GET_CLOCK_MEASURED = 0x00030047, RPI_FIRMWARE_NOTIFY_REBOOT = 0x00030048, RPI_FIRMWARE_SET_CLOCK_STATE = 0x00038001, RPI_FIRMWARE_SET_CLOCK_RATE = 0x00038002, RPI_FIRMWARE_SET_VOLTAGE = 0x00038003, RPI_FIRMWARE_SET_TURBO = 
0x00038009, RPI_FIRMWARE_SET_CUSTOMER_OTP = 0x00038021, RPI_FIRMWARE_SET_DOMAIN_STATE = 0x00038030, RPI_FIRMWARE_GET_GPIO_STATE = 0x00030041, RPI_FIRMWARE_SET_GPIO_STATE = 0x00038041, RPI_FIRMWARE_SET_SDHOST_CLOCK = 0x00038042, RPI_FIRMWARE_GET_GPIO_CONFIG = 0x00030043, RPI_FIRMWARE_SET_GPIO_CONFIG = 0x00038043, RPI_FIRMWARE_GET_PERIPH_REG = 0x00030045, RPI_FIRMWARE_SET_PERIPH_REG = 0x00038045, RPI_FIRMWARE_GET_POE_HAT_VAL = 0x00030049, RPI_FIRMWARE_SET_POE_HAT_VAL = 0x00030050, /* Dispmanx TAGS */ RPI_FIRMWARE_FRAMEBUFFER_ALLOCATE = 0x00040001, RPI_FIRMWARE_FRAMEBUFFER_BLANK = 0x00040002, RPI_FIRMWARE_FRAMEBUFFER_GET_PHYSICAL_WIDTH_HEIGHT = 0x00040003, RPI_FIRMWARE_FRAMEBUFFER_GET_VIRTUAL_WIDTH_HEIGHT = 0x00040004, RPI_FIRMWARE_FRAMEBUFFER_GET_DEPTH = 0x00040005, RPI_FIRMWARE_FRAMEBUFFER_GET_PIXEL_ORDER = 0x00040006, RPI_FIRMWARE_FRAMEBUFFER_GET_ALPHA_MODE = 0x00040007, RPI_FIRMWARE_FRAMEBUFFER_GET_PITCH = 0x00040008, RPI_FIRMWARE_FRAMEBUFFER_GET_VIRTUAL_OFFSET = 0x00040009, RPI_FIRMWARE_FRAMEBUFFER_GET_OVERSCAN = 0x0004000a, RPI_FIRMWARE_FRAMEBUFFER_GET_PALETTE = 0x0004000b, RPI_FIRMWARE_FRAMEBUFFER_GET_TOUCHBUF = 0x0004000f, RPI_FIRMWARE_FRAMEBUFFER_GET_GPIOVIRTBUF = 0x00040010, RPI_FIRMWARE_FRAMEBUFFER_RELEASE = 0x00048001, RPI_FIRMWARE_FRAMEBUFFER_TEST_PHYSICAL_WIDTH_HEIGHT = 0x00044003, RPI_FIRMWARE_FRAMEBUFFER_TEST_VIRTUAL_WIDTH_HEIGHT = 0x00044004, RPI_FIRMWARE_FRAMEBUFFER_TEST_DEPTH = 0x00044005, RPI_FIRMWARE_FRAMEBUFFER_TEST_PIXEL_ORDER = 0x00044006, RPI_FIRMWARE_FRAMEBUFFER_TEST_ALPHA_MODE = 0x00044007, RPI_FIRMWARE_FRAMEBUFFER_TEST_VIRTUAL_OFFSET = 0x00044009, RPI_FIRMWARE_FRAMEBUFFER_TEST_OVERSCAN = 0x0004400a, RPI_FIRMWARE_FRAMEBUFFER_TEST_PALETTE = 0x0004400b, RPI_FIRMWARE_FRAMEBUFFER_TEST_VSYNC = 0x0004400e, RPI_FIRMWARE_FRAMEBUFFER_SET_PHYSICAL_WIDTH_HEIGHT = 0x00048003, RPI_FIRMWARE_FRAMEBUFFER_SET_VIRTUAL_WIDTH_HEIGHT = 0x00048004, RPI_FIRMWARE_FRAMEBUFFER_SET_DEPTH = 0x00048005, RPI_FIRMWARE_FRAMEBUFFER_SET_PIXEL_ORDER = 0x00048006, 
RPI_FIRMWARE_FRAMEBUFFER_SET_ALPHA_MODE = 0x00048007, RPI_FIRMWARE_FRAMEBUFFER_SET_VIRTUAL_OFFSET = 0x00048009, RPI_FIRMWARE_FRAMEBUFFER_SET_OVERSCAN = 0x0004800a, RPI_FIRMWARE_FRAMEBUFFER_SET_PALETTE = 0x0004800b, RPI_FIRMWARE_FRAMEBUFFER_SET_TOUCHBUF = 0x0004801f, RPI_FIRMWARE_FRAMEBUFFER_SET_GPIOVIRTBUF = 0x00048020, RPI_FIRMWARE_FRAMEBUFFER_SET_VSYNC = 0x0004800e, RPI_FIRMWARE_FRAMEBUFFER_SET_BACKLIGHT = 0x0004800f, RPI_FIRMWARE_VCHIQ_INIT = 0x00048010, RPI_FIRMWARE_GET_COMMAND_LINE = 0x00050001, RPI_FIRMWARE_GET_DMA_CHANNELS = 0x00060001, } use self::PropertyMailboxTagId::*; /// A property mailbox tag. #[repr(C, packed)] #[derive(Debug)] #[allow(safe_packed_borrows)] struct PropertyMailboxTag<T: Sized> { id: PropertyMailboxTagId, buf_size: u32, req_resp_size: u32, buf: T, } /// A request that contained a sequence of concatenated tags. The response /// overwrites the request. #[repr(C, packed)] #[derive(Debug)] #[allow(safe_packed_borrows)] struct PropertyMailboxRequest<T: Sized> { buf_size: u32, req_resp_code: PropertyMailboxStatus, buf: T, end_tag: PropertyMailboxTagId, } /// Request buffer address must be 16-byte aligned. #[repr(C, align(16))] #[derive(Debug)] struct Align16<T: Sized>(PropertyMailboxRequest<T>); /// Pack a sequence of concatenated tags into a request, and send the address /// to the mailbox. /// Returns `PropertyMailboxResult<typeof($tags)>`. macro_rules! 
send_request { ($tags: ident) => {{ let req = Align16(PropertyMailboxRequest { buf_size: mem::size_of_val(&$tags) as u32 + 12, req_resp_code: RPI_FIRMWARE_STATUS_REQUEST, buf: $tags, end_tag: RPI_FIRMWARE_PROPERTY_END, }); let start = &req as *const _ as u32; let end = start + req.0.buf_size; { // flush data cache around mailbox accesses let mut mbox = MAILBOX.lock(); asm::flush_dcache_range(start as usize, end as usize); mbox.write(MailboxChannel::Property, start); mbox.read(MailboxChannel::Property); asm::flush_dcache_range(start as usize, end as usize); } match req.0.req_resp_code { RPI_FIRMWARE_STATUS_SUCCESS => Ok(req.0.buf), other => Err(PropertyMailboxError(other as u32)), } }}; } /// Send a tag to mailbox. Will call `send_request!`. /// Returns `PropertyMailboxResult<typeof(buf)>`. macro_rules! send_one_tag { ($id: expr, [$($arg: expr),*]) => {{ let buf = [$($arg),*]; let tag = PropertyMailboxTag { id: $id, buf_size: mem::size_of_val(&buf) as u32, req_resp_size: 0, buf, }; Ok(send_request!(tag)?.buf) }}; } /// Allocates contiguous memory on the GPU. `size` and `align` are in bytes. /// Returns memory `handle`. pub fn mem_alloc(size: u32, align: u32, flags: u32) -> PropertyMailboxResult<u32> { let ret = send_one_tag!(RPI_FIRMWARE_LOCK_MEMORY, [size, align, flags])?; Ok(ret[0]) } /// Free the memory buffer of `handle`. status=0 is success. pub fn mem_free(handle: u32) -> PropertyMailboxResult<()> { let status = send_one_tag!(RPI_FIRMWARE_RELEASE_MEMORY, [handle])?; match status[0] { 0 => Ok(()), other => Err(PropertyMailboxError(other)), } } /// Lock buffer in place, and return a `bus_address`. Must be done before memory /// can be accessed. pub fn mem_lock(handle: u32) -> PropertyMailboxResult<u32> { let ret = send_one_tag!(RPI_FIRMWARE_LOCK_MEMORY, [handle])?; Ok(ret[0]) } /// Unlock buffer. It retains contents, but may move. Needs to be locked before /// next use. status=0 is success. 
pub fn mem_unlock(handle: u32) -> PropertyMailboxResult<()> { let status = send_one_tag!(RPI_FIRMWARE_UNLOCK_MEMORY, [handle])?; match status[0] { 0 => Ok(()), other => Err(PropertyMailboxError(other)), } } /// Get physical (display) width/height. Returns `(width, height)` in pixels. /// Note that the "physical (display)" size is the size of the allocated buffer /// in memory, not the resolution of the video signal sent to the display device. pub fn framebuffer_get_physical_size() -> PropertyMailboxResult<(u32, u32)> { let ret = send_one_tag!(RPI_FIRMWARE_FRAMEBUFFER_GET_PHYSICAL_WIDTH_HEIGHT, [0, 0])?; Ok((ret[0], ret[1])) } /// Get depth. Returns bits per pixel. pub fn framebuffer_get_depth() -> PropertyMailboxResult<u32> { let ret = send_one_tag!(RPI_FIRMWARE_FRAMEBUFFER_GET_DEPTH, [0])?; Ok(ret[0]) } /// Set virtual offset. Returns `(X, Y)` in pixel. /// The response may not be the same as the request so it must be checked. /// May be the previous offset or 0 for unsupported. pub fn framebuffer_set_virtual_offset(xoffset: u32, yoffset: u32) -> PropertyMailboxResult<(u32, u32)> { let ret = send_one_tag!( RPI_FIRMWARE_FRAMEBUFFER_SET_VIRTUAL_OFFSET, [xoffset, yoffset] )?; Ok((ret[0], ret[1])) } /// Allocate framebuffer on GPU and try to set width/height/depth. /// Returns `FramebufferInfo`. pub fn framebuffer_alloc(width: u32, height: u32, depth: u32) -> PropertyMailboxResult<FramebufferInfo> { #[repr(C, packed)] #[derive(Debug)] struct FramebufferAllocTag { set_physical_size: PropertyMailboxTag<[u32; 2]>, set_virtual_size: PropertyMailboxTag<[u32; 2]>, set_depth: PropertyMailboxTag<[u32; 1]>, set_virtual_offset: PropertyMailboxTag<[u32; 2]>, allocate: PropertyMailboxTag<[u32; 2]>, get_pitch: PropertyMailboxTag<[u32; 1]>, } let tags = FramebufferAllocTag { // Set physical (buffer) width/height. Returns `(width, height)` in pixel. 
set_physical_size: PropertyMailboxTag { id: RPI_FIRMWARE_FRAMEBUFFER_SET_PHYSICAL_WIDTH_HEIGHT, buf_size: 8, req_resp_size: 0, buf: [width, height], }, // Set virtual (buffer) width/height. Returns `(width, height)` in pixel. set_virtual_size: PropertyMailboxTag { id: RPI_FIRMWARE_FRAMEBUFFER_SET_VIRTUAL_WIDTH_HEIGHT, buf_size: 8, req_resp_size: 0, buf: [width, height], }, // Set depth; Returns bits per pixel. set_depth: PropertyMailboxTag { id: RPI_FIRMWARE_FRAMEBUFFER_SET_DEPTH, buf_size: 4, req_resp_size: 0, buf: [depth], }, // Set virtual offset. Returns `(X, Y)` in pixel. set_virtual_offset: PropertyMailboxTag { id: RPI_FIRMWARE_FRAMEBUFFER_SET_VIRTUAL_OFFSET, buf_size: 8, req_resp_size: 0, buf: [0, 0], }, // Allocate buffer. Returns `(base_address, size)` in bytes. allocate: PropertyMailboxTag { id: RPI_FIRMWARE_FRAMEBUFFER_ALLOCATE, buf_size: 8, req_resp_size: 0, buf: [0x1000, 0], }, // Get pitch. Return bytes per line. get_pitch: PropertyMailboxTag { id: RPI_FIRMWARE_FRAMEBUFFER_GET_PITCH, buf_size: 4, req_resp_size: 0, buf: [0], }, }; let ret = send_request!(tags)?; Ok(FramebufferInfo { xres: ret.set_physical_size.buf[0], yres: ret.set_physical_size.buf[1], xres_virtual: ret.set_virtual_size.buf[0], yres_virtual: ret.set_virtual_size.buf[1], xoffset: ret.set_virtual_offset.buf[0], yoffset: ret.set_virtual_offset.buf[1], depth: ret.set_depth.buf[0], pitch: ret.get_pitch.buf[0], bus_addr: ret.allocate.buf[0], screen_size: ret.allocate.buf[1], }) }
42.110169
105
0.625746
0a63f920fa2f8d8ff39ae686a2172ead7609ece4
5,526
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0

use crate::account_address::AccountAddress;
use anyhow::{Error, Result};
#[cfg(any(test, feature = "fuzzing"))]
use libra_crypto::{ed25519::Ed25519PrivateKey, PrivateKey, Uniform};
use libra_crypto::{ed25519::Ed25519PublicKey, traits::ValidCryptoMaterial, x25519};
#[cfg(any(test, feature = "fuzzing"))]
use proptest_derive::Arbitrary;
use serde::{Deserialize, Serialize};
use std::{convert::TryFrom, fmt};

/// After executing a special transaction indicates a change to the next epoch, consensus
/// and networking get the new list of validators, their keys, and their voting power. Consensus
/// has a public key to validate signed messages and networking has public signing and identity
/// keys for creating secure channels of communication between validators. The validators and
/// their public keys and voting power may or may not change between epochs.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))]
pub struct ValidatorInfo {
    // The validator's account address. AccountAddresses are initially derived from the account
    // auth pubkey; however, the auth key can be rotated, so one should not rely on this
    // initial property.
    account_address: AccountAddress,
    // This key can validate messages sent from this validator
    consensus_public_key: Ed25519PublicKey,
    // Voting power of this validator
    consensus_voting_power: u64,
    // This key can validate signed messages at the network layer
    network_signing_public_key: Ed25519PublicKey,
    // This key establishes the corresponding PrivateKey holder's eligibility to join the p2p
    // network
    network_identity_public_key: x25519::PublicKey,
}

impl fmt::Display for ValidatorInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> std::fmt::Result {
        write!(f, "account_address: {}", self.account_address.short_str())
    }
}

impl ValidatorInfo {
    pub fn new(
        account_address: AccountAddress,
        consensus_public_key: Ed25519PublicKey,
        consensus_voting_power: u64,
        network_signing_public_key: Ed25519PublicKey,
        network_identity_public_key: x25519::PublicKey,
    ) -> Self {
        ValidatorInfo {
            account_address,
            consensus_public_key,
            consensus_voting_power,
            network_signing_public_key,
            network_identity_public_key,
        }
    }

    /// Test-only constructor: takes the consensus identity but generates fresh
    /// random network signing/identity keys.
    #[cfg(any(test, feature = "fuzzing"))]
    pub fn new_with_test_network_keys(
        account_address: AccountAddress,
        consensus_public_key: Ed25519PublicKey,
        consensus_voting_power: u64,
    ) -> Self {
        let network_signing_public_key = Ed25519PrivateKey::generate_for_testing().public_key();
        let private_key = x25519::PrivateKey::generate_for_testing();
        let network_identity_public_key = private_key.public_key();

        Self {
            account_address,
            consensus_public_key,
            consensus_voting_power,
            network_signing_public_key,
            network_identity_public_key,
        }
    }

    /// Returns the validator's account address (initially derived from its
    /// auth pubkey; see the field note above — the auth key can be rotated)
    pub fn account_address(&self) -> &AccountAddress {
        &self.account_address
    }

    /// Returns the key for validating signed messages from this validator
    pub fn consensus_public_key(&self) -> &Ed25519PublicKey {
        &self.consensus_public_key
    }

    /// Returns the voting power for this validator
    pub fn consensus_voting_power(&self) -> u64 {
        self.consensus_voting_power
    }

    /// Returns the key for validating signed messages at the network layers
    pub fn network_signing_public_key(&self) -> &Ed25519PublicKey {
        &self.network_signing_public_key
    }

    /// Returns the key that establishes a validator's identity in the p2p network
    pub fn network_identity_public_key(&self) -> x25519::PublicKey {
        self.network_identity_public_key
    }
}

impl TryFrom<crate::proto::types::ValidatorInfo> for ValidatorInfo {
    type Error = Error;

    /// Deserializes from the protobuf representation; fails if any of the
    /// key byte strings is not valid crypto material.
    fn try_from(proto: crate::proto::types::ValidatorInfo) -> Result<Self> {
        let account_address = AccountAddress::try_from(proto.account_address)?;
        let consensus_public_key = Ed25519PublicKey::try_from(&proto.consensus_public_key[..])?;
        let consensus_voting_power = proto.consensus_voting_power;
        let network_signing_public_key =
            Ed25519PublicKey::try_from(&proto.network_signing_public_key[..])?;
        let network_identity_public_key =
            x25519::PublicKey::try_from(&proto.network_identity_public_key[..])?;
        Ok(Self::new(
            account_address,
            consensus_public_key,
            consensus_voting_power,
            network_signing_public_key,
            network_identity_public_key,
        ))
    }
}

impl From<ValidatorInfo> for crate::proto::types::ValidatorInfo {
    /// Serializes into the protobuf representation (keys as raw bytes).
    fn from(keys: ValidatorInfo) -> Self {
        Self {
            account_address: keys.account_address.to_vec(),
            consensus_public_key: keys.consensus_public_key.to_bytes().to_vec(),
            consensus_voting_power: keys.consensus_voting_power,
            network_signing_public_key: keys.network_signing_public_key.to_bytes().to_vec(),
            network_identity_public_key: keys.network_identity_public_key.to_bytes(),
        }
    }
}
40.043478
100
0.700869
1a93e203a3d3c5990383c9e3c392ad86e0e87efe
22,277
use crate::core::compiler::{BuildConfig, MessageFormat};
use crate::core::Workspace;
use crate::ops::{CompileFilter, CompileOptions, NewOptions, Packages, VersionControl};
use crate::sources::CRATES_IO_REGISTRY;
use crate::util::important_paths::find_root_manifest_for_wd;
use crate::util::{paths, toml::TomlProfile, validate_package_name};
use crate::util::{
    print_available_benches, print_available_binaries, print_available_examples,
    print_available_tests,
};
use crate::CargoResult;
use anyhow::bail;
use clap::{self, SubCommand};
use std::ffi::{OsStr, OsString};
use std::fs;
use std::path::PathBuf;

pub use crate::core::compiler::{CompileMode, ProfileKind};
pub use crate::{CliError, CliResult, Config};
pub use clap::{AppSettings, Arg, ArgMatches};

pub type App = clap::App<'static, 'static>;

/// Extension trait adding Cargo's shared command-line argument definitions to
/// a clap `App`, so subcommands compose them instead of redeclaring flags.
pub trait AppExt: Sized {
    // Single hook the builder methods below funnel through.
    fn _arg(self, arg: Arg<'static, 'static>) -> Self;

    fn arg_package_spec(
        self,
        package: &'static str,
        all: &'static str,
        exclude: &'static str,
    ) -> Self {
        self.arg_package_spec_simple(package)
            ._arg(opt("all", "Alias for --workspace (deprecated)"))
            ._arg(opt("workspace", all))
            ._arg(multi_opt("exclude", "SPEC", exclude))
    }

    fn arg_package_spec_simple(self, package: &'static str) -> Self {
        self._arg(multi_opt("package", "SPEC", package).short("p"))
    }

    fn arg_package(self, package: &'static str) -> Self {
        self._arg(opt("package", package).short("p").value_name("SPEC"))
    }

    fn arg_jobs(self) -> Self {
        self._arg(
            opt("jobs", "Number of parallel jobs, defaults to # of CPUs")
                .short("j")
                .value_name("N"),
        )
    }

    // Full set of target-selection flags (--lib/--bin/--example/--test/--bench/...).
    fn arg_targets_all(
        self,
        lib: &'static str,
        bin: &'static str,
        bins: &'static str,
        example: &'static str,
        examples: &'static str,
        test: &'static str,
        tests: &'static str,
        bench: &'static str,
        benches: &'static str,
        all: &'static str,
    ) -> Self {
        self.arg_targets_lib_bin(lib, bin, bins)
            ._arg(optional_multi_opt("example", "NAME", example))
            ._arg(opt("examples", examples))
            ._arg(optional_multi_opt("test", "NAME", test))
            ._arg(opt("tests", tests))
            ._arg(optional_multi_opt("bench", "NAME", bench))
            ._arg(opt("benches", benches))
            ._arg(opt("all-targets", all))
    }

    fn arg_targets_lib_bin(self, lib: &'static str, bin: &'static str, bins: &'static str) -> Self {
        self._arg(opt("lib", lib))
            ._arg(optional_multi_opt("bin", "NAME", bin))
            ._arg(opt("bins", bins))
    }

    fn arg_targets_bins_examples(
        self,
        bin: &'static str,
        bins: &'static str,
        example: &'static str,
        examples: &'static str,
    ) -> Self {
        self._arg(optional_multi_opt("bin", "NAME", bin))
            ._arg(opt("bins", bins))
            ._arg(optional_multi_opt("example", "NAME", example))
            ._arg(opt("examples", examples))
    }

    fn arg_targets_bin_example(self, bin: &'static str, example: &'static str) -> Self {
        self._arg(optional_multi_opt("bin", "NAME", bin))
            ._arg(optional_multi_opt("example", "NAME", example))
    }

    fn arg_features(self) -> Self {
        self._arg(multi_opt(
            "features",
            "FEATURES",
            "Space-separated list of features to activate",
        ))
        ._arg(opt("all-features", "Activate all available features"))
        ._arg(opt(
            "no-default-features",
            "Do not activate the `default` feature",
        ))
    }

    fn arg_release(self, release: &'static str) -> Self {
        self._arg(opt("release", release))
    }

    fn arg_profile(self, profile: &'static str) -> Self {
        self._arg(opt("profile", profile).value_name("PROFILE-NAME"))
    }

    fn arg_doc(self, doc: &'static str) -> Self {
        self._arg(opt("doc", doc))
    }

    fn arg_target_triple(self, target: &'static str) -> Self {
        self._arg(opt("target", target).value_name("TRIPLE"))
    }

    fn arg_target_dir(self) -> Self {
        self._arg(
            opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY"),
        )
    }

    fn arg_manifest_path(self) -> Self {
        self._arg(opt("manifest-path", "Path to Cargo.toml").value_name("PATH"))
    }

    fn arg_message_format(self) -> Self {
        self._arg(multi_opt("message-format", "FMT", "Error format"))
    }

    fn arg_build_plan(self) -> Self {
        self._arg(opt(
            "build-plan",
            "Output the build plan in JSON (unstable)",
        ))
    }

    // Flags shared by `cargo new` / `cargo init`.
    fn arg_new_opts(self) -> Self {
        self._arg(
            opt(
                "vcs",
                "Initialize a new repository for the given version \
                 control system (git, hg, pijul, or fossil) or do not \
                 initialize any version control at all (none), overriding \
                 a global configuration.",
            )
            .value_name("VCS")
            .possible_values(&["git", "hg", "pijul", "fossil", "none"]),
        )
        ._arg(opt("bin", "Use a binary (application) template [default]"))
        ._arg(opt("lib", "Use a library template"))
        ._arg(
            opt("edition", "Edition to set for the crate generated")
                .possible_values(&["2015", "2018"])
                .value_name("YEAR"),
        )
        ._arg(
            opt(
                "name",
                "Set the resulting package name, defaults to the directory name",
            )
            .value_name("NAME"),
        )
    }

    fn arg_index(self) -> Self {
        self._arg(opt("index", "Registry index URL to upload the package to").value_name("INDEX"))
            ._arg(
                opt("host", "DEPRECATED, renamed to '--index'")
                    .value_name("HOST")
                    .hidden(true),
            )
    }

    fn arg_dry_run(self, dry_run: &'static str) -> Self {
        self._arg(opt("dry-run", dry_run))
    }
}

impl AppExt for App {
    fn _arg(self, arg: Arg<'static, 'static>) -> Self {
        self.arg(arg)
    }
}

/// A plain `--name` flag with a help string.
pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {
    Arg::with_name(name).long(name).help(help)
}

/// A repeatable option whose value is optional (`--name` or `--name VALUE`).
pub fn optional_multi_opt(
    name: &'static str,
    value_name: &'static str,
    help: &'static str,
) -> Arg<'static, 'static> {
    opt(name, help)
        .value_name(value_name)
        .multiple(true)
        .min_values(0)
        .number_of_values(1)
}

/// A repeatable option that always takes exactly one value per occurrence.
pub fn multi_opt(
    name: &'static str,
    value_name: &'static str,
    help: &'static str,
) -> Arg<'static, 'static> {
    // Note that all `.multiple(true)` arguments in Cargo should specify
    // `.number_of_values(1)` as well, so that `--foo val1 val2` is
    // *not* parsed as `foo` with values ["val1", "val2"].
    // `number_of_values` should become the default in clap 3.
    opt(name, help)
        .value_name(value_name)
        .multiple(true)
        .number_of_values(1)
}

pub fn subcommand(name: &'static str) -> App {
    SubCommand::with_name(name).settings(&[
        AppSettings::UnifiedHelpMessage,
        AppSettings::DeriveDisplayOrder,
        AppSettings::DontCollapseArgsInUsage,
    ])
}

// Determines whether or not to gate `--profile` as unstable when resolving it.
pub enum ProfileChecking {
    Checked,
    Unchecked,
}

/// Extension trait over clap's `ArgMatches` that interprets Cargo's shared
/// flags into Cargo data structures (`CompileOptions`, `NewOptions`, …).
pub trait ArgMatchesExt {
    fn value_of_u32(&self, name: &str) -> CargoResult<Option<u32>> {
        let arg = match self._value_of(name) {
            None => None,
            Some(arg) => Some(arg.parse::<u32>().map_err(|_| {
                clap::Error::value_validation_auto(format!("could not parse `{}` as a number", arg))
            })?),
        };
        Ok(arg)
    }

    /// Returns value of the `name` command-line argument as an absolute path
    fn value_of_path(&self, name: &str, config: &Config) -> Option<PathBuf> {
        self._value_of(name).map(|path| config.cwd().join(path))
    }

    fn root_manifest(&self, config: &Config) -> CargoResult<PathBuf> {
        if let Some(path) = self.value_of_path("manifest-path", config) {
            // In general, we try to avoid normalizing paths in Cargo,
            // but in this particular case we need it to fix #3586.
            let path = paths::normalize_path(&path);
            if !path.ends_with("Cargo.toml") {
                anyhow::bail!("the manifest-path must be a path to a Cargo.toml file")
            }
            if fs::metadata(&path).is_err() {
                anyhow::bail!(
                    "manifest path `{}` does not exist",
                    self._value_of("manifest-path").unwrap()
                )
            }
            return Ok(path);
        }
        find_root_manifest_for_wd(config.cwd())
    }

    fn workspace<'a>(&self, config: &'a Config) -> CargoResult<Workspace<'a>> {
        let root = self.root_manifest(config)?;
        let mut ws = Workspace::new(&root, config)?;
        if config.cli_unstable().avoid_dev_deps {
            ws.set_require_optional_deps(false);
        }
        if ws.is_virtual() && !config.cli_unstable().package_features {
            // --all-features is actually honored. In general, workspaces and
            // feature flags are a bit of a mess right now.
            for flag in &["features", "no-default-features"] {
                if self._is_present(flag) {
                    bail!(
                        "--{} is not allowed in the root of a virtual workspace\n\
                         note: while this was previously accepted, it didn't actually do anything",
                        flag
                    );
                }
            }
        }
        Ok(ws)
    }

    fn jobs(&self) -> CargoResult<Option<u32>> {
        self.value_of_u32("jobs")
    }

    fn target(&self) -> Option<String> {
        self._value_of("target").map(|s| s.to_string())
    }

    // Resolves --profile / --release / --debug into a single ProfileKind,
    // gating `--profile` behind `-Z unstable-options` when `Checked`.
    fn get_profile_kind(
        &self,
        config: &Config,
        default: ProfileKind,
        profile_checking: ProfileChecking,
    ) -> CargoResult<ProfileKind> {
        let specified_profile = match self._value_of("profile") {
            None => None,
            Some("dev") => Some(ProfileKind::Dev),
            Some("release") => Some(ProfileKind::Release),
            Some(name) => {
                TomlProfile::validate_name(name, "profile name")?;
                Some(ProfileKind::Custom(name.to_string()))
            }
        };
        match profile_checking {
            ProfileChecking::Unchecked => {}
            ProfileChecking::Checked => {
                if specified_profile.is_some() && !config.cli_unstable().unstable_options {
                    anyhow::bail!("Usage of `--profile` requires `-Z unstable-options`")
                }
            }
        }
        if self._is_present("release") {
            if !config.cli_unstable().unstable_options {
                Ok(ProfileKind::Release)
            } else {
                match specified_profile {
                    None | Some(ProfileKind::Release) => Ok(ProfileKind::Release),
                    _ => anyhow::bail!("Conflicting usage of --profile and --release"),
                }
            }
        } else if self._is_present("debug") {
            if !config.cli_unstable().unstable_options {
                Ok(ProfileKind::Dev)
            } else {
                match specified_profile {
                    None | Some(ProfileKind::Dev) => Ok(ProfileKind::Dev),
                    _ => anyhow::bail!("Conflicting usage of --profile and --debug"),
                }
            }
        } else {
            Ok(specified_profile.unwrap_or(default))
        }
    }

    // Builds CompileOptions from all the shared compile flags; at most one
    // base `message-format` kind is allowed, but json modifier specifiers
    // (`json-render-diagnostics` etc.) may be combined with `json`.
    fn compile_options<'a>(
        &self,
        config: &'a Config,
        mode: CompileMode,
        workspace: Option<&Workspace<'a>>,
        profile_checking: ProfileChecking,
    ) -> CargoResult<CompileOptions<'a>> {
        let spec = Packages::from_flags(
            // TODO Integrate into 'workspace'
            self._is_present("workspace") || self._is_present("all"),
            self._values_of("exclude"),
            self._values_of("package"),
        )?;

        let mut message_format = None;
        let default_json = MessageFormat::Json {
            short: false,
            ansi: false,
            render_diagnostics: false,
        };
        for fmt in self._values_of("message-format") {
            for fmt in fmt.split(',') {
                let fmt = fmt.to_ascii_lowercase();
                match fmt.as_str() {
                    "json" => {
                        if message_format.is_some() {
                            bail!("cannot specify two kinds of `message-format` arguments");
                        }
                        message_format = Some(default_json);
                    }
                    "human" => {
                        if message_format.is_some() {
                            bail!("cannot specify two kinds of `message-format` arguments");
                        }
                        message_format = Some(MessageFormat::Human);
                    }
                    "short" => {
                        if message_format.is_some() {
                            bail!("cannot specify two kinds of `message-format` arguments");
                        }
                        message_format = Some(MessageFormat::Short);
                    }
                    "json-render-diagnostics" => {
                        if message_format.is_none() {
                            message_format = Some(default_json);
                        }
                        match &mut message_format {
                            Some(MessageFormat::Json {
                                render_diagnostics, ..
                            }) => *render_diagnostics = true,
                            _ => bail!("cannot specify two kinds of `message-format` arguments"),
                        }
                    }
                    "json-diagnostic-short" => {
                        if message_format.is_none() {
                            message_format = Some(default_json);
                        }
                        match &mut message_format {
                            Some(MessageFormat::Json { short, .. }) => *short = true,
                            _ => bail!("cannot specify two kinds of `message-format` arguments"),
                        }
                    }
                    "json-diagnostic-rendered-ansi" => {
                        if message_format.is_none() {
                            message_format = Some(default_json);
                        }
                        match &mut message_format {
                            Some(MessageFormat::Json { ansi, .. }) => *ansi = true,
                            _ => bail!("cannot specify two kinds of `message-format` arguments"),
                        }
                    }
                    s => bail!("invalid message format specifier: `{}`", s),
                }
            }
        }

        let mut build_config = BuildConfig::new(config, self.jobs()?, &self.target(), mode)?;
        build_config.message_format = message_format.unwrap_or(MessageFormat::Human);
        build_config.profile_kind =
            self.get_profile_kind(config, ProfileKind::Dev, profile_checking)?;
        build_config.build_plan = self._is_present("build-plan");
        if build_config.build_plan {
            config
                .cli_unstable()
                .fail_if_stable_opt("--build-plan", 5579)?;
        };

        let opts = CompileOptions {
            config,
            build_config,
            features: self._values_of("features"),
            all_features: self._is_present("all-features"),
            no_default_features: self._is_present("no-default-features"),
            spec,
            filter: CompileFilter::from_raw_arguments(
                self._is_present("lib"),
                self._values_of("bin"),
                self._is_present("bins"),
                self._values_of("test"),
                self._is_present("tests"),
                self._values_of("example"),
                self._is_present("examples"),
                self._values_of("bench"),
                self._is_present("benches"),
                self._is_present("all-targets"),
            ),
            target_rustdoc_args: None,
            target_rustc_args: None,
            local_rustdoc_args: None,
            rustdoc_document_private_items: false,
            export_dir: None,
        };

        if let Some(ws) = workspace {
            self.check_optional_opts(ws, &opts)?;
        }

        Ok(opts)
    }

    fn compile_options_for_single_package<'a>(
        &self,
        config: &'a Config,
        mode: CompileMode,
        workspace: Option<&Workspace<'a>>,
        profile_checking: ProfileChecking,
    ) -> CargoResult<CompileOptions<'a>> {
        let mut compile_opts = self.compile_options(config, mode, workspace, profile_checking)?;
        compile_opts.spec = Packages::Packages(self._values_of("package"));
        Ok(compile_opts)
    }

    fn new_options(&self, config: &Config) -> CargoResult<NewOptions> {
        // `--vcs` values are constrained by `possible_values` in
        // `arg_new_opts`, so anything else here is a clap bug.
        let vcs = self._value_of("vcs").map(|vcs| match vcs {
            "git" => VersionControl::Git,
            "hg" => VersionControl::Hg,
            "pijul" => VersionControl::Pijul,
            "fossil" => VersionControl::Fossil,
            "none" => VersionControl::NoVcs,
            vcs => panic!("Impossible vcs: {:?}", vcs),
        });
        NewOptions::new(
            vcs,
            self._is_present("bin"),
            self._is_present("lib"),
            self.value_of_path("path", config).unwrap(),
            self._value_of("name").map(|s| s.to_string()),
            self._value_of("edition").map(|s| s.to_string()),
            self.registry(config)?,
        )
    }

    fn registry(&self, config: &Config) -> CargoResult<Option<String>> {
        match self._value_of("registry") {
            Some(registry) => {
                validate_package_name(registry, "registry name", "")?;
                if registry == CRATES_IO_REGISTRY {
                    // If "crates.io" is specified, then we just need to return `None`,
                    // as that will cause cargo to use crates.io. This is required
                    // for the case where a default alternative registry is used
                    // but the user wants to switch back to crates.io for a single
                    // command.
                    Ok(None)
                } else {
                    Ok(Some(registry.to_string()))
                }
            }
            None => config.default_registry(),
        }
    }

    fn index(&self, config: &Config) -> CargoResult<Option<String>> {
        // TODO: deprecated. Remove once it has been decided `--host` can be removed
        // We may instead want to repurpose the host flag, as mentioned in issue
        // rust-lang/cargo#4208.
        let msg = "The flag '--host' is no longer valid.

Previous versions of Cargo accepted this flag, but it is being
deprecated. The flag is being renamed to 'index', as the flag
wants the location of the index. Please use '--index' instead.

This will soon become a hard error, so it's either recommended
to update to a fixed version or contact the upstream maintainer
about this warning.";

        let index = match self._value_of("host") {
            Some(host) => {
                config.shell().warn(&msg)?;
                Some(host.to_string())
            }
            None => self._value_of("index").map(|s| s.to_string()),
        };
        Ok(index)
    }

    // When a target-selection flag is given with no value (e.g. bare
    // `--example`), print the available choices instead of compiling.
    fn check_optional_opts(
        &self,
        workspace: &Workspace<'_>,
        compile_opts: &CompileOptions<'_>,
    ) -> CargoResult<()> {
        if self.is_present_with_zero_values("example") {
            print_available_examples(workspace, compile_opts)?;
        }

        if self.is_present_with_zero_values("bin") {
            print_available_binaries(workspace, compile_opts)?;
        }

        if self.is_present_with_zero_values("bench") {
            print_available_benches(workspace, compile_opts)?;
        }

        if self.is_present_with_zero_values("test") {
            print_available_tests(workspace, compile_opts)?;
        }

        Ok(())
    }

    fn is_present_with_zero_values(&self, name: &str) -> bool {
        self._is_present(name) && self._value_of(name).is_none()
    }

    // Primitive accessors implemented by the concrete matches type.
    fn _value_of(&self, name: &str) -> Option<&str>;

    fn _values_of(&self, name: &str) -> Vec<String>;

    fn _value_of_os(&self, name: &str) -> Option<&OsStr>;

    fn _values_of_os(&self, name: &str) -> Vec<OsString>;

    fn _is_present(&self, name: &str) -> bool;
}

impl<'a> ArgMatchesExt for ArgMatches<'a> {
    fn _value_of(&self, name: &str) -> Option<&str> {
        self.value_of(name)
    }

    fn _value_of_os(&self, name: &str) -> Option<&OsStr> {
        self.value_of_os(name)
    }

    fn _values_of(&self, name: &str) -> Vec<String> {
        self.values_of(name)
            .unwrap_or_default()
            .map(|s| s.to_string())
            .collect()
    }

    fn _values_of_os(&self, name: &str) -> Vec<OsString> {
        self.values_of_os(name)
            .unwrap_or_default()
            .map(|s| s.to_os_string())
            .collect()
    }

    fn _is_present(&self, name: &str) -> bool {
        self.is_present(name)
    }
}

pub fn values(args: &ArgMatches<'_>, name: &str) -> Vec<String> {
    args._values_of(name)
}

pub fn values_os(args: &ArgMatches<'_>, name: &str) -> Vec<OsString> {
    args._values_of_os(name)
}

#[derive(PartialEq, PartialOrd, Eq, Ord)]
pub enum
CommandInfo { BuiltIn { name: String, about: Option<String> }, External { name: String, path: PathBuf }, } impl CommandInfo { pub fn name(&self) -> &str { match self { CommandInfo::BuiltIn { name, .. } => name, CommandInfo::External { name, .. } => name, } } }
34.645412
100
0.532388
e2962fdaa93b08ca6f35f344cd7bfabaa7bbab2c
9,222
/*! Access to networking hardware. The `phy` module deals with the *network devices*. It provides a trait for transmitting and receiving frames, [Device](trait.Device.html) and implementations of it: * the [_loopback_](struct.Loopback.html), for zero dependency testing; * _middleware_ [Tracer](struct.Tracer.html) and [FaultInjector](struct.FaultInjector.html), to facilitate debugging; * _adapters_ [RawSocket](struct.RawSocket.html) and [TapInterface](struct.TapInterface.html), to transmit and receive frames on the host OS. # Examples An implementation of the [Device](trait.Device.html) trait for a simple hardware Ethernet controller could look as follows: ```rust use smoltcp::Result; use smoltcp::phy::{self, DeviceCapabilities, Device}; use smoltcp::time::Instant; struct StmPhy { rx_buffer: [u8; 1536], tx_buffer: [u8; 1536], } impl<'a> StmPhy { fn new() -> StmPhy { StmPhy { rx_buffer: [0; 1536], tx_buffer: [0; 1536], } } } impl<'a> phy::Device<'a> for StmPhy { type RxToken = StmPhyRxToken<'a>; type TxToken = StmPhyTxToken<'a>; fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)> { Some((StmPhyRxToken(&mut self.rx_buffer[..]), StmPhyTxToken(&mut self.tx_buffer[..]))) } fn transmit(&'a mut self) -> Option<Self::TxToken> { Some(StmPhyTxToken(&mut self.tx_buffer[..])) } fn capabilities(&self) -> DeviceCapabilities { let mut caps = DeviceCapabilities::default(); caps.max_transmission_unit = 1536; caps.max_burst_size = Some(1); caps } } struct StmPhyRxToken<'a>(&'a mut [u8]); impl<'a> phy::RxToken for StmPhyRxToken<'a> { fn consume<R, F>(mut self, _timestamp: Instant, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R> { // TODO: receive packet into buffer let result = f(&mut self.0); println!("rx called"); result } } struct StmPhyTxToken<'a>(&'a mut [u8]); impl<'a> phy::TxToken for StmPhyTxToken<'a> { fn consume<R, F>(self, _timestamp: Instant, len: usize, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R> { let result = f(&mut 
self.0[..len]); println!("tx called {}", len); // TODO: send packet out result } } ``` */ use crate::Result; use crate::time::Instant; #[cfg(all(any(feature = "phy-raw_socket", feature = "phy-tap_interface"), unix))] mod sys; mod tracer; mod fault_injector; mod fuzz_injector; mod pcap_writer; #[cfg(any(feature = "std", feature = "alloc"))] mod loopback; #[cfg(all(feature = "phy-raw_socket", unix))] mod raw_socket; #[cfg(all(feature = "phy-tap_interface", any(target_os = "linux", target_os = "android")))] mod tap_interface; #[cfg(all(any(feature = "phy-raw_socket", feature = "phy-tap_interface"), unix))] pub use self::sys::wait; pub use self::tracer::Tracer; pub use self::fault_injector::FaultInjector; pub use self::fuzz_injector::{Fuzzer, FuzzInjector}; pub use self::pcap_writer::{PcapLinkType, PcapMode, PcapSink, PcapWriter}; #[cfg(any(feature = "std", feature = "alloc"))] pub use self::loopback::Loopback; #[cfg(all(feature = "phy-raw_socket", unix))] pub use self::raw_socket::RawSocket; #[cfg(all(feature = "phy-tap_interface", any(target_os = "linux", target_os = "android")))] pub use self::tap_interface::TapInterface; #[cfg(feature = "ethernet")] /// A tracer device for Ethernet frames. pub type EthernetTracer<T> = Tracer<T, super::wire::EthernetFrame<&'static [u8]>>; /// A description of checksum behavior for a particular protocol. #[derive(Debug, Clone, Copy)] pub enum Checksum { /// Verify checksum when receiving and compute checksum when sending. Both, /// Verify checksum when receiving. Rx, /// Compute checksum before sending. Tx, /// Ignore checksum completely. None, } impl Default for Checksum { fn default() -> Checksum { Checksum::Both } } impl Checksum { /// Returns whether checksum should be verified when receiving. pub fn rx(&self) -> bool { match *self { Checksum::Both | Checksum::Rx => true, _ => false } } /// Returns whether checksum should be verified when sending. 
pub fn tx(&self) -> bool { match *self { Checksum::Both | Checksum::Tx => true, _ => false } } } /// A description of checksum behavior for every supported protocol. #[derive(Debug, Clone, Default)] #[non_exhaustive] pub struct ChecksumCapabilities { pub ipv4: Checksum, pub udp: Checksum, pub tcp: Checksum, #[cfg(feature = "proto-ipv4")] pub icmpv4: Checksum, #[cfg(feature = "proto-ipv6")] pub icmpv6: Checksum, } impl ChecksumCapabilities { /// Checksum behavior that results in not computing or verifying checksums /// for any of the supported protocols. pub fn ignored() -> Self { ChecksumCapabilities { ipv4: Checksum::None, udp: Checksum::None, tcp: Checksum::None, #[cfg(feature = "proto-ipv4")] icmpv4: Checksum::None, #[cfg(feature = "proto-ipv6")] icmpv6: Checksum::None, } } } /// A description of device capabilities. /// /// Higher-level protocols may achieve higher throughput or lower latency if they consider /// the bandwidth or packet size limitations. #[derive(Debug, Clone, Default)] #[non_exhaustive] pub struct DeviceCapabilities { /// Maximum transmission unit. /// /// The network device is unable to send or receive frames larger than the value returned /// by this function. /// /// For Ethernet devices, this is the maximum Ethernet frame size, including the Ethernet header (14 octets), but /// *not* including the Ethernet FCS (4 octets). Therefore, Ethernet MTU = IP MTU + 14. /// /// Note that in Linux and other OSes, "MTU" is the IP MTU, not the Ethernet MTU, even for Ethernet /// devices. This is a common source of confusion. /// /// Most common IP MTU is 1500. Minimum is 576 (for IPv4) or 1280 (for IPv6). Maximum is 9216 octets. pub max_transmission_unit: usize, /// Maximum burst size, in terms of MTU. /// /// The network device is unable to send or receive bursts large than the value returned /// by this function. /// /// If `None`, there is no fixed limit on burst size, e.g. if network buffers are /// dynamically allocated. 
pub max_burst_size: Option<usize>, /// Checksum behavior. /// /// If the network device is capable of verifying or computing checksums for some protocols, /// it can request that the stack not do so in software to improve performance. pub checksum: ChecksumCapabilities, } /// An interface for sending and receiving raw network frames. /// /// The interface is based on _tokens_, which are types that allow to receive/transmit a /// single packet. The `receive` and `transmit` functions only construct such tokens, the /// real sending/receiving operation are performed when the tokens are consumed. pub trait Device<'a> { type RxToken: RxToken + 'a; type TxToken: TxToken + 'a; /// Construct a token pair consisting of one receive token and one transmit token. /// /// The additional transmit token makes it possible to generate a reply packet based /// on the contents of the received packet. For example, this makes it possible to /// handle arbitrarily large ICMP echo ("ping") requests, where the all received bytes /// need to be sent back, without heap allocation. fn receive(&'a mut self) -> Option<(Self::RxToken, Self::TxToken)>; /// Construct a transmit token. fn transmit(&'a mut self) -> Option<Self::TxToken>; /// Get a description of device capabilities. fn capabilities(&self) -> DeviceCapabilities; } /// A token to receive a single network packet. pub trait RxToken { /// Consumes the token to receive a single network packet. /// /// This method receives a packet and then calls the given closure `f` with the raw /// packet bytes as argument. /// /// The timestamp must be a number of milliseconds, monotonically increasing since an /// arbitrary moment in time, such as system startup. fn consume<R, F>(self, timestamp: Instant, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>; } /// A token to transmit a single network packet. pub trait TxToken { /// Consumes the token to send a single network packet. 
/// /// This method constructs a transmit buffer of size `len` and calls the passed /// closure `f` with a mutable reference to that buffer. The closure should construct /// a valid network packet (e.g. an ethernet packet) in the buffer. When the closure /// returns, the transmit buffer is sent out. /// /// The timestamp must be a number of milliseconds, monotonically increasing since an /// arbitrary moment in time, such as system startup. fn consume<R, F>(self, timestamp: Instant, len: usize, f: F) -> Result<R> where F: FnOnce(&mut [u8]) -> Result<R>; }
33.413043
117
0.65463
9beeec135595090bce48cd101703a7fa036ae939
7,401
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. #[non_exhaustive] #[derive(std::clone::Clone, std::cmp::PartialEq)] pub struct InvokeEndpointOutput { /// <p>Includes the inference provided by the model.</p> /// <p>For information about the format of the response body, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/cdf-inference.html">Common Data /// Formats-Inference</a>.</p> pub body: std::option::Option<smithy_types::Blob>, /// <p>The MIME type of the inference returned in the response body.</p> pub content_type: std::option::Option<std::string::String>, /// <p>Identifies the production variant that was invoked.</p> pub invoked_production_variant: std::option::Option<std::string::String>, /// <p>Provides additional information in the response about the inference returned by a /// model hosted at an Amazon SageMaker endpoint. The information is an opaque value that is /// forwarded verbatim. You could use this value, for example, to return an ID received in /// the <code>CustomAttributes</code> header of a request or other metadata that a service /// endpoint was programmed to produce. The value must consist of no more than 1024 visible /// US-ASCII characters as specified in <a href="https://tools.ietf.org/html/rfc7230#section-3.2.6">Section 3.3.6. Field Value /// Components</a> of the Hypertext Transfer Protocol (HTTP/1.1). If the customer /// wants the custom attribute returned, the model must set the custom attribute to be /// included on the way back. </p> /// <p>The code in your model is responsible for setting or updating any custom attributes in /// the response. If your code does not set this value in the response, an empty value is /// returned. 
For example, if a custom attribute represents the trace ID, your model can /// prepend the custom attribute with <code>Trace ID:</code> in your post-processing /// function.</p> /// <p>This feature is currently supported in the AWS SDKs but not in the Amazon SageMaker Python /// SDK.</p> pub custom_attributes: std::option::Option<std::string::String>, } impl std::fmt::Debug for InvokeEndpointOutput { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut formatter = f.debug_struct("InvokeEndpointOutput"); formatter.field("body", &"*** Sensitive Data Redacted ***"); formatter.field("content_type", &self.content_type); formatter.field( "invoked_production_variant", &self.invoked_production_variant, ); formatter.field("custom_attributes", &"*** Sensitive Data Redacted ***"); formatter.finish() } } /// See [`InvokeEndpointOutput`](crate::output::InvokeEndpointOutput) pub mod invoke_endpoint_output { /// A builder for [`InvokeEndpointOutput`](crate::output::InvokeEndpointOutput) #[non_exhaustive] #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)] pub struct Builder { pub(crate) body: std::option::Option<smithy_types::Blob>, pub(crate) content_type: std::option::Option<std::string::String>, pub(crate) invoked_production_variant: std::option::Option<std::string::String>, pub(crate) custom_attributes: std::option::Option<std::string::String>, } impl Builder { /// <p>Includes the inference provided by the model.</p> /// <p>For information about the format of the response body, see <a href="https://docs.aws.amazon.com/sagemaker/latest/dg/cdf-inference.html">Common Data /// Formats-Inference</a>.</p> pub fn body(mut self, inp: smithy_types::Blob) -> Self { self.body = Some(inp); self } pub fn set_body(mut self, inp: std::option::Option<smithy_types::Blob>) -> Self { self.body = inp; self } /// <p>The MIME type of the inference returned in the response body.</p> pub fn content_type(mut self, inp: impl 
Into<std::string::String>) -> Self { self.content_type = Some(inp.into()); self } pub fn set_content_type(mut self, inp: std::option::Option<std::string::String>) -> Self { self.content_type = inp; self } /// <p>Identifies the production variant that was invoked.</p> pub fn invoked_production_variant(mut self, inp: impl Into<std::string::String>) -> Self { self.invoked_production_variant = Some(inp.into()); self } pub fn set_invoked_production_variant( mut self, inp: std::option::Option<std::string::String>, ) -> Self { self.invoked_production_variant = inp; self } /// <p>Provides additional information in the response about the inference returned by a /// model hosted at an Amazon SageMaker endpoint. The information is an opaque value that is /// forwarded verbatim. You could use this value, for example, to return an ID received in /// the <code>CustomAttributes</code> header of a request or other metadata that a service /// endpoint was programmed to produce. The value must consist of no more than 1024 visible /// US-ASCII characters as specified in <a href="https://tools.ietf.org/html/rfc7230#section-3.2.6">Section 3.3.6. Field Value /// Components</a> of the Hypertext Transfer Protocol (HTTP/1.1). If the customer /// wants the custom attribute returned, the model must set the custom attribute to be /// included on the way back. </p> /// <p>The code in your model is responsible for setting or updating any custom attributes in /// the response. If your code does not set this value in the response, an empty value is /// returned. 
For example, if a custom attribute represents the trace ID, your model can /// prepend the custom attribute with <code>Trace ID:</code> in your post-processing /// function.</p> /// <p>This feature is currently supported in the AWS SDKs but not in the Amazon SageMaker Python /// SDK.</p> pub fn custom_attributes(mut self, inp: impl Into<std::string::String>) -> Self { self.custom_attributes = Some(inp.into()); self } pub fn set_custom_attributes( mut self, inp: std::option::Option<std::string::String>, ) -> Self { self.custom_attributes = inp; self } /// Consumes the builder and constructs a [`InvokeEndpointOutput`](crate::output::InvokeEndpointOutput) pub fn build(self) -> crate::output::InvokeEndpointOutput { crate::output::InvokeEndpointOutput { body: self.body, content_type: self.content_type, invoked_production_variant: self.invoked_production_variant, custom_attributes: self.custom_attributes, } } } } impl InvokeEndpointOutput { /// Creates a new builder-style object to manufacture [`InvokeEndpointOutput`](crate::output::InvokeEndpointOutput) pub fn builder() -> crate::output::invoke_endpoint_output::Builder { crate::output::invoke_endpoint_output::Builder::default() } }
56.068182
162
0.65329
87df38d89927e4e80dad2387e8b6a015d212270a
16,467
//! Defines `FaerieBackend`. use crate::container; use crate::traps::{FaerieTrapManifest, FaerieTrapSink}; use anyhow::anyhow; use cranelift_codegen::binemit::{ Addend, CodeOffset, NullStackmapSink, NullTrapSink, Reloc, RelocSink, Stackmap, StackmapSink, }; use cranelift_codegen::isa::TargetIsa; use cranelift_codegen::{self, binemit, ir}; use cranelift_module::{ Backend, DataContext, DataDescription, DataId, FuncId, Init, Linkage, ModuleError, ModuleNamespace, ModuleResult, TrapSite, }; use faerie; use std::convert::TryInto; use std::fs::File; use target_lexicon::Triple; #[derive(Debug)] /// Setting to enable collection of traps. Setting this to `Enabled` in /// `FaerieBuilder` means that a `FaerieTrapManifest` will be present /// in the `FaerieProduct`. pub enum FaerieTrapCollection { /// `FaerieProduct::trap_manifest` will be `None` Disabled, /// `FaerieProduct::trap_manifest` will be `Some` Enabled, } /// A builder for `FaerieBackend`. pub struct FaerieBuilder { isa: Box<dyn TargetIsa>, name: String, collect_traps: FaerieTrapCollection, libcall_names: Box<dyn Fn(ir::LibCall) -> String>, } impl FaerieBuilder { /// Create a new `FaerieBuilder` using the given Cranelift target, that /// can be passed to /// [`Module::new`](cranelift_module::Module::new) /// /// Faerie output requires that TargetIsa have PIC (Position Independent Code) enabled. /// /// `collect_traps` setting determines whether trap information is collected in a /// `FaerieTrapManifest` available in the `FaerieProduct`. /// /// The `libcall_names` function provides a way to translate `cranelift_codegen`'s `ir::LibCall` /// enum to symbols. LibCalls are inserted in the IR as part of the legalization for certain /// floating point instructions, and for stack probes. If you don't know what to use for this /// argument, use `cranelift_module::default_libcall_names()`. 
pub fn new( isa: Box<dyn TargetIsa>, name: String, collect_traps: FaerieTrapCollection, libcall_names: Box<dyn Fn(ir::LibCall) -> String>, ) -> ModuleResult<Self> { if !isa.flags().is_pic() { return Err(ModuleError::Backend(anyhow!( "faerie requires TargetIsa be PIC" ))); } Ok(Self { isa, name, collect_traps, libcall_names, }) } } /// A `FaerieBackend` implements `Backend` and emits ".o" files using the `faerie` library. /// /// See the `FaerieBuilder` for a convenient way to construct `FaerieBackend` instances. pub struct FaerieBackend { isa: Box<dyn TargetIsa>, artifact: faerie::Artifact, trap_manifest: Option<FaerieTrapManifest>, libcall_names: Box<dyn Fn(ir::LibCall) -> String>, } pub struct FaerieCompiledFunction { code_length: u32, } impl FaerieCompiledFunction { pub fn code_length(&self) -> u32 { self.code_length } } pub struct FaerieCompiledData {} impl Backend for FaerieBackend { type Builder = FaerieBuilder; type CompiledFunction = FaerieCompiledFunction; type CompiledData = FaerieCompiledData; // There's no need to return individual artifacts; we're writing them into // the output file instead. type FinalizedFunction = (); type FinalizedData = (); /// The returned value here provides functions for emitting object files /// to memory and files. type Product = FaerieProduct; /// Create a new `FaerieBackend` using the given Cranelift target. 
fn new(builder: FaerieBuilder) -> Self { Self { artifact: faerie::Artifact::new(builder.isa.triple().clone(), builder.name), isa: builder.isa, trap_manifest: match builder.collect_traps { FaerieTrapCollection::Enabled => Some(FaerieTrapManifest::new()), FaerieTrapCollection::Disabled => None, }, libcall_names: builder.libcall_names, } } fn isa(&self) -> &dyn TargetIsa { &*self.isa } fn declare_function(&mut self, _id: FuncId, name: &str, linkage: Linkage) { self.artifact .declare(name, translate_function_linkage(linkage)) .expect("inconsistent declarations"); } fn declare_data( &mut self, _id: DataId, name: &str, linkage: Linkage, writable: bool, tls: bool, align: Option<u8>, ) { assert!(!tls, "Faerie doesn't yet support TLS"); self.artifact .declare(name, translate_data_linkage(linkage, writable, align)) .expect("inconsistent declarations"); } fn define_function( &mut self, _id: FuncId, name: &str, ctx: &cranelift_codegen::Context, namespace: &ModuleNamespace<Self>, total_size: u32, ) -> ModuleResult<(FaerieCompiledFunction, &[TrapSite])> { let mut code: Vec<u8> = vec![0; total_size as usize]; // TODO: Replace this with FaerieStackmapSink once it is implemented. let mut stackmap_sink = NullStackmapSink {}; let mut traps: &[TrapSite] = &[]; // Non-lexical lifetimes would obviate the braces here. 
{ let mut reloc_sink = FaerieRelocSink { triple: self.isa.triple().clone(), artifact: &mut self.artifact, name, namespace, libcall_names: &*self.libcall_names, }; if let Some(ref mut trap_manifest) = self.trap_manifest { let mut trap_sink = FaerieTrapSink::new(name, total_size); unsafe { ctx.emit_to_memory( &*self.isa, code.as_mut_ptr(), &mut reloc_sink, &mut trap_sink, &mut stackmap_sink, ) }; traps = trap_manifest.add_sink(trap_sink); } else { let mut trap_sink = NullTrapSink {}; unsafe { ctx.emit_to_memory( &*self.isa, code.as_mut_ptr(), &mut reloc_sink, &mut trap_sink, &mut stackmap_sink, ) }; } } // because `define` will take ownership of code, this is our last chance let code_length = code.len() as u32; self.artifact .define(name, code) .expect("inconsistent declaration"); Ok((FaerieCompiledFunction { code_length }, traps)) } fn define_function_bytes( &mut self, _id: FuncId, name: &str, bytes: &[u8], _namespace: &ModuleNamespace<Self>, traps: Vec<TrapSite>, ) -> ModuleResult<(FaerieCompiledFunction, &[TrapSite])> { let code_length: u32 = match bytes.len().try_into() { Ok(code_length) => code_length, _ => Err(ModuleError::FunctionTooLarge(name.to_string()))?, }; let mut ret_traps: &[TrapSite] = &[]; if let Some(ref mut trap_manifest) = self.trap_manifest { let trap_sink = FaerieTrapSink::new_with_sites(name, code_length, traps); ret_traps = trap_manifest.add_sink(trap_sink); } self.artifact .define(name, bytes.to_vec()) .expect("inconsistent declaration"); Ok((FaerieCompiledFunction { code_length }, ret_traps)) } fn define_data( &mut self, _id: DataId, name: &str, _writable: bool, tls: bool, _align: Option<u8>, data_ctx: &DataContext, namespace: &ModuleNamespace<Self>, ) -> ModuleResult<FaerieCompiledData> { assert!(!tls, "Faerie doesn't yet support TLS"); let &DataDescription { ref init, ref function_decls, ref data_decls, ref function_relocs, ref data_relocs, } = data_ctx.description(); for &(offset, id) in function_relocs { let to = 
&namespace.get_function_decl(&function_decls[id]).name; self.artifact .link(faerie::Link { from: name, to, at: u64::from(offset), }) .map_err(|e| ModuleError::Backend(e.into()))?; } for &(offset, id, addend) in data_relocs { debug_assert_eq!( addend, 0, "faerie doesn't support addends in data section relocations yet" ); let to = &namespace.get_data_decl(&data_decls[id]).name; self.artifact .link(faerie::Link { from: name, to, at: u64::from(offset), }) .map_err(|e| ModuleError::Backend(e.into()))?; } match *init { Init::Uninitialized => { panic!("data is not initialized yet"); } Init::Zeros { size } => { self.artifact .define_zero_init(name, size) .expect("inconsistent declaration"); } Init::Bytes { ref contents } => { self.artifact .define(name, contents.to_vec()) .expect("inconsistent declaration"); } } Ok(FaerieCompiledData {}) } fn write_data_funcaddr( &mut self, _data: &mut FaerieCompiledData, _offset: usize, _what: ir::FuncRef, ) { unimplemented!() } fn write_data_dataaddr( &mut self, _data: &mut FaerieCompiledData, _offset: usize, _what: ir::GlobalValue, _usize: binemit::Addend, ) { unimplemented!() } fn finalize_function( &mut self, _id: FuncId, _func: &FaerieCompiledFunction, _namespace: &ModuleNamespace<Self>, ) { // Nothing to do. } fn get_finalized_function(&self, _func: &FaerieCompiledFunction) { // Nothing to do. } fn finalize_data( &mut self, _id: DataId, _data: &FaerieCompiledData, _namespace: &ModuleNamespace<Self>, ) { // Nothing to do. } fn get_finalized_data(&self, _data: &FaerieCompiledData) { // Nothing to do. } fn publish(&mut self) { // Nothing to do. } fn finish(self, _namespace: &ModuleNamespace<Self>) -> FaerieProduct { FaerieProduct { artifact: self.artifact, trap_manifest: self.trap_manifest, } } } /// This is the output of `Module`'s /// [`finish`](../cranelift_module/struct.Module.html#method.finish) function. /// It provides functions for writing out the object file to memory or a file. 
#[derive(Debug)] pub struct FaerieProduct { /// Faerie artifact with all functions, data, and links from the module defined pub artifact: faerie::Artifact, /// Optional trap manifest. Contains `FaerieTrapManifest` when `FaerieBuilder.collect_traps` is /// set to `FaerieTrapCollection::Enabled`. pub trap_manifest: Option<FaerieTrapManifest>, } impl FaerieProduct { /// Return the name of the output file. This is the name passed into `new`. pub fn name(&self) -> &str { &self.artifact.name } /// Call `emit` on the faerie `Artifact`, producing bytes in memory. pub fn emit(&self) -> Result<Vec<u8>, faerie::ArtifactError> { self.artifact.emit() } /// Call `write` on the faerie `Artifact`, writing to a file. pub fn write(&self, sink: File) -> Result<(), faerie::ArtifactError> { self.artifact.write(sink) } } fn translate_function_linkage(linkage: Linkage) -> faerie::Decl { match linkage { Linkage::Import => faerie::Decl::function_import().into(), Linkage::Local => faerie::Decl::function().into(), Linkage::Preemptible => faerie::Decl::function().weak().into(), Linkage::Hidden => faerie::Decl::function().global().hidden().into(), Linkage::Export => faerie::Decl::function().global().into(), } } fn translate_data_linkage(linkage: Linkage, writable: bool, align: Option<u8>) -> faerie::Decl { let align = align.map(u64::from); match linkage { Linkage::Import => faerie::Decl::data_import().into(), Linkage::Local => faerie::Decl::data() .with_writable(writable) .with_align(align) .into(), Linkage::Preemptible => faerie::Decl::data() .weak() .with_writable(writable) .with_align(align) .into(), Linkage::Hidden => faerie::Decl::data() .global() .hidden() .with_writable(writable) .with_align(align) .into(), Linkage::Export => faerie::Decl::data() .global() .with_writable(writable) .with_align(align) .into(), } } struct FaerieRelocSink<'a> { triple: Triple, artifact: &'a mut faerie::Artifact, name: &'a str, namespace: &'a ModuleNamespace<'a, FaerieBackend>, libcall_names: &'a dyn 
Fn(ir::LibCall) -> String, } impl<'a> RelocSink for FaerieRelocSink<'a> { fn reloc_block(&mut self, _offset: CodeOffset, _reloc: Reloc, _block_offset: CodeOffset) { unimplemented!(); } fn reloc_external( &mut self, offset: CodeOffset, reloc: Reloc, name: &ir::ExternalName, addend: Addend, ) { let ref_name: String = match *name { ir::ExternalName::User { .. } => { if self.namespace.is_function(name) { self.namespace.get_function_decl(name).name.clone() } else { self.namespace.get_data_decl(name).name.clone() } } ir::ExternalName::LibCall(ref libcall) => { let sym = (self.libcall_names)(*libcall); self.artifact .declare(sym.clone(), faerie::Decl::function_import()) .expect("faerie declaration of libcall"); sym } _ => panic!("invalid ExternalName {}", name), }; let (raw_reloc, raw_addend) = container::raw_relocation(reloc, &self.triple); // TODO: Handle overflow. let final_addend = addend + raw_addend; let addend_i32 = final_addend as i32; debug_assert!(i64::from(addend_i32) == final_addend); self.artifact .link_with( faerie::Link { from: self.name, to: &ref_name, at: u64::from(offset), }, faerie::Reloc::Raw { reloc: raw_reloc, addend: addend_i32, }, ) .expect("faerie relocation error"); } fn reloc_jt(&mut self, _offset: CodeOffset, reloc: Reloc, _jt: ir::JumpTable) { match reloc { Reloc::X86PCRelRodata4 => { // Not necessary to record this unless we are going to split apart code and its // jumptbl/rodata. } _ => { panic!("Unhandled reloc"); } } } fn reloc_constant(&mut self, _offset: CodeOffset, reloc: Reloc, _jt: ir::ConstantOffset) { match reloc { Reloc::X86PCRelRodata4 => { // Not necessary to record this unless we are going to split apart code and its // jumptbl/rodata. } _ => { panic!("Unhandled reloc"); } } } } #[allow(dead_code)] struct FaerieStackmapSink<'a> { artifact: &'a mut faerie::Artifact, namespace: &'a ModuleNamespace<'a, FaerieBackend>, } /// Faerie is currently not used in SpiderMonkey. Methods are unimplemented. 
impl<'a> StackmapSink for FaerieStackmapSink<'a> { fn add_stackmap(&mut self, _: CodeOffset, _: Stackmap) { unimplemented!("faerie support for stackmaps"); } }
32.099415
100
0.562033
f43e670bff89cd39545b29192ffee3e30c4959d4
1,042
#![deny(rust_2018_idioms)] use migration_helpers::common_migrations::AddPrefixesMigration; use migration_helpers::{migrate, Result}; use std::process; /// We added new settings for running kubelet in standalone mode, and for using TLS auth. /// We also added new configuration files to apply these settings. They need to be removed /// when we downgrade. fn run() -> Result<()> { migrate(AddPrefixesMigration(vec![ "settings.kubernetes.bootstrap-token", "settings.kubernetes.authentication-mode", "settings.kubernetes.standalone-mode", "configuration-files.kubelet-bootstrap-kubeconfig", "configuration-files.kubelet-exec-start-conf", ])) } // Returning a Result from main makes it print a Debug representation of the error, but with Snafu // we have nice Display representations of the error, so we wrap "main" (run) and print any error. // https://github.com/shepmaster/snafu/issues/110 fn main() { if let Err(e) = run() { eprintln!("{}", e); process::exit(1); } }
35.931034
98
0.699616
e2fb723de74bfac1766c6b254e44f9b4b1c77626
9,139
use crate::{ create_error, support::{default_error_inheritance, export_and_insert}, StdError, StdResult, }; use laythe_core::{ hooks::{GcHooks, Hooks}, if_let_obj, managed::{Gc, GcObj, GcStr, Trace}, module::{Module, Package}, object::{LyNative, Native, NativeMetaBuilder, ObjectKind}, signature::{Arity, ParameterBuilder, ParameterKind}, to_obj_kind, val, value::{Value, VALUE_NIL}, Call, }; use std::io::Write; pub const ASSERT_ERROR_NAME: &str = "AssertError"; pub(crate) fn add_assert_funs( hooks: &GcHooks, module: Gc<Module>, package: Gc<Package>, ) -> StdResult<()> { let error = default_error_inheritance(hooks, package, ASSERT_ERROR_NAME)?; let error_val = val!(error); let str_name = hooks.manage_str("str"); export_and_insert(hooks, module, error.name(), error_val)?; export_and_insert( hooks, module, hooks.manage_str(ASSERT_META.name), val!(Assert::native(hooks, str_name, error_val)), )?; export_and_insert( hooks, module, hooks.manage_str(ASSERTEQ_META.name), val!(AssertEq::native(hooks, str_name, error_val)), )?; export_and_insert( hooks, module, hooks.manage_str(ASSERTNE_META.name), val!(AssertNe::native(hooks, str_name, error_val)), ) .map_err(StdError::from) } const ASSERT_META: NativeMetaBuilder = NativeMetaBuilder::fun("assert", Arity::Fixed(1)) .with_params(&[ParameterBuilder::new("value", ParameterKind::Bool)]); const ASSERTEQ_META: NativeMetaBuilder = NativeMetaBuilder::fun("assertEq", Arity::Fixed(2)) .with_params(&[ ParameterBuilder::new("actual", ParameterKind::Any), ParameterBuilder::new("expected", ParameterKind::Any), ]); const ASSERTNE_META: NativeMetaBuilder = NativeMetaBuilder::fun("assertNe", Arity::Fixed(2)) .with_params(&[ ParameterBuilder::new("actual", ParameterKind::Any), ParameterBuilder::new("unexpected", ParameterKind::Any), ]); fn to_str(hooks: &mut Hooks, value: Value) -> GcStr { hooks .get_method(value, hooks.manage_str("str")) .map(|method| hooks.call_method(value, method, &[])) .map(|string| { if let Call::Ok(ok) = string { 
if_let_obj!(ObjectKind::String(string) = (ok) { return string; }); } hooks.manage_str(format!("{:?}", string)) }) .expect("No str method") } #[derive(Debug)] /// A native method to assert that for a boolean true value pub struct Assert { /// reference to 'str' method_str: GcStr, error: Value, } impl Assert { /// Construct a new instance of the native assert function pub fn native(hooks: &GcHooks, method_str: GcStr, error: Value) -> GcObj<Native> { debug_assert!(error.is_obj_kind(laythe_core::object::ObjectKind::Class)); let native = Box::new(Self { method_str, error }) as Box<dyn LyNative>; hooks.manage_obj(Native::new(ASSERT_META.to_meta(hooks), native)) } } impl LyNative for Assert { fn call(&self, hooks: &mut Hooks, _this: Option<Value>, args: &[Value]) -> Call { if args[0].to_bool() { Call::Ok(VALUE_NIL) } else { create_error!( self.error, hooks, "Assertion failed expected true received false" ) } } } impl Trace for Assert { fn trace(&self) { self.method_str.trace(); } fn trace_debug(&self, stdout: &mut dyn Write) { self.method_str.trace_debug(stdout); } } #[derive(Debug)] pub struct AssertEq { method_str: GcStr, error: Value, } impl AssertEq { /// Construct a new instance of the native assertEq function pub fn native(hooks: &GcHooks, method_str: GcStr, error: Value) -> GcObj<Native> { debug_assert!(error.is_obj_kind(ObjectKind::Class)); let native = Box::new(Self { method_str, error }) as Box<dyn LyNative>; hooks.manage_obj(Native::new(ASSERTEQ_META.to_meta(hooks), native)) } } impl LyNative for AssertEq { fn call(&self, hooks: &mut Hooks, _this: Option<Value>, args: &[Value]) -> Call { if args[0] == args[1] { return Call::Ok(VALUE_NIL); } let arg0 = to_str(hooks, args[0]); let arg1 = to_str(hooks, args[1]); create_error!( self.error, hooks, format!("Assertion failed {} and {} are not equal.", arg0, arg1) ) } } impl Trace for AssertEq { fn trace(&self) { self.method_str.trace(); self.error.trace(); } fn trace_debug(&self, stdout: &mut dyn Write) { 
self.method_str.trace_debug(stdout); self.error.trace_debug(stdout); } } #[derive(Debug)] pub struct AssertNe { method_str: GcStr, error: Value, } impl AssertNe { /// Construct a new instance of the native assertNe function pub fn native(hooks: &GcHooks, method_str: GcStr, error: Value) -> GcObj<Native> { debug_assert!(error.is_obj_kind(ObjectKind::Class)); let native = Box::new(Self { method_str, error }) as Box<dyn LyNative>; hooks.manage_obj(Native::new(ASSERTNE_META.to_meta(hooks), native)) } } impl LyNative for AssertNe { fn call(&self, hooks: &mut Hooks, _this: Option<Value>, args: &[Value]) -> Call { if args[0] != args[1] { return Call::Ok(VALUE_NIL); } let arg0 = to_str(hooks, args[0]); let arg1 = to_str(hooks, args[1]); create_error!( self.error, hooks, format!("Assertion failed {} and {} are equal.", arg0, arg1) ) } } impl Trace for AssertNe { fn trace(&self) { self.method_str.trace(); self.error.trace(); } fn trace_debug(&self, stdout: &mut dyn Write) { self.method_str.trace_debug(stdout); self.error.trace_debug(stdout); } } #[cfg(test)] mod test { use super::*; use laythe_core::hooks::NoContext; #[cfg(test)] mod assert { use crate::support::test_error_class; use super::*; #[test] fn new() { let mut context = NoContext::default(); let hooks = Hooks::new(&mut context); let error = val!(test_error_class(&hooks.as_gc())); let assert = Assert::native(&hooks.as_gc(), hooks.manage_str("str".to_string()), error); assert_eq!(&*assert.meta().name, "assert"); assert_eq!(assert.meta().signature.arity, Arity::Fixed(1)); assert_eq!( assert.meta().signature.parameters[0].kind, ParameterKind::Bool ); } #[test] fn call() { let mut context = NoContext::default(); let mut hooks = Hooks::new(&mut context); let error = val!(test_error_class(&hooks.as_gc())); let assert = Assert::native(&hooks.as_gc(), hooks.manage_str("str".to_string()), error); let values = &[val!(true)]; let result = match assert.call(&mut hooks, None, values) { Call::Ok(res) => res, _ => panic!(), }; 
assert_eq!(result, VALUE_NIL); } } #[cfg(test)] mod assert_eq { use crate::support::test_error_class; use super::*; #[test] fn new() { let mut context = NoContext::default(); let hooks = GcHooks::new(&mut context); let error = val!(test_error_class(&hooks)); let assert_eq = AssertEq::native(&hooks, hooks.manage_str("str".to_string()), error); assert_eq!(&*assert_eq.meta().name, "assertEq"); assert_eq!(assert_eq.meta().signature.arity, Arity::Fixed(2)); assert_eq!( assert_eq.meta().signature.parameters[0].kind, ParameterKind::Any ); assert_eq!( assert_eq.meta().signature.parameters[1].kind, ParameterKind::Any ); } #[test] fn call() { let mut context = NoContext::default(); let mut hooks = Hooks::new(&mut context); let error = val!(test_error_class(&hooks.as_gc())); let assert_eq = AssertEq::native(&hooks.as_gc(), hooks.manage_str("str".to_string()), error); let values = &[val!(10.5), val!(10.5)]; let result = match assert_eq.call(&mut hooks, None, values) { Call::Ok(res) => res, _ => panic!(), }; assert_eq!(result, VALUE_NIL); } } #[cfg(test)] mod assert_ne { use crate::support::test_error_class; use super::*; #[test] fn new() { let mut context = NoContext::default(); let hooks = GcHooks::new(&mut context); let error = val!(test_error_class(&hooks)); let assert_eq = AssertNe::native(&hooks, hooks.manage_str("str".to_string()), error); assert_eq!(&*assert_eq.meta().name, "assertNe"); assert_eq!(assert_eq.meta().signature.arity, Arity::Fixed(2)); assert_eq!( assert_eq.meta().signature.parameters[0].kind, ParameterKind::Any ); assert_eq!( assert_eq.meta().signature.parameters[1].kind, ParameterKind::Any ); } #[test] fn call() { let mut context = NoContext::default(); let mut hooks = Hooks::new(&mut context); let error = val!(test_error_class(&hooks.as_gc())); let assert_ne = AssertNe::native(&hooks.as_gc(), hooks.manage_str("str".to_string()), error); let values = &[val!(10.5), VALUE_NIL]; let result = match assert_ne.call(&mut hooks, None, values) { Call::Ok(res) => 
res, _ => panic!(), }; assert_eq!(result, VALUE_NIL); } } }
25.889518
99
0.627312
8a8780878925ecb954a3dfe20b4be6e4ac7f040f
1,795
// Copyright Kani Contributors // SPDX-License-Identifier: Apache-2.0 OR MIT // Checks that `roundf32` does return: // * The nearest integer to the argument for some concrete cases. // * A value that is closer to one of the limits (zero, infinity or negative // infinity, based on the fractional part of the argument) in all cases. // * A value such that the difference between it and the argument is between // zero and 0.5. #![feature(core_intrinsics)] use std::intrinsics::roundf32; #[kani::proof] fn test_one() { let one = 1.0; let result = unsafe { roundf32(one) }; assert!(result == 1.0); } #[kani::proof] fn test_one_frac() { let one_frac = 1.9; let result = unsafe { roundf32(one_frac) }; assert!(result == 2.0); } #[kani::proof] fn test_conc() { let conc = -42.6; let result = unsafe { roundf32(conc) }; assert!(result == -43.0); } #[kani::proof] fn test_conc_sci() { let conc = 5.4e-2; let result = unsafe { roundf32(conc) }; assert!(result == 0.0); } #[kani::proof] fn test_towards_closer() { let x: f32 = kani::any(); kani::assume(!x.is_nan()); kani::assume(!x.is_infinite()); let result = unsafe { roundf32(x) }; let frac = x.fract().abs(); if x.is_sign_positive() { if frac >= 0.5 { assert!(result > x); } else { assert!(result <= x); } } else { if frac >= 0.5 { assert!(result < x); } else { assert!(result >= x); } } } #[kani::proof] fn test_diff_half_one() { let x: f32 = kani::any(); kani::assume(!x.is_nan()); kani::assume(!x.is_infinite()); let result = unsafe { roundf32(x) }; let diff = (x - result).abs(); assert!(diff <= 0.5); assert!(diff >= 0.0); }
24.589041
77
0.572702
8750a344e4e4a3de1257e768ef348750df99119d
563
/// Signals a PPU implementation exposes to the CPU: the NMI interrupt pin and
/// OAM-DMA transfer requests. (Names suggest an NES-style console; the exact
/// timing contract is defined by the implementors, not visible here.)
pub trait PPUCPUConnection {
    /// Returns whether the NMI pin is currently asserted.
    fn is_nmi_pin_set(&self) -> bool;

    /// Clears the NMI pin.
    fn clear_nmi_pin(&mut self);

    /// Returns whether an OAM-DMA transfer has been requested.
    fn is_dma_request(&self) -> bool;

    /// Clears a pending DMA request once it has been serviced.
    fn clear_dma_request(&mut self);

    /// Returns the DMA source address byte — presumably the high byte of the
    /// CPU page to copy from; confirm against the implementing PPU.
    fn dma_address(&mut self) -> u8;

    /// Delivers one byte of OAM data for slot `address` during a DMA transfer.
    fn send_oam_data(&mut self, address: u8, data: u8);
}

/// Hooks the APU uses to fetch sample bytes through the CPU. ("DMC" suggests
/// the NES delta-modulation channel — inferred from naming only.)
pub trait APUCPUConnection {
    /// Address the DMC reader wants fetched next, if a fetch is pending.
    fn request_dmc_reader_read(&self) -> Option<u16>;

    /// Supplies the byte fetched in response to a previous read request.
    fn submit_dmc_buffer_byte(&mut self, byte: u8);
}

/// A source of maskable interrupts (IRQ) toward the CPU.
pub trait CPUIrqProvider {
    /// Returns whether the IRQ line has changed since it was last checked.
    fn is_irq_change_requested(&self) -> bool;

    /// Current level of the IRQ pin.
    fn irq_pin_state(&self) -> bool;

    /// Acknowledges the change reported by `is_irq_change_requested`.
    fn clear_irq_request_pin(&mut self);
}
28.15
55
0.685613
bfec2f720099e83df171f8804ac4ce0ec356c7e5
3,491
// Copyright 2019-2022 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

//! Protocol-level primitives shared across the Filecoin implementation:
//! network-wide constants, the [`NetworkParams`] configuration trait, and the
//! submodules they rely on (addresses, clock, crypto, sectors, …).

#[macro_use]
extern crate lazy_static;

use address::Address;
use clock::ChainEpoch;
use num_bigint::BigInt;

pub mod actor;
pub mod address;
pub mod bigint;
pub mod clock;
pub mod commcid;
pub mod consensus;
pub mod crypto;
pub mod deal;
pub mod econ;
pub mod error;
pub mod math;
pub mod message;
pub mod piece;
pub mod randomness;
pub mod receipt;
pub mod reward;
pub mod sector;
pub mod smooth;
pub mod state;
pub mod sys;
pub mod version;

lazy_static! {
    /// Total Filecoin available to the network: the base supply scaled up by
    /// `FILECOIN_PRECISION`.
    pub static ref TOTAL_FILECOIN: BigInt = BigInt::from(TOTAL_FILECOIN_BASE) * FILECOIN_PRECISION;

    /// Zero address, kept around so it can be explicitly disallowed for
    /// verification: it is an edge case with Filecoin's BLS signature
    /// verification.
    pub static ref ZERO_ADDRESS: Address = "f3yaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaby2smx7a".parse().unwrap();
}

/// Codec for raw data.
pub const IPLD_RAW: u64 = 0x55;

/// Multihash code for the identity hash function.
pub const IDENTITY_HASH: u64 = 0x0;

/// The maximum supported CID size.
pub const MAX_CID_LEN: usize = 100;

/// Identifier for actors; covers both builtin and initialized actors.
pub type ActorID = u64;

/// Default bit width for the HAMT in the Filecoin protocol.
pub const HAMT_BIT_WIDTH: u32 = 5;

/// Total gas limit allowed per block. This is shared across networks.
pub const BLOCK_GAS_LIMIT: i64 = 10_000_000_000;

/// Total Filecoin supply, in whole tokens (unscaled).
pub const TOTAL_FILECOIN_BASE: i64 = 2_000_000_000;

// Epochs

/// Lookback height for retrieving ticket randomness.
pub const TICKET_RANDOMNESS_LOOKBACK: ChainEpoch = 1;

/// Epochs to look back for verifying PoSt proofs.
pub const WINNING_POST_SECTOR_SET_LOOKBACK: ChainEpoch = 10;

/// The expected number of block producers in each epoch.
pub const BLOCKS_PER_EPOCH: u64 = 5;

/// Ratio of integer values to token value (i.e. the scaling factor between
/// whole tokens and their smallest representable unit).
pub const FILECOIN_PRECISION: i64 = 1_000_000_000_000_000_000;

/// Allowable clock drift in validations.
pub const ALLOWABLE_CLOCK_DRIFT: u64 = 1;

/// Config trait which handles different network configurations.
pub trait NetworkParams {
    /// Total Filecoin available to the network.
    const TOTAL_FILECOIN: i64;

    /// Available rewards for mining.
    const MINING_REWARD_TOTAL: i64;

    /// Initial reward actor balance. This function is only called in genesis
    /// when setting up state.
    // NOTE(review): this multiplies by `Self::TOTAL_FILECOIN` rather than by
    // `FILECOIN_PRECISION`; if the intent is to scale whole tokens to base
    // units (as `from_fil` below does), the factor looks suspicious — confirm
    // the intended scaling before relying on this value.
    fn initial_reward_balance() -> BigInt {
        BigInt::from(Self::MINING_REWARD_TOTAL) * Self::TOTAL_FILECOIN
    }

    /// Convert an integer number of whole tokens into a `BigInt` in base
    /// units, using the token precision.
    fn from_fil(i: i64) -> BigInt {
        BigInt::from(i) * FILECOIN_PRECISION
    }
}

/// Params for the network. This is now carried forward into mainnet and is
/// static across networks.
// * This could be removed in the future if the new testnet is configured at
// * build time, but keeping it as-is makes a later transition to runtime
// * configuration easier.
pub struct DefaultNetworkParams;

impl NetworkParams for DefaultNetworkParams {
    const TOTAL_FILECOIN: i64 = TOTAL_FILECOIN_BASE;
    const MINING_REWARD_TOTAL: i64 = 1_400_000_000;
}

/// Method number indicator for calling actor methods.
pub type MethodNum = u64;

/// Base actor send method.
pub const METHOD_SEND: MethodNum = 0;

/// Base actor constructor method.
pub const METHOD_CONSTRUCTOR: MethodNum = 1;
30.893805
149
0.752506
1c6a0e3ce3113eaf07e843a968eab2fb72fc0e3e
6,065
/* * BSD 3-Clause License * * Copyright (c) 2021, Alex Katlein * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ use std::path::Path; use anyhow::Context; use crossterm::style::Stylize; use dircpy::copy_dir; use git2::Repository; use crate::fetch::embedded::is_git_repo; use crate::spec::TemplateSpec; use crate::utils::errors::ArchResult; use crate::utils::ToolConfig; mod embedded; mod installed; pub struct FetchOptions<'f, 't> { pub branch: Option<&'f str>, pub dirty: bool, pub local_git: bool, pub tool_config: &'t ToolConfig<'t>, } impl<'spec> TemplateSpec<'spec> { pub fn fetch(&self, into: &Path, options: FetchOptions) -> ArchResult<()> { let (local_repo, is_local) = match self { TemplateSpec::Local(local_path) => (is_git_repo(local_path, options.tool_config), true), _ => (false, false), }; println!("Template source: {}", self); if is_local && (!local_repo || options.dirty) { if let TemplateSpec::Local(local_path) = self { if options.tool_config.verbose { println!("{}", "Copying local directory".dim()); } copy_dir(local_path, into).context("Failed to copy local directory") } else { panic!() } } else if options.local_git { installed::fetch(self, into, &options).context("Failed to fetch using local Git") } else { embedded::fetch(self, into, &options).context("Failed to fetch using embedded Git") } } } pub fn copy_git_directory( parent_dir: &Path, target_dir: &Path, tool_config: &ToolConfig, ) -> ArchResult<()> { if embedded::is_git_repo(parent_dir, tool_config) { if tool_config.verbose { println!("{}", "Copying .git directory to target directory".dim()); } copy_dir(parent_dir.join(".git"), target_dir.join(".git"))?; remove_remotes(target_dir); } else if tool_config.verbose { println!( "{}", "Template not a Git repository. 
Not copying .git directory to target".dim() ); } Ok(()) } fn remove_remotes(dir: &Path) { match Repository::open(dir) { Ok(repo) => { repo.remotes() .map(|remotes| { remotes .iter() .flatten() .for_each(|remote| match repo.remote_delete(remote) { Ok(_) => (), Err(err) => { eprintln!( "{}", format!( "{:#}", anyhow::Error::from(err) .context(format!("Failed to remove remote {}", remote)) ) .red() ); } }) }) .unwrap_or_else(|err| { eprintln!( "{}", format!( "{:#}\n", anyhow::Error::from(err).context("Failed to retrieve remotes") ) .red() ); }); } Err(err) => { eprintln!( "{}", format!( "{:#}\n", anyhow::Error::from(err).context(format!( "Failed to open Git repository in {}", dir.display() )) ) .red() ); } }; } pub fn init_git_repository(dir: &Path, tool_config: &ToolConfig) -> ArchResult<()> { if tool_config.verbose { println!( "{}", "Initializing Git repository in target directory".dim() ); } match Repository::init(dir) { Ok(_) => (), Err(err) => { eprintln!( "{}", format!( "{:#}\n", anyhow::Error::from(err).context(format!( "Failed to initialize Git repository in {}", dir.display() )) ) .red() ); } } Ok(()) }
33.508287
100
0.509975
099344ca83998ab2e61bec86ee6f1b1958c0f5b1
215
use crate::*;

// Smoke test for the generated FFI bindings: construct a `std::FooPtr`, have
// `foo::Foo::create` populate it, then run the destructor. The extern
// functions themselves are declared elsewhere in this crate (`crate::*`).
#[test]
fn it_works() {
    let mut p = std::ptr::null_mut();
    // SAFETY: forwards raw pointers straight to the C++ side. This passes a
    // null second argument to the ctor and a possibly-null handle to the dtor —
    // NOTE(review): confirm the generated C++ signatures accept null here.
    unsafe {
        std_FooPtr_ctor(&mut p, std::ptr::null_mut());
        foo_Foo_create(&mut p);
        std_FooPtr_dtor(p);
    }
}
15.357143
54
0.539535
c1ce54bea394986b5570d5032f22425f4053e67c
16,029
//! FIXME: write short doc here use ra_db::{FileId, SourceDatabase}; use ra_syntax::{ algo::find_node_at_offset, ast::{self, DocCommentsOwner}, match_ast, AstNode, SyntaxNode, }; use crate::{ db::RootDatabase, display::ShortLabel, references::{classify_name_ref, NameKind::*}, FilePosition, NavigationTarget, RangeInfo, }; pub(crate) fn goto_definition( db: &RootDatabase, position: FilePosition, ) -> Option<RangeInfo<Vec<NavigationTarget>>> { let parse = db.parse(position.file_id); let syntax = parse.tree().syntax().clone(); if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, position.offset) { let navs = reference_definition(db, position.file_id, &name_ref).to_vec(); return Some(RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec())); } if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) { let navs = name_definition(db, position.file_id, &name)?; return Some(RangeInfo::new(name.syntax().text_range(), navs)); } None } #[derive(Debug)] pub(crate) enum ReferenceResult { Exact(NavigationTarget), Approximate(Vec<NavigationTarget>), } impl ReferenceResult { fn to_vec(self) -> Vec<NavigationTarget> { use self::ReferenceResult::*; match self { Exact(target) => vec![target], Approximate(vec) => vec, } } } pub(crate) fn reference_definition( db: &RootDatabase, file_id: FileId, name_ref: &ast::NameRef, ) -> ReferenceResult { use self::ReferenceResult::*; let name_kind = classify_name_ref(db, file_id, &name_ref).map(|d| d.kind); match name_kind { Some(Macro(mac)) => return Exact(NavigationTarget::from_macro_def(db, mac)), Some(Field(field)) => return Exact(NavigationTarget::from_field(db, field)), Some(AssocItem(assoc)) => return Exact(NavigationTarget::from_assoc_item(db, assoc)), Some(Def(def)) => match NavigationTarget::from_def(db, def) { Some(nav) => return Exact(nav), None => return Approximate(vec![]), }, Some(SelfType(ty)) => { if let Some((def_id, _)) = ty.as_adt() { return Exact(NavigationTarget::from_adt_def(db, 
def_id)); } } Some(Pat((_, pat))) => return Exact(NavigationTarget::from_pat(db, file_id, pat)), Some(SelfParam(par)) => return Exact(NavigationTarget::from_self_param(file_id, par)), Some(GenericParam(_)) => { // FIXME: go to the generic param def } None => {} }; // Fallback index based approach: let navs = crate::symbol_index::index_resolve(db, name_ref) .into_iter() .map(|s| NavigationTarget::from_symbol(db, s)) .collect(); Approximate(navs) } pub(crate) fn name_definition( db: &RootDatabase, file_id: FileId, name: &ast::Name, ) -> Option<Vec<NavigationTarget>> { let parent = name.syntax().parent()?; if let Some(module) = ast::Module::cast(parent.clone()) { if module.has_semi() { let src = hir::Source { file_id: file_id.into(), ast: module }; if let Some(child_module) = hir::Module::from_declaration(db, src) { let nav = NavigationTarget::from_module(db, child_module); return Some(vec![nav]); } } } if let Some(nav) = named_target(file_id, &parent) { return Some(vec![nav]); } None } fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> { match_ast! 
{ match node { ast::StructDef(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::EnumDef(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::EnumVariant(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::FnDef(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::TypeAliasDef(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::ConstDef(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::StaticDef(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::TraitDef(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::RecordFieldDef(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::Module(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), it.short_label(), )) }, ast::MacroCall(it) => { Some(NavigationTarget::from_named( file_id, &it, it.doc_comment_text(), None, )) }, _ => None, } } } #[cfg(test)] mod tests { use test_utils::covers; use crate::mock_analysis::analysis_and_position; fn check_goto(fixture: &str, expected: &str) { let (analysis, pos) = analysis_and_position(fixture); let mut navs = analysis.goto_definition(pos).unwrap().unwrap().info; assert_eq!(navs.len(), 1); let nav = navs.pop().unwrap(); nav.assert_match(expected); } #[test] fn goto_definition_works_in_items() { check_goto( " //- /lib.rs struct Foo; enum E { X(Foo<|>) } ", "Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", ); } #[test] fn goto_definition_resolves_correct_name() { check_goto( " //- /lib.rs use a::Foo; mod a; mod b; enum E { X(Foo<|>) } //- /a.rs struct Foo; 
//- /b.rs struct Foo; ", "Foo STRUCT_DEF FileId(2) [0; 11) [7; 10)", ); } #[test] fn goto_definition_works_for_module_declaration() { check_goto( " //- /lib.rs mod <|>foo; //- /foo.rs // empty ", "foo SOURCE_FILE FileId(2) [0; 10)", ); check_goto( " //- /lib.rs mod <|>foo; //- /foo/mod.rs // empty ", "foo SOURCE_FILE FileId(2) [0; 10)", ); } #[test] fn goto_definition_works_for_macros() { covers!(goto_definition_works_for_macros); check_goto( " //- /lib.rs macro_rules! foo { () => { {} }; } fn bar() { <|>foo!(); } ", "foo MACRO_CALL FileId(1) [0; 50) [13; 16)", ); } #[test] fn goto_definition_works_for_macros_from_other_crates() { covers!(goto_definition_works_for_macros); check_goto( " //- /lib.rs use foo::foo; fn bar() { <|>foo!(); } //- /foo/lib.rs #[macro_export] macro_rules! foo { () => { {} }; } ", "foo MACRO_CALL FileId(2) [0; 66) [29; 32)", ); } #[test] fn goto_definition_works_for_macros_in_use_tree() { check_goto( " //- /lib.rs use foo::foo<|>; //- /foo/lib.rs #[macro_export] macro_rules! 
foo { () => { {} }; } ", "foo MACRO_CALL FileId(2) [0; 66) [29; 32)", ); } #[test] fn goto_definition_works_for_methods() { covers!(goto_definition_works_for_methods); check_goto( " //- /lib.rs struct Foo; impl Foo { fn frobnicate(&self) { } } fn bar(foo: &Foo) { foo.frobnicate<|>(); } ", "frobnicate FN_DEF FileId(1) [27; 52) [30; 40)", ); } #[test] fn goto_definition_works_for_fields() { covers!(goto_definition_works_for_fields); check_goto( " //- /lib.rs struct Foo { spam: u32, } fn bar(foo: &Foo) { foo.spam<|>; } ", "spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)", ); } #[test] fn goto_definition_works_for_record_fields() { covers!(goto_definition_works_for_record_fields); check_goto( " //- /lib.rs struct Foo { spam: u32, } fn bar() -> Foo { Foo { spam<|>: 0, } } ", "spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)", ); } #[test] fn goto_definition_works_for_ufcs_inherent_methods() { check_goto( " //- /lib.rs struct Foo; impl Foo { fn frobnicate() { } } fn bar(foo: &Foo) { Foo::frobnicate<|>(); } ", "frobnicate FN_DEF FileId(1) [27; 47) [30; 40)", ); } #[test] fn goto_definition_works_for_ufcs_trait_methods_through_traits() { check_goto( " //- /lib.rs trait Foo { fn frobnicate(); } fn bar() { Foo::frobnicate<|>(); } ", "frobnicate FN_DEF FileId(1) [16; 32) [19; 29)", ); } #[test] fn goto_definition_works_for_ufcs_trait_methods_through_self() { check_goto( " //- /lib.rs struct Foo; trait Trait { fn frobnicate(); } impl Trait for Foo {} fn bar() { Foo::frobnicate<|>(); } ", "frobnicate FN_DEF FileId(1) [30; 46) [33; 43)", ); } #[test] fn goto_definition_on_self() { check_goto( " //- /lib.rs struct Foo; impl Foo { pub fn new() -> Self { Self<|> {} } } ", "Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", ); check_goto( " //- /lib.rs struct Foo; impl Foo { pub fn new() -> Self<|> { Self {} } } ", "Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", ); check_goto( " //- /lib.rs enum Foo { A } impl Foo { pub fn new() -> Self<|> { Foo::A } } ", "Foo ENUM_DEF FileId(1) [0; 14) [5; 
8)", ); check_goto( " //- /lib.rs enum Foo { A } impl Foo { pub fn thing(a: &Self<|>) { } } ", "Foo ENUM_DEF FileId(1) [0; 14) [5; 8)", ); } #[test] fn goto_definition_on_self_in_trait_impl() { check_goto( " //- /lib.rs struct Foo; trait Make { fn new() -> Self; } impl Make for Foo { fn new() -> Self { Self<|> {} } } ", "Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", ); check_goto( " //- /lib.rs struct Foo; trait Make { fn new() -> Self; } impl Make for Foo { fn new() -> Self<|> { Self{} } } ", "Foo STRUCT_DEF FileId(1) [0; 11) [7; 10)", ); } #[test] fn goto_definition_works_when_used_on_definition_name_itself() { check_goto( " //- /lib.rs struct Foo<|> { value: u32 } ", "Foo STRUCT_DEF FileId(1) [0; 25) [7; 10)", ); check_goto( r#" //- /lib.rs struct Foo { field<|>: string, } "#, "field RECORD_FIELD_DEF FileId(1) [17; 30) [17; 22)", ); check_goto( " //- /lib.rs fn foo_test<|>() { } ", "foo_test FN_DEF FileId(1) [0; 17) [3; 11)", ); check_goto( " //- /lib.rs enum Foo<|> { Variant, } ", "Foo ENUM_DEF FileId(1) [0; 25) [5; 8)", ); check_goto( " //- /lib.rs enum Foo { Variant1, Variant2<|>, Variant3, } ", "Variant2 ENUM_VARIANT FileId(1) [29; 37) [29; 37)", ); check_goto( r#" //- /lib.rs static inner<|>: &str = ""; "#, "inner STATIC_DEF FileId(1) [0; 24) [7; 12)", ); check_goto( r#" //- /lib.rs const inner<|>: &str = ""; "#, "inner CONST_DEF FileId(1) [0; 23) [6; 11)", ); check_goto( r#" //- /lib.rs type Thing<|> = Option<()>; "#, "Thing TYPE_ALIAS_DEF FileId(1) [0; 24) [5; 10)", ); check_goto( r#" //- /lib.rs trait Foo<|> { } "#, "Foo TRAIT_DEF FileId(1) [0; 13) [6; 9)", ); check_goto( r#" //- /lib.rs mod bar<|> { } "#, "bar MODULE FileId(1) [0; 11) [4; 7)", ); } }
25.402536
94
0.402832
1c95fe0f8a9cf1d8b404e8a0bdcb985b4bdae6eb
1,487
// This file is part of security-keys-rust. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/security-keys-rust/master/COPYRIGHT. No part of security-keys-rust, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2021 The developers of security-keys-rust. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/security-keys-rust/master/COPYRIGHT. /// Usage. #[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] #[derive(Deserialize, Serialize)] #[serde(deny_unknown_fields)] pub struct Usage { usage_page: Option<UsagePage>, usage_identifier: UsageIdentifier, } impl TryClone for Usage { #[inline(always)] fn try_clone(&self) -> Result<Self, TryReserveError> { Ok(*self) } } impl Usage { #[allow(missing_docs)] #[inline(always)] pub const fn usage_page(&self) -> Option<UsagePage> { self.usage_page } #[allow(missing_docs)] #[inline(always)] pub const fn usage_identifier(&self) -> UsageIdentifier { self.usage_identifier } #[inline(always)] fn parse(data: u32, was_32_bits_wide: bool) -> Self { Self { usage_page: if was_32_bits_wide { Some((data >> 16) as u16) } else { None }, usage_identifier: data as u16, } } }
24.783333
409
0.721587
1ca045bd4afc2243b0a157de458f75d17b91a4ca
4,228
use super::ReadBuf; use std::io; use std::ops::DerefMut; use std::pin::Pin; use std::task::{Context, Poll}; /// Reads bytes from a source. /// /// This trait is analogous to the [`std::io::Read`] trait, but integrates with /// the asynchronous task system. In particular, the [`poll_read`] method, /// unlike [`Read::read`], will automatically queue the current task for wakeup /// and return if data is not yet available, rather than blocking the calling /// thread. /// /// Specifically, this means that the `poll_read` function will return one of /// the following: /// /// * `Poll::Ready(Ok(()))` means that data was immediately read and placed into /// the output buffer. If no data was read (`buf.filled().is_empty()`) it /// implies that EOF has been reached. /// /// * `Poll::Pending` means that no data was read into the buffer /// provided. The I/O object is not currently readable but may become readable /// in the future. Most importantly, **the current future's task is scheduled /// to get unparked when the object is readable**. This means that like /// `Future::poll` you'll receive a notification when the I/O object is /// readable again. /// /// * `Poll::Ready(Err(e))` for other errors are standard I/O errors coming from the /// underlying object. /// /// This trait importantly means that the `read` method only works in the /// context of a future's task. The object may panic if used outside of a task. /// /// Utilities for working with `AsyncRead` values are provided by /// [`AsyncReadExt`]. /// /// [`poll_read`]: AsyncRead::poll_read /// [`std::io::Read`]: std::io::Read /// [`Read::read`]: std::io::Read::read /// [`AsyncReadExt`]: crate::io::AsyncReadExt pub trait AsyncRead { /// Attempts to read from the `AsyncRead` into `buf`. /// /// On success, returns `Poll::Ready(Ok(()))` and fills `buf` with data /// read. If no data was read (`buf.filled().is_empty()`) it implies that /// EOF has been reached. 
/// /// If no data is available for reading, the method returns `Poll::Pending` /// and arranges for the current task (via `cx.waker()`) to receive a /// notification when the object becomes readable or is closed. fn poll_read( self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<io::Result<()>>; } macro_rules! deref_async_read { () => { fn poll_read( mut self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<io::Result<()>> { Pin::new(&mut **self).poll_read(cx, buf) } }; } impl<T: ?Sized + AsyncRead + Unpin> AsyncRead for Box<T> { deref_async_read!(); } impl<T: ?Sized + AsyncRead + Unpin> AsyncRead for &mut T { deref_async_read!(); } impl<P> AsyncRead for Pin<P> where P: DerefMut + Unpin, P::Target: AsyncRead, { fn poll_read( self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<io::Result<()>> { self.get_mut().as_mut().poll_read(cx, buf) } } impl AsyncRead for &[u8] { fn poll_read( mut self: Pin<&mut Self>, _cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<io::Result<()>> { let amt = std::cmp::min(self.len(), buf.remaining()); let (a, b) = self.split_at(amt); buf.put_slice(a); *self = b; Poll::Ready(Ok(())) } } impl<T: AsRef<[u8]> + Unpin> AsyncRead for io::Cursor<T> { fn poll_read( mut self: Pin<&mut Self>, _cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<io::Result<()>> { let pos = self.position(); let slice: &[u8] = (*self).get_ref().as_ref(); // The position could technically be out of bounds, so don't panic... if pos > slice.len() as u64 { return Poll::Ready(Ok(())); } let start = pos as usize; let amt = std::cmp::min(slice.len() - start, buf.remaining()); // Add won't overflow because of pos check above. let end = start + amt; buf.put_slice(&slice[start..end]); self.set_position(end as u64); Poll::Ready(Ok(())) } }
32.274809
84
0.593425
91a630692dfac1bf71e70dc6c5754aa0753c3d6d
4,646
use crate::datatypes::TimeUnit; use crate::{datatypes::DataType, types::NativeType}; use parquet2::schema::types::{ LogicalType, ParquetType, TimeUnit as ParquetTimeUnit, TimestampType, }; use parquet2::statistics::PrimitiveStatistics as ParquetPrimitiveStatistics; use parquet2::types::NativeType as ParquetNativeType; use std::any::Any; use super::Statistics; use crate::error::Result; /// Arrow-deserialized parquet Statistics of a primitive type #[derive(Debug, Clone, PartialEq)] pub struct PrimitiveStatistics<T: NativeType> { /// the data type pub data_type: DataType, /// number of nulls pub null_count: Option<i64>, /// number of dictinct values pub distinct_count: Option<i64>, /// Minimum pub min_value: Option<T>, /// Maximum pub max_value: Option<T>, } impl<T: NativeType> Statistics for PrimitiveStatistics<T> { fn data_type(&self) -> &DataType { &self.data_type } fn as_any(&self) -> &dyn Any { self } fn null_count(&self) -> Option<i64> { self.null_count } } impl<T, R> From<(&ParquetPrimitiveStatistics<R>, DataType)> for PrimitiveStatistics<T> where T: NativeType, R: ParquetNativeType, R: num_traits::AsPrimitive<T>, { fn from((stats, data_type): (&ParquetPrimitiveStatistics<R>, DataType)) -> Self { Self { data_type, null_count: stats.null_count, distinct_count: stats.distinct_count, min_value: stats.min_value.map(|x| x.as_()), max_value: stats.max_value.map(|x| x.as_()), } } } pub(super) fn statistics_from_i32( stats: &ParquetPrimitiveStatistics<i32>, data_type: DataType, ) -> Result<Box<dyn Statistics>> { use DataType::*; Ok(match data_type { UInt8 => { Box::new(PrimitiveStatistics::<u8>::from((stats, data_type))) as Box<dyn Statistics> } UInt16 => Box::new(PrimitiveStatistics::<u16>::from((stats, data_type))), UInt32 => Box::new(PrimitiveStatistics::<u32>::from((stats, data_type))), Int8 => Box::new(PrimitiveStatistics::<i8>::from((stats, data_type))), Int16 => Box::new(PrimitiveStatistics::<i16>::from((stats, data_type))), Decimal(_, _) => 
Box::new(PrimitiveStatistics::<i128>::from((stats, data_type))), _ => Box::new(PrimitiveStatistics::<i32>::from((stats, data_type))), }) } fn timestamp(type_: &ParquetType, time_unit: TimeUnit, x: i64) -> i64 { let logical_type = if let ParquetType::PrimitiveType { logical_type, .. } = type_ { logical_type } else { unreachable!() }; let unit = if let Some(LogicalType::TIMESTAMP(TimestampType { unit, .. })) = logical_type { unit } else { return x; }; match (unit, time_unit) { (ParquetTimeUnit::MILLIS(_), TimeUnit::Second) => x / 1_000, (ParquetTimeUnit::MICROS(_), TimeUnit::Second) => x / 1_000_000, (ParquetTimeUnit::NANOS(_), TimeUnit::Second) => x * 1_000_000_000, (ParquetTimeUnit::MILLIS(_), TimeUnit::Millisecond) => x, (ParquetTimeUnit::MICROS(_), TimeUnit::Millisecond) => x / 1_000, (ParquetTimeUnit::NANOS(_), TimeUnit::Millisecond) => x / 1_000_000, (ParquetTimeUnit::MILLIS(_), TimeUnit::Microsecond) => x * 1_000, (ParquetTimeUnit::MICROS(_), TimeUnit::Microsecond) => x, (ParquetTimeUnit::NANOS(_), TimeUnit::Microsecond) => x / 1_000, (ParquetTimeUnit::MILLIS(_), TimeUnit::Nanosecond) => x * 1_000_000, (ParquetTimeUnit::MICROS(_), TimeUnit::Nanosecond) => x * 1_000, (ParquetTimeUnit::NANOS(_), TimeUnit::Nanosecond) => x, } } pub(super) fn statistics_from_i64( stats: &ParquetPrimitiveStatistics<i64>, data_type: DataType, ) -> Result<Box<dyn Statistics>> { use DataType::*; Ok(match data_type { UInt64 => { Box::new(PrimitiveStatistics::<u64>::from((stats, data_type))) as Box<dyn Statistics> } Timestamp(time_unit, None) => Box::new(PrimitiveStatistics::<i64> { data_type, null_count: stats.null_count, distinct_count: stats.distinct_count, min_value: stats .min_value .map(|x| timestamp(stats.descriptor.type_(), time_unit, x)), max_value: stats .max_value .map(|x| timestamp(stats.descriptor.type_(), time_unit, x)), }), Decimal(_, _) => Box::new(PrimitiveStatistics::<i128>::from((stats, data_type))), _ => Box::new(PrimitiveStatistics::<i64>::from((stats, data_type))), 
}) }
34.932331
97
0.62204
d94dd24af3edc74277110793b5c7a9e6449ee334
87,385
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub use self::AnnNode::*; use syntax::abi::Abi; use syntax::ast; use syntax::codemap::{CodeMap, Spanned}; use syntax::parse::ParseSess; use syntax::parse::lexer::comments; use syntax::print::pp::{self, Breaks}; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; use syntax::print::pprust::PrintState; use syntax::ptr::P; use syntax::symbol::keywords; use syntax::util::parser::{self, AssocOp, Fixity}; use syntax_pos::{self, BytePos}; use hir; use hir::{PatKind, RegionTyParamBound, TraitTyParamBound, TraitBoundModifier, RangeEnd}; use std::cell::Cell; use std::io::{self, Write, Read}; use std::iter::Peekable; use std::vec; pub enum AnnNode<'a> { NodeName(&'a ast::Name), NodeBlock(&'a hir::Block), NodeItem(&'a hir::Item), NodeSubItem(ast::NodeId), NodeExpr(&'a hir::Expr), NodePat(&'a hir::Pat), } pub enum Nested { Item(hir::ItemId), TraitItem(hir::TraitItemId), ImplItem(hir::ImplItemId), Body(hir::BodyId), BodyArgPat(hir::BodyId, usize) } pub trait PpAnn { fn nested(&self, _state: &mut State, _nested: Nested) -> io::Result<()> { Ok(()) } fn pre(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) } fn post(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) } } pub struct NoAnn; impl PpAnn for NoAnn {} pub const NO_ANN: &'static PpAnn = &NoAnn; impl PpAnn for hir::Crate { fn nested(&self, state: &mut State, nested: Nested) -> io::Result<()> { match nested { Nested::Item(id) => state.print_item(self.item(id.id)), Nested::TraitItem(id) => 
state.print_trait_item(self.trait_item(id)), Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)), Nested::Body(id) => state.print_expr(&self.body(id).value), Nested::BodyArgPat(id, i) => state.print_pat(&self.body(id).arguments[i].pat) } } } pub struct State<'a> { pub s: pp::Printer<'a>, cm: Option<&'a CodeMap>, comments: Option<Vec<comments::Comment>>, literals: Peekable<vec::IntoIter<comments::Literal>>, cur_cmnt: usize, boxes: Vec<pp::Breaks>, ann: &'a (PpAnn + 'a), } impl<'a> PrintState<'a> for State<'a> { fn writer(&mut self) -> &mut pp::Printer<'a> { &mut self.s } fn boxes(&mut self) -> &mut Vec<pp::Breaks> { &mut self.boxes } fn comments(&mut self) -> &mut Option<Vec<comments::Comment>> { &mut self.comments } fn cur_cmnt(&mut self) -> &mut usize { &mut self.cur_cmnt } fn cur_lit(&mut self) -> Option<&comments::Literal> { self.literals.peek() } fn bump_lit(&mut self) -> Option<comments::Literal> { self.literals.next() } } #[allow(non_upper_case_globals)] pub const indent_unit: usize = 4; #[allow(non_upper_case_globals)] pub const default_columns: usize = 78; /// Requires you to pass an input filename and reader so that /// it can scan the input text for comments and literals to /// copy forward. pub fn print_crate<'a>(cm: &'a CodeMap, sess: &ParseSess, krate: &hir::Crate, filename: String, input: &mut Read, out: Box<Write + 'a>, ann: &'a PpAnn, is_expanded: bool) -> io::Result<()> { let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded); // When printing the AST, we sometimes need to inject `#[no_std]` here. // Since you can't compile the HIR, it's not necessary. 
s.print_mod(&krate.module, &krate.attrs)?; s.print_remaining_comments()?; s.s.eof() } impl<'a> State<'a> { pub fn new_from_input(cm: &'a CodeMap, sess: &ParseSess, filename: String, input: &mut Read, out: Box<Write + 'a>, ann: &'a PpAnn, is_expanded: bool) -> State<'a> { let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input); State::new(cm, out, ann, Some(cmnts), // If the code is post expansion, don't use the table of // literals, since it doesn't correspond with the literals // in the AST anymore. if is_expanded { None } else { Some(lits) }) } pub fn new(cm: &'a CodeMap, out: Box<Write + 'a>, ann: &'a PpAnn, comments: Option<Vec<comments::Comment>>, literals: Option<Vec<comments::Literal>>) -> State<'a> { State { s: pp::mk_printer(out, default_columns), cm: Some(cm), comments: comments.clone(), literals: literals.unwrap_or_default().into_iter().peekable(), cur_cmnt: 0, boxes: Vec::new(), ann, } } } pub fn to_string<F>(ann: &PpAnn, f: F) -> String where F: FnOnce(&mut State) -> io::Result<()> { let mut wr = Vec::new(); { let mut printer = State { s: pp::mk_printer(Box::new(&mut wr), default_columns), cm: None, comments: None, literals: vec![].into_iter().peekable(), cur_cmnt: 0, boxes: Vec::new(), ann, }; f(&mut printer).unwrap(); printer.s.eof().unwrap(); } String::from_utf8(wr).unwrap() } pub fn visibility_qualified(vis: &hir::Visibility, w: &str) -> String { to_string(NO_ANN, |s| { s.print_visibility(vis)?; s.s.word(w) }) } impl<'a> State<'a> { pub fn cbox(&mut self, u: usize) -> io::Result<()> { self.boxes.push(pp::Breaks::Consistent); self.s.cbox(u) } pub fn nbsp(&mut self) -> io::Result<()> { self.s.word(" ") } pub fn word_nbsp(&mut self, w: &str) -> io::Result<()> { self.s.word(w)?; self.nbsp() } pub fn head(&mut self, w: &str) -> io::Result<()> { // outer-box is consistent self.cbox(indent_unit)?; // head-box is inconsistent self.ibox(w.len() + 1)?; // keyword that starts the head if !w.is_empty() { self.word_nbsp(w)?; } Ok(()) } 
pub fn bopen(&mut self) -> io::Result<()> { self.s.word("{")?; self.end() // close the head-box } pub fn bclose_(&mut self, span: syntax_pos::Span, indented: usize) -> io::Result<()> { self.bclose_maybe_open(span, indented, true) } pub fn bclose_maybe_open(&mut self, span: syntax_pos::Span, indented: usize, close_box: bool) -> io::Result<()> { self.maybe_print_comment(span.hi())?; self.break_offset_if_not_bol(1, -(indented as isize))?; self.s.word("}")?; if close_box { self.end()?; // close the outer-box } Ok(()) } pub fn bclose(&mut self, span: syntax_pos::Span) -> io::Result<()> { self.bclose_(span, indent_unit) } pub fn in_cbox(&self) -> bool { match self.boxes.last() { Some(&last_box) => last_box == pp::Breaks::Consistent, None => false, } } pub fn space_if_not_bol(&mut self) -> io::Result<()> { if !self.is_bol() { self.s.space()?; } Ok(()) } pub fn break_offset_if_not_bol(&mut self, n: usize, off: isize) -> io::Result<()> { if !self.is_bol() { self.s.break_offset(n, off) } else { if off != 0 && self.s.last_token().is_hardbreak_tok() { // We do something pretty sketchy here: tuck the nonzero // offset-adjustment we were going to deposit along with the // break into the previous hardbreak. self.s.replace_last_token(pp::Printer::hardbreak_tok_offset(off)); } Ok(()) } } // Synthesizes a comment that was not textually present in the original source // file. 
pub fn synth_comment(&mut self, text: String) -> io::Result<()> { self.s.word("/*")?; self.s.space()?; self.s.word(&text[..])?; self.s.space()?; self.s.word("*/") } pub fn commasep_cmnt<T, F, G>(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G) -> io::Result<()> where F: FnMut(&mut State, &T) -> io::Result<()>, G: FnMut(&T) -> syntax_pos::Span { self.rbox(0, b)?; let len = elts.len(); let mut i = 0; for elt in elts { self.maybe_print_comment(get_span(elt).hi())?; op(self, elt)?; i += 1; if i < len { self.s.word(",")?; self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi()))?; self.space_if_not_bol()?; } } self.end() } pub fn commasep_exprs(&mut self, b: Breaks, exprs: &[hir::Expr]) -> io::Result<()> { self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span) } pub fn print_mod(&mut self, _mod: &hir::Mod, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_inner_attributes(attrs)?; for &item_id in &_mod.item_ids { self.ann.nested(self, Nested::Item(item_id))?; } Ok(()) } pub fn print_foreign_mod(&mut self, nmod: &hir::ForeignMod, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_inner_attributes(attrs)?; for item in &nmod.items { self.print_foreign_item(item)?; } Ok(()) } pub fn print_opt_lifetime(&mut self, lifetime: &hir::Lifetime) -> io::Result<()> { if !lifetime.is_elided() { self.print_lifetime(lifetime)?; self.nbsp()?; } Ok(()) } pub fn print_type(&mut self, ty: &hir::Ty) -> io::Result<()> { self.maybe_print_comment(ty.span.lo())?; self.ibox(0)?; match ty.node { hir::TySlice(ref ty) => { self.s.word("[")?; self.print_type(&ty)?; self.s.word("]")?; } hir::TyPtr(ref mt) => { self.s.word("*")?; match mt.mutbl { hir::MutMutable => self.word_nbsp("mut")?, hir::MutImmutable => self.word_nbsp("const")?, } self.print_type(&mt.ty)?; } hir::TyRptr(ref lifetime, ref mt) => { self.s.word("&")?; self.print_opt_lifetime(lifetime)?; self.print_mt(mt)?; } hir::TyNever => { self.s.word("!")?; }, hir::TyTup(ref elts) => { 
self.popen()?; self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(&ty))?; if elts.len() == 1 { self.s.word(",")?; } self.pclose()?; } hir::TyBareFn(ref f) => { let generics = hir::Generics { lifetimes: f.lifetimes.clone(), ty_params: hir::HirVec::new(), where_clause: hir::WhereClause { id: ast::DUMMY_NODE_ID, predicates: hir::HirVec::new(), }, span: syntax_pos::DUMMY_SP, }; self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &generics, &f.arg_names[..])?; } hir::TyPath(ref qpath) => { self.print_qpath(qpath, false)? } hir::TyTraitObject(ref bounds, ref lifetime) => { let mut first = true; for bound in bounds { self.nbsp()?; if first { first = false; } else { self.word_space("+")?; } self.print_poly_trait_ref(bound)?; } if !lifetime.is_elided() { self.word_space("+")?; self.print_lifetime(lifetime)?; } } hir::TyImplTraitExistential(ref existty, ref _lifetimes) => { self.print_bounds("impl", &existty.bounds[..])?; } hir::TyImplTraitUniversal(_, ref bounds) => { self.print_bounds("impl", &bounds[..])?; } hir::TyArray(ref ty, v) => { self.s.word("[")?; self.print_type(&ty)?; self.s.word("; ")?; self.ann.nested(self, Nested::Body(v))?; self.s.word("]")?; } hir::TyTypeof(e) => { self.s.word("typeof(")?; self.ann.nested(self, Nested::Body(e))?; self.s.word(")")?; } hir::TyInfer => { self.s.word("_")?; } hir::TyErr => { self.s.word("?")?; } } self.end() } pub fn print_foreign_item(&mut self, item: &hir::ForeignItem) -> io::Result<()> { self.hardbreak_if_not_bol()?; self.maybe_print_comment(item.span.lo())?; self.print_outer_attributes(&item.attrs)?; match item.node { hir::ForeignItemFn(ref decl, ref arg_names, ref generics) => { self.head("")?; self.print_fn(decl, hir::Unsafety::Normal, hir::Constness::NotConst, Abi::Rust, Some(item.name), generics, &item.vis, arg_names, None)?; self.end()?; // end head-ibox self.s.word(";")?; self.end() // end the outer fn box } hir::ForeignItemStatic(ref t, m) => { self.head(&visibility_qualified(&item.vis, "static"))?; if m { 
self.word_space("mut")?; } self.print_name(item.name)?; self.word_space(":")?; self.print_type(&t)?; self.s.word(";")?; self.end()?; // end the head-ibox self.end() // end the outer cbox } hir::ForeignItemType => { self.head(&visibility_qualified(&item.vis, "type"))?; self.print_name(item.name)?; self.s.word(";")?; self.end()?; // end the head-ibox self.end() // end the outer cbox } } } fn print_associated_const(&mut self, name: ast::Name, ty: &hir::Ty, default: Option<hir::BodyId>, vis: &hir::Visibility) -> io::Result<()> { self.s.word(&visibility_qualified(vis, ""))?; self.word_space("const")?; self.print_name(name)?; self.word_space(":")?; self.print_type(ty)?; if let Some(expr) = default { self.s.space()?; self.word_space("=")?; self.ann.nested(self, Nested::Body(expr))?; } self.s.word(";") } fn print_associated_type(&mut self, name: ast::Name, bounds: Option<&hir::TyParamBounds>, ty: Option<&hir::Ty>) -> io::Result<()> { self.word_space("type")?; self.print_name(name)?; if let Some(bounds) = bounds { self.print_bounds(":", bounds)?; } if let Some(ty) = ty { self.s.space()?; self.word_space("=")?; self.print_type(ty)?; } self.s.word(";") } /// Pretty-print an item pub fn print_item(&mut self, item: &hir::Item) -> io::Result<()> { self.hardbreak_if_not_bol()?; self.maybe_print_comment(item.span.lo())?; self.print_outer_attributes(&item.attrs)?; self.ann.pre(self, NodeItem(item))?; match item.node { hir::ItemExternCrate(ref optional_path) => { self.head(&visibility_qualified(&item.vis, "extern crate"))?; if let Some(p) = *optional_path { let val = p.as_str(); if val.contains("-") { self.print_string(&val, ast::StrStyle::Cooked)?; } else { self.print_name(p)?; } self.s.space()?; self.s.word("as")?; self.s.space()?; } self.print_name(item.name)?; self.s.word(";")?; self.end()?; // end inner head-block self.end()?; // end outer head-block } hir::ItemUse(ref path, kind) => { self.head(&visibility_qualified(&item.vis, "use"))?; self.print_path(path, false)?; match 
kind { hir::UseKind::Single => { if path.segments.last().unwrap().name != item.name { self.s.space()?; self.word_space("as")?; self.print_name(item.name)?; } self.s.word(";")?; } hir::UseKind::Glob => self.s.word("::*;")?, hir::UseKind::ListStem => self.s.word("::{};")? } self.end()?; // end inner head-block self.end()?; // end outer head-block } hir::ItemStatic(ref ty, m, expr) => { self.head(&visibility_qualified(&item.vis, "static"))?; if m == hir::MutMutable { self.word_space("mut")?; } self.print_name(item.name)?; self.word_space(":")?; self.print_type(&ty)?; self.s.space()?; self.end()?; // end the head-ibox self.word_space("=")?; self.ann.nested(self, Nested::Body(expr))?; self.s.word(";")?; self.end()?; // end the outer cbox } hir::ItemConst(ref ty, expr) => { self.head(&visibility_qualified(&item.vis, "const"))?; self.print_name(item.name)?; self.word_space(":")?; self.print_type(&ty)?; self.s.space()?; self.end()?; // end the head-ibox self.word_space("=")?; self.ann.nested(self, Nested::Body(expr))?; self.s.word(";")?; self.end()?; // end the outer cbox } hir::ItemFn(ref decl, unsafety, constness, abi, ref typarams, body) => { self.head("")?; self.print_fn(decl, unsafety, constness, abi, Some(item.name), typarams, &item.vis, &[], Some(body))?; self.s.word(" ")?; self.end()?; // need to close a box self.end()?; // need to close a box self.ann.nested(self, Nested::Body(body))?; } hir::ItemMod(ref _mod) => { self.head(&visibility_qualified(&item.vis, "mod"))?; self.print_name(item.name)?; self.nbsp()?; self.bopen()?; self.print_mod(_mod, &item.attrs)?; self.bclose(item.span)?; } hir::ItemForeignMod(ref nmod) => { self.head("extern")?; self.word_nbsp(&nmod.abi.to_string())?; self.bopen()?; self.print_foreign_mod(nmod, &item.attrs)?; self.bclose(item.span)?; } hir::ItemGlobalAsm(ref ga) => { self.head(&visibility_qualified(&item.vis, "global asm"))?; self.s.word(&ga.asm.as_str())?; self.end()? 
} hir::ItemTy(ref ty, ref params) => { self.ibox(indent_unit)?; self.ibox(0)?; self.word_nbsp(&visibility_qualified(&item.vis, "type"))?; self.print_name(item.name)?; self.print_generics(params)?; self.end()?; // end the inner ibox self.print_where_clause(&params.where_clause)?; self.s.space()?; self.word_space("=")?; self.print_type(&ty)?; self.s.word(";")?; self.end()?; // end the outer ibox } hir::ItemEnum(ref enum_definition, ref params) => { self.print_enum_def(enum_definition, params, item.name, item.span, &item.vis)?; } hir::ItemStruct(ref struct_def, ref generics) => { self.head(&visibility_qualified(&item.vis, "struct"))?; self.print_struct(struct_def, generics, item.name, item.span, true)?; } hir::ItemUnion(ref struct_def, ref generics) => { self.head(&visibility_qualified(&item.vis, "union"))?; self.print_struct(struct_def, generics, item.name, item.span, true)?; } hir::ItemAutoImpl(unsafety, ref trait_ref) => { self.head("")?; self.print_visibility(&item.vis)?; self.print_unsafety(unsafety)?; self.word_nbsp("impl")?; self.print_trait_ref(trait_ref)?; self.s.space()?; self.word_space("for")?; self.word_space("..")?; self.bopen()?; self.bclose(item.span)?; } hir::ItemImpl(unsafety, polarity, defaultness, ref generics, ref opt_trait, ref ty, ref impl_items) => { self.head("")?; self.print_visibility(&item.vis)?; self.print_defaultness(defaultness)?; self.print_unsafety(unsafety)?; self.word_nbsp("impl")?; if generics.is_parameterized() { self.print_generics(generics)?; self.s.space()?; } match polarity { hir::ImplPolarity::Negative => { self.s.word("!")?; } _ => {} } match opt_trait { &Some(ref t) => { self.print_trait_ref(t)?; self.s.space()?; self.word_space("for")?; } &None => {} } self.print_type(&ty)?; self.print_where_clause(&generics.where_clause)?; self.s.space()?; self.bopen()?; self.print_inner_attributes(&item.attrs)?; for impl_item in impl_items { self.ann.nested(self, Nested::ImplItem(impl_item.id))?; } self.bclose(item.span)?; } 
hir::ItemTrait(is_auto, unsafety, ref generics, ref bounds, ref trait_items) => { self.head("")?; self.print_visibility(&item.vis)?; self.print_is_auto(is_auto)?; self.print_unsafety(unsafety)?; self.word_nbsp("trait")?; self.print_name(item.name)?; self.print_generics(generics)?; let mut real_bounds = Vec::with_capacity(bounds.len()); for b in bounds.iter() { if let TraitTyParamBound(ref ptr, hir::TraitBoundModifier::Maybe) = *b { self.s.space()?; self.word_space("for ?")?; self.print_trait_ref(&ptr.trait_ref)?; } else { real_bounds.push(b.clone()); } } self.print_bounds(":", &real_bounds[..])?; self.print_where_clause(&generics.where_clause)?; self.s.word(" ")?; self.bopen()?; for trait_item in trait_items { self.ann.nested(self, Nested::TraitItem(trait_item.id))?; } self.bclose(item.span)?; } } self.ann.post(self, NodeItem(item)) } pub fn print_trait_ref(&mut self, t: &hir::TraitRef) -> io::Result<()> { self.print_path(&t.path, false) } fn print_formal_lifetime_list(&mut self, lifetimes: &[hir::LifetimeDef]) -> io::Result<()> { if !lifetimes.is_empty() { self.s.word("for<")?; let mut comma = false; for lifetime_def in lifetimes { if comma { self.word_space(",")? 
} self.print_lifetime_def(lifetime_def)?; comma = true; } self.s.word(">")?; } Ok(()) } fn print_poly_trait_ref(&mut self, t: &hir::PolyTraitRef) -> io::Result<()> { self.print_formal_lifetime_list(&t.bound_lifetimes)?; self.print_trait_ref(&t.trait_ref) } pub fn print_enum_def(&mut self, enum_definition: &hir::EnumDef, generics: &hir::Generics, name: ast::Name, span: syntax_pos::Span, visibility: &hir::Visibility) -> io::Result<()> { self.head(&visibility_qualified(visibility, "enum"))?; self.print_name(name)?; self.print_generics(generics)?; self.print_where_clause(&generics.where_clause)?; self.s.space()?; self.print_variants(&enum_definition.variants, span) } pub fn print_variants(&mut self, variants: &[hir::Variant], span: syntax_pos::Span) -> io::Result<()> { self.bopen()?; for v in variants { self.space_if_not_bol()?; self.maybe_print_comment(v.span.lo())?; self.print_outer_attributes(&v.node.attrs)?; self.ibox(indent_unit)?; self.print_variant(v)?; self.s.word(",")?; self.end()?; self.maybe_print_trailing_comment(v.span, None)?; } self.bclose(span) } pub fn print_visibility(&mut self, vis: &hir::Visibility) -> io::Result<()> { match *vis { hir::Public => self.word_nbsp("pub"), hir::Visibility::Crate => self.word_nbsp("pub(crate)"), hir::Visibility::Restricted { ref path, .. } => { self.s.word("pub(")?; self.print_path(path, false)?; self.word_nbsp(")") } hir::Inherited => Ok(()), } } pub fn print_defaultness(&mut self, defaultness: hir::Defaultness) -> io::Result<()> { match defaultness { hir::Defaultness::Default { .. 
} => self.word_nbsp("default")?, hir::Defaultness::Final => (), } Ok(()) } pub fn print_struct(&mut self, struct_def: &hir::VariantData, generics: &hir::Generics, name: ast::Name, span: syntax_pos::Span, print_finalizer: bool) -> io::Result<()> { self.print_name(name)?; self.print_generics(generics)?; if !struct_def.is_struct() { if struct_def.is_tuple() { self.popen()?; self.commasep(Inconsistent, struct_def.fields(), |s, field| { s.maybe_print_comment(field.span.lo())?; s.print_outer_attributes(&field.attrs)?; s.print_visibility(&field.vis)?; s.print_type(&field.ty) })?; self.pclose()?; } self.print_where_clause(&generics.where_clause)?; if print_finalizer { self.s.word(";")?; } self.end()?; self.end() // close the outer-box } else { self.print_where_clause(&generics.where_clause)?; self.nbsp()?; self.bopen()?; self.hardbreak_if_not_bol()?; for field in struct_def.fields() { self.hardbreak_if_not_bol()?; self.maybe_print_comment(field.span.lo())?; self.print_outer_attributes(&field.attrs)?; self.print_visibility(&field.vis)?; self.print_name(field.name)?; self.word_nbsp(":")?; self.print_type(&field.ty)?; self.s.word(",")?; } self.bclose(span) } } pub fn print_variant(&mut self, v: &hir::Variant) -> io::Result<()> { self.head("")?; let generics = hir::Generics::empty(); self.print_struct(&v.node.data, &generics, v.node.name, v.span, false)?; if let Some(d) = v.node.disr_expr { self.s.space()?; self.word_space("=")?; self.ann.nested(self, Nested::Body(d))?; } Ok(()) } pub fn print_method_sig(&mut self, name: ast::Name, m: &hir::MethodSig, generics: &hir::Generics, vis: &hir::Visibility, arg_names: &[Spanned<ast::Name>], body_id: Option<hir::BodyId>) -> io::Result<()> { self.print_fn(&m.decl, m.unsafety, m.constness, m.abi, Some(name), generics, vis, arg_names, body_id) } pub fn print_trait_item(&mut self, ti: &hir::TraitItem) -> io::Result<()> { self.ann.pre(self, NodeSubItem(ti.id))?; self.hardbreak_if_not_bol()?; self.maybe_print_comment(ti.span.lo())?; 
self.print_outer_attributes(&ti.attrs)?; match ti.node { hir::TraitItemKind::Const(ref ty, default) => { self.print_associated_const(ti.name, &ty, default, &hir::Inherited)?; } hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref arg_names)) => { self.print_method_sig(ti.name, sig, &ti.generics, &hir::Inherited, arg_names, None)?; self.s.word(";")?; } hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => { self.head("")?; self.print_method_sig(ti.name, sig, &ti.generics, &hir::Inherited, &[], Some(body))?; self.nbsp()?; self.end()?; // need to close a box self.end()?; // need to close a box self.ann.nested(self, Nested::Body(body))?; } hir::TraitItemKind::Type(ref bounds, ref default) => { self.print_associated_type(ti.name, Some(bounds), default.as_ref().map(|ty| &**ty))?; } } self.ann.post(self, NodeSubItem(ti.id)) } pub fn print_impl_item(&mut self, ii: &hir::ImplItem) -> io::Result<()> { self.ann.pre(self, NodeSubItem(ii.id))?; self.hardbreak_if_not_bol()?; self.maybe_print_comment(ii.span.lo())?; self.print_outer_attributes(&ii.attrs)?; self.print_defaultness(ii.defaultness)?; match ii.node { hir::ImplItemKind::Const(ref ty, expr) => { self.print_associated_const(ii.name, &ty, Some(expr), &ii.vis)?; } hir::ImplItemKind::Method(ref sig, body) => { self.head("")?; self.print_method_sig(ii.name, sig, &ii.generics, &ii.vis, &[], Some(body))?; self.nbsp()?; self.end()?; // need to close a box self.end()?; // need to close a box self.ann.nested(self, Nested::Body(body))?; } hir::ImplItemKind::Type(ref ty) => { self.print_associated_type(ii.name, None, Some(ty))?; } } self.ann.post(self, NodeSubItem(ii.id)) } pub fn print_stmt(&mut self, st: &hir::Stmt) -> io::Result<()> { self.maybe_print_comment(st.span.lo())?; match st.node { hir::StmtDecl(ref decl, _) => { self.print_decl(&decl)?; } hir::StmtExpr(ref expr, _) => { self.space_if_not_bol()?; self.print_expr(&expr)?; } hir::StmtSemi(ref expr, _) => { self.space_if_not_bol()?; 
self.print_expr(&expr)?; self.s.word(";")?; } } if stmt_ends_with_semi(&st.node) { self.s.word(";")?; } self.maybe_print_trailing_comment(st.span, None) } pub fn print_block(&mut self, blk: &hir::Block) -> io::Result<()> { self.print_block_with_attrs(blk, &[]) } pub fn print_block_unclosed(&mut self, blk: &hir::Block) -> io::Result<()> { self.print_block_unclosed_indent(blk, indent_unit) } pub fn print_block_unclosed_indent(&mut self, blk: &hir::Block, indented: usize) -> io::Result<()> { self.print_block_maybe_unclosed(blk, indented, &[], false) } pub fn print_block_with_attrs(&mut self, blk: &hir::Block, attrs: &[ast::Attribute]) -> io::Result<()> { self.print_block_maybe_unclosed(blk, indent_unit, attrs, true) } pub fn print_block_maybe_unclosed(&mut self, blk: &hir::Block, indented: usize, attrs: &[ast::Attribute], close_box: bool) -> io::Result<()> { match blk.rules { hir::UnsafeBlock(..) => self.word_space("unsafe")?, hir::PushUnsafeBlock(..) => self.word_space("push_unsafe")?, hir::PopUnsafeBlock(..) 
=> self.word_space("pop_unsafe")?, hir::DefaultBlock => (), } self.maybe_print_comment(blk.span.lo())?; self.ann.pre(self, NodeBlock(blk))?; self.bopen()?; self.print_inner_attributes(attrs)?; for st in &blk.stmts { self.print_stmt(st)?; } match blk.expr { Some(ref expr) => { self.space_if_not_bol()?; self.print_expr(&expr)?; self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()))?; } _ => (), } self.bclose_maybe_open(blk.span, indented, close_box)?; self.ann.post(self, NodeBlock(blk)) } fn print_else(&mut self, els: Option<&hir::Expr>) -> io::Result<()> { match els { Some(_else) => { match _else.node { // "another else-if" hir::ExprIf(ref i, ref then, ref e) => { self.cbox(indent_unit - 1)?; self.ibox(0)?; self.s.word(" else if ")?; self.print_expr_as_cond(&i)?; self.s.space()?; self.print_expr(&then)?; self.print_else(e.as_ref().map(|e| &**e)) } // "final else" hir::ExprBlock(ref b) => { self.cbox(indent_unit - 1)?; self.ibox(0)?; self.s.word(" else ")?; self.print_block(&b) } // BLEAH, constraints would be great here _ => { panic!("print_if saw if with weird alternative"); } } } _ => Ok(()), } } pub fn print_if(&mut self, test: &hir::Expr, blk: &hir::Expr, elseopt: Option<&hir::Expr>) -> io::Result<()> { self.head("if")?; self.print_expr_as_cond(test)?; self.s.space()?; self.print_expr(blk)?; self.print_else(elseopt) } pub fn print_if_let(&mut self, pat: &hir::Pat, expr: &hir::Expr, blk: &hir::Block, elseopt: Option<&hir::Expr>) -> io::Result<()> { self.head("if let")?; self.print_pat(pat)?; self.s.space()?; self.word_space("=")?; self.print_expr_as_cond(expr)?; self.s.space()?; self.print_block(blk)?; self.print_else(elseopt) } fn print_call_post(&mut self, args: &[hir::Expr]) -> io::Result<()> { self.popen()?; self.commasep_exprs(Inconsistent, args)?; self.pclose() } pub fn print_expr_maybe_paren(&mut self, expr: &hir::Expr, prec: i8) -> io::Result<()> { let needs_par = expr_precedence(expr) < prec; if needs_par { self.popen()?; } 
self.print_expr(expr)?; if needs_par { self.pclose()?; } Ok(()) } /// Print an expr using syntax that's acceptable in a condition position, such as the `cond` in /// `if cond { ... }`. pub fn print_expr_as_cond(&mut self, expr: &hir::Expr) -> io::Result<()> { let needs_par = match expr.node { // These cases need parens due to the parse error observed in #26461: `if return {}` // parses as the erroneous construct `if (return {})`, not `if (return) {}`. hir::ExprClosure(..) | hir::ExprRet(..) | hir::ExprBreak(..) => true, _ => contains_exterior_struct_lit(expr), }; if needs_par { self.popen()?; } self.print_expr(expr)?; if needs_par { self.pclose()?; } Ok(()) } fn print_expr_vec(&mut self, exprs: &[hir::Expr]) -> io::Result<()> { self.ibox(indent_unit)?; self.s.word("[")?; self.commasep_exprs(Inconsistent, exprs)?; self.s.word("]")?; self.end() } fn print_expr_repeat(&mut self, element: &hir::Expr, count: hir::BodyId) -> io::Result<()> { self.ibox(indent_unit)?; self.s.word("[")?; self.print_expr(element)?; self.word_space(";")?; self.ann.nested(self, Nested::Body(count))?; self.s.word("]")?; self.end() } fn print_expr_struct(&mut self, qpath: &hir::QPath, fields: &[hir::Field], wth: &Option<P<hir::Expr>>) -> io::Result<()> { self.print_qpath(qpath, true)?; self.s.word("{")?; self.commasep_cmnt(Consistent, &fields[..], |s, field| { s.ibox(indent_unit)?; if !field.is_shorthand { s.print_name(field.name.node)?; s.word_space(":")?; } s.print_expr(&field.expr)?; s.end() }, |f| f.span)?; match *wth { Some(ref expr) => { self.ibox(indent_unit)?; if !fields.is_empty() { self.s.word(",")?; self.s.space()?; } self.s.word("..")?; self.print_expr(&expr)?; self.end()?; } _ => if !fields.is_empty() { self.s.word(",")? 
}, } self.s.word("}")?; Ok(()) } fn print_expr_tup(&mut self, exprs: &[hir::Expr]) -> io::Result<()> { self.popen()?; self.commasep_exprs(Inconsistent, exprs)?; if exprs.len() == 1 { self.s.word(",")?; } self.pclose() } fn print_expr_call(&mut self, func: &hir::Expr, args: &[hir::Expr]) -> io::Result<()> { let prec = match func.node { hir::ExprField(..) | hir::ExprTupField(..) => parser::PREC_FORCE_PAREN, _ => parser::PREC_POSTFIX, }; self.print_expr_maybe_paren(func, prec)?; self.print_call_post(args) } fn print_expr_method_call(&mut self, segment: &hir::PathSegment, args: &[hir::Expr]) -> io::Result<()> { let base_args = &args[1..]; self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX)?; self.s.word(".")?; self.print_name(segment.name)?; segment.with_parameters(|parameters| { if !parameters.lifetimes.is_empty() || !parameters.types.is_empty() || !parameters.bindings.is_empty() { self.print_path_parameters(&parameters, segment.infer_types, true) } else { Ok(()) } })?; self.print_call_post(base_args) } fn print_expr_binary(&mut self, op: hir::BinOp, lhs: &hir::Expr, rhs: &hir::Expr) -> io::Result<()> { let assoc_op = bin_op_to_assoc_op(op.node); let prec = assoc_op.precedence() as i8; let fixity = assoc_op.fixity(); let (left_prec, right_prec) = match fixity { Fixity::Left => (prec, prec + 1), Fixity::Right => (prec + 1, prec), Fixity::None => (prec + 1, prec + 1), }; let left_prec = match (&lhs.node, op.node) { // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead // of `(x as i32) < ...`. We need to convince it _not_ to do that. (&hir::ExprCast { .. }, hir::BinOp_::BiLt) | (&hir::ExprCast { .. 
}, hir::BinOp_::BiShl) => parser::PREC_FORCE_PAREN, _ => left_prec, }; self.print_expr_maybe_paren(lhs, left_prec)?; self.s.space()?; self.word_space(op.node.as_str())?; self.print_expr_maybe_paren(rhs, right_prec) } fn print_expr_unary(&mut self, op: hir::UnOp, expr: &hir::Expr) -> io::Result<()> { self.s.word(op.as_str())?; self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) } fn print_expr_addr_of(&mut self, mutability: hir::Mutability, expr: &hir::Expr) -> io::Result<()> { self.s.word("&")?; self.print_mutability(mutability)?; self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) } pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> { self.maybe_print_comment(expr.span.lo())?; self.print_outer_attributes(&expr.attrs)?; self.ibox(indent_unit)?; self.ann.pre(self, NodeExpr(expr))?; match expr.node { hir::ExprBox(ref expr) => { self.word_space("box")?; self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)?; } hir::ExprArray(ref exprs) => { self.print_expr_vec(exprs)?; } hir::ExprRepeat(ref element, count) => { self.print_expr_repeat(&element, count)?; } hir::ExprStruct(ref qpath, ref fields, ref wth) => { self.print_expr_struct(qpath, &fields[..], wth)?; } hir::ExprTup(ref exprs) => { self.print_expr_tup(exprs)?; } hir::ExprCall(ref func, ref args) => { self.print_expr_call(&func, args)?; } hir::ExprMethodCall(ref segment, _, ref args) => { self.print_expr_method_call(segment, args)?; } hir::ExprBinary(op, ref lhs, ref rhs) => { self.print_expr_binary(op, &lhs, &rhs)?; } hir::ExprUnary(op, ref expr) => { self.print_expr_unary(op, &expr)?; } hir::ExprAddrOf(m, ref expr) => { self.print_expr_addr_of(m, &expr)?; } hir::ExprLit(ref lit) => { self.print_literal(&lit)?; } hir::ExprCast(ref expr, ref ty) => { let prec = AssocOp::As.precedence() as i8; self.print_expr_maybe_paren(&expr, prec)?; self.s.space()?; self.word_space("as")?; self.print_type(&ty)?; } hir::ExprType(ref expr, ref ty) => { let prec = AssocOp::Colon.precedence() as i8; 
self.print_expr_maybe_paren(&expr, prec)?; self.word_space(":")?; self.print_type(&ty)?; } hir::ExprIf(ref test, ref blk, ref elseopt) => { self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?; } hir::ExprWhile(ref test, ref blk, opt_sp_name) => { if let Some(sp_name) = opt_sp_name { self.print_name(sp_name.node)?; self.word_space(":")?; } self.head("while")?; self.print_expr_as_cond(&test)?; self.s.space()?; self.print_block(&blk)?; } hir::ExprLoop(ref blk, opt_sp_name, _) => { if let Some(sp_name) = opt_sp_name { self.print_name(sp_name.node)?; self.word_space(":")?; } self.head("loop")?; self.s.space()?; self.print_block(&blk)?; } hir::ExprMatch(ref expr, ref arms, _) => { self.cbox(indent_unit)?; self.ibox(4)?; self.word_nbsp("match")?; self.print_expr_as_cond(&expr)?; self.s.space()?; self.bopen()?; for arm in arms { self.print_arm(arm)?; } self.bclose_(expr.span, indent_unit)?; } hir::ExprClosure(capture_clause, ref decl, body, _fn_decl_span, _gen) => { self.print_capture_clause(capture_clause)?; self.print_closure_args(&decl, body)?; self.s.space()?; // this is a bare expression self.ann.nested(self, Nested::Body(body))?; self.end()?; // need to close a box // a box will be closed by print_expr, but we didn't want an overall // wrapper so we closed the corresponding opening. so create an // empty box to satisfy the close. 
self.ibox(0)?; } hir::ExprBlock(ref blk) => { // containing cbox, will be closed by print-block at } self.cbox(indent_unit)?; // head-box, will be closed by print-block after { self.ibox(0)?; self.print_block(&blk)?; } hir::ExprAssign(ref lhs, ref rhs) => { let prec = AssocOp::Assign.precedence() as i8; self.print_expr_maybe_paren(&lhs, prec + 1)?; self.s.space()?; self.word_space("=")?; self.print_expr_maybe_paren(&rhs, prec)?; } hir::ExprAssignOp(op, ref lhs, ref rhs) => { let prec = AssocOp::Assign.precedence() as i8; self.print_expr_maybe_paren(&lhs, prec + 1)?; self.s.space()?; self.s.word(op.node.as_str())?; self.word_space("=")?; self.print_expr_maybe_paren(&rhs, prec)?; } hir::ExprField(ref expr, name) => { self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX)?; self.s.word(".")?; self.print_name(name.node)?; } hir::ExprTupField(ref expr, id) => { self.print_expr_maybe_paren(&expr, parser::PREC_POSTFIX)?; self.s.word(".")?; self.print_usize(id.node)?; } hir::ExprIndex(ref expr, ref index) => { self.print_expr_maybe_paren(&expr, parser::PREC_POSTFIX)?; self.s.word("[")?; self.print_expr(&index)?; self.s.word("]")?; } hir::ExprPath(ref qpath) => { self.print_qpath(qpath, true)? } hir::ExprBreak(label, ref opt_expr) => { self.s.word("break")?; self.s.space()?; if let Some(label_ident) = label.ident { self.print_name(label_ident.node.name)?; self.s.space()?; } if let Some(ref expr) = *opt_expr { self.print_expr_maybe_paren(expr, parser::PREC_JUMP)?; self.s.space()?; } } hir::ExprAgain(label) => { self.s.word("continue")?; self.s.space()?; if let Some(label_ident) = label.ident { self.print_name(label_ident.node.name)?; self.s.space()? 
} } hir::ExprRet(ref result) => { self.s.word("return")?; match *result { Some(ref expr) => { self.s.word(" ")?; self.print_expr_maybe_paren(&expr, parser::PREC_JUMP)?; } _ => (), } } hir::ExprInlineAsm(ref a, ref outputs, ref inputs) => { self.s.word("asm!")?; self.popen()?; self.print_string(&a.asm.as_str(), a.asm_str_style)?; self.word_space(":")?; let mut out_idx = 0; self.commasep(Inconsistent, &a.outputs, |s, out| { let constraint = out.constraint.as_str(); let mut ch = constraint.chars(); match ch.next() { Some('=') if out.is_rw => { s.print_string(&format!("+{}", ch.as_str()), ast::StrStyle::Cooked)? } _ => s.print_string(&constraint, ast::StrStyle::Cooked)?, } s.popen()?; s.print_expr(&outputs[out_idx])?; s.pclose()?; out_idx += 1; Ok(()) })?; self.s.space()?; self.word_space(":")?; let mut in_idx = 0; self.commasep(Inconsistent, &a.inputs, |s, co| { s.print_string(&co.as_str(), ast::StrStyle::Cooked)?; s.popen()?; s.print_expr(&inputs[in_idx])?; s.pclose()?; in_idx += 1; Ok(()) })?; self.s.space()?; self.word_space(":")?; self.commasep(Inconsistent, &a.clobbers, |s, co| { s.print_string(&co.as_str(), ast::StrStyle::Cooked)?; Ok(()) })?; let mut options = vec![]; if a.volatile { options.push("volatile"); } if a.alignstack { options.push("alignstack"); } if a.dialect == ast::AsmDialect::Intel { options.push("intel"); } if !options.is_empty() { self.s.space()?; self.word_space(":")?; self.commasep(Inconsistent, &options, |s, &co| { s.print_string(co, ast::StrStyle::Cooked)?; Ok(()) })?; } self.pclose()?; } hir::ExprYield(ref expr) => { self.word_space("yield")?; self.print_expr_maybe_paren(&expr, parser::PREC_JUMP)?; } } self.ann.post(self, NodeExpr(expr))?; self.end() } pub fn print_local_decl(&mut self, loc: &hir::Local) -> io::Result<()> { self.print_pat(&loc.pat)?; if let Some(ref ty) = loc.ty { self.word_space(":")?; self.print_type(&ty)?; } Ok(()) } pub fn print_decl(&mut self, decl: &hir::Decl) -> io::Result<()> { 
self.maybe_print_comment(decl.span.lo())?; match decl.node { hir::DeclLocal(ref loc) => { self.space_if_not_bol()?; self.ibox(indent_unit)?; self.word_nbsp("let")?; self.ibox(indent_unit)?; self.print_local_decl(&loc)?; self.end()?; if let Some(ref init) = loc.init { self.nbsp()?; self.word_space("=")?; self.print_expr(&init)?; } self.end() } hir::DeclItem(item) => { self.ann.nested(self, Nested::Item(item)) } } } pub fn print_usize(&mut self, i: usize) -> io::Result<()> { self.s.word(&i.to_string()) } pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> { self.s.word(&name.as_str())?; self.ann.post(self, NodeName(&name)) } pub fn print_for_decl(&mut self, loc: &hir::Local, coll: &hir::Expr) -> io::Result<()> { self.print_local_decl(loc)?; self.s.space()?; self.word_space("in")?; self.print_expr(coll) } pub fn print_path(&mut self, path: &hir::Path, colons_before_params: bool) -> io::Result<()> { self.maybe_print_comment(path.span.lo())?; for (i, segment) in path.segments.iter().enumerate() { if i > 0 { self.s.word("::")? } if segment.name != keywords::CrateRoot.name() && segment.name != keywords::DollarCrate.name() { self.print_name(segment.name)?; segment.with_parameters(|parameters| { self.print_path_parameters(parameters, segment.infer_types, colons_before_params) })?; } } Ok(()) } pub fn print_qpath(&mut self, qpath: &hir::QPath, colons_before_params: bool) -> io::Result<()> { match *qpath { hir::QPath::Resolved(None, ref path) => { self.print_path(path, colons_before_params) } hir::QPath::Resolved(Some(ref qself), ref path) => { self.s.word("<")?; self.print_type(qself)?; self.s.space()?; self.word_space("as")?; for (i, segment) in path.segments[..path.segments.len() - 1].iter().enumerate() { if i > 0 { self.s.word("::")? 
} if segment.name != keywords::CrateRoot.name() && segment.name != keywords::DollarCrate.name() { self.print_name(segment.name)?; segment.with_parameters(|parameters| { self.print_path_parameters(parameters, segment.infer_types, colons_before_params) })?; } } self.s.word(">")?; self.s.word("::")?; let item_segment = path.segments.last().unwrap(); self.print_name(item_segment.name)?; item_segment.with_parameters(|parameters| { self.print_path_parameters(parameters, item_segment.infer_types, colons_before_params) }) } hir::QPath::TypeRelative(ref qself, ref item_segment) => { self.s.word("<")?; self.print_type(qself)?; self.s.word(">")?; self.s.word("::")?; self.print_name(item_segment.name)?; item_segment.with_parameters(|parameters| { self.print_path_parameters(parameters, item_segment.infer_types, colons_before_params) }) } } } fn print_path_parameters(&mut self, parameters: &hir::PathParameters, infer_types: bool, colons_before_params: bool) -> io::Result<()> { if parameters.parenthesized { self.s.word("(")?; self.commasep(Inconsistent, parameters.inputs(), |s, ty| s.print_type(&ty))?; self.s.word(")")?; self.space_if_not_bol()?; self.word_space("->")?; self.print_type(&parameters.bindings[0].ty)?; } else { let start = if colons_before_params { "::<" } else { "<" }; let empty = Cell::new(true); let start_or_comma = |this: &mut Self| { if empty.get() { empty.set(false); this.s.word(start) } else { this.word_space(",") } }; if !parameters.lifetimes.iter().all(|lt| lt.is_elided()) { for lifetime in &parameters.lifetimes { start_or_comma(self)?; self.print_lifetime(lifetime)?; } } if !parameters.types.is_empty() { start_or_comma(self)?; self.commasep(Inconsistent, &parameters.types, |s, ty| s.print_type(&ty))?; } // FIXME(eddyb) This would leak into error messages, e.g.: // "non-exhaustive patterns: `Some::<..>(_)` not covered". 
if infer_types && false { start_or_comma(self)?; self.s.word("..")?; } for binding in parameters.bindings.iter() { start_or_comma(self)?; self.print_name(binding.name)?; self.s.space()?; self.word_space("=")?; self.print_type(&binding.ty)?; } if !empty.get() { self.s.word(">")? } } Ok(()) } pub fn print_pat(&mut self, pat: &hir::Pat) -> io::Result<()> { self.maybe_print_comment(pat.span.lo())?; self.ann.pre(self, NodePat(pat))?; // Pat isn't normalized, but the beauty of it // is that it doesn't matter match pat.node { PatKind::Wild => self.s.word("_")?, PatKind::Binding(binding_mode, _, ref path1, ref sub) => { match binding_mode { hir::BindingAnnotation::Ref => { self.word_nbsp("ref")?; self.print_mutability(hir::MutImmutable)?; } hir::BindingAnnotation::RefMut => { self.word_nbsp("ref")?; self.print_mutability(hir::MutMutable)?; } hir::BindingAnnotation::Unannotated => {} hir::BindingAnnotation::Mutable => { self.word_nbsp("mut")?; } } self.print_name(path1.node)?; if let Some(ref p) = *sub { self.s.word("@")?; self.print_pat(&p)?; } } PatKind::TupleStruct(ref qpath, ref elts, ddpos) => { self.print_qpath(qpath, true)?; self.popen()?; if let Some(ddpos) = ddpos { self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?; if ddpos != 0 { self.word_space(",")?; } self.s.word("..")?; if ddpos != elts.len() { self.s.word(",")?; self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?; } } else { self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?; } self.pclose()?; } PatKind::Path(ref qpath) => { self.print_qpath(qpath, true)?; } PatKind::Struct(ref qpath, ref fields, etc) => { self.print_qpath(qpath, true)?; self.nbsp()?; self.word_space("{")?; self.commasep_cmnt(Consistent, &fields[..], |s, f| { s.cbox(indent_unit)?; if !f.node.is_shorthand { s.print_name(f.node.name)?; s.word_nbsp(":")?; } s.print_pat(&f.node.pat)?; s.end() }, |f| f.node.pat.span)?; if etc { if !fields.is_empty() { self.word_space(",")?; } self.s.word("..")?; } 
self.s.space()?; self.s.word("}")?; } PatKind::Tuple(ref elts, ddpos) => { self.popen()?; if let Some(ddpos) = ddpos { self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?; if ddpos != 0 { self.word_space(",")?; } self.s.word("..")?; if ddpos != elts.len() { self.s.word(",")?; self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?; } } else { self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?; if elts.len() == 1 { self.s.word(",")?; } } self.pclose()?; } PatKind::Box(ref inner) => { self.s.word("box ")?; self.print_pat(&inner)?; } PatKind::Ref(ref inner, mutbl) => { self.s.word("&")?; if mutbl == hir::MutMutable { self.s.word("mut ")?; } self.print_pat(&inner)?; } PatKind::Lit(ref e) => self.print_expr(&e)?, PatKind::Range(ref begin, ref end, ref end_kind) => { self.print_expr(&begin)?; self.s.space()?; match *end_kind { RangeEnd::Included => self.s.word("...")?, RangeEnd::Excluded => self.s.word("..")?, } self.print_expr(&end)?; } PatKind::Slice(ref before, ref slice, ref after) => { self.s.word("[")?; self.commasep(Inconsistent, &before[..], |s, p| s.print_pat(&p))?; if let Some(ref p) = *slice { if !before.is_empty() { self.word_space(",")?; } if p.node != PatKind::Wild { self.print_pat(&p)?; } self.s.word("..")?; if !after.is_empty() { self.word_space(",")?; } } self.commasep(Inconsistent, &after[..], |s, p| s.print_pat(&p))?; self.s.word("]")?; } } self.ann.post(self, NodePat(pat)) } fn print_arm(&mut self, arm: &hir::Arm) -> io::Result<()> { // I have no idea why this check is necessary, but here it // is :( if arm.attrs.is_empty() { self.s.space()?; } self.cbox(indent_unit)?; self.ibox(0)?; self.print_outer_attributes(&arm.attrs)?; let mut first = true; for p in &arm.pats { if first { first = false; } else { self.s.space()?; self.word_space("|")?; } self.print_pat(&p)?; } self.s.space()?; if let Some(ref e) = arm.guard { self.word_space("if")?; self.print_expr(&e)?; self.s.space()?; } self.word_space("=>")?; match 
arm.body.node { hir::ExprBlock(ref blk) => { // the block will close the pattern's ibox self.print_block_unclosed_indent(&blk, indent_unit)?; // If it is a user-provided unsafe block, print a comma after it if let hir::UnsafeBlock(hir::UserProvided) = blk.rules { self.s.word(",")?; } } _ => { self.end()?; // close the ibox for the pattern self.print_expr(&arm.body)?; self.s.word(",")?; } } self.end() // close enclosing cbox } pub fn print_fn(&mut self, decl: &hir::FnDecl, unsafety: hir::Unsafety, constness: hir::Constness, abi: Abi, name: Option<ast::Name>, generics: &hir::Generics, vis: &hir::Visibility, arg_names: &[Spanned<ast::Name>], body_id: Option<hir::BodyId>) -> io::Result<()> { self.print_fn_header_info(unsafety, constness, abi, vis)?; if let Some(name) = name { self.nbsp()?; self.print_name(name)?; } self.print_generics(generics)?; self.popen()?; let mut i = 0; // Make sure we aren't supplied *both* `arg_names` and `body_id`. assert!(arg_names.is_empty() || body_id.is_none()); self.commasep(Inconsistent, &decl.inputs, |s, ty| { s.ibox(indent_unit)?; if let Some(name) = arg_names.get(i) { s.s.word(&name.node.as_str())?; s.s.word(":")?; s.s.space()?; } else if let Some(body_id) = body_id { s.ann.nested(s, Nested::BodyArgPat(body_id, i))?; s.s.word(":")?; s.s.space()?; } i += 1; s.print_type(ty)?; s.end() })?; if decl.variadic { self.s.word(", ...")?; } self.pclose()?; self.print_fn_output(decl)?; self.print_where_clause(&generics.where_clause) } fn print_closure_args(&mut self, decl: &hir::FnDecl, body_id: hir::BodyId) -> io::Result<()> { self.s.word("|")?; let mut i = 0; self.commasep(Inconsistent, &decl.inputs, |s, ty| { s.ibox(indent_unit)?; s.ann.nested(s, Nested::BodyArgPat(body_id, i))?; i += 1; if ty.node != hir::TyInfer { s.s.word(":")?; s.s.space()?; s.print_type(ty)?; } s.end() })?; self.s.word("|")?; if let hir::DefaultReturn(..) 
= decl.output { return Ok(()); } self.space_if_not_bol()?; self.word_space("->")?; match decl.output { hir::Return(ref ty) => { self.print_type(&ty)?; self.maybe_print_comment(ty.span.lo()) } hir::DefaultReturn(..) => unreachable!(), } } pub fn print_capture_clause(&mut self, capture_clause: hir::CaptureClause) -> io::Result<()> { match capture_clause { hir::CaptureByValue => self.word_space("move"), hir::CaptureByRef => Ok(()), } } pub fn print_bounds(&mut self, prefix: &str, bounds: &[hir::TyParamBound]) -> io::Result<()> { if !bounds.is_empty() { self.s.word(prefix)?; let mut first = true; for bound in bounds { self.nbsp()?; if first { first = false; } else { self.word_space("+")?; } match *bound { TraitTyParamBound(ref tref, TraitBoundModifier::None) => { self.print_poly_trait_ref(tref) } TraitTyParamBound(ref tref, TraitBoundModifier::Maybe) => { self.s.word("?")?; self.print_poly_trait_ref(tref) } RegionTyParamBound(ref lt) => { self.print_lifetime(lt) } }? } Ok(()) } else { Ok(()) } } pub fn print_lifetime(&mut self, lifetime: &hir::Lifetime) -> io::Result<()> { self.print_name(lifetime.name.name()) } pub fn print_lifetime_def(&mut self, lifetime: &hir::LifetimeDef) -> io::Result<()> { self.print_lifetime(&lifetime.lifetime)?; let mut sep = ":"; for v in &lifetime.bounds { self.s.word(sep)?; self.print_lifetime(v)?; sep = "+"; } Ok(()) } pub fn print_generics(&mut self, generics: &hir::Generics) -> io::Result<()> { let total = generics.lifetimes.len() + generics.ty_params.len(); if total == 0 { return Ok(()); } self.s.word("<")?; let mut ints = Vec::new(); for i in 0..total { ints.push(i); } self.commasep(Inconsistent, &ints[..], |s, &idx| { if idx < generics.lifetimes.len() { let lifetime = &generics.lifetimes[idx]; s.print_lifetime_def(lifetime) } else { let idx = idx - generics.lifetimes.len(); let param = &generics.ty_params[idx]; s.print_ty_param(param) } })?; self.s.word(">")?; Ok(()) } pub fn print_ty_param(&mut self, param: &hir::TyParam) -> 
io::Result<()> { self.print_name(param.name)?; self.print_bounds(":", &param.bounds)?; match param.default { Some(ref default) => { self.s.space()?; self.word_space("=")?; self.print_type(&default) } _ => Ok(()), } } pub fn print_where_clause(&mut self, where_clause: &hir::WhereClause) -> io::Result<()> { if where_clause.predicates.is_empty() { return Ok(()); } self.s.space()?; self.word_space("where")?; for (i, predicate) in where_clause.predicates.iter().enumerate() { if i != 0 { self.word_space(",")?; } match predicate { &hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate{ref bound_lifetimes, ref bounded_ty, ref bounds, ..}) => { self.print_formal_lifetime_list(bound_lifetimes)?; self.print_type(&bounded_ty)?; self.print_bounds(":", bounds)?; } &hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate{ref lifetime, ref bounds, ..}) => { self.print_lifetime(lifetime)?; self.s.word(":")?; for (i, bound) in bounds.iter().enumerate() { self.print_lifetime(bound)?; if i != 0 { self.s.word(":")?; } } } &hir::WherePredicate::EqPredicate(hir::WhereEqPredicate{ref lhs_ty, ref rhs_ty, ..}) => { self.print_type(lhs_ty)?; self.s.space()?; self.word_space("=")?; self.print_type(rhs_ty)?; } } } Ok(()) } pub fn print_mutability(&mut self, mutbl: hir::Mutability) -> io::Result<()> { match mutbl { hir::MutMutable => self.word_nbsp("mut"), hir::MutImmutable => Ok(()), } } pub fn print_mt(&mut self, mt: &hir::MutTy) -> io::Result<()> { self.print_mutability(mt.mutbl)?; self.print_type(&mt.ty) } pub fn print_fn_output(&mut self, decl: &hir::FnDecl) -> io::Result<()> { if let hir::DefaultReturn(..) = decl.output { return Ok(()); } self.space_if_not_bol()?; self.ibox(indent_unit)?; self.word_space("->")?; match decl.output { hir::DefaultReturn(..) 
=> unreachable!(), hir::Return(ref ty) => self.print_type(&ty)?, } self.end()?; match decl.output { hir::Return(ref output) => self.maybe_print_comment(output.span.lo()), _ => Ok(()), } } pub fn print_ty_fn(&mut self, abi: Abi, unsafety: hir::Unsafety, decl: &hir::FnDecl, name: Option<ast::Name>, generics: &hir::Generics, arg_names: &[Spanned<ast::Name>]) -> io::Result<()> { self.ibox(indent_unit)?; if !generics.lifetimes.is_empty() || !generics.ty_params.is_empty() { self.s.word("for")?; self.print_generics(generics)?; } let generics = hir::Generics { lifetimes: hir::HirVec::new(), ty_params: hir::HirVec::new(), where_clause: hir::WhereClause { id: ast::DUMMY_NODE_ID, predicates: hir::HirVec::new(), }, span: syntax_pos::DUMMY_SP, }; self.print_fn(decl, unsafety, hir::Constness::NotConst, abi, name, &generics, &hir::Inherited, arg_names, None)?; self.end() } pub fn maybe_print_trailing_comment(&mut self, span: syntax_pos::Span, next_pos: Option<BytePos>) -> io::Result<()> { let cm = match self.cm { Some(cm) => cm, _ => return Ok(()), }; if let Some(ref cmnt) = self.next_comment() { if (*cmnt).style != comments::Trailing { return Ok(()); } let span_line = cm.lookup_char_pos(span.hi()); let comment_line = cm.lookup_char_pos((*cmnt).pos); let mut next = (*cmnt).pos + BytePos(1); if let Some(p) = next_pos { next = p; } if span.hi() < (*cmnt).pos && (*cmnt).pos < next && span_line.line == comment_line.line { self.print_comment(cmnt)?; } } Ok(()) } pub fn print_remaining_comments(&mut self) -> io::Result<()> { // If there aren't any remaining comments, then we need to manually // make sure there is a line break at the end. 
if self.next_comment().is_none() { self.s.hardbreak()?; } loop { match self.next_comment() { Some(ref cmnt) => { self.print_comment(cmnt)?; } _ => break, } } Ok(()) } pub fn print_opt_abi_and_extern_if_nondefault(&mut self, opt_abi: Option<Abi>) -> io::Result<()> { match opt_abi { Some(Abi::Rust) => Ok(()), Some(abi) => { self.word_nbsp("extern")?; self.word_nbsp(&abi.to_string()) } None => Ok(()), } } pub fn print_extern_opt_abi(&mut self, opt_abi: Option<Abi>) -> io::Result<()> { match opt_abi { Some(abi) => { self.word_nbsp("extern")?; self.word_nbsp(&abi.to_string()) } None => Ok(()), } } pub fn print_fn_header_info(&mut self, unsafety: hir::Unsafety, constness: hir::Constness, abi: Abi, vis: &hir::Visibility) -> io::Result<()> { self.s.word(&visibility_qualified(vis, ""))?; self.print_unsafety(unsafety)?; match constness { hir::Constness::NotConst => {} hir::Constness::Const => self.word_nbsp("const")?, } if abi != Abi::Rust { self.word_nbsp("extern")?; self.word_nbsp(&abi.to_string())?; } self.s.word("fn") } pub fn print_unsafety(&mut self, s: hir::Unsafety) -> io::Result<()> { match s { hir::Unsafety::Normal => Ok(()), hir::Unsafety::Unsafe => self.word_nbsp("unsafe"), } } pub fn print_is_auto(&mut self, s: hir::IsAuto) -> io::Result<()> { match s { hir::IsAuto::Yes => self.word_nbsp("auto"), hir::IsAuto::No => Ok(()), } } } // Dup'ed from parse::classify, but adapted for the HIR. /// Does this expression require a semicolon to be treated /// as a statement? The negation of this: 'can this expression /// be used as a statement without a semicolon' -- is used /// as an early-bail-out in the parser so that, for instance, /// if true {...} else {...} /// |x| 5 /// isn't parsed as (if true {...} else {...} | x) | 5 fn expr_requires_semi_to_be_stmt(e: &hir::Expr) -> bool { match e.node { hir::ExprIf(..) | hir::ExprMatch(..) | hir::ExprBlock(_) | hir::ExprWhile(..) | hir::ExprLoop(..) => false, _ => true, } } /// this statement requires a semicolon after it. 
/// note that in one case (stmt_semi), we've already /// seen the semicolon, and thus don't need another. fn stmt_ends_with_semi(stmt: &hir::Stmt_) -> bool { match *stmt { hir::StmtDecl(ref d, _) => { match d.node { hir::DeclLocal(_) => true, hir::DeclItem(_) => false, } } hir::StmtExpr(ref e, _) => { expr_requires_semi_to_be_stmt(&e) } hir::StmtSemi(..) => { false } } } fn expr_precedence(expr: &hir::Expr) -> i8 { use syntax::util::parser::*; match expr.node { hir::ExprClosure(..) => PREC_CLOSURE, hir::ExprBreak(..) | hir::ExprAgain(..) | hir::ExprRet(..) | hir::ExprYield(..) => PREC_JUMP, // Binop-like expr kinds, handled by `AssocOp`. hir::ExprBinary(op, _, _) => bin_op_to_assoc_op(op.node).precedence() as i8, hir::ExprCast(..) => AssocOp::As.precedence() as i8, hir::ExprType(..) => AssocOp::Colon.precedence() as i8, hir::ExprAssign(..) | hir::ExprAssignOp(..) => AssocOp::Assign.precedence() as i8, // Unary, prefix hir::ExprBox(..) | hir::ExprAddrOf(..) | hir::ExprUnary(..) => PREC_PREFIX, // Unary, postfix hir::ExprCall(..) | hir::ExprMethodCall(..) | hir::ExprField(..) | hir::ExprTupField(..) | hir::ExprIndex(..) | hir::ExprInlineAsm(..) => PREC_POSTFIX, // Never need parens hir::ExprArray(..) | hir::ExprRepeat(..) | hir::ExprTup(..) | hir::ExprLit(..) | hir::ExprPath(..) | hir::ExprIf(..) | hir::ExprWhile(..) | hir::ExprLoop(..) | hir::ExprMatch(..) | hir::ExprBlock(..) | hir::ExprStruct(..) 
=> PREC_PAREN, } } fn bin_op_to_assoc_op(op: hir::BinOp_) -> AssocOp { use hir::BinOp_::*; match op { BiAdd => AssocOp::Add, BiSub => AssocOp::Subtract, BiMul => AssocOp::Multiply, BiDiv => AssocOp::Divide, BiRem => AssocOp::Modulus, BiAnd => AssocOp::LAnd, BiOr => AssocOp::LOr, BiBitXor => AssocOp::BitXor, BiBitAnd => AssocOp::BitAnd, BiBitOr => AssocOp::BitOr, BiShl => AssocOp::ShiftLeft, BiShr => AssocOp::ShiftRight, BiEq => AssocOp::Equal, BiLt => AssocOp::Less, BiLe => AssocOp::LessEqual, BiNe => AssocOp::NotEqual, BiGe => AssocOp::GreaterEqual, BiGt => AssocOp::Greater, } } /// Expressions that syntactically contain an "exterior" struct literal i.e. not surrounded by any /// parens or other delimiters, e.g. `X { y: 1 }`, `X { y: 1 }.method()`, `foo == X { y: 1 }` and /// `X { y: 1 } == foo` all do, but `(X { y: 1 }) == foo` does not. fn contains_exterior_struct_lit(value: &hir::Expr) -> bool { match value.node { hir::ExprStruct(..) => true, hir::ExprAssign(ref lhs, ref rhs) | hir::ExprAssignOp(_, ref lhs, ref rhs) | hir::ExprBinary(_, ref lhs, ref rhs) => { // X { y: 1 } + X { y: 2 } contains_exterior_struct_lit(&lhs) || contains_exterior_struct_lit(&rhs) } hir::ExprUnary(_, ref x) | hir::ExprCast(ref x, _) | hir::ExprType(ref x, _) | hir::ExprField(ref x, _) | hir::ExprTupField(ref x, _) | hir::ExprIndex(ref x, _) => { // &X { y: 1 }, X { y: 1 }.y contains_exterior_struct_lit(&x) } hir::ExprMethodCall(.., ref exprs) => { // X { y: 1 }.bar(...) contains_exterior_struct_lit(&exprs[0]) } _ => false, } }
35.740286
99
0.438039
76822d661f60ceb097eb11677d53448cce7d582a
394
use bevy::ecs::system::EntityCommands;
use bevy::prelude::*;

use crate::prelude::MelodyGrid;

use notation_model::prelude::BarLane;

/// Bevy plugin for melody lanes.
pub struct MelodyPlugin;

impl Plugin for MelodyPlugin {
    /// Nothing to register yet; this plugin currently only provides the
    /// `insert_lane_extra` helper below.
    fn build(&self, _app: &mut AppBuilder) {}
}

impl MelodyPlugin {
    /// Attach melody-specific components to a lane entity.
    ///
    /// Currently that is just a default `MelodyGrid`; the lane itself is not
    /// inspected yet (hence the unused `_lane` parameter).
    pub fn insert_lane_extra(commands: &mut EntityCommands, _lane: &BarLane) {
        let grid = MelodyGrid::default();
        commands.insert(grid);
    }
}
21.888889
78
0.71066
6a3c2477a4bb7e59bf4621f8796fa205b821aa6d
1,909
#![deny(clippy::all)] #![deny(missing_docs)] #![forbid(unsafe_code)] #![doc(html_root_url = "https://docs.rs/libunftp/0.18.2")] //! libunftp is an extensible, async, cloud orientated FTP(S) server library. //! //! Because of its plug-able authentication (e.g. PAM, JSON File, Generic REST) and storage //! backends (e.g. local filesystem, [Google Cloud Storage](https://cloud.google.com/storage)) it's //! more flexible than traditional FTP servers and a perfect match for the cloud. //! //! It runs on top of the Tokio asynchronous run-time and tries to make use of Async IO as much as //! possible. //! //! # Quick Start //! //! Add the libunftp and tokio crates to your project's dependencies in Cargo.toml. Then also choose //! a [storage back-end implementation](https://crates.io/search?page=1&per_page=10&q=unftp-sbe) to //! add. Here we choose the [file system back-end](https://crates.io/crates/unftp-sbe-fs): //! //! ```toml //! [dependencies] //! libunftp = "0.18.2" //! unftp-sbe-fs = "0.2.0" //! tokio = { version = "1", features = ["full"] } //! ``` //! Now you're ready to develop your server! Add the following to src/main.rs: //! //! ```no_run //! use unftp_sbe_fs::ServerExt; //! //! #[tokio::main] //! pub async fn main() { //! let ftp_home = std::env::temp_dir(); //! let server = libunftp::Server::with_fs(ftp_home) //! .greeting("Welcome to my FTP server") //! .passive_ports(50000..65535); //! //! server.listen("127.0.0.1:2121").await; //! } //! ``` //! You can now run your server with cargo run and connect to localhost:2121 with your favourite FTP client e.g.: //! //! ```sh //! lftp -p 2121 localhost //! ``` pub mod auth; pub(crate) mod metrics; pub mod notification; pub(crate) mod server; pub mod storage; pub use crate::server::ftpserver::{error::ServerError, options, Server}; type BoxError = Box<dyn std::error::Error + Send + Sync + 'static>;
34.089286
113
0.658984
263e598a23920901fe29418df841df9a38256c21
2,699
//! XML namespacing support

use crate::string::{AvmString, WStr, WString};
use gc_arena::{Collect, MutationContext};
use std::fmt;

/// Represents a scoped name within XML.
///
/// All names in XML are optionally namespaced. Each namespace is represented
/// as a string; the document contains a mapping of namespaces to URIs.
///
/// Names without a namespace use the default namespace.
///
/// The special namespace `xmlns` is used to map namespace strings to URIs; it
/// should not be used for user-specified namespaces.
#[derive(Copy, Clone, Collect, PartialEq, Eq, PartialOrd, Ord)]
#[collect(no_drop)]
pub struct XmlName<'gc> {
    /// The position of the namespace separator in the name, if the name is namespaced.
    namespace_sep: Option<usize>,
    // The full name: `prefix:local` when namespaced, otherwise just the local name.
    name: AvmString<'gc>,
}

impl<'gc> XmlName<'gc> {
    /// Creates a name with an explicit namespace prefix, storing it in the
    /// full form `namespace:name`.
    pub fn in_namespace(
        gc_context: MutationContext<'gc, '_>,
        namespace: &WStr,
        name: &WStr,
    ) -> Self {
        let mut full_name = WString::from(namespace);
        full_name.push_byte(b':');
        full_name.push_str(name);
        Self {
            // Separator sits right after the prefix, i.e. at `namespace.len()`.
            namespace_sep: Some(namespace.len()),
            name: AvmString::new(gc_context, full_name),
        }
    }

    /// Creates an un-prefixed name in the default namespace.
    pub fn in_default_namespace(name: AvmString<'gc>) -> Self {
        Self {
            namespace_sep: None,
            name,
        }
    }

    /// Parses a (possibly prefixed) name; the first `:` found is treated as
    /// the namespace separator, if any.
    pub fn from_str(full_name: impl Into<AvmString<'gc>>) -> Self {
        let full_name = full_name.into();
        Self {
            namespace_sep: full_name.find(b':'),
            name: full_name,
        }
    }

    /// Retrieve the local part of this name.
    pub fn local_name(&self) -> &WStr {
        match self.namespace_sep {
            // Skip the separator itself.
            Some(sep) => &self.name[sep + 1..],
            None => &self.name,
        }
    }

    /// Retrieve the prefix part of this name, if available.
    pub fn prefix(&self) -> Option<&WStr> {
        self.namespace_sep.map(|sep| &self.name[..sep])
    }

    /// Return the fully qualified part of the name.
    ///
    /// This consists of the namespace, if present, plus a colon and local name.
    pub fn node_name(&self) -> AvmString<'gc> {
        self.name
    }

    /// Compares both full names case-insensitively (for use in HTML parsing).
    /// TODO: We shouldn't need this when we have a proper HTML parser.
    pub fn eq_ignore_case(&self, other: XmlName<'gc>) -> bool {
        self.name.eq_ignore_case(&other.name)
    }
}

impl<'gc> fmt::Debug for XmlName<'gc> {
    // Debug output shows the name split into its prefix and local parts.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("XmlName")
            .field("namespace", &self.prefix())
            .field("name", &self.local_name())
            .finish()
    }
}
30.325843
87
0.600963
1d5f7d5c0099e6360383987f4705290268bf742c
752
// rustc UI test exercising generic associated types (GATs).
//
// NOTE(review): the `//~^ ERROR` line below is a compiletest directive that
// must remain directly beneath the offending associated-type definition; it
// expects a diagnostic whose message begins with "the type" — presumably a
// lifetime/outlives failure in the GAT projection, confirm against the
// test's .stderr file.
#![feature(generic_associated_types)]

struct Texture;

trait Surface {
    type TextureIter<'a>: Iterator<Item = &'a Texture>
    where
        Self: 'a;
    fn get_texture(&self) -> Self::TextureIter<'_>;
}

trait Swapchain {
    type Surface<'a>: Surface
    where
        Self: 'a;
    fn get_surface(&self) -> Self::Surface<'_>;
}

impl<'s> Surface for &'s Texture {
    type TextureIter<'a> = std::option::IntoIter<&'a Texture>;
    //~^ ERROR the type
    fn get_texture(&self) -> Self::TextureIter<'_> {
        let option: Option<&Texture> = Some(self);
        option.into_iter()
    }
}

impl Swapchain for Texture {
    type Surface<'a> = &'a Texture;
    fn get_surface(&self) -> Self::Surface<'_> {
        self
    }
}

fn main() {}
18.8
62
0.589096
1e8ea35ca8152a1a23d534c23e2eb019fdd1b820
1,466
use std::fmt::{Debug, Error, Formatter};

use crate::{ExClause, TableIndex};
use crate::context::{Context, InferenceTable};
use crate::table::AnswerIndex;

// NOTE: this module uses the unstable `crate` visibility shorthand
// (equivalent to `pub(crate)`).

/// A strand whose ex-clause is stored in canonical form, so it can live in
/// a table independently of any particular inference table.
#[derive(Debug)]
crate struct CanonicalStrand<C: Context> {
    pub(super) canonical_ex_clause: C::CanonicalExClause,

    /// Index into `ex_clause.subgoals`.
    crate selected_subgoal: Option<SelectedSubgoal<C>>,
}

// An active strand: unlike `CanonicalStrand`, the ex-clause here is typed by
// the inference context `I` and the strand mutably borrows an inference
// table for its lifetime.
crate struct Strand<'table, C: Context + 'table, I: Context + 'table> {
    crate infer: &'table mut dyn InferenceTable<C, I>,

    pub(super) ex_clause: ExClause<I>,

    /// Index into `ex_clause.subgoals`.
    crate selected_subgoal: Option<SelectedSubgoal<C>>,
}

/// Identifies which subgoal of a strand is currently being pursued, and
/// where its answers come from.
#[derive(Clone, Debug)]
crate struct SelectedSubgoal<C: Context> {
    /// The index of the subgoal in `ex_clause.subgoals`
    crate subgoal_index: usize,

    /// The index of the table that we created or found for this subgoal
    pub(super) subgoal_table: TableIndex,

    /// Index of the answer we should request next from the table
    crate answer_index: AnswerIndex,

    /// Maps the universes of the subgoal to the canonical universes
    /// used in the table
    crate universe_map: C::UniverseMap,
}

impl<'table, C: Context, I: Context> Debug for Strand<'table, C, I> {
    // Manual impl: the `infer` field is a trait object with no `Debug`
    // bound, so it is deliberately omitted from the output.
    fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
        fmt.debug_struct("Strand")
            .field("ex_clause", &self.ex_clause)
            .field("selected_subgoal", &self.selected_subgoal)
            .finish()
    }
}
31.191489
72
0.677353
e85eb7b8969357621c2e3683e3b5e788277fc950
3,355
use std::cmp::Ordering;

/// Solves the binary-diagnostic puzzle for the given report and prints the
/// intermediate results.
///
/// Part 1: `gamma_rate` is assembled from the most common bit at each
/// position; `epsilon_rate` is its bitwise complement within the report's
/// bit width.
/// Part 2: the candidate list is repeatedly filtered by the most (oxygen)
/// or least (CO2) common bit per position until one value remains.
///
/// # Panics
/// Panics if `input` has no lines, a line is not valid binary, any bit
/// position has an exact 0/1 tie in part 1, or a part-2 filter fails to
/// converge to a single value.
pub fn main(input: &str) -> Metrics {
    let (bits_count, numbers) = parse_text(input);

    // Most-common-bit mask across all positions == gamma rate.
    let common_bits_mask: usize = (0..bits_count)
        .rev()
        .map(|bit| numbers.mcb(bit as u32).expect("0 and 1 equally common") << bit)
        .sum();

    let gamma_rate: usize = common_bits_mask;
    // Complement restricted to the report's bit width.
    let epsilon_rate = !gamma_rate & ((1 << bits_count) - 1);

    println!("Gamma rate: {}", gamma_rate);
    println!("Epsilon rate: {}", epsilon_rate);
    println!("Power consumption: {}", gamma_rate * epsilon_rate);

    // Part 2: partition on the most significant bit first, then narrow each
    // group one bit at a time, from high to low.
    let max_bit = bits_count - 1;
    let mask = (common_bits_mask >> max_bit) & 1;
    let (mut oxygen_generator, mut co2_rating): (Vec<_>, Vec<_>) = numbers
        .into_iter()
        .partition(|n| (n >> max_bit & 1) == mask);

    for b in (0..max_bit).rev() {
        if oxygen_generator.len() > 1 {
            // Oxygen keeps the most common bit; ties resolve to 1.
            let mcb = oxygen_generator.mcb_with_equal(b as u32, 1);
            oxygen_generator.retain(|n| ((n >> b) & 1) == mcb);
        }
        if co2_rating.len() > 1 {
            // CO2 keeps the least common bit; ties resolve to 0.
            let lcb = co2_rating.lcb_with_equal(b as u32, 0);
            co2_rating.retain(|n| ((n >> b) & 1) == lcb);
        }
    }

    let oxygen_generator = *oxygen_generator.first().expect("could not find");
    let co2_rating = *co2_rating.first().expect("could not find");

    println!("Oxygen generator: {}", oxygen_generator);
    println!("CO2 rating: {}", co2_rating);
    println!("Life support rating: {}", oxygen_generator * co2_rating);

    Metrics {
        gamma_rate,
        epsilon_rate,
        oxygen_generator,
        co2_rating,
    }
}

/// Aggregated puzzle answers.
#[derive(Debug)]
pub struct Metrics {
    pub gamma_rate: usize,
    pub epsilon_rate: usize,
    pub oxygen_generator: usize,
    pub co2_rating: usize,
}

impl Metrics {
    /// Part 1 answer: gamma rate × epsilon rate.
    pub fn power_consumtion(&self) -> usize {
        self.gamma_rate * self.epsilon_rate
    }

    /// Part 2 answer: oxygen generator rating × CO2 scrubber rating.
    pub fn life_support_rating(&self) -> usize {
        self.oxygen_generator * self.co2_rating
    }
}

/// Parses the report into `(bit_width, numbers)`, where `bit_width` is the
/// length of the longest line and each line is parsed as base-2.
///
/// # Panics
/// Panics if `input` has no lines or a line is not valid binary.
pub fn parse_text(input: &str) -> (usize, Vec<usize>) {
    let bits_size = input
        .lines()
        .map(|s| s.len())
        .max()
        .expect("No data lines");

    let numbers = input
        .lines()
        .map(|line| usize::from_str_radix(line, 2).expect("invalid representation"))
        .collect();

    (bits_size, numbers)
}

/// Get the most/least common bit at `bit` position.
///
/// `mcb`/`lcb` return `Some(0)` or `Some(1)`, or `None` if 1 and 0 are
/// equally common.
pub trait MostCommonBit {
    fn mcb(&self, bit: u32) -> Option<usize>;

    fn mcb_with_equal(&self, bit: u32, on_equal: usize) -> usize {
        self.mcb(bit).unwrap_or(on_equal)
    }

    fn lcb(&self, bit: u32) -> Option<usize> {
        // Fix: mask the complement down to one bit so `lcb` yields 0/1 like
        // `mcb`, rather than the full bitwise NOT (`!0` == usize::MAX).
        self.mcb(bit).map(|b| !b & 1)
    }

    fn lcb_with_equal(&self, bit: u32, on_equal: usize) -> usize {
        self.lcb(bit).unwrap_or(on_equal)
    }
}

impl MostCommonBit for Vec<usize> {
    fn mcb(&self, bit: u32) -> Option<usize> {
        assert!(bit < usize::BITS, "Bit is out of range");

        // Count ones and zeroes at `bit` in a single pass.
        let (ones, nils) = self
            .iter()
            .map(|n| n >> bit & 1)
            .map(|b| (b, !b & 1))
            .fold((0, 0), |(a1, b1), (a2, b2)| (a1 + a2, b1 + b2));

        match ones.cmp(&nils) {
            Ordering::Less => Some(0),
            Ordering::Greater => Some(1),
            Ordering::Equal => None,
        }
    }
}
28.675214
84
0.565723
29d81e84ccd65d306b1aeea63ad4dac88f3eda37
81
// Demonstrates Rust's `Copy` semantics for primitive integers: binding `x`
// into `y` copies the value instead of moving it, so `x` remains usable.
fn main() {
    let x = 1 ; // `i32`, which implements `Copy`
    let y = x ; //copy x
    let z = x ; //legal — `x` was copied, not moved, by the previous binding
}
8.1
24
0.382716
f9cf52f51edd3e7f1841d0dd77e571a4a84b495b
8,053
//! `MovieClipLoader` impl

use crate::avm1::activation::Activation;
use crate::avm1::error::Error;
use crate::avm1::object::script_object::ScriptObject;
use crate::avm1::object::TObject;
use crate::avm1::property::Attribute;
use crate::avm1::{Object, UpdateContext, Value};
use crate::backend::navigator::RequestOptions;
use crate::display_object::{DisplayObject, TDisplayObject};
use enumset::EnumSet;
use gc_arena::MutationContext;

/// `MovieClipLoader` constructor: creates the hidden `_listeners` array and
/// registers the loader object itself as its first listener.
pub fn constructor<'gc>(
    activation: &mut Activation<'_, 'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    _args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let listeners = ScriptObject::array(context.gc_context, Some(activation.avm.prototypes().array));
    // `_listeners` is hidden from enumeration but still reachable by scripts.
    this.define_value(
        context.gc_context,
        "_listeners",
        Value::Object(listeners.into()),
        Attribute::DontEnum.into(),
    );
    // The loader listens to its own broadcasts by default.
    listeners.set_array_element(0, Value::Object(this), context.gc_context);
    Ok(Value::Undefined)
}

/// `MovieClipLoader.addListener`: appends the first argument to the
/// `_listeners` array. Always returns `true`.
pub fn add_listener<'gc>(
    activation: &mut Activation<'_, 'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let new_listener = args.get(0).cloned().unwrap_or(Value::Undefined);
    let listeners = this.get("_listeners", activation, context)?;

    if let Value::Object(listeners) = listeners {
        let length = listeners.length();
        listeners.set_length(context.gc_context, length + 1);
        listeners.set_array_element(length, new_listener, context.gc_context);
    }

    Ok(true.into())
}

/// `MovieClipLoader.removeListener`: removes the first matching listener by
/// shifting later entries down one slot. Always returns `true`, whether or
/// not a match was found.
pub fn remove_listener<'gc>(
    activation: &mut Activation<'_, 'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let old_listener = args.get(0).cloned().unwrap_or(Value::Undefined);
    let listeners = this.get("_listeners", activation, context)?;

    if let Value::Object(listeners) = listeners {
        let length = listeners.length();
        let mut position = None;

        // Locate the first listener equal to the argument.
        for i in 0..length {
            let other_listener = listeners.array_element(i);
            if old_listener == other_listener {
                position = Some(i);
                break;
            }
        }

        if let Some(position) = position {
            if length > 0 {
                let new_length = length - 1;
                // Shift everything after the removed slot down by one.
                for i in position..new_length {
                    listeners.set_array_element(
                        i,
                        listeners.array_element(i + 1),
                        context.gc_context,
                    );
                }

                // Drop both the array slot and the plain string-keyed
                // property for the now-unused trailing index.
                listeners.delete_array_element(new_length, context.gc_context);
                listeners.delete(activation, context.gc_context, &new_length.to_string());

                listeners.set_length(context.gc_context, new_length);
            }
        }
    }

    Ok(true.into())
}

/// `MovieClipLoader.broadcastMessage`: calls the method named by the first
/// argument on every registered listener.
pub fn broadcast_message<'gc>(
    activation: &mut Activation<'_, 'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let event_name_val = args.get(0).cloned().unwrap_or(Value::Undefined);
    let event_name = event_name_val.coerce_to_string(activation, context)?;
    // Note: the full argument list — including the event name itself as the
    // first element — is forwarded to each listener.
    let call_args = &args[0..];

    let listeners = this.get("_listeners", activation, context)?;
    if let Value::Object(listeners) = listeners {
        for i in 0..listeners.length() {
            let listener = listeners.array_element(i);

            if let Value::Object(listener) = listener {
                listener.call_method(&event_name, call_args, activation, context)?;
            }
        }
    }

    Ok(Value::Undefined)
}

/// `MovieClipLoader.loadClip`: starts an async fetch of the movie at the URL
/// (first argument) into the target clip (second argument). Returns `true`
/// if the target was a movie clip object, `false` otherwise.
pub fn load_clip<'gc>(
    activation: &mut Activation<'_, 'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let url_val = args.get(0).cloned().unwrap_or(Value::Undefined);
    let url = url_val.coerce_to_string(activation, context)?;
    let target = args.get(1).cloned().unwrap_or(Value::Undefined);

    if let Value::Object(target) = target {
        if let Some(movieclip) = target
            .as_display_object()
            .and_then(|dobj| dobj.as_movie_clip())
        {
            let fetch = context.navigator.fetch(&url, RequestOptions::get());
            // `Some(this)` makes this loader the broadcaster for the
            // resulting load events.
            let process = context.load_manager.load_movie_into_clip(
                context.player.clone().unwrap(),
                DisplayObject::MovieClip(movieclip),
                fetch,
                url.to_string(),
                Some(this),
            );

            context.navigator.spawn_future(process);
        }

        Ok(true.into())
    } else {
        Ok(false.into())
    }
}

/// `MovieClipLoader.unloadClip`: unloads the target clip and clears its
/// movie. Returns `true` on success, `false` if the target was not a clip.
pub fn unload_clip<'gc>(
    _activation: &mut Activation<'_, 'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    _this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let target = args.get(0).cloned().unwrap_or(Value::Undefined);

    if let Value::Object(target) = target {
        if let Some(mut movieclip) = target
            .as_display_object()
            .and_then(|dobj| dobj.as_movie_clip())
        {
            movieclip.unload(context);
            movieclip.replace_with_movie(context.gc_context, None);

            return Ok(true.into());
        }
    }

    Ok(false.into())
}

/// `MovieClipLoader.getProgress`: returns an object with `bytesLoaded` /
/// `bytesTotal` for the target clip, or `undefined` if it is not a clip.
pub fn get_progress<'gc>(
    _activation: &mut Activation<'_, 'gc>,
    context: &mut UpdateContext<'_, 'gc, '_>,
    _this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let target = args.get(0).cloned().unwrap_or(Value::Undefined);

    if let Value::Object(target) = target {
        if let Some(movieclip) = target
            .as_display_object()
            .and_then(|dobj| dobj.as_movie_clip())
        {
            let ret_obj = ScriptObject::object(context.gc_context, None);
            // NOTE(review): both fields report `data().len() + 21`, so loads
            // always look complete; the 21 presumably accounts for SWF
            // header bytes not included in `data()` — confirm.
            ret_obj.define_value(
                context.gc_context,
                "bytesLoaded",
                movieclip
                    .movie()
                    .map(|mv| (mv.data().len() + 21).into())
                    .unwrap_or(Value::Undefined),
                EnumSet::empty(),
            );
            ret_obj.define_value(
                context.gc_context,
                "bytesTotal",
                movieclip
                    .movie()
                    .map(|mv| (mv.data().len() + 21).into())
                    .unwrap_or(Value::Undefined),
                EnumSet::empty(),
            );

            return Ok(ret_obj.into());
        }
    }

    Ok(Value::Undefined)
}

/// Builds the `MovieClipLoader.prototype` object with its methods attached.
pub fn create_proto<'gc>(
    gc_context: MutationContext<'gc, '_>,
    proto: Object<'gc>,
    fn_proto: Object<'gc>,
) -> Object<'gc> {
    let mcl_proto = ScriptObject::object(gc_context, Some(proto));

    mcl_proto.as_script_object().unwrap().force_set_function(
        "addListener",
        add_listener,
        gc_context,
        EnumSet::empty(),
        Some(fn_proto),
    );
    mcl_proto.as_script_object().unwrap().force_set_function(
        "removeListener",
        remove_listener,
        gc_context,
        EnumSet::empty(),
        Some(fn_proto),
    );
    mcl_proto.as_script_object().unwrap().force_set_function(
        "broadcastMessage",
        broadcast_message,
        gc_context,
        EnumSet::empty(),
        Some(fn_proto),
    );
    mcl_proto.as_script_object().unwrap().force_set_function(
        "loadClip",
        load_clip,
        gc_context,
        EnumSet::empty(),
        Some(fn_proto),
    );
    mcl_proto.as_script_object().unwrap().force_set_function(
        "unloadClip",
        unload_clip,
        gc_context,
        EnumSet::empty(),
        Some(fn_proto),
    );
    mcl_proto.as_script_object().unwrap().force_set_function(
        "getProgress",
        get_progress,
        gc_context,
        EnumSet::empty(),
        Some(fn_proto),
    );

    mcl_proto.into()
}
30.274436
90
0.569974
b9be05417d2ec8d5c57f950b1427e22c127cae1d
23,486
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under both the MIT license found in the
 * LICENSE-MIT file in the root directory of this source tree and the Apache
 * License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 * of this source tree.
 */

//! Ordered set implementation using a sorted vector

use std::borrow::Borrow;
use std::cmp::Ordering;
use std::collections::Bound::*;
use std::collections::{BTreeSet, Bound};
use std::fmt::{self, Debug};
use std::iter::{FromIterator, Peekable};
use std::mem;
use std::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};

use quickcheck::{Arbitrary, Gen};

// An ordered set backed by a single `Vec<T>` kept in sorted order; lookups
// are binary searches, insertions may shift the tail of the vector.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct SortedVectorSet<T>(Vec<T>);

impl<T> SortedVectorSet<T>
where
    T: Ord,
{
    /// Creates a new, empty SortedVectorSet.
    pub fn new() -> SortedVectorSet<T> {
        SortedVectorSet(Vec::new())
    }

    /// Creates a new, empty SortedVectorSet, with capacity for `capacity` entries.
    pub fn with_capacity(capacity: usize) -> SortedVectorSet<T> {
        SortedVectorSet(Vec::with_capacity(capacity))
    }

    /// Clears the set, removing all elements.
    pub fn clear(&mut self) {
        self.0.clear()
    }

    /// Returns `true` if the set is empty.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Utility function to binary search for an index using the key.
    ///
    /// `Ok(i)` means the value is at `i`; `Err(i)` is its insertion point.
    fn find_index<Q>(&self, q: &Q) -> Result<usize, usize>
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.0.binary_search_by(|e| e.borrow().cmp(q))
    }

    /// Returns `true` if the set contains a value.
    pub fn contains<Q>(&self, q: &Q) -> bool
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.find_index(q).is_ok()
    }

    /// Returns a reference to the value in the set, if any, that is equal to the given value.
    pub fn get<Q>(&self, q: &Q) -> Option<&T>
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.find_index(q).ok().map(|index| &self.0[index])
    }

    /// Utility function for implementing `range` and `range_mut`.
    ///
    /// Convert a range boundary for the start of a range into a slice
    /// index suitable for use in a range expression.
    fn range_index_start<Q>(&self, b: Bound<&Q>) -> usize
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        match b {
            Unbounded => 0,
            Included(q) => match self.find_index(q) {
                Ok(index) => index,
                Err(index) => index,
            },
            Excluded(q) => match self.find_index(q) {
                // Exclusive start: skip the matching element itself.
                Ok(index) => index + 1,
                Err(index) => index,
            },
        }
    }

    /// Utility function for implementing `range` and `range_mut`.
    ///
    /// Convert a range boundary for the end of a range into a slice
    /// index suitable for use in a range expression.
    fn range_index_end<Q>(&self, b: Bound<&Q>) -> usize
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        match b {
            Unbounded => self.0.len(),
            Included(q) => match self.find_index(q) {
                // Inclusive end: the matching element stays in range.
                Ok(index) => index + 1,
                Err(index) => index,
            },
            Excluded(q) => match self.find_index(q) {
                Ok(index) => index,
                Err(index) => index,
            },
        }
    }

    /// Returns an iterator over the given range of keys.
    ///
    /// # Panics
    ///
    /// Panics if the range start is after the range end.
    pub fn range<Q, R>(&self, range: R) -> std::slice::Iter<T>
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
        R: RangeBounds<Q>,
    {
        let start = self.range_index_start(range.start_bound());
        let end = self.range_index_end(range.end_bound());
        if start > end {
            panic!("range start is greater than range end in SortedVectorSet")
        }
        self.0[start..end].iter()
    }

    /// Returns the items that are in `self` that are not in `other`.
    pub fn difference<'a>(&'a self, other: &'a SortedVectorSet<T>) -> Difference<'a, T> {
        Difference(OperationInner {
            left: self.iter().peekable(),
            right: other.iter().peekable(),
        })
    }

    /// Returns the items that are in `self` or `other`, but not in both.
    pub fn symmetric_difference<'a>(
        &'a self,
        other: &'a SortedVectorSet<T>,
    ) -> SymmetricDifference<'a, T> {
        SymmetricDifference(OperationInner {
            left: self.iter().peekable(),
            right: other.iter().peekable(),
        })
    }

    /// Returns the items that are in both `self` and `other`.
    pub fn intersection<'a>(&'a self, other: &'a SortedVectorSet<T>) -> Intersection<'a, T> {
        Intersection(OperationInner {
            left: self.iter().peekable(),
            right: other.iter().peekable(),
        })
    }

    /// Returns the items that are in `self`, `other`, or both.
    pub fn union<'a>(&'a self, other: &'a SortedVectorSet<T>) -> Union<'a, T> {
        Union(OperationInner {
            left: self.iter().peekable(),
            right: other.iter().peekable(),
        })
    }

    /// Returns `true` if `self` has no elements in common with `other`.
    pub fn is_disjoint(&self, other: &SortedVectorSet<T>) -> bool {
        self.intersection(other).next().is_none()
    }

    /// Returns `true` if `self` is a subset of `other`, i.e. `other`
    /// contains at least all values in `self`.
    pub fn is_subset(&self, other: &SortedVectorSet<T>) -> bool {
        other.difference(self).next().is_none()
    }

    /// Returns `true` if `self` is a superset of `other`, i.e. `self`
    /// contains at least all values in `other`.
    pub fn is_superset(&self, other: &SortedVectorSet<T>) -> bool {
        other.is_subset(self)
    }

    /// Adds a value to the set.
    ///
    /// Returns `true` if the set did not already have this value present.
    pub fn insert(&mut self, value: T) -> bool {
        self.replace(value).is_none()
    }

    /// Adds a value to the set, replacing the existing value, if any,
    /// that is equal to the given one.  Returns the replaced value.
    pub fn replace(&mut self, value: T) -> Option<T> {
        let len = self.0.len();
        // Fast path: appending past the current maximum needs no search.
        if len == 0 || self.0[len - 1] < value {
            self.0.push(value);
            None
        } else {
            let mut value = value;
            match self.find_index(&value) {
                Ok(index) => {
                    // Equal element exists: swap it out and return the old one.
                    mem::swap(&mut self.0[index], &mut value);
                    Some(value)
                }
                Err(index) => {
                    self.0.insert(index, value);
                    None
                }
            }
        }
    }

    /// Removes the value in the set, if any, that is equal to the given
    /// one.  Returns `true` if the value was in the set.
    pub fn remove<Q>(&mut self, value: &Q) -> bool
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        self.take(value).is_some()
    }

    /// Removes and returns the value in the set, if any, that is equal
    /// to the given one.
    pub fn take<Q>(&mut self, value: &Q) -> Option<T>
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        match self.find_index(&value) {
            Ok(index) => Some(self.0.remove(index)),
            Err(_index) => None,
        }
    }

    /// Moves all elements from `other` into `self`, leaving `other` empty.
    pub fn append(&mut self, other: &mut SortedVectorSet<T>) {
        if other.is_empty() {
            return;
        }
        if self.is_empty() {
            mem::swap(self, other);
            return;
        }
        // Merge the two sorted sequences; `MergeIter` collapses duplicates.
        let self_iter = mem::take(self).into_iter();
        let other_iter = mem::take(other).into_iter();
        let iter = MergeIter {
            left: self_iter.peekable(),
            right: other_iter.peekable(),
        };
        self.0 = iter.collect();
    }

    /// Splits the collection in two at the given key.  Returns
    /// everything after the given key, including the key.
    pub fn split_off<Q>(&mut self, q: &Q) -> SortedVectorSet<T>
    where
        T: Borrow<Q>,
        Q: Ord + ?Sized,
    {
        let index = match self.find_index(q) {
            Ok(index) => index,
            Err(index) => index,
        };
        SortedVectorSet(self.0.split_off(index))
    }

    /// Returns an iterator over the values in the map, in sorted order
    pub fn iter(&self) -> std::slice::Iter<T> {
        self.0.iter()
    }

    /// Returns the number of elements in the set.
    pub fn len(&self) -> usize {
        self.0.len()
    }
}

impl<T> Default for SortedVectorSet<T>
where
    T: Ord,
{
    fn default() -> SortedVectorSet<T> {
        SortedVectorSet::new()
    }
}

impl<T> Debug for SortedVectorSet<T>
where
    T: Ord + Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_set().entries(self.iter()).finish()
    }
}

impl<T> IntoIterator for SortedVectorSet<T>
where
    T: Ord,
{
    type Item = T;
    type IntoIter = std::vec::IntoIter<T>;

    #[inline]
    fn into_iter(self) -> std::vec::IntoIter<T> {
        self.0.into_iter()
    }
}

impl<'a, T: 'a> IntoIterator for &'a SortedVectorSet<T>
where
    T: Ord,
{
    type Item = &'a T;
    type IntoIter = std::slice::Iter<'a, T>;

    #[inline]
    fn into_iter(self) -> std::slice::Iter<'a, T> {
        self.0.iter()
    }
}

impl<T> Extend<T> for SortedVectorSet<T>
where
    T: Ord,
{
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        // Sort the incoming items once, then merge with the existing
        // (already sorted) contents in a single pass.
        let mut new: Vec<_> = iter.into_iter().collect();
        if new.is_empty() {
            return;
        }
        new.sort();
        let self_iter = mem::take(self).into_iter();
        let new_iter = new.into_iter();
        let iter = MergeIter {
            left: self_iter.peekable(),
            right: new_iter.peekable(),
        };
        self.0 = iter.collect();
    }
}

impl<'a, T> Extend<&'a T> for SortedVectorSet<T>
where
    T: Ord + Copy,
{
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        iter.into_iter().for_each(move |&value| {
            self.insert(value);
        });
    }
}

impl<T> FromIterator<T> for SortedVectorSet<T>
where
    T: Ord,
{
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> SortedVectorSet<T> {
        let iter = iter.into_iter();
        let mut set = SortedVectorSet::with_capacity(iter.size_hint().0);
        set.extend(iter);
        set
    }
}

// Merges two sorted iterators into one sorted, deduplicated stream; on equal
// keys the value from `right` wins.
struct MergeIter<T, I: Iterator<Item = T>> {
    left: Peekable<I>,
    right: Peekable<I>,
}

impl<T, I> MergeIter<T, I>
where
    T: Ord,
    I: Iterator<Item = T>,
{
    /// Returns the next right value, skipping over equal values.
fn next_right(&mut self) -> Option<T> {
        let mut next = self.right.next();
        // Consume a run of equal values in `right`, keeping only the last.
        while let (Some(next_ref), Some(after)) = (next.as_ref(), self.right.peek()) {
            if after > next_ref {
                break;
            }
            next = self.right.next();
        }
        next
    }
}

impl<T, I> Iterator for MergeIter<T, I>
where
    T: Ord,
    I: Iterator<Item = T>,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        let res = match (self.left.peek(), self.right.peek()) {
            (Some(left), Some(right)) => left.cmp(right),
            (Some(_), None) => Ordering::Less,
            (None, Some(_)) => Ordering::Greater,
            (None, None) => return None,
        };

        // Check which element comes first and only advance the corresponding
        // iterator. If the two keys are equal, take the value from `right`.
        // If `right` has multiple equal keys, take the last one.
        match res {
            Ordering::Less => self.left.next(),
            Ordering::Greater => self.next_right(),
            Ordering::Equal => {
                self.left.next();
                self.next_right()
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let left_hint = self.left.size_hint();
        let right_hint = self.right.size_hint();
        // At least as many items as the larger side (dedup can only shrink);
        // at most the sum of both sides.
        let low = std::cmp::max(left_hint.0, right_hint.0);
        let high = match (left_hint.1, right_hint.1) {
            (Some(left_high), Some(right_high)) => left_high.checked_add(right_high),
            _ => None,
        };
        (low, high)
    }
}

// Shared driver for the set-operation iterators: walks two sorted iterators
// in lockstep and yields, for each value, which side(s) it was present on.
struct OperationInner<'a, T> {
    left: Peekable<std::slice::Iter<'a, T>>,
    right: Peekable<std::slice::Iter<'a, T>>,
}

impl<'a, T> Iterator for OperationInner<'a, T>
where
    T: Ord,
{
    type Item = (Option<&'a T>, Option<&'a T>);

    fn next(&mut self) -> Option<Self::Item> {
        let res = match (self.left.peek(), self.right.peek()) {
            (Some(left), Some(right)) => left.cmp(right),
            (Some(_), None) => Ordering::Less,
            (None, Some(_)) => Ordering::Greater,
            (None, None) => return None,
        };

        // Check which element comes first and only advance the corresponding
        // iterator. If the two keys are equal, advance both.
        match res {
            Ordering::Less => Some((self.left.next(), None)),
            Ordering::Greater => Some((None, self.right.next())),
            Ordering::Equal => Some((self.left.next(), self.right.next())),
        }
    }
}

// Iterator over values present in `left` but not in `right`.
pub struct Difference<'a, T: 'a>(OperationInner<'a, T>);

impl<'a, T> Iterator for Difference<'a, T>
where
    T: Ord,
{
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        while let Some(next) = self.0.next() {
            match next {
                // Keep only values found on the left side alone.
                (Some(left), None) => return Some(left),
                _ => continue,
            }
        }
        None
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let left_hint = self.0.left.size_hint();
        let right_hint = self.0.right.size_hint();
        // Every right-side item can cancel at most one left-side item.
        let low = match right_hint.1 {
            Some(right_high) => left_hint.0.saturating_sub(right_high),
            None => 0,
        };
        let high = left_hint.1;
        (low, high)
    }
}

// Iterator over values present in exactly one of the two sets.
pub struct SymmetricDifference<'a, T: 'a>(OperationInner<'a, T>);

impl<'a, T> Iterator for SymmetricDifference<'a, T>
where
    T: Ord,
{
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        while let Some(next) = self.0.next() {
            match next {
                (Some(left), None) => return Some(left),
                (None, Some(right)) => return Some(right),
                // Present on both sides: excluded.
                _ => continue,
            }
        }
        None
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let left_hint = self.0.left.size_hint();
        let right_hint = self.0.right.size_hint();
        let low = 0;
        let high = match (left_hint.1, right_hint.1) {
            (Some(left_high), Some(right_high)) => left_high.checked_add(right_high),
            _ => None,
        };
        (low, high)
    }
}

// Iterator over values present in both sets.
pub struct Intersection<'a, T: 'a>(OperationInner<'a, T>);

impl<'a, T> Iterator for Intersection<'a, T>
where
    T: Ord,
{
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        while let Some(next) = self.0.next() {
            match next {
                (Some(left), Some(_right)) => return Some(left),
                _ => continue,
            }
        }
        None
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let left_hint = self.0.left.size_hint();
        let right_hint = self.0.right.size_hint();
        let low = 0;
        // Cannot yield more items than the smaller side holds.
        let high = match (left_hint.1, right_hint.1) {
            (Some(left_high), Some(right_high)) => Some(std::cmp::min(left_high, right_high)),
            _ => None,
        };
        (low, high)
    }
}

// Iterator over values present in either set; common values yielded once.
pub struct Union<'a, T: 'a>(OperationInner<'a, T>);

impl<'a, T> Iterator for Union<'a, T>
where
    T: Ord,
{
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        while let Some(next) = self.0.next() {
            match next {
                // Prefer the right-side reference when present on both sides.
                (_, Some(right)) => return Some(right),
                (Some(left), None) => return Some(left),
                _ => continue,
            }
        }
        None
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let left_hint = self.0.left.size_hint();
        let right_hint = self.0.right.size_hint();
        let low = std::cmp::max(left_hint.0, right_hint.0);
        let high = match (left_hint.1, right_hint.1) {
            (Some(left_high), Some(right_high)) => left_high.checked_add(right_high),
            _ => None,
        };
        (low, high)
    }
}

impl<T> BitAnd<&SortedVectorSet<T>> for &SortedVectorSet<T>
where
    T: Ord + Clone,
{
    type Output = SortedVectorSet<T>;

    fn bitand(self, rhs: &SortedVectorSet<T>) -> SortedVectorSet<T> {
        self.intersection(rhs).cloned().collect()
    }
}

impl<T> Sub<&SortedVectorSet<T>> for &SortedVectorSet<T>
where
    T: Ord + Clone,
{
    type Output = SortedVectorSet<T>;

    fn sub(self, rhs: &SortedVectorSet<T>) -> SortedVectorSet<T> {
        self.difference(rhs).cloned().collect()
    }
}

impl<T> BitXor<&SortedVectorSet<T>> for &SortedVectorSet<T>
where
    T: Ord + Clone,
{
    type Output = SortedVectorSet<T>;

    fn bitxor(self, rhs: &SortedVectorSet<T>) -> SortedVectorSet<T> {
        self.symmetric_difference(rhs).cloned().collect()
    }
}

impl<T> BitOr<&SortedVectorSet<T>> for &SortedVectorSet<T>
where
    T: Ord + Clone,
{
    type Output = SortedVectorSet<T>;

    fn bitor(self, rhs: &SortedVectorSet<T>) -> SortedVectorSet<T> {
        self.union(rhs).cloned().collect()
    }
}

impl<T> From<BTreeSet<T>> for SortedVectorSet<T> {
    fn from(bset: BTreeSet<T>) -> SortedVectorSet<T> {
        // The BTreeSet will iterate in sorted order.
let v = bset.into_iter().collect();
        SortedVectorSet(v)
    }
}

/// quickcheck support: generate random sets from random `Vec`s and shrink by
/// shrinking the underlying element vector.
impl<T> Arbitrary for SortedVectorSet<T>
where
    T: Arbitrary + Ord,
{
    fn arbitrary<G: Gen>(g: &mut G) -> SortedVectorSet<T> {
        // Collecting the random Vec de-duplicates and sorts it.
        let vec: Vec<T> = Arbitrary::arbitrary(g);
        vec.into_iter().collect()
    }

    fn shrink(&self) -> Box<dyn Iterator<Item = SortedVectorSet<T>>> {
        let vec: Vec<T> = self.clone().into_iter().collect();
        Box::new(
            vec.shrink()
                .map(|v| v.into_iter().collect::<SortedVectorSet<T>>()),
        )
    }
}

/// Constructs a `SortedVectorSet` from a list of values, e.g.
/// `sorted_vector_set! { 1, 2, 3 }`. Duplicate values collapse via `insert`.
#[macro_export]
macro_rules! sorted_vector_set {
    ( $( $value:expr ),* $( , )? ) => {
        {
            // Count the values at compile time to pre-size the allocation.
            let size = <[()]>::len(&[ $( $crate::replace_expr!( ( $value ) () ) ),* ]);
            let mut set = $crate::SortedVectorSet::with_capacity(size);
            $( set.insert($value); )*
            set
        }
    };
}

#[cfg(test)]
mod tests {
    use super::*;
    use quickcheck::quickcheck;
    use std::collections::BTreeSet;

    #[test]
    fn insert_contains_take_remove() {
        let mut svs = SortedVectorSet::new();
        assert_eq!(svs.insert("test1"), true);
        assert_eq!(svs.insert("test2"), true);
        assert_eq!(svs.insert("test4"), true);
        assert_eq!(svs.insert("test3"), true);
        // Re-inserting an existing value reports `false`.
        assert_eq!(svs.insert("test1"), false);
        assert_eq!(svs.contains(&"test1"), true);
        assert_eq!(svs.contains(&"never"), false);
        assert_eq!(svs.take(&"test3"), Some("test3"));
        assert_eq!(svs.take(&"never"), None);
        assert_eq!(svs.remove(&"test2"), true);
        assert_eq!(svs.remove(&"test2"), false);
        assert_eq!(svs.remove(&"never"), false);
    }

    #[test]
    fn iter() {
        let svs = sorted_vector_set! { 1, 2, 3, 4, 5 };
        // Borrowing iteration yields references in sorted order.
        let mut i = svs.iter();
        assert_eq!(i.next(), Some(&1));
        assert_eq!(i.next(), Some(&2));
        assert_eq!(i.next(), Some(&3));
        assert_eq!(i.next(), Some(&4));
        assert_eq!(i.next(), Some(&5));
        assert_eq!(i.next(), None);
        // Consuming iteration yields owned values in the same order.
        let mut i = svs.into_iter();
        assert_eq!(i.next(), Some(1));
        assert_eq!(i.next(), Some(2));
        assert_eq!(i.next(), Some(3));
        assert_eq!(i.next(), Some(4));
        assert_eq!(i.next(), Some(5));
        assert_eq!(i.next(), None);
    }

    #[test]
    fn range() {
        let svs = sorted_vector_set!
{ 1, 3, 5, 7, 9, 11}; assert_eq!(svs.range(3..9).cloned().collect::<Vec<_>>(), vec![3, 5, 7]); assert_eq!(svs.range(3..=7).cloned().collect::<Vec<_>>(), vec![3, 5, 7]); assert_eq!(svs.range(..2).cloned().collect::<Vec<_>>(), vec![1]); assert_eq!(svs.range(6..).cloned().collect::<Vec<_>>(), vec![7, 9, 11]); } #[test] fn split_off_append_extend() { let mut svs = sorted_vector_set! { 1, 3, 5, 7, 9, 11}; let mut svs2 = svs.split_off(&7); assert_eq!(svs.iter().cloned().collect::<Vec<_>>(), vec![1, 3, 5]); assert_eq!(svs2.iter().cloned().collect::<Vec<_>>(), vec![7, 9, 11]); svs2.extend(vec![4, 5, 6, 7, 8].into_iter()); assert_eq!( svs2.iter().cloned().collect::<Vec<_>>(), vec![4, 5, 6, 7, 8, 9, 11] ); svs2.append(&mut svs); assert!(svs.is_empty()); assert_eq!( svs2.iter().cloned().collect::<Vec<_>>(), vec![1, 3, 4, 5, 6, 7, 8, 9, 11] ); } #[test] fn intersect_difference_symdiff_union() { let svs1 = sorted_vector_set! { 1, 3, 4, 5, 6, 7, 9 }; let svs2 = sorted_vector_set! { 2, 4, 5, 6, 7, 8, 10}; assert_eq!( svs1.intersection(&svs2) .cloned() .collect::<SortedVectorSet<_>>(), sorted_vector_set! { 4, 5, 6, 7 }, ); assert_eq!( svs1.difference(&svs2) .cloned() .collect::<SortedVectorSet<_>>(), sorted_vector_set! { 1, 3, 9 }, ); assert_eq!( svs1.symmetric_difference(&svs2) .cloned() .collect::<SortedVectorSet<_>>(), sorted_vector_set! { 1, 2, 3, 8, 9, 10 }, ); assert_eq!( svs1.union(&svs2).cloned().collect::<SortedVectorSet<_>>(), sorted_vector_set! { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, ); assert_eq!(&svs1 & &svs2, sorted_vector_set! { 4, 5, 6, 7 },); assert_eq!(&svs1 - &svs2, sorted_vector_set! { 1, 3, 9 },); assert_eq!(&svs1 ^ &svs2, sorted_vector_set! { 1, 2, 3, 8, 9, 10 },); assert_eq!(&svs1 | &svs2, (1..=10).collect(),); } #[test] fn debug_print() { assert_eq!(&format!("{:?}", SortedVectorSet::<i32>::new()), "{}"); assert_eq!( &format!("{:?}", sorted_vector_set! 
{1, 10, 100}), "{1, 10, 100}" ); } fn svset_from_btreeset<T: Ord + Clone>(b: &BTreeSet<T>) -> SortedVectorSet<T> { let mut svs = SortedVectorSet::with_capacity(b.len()); for v in b.iter() { svs.insert(v.clone()); } svs } quickcheck! { fn like_btreeset_is_empty(b: BTreeSet<u32>) -> bool { let svs = svset_from_btreeset(&b); svs.is_empty() == b.is_empty() } fn like_btreeset_len(b: BTreeSet<u32>) -> bool { let svs = svset_from_btreeset(&b); svs.len() == b.len() } fn like_btreeset_iter(b: BTreeSet<u32>) -> bool { let svs = svset_from_btreeset(&b); itertools::equal(svs.iter(), b.iter()) } } }
28.995062
94
0.529337
299ed97a7e8245af9c6ff3e9c5bb52b885ee68c6
5,191
//! egui core library //! //! To quickly get started with egui, you can take a look at [`egui_template`](https://github.com/emilk/egui_template) //! which uses [`eframe`](https://docs.rs/eframe). //! //! To create a GUI using egui you first need a [`CtxRef`] (by convention referred to by `ctx`). //! Use one of [`SidePanel`], [`TopPanel`], [`CentralPanel`], [`Window`] or [`Area`] to //! get access to an [`Ui`] where you can put widgets. For example: //! //! ``` //! # let mut ctx = egui::CtxRef::default(); //! # ctx.begin_frame(Default::default()); //! egui::CentralPanel::default().show(&ctx, |ui| { //! ui.label("Hello"); //! }); //! ``` //! //! //! To write your own integration for egui you need to do this: //! //! ``` ignore //! let mut egui_ctx = egui::CtxRef::default(); //! //! // Game loop: //! loop { //! let raw_input: egui::RawInput = my_integration.gather_input(); //! egui_ctx.begin_frame(raw_input); //! my_app.ui(&egui_ctx); // add panels, windows and widgets to `egui_ctx` here //! let (output, shapes) = egui_ctx.end_frame(); //! let clipped_meshes = egui_ctx.tessellate(shapes); // create triangles to paint //! my_integration.paint(clipped_meshes); //! my_integration.set_cursor_icon(output.cursor_icon); //! // Also see `egui::Output` for more //! } //! 
``` #![cfg_attr(not(debug_assertions), deny(warnings))] // Forbid warnings in release builds #![forbid(unsafe_code)] #![warn( clippy::all, clippy::await_holding_lock, clippy::dbg_macro, clippy::doc_markdown, clippy::empty_enum, clippy::enum_glob_use, clippy::exit, clippy::filter_map_next, clippy::fn_params_excessive_bools, clippy::if_let_mutex, clippy::imprecise_flops, clippy::inefficient_to_string, clippy::linkedlist, clippy::lossy_float_literal, clippy::macro_use_imports, clippy::match_on_vec_items, clippy::match_wildcard_for_single_variants, clippy::mem_forget, clippy::mismatched_target_os, clippy::missing_errors_doc, clippy::missing_safety_doc, clippy::needless_borrow, clippy::needless_continue, clippy::needless_pass_by_value, clippy::option_option, clippy::pub_enum_variant_names, clippy::rest_pat_in_fully_bound_structs, clippy::todo, clippy::unimplemented, clippy::unnested_or_patterns, clippy::verbose_file_reads, future_incompatible, missing_crate_level_docs, missing_doc_code_examples, // missing_docs, nonstandard_style, rust_2018_idioms, unused_doc_comments, )] #![allow(clippy::manual_range_contains)] mod animation_manager; pub mod containers; mod context; mod data; pub mod experimental; pub(crate) mod grid; mod id; mod input_state; mod introspection; pub mod layers; mod layout; mod memory; pub mod menu; mod painter; pub(crate) mod placer; mod response; mod sense; pub mod style; mod ui; pub mod util; pub mod widgets; pub use emath as math; pub use epaint as paint; pub use epaint::emath; pub use emath::{ clamp, lerp, pos2, remap, remap_clamp, vec2, Align, Align2, NumExt, Pos2, Rect, Vec2, }; pub use epaint::{ color, mutex, text::{FontDefinitions, FontFamily, TextStyle}, ClippedMesh, Color32, Rgba, Shape, Stroke, Texture, TextureId, }; pub use { containers::*, context::{Context, CtxRef}, data::{input::*, output::*}, grid::Grid, id::Id, input_state::InputState, layers::{LayerId, Order}, layout::*, memory::Memory, painter::Painter, response::Response, 
sense::Sense,
    style::{Style, Visuals},
    ui::Ui,
    widgets::*,
};

// ----------------------------------------------------------------------------

/// Returns `true` when egui was compiled with debug assertions enabled.
///
/// A single `const fn` using the built-in `cfg!` macro replaces the previous
/// pair of `#[cfg(debug_assertions)]` / `#[cfg(not(debug_assertions))]`
/// functions — `cfg!` expands to the same compile-time boolean.
pub(crate) const fn has_debug_assertions() -> bool {
    cfg!(debug_assertions)
}

/// Helper function that adds a label when compiling with debug assertions enabled.
pub fn warn_if_debug_build(ui: &mut crate::Ui) {
    if crate::has_debug_assertions() {
        ui.label(
            crate::Label::new("‼ Debug build ‼")
                .small()
                .text_color(crate::Color32::RED),
        )
        .on_hover_text("egui was compiled with debug assertions enabled.");
    }
}

// ----------------------------------------------------------------------------

/// Create a [`Hyperlink`](crate::Hyperlink) to the current [`file!()`] (and line) on Github
///
/// Example: `ui.add(github_link_file_line!("https://github.com/YOUR/PROJECT/blob/master/", "(source code)"));`
#[macro_export]
macro_rules! github_link_file_line {
    ($github_url:expr, $label:expr) => {{
        let url = format!("{}{}#L{}", $github_url, file!(), line!());
        $crate::Hyperlink::new(url).text($label)
    }};
}

/// Create a [`Hyperlink`](crate::Hyperlink) to the current [`file!()`] on github.
///
/// Example: `ui.add(github_link_file!("https://github.com/YOUR/PROJECT/blob/master/", "(source code)"));`
#[macro_export]
macro_rules! github_link_file {
    ($github_url:expr, $label:expr) => {{
        let url = format!("{}{}", $github_url, file!());
        $crate::Hyperlink::new(url).text($label)
    }};
}
28.679558
118
0.638027
1c9594c0ac1090db9054257db1208aa0de0b3dcc
12,815
use std::marker::PhantomData; use swc_common::{util::take::Take, Spanned, DUMMY_SP}; use swc_ecma_ast::*; use crate::ExprFactory; pub struct FunctionWrapper<T> { pub binding_ident: Option<Ident>, pub function: Expr, pub ignore_function_name: bool, pub ignore_function_length: bool, function_ident: Option<Ident>, params: Vec<Param>, _type: PhantomData<T>, } impl<T> FunctionWrapper<T> { /// `get_params` clone only the parameters that count in function length. fn get_params<'a, ParamsIter, Item>(params_iter: ParamsIter) -> Vec<Param> where Item: Into<&'a Param>, ParamsIter: IntoIterator<Item = Item>, { params_iter .into_iter() .map(Into::into) .map_while(|param| match param.pat { Pat::Ident(..) => Some(param.clone()), Pat::Array(..) | Pat::Object(..) => Some(Param { span: param.span, decorators: param.decorators.clone(), pat: Pat::Ident(private_ident!("_").into()), }), _ => None, }) .collect() } /// /// ```javascript /// (function () { /// var REF = FUNCTION; /// return function NAME(PARAMS) { /// return REF.apply(this, arguments); /// }; /// })() /// ``` fn build_anonymous_expression_wrapper(&mut self) -> Expr { let name_ident = self.binding_ident.take(); let ref_ident = private_ident!("_ref"); let ref_decl: Decl = VarDecl { span: DUMMY_SP, kind: VarDeclKind::Var, decls: vec![VarDeclarator { span: DUMMY_SP, name: Pat::Ident(ref_ident.clone().into()), init: Some(Box::new(self.function.take())), definite: false, }], declare: false, } .into(); let return_fn_stmt = { let fn_expr = self.build_function_forward(ref_ident, name_ident); ReturnStmt { span: DUMMY_SP, arg: Some(Box::new(fn_expr.into())), } } .into(); let block_stmt = BlockStmt { span: DUMMY_SP, stmts: vec![ref_decl.into(), return_fn_stmt], }; let function = Function { span: DUMMY_SP, body: Some(block_stmt), params: Default::default(), is_generator: false, is_async: false, decorators: Default::default(), return_type: Default::default(), type_params: Default::default(), }; FnExpr { ident: None, function, } 
.as_iife() .into() } /// /// ```javascript /// (function () { /// var REF = FUNCTION; /// function NAME(PARAMS) { /// return REF.apply(this, arguments); /// } /// return NAME; /// })() /// ``` fn build_named_expression_wrapper(&mut self, name_ident: Ident) -> Expr { let ref_ident = self.function_ident.as_ref().map_or_else( || private_ident!("_ref"), |ident| private_ident!(ident.span, format!("_{}", ident.sym)), ); let ref_stmt: Stmt = Stmt::Decl( VarDecl { span: DUMMY_SP, kind: VarDeclKind::Var, decls: vec![VarDeclarator { span: DUMMY_SP, name: Pat::Ident(ref_ident.clone().into()), init: Some(Box::new(self.function.take())), definite: false, }], declare: false, } .into(), ); let fn_decl_stmt = { let FnExpr { function, .. } = self.build_function_forward(ref_ident, None); Stmt::Decl( FnDecl { ident: name_ident.clone(), declare: false, function, } .into(), ) }; let return_stmt = Stmt::Return(ReturnStmt { span: DUMMY_SP, arg: Some(Box::new(name_ident.into())), }); let block_stmt = BlockStmt { span: DUMMY_SP, stmts: vec![ref_stmt, fn_decl_stmt, return_stmt], }; let function = Function { span: DUMMY_SP, body: Some(block_stmt), params: Default::default(), is_generator: false, is_async: false, decorators: Default::default(), return_type: Default::default(), type_params: Default::default(), }; FnExpr { ident: None, function, } .as_iife() .into() } /// /// ```javascript /// function NAME(PARAMS) { /// return REF.apply(this, arguments); /// } /// function REF() { /// REF = FUNCTION; /// return REF.apply(this, arguments); /// } /// ``` fn build_declaration_wrapper(&mut self, name_ident: Option<Ident>) -> (FnExpr, FnDecl) { let ref_ident = self.function_ident.as_ref().map_or_else( || private_ident!("_ref"), |ident| private_ident!(ident.span, format!("_{}", ident.sym)), ); // function NAME let fn_expr = self.build_function_forward(ref_ident.clone(), name_ident); let assign_stmt = AssignExpr { span: DUMMY_SP, op: op!("="), left: 
PatOrExpr::Expr(Box::new(Expr::Ident(ref_ident.clone()))), right: Box::new(self.function.take()), } .into_stmt(); // clone `return REF.apply(this, arguments);` let return_ref_apply_stmt = fn_expr .function .body .as_ref() .expect("The `fn_expr` we construct cannot be None") .stmts[0] .clone(); let ref_fn_block_stmt = BlockStmt { span: DUMMY_SP, stmts: vec![assign_stmt, return_ref_apply_stmt], }; // function REF let ref_decl = FnDecl { declare: false, ident: ref_ident, function: Function { span: DUMMY_SP, is_async: false, is_generator: false, params: self.params.take(), body: Some(ref_fn_block_stmt), decorators: Default::default(), type_params: Default::default(), return_type: Default::default(), }, }; (fn_expr, ref_decl) } /// /// ```javascript /// function NAME(PARAMS) { /// return REF.apply(this, arguments); /// } /// ``` fn build_function_forward(&mut self, ref_ident: Ident, name_ident: Option<Ident>) -> FnExpr { let apply = Stmt::Return(ReturnStmt { span: DUMMY_SP, arg: Some(Box::new(ref_ident.apply( DUMMY_SP, Box::new(Expr::This(ThisExpr { span: DUMMY_SP })), vec![quote_ident!("arguments").as_arg()], ))), }); FnExpr { ident: name_ident, function: Function { span: DUMMY_SP, is_async: false, is_generator: false, params: self.params.take(), body: Some(BlockStmt { span: DUMMY_SP, stmts: vec![apply], }), decorators: Default::default(), type_params: Default::default(), return_type: Default::default(), }, } } } impl From<FnExpr> for FunctionWrapper<Expr> { fn from(mut fn_expr: FnExpr) -> Self { let function_ident = fn_expr.ident.take(); let params = Self::get_params(fn_expr.function.params.iter()); Self { binding_ident: None, function_ident, params, ignore_function_name: false, ignore_function_length: false, function: fn_expr.into(), _type: Default::default(), } } } impl From<ArrowExpr> for FunctionWrapper<Expr> { fn from( ArrowExpr { span, params, body, is_async, is_generator, .. 
}: ArrowExpr, ) -> Self { let body = Some(match body { BlockStmtOrExpr::BlockStmt(block) => block, BlockStmtOrExpr::Expr(expr) => BlockStmt { span: DUMMY_SP, stmts: vec![Stmt::Return(ReturnStmt { span: expr.span(), arg: Some(expr), })], }, }); let function = Function { span, params: params.into_iter().map(Into::into).collect(), decorators: Default::default(), body, type_params: None, return_type: None, is_generator, is_async, }; let fn_expr = FnExpr { ident: None, function, }; Self { binding_ident: None, function_ident: None, ignore_function_name: false, ignore_function_length: false, params: Self::get_params(fn_expr.function.params.iter()), function: fn_expr.into(), _type: Default::default(), } } } #[allow(clippy::from_over_into)] impl Into<Expr> for FunctionWrapper<Expr> { /// If a function has a function name, it may be called recursively. /// We use the named expression to hoist the function name internally /// Therefore, its recursive calls refer to the correct identity. /// /// Else /// if a function has a binding name, it may be called recursively as well. /// But it refer the binding name which exist the outer scope. /// It is safe to using anonymous expression wrapper. /// /// Optimization: /// A function without a name cannot be recursively referenced by Ident. /// It's safe to return the expr without wrapper if the params.len is 0. 
fn into(mut self) -> Expr {
        if let Some(name_ident) = self.function_ident.as_ref().cloned() {
            // The function has its own name: use the named wrapper so internal
            // recursive references resolve to the hoisted name.
            self.build_named_expression_wrapper(name_ident)
        } else if (!self.ignore_function_name && self.binding_ident.is_some())
            || (!self.ignore_function_length && !self.params.is_empty())
        {
            // A binding name or a non-empty (length-relevant) parameter list
            // still requires wrapping, unless explicitly ignored by the flags.
            self.build_anonymous_expression_wrapper()
        } else {
            // Nothing observable to preserve: return the function unwrapped.
            self.function
        }
    }
}

impl From<FnDecl> for FunctionWrapper<FnDecl> {
    fn from(mut fn_decl: FnDecl) -> Self {
        // The declaration's own ident becomes the internal function name.
        let function_ident = Some(fn_decl.ident.take());
        let params = Self::get_params(fn_decl.function.params.iter());

        Self {
            binding_ident: None,
            function_ident,
            params,
            ignore_function_name: false,
            ignore_function_length: false,
            // Store the declaration body as an anonymous function expression.
            function: FnExpr {
                ident: None,
                function: fn_decl.function,
            }
            .into(),
            _type: Default::default(),
        }
    }
}

///
/// The result of declaration wrapper includes two parts.
/// `name_fn` is used to replace the original function.
/// `ref_fn` is an extra function called internally by `name_fn`.
///
/// ```javascript
/// function NAME(PARAMS) {
///   return REF.apply(this, arguments);
/// }
/// function REF() {
///   REF = FUNCTION;
///   return REF.apply(this, arguments);
/// }
/// ```
pub struct FnWrapperResult<N, R> {
    pub name_fn: N,
    pub ref_fn: R,
}

impl From<FunctionWrapper<FnDecl>> for FnWrapperResult<FnDecl, FnDecl> {
    fn from(mut value: FunctionWrapper<FnDecl>) -> Self {
        let name_ident = value
            .function_ident
            .clone()
            .expect("`FunctionWrapper` converted from `FnDecl` definitely has `Ident`");

        // `None`: the forwarding function takes its name from `name_ident` below.
        let (FnExpr { function, .. }, ref_fn) = value.build_declaration_wrapper(None);

        FnWrapperResult {
            name_fn: FnDecl {
                ident: name_ident,
                declare: false,
                function,
            },
            ref_fn,
        }
    }
}

impl From<FunctionWrapper<Expr>> for FnWrapperResult<FnExpr, FnDecl> {
    fn from(mut value: FunctionWrapper<Expr>) -> Self {
        // Prefer the function's own name; fall back to its binding name.
        let name_ident = value
            .function_ident
            .clone()
            .or_else(|| value.binding_ident.clone());

        let (name_fn, ref_fn) = value.build_declaration_wrapper(name_ident);

        FnWrapperResult { name_fn, ref_fn }
    }
}
29.45977
97
0.508467
ab71c1cfe3c0690cffbbd2a18f0b6cfd950737f8
1,353
use chrono::NaiveDateTime;
use diesel::prelude::*;
use uuid::Uuid;

use crate::{
    schema::messages,
    utils::establish_connection
};

use std::{error::Error, str::FromStr};

/// A chat message row as loaded from the `messages` table.
#[derive(Debug, Clone, Queryable, QueryableByName, Identifiable)]
#[table_name="messages"]
pub struct Message {
    // UUIDs are stored in their 16-byte binary form, not as text
    // (see the `Uuid::as_bytes` conversion in `get_messages`).
    pub id: Vec<u8>,
    pub user_id: Vec<u8>,
    pub room_id: Vec<u8>,
    pub message: String,
    pub send_at: NaiveDateTime,
}

/// Insertable form of `Message`; `id` and `send_at` are not supplied here,
/// so they are presumably filled in by the database — TODO confirm schema defaults.
#[derive(Debug, Insertable)]
#[table_name="messages"]
pub struct NewMessage {
    pub user_id: Vec<u8>,
    pub room_id: Vec<u8>,
    pub message: String,
}

impl Message {
    /// Loads up to 50 messages for the room identified by `room_id`
    /// (a textual UUID, e.g. "550e8400-e29b-41d4-a716-446655440000").
    ///
    /// Returns an error if `room_id` is not a valid UUID or the query fails.
    // NOTE(review): there is no explicit ORDER BY, so row order is
    // backend-dependent — confirm whether callers expect chronological order.
    pub fn get_messages(room_id: String) -> Result<Vec<Message>, Box<dyn Error>> {
        let connection = establish_connection();
        // Convert the textual UUID into the binary form stored in the table.
        let room_id = Uuid::from_str(&room_id)?.as_bytes().to_vec();
        Ok(messages::dsl::messages
            .filter(messages::dsl::room_id.eq(&room_id))
            .limit(50)
            .load::<Message>(&connection)?)
    }

    /// Inserts `message` and returns the row with the latest `send_at`.
    // NOTE(review): the freshly inserted row is re-read via `ORDER BY send_at
    // DESC LIMIT 1`; under concurrent inserts this may return a different
    // message — confirm this race is acceptable.
    pub fn send_message(message: NewMessage) -> Result<Message, Box<dyn Error>> {
        let connection = establish_connection();
        diesel::insert_into(messages::table)
            .values(&message)
            .execute(&connection)?;
        Ok(messages::dsl::messages
            .order(messages::send_at.desc())
            .first::<Message>(&connection)?)
    }
}
24.160714
81
0.617147
33ed70458016ba315aaaa4bfce54344b00f18991
617
/// Extension trait for unwrapping a `Result` from inside a `Drop` impl.
pub trait UnwrapFromDrop<T> {
    fn unwrap_from_drop(self) -> T;
}

impl<T, E> UnwrapFromDrop<T> for Result<T, E>
where
    T: Default,
    E: std::fmt::Debug,
{
    /// Returns the `Ok` value, or panics on `Err` — unless the thread is
    /// already unwinding, in which case it logs the error to stderr and
    /// falls back to `T::default()` (a second panic during unwinding would
    /// abort the process).
    fn unwrap_from_drop(self) -> T {
        self.unwrap_or_else(|err| {
            if std::thread::panicking() {
                eprintln!("called `Result::unwrap()` on an `Err` value: {:?}", err);
                T::default()
            } else {
                panic!("called `Result::unwrap()` on an `Err` value: {:?}", err)
            }
        })
    }
}
23.730769
86
0.424635
3381b95c2a38e1fd9946c29d088ae2c1a6a06a5a
108,609
//! MIR datatypes and passes. See the [rustc dev guide] for more info. //! //! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html use crate::mir::interpret::{GlobalAlloc, Scalar}; use crate::mir::visit::MirVisitable; use crate::ty::adjustment::PointerCast; use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; use crate::ty::print::{FmtPrinter, Printer}; use crate::ty::subst::{Subst, SubstsRef}; use crate::ty::{ self, AdtDef, CanonicalUserTypeAnnotations, List, Region, Ty, TyCtxt, UserTypeAnnotationIndex, }; use rustc_hir as hir; use rustc_hir::def::{CtorKind, Namespace}; use rustc_hir::def_id::DefId; use rustc_hir::{self, GeneratorKind}; use rustc_target::abi::VariantIdx; use polonius_engine::Atom; pub use rustc_ast::ast::Mutability; use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::graph::dominators::{dominators, Dominators}; use rustc_data_structures::graph::{self, GraphSuccessors}; use rustc_index::bit_set::BitMatrix; use rustc_index::vec::{Idx, IndexVec}; use rustc_macros::HashStable; use rustc_serialize::{Decodable, Encodable}; use rustc_span::symbol::Symbol; use rustc_span::{Span, DUMMY_SP}; use rustc_target::abi; use rustc_target::asm::InlineAsmRegOrRegClass; use std::borrow::Cow; use std::fmt::{self, Debug, Display, Formatter, Write}; use std::ops::{Index, IndexMut}; use std::slice; use std::{iter, mem, option}; use self::predecessors::{PredecessorCache, Predecessors}; pub use self::query::*; pub mod interpret; pub mod mono; mod predecessors; mod query; pub mod tcx; pub mod traversal; mod type_foldable; pub mod visit; /// Types for locals type LocalDecls<'tcx> = IndexVec<Local, LocalDecl<'tcx>>; pub trait HasLocalDecls<'tcx> { fn local_decls(&self) -> &LocalDecls<'tcx>; } impl<'tcx> HasLocalDecls<'tcx> for LocalDecls<'tcx> { fn local_decls(&self) -> &LocalDecls<'tcx> { self } } impl<'tcx> HasLocalDecls<'tcx> for Body<'tcx> { fn local_decls(&self) 
-> &LocalDecls<'tcx> { &self.local_decls } } /// The various "big phases" that MIR goes through. /// /// Warning: ordering of variants is significant. #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug, PartialEq, Eq, PartialOrd, Ord)] #[derive(HashStable)] pub enum MirPhase { Build = 0, Const = 1, Validated = 2, DropElab = 3, Optimized = 4, } impl MirPhase { /// Gets the index of the current MirPhase within the set of all `MirPhase`s. pub fn phase_index(&self) -> usize { *self as usize } } /// The lowered representation of a single function. #[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable, TypeFoldable)] pub struct Body<'tcx> { /// A list of basic blocks. References to basic block use a newtyped index type `BasicBlock` /// that indexes into this vector. basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>, /// Records how far through the "desugaring and optimization" process this particular /// MIR has traversed. This is particularly useful when inlining, since in that context /// we instantiate the promoted constants and add them to our promoted vector -- but those /// promoted items have already been optimized, whereas ours have not. This field allows /// us to see the difference and forego optimization on the inlined promoted items. pub phase: MirPhase, /// A list of source scopes; these are referenced by statements /// and used for debuginfo. Indexed by a `SourceScope`. pub source_scopes: IndexVec<SourceScope, SourceScopeData>, /// The yield type of the function, if it is a generator. pub yield_ty: Option<Ty<'tcx>>, /// Generator drop glue. pub generator_drop: Option<Box<Body<'tcx>>>, /// The layout of a generator. Produced by the state transformation. pub generator_layout: Option<GeneratorLayout<'tcx>>, /// If this is a generator then record the type of source expression that caused this generator /// to be created. pub generator_kind: Option<GeneratorKind>, /// Declarations of locals. 
    ///
    /// The first local is the return value pointer, followed by `arg_count`
    /// locals for the function arguments, followed by any user-declared
    /// variables and temporaries.
    pub local_decls: LocalDecls<'tcx>,

    /// User type annotations.
    pub user_type_annotations: CanonicalUserTypeAnnotations<'tcx>,

    /// The number of arguments this function takes.
    ///
    /// Starting at local 1, `arg_count` locals will be provided by the caller
    /// and can be assumed to be initialized.
    ///
    /// If this MIR was built for a constant, this will be 0.
    pub arg_count: usize,

    /// Mark an argument local (which must be a tuple) as getting passed as
    /// its individual components at the LLVM level.
    ///
    /// This is used for the "rust-call" ABI.
    pub spread_arg: Option<Local>,

    /// Debug information pertaining to user variables, including captures.
    pub var_debug_info: Vec<VarDebugInfo<'tcx>>,

    /// Mark this MIR of a const context other than const functions as having converted a `&&` or
    /// `||` expression into `&` or `|` respectively. This is problematic because if we ever stop
    /// this conversion from happening and use short circuiting, we will cause the following code
    /// to change the value of `x`: `let mut x = 42; false && { x = 55; true };`
    ///
    /// List of places where control flow was destroyed. Used for error reporting.
    pub control_flow_destroyed: Vec<(Span, String)>,

    /// A span representing this MIR, for error reporting.
    pub span: Span,

    /// Constants that are required to evaluate successfully for this MIR to be well-formed.
    /// We hold in this field all the constants we are not able to evaluate yet.
    pub required_consts: Vec<Constant<'tcx>>,

    /// The user may be writing e.g. `&[(SOME_CELL, 42)][i].1` and this would get promoted, because
    /// we'd statically know that no thing with interior mutability will ever be available to the
    /// user without some serious unsafe code.
    /// Now this means that our promoted is actually
    /// `&[(SOME_CELL, 42)]` and the MIR using it will do the `&promoted[i].1` projection because the
    /// index may be a runtime value. Such a promoted value is illegal because it has reachable
    /// interior mutability. This flag just makes this situation very obvious where the previous
    /// implementation without the flag hid this situation silently.
    /// FIXME(oli-obk): rewrite the promoted during promotion to eliminate the cell components.
    pub ignore_interior_mut_in_const_validation: bool,

    // Lazily computed predecessor graph; see `predecessors()` and the
    // invalidation in `basic_blocks_mut()`.
    predecessor_cache: PredecessorCache,
}

impl<'tcx> Body<'tcx> {
    /// Creates a new MIR body from its constituent parts.
    ///
    /// Panics if `local_decls` has fewer than `arg_count + 1` entries
    /// (one for the return place, plus one per argument).
    pub fn new(
        basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
        source_scopes: IndexVec<SourceScope, SourceScopeData>,
        local_decls: LocalDecls<'tcx>,
        user_type_annotations: CanonicalUserTypeAnnotations<'tcx>,
        arg_count: usize,
        var_debug_info: Vec<VarDebugInfo<'tcx>>,
        span: Span,
        control_flow_destroyed: Vec<(Span, String)>,
        generator_kind: Option<GeneratorKind>,
    ) -> Self {
        // We need `arg_count` locals, and one for the return place.
        assert!(
            local_decls.len() > arg_count,
            "expected at least {} locals, got {}",
            arg_count + 1,
            local_decls.len()
        );

        Body {
            phase: MirPhase::Build,
            basic_blocks,
            source_scopes,
            yield_ty: None,
            generator_drop: None,
            generator_layout: None,
            generator_kind,
            local_decls,
            user_type_annotations,
            arg_count,
            spread_arg: None,
            var_debug_info,
            span,
            required_consts: Vec::new(),
            ignore_interior_mut_in_const_validation: false,
            control_flow_destroyed,
            predecessor_cache: PredecessorCache::new(),
        }
    }

    /// Returns a partially initialized MIR body containing only a list of basic blocks.
    ///
    /// The returned MIR contains no `LocalDecl`s (even for the return place) or source scopes. It
    /// is only useful for testing but cannot be `#[cfg(test)]` because it is used in a different
    /// crate.
    pub fn new_cfg_only(basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>) -> Self {
        Body {
            phase: MirPhase::Build,
            basic_blocks,
            source_scopes: IndexVec::new(),
            yield_ty: None,
            generator_drop: None,
            generator_layout: None,
            local_decls: IndexVec::new(),
            user_type_annotations: IndexVec::new(),
            arg_count: 0,
            spread_arg: None,
            span: DUMMY_SP,
            required_consts: Vec::new(),
            control_flow_destroyed: Vec::new(),
            generator_kind: None,
            var_debug_info: Vec::new(),
            ignore_interior_mut_in_const_validation: false,
            predecessor_cache: PredecessorCache::new(),
        }
    }

    /// Returns a read-only view of this body's basic blocks.
    #[inline]
    pub fn basic_blocks(&self) -> &IndexVec<BasicBlock, BasicBlockData<'tcx>> {
        &self.basic_blocks
    }

    /// Returns a mutable reference to the basic blocks.
    ///
    /// Also invalidates the cached predecessor graph, since the caller may
    /// alter the CFG through this reference.
    #[inline]
    pub fn basic_blocks_mut(&mut self) -> &mut IndexVec<BasicBlock, BasicBlockData<'tcx>> {
        // Because the user could mutate basic block terminators via this reference, we need to
        // invalidate the predecessor cache.
        //
        // FIXME: Use a finer-grained API for this, so only transformations that alter terminators
        // invalidate the predecessor cache.
        self.predecessor_cache.invalidate();
        &mut self.basic_blocks
    }

    /// Returns mutable references to both the basic blocks and the local
    /// declarations, for when both must be borrowed mutably at once.
    /// Invalidates the predecessor cache, like `basic_blocks_mut`.
    #[inline]
    pub fn basic_blocks_and_local_decls_mut(
        &mut self,
    ) -> (&mut IndexVec<BasicBlock, BasicBlockData<'tcx>>, &mut LocalDecls<'tcx>) {
        self.predecessor_cache.invalidate();
        (&mut self.basic_blocks, &mut self.local_decls)
    }

    /// Returns `true` if a cycle exists in the control-flow graph that is reachable from the
    /// `START_BLOCK`.
    pub fn is_cfg_cyclic(&self) -> bool {
        graph::is_cyclic(self)
    }

    /// Classifies `local` as the return place, an argument, a user variable,
    /// or a compiler-introduced temporary, based on its index and `LocalDecl`.
    #[inline]
    pub fn local_kind(&self, local: Local) -> LocalKind {
        let index = local.as_usize();
        if index == 0 {
            debug_assert!(
                self.local_decls[local].mutability == Mutability::Mut,
                "return place should be mutable"
            );

            LocalKind::ReturnPointer
        } else if index < self.arg_count + 1 {
            LocalKind::Arg
        } else if self.local_decls[local].is_user_variable() {
            LocalKind::Var
        } else {
            LocalKind::Temp
        }
    }

    /// Returns an iterator over all temporaries.
#[inline] pub fn temps_iter<'a>(&'a self) -> impl Iterator<Item = Local> + 'a { (self.arg_count + 1..self.local_decls.len()).filter_map(move |index| { let local = Local::new(index); if self.local_decls[local].is_user_variable() { None } else { Some(local) } }) } /// Returns an iterator over all user-declared locals. #[inline] pub fn vars_iter<'a>(&'a self) -> impl Iterator<Item = Local> + 'a { (self.arg_count + 1..self.local_decls.len()).filter_map(move |index| { let local = Local::new(index); self.local_decls[local].is_user_variable().then_some(local) }) } /// Returns an iterator over all user-declared mutable locals. #[inline] pub fn mut_vars_iter<'a>(&'a self) -> impl Iterator<Item = Local> + 'a { (self.arg_count + 1..self.local_decls.len()).filter_map(move |index| { let local = Local::new(index); let decl = &self.local_decls[local]; if decl.is_user_variable() && decl.mutability == Mutability::Mut { Some(local) } else { None } }) } /// Returns an iterator over all user-declared mutable arguments and locals. #[inline] pub fn mut_vars_and_args_iter<'a>(&'a self) -> impl Iterator<Item = Local> + 'a { (1..self.local_decls.len()).filter_map(move |index| { let local = Local::new(index); let decl = &self.local_decls[local]; if (decl.is_user_variable() || index < self.arg_count + 1) && decl.mutability == Mutability::Mut { Some(local) } else { None } }) } /// Returns an iterator over all function arguments. #[inline] pub fn args_iter(&self) -> impl Iterator<Item = Local> + ExactSizeIterator { let arg_count = self.arg_count; (1..arg_count + 1).map(Local::new) } /// Returns an iterator over all user-defined variables and compiler-generated temporaries (all /// locals that are neither arguments nor the return place). #[inline] pub fn vars_and_temps_iter(&self) -> impl Iterator<Item = Local> + ExactSizeIterator { let arg_count = self.arg_count; let local_count = self.local_decls.len(); (arg_count + 1..local_count).map(Local::new) } /// Changes a statement to a nop. 
    /// This is both faster than deleting instructions and avoids
    /// invalidating statement indices in `Location`s.
    pub fn make_statement_nop(&mut self, location: Location) {
        let block = &mut self.basic_blocks[location.block];
        debug_assert!(location.statement_index < block.statements.len());
        block.statements[location.statement_index].make_nop()
    }

    /// Returns the source info associated with `location`.
    ///
    /// `location` may index either a statement or, one past the end of the
    /// statement list, the block's terminator.
    pub fn source_info(&self, location: Location) -> &SourceInfo {
        let block = &self[location.block];
        let stmts = &block.statements;
        let idx = location.statement_index;
        if idx < stmts.len() {
            &stmts[idx].source_info
        } else {
            assert_eq!(idx, stmts.len());
            &block.terminator().source_info
        }
    }

    /// Checks if `sub` is a sub scope of `sup`, by walking `sub`'s chain of
    /// parent scopes until `sup` is found (or the root is reached).
    pub fn is_sub_scope(&self, mut sub: SourceScope, sup: SourceScope) -> bool {
        while sub != sup {
            match self.source_scopes[sub].parent_scope {
                None => return false,
                Some(p) => sub = p,
            }
        }
        true
    }

    /// Returns the return type; it is always the first element of the `local_decls` array.
    #[inline]
    pub fn return_ty(&self) -> Ty<'tcx> {
        self.local_decls[RETURN_PLACE].ty
    }

    /// Gets the location of the terminator for the given block.
    #[inline]
    pub fn terminator_loc(&self, bb: BasicBlock) -> Location {
        Location { block: bb, statement_index: self[bb].statements.len() }
    }

    /// Returns the (lazily computed) predecessors of each basic block.
    #[inline]
    pub fn predecessors(&self) -> impl std::ops::Deref<Target = Predecessors> + '_ {
        self.predecessor_cache.compute(&self.basic_blocks)
    }

    /// Computes the dominator relation for the basic blocks of this body.
    #[inline]
    pub fn dominators(&self) -> Dominators<BasicBlock> {
        dominators(self)
    }
}

/// How (and whether) a piece of MIR is allowed to perform unsafe operations.
#[derive(Copy, Clone, PartialEq, Eq, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub enum Safety {
    Safe,
    /// Unsafe because of a PushUnsafeBlock
    BuiltinUnsafe,
    /// Unsafe because of an unsafe fn
    FnUnsafe,
    /// Unsafe because of an `unsafe` block
    ExplicitUnsafe(hir::HirId),
}

impl<'tcx> Index<BasicBlock> for Body<'tcx> {
    type Output = BasicBlockData<'tcx>;

    #[inline]
    fn index(&self, index: BasicBlock) -> &BasicBlockData<'tcx> {
        &self.basic_blocks()[index]
    }
}

impl<'tcx> IndexMut<BasicBlock> for Body<'tcx> {
    #[inline]
    fn index_mut(&mut self, index: BasicBlock) -> &mut BasicBlockData<'tcx> {
        // Goes through `basic_blocks_mut`, so the predecessor cache is
        // invalidated on mutable indexing as well.
        &mut self.basic_blocks_mut()[index]
    }
}

/// A value that is either present (`Set`) or has been cleared (`Clear`) —
/// used for data that is only kept for the local crate (see the
/// `UseSpecialized{En,De}codable` impls below).
#[derive(Copy, Clone, Debug, HashStable, TypeFoldable)]
pub enum ClearCrossCrate<T> {
    Clear,
    Set(T),
}

impl<T> ClearCrossCrate<T> {
    /// Converts from `&ClearCrossCrate<T>` to `ClearCrossCrate<&T>`.
    pub fn as_ref(&self) -> ClearCrossCrate<&T> {
        match self {
            ClearCrossCrate::Clear => ClearCrossCrate::Clear,
            ClearCrossCrate::Set(v) => ClearCrossCrate::Set(v),
        }
    }

    /// Returns the contained value; ICEs if the value was `Clear`.
    pub fn assert_crate_local(self) -> T {
        match self {
            ClearCrossCrate::Clear => bug!("unwrapping cross-crate data"),
            ClearCrossCrate::Set(v) => v,
        }
    }
}

// Discriminant tags used by the hand-written (de)serialization below.
const TAG_CLEAR_CROSS_CRATE_CLEAR: u8 = 0;
const TAG_CLEAR_CROSS_CRATE_SET: u8 = 1;

impl<T: Encodable> rustc_serialize::UseSpecializedEncodable for ClearCrossCrate<T> {
    #[inline]
    fn default_encode<E: rustc_serialize::Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
        // Emit the tag byte, followed by the payload for `Set` only.
        match *self {
            ClearCrossCrate::Clear => TAG_CLEAR_CROSS_CRATE_CLEAR.encode(e),
            ClearCrossCrate::Set(ref val) => {
                TAG_CLEAR_CROSS_CRATE_SET.encode(e)?;
                val.encode(e)
            }
        }
    }
}
impl<T: Decodable> rustc_serialize::UseSpecializedDecodable for ClearCrossCrate<T> {
    #[inline]
    fn
    default_decode<D>(d: &mut D) -> Result<ClearCrossCrate<T>, D::Error>
    where
        D: rustc_serialize::Decoder,
    {
        // Read the tag byte written by `default_encode`, then the payload if `Set`.
        let discr = u8::decode(d)?;

        match discr {
            TAG_CLEAR_CROSS_CRATE_CLEAR => Ok(ClearCrossCrate::Clear),
            TAG_CLEAR_CROSS_CRATE_SET => {
                let val = T::decode(d)?;
                Ok(ClearCrossCrate::Set(val))
            }
            _ => unreachable!(),
        }
    }
}

/// Grouped information about the source code origin of a MIR entity.
/// Intended to be inspected by diagnostics and debuginfo.
/// Most passes can work with it as a whole, within a single function.
// The unofficial Cranelift backend, at least as of #65828, needs `SourceInfo` to implement `Eq` and
// `Hash`. Please ping @bjorn3 if removing them.
#[derive(Copy, Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable, Hash, HashStable)]
pub struct SourceInfo {
    /// The source span for the AST pertaining to this MIR entity.
    pub span: Span,

    /// The source scope, keeping track of which bindings can be
    /// seen by debuginfo, active lint levels, `unsafe {...}`, etc.
    pub scope: SourceScope,
}

impl SourceInfo {
    /// Creates a `SourceInfo` for `span` in the outermost source scope.
    #[inline]
    pub fn outermost(span: Span) -> Self {
        SourceInfo { span, scope: OUTERMOST_SOURCE_SCOPE }
    }
}

///////////////////////////////////////////////////////////////////////////
// Borrow kinds

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable)]
#[derive(HashStable)]
pub enum BorrowKind {
    /// Data must be immutable and is aliasable.
    Shared,

    /// The immediately borrowed place must be immutable, but projections from
    /// it don't need to be. For example, a shallow borrow of `a.b` doesn't
    /// conflict with a mutable borrow of `a.b.c`.
    ///
    /// This is used when lowering matches: when matching on a place we want to
    /// ensure that the place has the same value from the start of the match until
    /// an arm is selected.
    /// This prevents this code from compiling:
    ///
    ///     let mut x = &Some(0);
    ///     match *x {
    ///         None => (),
    ///         Some(_) if { x = &None; false } => (),
    ///         Some(_) => (),
    ///     }
    ///
    /// This can't be a shared borrow because mutably borrowing (*x as Some).0
    /// should not prevent `if let None = x { ... }`, for example, because the
    /// mutating `(*x as Some).0` can't affect the discriminant of `x`.
    /// We can also report errors with this kind of borrow differently.
    Shallow,

    /// Data must be immutable but not aliasable. This kind of borrow
    /// cannot currently be expressed by the user and is used only in
    /// implicit closure bindings. It is needed when the closure is
    /// borrowing or mutating a mutable referent, e.g.:
    ///
    ///     let x: &mut isize = ...;
    ///     let y = || *x += 5;
    ///
    /// If we were to try to translate this closure into a more explicit
    /// form, we'd encounter an error with the code as written:
    ///
    ///     struct Env { x: & &mut isize }
    ///     let x: &mut isize = ...;
    ///     let y = (&mut Env { &x }, fn_ptr); // Closure is pair of env and fn
    ///     fn fn_ptr(env: &mut Env) { **env.x += 5; }
    ///
    /// This is then illegal because you cannot mutate an `&mut` found
    /// in an aliasable location. To solve, you'd have to translate with
    /// an `&mut` borrow:
    ///
    ///     struct Env { x: & &mut isize }
    ///     let x: &mut isize = ...;
    ///     let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x
    ///     fn fn_ptr(env: &mut Env) { **env.x += 5; }
    ///
    /// Now the assignment to `**env.x` is legal, but creating a
    /// mutable pointer to `x` is not because `x` is not mutable. We
    /// could fix this by declaring `x` as `let mut x`. This is ok in
    /// user code, if awkward, but extra weird for closures, since the
    /// borrow is hidden.
    ///
    /// So we introduce a "unique imm" borrow -- the referent is
    /// immutable, but not aliasable. This solves the problem. For
    /// simplicity, we don't give users the way to express this
    /// borrow, it's just used when translating closures.
    Unique,

    /// Data is mutable and not aliasable.
    Mut {
        /// `true` if this borrow arose from method-call auto-ref
        /// (i.e., `adjustment::Adjust::Borrow`).
        allow_two_phase_borrow: bool,
    },
}

impl BorrowKind {
    /// Returns `true` only for a `Mut` borrow whose `allow_two_phase_borrow`
    /// flag is set; all other kinds never allow two-phase borrows.
    pub fn allows_two_phase_borrow(&self) -> bool {
        match *self {
            BorrowKind::Shared | BorrowKind::Shallow | BorrowKind::Unique => false,
            BorrowKind::Mut { allow_two_phase_borrow } => allow_two_phase_borrow,
        }
    }
}

///////////////////////////////////////////////////////////////////////////
// Variables and temps

rustc_index::newtype_index! {
    pub struct Local {
        derive [HashStable]
        DEBUG_FORMAT = "_{}",
        const RETURN_PLACE = 0,
    }
}

impl Atom for Local {
    fn index(self) -> usize {
        Idx::index(self)
    }
}

/// Classifies locals into categories. See `Body::local_kind`.
#[derive(PartialEq, Eq, Debug, HashStable)]
pub enum LocalKind {
    /// User-declared variable binding.
    Var,
    /// Compiler-introduced temporary.
    Temp,
    /// Function argument.
    Arg,
    /// Location of function's return value.
    ReturnPointer,
}

#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct VarBindingForm<'tcx> {
    /// Is variable bound via `x`, `mut x`, `ref x`, or `ref mut x`?
    pub binding_mode: ty::BindingMode,
    /// If an explicit type was provided for this variable binding,
    /// this holds the source Span of that type.
    ///
    /// NOTE: if you want to change this to a `HirId`, be wary that
    /// doing so breaks incremental compilation (as of this writing),
    /// while a `Span` does not cause our tests to fail.
    pub opt_ty_info: Option<Span>,
    /// Place of the RHS of the =, or the subject of the `match` where this
    /// variable is initialized. None in the case of `let PATTERN;`.
    /// Some((None, ..)) in the case of a `let [mut] x = ...` because
    /// (a) the right-hand side isn't evaluated as a place expression.
    /// (b) it gives a way to separate this case from the remaining cases
    /// for diagnostics.
    pub opt_match_place: Option<(Option<Place<'tcx>>, Span)>,
    /// The span of the pattern in which this variable was bound.
    pub pat_span: Span,
}

#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum BindingForm<'tcx> {
    /// This is a binding for a non-`self` binding, or a `self` that has an explicit type.
    Var(VarBindingForm<'tcx>),
    /// Binding for a `self`/`&self`/`&mut self` binding where the type is implicit.
    ImplicitSelf(ImplicitSelfKind),
    /// Reference used in a guard expression to ensure immutability.
    RefForGuard,
}

/// Represents what type of implicit self a function has, if any.
#[derive(Clone, Copy, PartialEq, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub enum ImplicitSelfKind {
    /// Represents a `fn x(self);`.
    Imm,
    /// Represents a `fn x(mut self);`.
    Mut,
    /// Represents a `fn x(&self);`.
    ImmRef,
    /// Represents a `fn x(&mut self);`.
    MutRef,
    /// Represents when a function does not have a self argument or
    /// when a function has a `self: X` argument.
    None,
}

CloneTypeFoldableAndLiftImpls! { BindingForm<'tcx>, }

mod binding_form_impl {
    use crate::ich::StableHashingContext;
    use rustc_data_structures::stable_hasher::{HashStable, StableHasher};

    impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for super::BindingForm<'tcx> {
        fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
            use super::BindingForm::*;
            // Include the variant discriminant in the hash, then the payload.
            ::std::mem::discriminant(self).hash_stable(hcx, hasher);

            match self {
                Var(binding) => binding.hash_stable(hcx, hasher),
                ImplicitSelf(kind) => kind.hash_stable(hcx, hasher),
                RefForGuard => (),
            }
        }
    }
}

/// `BlockTailInfo` is attached to the `LocalDecl` for temporaries
/// created during evaluation of expressions in a block tail
/// expression; that is, a block like `{ STMT_1; STMT_2; EXPR }`.
///
/// It is used to improve diagnostics when such temporaries are
/// involved in borrow_check errors, e.g., explanations of where the
/// temporaries come from, when their destructors are run, and/or how
/// one might revise the code to satisfy the borrow checker's rules.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct BlockTailInfo {
    /// If `true`, then the value resulting from evaluating this tail
    /// expression is ignored by the block's expression context.
    ///
    /// Examples include `{ ...; tail };` and `let _ = { ...; tail };`
    /// but not e.g., `let _x = { ...; tail };`
    pub tail_result_is_ignored: bool,

    /// `Span` of the tail expression.
    pub span: Span,
}

/// A MIR local.
///
/// This can be a binding declared by the user, a temporary inserted by the compiler, a function
/// argument, or the return place.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)]
pub struct LocalDecl<'tcx> {
    /// Whether this is a mutable binding (i.e., `let x` or `let mut x`).
    ///
    /// Temporaries and the return place are always mutable.
    pub mutability: Mutability,

    // FIXME(matthewjasper) Don't store this in `Body`
    pub local_info: Option<Box<LocalInfo<'tcx>>>,

    /// `true` if this is an internal local.
    ///
    /// These locals are not based on types in the source code and are only used
    /// for a few desugarings at the moment.
    ///
    /// The generator transformation will sanity check the locals which are live
    /// across a suspension point against the type components of the generator
    /// which type checking knows are live across a suspension point. We need to
    /// flag drop flags to avoid triggering this check as they are introduced
    /// after typeck.
    ///
    /// Unsafety checking will also ignore dereferences of these locals,
    /// so they can be used for raw pointers only used in a desugaring.
    ///
    /// This should be sound because the drop flags are fully algebraic, and
    /// therefore don't affect the OIBIT or outlives properties of the
    /// generator.
    pub internal: bool,

    /// If this local is a temporary and `is_block_tail` is `Some`,
    /// then it is a temporary created for evaluation of some
    /// subexpression of some block's tail expression (with no
    /// intervening statement context).
    // FIXME(matthewjasper) Don't store this in `Body`
    pub is_block_tail: Option<BlockTailInfo>,

    /// The type of this local.
    pub ty: Ty<'tcx>,

    /// If the user manually ascribed a type to this variable,
    /// e.g., via `let x: T`, then we carry that type here. The MIR
    /// borrow checker needs this information since it can affect
    /// region inference.
    // FIXME(matthewjasper) Don't store this in `Body`
    pub user_ty: Option<Box<UserTypeProjections>>,

    /// The *syntactic* (i.e., not visibility) source scope the local is defined
    /// in. If the local was defined in a let-statement, this
    /// is *within* the let-statement, rather than outside
    /// of it.
    ///
    /// This is needed because the visibility source scope of locals within
    /// a let-statement is weird.
    ///
    /// The reason is that we want the local to be *within* the let-statement
    /// for lint purposes, but we want the local to be *after* the let-statement
    /// for names-in-scope purposes.
    ///
    /// That is, if we have a let-statement like the one in this
    /// function:
    ///
    /// ```
    /// fn foo(x: &str) {
    ///     #[allow(unused_mut)]
    ///     let mut x: u32 = { // <- one unused mut
    ///         let mut y: u32 = x.parse().unwrap();
    ///         y + 2
    ///     };
    ///     drop(x);
    /// }
    /// ```
    ///
    /// Then, from a lint point of view, the declaration of `x: u32`
    /// (and `y: u32`) are within the `#[allow(unused_mut)]` scope - the
    /// lint scopes are the same as the AST/HIR nesting.
/// /// However, from a name lookup point of view, the scopes look more like /// as if the let-statements were `match` expressions: /// /// ``` /// fn foo(x: &str) { /// match { /// match x.parse().unwrap() { /// y => y + 2 /// } /// } { /// x => drop(x) /// }; /// } /// ``` /// /// We care about the name-lookup scopes for debuginfo - if the /// debuginfo instruction pointer is at the call to `x.parse()`, we /// want `x` to refer to `x: &str`, but if it is at the call to /// `drop(x)`, we want it to refer to `x: u32`. /// /// To allow both uses to work, we need to have more than a single scope /// for a local. We have the `source_info.scope` represent the "syntactic" /// lint scope (with a variable being under its let block) while the /// `var_debug_info.source_info.scope` represents the "local variable" /// scope (where the "rest" of a block is under all prior let-statements). /// /// The end result looks like this: /// /// ```text /// ROOT SCOPE /// │{ argument x: &str } /// │ /// │ │{ #[allow(unused_mut)] } // This is actually split into 2 scopes /// │ │ // in practice because I'm lazy. /// │ │ /// │ │← x.source_info.scope /// │ │← `x.parse().unwrap()` /// │ │ /// │ │ │← y.source_info.scope /// │ │ /// │ │ │{ let y: u32 } /// │ │ │ /// │ │ │← y.var_debug_info.source_info.scope /// │ │ │← `y + 2` /// │ /// │ │{ let x: u32 } /// │ │← x.var_debug_info.source_info.scope /// │ │← `drop(x)` // This accesses `x: u32`. /// ``` pub source_info: SourceInfo, } // `LocalDecl` is used a lot. Make sure it doesn't unintentionally get bigger. #[cfg(target_arch = "x86_64")] static_assert_size!(LocalDecl<'_>, 56); /// Extra information about a some locals that's used for diagnostics. (Not /// used for non-StaticRef temporaries, the return place, or anonymous function /// parameters.) 
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)]
pub enum LocalInfo<'tcx> {
    /// A user-defined local variable or function parameter
    ///
    /// The `BindingForm` is solely used for local diagnostics when generating
    /// warnings/errors when compiling the current crate, and therefore it need
    /// not be visible across crates.
    User(ClearCrossCrate<BindingForm<'tcx>>),
    /// A temporary created that references the static with the given `DefId`.
    StaticRef { def_id: DefId, is_thread_local: bool },
}

impl<'tcx> LocalDecl<'tcx> {
    /// Returns `true` only if local is a binding that can itself be
    /// made mutable via the addition of the `mut` keyword, namely
    /// something like the occurrences of `x` in:
    /// - `fn foo(x: Type) { ... }`,
    /// - `let x = ...`,
    /// - or `match ... { C(x) => ... }`
    pub fn can_be_made_mutable(&self) -> bool {
        match self.local_info {
            Some(box LocalInfo::User(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
                binding_mode: ty::BindingMode::BindByValue(_),
                opt_ty_info: _,
                opt_match_place: _,
                pat_span: _,
            })))) => true,

            Some(box LocalInfo::User(ClearCrossCrate::Set(BindingForm::ImplicitSelf(
                ImplicitSelfKind::Imm,
            )))) => true,

            _ => false,
        }
    }

    /// Returns `true` if local is definitely not a `ref ident` or
    /// `ref mut ident` binding. (Such bindings cannot be made into
    /// mutable bindings, but the inverse does not necessarily hold).
    pub fn is_nonref_binding(&self) -> bool {
        match self.local_info {
            Some(box LocalInfo::User(ClearCrossCrate::Set(BindingForm::Var(VarBindingForm {
                binding_mode: ty::BindingMode::BindByValue(_),
                opt_ty_info: _,
                opt_match_place: _,
                pat_span: _,
            })))) => true,

            Some(box LocalInfo::User(ClearCrossCrate::Set(BindingForm::ImplicitSelf(_)))) => true,

            _ => false,
        }
    }

    /// Returns `true` if this variable is a named variable or function
    /// parameter declared by the user.
    #[inline]
    pub fn is_user_variable(&self) -> bool {
        match self.local_info {
            Some(box LocalInfo::User(_)) => true,
            _ => false,
        }
    }

    /// Returns `true` if this is a reference to a variable bound in a `match`
    /// expression that is used to access said variable for the guard of the
    /// match arm.
    pub fn is_ref_for_guard(&self) -> bool {
        match self.local_info {
            Some(box LocalInfo::User(ClearCrossCrate::Set(BindingForm::RefForGuard))) => true,
            _ => false,
        }
    }

    /// Returns `true` if this is a reference to a static item that is used to
    /// access that static.
    pub fn is_ref_to_static(&self) -> bool {
        match self.local_info {
            Some(box LocalInfo::StaticRef { .. }) => true,
            _ => false,
        }
    }

    /// Returns `true` if this is a reference to a static item whose `DefId`
    /// refers to a *thread-local* static.
    pub fn is_ref_to_thread_local(&self) -> bool {
        match self.local_info {
            Some(box LocalInfo::StaticRef { is_thread_local, .. }) => is_thread_local,
            _ => false,
        }
    }

    /// Returns `true` if the local is from a compiler desugaring, e.g.,
    /// `__next` from a `for` loop.
    #[inline]
    pub fn from_compiler_desugaring(&self) -> bool {
        self.source_info.span.desugaring_kind().is_some()
    }

    /// Creates a new `LocalDecl` for a temporary: mutable, non-internal.
    #[inline]
    pub fn new(ty: Ty<'tcx>, span: Span) -> Self {
        Self::with_source_info(ty, SourceInfo::outermost(span))
    }

    /// Like `LocalDecl::new`, but takes a `SourceInfo` instead of a `Span`.
    #[inline]
    pub fn with_source_info(ty: Ty<'tcx>, source_info: SourceInfo) -> Self {
        LocalDecl {
            mutability: Mutability::Mut,
            local_info: None,
            internal: false,
            is_block_tail: None,
            ty,
            user_ty: None,
            source_info,
        }
    }

    /// Converts `self` into same `LocalDecl` except tagged as internal.
    #[inline]
    pub fn internal(mut self) -> Self {
        self.internal = true;
        self
    }

    /// Converts `self` into same `LocalDecl` except tagged as immutable.
    #[inline]
    pub fn immutable(mut self) -> Self {
        self.mutability = Mutability::Not;
        self
    }

    /// Converts `self` into same `LocalDecl` except tagged as a block-tail
    /// temporary (see `BlockTailInfo`). Panics if already so tagged.
    #[inline]
    pub fn block_tail(mut self, info: BlockTailInfo) -> Self {
        assert!(self.is_block_tail.is_none());
        self.is_block_tail = Some(info);
        self
    }
}

/// Debug information pertaining to a user variable.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)]
pub struct VarDebugInfo<'tcx> {
    /// The user variable's name, as recorded for debuginfo.
    pub name: Symbol,

    /// Source info of the user variable, including the scope
    /// within which the variable is visible (to debuginfo)
    /// (see `LocalDecl`'s `source_info` field for more details).
    pub source_info: SourceInfo,

    /// Where the data for this user variable is to be found.
    /// NOTE(eddyb) There's an unenforced invariant that this `Place` is
    /// based on a `Local`, not a `Static`, and contains no indexing.
    pub place: Place<'tcx>,
}

///////////////////////////////////////////////////////////////////////////
// BasicBlock

rustc_index::newtype_index! {
    pub struct BasicBlock {
        derive [HashStable]
        DEBUG_FORMAT = "bb{}",
        const START_BLOCK = 0,
    }
}

impl BasicBlock {
    /// Returns the `Location` of the first statement of this block.
    pub fn start_location(self) -> Location {
        Location { block: self, statement_index: 0 }
    }
}

///////////////////////////////////////////////////////////////////////////
// BasicBlockData and Terminator

#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)]
pub struct BasicBlockData<'tcx> {
    /// List of statements in this block.
    pub statements: Vec<Statement<'tcx>>,

    /// Terminator for this block.
    ///
    /// N.B., this should generally ONLY be `None` during construction.
    /// Therefore, you should generally access it via the
    /// `terminator()` or `terminator_mut()` methods. The only
    /// exception is that certain passes, such as `simplify_cfg`, swap
    /// out the terminator temporarily with `None` while they continue
    /// to recurse over the set of basic blocks.
    pub terminator: Option<Terminator<'tcx>>,

    /// If true, this block lies on an unwind path. This is used
    /// during codegen where distinct kinds of basic blocks may be
    /// generated (particularly for MSVC cleanup). Unwind blocks must
    /// only branch to other unwind blocks.
    pub is_cleanup: bool,
}

#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct Terminator<'tcx> {
    /// Source position and scope information for this terminator.
    pub source_info: SourceInfo,
    /// What kind of terminator this is (and its kind-specific payload).
    pub kind: TerminatorKind<'tcx>,
}

#[derive(Clone, RustcEncodable, RustcDecodable, HashStable, PartialEq)]
pub enum TerminatorKind<'tcx> {
    /// Block should have one successor in the graph; we jump there.
    Goto { target: BasicBlock },

    /// Operand evaluates to an integer; jump depending on its value
    /// to one of the targets, and otherwise fallback to `otherwise`.
    SwitchInt {
        /// The discriminant value being tested.
        discr: Operand<'tcx>,

        /// The type of value being tested.
        switch_ty: Ty<'tcx>,

        /// Possible values. The locations to branch to in each case
        /// are found in the corresponding indices from the `targets` vector.
        values: Cow<'tcx, [u128]>,

        /// Possible branch sites. The last element of this vector is used
        /// for the otherwise branch, so targets.len() == values.len() + 1
        /// should hold.
        //
        // This invariant is quite non-obvious and also could be improved.
        // One way to make this invariant is to have something like this instead:
        //
        // branches: Vec<(ConstInt, BasicBlock)>,
        // otherwise: Option<BasicBlock> // exhaustive if None
        //
        // However we’ve decided to keep this as-is until we figure a case
        // where some other approach seems to be strictly better than other.
        targets: Vec<BasicBlock>,
    },

    /// Indicates that the landing pad is finished and unwinding should
    /// continue. Emitted by `build::scope::diverge_cleanup`.
    Resume,

    /// Indicates that the landing pad is finished and that the process
    /// should abort. Used to prevent unwinding for foreign items.
    Abort,

    /// Indicates a normal return. The return place should have
    /// been filled in before this executes. This can occur multiple times
    /// in different basic blocks.
    Return,

    /// Indicates a terminator that can never be reached.
    Unreachable,

    /// Drop the `Place`.
    Drop { place: Place<'tcx>, target: BasicBlock, unwind: Option<BasicBlock> },

    /// Drop the `Place` and assign the new value over it. This ensures
    /// that the assignment to `P` occurs *even if* the destructor for
    /// place unwinds. Its semantics are best explained by the
    /// elaboration:
    ///
    /// ```
    /// BB0 {
    ///   DropAndReplace(P <- V, goto BB1, unwind BB2)
    /// }
    /// ```
    ///
    /// becomes
    ///
    /// ```
    /// BB0 {
    ///   Drop(P, goto BB1, unwind BB2)
    /// }
    /// BB1 {
    ///   // P is now uninitialized
    ///   P <- V
    /// }
    /// BB2 {
    ///   // P is now uninitialized -- its dtor panicked
    ///   P <- V
    /// }
    /// ```
    DropAndReplace {
        place: Place<'tcx>,
        value: Operand<'tcx>,
        target: BasicBlock,
        unwind: Option<BasicBlock>,
    },

    /// Block ends with a call of a converging function.
    Call {
        /// The function that’s being called.
        func: Operand<'tcx>,
        /// Arguments the function is called with.
        /// These are owned by the callee, which is free to modify them.
        /// This allows the memory occupied by "by-value" arguments to be
        /// reused across function calls without duplicating the contents.
        args: Vec<Operand<'tcx>>,
        /// Destination for the return value. If some, the call is converging.
        destination: Option<(Place<'tcx>, BasicBlock)>,
        /// Cleanups to be done if the call unwinds.
        cleanup: Option<BasicBlock>,
        /// `true` if this is from a call in HIR rather than from an overloaded
        /// operator. True for overloaded function call.
        from_hir_call: bool,
        /// This `Span` is the span of the function, without the dot and receiver
        /// (e.g. `foo(a, b)` in `x.foo(a, b)`).
        fn_span: Span,
    },

    /// Jump to the target if the condition has the expected value,
    /// otherwise panic with a message and a cleanup target.
    Assert {
        cond: Operand<'tcx>,
        expected: bool,
        msg: AssertMessage<'tcx>,
        target: BasicBlock,
        cleanup: Option<BasicBlock>,
    },

    /// A suspend point.
    Yield {
        /// The value to return.
        value: Operand<'tcx>,
        /// Where to resume to.
        resume: BasicBlock,
        /// The place to store the resume argument in.
        resume_arg: Place<'tcx>,
        /// Cleanup to be done if the generator is dropped at this suspend point.
        drop: Option<BasicBlock>,
    },

    /// Indicates the end of the dropping of a generator.
    GeneratorDrop,

    /// A block where control flow only ever takes one real path, but borrowck
    /// needs to be more conservative.
    FalseEdge {
        /// The target normal control flow will take.
        real_target: BasicBlock,
        /// A block control flow could conceptually jump to, but won't in
        /// practice.
        imaginary_target: BasicBlock,
    },

    /// A terminator for blocks that only take one path in reality, but where we
    /// reserve the right to unwind in borrowck, even if it won't happen in practice.
    /// This can arise in infinite loops with no function calls for example.
    FalseUnwind {
        /// The target normal control flow will take.
        real_target: BasicBlock,
        /// The imaginary cleanup block link. This particular path will never be taken
        /// in practice, but in order to avoid fragility we want to always
        /// consider it in borrowck. We don't want to accept programs which
        /// pass borrowck only when `panic=abort` or some assertions are disabled
        /// due to release vs. debug mode builds. This needs to be an `Option` because
        /// of the `remove_noop_landing_pads` and `no_landing_pads` passes.
        unwind: Option<BasicBlock>,
    },

    /// Block ends with an inline assembly block. This is a terminator since
    /// inline assembly is allowed to diverge.
    InlineAsm {
        /// The template for the inline assembly, with placeholders.
        template: &'tcx [InlineAsmTemplatePiece],

        /// The operands for the inline assembly, as `Operand`s or `Place`s.
        operands: Vec<InlineAsmOperand<'tcx>>,

        /// Miscellaneous options for the inline assembly.
        options: InlineAsmOptions,

        /// Source spans for each line of the inline assembly code. These are
        /// used to map assembler errors back to the line in the source code.
        line_spans: &'tcx [Span],

        /// Destination block after the inline assembly returns, unless it is
        /// diverging (InlineAsmOptions::NORETURN).
        destination: Option<BasicBlock>,
    },
}

/// Information about an assertion failure.
#[derive(Clone, RustcEncodable, RustcDecodable, HashStable, PartialEq)]
pub enum AssertKind<O> {
    BoundsCheck { len: O, index: O },
    Overflow(BinOp),
    OverflowNeg,
    DivisionByZero,
    RemainderByZero,
    ResumedAfterReturn(GeneratorKind),
    ResumedAfterPanic(GeneratorKind),
}

/// An operand to an `InlineAsm` terminator, mirroring the operand kinds of
/// the `asm!` macro.
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)]
pub enum InlineAsmOperand<'tcx> {
    In {
        reg: InlineAsmRegOrRegClass,
        value: Operand<'tcx>,
    },
    Out {
        reg: InlineAsmRegOrRegClass,
        late: bool,
        place: Option<Place<'tcx>>,
    },
    InOut {
        reg: InlineAsmRegOrRegClass,
        late: bool,
        in_value: Operand<'tcx>,
        out_place: Option<Place<'tcx>>,
    },
    Const {
        value: Operand<'tcx>,
    },
    SymFn {
        value: Box<Constant<'tcx>>,
    },
    SymStatic {
        def_id: DefId,
    },
}

/// Type for MIR `Assert` terminator error messages.
pub type AssertMessage<'tcx> = AssertKind<Operand<'tcx>>;

/// Iterator over a terminator's successor blocks: at most one "primary"
/// target chained with a slice of further targets.
pub type Successors<'a> =
    iter::Chain<option::IntoIter<&'a BasicBlock>, slice::Iter<'a, BasicBlock>>;
pub type SuccessorsMut<'a> =
    iter::Chain<option::IntoIter<&'a mut BasicBlock>, slice::IterMut<'a, BasicBlock>>;

impl<'tcx> Terminator<'tcx> {
    /// Forwards to `TerminatorKind::successors`.
    pub fn successors(&self) -> Successors<'_> {
        self.kind.successors()
    }

    /// Forwards to `TerminatorKind::successors_mut`.
    pub fn successors_mut(&mut self) -> SuccessorsMut<'_> {
        self.kind.successors_mut()
    }

    /// Forwards to `TerminatorKind::unwind`.
    pub fn unwind(&self) -> Option<&Option<BasicBlock>> {
        self.kind.unwind()
    }

    /// Forwards to `TerminatorKind::unwind_mut`.
    pub fn unwind_mut(&mut self) -> Option<&mut Option<BasicBlock>> {
        self.kind.unwind_mut()
    }
}

impl<'tcx> TerminatorKind<'tcx> {
    /// Creates a boolean branch: jumps to `t` when `cond` is true and to `f`
    /// otherwise. Encoded as a `SwitchInt` over `bool` whose single tested
    /// value is `0` (false ↦ `f`, otherwise ↦ `t`).
    pub fn if_(
        tcx: TyCtxt<'tcx>,
        cond: Operand<'tcx>,
        t: BasicBlock,
        f: BasicBlock,
    ) -> TerminatorKind<'tcx> {
        static BOOL_SWITCH_FALSE: &[u128] = &[0];
        TerminatorKind::SwitchInt {
            discr: cond,
            switch_ty: tcx.types.bool,
            values: From::from(BOOL_SWITCH_FALSE),
            targets: vec![f, t],
        }
    }

    /// Returns all successor blocks of this terminator. The "normal" target
    /// (if any) is yielded first, followed by unwind/imaginary targets.
    pub fn successors(&self) -> Successors<'_> {
        use self::TerminatorKind::*;
        match *self {
            // No successors at all.
            Resume
            | Abort
            | GeneratorDrop
            | Return
            | Unreachable
            | Call { destination: None, cleanup: None, .. }
            | InlineAsm { destination: None, .. } => None.into_iter().chain(&[]),
            // Exactly one successor.
            Goto { target: ref t }
            | Call { destination: None, cleanup: Some(ref t), .. }
            | Call { destination: Some((_, ref t)), cleanup: None, .. }
            | Yield { resume: ref t, drop: None, .. }
            | DropAndReplace { target: ref t, unwind: None, .. }
            | Drop { target: ref t, unwind: None, .. }
            | Assert { target: ref t, cleanup: None, .. }
            | FalseUnwind { real_target: ref t, unwind: None }
            | InlineAsm { destination: Some(ref t), .. } => Some(t).into_iter().chain(&[]),
            // Normal target plus an unwind/cleanup target.
            Call { destination: Some((_, ref t)), cleanup: Some(ref u), .. }
            | Yield { resume: ref t, drop: Some(ref u), .. }
            | DropAndReplace { target: ref t, unwind: Some(ref u), .. }
            | Drop { target: ref t, unwind: Some(ref u), .. }
            | Assert { target: ref t, cleanup: Some(ref u), .. }
            | FalseUnwind { real_target: ref t, unwind: Some(ref u) } => {
                Some(t).into_iter().chain(slice::from_ref(u))
            }
            SwitchInt { ref targets, .. } => None.into_iter().chain(&targets[..]),
            FalseEdge { ref real_target, ref imaginary_target } => {
                Some(real_target).into_iter().chain(slice::from_ref(imaginary_target))
            }
        }
    }

    /// Mutable variant of `successors`; same arm structure.
    pub fn successors_mut(&mut self) -> SuccessorsMut<'_> {
        use self::TerminatorKind::*;
        match *self {
            Resume
            | Abort
            | GeneratorDrop
            | Return
            | Unreachable
            | Call { destination: None, cleanup: None, .. }
            | InlineAsm { destination: None, .. } => None.into_iter().chain(&mut []),
            Goto { target: ref mut t }
            | Call { destination: None, cleanup: Some(ref mut t), .. }
            | Call { destination: Some((_, ref mut t)), cleanup: None, .. }
            | Yield { resume: ref mut t, drop: None, .. }
            | DropAndReplace { target: ref mut t, unwind: None, .. }
            | Drop { target: ref mut t, unwind: None, .. }
            | Assert { target: ref mut t, cleanup: None, .. }
            | FalseUnwind { real_target: ref mut t, unwind: None }
            | InlineAsm { destination: Some(ref mut t), .. } => {
                Some(t).into_iter().chain(&mut [])
            }
            Call { destination: Some((_, ref mut t)), cleanup: Some(ref mut u), .. }
            | Yield { resume: ref mut t, drop: Some(ref mut u), .. }
            | DropAndReplace { target: ref mut t, unwind: Some(ref mut u), .. }
            | Drop { target: ref mut t, unwind: Some(ref mut u), .. }
            | Assert { target: ref mut t, cleanup: Some(ref mut u), .. }
            | FalseUnwind { real_target: ref mut t, unwind: Some(ref mut u) } => {
                Some(t).into_iter().chain(slice::from_mut(u))
            }
            SwitchInt { ref mut targets, .. } => None.into_iter().chain(&mut targets[..]),
            FalseEdge { ref mut real_target, ref mut imaginary_target } => {
                Some(real_target).into_iter().chain(slice::from_mut(imaginary_target))
            }
        }
    }

    /// Returns a reference to the unwind (cleanup) edge slot, if this kind of
    /// terminator has one (`Call`, `Assert`, `Drop`, `DropAndReplace`,
    /// `FalseUnwind`); `None` for all other kinds.
    pub fn unwind(&self) -> Option<&Option<BasicBlock>> {
        match *self {
            TerminatorKind::Goto { .. }
            | TerminatorKind::Resume
            | TerminatorKind::Abort
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::GeneratorDrop
            | TerminatorKind::Yield { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::InlineAsm { .. } => None,
            TerminatorKind::Call { cleanup: ref unwind, .. }
            | TerminatorKind::Assert { cleanup: ref unwind, .. }
            | TerminatorKind::DropAndReplace { ref unwind, .. }
            | TerminatorKind::Drop { ref unwind, .. }
            | TerminatorKind::FalseUnwind { ref unwind, .. } => Some(unwind),
        }
    }

    /// Mutable variant of `unwind`.
    pub fn unwind_mut(&mut self) -> Option<&mut Option<BasicBlock>> {
        match *self {
            TerminatorKind::Goto { .. }
            | TerminatorKind::Resume
            | TerminatorKind::Abort
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::GeneratorDrop
            | TerminatorKind::Yield { .. }
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::InlineAsm { .. } => None,
            TerminatorKind::Call { cleanup: ref mut unwind, .. }
            | TerminatorKind::Assert { cleanup: ref mut unwind, .. }
            | TerminatorKind::DropAndReplace { ref mut unwind, .. }
            | TerminatorKind::Drop { ref mut unwind, .. }
            | TerminatorKind::FalseUnwind { ref mut unwind, .. } => Some(unwind),
        }
    }
}

impl<'tcx> BasicBlockData<'tcx> {
    /// Creates a block with no statements and the given (possibly absent)
    /// terminator; `is_cleanup` defaults to `false`.
    pub fn new(terminator: Option<Terminator<'tcx>>) -> BasicBlockData<'tcx> {
        BasicBlockData { statements: vec![], terminator, is_cleanup: false }
    }

    /// Accessor for terminator.
    ///
    /// Terminator may not be None after construction of the basic block is complete. This accessor
    /// provides a convenience way to reach the terminator.
    pub fn terminator(&self) -> &Terminator<'tcx> {
        self.terminator.as_ref().expect("invalid terminator state")
    }

    /// Like `terminator`, but returns a mutable reference; panics if the
    /// terminator is `None`.
    pub fn terminator_mut(&mut self) -> &mut Terminator<'tcx> {
        self.terminator.as_mut().expect("invalid terminator state")
    }

    /// "Retains" the statements for which `f` returns `true`. Note that this
    /// does not remove anything: statements for which `f` returns `false` are
    /// replaced with `Nop`, preserving statement indices in `Location`s.
    pub fn retain_statements<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut Statement<'_>) -> bool,
    {
        for s in &mut self.statements {
            if !f(s) {
                s.make_nop();
            }
        }
    }

    /// Expands statements in place: for each statement where `f` returns an
    /// iterator, the statement is replaced by the yielded statements (or by
    /// `Nop` if the iterator is empty). Statements where `f` returns `None`
    /// are left untouched.
    pub fn expand_statements<F, I>(&mut self, mut f: F)
    where
        F: FnMut(&mut Statement<'tcx>) -> Option<I>,
        I: iter::TrustedLen<Item = Statement<'tcx>>,
    {
        // Gather all the iterators we'll need to splice in, and their positions.
        let mut splices: Vec<(usize, I)> = vec![];
        let mut extra_stmts = 0;
        for (i, s) in self.statements.iter_mut().enumerate() {
            if let Some(mut new_stmts) = f(s) {
                if let Some(first) = new_stmts.next() {
                    // We can already store the first new statement.
                    *s = first;

                    // Save the other statements for optimized splicing.
                    let remaining = new_stmts.size_hint().0;
                    if remaining > 0 {
                        splices.push((i + 1 + extra_stmts, new_stmts));
                        extra_stmts += remaining;
                    }
                } else {
                    s.make_nop();
                }
            }
        }

        // Splice in the new statements, from the end of the block.
        // FIXME(eddyb) This could be more efficient with a "gap buffer"
        // where a range of elements ("gap") is left uninitialized, with
        // splicing adding new elements to the end of that gap and moving
        // existing elements from before the gap to the end of the gap.
        // For now, this is safe code, emulating a gap but initializing it.
        let mut gap = self.statements.len()..self.statements.len() + extra_stmts;
        self.statements.resize(
            gap.end,
            Statement { source_info: SourceInfo::outermost(DUMMY_SP), kind: StatementKind::Nop },
        );
        for (splice_start, new_stmts) in splices.into_iter().rev() {
            let splice_end = splice_start + new_stmts.size_hint().0;
            while gap.end > splice_end {
                gap.start -= 1;
                gap.end -= 1;
                self.statements.swap(gap.start, gap.end);
            }
            self.statements.splice(splice_start..splice_end, new_stmts);
            gap.end = splice_start;
        }
    }

    /// Returns the statement at `index` as a `MirVisitable`, or the terminator
    /// if `index` is past the last statement.
    pub fn visitable(&self, index: usize) -> &dyn MirVisitable<'tcx> {
        if index < self.statements.len() { &self.statements[index] } else { &self.terminator }
    }
}

impl<O> AssertKind<O> {
    /// Getting a description does not require `O` to be printable, and does not
    /// require allocation.
    /// The caller is expected to handle `BoundsCheck` separately.
    pub fn description(&self) -> &'static str {
        use AssertKind::*;
        match self {
            Overflow(BinOp::Add) => "attempt to add with overflow",
            Overflow(BinOp::Sub) => "attempt to subtract with overflow",
            Overflow(BinOp::Mul) => "attempt to multiply with overflow",
            Overflow(BinOp::Div) => "attempt to divide with overflow",
            Overflow(BinOp::Rem) => "attempt to calculate the remainder with overflow",
            OverflowNeg => "attempt to negate with overflow",
            Overflow(BinOp::Shr) => "attempt to shift right with overflow",
            Overflow(BinOp::Shl) => "attempt to shift left with overflow",
            Overflow(op) => bug!("{:?} cannot overflow", op),
            DivisionByZero => "attempt to divide by zero",
            RemainderByZero => "attempt to calculate the remainder with a divisor of zero",
            ResumedAfterReturn(GeneratorKind::Gen) => "generator resumed after completion",
            ResumedAfterReturn(GeneratorKind::Async(_)) => "`async fn` resumed after completion",
            ResumedAfterPanic(GeneratorKind::Gen) => "generator resumed after panicking",
            ResumedAfterPanic(GeneratorKind::Async(_)) => "`async fn` resumed after panicking",
            BoundsCheck { ..
            } => bug!("Unexpected AssertKind"),
        }
    }

    /// Format the message arguments for the `assert(cond, msg..)` terminator in MIR printing.
    fn fmt_assert_args<W: Write>(&self, f: &mut W) -> fmt::Result
    where
        O: Debug,
    {
        match self {
            AssertKind::BoundsCheck { ref len, ref index } => write!(
                f,
                "\"index out of bounds: the len is {{}} but the index is {{}}\", {:?}, {:?}",
                len, index
            ),
            _ => write!(f, "\"{}\"", self.description()),
        }
    }
}

impl<O: fmt::Debug> fmt::Debug for AssertKind<O> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use AssertKind::*;
        match self {
            BoundsCheck { ref len, ref index } => {
                write!(f, "index out of bounds: the len is {:?} but the index is {:?}", len, index)
            }
            _ => write!(f, "{}", self.description()),
        }
    }
}

impl<'tcx> Debug for TerminatorKind<'tcx> {
    // Prints the terminator "head" followed by `-> target` / `-> [label: target, ...]`,
    // relying on the invariant that `successors()` and `fmt_successor_labels()`
    // agree in length.
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
        self.fmt_head(fmt)?;
        let successor_count = self.successors().count();
        let labels = self.fmt_successor_labels();
        assert_eq!(successor_count, labels.len());

        match successor_count {
            0 => Ok(()),

            1 => write!(fmt, " -> {:?}", self.successors().next().unwrap()),

            _ => {
                write!(fmt, " -> [")?;
                for (i, target) in self.successors().enumerate() {
                    if i > 0 {
                        write!(fmt, ", ")?;
                    }
                    write!(fmt, "{}: {:?}", labels[i], target)?;
                }
                write!(fmt, "]")
            }
        }
    }
}

impl<'tcx> TerminatorKind<'tcx> {
    /// Writes the "head" part of the terminator; that is, its name and the data it uses to pick the
    /// successor basic block, if any. The only information not included is the list of possible
    /// successors, which may be rendered differently between the text and the graphviz format.
    pub fn fmt_head<W: Write>(&self, fmt: &mut W) -> fmt::Result {
        use self::TerminatorKind::*;
        match self {
            Goto { .. } => write!(fmt, "goto"),
            SwitchInt { discr, .. } => write!(fmt, "switchInt({:?})", discr),
            Return => write!(fmt, "return"),
            GeneratorDrop => write!(fmt, "generator_drop"),
            Resume => write!(fmt, "resume"),
            Abort => write!(fmt, "abort"),
            Yield { value, resume_arg, .. } => write!(fmt, "{:?} = yield({:?})", resume_arg, value),
            Unreachable => write!(fmt, "unreachable"),
            Drop { place, .. } => write!(fmt, "drop({:?})", place),
            DropAndReplace { place, value, .. } => {
                write!(fmt, "replace({:?} <- {:?})", place, value)
            }
            Call { func, args, destination, .. } => {
                if let Some((destination, _)) = destination {
                    write!(fmt, "{:?} = ", destination)?;
                }
                write!(fmt, "{:?}(", func)?;
                for (index, arg) in args.iter().enumerate() {
                    if index > 0 {
                        write!(fmt, ", ")?;
                    }
                    write!(fmt, "{:?}", arg)?;
                }
                write!(fmt, ")")
            }
            Assert { cond, expected, msg, .. } => {
                write!(fmt, "assert(")?;
                if !expected {
                    write!(fmt, "!")?;
                }
                write!(fmt, "{:?}, ", cond)?;
                msg.fmt_assert_args(fmt)?;
                write!(fmt, ")")
            }
            FalseEdge { .. } => write!(fmt, "falseEdge"),
            FalseUnwind { .. } => write!(fmt, "falseUnwind"),
            InlineAsm { template, ref operands, options, .. } => {
                write!(fmt, "asm!(\"{}\"", InlineAsmTemplatePiece::to_string(template))?;
                for op in operands {
                    write!(fmt, ", ")?;
                    let print_late = |&late| if late { "late" } else { "" };
                    match op {
                        InlineAsmOperand::In { reg, value } => {
                            write!(fmt, "in({}) {:?}", reg, value)?;
                        }
                        InlineAsmOperand::Out { reg, late, place: Some(place) } => {
                            write!(fmt, "{}out({}) {:?}", print_late(late), reg, place)?;
                        }
                        InlineAsmOperand::Out { reg, late, place: None } => {
                            write!(fmt, "{}out({}) _", print_late(late), reg)?;
                        }
                        InlineAsmOperand::InOut {
                            reg,
                            late,
                            in_value,
                            out_place: Some(out_place),
                        } => {
                            write!(
                                fmt,
                                "in{}out({}) {:?} => {:?}",
                                print_late(late),
                                reg,
                                in_value,
                                out_place
                            )?;
                        }
                        InlineAsmOperand::InOut { reg, late, in_value, out_place: None } => {
                            write!(fmt, "in{}out({}) {:?} => _", print_late(late), reg, in_value)?;
                        }
                        InlineAsmOperand::Const { value } => {
                            write!(fmt, "const {:?}", value)?;
                        }
                        InlineAsmOperand::SymFn { value } => {
                            write!(fmt, "sym_fn {:?}", value)?;
                        }
                        InlineAsmOperand::SymStatic { def_id } => {
                            write!(fmt, "sym_static {:?}", def_id)?;
                        }
                    }
                }
                write!(fmt, ", options({:?}))", options)
            }
        }
    }

    /// Returns the list of labels for the edges to the successor basic blocks.
    /// Must produce exactly one label per successor, in the same order as
    /// `successors()` (see the assertion in the `Debug` impl above).
    pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
        use self::TerminatorKind::*;
        match *self {
            Return | Resume | Abort | Unreachable | GeneratorDrop => vec![],
            Goto { .. } => vec!["".into()],
            SwitchInt { ref values, switch_ty, .. } => ty::tls::with(|tcx| {
                let param_env = ty::ParamEnv::empty();
                let switch_ty = tcx.lift(&switch_ty).unwrap();
                let size = tcx.layout_of(param_env.and(switch_ty)).unwrap().size;
                values
                    .iter()
                    .map(|&u| {
                        ty::Const::from_scalar(tcx, Scalar::from_uint(u, size), switch_ty)
                            .to_string()
                            .into()
                    })
                    .chain(iter::once("otherwise".into()))
                    .collect()
            }),
            Call { destination: Some(_), cleanup: Some(_), .. } => {
                vec!["return".into(), "unwind".into()]
            }
            Call { destination: Some(_), cleanup: None, .. } => vec!["return".into()],
            Call { destination: None, cleanup: Some(_), .. } => vec!["unwind".into()],
            Call { destination: None, cleanup: None, .. } => vec![],
            Yield { drop: Some(_), .. } => vec!["resume".into(), "drop".into()],
            Yield { drop: None, .. } => vec!["resume".into()],
            DropAndReplace { unwind: None, .. } | Drop { unwind: None, .. } => {
                vec!["return".into()]
            }
            DropAndReplace { unwind: Some(_), .. } | Drop { unwind: Some(_), .. } => {
                vec!["return".into(), "unwind".into()]
            }
            Assert { cleanup: None, .. } => vec!["".into()],
            Assert { .. } => vec!["success".into(), "unwind".into()],
            FalseEdge { .. } => vec!["real".into(), "imaginary".into()],
            FalseUnwind { unwind: Some(_), .. } => vec!["real".into(), "cleanup".into()],
            FalseUnwind { unwind: None, .. } => vec!["real".into()],
            InlineAsm { destination: Some(_), .. } => vec!["".into()],
            InlineAsm { destination: None, .. } => vec![],
        }
    }
}

///////////////////////////////////////////////////////////////////////////
// Statements

#[derive(Clone, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)]
pub struct Statement<'tcx> {
    /// Source position and scope information for this statement.
    pub source_info: SourceInfo,
    /// What kind of statement this is (and its kind-specific payload).
    pub kind: StatementKind<'tcx>,
}

// `Statement` is used a lot.
// Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
static_assert_size!(Statement<'_>, 32);

impl Statement<'_> {
    /// Changes a statement to a nop. This is both faster than deleting instructions and avoids
    /// invalidating statement indices in `Location`s.
    pub fn make_nop(&mut self) {
        self.kind = StatementKind::Nop
    }

    /// Changes a statement to a nop and returns the original statement.
    pub fn replace_nop(&mut self) -> Self {
        Statement {
            source_info: self.source_info,
            kind: mem::replace(&mut self.kind, StatementKind::Nop),
        }
    }
}

#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)]
pub enum StatementKind<'tcx> {
    /// Write the RHS Rvalue to the LHS Place.
    Assign(Box<(Place<'tcx>, Rvalue<'tcx>)>),

    /// This represents all the reading that a pattern match may do
    /// (e.g., inspecting constants and discriminant values), and the
    /// kind of pattern it comes from. This is in order to adapt potential
    /// error messages to these specific patterns.
    ///
    /// Note that this also is emitted for regular `let` bindings to ensure that locals that are
    /// never accessed still get some sanity checks for, e.g., `let x: ! = ..;`
    FakeRead(FakeReadCause, Box<Place<'tcx>>),

    /// Write the discriminant for a variant to the enum Place.
    SetDiscriminant { place: Box<Place<'tcx>>, variant_index: VariantIdx },

    /// Start a live range for the storage of the local.
    StorageLive(Local),

    /// End the current live range for the storage of the local.
    StorageDead(Local),

    /// Executes a piece of inline Assembly. Stored in a Box to keep the size
    /// of `StatementKind` low.
    LlvmInlineAsm(Box<LlvmInlineAsm<'tcx>>),

    /// Retag references in the given place, ensuring they got fresh tags. This is
    /// part of the Stacked Borrows model. These statements are currently only interpreted
    /// by miri and only generated when "-Z mir-emit-retag" is passed.
    /// See <https://internals.rust-lang.org/t/stacked-borrows-an-aliasing-model-for-rust/8153/>
    /// for more details.
    Retag(RetagKind, Box<Place<'tcx>>),

    /// Encodes a user's type ascription. These need to be preserved
    /// intact so that NLL can respect them. For example:
    ///
    ///     let a: T = y;
    ///
    /// The effect of this annotation is to relate the type `T_y` of the place `y`
    /// to the user-given type `T`. The effect depends on the specified variance:
    ///
    /// - `Covariant` -- requires that `T_y <: T`
    /// - `Contravariant` -- requires that `T_y :> T`
    /// - `Invariant` -- requires that `T_y == T`
    /// - `Bivariant` -- no effect
    AscribeUserType(Box<(Place<'tcx>, UserTypeProjection)>, ty::Variance),

    /// No-op. Useful for deleting instructions without affecting statement indices.
    Nop,
}

/// Describes what kind of retag is to be performed.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug, PartialEq, Eq, HashStable)]
pub enum RetagKind {
    /// The initial retag when entering a function.
    FnEntry,
    /// Retag preparing for a two-phase borrow.
    TwoPhase,
    /// Retagging raw pointers.
    Raw,
    /// A "normal" retag.
    Default,
}

/// The `FakeReadCause` describes the type of pattern why a FakeRead statement exists.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug, HashStable, PartialEq)]
pub enum FakeReadCause {
    /// Inject a fake read of the borrowed input at the end of each guards
    /// code.
    ///
    /// This should ensure that you cannot change the variant for an enum while
    /// you are in the midst of matching on it.
    ForMatchGuard,

    /// `let x: !; match x {}` doesn't generate any read of x so we need to
    /// generate a read of x to check that it is initialized and safe.
    ForMatchedPlace,

    /// A fake read of the RefWithinGuard version of a bind-by-value variable
    /// in a match guard to ensure that its value hasn't changed by the time
    /// we create the OutsideGuard version.
    ForGuardBinding,

    /// Officially, the semantics of
    ///
    /// `let pattern = <expr>;`
    ///
    /// is that `<expr>` is evaluated into a temporary and then this temporary is
    /// moved into the pattern.
    ///
    /// However, if we see the simple pattern `let var = <expr>`, we optimize this to
    /// evaluate `<expr>` directly into the variable `var`. This is mostly unobservable,
    /// but in some cases it can affect the borrow checker, as in #53695.
    /// Therefore, we insert a "fake read" here to ensure that we get
    /// appropriate errors.
    ForLet,

    /// If we have an index expression like
    ///
    /// (*x)[1][{ x = y; 4}]
    ///
    /// then the first bounds check is invalidated when we evaluate the second
    /// index expression. Thus we create a fake borrow of `x` across the second
    /// indexer, which will cause a borrow check error.
    ForIndex,
}

/// The payload of a `StatementKind::LlvmInlineAsm`: the parsed asm plus its
/// output places and input operands.
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)]
pub struct LlvmInlineAsm<'tcx> {
    pub asm: hir::LlvmInlineAsmInner,
    pub outputs: Box<[Place<'tcx>]>,
    pub inputs: Box<[(Span, Operand<'tcx>)]>,
}

impl Debug for Statement<'_> {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
        use self::StatementKind::*;
        match self.kind {
            Assign(box (ref place, ref rv)) => write!(fmt, "{:?} = {:?}", place, rv),
            FakeRead(ref cause, ref place) => write!(fmt, "FakeRead({:?}, {:?})", cause, place),
            Retag(ref kind, ref place) => write!(
                fmt,
                "Retag({}{:?})",
                match kind {
                    RetagKind::FnEntry => "[fn entry] ",
                    RetagKind::TwoPhase => "[2phase] ",
                    RetagKind::Raw => "[raw] ",
                    RetagKind::Default => "",
                },
                place,
            ),
            StorageLive(ref place) => write!(fmt, "StorageLive({:?})", place),
            StorageDead(ref place) => write!(fmt, "StorageDead({:?})", place),
            SetDiscriminant { ref place, variant_index } => {
                write!(fmt, "discriminant({:?}) = {:?}", place, variant_index)
            }
            LlvmInlineAsm(ref asm) => {
                write!(fmt, "llvm_asm!({:?} : {:?} : {:?})", asm.asm, asm.outputs, asm.inputs)
            }
            AscribeUserType(box (ref place, ref c_ty), ref variance) => {
                write!(fmt,
"AscribeUserType({:?}, {:?}, {:?})", place, variance, c_ty) } Nop => write!(fmt, "nop"), } } } /////////////////////////////////////////////////////////////////////////// // Places /// A path to a value; something that can be evaluated without /// changing or disturbing program state. #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, HashStable)] pub struct Place<'tcx> { pub local: Local, /// projection out of a place (access a field, deref a pointer, etc) pub projection: &'tcx List<PlaceElem<'tcx>>, } impl<'tcx> rustc_serialize::UseSpecializedDecodable for Place<'tcx> {} #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(RustcEncodable, RustcDecodable, HashStable)] pub enum ProjectionElem<V, T> { Deref, Field(Field, T), Index(V), /// These indices are generated by slice patterns. Easiest to explain /// by example: /// /// ``` /// [X, _, .._, _, _] => { offset: 0, min_length: 4, from_end: false }, /// [_, X, .._, _, _] => { offset: 1, min_length: 4, from_end: false }, /// [_, _, .._, X, _] => { offset: 2, min_length: 4, from_end: true }, /// [_, _, .._, _, X] => { offset: 1, min_length: 4, from_end: true }, /// ``` ConstantIndex { /// index or -index (in Python terms), depending on from_end offset: u32, /// The thing being indexed must be at least this long. For arrays this /// is always the exact length. min_length: u32, /// Counting backwards from end? This is always false when indexing an /// array. from_end: bool, }, /// These indices are generated by slice patterns. /// /// If `from_end` is true `slice[from..slice.len() - to]`. /// Otherwise `array[from..to]`. Subslice { from: u32, to: u32, /// Whether `to` counts from the start or end of the array/slice. /// For `PlaceElem`s this is `true` if and only if the base is a slice. /// For `ProjectionKind`, this can also be `true` for arrays. from_end: bool, }, /// "Downcast" to a variant of an ADT. 
Currently, we only introduce /// this for ADTs with more than one variant. It may be better to /// just introduce it always, or always for enums. /// /// The included Symbol is the name of the variant, used for printing MIR. Downcast(Option<Symbol>, VariantIdx), } impl<V, T> ProjectionElem<V, T> { /// Returns `true` if the target of this projection may refer to a different region of memory /// than the base. fn is_indirect(&self) -> bool { match self { Self::Deref => true, Self::Field(_, _) | Self::Index(_) | Self::ConstantIndex { .. } | Self::Subslice { .. } | Self::Downcast(_, _) => false, } } } /// Alias for projections as they appear in places, where the base is a place /// and the index is a local. pub type PlaceElem<'tcx> = ProjectionElem<Local, Ty<'tcx>>; // At least on 64 bit systems, `PlaceElem` should not be larger than two pointers. #[cfg(target_arch = "x86_64")] static_assert_size!(PlaceElem<'_>, 16); /// Alias for projections as they appear in `UserTypeProjection`, where we /// need neither the `V` parameter for `Index` nor the `T` for `Field`. pub type ProjectionKind = ProjectionElem<(), ()>; rustc_index::newtype_index! { pub struct Field { derive [HashStable] DEBUG_FORMAT = "field[{}]" } } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct PlaceRef<'tcx> { pub local: Local, pub projection: &'tcx [PlaceElem<'tcx>], } impl<'tcx> Place<'tcx> { // FIXME change this to a const fn by also making List::empty a const fn. pub fn return_place() -> Place<'tcx> { Place { local: RETURN_PLACE, projection: List::empty() } } /// Returns `true` if this `Place` contains a `Deref` projection. /// /// If `Place::is_indirect` returns false, the caller knows that the `Place` refers to the /// same region of memory as its base. pub fn is_indirect(&self) -> bool { self.projection.iter().any(|elem| elem.is_indirect()) } /// Finds the innermost `Local` from this `Place`, *if* it is either a local itself or /// a single deref of a local. 
// // FIXME: can we safely swap the semantics of `fn base_local` below in here instead? pub fn local_or_deref_local(&self) -> Option<Local> { match self.as_ref() { PlaceRef { local, projection: [] } | PlaceRef { local, projection: [ProjectionElem::Deref] } => Some(local), _ => None, } } /// If this place represents a local variable like `_X` with no /// projections, return `Some(_X)`. pub fn as_local(&self) -> Option<Local> { self.as_ref().as_local() } pub fn as_ref(&self) -> PlaceRef<'tcx> { PlaceRef { local: self.local, projection: &self.projection } } } impl From<Local> for Place<'_> { fn from(local: Local) -> Self { Place { local, projection: List::empty() } } } impl<'tcx> PlaceRef<'tcx> { /// Finds the innermost `Local` from this `Place`, *if* it is either a local itself or /// a single deref of a local. // // FIXME: can we safely swap the semantics of `fn base_local` below in here instead? pub fn local_or_deref_local(&self) -> Option<Local> { match *self { PlaceRef { local, projection: [] } | PlaceRef { local, projection: [ProjectionElem::Deref] } => Some(local), _ => None, } } /// If this place represents a local variable like `_X` with no /// projections, return `Some(_X)`. pub fn as_local(&self) -> Option<Local> { match *self { PlaceRef { local, projection: [] } => Some(local), _ => None, } } } impl Debug for Place<'_> { fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { for elem in self.projection.iter().rev() { match elem { ProjectionElem::Downcast(_, _) | ProjectionElem::Field(_, _) => { write!(fmt, "(").unwrap(); } ProjectionElem::Deref => { write!(fmt, "(*").unwrap(); } ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. 
} => {} } } write!(fmt, "{:?}", self.local)?; for elem in self.projection.iter() { match elem { ProjectionElem::Downcast(Some(name), _index) => { write!(fmt, " as {})", name)?; } ProjectionElem::Downcast(None, index) => { write!(fmt, " as variant#{:?})", index)?; } ProjectionElem::Deref => { write!(fmt, ")")?; } ProjectionElem::Field(field, ty) => { write!(fmt, ".{:?}: {:?})", field.index(), ty)?; } ProjectionElem::Index(ref index) => { write!(fmt, "[{:?}]", index)?; } ProjectionElem::ConstantIndex { offset, min_length, from_end: false } => { write!(fmt, "[{:?} of {:?}]", offset, min_length)?; } ProjectionElem::ConstantIndex { offset, min_length, from_end: true } => { write!(fmt, "[-{:?} of {:?}]", offset, min_length)?; } ProjectionElem::Subslice { from, to, from_end: true } if to == 0 => { write!(fmt, "[{:?}:]", from)?; } ProjectionElem::Subslice { from, to, from_end: true } if from == 0 => { write!(fmt, "[:-{:?}]", to)?; } ProjectionElem::Subslice { from, to, from_end: true } => { write!(fmt, "[{:?}:-{:?}]", from, to)?; } ProjectionElem::Subslice { from, to, from_end: false } => { write!(fmt, "[{:?}..{:?}]", from, to)?; } } } Ok(()) } } /////////////////////////////////////////////////////////////////////////// // Scopes rustc_index::newtype_index! { pub struct SourceScope { derive [HashStable] DEBUG_FORMAT = "scope[{}]", const OUTERMOST_SOURCE_SCOPE = 0, } } #[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)] pub struct SourceScopeData { pub span: Span, pub parent_scope: Option<SourceScope>, /// Crate-local information for this source scope, that can't (and /// needn't) be tracked across crates. pub local_data: ClearCrossCrate<SourceScopeLocalData>, } #[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable)] pub struct SourceScopeLocalData { /// An `HirId` with lint levels equivalent to this scope's lint levels. pub lint_root: hir::HirId, /// The unsafe block that contains this node. 
pub safety: Safety, } /////////////////////////////////////////////////////////////////////////// // Operands /// These are values that can appear inside an rvalue. They are intentionally /// limited to prevent rvalues from being nested in one another. #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)] pub enum Operand<'tcx> { /// Copy: The value must be available for use afterwards. /// /// This implies that the type of the place must be `Copy`; this is true /// by construction during build, but also checked by the MIR type checker. Copy(Place<'tcx>), /// Move: The value (including old borrows of it) will not be used again. /// /// Safe for values of all types (modulo future developments towards `?Move`). /// Correct usage patterns are enforced by the borrow checker for safe code. /// `Copy` may be converted to `Move` to enable "last-use" optimizations. Move(Place<'tcx>), /// Synthesizes a constant value. Constant(Box<Constant<'tcx>>), } impl<'tcx> Debug for Operand<'tcx> { fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { use self::Operand::*; match *self { Constant(ref a) => write!(fmt, "{:?}", a), Copy(ref place) => write!(fmt, "{:?}", place), Move(ref place) => write!(fmt, "move {:?}", place), } } } impl<'tcx> Operand<'tcx> { /// Convenience helper to make a constant that refers to the fn /// with given `DefId` and substs. Since this is used to synthesize /// MIR, assumes `user_ty` is None. pub fn function_handle( tcx: TyCtxt<'tcx>, def_id: DefId, substs: SubstsRef<'tcx>, span: Span, ) -> Self { let ty = tcx.type_of(def_id).subst(tcx, substs); Operand::Constant(box Constant { span, user_ty: None, literal: ty::Const::zero_sized(tcx, ty), }) } /// Convenience helper to make a literal-like constant from a given scalar value. /// Since this is used to synthesize MIR, assumes `user_ty` is None. 
pub fn const_from_scalar( tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, val: Scalar, span: Span, ) -> Operand<'tcx> { debug_assert!({ let param_env_and_ty = ty::ParamEnv::empty().and(ty); let type_size = tcx .layout_of(param_env_and_ty) .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e)) .size; let scalar_size = abi::Size::from_bytes(match val { Scalar::Raw { size, .. } => size, _ => panic!("Invalid scalar type {:?}", val), }); scalar_size == type_size }); Operand::Constant(box Constant { span, user_ty: None, literal: ty::Const::from_scalar(tcx, val, ty), }) } pub fn to_copy(&self) -> Self { match *self { Operand::Copy(_) | Operand::Constant(_) => self.clone(), Operand::Move(place) => Operand::Copy(place), } } /// Returns the `Place` that is the target of this `Operand`, or `None` if this `Operand` is a /// constant. pub fn place(&self) -> Option<Place<'tcx>> { match self { Operand::Copy(place) | Operand::Move(place) => Some(*place), Operand::Constant(_) => None, } } } /////////////////////////////////////////////////////////////////////////// /// Rvalues #[derive(Clone, RustcEncodable, RustcDecodable, HashStable, PartialEq)] pub enum Rvalue<'tcx> { /// x (either a move or copy, depending on type of x) Use(Operand<'tcx>), /// [x; 32] Repeat(Operand<'tcx>, &'tcx ty::Const<'tcx>), /// &x or &mut x Ref(Region<'tcx>, BorrowKind, Place<'tcx>), /// Accessing a thread local static. This is inherently a runtime operation, even if llvm /// treats it as an access to a static. This `Rvalue` yields a reference to the thread local /// static. ThreadLocalRef(DefId), /// Create a raw pointer to the given place /// Can be generated by raw address of expressions (`&raw const x`), /// or when casting a reference to a raw pointer. 
AddressOf(Mutability, Place<'tcx>), /// length of a `[X]` or `[X;n]` value Len(Place<'tcx>), Cast(CastKind, Operand<'tcx>, Ty<'tcx>), BinaryOp(BinOp, Operand<'tcx>, Operand<'tcx>), CheckedBinaryOp(BinOp, Operand<'tcx>, Operand<'tcx>), NullaryOp(NullOp, Ty<'tcx>), UnaryOp(UnOp, Operand<'tcx>), /// Read the discriminant of an ADT. /// /// Undefined (i.e., no effort is made to make it defined, but there’s no reason why it cannot /// be defined to return, say, a 0) if ADT is not an enum. Discriminant(Place<'tcx>), /// Creates an aggregate value, like a tuple or struct. This is /// only needed because we want to distinguish `dest = Foo { x: /// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case /// that `Foo` has a destructor. These rvalues can be optimized /// away after type-checking and before lowering. Aggregate(Box<AggregateKind<'tcx>>, Vec<Operand<'tcx>>), } #[derive(Clone, Copy, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub enum CastKind { Misc, Pointer(PointerCast), } #[derive(Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub enum AggregateKind<'tcx> { /// The type is of the element Array(Ty<'tcx>), Tuple, /// The second field is the variant index. It's equal to 0 for struct /// and union expressions. The fourth field is /// active field number and is present only for union expressions /// -- e.g., for a union expression `SomeUnion { c: .. 
}`, the /// active field index would identity the field `c` Adt(&'tcx AdtDef, VariantIdx, SubstsRef<'tcx>, Option<UserTypeAnnotationIndex>, Option<usize>), Closure(DefId, SubstsRef<'tcx>), Generator(DefId, SubstsRef<'tcx>, hir::Movability), } #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub enum BinOp { /// The `+` operator (addition) Add, /// The `-` operator (subtraction) Sub, /// The `*` operator (multiplication) Mul, /// The `/` operator (division) Div, /// The `%` operator (modulus) Rem, /// The `^` operator (bitwise xor) BitXor, /// The `&` operator (bitwise and) BitAnd, /// The `|` operator (bitwise or) BitOr, /// The `<<` operator (shift left) Shl, /// The `>>` operator (shift right) Shr, /// The `==` operator (equality) Eq, /// The `<` operator (less than) Lt, /// The `<=` operator (less than or equal to) Le, /// The `!=` operator (not equal to) Ne, /// The `>=` operator (greater than or equal to) Ge, /// The `>` operator (greater than) Gt, /// The `ptr.offset` operator Offset, } impl BinOp { pub fn is_checkable(self) -> bool { use self::BinOp::*; match self { Add | Sub | Mul | Shl | Shr => true, _ => false, } } } #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub enum NullOp { /// Returns the size of a value of that type SizeOf, /// Creates a new uninitialized box for a value of that type Box, } #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub enum UnOp { /// The `!` operator for logical inversion Not, /// The `-` operator for negation Neg, } impl<'tcx> Debug for Rvalue<'tcx> { fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { use self::Rvalue::*; match *self { Use(ref place) => write!(fmt, "{:?}", place), Repeat(ref a, ref b) => { write!(fmt, "[{:?}; ", a)?; pretty_print_const(b, fmt, false)?; write!(fmt, "]") } Len(ref a) => write!(fmt, "Len({:?})", a), Cast(ref kind, ref place, ref ty) => { write!(fmt, "{:?} as {:?} 
({:?})", place, ty, kind) } BinaryOp(ref op, ref a, ref b) => write!(fmt, "{:?}({:?}, {:?})", op, a, b), CheckedBinaryOp(ref op, ref a, ref b) => { write!(fmt, "Checked{:?}({:?}, {:?})", op, a, b) } UnaryOp(ref op, ref a) => write!(fmt, "{:?}({:?})", op, a), Discriminant(ref place) => write!(fmt, "discriminant({:?})", place), NullaryOp(ref op, ref t) => write!(fmt, "{:?}({:?})", op, t), ThreadLocalRef(did) => ty::tls::with(|tcx| { let muta = tcx.static_mutability(did).unwrap().prefix_str(); write!(fmt, "&/*tls*/ {}{}", muta, tcx.def_path_str(did)) }), Ref(region, borrow_kind, ref place) => { let kind_str = match borrow_kind { BorrowKind::Shared => "", BorrowKind::Shallow => "shallow ", BorrowKind::Mut { .. } | BorrowKind::Unique => "mut ", }; // When printing regions, add trailing space if necessary. let print_region = ty::tls::with(|tcx| { tcx.sess.verbose() || tcx.sess.opts.debugging_opts.identify_regions }); let region = if print_region { let mut region = region.to_string(); if !region.is_empty() { region.push(' '); } region } else { // Do not even print 'static String::new() }; write!(fmt, "&{}{}{:?}", region, kind_str, place) } AddressOf(mutability, ref place) => { let kind_str = match mutability { Mutability::Mut => "mut", Mutability::Not => "const", }; write!(fmt, "&raw {} {:?}", kind_str, place) } Aggregate(ref kind, ref places) => { let fmt_tuple = |fmt: &mut Formatter<'_>, name: &str| { let mut tuple_fmt = fmt.debug_tuple(name); for place in places { tuple_fmt.field(place); } tuple_fmt.finish() }; match **kind { AggregateKind::Array(_) => write!(fmt, "{:?}", places), AggregateKind::Tuple => { if places.is_empty() { write!(fmt, "()") } else { fmt_tuple(fmt, "") } } AggregateKind::Adt(adt_def, variant, substs, _user_ty, _) => { let variant_def = &adt_def.variants[variant]; let name = ty::tls::with(|tcx| { let mut name = String::new(); let substs = tcx.lift(&substs).expect("could not lift for printing"); FmtPrinter::new(tcx, &mut name, Namespace::ValueNS) 
.print_def_path(variant_def.def_id, substs)?; Ok(name) })?; match variant_def.ctor_kind { CtorKind::Const => fmt.write_str(&name), CtorKind::Fn => fmt_tuple(fmt, &name), CtorKind::Fictive => { let mut struct_fmt = fmt.debug_struct(&name); for (field, place) in variant_def.fields.iter().zip(places) { struct_fmt.field(&field.ident.as_str(), place); } struct_fmt.finish() } } } AggregateKind::Closure(def_id, substs) => ty::tls::with(|tcx| { if let Some(def_id) = def_id.as_local() { let hir_id = tcx.hir().as_local_hir_id(def_id); let name = if tcx.sess.opts.debugging_opts.span_free_formats { let substs = tcx.lift(&substs).unwrap(); format!( "[closure@{}]", tcx.def_path_str_with_substs(def_id.to_def_id(), substs), ) } else { let span = tcx.hir().span(hir_id); format!("[closure@{}]", tcx.sess.source_map().span_to_string(span)) }; let mut struct_fmt = fmt.debug_struct(&name); if let Some(upvars) = tcx.upvars_mentioned(def_id) { for (&var_id, place) in upvars.keys().zip(places) { let var_name = tcx.hir().name(var_id); struct_fmt.field(&var_name.as_str(), place); } } struct_fmt.finish() } else { write!(fmt, "[closure]") } }), AggregateKind::Generator(def_id, _, _) => ty::tls::with(|tcx| { if let Some(def_id) = def_id.as_local() { let hir_id = tcx.hir().as_local_hir_id(def_id); let name = format!("[generator@{:?}]", tcx.hir().span(hir_id)); let mut struct_fmt = fmt.debug_struct(&name); if let Some(upvars) = tcx.upvars_mentioned(def_id) { for (&var_id, place) in upvars.keys().zip(places) { let var_name = tcx.hir().name(var_id); struct_fmt.field(&var_name.as_str(), place); } } struct_fmt.finish() } else { write!(fmt, "[generator]") } }), } } } } } /////////////////////////////////////////////////////////////////////////// /// Constants /// /// Two constants are equal if they are the same constant. Note that /// this does not necessarily mean that they are "==" in Rust -- in /// particular one must be wary of `NaN`! 
#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, HashStable)] pub struct Constant<'tcx> { pub span: Span, /// Optional user-given type: for something like /// `collect::<Vec<_>>`, this would be present and would /// indicate that `Vec<_>` was explicitly specified. /// /// Needed for NLL to impose user-given type constraints. pub user_ty: Option<UserTypeAnnotationIndex>, pub literal: &'tcx ty::Const<'tcx>, } impl Constant<'tcx> { pub fn check_static_ptr(&self, tcx: TyCtxt<'_>) -> Option<DefId> { match self.literal.val.try_to_scalar() { Some(Scalar::Ptr(ptr)) => match tcx.global_alloc(ptr.alloc_id) { GlobalAlloc::Static(def_id) => { assert!(!tcx.is_thread_local_static(def_id)); Some(def_id) } _ => None, }, _ => None, } } } /// A collection of projections into user types. /// /// They are projections because a binding can occur a part of a /// parent pattern that has been ascribed a type. /// /// Its a collection because there can be multiple type ascriptions on /// the path from the root of the pattern down to the binding itself. /// /// An example: /// /// ```rust /// struct S<'a>((i32, &'a str), String); /// let S((_, w): (i32, &'static str), _): S = ...; /// // ------ ^^^^^^^^^^^^^^^^^^^ (1) /// // --------------------------------- ^ (2) /// ``` /// /// The highlights labelled `(1)` show the subpattern `(_, w)` being /// ascribed the type `(i32, &'static str)`. /// /// The highlights labelled `(2)` show the whole pattern being /// ascribed the type `S`. /// /// In this example, when we descend to `w`, we will have built up the /// following two projected types: /// /// * base: `S`, projection: `(base.0).1` /// * base: `(i32, &'static str)`, projection: `base.1` /// /// The first will lead to the constraint `w: &'1 str` (for some /// inferred region `'1`). The second will lead to the constraint `w: /// &'static str`. 
#[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable, TypeFoldable)] pub struct UserTypeProjections { pub contents: Vec<(UserTypeProjection, Span)>, } impl<'tcx> UserTypeProjections { pub fn none() -> Self { UserTypeProjections { contents: vec![] } } pub fn is_empty(&self) -> bool { self.contents.is_empty() } pub fn from_projections(projs: impl Iterator<Item = (UserTypeProjection, Span)>) -> Self { UserTypeProjections { contents: projs.collect() } } pub fn projections_and_spans( &self, ) -> impl Iterator<Item = &(UserTypeProjection, Span)> + ExactSizeIterator { self.contents.iter() } pub fn projections(&self) -> impl Iterator<Item = &UserTypeProjection> + ExactSizeIterator { self.contents.iter().map(|&(ref user_type, _span)| user_type) } pub fn push_projection(mut self, user_ty: &UserTypeProjection, span: Span) -> Self { self.contents.push((user_ty.clone(), span)); self } fn map_projections( mut self, mut f: impl FnMut(UserTypeProjection) -> UserTypeProjection, ) -> Self { self.contents = self.contents.drain(..).map(|(proj, span)| (f(proj), span)).collect(); self } pub fn index(self) -> Self { self.map_projections(|pat_ty_proj| pat_ty_proj.index()) } pub fn subslice(self, from: u32, to: u32) -> Self { self.map_projections(|pat_ty_proj| pat_ty_proj.subslice(from, to)) } pub fn deref(self) -> Self { self.map_projections(|pat_ty_proj| pat_ty_proj.deref()) } pub fn leaf(self, field: Field) -> Self { self.map_projections(|pat_ty_proj| pat_ty_proj.leaf(field)) } pub fn variant(self, adt_def: &'tcx AdtDef, variant_index: VariantIdx, field: Field) -> Self { self.map_projections(|pat_ty_proj| pat_ty_proj.variant(adt_def, variant_index, field)) } } /// Encodes the effect of a user-supplied type annotation on the /// subcomponents of a pattern. The effect is determined by applying the /// given list of proejctions to some underlying base type. Often, /// the projection element list `projs` is empty, in which case this /// directly encodes a type in `base`. 
But in the case of complex patterns with /// subpatterns and bindings, we want to apply only a *part* of the type to a variable, /// in which case the `projs` vector is used. /// /// Examples: /// /// * `let x: T = ...` -- here, the `projs` vector is empty. /// /// * `let (x, _): T = ...` -- here, the `projs` vector would contain /// `field[0]` (aka `.0`), indicating that the type of `s` is /// determined by finding the type of the `.0` field from `T`. #[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable, PartialEq)] pub struct UserTypeProjection { pub base: UserTypeAnnotationIndex, pub projs: Vec<ProjectionKind>, } impl Copy for ProjectionKind {} impl UserTypeProjection { pub(crate) fn index(mut self) -> Self { self.projs.push(ProjectionElem::Index(())); self } pub(crate) fn subslice(mut self, from: u32, to: u32) -> Self { self.projs.push(ProjectionElem::Subslice { from, to, from_end: true }); self } pub(crate) fn deref(mut self) -> Self { self.projs.push(ProjectionElem::Deref); self } pub(crate) fn leaf(mut self, field: Field) -> Self { self.projs.push(ProjectionElem::Field(field, ())); self } pub(crate) fn variant( mut self, adt_def: &AdtDef, variant_index: VariantIdx, field: Field, ) -> Self { self.projs.push(ProjectionElem::Downcast( Some(adt_def.variants[variant_index].ident.name), variant_index, )); self.projs.push(ProjectionElem::Field(field, ())); self } } CloneTypeFoldableAndLiftImpls! 
{ ProjectionKind, } impl<'tcx> TypeFoldable<'tcx> for UserTypeProjection { fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self { use crate::mir::ProjectionElem::*; let base = self.base.fold_with(folder); let projs: Vec<_> = self .projs .iter() .map(|&elem| match elem { Deref => Deref, Field(f, ()) => Field(f, ()), Index(()) => Index(()), Downcast(symbol, variantidx) => Downcast(symbol, variantidx), ConstantIndex { offset, min_length, from_end } => { ConstantIndex { offset, min_length, from_end } } Subslice { from, to, from_end } => Subslice { from, to, from_end }, }) .collect(); UserTypeProjection { base, projs } } fn super_visit_with<Vs: TypeVisitor<'tcx>>(&self, visitor: &mut Vs) -> bool { self.base.visit_with(visitor) // Note: there's nothing in `self.proj` to visit. } } rustc_index::newtype_index! { pub struct Promoted { derive [HashStable] DEBUG_FORMAT = "promoted[{}]" } } impl<'tcx> Debug for Constant<'tcx> { fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { write!(fmt, "{}", self) } } impl<'tcx> Display for Constant<'tcx> { fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result { write!(fmt, "const ")?; pretty_print_const(self.literal, fmt, true) } } fn pretty_print_const( c: &ty::Const<'tcx>, fmt: &mut Formatter<'_>, print_types: bool, ) -> fmt::Result { use crate::ty::print::PrettyPrinter; ty::tls::with(|tcx| { let literal = tcx.lift(&c).unwrap(); let mut cx = FmtPrinter::new(tcx, fmt, Namespace::ValueNS); cx.print_alloc_ids = true; cx.pretty_print_const(literal, print_types)?; Ok(()) }) } impl<'tcx> graph::DirectedGraph for Body<'tcx> { type Node = BasicBlock; } impl<'tcx> graph::WithNumNodes for Body<'tcx> { #[inline] fn num_nodes(&self) -> usize { self.basic_blocks.len() } } impl<'tcx> graph::WithStartNode for Body<'tcx> { #[inline] fn start_node(&self) -> Self::Node { START_BLOCK } } impl<'tcx> graph::WithSuccessors for Body<'tcx> { #[inline] fn successors(&self, node: Self::Node) -> <Self as GraphSuccessors<'_>>::Iter { 
self.basic_blocks[node].terminator().successors().cloned() } } impl<'a, 'b> graph::GraphSuccessors<'b> for Body<'a> { type Item = BasicBlock; type Iter = iter::Cloned<Successors<'b>>; } impl graph::GraphPredecessors<'graph> for Body<'tcx> { type Item = BasicBlock; type Iter = smallvec::IntoIter<[BasicBlock; 4]>; } impl graph::WithPredecessors for Body<'tcx> { #[inline] fn predecessors(&self, node: Self::Node) -> <Self as graph::GraphPredecessors<'_>>::Iter { self.predecessors()[node].clone().into_iter() } } /// `Location` represents the position of the start of the statement; or, if /// `statement_index` equals the number of statements, then the start of the /// terminator. #[derive(Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, HashStable)] pub struct Location { /// The block that the location is within. pub block: BasicBlock, pub statement_index: usize, } impl fmt::Debug for Location { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { write!(fmt, "{:?}[{}]", self.block, self.statement_index) } } impl Location { pub const START: Location = Location { block: START_BLOCK, statement_index: 0 }; /// Returns the location immediately after this one within the enclosing block. /// /// Note that if this location represents a terminator, then the /// resulting location would be out of bounds and invalid. pub fn successor_within_block(&self) -> Location { Location { block: self.block, statement_index: self.statement_index + 1 } } /// Returns `true` if `other` is earlier in the control flow graph than `self`. pub fn is_predecessor_of<'tcx>(&self, other: Location, body: &Body<'tcx>) -> bool { // If we are in the same block as the other location and are an earlier statement // then we are a predecessor of `other`. if self.block == other.block && self.statement_index < other.statement_index { return true; } let predecessors = body.predecessors(); // If we're in another block, then we want to check that block is a predecessor of `other`. 
let mut queue: Vec<BasicBlock> = predecessors[other.block].to_vec(); let mut visited = FxHashSet::default(); while let Some(block) = queue.pop() { // If we haven't visited this block before, then make sure we visit it's predecessors. if visited.insert(block) { queue.extend(predecessors[block].iter().cloned()); } else { continue; } // If we found the block that `self` is in, then we are a predecessor of `other` (since // we found that block by looking at the predecessors of `other`). if self.block == block { return true; } } false } pub fn dominates(&self, other: Location, dominators: &Dominators<BasicBlock>) -> bool { if self.block == other.block { self.statement_index <= other.statement_index } else { dominators.is_dominated_by(other.block, self.block) } } }
37.169405
101
0.565727
336da19d19227f3fe7cf10dfa14f999b384e85a1
512
use chrono::{NaiveDate, NaiveDateTime}; fn main() { let date_time: NaiveDateTime = NaiveDate::from_ymd(2017, 11, 12).and_hms(17, 33, 44); println!( "Number of seconds between 1970-01-01 00:00:00 and {} is {}.", date_time, date_time.timestamp()); let date_time_after_a_billion_seconds = NaiveDateTime::from_timestamp(1_000_000_000, 0); println!( "Date after a billion seconds since 1970-01-01 00:00:00 was {}.", date_time_after_a_billion_seconds); }
36.571429
93
0.660156
143dea827294be82b20d444b6fd53e8b9947e0f2
21,782
//! SVG output support for piet #![deny(clippy::trivially_copy_pass_by_ref)] #[cfg(feature = "evcxr")] mod evcxr; mod text; use std::{borrow::Cow, fmt, fmt::Write, io, mem}; use image::{DynamicImage, GenericImageView, ImageBuffer}; use piet::kurbo::{Affine, Point, Rect, Shape, Size}; use piet::{ Color, Error, FixedGradient, FontStyle, Image, ImageFormat, InterpolationMode, IntoBrush, LineCap, LineJoin, StrokeStyle, TextAlignment, TextLayout as _, }; use svg::node::Node; pub use crate::text::{Text, TextLayout}; // re-export piet pub use piet; #[cfg(feature = "evcxr")] pub use evcxr::draw_evcxr; type Result<T> = std::result::Result<T, Error>; /// `piet::RenderContext` for generating SVG images pub struct RenderContext { size: Size, stack: Vec<State>, state: State, doc: svg::Document, next_id: u64, text: Text, } impl RenderContext { /// Construct an empty `RenderContext` pub fn new(size: Size) -> Self { Self { size, stack: Vec::new(), state: State::default(), doc: svg::Document::new(), next_id: 0, text: Text::new(), } } /// The size that the SVG will render at. /// /// The size is used to set the view box for the svg. pub fn size(&self) -> Size { self.size } /// Write graphics rendered so far to an `std::io::Write` impl, such as `std::fs::File` /// /// Additional rendering can be done afterwards. pub fn write(&self, writer: impl io::Write) -> io::Result<()> { svg::write(writer, &self.doc) } /// Returns an object that can write the svg somewhere. 
pub fn display(&self) -> &impl fmt::Display { &self.doc } fn new_id(&mut self) -> Id { let x = Id(self.next_id); self.next_id += 1; x } } impl piet::RenderContext for RenderContext { type Brush = Brush; type Text = Text; type TextLayout = TextLayout; type Image = SvgImage; fn status(&mut self) -> Result<()> { Ok(()) } fn clear(&mut self, rect: impl Into<Option<Rect>>, color: Color) { let rect = rect.into(); let mut rect = match rect { Some(rect) => svg::node::element::Rectangle::new() .set("width", rect.width()) .set("height", rect.height()) .set("x", rect.x0) .set("y", rect.y0), None => svg::node::element::Rectangle::new() .set("width", "100%") .set("height", "100%"), } .set("fill", fmt_color(&color)) .set("fill-opacity", fmt_opacity(&color)); //FIXME: I don't think we should be clipping, here? if let Some(id) = self.state.clip { rect.assign("clip-path", format!("url(#{})", id.to_string())); } self.doc.append(rect); } fn solid_brush(&mut self, color: Color) -> Brush { Brush { kind: BrushKind::Solid(color), } } fn gradient(&mut self, gradient: impl Into<FixedGradient>) -> Result<Brush> { let id = self.new_id(); match gradient.into() { FixedGradient::Linear(x) => { let mut gradient = svg::node::element::LinearGradient::new() .set("gradientUnits", "userSpaceOnUse") .set("id", id) .set("x1", x.start.x) .set("y1", x.start.y) .set("x2", x.end.x) .set("y2", x.end.y); for stop in x.stops { gradient.append( svg::node::element::Stop::new() .set("offset", stop.pos) .set("stop-color", fmt_color(&stop.color)) .set("stop-opacity", fmt_opacity(&stop.color)), ); } self.doc.append(gradient); } FixedGradient::Radial(x) => { let mut gradient = svg::node::element::RadialGradient::new() .set("gradientUnits", "userSpaceOnUse") .set("id", id) .set("cx", x.center.x) .set("cy", x.center.y) .set("fx", x.center.x + x.origin_offset.x) .set("fy", x.center.y + x.origin_offset.y) .set("r", x.radius); for stop in x.stops { gradient.append( svg::node::element::Stop::new() .set("offset", stop.pos) 
.set("stop-color", fmt_color(&stop.color)) .set("stop-opacity", fmt_opacity(&stop.color)), ); } self.doc.append(gradient); } } Ok(Brush { kind: BrushKind::Ref(id), }) } fn fill(&mut self, shape: impl Shape, brush: &impl IntoBrush<Self>) { let brush = brush.make_brush(self, || shape.bounding_box()); add_shape( &mut self.doc, shape, &Attrs { xf: self.state.xf, clip: self.state.clip, fill: Some((brush.into_owned(), None)), ..Attrs::default() }, ); } fn fill_even_odd(&mut self, shape: impl Shape, brush: &impl IntoBrush<Self>) { let brush = brush.make_brush(self, || shape.bounding_box()); add_shape( &mut self.doc, shape, &Attrs { xf: self.state.xf, clip: self.state.clip, fill: Some((brush.into_owned(), Some("evenodd"))), ..Attrs::default() }, ); } fn clip(&mut self, shape: impl Shape) { let id = self.new_id(); let mut clip = svg::node::element::ClipPath::new().set("id", id); add_shape( &mut clip, shape, &Attrs { xf: self.state.xf, clip: self.state.clip, ..Attrs::default() }, ); self.doc.append(clip); self.state.clip = Some(id); } fn stroke(&mut self, shape: impl Shape, brush: &impl IntoBrush<Self>, width: f64) { let brush = brush.make_brush(self, || shape.bounding_box()); add_shape( &mut self.doc, shape, &Attrs { xf: self.state.xf, clip: self.state.clip, stroke: Some((brush.into_owned(), width, &StrokeStyle::new())), ..Attrs::default() }, ); } fn stroke_styled( &mut self, shape: impl Shape, brush: &impl IntoBrush<Self>, width: f64, style: &StrokeStyle, ) { let brush = brush.make_brush(self, || shape.bounding_box()); add_shape( &mut self.doc, shape, &Attrs { xf: self.state.xf, clip: self.state.clip, stroke: Some((brush.into_owned(), width, style)), ..Attrs::default() }, ); } fn text(&mut self) -> &mut Self::Text { &mut self.text } fn draw_text(&mut self, layout: &Self::TextLayout, pos: impl Into<Point>) { let pos = pos.into(); let color = { let (r, g, b, a) = layout.text_color.as_rgba8(); format!("rgba({}, {}, {}, {})", r, g, b, a as f64 * (100. 
/ 255.)) }; let mut x = pos.x; // SVG doesn't do multiline text, and so doesn't have a concept of text width. We can do // alignment though, using text-anchor. TODO eventually we should generate a separate text // span for each line (having laid out the multiline text ourselves. let anchor = match (layout.max_width, layout.alignment) { (width, TextAlignment::End) if width.is_finite() && width > 0. => { x += width; "text-anchor:end" } (width, TextAlignment::Center) if width.is_finite() && width > 0. => { x += width * 0.5; "text-anchor:middle" } _ => "", }; // If we are using a named font, then mark it for inclusion. self.text() .seen_fonts .lock() .unwrap() .insert(layout.font_face.clone()); let mut text = svg::node::element::Text::new() .set("x", x) .set("y", pos.y + layout.size().height) .set( "style", format!( "font-size:{}pt;\ font-family:\"{}\";\ font-weight:{};\ font-style:{};\ text-decoration:{};\ fill:{};\ {}", layout.font_size, layout.font_face.family.name(), layout.font_face.weight.to_raw(), match layout.font_face.style { FontStyle::Regular => "normal", FontStyle::Italic => "italic", }, match (layout.underline, layout.strikethrough) { (false, false) => "none", (false, true) => "line-through", (true, false) => "underline", (true, true) => "underline line-through", }, color, anchor, ), ) .add(svg::node::Text::new(layout.text())); let affine = self.current_transform(); if affine != Affine::IDENTITY { text.assign("transform", xf_val(&affine)); } if let Some(id) = self.state.clip { text.assign("clip-path", format!("url(#{})", id.to_string())); } self.doc.append(text); } fn save(&mut self) -> Result<()> { let new = self.state.clone(); self.stack.push(mem::replace(&mut self.state, new)); Ok(()) } fn restore(&mut self) -> Result<()> { self.state = self.stack.pop().ok_or(Error::StackUnbalance)?; Ok(()) } fn finish(&mut self) -> Result<()> { self.doc .assign("viewBox", (0, 0, self.size.width, self.size.height)); self.doc.assign( "style", 
format!("width:{}px;height:{}px;", self.size.width, self.size.height), ); let text = (*self.text()).clone(); let mut seen_fonts = text.seen_fonts.lock().unwrap(); if !seen_fonts.is_empty() { // include fonts let mut style = String::new(); for face in &*seen_fonts { if face.family.name().contains('"') { panic!("font family name contains `\"`"); } // TODO convert font to woff2 to save space in svg output, maybe writeln!( &mut style, "@font-face {{\n\ font-family: \"{}\";\n\ font-weight: {};\n\ font-style: {};\n\ src: url(\"data:application/x-font-opentype;charset=utf-8;base64,{}\");\n\ }}", face.family.name(), face.weight.to_raw(), match face.style { FontStyle::Regular => "normal", FontStyle::Italic => "italic", }, base64::display::Base64Display::with_config( &text.font_data(face)?, base64::STANDARD ), ) .unwrap(); } self.doc.append(svg::node::element::Style::new(style)); } seen_fonts.clear(); Ok(()) } fn transform(&mut self, transform: Affine) { self.state.xf *= transform; } fn current_transform(&self) -> Affine { self.state.xf } fn make_image( &mut self, width: usize, height: usize, buf: &[u8], format: ImageFormat, ) -> Result<Self::Image> { Ok(SvgImage(match format { ImageFormat::Grayscale => { let image = ImageBuffer::from_raw(width as _, height as _, buf.to_owned()) .ok_or(Error::InvalidInput)?; DynamicImage::ImageLuma8(image) } ImageFormat::Rgb => { let image = ImageBuffer::from_raw(width as _, height as _, buf.to_owned()) .ok_or(Error::InvalidInput)?; DynamicImage::ImageRgb8(image) } ImageFormat::RgbaSeparate => { let image = ImageBuffer::from_raw(width as _, height as _, buf.to_owned()) .ok_or(Error::InvalidInput)?; DynamicImage::ImageRgba8(image) } ImageFormat::RgbaPremul => { use image::Rgba; use piet::util::unpremul; let mut image = ImageBuffer::<Rgba<u8>, _>::from_raw(width as _, height as _, buf.to_owned()) .ok_or(Error::InvalidInput)?; for px in image.pixels_mut() { px[0] = unpremul(px[0], px[3]); px[1] = unpremul(px[1], px[3]); px[2] = unpremul(px[2], 
px[3]); } DynamicImage::ImageRgba8(image) } // future-proof _ => return Err(Error::Unimplemented), })) } #[inline] fn draw_image( &mut self, image: &Self::Image, dst_rect: impl Into<Rect>, interp: InterpolationMode, ) { draw_image(self, image, None, dst_rect.into(), interp); } #[inline] fn draw_image_area( &mut self, image: &Self::Image, src_rect: impl Into<Rect>, dst_rect: impl Into<Rect>, interp: InterpolationMode, ) { draw_image(self, image, Some(src_rect.into()), dst_rect.into(), interp); } fn capture_image_area(&mut self, _src_rect: impl Into<Rect>) -> Result<Self::Image> { Err(Error::Unimplemented) } fn blurred_rect(&mut self, rect: Rect, _blur_radius: f64, brush: &impl IntoBrush<Self>) { // TODO blur (perhaps using SVG filters) self.fill(rect, brush) } } fn draw_image( ctx: &mut RenderContext, image: &<RenderContext as piet::RenderContext>::Image, _src_rect: Option<Rect>, dst_rect: Rect, _interp: InterpolationMode, ) { use base64::write::EncoderStringWriter; use image::ImageOutputFormat; let mut writer = EncoderStringWriter::new(base64::STANDARD); image .0 .write_to(&mut writer, ImageOutputFormat::Png) .unwrap(); // TODO when src_rect.is_some() // TODO maybe we could use css 'image-rendering' to control interpolation? 
let node = svg::node::element::Image::new() .set("x", dst_rect.x0) .set("y", dst_rect.y0) .set("width", dst_rect.x1 - dst_rect.x0) .set("height", dst_rect.y1 - dst_rect.y0) .set( "href", format!("data:image/png;base64,{}", writer.into_inner()), ); ctx.doc.append(node); } #[derive(Default)] struct Attrs<'a> { xf: Affine, clip: Option<Id>, fill: Option<(Brush, Option<&'a str>)>, stroke: Option<(Brush, f64, &'a StrokeStyle)>, } impl Attrs<'_> { // allow clippy warning for `width != 1.0` in if statement #[allow(clippy::float_cmp)] fn apply_to(&self, node: &mut impl Node) { node.assign("transform", xf_val(&self.xf)); if let Some(id) = self.clip { node.assign("clip-path", format!("url(#{})", id.to_string())); } if let Some((ref brush, rule)) = self.fill { node.assign("fill", brush.color()); if let Some(opacity) = brush.opacity() { node.assign("fill-opacity", opacity); } if let Some(rule) = rule { node.assign("fill-rule", rule); } } else { node.assign("fill", "none"); } if let Some((ref stroke, width, style)) = self.stroke { node.assign("stroke", stroke.color()); if let Some(opacity) = stroke.opacity() { node.assign("stroke-opacity", opacity); } if width != 1.0 { node.assign("stroke-width", width); } match style.line_join { LineJoin::Miter { limit } if limit == LineJoin::DEFAULT_MITER_LIMIT => (), LineJoin::Miter { limit } => { node.assign("stroke-miterlimit", limit); } LineJoin::Round => { node.assign("stroke-linejoin", "round"); } LineJoin::Bevel => { node.assign("stroke-linejoin", "bevel"); } } match style.line_cap { LineCap::Round => { node.assign("stroke-linecap", "round"); } LineCap::Square => { node.assign("stroke-linecap", "square"); } LineCap::Butt => (), } if !style.dash_pattern.is_empty() { node.assign("stroke-dasharray", style.dash_pattern.to_vec()); } if style.dash_offset != 0.0 { node.assign("stroke-dashoffset", style.dash_offset); } } } } fn xf_val(xf: &Affine) -> svg::node::Value { let xf = xf.as_coeffs(); format!( "matrix({} {} {} {} {} {})", xf[0], 
xf[1], xf[2], xf[3], xf[4], xf[5] ) .into() } fn add_shape(node: &mut impl Node, shape: impl Shape, attrs: &Attrs) { if let Some(circle) = shape.as_circle() { let mut x = svg::node::element::Circle::new() .set("cx", circle.center.x) .set("cy", circle.center.y) .set("r", circle.radius); attrs.apply_to(&mut x); node.append(x); } else if let Some(round_rect) = shape .as_rounded_rect() .filter(|r| r.radii().as_single_radius().is_some()) { let mut x = svg::node::element::Rectangle::new() .set("x", round_rect.origin().x) .set("y", round_rect.origin().y) .set("width", round_rect.width()) .set("height", round_rect.height()) .set("rx", round_rect.radii().as_single_radius().unwrap()) .set("ry", round_rect.radii().as_single_radius().unwrap()); attrs.apply_to(&mut x); node.append(x); } else if let Some(rect) = shape.as_rect() { let mut x = svg::node::element::Rectangle::new() .set("x", rect.origin().x) .set("y", rect.origin().y) .set("width", rect.width()) .set("height", rect.height()); attrs.apply_to(&mut x); node.append(x); } else { let mut path = svg::node::element::Path::new().set("d", shape.into_path(1e-3).to_svg()); attrs.apply_to(&mut path); node.append(path); } } #[derive(Debug, Clone, Default)] struct State { xf: Affine, clip: Option<Id>, } /// An SVG brush #[derive(Debug, Clone)] pub struct Brush { kind: BrushKind, } #[derive(Debug, Clone)] enum BrushKind { Solid(Color), Ref(Id), } impl Brush { fn color(&self) -> svg::node::Value { match self.kind { BrushKind::Solid(ref color) => fmt_color(color).into(), BrushKind::Ref(id) => format!("url(#{})", id.to_string()).into(), } } fn opacity(&self) -> Option<svg::node::Value> { match self.kind { BrushKind::Solid(ref color) => Some(fmt_opacity(color).into()), BrushKind::Ref(_) => None, } } } impl IntoBrush<RenderContext> for Brush { fn make_brush<'b>( &'b self, _piet: &mut RenderContext, _bbox: impl FnOnce() -> Rect, ) -> Cow<'b, Brush> { Cow::Owned(self.clone()) } } // RGB in hex representation fn fmt_color(color: &Color) -> 
String { format!("#{:06x}", color.as_rgba_u32() >> 8) } // Opacity as value from [0, 1] fn fmt_opacity(color: &Color) -> String { format!("{}", color.as_rgba().3) } #[derive(Clone)] pub struct SvgImage(image::DynamicImage); impl Image for SvgImage { fn size(&self) -> Size { let (width, height) = self.0.dimensions(); Size { width: width as _, height: height as _, } } } #[derive(Debug, Copy, Clone)] struct Id(u64); impl Id { // TODO allowing clippy warning temporarily. But this should be changed to impl Display #[allow(clippy::inherent_to_string)] fn to_string(self) -> String { const ALPHABET: &[u8; 52] = b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; let mut out = String::with_capacity(4); let mut x = self.0; loop { let digit = (x % ALPHABET.len() as u64) as usize; out.push(ALPHABET[digit] as char); x /= ALPHABET.len() as u64; if x == 0 { break; } } out } } impl From<Id> for svg::node::Value { fn from(x: Id) -> Self { x.to_string().into() } }
31.522431
98
0.482187
75536c4648687cee0a8daf7d664fed5f74567853
31,884
use crate::error::{ Error, InvalidFunctionArgument, ResponseError, UnknownResponseStatus, UnsupportedQueryResultType, }; use crate::response::*; use crate::selector::Selector; use crate::util::{validate_duration, RuleType, TargetState}; use std::collections::HashMap; use url::Url; /// A client used to execute queries. It uses a [reqwest::Client] internally /// that manages connections for us. /// /// Note that possible errors regarding domain name resolution or /// connection establishment will only be propagated from the underlying /// [reqwest::Client] when a query is executed. #[derive(Clone)] pub struct Client { pub(crate) client: reqwest::Client, pub(crate) base_url: String, } impl Default for Client { /// Create a standard Client that sends requests to "http://127.0.0.1:9090/api/v1". /// /// ```rust /// use prometheus_http_query::Client; /// /// let client = Client::default(); /// ``` fn default() -> Self { Client { client: reqwest::Client::new(), base_url: String::from("http://127.0.0.1:9090/api/v1"), } } } impl std::str::FromStr for Client { type Err = crate::error::Error; /// Create a Client from a custom base URL, which *may* be useful if requests /// are handled by i.e. a reverse proxy. /// /// ```rust /// use prometheus_http_query::Client; /// use std::str::FromStr; /// /// let client = Client::from_str("http://proxy.example.com/prometheus"); /// assert!(client.is_ok()); /// ``` fn from_str(url: &str) -> Result<Self, Self::Err> { let url = Url::parse(url).map_err(Error::UrlParse)?; let client = Client { base_url: format!("{}/api/v1", url), client: reqwest::Client::new(), }; Ok(client) } } impl std::convert::TryFrom<&str> for Client { type Error = crate::error::Error; /// Create a Client from a custom base URL, which *may* be useful if requests /// are handled by i.e. a reverse proxy. 
/// /// ```rust /// use prometheus_http_query::Client; /// use std::convert::TryFrom; /// /// let client = Client::try_from("http://proxy.example.com/prometheus"); /// assert!(client.is_ok()); /// ``` fn try_from(url: &str) -> Result<Self, Self::Error> { let url = Url::parse(url).map_err(Error::UrlParse)?; let client = Client { base_url: format!("{}/api/v1", url), client: reqwest::Client::new(), }; Ok(client) } } impl std::convert::TryFrom<String> for Client { type Error = crate::error::Error; /// Create a Client from a custom base URL, which *may* be useful if requests /// are handled by i.e. a reverse proxy. /// /// ```rust /// use prometheus_http_query::Client; /// use std::convert::TryFrom; /// /// let url = String::from("http://proxy.example.com/prometheus"); /// let client = Client::try_from(url); /// assert!(client.is_ok()); /// ``` fn try_from(url: String) -> Result<Self, Self::Error> { let url = Url::parse(&url).map_err(Error::UrlParse)?; let client = Client { base_url: format!("{}/api/v1", url), client: reqwest::Client::new(), }; Ok(client) } } impl Client { /// Return a reference to the wrapped [reqwest::Client], i.e. to /// use it for other requests unrelated to the Prometheus API. /// /// ```rust /// use prometheus_http_query::{Client, Error}; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// // An amittedly bad example, but that is not the point. /// let response = client /// .inner() /// .head("http://127.0.0.1:9090") /// .send() /// .await /// .map_err(Error::Reqwest)?; /// /// // Prometheus does not allow HEAD requests. /// assert_eq!(response.status(), reqwest::StatusCode::METHOD_NOT_ALLOWED); /// Ok(()) /// } /// ``` pub fn inner(&self) -> &reqwest::Client { &self.client } /// Return a reference to the base URL that is used in requests to /// the Prometheus API. 
/// /// ```rust /// use prometheus_http_query::Client; /// use std::str::FromStr; /// /// let client = Client::default(); /// /// assert_eq!(client.base_url(), "http://127.0.0.1:9090/api/v1"); /// /// let client = Client::from_str("https://proxy.example.com:8443/prometheus").unwrap(); /// /// assert_eq!(client.base_url(), "https://proxy.example.com:8443/prometheus/api/v1"); /// ``` pub fn base_url(&self) -> &str { &self.base_url } /// Create a Client from a custom [reqwest::Client] and URL. /// This way you can account for all extra parameters (e.g. x509 authentication) /// that may be needed to connect to Prometheus or an intermediate proxy, /// by building it into the [reqwest::Client]. /// /// ```rust /// use prometheus_http_query::{Client, Error}; /// /// fn main() -> Result<(), Error> { /// let client = { /// let c = reqwest::Client::builder() /// .no_proxy() /// .build() /// .map_err(Error::Reqwest)?; /// Client::from(c, "https://prometheus.example.com") /// }; /// /// assert!(client.is_ok()); /// Ok(()) /// } /// ``` pub fn from(client: reqwest::Client, url: &str) -> Result<Self, Error> { let base_url = format!("{}/api/v1", Url::parse(url).map_err(Error::UrlParse)?); Ok(Client { base_url, client }) } /// Perform an instant query using a [crate::RangeVector] or [crate::InstantVector]. 
/// /// ```rust /// use prometheus_http_query::{Client, InstantVector, Selector, Aggregate, Error}; /// use prometheus_http_query::aggregations::sum; /// use std::convert::TryInto; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// let v: InstantVector = Selector::new() /// .metric("node_cpu_seconds_total") /// .try_into()?; /// /// let s = sum(v, Some(Aggregate::By(&["cpu"]))); /// /// let response = client.query(s, None, None).await?; /// /// assert!(response.as_instant().is_some()); /// /// Ok(()) /// } /// ``` pub async fn query( &self, vector: impl std::fmt::Display, time: Option<i64>, timeout: Option<&str>, ) -> Result<QueryResultType, Error> { let url = format!("{}/query", self.base_url); let query = vector.to_string(); let mut params = vec![("query", query.as_str())]; let time = time.map(|t| t.to_string()); if let Some(t) = &time { params.push(("time", t.as_str())); } if let Some(t) = timeout { validate_duration(t, false)?; params.push(("timeout", t)); } let response = self .client .get(&url) .query(params.as_slice()) .send() .await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response) .await .and_then(convert_query_response) } pub async fn query_range( &self, vector: impl std::fmt::Display, start: i64, end: i64, step: &str, timeout: Option<&str>, ) -> Result<QueryResultType, Error> { let url = format!("{}/query_range", self.base_url); validate_duration(step, false)?; let query = vector.to_string(); let start = start.to_string(); let end = end.to_string(); let mut params = vec![ ("query", query.as_str()), ("start", start.as_str()), ("end", end.as_str()), ("step", step), ]; if let Some(t) = timeout { validate_duration(t, false)?; params.push(("timeout", t)); } let response = self .client .get(&url) .query(params.as_slice()) .send() .await .map_err(Error::Reqwest)? 
.error_for_status() .map_err(Error::Reqwest)?; check_response(response) .await .and_then(convert_query_response) } /// Find time series that match certain label sets ([Selector]s). /// /// ```rust /// use prometheus_http_query::{Client, Selector, Error}; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// let s1 = Selector::new() /// .with("handler", "/api/v1/query"); /// /// let s2 = Selector::new() /// .with("job", "node") /// .regex_match("mode", ".+"); /// /// let set = vec![s1, s2]; /// /// let response = client.series(&set, None, None).await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn series( &self, selectors: &[Selector<'_>], start: Option<i64>, end: Option<i64>, ) -> Result<Vec<HashMap<String, String>>, Error> { let url = format!("{}/series", self.base_url); let mut params = vec![]; let start = start.map(|t| t.to_string()); if let Some(s) = &start { params.push(("start", s.as_str())); } let end = end.map(|t| t.to_string()); if let Some(e) = &end { params.push(("end", e.as_str())); } if selectors.is_empty() { return Err(Error::InvalidFunctionArgument(InvalidFunctionArgument { message: String::from("at least one match[] argument (Selector) must be provided in order to query the series endpoint") })); } let selectors: Vec<String> = selectors .iter() .map(|s| match s.to_string().as_str().split_once('}') { Some(split) => { let mut s = split.0.to_owned(); s.push('}'); s } None => s.to_string(), }) .collect(); for selector in &selectors { params.push(("match[]", selector)); } let response = self .client .get(&url) .query(params.as_slice()) .send() .await .map_err(Error::Reqwest)? 
.error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let data = r.get("data").ok_or(Error::MissingField)?.to_owned(); let result: Vec<HashMap<String, String>> = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(result) }) } /// Retrieve all label names (or use [Selector]s to select time series to read label names from). /// /// ```rust /// use prometheus_http_query::{Client, Selector, Error}; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// // To retrieve a list of all labels: /// let response = client.label_names(None, None, None).await; /// /// assert!(response.is_ok()); /// /// // To retrieve a list of labels that appear in specific time series, use Selectors: /// let s1 = Selector::new() /// .with("handler", "/api/v1/query"); /// /// let s2 = Selector::new() /// .with("job", "node") /// .regex_match("mode", ".+"); /// /// let set = Some(vec![s1, s2]); /// /// let response = client.label_names(set, None, None).await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn label_names( &self, selectors: Option<Vec<Selector<'_>>>, start: Option<i64>, end: Option<i64>, ) -> Result<Vec<String>, Error> { let url = format!("{}/labels", self.base_url); let mut params = vec![]; let start = start.map(|t| t.to_string()); if let Some(s) = &start { params.push(("start", s.as_str())); } let end = end.map(|t| t.to_string()); if let Some(e) = &end { params.push(("end", e.as_str())); } let selectors: Option<Vec<String>> = selectors.map(|vec| { vec.iter() .map(|s| match s.to_string().as_str().split_once('}') { Some(split) => { let mut s = split.0.to_owned(); s.push('}'); s } None => s.to_string(), }) .collect() }); if let Some(ref selector_vec) = selectors { for selector in selector_vec { params.push(("match[]", selector)); } } let response = self .client .get(&url) .query(params.as_slice()) .send() .await 
.map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let data = r.get("data").ok_or(Error::MissingField)?.to_owned(); let result: Vec<String> = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(result) }) } /// Retrieve all label values for a label name (or use [Selector]s to select the time series to read label values from) /// /// ```rust /// use prometheus_http_query::{Client, Selector, Error}; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// // To retrieve a list of all label values for a specific label name: /// let response = client.label_values("job", None, None, None).await; /// /// assert!(response.is_ok()); /// /// // To retrieve a list of label values of labels in specific time series instead: /// let s1 = Selector::new() /// .regex_match("instance", ".+"); /// /// let set = Some(vec![s1]); /// /// let response = client.label_values("job", set, None, None).await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn label_values( &self, label: &str, selectors: Option<Vec<Selector<'_>>>, start: Option<i64>, end: Option<i64>, ) -> Result<Vec<String>, Error> { let url = format!("{}/label/{}/values", self.base_url, label); let mut params = vec![]; let start = start.map(|t| t.to_string()); if let Some(s) = &start { params.push(("start", s.as_str())); } let end = end.map(|t| t.to_string()); if let Some(e) = &end { params.push(("end", e.as_str())); } let selectors: Option<Vec<String>> = selectors.map(|vec| { vec.iter() .map(|s| match s.to_string().as_str().split_once('}') { Some(split) => { let mut s = split.0.to_owned(); s.push('}'); s } None => s.to_string(), }) .collect() }); if let Some(ref selector_vec) = selectors { for selector in selector_vec { params.push(("match[]", selector)); } } let response = self .client .get(&url) .query(params.as_slice()) .send() 
.await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let data = r.get("data").ok_or(Error::MissingField)?.to_owned(); let result = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(result) }) } /// Query the current state of target discovery. /// /// ```rust /// use prometheus_http_query::{Client, Error, TargetState}; /// use std::convert::TryInto; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// let response = client.targets(None).await; /// /// assert!(response.is_ok()); /// /// // Filter targets by type: /// let response = client.targets(Some(TargetState::Active)).await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn targets(&self, state: Option<TargetState>) -> Result<Targets, Error> { let url = format!("{}/targets", self.base_url); let mut params = vec![]; let state = state.map(|s| s.to_string()); if let Some(s) = &state { params.push(("state", s.as_str())) } let response = self .client .get(&url) .query(params.as_slice()) .send() .await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let data = r.get("data").ok_or(Error::MissingField)?.to_owned(); let targets: Targets = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(targets) }) } /// Retrieve a list of rule groups of recording and alerting rules. 
/// /// ```rust /// use prometheus_http_query::{Client, Error, RuleType}; /// use std::convert::TryInto; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// let response = client.rules(None).await; /// /// assert!(response.is_ok()); /// /// // Filter rules by type: /// let response = client.rules(Some(RuleType::Alert)).await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn rules(&self, rule_type: Option<RuleType>) -> Result<Vec<RuleGroup>, Error> { let url = format!("{}/rules", self.base_url); let mut params = vec![]; let rule_type = rule_type.map(|s| s.to_string()); if let Some(s) = &rule_type { params.push(("type", s.as_str())) } let response = self .client .get(&url) .query(params.as_slice()) .send() .await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let groups = r .get("data") .ok_or(Error::MissingField)? .as_object() .unwrap() .get("groups") .ok_or(Error::MissingField)? .to_owned(); let result: Vec<RuleGroup> = serde_json::from_value(groups).map_err(Error::ResponseParse)?; Ok(result) }) } /// Retrieve a list of active alerts. /// /// ```rust /// use prometheus_http_query::{Client, Error}; /// use std::convert::TryInto; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// let response = client.alerts().await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn alerts(&self) -> Result<Vec<Alert>, Error> { let url = format!("{}/alerts", self.base_url); let response = self .client .get(&url) .send() .await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let alerts = r .get("data") .ok_or(Error::MissingField)? .as_object() .unwrap() .get("alerts") .ok_or(Error::MissingField)? 
.to_owned(); let result: Vec<Alert> = serde_json::from_value(alerts).map_err(Error::ResponseParse)?; Ok(result) }) } /// Retrieve a list of flags that Prometheus was configured with. /// /// ```rust /// use prometheus_http_query::{Client, Error}; /// use std::convert::TryInto; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// let response = client.flags().await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn flags(&self) -> Result<HashMap<String, String>, Error> { let url = format!("{}/status/flags", self.base_url); let response = self .client .get(&url) .send() .await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let data = r.get("data").ok_or(Error::MissingField)?.to_owned(); let flags: HashMap<String, String> = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(flags) }) } /// Query the current state of alertmanager discovery. /// /// ```rust /// use prometheus_http_query::{Client, Error, TargetState}; /// use std::convert::TryInto; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// let response = client.alertmanagers().await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn alertmanagers(&self) -> Result<Alertmanagers, Error> { let url = format!("{}/alertmanagers", self.base_url); let response = self .client .get(&url) .send() .await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let data = r .get("data") .ok_or(Error::MissingField)? .as_object() .unwrap(); let mut active: Vec<Url> = vec![]; let items = data .get("activeAlertmanagers") .ok_or(Error::MissingField)? .as_array() .unwrap(); for item in items { let raw_url = item .get("url") .ok_or(Error::MissingField)? 
.as_str() .unwrap(); let url = Url::parse(raw_url).map_err(Error::UrlParse)?; active.push(url); } let mut dropped: Vec<Url> = vec![]; let items = data .get("droppedAlertmanagers") .ok_or(Error::MissingField)? .as_array() .unwrap(); for item in items { let raw_url = item .get("url") .ok_or(Error::MissingField)? .as_str() .unwrap(); let url = Url::parse(raw_url).map_err(Error::UrlParse)?; dropped.push(url); } let result = Alertmanagers { active, dropped }; Ok(result) }) } /// Retrieve metadata about metrics that are currently scraped from targets, along with target information. /// /// ```rust /// use prometheus_http_query::{Client, Error, Selector}; /// use std::convert::TryInto; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// // Retrieve metadata for a specific metric from all targets. /// let response = client.target_metadata(Some("go_routines"), None, None).await; /// /// assert!(response.is_ok()); /// /// // Retrieve metric metadata from specific targets. /// let s = Selector::new().with("job", "prometheus"); /// /// let response = client.target_metadata(None, Some(&s), None).await; /// /// assert!(response.is_ok()); /// /// // Retrieve metadata for a specific metric from targets that match a specific label set. 
/// let s = Selector::new().with("job", "node"); /// /// let response = client.target_metadata(Some("node_cpu_seconds_total"), Some(&s), None).await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn target_metadata( &self, metric: Option<&str>, match_target: Option<&Selector<'_>>, limit: Option<usize>, ) -> Result<Vec<TargetMetadata>, Error> { let url = format!("{}/targets/metadata", self.base_url); let mut params = vec![]; let metric = metric.map(|s| s.to_string()); if let Some(m) = &metric { params.push(("metric", m.as_str())) } let match_target = match_target.map(|s| s.to_string()); if let Some(m) = &match_target { params.push(("match_target", m.as_str())) } let limit = limit.map(|s| s.to_string()); if let Some(l) = &limit { params.push(("limit", l.as_str())) } let response = self .client .get(&url) .query(params.as_slice()) .send() .await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let data = r.get("data").ok_or(Error::MissingField)?.to_owned(); let result: Vec<TargetMetadata> = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(result) }) } /// Retrieve metadata about metrics that are currently scraped from targets. /// /// ```rust /// use prometheus_http_query::{Client, Error}; /// use std::convert::TryInto; /// /// #[tokio::main(flavor = "current_thread")] /// async fn main() -> Result<(), Error> { /// let client = Client::default(); /// /// // Retrieve metadata for a all metrics. /// let response = client.metric_metadata(None, None).await; /// /// assert!(response.is_ok()); /// /// // Limit the number of returned metrics /// let response = client.metric_metadata(None, Some(10)).await; /// /// assert!(response.is_ok()); /// /// // Retrieve metadata of a specific metric. 
/// let response = client.metric_metadata(Some("go_routines"), None).await; /// /// assert!(response.is_ok()); /// /// Ok(()) /// } /// ``` pub async fn metric_metadata( &self, metric: Option<&str>, limit: Option<usize>, ) -> Result<HashMap<String, Vec<MetricMetadata>>, Error> { let url = format!("{}/metadata", self.base_url); let mut params = vec![]; let metric = metric.map(|s| s.to_string()); if let Some(m) = &metric { params.push(("metric", m.as_str())) } let limit = limit.map(|s| s.to_string()); if let Some(l) = &limit { params.push(("limit", l.as_str())) } let response = self .client .get(&url) .query(params.as_slice()) .send() .await .map_err(Error::Reqwest)? .error_for_status() .map_err(Error::Reqwest)?; check_response(response).await.and_then(move |r| { let data = r.get("data").ok_or(Error::MissingField)?.to_owned(); let result: HashMap<String, Vec<MetricMetadata>> = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(result) }) } } // Convert the response object to an intermediary map, check the JSON's status field // and map potential errors (if any) to a proper error type. Else return the map. async fn check_response( response: reqwest::Response, ) -> Result<HashMap<String, serde_json::Value>, Error> { let map = response .json::<HashMap<String, serde_json::Value>>() .await .map_err(Error::Reqwest)?; let status = map .get("status") .ok_or(Error::MissingField)? .as_str() .unwrap(); match status { "success" => Ok(map), "error" => { let kind = map .get("errorType") .ok_or(Error::MissingField)? .as_str() .unwrap() .to_string(); let message = map .get("error") .ok_or(Error::MissingField)? .as_str() .unwrap() .to_string(); Err(Error::ResponseError(ResponseError { kind, message })) } _ => Err(Error::UnknownResponseStatus(UnknownResponseStatus( status.to_string(), ))), } } // Parses the API response from a map to a Response enum that // encapsulates a result type of "vector", "matrix", or "scalar". 
fn convert_query_response( response: HashMap<String, serde_json::Value>, ) -> Result<QueryResultType, Error> { let data_obj = response .get("data") .ok_or(Error::MissingField)? .as_object() .unwrap(); let data_type = data_obj .get("resultType") .ok_or(Error::MissingField)? .as_str() .unwrap(); let data = data_obj .get("result") .ok_or(Error::MissingField)? .to_owned(); match data_type { "vector" => { let result: Vec<InstantVector> = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(QueryResultType::Vector(result)) } "matrix" => { let result: Vec<RangeVector> = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(QueryResultType::Matrix(result)) } "scalar" => { let result: Sample = serde_json::from_value(data).map_err(Error::ResponseParse)?; Ok(QueryResultType::Scalar(result)) } _ => Err(Error::UnsupportedQueryResultType( UnsupportedQueryResultType(data_type.to_string()), )), } }
30.279202
136
0.499435
5d7c5077e8b2015a0b1a45d1bfa8caf518ec1b53
6,602
use pact_matching::models::*; use pact_matching::models::matchingrules::MatchingRules; #[cfg(test)] use regex::Regex; use std::collections::HashMap; use prelude::*; #[cfg(test)] #[allow(unused_imports)] use env_logger; /// Various methods shared between `RequestBuilder` and `ResponseBuilder`. pub trait HttpPartBuilder { /// (Implementation detail.) This function fetches the mutable state that's /// needed to update this builder's `headers`. You should not need to use /// this under normal circumstances. /// /// This function has two return values because its job is to split a single /// `&mut` into two `&mut` pointing to sub-objects, which has to be done /// carefully in Rust. #[doc(hidden)] fn headers_and_matching_rules_mut(&mut self) -> (&mut HashMap<String, Vec<String>>, &mut MatchingRules); /// (Implementation detail.) This function fetches the mutable state that's /// needed to update this builder's `body`. You should not need to use this /// under normal circumstances. /// /// This function has two return values because its job is to split a single /// `&mut` into two `&mut` pointing to sub-objects, which has to be done /// carefully in Rust. #[doc(hidden)] fn body_and_matching_rules_mut(&mut self) -> (&mut OptionalBody, &mut MatchingRules); /// Specify a header pattern. 
///
/// ```
/// #[macro_use]
/// extern crate pact_consumer;
/// extern crate regex;
///
/// use pact_consumer::prelude::*;
/// use pact_consumer::builders::RequestBuilder;
/// use regex::Regex;
///
/// # fn main() {
/// RequestBuilder::default()
///     .header("X-Simple", "value")
///     .header("X-Digits", term!("^[0-9]+$", "123"));
/// # }
/// ```
fn header<N, V>(&mut self, name: N, value: V) -> &mut Self
where
    N: Into<String>,
    V: Into<StringPattern>,
{
    let name = name.into();
    let value = value.into();
    {
        let (headers, rules) = self.headers_and_matching_rules_mut();
        // Use the entry API so the key is looked up only once: append the
        // example value to an existing header, or create the header entry.
        headers
            .entry(name.clone())
            .or_insert_with(Vec::new)
            .push(value.to_example());
        value.extract_matching_rules(&name, rules.add_category("header"))
    }
    self
}

/// Set the `Content-Type` header.
fn content_type<CT>(&mut self, content_type: CT) -> &mut Self
where
    CT: Into<StringPattern>,
{
    self.header("Content-Type", content_type)
}

/// Set the `Content-Type` header to `text/html`.
fn html(&mut self) -> &mut Self {
    self.content_type("text/html")
}

/// Set the `Content-Type` header to `application/json; charset=utf-8`,
/// with enough flexibility to cover common variations.
fn json_utf8(&mut self) -> &mut Self {
    self.content_type(term!(
        "^application/json; charset=(utf|UTF)-8$",
        "application/json; charset=utf-8"
    ))
}

/// Specify a body literal. This does not allow using patterns.
/// /// ``` /// #[macro_use] /// extern crate pact_consumer; /// /// use pact_consumer::prelude::*; /// use pact_consumer::builders::RequestBuilder; /// /// # fn main() { /// RequestBuilder::default().body("Hello"); /// # } /// ``` /// /// TODO: We may want to change this to `B: Into<Vec<u8>>` depending on what /// happens with https://github.com/pact-foundation/pact-reference/issues/19 fn body<B: Into<String>>(&mut self, body: B) -> &mut Self { let body = body.into(); { let (body_ref, _) = self.body_and_matching_rules_mut(); *body_ref = OptionalBody::Present(body.into()); } self } /// Specify the body as `JsonPattern`, possibly including special matching /// rules. /// /// ``` /// #[macro_use] /// extern crate pact_consumer; /// /// use pact_consumer::prelude::*; /// use pact_consumer::builders::RequestBuilder; /// /// # fn main() { /// RequestBuilder::default().json_body(json_pattern!({ /// "message": like!("Hello"), /// })); /// # } /// ``` fn json_body<B: Into<JsonPattern>>(&mut self, body: B) -> &mut Self { let body = body.into(); { let (body_ref, rules) = self.body_and_matching_rules_mut(); *body_ref = OptionalBody::Present(body.to_example().to_string().into()); body.extract_matching_rules("$", rules.add_category("body")); } self } } #[test] fn header_pattern() { let application_regex = Regex::new("application/.*").unwrap(); let pattern = PactBuilder::new("C", "P") .interaction("I", |i| { i.request.header( "Content-Type", Term::new(application_regex, "application/json"), ); }) .build(); let good = PactBuilder::new("C", "P") .interaction("I", |i| { i.request.header("Content-Type", "application/xml"); }) .build(); let bad = PactBuilder::new("C", "P") .interaction("I", |i| { i.request.header("Content-Type", "text/html"); }) .build(); assert_requests_match!(good, pattern); assert_requests_do_not_match!(bad, pattern); } #[test] fn body_literal() { let pattern = PactBuilder::new("C", "P") .interaction("I", |i| { i.request.body("Hello"); }) .build(); let good = 
PactBuilder::new("C", "P") .interaction("I", |i| { i.request.body("Hello"); }) .build(); let bad = PactBuilder::new("C", "P") .interaction("I", |i| { i.request.body("Bye"); }) .build(); assert_requests_match!(good, pattern); assert_requests_do_not_match!(bad, pattern); } #[test] fn json_body_pattern() { let pattern = PactBuilder::new("C", "P") .interaction("I", |i| { i.request.json_body(json_pattern!({ "message": Like::new(json_pattern!("Hello")), })); }) .build(); let good = PactBuilder::new("C", "P") .interaction("I", |i| { i.request.json_body(json_pattern!({ "message": "Goodbye" })); }) .build(); let bad = PactBuilder::new("C", "P") .interaction("I", |i| { i.request.json_body(json_pattern!({ "message": false })); }) .build(); assert_requests_match!(good, pattern); assert_requests_do_not_match!(bad, pattern); }
32.204878
108
0.566949
5d67bbd5ed5e3696c22ff58a392f42376f1fc976
19,637
#[doc = "Reader of register RTCPS1CTL"] pub type R = crate::R<u16, super::RTCPS1CTL>; #[doc = "Writer for register RTCPS1CTL"] pub type W = crate::W<u16, super::RTCPS1CTL>; #[doc = "Register RTCPS1CTL `reset()`'s with value 0"] impl crate::ResetValue for super::RTCPS1CTL { type Type = u16; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "0:0\\] Prescale timer 1 interrupt flag\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RT1PSIFG_A { #[doc = "0: No time event occurred"] RT1PSIFG_0 = 0, #[doc = "1: Time event occurred"] RT1PSIFG_1 = 1, } impl From<RT1PSIFG_A> for bool { #[inline(always)] fn from(variant: RT1PSIFG_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `RT1PSIFG`"] pub type RT1PSIFG_R = crate::R<bool, RT1PSIFG_A>; impl RT1PSIFG_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RT1PSIFG_A { match self.bits { false => RT1PSIFG_A::RT1PSIFG_0, true => RT1PSIFG_A::RT1PSIFG_1, } } #[doc = "Checks if the value of the field is `RT1PSIFG_0`"] #[inline(always)] pub fn is_rt1psifg_0(&self) -> bool { *self == RT1PSIFG_A::RT1PSIFG_0 } #[doc = "Checks if the value of the field is `RT1PSIFG_1`"] #[inline(always)] pub fn is_rt1psifg_1(&self) -> bool { *self == RT1PSIFG_A::RT1PSIFG_1 } } #[doc = "Write proxy for field `RT1PSIFG`"] pub struct RT1PSIFG_W<'a> { w: &'a mut W, } impl<'a> RT1PSIFG_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RT1PSIFG_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "No time event occurred"] #[inline(always)] pub fn rt1psifg_0(self) -> &'a mut W { self.variant(RT1PSIFG_A::RT1PSIFG_0) } #[doc = "Time event occurred"] #[inline(always)] pub fn rt1psifg_1(self) -> &'a mut W { self.variant(RT1PSIFG_A::RT1PSIFG_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut 
W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u16) & 0x01); self.w } } #[doc = "1:1\\] Prescale timer 1 interrupt enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RT1PSIE_A { #[doc = "0: Interrupt not enabled"] DISABLE = 0, #[doc = "1: Interrupt enabled (LPM3/LPM3.5 wake-up enabled)"] ENABLE = 1, } impl From<RT1PSIE_A> for bool { #[inline(always)] fn from(variant: RT1PSIE_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `RT1PSIE`"] pub type RT1PSIE_R = crate::R<bool, RT1PSIE_A>; impl RT1PSIE_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RT1PSIE_A { match self.bits { false => RT1PSIE_A::DISABLE, true => RT1PSIE_A::ENABLE, } } #[doc = "Checks if the value of the field is `DISABLE`"] #[inline(always)] pub fn is_disable(&self) -> bool { *self == RT1PSIE_A::DISABLE } #[doc = "Checks if the value of the field is `ENABLE`"] #[inline(always)] pub fn is_enable(&self) -> bool { *self == RT1PSIE_A::ENABLE } } #[doc = "Write proxy for field `RT1PSIE`"] pub struct RT1PSIE_W<'a> { w: &'a mut W, } impl<'a> RT1PSIE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RT1PSIE_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Interrupt not enabled"] #[inline(always)] pub fn disable(self) -> &'a mut W { self.variant(RT1PSIE_A::DISABLE) } #[doc = "Interrupt enabled (LPM3/LPM3.5 wake-up enabled)"] #[inline(always)] pub fn enable(self) -> &'a mut W { self.variant(RT1PSIE_A::ENABLE) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & 
!(0x01 << 1)) | (((value as u16) & 0x01) << 1); self.w } } #[doc = "4:2\\] Prescale timer 1 interrupt interval\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum RT1IP_A { #[doc = "0: Divide by 2"] _2 = 0, #[doc = "1: Divide by 4"] _4 = 1, #[doc = "2: Divide by 8"] _8 = 2, #[doc = "3: Divide by 16"] _16 = 3, #[doc = "4: Divide by 32"] _32 = 4, #[doc = "5: Divide by 64"] _64 = 5, #[doc = "6: Divide by 128"] _128 = 6, #[doc = "7: Divide by 256"] _256 = 7, } impl From<RT1IP_A> for u8 { #[inline(always)] fn from(variant: RT1IP_A) -> Self { variant as _ } } #[doc = "Reader of field `RT1IP`"] pub type RT1IP_R = crate::R<u8, RT1IP_A>; impl RT1IP_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RT1IP_A { match self.bits { 0 => RT1IP_A::_2, 1 => RT1IP_A::_4, 2 => RT1IP_A::_8, 3 => RT1IP_A::_16, 4 => RT1IP_A::_32, 5 => RT1IP_A::_64, 6 => RT1IP_A::_128, 7 => RT1IP_A::_256, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_2`"] #[inline(always)] pub fn is_2(&self) -> bool { *self == RT1IP_A::_2 } #[doc = "Checks if the value of the field is `_4`"] #[inline(always)] pub fn is_4(&self) -> bool { *self == RT1IP_A::_4 } #[doc = "Checks if the value of the field is `_8`"] #[inline(always)] pub fn is_8(&self) -> bool { *self == RT1IP_A::_8 } #[doc = "Checks if the value of the field is `_16`"] #[inline(always)] pub fn is_16(&self) -> bool { *self == RT1IP_A::_16 } #[doc = "Checks if the value of the field is `_32`"] #[inline(always)] pub fn is_32(&self) -> bool { *self == RT1IP_A::_32 } #[doc = "Checks if the value of the field is `_64`"] #[inline(always)] pub fn is_64(&self) -> bool { *self == RT1IP_A::_64 } #[doc = "Checks if the value of the field is `_128`"] #[inline(always)] pub fn is_128(&self) -> bool { *self == RT1IP_A::_128 } #[doc = "Checks if the value of the field is `_256`"] #[inline(always)] pub fn is_256(&self) -> bool { *self == RT1IP_A::_256 } } #[doc = "Write proxy for 
field `RT1IP`"] pub struct RT1IP_W<'a> { w: &'a mut W, } impl<'a> RT1IP_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RT1IP_A) -> &'a mut W { { self.bits(variant.into()) } } #[doc = "Divide by 2"] #[inline(always)] pub fn _2(self) -> &'a mut W { self.variant(RT1IP_A::_2) } #[doc = "Divide by 4"] #[inline(always)] pub fn _4(self) -> &'a mut W { self.variant(RT1IP_A::_4) } #[doc = "Divide by 8"] #[inline(always)] pub fn _8(self) -> &'a mut W { self.variant(RT1IP_A::_8) } #[doc = "Divide by 16"] #[inline(always)] pub fn _16(self) -> &'a mut W { self.variant(RT1IP_A::_16) } #[doc = "Divide by 32"] #[inline(always)] pub fn _32(self) -> &'a mut W { self.variant(RT1IP_A::_32) } #[doc = "Divide by 64"] #[inline(always)] pub fn _64(self) -> &'a mut W { self.variant(RT1IP_A::_64) } #[doc = "Divide by 128"] #[inline(always)] pub fn _128(self) -> &'a mut W { self.variant(RT1IP_A::_128) } #[doc = "Divide by 256"] #[inline(always)] pub fn _256(self) -> &'a mut W { self.variant(RT1IP_A::_256) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 2)) | (((value as u16) & 0x07) << 2); self.w } } #[doc = "8:8\\] Prescale timer 1 hold\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum RT1PSHOLD_A { #[doc = "0: RT1PS is operational"] RT1PSHOLD_0 = 0, #[doc = "1: RT1PS is held"] RT1PSHOLD_1 = 1, } impl From<RT1PSHOLD_A> for bool { #[inline(always)] fn from(variant: RT1PSHOLD_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `RT1PSHOLD`"] pub type RT1PSHOLD_R = crate::R<bool, RT1PSHOLD_A>; impl RT1PSHOLD_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RT1PSHOLD_A { match self.bits { false => RT1PSHOLD_A::RT1PSHOLD_0, true => RT1PSHOLD_A::RT1PSHOLD_1, } } #[doc = "Checks if the value of the field is `RT1PSHOLD_0`"] #[inline(always)] pub fn is_rt1pshold_0(&self) -> bool { 
*self == RT1PSHOLD_A::RT1PSHOLD_0 } #[doc = "Checks if the value of the field is `RT1PSHOLD_1`"] #[inline(always)] pub fn is_rt1pshold_1(&self) -> bool { *self == RT1PSHOLD_A::RT1PSHOLD_1 } } #[doc = "Write proxy for field `RT1PSHOLD`"] pub struct RT1PSHOLD_W<'a> { w: &'a mut W, } impl<'a> RT1PSHOLD_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RT1PSHOLD_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "RT1PS is operational"] #[inline(always)] pub fn rt1pshold_0(self) -> &'a mut W { self.variant(RT1PSHOLD_A::RT1PSHOLD_0) } #[doc = "RT1PS is held"] #[inline(always)] pub fn rt1pshold_1(self) -> &'a mut W { self.variant(RT1PSHOLD_A::RT1PSHOLD_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u16) & 0x01) << 8); self.w } } #[doc = "13:11\\] Prescale timer 1 clock divide\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum RT1PSDIV_A { #[doc = "0: Divide by 2"] _2 = 0, #[doc = "1: Divide by 4"] _4 = 1, #[doc = "2: Divide by 8"] _8 = 2, #[doc = "3: Divide by 16"] _16 = 3, #[doc = "4: Divide by 32"] _32 = 4, #[doc = "5: Divide by 64"] _64 = 5, #[doc = "6: Divide by 128"] _128 = 6, #[doc = "7: Divide by 256"] _256 = 7, } impl From<RT1PSDIV_A> for u8 { #[inline(always)] fn from(variant: RT1PSDIV_A) -> Self { variant as _ } } #[doc = "Reader of field `RT1PSDIV`"] pub type RT1PSDIV_R = crate::R<u8, RT1PSDIV_A>; impl RT1PSDIV_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RT1PSDIV_A { match self.bits { 0 => RT1PSDIV_A::_2, 1 => RT1PSDIV_A::_4, 2 => RT1PSDIV_A::_8, 3 => RT1PSDIV_A::_16, 4 => RT1PSDIV_A::_32, 5 => 
RT1PSDIV_A::_64, 6 => RT1PSDIV_A::_128, 7 => RT1PSDIV_A::_256, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_2`"] #[inline(always)] pub fn is_2(&self) -> bool { *self == RT1PSDIV_A::_2 } #[doc = "Checks if the value of the field is `_4`"] #[inline(always)] pub fn is_4(&self) -> bool { *self == RT1PSDIV_A::_4 } #[doc = "Checks if the value of the field is `_8`"] #[inline(always)] pub fn is_8(&self) -> bool { *self == RT1PSDIV_A::_8 } #[doc = "Checks if the value of the field is `_16`"] #[inline(always)] pub fn is_16(&self) -> bool { *self == RT1PSDIV_A::_16 } #[doc = "Checks if the value of the field is `_32`"] #[inline(always)] pub fn is_32(&self) -> bool { *self == RT1PSDIV_A::_32 } #[doc = "Checks if the value of the field is `_64`"] #[inline(always)] pub fn is_64(&self) -> bool { *self == RT1PSDIV_A::_64 } #[doc = "Checks if the value of the field is `_128`"] #[inline(always)] pub fn is_128(&self) -> bool { *self == RT1PSDIV_A::_128 } #[doc = "Checks if the value of the field is `_256`"] #[inline(always)] pub fn is_256(&self) -> bool { *self == RT1PSDIV_A::_256 } } #[doc = "Write proxy for field `RT1PSDIV`"] pub struct RT1PSDIV_W<'a> { w: &'a mut W, } impl<'a> RT1PSDIV_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RT1PSDIV_A) -> &'a mut W { { self.bits(variant.into()) } } #[doc = "Divide by 2"] #[inline(always)] pub fn _2(self) -> &'a mut W { self.variant(RT1PSDIV_A::_2) } #[doc = "Divide by 4"] #[inline(always)] pub fn _4(self) -> &'a mut W { self.variant(RT1PSDIV_A::_4) } #[doc = "Divide by 8"] #[inline(always)] pub fn _8(self) -> &'a mut W { self.variant(RT1PSDIV_A::_8) } #[doc = "Divide by 16"] #[inline(always)] pub fn _16(self) -> &'a mut W { self.variant(RT1PSDIV_A::_16) } #[doc = "Divide by 32"] #[inline(always)] pub fn _32(self) -> &'a mut W { self.variant(RT1PSDIV_A::_32) } #[doc = "Divide by 64"] #[inline(always)] pub fn _64(self) -> &'a mut W { self.variant(RT1PSDIV_A::_64) 
} #[doc = "Divide by 128"] #[inline(always)] pub fn _128(self) -> &'a mut W { self.variant(RT1PSDIV_A::_128) } #[doc = "Divide by 256"] #[inline(always)] pub fn _256(self) -> &'a mut W { self.variant(RT1PSDIV_A::_256) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 11)) | (((value as u16) & 0x07) << 11); self.w } } #[doc = "15:14\\] Prescale timer 1 clock source select\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum RT1SSEL_A { #[doc = "0: 32-kHz crystal oscillator clock"] RT1SSEL_0 = 0, #[doc = "1: 32-kHz crystal oscillator clock"] RT1SSEL_1 = 1, #[doc = "2: Output from RT0PS"] RT0PS = 2, #[doc = "3: Output from RT0PS"] RT0PS = 3, } impl From<RT1SSEL_A> for u8 { #[inline(always)] fn from(variant: RT1SSEL_A) -> Self { variant as _ } } #[doc = "Reader of field `RT1SSEL`"] pub type RT1SSEL_R = crate::R<u8, RT1SSEL_A>; impl RT1SSEL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> RT1SSEL_A { match self.bits { 0 => RT1SSEL_A::RT1SSEL_0, 1 => RT1SSEL_A::RT1SSEL_1, 2 => RT1SSEL_A::RT0PS, 3 => RT1SSEL_A::RT0PS, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `RT1SSEL_0`"] #[inline(always)] pub fn is_rt1ssel_0(&self) -> bool { *self == RT1SSEL_A::RT1SSEL_0 } #[doc = "Checks if the value of the field is `RT1SSEL_1`"] #[inline(always)] pub fn is_rt1ssel_1(&self) -> bool { *self == RT1SSEL_A::RT1SSEL_1 } #[doc = "Checks if the value of the field is `RT0PS`"] #[inline(always)] pub fn is_rt0ps(&self) -> bool { *self == RT1SSEL_A::RT0PS } #[doc = "Checks if the value of the field is `RT0PS`"] #[inline(always)] pub fn is_rt0ps(&self) -> bool { *self == RT1SSEL_A::RT0PS } } #[doc = "Write proxy for field `RT1SSEL`"] pub struct RT1SSEL_W<'a> { w: &'a mut W, } impl<'a> RT1SSEL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: RT1SSEL_A) -> &'a 
mut W { { self.bits(variant.into()) } } #[doc = "32-kHz crystal oscillator clock"] #[inline(always)] pub fn rt1ssel_0(self) -> &'a mut W { self.variant(RT1SSEL_A::RT1SSEL_0) } #[doc = "32-kHz crystal oscillator clock"] #[inline(always)] pub fn rt1ssel_1(self) -> &'a mut W { self.variant(RT1SSEL_A::RT1SSEL_1) } #[doc = "Output from RT0PS"] #[inline(always)] pub fn rt0ps(self) -> &'a mut W { self.variant(RT1SSEL_A::RT0PS) } #[doc = "Output from RT0PS"] #[inline(always)] pub fn rt0ps(self) -> &'a mut W { self.variant(RT1SSEL_A::RT0PS) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 14)) | (((value as u16) & 0x03) << 14); self.w } } impl R { #[doc = "Bit 0 - 0:0\\] Prescale timer 1 interrupt flag"] #[inline(always)] pub fn rt1psifg(&self) -> RT1PSIFG_R { RT1PSIFG_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - 1:1\\] Prescale timer 1 interrupt enable"] #[inline(always)] pub fn rt1psie(&self) -> RT1PSIE_R { RT1PSIE_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bits 2:4 - 4:2\\] Prescale timer 1 interrupt interval"] #[inline(always)] pub fn rt1ip(&self) -> RT1IP_R { RT1IP_R::new(((self.bits >> 2) & 0x07) as u8) } #[doc = "Bit 8 - 8:8\\] Prescale timer 1 hold"] #[inline(always)] pub fn rt1pshold(&self) -> RT1PSHOLD_R { RT1PSHOLD_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bits 11:13 - 13:11\\] Prescale timer 1 clock divide"] #[inline(always)] pub fn rt1psdiv(&self) -> RT1PSDIV_R { RT1PSDIV_R::new(((self.bits >> 11) & 0x07) as u8) } #[doc = "Bits 14:15 - 15:14\\] Prescale timer 1 clock source select"] #[inline(always)] pub fn rt1ssel(&self) -> RT1SSEL_R { RT1SSEL_R::new(((self.bits >> 14) & 0x03) as u8) } } impl W { #[doc = "Bit 0 - 0:0\\] Prescale timer 1 interrupt flag"] #[inline(always)] pub fn rt1psifg(&mut self) -> RT1PSIFG_W { RT1PSIFG_W { w: self } } #[doc = "Bit 1 - 1:1\\] Prescale timer 1 interrupt enable"] #[inline(always)] pub fn rt1psie(&mut self) -> 
RT1PSIE_W { RT1PSIE_W { w: self } } #[doc = "Bits 2:4 - 4:2\\] Prescale timer 1 interrupt interval"] #[inline(always)] pub fn rt1ip(&mut self) -> RT1IP_W { RT1IP_W { w: self } } #[doc = "Bit 8 - 8:8\\] Prescale timer 1 hold"] #[inline(always)] pub fn rt1pshold(&mut self) -> RT1PSHOLD_W { RT1PSHOLD_W { w: self } } #[doc = "Bits 11:13 - 13:11\\] Prescale timer 1 clock divide"] #[inline(always)] pub fn rt1psdiv(&mut self) -> RT1PSDIV_W { RT1PSDIV_W { w: self } } #[doc = "Bits 14:15 - 15:14\\] Prescale timer 1 clock source select"] #[inline(always)] pub fn rt1ssel(&mut self) -> RT1SSEL_W { RT1SSEL_W { w: self } } }
27.933144
86
0.538473
bf150a277e9e540d2880f7aa05d3464d1677fba0
13,720
// Copyright 2016 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. use std::process; use std::sync::mpsc::Receiver; use std::sync::{mpsc, Arc}; use std::thread; use std::time::Duration; use mio::EventLoop; use super::Result; use super::transport::RaftStoreRouter; use import::SSTImporter; use kvproto::metapb; use kvproto::raft_serverpb::StoreIdent; use pd::{Error as PdError, PdClient, PdTask, INVALID_ID}; use protobuf::RepeatedField; use raftstore::coprocessor::dispatcher::CoprocessorHost; use raftstore::store::{self, keys, Config as StoreConfig, Engines, Msg, Peekable, SignificantMsg, SnapManager, Store, StoreChannel, Transport}; use server::Config as ServerConfig; use server::readpool::ReadPool; use storage::{self, Config as StorageConfig, RaftKv, Storage}; use util::transport::SendCh; use util::worker::FutureWorker; const MAX_CHECK_CLUSTER_BOOTSTRAPPED_RETRY_COUNT: u64 = 60; const CHECK_CLUSTER_BOOTSTRAPPED_RETRY_SECONDS: u64 = 3; pub fn create_raft_storage<S>( router: S, cfg: &StorageConfig, read_pool: ReadPool<storage::ReadPoolContext>, ) -> Result<Storage> where S: RaftStoreRouter + 'static, { let engine = Box::new(RaftKv::new(router)); let store = Storage::from_engine(engine, cfg, read_pool)?; Ok(store) } fn check_region_epoch(region: &metapb::Region, other: &metapb::Region) -> Result<()> { let epoch = region.get_region_epoch(); let other_epoch = other.get_region_epoch(); if epoch.get_conf_ver() != other_epoch.get_conf_ver() { return Err(box_err!( "region conf_ver inconsist: {} with {}", epoch.get_conf_ver(), 
other_epoch.get_conf_ver() )); } if epoch.get_version() != other_epoch.get_version() { return Err(box_err!( "region version inconsist: {} with {}", epoch.get_version(), other_epoch.get_version() )); } Ok(()) } // Node is a wrapper for raft store. // TODO: we will rename another better name like RaftStore later. pub struct Node<C: PdClient + 'static> { cluster_id: u64, store: metapb::Store, store_cfg: StoreConfig, store_handle: Option<thread::JoinHandle<()>>, ch: SendCh<Msg>, pd_client: Arc<C>, } impl<C> Node<C> where C: PdClient, { pub fn new<T>( event_loop: &mut EventLoop<Store<T, C>>, cfg: &ServerConfig, store_cfg: &StoreConfig, pd_client: Arc<C>, ) -> Node<C> where T: Transport + 'static, { let mut store = metapb::Store::new(); store.set_id(INVALID_ID); if cfg.advertise_addr.is_empty() { store.set_address(cfg.addr.clone()); } else { store.set_address(cfg.advertise_addr.clone()) } let mut labels = Vec::new(); for (k, v) in &cfg.labels { let mut label = metapb::StoreLabel::new(); label.set_key(k.to_owned()); label.set_value(v.to_owned()); labels.push(label); } store.set_labels(RepeatedField::from_vec(labels)); let ch = SendCh::new(event_loop.channel(), "raftstore"); Node { cluster_id: cfg.cluster_id, store, store_cfg: store_cfg.clone(), store_handle: None, pd_client, ch, } } #[allow(too_many_arguments)] pub fn start<T>( &mut self, event_loop: EventLoop<Store<T, C>>, engines: Engines, trans: T, snap_mgr: SnapManager, significant_msg_receiver: Receiver<SignificantMsg>, pd_worker: FutureWorker<PdTask>, coprocessor_host: CoprocessorHost, importer: Arc<SSTImporter>, ) -> Result<()> where T: Transport + 'static, { let bootstrapped = self.check_cluster_bootstrapped()?; let mut store_id = self.check_store(&engines)?; if store_id == INVALID_ID { store_id = self.bootstrap_store(&engines)?; } else if !bootstrapped { // We have saved data before, and the cluster must be bootstrapped. 
return Err(box_err!( "store {} is not empty, but cluster {} is not bootstrapped, \ maybe you connected a wrong PD or need to remove the TiKV data \ and start again", store_id, self.cluster_id )); } self.store.set_id(store_id); self.check_prepare_bootstrap_cluster(&engines)?; if !bootstrapped { // cluster is not bootstrapped, and we choose first store to bootstrap // prepare bootstrap. let region = self.prepare_bootstrap_cluster(&engines, store_id)?; self.bootstrap_cluster(&engines, region)?; } // inform pd. self.pd_client.put_store(self.store.clone())?; self.start_store( event_loop, store_id, engines, trans, snap_mgr, significant_msg_receiver, pd_worker, coprocessor_host, importer, )?; Ok(()) } pub fn id(&self) -> u64 { self.store.get_id() } pub fn get_sendch(&self) -> SendCh<Msg> { self.ch.clone() } // check store, return store id for the engine. // If the store is not bootstrapped, use INVALID_ID. fn check_store(&self, engines: &Engines) -> Result<u64> { let res = engines .kv_engine .get_msg::<StoreIdent>(keys::STORE_IDENT_KEY)?; if res.is_none() { return Ok(INVALID_ID); } let ident = res.unwrap(); if ident.get_cluster_id() != self.cluster_id { error!( "cluster ID mismatch: local_id {} remote_id {}. 
\ you are trying to connect to another cluster, please reconnect to the correct PD", ident.get_cluster_id(), self.cluster_id ); process::exit(1); } let store_id = ident.get_store_id(); if store_id == INVALID_ID { return Err(box_err!("invalid store ident {:?}", ident)); } Ok(store_id) } fn alloc_id(&self) -> Result<u64> { let id = self.pd_client.alloc_id()?; Ok(id) } fn bootstrap_store(&self, engines: &Engines) -> Result<u64> { let store_id = self.alloc_id()?; info!("alloc store id {} ", store_id); store::bootstrap_store(engines, self.cluster_id, store_id)?; Ok(store_id) } pub fn prepare_bootstrap_cluster( &self, engines: &Engines, store_id: u64, ) -> Result<metapb::Region> { let region_id = self.alloc_id()?; info!( "alloc first region id {} for cluster {}, store {}", region_id, self.cluster_id, store_id ); let peer_id = self.alloc_id()?; info!( "alloc first peer id {} for first region {}", peer_id, region_id ); let region = store::prepare_bootstrap(engines, store_id, region_id, peer_id)?; Ok(region) } fn check_prepare_bootstrap_cluster(&self, engines: &Engines) -> Result<()> { let res = engines .kv_engine .get_msg::<metapb::Region>(keys::PREPARE_BOOTSTRAP_KEY)?; if res.is_none() { return Ok(()); } let first_region = res.unwrap(); for _ in 0..MAX_CHECK_CLUSTER_BOOTSTRAPPED_RETRY_COUNT { match self.pd_client.get_region(b"") { Ok(region) => { if region.get_id() == first_region.get_id() { check_region_epoch(&region, &first_region)?; store::clear_prepare_bootstrap_state(engines)?; } else { store::clear_prepare_bootstrap(engines, first_region.get_id())?; } return Ok(()); } Err(e) => { warn!("check cluster prepare bootstrapped failed: {:?}", e); } } thread::sleep(Duration::from_secs( CHECK_CLUSTER_BOOTSTRAPPED_RETRY_SECONDS, )); } Err(box_err!("check cluster prepare bootstrapped failed")) } fn bootstrap_cluster(&mut self, engines: &Engines, region: metapb::Region) -> Result<()> { let region_id = region.get_id(); match self.pd_client.bootstrap_cluster(self.store.clone(), 
region) { Err(PdError::ClusterBootstrapped(_)) => { error!("cluster {} is already bootstrapped", self.cluster_id); store::clear_prepare_bootstrap(engines, region_id)?; Ok(()) } // TODO: should we clean region for other errors too? Err(e) => panic!("bootstrap cluster {} err: {:?}", self.cluster_id, e), Ok(_) => { store::clear_prepare_bootstrap_state(engines)?; info!("bootstrap cluster {} ok", self.cluster_id); Ok(()) } } } fn check_cluster_bootstrapped(&self) -> Result<bool> { for _ in 0..MAX_CHECK_CLUSTER_BOOTSTRAPPED_RETRY_COUNT { match self.pd_client.is_cluster_bootstrapped() { Ok(b) => return Ok(b), Err(e) => { warn!("check cluster bootstrapped failed: {:?}", e); } } thread::sleep(Duration::from_secs( CHECK_CLUSTER_BOOTSTRAPPED_RETRY_SECONDS, )); } Err(box_err!("check cluster bootstrapped failed")) } #[allow(too_many_arguments)] fn start_store<T>( &mut self, mut event_loop: EventLoop<Store<T, C>>, store_id: u64, engines: Engines, trans: T, snap_mgr: SnapManager, significant_msg_receiver: Receiver<SignificantMsg>, pd_worker: FutureWorker<PdTask>, coprocessor_host: CoprocessorHost, importer: Arc<SSTImporter>, ) -> Result<()> where T: Transport + 'static, { info!("start raft store {} thread", store_id); if self.store_handle.is_some() { return Err(box_err!("{} is already started", store_id)); } let cfg = self.store_cfg.clone(); let pd_client = Arc::clone(&self.pd_client); let store = self.store.clone(); let sender = event_loop.channel(); let (tx, rx) = mpsc::channel(); let builder = thread::Builder::new().name(thd_name!(format!("raftstore-{}", store_id))); let h = builder.spawn(move || { let ch = StoreChannel { sender, significant_msg_receiver, }; let mut store = match Store::new( ch, store, cfg, engines, trans, pd_client, snap_mgr, pd_worker, coprocessor_host, importer, ) { Err(e) => panic!("construct store {} err {:?}", store_id, e), Ok(s) => s, }; tx.send(0).unwrap(); if let Err(e) = store.run(&mut event_loop) { error!("store {} run err {:?}", store_id, e); }; 
})?; // wait for store to be initialized rx.recv().unwrap(); self.store_handle = Some(h); Ok(()) } fn stop_store(&mut self, store_id: u64) -> Result<()> { info!("stop raft store {} thread", store_id); let h = match self.store_handle.take() { None => return Ok(()), Some(h) => h, }; box_try!(self.ch.send(Msg::Quit)); if let Err(e) = h.join() { return Err(box_err!("join store {} thread err {:?}", store_id, e)); } Ok(()) } pub fn stop(&mut self) -> Result<()> { let store_id = self.store.get_id(); self.stop_store(store_id) } } #[cfg(test)] mod tests { use super::check_region_epoch; use kvproto::metapb; use raftstore::store::keys; #[test] fn test_check_region_epoch() { let mut r1 = metapb::Region::new(); r1.set_id(1); r1.set_start_key(keys::EMPTY_KEY.to_vec()); r1.set_end_key(keys::EMPTY_KEY.to_vec()); r1.mut_region_epoch().set_version(1); r1.mut_region_epoch().set_conf_ver(1); let mut r2 = metapb::Region::new(); r2.set_id(1); r2.set_start_key(keys::EMPTY_KEY.to_vec()); r2.set_end_key(keys::EMPTY_KEY.to_vec()); r2.mut_region_epoch().set_version(2); r2.mut_region_epoch().set_conf_ver(1); let mut r3 = metapb::Region::new(); r3.set_id(1); r3.set_start_key(keys::EMPTY_KEY.to_vec()); r3.set_end_key(keys::EMPTY_KEY.to_vec()); r3.mut_region_epoch().set_version(1); r3.mut_region_epoch().set_conf_ver(2); assert!(check_region_epoch(&r1, &r2).is_err()); assert!(check_region_epoch(&r1, &r3).is_err()); } }
31.685912
99
0.556122
483b761a2315dce648a370a1349b0ecadf6066d8
719
use std::io; fn main() { let mut cl = String::new(); println!("Digite o valor do Ciclo-CL:"); io::stdin() .read_line(&mut cl) .expect("Erro ao ler Ciclo-CL"); let cl: f64 = cl .trim() .parse() .expect("ciclo cl dado não é um número inteiro"); let mut mhz = String::new(); println!("Agora, digite o valor do MHz-RAM:"); io::stdin() .read_line(&mut mhz) .expect("Erro ao ler MHz-RAM"); let mhz: f64 = mhz .trim() .parse() .expect("mhz da ram dado não é um número inteiro"); let clock_speed = (2.0 * cl * 1E9) / (mhz * 1E6); println!("Clock Speed da RAM: {:.1} ns", clock_speed); }
22.46875
59
0.515994
fc1a55a5a75a5e64bef8cf9e4e9528783ed1dc31
15,428
//! (deprecated) Test fixtures for interactive distributed key generation. use crate::api::dkg_errors::{DkgVerifyReshareDealingError, DkgVerifyResponseError}; use crate::api::{combine_signatures, keygen, sign_message, verify_combined_signature}; use crate::dkg::secp256k1 as dkg_lib; use crate::types::public_coefficients::conversions::pub_key_bytes_from_pub_coeff_bytes; use crate::types::{CombinedSignatureBytes, SecretKeyBytes as ThresholdSecretKeyBytes}; use dkg_lib::types::{ CLibDealingBytes, CLibResponseBytes, CLibTranscriptBytes, CLibVerifiedResponseBytes, EphemeralKeySetBytes, EphemeralPopBytes, EphemeralPublicKeyBytes, EphemeralSecretKeyBytes, }; use dkg_lib::{ compute_private_key, create_ephemeral, create_resharing_dealing, create_resharing_transcript, create_response, verify_resharing_dealing, verify_response, }; use ic_crypto_internal_types::sign::threshold_sig::ni_dkg::ni_dkg_groth20_bls12_381::PublicCoefficientsBytes; use ic_crypto_test_utils::dkg::random_dkg_id; use ic_types::NumberOfNodes; use ic_types::{IDkgId, NodeIndex, Randomness}; use rand::seq::IteratorRandom; use rand::Rng; use rand_chacha::ChaChaRng; use std::collections::BTreeMap; pub fn ephemeral_key_set_from_tuple( tuple: ( EphemeralSecretKeyBytes, EphemeralPublicKeyBytes, EphemeralPopBytes, ), ) -> EphemeralKeySetBytes { EphemeralKeySetBytes { secret_key_bytes: tuple.0, public_key_bytes: tuple.1, pop_bytes: tuple.2, } } pub fn ephemeral_key_set_public_key_with_pop( key_set: &EphemeralKeySetBytes, ) -> (EphemeralPublicKeyBytes, EphemeralPopBytes) { (key_set.public_key_bytes, key_set.pop_bytes) } #[derive(Clone)] pub struct StateWithThresholdKey { pub threshold: NumberOfNodes, pub num_signatories: NumberOfNodes, pub public_coefficients: PublicCoefficientsBytes, pub secret_keys: Vec<Option<ThresholdSecretKeyBytes>>, } impl StateWithThresholdKey { pub fn random(rng: &mut ChaChaRng) -> Self { let threshold = NumberOfNodes::from(rng.gen_range(1_u32, 10_u32)); let num_signatories = 
NumberOfNodes::from(rng.gen_range(threshold.get(), 10_u32)); let eligibility = vec![true; num_signatories.get() as usize]; let (public_coefficients, secret_keys): ( PublicCoefficientsBytes, Vec<Option<ThresholdSecretKeyBytes>>, ) = { let seed = Randomness::from(rng.gen::<[u8; 32]>()); keygen(seed, threshold, &eligibility).expect("Initial keygen failed") }; StateWithThresholdKey { threshold, num_signatories, public_coefficients, secret_keys, } } pub fn from_transcript(state: StateWithTranscript) -> Self { let secret_keys: Vec<Option<ThresholdSecretKeyBytes>> = state .receiver_ephemeral_keys .iter() .map(|key_maybe| { key_maybe .map(|receiver_ephemeral_secret_key_bytes| { compute_private_key( receiver_ephemeral_secret_key_bytes.secret_key_bytes, &state.transcript, state.dkg_id, ) .expect("Failed to compute threshold key from transcript") }) .flatten() }) .collect(); StateWithThresholdKey { threshold: state.new_threshold, num_signatories: state.num_receivers, public_coefficients: state.transcript.public_coefficients, secret_keys, } } pub fn sign(&self, message: &[u8]) -> CombinedSignatureBytes { let individual_signatures: Vec<_> = self .secret_keys .iter() .map(|key_maybe| { key_maybe .as_ref() .map(|key| sign_message(message, key).expect("Could not sign")) }) .collect(); combine_signatures(&individual_signatures, self.threshold) .expect("Could not combine signatures") } pub fn verify(&self, message: &[u8], signature: CombinedSignatureBytes) { verify_combined_signature( message, signature, pub_key_bytes_from_pub_coeff_bytes(&self.public_coefficients), ) .expect("Verification failed"); } } #[derive(Clone)] pub struct StateWithEphemeralKeys { pub initial_state: StateWithThresholdKey, pub dkg_id: IDkgId, pub num_dealers: NumberOfNodes, pub num_receivers: NumberOfNodes, pub dealer_ephemeral_keys: Vec<Option<EphemeralKeySetBytes>>, pub receiver_ephemeral_keys: Vec<Option<EphemeralKeySetBytes>>, } impl StateWithEphemeralKeys { pub fn random(mut rng: &mut ChaChaRng, 
initial_state: StateWithThresholdKey) -> Self { let dkg_id = random_dkg_id(&mut rng); let num_dealers = initial_state.num_signatories; let num_receivers = NumberOfNodes::from(rng.gen_range(1, 10_u32)); let dealer_ephemeral_keys = (0..num_dealers.get()) .map(|dealer_index| { Some(ephemeral_key_set_from_tuple(create_ephemeral( &mut rng, dkg_id, &dealer_index.to_be_bytes()[..], ))) }) .collect(); let receiver_ephemeral_keys = (0..num_receivers.get()) .map(|dealer_index| { Some(ephemeral_key_set_from_tuple(create_ephemeral( &mut rng, dkg_id, &dealer_index.to_be_bytes()[..], ))) }) .collect(); StateWithEphemeralKeys { initial_state, dkg_id, num_dealers, num_receivers, dealer_ephemeral_keys, receiver_ephemeral_keys, } } } #[derive(Clone)] pub struct StateWithResharedDealings { pub initial_state: StateWithThresholdKey, pub dkg_id: IDkgId, pub num_dealers: NumberOfNodes, pub num_receivers: NumberOfNodes, pub dealer_ephemeral_keys: Vec<Option<EphemeralKeySetBytes>>, pub receiver_ephemeral_keys: Vec<Option<EphemeralKeySetBytes>>, pub dealer_indices: Vec<NodeIndex>, pub new_threshold: NumberOfNodes, pub dealings: BTreeMap<EphemeralPublicKeyBytes, CLibDealingBytes>, } impl StateWithResharedDealings { pub fn random(mut rng: &mut ChaChaRng, state: StateWithEphemeralKeys) -> Self { let StateWithEphemeralKeys { initial_state, dkg_id, num_dealers, num_receivers, dealer_ephemeral_keys, receiver_ephemeral_keys, } = state; // Some dealers may drop out; we need at least threshold dealers. assert!( initial_state.threshold.get() <= num_dealers.get(), "Insufficient dealers to reshare threshold key." 
); let num_dealers = NumberOfNodes::from( rng.gen_range(initial_state.threshold.get(), num_dealers.get() + 1), ); let new_threshold = { let new_threshold = 5; let new_threshold = std::cmp::min(new_threshold, (num_receivers.get() + 1) / 2); let new_threshold = std::cmp::min(new_threshold, num_dealers.get()); NumberOfNodes::from(rng.gen_range(1, new_threshold + 1)) }; let dealer_indices: Vec<NodeIndex> = (0..initial_state.num_signatories.get()) .choose_multiple(&mut rng, num_dealers.get() as usize); // Assume that all receivers have ephemeral keys; we can replace Some with None // later if needed for testing. let receiver_public_keys_with_pop: Vec< Option<(EphemeralPublicKeyBytes, EphemeralPopBytes)>, > = receiver_ephemeral_keys .iter() .map(|keys_maybe| { keys_maybe .as_ref() .map(ephemeral_key_set_public_key_with_pop) }) .collect(); let dealings: BTreeMap<EphemeralPublicKeyBytes, CLibDealingBytes> = dealer_indices .iter() .filter_map(|dealer_index| { dealer_ephemeral_keys[*dealer_index as usize].map(|dealer_ephemeral_key_set| { let seed = Randomness::from(rng.gen::<[u8; 32]>()); let dkg_id = dkg_id; let reshared_secret_key = initial_state.secret_keys[*dealer_index as usize] .expect( "Could not get dealer secret key, even though we created all secret keys.", ); let dealing = create_resharing_dealing( seed, dealer_ephemeral_key_set.secret_key_bytes, dkg_id, new_threshold, &receiver_public_keys_with_pop, reshared_secret_key, ) .expect("Could not create resharing dealing"); (dealer_ephemeral_key_set.public_key_bytes, dealing) }) }) .collect(); StateWithResharedDealings { initial_state, dkg_id, num_dealers, num_receivers, dealer_ephemeral_keys, receiver_ephemeral_keys, dealer_indices, new_threshold, dealings, } } pub fn verify_dealings(&self) -> Result<(), DkgVerifyReshareDealingError> { let receiver_public_keys: Vec<Option<(EphemeralPublicKeyBytes, EphemeralPopBytes)>> = self .receiver_ephemeral_keys .iter() .map(|keys_maybe| { keys_maybe .as_ref() 
.map(ephemeral_key_set_public_key_with_pop) }) .collect(); for dealer_index in self.dealer_indices.iter() { if let Some(dealer_key_set) = self.dealer_ephemeral_keys[*dealer_index as usize] { if let Some(dealing) = self.dealings.get(&dealer_key_set.public_key_bytes) { verify_resharing_dealing( self.new_threshold, &receiver_public_keys, dealing.clone(), *dealer_index, self.initial_state.public_coefficients.clone(), )?; } } } Ok(()) } } #[derive(Clone)] pub struct StateWithResponses { pub initial_state: StateWithThresholdKey, pub dkg_id: IDkgId, pub num_dealers: NumberOfNodes, pub num_receivers: NumberOfNodes, pub dealer_ephemeral_keys: Vec<Option<EphemeralKeySetBytes>>, pub receiver_ephemeral_keys: Vec<Option<EphemeralKeySetBytes>>, pub dealer_indices: Vec<NodeIndex>, pub new_threshold: NumberOfNodes, pub dealings: BTreeMap<EphemeralPublicKeyBytes, CLibDealingBytes>, pub responses: Vec<Option<CLibResponseBytes>>, } impl StateWithResponses { pub fn from_resharing_dealings(rng: &mut ChaChaRng, state: StateWithResharedDealings) -> Self { let StateWithResharedDealings { initial_state, dkg_id, dealer_ephemeral_keys, receiver_ephemeral_keys, dealer_indices, new_threshold, dealings, num_dealers, num_receivers, } = state; let responses: Vec<_> = (0..) .zip(&receiver_ephemeral_keys) .map(|(receiver_index, key_maybe)| { key_maybe.map(|key_set| { let seed = Randomness::from(rng.gen::<[u8; 32]>()); create_response( seed, &key_set.secret_key_bytes, dkg_id, &dealings, receiver_index, ) .expect("failed to create a response") }) }) .collect(); StateWithResponses { initial_state, dkg_id, num_dealers, num_receivers, dealer_ephemeral_keys, receiver_ephemeral_keys, dealer_indices, new_threshold, dealings, responses, } } pub fn verify_responses(&self) -> Result<(), DkgVerifyResponseError> { for tuple in (0..) 
.zip(&self.receiver_ephemeral_keys) .zip(&self.responses) { if let ((receiver_index, Some(key)), Some(response)) = tuple { verify_response( self.dkg_id, &self.dealings, receiver_index, key.public_key_bytes, &response, )?; } } Ok(()) } } #[derive(Clone)] pub struct StateWithTranscript { pub initial_state: StateWithThresholdKey, pub dkg_id: IDkgId, pub num_dealers: NumberOfNodes, pub num_receivers: NumberOfNodes, pub dealer_ephemeral_keys: Vec<Option<EphemeralKeySetBytes>>, pub receiver_ephemeral_keys: Vec<Option<EphemeralKeySetBytes>>, pub dealer_indices: Vec<NodeIndex>, pub new_threshold: NumberOfNodes, pub dealings: BTreeMap<EphemeralPublicKeyBytes, CLibDealingBytes>, pub transcript: CLibTranscriptBytes, } impl StateWithTranscript { pub fn from_resharing_responses(state: StateWithResponses) -> Self { let StateWithResponses { initial_state, dkg_id, dealer_ephemeral_keys, receiver_ephemeral_keys, dealer_indices, new_threshold, dealings, num_dealers, num_receivers, responses, } = state; let dealer_public_keys: Vec<_> = dealer_ephemeral_keys .iter() .map(|key_maybe| { key_maybe .as_ref() .map(ephemeral_key_set_public_key_with_pop) }) .collect(); let verified_responses: Vec<_> = receiver_ephemeral_keys .iter() .zip(&responses) .map(|tuple| match tuple { (Some(key_set), Some(CLibResponseBytes { complaints })) => { Some(CLibVerifiedResponseBytes { receiver_public_key: key_set.public_key_bytes, complaints: complaints.clone(), }) } _ => None, }) .collect(); let transcript = create_resharing_transcript( new_threshold, &dealings, &verified_responses, &dealer_public_keys, &initial_state.public_coefficients, ) .expect("Could not compute transcript"); StateWithTranscript { initial_state, dkg_id, num_dealers, num_receivers, dealer_ephemeral_keys, receiver_ephemeral_keys, dealer_indices, new_threshold, dealings, transcript, } } }
36.386792
109
0.581346
bbe290c8ddf39004689abbea86ffb6254169bbee
37,738
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// Operation shape for `BatchGetTraces`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`batch_get_traces`](crate::client::Client::batch_get_traces). /// /// See [`crate::client::fluent_builders::BatchGetTraces`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct BatchGetTraces { _private: (), } impl BatchGetTraces { /// Creates a new builder-style object to manufacture [`BatchGetTracesInput`](crate::input::BatchGetTracesInput) pub fn builder() -> crate::input::batch_get_traces_input::Builder { crate::input::batch_get_traces_input::Builder::default() } /// Creates a new `BatchGetTraces` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for BatchGetTraces { type Output = std::result::Result<crate::output::BatchGetTracesOutput, crate::error::BatchGetTracesError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_batch_get_traces_error(response) } else { crate::operation_deser::parse_batch_get_traces_response(response) } } } /// Operation shape for `CreateGroup`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_group`](crate::client::Client::create_group). /// /// See [`crate::client::fluent_builders::CreateGroup`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateGroup { _private: (), } impl CreateGroup { /// Creates a new builder-style object to manufacture [`CreateGroupInput`](crate::input::CreateGroupInput) pub fn builder() -> crate::input::create_group_input::Builder { crate::input::create_group_input::Builder::default() } /// Creates a new `CreateGroup` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateGroup { type Output = std::result::Result<crate::output::CreateGroupOutput, crate::error::CreateGroupError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_group_error(response) } else { crate::operation_deser::parse_create_group_response(response) } } } /// Operation shape for `CreateSamplingRule`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_sampling_rule`](crate::client::Client::create_sampling_rule). /// /// See [`crate::client::fluent_builders::CreateSamplingRule`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateSamplingRule { _private: (), } impl CreateSamplingRule { /// Creates a new builder-style object to manufacture [`CreateSamplingRuleInput`](crate::input::CreateSamplingRuleInput) pub fn builder() -> crate::input::create_sampling_rule_input::Builder { crate::input::create_sampling_rule_input::Builder::default() } /// Creates a new `CreateSamplingRule` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateSamplingRule { type Output = std::result::Result< crate::output::CreateSamplingRuleOutput, crate::error::CreateSamplingRuleError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_sampling_rule_error(response) } else { crate::operation_deser::parse_create_sampling_rule_response(response) } } } /// Operation shape for `DeleteGroup`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_group`](crate::client::Client::delete_group). 
/// /// See [`crate::client::fluent_builders::DeleteGroup`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteGroup { _private: (), } impl DeleteGroup { /// Creates a new builder-style object to manufacture [`DeleteGroupInput`](crate::input::DeleteGroupInput) pub fn builder() -> crate::input::delete_group_input::Builder { crate::input::delete_group_input::Builder::default() } /// Creates a new `DeleteGroup` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteGroup { type Output = std::result::Result<crate::output::DeleteGroupOutput, crate::error::DeleteGroupError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_delete_group_error(response) } else { crate::operation_deser::parse_delete_group_response(response) } } } /// Operation shape for `DeleteSamplingRule`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_sampling_rule`](crate::client::Client::delete_sampling_rule). /// /// See [`crate::client::fluent_builders::DeleteSamplingRule`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteSamplingRule { _private: (), } impl DeleteSamplingRule { /// Creates a new builder-style object to manufacture [`DeleteSamplingRuleInput`](crate::input::DeleteSamplingRuleInput) pub fn builder() -> crate::input::delete_sampling_rule_input::Builder { crate::input::delete_sampling_rule_input::Builder::default() } /// Creates a new `DeleteSamplingRule` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteSamplingRule { type Output = std::result::Result< crate::output::DeleteSamplingRuleOutput, crate::error::DeleteSamplingRuleError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_delete_sampling_rule_error(response) } else { crate::operation_deser::parse_delete_sampling_rule_response(response) } } } /// Operation shape for `GetEncryptionConfig`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_encryption_config`](crate::client::Client::get_encryption_config). /// /// See [`crate::client::fluent_builders::GetEncryptionConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetEncryptionConfig { _private: (), } impl GetEncryptionConfig { /// Creates a new builder-style object to manufacture [`GetEncryptionConfigInput`](crate::input::GetEncryptionConfigInput) pub fn builder() -> crate::input::get_encryption_config_input::Builder { crate::input::get_encryption_config_input::Builder::default() } /// Creates a new `GetEncryptionConfig` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetEncryptionConfig { type Output = std::result::Result< crate::output::GetEncryptionConfigOutput, crate::error::GetEncryptionConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_encryption_config_error(response) } else { crate::operation_deser::parse_get_encryption_config_response(response) } } } /// Operation shape for `GetGroup`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`get_group`](crate::client::Client::get_group). /// /// See [`crate::client::fluent_builders::GetGroup`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetGroup { _private: (), } impl GetGroup { /// Creates a new builder-style object to manufacture [`GetGroupInput`](crate::input::GetGroupInput) pub fn builder() -> crate::input::get_group_input::Builder { crate::input::get_group_input::Builder::default() } /// Creates a new `GetGroup` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetGroup { type Output = std::result::Result<crate::output::GetGroupOutput, crate::error::GetGroupError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_group_error(response) } else { crate::operation_deser::parse_get_group_response(response) } } } /// Operation shape for `GetGroups`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_groups`](crate::client::Client::get_groups). /// /// See [`crate::client::fluent_builders::GetGroups`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetGroups { _private: (), } impl GetGroups { /// Creates a new builder-style object to manufacture [`GetGroupsInput`](crate::input::GetGroupsInput) pub fn builder() -> crate::input::get_groups_input::Builder { crate::input::get_groups_input::Builder::default() } /// Creates a new `GetGroups` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetGroups { type Output = std::result::Result<crate::output::GetGroupsOutput, crate::error::GetGroupsError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_groups_error(response) } else { crate::operation_deser::parse_get_groups_response(response) } } } /// Operation shape for `GetInsight`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_insight`](crate::client::Client::get_insight). /// /// See [`crate::client::fluent_builders::GetInsight`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetInsight { _private: (), } impl GetInsight { /// Creates a new builder-style object to manufacture [`GetInsightInput`](crate::input::GetInsightInput) pub fn builder() -> crate::input::get_insight_input::Builder { crate::input::get_insight_input::Builder::default() } /// Creates a new `GetInsight` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetInsight { type Output = std::result::Result<crate::output::GetInsightOutput, crate::error::GetInsightError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_insight_error(response) } else { crate::operation_deser::parse_get_insight_response(response) } } } /// Operation shape for `GetInsightEvents`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_insight_events`](crate::client::Client::get_insight_events). /// /// See [`crate::client::fluent_builders::GetInsightEvents`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetInsightEvents { _private: (), } impl GetInsightEvents { /// Creates a new builder-style object to manufacture [`GetInsightEventsInput`](crate::input::GetInsightEventsInput) pub fn builder() -> crate::input::get_insight_events_input::Builder { crate::input::get_insight_events_input::Builder::default() } /// Creates a new `GetInsightEvents` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetInsightEvents { type Output = std::result::Result< crate::output::GetInsightEventsOutput, crate::error::GetInsightEventsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_insight_events_error(response) } else { crate::operation_deser::parse_get_insight_events_response(response) } } } /// Operation shape for `GetInsightImpactGraph`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_insight_impact_graph`](crate::client::Client::get_insight_impact_graph). /// /// See [`crate::client::fluent_builders::GetInsightImpactGraph`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetInsightImpactGraph { _private: (), } impl GetInsightImpactGraph { /// Creates a new builder-style object to manufacture [`GetInsightImpactGraphInput`](crate::input::GetInsightImpactGraphInput) pub fn builder() -> crate::input::get_insight_impact_graph_input::Builder { crate::input::get_insight_impact_graph_input::Builder::default() } /// Creates a new `GetInsightImpactGraph` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetInsightImpactGraph { type Output = std::result::Result< crate::output::GetInsightImpactGraphOutput, crate::error::GetInsightImpactGraphError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_insight_impact_graph_error(response) } else { crate::operation_deser::parse_get_insight_impact_graph_response(response) } } } /// Operation shape for `GetInsightSummaries`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_insight_summaries`](crate::client::Client::get_insight_summaries). /// /// See [`crate::client::fluent_builders::GetInsightSummaries`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetInsightSummaries { _private: (), } impl GetInsightSummaries { /// Creates a new builder-style object to manufacture [`GetInsightSummariesInput`](crate::input::GetInsightSummariesInput) pub fn builder() -> crate::input::get_insight_summaries_input::Builder { crate::input::get_insight_summaries_input::Builder::default() } /// Creates a new `GetInsightSummaries` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetInsightSummaries { type Output = std::result::Result< crate::output::GetInsightSummariesOutput, crate::error::GetInsightSummariesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_insight_summaries_error(response) } else { crate::operation_deser::parse_get_insight_summaries_response(response) } } } /// Operation shape for `GetSamplingRules`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`get_sampling_rules`](crate::client::Client::get_sampling_rules). /// /// See [`crate::client::fluent_builders::GetSamplingRules`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetSamplingRules { _private: (), } impl GetSamplingRules { /// Creates a new builder-style object to manufacture [`GetSamplingRulesInput`](crate::input::GetSamplingRulesInput) pub fn builder() -> crate::input::get_sampling_rules_input::Builder { crate::input::get_sampling_rules_input::Builder::default() } /// Creates a new `GetSamplingRules` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetSamplingRules { type Output = std::result::Result< crate::output::GetSamplingRulesOutput, crate::error::GetSamplingRulesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_sampling_rules_error(response) } else { crate::operation_deser::parse_get_sampling_rules_response(response) } } } /// Operation shape for `GetSamplingStatisticSummaries`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_sampling_statistic_summaries`](crate::client::Client::get_sampling_statistic_summaries). /// /// See [`crate::client::fluent_builders::GetSamplingStatisticSummaries`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetSamplingStatisticSummaries { _private: (), } impl GetSamplingStatisticSummaries { /// Creates a new builder-style object to manufacture [`GetSamplingStatisticSummariesInput`](crate::input::GetSamplingStatisticSummariesInput) pub fn builder() -> crate::input::get_sampling_statistic_summaries_input::Builder { crate::input::get_sampling_statistic_summaries_input::Builder::default() } /// Creates a new `GetSamplingStatisticSummaries` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetSamplingStatisticSummaries { type Output = std::result::Result< crate::output::GetSamplingStatisticSummariesOutput, crate::error::GetSamplingStatisticSummariesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_sampling_statistic_summaries_error(response) } else { crate::operation_deser::parse_get_sampling_statistic_summaries_response(response) } } } /// Operation shape for `GetSamplingTargets`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_sampling_targets`](crate::client::Client::get_sampling_targets). /// /// See [`crate::client::fluent_builders::GetSamplingTargets`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetSamplingTargets { _private: (), } impl GetSamplingTargets { /// Creates a new builder-style object to manufacture [`GetSamplingTargetsInput`](crate::input::GetSamplingTargetsInput) pub fn builder() -> crate::input::get_sampling_targets_input::Builder { crate::input::get_sampling_targets_input::Builder::default() } /// Creates a new `GetSamplingTargets` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetSamplingTargets { type Output = std::result::Result< crate::output::GetSamplingTargetsOutput, crate::error::GetSamplingTargetsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_sampling_targets_error(response) } else { crate::operation_deser::parse_get_sampling_targets_response(response) } } } /// Operation shape for `GetServiceGraph`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_service_graph`](crate::client::Client::get_service_graph). /// /// See [`crate::client::fluent_builders::GetServiceGraph`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetServiceGraph { _private: (), } impl GetServiceGraph { /// Creates a new builder-style object to manufacture [`GetServiceGraphInput`](crate::input::GetServiceGraphInput) pub fn builder() -> crate::input::get_service_graph_input::Builder { crate::input::get_service_graph_input::Builder::default() } /// Creates a new `GetServiceGraph` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetServiceGraph { type Output = std::result::Result< crate::output::GetServiceGraphOutput, crate::error::GetServiceGraphError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_service_graph_error(response) } else { crate::operation_deser::parse_get_service_graph_response(response) } } } /// Operation shape for `GetTimeSeriesServiceStatistics`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`get_time_series_service_statistics`](crate::client::Client::get_time_series_service_statistics). /// /// See [`crate::client::fluent_builders::GetTimeSeriesServiceStatistics`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetTimeSeriesServiceStatistics { _private: (), } impl GetTimeSeriesServiceStatistics { /// Creates a new builder-style object to manufacture [`GetTimeSeriesServiceStatisticsInput`](crate::input::GetTimeSeriesServiceStatisticsInput) pub fn builder() -> crate::input::get_time_series_service_statistics_input::Builder { crate::input::get_time_series_service_statistics_input::Builder::default() } /// Creates a new `GetTimeSeriesServiceStatistics` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetTimeSeriesServiceStatistics { type Output = std::result::Result< crate::output::GetTimeSeriesServiceStatisticsOutput, crate::error::GetTimeSeriesServiceStatisticsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_time_series_service_statistics_error(response) } else { crate::operation_deser::parse_get_time_series_service_statistics_response(response) } } } /// Operation shape for `GetTraceGraph`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_trace_graph`](crate::client::Client::get_trace_graph). /// /// See [`crate::client::fluent_builders::GetTraceGraph`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetTraceGraph { _private: (), } impl GetTraceGraph { /// Creates a new builder-style object to manufacture [`GetTraceGraphInput`](crate::input::GetTraceGraphInput) pub fn builder() -> crate::input::get_trace_graph_input::Builder { crate::input::get_trace_graph_input::Builder::default() } /// Creates a new `GetTraceGraph` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetTraceGraph { type Output = std::result::Result<crate::output::GetTraceGraphOutput, crate::error::GetTraceGraphError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_trace_graph_error(response) } else { crate::operation_deser::parse_get_trace_graph_response(response) } } } /// Operation shape for `GetTraceSummaries`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_trace_summaries`](crate::client::Client::get_trace_summaries). /// /// See [`crate::client::fluent_builders::GetTraceSummaries`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetTraceSummaries { _private: (), } impl GetTraceSummaries { /// Creates a new builder-style object to manufacture [`GetTraceSummariesInput`](crate::input::GetTraceSummariesInput) pub fn builder() -> crate::input::get_trace_summaries_input::Builder { crate::input::get_trace_summaries_input::Builder::default() } /// Creates a new `GetTraceSummaries` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetTraceSummaries { type Output = std::result::Result< crate::output::GetTraceSummariesOutput, crate::error::GetTraceSummariesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_trace_summaries_error(response) } else { crate::operation_deser::parse_get_trace_summaries_response(response) } } } /// Operation shape for `ListTagsForResource`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`list_tags_for_resource`](crate::client::Client::list_tags_for_resource). /// /// See [`crate::client::fluent_builders::ListTagsForResource`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListTagsForResource { _private: (), } impl ListTagsForResource { /// Creates a new builder-style object to manufacture [`ListTagsForResourceInput`](crate::input::ListTagsForResourceInput) pub fn builder() -> crate::input::list_tags_for_resource_input::Builder { crate::input::list_tags_for_resource_input::Builder::default() } /// Creates a new `ListTagsForResource` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListTagsForResource { type Output = std::result::Result< crate::output::ListTagsForResourceOutput, crate::error::ListTagsForResourceError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_tags_for_resource_error(response) } else { crate::operation_deser::parse_list_tags_for_resource_response(response) } } } /// Operation shape for `PutEncryptionConfig`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`put_encryption_config`](crate::client::Client::put_encryption_config). /// /// See [`crate::client::fluent_builders::PutEncryptionConfig`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutEncryptionConfig { _private: (), } impl PutEncryptionConfig { /// Creates a new builder-style object to manufacture [`PutEncryptionConfigInput`](crate::input::PutEncryptionConfigInput) pub fn builder() -> crate::input::put_encryption_config_input::Builder { crate::input::put_encryption_config_input::Builder::default() } /// Creates a new `PutEncryptionConfig` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutEncryptionConfig { type Output = std::result::Result< crate::output::PutEncryptionConfigOutput, crate::error::PutEncryptionConfigError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_encryption_config_error(response) } else { crate::operation_deser::parse_put_encryption_config_response(response) } } } /// Operation shape for `PutTelemetryRecords`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_telemetry_records`](crate::client::Client::put_telemetry_records). /// /// See [`crate::client::fluent_builders::PutTelemetryRecords`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutTelemetryRecords { _private: (), } impl PutTelemetryRecords { /// Creates a new builder-style object to manufacture [`PutTelemetryRecordsInput`](crate::input::PutTelemetryRecordsInput) pub fn builder() -> crate::input::put_telemetry_records_input::Builder { crate::input::put_telemetry_records_input::Builder::default() } /// Creates a new `PutTelemetryRecords` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutTelemetryRecords { type Output = std::result::Result< crate::output::PutTelemetryRecordsOutput, crate::error::PutTelemetryRecordsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_telemetry_records_error(response) } else { crate::operation_deser::parse_put_telemetry_records_response(response) } } } /// Operation shape for `PutTraceSegments`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_trace_segments`](crate::client::Client::put_trace_segments). /// /// See [`crate::client::fluent_builders::PutTraceSegments`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutTraceSegments { _private: (), } impl PutTraceSegments { /// Creates a new builder-style object to manufacture [`PutTraceSegmentsInput`](crate::input::PutTraceSegmentsInput) pub fn builder() -> crate::input::put_trace_segments_input::Builder { crate::input::put_trace_segments_input::Builder::default() } /// Creates a new `PutTraceSegments` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutTraceSegments { type Output = std::result::Result< crate::output::PutTraceSegmentsOutput, crate::error::PutTraceSegmentsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_trace_segments_error(response) } else { crate::operation_deser::parse_put_trace_segments_response(response) } } } /// Operation shape for `TagResource`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`tag_resource`](crate::client::Client::tag_resource). /// /// See [`crate::client::fluent_builders::TagResource`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct TagResource { _private: (), } impl TagResource { /// Creates a new builder-style object to manufacture [`TagResourceInput`](crate::input::TagResourceInput) pub fn builder() -> crate::input::tag_resource_input::Builder { crate::input::tag_resource_input::Builder::default() } /// Creates a new `TagResource` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for TagResource { type Output = std::result::Result<crate::output::TagResourceOutput, crate::error::TagResourceError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_tag_resource_error(response) } else { crate::operation_deser::parse_tag_resource_response(response) } } } /// Operation shape for `UntagResource`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`untag_resource`](crate::client::Client::untag_resource). 
/// /// See [`crate::client::fluent_builders::UntagResource`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UntagResource { _private: (), } impl UntagResource { /// Creates a new builder-style object to manufacture [`UntagResourceInput`](crate::input::UntagResourceInput) pub fn builder() -> crate::input::untag_resource_input::Builder { crate::input::untag_resource_input::Builder::default() } /// Creates a new `UntagResource` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UntagResource { type Output = std::result::Result<crate::output::UntagResourceOutput, crate::error::UntagResourceError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_untag_resource_error(response) } else { crate::operation_deser::parse_untag_resource_response(response) } } } /// Operation shape for `UpdateGroup`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_group`](crate::client::Client::update_group). /// /// See [`crate::client::fluent_builders::UpdateGroup`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateGroup { _private: (), } impl UpdateGroup { /// Creates a new builder-style object to manufacture [`UpdateGroupInput`](crate::input::UpdateGroupInput) pub fn builder() -> crate::input::update_group_input::Builder { crate::input::update_group_input::Builder::default() } /// Creates a new `UpdateGroup` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateGroup { type Output = std::result::Result<crate::output::UpdateGroupOutput, crate::error::UpdateGroupError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_group_error(response) } else { crate::operation_deser::parse_update_group_response(response) } } } /// Operation shape for `UpdateSamplingRule`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_sampling_rule`](crate::client::Client::update_sampling_rule). /// /// See [`crate::client::fluent_builders::UpdateSamplingRule`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateSamplingRule { _private: (), } impl UpdateSamplingRule { /// Creates a new builder-style object to manufacture [`UpdateSamplingRuleInput`](crate::input::UpdateSamplingRuleInput) pub fn builder() -> crate::input::update_sampling_rule_input::Builder { crate::input::update_sampling_rule_input::Builder::default() } /// Creates a new `UpdateSamplingRule` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateSamplingRule { type Output = std::result::Result< crate::output::UpdateSamplingRuleOutput, crate::error::UpdateSamplingRuleError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_sampling_rule_error(response) } else { crate::operation_deser::parse_update_sampling_rule_response(response) } } }
42.071349
148
0.689146
d529f6b0e80eeb366cbeae08faa1722b1b164b4f
4,550
#[doc = r" Value read from the register"] pub struct R { bits: u8, } #[doc = r" Value to write to the register"] pub struct W { bits: u8, } impl super::INTENSET { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits }; let mut w = W { bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct SINGLEER { bits: bool, } impl SINGLEER { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct DUALER { bits: bool, } impl DUALER { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Proxy"] pub struct _SINGLEEW<'a> { w: &'a mut W, } impl<'a> _SINGLEEW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const 
MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _DUALEW<'a> { w: &'a mut W, } impl<'a> _DUALEW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } #[doc = "Bit 0 - Single Bit ECC Error Interrupt Enable Set"] #[inline] pub fn singlee(&self) -> SINGLEER { let bits = { const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u8) != 0 }; SINGLEER { bits } } #[doc = "Bit 1 - Dual Bit ECC Error Interrupt Enable Set"] #[inline] pub fn duale(&self) -> DUALER { let bits = { const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u8) != 0 }; DUALER { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u8) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - Single Bit ECC Error Interrupt Enable Set"] #[inline] pub fn singlee(&mut self) -> _SINGLEEW { _SINGLEEW { w: self } } #[doc = "Bit 1 - Dual Bit ECC Error Interrupt Enable Set"] #[inline] pub fn duale(&mut self) -> _DUALEW { _DUALEW { w: self } } }
24.863388
64
0.499121
39a6d0302e5331cbdef5c9520b851e15c3892462
467
#[derive(Debug)] pub struct StorageError { pub error: String, } impl std::fmt::Display for StorageError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Cause: {}", self.error) } } #[derive(Debug)] pub struct QueryError { pub error: String, } impl std::fmt::Display for QueryError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Cause: {}", self.error) } }
21.227273
72
0.586724
fcf03460a9dfca729dfb467444eafba5daf51261
731
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{contract_event::ContractEvent, write_set::WriteSet}; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize)] pub struct ChangeSet { write_set: WriteSet, events: Vec<ContractEvent>, } impl ChangeSet { pub fn new(write_set: WriteSet, events: Vec<ContractEvent>) -> Self { Self { write_set, events } } pub fn into_inner(self) -> (WriteSet, Vec<ContractEvent>) { (self.write_set, self.events) } pub fn write_set(&self) -> &WriteSet { &self.write_set } pub fn events(&self) -> &[ContractEvent] { &self.events } }
24.366667
73
0.651163
fbd68d0aaa8dd9353f39f7bd73d9faabe4ef94eb
1,489
// Copyright 2019 The n-sql Project Developers. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. mod datetime_diff_fn; mod datetime_type; mod day_add_fn; mod day_sub_fn; mod extract_fn; mod hour_add_fn; mod hour_sub_fn; mod minute_add_fn; mod minute_sub_fn; mod month_add_fn; mod month_sub_fn; mod second_add_fn; mod second_sub_fn; mod year_add_fn; mod year_sub_fn; pub use self::datetime_diff_fn::*; pub use self::datetime_type::*; pub use self::day_add_fn::*; pub use self::day_sub_fn::*; pub use self::extract_fn::*; pub use self::hour_add_fn::*; pub use self::hour_sub_fn::*; pub use self::minute_add_fn::*; pub use self::minute_sub_fn::*; pub use self::month_add_fn::*; pub use self::month_sub_fn::*; pub use self::second_add_fn::*; pub use self::second_sub_fn::*; pub use self::year_add_fn::*; pub use self::year_sub_fn::*; #[derive(Clone, Debug)] pub enum DatetimeFn { Extract(ExtractFn), Diff(DatetimeDiffFn), DayAdd(DayAddFn), DaySub(DaySubFn), HourAdd(HourAddFn), HourSub(HourSubFn), MinuteAdd(MinuteAddFn), MinuteSub(MinuteSubFn), MonthAdd(MonthAddFn), MonthSub(MonthSubFn), SecondAdd(SecondAddFn), SecondSub(SecondSubFn), YearAdd(YearAddFn), YearSub(YearSubFn), }
25.672414
68
0.730692
6abc93e7b32a0e829b3f9b6ee1f626461f8c151b
6,721
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. //! Helper builder for constructing a `MetricEvent`. use { cobalt_client::traits::AsEventCodes, fidl_fuchsia_metrics::{HistogramBucket, MetricEvent, MetricEventPayload}, }; /// Adds the `builder()` method to `MetricEvent`. pub trait MetricEventExt { /// Returns a `MetricEventBuilder` for the specified `metric_id`. /// /// # Examples /// /// ``` /// assert_eq!(MetricEvent::builder(5).as_event().metric_id, 0); /// ``` fn builder(metric_id: u32) -> MetricEventBuilder; } impl MetricEventExt for MetricEvent { fn builder(metric_id: u32) -> MetricEventBuilder { MetricEventBuilder { metric_id, ..MetricEventBuilder::default() } } } /// MetricEventBuilder allows for a chained construction of `MetricEvent` objects. #[derive(Debug, Default, Clone)] pub struct MetricEventBuilder { metric_id: u32, event_codes: Vec<u32>, } impl MetricEventBuilder { /// Appends the provided `event_code` to the `event_codes` list. /// /// # Examples /// /// ``` /// assert_eq!(MetricEvent::builder(6).with_event_code(10).as_event().event_codes, vec![10]); /// ``` pub fn with_event_code(mut self, event_code: u32) -> MetricEventBuilder { self.event_codes.push(event_code); self } /// Overrides the list of event_codes with the provided `event_codes`. /// /// # Examples /// /// ``` /// assert_eq!( /// MetricEvent::builder(7).with_event_codes([1, 2, 3]).as_event().event_codes, /// vec![1,2,3]); /// ``` pub fn with_event_codes<Codes: AsEventCodes>( mut self, event_codes: Codes, ) -> MetricEventBuilder { self.event_codes = event_codes.as_event_codes(); self } /// Writes an `event_code` to a particular `index`. This method is useful when not assigning /// event codes in order. 
/// /// # Examples /// /// ``` /// assert_eq!( /// MetricEvent::builder(8).with_event_code_at(1, 10).as_event().event_codes, /// vec![0, 10]); /// ``` /// /// # Panics /// /// Panics if the `value` is greater than or equal to 5. pub fn with_event_code_at(mut self, index: usize, event_code: u32) -> MetricEventBuilder { assert!( index < 5, "Invalid index passed to MetricEventBuilder::with_event_code. Cobalt events cannot support more than 5 event_codes." ); while self.event_codes.len() <= index { self.event_codes.push(0); } self.event_codes[index] = event_code; self } /// Constructs a `MetricEvent` with the provided `MetricEventPayload`. /// /// # Examples /// ``` /// let payload = MetricEventPayload::Event(fidl_fuchsia_cobalt::Event); /// assert_eq!(MetricEvent::builder(10).build(payload.clone()).payload, payload); /// ``` pub fn build(self, payload: MetricEventPayload) -> MetricEvent { MetricEvent { metric_id: self.metric_id, event_codes: self.event_codes, payload } } /// Constructs a `MetricEvent` with a payload type of `MetricEventPayload::Count`. /// /// # Examples /// ``` /// asert_eq!( /// MetricEvent::builder(11).as_occurrence(10).payload, /// MetricEventPayload::Event(fidl_fuchsia_cobalt::Count(10))); /// ``` pub fn as_occurrence(self, count: u64) -> MetricEvent { self.build(MetricEventPayload::Count(count)) } /// Constructs a `MetricEvent` with a payload type of `MetricEventPayload::IntegerValue`. /// /// # Examples /// ``` /// asert_eq!( /// MetricEvent::builder(12).as_integer(5).payload, /// MetricEventPayload::IntegerValue(5))); /// ``` pub fn as_integer(self, integer_value: i64) -> MetricEvent { self.build(MetricEventPayload::IntegerValue(integer_value)) } /// Constructs a `MetricEvent` with a payload type of `MetricEventPayload::Histogram`. 
/// /// # Examples /// ``` /// let histogram = vec![HistogramBucket { index: 0, count: 1 }]; /// asert_eq!( /// MetricEvent::builder(17).as_int_histogram(histogram.clone()).payload, /// MetricEventPayload::Histogram(histogram)); /// ``` pub fn as_integer_histogram(self, histogram: Vec<HistogramBucket>) -> MetricEvent { self.build(MetricEventPayload::Histogram(histogram)) } /// Constructs a `MetricEvent` with a payload type of `MetricEventPayload::StringValue`. /// /// # Examples /// ``` /// asert_eq!( /// MetricEvent::builder(17).as_string("test").payload, /// MetricEventPayload::StringValue("test".to_string())); /// ``` pub fn as_string<S: Into<String>>(self, string: S) -> MetricEvent { self.build(MetricEventPayload::StringValue(string.into())) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_builder_as_occurrence() { let event = MetricEvent::builder(1).with_event_code(2).as_occurrence(3); let expected = MetricEvent { metric_id: 1, event_codes: vec![2], payload: MetricEventPayload::Count(3), }; assert_eq!(event, expected); } #[test] fn test_builder_as_integer() { let event = MetricEvent::builder(2).with_event_code(3).as_integer(4); let expected = MetricEvent { metric_id: 2, event_codes: vec![3], payload: MetricEventPayload::IntegerValue(4), }; assert_eq!(event, expected); } #[test] fn test_as_integer_histogram() { let event = MetricEvent::builder(7) .with_event_code(8) .as_integer_histogram(vec![HistogramBucket { index: 0, count: 1 }]); let expected = MetricEvent { metric_id: 7, event_codes: vec![8], payload: MetricEventPayload::Histogram(vec![HistogramBucket { index: 0, count: 1 }]), }; assert_eq!(event, expected); } #[test] fn test_builder_as_string() { let event = MetricEvent::builder(2).with_event_code(3).as_string("value"); let expected = MetricEvent { metric_id: 2, event_codes: vec![3], payload: MetricEventPayload::StringValue("value".into()), }; assert_eq!(event, expected); } #[test] #[should_panic(expected = "Invalid index")] fn 
test_bad_event_code_at_index() { MetricEvent::builder(8).with_event_code_at(5, 10).as_occurrence(1); } }
32.004762
128
0.605416
29ec85590b32044a14d900e7a92ffae09bc1230f
34,095
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Ordered containers with integer keys, implemented as radix tries (`TrieSet` and `TrieMap` types) use core::prelude::*; use alloc::boxed::Box; use core::default::Default; use core::mem::zeroed; use core::mem; use core::uint; use {Collection, Mutable, Map, MutableMap, Set, MutableSet}; use slice::{Items, MutItems}; use slice; // FIXME: #5244: need to manually update the TrieNode constructor static SHIFT: uint = 4; static SIZE: uint = 1 << SHIFT; static MASK: uint = SIZE - 1; static NUM_CHUNKS: uint = uint::BITS / SHIFT; enum Child<T> { Internal(Box<TrieNode<T>>), External(uint, T), Nothing } #[allow(missing_doc)] pub struct TrieMap<T> { root: TrieNode<T>, length: uint } impl<T> Collection for TrieMap<T> { /// Return the number of elements in the map #[inline] fn len(&self) -> uint { self.length } } impl<T> Mutable for TrieMap<T> { /// Clear the map, removing all values. 
#[inline] fn clear(&mut self) { self.root = TrieNode::new(); self.length = 0; } } impl<T> Map<uint, T> for TrieMap<T> { /// Return a reference to the value corresponding to the key #[inline] fn find<'a>(&'a self, key: &uint) -> Option<&'a T> { let mut node: &'a TrieNode<T> = &self.root; let mut idx = 0; loop { match node.children[chunk(*key, idx)] { Internal(ref x) => node = &**x, External(stored, ref value) => { if stored == *key { return Some(value) } else { return None } } Nothing => return None } idx += 1; } } } impl<T> MutableMap<uint, T> for TrieMap<T> { /// Return a mutable reference to the value corresponding to the key #[inline] fn find_mut<'a>(&'a mut self, key: &uint) -> Option<&'a mut T> { find_mut(&mut self.root.children[chunk(*key, 0)], *key, 1) } /// Insert a key-value pair from the map. If the key already had a value /// present in the map, that value is returned. Otherwise None is returned. fn swap(&mut self, key: uint, value: T) -> Option<T> { let ret = insert(&mut self.root.count, &mut self.root.children[chunk(key, 0)], key, value, 1); if ret.is_none() { self.length += 1 } ret } /// Removes a key from the map, returning the value at the key if the key /// was previously in the map. 
fn pop(&mut self, key: &uint) -> Option<T> { let ret = remove(&mut self.root.count, &mut self.root.children[chunk(*key, 0)], *key, 1); if ret.is_some() { self.length -= 1 } ret } } impl<T> Default for TrieMap<T> { #[inline] fn default() -> TrieMap<T> { TrieMap::new() } } impl<T> TrieMap<T> { /// Create an empty TrieMap #[inline] pub fn new() -> TrieMap<T> { TrieMap{root: TrieNode::new(), length: 0} } /// Visit all key-value pairs in reverse order #[inline] pub fn each_reverse<'a>(&'a self, f: |&uint, &'a T| -> bool) -> bool { self.root.each_reverse(f) } /// Get an iterator over the key-value pairs in the map pub fn iter<'a>(&'a self) -> Entries<'a, T> { let mut iter = unsafe {Entries::new()}; iter.stack[0] = self.root.children.iter(); iter.length = 1; iter.remaining_min = self.length; iter.remaining_max = self.length; iter } /// Get an iterator over the key-value pairs in the map, with the /// ability to mutate the values. pub fn mut_iter<'a>(&'a mut self) -> MutEntries<'a, T> { let mut iter = unsafe {MutEntries::new()}; iter.stack[0] = self.root.children.mut_iter(); iter.length = 1; iter.remaining_min = self.length; iter.remaining_max = self.length; iter } } // FIXME #5846 we want to be able to choose between &x and &mut x // (with many different `x`) below, so we need to optionally pass mut // as a tt, but the only thing we can do with a `tt` is pass them to // other macros, so this takes the `& <mutability> <operand>` token // sequence and forces their evaluation as an expression. (see also // `item!` below.) macro_rules! addr { ($e:expr) => { $e } } macro_rules! bound { ($iterator_name:ident, // the current treemap self = $this:expr, // the key to look for key = $key:expr, // are we looking at the upper bound? is_upper = $upper:expr, // method names for slicing/iterating. slice_from = $slice_from:ident, iter = $iter:ident, // see the comment on `addr!`, this is just an optional mut, but // there's no 0-or-1 repeats yet. 
mutability = $($mut_:tt)*) => { { // # For `mut` // We need an unsafe pointer here because we are borrowing // mutable references to the internals of each of these // mutable nodes, while still using the outer node. // // However, we're allowed to flaunt rustc like this because we // never actually modify the "shape" of the nodes. The only // place that mutation is can actually occur is of the actual // values of the TrieMap (as the return value of the // iterator), i.e. we can never cause a deallocation of any // TrieNodes so the raw pointer is always valid. // // # For non-`mut` // We like sharing code so much that even a little unsafe won't // stop us. let this = $this; let mut node = unsafe { mem::transmute::<_, uint>(&this.root) as *mut TrieNode<T> }; let key = $key; let mut it = unsafe {$iterator_name::new()}; // everything else is zero'd, as we want. it.remaining_max = this.length; // this addr is necessary for the `Internal` pattern. addr!(loop { let children = unsafe {addr!(& $($mut_)* (*node).children)}; // it.length is the current depth in the iterator and the // current depth through the `uint` key we've traversed. let child_id = chunk(key, it.length); let (slice_idx, ret) = match children[child_id] { Internal(ref $($mut_)* n) => { node = unsafe { mem::transmute::<_, uint>(&**n) as *mut TrieNode<T> }; (child_id + 1, false) } External(stored, _) => { (if stored < key || ($upper && stored == key) { child_id + 1 } else { child_id }, true) } Nothing => { (child_id + 1, true) } }; // push to the stack. it.stack[it.length] = children.$slice_from(slice_idx).$iter(); it.length += 1; if ret { return it } }) } } } impl<T> TrieMap<T> { // If `upper` is true then returns upper_bound else returns lower_bound. 
#[inline] fn bound<'a>(&'a self, key: uint, upper: bool) -> Entries<'a, T> { bound!(Entries, self = self, key = key, is_upper = upper, slice_from = slice_from, iter = iter, mutability = ) } /// Get an iterator pointing to the first key-value pair whose key is not less than `key`. /// If all keys in the map are less than `key` an empty iterator is returned. pub fn lower_bound<'a>(&'a self, key: uint) -> Entries<'a, T> { self.bound(key, false) } /// Get an iterator pointing to the first key-value pair whose key is greater than `key`. /// If all keys in the map are not greater than `key` an empty iterator is returned. pub fn upper_bound<'a>(&'a self, key: uint) -> Entries<'a, T> { self.bound(key, true) } // If `upper` is true then returns upper_bound else returns lower_bound. #[inline] fn mut_bound<'a>(&'a mut self, key: uint, upper: bool) -> MutEntries<'a, T> { bound!(MutEntries, self = self, key = key, is_upper = upper, slice_from = mut_slice_from, iter = mut_iter, mutability = mut) } /// Get an iterator pointing to the first key-value pair whose key is not less than `key`. /// If all keys in the map are less than `key` an empty iterator is returned. pub fn mut_lower_bound<'a>(&'a mut self, key: uint) -> MutEntries<'a, T> { self.mut_bound(key, false) } /// Get an iterator pointing to the first key-value pair whose key is greater than `key`. /// If all keys in the map are not greater than `key` an empty iterator is returned. 
pub fn mut_upper_bound<'a>(&'a mut self, key: uint) -> MutEntries<'a, T> { self.mut_bound(key, true) } } impl<T> FromIterator<(uint, T)> for TrieMap<T> { fn from_iter<Iter: Iterator<(uint, T)>>(iter: Iter) -> TrieMap<T> { let mut map = TrieMap::new(); map.extend(iter); map } } impl<T> Extendable<(uint, T)> for TrieMap<T> { fn extend<Iter: Iterator<(uint, T)>>(&mut self, mut iter: Iter) { for (k, v) in iter { self.insert(k, v); } } } #[allow(missing_doc)] pub struct TrieSet { map: TrieMap<()> } impl Collection for TrieSet { /// Return the number of elements in the set #[inline] fn len(&self) -> uint { self.map.len() } } impl Mutable for TrieSet { /// Clear the set, removing all values. #[inline] fn clear(&mut self) { self.map.clear() } } impl Set<uint> for TrieSet { #[inline] fn contains(&self, value: &uint) -> bool { self.map.contains_key(value) } #[inline] fn is_disjoint(&self, other: &TrieSet) -> bool { self.iter().all(|v| !other.contains(&v)) } #[inline] fn is_subset(&self, other: &TrieSet) -> bool { self.iter().all(|v| other.contains(&v)) } #[inline] fn is_superset(&self, other: &TrieSet) -> bool { other.is_subset(self) } } impl MutableSet<uint> for TrieSet { #[inline] fn insert(&mut self, value: uint) -> bool { self.map.insert(value, ()) } #[inline] fn remove(&mut self, value: &uint) -> bool { self.map.remove(value) } } impl Default for TrieSet { #[inline] fn default() -> TrieSet { TrieSet::new() } } impl TrieSet { /// Create an empty TrieSet #[inline] pub fn new() -> TrieSet { TrieSet{map: TrieMap::new()} } /// Visit all values in reverse order #[inline] pub fn each_reverse(&self, f: |&uint| -> bool) -> bool { self.map.each_reverse(|k, _| f(k)) } /// Get an iterator over the values in the set #[inline] pub fn iter<'a>(&'a self) -> SetItems<'a> { SetItems{iter: self.map.iter()} } /// Get an iterator pointing to the first value that is not less than `val`. /// If all values in the set are less than `val` an empty iterator is returned. 
pub fn lower_bound<'a>(&'a self, val: uint) -> SetItems<'a> { SetItems{iter: self.map.lower_bound(val)} } /// Get an iterator pointing to the first value that key is greater than `val`. /// If all values in the set are not greater than `val` an empty iterator is returned. pub fn upper_bound<'a>(&'a self, val: uint) -> SetItems<'a> { SetItems{iter: self.map.upper_bound(val)} } } impl FromIterator<uint> for TrieSet { fn from_iter<Iter: Iterator<uint>>(iter: Iter) -> TrieSet { let mut set = TrieSet::new(); set.extend(iter); set } } impl Extendable<uint> for TrieSet { fn extend<Iter: Iterator<uint>>(&mut self, mut iter: Iter) { for elem in iter { self.insert(elem); } } } struct TrieNode<T> { count: uint, children: [Child<T>, ..SIZE] } impl<T> TrieNode<T> { #[inline] fn new() -> TrieNode<T> { // FIXME: #5244: [Nothing, ..SIZE] should be possible without implicit // copyability TrieNode{count: 0, children: [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]} } } impl<T> TrieNode<T> { fn each_reverse<'a>(&'a self, f: |&uint, &'a T| -> bool) -> bool { for elt in self.children.iter().rev() { match *elt { Internal(ref x) => if !x.each_reverse(|i,t| f(i,t)) { return false }, External(k, ref v) => if !f(&k, v) { return false }, Nothing => () } } true } } // if this was done via a trait, the key could be generic #[inline] fn chunk(n: uint, idx: uint) -> uint { let sh = uint::BITS - (SHIFT * (idx + 1)); (n >> sh) & MASK } fn find_mut<'r, T>(child: &'r mut Child<T>, key: uint, idx: uint) -> Option<&'r mut T> { match *child { External(stored, ref mut value) if stored == key => Some(value), External(..) 
=> None, Internal(ref mut x) => find_mut(&mut x.children[chunk(key, idx)], key, idx + 1), Nothing => None } } fn insert<T>(count: &mut uint, child: &mut Child<T>, key: uint, value: T, idx: uint) -> Option<T> { // we branch twice to avoid having to do the `replace` when we // don't need to; this is much faster, especially for keys that // have long shared prefixes. match *child { Nothing => { *count += 1; *child = External(key, value); return None; } Internal(ref mut x) => { return insert(&mut x.count, &mut x.children[chunk(key, idx)], key, value, idx + 1); } External(stored_key, ref mut stored_value) if stored_key == key => { // swap in the new value and return the old. return Some(mem::replace(stored_value, value)); } _ => {} } // conflict, an external node with differing keys: we have to // split the node, so we need the old value by value; hence we // have to move out of `child`. match mem::replace(child, Nothing) { External(stored_key, stored_value) => { let mut new = box TrieNode::new(); insert(&mut new.count, &mut new.children[chunk(stored_key, idx)], stored_key, stored_value, idx + 1); let ret = insert(&mut new.count, &mut new.children[chunk(key, idx)], key, value, idx + 1); *child = Internal(new); return ret; } _ => fail!("unreachable code"), } } fn remove<T>(count: &mut uint, child: &mut Child<T>, key: uint, idx: uint) -> Option<T> { let (ret, this) = match *child { External(stored, _) if stored == key => { match mem::replace(child, Nothing) { External(_, value) => (Some(value), true), _ => fail!() } } External(..) => (None, false), Internal(ref mut x) => { let ret = remove(&mut x.count, &mut x.children[chunk(key, idx)], key, idx + 1); (ret, x.count == 0) } Nothing => (None, false) }; if this { *child = Nothing; *count -= 1; } return ret; } /// Forward iterator over a map pub struct Entries<'a, T> { stack: [slice::Items<'a, Child<T>>, .. 
NUM_CHUNKS], length: uint, remaining_min: uint, remaining_max: uint } /// Forward iterator over the key-value pairs of a map, with the /// values being mutable. pub struct MutEntries<'a, T> { stack: [slice::MutItems<'a, Child<T>>, .. NUM_CHUNKS], length: uint, remaining_min: uint, remaining_max: uint } // FIXME #5846: see `addr!` above. macro_rules! item { ($i:item) => {$i}} macro_rules! iterator_impl { ($name:ident, iter = $iter:ident, mutability = $($mut_:tt)*) => { impl<'a, T> $name<'a, T> { // Create new zero'd iterator. We have a thin gilding of safety by // using init rather than uninit, so that the worst that can happen // from failing to initialise correctly after calling these is a // segfault. #[cfg(target_word_size="32")] unsafe fn new() -> $name<'a, T> { $name { remaining_min: 0, remaining_max: 0, length: 0, // ick :( ... at least the compiler will tell us if we screwed up. stack: [zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed()] } } #[cfg(target_word_size="64")] unsafe fn new() -> $name<'a, T> { $name { remaining_min: 0, remaining_max: 0, length: 0, stack: [zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed(), zeroed()] } } } item!(impl<'a, T> Iterator<(uint, &'a $($mut_)* T)> for $name<'a, T> { // you might wonder why we're not even trying to act within the // rules, and are just manipulating raw pointers like there's no // such thing as invalid pointers and memory unsafety. The // reason is performance, without doing this we can get the // bench_iter_large microbenchmark down to about 30000 ns/iter // (using .unsafe_ref to index self.stack directly, 38000 // ns/iter with [] checked indexing), but this smashes that down // to 13500 ns/iter. // // Fortunately, it's still safe... // // We have an invariant that every Internal node // corresponds to one push to self.stack, and one pop, // nested appropriately. 
self.stack has enough storage // to store the maximum depth of Internal nodes in the // trie (8 on 32-bit platforms, 16 on 64-bit). fn next(&mut self) -> Option<(uint, &'a $($mut_)* T)> { let start_ptr = self.stack.as_mut_ptr(); unsafe { // write_ptr is the next place to write to the stack. // invariant: start_ptr <= write_ptr < end of the // vector. let mut write_ptr = start_ptr.offset(self.length as int); while write_ptr != start_ptr { // indexing back one is safe, since write_ptr > // start_ptr now. match (*write_ptr.offset(-1)).next() { // exhausted this iterator (i.e. finished this // Internal node), so pop from the stack. // // don't bother clearing the memory, because the // next time we use it we'll've written to it // first. None => write_ptr = write_ptr.offset(-1), Some(child) => { addr!(match *child { Internal(ref $($mut_)* node) => { // going down a level, so push // to the stack (this is the // write referenced above) *write_ptr = node.children.$iter(); write_ptr = write_ptr.offset(1); } External(key, ref $($mut_)* value) => { self.remaining_max -= 1; if self.remaining_min > 0 { self.remaining_min -= 1; } // store the new length of the // stack, based on our current // position. self.length = (write_ptr as uint - start_ptr as uint) / mem::size_of_val(&*write_ptr); return Some((key, value)); } Nothing => {} }) } } } } return None; } #[inline] fn size_hint(&self) -> (uint, Option<uint>) { (self.remaining_min, Some(self.remaining_max)) } }) } } iterator_impl! { Entries, iter = iter, mutability = } iterator_impl! 
{ MutEntries, iter = mut_iter, mutability = mut } /// Forward iterator over a set pub struct SetItems<'a> { iter: Entries<'a, ()> } impl<'a> Iterator<uint> for SetItems<'a> { fn next(&mut self) -> Option<uint> { self.iter.next().map(|(key, _)| key) } fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() } } #[cfg(test)] mod test_map { use std::prelude::*; use std::iter::range_step; use std::uint; use {MutableMap, Map}; use super::{TrieMap, TrieNode, Internal, External, Nothing}; fn check_integrity<T>(trie: &TrieNode<T>) { assert!(trie.count != 0); let mut sum = 0; for x in trie.children.iter() { match *x { Nothing => (), Internal(ref y) => { check_integrity(&**y); sum += 1 } External(_, _) => { sum += 1 } } } assert_eq!(sum, trie.count); } #[test] fn test_find_mut() { let mut m = TrieMap::new(); assert!(m.insert(1u, 12i)); assert!(m.insert(2u, 8i)); assert!(m.insert(5u, 14i)); let new = 100; match m.find_mut(&5) { None => fail!(), Some(x) => *x = new } assert_eq!(m.find(&5), Some(&new)); } #[test] fn test_find_mut_missing() { let mut m = TrieMap::new(); assert!(m.find_mut(&0).is_none()); assert!(m.insert(1u, 12i)); assert!(m.find_mut(&0).is_none()); assert!(m.insert(2, 8)); assert!(m.find_mut(&0).is_none()); } #[test] fn test_step() { let mut trie = TrieMap::new(); let n = 300u; for x in range_step(1u, n, 2) { assert!(trie.insert(x, x + 1)); assert!(trie.contains_key(&x)); check_integrity(&trie.root); } for x in range_step(0u, n, 2) { assert!(!trie.contains_key(&x)); assert!(trie.insert(x, x + 1)); check_integrity(&trie.root); } for x in range(0u, n) { assert!(trie.contains_key(&x)); assert!(!trie.insert(x, x + 1)); check_integrity(&trie.root); } for x in range_step(1u, n, 2) { assert!(trie.remove(&x)); assert!(!trie.contains_key(&x)); check_integrity(&trie.root); } for x in range_step(0u, n, 2) { assert!(trie.contains_key(&x)); assert!(!trie.insert(x, x + 1)); check_integrity(&trie.root); } } #[test] fn test_each_reverse() { let mut m = 
TrieMap::new(); assert!(m.insert(3, 6)); assert!(m.insert(0, 0)); assert!(m.insert(4, 8)); assert!(m.insert(2, 4)); assert!(m.insert(1, 2)); let mut n = 4; m.each_reverse(|k, v| { assert_eq!(*k, n); assert_eq!(*v, n * 2); n -= 1; true }); } #[test] fn test_each_reverse_break() { let mut m = TrieMap::new(); for x in range(uint::MAX - 10000, uint::MAX).rev() { m.insert(x, x / 2); } let mut n = uint::MAX - 1; m.each_reverse(|k, v| { if n == uint::MAX - 5000 { false } else { assert!(n > uint::MAX - 5000); assert_eq!(*k, n); assert_eq!(*v, n / 2); n -= 1; true } }); } #[test] fn test_swap() { let mut m = TrieMap::new(); assert_eq!(m.swap(1u, 2i), None); assert_eq!(m.swap(1u, 3i), Some(2)); assert_eq!(m.swap(1u, 4i), Some(3)); } #[test] fn test_pop() { let mut m = TrieMap::new(); m.insert(1u, 2i); assert_eq!(m.pop(&1), Some(2)); assert_eq!(m.pop(&1), None); } #[test] fn test_from_iter() { let xs = vec![(1u, 1i), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; let map: TrieMap<int> = xs.iter().map(|&x| x).collect(); for &(k, v) in xs.iter() { assert_eq!(map.find(&k), Some(&v)); } } #[test] fn test_iteration() { let empty_map : TrieMap<uint> = TrieMap::new(); assert_eq!(empty_map.iter().next(), None); let first = uint::MAX - 10000; let last = uint::MAX; let mut map = TrieMap::new(); for x in range(first, last).rev() { map.insert(x, x / 2); } let mut i = 0; for (k, &v) in map.iter() { assert_eq!(k, first + i); assert_eq!(v, k / 2); i += 1; } assert_eq!(i, last - first); } #[test] fn test_mut_iter() { let mut empty_map : TrieMap<uint> = TrieMap::new(); assert!(empty_map.mut_iter().next().is_none()); let first = uint::MAX - 10000; let last = uint::MAX; let mut map = TrieMap::new(); for x in range(first, last).rev() { map.insert(x, x / 2); } let mut i = 0; for (k, v) in map.mut_iter() { assert_eq!(k, first + i); *v -= k / 2; i += 1; } assert_eq!(i, last - first); assert!(map.iter().all(|(_, &v)| v == 0)); } #[test] fn test_bound() { let empty_map : TrieMap<uint> = TrieMap::new(); 
assert_eq!(empty_map.lower_bound(0).next(), None); assert_eq!(empty_map.upper_bound(0).next(), None); let last = 999u; let step = 3u; let value = 42u; let mut map : TrieMap<uint> = TrieMap::new(); for x in range_step(0u, last, step) { assert!(x % step == 0); map.insert(x, value); } for i in range(0u, last - step) { let mut lb = map.lower_bound(i); let mut ub = map.upper_bound(i); let next_key = i - i % step + step; let next_pair = (next_key, &value); if i % step == 0 { assert_eq!(lb.next(), Some((i, &value))); } else { assert_eq!(lb.next(), Some(next_pair)); } assert_eq!(ub.next(), Some(next_pair)); } let mut lb = map.lower_bound(last - step); assert_eq!(lb.next(), Some((last - step, &value))); let mut ub = map.upper_bound(last - step); assert_eq!(ub.next(), None); for i in range(last - step + 1, last) { let mut lb = map.lower_bound(i); assert_eq!(lb.next(), None); let mut ub = map.upper_bound(i); assert_eq!(ub.next(), None); } } #[test] fn test_mut_bound() { let empty_map : TrieMap<uint> = TrieMap::new(); assert_eq!(empty_map.lower_bound(0).next(), None); assert_eq!(empty_map.upper_bound(0).next(), None); let mut m_lower = TrieMap::new(); let mut m_upper = TrieMap::new(); for i in range(0u, 100) { m_lower.insert(2 * i, 4 * i); m_upper.insert(2 * i, 4 * i); } for i in range(0u, 199) { let mut lb_it = m_lower.mut_lower_bound(i); let (k, v) = lb_it.next().unwrap(); let lb = i + i % 2; assert_eq!(lb, k); *v -= k; } for i in range(0u, 198) { let mut ub_it = m_upper.mut_upper_bound(i); let (k, v) = ub_it.next().unwrap(); let ub = i + 2 - i % 2; assert_eq!(ub, k); *v -= k; } assert!(m_lower.mut_lower_bound(199).next().is_none()); assert!(m_upper.mut_upper_bound(198).next().is_none()); assert!(m_lower.iter().all(|(_, &x)| x == 0)); assert!(m_upper.iter().all(|(_, &x)| x == 0)); } } #[cfg(test)] mod bench_map { use std::prelude::*; use std::rand::{weak_rng, Rng}; use test::Bencher; use MutableMap; use super::TrieMap; #[bench] fn bench_iter_small(b: &mut Bencher) { let mut 
m = TrieMap::<uint>::new(); let mut rng = weak_rng(); for _ in range(0u, 20) { m.insert(rng.gen(), rng.gen()); } b.iter(|| for _ in m.iter() {}) } #[bench] fn bench_iter_large(b: &mut Bencher) { let mut m = TrieMap::<uint>::new(); let mut rng = weak_rng(); for _ in range(0u, 1000) { m.insert(rng.gen(), rng.gen()); } b.iter(|| for _ in m.iter() {}) } #[bench] fn bench_lower_bound(b: &mut Bencher) { let mut m = TrieMap::<uint>::new(); let mut rng = weak_rng(); for _ in range(0u, 1000) { m.insert(rng.gen(), rng.gen()); } b.iter(|| { for _ in range(0u, 10) { m.lower_bound(rng.gen()); } }); } #[bench] fn bench_upper_bound(b: &mut Bencher) { let mut m = TrieMap::<uint>::new(); let mut rng = weak_rng(); for _ in range(0u, 1000) { m.insert(rng.gen(), rng.gen()); } b.iter(|| { for _ in range(0u, 10) { m.upper_bound(rng.gen()); } }); } #[bench] fn bench_insert_large(b: &mut Bencher) { let mut m = TrieMap::<[uint, .. 10]>::new(); let mut rng = weak_rng(); b.iter(|| { for _ in range(0u, 1000) { m.insert(rng.gen(), [1, .. 10]); } }) } #[bench] fn bench_insert_large_low_bits(b: &mut Bencher) { let mut m = TrieMap::<[uint, .. 10]>::new(); let mut rng = weak_rng(); b.iter(|| { for _ in range(0u, 1000) { // only have the last few bits set. m.insert(rng.gen::<uint>() & 0xff_ff, [1, .. 10]); } }) } #[bench] fn bench_insert_small(b: &mut Bencher) { let mut m = TrieMap::<()>::new(); let mut rng = weak_rng(); b.iter(|| { for _ in range(0u, 1000) { m.insert(rng.gen(), ()); } }) } #[bench] fn bench_insert_small_low_bits(b: &mut Bencher) { let mut m = TrieMap::<()>::new(); let mut rng = weak_rng(); b.iter(|| { for _ in range(0u, 1000) { // only have the last few bits set. 
m.insert(rng.gen::<uint>() & 0xff_ff, ()); } }) } } #[cfg(test)] mod test_set { use std::prelude::*; use std::uint; use {MutableSet, Set}; use super::TrieSet; #[test] fn test_sane_chunk() { let x = 1; let y = 1 << (uint::BITS - 1); let mut trie = TrieSet::new(); assert!(trie.insert(x)); assert!(trie.insert(y)); assert_eq!(trie.len(), 2); let expected = [x, y]; for (i, x) in trie.iter().enumerate() { assert_eq!(expected[i], x); } } #[test] fn test_from_iter() { let xs = vec![9u, 8, 7, 6, 5, 4, 3, 2, 1]; let set: TrieSet = xs.iter().map(|&x| x).collect(); for x in xs.iter() { assert!(set.contains(x)); } } }
31.395028
100
0.482211
e574f309cc235b0e76fefff9818086b97c957ae8
3,897
//! This module provides a bounded queue of futures which are identified by a
//! key.
//!
//! This module tries to uphold the following guarantees:
//!
//! - During a flush, no further futures can be enqueued
//! - wait, wait_async, and contains_key can only indicate future completion if it has actually
//! been completed, either via block_on or .await
//!
use futures::{
    executor::block_on,
    future::{IntoFuture, Shared},
    prelude::*,
};
use parking_lot::{Mutex, RwLock};
use std::{borrow::Borrow, hash::Hash};

use indexmap::IndexMap;

/// A bounded queue of `F`s which are identified by a key `K`.
///
/// Internals:
/// - `futures` maps each key to a `Shared` wrapper of the future so that
///   multiple waiters can await the same completion result (this is why
///   `F::Output: Clone` is required).
/// - `futures` is an `IndexMap`, so entries keep insertion order; draining
///   from index 0 processes the oldest enqueued future first (per the
///   indexmap crate's ordering guarantee — see `drain_any_future`).
/// - `flush_lock` serializes `enqueue` against `flush`, upholding the
///   "no enqueue during a flush" guarantee from the module docs.
pub struct BoundedFutureQueue<K, F>
where
    F: TryFuture,
    F::Output: Clone,
{
    // Key -> shared future. RwLock allows cheap concurrent len()/lookup reads.
    futures: RwLock<IndexMap<K, Shared<IntoFuture<F>>>>,
    // Soft cap on the number of pending futures; enforced in `enqueue`.
    limit: usize,
    // Held for the duration of `flush` and for the insertion phase of
    // `enqueue`; the `()` payload shows it is used purely as a mutex.
    flush_lock: Mutex<()>,
}

impl<K: Clone + Eq + Hash, F: TryFuture<Ok = ()>> BoundedFutureQueue<K, F>
where
    F::Output: Clone,
    F::Error: Clone,
{
    /// Creates a new queue with the given `limit`.
    pub fn new(limit: usize) -> Self {
        BoundedFutureQueue {
            futures: RwLock::new(IndexMap::new()),
            limit,
            flush_lock: Mutex::new(()),
        }
    }

    /// Enqueues a new `Future`. This function will block if the queue is full.
    ///
    /// Steps (order matters):
    /// 1. Take `flush_lock` so a concurrent `flush` cannot interleave.
    /// 2. Block on any future already registered under `key` (so at most one
    ///    future per key exists); propagate its error if it failed.
    /// 3. Insert the new future; the `assert!` documents the invariant that
    ///    step 2 removed any previous entry.
    /// 4. AFTER releasing `flush_lock`, drain oldest futures while over
    ///    `limit`. NOTE(review): the limit check happens outside the lock, so
    ///    concurrent enqueuers may each see `len > limit` — this looks like a
    ///    deliberate best-effort bound rather than a strict one; confirm.
    pub fn enqueue(&self, key: K, future: F) -> Result<(), F::Error>
    where
        K: Clone,
    {
        {
            let _lock = self.flush_lock.lock();
            self.wait(&key)?;
            let previous = self
                .futures
                .write()
                .insert(key, future.into_future().shared());
            assert!(previous.is_none());
        }
        if self.futures.read().len() > self.limit {
            self.drain_while_above_limit(self.limit)?;
        }
        Ok(())
    }

    /// Remove a task from the queue
    ///
    /// Used to drop a future that has completed through some external path;
    /// `shift_remove` preserves the insertion order of the remaining entries.
    pub async fn mark_completed(&self, key: &K) {
        self.futures.write().shift_remove(key);
    }

    // Drives the OLDEST pending future (index 0 of the insertion-ordered map)
    // to completion, then removes it. Returns `None` when the queue is empty.
    //
    // The write guard from `get_index` is a temporary that is dropped at the
    // end of that statement, so the lock is NOT held across `block_on` —
    // only the cheap clone of the `Shared` handle escapes the guard.
    fn drain_any_future(&self) -> Option<Result<(), F::Error>> {
        let maybe_entry = self
            .futures
            .write()
            .get_index(0)
            .map(|(k, v)| (k.clone(), v.clone()));
        if let Some((k, v)) = maybe_entry {
            let ret = block_on(v);
            self.futures.write().shift_remove(&k);
            Some(ret)
        } else {
            None
        }
    }

    // Awaits the future registered under `key` (if any), then removes it.
    // Same lock discipline as `drain_any_future`: the guard is dropped before
    // the `.await`, so other tasks can touch the map while we wait.
    async fn drain_specific_future(&self, key: K) -> Option<Result<(), F::Error>> {
        let maybe_fut = self
            .futures
            .write()
            .get_key_value(&key)
            .map(|(k, v)| (k.clone(), v.clone()));
        if let Some((key, fut)) = maybe_fut {
            let ret = Some(fut.await);
            self.futures.write().shift_remove(&key);
            ret
        } else {
            None
        }
    }

    // Synchronously completes oldest futures until at most `limit` remain.
    // The first future error aborts the drain and is propagated via `?`.
    fn drain_while_above_limit(&self, limit: usize) -> Result<(), F::Error> {
        while self.futures.read().len() > limit {
            match self.drain_any_future() {
                None => break,
                Some(res) => res?,
            }
        }
        Ok(())
    }

    /// Flushes the queue.
    ///
    /// Holds `flush_lock` for the whole drain so no `enqueue` can add new
    /// work while the queue is being emptied (limit 0 = drain everything).
    pub fn flush(&self) -> Result<(), F::Error> {
        let _lock = self.flush_lock.lock();
        self.drain_while_above_limit(0)
    }

    /// Waits asynchronously for the given `key`.
    /// May flush the whole queue if a future returned an error beforehand.
    ///
    /// Returns `Ok(())` when no future is (or is no longer) registered for
    /// `key` — absence is treated as already completed.
    pub fn wait_async<'a, Q: Borrow<K> + 'a>(
        &'a self,
        key: Q,
    ) -> impl TryFuture<Ok = (), Error = F::Error> + 'a {
        async move {
            self.drain_specific_future(key.borrow().clone())
                .await
                .unwrap_or(Ok(()))
        }
    }

    /// Waits for the given `key`.
    /// May flush the whole queue if a future returned an error beforehand.
    ///
    /// Blocking wrapper around [`wait_async`]; used internally by `enqueue`.
    pub fn wait(&self, key: &K) -> Result<(), F::Error> {
        block_on(self.wait_async(key).into_future())
    }
}
27.251748
95
0.531691
d654e5f7c8aac23bb1c2f3048e384b8afc21b3a1
87
use std::path::PathBuf;

/// Runtime configuration.
///
/// `Clone`/`PartialEq`/`Eq` are derived in addition to `Debug` so the config
/// can be duplicated and compared in tests; this is purely additive and
/// backward compatible for existing callers.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Config {
    /// Path of the file this configuration refers to.
    pub file: PathBuf,
}
12.428571
23
0.655172
ac841c5e98345270b3225cc2eae54777f7778225
618
// Copyright 2018-2021 Cargill Incorporated // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. pub mod influx;
41.2
75
0.750809
8a30a3cb5feebf27fd79024218fb72ec4f062f8c
4,659
use std::fmt::Debug; use std::marker; use amethyst_core::ecs::{DispatcherBuilder, Entity, World}; use amethyst_error::Error; use amethyst_core::{SystemBundle}; use cgmath::{Basis2, Point2, Point3, Quaternion}; use collision::algorithm::broad_phase::{SweepAndPrune2, SweepAndPrune3}; use collision::dbvt::TreeValueWrapped; use collision::{Bound, ComputeBound, Contains, Discrete, Primitive, SurfaceArea, Union}; use rhusics_core::{BodyPose, Collider}; use rhusics_ecs::physics2d::{setup_dispatch_2d, GJK2}; use rhusics_ecs::physics3d::{setup_dispatch_3d, GJK3}; use rhusics_ecs::DeltaTime; use crate::default::{PoseTransformSyncSystem2, PoseTransformSyncSystem3}; /// Bundle for configuring 2D physics. /// /// ### Type parameters: /// /// - `P`: Collision primitive (see `collision::primitive` for more information) /// - `B`: Bounding volume (`Aabb2` for most scenarios) /// - `Y`: collision detection manager type (see `rhusics_core::Collider` for more information) pub struct PhysicsBundle2<P, B, Y> { m: marker::PhantomData<(P, B, Y)>, spatial: bool, } impl<P, B, Y> PhysicsBundle2<P, B, Y> { /// Create new bundle pub fn new() -> Self { Self { m: marker::PhantomData, spatial: false, } } /// Enable spatial sorting /// /// Cause rhusics to use `SpatialSortingSystem` and `SpatialCollisionSystem` instead of /// `BasicCollisionSystem` which is the default. 
pub fn with_spatial(mut self) -> Self { self.spatial = true; self } } impl<'a, 'b, P, B, Y> SystemBundle<'a, 'b> for PhysicsBundle2<P, B, Y> where P: Primitive<Point = Point2<f32>> + ComputeBound<B> + Send + Sync + 'static, B: Bound<Point = P::Point> + Clone + Discrete<B> + Union<B, Output = B> + Contains<B> + SurfaceArea<Scalar = f32> + Debug + Send + Sync + 'static, Y: Default + Collider + Send + Sync + 'static, { fn build(self, _world: &mut World, dispatcher: &mut DispatcherBuilder<'a, 'b>) -> Result<(), Error> { setup_dispatch_2d::< f32, P, BodyPose<Point2<f32>, Basis2<f32>>, B, TreeValueWrapped<Entity, B>, Y, _, _, DeltaTime<f32>, >( dispatcher, SweepAndPrune2::<f32, B>::new(), GJK2::new(), self.spatial, ); dispatcher.add( PoseTransformSyncSystem2::new(), "sync_system", &["physics_solver_system"], ); Ok(()) } } /// Bundle for configuring 3D physics, using the basic collision detection setup in rhusics. /// /// ### Type parameters: /// /// - `P`: Collision primitive (see `collision::primitive` for more information) /// - `B`: Bounding volume (`Aabb3` or `Sphere` for most scenarios) /// - `Y`: collision detection manager type (see `rhusics_core::Collider` for more information) pub struct PhysicsBundle3<P, B, Y> { m: marker::PhantomData<(P, B, Y)>, spatial: bool, } impl<P, B, Y> PhysicsBundle3<P, B, Y> { /// Create new bundle pub fn new() -> Self { Self { m: marker::PhantomData, spatial: false, } } /// Enable spatial sorting /// /// Cause rhusics to use `SpatialSortingSystem` and `SpatialCollisionSystem` instead of /// `BasicCollisionSystem` which is the default. 
pub fn with_spatial(mut self) -> Self { self.spatial = true; self } } impl<'a, 'b, P, B, Y> SystemBundle<'a, 'b> for PhysicsBundle3<P, B, Y> where P: Primitive<Point = Point3<f32>> + ComputeBound<B> + Send + Sync + 'static, B: Bound<Point = P::Point> + Clone + Discrete<B> + Union<B, Output = B> + Contains<B> + SurfaceArea<Scalar = f32> + Debug + Send + Sync + 'static, Y: Default + Collider + Send + Sync + 'static, { fn build(self, _world: &mut World, dispatcher: &mut DispatcherBuilder<'a, 'b>) -> Result<(), Error> { setup_dispatch_3d::< f32, P, BodyPose<Point3<f32>, Quaternion<f32>>, B, TreeValueWrapped<Entity, B>, Y, _, _, DeltaTime<f32>, >( dispatcher, SweepAndPrune3::<f32, B>::new(), GJK3::new(), self.spatial, ); dispatcher.add( PoseTransformSyncSystem3::new(), "sync_system", &["physics_solver_system"], ); Ok(()) } }
28.937888
105
0.564928
9c49f5d42b8e8005e7c31d609411eb1d38af3d32
2,707
// NOTE(review): this file follows the svd2rust generated-code layout
// (register reader struct `R`, field enum `_A`, field reader `_R`, register
// spec `_SPEC`); it is presumably machine-generated from an SVD description.
// If so, do not edit by hand — regenerate from the SVD instead. Confirm
// against the crate's build setup.
#[doc = "Register `MSGBOX_FIFO_STATUS_REG%s` reader"]
pub struct R(crate::R<MSGBOX_FIFO_STATUS_REG_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<MSGBOX_FIFO_STATUS_REG_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<MSGBOX_FIFO_STATUS_REG_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<MSGBOX_FIFO_STATUS_REG_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "FIFO Not Available Flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum FIFO_NOT_AVA_FLAG_A {
    #[doc = "0: The Message FIFO queue empty level reaches the configured threshold"]
    AVAILABLE = 0,
    #[doc = "1: The Message FIFO queue empty level does not reach the configured threshold"]
    NOT_AVAILABLE = 1,
}
impl From<FIFO_NOT_AVA_FLAG_A> for bool {
    #[inline(always)]
    fn from(variant: FIFO_NOT_AVA_FLAG_A) -> Self {
        // AVAILABLE (0) -> false, NOT_AVAILABLE (1) -> true
        variant as u8 != 0
    }
}
#[doc = "Field `FIFO_NOT_AVA_FLAG` reader - FIFO Not Available Flag"]
pub type FIFO_NOT_AVA_FLAG_R = crate::BitReader<FIFO_NOT_AVA_FLAG_A>;
impl FIFO_NOT_AVA_FLAG_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> FIFO_NOT_AVA_FLAG_A {
        match self.bits {
            false => FIFO_NOT_AVA_FLAG_A::AVAILABLE,
            true => FIFO_NOT_AVA_FLAG_A::NOT_AVAILABLE,
        }
    }
    #[doc = "Checks if the value of the field is `AVAILABLE`"]
    #[inline(always)]
    pub fn is_available(&self) -> bool {
        *self == FIFO_NOT_AVA_FLAG_A::AVAILABLE
    }
    #[doc = "Checks if the value of the field is `NOT_AVAILABLE`"]
    #[inline(always)]
    pub fn is_not_available(&self) -> bool {
        *self == FIFO_NOT_AVA_FLAG_A::NOT_AVAILABLE
    }
}
impl R {
    #[doc = "Bit 0 - FIFO Not Available Flag"]
    #[inline(always)]
    pub fn fifo_not_ava_flag(&self) -> FIFO_NOT_AVA_FLAG_R {
        // Field occupies bit 0 of the 32-bit register value.
        FIFO_NOT_AVA_FLAG_R::new((self.bits & 1) != 0)
    }
}
#[doc = "Message Box FIFO Status Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [msgbox_fifo_status_reg](index.html) module"]
pub struct MSGBOX_FIFO_STATUS_REG_SPEC;
impl crate::RegisterSpec for MSGBOX_FIFO_STATUS_REG_SPEC {
    // Underlying register width is 32 bits.
    type Ux = u32;
}
#[doc = "`read()` method returns [msgbox_fifo_status_reg::R](R) reader structure"]
impl crate::Readable for MSGBOX_FIFO_STATUS_REG_SPEC {
    type Reader = R;
}
#[doc = "`reset()` method sets MSGBOX_FIFO_STATUS_REG%s to value 0"]
impl crate::Resettable for MSGBOX_FIFO_STATUS_REG_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
36.093333
257
0.670484
7a3609a255f0822be802d6f7b3cb2917006d6d66
68,907
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. use datafusion::arrow::datatypes::{ DataType, Field, IntervalUnit, Schema, SchemaRef, TimeUnit, UnionMode, }; use datafusion::logical_expr::{BuiltInWindowFunction, BuiltinScalarFunction}; use datafusion::logical_plan::{JoinConstraint, Operator}; use datafusion::physical_plan::aggregates::AggregateFunction; use datafusion::prelude::JoinType; use datafusion::scalar::ScalarValue; use crate::error::PlanSerDeError; use crate::protobuf::scalar_type; // include the generated protobuf source as a submodule #[allow(clippy::all)] pub mod protobuf { include!(concat!(env!("OUT_DIR"), "/plan.protobuf.rs")); } pub mod error; pub mod from_proto; pub(crate) fn proto_error<S: Into<String>>(message: S) -> PlanSerDeError { PlanSerDeError::General(message.into()) } #[macro_export] macro_rules! convert_required { ($PB:expr) => {{ if let Some(field) = $PB.as_ref() { field.try_into() } else { Err(proto_error("Missing required field in protobuf")) } }}; } #[macro_export] macro_rules! into_required { ($PB:expr) => {{ if let Some(field) = $PB.as_ref() { Ok(field.into()) } else { Err(proto_error("Missing required field in protobuf")) } }}; } #[macro_export] macro_rules! 
convert_box_required { ($PB:expr) => {{ if let Some(field) = $PB.as_ref() { field.as_ref().try_into() } else { Err(proto_error("Missing required field in protobuf")) } }}; } pub(crate) fn from_proto_binary_op(op: &str) -> Result<Operator, PlanSerDeError> { match op { "And" => Ok(Operator::And), "Or" => Ok(Operator::Or), "Eq" => Ok(Operator::Eq), "NotEq" => Ok(Operator::NotEq), "LtEq" => Ok(Operator::LtEq), "Lt" => Ok(Operator::Lt), "Gt" => Ok(Operator::Gt), "GtEq" => Ok(Operator::GtEq), "Plus" => Ok(Operator::Plus), "Minus" => Ok(Operator::Minus), "Multiply" => Ok(Operator::Multiply), "Divide" => Ok(Operator::Divide), "Modulo" => Ok(Operator::Modulo), "Like" => Ok(Operator::Like), "NotLike" => Ok(Operator::NotLike), other => Err(proto_error(format!( "Unsupported binary operator '{:?}'", other ))), } } impl From<protobuf::AggregateFunction> for AggregateFunction { fn from(agg_fun: protobuf::AggregateFunction) -> AggregateFunction { match agg_fun { protobuf::AggregateFunction::Min => AggregateFunction::Min, protobuf::AggregateFunction::Max => AggregateFunction::Max, protobuf::AggregateFunction::Sum => AggregateFunction::Sum, protobuf::AggregateFunction::Avg => AggregateFunction::Avg, protobuf::AggregateFunction::Count => AggregateFunction::Count, protobuf::AggregateFunction::ApproxDistinct => { AggregateFunction::ApproxDistinct } protobuf::AggregateFunction::ArrayAgg => AggregateFunction::ArrayAgg, protobuf::AggregateFunction::Variance => AggregateFunction::Variance, protobuf::AggregateFunction::VariancePop => AggregateFunction::VariancePop, protobuf::AggregateFunction::Covariance => AggregateFunction::Covariance, protobuf::AggregateFunction::CovariancePop => { AggregateFunction::CovariancePop } protobuf::AggregateFunction::Stddev => AggregateFunction::Stddev, protobuf::AggregateFunction::StddevPop => AggregateFunction::StddevPop, protobuf::AggregateFunction::Correlation => AggregateFunction::Correlation, } } } impl From<protobuf::BuiltInWindowFunction> for 
BuiltInWindowFunction { fn from(built_in_function: protobuf::BuiltInWindowFunction) -> Self { match built_in_function { protobuf::BuiltInWindowFunction::RowNumber => { BuiltInWindowFunction::RowNumber } protobuf::BuiltInWindowFunction::Rank => BuiltInWindowFunction::Rank, protobuf::BuiltInWindowFunction::PercentRank => { BuiltInWindowFunction::PercentRank } protobuf::BuiltInWindowFunction::DenseRank => { BuiltInWindowFunction::DenseRank } protobuf::BuiltInWindowFunction::Lag => BuiltInWindowFunction::Lag, protobuf::BuiltInWindowFunction::Lead => BuiltInWindowFunction::Lead, protobuf::BuiltInWindowFunction::FirstValue => { BuiltInWindowFunction::FirstValue } protobuf::BuiltInWindowFunction::CumeDist => BuiltInWindowFunction::CumeDist, protobuf::BuiltInWindowFunction::Ntile => BuiltInWindowFunction::Ntile, protobuf::BuiltInWindowFunction::NthValue => BuiltInWindowFunction::NthValue, protobuf::BuiltInWindowFunction::LastValue => { BuiltInWindowFunction::LastValue } } } } impl protobuf::TimeUnit { pub fn from_arrow_time_unit(val: &TimeUnit) -> Self { match val { TimeUnit::Second => protobuf::TimeUnit::Second, TimeUnit::Millisecond => protobuf::TimeUnit::TimeMillisecond, TimeUnit::Microsecond => protobuf::TimeUnit::Microsecond, TimeUnit::Nanosecond => protobuf::TimeUnit::Nanosecond, } } pub fn from_i32_to_arrow(time_unit_i32: i32) -> Result<TimeUnit, PlanSerDeError> { let pb_time_unit = protobuf::TimeUnit::from_i32(time_unit_i32); match pb_time_unit { Some(time_unit) => Ok(match time_unit { protobuf::TimeUnit::Second => TimeUnit::Second, protobuf::TimeUnit::TimeMillisecond => TimeUnit::Millisecond, protobuf::TimeUnit::Microsecond => TimeUnit::Microsecond, protobuf::TimeUnit::Nanosecond => TimeUnit::Nanosecond, }), None => Err(proto_error( "Error converting i32 to TimeUnit: Passed invalid variant", )), } } } impl protobuf::IntervalUnit { pub fn from_arrow_interval_unit(interval_unit: &IntervalUnit) -> Self { match interval_unit { IntervalUnit::YearMonth => 
protobuf::IntervalUnit::YearMonth, IntervalUnit::DayTime => protobuf::IntervalUnit::DayTime, IntervalUnit::MonthDayNano => protobuf::IntervalUnit::MonthDayNano, } } pub fn from_i32_to_arrow( interval_unit_i32: i32, ) -> Result<IntervalUnit, PlanSerDeError> { let pb_interval_unit = protobuf::IntervalUnit::from_i32(interval_unit_i32); match pb_interval_unit { Some(interval_unit) => Ok(match interval_unit { protobuf::IntervalUnit::YearMonth => IntervalUnit::YearMonth, protobuf::IntervalUnit::DayTime => IntervalUnit::DayTime, protobuf::IntervalUnit::MonthDayNano => IntervalUnit::MonthDayNano, }), None => Err(proto_error( "Error converting i32 to DateUnit: Passed invalid variant", )), } } } impl TryInto<datafusion::arrow::datatypes::DataType> for &protobuf::arrow_type::ArrowTypeEnum { type Error = PlanSerDeError; fn try_into(self) -> Result<datafusion::arrow::datatypes::DataType, Self::Error> { use protobuf::arrow_type; Ok(match self { arrow_type::ArrowTypeEnum::None(_) => DataType::Null, arrow_type::ArrowTypeEnum::Bool(_) => DataType::Boolean, arrow_type::ArrowTypeEnum::Uint8(_) => DataType::UInt8, arrow_type::ArrowTypeEnum::Int8(_) => DataType::Int8, arrow_type::ArrowTypeEnum::Uint16(_) => DataType::UInt16, arrow_type::ArrowTypeEnum::Int16(_) => DataType::Int16, arrow_type::ArrowTypeEnum::Uint32(_) => DataType::UInt32, arrow_type::ArrowTypeEnum::Int32(_) => DataType::Int32, arrow_type::ArrowTypeEnum::Uint64(_) => DataType::UInt64, arrow_type::ArrowTypeEnum::Int64(_) => DataType::Int64, arrow_type::ArrowTypeEnum::Float16(_) => DataType::Float16, arrow_type::ArrowTypeEnum::Float32(_) => DataType::Float32, arrow_type::ArrowTypeEnum::Float64(_) => DataType::Float64, arrow_type::ArrowTypeEnum::Utf8(_) => DataType::Utf8, arrow_type::ArrowTypeEnum::LargeUtf8(_) => DataType::LargeUtf8, arrow_type::ArrowTypeEnum::Binary(_) => DataType::Binary, arrow_type::ArrowTypeEnum::FixedSizeBinary(size) => { DataType::FixedSizeBinary(*size) } arrow_type::ArrowTypeEnum::LargeBinary(_) => 
DataType::LargeBinary, arrow_type::ArrowTypeEnum::Date32(_) => DataType::Date32, arrow_type::ArrowTypeEnum::Date64(_) => DataType::Date64, arrow_type::ArrowTypeEnum::Duration(time_unit) => { DataType::Duration(protobuf::TimeUnit::from_i32_to_arrow(*time_unit)?) } arrow_type::ArrowTypeEnum::Timestamp(protobuf::Timestamp { time_unit, timezone, }) => DataType::Timestamp( protobuf::TimeUnit::from_i32_to_arrow(*time_unit)?, match timezone.len() { 0 => None, _ => Some(timezone.to_owned()), }, ), arrow_type::ArrowTypeEnum::Time32(time_unit) => { DataType::Time32(protobuf::TimeUnit::from_i32_to_arrow(*time_unit)?) } arrow_type::ArrowTypeEnum::Time64(time_unit) => { DataType::Time64(protobuf::TimeUnit::from_i32_to_arrow(*time_unit)?) } arrow_type::ArrowTypeEnum::Interval(interval_unit) => DataType::Interval( protobuf::IntervalUnit::from_i32_to_arrow(*interval_unit)?, ), arrow_type::ArrowTypeEnum::Decimal(protobuf::Decimal { whole, fractional, }) => DataType::Decimal(*whole as usize, *fractional as usize), arrow_type::ArrowTypeEnum::List(list) => { let list_type: &protobuf::Field = list .as_ref() .field_type .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: List message missing required field 'field_type'"))? .as_ref(); DataType::List(Box::new(list_type.try_into()?)) } arrow_type::ArrowTypeEnum::LargeList(list) => { let list_type: &protobuf::Field = list .as_ref() .field_type .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: List message missing required field 'field_type'"))? .as_ref(); DataType::LargeList(Box::new(list_type.try_into()?)) } arrow_type::ArrowTypeEnum::FixedSizeList(list) => { let list_type: &protobuf::Field = list .as_ref() .field_type .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: List message missing required field 'field_type'"))? 
.as_ref(); let list_size = list.list_size; DataType::FixedSizeList(Box::new(list_type.try_into()?), list_size) } arrow_type::ArrowTypeEnum::Struct(strct) => DataType::Struct( strct .sub_field_types .iter() .map(|field| field.try_into()) .collect::<Result<Vec<_>, _>>()?, ), arrow_type::ArrowTypeEnum::Union(union) => { let union_mode = protobuf::UnionMode::from_i32(union.union_mode) .ok_or_else(|| { proto_error( "Protobuf deserialization error: Unknown union mode type", ) })?; let union_mode = match union_mode { protobuf::UnionMode::Dense => UnionMode::Dense, protobuf::UnionMode::Sparse => UnionMode::Sparse, }; let union_types = union .union_types .iter() .map(|field| field.try_into()) .collect::<Result<Vec<_>, _>>()?; DataType::Union(union_types, union_mode) } arrow_type::ArrowTypeEnum::Dictionary(dict) => { let pb_key_datatype = dict .as_ref() .key .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: Dictionary message missing required field 'key'"))?; let pb_value_datatype = dict .as_ref() .value .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: Dictionary message missing required field 'key'"))?; let key_datatype: DataType = pb_key_datatype.as_ref().try_into()?; let value_datatype: DataType = pb_value_datatype.as_ref().try_into()?; DataType::Dictionary(Box::new(key_datatype), Box::new(value_datatype)) } }) } } #[allow(clippy::from_over_into)] impl Into<datafusion::arrow::datatypes::DataType> for protobuf::PrimitiveScalarType { fn into(self) -> datafusion::arrow::datatypes::DataType { match self { protobuf::PrimitiveScalarType::Bool => DataType::Boolean, protobuf::PrimitiveScalarType::Uint8 => DataType::UInt8, protobuf::PrimitiveScalarType::Int8 => DataType::Int8, protobuf::PrimitiveScalarType::Uint16 => DataType::UInt16, protobuf::PrimitiveScalarType::Int16 => DataType::Int16, protobuf::PrimitiveScalarType::Uint32 => DataType::UInt32, protobuf::PrimitiveScalarType::Int32 => DataType::Int32, 
protobuf::PrimitiveScalarType::Uint64 => DataType::UInt64, protobuf::PrimitiveScalarType::Int64 => DataType::Int64, protobuf::PrimitiveScalarType::Float32 => DataType::Float32, protobuf::PrimitiveScalarType::Float64 => DataType::Float64, protobuf::PrimitiveScalarType::Utf8 => DataType::Utf8, protobuf::PrimitiveScalarType::LargeUtf8 => DataType::LargeUtf8, protobuf::PrimitiveScalarType::Date32 => DataType::Date32, protobuf::PrimitiveScalarType::TimeMicrosecond => { DataType::Time64(TimeUnit::Microsecond) } protobuf::PrimitiveScalarType::TimeNanosecond => { DataType::Time64(TimeUnit::Nanosecond) } protobuf::PrimitiveScalarType::Null => DataType::Null, } } } impl From<protobuf::JoinType> for JoinType { fn from(t: protobuf::JoinType) -> Self { match t { protobuf::JoinType::Inner => JoinType::Inner, protobuf::JoinType::Left => JoinType::Left, protobuf::JoinType::Right => JoinType::Right, protobuf::JoinType::Full => JoinType::Full, protobuf::JoinType::Semi => JoinType::Semi, protobuf::JoinType::Anti => JoinType::Anti, } } } impl From<JoinType> for protobuf::JoinType { fn from(t: JoinType) -> Self { match t { JoinType::Inner => protobuf::JoinType::Inner, JoinType::Left => protobuf::JoinType::Left, JoinType::Right => protobuf::JoinType::Right, JoinType::Full => protobuf::JoinType::Full, JoinType::Semi => protobuf::JoinType::Semi, JoinType::Anti => protobuf::JoinType::Anti, } } } impl From<protobuf::JoinConstraint> for JoinConstraint { fn from(t: protobuf::JoinConstraint) -> Self { match t { protobuf::JoinConstraint::On => JoinConstraint::On, protobuf::JoinConstraint::Using => JoinConstraint::Using, } } } impl From<JoinConstraint> for protobuf::JoinConstraint { fn from(t: JoinConstraint) -> Self { match t { JoinConstraint::On => protobuf::JoinConstraint::On, JoinConstraint::Using => protobuf::JoinConstraint::Using, } } } impl TryFrom<&DataType> for protobuf::ScalarType { type Error = PlanSerDeError; fn try_from(value: &DataType) -> Result<Self, Self::Error> { let datatype = 
protobuf::scalar_type::Datatype::try_from(value)?; Ok(protobuf::ScalarType { datatype: Some(datatype), }) } } impl TryFrom<&DataType> for protobuf::scalar_type::Datatype { type Error = PlanSerDeError; fn try_from(val: &DataType) -> Result<Self, Self::Error> { use protobuf::PrimitiveScalarType; let scalar_value = match val { DataType::Boolean => scalar_type::Datatype::Scalar(PrimitiveScalarType::Bool as i32), DataType::Int8 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Int8 as i32), DataType::Int16 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Int16 as i32), DataType::Int32 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Int32 as i32), DataType::Int64 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Int64 as i32), DataType::UInt8 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Uint8 as i32), DataType::UInt16 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Uint16 as i32), DataType::UInt32 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Uint32 as i32), DataType::UInt64 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Uint64 as i32), DataType::Float32 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Float32 as i32), DataType::Float64 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Float64 as i32), DataType::Date32 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Date32 as i32), DataType::Time64(time_unit) => match time_unit { TimeUnit::Microsecond => scalar_type::Datatype::Scalar(PrimitiveScalarType::TimeMicrosecond as i32), TimeUnit::Nanosecond => scalar_type::Datatype::Scalar(PrimitiveScalarType::TimeNanosecond as i32), _ => { return Err(proto_error(format!( "Found invalid time unit for scalar value, only TimeUnit::Microsecond and TimeUnit::Nanosecond are valid time units: {:?}", time_unit ))) } }, DataType::Utf8 => scalar_type::Datatype::Scalar(PrimitiveScalarType::Utf8 as i32), DataType::LargeUtf8 => scalar_type::Datatype::Scalar(PrimitiveScalarType::LargeUtf8 as i32), DataType::List(field_type) => { 
let mut field_names: Vec<String> = Vec::new(); let mut curr_field = field_type.as_ref(); field_names.push(curr_field.name().to_owned()); //For each nested field check nested datatype, since datafusion scalars only support recursive lists with a leaf scalar type // any other compound types are errors. while let DataType::List(nested_field_type) = curr_field.data_type() { curr_field = nested_field_type.as_ref(); field_names.push(curr_field.name().to_owned()); if !is_valid_scalar_type_no_list_check(curr_field.data_type()) { return Err(proto_error(format!("{:?} is an invalid scalar type", curr_field))); } } let deepest_datatype = curr_field.data_type(); if !is_valid_scalar_type_no_list_check(deepest_datatype) { return Err(proto_error(format!("The list nested type {:?} is an invalid scalar type", curr_field))); } let pb_deepest_type: PrimitiveScalarType = match deepest_datatype { DataType::Boolean => PrimitiveScalarType::Bool, DataType::Int8 => PrimitiveScalarType::Int8, DataType::Int16 => PrimitiveScalarType::Int16, DataType::Int32 => PrimitiveScalarType::Int32, DataType::Int64 => PrimitiveScalarType::Int64, DataType::UInt8 => PrimitiveScalarType::Uint8, DataType::UInt16 => PrimitiveScalarType::Uint16, DataType::UInt32 => PrimitiveScalarType::Uint32, DataType::UInt64 => PrimitiveScalarType::Uint64, DataType::Float32 => PrimitiveScalarType::Float32, DataType::Float64 => PrimitiveScalarType::Float64, DataType::Date32 => PrimitiveScalarType::Date32, DataType::Time64(time_unit) => match time_unit { TimeUnit::Microsecond => PrimitiveScalarType::TimeMicrosecond, TimeUnit::Nanosecond => PrimitiveScalarType::TimeNanosecond, _ => { return Err(proto_error(format!( "Found invalid time unit for scalar value, only TimeUnit::Microsecond and TimeUnit::Nanosecond are valid time units: {:?}", time_unit ))) } }, DataType::Utf8 => PrimitiveScalarType::Utf8, DataType::LargeUtf8 => PrimitiveScalarType::LargeUtf8, _ => { return Err(proto_error(format!( "Error converting to Datatype to 
scalar type, {:?} is invalid as a datafusion scalar.", val ))) } }; protobuf::scalar_type::Datatype::List(protobuf::ScalarListType { field_names, deepest_type: pb_deepest_type as i32, }) } DataType::Null | DataType::Float16 | DataType::Timestamp(_, _) | DataType::Date64 | DataType::Time32(_) | DataType::Duration(_) | DataType::Interval(_) | DataType::Binary | DataType::FixedSizeBinary(_) | DataType::LargeBinary | DataType::FixedSizeList(_, _) | DataType::LargeList(_) | DataType::Struct(_) | DataType::Union(_, _) | DataType::Dictionary(_, _) | DataType::Map(_, _) | DataType::Decimal(_, _) => { return Err(proto_error(format!( "Error converting to Datatype to scalar type, {:?} is invalid as a datafusion scalar.", val ))) } }; Ok(scalar_value) } } //Does not check if list subtypes are valid fn is_valid_scalar_type_no_list_check(datatype: &DataType) -> bool { match datatype { DataType::Boolean | DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 | DataType::UInt8 | DataType::UInt16 | DataType::UInt32 | DataType::UInt64 | DataType::Float32 | DataType::Float64 | DataType::LargeUtf8 | DataType::Utf8 | DataType::Date32 => true, DataType::Time64(time_unit) => { matches!(time_unit, TimeUnit::Microsecond | TimeUnit::Nanosecond) } DataType::List(_) => true, _ => false, } } impl TryFrom<&datafusion::scalar::ScalarValue> for protobuf::ScalarValue { type Error = PlanSerDeError; fn try_from( val: &datafusion::scalar::ScalarValue, ) -> Result<protobuf::ScalarValue, Self::Error> { use datafusion::scalar; use protobuf::scalar_value::Value; use protobuf::PrimitiveScalarType; let scalar_val = match val { scalar::ScalarValue::Boolean(val) => { create_proto_scalar(val, PrimitiveScalarType::Bool, |s| Value::BoolValue(*s)) } scalar::ScalarValue::Float32(val) => { create_proto_scalar(val, PrimitiveScalarType::Float32, |s| { Value::Float32Value(*s) }) } scalar::ScalarValue::Float64(val) => { create_proto_scalar(val, PrimitiveScalarType::Float64, |s| { Value::Float64Value(*s) 
}) } scalar::ScalarValue::Int8(val) => { create_proto_scalar(val, PrimitiveScalarType::Int8, |s| { Value::Int8Value(*s as i32) }) } scalar::ScalarValue::Int16(val) => { create_proto_scalar(val, PrimitiveScalarType::Int16, |s| { Value::Int16Value(*s as i32) }) } scalar::ScalarValue::Int32(val) => { create_proto_scalar(val, PrimitiveScalarType::Int32, |s| Value::Int32Value(*s)) } scalar::ScalarValue::Int64(val) => { create_proto_scalar(val, PrimitiveScalarType::Int64, |s| Value::Int64Value(*s)) } scalar::ScalarValue::UInt8(val) => { create_proto_scalar(val, PrimitiveScalarType::Uint8, |s| { Value::Uint8Value(*s as u32) }) } scalar::ScalarValue::UInt16(val) => { create_proto_scalar(val, PrimitiveScalarType::Uint16, |s| { Value::Uint16Value(*s as u32) }) } scalar::ScalarValue::UInt32(val) => { create_proto_scalar(val, PrimitiveScalarType::Uint32, |s| Value::Uint32Value(*s)) } scalar::ScalarValue::UInt64(val) => { create_proto_scalar(val, PrimitiveScalarType::Uint64, |s| Value::Uint64Value(*s)) } scalar::ScalarValue::Utf8(val) => { create_proto_scalar(val, PrimitiveScalarType::Utf8, |s| { Value::Utf8Value(s.to_owned()) }) } scalar::ScalarValue::LargeUtf8(val) => { create_proto_scalar(val, PrimitiveScalarType::LargeUtf8, |s| { Value::LargeUtf8Value(s.to_owned()) }) } scalar::ScalarValue::List(value, datatype) => { match value { Some(values) => { if values.is_empty() { protobuf::ScalarValue { value: Some(protobuf::scalar_value::Value::ListValue( protobuf::ScalarListValue { datatype: Some(datatype.as_ref().try_into()?), values: Vec::new(), }, )), } } else { let scalar_type = match datatype.as_ref() { DataType::List(field) => field.as_ref().data_type(), _ => todo!("Proper error handling"), }; let type_checked_values: Vec<protobuf::ScalarValue> = values .iter() .map(|scalar| match (scalar, scalar_type) { (scalar::ScalarValue::List(_, list_type), DataType::List(field)) => { if let DataType::List(list_field) = list_type.as_ref() { let scalar_datatype = field.data_type(); let 
list_datatype = list_field.data_type(); if std::mem::discriminant(list_datatype) != std::mem::discriminant(scalar_datatype) { return Err(proto_error(format!( "Protobuf serialization error: Lists with inconsistent typing {:?} and {:?} found within list", list_datatype, scalar_datatype ))); } scalar.try_into() } else { Err(proto_error(format!( "Protobuf serialization error, {:?} was inconsistent with designated type {:?}", scalar, datatype ))) } } (scalar::ScalarValue::Boolean(_), DataType::Boolean) => scalar.try_into(), (scalar::ScalarValue::Float32(_), DataType::Float32) => scalar.try_into(), (scalar::ScalarValue::Float64(_), DataType::Float64) => scalar.try_into(), (scalar::ScalarValue::Int8(_), DataType::Int8) => scalar.try_into(), (scalar::ScalarValue::Int16(_), DataType::Int16) => scalar.try_into(), (scalar::ScalarValue::Int32(_), DataType::Int32) => scalar.try_into(), (scalar::ScalarValue::Int64(_), DataType::Int64) => scalar.try_into(), (scalar::ScalarValue::UInt8(_), DataType::UInt8) => scalar.try_into(), (scalar::ScalarValue::UInt16(_), DataType::UInt16) => scalar.try_into(), (scalar::ScalarValue::UInt32(_), DataType::UInt32) => scalar.try_into(), (scalar::ScalarValue::UInt64(_), DataType::UInt64) => scalar.try_into(), (scalar::ScalarValue::Utf8(_), DataType::Utf8) => scalar.try_into(), (scalar::ScalarValue::LargeUtf8(_), DataType::LargeUtf8) => scalar.try_into(), _ => Err(proto_error(format!( "Protobuf serialization error, {:?} was inconsistent with designated type {:?}", scalar, datatype ))), }) .collect::<Result<Vec<_>, _>>()?; protobuf::ScalarValue { value: Some(protobuf::scalar_value::Value::ListValue( protobuf::ScalarListValue { datatype: Some(datatype.as_ref().try_into()?), values: type_checked_values, }, )), } } } None => protobuf::ScalarValue { value: Some(protobuf::scalar_value::Value::NullListValue( datatype.as_ref().try_into()?, )), }, } } datafusion::scalar::ScalarValue::Date32(val) => { create_proto_scalar(val, PrimitiveScalarType::Date32, |s| 
Value::Date32Value(*s)) } datafusion::scalar::ScalarValue::TimestampMicrosecond(val, _) => { create_proto_scalar(val, PrimitiveScalarType::TimeMicrosecond, |s| { Value::TimeMicrosecondValue(*s) }) } datafusion::scalar::ScalarValue::TimestampNanosecond(val, _) => { create_proto_scalar(val, PrimitiveScalarType::TimeNanosecond, |s| { Value::TimeNanosecondValue(*s) }) } _ => { return Err(proto_error(format!( "Error converting to Datatype to scalar type, {:?} is invalid as a datafusion scalar.", val ))) } }; Ok(scalar_val) } } impl From<&Field> for protobuf::Field { fn from(field: &Field) -> Self { protobuf::Field { name: field.name().to_owned(), arrow_type: Some(Box::new(field.data_type().into())), nullable: field.is_nullable(), children: Vec::new(), } } } impl From<&DataType> for protobuf::ArrowType { fn from(val: &DataType) -> protobuf::ArrowType { protobuf::ArrowType { arrow_type_enum: Some(val.into()), } } } impl From<&DataType> for protobuf::arrow_type::ArrowTypeEnum { fn from(val: &DataType) -> protobuf::arrow_type::ArrowTypeEnum { use protobuf::arrow_type::ArrowTypeEnum; use protobuf::EmptyMessage; match val { DataType::Null => ArrowTypeEnum::None(EmptyMessage {}), DataType::Boolean => ArrowTypeEnum::Bool(EmptyMessage {}), DataType::Int8 => ArrowTypeEnum::Int8(EmptyMessage {}), DataType::Int16 => ArrowTypeEnum::Int16(EmptyMessage {}), DataType::Int32 => ArrowTypeEnum::Int32(EmptyMessage {}), DataType::Int64 => ArrowTypeEnum::Int64(EmptyMessage {}), DataType::UInt8 => ArrowTypeEnum::Uint8(EmptyMessage {}), DataType::UInt16 => ArrowTypeEnum::Uint16(EmptyMessage {}), DataType::UInt32 => ArrowTypeEnum::Uint32(EmptyMessage {}), DataType::UInt64 => ArrowTypeEnum::Uint64(EmptyMessage {}), DataType::Float16 => ArrowTypeEnum::Float16(EmptyMessage {}), DataType::Float32 => ArrowTypeEnum::Float32(EmptyMessage {}), DataType::Float64 => ArrowTypeEnum::Float64(EmptyMessage {}), DataType::Timestamp(time_unit, timezone) => { ArrowTypeEnum::Timestamp(protobuf::Timestamp { 
time_unit: protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32, timezone: timezone.to_owned().unwrap_or_default(), }) } DataType::Date32 => ArrowTypeEnum::Date32(EmptyMessage {}), DataType::Date64 => ArrowTypeEnum::Date64(EmptyMessage {}), DataType::Time32(time_unit) => ArrowTypeEnum::Time32( protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32, ), DataType::Time64(time_unit) => ArrowTypeEnum::Time64( protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32, ), DataType::Duration(time_unit) => ArrowTypeEnum::Duration( protobuf::TimeUnit::from_arrow_time_unit(time_unit) as i32, ), DataType::Interval(interval_unit) => ArrowTypeEnum::Interval( protobuf::IntervalUnit::from_arrow_interval_unit(interval_unit) as i32, ), DataType::Binary => ArrowTypeEnum::Binary(EmptyMessage {}), DataType::FixedSizeBinary(size) => ArrowTypeEnum::FixedSizeBinary(*size), DataType::LargeBinary => ArrowTypeEnum::LargeBinary(EmptyMessage {}), DataType::Utf8 => ArrowTypeEnum::Utf8(EmptyMessage {}), DataType::LargeUtf8 => ArrowTypeEnum::LargeUtf8(EmptyMessage {}), DataType::List(item_type) => ArrowTypeEnum::List(Box::new(protobuf::List { field_type: Some(Box::new(item_type.as_ref().into())), })), DataType::FixedSizeList(item_type, size) => { ArrowTypeEnum::FixedSizeList(Box::new(protobuf::FixedSizeList { field_type: Some(Box::new(item_type.as_ref().into())), list_size: *size, })) } DataType::LargeList(item_type) => { ArrowTypeEnum::LargeList(Box::new(protobuf::List { field_type: Some(Box::new(item_type.as_ref().into())), })) } DataType::Struct(struct_fields) => ArrowTypeEnum::Struct(protobuf::Struct { sub_field_types: struct_fields .iter() .map(|field| field.into()) .collect::<Vec<_>>(), }), DataType::Union(union_types, union_mode) => { let union_mode = match union_mode { UnionMode::Sparse => protobuf::UnionMode::Sparse, UnionMode::Dense => protobuf::UnionMode::Dense, }; ArrowTypeEnum::Union(protobuf::Union { union_types: union_types .iter() .map(|field| field.into()) 
.collect::<Vec<_>>(), union_mode: union_mode.into(), }) } DataType::Dictionary(key_type, value_type) => { ArrowTypeEnum::Dictionary(Box::new(protobuf::Dictionary { key: Some(Box::new(key_type.as_ref().into())), value: Some(Box::new(value_type.as_ref().into())), })) } DataType::Decimal(whole, fractional) => { ArrowTypeEnum::Decimal(protobuf::Decimal { whole: *whole as u64, fractional: *fractional as u64, }) } DataType::Map(_, _) => { unimplemented!("Ballista does not yet support Map data type") } } } } impl TryInto<DataType> for &protobuf::ArrowType { type Error = PlanSerDeError; fn try_into(self) -> Result<DataType, Self::Error> { let pb_arrow_type = self.arrow_type_enum.as_ref().ok_or_else(|| { proto_error( "Protobuf deserialization error: ArrowType missing required field 'data_type'", ) })?; pb_arrow_type.try_into() } } impl TryInto<DataType> for &Box<protobuf::List> { type Error = PlanSerDeError; fn try_into(self) -> Result<DataType, Self::Error> { let list_ref = self.as_ref(); match &list_ref.field_type { Some(pb_field) => { let pb_field_ref = pb_field.as_ref(); let arrow_field: Field = pb_field_ref.try_into()?; Ok(DataType::List(Box::new(arrow_field))) } None => Err(proto_error( "List message missing required field 'field_type'", )), } } } fn create_proto_scalar<I, T: FnOnce(&I) -> protobuf::scalar_value::Value>( v: &Option<I>, null_arrow_type: protobuf::PrimitiveScalarType, constructor: T, ) -> protobuf::ScalarValue { protobuf::ScalarValue { value: Some(v.as_ref().map(constructor).unwrap_or( protobuf::scalar_value::Value::NullValue(null_arrow_type as i32), )), } } #[allow(clippy::from_over_into)] impl Into<protobuf::Schema> for &Schema { fn into(self) -> protobuf::Schema { protobuf::Schema { columns: self .fields() .iter() .map(protobuf::Field::from) .collect::<Vec<_>>(), } } } #[allow(clippy::from_over_into)] impl Into<protobuf::Schema> for SchemaRef { fn into(self) -> protobuf::Schema { protobuf::Schema { columns: self .fields() .iter() 
.map(protobuf::Field::from) .collect::<Vec<_>>(), } } } impl TryFrom<&BuiltinScalarFunction> for protobuf::ScalarFunction { type Error = PlanSerDeError; fn try_from(scalar: &BuiltinScalarFunction) -> Result<Self, Self::Error> { let scalar_function = match scalar { BuiltinScalarFunction::Sqrt => Self::Sqrt, BuiltinScalarFunction::Sin => Self::Sin, BuiltinScalarFunction::Cos => Self::Cos, BuiltinScalarFunction::Tan => Self::Tan, BuiltinScalarFunction::Asin => Self::Asin, BuiltinScalarFunction::Acos => Self::Acos, BuiltinScalarFunction::Atan => Self::Atan, BuiltinScalarFunction::Exp => Self::Exp, BuiltinScalarFunction::Log => Self::Log, BuiltinScalarFunction::Ln => Self::Ln, BuiltinScalarFunction::Log10 => Self::Log10, BuiltinScalarFunction::Floor => Self::Floor, BuiltinScalarFunction::Ceil => Self::Ceil, BuiltinScalarFunction::Round => Self::Round, BuiltinScalarFunction::Trunc => Self::Trunc, BuiltinScalarFunction::Abs => Self::Abs, BuiltinScalarFunction::OctetLength => Self::OctetLength, BuiltinScalarFunction::Concat => Self::Concat, BuiltinScalarFunction::Lower => Self::Lower, BuiltinScalarFunction::Upper => Self::Upper, BuiltinScalarFunction::Trim => Self::Trim, BuiltinScalarFunction::Ltrim => Self::Ltrim, BuiltinScalarFunction::Rtrim => Self::Rtrim, BuiltinScalarFunction::ToTimestamp => Self::ToTimestamp, BuiltinScalarFunction::Array => Self::Array, BuiltinScalarFunction::NullIf => Self::NullIf, BuiltinScalarFunction::DatePart => Self::DatePart, BuiltinScalarFunction::DateTrunc => Self::DateTrunc, BuiltinScalarFunction::MD5 => Self::Md5, BuiltinScalarFunction::SHA224 => Self::Sha224, BuiltinScalarFunction::SHA256 => Self::Sha256, BuiltinScalarFunction::SHA384 => Self::Sha384, BuiltinScalarFunction::SHA512 => Self::Sha512, BuiltinScalarFunction::Digest => Self::Digest, BuiltinScalarFunction::ToTimestampMillis => Self::ToTimestampMillis, BuiltinScalarFunction::Log2 => Self::Log2, BuiltinScalarFunction::Signum => Self::Signum, BuiltinScalarFunction::Ascii => 
Self::Ascii, BuiltinScalarFunction::BitLength => Self::BitLength, BuiltinScalarFunction::Btrim => Self::Btrim, BuiltinScalarFunction::CharacterLength => Self::CharacterLength, BuiltinScalarFunction::Chr => Self::Chr, BuiltinScalarFunction::ConcatWithSeparator => Self::ConcatWithSeparator, BuiltinScalarFunction::InitCap => Self::InitCap, BuiltinScalarFunction::Left => Self::Left, BuiltinScalarFunction::Lpad => Self::Lpad, BuiltinScalarFunction::Random => Self::Random, BuiltinScalarFunction::RegexpReplace => Self::RegexpReplace, BuiltinScalarFunction::Repeat => Self::Repeat, BuiltinScalarFunction::Replace => Self::Replace, BuiltinScalarFunction::Reverse => Self::Reverse, BuiltinScalarFunction::Right => Self::Right, BuiltinScalarFunction::Rpad => Self::Rpad, BuiltinScalarFunction::SplitPart => Self::SplitPart, BuiltinScalarFunction::StartsWith => Self::StartsWith, BuiltinScalarFunction::Strpos => Self::Strpos, BuiltinScalarFunction::Substr => Self::Substr, BuiltinScalarFunction::ToHex => Self::ToHex, BuiltinScalarFunction::ToTimestampMicros => Self::ToTimestampMicros, BuiltinScalarFunction::ToTimestampSeconds => Self::ToTimestampSeconds, BuiltinScalarFunction::Now => Self::Now, BuiltinScalarFunction::Translate => Self::Translate, BuiltinScalarFunction::RegexpMatch => Self::RegexpMatch, BuiltinScalarFunction::Coalesce => Self::Coalesce, BuiltinScalarFunction::Power | BuiltinScalarFunction::Struct => todo!(), }; Ok(scalar_function) } } impl TryInto<Field> for &protobuf::Field { type Error = PlanSerDeError; fn try_into(self) -> Result<Field, Self::Error> { let pb_datatype = self.arrow_type.as_ref().ok_or_else(|| { proto_error( "Protobuf deserialization error: Field message missing required field 'arrow_type'", ) })?; Ok(Field::new( self.name.as_str(), pb_datatype.as_ref().try_into()?, self.nullable, )) } } impl TryInto<Schema> for &protobuf::Schema { type Error = PlanSerDeError; fn try_into(self) -> Result<Schema, PlanSerDeError> { let fields = self .columns .iter() 
.map(|c| { let pb_arrow_type_res = c .arrow_type .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: Field message was missing required field 'arrow_type'")); let pb_arrow_type: &protobuf::ArrowType = match pb_arrow_type_res { Ok(res) => res, Err(e) => return Err(e), }; Ok(Field::new(&c.name, pb_arrow_type.try_into()?, c.nullable)) }) .collect::<Result<Vec<_>, _>>()?; Ok(Schema::new(fields)) } } impl TryInto<datafusion::scalar::ScalarValue> for &protobuf::ScalarValue { type Error = PlanSerDeError; fn try_into(self) -> Result<datafusion::scalar::ScalarValue, Self::Error> { let value = self.value.as_ref().ok_or_else(|| { proto_error("Protobuf deserialization error: missing required field 'value'") })?; Ok(match value { protobuf::scalar_value::Value::BoolValue(v) => ScalarValue::Boolean(Some(*v)), protobuf::scalar_value::Value::Utf8Value(v) => { ScalarValue::Utf8(Some(v.to_owned())) } protobuf::scalar_value::Value::LargeUtf8Value(v) => { ScalarValue::LargeUtf8(Some(v.to_owned())) } protobuf::scalar_value::Value::Int8Value(v) => { ScalarValue::Int8(Some(*v as i8)) } protobuf::scalar_value::Value::Int16Value(v) => { ScalarValue::Int16(Some(*v as i16)) } protobuf::scalar_value::Value::Int32Value(v) => ScalarValue::Int32(Some(*v)), protobuf::scalar_value::Value::Int64Value(v) => ScalarValue::Int64(Some(*v)), protobuf::scalar_value::Value::Uint8Value(v) => { ScalarValue::UInt8(Some(*v as u8)) } protobuf::scalar_value::Value::Uint16Value(v) => { ScalarValue::UInt16(Some(*v as u16)) } protobuf::scalar_value::Value::Uint32Value(v) => { ScalarValue::UInt32(Some(*v)) } protobuf::scalar_value::Value::Uint64Value(v) => { ScalarValue::UInt64(Some(*v)) } protobuf::scalar_value::Value::Float32Value(v) => { ScalarValue::Float32(Some(*v)) } protobuf::scalar_value::Value::Float64Value(v) => { ScalarValue::Float64(Some(*v)) } protobuf::scalar_value::Value::Date32Value(v) => { ScalarValue::Date32(Some(*v)) } protobuf::scalar_value::Value::TimeMicrosecondValue(v) => { 
ScalarValue::TimestampMicrosecond(Some(*v), None) } protobuf::scalar_value::Value::TimeNanosecondValue(v) => { ScalarValue::TimestampNanosecond(Some(*v), None) } protobuf::scalar_value::Value::DecimalValue(v) => { let decimal = v.decimal.as_ref().unwrap(); ScalarValue::Decimal128( Some(v.long_value as i128), decimal.whole as usize, decimal.fractional as usize, ) } protobuf::scalar_value::Value::ListValue(scalar_list) => { let protobuf::ScalarListValue { values, datatype: opt_scalar_type, } = &scalar_list; let pb_scalar_type = opt_scalar_type .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization err: ScalaListValue missing required field 'datatype'"))?; let typechecked_values: Vec<ScalarValue> = values .iter() .map(|val| val.try_into()) .collect::<Result<Vec<_>, _>>()?; let scalar_type: DataType = pb_scalar_type.try_into()?; let scalar_type = Box::new(scalar_type); ScalarValue::List(Some(Box::new(typechecked_values)), scalar_type) } protobuf::scalar_value::Value::NullListValue(v) => { let pb_datatype = v .datatype .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: NullListValue message missing required field 'datatyp'"))?; let pb_datatype = Box::new(pb_datatype.try_into()?); ScalarValue::List(None, pb_datatype) } protobuf::scalar_value::Value::NullValue(v) => { let null_type_enum = protobuf::PrimitiveScalarType::from_i32(*v) .ok_or_else(|| proto_error("Protobuf deserialization error found invalid enum variant for DatafusionScalar"))?; null_type_enum.try_into()? 
} }) } } impl TryInto<DataType> for &protobuf::ScalarType { type Error = PlanSerDeError; fn try_into(self) -> Result<DataType, Self::Error> { let pb_scalartype = self.datatype.as_ref().ok_or_else(|| { proto_error("ScalarType message missing required field 'datatype'") })?; pb_scalartype.try_into() } } impl TryInto<DataType> for &protobuf::scalar_type::Datatype { type Error = PlanSerDeError; fn try_into(self) -> Result<DataType, Self::Error> { use protobuf::scalar_type::Datatype; Ok(match self { Datatype::Scalar(scalar_type) => { let pb_scalar_enum = protobuf::PrimitiveScalarType::from_i32(*scalar_type).ok_or_else(|| { proto_error(format!( "Protobuf deserialization error, scalar_type::Datatype missing was provided invalid enum variant: {}", *scalar_type )) })?; pb_scalar_enum.into() } Datatype::List(protobuf::ScalarListType { deepest_type, field_names, }) => { if field_names.is_empty() { return Err(proto_error( "Protobuf deserialization error: found no field names in ScalarListType message which requires at least one", )); } let pb_scalar_type = protobuf::PrimitiveScalarType::from_i32( *deepest_type, ) .ok_or_else(|| { proto_error(format!( "Protobuf deserialization error: invalid i32 for scalar enum: {}", *deepest_type )) })?; //Because length is checked above it is safe to unwrap .last() let mut scalar_type = DataType::List(Box::new(Field::new( field_names.last().unwrap().as_str(), pb_scalar_type.into(), true, ))); //Iterate over field names in reverse order except for the last item in the vector for name in field_names.iter().rev().skip(1) { let new_datatype = DataType::List(Box::new(Field::new( name.as_str(), scalar_type, true, ))); scalar_type = new_datatype; } scalar_type } }) } } impl TryInto<datafusion::scalar::ScalarValue> for &protobuf::scalar_value::Value { type Error = PlanSerDeError; fn try_into(self) -> Result<datafusion::scalar::ScalarValue, Self::Error> { use protobuf::PrimitiveScalarType; let scalar = match self { 
protobuf::scalar_value::Value::BoolValue(v) => ScalarValue::Boolean(Some(*v)), protobuf::scalar_value::Value::Utf8Value(v) => { ScalarValue::Utf8(Some(v.to_owned())) } protobuf::scalar_value::Value::LargeUtf8Value(v) => { ScalarValue::LargeUtf8(Some(v.to_owned())) } protobuf::scalar_value::Value::Int8Value(v) => { ScalarValue::Int8(Some(*v as i8)) } protobuf::scalar_value::Value::Int16Value(v) => { ScalarValue::Int16(Some(*v as i16)) } protobuf::scalar_value::Value::Int32Value(v) => ScalarValue::Int32(Some(*v)), protobuf::scalar_value::Value::Int64Value(v) => ScalarValue::Int64(Some(*v)), protobuf::scalar_value::Value::Uint8Value(v) => { ScalarValue::UInt8(Some(*v as u8)) } protobuf::scalar_value::Value::Uint16Value(v) => { ScalarValue::UInt16(Some(*v as u16)) } protobuf::scalar_value::Value::Uint32Value(v) => { ScalarValue::UInt32(Some(*v)) } protobuf::scalar_value::Value::Uint64Value(v) => { ScalarValue::UInt64(Some(*v)) } protobuf::scalar_value::Value::Float32Value(v) => { ScalarValue::Float32(Some(*v)) } protobuf::scalar_value::Value::Float64Value(v) => { ScalarValue::Float64(Some(*v)) } protobuf::scalar_value::Value::Date32Value(v) => { ScalarValue::Date32(Some(*v)) } protobuf::scalar_value::Value::TimeMicrosecondValue(v) => { ScalarValue::TimestampMicrosecond(Some(*v), None) } protobuf::scalar_value::Value::TimeNanosecondValue(v) => { ScalarValue::TimestampNanosecond(Some(*v), None) } protobuf::scalar_value::Value::ListValue(v) => v.try_into()?, protobuf::scalar_value::Value::NullListValue(v) => { ScalarValue::List(None, Box::new(v.try_into()?)) } protobuf::scalar_value::Value::NullValue(null_enum) => { PrimitiveScalarType::from_i32(*null_enum) .ok_or_else(|| proto_error("Invalid scalar type"))? .try_into()? 
} protobuf::scalar_value::Value::DecimalValue(v) => { let decimal = v.decimal.as_ref().unwrap(); ScalarValue::Decimal128( Some(v.long_value as i128), decimal.whole as usize, decimal.fractional as usize, ) } }; Ok(scalar) } } impl TryInto<datafusion::scalar::ScalarValue> for &protobuf::ScalarListValue { type Error = PlanSerDeError; fn try_into(self) -> Result<datafusion::scalar::ScalarValue, Self::Error> { use protobuf::scalar_type::Datatype; use protobuf::PrimitiveScalarType; let protobuf::ScalarListValue { datatype, values } = self; let pb_scalar_type = datatype .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: ScalarListValue messsage missing required field 'datatype'"))?; let scalar_type = pb_scalar_type .datatype .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: ScalarListValue.Datatype messsage missing required field 'datatype'"))?; let scalar_values = match scalar_type { Datatype::Scalar(scalar_type_i32) => { let leaf_scalar_type = protobuf::PrimitiveScalarType::from_i32(*scalar_type_i32) .ok_or_else(|| { proto_error("Error converting i32 to basic scalar type") })?; let typechecked_values: Vec<datafusion::scalar::ScalarValue> = values .iter() .map(|protobuf::ScalarValue { value: opt_value }| { let value = opt_value.as_ref().ok_or_else(|| { proto_error( "Protobuf deserialization error: missing required field 'value'", ) })?; typechecked_scalar_value_conversion(value, leaf_scalar_type) }) .collect::<Result<Vec<_>, _>>()?; datafusion::scalar::ScalarValue::List( Some(Box::new(typechecked_values)), Box::new(leaf_scalar_type.into()), ) } Datatype::List(list_type) => { let protobuf::ScalarListType { deepest_type, field_names, } = &list_type; let leaf_type = PrimitiveScalarType::from_i32(*deepest_type).ok_or_else(|| { proto_error("Error converting i32 to basic scalar type") })?; let depth = field_names.len(); let typechecked_values: Vec<datafusion::scalar::ScalarValue> = if depth == 0 { return Err(proto_error( "Protobuf 
deserialization error, ScalarListType had no field names, requires at least one", )); } else if depth == 1 { values .iter() .map(|protobuf::ScalarValue { value: opt_value }| { let value = opt_value .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: missing required field 'value'"))?; typechecked_scalar_value_conversion(value, leaf_type) }) .collect::<Result<Vec<_>, _>>()? } else { values .iter() .map(|protobuf::ScalarValue { value: opt_value }| { let value = opt_value .as_ref() .ok_or_else(|| proto_error("Protobuf deserialization error: missing required field 'value'"))?; value.try_into() }) .collect::<Result<Vec<_>, _>>()? }; datafusion::scalar::ScalarValue::List( match typechecked_values.len() { 0 => None, _ => Some(Box::new(typechecked_values)), }, Box::new(list_type.try_into()?), ) } }; Ok(scalar_values) } } impl TryInto<DataType> for &protobuf::ScalarListType { type Error = PlanSerDeError; fn try_into(self) -> Result<DataType, Self::Error> { use protobuf::PrimitiveScalarType; let protobuf::ScalarListType { deepest_type, field_names, } = self; let depth = field_names.len(); if depth == 0 { return Err(proto_error( "Protobuf deserialization error: Found a ScalarListType message with no field names, at least one is required", )); } let mut curr_type = DataType::List(Box::new(Field::new( //Since checked vector is not empty above this is safe to unwrap field_names.last().unwrap(), PrimitiveScalarType::from_i32(*deepest_type) .ok_or_else(|| { proto_error("Could not convert to datafusion scalar type") })? 
.into(), true, ))); //Iterates over field names in reverse order except for the last item in the vector for name in field_names.iter().rev().skip(1) { let temp_curr_type = DataType::List(Box::new(Field::new(name, curr_type, true))); curr_type = temp_curr_type; } Ok(curr_type) } } //Does not typecheck lists fn typechecked_scalar_value_conversion( tested_type: &protobuf::scalar_value::Value, required_type: protobuf::PrimitiveScalarType, ) -> Result<datafusion::scalar::ScalarValue, PlanSerDeError> { use protobuf::scalar_value::Value; use protobuf::PrimitiveScalarType; Ok(match (tested_type, &required_type) { (Value::BoolValue(v), PrimitiveScalarType::Bool) => { ScalarValue::Boolean(Some(*v)) } (Value::Int8Value(v), PrimitiveScalarType::Int8) => { ScalarValue::Int8(Some(*v as i8)) } (Value::Int16Value(v), PrimitiveScalarType::Int16) => { ScalarValue::Int16(Some(*v as i16)) } (Value::Int32Value(v), PrimitiveScalarType::Int32) => { ScalarValue::Int32(Some(*v)) } (Value::Int64Value(v), PrimitiveScalarType::Int64) => { ScalarValue::Int64(Some(*v)) } (Value::Uint8Value(v), PrimitiveScalarType::Uint8) => { ScalarValue::UInt8(Some(*v as u8)) } (Value::Uint16Value(v), PrimitiveScalarType::Uint16) => { ScalarValue::UInt16(Some(*v as u16)) } (Value::Uint32Value(v), PrimitiveScalarType::Uint32) => { ScalarValue::UInt32(Some(*v)) } (Value::Uint64Value(v), PrimitiveScalarType::Uint64) => { ScalarValue::UInt64(Some(*v)) } (Value::Float32Value(v), PrimitiveScalarType::Float32) => { ScalarValue::Float32(Some(*v)) } (Value::Float64Value(v), PrimitiveScalarType::Float64) => { ScalarValue::Float64(Some(*v)) } (Value::Date32Value(v), PrimitiveScalarType::Date32) => { ScalarValue::Date32(Some(*v)) } (Value::TimeMicrosecondValue(v), PrimitiveScalarType::TimeMicrosecond) => { ScalarValue::TimestampMicrosecond(Some(*v), None) } (Value::TimeNanosecondValue(v), PrimitiveScalarType::TimeMicrosecond) => { ScalarValue::TimestampNanosecond(Some(*v), None) } (Value::Utf8Value(v), 
PrimitiveScalarType::Utf8) => { ScalarValue::Utf8(Some(v.to_owned())) } (Value::LargeUtf8Value(v), PrimitiveScalarType::LargeUtf8) => { ScalarValue::LargeUtf8(Some(v.to_owned())) } (Value::NullValue(i32_enum), required_scalar_type) => { if *i32_enum == *required_scalar_type as i32 { let pb_scalar_type = PrimitiveScalarType::from_i32(*i32_enum).ok_or_else(|| { PlanSerDeError::General(format!( "Invalid i32_enum={} when converting with PrimitiveScalarType::from_i32()", *i32_enum )) })?; let scalar_value: ScalarValue = match pb_scalar_type { PrimitiveScalarType::Bool => ScalarValue::Boolean(None), PrimitiveScalarType::Uint8 => ScalarValue::UInt8(None), PrimitiveScalarType::Int8 => ScalarValue::Int8(None), PrimitiveScalarType::Uint16 => ScalarValue::UInt16(None), PrimitiveScalarType::Int16 => ScalarValue::Int16(None), PrimitiveScalarType::Uint32 => ScalarValue::UInt32(None), PrimitiveScalarType::Int32 => ScalarValue::Int32(None), PrimitiveScalarType::Uint64 => ScalarValue::UInt64(None), PrimitiveScalarType::Int64 => ScalarValue::Int64(None), PrimitiveScalarType::Float32 => ScalarValue::Float32(None), PrimitiveScalarType::Float64 => ScalarValue::Float64(None), PrimitiveScalarType::Utf8 => ScalarValue::Utf8(None), PrimitiveScalarType::LargeUtf8 => ScalarValue::LargeUtf8(None), PrimitiveScalarType::Date32 => ScalarValue::Date32(None), PrimitiveScalarType::TimeMicrosecond => { ScalarValue::TimestampMicrosecond(None, None) } PrimitiveScalarType::TimeNanosecond => { ScalarValue::TimestampNanosecond(None, None) } PrimitiveScalarType::Null => { return Err(proto_error( "Untyped scalar null is not a valid scalar value", )) } }; scalar_value } else { return Err(proto_error("Could not convert to the proper type")); } } _ => return Err(proto_error("Could not convert to the proper type")), }) } impl TryInto<datafusion::scalar::ScalarValue> for protobuf::PrimitiveScalarType { type Error = PlanSerDeError; fn try_into(self) -> Result<datafusion::scalar::ScalarValue, Self::Error> { 
Ok(match self { protobuf::PrimitiveScalarType::Null => { return Err(proto_error("Untyped null is an invalid scalar value")) } protobuf::PrimitiveScalarType::Bool => ScalarValue::Boolean(None), protobuf::PrimitiveScalarType::Uint8 => ScalarValue::UInt8(None), protobuf::PrimitiveScalarType::Int8 => ScalarValue::Int8(None), protobuf::PrimitiveScalarType::Uint16 => ScalarValue::UInt16(None), protobuf::PrimitiveScalarType::Int16 => ScalarValue::Int16(None), protobuf::PrimitiveScalarType::Uint32 => ScalarValue::UInt32(None), protobuf::PrimitiveScalarType::Int32 => ScalarValue::Int32(None), protobuf::PrimitiveScalarType::Uint64 => ScalarValue::UInt64(None), protobuf::PrimitiveScalarType::Int64 => ScalarValue::Int64(None), protobuf::PrimitiveScalarType::Float32 => ScalarValue::Float32(None), protobuf::PrimitiveScalarType::Float64 => ScalarValue::Float64(None), protobuf::PrimitiveScalarType::Utf8 => ScalarValue::Utf8(None), protobuf::PrimitiveScalarType::LargeUtf8 => ScalarValue::LargeUtf8(None), protobuf::PrimitiveScalarType::Date32 => ScalarValue::Date32(None), protobuf::PrimitiveScalarType::TimeMicrosecond => { ScalarValue::TimestampMicrosecond(None, None) } protobuf::PrimitiveScalarType::TimeNanosecond => { ScalarValue::TimestampNanosecond(None, None) } }) } } fn str_to_byte(s: &str) -> Result<u8, PlanSerDeError> { if s.len() != 1 { return Err(PlanSerDeError::General("Invalid CSV delimiter".to_owned())); } Ok(s.as_bytes()[0]) }
46.12249
155
0.544096
89e4305551399a394eaafa6d854690a8d3f377c4
1,707
use crate::Renderer; pub(crate) struct DepthBufferTexture { pub depth_format: wgpu::TextureFormat, pub view: wgpu::TextureView, pub sampler: wgpu::Sampler, } impl DepthBufferTexture { pub(crate) fn new( device: &wgpu::Device, sc_desc: &wgpu::SwapChainDescriptor, name: &str, ) -> Self { let depth_format = wgpu::TextureFormat::Depth32Float; let size = wgpu::Extent3d { width: sc_desc.width, height: sc_desc.height, depth: 1, }; let depth_texture = device.create_texture(&wgpu::TextureDescriptor { label: Some(name), size, mip_level_count: 1, sample_count: 1, dimension: wgpu::TextureDimension::D2, format: depth_format, usage: wgpu::TextureUsage::RENDER_ATTACHMENT | wgpu::TextureUsage::SAMPLED, }); let view = depth_texture.create_view(&wgpu::TextureViewDescriptor::default()); let sampler = device.create_sampler(&wgpu::SamplerDescriptor { label: Some(name), address_mode_u: wgpu::AddressMode::ClampToEdge, address_mode_v: wgpu::AddressMode::ClampToEdge, address_mode_w: wgpu::AddressMode::ClampToEdge, mag_filter: wgpu::FilterMode::Linear, min_filter: wgpu::FilterMode::Linear, mipmap_filter: wgpu::FilterMode::Nearest, lod_min_clamp: -100.0, lod_max_clamp: 100.0, compare: Some(wgpu::CompareFunction::LessEqual), ..Default::default() }); Self { depth_format, view, sampler, } } }
30.482143
87
0.578793
ef3df64aef1315b18f2ad6d2723ffe3bdfa6425f
16,153
// Copyright 2018 (c) rust-themis developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Secure Comparator protocol. //! //! **Secure Comparator** is an interactive protocol for two parties that compares whether they //! share the same secret or not. It is built around a [Zero Knowledge Proof][ZKP]-based protocol //! ([Socialist Millionaire’s Protocol][SMP]), with a number of [security enhancements][paper]. //! //! Secure Comparator is transport-agnostic and only requires the user(s) to pass messages //! in a certain sequence. The protocol itself is ingrained into the functions and requires //! minimal integration efforts from the developer. //! //! [ZKP]: https://www.cossacklabs.com/zero-knowledge-protocols-without-magic.html //! [SMP]: https://en.wikipedia.org/wiki/Socialist_millionaires //! [paper]: https://www.cossacklabs.com/files/secure-comparator-paper-rev12.pdf //! //! # Examples //! //! Secure Comparator has two parties — called the client and the server — the only difference //! between them is that the client is the one who initiates the comparison. //! //! Before initiating the protocol both parties should [append their secrets] to be compared. //! This can be done incrementally so even multi-gigabyte data sets can be compared with ease. //! //! [append their secrets]: struct.SecureComparator.html#method.append_secret //! //! ``` //! # fn main() -> Result<(), themis::Error> { //! use themis::secure_comparator::SecureComparator; //! //! 
let mut comparison = SecureComparator::new(); //! //! comparison.append_secret(b"999-04-1234")?; //! # Ok(()) //! # } //! ``` //! //! After that the client [initiates the comparison] and runs a loop like this: //! //! [initiates the comparison]: struct.SecureComparator.html#method.begin_compare //! //! ``` //! # fn main() -> Result<(), themis::Error> { //! # use std::cell::RefCell; //! # //! # use themis::secure_comparator::SecureComparator; //! # //! # let mut comparison = SecureComparator::new(); //! # let mut other_peer = SecureComparator::new(); //! # //! # comparison.append_secret(b"999-04-1234").expect("append client"); //! # other_peer.append_secret(b"999-04-1234").expect("append server"); //! # //! # let peer_data = RefCell::new(None); //! # let mut send = |data: &[u8]| { //! # let reply = other_peer.proceed_compare(data).expect("server comparison"); //! # peer_data.replace(Some(reply)); //! # }; //! # let mut receive = || { //! # peer_data.borrow_mut().take().expect("reply data") //! # }; //! # //! let mut request = comparison.begin_compare()?; //! //! while !comparison.is_complete() { //! send(&request); // This function should send the `request` to the server. //! let reply = receive(); // This function should receive a `reply` from the server. //! //! request = comparison.proceed_compare(&reply)?; //! } //! //! if !comparison.result()? { //! unimplemented!("handle failed comparison here"); //! } //! # Ok(()) //! # } //! ``` //! //! While the server does almost the same: //! //! ``` //! # fn main() -> Result<(), themis::Error> { //! # use std::cell::RefCell; //! # //! # use themis::secure_comparator::SecureComparator; //! # //! # let mut comparison = SecureComparator::new(); //! # let mut other_peer = SecureComparator::new(); //! # //! # comparison.append_secret(b"999-04-1234").expect("append server"); //! # other_peer.append_secret(b"999-04-1234").expect("append client"); //! # let request = other_peer.begin_compare().expect("begin client"); //! # //! 
# let peer_data = RefCell::new(Some(request)); //! # let mut send = |data: &[u8]| { //! # let reply = other_peer.proceed_compare(data).expect("server comparison"); //! # peer_data.replace(Some(reply)); //! # }; //! # let mut receive = || { //! # peer_data.borrow_mut().take().expect("reply data") //! # }; //! # //! while !comparison.is_complete() { //! // This function should receive a `request` from the client. //! let request = receive(); //! //! let reply = comparison.proceed_compare(&request)?; //! //! send(&reply); // This function should send the `reply` to the client. //! } //! //! if !comparison.result()? { //! unimplemented!("handle failed comparison here"); //! } //! # Ok(()) //! # } //! ``` //! //! Both the server and the client use [`result`] to get the comparison result //! after it [`is_complete`]: //! //! [`result`]: struct.SecureComparator.html#method.result //! [`is_complete`]: struct.SecureComparator.html#method.is_complete use std::os::raw::c_void; use std::ptr; use bindings::{ secure_comparator_append_secret, secure_comparator_begin_compare, secure_comparator_create, secure_comparator_destroy, secure_comparator_get_result, secure_comparator_proceed_compare, secure_comparator_t, }; use crate::error::{Error, ErrorKind, Result}; use crate::utils::into_raw_parts; /// Secure Comparison context. /// /// Please see [module-level documentation][secure_comparator] for examples. /// /// [secure_comparator]: index.html #[derive(Debug)] pub struct SecureComparator { comp_ctx: *mut secure_comparator_t, } // It safe to move secure_comparator_t to another thread, it does not depend on any thread-local // state. However, it needs external synchronization for safe concurrent usage (hence no Sync). unsafe impl Send for SecureComparator {} impl SecureComparator { /// Prepares a new comparison. /// /// # Panics /// /// May panic on internal unrecoverable errors (e.g., out-of-memory). 
pub fn new() -> Self {
    // Delegate to the fallible constructor and treat failure as fatal:
    // creation only fails on unrecoverable internal errors.
    SecureComparator::try_new()
        .unwrap_or_else(|e| panic!("secure_comparator_create() failed: {}", e))
}

/// Prepares for a new comparison.
fn try_new() -> Result<Self> {
    let comp_ctx = unsafe { secure_comparator_create() };
    // This function is most likely to fail on memory allocation. Some internal
    // errors in the crypto library are also possible, but unlikely. We have no
    // way to find out which happened, so report it as an allocation failure.
    if comp_ctx.is_null() {
        Err(Error::with_kind(ErrorKind::NoMemory))
    } else {
        Ok(Self { comp_ctx })
    }
}

/// Collects the data to be compared.
///
/// Note that there is no way to remove data. If even a single byte is mismatched by the peers
/// then the comparison will always return `false`. In this case you will need to recreate
/// a `SecureComparator` to make a new comparison.
///
/// You can use this method only before the comparison has been started. That is,
/// [`append_secret`] is safe to call only before [`begin_compare`] or [`proceed_compare`].
/// It will fail with an error if you try to append more data when you’re in the middle of
/// a comparison or after it has been completed.
///
/// [`append_secret`]: struct.SecureComparator.html#method.append_secret
/// [`begin_compare`]: struct.SecureComparator.html#method.begin_compare
/// [`proceed_compare`]: struct.SecureComparator.html#method.proceed_compare
///
/// # Examples
///
/// You can pass in anything convertible into a byte slice: a byte slice or an array,
/// a `Vec<u8>`, or a `String`.
///
/// ```
/// # fn main() -> Result<(), themis::Error> {
/// use themis::secure_comparator::SecureComparator;
///
/// let mut comparison = SecureComparator::new();
///
/// comparison.append_secret(b"byte string")?;
/// comparison.append_secret(&[1, 2, 3, 4, 5])?;
/// comparison.append_secret(vec![6, 7, 8, 9])?;
/// comparison.append_secret(format!("owned string"))?;
/// # Ok(())
/// # }
/// ```
pub fn append_secret(&mut self, secret: impl AsRef<[u8]>) -> Result<()> {
    let (ptr, len) = into_raw_parts(secret.as_ref());

    // SAFETY: `ptr`/`len` describe a byte slice borrowed from `secret`, which
    // stays alive for the duration of the call, and `comp_ctx` is a valid
    // context produced by `try_new`.
    let status =
        unsafe { secure_comparator_append_secret(self.comp_ctx, ptr as *const c_void, len) };

    let error = Error::from_compare_status(status);
    match error.kind() {
        ErrorKind::Success => Ok(()),
        _ => Err(error),
    }
}

/// Starts comparison on the client returning the first message.
///
/// This method should be called by the client which initiates the comparison. Make sure you
/// have appended all the data you need before you call this method.
///
/// The resulting message should be transferred to the remote peer and passed to the
/// [`proceed_compare`] of its `SecureComparator`. The remote peer should have also appended
/// all the data by this point.
///
/// [`proceed_compare`]: struct.SecureComparator.html#method.proceed_compare
///
/// # Examples
///
/// Please see [module-level documentation][secure_comparator] for examples.
///
/// [secure_comparator]: index.html
pub fn begin_compare(&mut self) -> Result<Vec<u8>> {
    let mut compare_data = Vec::new();
    let mut compare_data_len = 0;
    // First pass: call with a null buffer to query the required output size.
    // The backend reports BufferTooSmall and writes the needed length into
    // `compare_data_len`; any other status is a real error.
    unsafe {
        let status = secure_comparator_begin_compare(
            self.comp_ctx,
            ptr::null_mut(),
            &mut compare_data_len,
        );
        let error = Error::from_compare_status(status);
        if error.kind() != ErrorKind::BufferTooSmall {
            return Err(error);
        }
    }
    compare_data.reserve(compare_data_len);
    // Second pass: the backend fills the (reserved but uninitialized) buffer.
    // CompareSendOutputToPeer is the expected "success" status here, meaning
    // the produced message must be forwarded to the remote peer.
    unsafe {
        let status = secure_comparator_begin_compare(
            self.comp_ctx,
            compare_data.as_mut_ptr() as *mut c_void,
            &mut compare_data_len,
        );
        let error = Error::from_compare_status(status);
        if error.kind() != ErrorKind::CompareSendOutputToPeer {
            return Err(error);
        }
        // `set_len` is sound only because the FFI call initialized exactly
        // `compare_data_len` bytes, which must fit in the reserved capacity.
        debug_assert!(compare_data_len <= compare_data.capacity());
        compare_data.set_len(compare_data_len);
    }
    Ok(compare_data)
}

/// Continues comparison process with given message.
///
/// This method should be called by the responding server with a message received from the
/// client. It returns another message which should be passed back to the client and put
/// into its [`proceed_compare`] method (that is, this method again). The client then should
/// do the same. The process repeats at both sides until [`is_complete`] signals that the
/// comparison is complete.
///
/// Both peers should have appended all the compared data before using this method, and no
/// additional data may be appended while the comparison is underway.
///
/// [`proceed_compare`]: struct.SecureComparator.html#method.proceed_compare
/// [`is_complete`]: struct.SecureComparator.html#method.is_complete
///
/// # Examples
///
/// Please see [module-level documentation][secure_comparator] for examples.
///
/// [secure_comparator]: index.html
pub fn proceed_compare(&mut self, peer_data: impl AsRef<[u8]>) -> Result<Vec<u8>> {
    let (peer_compare_data_ptr, peer_compare_data_len) = into_raw_parts(peer_data.as_ref());
    let mut compare_data = Vec::new();
    let mut compare_data_len = 0;
    // First pass: null output buffer to query the required reply size;
    // BufferTooSmall is the expected status for this probe.
    unsafe {
        let status = secure_comparator_proceed_compare(
            self.comp_ctx,
            peer_compare_data_ptr as *const c_void,
            peer_compare_data_len,
            ptr::null_mut(),
            &mut compare_data_len,
        );
        let error = Error::from_compare_status(status);
        if error.kind() != ErrorKind::BufferTooSmall {
            return Err(error);
        }
    }
    compare_data.reserve(compare_data_len);
    // Second pass: produce the actual reply. Two statuses are acceptable:
    // CompareSendOutputToPeer (protocol still running, forward the reply)
    // and Success (this was the final step; the reply may be empty).
    unsafe {
        let status = secure_comparator_proceed_compare(
            self.comp_ctx,
            peer_compare_data_ptr as *const c_void,
            peer_compare_data_len,
            compare_data.as_mut_ptr() as *mut c_void,
            &mut compare_data_len,
        );
        let error = Error::from_compare_status(status);
        match error.kind() {
            ErrorKind::CompareSendOutputToPeer => {}
            ErrorKind::Success => {}
            _ => {
                return Err(error);
            }
        }
        // Sound only because the FFI call initialized `compare_data_len`
        // bytes within the reserved capacity.
        debug_assert!(compare_data_len <= compare_data.capacity());
        compare_data.set_len(compare_data_len);
    }
    Ok(compare_data)
}

/// Checks if this comparison is complete.
///
/// Comparison that failed irrecoverably due to an error is also considered complete.
///
/// # Examples
///
/// Typically you would use this method to terminate the comparison loop. Please see
/// [module-level documentation][secure_comparator] for examples.
///
/// [secure_comparator]: index.html
///
/// It is safe to call this method at any point, even if the comparison has not been initiated
/// yet (in which case it is obviously not complete):
///
/// ```
/// use themis::secure_comparator::SecureComparator;
///
/// let mut comparison = SecureComparator::new();
///
/// assert!(!comparison.is_complete());
/// ```
pub fn is_complete(&self) -> bool {
    // "Not ready" is the only status that means the protocol is still in
    // progress; every other outcome (match, no-match, hard error) is final.
    match self.result() {
        Err(ref e) if e.kind() == ErrorKind::CompareNotReady => false,
        _ => true,
    }
}

/// Returns the result of comparison.
///
/// Let it be a surprise: `true` if data has been found equal on both peers, `false` otherwise.
/// Or an error if you call this method too early, or if a real error has happened during the
/// comparison.
///
/// # Examples
///
/// You should call this method only after the comparison is complete.
///
/// ```
/// # fn main() -> Result<(), themis::Error> {
/// use themis::secure_comparator::SecureComparator;
/// #
/// # use std::cell::RefCell;
///
/// let mut comparison = SecureComparator::new();
/// # let mut other_peer = SecureComparator::new();
///
/// comparison.append_secret(b"999-04-1234")?;
/// # other_peer.append_secret(b"999-04-1234")?;
///
/// assert!(comparison.result().is_err());
///
/// // Perform comparison
/// #
/// # let mut request = comparison.begin_compare()?;
/// #
/// while !comparison.is_complete() {
///     // ...
/// #     let reply = other_peer.proceed_compare(&request)?;
/// #     request = comparison.proceed_compare(&reply)?;
/// }
///
/// assert!(comparison.result().is_ok());
/// # Ok(())
/// # }
/// ```
pub fn result(&self) -> Result<bool> {
    let status = unsafe { secure_comparator_get_result(self.comp_ctx) };
    let error = Error::from_match_status(status);
    // Both "match" and "no match" are successful completions of the
    // protocol; everything else (including "not ready") surfaces as Err.
    match error.kind() {
        ErrorKind::CompareMatch => Ok(true),
        ErrorKind::CompareNoMatch => Ok(false),
        _ => Err(error),
    }
}
}

impl Default for SecureComparator {
    fn default() -> Self {
        SecureComparator::new()
    }
}

#[doc(hidden)]
impl Drop for SecureComparator {
    fn drop(&mut self) {
        unsafe {
            let status = secure_comparator_destroy(self.comp_ctx);
            let error = Error::from_themis_status(status);
            // Destruction failures are swallowed in release builds (panicking
            // in Drop is dangerous) but surfaced loudly under test.
            // NOTE(review): `cfg!(debug)` checks for a custom `--cfg debug`
            // flag, not debug builds — `cfg!(debug_assertions)` may have been
            // intended. Confirm whether the build passes `--cfg debug`.
            if (cfg!(debug) || cfg!(test)) && error.kind() != ErrorKind::Success {
                panic!("secure_comparator_destroy() failed: {}", error);
            }
        }
    }
}
35.895556
99
0.614251
d9084da10a71860ecdacf4fa0c8dc2572d077e42
1,265
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// <p>GetEntitlements retrieves entitlement values for a given product. The results can be /// filtered based on customer identifier or product dimensions.</p> #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetEntitlements { _private: (), } impl GetEntitlements { /// Creates a new builder-style object to manufacture [`GetEntitlementsInput`](crate::input::GetEntitlementsInput) pub fn builder() -> crate::input::get_entitlements_input::Builder { crate::input::get_entitlements_input::Builder::default() } pub fn new() -> Self { Self { _private: () } } } impl smithy_http::response::ParseStrictResponse for GetEntitlements { type Output = std::result::Result< crate::output::GetEntitlementsOutput, crate::error::GetEntitlementsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_entitlements_error(response) } else { crate::operation_deser::parse_get_entitlements_response(response) } } }
42.166667
118
0.684585
3ac250c1d3b7e9c392f3d6e359999824e4723fc5
19,347
use std::borrow::Cow; use std::cmp::Ordering; use std::collections::HashMap; use prelude::*; use nom_sql::OrderType; #[derive(Clone, Serialize, Deserialize)] struct Order(Vec<(usize, OrderType)>); impl Order { fn cmp(&self, a: &[DataType], b: &[DataType]) -> Ordering { for &(c, ref order_type) in &self.0 { let result = match *order_type { OrderType::OrderAscending => a[c].cmp(&b[c]), OrderType::OrderDescending => b[c].cmp(&a[c]), }; if result != Ordering::Equal { return result; } } Ordering::Equal } } impl From<Vec<(usize, OrderType)>> for Order { fn from(other: Vec<(usize, OrderType)>) -> Self { Order(other) } } /// TopK provides an operator that will produce the top k elements for each group. /// /// Positives are generally fast to process, while negative records can trigger expensive backwards /// queries. It is also worth noting that due the nature of Soup, the results of this operator are /// unordered. #[derive(Clone, Serialize, Deserialize)] pub struct TopK { src: IndexPair, // some cache state us: Option<IndexPair>, cols: usize, // precomputed datastructures group_by: Vec<usize>, order: Order, k: usize, } impl TopK { /// Construct a new TopK operator. /// /// `src` is this operator's ancestor, `over` is the column to compute the top K over, /// `group_by` indicates the columns that this operator is keyed on, and k is the maximum number /// of results per group. pub fn new( src: NodeIndex, order: Vec<(usize, OrderType)>, group_by: Vec<usize>, k: usize, ) -> Self { let mut group_by = group_by; group_by.sort(); TopK { src: src.into(), us: None, cols: 0, group_by, order: order.into(), k, } } } impl Ingredient for TopK { fn take(&mut self) -> NodeOperator { // Necessary because cmp_rows can't be cloned. 
Self { src: self.src, us: self.us, cols: self.cols, group_by: self.group_by.clone(), order: self.order.clone(), k: self.k, } .into() } fn ancestors(&self) -> Vec<NodeIndex> { vec![self.src.as_global()] } fn on_connected(&mut self, g: &Graph) { let srcn = &g[self.src.as_global()]; self.cols = srcn.fields().len(); } fn on_commit(&mut self, us: NodeIndex, remap: &HashMap<NodeIndex, IndexPair>) { // who's our parent really? self.src.remap(remap); // who are we? self.us = Some(remap[&us]); } #[allow(clippy::cognitive_complexity)] fn on_input( &mut self, _: &mut dyn Executor, from: LocalNodeIndex, rs: Records, _: &mut Tracer, replay_key_cols: Option<&[usize]>, _: &DomainNodes, state: &StateMap, ) -> ProcessingResult { debug_assert_eq!(from, *self.src); if rs.is_empty() { return ProcessingResult { results: rs, ..Default::default() }; } let group_by = &self.group_by; let group_cmp = |a: &Record, b: &Record| { group_by .iter() .map(|&col| &a[col]) .cmp(group_by.iter().map(|&col| &b[col])) }; // First, we want to be smart about multiple added/removed rows with same group. // For example, if we get a -, then a +, for the same group, we don't want to // execute two queries. We'll do this by sorting the batch by our group by. let mut rs: Vec<_> = rs.into(); rs.sort_by(&group_cmp); let us = self.us.unwrap(); let db = state .get(*us) .expect("topk operators must have their own state materialized"); let mut out = Vec::new(); let mut grp = Vec::new(); let mut grpk = 0; let mut missed = false; // current holds (Cow<Row>, bool) where bool = is_new let mut current: Vec<(Cow<[DataType]>, bool)> = Vec::new(); let mut misses = Vec::new(); let mut lookups = Vec::new(); macro_rules! 
post_group { ($out:ident, $current:ident, $grpk:expr, $k:expr, $order:expr) => {{ $current.sort_unstable_by(|a, b| $order.cmp(&*a.0, &*b.0)); let start = $current.len().saturating_sub($k); if $grpk == $k { if $current.len() < $grpk { // there used to be k things in the group // now there are fewer than k // we don't know if querying would bring us back to k unimplemented!(); } // FIXME: if all the elements with the smallest value in the new topk are new, // then it *could* be that there exists some value that is greater than all // those values, and <= the smallest old value. we would only discover that by // querying. unfortunately, the check below isn't *quite* right because it does // not consider old rows that were removed in this batch (which should still be // counted for this condition). if false { let all_new_bottom = $current[start..] .iter() .take_while(|(ref r, _)| { $order.cmp(r, &$current[start].0) == Ordering::Equal }) .all(|&(_, is_new)| is_new); if all_new_bottom { eprintln!("topk is guesstimating bottom row"); } } } // optimization: if we don't *have to* remove something, we don't for i in start..$current.len() { if $current[i].1 { // we found an `is_new` in current // can we replace it with a !is_new with the same order value? let replace = $current[0..start].iter().position(|&(ref r, is_new)| { !is_new && $order.cmp(r, &$current[i].0) == Ordering::Equal }); if let Some(ri) = replace { $current.swap(i, ri); } } } for (r, is_new) in $current.drain(start..) { if is_new { $out.push(Record::Positive(r.into_owned())); } } if !$current.is_empty() { $out.extend($current.drain(..).filter_map(|(r, is_new)| { if !is_new { Some(Record::Negative(r.into_owned())) } else { None } })); } }}; }; for r in rs { if grp.iter().cmp(group_by.iter().map(|&col| &r[col])) != Ordering::Equal { // new group! 
// first, tidy up the old one if !grp.is_empty() { post_group!(out, current, grpk, self.k, self.order); } // make ready for the new one grp.clear(); grp.extend(group_by.iter().map(|&col| &r[col]).cloned()); // check out current state match db.lookup(&group_by[..], &KeyType::from(&grp[..])) { LookupResult::Some(rs) => { if replay_key_cols.is_some() { lookups.push(Lookup { on: *us, cols: group_by.clone(), key: grp.clone(), }); } missed = false; grpk = rs.len(); current.extend(rs.into_iter().map(|r| (r, false))) } LookupResult::Missing => { missed = true; } } } if missed { misses.push(Miss { on: *us, lookup_idx: group_by.clone(), lookup_cols: group_by.clone(), replay_cols: replay_key_cols.map(Vec::from), record: r.extract().0, }); } else { match r { Record::Positive(r) => current.push((Cow::Owned(r), true)), Record::Negative(r) => { if let Some(p) = current.iter().position(|&(ref x, _)| *r == **x) { let (_, was_new) = current.swap_remove(p); if !was_new { out.push(Record::Negative(r)); } } } } } } if !grp.is_empty() { post_group!(out, current, grpk, self.k, self.order); } ProcessingResult { results: out.into(), lookups, misses, } } fn on_eviction( &mut self, _: LocalNodeIndex, key_columns: &[usize], _: &mut Vec<Vec<DataType>>, ) { assert_eq!(key_columns, &self.group_by[..]); } fn suggest_indexes(&self, this: NodeIndex) -> HashMap<NodeIndex, Vec<usize>> { vec![(this, self.group_by.clone())].into_iter().collect() } fn resolve(&self, col: usize) -> Option<Vec<(NodeIndex, usize)>> { Some(vec![(self.src.as_global(), col)]) } fn description(&self, detailed: bool) -> String { if !detailed { return String::from("TopK"); } let group_cols = self .group_by .iter() .map(ToString::to_string) .collect::<Vec<_>>() .join(", "); format!("TopK γ[{}]", group_cols) } fn parent_columns(&self, col: usize) -> Vec<(NodeIndex, Option<usize>)> { vec![(self.src.as_global(), Some(col))] } } #[cfg(test)] mod tests { use super::*; use ops; fn setup(reversed: bool) -> (ops::test::MockGraph, 
IndexPair) { let cmp_rows = if reversed { vec![(2, OrderType::OrderDescending)] } else { vec![(2, OrderType::OrderAscending)] }; let mut g = ops::test::MockGraph::new(); let s = g.add_base("source", &["x", "y", "z"]); g.set_op( "topk", &["x", "y", "z"], TopK::new(s.as_global(), cmp_rows, vec![1], 3), true, ); (g, s) } #[test] fn it_keeps_topk() { let (mut g, _) = setup(false); let ni = g.node().local_addr(); let r12: Vec<DataType> = vec![1.into(), "z".into(), 12.into()]; let r10: Vec<DataType> = vec![2.into(), "z".into(), 10.into()]; let r11: Vec<DataType> = vec![3.into(), "z".into(), 11.into()]; let r5: Vec<DataType> = vec![4.into(), "z".into(), 5.into()]; let r15: Vec<DataType> = vec![5.into(), "z".into(), 15.into()]; let r10b: Vec<DataType> = vec![6.into(), "z".into(), 10.into()]; let r10c: Vec<DataType> = vec![7.into(), "z".into(), 10.into()]; g.narrow_one_row(r12.clone(), true); g.narrow_one_row(r11.clone(), true); g.narrow_one_row(r5.clone(), true); g.narrow_one_row(r10b.clone(), true); g.narrow_one_row(r10c.clone(), true); assert_eq!(g.states[ni].rows(), 3); g.narrow_one_row(r15.clone(), true); g.narrow_one_row(r10.clone(), true); assert_eq!(g.states[ni].rows(), 3); } #[test] fn it_forwards() { let (mut g, _) = setup(false); let r12: Vec<DataType> = vec![1.into(), "z".into(), 12.into()]; let r10: Vec<DataType> = vec![2.into(), "z".into(), 10.into()]; let r11: Vec<DataType> = vec![3.into(), "z".into(), 11.into()]; let r5: Vec<DataType> = vec![4.into(), "z".into(), 5.into()]; let r15: Vec<DataType> = vec![5.into(), "z".into(), 15.into()]; let a = g.narrow_one_row(r12.clone(), true); assert_eq!(a, vec![r12.clone()].into()); let a = g.narrow_one_row(r10.clone(), true); assert_eq!(a, vec![r10.clone()].into()); let a = g.narrow_one_row(r11.clone(), true); assert_eq!(a, vec![r11.clone()].into()); let a = g.narrow_one_row(r5.clone(), true); assert_eq!(a.len(), 0); let a = g.narrow_one_row(r15.clone(), true); assert_eq!(a.len(), 2); assert!(a.iter().any(|r| r == 
&(r10.clone(), false).into())); assert!(a.iter().any(|r| r == &(r15.clone(), true).into())); } #[test] #[ignore] fn it_must_query() { let (mut g, s) = setup(false); let r12: Vec<DataType> = vec![1.into(), "z".into(), 12.into()]; let r10: Vec<DataType> = vec![2.into(), "z".into(), 10.into()]; let r11: Vec<DataType> = vec![3.into(), "z".into(), 11.into()]; let r5: Vec<DataType> = vec![4.into(), "z".into(), 5.into()]; let r15: Vec<DataType> = vec![5.into(), "z".into(), 15.into()]; let r10b: Vec<DataType> = vec![6.into(), "z".into(), 10.into()]; let r10c: Vec<DataType> = vec![7.into(), "z".into(), 10.into()]; // fill topk g.narrow_one_row(r12.clone(), true); g.narrow_one_row(r10.clone(), true); g.narrow_one_row(r11.clone(), true); g.narrow_one_row(r5.clone(), true); g.narrow_one_row(r15.clone(), true); // put stuff to query for in the bases g.seed(s, r12.clone()); g.seed(s, r10.clone()); g.seed(s, r11.clone()); g.seed(s, r5.clone()); // check that removing 15 brings back 10 let a = g.narrow_one_row((r15.clone(), false), true); assert_eq!(a.len(), 2); assert!(a.iter().any(|r| r == &(r15.clone(), false).into())); assert!(a.iter().any(|r| r == &(r10.clone(), true).into())); g.unseed(s); let a = g.narrow_one_row(r10b.clone(), true); assert_eq!(a.len(), 0); let a = g.narrow_one_row(r10c.clone(), true); assert_eq!(a.len(), 0); g.seed(s, r12.clone()); g.seed(s, r11.clone()); g.seed(s, r5.clone()); g.seed(s, r10b.clone()); g.seed(s, r10c.clone()); let a = g.narrow_one_row((r10.clone(), false), true); assert_eq!(a.len(), 2); assert_eq!(a[0], (r10.clone(), false).into()); assert!(a[1] == (r10b.clone(), true).into() || a[1] == (r10c.clone(), true).into()); } #[test] fn it_forwards_reversed() { let (mut g, _) = setup(true); let r12: Vec<DataType> = vec![1.into(), "z".into(), (-12.123).into()]; let r10: Vec<DataType> = vec![2.into(), "z".into(), (0.0431).into()]; let r11: Vec<DataType> = vec![3.into(), "z".into(), (-0.082).into()]; let r5: Vec<DataType> = vec![4.into(), "z".into(), 
(5.601).into()]; let r15: Vec<DataType> = vec![5.into(), "z".into(), (-15.9).into()]; let a = g.narrow_one_row(r12.clone(), true); assert_eq!(a, vec![r12.clone()].into()); let a = g.narrow_one_row(r10.clone(), true); assert_eq!(a, vec![r10.clone()].into()); let a = g.narrow_one_row(r11.clone(), true); assert_eq!(a, vec![r11.clone()].into()); let a = g.narrow_one_row(r5.clone(), true); assert_eq!(a.len(), 0); let a = g.narrow_one_row(r15.clone(), true); assert_eq!(a.len(), 2); assert!(a.iter().any(|r| r == &(r10.clone(), false).into())); assert!(a.iter().any(|r| r == &(r15.clone(), true).into())); } #[test] fn it_suggests_indices() { let (g, _) = setup(false); let me = 2.into(); let idx = g.node().suggest_indexes(me); assert_eq!(idx.len(), 1); assert_eq!(*idx.iter().next().unwrap().1, vec![1]); } #[test] fn it_resolves() { let (g, _) = setup(false); assert_eq!( g.node().resolve(0), Some(vec![(g.narrow_base_id().as_global(), 0)]) ); assert_eq!( g.node().resolve(1), Some(vec![(g.narrow_base_id().as_global(), 1)]) ); assert_eq!( g.node().resolve(2), Some(vec![(g.narrow_base_id().as_global(), 2)]) ); } #[test] fn it_parent_columns() { let (g, _) = setup(false); assert_eq!( g.node().resolve(0), Some(vec![(g.narrow_base_id().as_global(), 0)]) ); assert_eq!( g.node().resolve(1), Some(vec![(g.narrow_base_id().as_global(), 1)]) ); assert_eq!( g.node().resolve(2), Some(vec![(g.narrow_base_id().as_global(), 2)]) ); } #[test] fn it_handles_updates() { let (mut g, _) = setup(false); let ni = g.node().local_addr(); let r1: Vec<DataType> = vec![1.into(), "z".into(), 10.into()]; let r2: Vec<DataType> = vec![2.into(), "z".into(), 10.into()]; let r3: Vec<DataType> = vec![3.into(), "z".into(), 10.into()]; let r4: Vec<DataType> = vec![4.into(), "z".into(), 5.into()]; let r4a: Vec<DataType> = vec![4.into(), "z".into(), 10.into()]; let r4b: Vec<DataType> = vec![4.into(), "z".into(), 11.into()]; g.narrow_one_row(r1.clone(), true); g.narrow_one_row(r2.clone(), true); 
g.narrow_one_row(r3.clone(), true); // a positive for a row not in the Top-K should not change the Top-K and shouldn't emit // anything let emit = g.narrow_one_row(r4.clone(), true); assert_eq!(g.states[ni].rows(), 3); assert_eq!(emit, Vec::<Record>::new().into()); // should now have 3 rows in Top-K // [1, z, 10] // [2, z, 10] // [3, z, 10] let emit = g.narrow_one( vec![Record::Negative(r4.clone()), Record::Positive(r4a.clone())], true, ); // nothing should have been emitted, as [4, z, 10] doesn't enter Top-K assert_eq!(emit, Vec::<Record>::new().into()); let emit = g.narrow_one( vec![Record::Negative(r4a.clone()), Record::Positive(r4b.clone())], true, ); // now [4, z, 11] is in, BUT we still only keep 3 elements // and have to remove one of the existing ones assert_eq!(g.states[ni].rows(), 3); assert_eq!(emit.len(), 2); // 1 pos, 1 neg assert!(emit.iter().any(|r| !r.is_positive() && r[2] == 10.into())); assert!(emit.iter().any(|r| r.is_positive() && r[2] == 11.into())); } }
33.646957
100
0.472476
de0876f35bf67b4d92ff38df575281eab139c44d
755
/*
 * Copyright 2018 ProximaX Limited. All rights reserved.
 * Use of this source code is governed by the Apache 2.0
 * license that can be found in the LICENSE file.
 */

use super::{AbstractTransactionDto, HashAlgorithm};

/// SecretProofTransactionDto : Transaction that revealed a proof.
#[derive(Serialize, Deserialize)]
pub(crate) struct SecretProofTransactionDto {
    // `abstract` is a reserved keyword, hence the raw identifier. `flatten`
    // merges the common transaction fields into this object's JSON level.
    #[serde(flatten)]
    r#abstract: AbstractTransactionDto,

    // Algorithm used to hash the proof.
    hash_algorithm: HashAlgorithm,

    /// The proof hashed.
    #[serde(rename = "secret")]
    secret: String,

    /// The address in hexadecimal that received the funds.
    // Optional: omitted from serialized output when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    recipient: Option<String>,

    /// The original random set of bytes.
    proof: String,
}
31.458333
66
0.712583
7939e288ebf8dcc492c343526750d1420ec00e59
1,740
#![deny(missing_docs)] //! Constraints and projections //! //! This module defines the trait [`Constraint`], which specifies an abstract //! projection method, and a collection of simple sets, such as norm-balls, //! finite sets, second-order cones and their Cartesian products. //! //! //! [`Constraint`]: trait.Constraint.html mod ball1; mod ball2; mod ballinf; mod cartesian_product; mod finite; mod halfspace; mod hyperplane; mod no_constraints; mod rectangle; mod simplex; mod soc; mod zero; pub use ball1::Ball1; pub use ball2::Ball2; pub use ballinf::BallInf; pub use cartesian_product::CartesianProduct; pub use finite::FiniteSet; pub use halfspace::Halfspace; pub use hyperplane::Hyperplane; pub use no_constraints::NoConstraints; pub use rectangle::Rectangle; pub use simplex::Simplex; pub use soc::SecondOrderCone; pub use zero::Zero; /// A set which can be used as a constraint /// /// This trait defines an abstract function that allows to compute projections /// on sets; this is implemented by a series of structures (see below for details) pub trait Constraint { /// Projection onto the set, that is, /// /// $$ /// \Pi_C(v) = \mathrm{argmin}_{z\in C}\Vert{}z-v{}\Vert /// $$ /// /// ## Arguments /// /// - `x`: The given vector $x$ is updated with the projection on the set /// fn project(&self, x: &mut [f64]); /// Returns true if and only if the set is convex fn is_convex(&self) -> bool; } /* ---------------------------------------------------------------------------- */ /* TESTS */ /* ---------------------------------------------------------------------------- */ #[cfg(test)] mod tests;
27.619048
82
0.593678
03eda3396f67f7b4aa0e0c349336866bba0ea8c4
102,940
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use crate::models::*; pub mod product_settings { use crate::models::*; pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, ) -> std::result::Result<SettingList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: SettingList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, 
rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, settings_name: &str, ) -> std::result::Result<Settings, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, settings_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", 
operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: Settings = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, settings_name: &str, settings: &Settings, ) -> std::result::Result<Settings, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings/{}", 
operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, settings_name ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(settings).map_err(update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: Settings = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Err(update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod update { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), 
#[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, settings_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, settings_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = 
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn onboard_workspace( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, onboarding_request: &OnboardingRequest, ) -> std::result::Result<OnboardingRequest, onboard_workspace::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings/onboard", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name ); let mut url = url::Url::parse(url_str).map_err(onboard_workspace::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await 
.map_err(onboard_workspace::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(onboarding_request).map_err(onboard_workspace::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(onboard_workspace::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(onboard_workspace::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: OnboardingRequest = serde_json::from_slice(rsp_body) .map_err(|source| onboard_workspace::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body) .map_err(|source| onboard_workspace::Error::DeserializeError(source, rsp_body.clone()))?; Err(onboard_workspace::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod onboard_workspace { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn offboard_workspace( operation_config: &crate::OperationConfig, subscription_id: &str, 
resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, ) -> std::result::Result<(), offboard_workspace::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings/offboard", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name ); let mut url = url::Url::parse(url_str).map_err(offboard_workspace::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(offboard_workspace::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(offboard_workspace::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(offboard_workspace::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body) .map_err(|source| offboard_workspace::Error::DeserializeError(source, rsp_body.clone()))?; Err(offboard_workspace::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod offboard_workspace { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { 
status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod sentinel_onboarding_states { use crate::models::*; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, sentinel_onboarding_state_name: &str, ) -> std::result::Result<SentinelOnboardingState, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/onboardingStates/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, sentinel_onboarding_state_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = 
req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: SentinelOnboardingState = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, sentinel_onboarding_state_name: &str, sentinel_onboarding_state_parameter: Option<&SentinelOnboardingState>, ) -> std::result::Result<create::Response, create::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/onboardingStates/{}", operation_config.base_path(), subscription_id, 
resource_group_name, operational_insights_resource_provider, workspace_name, sentinel_onboarding_state_name ); let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = if let Some(sentinel_onboarding_state_parameter) = sentinel_onboarding_state_parameter { azure_core::to_json(sentinel_onboarding_state_parameter).map_err(create::Error::SerializeError)? } else { bytes::Bytes::from_static(azure_core::EMPTY_BODY) }; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: SentinelOnboardingState = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: SentinelOnboardingState = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Err(create::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create { use 
crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200(SentinelOnboardingState), Created201(SentinelOnboardingState), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, sentinel_onboarding_state_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/onboardingStates/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, sentinel_onboarding_state_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } 
url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, ) -> std::result::Result<SentinelOnboardingStatesList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( 
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/onboardingStates", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: SentinelOnboardingStatesList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] 
BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod operations { use crate::models::*; pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationsList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/providers/Microsoft.SecurityInsights/operations", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: OperationsList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, 
rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod data_connectors { use crate::models::*; pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, ) -> std::result::Result<DataConnectorList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectors", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } 
// data_connectors::list (continued): finalize and send the GET request, then decode
// the response. 200 => body parsed as DataConnectorList; any other status is parsed
// as a CloudError and surfaced via list::Error::DefaultResponse. The raw body bytes
// are attached to DeserializeError to aid debugging.
url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DataConnectorList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error type for data_connectors::list: one variant per stage of the request
// lifecycle (URL parse, request build, execute, (de)serialize, token acquisition),
// plus DefaultResponse for non-200 service replies carrying a CloudError payload.
pub mod list { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Fetches a single data connector by id:
// GET {base}/subscriptions/{sub}/resourceGroups/{rg}/providers/{oi}/workspaces/{ws}/providers/Microsoft.SecurityInsights/dataConnectors/{id}.
// A bearer token header is added only when the OperationConfig carries a token credential.
pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, data_connector_id: &str, ) -> std::result::Result<DataConnector, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectors/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, data_connector_id ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DataConnector = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error type for data_connectors::get (same shape as list::Error).
pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Creates or updates a data connector: PUT to the same per-id URL as get, with the
// connector serialized as the JSON request body. 200 and 201 both succeed and are
// distinguished by the create_or_update::Response enum (Ok200 vs Created201).
pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, data_connector_id: &str, data_connector: &DataConnector, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectors/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, data_connector_id ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(data_connector).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await
.map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DataConnector = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: DataConnector = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response/Error types for data_connectors::create_or_update. Response keeps the
// 200-vs-201 distinction so callers can tell "updated" from "created".
pub mod create_or_update { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200(DataConnector), Created201(DataConnector), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Deletes a data connector by id (DELETE on the per-id URL). 200 and 204 are both
// treated as success (delete::Response::Ok200 / NoContent204); no body is parsed
// on success.
pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, data_connector_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let
http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectors/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, data_connector_id ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response/Error types for data_connectors::delete (enum continues on the next line).
pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError),
// data_connectors::delete error enum (continued), then the module closes.
#[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations for SecurityInsights watchlists. Each operation has a sibling module
// of the same name holding its Response/Error types.
pub mod watchlists { use crate::models::*;
// Lists all watchlists in a workspace:
// GET .../workspaces/{ws}/providers/Microsoft.SecurityInsights/watchlists.
// 200 => WatchlistList; other statuses => CloudError via list::Error::DefaultResponse.
pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, ) -> std::result::Result<WatchlistList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let
rsp_value: WatchlistList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error type for watchlists::list: one variant per failure stage of the call.
pub mod list { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Fetches a single watchlist by alias: GET .../watchlists/{watchlistAlias}.
// 200 => Watchlist; other statuses => CloudError via get::Error::DefaultResponse.
pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, ) -> std::result::Result<Watchlist, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, watchlist_alias ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let
token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: Watchlist = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error type for watchlists::get (same shape as list::Error).
pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Creates or updates a watchlist: PUT .../watchlists/{watchlistAlias} with the
// watchlist serialized as the JSON body. 200 (updated) and 201 (created) both
// succeed, distinguished by create_or_update::Response.
pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider:
&str, workspace_name: &str, watchlist_alias: &str, watchlist: &Watchlist, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, watchlist_alias ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(watchlist).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: Watchlist = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: Watchlist = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code
=> { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response/Error types for watchlists::create_or_update (200 vs 201 preserved).
pub mod create_or_update { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200(Watchlist), Created201(Watchlist), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Deletes a watchlist by alias (DELETE on the per-alias URL); 200 and 204 both
// succeed with no body parsed. Function body continues on the following line.
pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, watchlist_alias ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential
// watchlists::delete (continued): acquire the token, send the DELETE, and map
// 200/204 to success variants; any other status is parsed as a CloudError.
.get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response/Error types for watchlists::delete, then the watchlists module closes.
pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
// Operations for items within a watchlist, addressed by watchlist alias + item id.
pub mod watchlist_items { use crate::models::*;
// Lists all items of a watchlist:
// GET .../watchlists/{watchlistAlias}/watchlistItems. 200 => WatchlistItemList;
// other statuses => CloudError via list::Error::DefaultResponse.
pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider:
&str, workspace_name: &str, watchlist_alias: &str, ) -> std::result::Result<WatchlistItemList, list::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , watchlist_alias) ; let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: WatchlistItemList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error type for watchlist_items::list: one variant per failure stage of the call.
pub mod list { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)]
DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Fetches a single watchlist item:
// GET .../watchlists/{watchlistAlias}/watchlistItems/{watchlistItemId}.
// 200 => WatchlistItem; other statuses => CloudError via get::Error::DefaultResponse.
pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, watchlist_item_id: &str, ) -> std::result::Result<WatchlistItem, get::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}" , operation_config .
base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , watchlist_alias , watchlist_item_id) ; let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: WatchlistItem = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Error type for watchlist_items::get (same shape as list::Error).
pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize
request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Creates or updates a watchlist item: PUT on the per-item URL with the item
// serialized as the JSON body. 200 (updated) and 201 (created) both succeed,
// distinguished by create_or_update::Response.
pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, watchlist_item_id: &str, watchlist_item: &WatchlistItem, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , watchlist_alias , watchlist_item_id) ; let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(watchlist_item).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => {
let rsp_body = rsp.body(); let rsp_value: WatchlistItem = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: WatchlistItem = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Err(create_or_update::Error::DefaultResponse { status_code, value: rsp_value, }) } } }
// Response/Error types for watchlist_items::create_or_update (200 vs 201 preserved).
pub mod create_or_update { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200(WatchlistItem), Created201(WatchlistItem), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } }
// Deletes a watchlist item by alias + item id (DELETE on the per-item URL);
// the body of this function continues on the following line.
pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, watchlist_alias: &str, watchlist_item_id: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = & format !
("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}" , operation_config . base_path () , subscription_id , resource_group_name , operational_insights_resource_provider , workspace_name , watchlist_alias , watchlist_item_id) ; let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: 
{0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod metadata { use crate::models::*; pub async fn list( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, filter: Option<&str>, orderby: Option<&str>, top: Option<i32>, skip: Option<i32>, ) -> std::result::Result<MetadataList, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/metadata", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name ); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(orderby) = orderby { url.query_pairs_mut().append_pair("$orderby", orderby); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } if let Some(skip) = skip { 
url.query_pairs_mut().append_pair("$skip", skip.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: MetadataList = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Err(list::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod list { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, metadata_name: &str, ) -> std::result::Result<MetadataModel, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( 
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/metadata/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, metadata_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: MetadataModel = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Err(get::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), 
#[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, metadata_name: &str, metadata: &MetadataModel, ) -> std::result::Result<create::Response, create::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/metadata/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, metadata_name ); let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(metadata).map_err(create::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: MetadataModel = 
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: MetadataModel = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Ok(create::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?; Err(create::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod create { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200(MetadataModel), Created201(MetadataModel), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, metadata_name: &str, metadata_patch: &MetadataPatch, ) -> std::result::Result<MetadataModel, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/metadata/{}", 
operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, metadata_name ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(metadata_patch).map_err(update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: MetadataModel = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Err(update::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod update { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] 
ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, operational_insights_resource_provider: &str, workspace_name: &str, metadata_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/metadata/{}", operation_config.base_path(), subscription_id, resource_group_name, operational_insights_resource_provider, workspace_name, metadata_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let 
rsp_value: CloudError = serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?; Err(delete::Error::DefaultResponse { status_code, value: rsp_value, }) } } } pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode, value: models::CloudError, }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
48.080336
342
0.587216
ed216ca9aa473c16fff981281caa7bd80d26a4ae
1,791
use bit_vec::BitVec;

/// Serde proxy for `BitVec` (serde's `remote` derive pattern): the bit set is
/// serialized as its raw byte representation.
#[derive(Serialize, Deserialize)]
#[serde(remote = "BitVec")]
struct BitVecSerde {
    // Bytes produced by `BitVec::to_bytes`; used as the wire format.
    #[serde(getter = "BitVec::to_bytes")]
    bits: Vec<u8>,
}

impl From<BitVecSerde> for BitVec {
    fn from(proxy: BitVecSerde) -> BitVec {
        BitVec::from_bytes(&proxy.bits)
    }
}

/// Per-transaction spent/unspent bookkeeping: one bit per output,
/// `true` meaning the output has been spent.
#[derive(Default, Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct TransactionMeta {
    #[serde(with = "BitVecSerde")]
    pub output_spent: BitVec,
}

impl TransactionMeta {
    /// Creates metadata for a transaction with `outputs_count` outputs,
    /// all initially unspent.
    pub fn new(outputs_count: usize) -> TransactionMeta {
        let output_spent = BitVec::from_elem(outputs_count, false);
        TransactionMeta { output_spent }
    }

    /// Number of outputs tracked.
    pub fn len(&self) -> usize {
        self.output_spent.len()
    }

    /// True when the transaction tracks no outputs at all.
    pub fn is_empty(&self) -> bool {
        self.output_spent.is_empty()
    }

    /// True when no output has been spent yet.
    pub fn is_new(&self) -> bool {
        self.output_spent.none()
    }

    /// True when every output has been spent.
    pub fn is_fully_spent(&self) -> bool {
        self.output_spent.all()
    }

    /// Whether the output at `index` is spent; an out-of-range index
    /// reads as unspent rather than panicking.
    pub fn is_spent(&self, index: usize) -> bool {
        self.output_spent.get(index).unwrap_or(false)
    }

    /// Marks the output at `index` as spent.
    pub fn set_spent(&mut self, index: usize) {
        self.output_spent.set(index, true);
    }

    /// Clears the spent flag of the output at `index`.
    pub fn unset_spent(&mut self, index: usize) {
        self.output_spent.set(index, false);
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use bincode;

    #[test]
    fn transaction_meta_serde() {
        // Round-trip through bincode and check the spent bits survive.
        let mut meta = TransactionMeta::new(4);
        meta.set_spent(1);
        meta.set_spent(3);
        let encoded = bincode::serialize(&meta).unwrap();
        let decoded: TransactionMeta = bincode::deserialize(&encoded[..]).unwrap();
        assert!(!decoded.is_spent(0));
        assert!(decoded.is_spent(1));
        assert!(!decoded.is_spent(2));
        assert!(decoded.is_spent(3));
    }
}
22.961538
89
0.60134
726803131a45b7209e7c6738eea1665be353676c
1,794
use super::pool::{Handle as PoolHandle, Pool};
use core::fmt::Debug;
use core::mem::size_of;
use core::ptr::{null_mut, read, write};

/// Raw byte-level allocation interface. Passing `capacity == 0` releases the
/// block; passing a null `ptr` performs a fresh allocation.
pub trait Allocator {
    type Error: Debug;
    unsafe fn reallocate(&mut self, ptr: *mut u8, capacity: usize) -> Result<*mut u8, Self::Error>;
}

/// Typed wrapper over [`Allocator::reallocate`]: resizes storage to hold
/// `capacity` values of `T`. Negative capacities are clamped to zero, which
/// frees the block. Panics (via `unwrap`) if the allocator reports an error.
pub unsafe fn reallocate<T, A: Allocator>(
    allocator: &mut A,
    ptr: *mut T,
    capacity: isize,
) -> *mut T {
    // A negative request is treated as a request for zero bytes (deallocation).
    let elems = if capacity < 0 { 0 } else { capacity as usize };
    let bytes = elems * size_of::<T>();
    let raw = Allocator::reallocate(allocator, ptr as *mut u8, bytes).unwrap();
    raw as *mut T
}

/// Allocates uninitialized storage for `capacity` values of `T`.
pub unsafe fn allocate<T, A: Allocator>(allocator: &mut A, capacity: isize) -> *mut T {
    reallocate(allocator, null_mut(), capacity)
}

/// Releases storage previously obtained from `allocate`/`reallocate`.
pub unsafe fn deallocate<T, A: Allocator>(allocator: &mut A, ptr: *mut T) {
    let _ = reallocate(allocator, ptr, 0);
}

/// Pool handle backed by a raw pointer into allocator-owned memory.
#[derive(Copy, Clone, Eq)]
pub struct Handle(*mut u8);

impl PartialEq for Handle {
    fn eq(&self, rhs: &Self) -> bool {
        // Handles are equal exactly when they point at the same address.
        self.0 == rhs.0
    }
}

impl Default for Handle {
    fn default() -> Self {
        Self(null_mut())
    }
}

impl PoolHandle for Handle {}

/// Blanket `Pool` implementation: every `Allocator` can serve as a pool that
/// heap-allocates one `T` per item and hands out pointer handles.
impl<T, A: Allocator> Pool<T> for A {
    type Handle = Handle;

    fn get(&self, handle: Self::Handle) -> &T {
        // Assumes `handle` came from `add` and still owns a live `T`.
        unsafe { &*(handle.0 as *const T) }
    }

    fn get_mut(&mut self, handle: Self::Handle) -> &mut T {
        unsafe { &mut *(handle.0 as *mut T) }
    }

    fn add(&mut self, item: T) -> Self::Handle {
        unsafe {
            let slot: *mut T = allocate(self, 1);
            write(slot, item);
            Handle(slot as *mut u8)
        }
    }

    fn remove(&mut self, handle: Self::Handle) -> T {
        unsafe {
            let slot = handle.0 as *mut T;
            // Move the value out before returning the memory to the allocator.
            let item = read(slot);
            deallocate(self, slot);
            item
        }
    }
}
23.605263
99
0.56466
f79264fc093b41c05e1d879d1cacef04950a9691
181
//! This modules provides structures, which bind definitions from `aead` crate into `inplace` compatible types. pub mod aead_block; pub mod aead_chain_enc; pub mod aead_chain_dec;
30.166667
111
0.79558
01bff0cc9e11d9f0061be467cbe3fe5625a813db
17,209
use crate::physical_plan::state::ExecutionState;
use crate::physical_plan::PhysicalAggregation;
use crate::prelude::*;
use polars_core::frame::groupby::GroupsProxy;
use polars_core::series::unstable::UnstableSeries;
use polars_core::{prelude::*, POOL};
use std::convert::TryFrom;
use std::sync::Arc;

/// Physical executor for a binary expression `left <op> right`.
pub struct BinaryExpr {
    pub(crate) left: Arc<dyn PhysicalExpr>,
    pub(crate) op: Operator,
    pub(crate) right: Arc<dyn PhysicalExpr>,
    // The original logical expression; kept for error messages and schema lookup.
    expr: Expr,
}

impl BinaryExpr {
    /// Bundles the two physical operands, the operator, and the originating
    /// logical expression.
    pub fn new(
        left: Arc<dyn PhysicalExpr>,
        op: Operator,
        right: Arc<dyn PhysicalExpr>,
        expr: Expr,
    ) -> Self {
        Self {
            left,
            op,
            right,
            expr,
        }
    }
}

/// Applies `op` to two already-evaluated `Series`.
/// `TrueDivide` casts non-float, non-temporal operands to `Float64` first so
/// integer division still yields a floating-point result.
pub(crate) fn apply_operator(left: &Series, right: &Series, op: Operator) -> Result<Series> {
    match op {
        Operator::Gt => Ok(ChunkCompare::<&Series>::gt(left, right).into_series()),
        Operator::GtEq => Ok(ChunkCompare::<&Series>::gt_eq(left, right).into_series()),
        Operator::Lt => Ok(ChunkCompare::<&Series>::lt(left, right).into_series()),
        Operator::LtEq => Ok(ChunkCompare::<&Series>::lt_eq(left, right).into_series()),
        Operator::Eq => Ok(ChunkCompare::<&Series>::equal(left, right).into_series()),
        Operator::NotEq => Ok(ChunkCompare::<&Series>::not_equal(left, right).into_series()),
        Operator::Plus => Ok(left + right),
        Operator::Minus => Ok(left - right),
        Operator::Multiply => Ok(left * right),
        Operator::Divide => Ok(left / right),
        Operator::TrueDivide => {
            use DataType::*;
            match left.dtype() {
                Date | Datetime(_, _) | Float32 | Float64 => Ok(left / right),
                _ => Ok(&left.cast(&Float64)? / &right.cast(&Float64)?),
            }
        }
        Operator::And => left.bitand(right),
        Operator::Or => left.bitor(right),
        Operator::Xor => left.bitxor(right),
        Operator::Modulus => Ok(left % right),
    }
}

impl PhysicalExpr for BinaryExpr {
    fn as_expression(&self) -> &Expr {
        &self.expr
    }

    /// Evaluates both operands in parallel on the polars thread pool, then
    /// combines them with `apply_operator`.
    fn evaluate(&self, df: &DataFrame, state: &ExecutionState) -> Result<Series> {
        let (lhs, rhs) = POOL.install(|| {
            rayon::join(
                || self.left.evaluate(df, state),
                || self.right.evaluate(df, state),
            )
        });
        apply_operator(&lhs?, &rhs?, self.op)
    }

    /// Group-aware evaluation. Both operands are evaluated per group (in
    /// parallel), then the match below dispatches on the pair of aggregation
    /// states to pick the cheapest correct way to combine them.
    #[allow(clippy::ptr_arg)]
    fn evaluate_on_groups<'a>(
        &self,
        df: &DataFrame,
        groups: &'a GroupsProxy,
        state: &ExecutionState,
    ) -> Result<AggregationContext<'a>> {
        let (result_a, result_b) = POOL.install(|| {
            rayon::join(
                || self.left.evaluate_on_groups(df, groups, state),
                || self.right.evaluate_on_groups(df, groups, state),
            )
        });
        let mut ac_l = result_a?;
        let mut ac_r = result_b?;
        if !ac_l.can_combine(&ac_r) {
            return Err(PolarsError::InvalidOperation(
                "\
cannot combine this binary expression, the groups do not match"
                    .into(),
            ));
        }
        match (ac_l.agg_state(), ac_r.agg_state(), self.op) {
            // Some aggregations must return boolean masks that fit the group.
            // That's why not all literals can take this path:
            // only literals that are used in arithmetic.
            (
                AggState::AggregatedFlat(lhs),
                AggState::Literal(rhs),
                Operator::Plus
                | Operator::Minus
                | Operator::Divide
                | Operator::Multiply
                | Operator::Modulus
                | Operator::TrueDivide,
            )
            | (
                AggState::Literal(lhs),
                AggState::AggregatedFlat(rhs),
                Operator::Plus
                | Operator::Minus
                | Operator::Divide
                | Operator::Multiply
                | Operator::Modulus
                | Operator::TrueDivide,
            ) => {
                let out = apply_operator(lhs, rhs, self.op)?;
                ac_l.with_series(out, true);
                Ok(ac_l)
            }
            // One of the two exprs is aggregated with flat aggregation,
            // e.g. `e.min(), e.max(), e.first()`.
            // If the groups_len == df.len we can just apply all flat.
            // Within an aggregation a `col().first() - lit(0)` must still
            // produce a boolean array of group length, that's why a literal
            // also takes this branch.
            (AggState::AggregatedFlat(s), AggState::NotAggregated(_) | AggState::Literal(_), _)
                if s.len() != df.height() =>
            {
                // This is a flat series of len eq to group tuples.
                let l = ac_l.aggregated();
                let l = l.as_ref();
                let arr_l = &l.chunks()[0];

                // We create a dummy Series that is not cloned nor moved
                // so we can swap the ArrayRef during the hot loop.
                // This prevents a series Arc alloc and a vec alloc per iteration.
                let dummy = Series::try_from(("dummy", vec![arr_l.clone()])).unwrap();
                // Keep logical type info.
                let dummy = dummy.cast(l.dtype()).unwrap();
                let mut us = UnstableSeries::new(&dummy);

                // This is now a list.
                let r = ac_r.aggregated();
                let r = r.list().unwrap();

                let mut ca: ListChunked = r
                    .amortized_iter()
                    .enumerate()
                    .map(|(idx, opt_s)| {
                        opt_s
                            .map(|s| {
                                let r = s.as_ref();
                                // TODO: optimize this? Its slow and unsafe.
                                // Safety:
                                // we are in bounds
                                let arr = unsafe { Arc::from(arr_l.slice_unchecked(idx, 1)) };
                                us.swap(arr);
                                let l = us.as_ref();
                                apply_operator(l, r, self.op)
                            })
                            .transpose()
                    })
                    .collect::<Result<_>>()?;
                ca.rename(l.name());
                ac_l.with_series(ca.into_series(), true);
                ac_l.with_update_groups(UpdateGroups::WithGroupsLen);
                Ok(ac_l)
            }
            // Mirror of the arm above: the flat aggregation is on the right.
            // If the groups_len == df.len we can just apply all flat.
            (
                AggState::Literal(_) | AggState::AggregatedList(_) | AggState::NotAggregated(_),
                AggState::AggregatedFlat(s),
                _,
            ) if s.len() != df.height() => {
                // This is now a list.
                let l = ac_l.aggregated();
                let l = l.list().unwrap();
                // This is a flat series of len eq to group tuples.
                let r = ac_r.aggregated();
                assert_eq!(l.len(), groups.len());
                let r = r.as_ref();
                let arr_r = &r.chunks()[0];

                // We create a dummy Series that is not cloned nor moved
                // so we can swap the ArrayRef during the hot loop.
                // This prevents a series Arc alloc and a vec alloc per iteration.
                let dummy = Series::try_from(("dummy", vec![arr_r.clone()])).unwrap();
                // Keep logical type info.
                let dummy = dummy.cast(r.dtype()).unwrap();
                let mut us = UnstableSeries::new(&dummy);

                let mut ca: ListChunked = l
                    .amortized_iter()
                    .enumerate()
                    .map(|(idx, opt_s)| {
                        opt_s
                            .map(|s| {
                                let l = s.as_ref();
                                // TODO: optimize this? Its slow.
                                // Safety:
                                // we are in bounds
                                let arr = unsafe { Arc::from(arr_r.slice_unchecked(idx, 1)) };
                                us.swap(arr);
                                let r = us.as_ref();
                                apply_operator(l, r, self.op)
                            })
                            .transpose()
                    })
                    .collect::<Result<_>>()?;
                ca.rename(l.name());
                ac_l.with_series(ca.into_series(), true);
                ac_l.with_update_groups(UpdateGroups::WithGroupsLen);
                Ok(ac_l)
            }
            // List vs non-aggregated/literal: align both sides by group order
            // first, then operate on the flattened representation.
            (AggState::AggregatedList(_), AggState::NotAggregated(_) | AggState::Literal(_), _)
            | (AggState::NotAggregated(_) | AggState::Literal(_), AggState::AggregatedList(_), _) => {
                ac_l.sort_by_groups();
                ac_r.sort_by_groups();
                let out = apply_operator(
                    ac_l.flat_naive().as_ref(),
                    ac_r.flat_naive().as_ref(),
                    self.op,
                )?;
                // We flattened the series, so that sorts by group.
                ac_l.with_update_groups(UpdateGroups::WithGroupsLen);
                ac_l.with_series(out, false);
                Ok(ac_l)
            }
            // Both aggregated lists: flatten the Series and apply the operators.
            (AggState::AggregatedList(_), AggState::AggregatedList(_), _) => {
                let out = apply_operator(
                    ac_l.flat_naive().as_ref(),
                    ac_r.flat_naive().as_ref(),
                    self.op,
                )?;
                ac_l.combine_groups(ac_r).with_series(out, false);
                ac_l.with_update_groups(UpdateGroups::WithGroupsLen);
                Ok(ac_l)
            }
            // Both are or a flat series,
            // so we can flatten the Series and apply the operators.
            _ => {
                let out = apply_operator(
                    ac_l.flat_naive().as_ref(),
                    ac_r.flat_naive().as_ref(),
                    self.op,
                )?;
                ac_l.combine_groups(ac_r).with_series(out, false);
                Ok(ac_l)
            }
        }
    }

    fn to_field(&self, input_schema: &Schema) -> Result<Field> {
        self.expr.to_field(input_schema, Context::Default)
    }

    fn as_agg_expr(&self) -> Result<&dyn PhysicalAggregation> {
        Ok(self)
    }

    #[cfg(feature = "parquet")]
    fn as_stats_evaluator(&self) -> Option<&dyn polars_io::predicates::StatsEvaluator> {
        Some(self)
    }
}

impl PhysicalAggregation for BinaryExpr {
    /// Aggregates both operands (in parallel) and combines the two aggregated
    /// series; errors with a hint if either side is not an aggregation.
    fn aggregate(
        &self,
        df: &DataFrame,
        groups: &GroupsProxy,
        state: &ExecutionState,
    ) -> Result<Option<Series>> {
        match (self.left.as_agg_expr(), self.right.as_agg_expr()) {
            (Ok(left), Ok(right)) => {
                let (left_agg, right_agg) = POOL.install(|| {
                    rayon::join(
                        || left.aggregate(df, groups, state),
                        || right.aggregate(df, groups, state),
                    )
                });
                let right_agg = right_agg?;
                // Only produce a result when both sides yielded a Series.
                left_agg?
                    .and_then(|left| right_agg.map(|right| apply_operator(&left, &right, self.op)))
                    .transpose()
            }
            (_, _) => Err(PolarsError::ComputeError(
                format!(
                    "this binary expression is not an aggregation: {:?} pherhaps you should add an aggregation like, '.sum()', '.min()', '.mean()', etc. if you really want to collect this binary expression, use `.list()`",
                    self.expr
                )
                .into(),
            )),
        }
    }
}

#[cfg(feature = "parquet")]
mod stats {
    //! Parquet row-group skipping: decide from min/max statistics whether a
    //! file can possibly contain rows matching this predicate.
    use super::*;
    use polars_io::parquet::predicates::BatchStats;
    use polars_io::predicates::StatsEvaluator;

    /// `col <op> literal`: returns true ("must read") unless the min/max pair
    /// proves no row can satisfy the comparison.
    fn apply_operator_stats_rhs_lit(min_max: &Series, literal: &Series, op: Operator) -> bool {
        match op {
            // col > lit
            // e.g.
            // [min,
            // max] > 0
            //
            // [-1,
            // 2] > 0
            //
            // [false, true] -> true -> read
            Operator::Gt => {
                // literal is bigger than max value
                // selection needs all rows
                ChunkCompare::<&Series>::gt(min_max, literal).any()
            }
            // col >= lit
            Operator::GtEq => {
                // literal is bigger than max value
                // selection needs all rows
                ChunkCompare::<&Series>::gt_eq(min_max, literal).any()
            }
            // col < lit
            Operator::Lt => {
                // literal is smaller than min value
                // selection needs all rows
                ChunkCompare::<&Series>::lt(min_max, literal).any()
            }
            // col <= lit
            Operator::LtEq => {
                // literal is smaller than min value
                // selection needs all rows
                ChunkCompare::<&Series>::lt_eq(min_max, literal).any()
            }
            // default: read the file
            _ => true,
        }
    }

    /// `literal <op> col`: mirror of `apply_operator_stats_rhs_lit` with the
    /// operands swapped.
    fn apply_operator_stats_lhs_lit(literal: &Series, min_max: &Series, op: Operator) -> bool {
        match op {
            Operator::Gt => {
                // literal is bigger than max value
                // selection needs all rows
                ChunkCompare::<&Series>::gt(literal, min_max).any()
            }
            Operator::GtEq => {
                // literal is bigger than max value
                // selection needs all rows
                ChunkCompare::<&Series>::gt_eq(literal, min_max).any()
            }
            Operator::Lt => {
                // literal is smaller than min value
                // selection needs all rows
                ChunkCompare::<&Series>::lt(literal, min_max).any()
            }
            Operator::LtEq => {
                // literal is smaller than min value
                // selection needs all rows
                ChunkCompare::<&Series>::lt_eq(literal, min_max).any()
            }
            // default: read the file
            _ => true,
        }
    }

    impl BinaryExpr {
        /// Checks a `col <op> literal` (or mirrored) predicate against batch
        /// statistics. Returns Ok(true) ("read the file") whenever the
        /// statistics are missing or inconclusive.
        fn impl_should_read(&self, stats: &BatchStats) -> Result<bool> {
            let schema = stats.schema();
            let fld_l = self.left.to_field(schema)?;
            let fld_r = self.right.to_field(schema)?;
            assert_eq!(fld_l.data_type(), fld_r.data_type(), "implementation error");

            // Literal operands are evaluated against an empty frame; the
            // "literal" field name marks which side is the constant.
            let dummy = DataFrame::new_no_checks(vec![]);
            let state = ExecutionState::new();

            let out = match (fld_l.name().as_str(), fld_r.name().as_str()) {
                (_, "literal") => {
                    let l = stats.get_stats(fld_l.name())?;
                    match l.to_min_max() {
                        None => Ok(true),
                        Some(min_max_s) => {
                            let lit_s = self.right.evaluate(&dummy, &state).unwrap();
                            Ok(apply_operator_stats_rhs_lit(&min_max_s, &lit_s, self.op))
                        }
                    }
                }
                ("literal", _) => {
                    let r = stats.get_stats(fld_r.name())?;
                    match r.to_min_max() {
                        None => Ok(true),
                        Some(min_max_s) => {
                            let lit_s = self.left.evaluate(&dummy, &state).unwrap();
                            Ok(apply_operator_stats_lhs_lit(&lit_s, &min_max_s, self.op))
                        }
                    }
                }
                // default: read the file
                _ => Ok(true),
            };
            out.map(|read| {
                if state.verbose && read {
                    eprintln!("parquet file must be read, statistics not sufficient to for predicate.")
                } else if state.verbose && !read {
                    eprintln!("parquet file can be skipped, the statistics were sufficient to apply the predicate.")
                };
                read
            })
        }
    }

    impl StatsEvaluator for BinaryExpr {
        /// Recurses into And/Or sub-predicates when both sides are themselves
        /// stats evaluators; otherwise falls back to the single-comparison
        /// check. The env var provides an escape hatch that disables skipping.
        fn should_read(&self, stats: &BatchStats) -> Result<bool> {
            if std::env::var("POLARS_NO_PARQUET_STATISTICS").is_ok() {
                return Ok(true);
            }

            match (
                self.left.as_stats_evaluator(),
                self.right.as_stats_evaluator(),
            ) {
                (Some(l), Some(r)) => match self.op {
                    Operator::And => Ok(l.should_read(stats)? && r.should_read(stats)?),
                    Operator::Or => Ok(l.should_read(stats)? || r.should_read(stats)?),
                    _ => Ok(true),
                },
                _ => self.impl_should_read(stats),
            }
        }
    }
}
37.821978
126
0.464524
0933909bb6cec23ac86e6c53f1810ded1bd5ad57
1,480
/// A minesweeper board: a rectangular grid of `'*'` mines and `' '` blanks.
struct Board {
    pieces: Vec<Vec<char>>,
    num_rows: usize,
    num_cols: usize,
}

impl Board {
    /// Renders every row with blank squares replaced by neighbour-mine counts.
    fn annotated(&self) -> Vec<String> {
        (0..self.num_rows).map(|y| self.annotated_row(y)).collect()
    }

    /// Renders row `y`: mines stay `'*'`; blanks become a digit or stay blank.
    fn annotated_row(&self, y: usize) -> String {
        self.pieces[y]
            .iter()
            .enumerate()
            .map(|(x, &c)| {
                if c == ' ' {
                    self.count_neighbouring_mines_char(x, y)
                } else {
                    c
                }
            })
            .collect()
    }

    /// Counts mines in the squares adjacent to `(x, y)` and returns the digit,
    /// or `' '` when there is no neighbouring mine. The scan includes `(x, y)`
    /// itself, which is safe because callers only pass blank (non-mine) squares.
    fn count_neighbouring_mines_char(&self, x: usize, y: usize) -> char {
        let count = neighbouring_points(y, self.num_rows)
            .into_iter()
            .flat_map(|y1| {
                neighbouring_points(x, self.num_cols)
                    .into_iter()
                    .map(move |x1| (x1, y1))
            })
            .filter(|&(x1, y1)| self.pieces[y1][x1] == '*')
            .count() as u32;
        if count == 0 {
            ' '
        } else {
            // A square has at most 8 neighbours, so this is always one digit.
            char::from_digit(count, 10).expect("neighbour count is at most 8")
        }
    }
}

/// Annotates a minesweeper board, replacing each blank square with the number
/// of adjacent mines (leaving it blank when that number is zero).
///
/// Returns an empty vector for an empty board. All rows are assumed to be as
/// wide as the first row.
pub fn annotate(pieces: &[&str]) -> Vec<String> {
    if pieces.is_empty() {
        return Vec::new();
    }
    let pieces_vec = pieces.iter().map(|&r| r.chars().collect()).collect();
    Board {
        pieces: pieces_vec,
        num_rows: pieces.len(),
        num_cols: pieces[0].len(),
    }
    .annotated()
}

/// Returns the coordinates within `[0, limit)` that are at distance at most 1
/// from `x`, i.e. `x - 1`, `x`, `x + 1` clamped to the board edge.
fn neighbouring_points(x: usize, limit: usize) -> Vec<usize> {
    (x.saturating_sub(1)..=(x + 1).min(limit.saturating_sub(1))).collect()
}
26.428571
96
0.491216
90739e35c9cf677e685e4e55596b04cf28f27701
4,259
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0

use anyhow::*;
use bitflags::_core::time::Duration;
use futures::channel::mpsc::channel;
use futures::prelude::*;
use log::{debug, error};
use network_p2p::config::{RequestResponseConfig, TransportConfig};
use network_p2p::{
    identity, NetworkConfiguration, NetworkWorker, NodeKeyConfig, Params, ProtocolId, Secret,
};
use network_p2p_types::{is_memory_addr, ProtocolRequest};
use prometheus::default_registry;
use starcoin_config::NodeConfig;
use starcoin_network_rpc::NetworkRpcService;
use starcoin_service_registry::ServiceRef;
use starcoin_types::peer_info::RpcInfo;
use starcoin_types::startup_info::ChainInfo;
use std::borrow::Cow;

/// Upper bound on a single request-response RPC request body (1 MiB).
const MAX_REQUEST_SIZE: u64 = 1024 * 1024;
/// Upper bound on a single request-response RPC response body (64 MiB).
const MAX_RESPONSE_SIZE: u64 = 1024 * 1024 * 64;
/// Capacity of the per-protocol inbound request channel.
const REQUEST_BUFFER_SIZE: usize = 128;

/// Prefix prepended to every RPC path to form the libp2p protocol name.
pub const RPC_PROTOCOL_PREFIX: &str = "/starcoin/rpc/";

/// Builds the p2p `NetworkWorker` from node configuration.
///
/// * `node_config` - node-level settings (listen address, seeds, node key, …).
/// * `chain_info` - chain identity; its chain id becomes part of the protocol id.
/// * `protocols` - notification protocol names to register.
/// * `rpc_service` - optional RPC registry; when present, one request-response
///   protocol is registered per RPC path and its inbound requests are forwarded
///   to the service as an event stream.
///
/// Returns the configured, not-yet-running `NetworkWorker`.
pub fn build_network_worker(
    node_config: &NodeConfig,
    chain_info: ChainInfo,
    protocols: Vec<Cow<'static, str>>,
    rpc_service: Option<(RpcInfo, ServiceRef<NetworkRpcService>)>,
) -> Result<NetworkWorker> {
    let node_name = node_config.node_name();
    let discover_local = node_config.network.discover_local();
    // Memory addresses (used by tests) get an in-process transport; otherwise
    // use the normal TCP transport, with mDNS only when local discovery is on.
    let transport_config = if is_memory_addr(&node_config.network.listen()) {
        TransportConfig::MemoryOnly
    } else {
        TransportConfig::Normal {
            enable_mdns: discover_local,
            allow_private_ipv4: true,
            wasm_external_transport: None,
        }
    };
    //TODO define RequestResponseConfig by rpc api
    let rpc_protocols = match rpc_service {
        Some((rpc_info, rpc_service)) => rpc_info
            .into_iter()
            .map(|rpc_path| {
                //TODO define rpc path in rpc api, and add prefix.
                let protocol_name: Cow<'static, str> =
                    format!("{}{}", RPC_PROTOCOL_PREFIX, rpc_path.as_str()).into();
                let rpc_path_for_stream: Cow<'static, str> = rpc_path.into();
                // Inbound requests for this protocol flow through this channel
                // and are tagged with the (un-prefixed) rpc path.
                let (sender, receiver) = channel(REQUEST_BUFFER_SIZE);
                let stream = receiver.map(move |request| ProtocolRequest {
                    protocol: rpc_path_for_stream.clone(),
                    request,
                });
                // Side effect: registers the request stream with the rpc
                // service; a failure is logged but does not abort the build.
                if let Err(e) = rpc_service.add_event_stream(stream) {
                    error!(
                        "Add request event stream for rpc {} fail: {:?}",
                        protocol_name, e
                    );
                }
                RequestResponseConfig {
                    name: protocol_name,
                    max_request_size: MAX_REQUEST_SIZE,
                    max_response_size: MAX_RESPONSE_SIZE,
                    request_timeout: Duration::from_secs(30),
                    inbound_queue: Some(sender),
                }
            })
            .collect::<Vec<_>>(),
        None => vec![],
    };
    let allow_non_globals_in_dht = discover_local;
    let boot_nodes = node_config.network.seeds();
    let config = NetworkConfiguration {
        listen_addresses: vec![node_config.network.listen()],
        boot_nodes,
        node_key: {
            // Reuse the node's configured ed25519 keypair as the libp2p identity.
            let secret = identity::ed25519::SecretKey::from_bytes(
                &mut node_config.network.network_keypair().0.to_bytes(),
            )
            .expect("decode network node key should success.");
            NodeKeyConfig::Ed25519(Secret::Input(secret))
        },
        notifications_protocols: protocols,
        request_response_protocols: rpc_protocols,
        transport: transport_config,
        node_name,
        client_version: starcoin_config::APP_NAME_WITH_VERSION.clone(),
        allow_non_globals_in_dht,
        ..NetworkConfiguration::default()
    };
    // protocol id is chain/{chain_id}, `RegisteredProtocol` will append `/starcoin` prefix
    let protocol_id = ProtocolId::from(format!("chain/{}", chain_info.chain_id()).as_str());
    debug!("Init network worker with config: {:?}", config);
    let worker = NetworkWorker::new(Params::new(
        config,
        protocol_id,
        chain_info,
        //TODO use a custom registry for each instance.
        Some(default_registry().clone()),
    ))?;
    Ok(worker)
}
39.435185
93
0.624325
29725272c6341b92bb1c2736d3f3fec03cff9006
5,322
#[doc = "Register `NFCID1_2ND_LAST` reader"] pub struct R(crate::R<NFCID1_2ND_LAST_SPEC>); impl core::ops::Deref for R { type Target = crate::R<NFCID1_2ND_LAST_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::convert::From<crate::R<NFCID1_2ND_LAST_SPEC>> for R { fn from(reader: crate::R<NFCID1_2ND_LAST_SPEC>) -> Self { R(reader) } } #[doc = "Register `NFCID1_2ND_LAST` writer"] pub struct W(crate::W<NFCID1_2ND_LAST_SPEC>); impl core::ops::Deref for W { type Target = crate::W<NFCID1_2ND_LAST_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl core::convert::From<crate::W<NFCID1_2ND_LAST_SPEC>> for W { fn from(writer: crate::W<NFCID1_2ND_LAST_SPEC>) -> Self { W(writer) } } #[doc = "Field `NFCID1_V` reader - NFCID1 byte V"] pub struct NFCID1_V_R(crate::FieldReader<u8, u8>); impl NFCID1_V_R { pub(crate) fn new(bits: u8) -> Self { NFCID1_V_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for NFCID1_V_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `NFCID1_V` writer - NFCID1 byte V"] pub struct NFCID1_V_W<'a> { w: &'a mut W, } impl<'a> NFCID1_V_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff); self.w } } #[doc = "Field `NFCID1_U` reader - NFCID1 byte U"] pub struct NFCID1_U_R(crate::FieldReader<u8, u8>); impl NFCID1_U_R { pub(crate) fn new(bits: u8) -> Self { NFCID1_U_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for NFCID1_U_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `NFCID1_U` writer - NFCID1 byte U"] pub struct NFCID1_U_W<'a> { w: &'a mut W, } impl<'a> NFCID1_U_W<'a> { #[doc = r"Writes raw 
bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8); self.w } } #[doc = "Field `NFCID1_T` reader - NFCID1 byte T"] pub struct NFCID1_T_R(crate::FieldReader<u8, u8>); impl NFCID1_T_R { pub(crate) fn new(bits: u8) -> Self { NFCID1_T_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for NFCID1_T_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `NFCID1_T` writer - NFCID1 byte T"] pub struct NFCID1_T_W<'a> { w: &'a mut W, } impl<'a> NFCID1_T_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16); self.w } } impl R { #[doc = "Bits 0:7 - NFCID1 byte V"] #[inline(always)] pub fn nfcid1_v(&self) -> NFCID1_V_R { NFCID1_V_R::new((self.bits & 0xff) as u8) } #[doc = "Bits 8:15 - NFCID1 byte U"] #[inline(always)] pub fn nfcid1_u(&self) -> NFCID1_U_R { NFCID1_U_R::new(((self.bits >> 8) & 0xff) as u8) } #[doc = "Bits 16:23 - NFCID1 byte T"] #[inline(always)] pub fn nfcid1_t(&self) -> NFCID1_T_R { NFCID1_T_R::new(((self.bits >> 16) & 0xff) as u8) } } impl W { #[doc = "Bits 0:7 - NFCID1 byte V"] #[inline(always)] pub fn nfcid1_v(&mut self) -> NFCID1_V_W { NFCID1_V_W { w: self } } #[doc = "Bits 8:15 - NFCID1 byte U"] #[inline(always)] pub fn nfcid1_u(&mut self) -> NFCID1_U_W { NFCID1_U_W { w: self } } #[doc = "Bits 16:23 - NFCID1 byte T"] #[inline(always)] pub fn nfcid1_t(&mut self) -> NFCID1_T_W { NFCID1_T_W { w: self } } #[doc = "Writes raw bits to the register."] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Second last NFCID1 part (7 or 10 bytes ID)\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), 
[`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [nfcid1_2nd_last](index.html) module"] pub struct NFCID1_2ND_LAST_SPEC; impl crate::RegisterSpec for NFCID1_2ND_LAST_SPEC { type Ux = u32; } #[doc = "`read()` method returns [nfcid1_2nd_last::R](R) reader structure"] impl crate::Readable for NFCID1_2ND_LAST_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [nfcid1_2nd_last::W](W) writer structure"] impl crate::Writable for NFCID1_2ND_LAST_SPEC { type Writer = W; } #[doc = "`reset()` method sets NFCID1_2ND_LAST to value 0"] impl crate::Resettable for NFCID1_2ND_LAST_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
30.94186
438
0.598647
4a890a7c141c0b5f1583b97066501700c751a99c
12,864
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. #[derive(Debug)] pub(crate) struct Handle< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { pub(crate) client: aws_smithy_client::Client<C, M, R>, pub(crate) conf: crate::Config, } /// Client for Amazon Kinesis Video Streams Media /// /// Client for invoking operations on Amazon Kinesis Video Streams Media. Each operation on Amazon Kinesis Video Streams Media is a method on this /// this struct. `.send()` MUST be invoked on the generated operations to dispatch the request to the service. /// /// # Examples /// **Constructing a client and invoking an operation** /// ```rust,no_run /// # async fn docs() { /// // create a shared configuration. This can be used & shared between multiple service clients. /// let shared_config = aws_config::load_from_env().await; /// let client = aws_sdk_kinesisvideomedia::Client::new(&shared_config); /// // invoke an operation /// /* let rsp = client /// .<operation_name>(). 
/// .<param>("some value") /// .send().await; */ /// # } /// ``` /// **Constructing a client with custom configuration** /// ```rust,no_run /// use aws_config::RetryConfig; /// # async fn docs() { /// let shared_config = aws_config::load_from_env().await; /// let config = aws_sdk_kinesisvideomedia::config::Builder::from(&shared_config) /// .retry_config(RetryConfig::disabled()) /// .build(); /// let client = aws_sdk_kinesisvideomedia::Client::from_conf(config); /// # } #[derive(std::fmt::Debug)] pub struct Client< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<Handle<C, M, R>>, } impl<C, M, R> std::clone::Clone for Client<C, M, R> { fn clone(&self) -> Self { Self { handle: self.handle.clone(), } } } #[doc(inline)] pub use aws_smithy_client::Builder; impl<C, M, R> From<aws_smithy_client::Client<C, M, R>> for Client<C, M, R> { fn from(client: aws_smithy_client::Client<C, M, R>) -> Self { Self::with_config(client, crate::Config::builder().build()) } } impl<C, M, R> Client<C, M, R> { /// Creates a client with the given service configuration. pub fn with_config(client: aws_smithy_client::Client<C, M, R>, conf: crate::Config) -> Self { Self { handle: std::sync::Arc::new(Handle { client, conf }), } } /// Returns the client's configuration. pub fn conf(&self) -> &crate::Config { &self.handle.conf } } impl<C, M, R> Client<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Constructs a fluent builder for the `GetMedia` operation. /// /// See [`GetMedia`](crate::client::fluent_builders::GetMedia) for more information about the /// operation and its arguments. pub fn get_media(&self) -> fluent_builders::GetMedia<C, M, R> { fluent_builders::GetMedia::new(self.handle.clone()) } } pub mod fluent_builders { //! //! 
Utilities to ergonomically construct a request to the service. //! //! Fluent builders are created through the [`Client`](crate::client::Client) by calling //! one if its operation methods. After parameters are set using the builder methods, //! the `send` method can be called to initiate the request. //! /// Fluent builder constructing a request to `GetMedia`. /// /// <p> Use this API to retrieve media content from a Kinesis video stream. In the request, you identify the stream name or stream Amazon Resource Name (ARN), and the starting chunk. Kinesis Video Streams then returns a stream of chunks in order by fragment number.</p> <note> /// <p>You must first call the <code>GetDataEndpoint</code> API to get an endpoint. Then send the <code>GetMedia</code> requests to this endpoint using the <a href="https://docs.aws.amazon.com/cli/latest/reference/">--endpoint-url parameter</a>. </p> /// </note> /// <p>When you put media data (fragments) on a stream, Kinesis Video Streams stores each incoming fragment and related metadata in what is called a "chunk." For more information, see <a href="https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/API_dataplane_PutMedia.html">PutMedia</a>. The <code>GetMedia</code> API returns a stream of these chunks starting from the chunk that you specify in the request. </p> /// <p>The following limits apply when using the <code>GetMedia</code> API:</p> /// <ul> /// <li> <p>A client can call <code>GetMedia</code> up to five times per second per stream. </p> </li> /// <li> <p>Kinesis Video Streams sends media data at a rate of up to 25 megabytes per second (or 200 megabits per second) during a <code>GetMedia</code> session. 
</p> </li> /// </ul> <note> /// <p>If an error is thrown after invoking a Kinesis Video Streams media API, in addition to the HTTP status code and the response body, it includes the following pieces of information: </p> /// <ul> /// <li> <p> <code>x-amz-ErrorType</code> HTTP header – contains a more specific error type in addition to what the HTTP status code provides. </p> </li> /// <li> <p> <code>x-amz-RequestId</code> HTTP header – if you want to report an issue to AWS, the support team can better diagnose the problem if given the Request Id.</p> </li> /// </ul> /// <p>Both the HTTP status code and the ErrorType header can be utilized to make programmatic decisions about whether errors are retry-able and under what conditions, as well as provide information on what actions the client programmer might need to take in order to successfully try again.</p> /// <p>For more information, see the <b>Errors</b> section at the bottom of this topic, as well as <a href="https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/CommonErrors.html">Common Errors</a>. </p> /// </note> #[derive(std::clone::Clone, std::fmt::Debug)] pub struct GetMedia< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::get_media_input::Builder, } impl<C, M, R> GetMedia<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `GetMedia`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. 
Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::GetMediaOutput, aws_smithy_http::result::SdkError<crate::error::GetMediaError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::GetMediaInputOperationOutputAlias, crate::output::GetMediaOutput, crate::error::GetMediaError, crate::input::GetMediaInputOperationRetryAlias, >, { let op = self .inner .build() .map_err(|err| aws_smithy_http::result::SdkError::ConstructionFailure(err.into()))? .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The Kinesis video stream name from where you want to get the media content. If you don't specify the <code>streamName</code>, you must specify the <code>streamARN</code>.</p> pub fn stream_name(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.stream_name(input.into()); self } /// <p>The Kinesis video stream name from where you want to get the media content. If you don't specify the <code>streamName</code>, you must specify the <code>streamARN</code>.</p> pub fn set_stream_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_stream_name(input); self } /// <p>The ARN of the stream from where you want to get the media content. If you don't specify the <code>streamARN</code>, you must specify the <code>streamName</code>.</p> pub fn stream_arn(mut self, input: impl Into<std::string::String>) -> Self { self.inner = self.inner.stream_arn(input.into()); self } /// <p>The ARN of the stream from where you want to get the media content. 
If you don't specify the <code>streamARN</code>, you must specify the <code>streamName</code>.</p> pub fn set_stream_arn(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_stream_arn(input); self } /// <p>Identifies the starting chunk to get from the specified stream. </p> pub fn start_selector(mut self, input: crate::model::StartSelector) -> Self { self.inner = self.inner.start_selector(input); self } /// <p>Identifies the starting chunk to get from the specified stream. </p> pub fn set_start_selector( mut self, input: std::option::Option<crate::model::StartSelector>, ) -> Self { self.inner = self.inner.set_start_selector(input); self } } } impl<C> Client<C, crate::middleware::DefaultMiddleware, aws_smithy_client::retry::Standard> { /// Creates a client with the given service config and connector override. pub fn from_conf_conn(conf: crate::Config, conn: C) -> Self { let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default(); let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default(); let sleep_impl = conf.sleep_impl.clone(); let mut builder = aws_smithy_client::Builder::new() .connector(conn) .middleware(crate::middleware::DefaultMiddleware::new()); builder.set_retry_config(retry_config.into()); builder.set_timeout_config(timeout_config); if let Some(sleep_impl) = sleep_impl { builder.set_sleep_impl(Some(sleep_impl)); } let client = builder.build(); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } } impl Client< aws_smithy_client::erase::DynConnector, crate::middleware::DefaultMiddleware, aws_smithy_client::retry::Standard, > { /// Creates a new client from a shared config. #[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn new(config: &aws_types::config::Config) -> Self { Self::from_conf(config.into()) } /// Creates a new client from the service [`Config`](crate::Config). 
#[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn from_conf(conf: crate::Config) -> Self { let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default(); let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default(); let sleep_impl = conf.sleep_impl.clone(); let mut builder = aws_smithy_client::Builder::dyn_https() .middleware(crate::middleware::DefaultMiddleware::new()); builder.set_retry_config(retry_config.into()); builder.set_timeout_config(timeout_config); // the builder maintains a try-state. To avoid suppressing the warning when sleep is unset, // only set it if we actually have a sleep impl. if let Some(sleep_impl) = sleep_impl { builder.set_sleep_impl(Some(sleep_impl)); } let client = builder.build(); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } }
48
422
0.636427
ff93fa4d8e1569714121bd94d0aeb2ac5e6e7ef0
1,825
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #![cfg_attr(feature = "mesalock_sgx", no_std)] #[cfg(feature = "mesalock_sgx")] #[macro_use] extern crate sgx_tstd as std; //#[macro_use] extern crate log; use thiserror::Error; #[derive(Error, Debug)] pub enum AttestationError { #[error("OCall failed")] OCallError, #[error("Ias error")] IasError, #[error("Get quote error")] QuoteError, } #[macro_use] mod cert; pub mod quote; pub mod verifier; use cfg_if::cfg_if; cfg_if! { if #[cfg(feature = "mesalock_sgx")] { pub mod key; mod report; mod ias; pub use report::IasReport; } else { } } #[cfg(all(feature = "eigen_unit_test", feature = "mesalock_sgx"))] pub mod tests { use super::*; use std::env; pub fn test_report() { let ias_key = env::var("IAS_KEY").unwrap(); let ias_spid = env::var("IAS_SPID").unwrap(); report::tests::test_init_quote(); report::tests::test_create_report(); report::tests::test_get_quote(&ias_key, &ias_spid); } }
26.838235
66
0.673973
d6f376ad1e8a3f6569916b57f9e55504a84bd4e9
2,914
use crate::{ exec::Executable, gc::{Finalize, Trace}, syntax::ast::node::Node, Context, Result, Value, }; use std::fmt; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; /// The `if` statement executes a statement if a specified condition is [`truthy`][truthy]. If /// the condition is [`falsy`][falsy], another statement can be executed. /// /// Multiple `if...else` statements can be nested to create an else if clause. /// /// Note that there is no elseif (in one word) keyword in JavaScript. /// /// More information: /// - [ECMAScript reference][spec] /// - [MDN documentation][mdn] /// /// [spec]: https://tc39.es/ecma262/#prod-IfStatement /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/if...else /// [truthy]: https://developer.mozilla.org/en-US/docs/Glossary/truthy /// [falsy]: https://developer.mozilla.org/en-US/docs/Glossary/falsy /// [expression]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Expressions_and_Operators#Expressions #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Debug, Trace, Finalize, PartialEq)] pub struct If { cond: Box<Node>, body: Box<Node>, else_node: Option<Box<Node>>, } impl If { pub fn cond(&self) -> &Node { &self.cond } pub fn body(&self) -> &Node { &self.body } pub fn else_node(&self) -> Option<&Node> { self.else_node.as_ref().map(Box::as_ref) } /// Creates an `If` AST node. 
pub fn new<C, B, E, OE>(condition: C, body: B, else_node: OE) -> Self where C: Into<Node>, B: Into<Node>, E: Into<Node>, OE: Into<Option<E>>, { Self { cond: Box::new(condition.into()), body: Box::new(body.into()), else_node: else_node.into().map(E::into).map(Box::new), } } pub(in crate::syntax::ast::node) fn display( &self, f: &mut fmt::Formatter<'_>, indent: usize, ) -> fmt::Result { write!(f, "if ({}) ", self.cond())?; match self.else_node() { Some(else_e) => { self.body().display(f, indent)?; f.write_str(" else ")?; else_e.display(f, indent) } None => self.body().display(f, indent), } } } impl Executable for If { fn run(&self, context: &mut Context) -> Result<Value> { Ok(if self.cond().run(context)?.to_boolean() { self.body().run(context)? } else if let Some(ref else_e) = self.else_node() { else_e.run(context)? } else { Value::undefined() }) } } impl fmt::Display for If { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.display(f, 0) } } impl From<If> for Node { fn from(if_stm: If) -> Node { Self::If(if_stm) } }
28.019231
117
0.566575
ef51f55511f2b1527d8685baf84e6562e6468a41
13,648
// Copyright 2020. The Tari Project // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the // following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following // disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the // following disclaimer in the documentation and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote // products derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#[allow(dead_code)] mod helpers; use helpers::{ block_builders::create_genesis_block_with_coinbase_value, event_stream::event_stream_next, nodes::{random_node_identity, BaseNodeBuilder}, }; use core::iter; use futures::{FutureExt, SinkExt, StreamExt}; use rand::{distributions::Alphanumeric, rngs::OsRng, Rng}; use std::{sync::atomic::Ordering, time::Duration}; use tari_broadcast_channel::{bounded, Publisher, Subscriber}; use tari_comms::{ multiaddr::Multiaddr, peer_manager::{NodeId, Peer, PeerFeatures, PeerFlags}, transports::MemoryTransport, types::CommsPublicKey, }; use tari_comms_dht::DhtConfig; use tari_core::{ base_node::{service::BaseNodeServiceConfig, states::StateEvent}, consensus::{ConsensusConstantsBuilder, ConsensusManagerBuilder, Network}, mempool::{MempoolServiceConfig, TxStorageResponse}, mining::Miner, transactions::{tari_amount::MicroTari, transaction::Transaction, types::CryptoFactories}, }; use tari_mmr::MmrCacheConfig; use tari_p2p::{initialization::CommsConfig, services::liveness::LivenessConfig, transport::TransportType}; use tari_shutdown::Shutdown; use tari_test_utils::async_assert_eventually; use tari_wallet::{ contacts_service::storage::memory_db::ContactsServiceMemoryDatabase, output_manager_service::storage::memory_db::OutputManagerMemoryDatabase, storage::memory_db::WalletMemoryDatabase, transaction_service::{ config::TransactionServiceConfig, handle::TransactionEvent, storage::memory_db::TransactionMemoryDatabase, }, wallet::WalletConfig, Wallet, }; use tempfile::tempdir; use tokio::{ runtime::{Builder, Runtime}, time::delay_for, }; pub fn random_string(len: usize) -> String { iter::repeat(()).map(|_| OsRng.sample(Alphanumeric)).take(len).collect() } pub fn get_next_memory_address() -> Multiaddr { let port = MemoryTransport::acquire_next_memsocket_port(); format!("/memory/{}", port).parse().unwrap() } fn create_runtime() -> Runtime { Builder::new() .threaded_scheduler() .enable_all() .core_threads(8) .build() .unwrap() } fn 
create_peer(public_key: CommsPublicKey, net_address: Multiaddr) -> Peer { Peer::new( public_key.clone(), NodeId::from_key(&public_key).unwrap(), net_address.into(), PeerFlags::empty(), PeerFeatures::COMMUNICATION_NODE, &[], ) } #[test] fn wallet_base_node_integration_test() { let temp_dir = tempdir().unwrap(); let factories = CryptoFactories::default(); let alice_node_identity = random_node_identity(); let bob_node_identity = random_node_identity(); let base_node_identity = random_node_identity(); log::info!( "manage_single_transaction: Alice: '{}', Bob: '{}', Base: '{}'", alice_node_identity.node_id().short_str(), bob_node_identity.node_id().short_str(), base_node_identity.node_id().short_str() ); // Base Node Setup let mut base_node_runtime = create_runtime(); let network = Network::LocalNet; let consensus_constants = ConsensusConstantsBuilder::new(network) .with_emission_amounts(100_000_000.into(), 0.999, 100.into()) .build(); let (block0, utxo0) = create_genesis_block_with_coinbase_value(&factories, 100_000_000.into(), &consensus_constants); let consensus_manager = ConsensusManagerBuilder::new(network) .with_consensus_constants(consensus_constants) .with_block(block0.clone()) .build(); let (base_node, _consensus_manager) = BaseNodeBuilder::new(network) .with_node_identity(base_node_identity.clone()) .with_base_node_service_config(BaseNodeServiceConfig::default()) .with_mmr_cache_config(MmrCacheConfig { rewind_hist_len: 10 }) .with_mempool_service_config(MempoolServiceConfig::default()) .with_liveness_service_config(LivenessConfig::default()) .with_consensus_manager(consensus_manager.clone()) .start(&mut base_node_runtime, temp_dir.path().to_str().unwrap()); log::info!("Finished Starting Base Node"); // Alice Wallet setup let alice_comms_config = CommsConfig { node_identity: alice_node_identity.clone(), transport_type: TransportType::Memory { listener_address: alice_node_identity.public_address(), }, datastore_path: temp_dir.path().to_path_buf(), 
peer_database_name: random_string(8), max_concurrent_inbound_tasks: 100, outbound_buffer_size: 100, dht: DhtConfig::default_local_test(), allow_test_addresses: true, listener_liveness_allowlist_cidrs: Vec::new(), listener_liveness_max_sessions: 0, }; let alice_wallet_config = WalletConfig { comms_config: alice_comms_config, factories: factories.clone(), transaction_service_config: Some(TransactionServiceConfig { base_node_monitoring_timeout: Duration::from_secs(1), low_power_polling_timeout: Duration::from_secs(10), ..Default::default() }), }; let alice_runtime = create_runtime(); let mut alice_wallet = Wallet::new( alice_wallet_config, alice_runtime, WalletMemoryDatabase::new(), TransactionMemoryDatabase::new(), OutputManagerMemoryDatabase::new(), ContactsServiceMemoryDatabase::new(), ) .unwrap(); let mut alice_event_stream = alice_wallet.transaction_service.get_event_stream_fused(); alice_wallet .set_base_node_peer( (*base_node_identity.public_key()).clone(), base_node_identity.public_address().clone().to_string(), ) .unwrap(); alice_wallet .runtime .block_on(alice_wallet.comms.peer_manager().add_peer(create_peer( bob_node_identity.public_key().clone(), bob_node_identity.public_address(), ))) .unwrap(); // Bob Wallet setup let bob_comms_config = CommsConfig { node_identity: bob_node_identity.clone(), transport_type: TransportType::Memory { listener_address: bob_node_identity.public_address(), }, datastore_path: temp_dir.path().to_path_buf(), peer_database_name: random_string(8), max_concurrent_inbound_tasks: 100, outbound_buffer_size: 100, dht: DhtConfig::default_local_test(), allow_test_addresses: true, listener_liveness_allowlist_cidrs: Vec::new(), listener_liveness_max_sessions: 0, }; let bob_wallet_config = WalletConfig { comms_config: bob_comms_config, factories: factories.clone(), transaction_service_config: None, }; let bob_runtime = create_runtime(); let mut bob_wallet = Wallet::new( bob_wallet_config, bob_runtime, WalletMemoryDatabase::new(), 
TransactionMemoryDatabase::new(), OutputManagerMemoryDatabase::new(), ContactsServiceMemoryDatabase::new(), ) .unwrap(); bob_wallet .runtime .block_on(bob_wallet.comms.peer_manager().add_peer(create_peer( alice_node_identity.public_key().clone(), alice_node_identity.public_address(), ))) .unwrap(); log::info!("Finished Starting Wallets"); // Transaction let mut runtime = create_runtime(); alice_wallet .runtime .block_on(alice_wallet.output_manager_service.add_output(utxo0)) .unwrap(); alice_wallet .runtime .block_on( alice_wallet .comms .connectivity() .wait_for_connectivity(Duration::from_secs(10)), ) .unwrap(); let value = MicroTari::from(1000); alice_wallet .runtime .block_on(alice_wallet.transaction_service.send_transaction( bob_node_identity.public_key().clone(), value, MicroTari::from(20), "MONAAHHH!".to_string(), )) .unwrap(); runtime.block_on(async { let mut delay = delay_for(Duration::from_secs(60)).fuse(); let mut broadcast = false; loop { futures::select! { event = alice_event_stream.select_next_some() => { if let TransactionEvent::TransactionBroadcast(_e) = (*event.unwrap()).clone() { broadcast = true; break; } }, () = delay => { break; }, } } assert!(broadcast, "Transaction has not been broadcast before timeout"); }); let transactions = runtime .block_on(alice_wallet.transaction_service.get_completed_transactions()) .unwrap(); assert_eq!(transactions.len(), 1); let mut transaction: Option<Transaction> = None; for (_, tx) in transactions { transaction = Some(tx.transaction.clone()); let tx_excess_sig = tx.transaction.body.kernels()[0].excess_sig.clone(); runtime.block_on(async { async_assert_eventually!( base_node.mempool.has_tx_with_excess_sig(tx_excess_sig.clone()).unwrap(), expect = TxStorageResponse::UnconfirmedPool, max_attempts = 20, interval = Duration::from_millis(1000) ); }); } runtime .block_on(alice_wallet.transaction_service.set_low_power_mode()) .unwrap(); let transaction = transaction.expect("Transaction must be present"); // Setup and 
start the miner let mut shutdown = Shutdown::new(); let mut miner = Miner::new(shutdown.to_signal(), consensus_manager, &base_node.local_nci, 1); miner.enable_mining_flag().store(true, Ordering::Relaxed); let (mut state_event_sender, state_event_receiver): (Publisher<_>, Subscriber<_>) = bounded(1, 113); miner.subscribe_to_node_state_events(state_event_receiver); miner.subscribe_to_mempool_state_events(base_node.local_mp_interface.get_mempool_state_event_stream()); let mut miner_utxo_stream = miner.get_utxo_receiver_channel().fuse(); runtime.spawn(async move { miner.mine().await; }); runtime.block_on(async { // Simulate block sync assert!(state_event_sender.send(StateEvent::BlocksSynchronized).await.is_ok()); // Wait for miner to finish mining block 1 assert!(event_stream_next(&mut miner_utxo_stream, Duration::from_secs(20)) .await .is_some()); // Check that the mined block was submitted to the base node service and the block was added to the blockchain let block1 = base_node.blockchain_db.fetch_block(1).unwrap().block().clone(); assert_eq!(block1.body.outputs().len(), 3); // Check if the outputs of tx1 appeared as outputs in block1 let mut found_tx_outputs = 0; for tx_output in transaction.body.outputs() { for block_output in block1.body.outputs() { if tx_output == block_output { found_tx_outputs += 1; break; } } } assert_eq!(found_tx_outputs, transaction.body.outputs().len()); }); runtime.block_on(async { let mut delay = delay_for(Duration::from_secs(30)).fuse(); let mut mined = false; loop { futures::select! { event = alice_event_stream.select_next_some() => { if let TransactionEvent::TransactionMined(_e) = (*event.unwrap()).clone() { mined = true; break; } }, () = delay => { break; }, } } assert!(mined, "Transaction has not been mined before timeout"); }); alice_wallet.shutdown(); bob_wallet.shutdown(); let _ = shutdown.trigger(); runtime.block_on(base_node.comms.shutdown()); }
37.806094
118
0.655994
69ccb95b0e8f04bc70ca2d41cf2c87fe234ec7c7
1,895
// Copyright 2020 - developers of the `grammers` project. // Copyright 2021 - developers of the `tdgrand` project. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // https://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! This module gathers all the code generation submodules and coordinates //! them, feeding them the right data. mod enums; mod functions; mod metadata; mod rustifier; mod types; use std::io::{self, Write}; use tdgrand_tl_parser::tl::{Definition, Type}; /// Don't generate types for definitions of this type, /// since they are "core" types and treated differently. const SPECIAL_CASED_TYPES: [&str; 5] = ["Bool", "Bytes", "Int32", "Int53", "Int64"]; fn ignore_type(ty: &Type) -> bool { SPECIAL_CASED_TYPES.iter().any(|&x| x == ty.name) } pub fn generate_rust_code(file: &mut impl Write, definitions: &[Definition]) -> io::Result<()> { write!( file, "\ // Copyright 2020 - developers of the `grammers` project.\n\ // Copyright 2021 - developers of the `tdgrand` project.\n\ //\n\ // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\ // https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\ // <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\ // option. This file may not be copied, modified, or distributed\n\ // except according to those terms.\n\ " )?; let metadata = metadata::Metadata::new(definitions); types::write_types_mod(file, definitions, &metadata)?; enums::write_enums_mod(file, definitions, &metadata)?; functions::write_functions_mod(file, definitions, &metadata)?; Ok(()) }
37.156863
96
0.669657
72c2dbbf66a64cc8d2004a2f1710ee1713f0c5a0
8,332
use crate::core::make_alphanumeric; use crate::core::BPF; use crate::error::BccError; use bcc_sys::bccapi::bpf_probe_attach_type_BPF_PROBE_ENTRY as BPF_PROBE_ENTRY; use bcc_sys::bccapi::bpf_probe_attach_type_BPF_PROBE_RETURN as BPF_PROBE_RETURN; use bcc_sys::bccapi::bpf_prog_type_BPF_PROG_TYPE_KPROBE as BPF_PROG_TYPE_KPROBE; use bcc_sys::bccapi::pid_t; use std::path::{Path, PathBuf}; #[derive(Default)] /// A `Uprobe` is used to configure and then attach a uprobe to a userspace /// function on entry into that function. Must be attached to a `BPF` struct to /// be useful. pub struct Uprobe { binary: Option<PathBuf>, handler: Option<String>, pid: Option<pid_t>, symbol: Option<String>, addr: Option<u64>, ref_ctr_offset: u32, } impl Uprobe { /// Create a new probe with the defaults. Further initialization is required /// before attaching. pub fn new() -> Self { Default::default() } /// Specify the name of the probe handler within the BPF code. This is a /// required item. pub fn handler(mut self, name: &str) -> Self { self.handler = Some(name.to_owned()); self } /// Specify the path to the binary to probe. This is a required item. pub fn binary<T: AsRef<Path>>(mut self, path: T) -> Self { self.binary = Some(PathBuf::from(path.as_ref())); self } /// Specify the symbol to probe. This is optional. /// /// Typically required when not specifying the symbol address directly via `address`. pub fn symbol(mut self, symbol: &str) -> Self { self.symbol = Some(symbol.to_owned()); self } /// Specify the symbol address to probe. This is optional. pub fn address(mut self, addr: u64) -> Self { self.addr = Some(addr); self } /// Specify a pid to probe. This is optional. pub fn pid(mut self, pid: Option<pid_t>) -> Self { self.pid = pid; self } /// Specify reference counter offset pub fn ref_ctr_offset(mut self, offset: u32) -> Self { self.ref_ctr_offset = offset; self } /// Consumes the probe and attaches it to the `BPF` struct. 
May return an /// error if there is a incomplete configuration or error while loading or /// attaching the probe. pub fn attach(self, bpf: &mut BPF) -> Result<(), BccError> { if self.handler.is_none() { return Err(BccError::InvalidUprobe { message: "handler is required".to_string(), }); } if self.binary.is_none() { return Err(BccError::InvalidUprobe { message: "binary is required".to_string(), }); } let binary = self.binary.unwrap().to_str().map(|v| v.to_owned()); if binary.is_none() { return Err(BccError::InvalidUprobe { message: "binary path is invalid".to_string(), }); } let binary = binary.unwrap(); let symbol = self.symbol.as_deref().unwrap_or(""); let pid = self.pid.unwrap_or(-1); let handler = self.handler.unwrap(); let addr = self.addr; let (path, addr) = crate::symbol::resolve_symbol_path(&binary, &symbol, addr.unwrap_or(0x0), pid)?; let alpha_path = make_alphanumeric(&path); let ev_name = format!("p_{}_0x{:x}", &alpha_path, addr); let code_fd = bpf.load(&handler, BPF_PROG_TYPE_KPROBE, 0, 0)?; #[cfg(any( feature = "v0_4_0", feature = "v0_5_0", feature = "v0_6_0", feature = "v0_6_1", feature = "v0_7_0", feature = "v0_8_0", feature = "v0_9_0", feature = "v0_10_0", feature = "v0_11_0", feature = "v0_12_0", feature = "v0_13_0", feature = "v0_14_0", feature = "v0_15_0", feature = "v0_16_0", ))] let uprobe = crate::core::Uprobe::new(&ev_name, BPF_PROBE_ENTRY, &path, addr, code_fd, pid)?; #[cfg(any(feature = "v0_17_0", feature = "v0_18_0", not(feature = "specific")))] let uprobe = crate::core::Uprobe::new( &ev_name, BPF_PROBE_ENTRY, &path, addr, code_fd, pid, self.ref_ctr_offset, )?; bpf.uprobes.insert(uprobe); Ok(()) } } #[derive(Default)] /// A `UserspaceReturnProbe` is used to configure and then attach a uprobe to a /// userspace function on return from that function. Must be attached to a `BPF` /// struct to be useful. 
pub struct Uretprobe { binary: Option<PathBuf>, handler: Option<String>, pid: Option<pid_t>, symbol: Option<String>, ref_ctr_offset: u32, } impl Uretprobe { /// Create a new probe with the defaults. Further initialization is required /// before attaching. pub fn new() -> Self { Default::default() } /// Specify the name of the probe handler within the BPF code. This is a /// required item. pub fn handler(mut self, name: &str) -> Self { self.handler = Some(name.to_owned()); self } /// Specify the path to the binary to probe. This is a required item. pub fn binary<T: AsRef<Path>>(mut self, path: T) -> Self { self.binary = Some(PathBuf::from(path.as_ref())); self } /// Specify the symbol to probe. This is required. pub fn symbol(mut self, symbol: &str) -> Self { self.symbol = Some(symbol.to_owned()); self } /// Specify a pid to probe. This is optional. pub fn pid(mut self, pid: Option<pid_t>) -> Self { self.pid = pid; self } /// Specify reference counter offset pub fn ref_ctr_offset(mut self, offset: u32) -> Self { self.ref_ctr_offset = offset; self } /// Consumes the probe and attaches it to the `BPF` struct. May return an /// error if there is a incomplete configuration or error while loading or /// attaching the probe. 
pub fn attach(self, bpf: &mut BPF) -> Result<(), BccError> { if self.handler.is_none() { return Err(BccError::InvalidUprobe { message: "handler is required".to_string(), }); } if self.binary.is_none() { return Err(BccError::InvalidUprobe { message: "binary is required".to_string(), }); } let binary = self.binary.unwrap().to_str().map(|v| v.to_owned()); if binary.is_none() { return Err(BccError::InvalidUprobe { message: "binary path is invalid".to_string(), }); } if self.symbol.is_none() { return Err(BccError::InvalidUprobe { message: "symbol is required".to_string(), }); } let binary = binary.unwrap(); let symbol = self.symbol.unwrap(); let pid = self.pid.unwrap_or(-1); let handler = self.handler.unwrap(); let (path, addr) = crate::symbol::resolve_symbol_path(&binary, &symbol, 0x0, pid)?; let alpha_path = make_alphanumeric(&path); let ev_name = format!("r_{}_0x{:x}", &alpha_path, addr); let code_fd = bpf.load(&handler, BPF_PROG_TYPE_KPROBE, 0, 0)?; #[cfg(any( feature = "v0_4_0", feature = "v0_5_0", feature = "v0_6_0", feature = "v0_6_1", feature = "v0_7_0", feature = "v0_8_0", feature = "v0_9_0", feature = "v0_10_0", feature = "v0_11_0", feature = "v0_12_0", feature = "v0_13_0", feature = "v0_14_0", feature = "v0_15_0", feature = "v0_16_0", ))] let uprobe = crate::core::Uprobe::new(&ev_name, BPF_PROBE_RETURN, &path, addr, code_fd, pid)?; #[cfg(any(feature = "v0_17_0", feature = "v0_18_0", not(feature = "specific")))] let uprobe = crate::core::Uprobe::new( &ev_name, BPF_PROBE_RETURN, &path, addr, code_fd, pid, self.ref_ctr_offset, )?; bpf.uprobes.insert(uprobe); Ok(()) } }
32.169884
93
0.56373
28cfdee1a0506f4b0d1b5c86f0cf69ce3cc37132
8,429
use anyhow::anyhow; use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use syn::parse::{Error as ParseError, Result as ParseResult}; /// Crate parse context /// /// Keeps track of modules defined within a crate. pub struct CrateContext { modules: BTreeMap<String, ParsedModule>, } impl CrateContext { pub fn consts(&self) -> impl Iterator<Item = &syn::ItemConst> { self.modules.iter().flat_map(|(_, ctx)| ctx.consts()) } pub fn structs(&self) -> impl Iterator<Item = &syn::ItemStruct> { self.modules.iter().flat_map(|(_, ctx)| ctx.structs()) } pub fn enums(&self) -> impl Iterator<Item = &syn::ItemEnum> { self.modules.iter().flat_map(|(_, ctx)| ctx.enums()) } pub fn modules(&self) -> impl Iterator<Item = ModuleContext> { self.modules .iter() .map(move |(_, detail)| ModuleContext { detail }) } pub fn root_module(&self) -> ModuleContext { ModuleContext { detail: self.modules.get("crate").unwrap(), } } pub fn parse(root: impl AsRef<Path>) -> Result<Self, anyhow::Error> { Ok(CrateContext { modules: ParsedModule::parse_recursive(root.as_ref())?, }) } // Perform Anchor safety checks on the parsed create pub fn safety_checks(&self) -> Result<(), anyhow::Error> { // Check all structs for unsafe field types, i.e. AccountInfo and UncheckedAccount. for (_, ctx) in self.modules.iter() { for unsafe_field in ctx.unsafe_struct_fields() { // Check if unsafe field type has been documented with a /// SAFETY: doc string. let is_documented = unsafe_field.attrs.iter().any(|attr| { attr.tokens.clone().into_iter().any(|token| match token { // Check for doc comments containing CHECK proc_macro2::TokenTree::Literal(s) => s.to_string().contains("CHECK"), _ => false, }) }); if !is_documented { let ident = unsafe_field.ident.as_ref().unwrap(); let span = ident.span(); // Error if undocumented. return Err(anyhow!( r#" {}:{}:{} Struct field "{}" is unsafe, but is not documented. Please add a `/// CHECK:` doc comment explaining why no checks through types are necessary. 
See https://book.anchor-lang.com/anchor_in_depth/the_accounts_struct.html#safety-checks for more information. "#, ctx.file.canonicalize().unwrap().display(), span.start().line, span.start().column, ident.to_string() )); }; } } Ok(()) } } /// Module parse context /// /// Keeps track of items defined within a module. #[derive(Copy, Clone)] pub struct ModuleContext<'krate> { detail: &'krate ParsedModule, } impl<'krate> ModuleContext<'krate> { pub fn items(&self) -> impl Iterator<Item = &syn::Item> { self.detail.items.iter() } } struct ParsedModule { name: String, file: PathBuf, path: String, items: Vec<syn::Item>, } impl ParsedModule { fn parse_recursive(root: &Path) -> Result<BTreeMap<String, ParsedModule>, anyhow::Error> { let mut modules = BTreeMap::new(); let root_content = std::fs::read_to_string(root)?; let root_file = syn::parse_file(&root_content)?; let root_mod = Self::new( String::new(), root.to_owned(), "crate".to_owned(), root_file.items, ); struct UnparsedModule { file: PathBuf, path: String, name: String, item: syn::ItemMod, } let mut unparsed = root_mod .submodules() .map(|item| UnparsedModule { file: root_mod.file.clone(), path: root_mod.path.clone(), name: item.ident.to_string(), item: item.clone(), }) .collect::<Vec<_>>(); while let Some(to_parse) = unparsed.pop() { let path = format!("{}::{}", to_parse.path, to_parse.name); let name = to_parse.name; let module = Self::from_item_mod(&to_parse.file, &path, to_parse.item)?; unparsed.extend(module.submodules().map(|item| UnparsedModule { item: item.clone(), file: module.file.clone(), path: module.path.clone(), name: item.ident.to_string(), })); modules.insert(format!("{}{}", module.path.clone(), name.clone()), module); } modules.insert(root_mod.name.clone(), root_mod); Ok(modules) } fn from_item_mod( parent_file: &Path, parent_path: &str, item: syn::ItemMod, ) -> ParseResult<Self> { Ok(match item.content { Some((_, items)) => { // The module content is within the parent file being parsed Self::new( 
parent_path.to_owned(), parent_file.to_owned(), item.ident.to_string(), items, ) } None => { // The module is referencing some other file, so we need to load that // to parse the items it has. let parent_dir = parent_file.parent().unwrap(); let parent_filename = parent_file.file_stem().unwrap().to_str().unwrap(); let parent_mod_dir = parent_dir.join(parent_filename); let possible_file_paths = vec![ parent_dir.join(format!("{}.rs", item.ident)), parent_dir.join(format!("{}/mod.rs", item.ident)), parent_mod_dir.join(format!("{}.rs", item.ident)), parent_mod_dir.join(format!("{}/mod.rs", item.ident)), ]; let mod_file_path = possible_file_paths .into_iter() .find(|p| p.exists()) .ok_or_else(|| ParseError::new_spanned(&item, "could not find file"))?; let mod_file_content = std::fs::read_to_string(&mod_file_path) .map_err(|_| ParseError::new_spanned(&item, "could not read file"))?; let mod_file = syn::parse_file(&mod_file_content)?; Self::new( parent_path.to_owned(), mod_file_path, item.ident.to_string(), mod_file.items, ) } }) } fn new(path: String, file: PathBuf, name: String, items: Vec<syn::Item>) -> Self { Self { name, file, path, items, } } fn submodules(&self) -> impl Iterator<Item = &syn::ItemMod> { self.items.iter().filter_map(|i| match i { syn::Item::Mod(item) => Some(item), _ => None, }) } fn structs(&self) -> impl Iterator<Item = &syn::ItemStruct> { self.items.iter().filter_map(|i| match i { syn::Item::Struct(item) => Some(item), _ => None, }) } fn unsafe_struct_fields(&self) -> impl Iterator<Item = &syn::Field> { self.structs() .flat_map(|s| &s.fields) .filter(|f| match &f.ty { syn::Type::Path(syn::TypePath { path: syn::Path { segments, .. }, .. 
}) => { segments.len() == 1 && segments[0].ident == "UncheckedAccount" || segments[0].ident == "AccountInfo" } _ => false, }) } fn enums(&self) -> impl Iterator<Item = &syn::ItemEnum> { self.items.iter().filter_map(|i| match i { syn::Item::Enum(item) => Some(item), _ => None, }) } fn consts(&self) -> impl Iterator<Item = &syn::ItemConst> { self.items.iter().filter_map(|i| match i { syn::Item::Const(item) => Some(item), _ => None, }) } }
33.987903
117
0.503618
edd79442fb5a186ca7775e354f1bcbe265a91c14
231
//! Basic type conversion traits. Unlike the native standard library, `U: From<T>` does not yet //! imply `T: Into<U>`. pub trait From<T>: Sized { fn from(_: T) -> Self; } pub trait Into<T>: Sized { fn from(self) -> T; }
21
95
0.61039
8f58c32e6566cb8307d07bffb1d5904aa3b9b057
355
use ink_lang as ink; #[ink::contract] mod contract { #[ink(storage)] pub struct Contract {} #[ink(impl)] impl Contract {} #[ink(impl)] impl Contract { #[ink(constructor)] pub fn constructor() -> Self { Self {} } #[ink(message)] pub fn message(&self) {} } } fn main() {}
14.791667
38
0.487324
1ca709d09755bc7bc71a0cf82acb1ab3999279cc
528
// check-pass // revisions: full min #![cfg_attr(full, feature(const_generics))] #![cfg_attr(full, allow(incomplete_features))] #![cfg_attr(min, feature(min_const_generics))] struct Foo<const D: usize> { state: Option<[u8; D]>, } impl<const D: usize> Iterator for Foo<{D}> { type Item = [u8; D]; fn next(&mut self) -> Option<Self::Item> { if true { return Some(self.state.unwrap().clone()); } else { return Some(self.state.unwrap().clone()); } } } fn main() {}
22.956522
53
0.583333
d6ed38df62f856cb1bdd15debf12343561ba1a1a
49
pub mod home; pub mod not_found; pub mod slides;
12.25
18
0.755102
4b32d7e2616342c49c3e5bbd8007f87b2be618a7
1,878
// Having flags in terms of bitshifts is more readable. #![allow(clippy::identity_op)] use crate::raw::*; use bitflags::bitflags; bitflags! { #[derive(Default)] pub struct SetupFlags : u32 { const IOPOLL = IORING_SETUP_IOPOLL; const SQPOLL = IORING_SETUP_SQPOLL; const SQ_AFF = IORING_SETUP_SQ_AFF; const CQSIZE = IORING_SETUP_CQSIZE; } } bitflags! { #[derive(Default)] pub struct Features : u32 { const SINGLE_MMAP = IORING_FEAT_SINGLE_MMAP; const NODROP = IORING_FEAT_NODROP; const SUBMIT_STABLE = IORING_FEAT_SUBMIT_STABLE; const RW_CUR_POS = IORING_FEAT_RW_CUR_POS; const CUR_PERSONALITY = IORING_FEAT_CUR_PERSONALITY; } } bitflags! { #[derive(Default)] pub struct SubmissionFlags : u8 { const FIXED_FILE = IOSQE_FIXED_FILE; const IO_DRAIN = IOSQE_IO_DRAIN; const IO_LINK = IOSQE_IO_LINK; const IO_HARDLINK = IOSQE_IO_HARDLINK; const ASYNC = IOSQE_ASYNC; const BUFFER_SELECT = IOSQE_BUFFER_SELECT; } } bitflags! { #[derive(Default)] pub struct CompletionFlags : u32 { // If set, the upper 16 bits are the buffer ID const BUFFER = 1 << 0; const BUFFER_ID_MASK = 0xFFFF << 16; } } bitflags! { #[derive(Default)] pub struct ProbeOpFlags: u16 { const SUPPORTED = 1 << 0; } } bitflags! { #[derive(Default)] pub struct TimeoutFlags : u32 { const ABS = 1 << 0; } } bitflags! { #[derive(Default)] pub struct EnterFlags : u32 { const GETEVENTS = 1 << 0; const SQ_WAKEUP = 1 << 1; } } impl CompletionFlags { pub fn buf_id(&self) -> Option<u16> { match self.contains(Self::BUFFER) { true => Some((self.bits() >> 16) as u16), false => None, } } }
23.185185
60
0.601171
d72c093adece6bd272a228e2d99481217f4e801b
1,993
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 */ use aws_http::user_agent::AwsUserAgent; use aws_sdk_s3::middleware::DefaultMiddleware; use aws_sdk_s3::operation::ListObjectsV2; use aws_sdk_s3::{Credentials, Region}; use aws_smithy_client::test_connection::TestConnection; use aws_smithy_client::Client as CoreClient; use aws_smithy_http::body::SdkBody; use std::time::{Duration, UNIX_EPOCH}; pub type Client<C> = CoreClient<C, DefaultMiddleware>; #[tokio::test] async fn test_signer() -> Result<(), aws_sdk_s3::Error> { let creds = Credentials::new( "ANOTREAL", "notrealrnrELgWzOk3IfjzDKtFBhDby", Some("notarealsessiontoken".to_string()), None, "test", ); let conf = aws_sdk_s3::Config::builder() .credentials_provider(creds) .region(Region::new("us-east-1")) .build(); let conn = TestConnection::new(vec![( http::Request::builder() .header("authorization", "AWS4-HMAC-SHA256 Credential=ANOTREAL/20210618/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date;x-amz-security-token;x-amz-user-agent, Signature=6233614b69271e15db079287874a654183916e509909b5719b00cd8d5f31299e") .uri("https://s3.us-east-1.amazonaws.com/test-bucket?list-type=2&prefix=prefix~") .body(SdkBody::empty()) .unwrap(), http::Response::builder().status(200).body("").unwrap(), )]); let client = Client::new(conn.clone()); let mut op = ListObjectsV2::builder() .bucket("test-bucket") .prefix("prefix~") .build() .unwrap() .make_operation(&conf) .await .unwrap(); op.properties_mut() .insert(UNIX_EPOCH + Duration::from_secs(1624036048)); op.properties_mut().insert(AwsUserAgent::for_tests()); client.call(op).await.expect_err("empty response"); conn.assert_requests_match(&[]); Ok(()) }
36.907407
277
0.662318
6a724d4c632bb24d0fa0ed4fc4841df5685f5b4d
3,078
#![no_std] #![feature(concat_idents)] #![feature(collections)] extern crate collections; use collections::vec::Vec; /** * @section License * * The MIT License (MIT) * * Copyright (c) 2017, Erik Moqvist * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, copy, * modify, merge, publish, distribute, sublicense, and/or sell copies * of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * * This file is part of the Rafiki project. */ #[macro_use] extern crate rafiki; use rafiki::kernel::{errno, sys, time}; use rafiki::kernel::chan::Channel; use rafiki::sync::{chan, event, queue}; use rafiki::debug::harness::Harness; testcase_define!(test_poll); fn test_poll_impl(_: *mut Harness) -> rafiki::Res { let timeout = sys::Time { seconds: 0, nanoseconds: 100 }; let (queue_tx, queue_rx) = queue::new(Some(32)); let (event_tx, event_rx) = event::new(); let list: chan::List(); /* Add both channels to the channel list. */ list.add(queue_rx.clone()); list.add(event_rx.clone()); println!("1. 
Writing to the queue channel."); assert!(queue_tx.write(&[2, 1, 0]) == Ok(3)); loop { println!("Polling..."); match list.poll(&Some(timeout)) { Ok(0) => { println!("2. Reading from the queue channel."); let mut buf: [u8; 3] = [0; 3]; assert!(queue_rx.read(&mut buf) == Ok(3)); assert!(buf == [2, 1, 0]); }, Ok(1) => { println!("4. Reading from the event channel."); assert!(event_rx.read(0x1) == Ok(0x1)); }, Err(errno::ETIMEDOUT) => { println!("3. Timeout. Writing to the event channel."); assert!(event_tx.write(0x1) == Ok(4)); }, _ => { unreachable!(); } } } } #[no_mangle] pub fn main() { let mut harness: Harness = Default::default(); let mut harness_testcases = [ testcase!(Some(test_poll), "test_poll"), testcase!(None, "") ]; sys::start(); uart::init(); harness.init(); harness.run(&mut harness_testcases); }
29.314286
70
0.616309
fbd9065fa433ea170da4cbb7cccd2a3338080fed
17,382
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{data_cache::RemoteCache, logging::NoContextLog, move_vm::MoveVM}; use move_core_types::{ account_address::AccountAddress, gas_schedule::{GasAlgebra, GasUnits}, identifier::Identifier, language_storage::{ModuleId, StructTag, TypeTag}, vm_status::StatusCode, }; use move_vm_types::{ gas_schedule::{zero_cost_schedule, CostStrategy}, values::Value, }; use vm::{ errors::{PartialVMResult, VMResult}, file_format::{ AddressIdentifierIndex, Bytecode, CodeUnit, CompiledScriptMut, FunctionHandle, FunctionHandleIndex, IdentifierIndex, ModuleHandle, ModuleHandleIndex, Signature, SignatureIndex, SignatureToken, StructHandle, StructHandleIndex, }, }; // make a script with a given signature for main. The main just return, cannot // pass resources or the verifier will fail as being still on the stack (args) fn make_script(signature: Signature) -> Vec<u8> { let mut blob = vec![]; CompiledScriptMut { module_handles: vec![], struct_handles: vec![], function_handles: vec![], function_instantiations: vec![], signatures: vec![Signature(vec![]), signature], identifiers: vec![], address_identifiers: vec![], constant_pool: vec![], type_parameters: vec![], parameters: SignatureIndex(1), code: CodeUnit { locals: SignatureIndex(0), code: vec![Bytecode::Ret], }, } .serialize(&mut blob) .expect("script must serialize"); blob } // make a script with a given signature for main. The main just return; cannot // define resources in signature or the verifier will fail with resource not being consumed. // The script has an imported struct that can be used in main's signature. // Dependencies check happens after main signature check, so we should expect // a signature check error. 
fn make_script_with_imports(signature: Signature) -> Vec<u8> { let mut blob = vec![]; CompiledScriptMut { module_handles: vec![ModuleHandle { address: AddressIdentifierIndex(0), name: IdentifierIndex(0), }], struct_handles: vec![StructHandle { module: ModuleHandleIndex(0), name: IdentifierIndex(1), is_nominal_resource: false, type_parameters: vec![], }], function_handles: vec![], function_instantiations: vec![], signatures: vec![Signature(vec![]), signature], identifiers: vec![ Identifier::new("one").unwrap(), Identifier::new("two").unwrap(), ], address_identifiers: vec![AccountAddress::random()], constant_pool: vec![], type_parameters: vec![], parameters: SignatureIndex(1), code: CodeUnit { locals: SignatureIndex(0), code: vec![Bytecode::Ret], }, } .serialize(&mut blob) .expect("script must serialize"); blob } // make a script with an external function that has the same signature as // the main. That allows us to pass resources and make the verifier happy that // they are consumed. // Dependencies check happens after main signature check, so we should expect // a signature check error. 
fn make_script_consuming_args(signature: Signature) -> Vec<u8> { let mut blob = vec![]; let mut code = vec![]; for loc_idx in 0..signature.len() { code.push(Bytecode::MoveLoc(loc_idx as u8)); code.push(Bytecode::Call(FunctionHandleIndex(0))); } code.push(Bytecode::Ret); CompiledScriptMut { module_handles: vec![ModuleHandle { address: AddressIdentifierIndex(0), name: IdentifierIndex(0), }], struct_handles: vec![StructHandle { module: ModuleHandleIndex(0), name: IdentifierIndex(1), is_nominal_resource: false, type_parameters: vec![], }], function_handles: vec![FunctionHandle { module: ModuleHandleIndex(0), name: IdentifierIndex(2), parameters: SignatureIndex(1), return_: SignatureIndex(0), type_parameters: vec![], }], function_instantiations: vec![], signatures: vec![Signature(vec![]), signature], identifiers: vec![ Identifier::new("one").unwrap(), Identifier::new("two").unwrap(), Identifier::new("three").unwrap(), ], address_identifiers: vec![AccountAddress::random()], constant_pool: vec![], type_parameters: vec![], parameters: SignatureIndex(1), code: CodeUnit { locals: SignatureIndex(0), code, }, } .serialize(&mut blob) .expect("script must serialize"); blob } struct RemoteStore {} impl RemoteCache for RemoteStore { fn get_module(&self, _module_id: &ModuleId) -> VMResult<Option<Vec<u8>>> { Ok(None) } fn get_resource( &self, _address: &AccountAddress, _tag: &StructTag, ) -> PartialVMResult<Option<Vec<u8>>> { Ok(None) } } fn call_script_with_args_ty_args_signers( script: Vec<u8>, args: Vec<Value>, ty_args: Vec<TypeTag>, signers: Vec<AccountAddress>, ) -> VMResult<()> { let move_vm = MoveVM::new(); let remote_view = RemoteStore {}; let log_context = NoContextLog::new(); let mut session = move_vm.new_session(&remote_view); let cost_table = zero_cost_schedule(); let mut cost_strategy = CostStrategy::system(&cost_table, GasUnits::new(0)); session.execute_script( script, ty_args, args, signers, &mut cost_strategy, &log_context, ) } fn call_script(script: Vec<u8>, args: 
Vec<Value>) -> VMResult<()> { call_script_with_args_ty_args_signers(script, args, vec![], vec![]) } #[test] fn check_main_signature() { // // Bad signatures // // struct in signature let script = make_script_with_imports(Signature(vec![SignatureToken::Struct( StructHandleIndex(0), )])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // struct in signature let script = make_script_with_imports(Signature(vec![ SignatureToken::Bool, SignatureToken::Struct(StructHandleIndex(0)), SignatureToken::U64, ])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // reference to struct in signature let script = make_script_with_imports(Signature(vec![ SignatureToken::Address, SignatureToken::MutableReference(Box::new(SignatureToken::Struct(StructHandleIndex(0)))), ])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // vector of struct in signature let script = make_script_with_imports(Signature(vec![ SignatureToken::Bool, SignatureToken::Vector(Box::new(SignatureToken::Struct(StructHandleIndex(0)))), SignatureToken::U64, ])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // vector of vector of struct in signature let script = make_script_with_imports(Signature(vec![ SignatureToken::Bool, SignatureToken::Vector(Box::new(SignatureToken::Vector(Box::new( SignatureToken::Struct(StructHandleIndex(0)), )))), SignatureToken::U64, ])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // reference to vector in signature let script = make_script_with_imports(Signature(vec![SignatureToken::Reference(Box::new( 
SignatureToken::Vector(Box::new(SignatureToken::Struct(StructHandleIndex(0)))), ))])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // reference to vector in signature let script = make_script_with_imports(Signature(vec![SignatureToken::Reference(Box::new( SignatureToken::U64, ))])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // `Signer` in signature (not `&Signer`) let script = make_script_consuming_args(Signature(vec![SignatureToken::Signer])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // vector of `Signer` in signature let script = make_script_consuming_args(Signature(vec![SignatureToken::Vector(Box::new( SignatureToken::Signer, ))])); assert_eq!( call_script(script, vec![Value::u128(0)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // `Signer` ref not first arg let script = make_script(Signature(vec![ SignatureToken::Bool, SignatureToken::Reference(Box::new(SignatureToken::Signer)), ])); assert_eq!( call_script(script, vec![Value::bool(false)]) .err() .unwrap() .major_status(), StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE, ); // // Good signatures // // All constants let script = make_script(Signature(vec![SignatureToken::Vector(Box::new( SignatureToken::Bool, ))])); call_script(script, vec![Value::vector_bool(vec![true, false])]).expect("vector<bool> is good"); let script = make_script(Signature(vec![ SignatureToken::Bool, SignatureToken::Vector(Box::new(SignatureToken::U8)), SignatureToken::Address, ])); call_script( script, vec![ Value::bool(true), Value::vector_u8(vec![0, 1]), Value::address(AccountAddress::random()), ], ) .expect("vector<u8> is good"); // signer ref let script = make_script(Signature(vec![ 
SignatureToken::Reference(Box::new(SignatureToken::Signer)), SignatureToken::Bool, SignatureToken::Address, ])); call_script_with_args_ty_args_signers( script, vec![Value::bool(false), Value::address(AccountAddress::random())], vec![], vec![AccountAddress::random()], ) .expect("&Signer first argument is good"); let script = make_script(Signature(vec![ SignatureToken::Bool, SignatureToken::Vector(Box::new(SignatureToken::U8)), SignatureToken::Vector(Box::new(SignatureToken::Vector(Box::new( SignatureToken::Address, )))), ])); let mut addresses = vec![]; addresses.push(Value::vector_address(vec![ AccountAddress::random(), AccountAddress::random(), ])); addresses.push(Value::vector_address(vec![ AccountAddress::random(), AccountAddress::random(), ])); addresses.push(Value::vector_address(vec![ AccountAddress::random(), AccountAddress::random(), ])); let values = Value::constant_vector_generic( addresses, &SignatureToken::Vector(Box::new(SignatureToken::Address)), ) .expect("vector<vector<address>> can be built"); call_script( script, vec![Value::bool(true), Value::vector_u8(vec![0, 1]), values], ) .expect("vector<vector<address>> is good"); } #[test] fn check_constant_args() { // // Simple arguments // // U128 arg, success let script = make_script(Signature(vec![SignatureToken::U128])); call_script(script, vec![Value::u128(0)]).expect("u128 is good"); // error: no args - missing arg comes as type mismatch let script = make_script(Signature(vec![SignatureToken::U64])); assert_eq!( call_script(script, vec![]).err().unwrap().major_status(), StatusCode::TYPE_MISMATCH, ); // error: too many args - too many args comes as type mismatch let script = make_script(Signature(vec![SignatureToken::Bool])); assert_eq!( call_script(script, vec![]).err().unwrap().major_status(), StatusCode::TYPE_MISMATCH, ); // // Vector arguments // // success: vector of addresses let script = make_script(Signature(vec![SignatureToken::Vector(Box::new( SignatureToken::Address, ))])); // empty vector 
call_script(script.clone(), vec![Value::vector_address(vec![])]) .expect("empty vector<address> is good"); // one elem vector call_script( script.clone(), vec![Value::vector_address(vec![AccountAddress::random()])], ) .expect("vector<address> is good"); // multiple elems vector call_script( script.clone(), vec![Value::vector_address(vec![ AccountAddress::random(), AccountAddress::random(), AccountAddress::random(), AccountAddress::random(), AccountAddress::random(), ])], ) .expect("multiple vector<address> is good"); // wrong vector vector<bool> passed for vector<address> assert_eq!( call_script(script.clone(), vec![Value::vector_bool(vec![true])]) .err() .unwrap() .major_status(), StatusCode::TYPE_MISMATCH, ); // wrong U128 passed for vector<address> assert_eq!( call_script(script, vec![Value::u128(12)]) .err() .unwrap() .major_status(), StatusCode::TYPE_MISMATCH, ); // vector of vector let script = make_script(Signature(vec![SignatureToken::Vector(Box::new( SignatureToken::Vector(Box::new(SignatureToken::U8)), ))])); // empty vector let arg = Value::constant_vector_generic( vec![], &SignatureToken::Vector(Box::new(SignatureToken::U8)), ) .expect("create vector of vector"); call_script(script.clone(), vec![arg]).expect("empty vector<vector<u8>> is good"); // multiple elements vector let inner = vec![ Value::vector_u8(vec![0, 1]), Value::vector_u8(vec![2, 3]), Value::vector_u8(vec![4, 5]), ]; let arg = Value::constant_vector_generic( inner, &SignatureToken::Vector(Box::new(SignatureToken::U8)), ) .expect("create vector of vector"); call_script(script.clone(), vec![arg]).expect("vector<vector<u8>> is good"); // wrong U8 passed for vector<U8> assert_eq!( call_script(script, vec![Value::u8(12)]) .err() .unwrap() .major_status(), StatusCode::TYPE_MISMATCH, ); } #[test] fn check_signer_args() { let two_signer_script = make_script(Signature(vec![ SignatureToken::Reference(Box::new(SignatureToken::Signer)), SignatureToken::Reference(Box::new(SignatureToken::Signer)), ])); 
// too few signers (0) assert_eq!( call_script_with_args_ty_args_signers(two_signer_script.clone(), vec![], vec![], vec![]) .err() .unwrap() .major_status(), StatusCode::TYPE_MISMATCH ); // too few signers (1) let one_signer = vec![AccountAddress::random()]; assert_eq!( call_script_with_args_ty_args_signers( two_signer_script.clone(), vec![], vec![], one_signer, ) .err() .unwrap() .major_status(), StatusCode::TYPE_MISMATCH ); // too many signers (3) let three_signers = vec![ AccountAddress::random(), AccountAddress::random(), AccountAddress::random(), ]; assert_eq!( call_script_with_args_ty_args_signers( two_signer_script.clone(), vec![], vec![], three_signers ) .err() .unwrap() .major_status(), StatusCode::TYPE_MISMATCH ); // correct number of signers (2) let two_signers = vec![AccountAddress::random(), AccountAddress::random()]; call_script_with_args_ty_args_signers(two_signer_script, vec![], vec![], two_signers) .expect("Expected two signers, passing two should work"); // too many signers (1) in a script that expects 0 is ok let no_signer_script = make_script(Signature(vec![SignatureToken::U8])); call_script_with_args_ty_args_signers( no_signer_script, vec![Value::u8(10)], vec![], vec![AccountAddress::random()], ) .expect("Ok to pass oo many signers"); }
31.318919
100
0.60252
e50d48c0c74d10d6bb82d05b15dabfbe4cfec477
7,907
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. //! An implementation of the [FIDL wire format] for laying out messages whose types are defined //! at runtime. //! //! [FIDL wire format]: https://fuchsia.dev/fuchsia-src/reference/fidl/language/wire-format use std::default::Default; /// A FIDL struct for encoding. Fields are defined in order. pub struct Structure { fields: Vec<Field>, } impl Default for Structure { fn default() -> Self { Structure { fields: vec![] } } } impl Structure { /// Add a field and its value to this dynamic struct definition. pub fn field(mut self, field: Field) -> Self { self.fields.push(field); self } /// Encode this struct into it's [persistent message encoding]. /// /// [persistent message encoding]: https://fuchsia.dev/fuchsia-src/contribute/governance/rfcs/0120_standalone_use_of_fidl_wire_format pub fn encode_persistent(&self) -> Vec<u8> { let mut buf = Vec::new(); // encode the persistent header: buf.push(0); // disambiguator buf.push(1); // current wire format magic number buf.extend(2u16.to_le_bytes()); // v2 wire format buf.extend([0; 4]); // reserved with zeroes // encode the struct's fields: if self.fields.is_empty() { // A structure can be: // // * empty — it has no fields. Such a structure is 1 byte in size, with an alignment of // 1 byte, and is exactly equivalent to a structure containing a uint8 with the value // zero. BasicField::UInt8(0).encode_inline(&mut buf); } else { // encode primary objects first for field in &self.fields { field.encode_inline(&mut buf); } for field in &self.fields { field.encode_out_of_line(&mut buf); } } // Externally, the structure is aligned on an 8-byte boundary, and may therefore contain // final padding to meet that requirement. buf.pad_to(8); buf } } /// A field of a FIDL struct. 
pub enum Field { Basic(BasicField), Vector(VectorField), } impl Field { fn alignment(&self) -> usize { match self { Self::Basic(b) => b.alignment(), Self::Vector(l) => l.alignment(), } } fn encode_inline(&self, buf: &mut Vec<u8>) { buf.pad_to(self.alignment()); match self { Self::Basic(b) => b.encode_inline(buf), Self::Vector(l) => l.encode_inline(buf), } } fn encode_out_of_line(&self, buf: &mut Vec<u8>) { match self { Self::Basic(_) => (), Self::Vector(l) => { // each secondary object must be padded to 8 bytes, as well as the primary buf.pad_to(8); l.encode_out_of_line(buf); } } } } pub enum BasicField { Bool(bool), UInt8(u8), UInt16(u16), UInt32(u32), UInt64(u64), Int8(i8), Int16(i16), Int32(i32), Int64(i64), } impl BasicField { fn encode_inline(&self, buf: &mut Vec<u8>) { match self { Self::Bool(b) => buf.push(if *b { 1u8 } else { 0u8 }), Self::UInt8(n) => buf.push(*n), Self::UInt16(n) => buf.extend(n.to_le_bytes()), Self::UInt32(n) => buf.extend(n.to_le_bytes()), Self::UInt64(n) => buf.extend(n.to_le_bytes()), Self::Int8(n) => buf.extend(n.to_le_bytes()), Self::Int16(n) => buf.extend(n.to_le_bytes()), Self::Int32(n) => buf.extend(n.to_le_bytes()), Self::Int64(n) => buf.extend(n.to_le_bytes()), } } fn alignment(&self) -> usize { match self { Self::Bool(_) | Self::UInt8(_) | Self::Int8(_) => 1, Self::UInt16(_) | Self::Int16(_) => 2, Self::UInt32(_) | Self::Int32(_) => 4, _ => 8, } } } pub enum VectorField { BoolVector(Vec<bool>), UInt8Vector(Vec<u8>), UInt16Vector(Vec<u16>), UInt32Vector(Vec<u32>), UInt64Vector(Vec<u64>), Int8Vector(Vec<i8>), Int16Vector(Vec<i16>), Int32Vector(Vec<i32>), Int64Vector(Vec<i64>), // TODO(https://fxbug.dev/88174) figure out a better api for nested vectors UInt8VectorVector(Vec<Vec<u8>>), } impl VectorField { fn alignment(&self) -> usize { 8 } fn encode_inline(&self, buf: &mut Vec<u8>) { // Stored as a 16 byte record consisting of: // * `size`: 64-bit unsigned number of elements // * `data`: 64-bit presence indication or pointer to 
out-of-line element data let size = match self { Self::BoolVector(v) => v.len(), Self::UInt8Vector(v) => v.len(), Self::UInt16Vector(v) => v.len(), Self::UInt32Vector(v) => v.len(), Self::UInt64Vector(v) => v.len(), Self::Int8Vector(v) => v.len(), Self::Int16Vector(v) => v.len(), Self::Int32Vector(v) => v.len(), Self::Int64Vector(v) => v.len(), Self::UInt8VectorVector(v) => v.len(), } as u64; buf.extend(size.to_le_bytes()); // When encoded for transfer, `data` indicates presence of content: // * `0`: vector is absent // * `UINTPTR_MAX`: vector is present, data is the next out-of-line object */ // (we always encode UINTPTR_MAX because we don't support nullable vectors) buf.extend(u64::MAX.to_le_bytes()); } fn encode_out_of_line(&self, buf: &mut Vec<u8>) { match self { Self::BoolVector(v) => { for b in v { BasicField::Bool(*b).encode_inline(buf); } } Self::UInt8Vector(v) => buf.extend(v), Self::UInt16Vector(v) => { for n in v { BasicField::UInt16(*n).encode_inline(buf); } } Self::UInt32Vector(v) => { for n in v { BasicField::UInt32(*n).encode_inline(buf); } } Self::UInt64Vector(v) => { for n in v { BasicField::UInt64(*n).encode_inline(buf); } } Self::Int8Vector(v) => { for n in v { BasicField::Int8(*n).encode_inline(buf); } } Self::Int16Vector(v) => { for n in v { BasicField::Int16(*n).encode_inline(buf); } } Self::Int32Vector(v) => { for n in v { BasicField::Int32(*n).encode_inline(buf); } } Self::Int64Vector(v) => { for n in v { BasicField::Int64(*n).encode_inline(buf); } } Self::UInt8VectorVector(outer) => { let as_fields = outer .iter() .map(|v| Field::Vector(VectorField::UInt8Vector(v.clone()))) .collect::<Vec<_>>(); for field in &as_fields { field.encode_inline(buf); } for field in &as_fields { field.encode_out_of_line(buf); } } } } } trait Padding { fn pad_to(&mut self, align: usize); } impl Padding for Vec<u8> { fn pad_to(&mut self, align: usize) { let start_len = self.len(); let num_bytes = (align - (start_len % align)) % align; self.resize(start_len + 
num_bytes, 0); } }
31.007843
137
0.512331
e62c89984d3865d8da28fa9a54ea9f6d85adbc1f
3,056
//! SSD1306 I2C Interface use hal; use super::DisplayInterface; use crate::Error; // TODO: Add to prelude /// SSD1306 I2C communication interface pub struct I2cInterface<I2C> { i2c: I2C, addr: u8, } impl<I2C, CommE> I2cInterface<I2C> where I2C: hal::blocking::i2c::Write<Error = CommE>, { /// Create new SSD1306 I2C interface pub fn new(i2c: I2C, addr: u8) -> Self { Self { i2c, addr } } } impl<I2C, CommE> DisplayInterface for I2cInterface<I2C> where I2C: hal::blocking::i2c::Write<Error = CommE>, { type Error = Error<CommE, ()>; fn send_commands(&mut self, cmds: &[u8]) -> Result<(), Self::Error> { // Copy over given commands to new aray to prefix with command identifier let mut writebuf: [u8; 8] = [0; 8]; writebuf[1..=cmds.len()].copy_from_slice(&cmds[0..cmds.len()]); self.i2c .write(self.addr, &writebuf[..=cmds.len()]) .map_err(Error::Comm) } fn send_data(&mut self, buf: &[u8]) -> Result<(), Self::Error> { // Noop if the data buffer is empty if buf.is_empty() { return Ok(()); } let mut writebuf: [u8; 17] = [0; 17]; // Data mode // 8.1.5.2 5) b) in the datasheet writebuf[0] = 0x40; for chunk in buf.chunks(16) { let chunklen = chunk.len(); // Copy over all data from buffer, leaving the data command byte intact writebuf[1..=chunklen].copy_from_slice(&chunk[0..chunklen]); self.i2c .write(self.addr, &writebuf[..=chunklen]) .map_err(Error::Comm)?; } Ok(()) } fn send_bounded_data( &mut self, buf: &[u8], disp_width: usize, upper_left: (u8, u8), lower_right: (u8, u8), ) -> Result<(), Self::Error> { // Noop if the data buffer is empty if buf.is_empty() { return Ok(()); } let mut writebuf: [u8; 17] = [0; 17]; // Divide by 8 since each row is actually 8 pixels tall let height = ((lower_right.1 - upper_left.1) / 8) as usize; let starting_page = (upper_left.1 / 8) as usize; // Data mode // 8.1.5.2 5) b) in the datasheet writebuf[0] = 0x40; let mut page_offset = starting_page * disp_width; for _ in 0..=height { let start_index = page_offset + upper_left.0 as usize; let end_index = 
page_offset + lower_right.0 as usize; page_offset += disp_width; let sub_buf = &buf[start_index..end_index]; for chunk in sub_buf.chunks(16) { let chunklen = chunk.len(); // Copy over all data from buffer, leaving the data command byte intact writebuf[1..=chunklen].copy_from_slice(&chunk[0..chunklen]); self.i2c .write(self.addr, &writebuf[..=chunklen]) .map_err(Error::Comm)?; } } Ok(()) } }
26.573913
87
0.536976
c1301298bef0bd145bc6c5331a4688aa86397c27
3,214
// Topic: TryFrom/TryInto // // Summary: // * A library is needed for an application to convert hex color codes // into their component color values (red, green, and blue). Hex color codes // consist of a hash symbol followed by six hex digits. Every two hex digits // represent a color component in the order of red, green, blue. // // Example hex color codes: // #ffffff -> Rgb(255, 255, 255) // #001122 -> Rgb(0, 17, 34) // // Requirements: // * Create a program to convert a hex code (as &str) into an Rgb struct // * Implement TryFrom to perform the conversion // * Utilize the question mark operator in your implementation // // Notes: // * See the `from_str_radix` function in the stdlib docs for `u8` // to convert hex digits to `u8` // * Hex digits use a radix value of 16 // * Utilize the `thiserror` crate for your error type // * Run `cargo test --bin a37` to test your implementation use std::{convert::TryFrom, num::ParseIntError}; use thiserror::Error; #[derive(Debug, Error)] enum RgbError { #[error("Hex color must start with a hash")] MissingHash, #[error("Hex color must be six digits in length")] LengthError, #[error("Unable to parse hex digit {0}")] ParseError(#[from] ParseIntError), } #[derive(Debug, Eq, PartialEq)] struct Rgb(u8, u8, u8); impl TryFrom<&str> for Rgb { type Error = RgbError; fn try_from(hex: &str) -> Result<Self, Self::Error> { if !hex.starts_with('#') { return Err(RgbError::MissingHash); } if hex.len() != 7 { return Err(RgbError::LengthError); } let (r, g, b) = ( u8::from_str_radix(&hex[1..=2], 16)?, u8::from_str_radix(&hex[3..=4], 16)?, u8::from_str_radix(&hex[5..], 16)?, ); Ok(Self(r, g, b)) } } fn main() { // Use `cargo test --bin a37` to test your implementation } #[cfg(test)] mod test { use super::Rgb; use std::convert::TryFrom; #[test] fn converts_valid_hex_color() { let expected = Rgb(0, 204, 102); let actual = Rgb::try_from("#00cc66"); assert_eq!( actual.is_ok(), true, "valid hex code should be converted to Rgb" ); 
assert_eq!(actual.unwrap(), expected, "wrong Rgb value"); } #[test] fn fails_on_invalid_hex_digits() { assert_eq!( Rgb::try_from("#0011yy").is_err(), true, "should be an error with invalid hex color" ); } #[test] fn fails_when_missing_hash() { assert_eq!( Rgb::try_from("001100").is_err(), true, "should be an error when missing hash symbol" ); } #[test] fn fails_when_missing_color_components() { assert_eq!( Rgb::try_from("#0011f").is_err(), true, "should be an error when missing one or more color components" ); } #[test] fn fails_with_too_many_color_components() { assert_eq!( Rgb::try_from("#0011ffa").is_err(), true, "should be an error when too many color components are provided" ); } }
27.237288
78
0.578407
e60c553c0d7b386f8ba91ff98f70e2318aa90cf3
41,406
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
// NOTE(review): this file is generated; hand edits (including these comments)
// will be lost on the next regeneration. Fix the Smithy model/codegen instead.

/// Error type for the `CreateSavingsPlan` operation, pairing the specific
/// error `kind` with the generic error metadata returned by the service.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateSavingsPlanError {
    pub kind: CreateSavingsPlanErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `CreateSavingsPlan` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateSavingsPlanErrorKind {
    InternalServerException(crate::error::InternalServerException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    ServiceQuotaExceededException(crate::error::ServiceQuotaExceededException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateSavingsPlanError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            CreateSavingsPlanErrorKind::InternalServerException(_inner) => _inner.fmt(f),
            CreateSavingsPlanErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            CreateSavingsPlanErrorKind::ServiceQuotaExceededException(_inner) => _inner.fmt(f),
            CreateSavingsPlanErrorKind::ValidationException(_inner) => _inner.fmt(f),
            CreateSavingsPlanErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for CreateSavingsPlanError {
    fn code(&self) -> Option<&str> {
        CreateSavingsPlanError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl CreateSavingsPlanError {
    pub fn new(kind: CreateSavingsPlanErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: CreateSavingsPlanErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: CreateSavingsPlanErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_internal_server_exception(&self) -> bool {
        matches!(
            &self.kind,
            CreateSavingsPlanErrorKind::InternalServerException(_)
        )
    }
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            CreateSavingsPlanErrorKind::ResourceNotFoundException(_)
        )
    }
    pub fn is_service_quota_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            CreateSavingsPlanErrorKind::ServiceQuotaExceededException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(
            &self.kind,
            CreateSavingsPlanErrorKind::ValidationException(_)
        )
    }
}
impl std::error::Error for CreateSavingsPlanError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            CreateSavingsPlanErrorKind::InternalServerException(_inner) => Some(_inner),
            CreateSavingsPlanErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            CreateSavingsPlanErrorKind::ServiceQuotaExceededException(_inner) => Some(_inner),
            CreateSavingsPlanErrorKind::ValidationException(_inner) => Some(_inner),
            CreateSavingsPlanErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DeleteQueuedSavingsPlan` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteQueuedSavingsPlanError {
    pub kind: DeleteQueuedSavingsPlanErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DeleteQueuedSavingsPlan` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteQueuedSavingsPlanErrorKind {
    InternalServerException(crate::error::InternalServerException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    ServiceQuotaExceededException(crate::error::ServiceQuotaExceededException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteQueuedSavingsPlanError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DeleteQueuedSavingsPlanErrorKind::InternalServerException(_inner) => _inner.fmt(f),
            DeleteQueuedSavingsPlanErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            DeleteQueuedSavingsPlanErrorKind::ServiceQuotaExceededException(_inner) => {
                _inner.fmt(f)
            }
            DeleteQueuedSavingsPlanErrorKind::ValidationException(_inner) => _inner.fmt(f),
            DeleteQueuedSavingsPlanErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for DeleteQueuedSavingsPlanError {
    fn code(&self) -> Option<&str> {
        DeleteQueuedSavingsPlanError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DeleteQueuedSavingsPlanError {
    pub fn new(kind: DeleteQueuedSavingsPlanErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DeleteQueuedSavingsPlanErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DeleteQueuedSavingsPlanErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_internal_server_exception(&self) -> bool {
        matches!(
            &self.kind,
            DeleteQueuedSavingsPlanErrorKind::InternalServerException(_)
        )
    }
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DeleteQueuedSavingsPlanErrorKind::ResourceNotFoundException(_)
        )
    }
    pub fn is_service_quota_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            DeleteQueuedSavingsPlanErrorKind::ServiceQuotaExceededException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(
            &self.kind,
            DeleteQueuedSavingsPlanErrorKind::ValidationException(_)
        )
    }
}
impl std::error::Error for DeleteQueuedSavingsPlanError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DeleteQueuedSavingsPlanErrorKind::InternalServerException(_inner) => Some(_inner),
            DeleteQueuedSavingsPlanErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            DeleteQueuedSavingsPlanErrorKind::ServiceQuotaExceededException(_inner) => Some(_inner),
            DeleteQueuedSavingsPlanErrorKind::ValidationException(_inner) => Some(_inner),
            DeleteQueuedSavingsPlanErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DescribeSavingsPlanRates` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeSavingsPlanRatesError {
    pub kind: DescribeSavingsPlanRatesErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DescribeSavingsPlanRates` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeSavingsPlanRatesErrorKind {
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeSavingsPlanRatesError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DescribeSavingsPlanRatesErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            DescribeSavingsPlanRatesErrorKind::ValidationException(_inner) => _inner.fmt(f),
            DescribeSavingsPlanRatesErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for DescribeSavingsPlanRatesError {
    fn code(&self) -> Option<&str> {
        DescribeSavingsPlanRatesError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DescribeSavingsPlanRatesError {
    pub fn new(kind: DescribeSavingsPlanRatesErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DescribeSavingsPlanRatesErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DescribeSavingsPlanRatesErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeSavingsPlanRatesErrorKind::ResourceNotFoundException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeSavingsPlanRatesErrorKind::ValidationException(_)
        )
    }
}
impl std::error::Error for DescribeSavingsPlanRatesError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DescribeSavingsPlanRatesErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            DescribeSavingsPlanRatesErrorKind::ValidationException(_inner) => Some(_inner),
            DescribeSavingsPlanRatesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DescribeSavingsPlans` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeSavingsPlansError {
    pub kind: DescribeSavingsPlansErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DescribeSavingsPlans` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeSavingsPlansErrorKind {
    InternalServerException(crate::error::InternalServerException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeSavingsPlansError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DescribeSavingsPlansErrorKind::InternalServerException(_inner) => _inner.fmt(f),
            DescribeSavingsPlansErrorKind::ValidationException(_inner) => _inner.fmt(f),
            DescribeSavingsPlansErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for DescribeSavingsPlansError {
    fn code(&self) -> Option<&str> {
        DescribeSavingsPlansError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DescribeSavingsPlansError {
    pub fn new(kind: DescribeSavingsPlansErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DescribeSavingsPlansErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DescribeSavingsPlansErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_internal_server_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeSavingsPlansErrorKind::InternalServerException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeSavingsPlansErrorKind::ValidationException(_)
        )
    }
}
impl std::error::Error for DescribeSavingsPlansError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DescribeSavingsPlansErrorKind::InternalServerException(_inner) => Some(_inner),
            DescribeSavingsPlansErrorKind::ValidationException(_inner) => Some(_inner),
            DescribeSavingsPlansErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DescribeSavingsPlansOfferingRates` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeSavingsPlansOfferingRatesError {
    pub kind: DescribeSavingsPlansOfferingRatesErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DescribeSavingsPlansOfferingRates` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeSavingsPlansOfferingRatesErrorKind {
    InternalServerException(crate::error::InternalServerException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeSavingsPlansOfferingRatesError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DescribeSavingsPlansOfferingRatesErrorKind::InternalServerException(_inner) => {
                _inner.fmt(f)
            }
            DescribeSavingsPlansOfferingRatesErrorKind::ValidationException(_inner) => {
                _inner.fmt(f)
            }
            DescribeSavingsPlansOfferingRatesErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for DescribeSavingsPlansOfferingRatesError {
    fn code(&self) -> Option<&str> {
        DescribeSavingsPlansOfferingRatesError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DescribeSavingsPlansOfferingRatesError {
    pub fn new(
        kind: DescribeSavingsPlansOfferingRatesErrorKind,
        meta: smithy_types::Error,
    ) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DescribeSavingsPlansOfferingRatesErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DescribeSavingsPlansOfferingRatesErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_internal_server_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeSavingsPlansOfferingRatesErrorKind::InternalServerException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeSavingsPlansOfferingRatesErrorKind::ValidationException(_)
        )
    }
}
impl std::error::Error for DescribeSavingsPlansOfferingRatesError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DescribeSavingsPlansOfferingRatesErrorKind::InternalServerException(_inner) => {
                Some(_inner)
            }
            DescribeSavingsPlansOfferingRatesErrorKind::ValidationException(_inner) => Some(_inner),
            DescribeSavingsPlansOfferingRatesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `DescribeSavingsPlansOfferings` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeSavingsPlansOfferingsError {
    pub kind: DescribeSavingsPlansOfferingsErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `DescribeSavingsPlansOfferings` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeSavingsPlansOfferingsErrorKind {
    InternalServerException(crate::error::InternalServerException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeSavingsPlansOfferingsError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            DescribeSavingsPlansOfferingsErrorKind::InternalServerException(_inner) => {
                _inner.fmt(f)
            }
            DescribeSavingsPlansOfferingsErrorKind::ValidationException(_inner) => _inner.fmt(f),
            DescribeSavingsPlansOfferingsErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for DescribeSavingsPlansOfferingsError {
    fn code(&self) -> Option<&str> {
        DescribeSavingsPlansOfferingsError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl DescribeSavingsPlansOfferingsError {
    pub fn new(kind: DescribeSavingsPlansOfferingsErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: DescribeSavingsPlansOfferingsErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: DescribeSavingsPlansOfferingsErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_internal_server_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeSavingsPlansOfferingsErrorKind::InternalServerException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(
            &self.kind,
            DescribeSavingsPlansOfferingsErrorKind::ValidationException(_)
        )
    }
}
impl std::error::Error for DescribeSavingsPlansOfferingsError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            DescribeSavingsPlansOfferingsErrorKind::InternalServerException(_inner) => Some(_inner),
            DescribeSavingsPlansOfferingsErrorKind::ValidationException(_inner) => Some(_inner),
            DescribeSavingsPlansOfferingsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `ListTagsForResource` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListTagsForResourceError {
    pub kind: ListTagsForResourceErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `ListTagsForResource` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListTagsForResourceErrorKind {
    InternalServerException(crate::error::InternalServerException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListTagsForResourceError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            ListTagsForResourceErrorKind::InternalServerException(_inner) => _inner.fmt(f),
            ListTagsForResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            ListTagsForResourceErrorKind::ValidationException(_inner) => _inner.fmt(f),
            ListTagsForResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for ListTagsForResourceError {
    fn code(&self) -> Option<&str> {
        ListTagsForResourceError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl ListTagsForResourceError {
    pub fn new(kind: ListTagsForResourceErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: ListTagsForResourceErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: ListTagsForResourceErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_internal_server_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListTagsForResourceErrorKind::InternalServerException(_)
        )
    }
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListTagsForResourceErrorKind::ResourceNotFoundException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(
            &self.kind,
            ListTagsForResourceErrorKind::ValidationException(_)
        )
    }
}
impl std::error::Error for ListTagsForResourceError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            ListTagsForResourceErrorKind::InternalServerException(_inner) => Some(_inner),
            ListTagsForResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            ListTagsForResourceErrorKind::ValidationException(_inner) => Some(_inner),
            ListTagsForResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `TagResource` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct TagResourceError {
    pub kind: TagResourceErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `TagResource` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum TagResourceErrorKind {
    InternalServerException(crate::error::InternalServerException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    ServiceQuotaExceededException(crate::error::ServiceQuotaExceededException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for TagResourceError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            TagResourceErrorKind::InternalServerException(_inner) => _inner.fmt(f),
            TagResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            TagResourceErrorKind::ServiceQuotaExceededException(_inner) => _inner.fmt(f),
            TagResourceErrorKind::ValidationException(_inner) => _inner.fmt(f),
            TagResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for TagResourceError {
    fn code(&self) -> Option<&str> {
        TagResourceError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl TagResourceError {
    pub fn new(kind: TagResourceErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: TagResourceErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: TagResourceErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_internal_server_exception(&self) -> bool {
        matches!(&self.kind, TagResourceErrorKind::InternalServerException(_))
    }
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            TagResourceErrorKind::ResourceNotFoundException(_)
        )
    }
    pub fn is_service_quota_exceeded_exception(&self) -> bool {
        matches!(
            &self.kind,
            TagResourceErrorKind::ServiceQuotaExceededException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(&self.kind, TagResourceErrorKind::ValidationException(_))
    }
}
impl std::error::Error for TagResourceError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            TagResourceErrorKind::InternalServerException(_inner) => Some(_inner),
            TagResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            TagResourceErrorKind::ServiceQuotaExceededException(_inner) => Some(_inner),
            TagResourceErrorKind::ValidationException(_inner) => Some(_inner),
            TagResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// Error type for the `UntagResource` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UntagResourceError {
    pub kind: UntagResourceErrorKind,
    pub(crate) meta: smithy_types::Error,
}
/// Types of errors that can occur for the `UntagResource` operation.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UntagResourceErrorKind {
    InternalServerException(crate::error::InternalServerException),
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    ValidationException(crate::error::ValidationException),
    /// An unexpected error, eg. invalid JSON returned by the service or an unknown error code
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UntagResourceError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            UntagResourceErrorKind::InternalServerException(_inner) => _inner.fmt(f),
            UntagResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
            UntagResourceErrorKind::ValidationException(_inner) => _inner.fmt(f),
            UntagResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
        }
    }
}
impl smithy_types::retry::ProvideErrorKind for UntagResourceError {
    fn code(&self) -> Option<&str> {
        UntagResourceError::code(self)
    }
    fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
        None
    }
}
impl UntagResourceError {
    pub fn new(kind: UntagResourceErrorKind, meta: smithy_types::Error) -> Self {
        Self { kind, meta }
    }
    pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
        Self {
            kind: UntagResourceErrorKind::Unhandled(err.into()),
            meta: Default::default(),
        }
    }
    pub fn generic(err: smithy_types::Error) -> Self {
        Self {
            meta: err.clone(),
            kind: UntagResourceErrorKind::Unhandled(err.into()),
        }
    }
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use display as implemented
    // by std::Error to generate a message in that case.
    pub fn message(&self) -> Option<&str> {
        self.meta.message()
    }
    pub fn meta(&self) -> &smithy_types::Error {
        &self.meta
    }
    pub fn request_id(&self) -> Option<&str> {
        self.meta.request_id()
    }
    pub fn code(&self) -> Option<&str> {
        self.meta.code()
    }
    pub fn is_internal_server_exception(&self) -> bool {
        matches!(
            &self.kind,
            UntagResourceErrorKind::InternalServerException(_)
        )
    }
    pub fn is_resource_not_found_exception(&self) -> bool {
        matches!(
            &self.kind,
            UntagResourceErrorKind::ResourceNotFoundException(_)
        )
    }
    pub fn is_validation_exception(&self) -> bool {
        matches!(&self.kind, UntagResourceErrorKind::ValidationException(_))
    }
}
impl std::error::Error for UntagResourceError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match &self.kind {
            UntagResourceErrorKind::InternalServerException(_inner) => Some(_inner),
            UntagResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
            UntagResourceErrorKind::ValidationException(_inner) => Some(_inner),
            UntagResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
        }
    }
}
/// <p>One of the input parameters is not valid.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ValidationException {
    pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ValidationException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("ValidationException");
        formatter.field("message", &self.message);
        formatter.finish()
    }
}
impl ValidationException {
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
}
impl std::fmt::Display for ValidationException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "ValidationException")?;
        if let Some(inner_1) = &self.message {
            write!(f, ": {}", inner_1)?;
        }
        Ok(())
    }
}
impl std::error::Error for ValidationException {}
/// See [`ValidationException`](crate::error::ValidationException)
pub mod validation_exception {
    /// A builder for [`ValidationException`](crate::error::ValidationException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`ValidationException`](crate::error::ValidationException)
        pub fn build(self) -> crate::error::ValidationException {
            crate::error::ValidationException {
                message: self.message,
            }
        }
    }
}
impl ValidationException {
    /// Creates a new builder-style object to manufacture [`ValidationException`](crate::error::ValidationException)
    pub fn builder() -> crate::error::validation_exception::Builder {
        crate::error::validation_exception::Builder::default()
    }
}
/// <p>The specified resource was not found.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceNotFoundException {
    pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ResourceNotFoundException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("ResourceNotFoundException");
        formatter.field("message", &self.message);
        formatter.finish()
    }
}
impl ResourceNotFoundException {
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
}
impl std::fmt::Display for ResourceNotFoundException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "ResourceNotFoundException")?;
        if let Some(inner_2) = &self.message {
            write!(f, ": {}", inner_2)?;
        }
        Ok(())
    }
}
impl std::error::Error for ResourceNotFoundException {}
/// See [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
pub mod resource_not_found_exception {
    /// A builder for [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
        pub fn build(self) -> crate::error::ResourceNotFoundException {
            crate::error::ResourceNotFoundException {
                message: self.message,
            }
        }
    }
}
impl ResourceNotFoundException {
    /// Creates a new builder-style object to manufacture [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
    pub fn builder() -> crate::error::resource_not_found_exception::Builder {
        crate::error::resource_not_found_exception::Builder::default()
    }
}
/// <p>An unexpected error occurred.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InternalServerException {
    pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InternalServerException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("InternalServerException");
        formatter.field("message", &self.message);
        formatter.finish()
    }
}
impl InternalServerException {
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
}
impl std::fmt::Display for InternalServerException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "InternalServerException")?;
        if let Some(inner_3) = &self.message {
            write!(f, ": {}", inner_3)?;
        }
        Ok(())
    }
}
impl std::error::Error for InternalServerException {}
/// See [`InternalServerException`](crate::error::InternalServerException)
pub mod internal_server_exception {
    /// A builder for [`InternalServerException`](crate::error::InternalServerException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`InternalServerException`](crate::error::InternalServerException)
        pub fn build(self) -> crate::error::InternalServerException {
            crate::error::InternalServerException {
                message: self.message,
            }
        }
    }
}
impl InternalServerException {
    /// Creates a new builder-style object to manufacture [`InternalServerException`](crate::error::InternalServerException)
    pub fn builder() -> crate::error::internal_server_exception::Builder {
        crate::error::internal_server_exception::Builder::default()
    }
}
/// <p>A service quota has been exceeded.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ServiceQuotaExceededException {
    pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ServiceQuotaExceededException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut formatter = f.debug_struct("ServiceQuotaExceededException");
        formatter.field("message", &self.message);
        formatter.finish()
    }
}
impl ServiceQuotaExceededException {
    pub fn message(&self) -> Option<&str> {
        self.message.as_deref()
    }
}
impl std::fmt::Display for ServiceQuotaExceededException {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "ServiceQuotaExceededException")?;
        if let Some(inner_4) = &self.message {
            write!(f, ": {}", inner_4)?;
        }
        Ok(())
    }
}
impl std::error::Error for ServiceQuotaExceededException {}
/// See [`ServiceQuotaExceededException`](crate::error::ServiceQuotaExceededException)
pub mod service_quota_exceeded_exception {
    /// A builder for [`ServiceQuotaExceededException`](crate::error::ServiceQuotaExceededException)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) message: std::option::Option<std::string::String>,
    }
    impl Builder {
        pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
            self.message = Some(input.into());
            self
        }
        pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.message = input;
            self
        }
        /// Consumes the builder and constructs a [`ServiceQuotaExceededException`](crate::error::ServiceQuotaExceededException)
        pub fn build(self) -> crate::error::ServiceQuotaExceededException {
            crate::error::ServiceQuotaExceededException {
                message: self.message,
            }
        }
    }
}
impl ServiceQuotaExceededException {
    /// Creates a new builder-style object to manufacture [`ServiceQuotaExceededException`](crate::error::ServiceQuotaExceededException)
    pub fn builder() -> crate::error::service_quota_exceeded_exception::Builder {
        crate::error::service_quota_exceeded_exception::Builder::default()
    }
}
36.740018
136
0.645655
6275f05b5c3f841246322d9d10126fbb31f0e510
8,340
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use {
    crate::{
        capability::*,
        model::{
            error::ModelError,
            hooks::{Event, EventPayload, EventType, Hook, HooksRegistration},
        },
    },
    async_trait::async_trait,
    cm_rust::CapabilityPath,
    failure::Error,
    fidl::endpoints::ServerEnd,
    fidl_fuchsia_boot as fboot, fidl_fuchsia_security_resource as fsec,
    fuchsia_async as fasync,
    fuchsia_component::client::connect_to_service,
    fuchsia_zircon::{self as zx, HandleBased},
    futures::{future::BoxFuture, prelude::*},
    lazy_static::lazy_static,
    log::warn,
    std::{
        convert::TryInto,
        sync::{Arc, Weak},
    },
};

lazy_static! {
    // The namespace path under which this framework service is exposed.
    pub static ref VMEX_CAPABILITY_PATH: CapabilityPath =
        "/svc/fuchsia.process.Vmex".try_into().unwrap();
}

/// An implementation of fuchsia.security.resource.Vmex protocol.
pub struct VmexService {
    // Shared with the hooks system via a Weak reference; see `hooks()`.
    inner: Arc<VmexServiceInner>,
}

impl VmexService {
    /// Constructs the service with a fresh inner state holder.
    pub fn new() -> Self {
        Self { inner: Arc::new(VmexServiceInner::new()) }
    }

    /// Returns the hook registrations this service needs: it only listens for
    /// `RouteCapability` events. The callback is a `Weak` so the hooks system
    /// does not keep the service alive.
    pub fn hooks(&self) -> Vec<HooksRegistration> {
        vec![HooksRegistration {
            events: vec![EventType::RouteCapability],
            callback: Arc::downgrade(&self.inner) as Weak<dyn Hook>,
        }]
    }

    /// Serves an instance of the 'fuchsia.security.resource.Vmex' protocol given an appropriate
    /// RequestStream. Returns when the channel backing the RequestStream is closed or an
    /// unrecoverable error, like failure to acquire the root resource occurs.
    pub async fn serve(mut stream: fsec::VmexRequestStream) -> Result<(), Error> {
        // The VMEX resource is derived from the root resource, obtained from
        // fuchsia.boot.RootResource in this component's incoming namespace.
        let root_resource_provider = connect_to_service::<fboot::RootResourceMarker>()?;
        let root_resource = root_resource_provider.get().await?;
        while let Some(fsec::VmexRequest::Get { responder }) = stream.try_next().await? {
            let vmex_handle =
                root_resource.create_child(zx::ResourceKind::VMEX, None, 0, 0, b"vmex")?;
            // Strip the handle down to the minimal rights before handing it out.
            let restricted_vmex_handle = vmex_handle.replace_handle(
                zx::Rights::TRANSFER | zx::Rights::DUPLICATE | zx::Rights::INSPECT,
            )?;
            responder.send(zx::Resource::from(restricted_vmex_handle))?;
        }
        Ok(())
    }
}

// Stateless; exists so a `Weak<dyn Hook>` can be handed to the hooks system.
struct VmexServiceInner;

impl VmexServiceInner {
    pub fn new() -> Self {
        Self {}
    }

    // Intercepts framework capability routing: if the requested protocol path is
    // VMEX_CAPABILITY_PATH, substitute our provider; otherwise pass through
    // whatever provider was already chosen.
    async fn on_route_framework_capability_async<'a>(
        self: Arc<Self>,
        capability: &'a FrameworkCapability,
        capability_provider: Option<Box<dyn CapabilityProvider>>,
    ) -> Result<Option<Box<dyn CapabilityProvider>>, ModelError> {
        match capability {
            FrameworkCapability::ServiceProtocol(capability_path)
                if *capability_path == *VMEX_CAPABILITY_PATH =>
            {
                Ok(Some(Box::new(VmexCapabilityProvider::new()) as Box<dyn CapabilityProvider>))
            }
            _ => Ok(capability_provider),
        }
    }
}

impl Hook for VmexServiceInner {
    // Only `RouteCapability` events with a Framework source and no scope moniker
    // are handled; all other payloads fall through untouched.
    fn on(self: Arc<Self>, event: &Event) -> BoxFuture<Result<(), ModelError>> {
        Box::pin(async move {
            if let EventPayload::RouteCapability {
                source: CapabilitySource::Framework { capability, scope_moniker: None },
                capability_provider,
            } = &event.payload
            {
                let mut capability_provider = capability_provider.lock().await;
                *capability_provider = self
                    .on_route_framework_capability_async(&capability, capability_provider.take())
                    .await?;
            };
            Ok(())
        })
    }
}

// Provider that spawns a `VmexService::serve` task per opened channel.
struct VmexCapabilityProvider;

impl VmexCapabilityProvider {
    pub fn new() -> Self {
        Self {}
    }
}

#[async_trait]
impl CapabilityProvider for VmexCapabilityProvider {
    /// Binds `server_end` to a new Vmex request stream and serves it on a
    /// detached task. Flags, open mode and relative path are ignored.
    async fn open(
        self: Box<Self>,
        _flags: u32,
        _open_mode: u32,
        _relative_path: String,
        server_end: zx::Channel,
    ) -> Result<(), ModelError> {
        let server_end = ServerEnd::<fsec::VmexMarker>::new(server_end);
        let stream: fsec::VmexRequestStream = server_end.into_stream().unwrap();
        fasync::spawn(async move {
            let result = VmexService::serve(stream).await;
            if let Err(e) = result {
                // Serving errors are logged, not propagated: the open itself succeeded.
                warn!("VmexService.open failed: {}", e);
            }
        });
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use {
        super::*,
        crate::model::{hooks::Hooks, moniker::AbsoluteMoniker},
        fidl::endpoints::ClientEnd,
        fuchsia_async as fasync,
        fuchsia_zircon::AsHandleRef,
        fuchsia_zircon_sys as sys,
        futures::lock::Mutex,
    };

    // The root resource is only available in one of the two test environments;
    // which one we are in is inferred from the test binary's path.
    fn root_resource_available() -> bool {
        let bin = std::env::args().next();
        match bin.as_ref().map(String::as_ref) {
            Some("/pkg/test/component_manager_tests") => false,
            Some("/pkg/test/component_manager_boot_env_tests") => true,
            _ => panic!("Unexpected test binary name {:?}", bin),
        }
    }

    // Spins up a local serving task and returns a client proxy to it.
    fn serve_vmex() -> Result<fsec::VmexProxy, Error> {
        let (proxy, stream) = fidl::endpoints::create_proxy_and_stream::<fsec::VmexMarker>()?;
        fasync::spawn_local(
            VmexService::serve(stream)
                .unwrap_or_else(|e| panic!("Error while serving vmex service: {}", e)),
        );
        Ok(proxy)
    }

    #[fasync::run_singlethreaded(test)]
    async fn fail_with_no_root_resource() -> Result<(), Error> {
        // Only meaningful in the environment *without* the root resource.
        if root_resource_available() {
            return Ok(());
        }
        let (_, stream) = fidl::endpoints::create_proxy_and_stream::<fsec::VmexMarker>()?;
        assert!(!VmexService::serve(stream).await.is_ok());
        Ok(())
    }

    #[fasync::run_singlethreaded(test)]
    async fn kind_type_is_vmex() -> Result<(), Error> {
        if !root_resource_available() {
            return Ok(());
        }
        let vmex_provider = serve_vmex()?;
        let vmex_resource = vmex_provider.get().await?;
        let resource_info = vmex_resource.info()?;
        // A derived VMEX resource has kind VMEX and a zero base/size range.
        assert_eq!(resource_info.kind, zx::sys::ZX_RSRC_KIND_VMEX);
        assert_eq!(resource_info.base, 0);
        assert_eq!(resource_info.size, 0);
        Ok(())
    }

    #[fasync::run_singlethreaded(test)]
    async fn minimal_rights_assigned() -> Result<(), Error> {
        if !root_resource_available() {
            return Ok(());
        }
        let vmex_provider = serve_vmex()?;
        let vmex_resource = vmex_provider.get().await?;
        let resource_info = zx::Handle::from(vmex_resource).basic_info()?;
        // Must match the rights mask applied in VmexService::serve.
        assert_eq!(
            resource_info.rights,
            zx::Rights::DUPLICATE | zx::Rights::TRANSFER | zx::Rights::INSPECT
        );
        Ok(())
    }

    #[fasync::run_singlethreaded(test)]
    async fn connect_to_vmex_service() -> Result<(), Error> {
        if !root_resource_available() {
            return Ok(());
        }
        // End-to-end: install hooks, dispatch a RouteCapability event, open the
        // resulting provider, and fetch a resource over the client channel.
        let vmex_service = Arc::new(VmexService::new());
        let hooks = Hooks::new(None);
        hooks.install(vmex_service.hooks()).await;
        let capability_provider = Arc::new(Mutex::new(None));
        let source = CapabilitySource::Framework {
            capability: FrameworkCapability::ServiceProtocol(VMEX_CAPABILITY_PATH.clone()),
            scope_moniker: None,
        };
        let (client, server) = zx::Channel::create()?;
        let event = Event::new(
            AbsoluteMoniker::root(),
            EventPayload::RouteCapability {
                source,
                capability_provider: capability_provider.clone(),
            },
        );
        hooks.dispatch(&event).await?;
        let capability_provider = capability_provider.lock().await.take();
        if let Some(capability_provider) = capability_provider {
            capability_provider.open(0, 0, String::new(), server).await?;
        }
        let vmex_client = ClientEnd::<fsec::VmexMarker>::new(client)
            .into_proxy()
            .expect("failed to create launcher proxy");
        let vmex_resource = vmex_client.get().await?;
        assert_ne!(vmex_resource.raw_handle(), sys::ZX_HANDLE_INVALID);
        Ok(())
    }
}
33.095238
97
0.596283
9bc2695fb83f59e5884739d712afb5555aa31b24
2,138
#[doc = "Register `INTERP1_BASE1` reader"] pub struct R(crate::R<INTERP1_BASE1_SPEC>); impl core::ops::Deref for R { type Target = crate::R<INTERP1_BASE1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<INTERP1_BASE1_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<INTERP1_BASE1_SPEC>) -> Self { R(reader) } } #[doc = "Register `INTERP1_BASE1` writer"] pub struct W(crate::W<INTERP1_BASE1_SPEC>); impl core::ops::Deref for W { type Target = crate::W<INTERP1_BASE1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<INTERP1_BASE1_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<INTERP1_BASE1_SPEC>) -> Self { W(writer) } } impl W { #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Read/write access to BASE1 register. This register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api). For information about available fields see [interp1_base1](index.html) module"] pub struct INTERP1_BASE1_SPEC; impl crate::RegisterSpec for INTERP1_BASE1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [interp1_base1::R](R) reader structure"] impl crate::Readable for INTERP1_BASE1_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [interp1_base1::W](W) writer structure"] impl crate::Writable for INTERP1_BASE1_SPEC { type Writer = W; } #[doc = "`reset()` method sets INTERP1_BASE1 to value 0"] impl crate::Resettable for INTERP1_BASE1_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
30.985507
300
0.638915
f7c9292c605a1b8612d88c53a16a8b20a3aaec94
1,643
use std::sync::Arc; use crate::{array::FromFfi, error::Result, ffi}; use super::super::{ffi::ToFfi, Array}; use super::UnionArray; unsafe impl ToFfi for UnionArray { fn buffers(&self) -> Vec<Option<std::ptr::NonNull<u8>>> { if let Some(offsets) = &self.offsets { vec![ Some(self.types.as_ptr().cast::<u8>()), Some(offsets.as_ptr().cast::<u8>()), ] } else { vec![Some(self.types.as_ptr().cast::<u8>())] } } fn children(&self) -> Vec<Arc<dyn Array>> { self.fields.clone() } fn offset(&self) -> Option<usize> { Some(self.types.offset()) } fn to_ffi_aligned(&self) -> Self { self.clone() } } impl<A: ffi::ArrowArrayRef> FromFfi<A> for UnionArray { unsafe fn try_from_ffi(array: A) -> Result<Self> { let data_type = array.data_type().clone(); let fields = Self::get_fields(&data_type); let mut types = unsafe { array.buffer::<i8>(0) }?; let offsets = if Self::is_sparse(&data_type) { None } else { Some(unsafe { array.buffer::<i32>(1) }?) }; let length = array.array().len(); let offset = array.array().offset(); let fields = (0..fields.len()) .map(|index| { let child = array.child(index)?; Ok(ffi::try_from(child)?.into()) }) .collect::<Result<Vec<Arc<dyn Array>>>>()?; if offset > 0 { types = types.slice(offset, length); }; Self::try_new(data_type, types, fields, offsets) } }
26.934426
61
0.513086
ac13ff20f23bd81ec08cd9d0c7a2fb1ff833ca5e
10,081
//! The SNMP Management Information Base itself.

mod interpretation;
mod linker;
mod smi_well_known;

pub use crate::mib::interpretation::{
    InetAddress, InetAddressEncoding, SMIInterpretation, SMIScalar, SMITable, SMITableCell,
    TableIndexEncoding, TableIndexValue,
};

use std::collections::BTreeMap;
use std::fmt::Debug;

use sequence_trie::SequenceTrie;

use crate::error::{IndexDecodeError, InterpretationError, LookupError};
use crate::loader::Loader;
use crate::mib::linker::{InternalObjectDescriptor, Linker};
use crate::parser::asn_type::Type;
use crate::types::{IdentifiedObj, Identifier, Indexable, IntoOidExpr, NumericOid, OidExpr};

/// A description of an object in the MIB.
///
/// The `ObjectDescriptor` includes the identifier of the object, its ASN.1 declared type, plus the
/// interpretation of that type in terms of SMIv2. These are obtained using `MIB::describe_object`.
#[derive(Clone, Debug)]
pub struct ObjectDescriptor {
    /// The object's OID, both as an identifier and numerically.
    ///
    /// This field allows an object to be described based on any OID expression, and this field
    /// will contain both the canonical name and the numeric OID.
    pub object: IdentifiedObj,

    /// The declared type of the object as an ASN.1 type, if it has one.
    ///
    /// This will be a parsed version of the exact ASN.1 type the object is declared as in the MIB.
    /// If that declared type is or contains references to other types defined in the MIB; they are
    /// not dereferenced or expanded in this field.
    ///
    /// Some objects do not have a declared type, for example OIDs that are declared as namespaces
    /// for other OIDs. For such objects, this field will be `None`.
    pub declared_type: Option<Type<Identifier>>,

    /// The interpretation of this field in terms of SMI.
    ///
    /// When this is `Ok(i)`, `i` is the SMI interpretation --- a value describing the meaning of
    /// the object's type (dereferencing and expanding the declared ASN.1 type) and context
    /// (examining the object's parent to determine if it is a component of any tables).
    ///
    /// If the object is unable to be interpreted, this will be `Err(e)` where `e` describes the
    /// reason for the object being uninterpretable.
    pub smi_interpretation: Result<SMIInterpretation, InterpretationError>,
}

/// An SNMP Management Information Base.
///
/// A `MIB` is obtained by using a [`Loader`] to load the MIB module definitions of interest and
/// then compiling them by calling `.into()`. The resultant `MIB` contains the main API for
/// utilizing the compiled information.
#[derive(Clone, Debug)]
pub struct MIB {
    // Trie keyed by numeric OID path; each node holds the linked descriptor.
    oid_descriptor_tree: SequenceTrie<u32, InternalObjectDescriptor>,
    // Name -> numeric OID, or the lookup error recorded at link time (orphans).
    identifier_table: BTreeMap<Identifier, Result<NumericOid, LookupError>>,
}

impl MIB {
    /// Look up anything convertible to an [`OidExpr`], and translate it to a maximally-specific
    /// `OidExpr`.
    ///
    /// While any named identifiers in the OID expression must be known to this MIB, the exact
    /// object referred to by some or all of a numeric suffix need not be. The returned `OidExpr`'s
    /// parent will be whichever identifier known to this MIB matches the largest prefix of the
    /// given `OidExpr`. The fragment will contain any suffix for which the MIB does not define a
    /// name.
    pub fn lookup_best_oidexpr(&self, expr: impl IntoOidExpr) -> Result<OidExpr, LookupError> {
        let num_oid = self.lookup_numeric_oid(expr)?;
        self.lookup_best_oidexpr_internal(&num_oid)
            .map(|(_, oidexpr)| oidexpr)
    }

    // Splits `num_oid` into the longest prefix known to the trie (the parent)
    // and an unnamed numeric fragment, returning both the parent's numeric OID
    // and the reconstructed OidExpr.
    fn lookup_best_oidexpr_internal(
        &self,
        num_oid: &NumericOid,
    ) -> Result<(NumericOid, OidExpr), LookupError> {
        // Decrement to skip the root.
        let prefix_len = self.oid_descriptor_tree.prefix_iter(num_oid).count() - 1;
        let (parent_num_oid, fragment) = num_oid.split_at(prefix_len);
        let parent = self
            .oid_descriptor_tree
            .get(parent_num_oid)
            .ok_or_else(|| LookupError::NoSuchNumericOID {
                oid: parent_num_oid.into(),
            })?;
        Ok((
            parent_num_oid.into(),
            parent.name.index_by_fragment(fragment),
        ))
    }

    /// Look up anything convertible to an [`OidExpr`], and translate it to an equivalent
    /// [`NumericOid`].
    ///
    /// While any named identifiers in the OID expression must be known to this MIB, the exact
    /// object referred to by some or all of a numeric suffix need not be.
    pub fn lookup_numeric_oid(&self, expr: impl IntoOidExpr) -> Result<NumericOid, LookupError> {
        let expr = expr.into_oid_expr();
        let oid = self
            .identifier_table
            .get(expr.base_identifier())
            .ok_or_else(|| LookupError::NoSuchIdentifier {
                identifier: expr.base_identifier().clone(),
            })?
            .as_ref()
            // Orphan identifiers are stored as Err; propagate a clone of it.
            .map_err(Clone::clone)?
            .index_by_fragment(expr.fragment());
        Ok(oid)
    }

    /// Look up a descriptor for an object identified by (anything convertible to) an [`OidExpr`].
    ///
    /// See [`ObjectDescriptor`] for details.
    pub fn describe_object(&self, expr: impl IntoOidExpr) -> Result<ObjectDescriptor, LookupError> {
        use SMIInterpretation as SI;

        let num_oid = self.lookup_numeric_oid(expr)?;
        let (parent_num_oid, best_expr) = self.lookup_best_oidexpr_internal(&num_oid)?;
        let internal_descr = self.oid_descriptor_tree.get(&parent_num_oid).expect(
            "lookup_best_oidexpr_internal guarantees parent_num_oid can be gotten from \
             oid_descriptor_tree",
        );

        let interpretation = if let Ok(SI::Scalar(cell_scalar)) = &internal_descr.smi_interpretation
        {
            // If the internal descriptor's interpretation is a Scalar, check the interpretation of
            // the parent OID. It might be a TableRow, in which case the interpretation of this is
            // not just Scalar but specifically a TableCell.
            match self.describe_object(parent_num_oid.parent()) {
                Ok(ObjectDescriptor {
                    smi_interpretation: Ok(SI::TableRow(table)),
                    ..
                }) => {
                    // The parent is a TableRow, so decode any table index values that are present in
                    // the fragment and re-interpret this as TableCell.
                    let fragment = best_expr.fragment().into_iter().copied();
                    let instance_indices = self.decode_table_cell_indices(fragment, &table)?;
                    Ok(SI::TableCell(SMITableCell {
                        cell_interpretation: cell_scalar.clone(),
                        table,
                        instance_indices,
                    }))
                }
                _ => internal_descr.smi_interpretation.clone(),
            }
        } else {
            internal_descr.smi_interpretation.clone()
        };

        Ok(ObjectDescriptor {
            object: IdentifiedObj::new(parent_num_oid, internal_descr.name.clone()),
            declared_type: internal_descr.declared_type.clone(),
            smi_interpretation: interpretation,
        })
    }

    // Consumes successive values from `fragment` to decode one value per table
    // index column, in declared order. Stops early (without error) if the
    // fragment runs out; errors if a column is non-scalar or fails to decode.
    fn decode_table_cell_indices(
        &self,
        mut fragment: impl Iterator<Item = u32>,
        table: &SMITable,
    ) -> Result<Vec<(IdentifiedObj, TableIndexValue)>, LookupError> {
        use SMIInterpretation as SI;

        let mut instance_indices = vec![];
        for (index, encoding) in table.index_fields.iter() {
            let decoded_value = match table.field_interpretation.get(&index) {
                Some(SI::Scalar(scalar_type)) => {
                    match scalar_type.decode_from_num_oid(&mut fragment, *encoding) {
                        Ok(Some(TableIndexValue::ObjectIdentifier(oidexpr))) => {
                            // Re-express decoded OID values with the best name
                            // this MIB knows; fall back to the raw expression.
                            TableIndexValue::ObjectIdentifier(
                                self.lookup_best_oidexpr(&oidexpr).unwrap_or(oidexpr),
                            )
                        }
                        Ok(Some(scalar_val)) => scalar_val,
                        Ok(None) => break,
                        Err(e) => {
                            return Err(LookupError::IndexNotDecodable {
                                source: e,
                                object: index.clone(),
                            })
                        }
                    }
                }
                Some(non_scalar_type) => {
                    return Err(LookupError::IndexNotDecodable {
                        object: index.clone(),
                        source: IndexDecodeError::NonScalarType {
                            interpretation: non_scalar_type.clone(),
                        },
                    })
                }
                None => unreachable!(
                    "Linker::interpret_table guarantees index_fields never contains keys that \
                     aren't in field_interpretation"
                ),
            };
            instance_indices.push((index.clone(), decoded_value));
        }
        Ok(instance_indices)
    }
}

// Compiles loaded MIB modules: links every object into the OID trie and the
// name table, recording orphaned identifiers as lookup errors.
impl From<Loader> for MIB {
    fn from(loader: Loader) -> Self {
        let linker = Linker::new(loader.0);

        let mut oid_descriptor_tree = SequenceTrie::new();
        let mut identifier_table = BTreeMap::new();

        for (name, num_oid) in linker.object_numeric_oids.iter() {
            let entry = linker.make_entry(&name, num_oid);
            oid_descriptor_tree.insert(num_oid, entry);
            identifier_table.insert(name.clone(), Ok(num_oid.clone()));
        }

        for (name, orphan_name) in linker.orphan_identifiers {
            identifier_table.insert(
                name,
                Err(LookupError::OrphanIdentifier {
                    identifier: orphan_name,
                }),
            );
        }

        Self {
            oid_descriptor_tree,
            identifier_table,
        }
    }
}
41.829876
101
0.60371
edf0ea40c1d3183da347e9822227c11c50dc7a50
7,436
use std::path::{Path, PathBuf}; use anyhow::{Context, Result}; use log::{debug, info, trace, warn}; pub struct Cartridge { data: Box<[u8]>, ram: Box<[u8]>, selected_rom_bank: u8, secondary_bank_register: u8, banking_mode_1: bool, save_file: PathBuf, } impl Cartridge { pub fn load<P: AsRef<Path>>(path: P) -> Result<Self> { let content = std::fs::read(path.as_ref()).context("Failed to open rom file")?; info!("Loaded {} bytes from rom file", content.len()); let mut save_file_path = PathBuf::from(path.as_ref()); save_file_path.set_extension("sav"); let mut cart = Self { data: content.into_boxed_slice(), // Allocate the most RAM a cart can have ram: vec![0; 64 * 1024].into_boxed_slice(), selected_rom_bank: 0x01, secondary_bank_register: 0x00, banking_mode_1: false, save_file: save_file_path, }; if cart.save_file.exists() { let ram = std::fs::read(&cart.save_file).context("Failed to load RAM file")?; let expected_size = cart.get_num_ram_banks() as usize * 8192; if ram.len() != expected_size { warn!("RAM file {} has size {}, expected {}. 
Ignoring...", cart.save_file.display(), ram.len(), expected_size); } else { info!("Loading RAM file {}...", cart.save_file.display()); cart.ram[..expected_size].copy_from_slice(&ram[..]); } } else { info!("No RAM file found."); } Ok(cart) } pub fn cgb_flag(&self) -> bool { self.data[0x143] >> 7 != 0 } pub fn sgb_flag(&self) -> bool { self.data[0x146] == 0x03 } pub fn title(&self) -> String { let bytes = &self.data[0x0134..=0x0143]; let end = bytes.iter().position(|b| *b == 0).unwrap_or(bytes.len()); String::from_utf8_lossy(&bytes[..end]).to_string() } pub fn licensee_code(&self) -> String { let code = self.data[0x014B]; if code == 33 { // Uses New Licensee code instead String::from_utf8_lossy(&self.data[0x0144..=0x0145]).to_string() } else { // Old licensee code format!("{:02x} (OLD)", code) } } pub fn cartridge_type(&self) -> &'static str { match self.data[0x0147] { 0x00 => "ROM ONLY", 0x01 => "MBC1", 0x02 => "MBC1+RAM", 0x03 => "MBC1+RAM+BATTERY", 0x05 => "MBC2", 0x06 => "MBC2+BATTERY", 0x08 => "ROM+RAM 1", 0x09 => "ROM+RAM+BATTERY 1", 0x0B => "MMM01", 0x0C => "MMM01+RAM", 0x0D => "MMM01+RAM+BATTERY", 0x0F => "MBC3+TIMER+BATTERY", 0x10 => "MBC3+TIMER+RAM+BATTERY 2", 0x11 => "MBC3", 0x12 => "MBC3+RAM 2", 0x13 => "MBC3+RAM+BATTERY 2", 0x19 => "MBC5", 0x1A => "MBC5+RAM", 0x1B => "MBC5+RAM+BATTERY", 0x1C => "MBC5+RUMBLE", 0x1D => "MBC5+RUMBLE+RAM", 0x1E => "MBC5+RUMBLE+RAM+BATTERY", 0x20 => "MBC6", 0x22 => "MBC7+SENSOR+RUMBLE+RAM+BATTERY", 0xFC => "POCKET CAMERA", 0xFD => "BANDAI TAMA5", 0xFE => "HuC3", 0xFF => "HuC1+RAM+BATTERY", b => panic!("Unknown cartridge type {:x}", b), } } pub fn has_mbc1(&self) -> bool { matches!(self.data[0x0147], 0x01..=0x03) } pub fn has_mbc5(&self) -> bool { matches!(self.data[0x0147], 0x19..=0x1E) } pub fn get_rom_size(&self) -> u8 { self.data[0x0148] } pub fn get_ram_size(&self) -> u8 { self.data[0x0149] } pub fn select_rom_bank(&mut self, bank: u8) { if bank == 0 && self.has_mbc1() { self.selected_rom_bank = 0x01; } else { 
self.selected_rom_bank = bank & 0x1f; } // assert!(bank <= self.get_num_rom_banks()); trace!("Selected ROM bank {}", self.selected_rom_bank); } pub fn set_secondary_bank_register(&mut self, bank: u8) { self.secondary_bank_register = bank & 0x03; trace!("Secondary bank register: {:02x}", self.secondary_bank_register); } pub fn select_banking_mode(&mut self, b: u8) { if b == 0 { self.banking_mode_1 = false; debug!("Banking mode select 0"); } else if b == 1 { self.banking_mode_1 = true; debug!("Banking mode select 1"); } else { warn!("Banking mode select set to unknown value: {:02x}", b); } } /// Read a byte from the selected bank of this cartridge's ROM. /// /// The given address should be relative to the selected bank, i.e. in the range 0000-3FFF. pub fn read_rom(&self, addr: u16) -> u8 { let mapped_addr = if addr < 0x4000 { if self.banking_mode_1 && self.get_rom_size() >= 0x05 { (self.secondary_bank_register << 5) as u32 * 0x4000 + addr as u32 } else { addr as u32 } } else { let bank_num = if self.get_rom_size() >= 0x05 { // If ROM size > 1MB (self.secondary_bank_register << 5) + self.selected_rom_bank } else { self.selected_rom_bank }; 0x4000 * (bank_num as u32) + (addr as u32 - 0x4000) }; self.data[mapped_addr as usize] } /// Read a byte from the selected bank of this cartridge's external RAM. /// /// The given address should be relative to the selected bank, i.e. in the range 0000-1FFF. pub fn read_ram(&self, addr: u16) -> u8 { assert!(addr < 0x2000, "addr=0x{:04x}", addr); let mapped_addr = if self.banking_mode_1 && self.get_ram_size() >= 0x03 { 0x2000 * self.secondary_bank_register as u16 + addr } else { addr }; self.ram[mapped_addr as usize] } /// Write a byte into the selected bank of this cartridge's external RAM /// /// The given address should be relative to the selected bank, i.e. in the range 0000-1FFF. 
pub fn write_ram(&mut self, addr: u16, b: u8) { assert!(addr < 0x2000); let addr = 0x2000 * self.secondary_bank_register as u16 + addr; self.ram[addr as usize] = b; } pub fn save(&self) { let ram_size = self.get_num_ram_banks() as usize * 8192; if let Err(e) = std::fs::write(&self.save_file, &self.ram[..ram_size]) { warn!("Failed to save RAM file {}: {}", &self.save_file.display(), e); } } #[allow(dead_code)] fn get_num_rom_banks(&self) -> u16 { match self.get_rom_size() { 0x00 => 2, 0x01 => 4, 0x02 => 8, 0x03 => 16, 0x04 => 32, 0x05 => 64, 0x06 => 128, 0x07 => 256, 0x08 => 512, s => panic!("Invalid ROM size {}", s), } } fn get_num_ram_banks(&self) -> u16 { match self.get_ram_size() { 0x00 => 0, 0x02 => 1, 0x03 => 4, 0x04 => 16, 0x05 => 8, s => panic!("Invalid ROM size {}", s), } } }
32.190476
127
0.512776
fe574b5a025afc0b706c51aa2f55b91f1dafca83
19,173
#![allow(clippy::manual_map)] use super::{gi_docgen, LocationInObject}; use crate::{ analysis::functions::Info, library::{FunctionKind, TypeId}, nameutil, Env, }; use log::{info, warn}; use once_cell::sync::Lazy; use regex::{Captures, Regex}; const LANGUAGE_SEP_BEGIN: &str = "<!-- language=\""; const LANGUAGE_SEP_END: &str = "\" -->"; const LANGUAGE_BLOCK_BEGIN: &str = "|["; const LANGUAGE_BLOCK_END: &str = "\n]|"; // A list of function names that are ignored when warning about a "not found function" const IGNORE_C_WARNING_FUNCS: [&str; 6] = [ "g_object_unref", "g_object_ref", "g_free", "g_list_free", "g_strfreev", "printf", ]; pub fn reformat_doc( input: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> String { code_blocks_transformation(input, env, in_type) } fn try_split<'a>(src: &'a str, needle: &str) -> (&'a str, Option<&'a str>) { match src.find(needle) { Some(pos) => (&src[..pos], Some(&src[pos + needle.len()..])), None => (src, None), } } fn code_blocks_transformation( mut input: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> String { let mut out = String::with_capacity(input.len()); loop { input = match try_split(input, LANGUAGE_BLOCK_BEGIN) { (before, Some(after)) => { out.push_str(&format(before, env, in_type)); if let (before, Some(after)) = try_split(get_language(after, &mut out), LANGUAGE_BLOCK_END) { out.push_str(before); out.push_str("\n```"); after } else { after } } (before, None) => { out.push_str(&format(before, env, in_type)); return out; } }; } } fn get_language<'a>(entry: &'a str, out: &mut String) -> &'a str { if let (_, Some(after)) = try_split(entry, LANGUAGE_SEP_BEGIN) { if let (before, Some(after)) = try_split(after, LANGUAGE_SEP_END) { if !["text", "rust"].contains(&before) { out.push_str(&format!("\n\n**⚠️ The following code is in {} ⚠️**", before)); } out.push_str(&format!("\n\n```{}", before)); return after; } } out.push_str("\n```text"); entry } // try to get the language if 
any is defined or fallback to text fn get_markdown_language(input: &str) -> (&str, &str) { let (lang, after) = if let Some((lang, after)) = input.split_once("\n") { let lang = if lang.is_empty() { None } else { Some(lang) }; (lang, after) } else { (None, input) }; (lang.unwrap_or("text"), after) } // Re-format codeblocks & replaces the C types and GI-docgen with proper links fn format( mut input: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> String { let mut ret = String::with_capacity(input.len()); loop { input = match try_split(input, "```") { (before, Some(after)) => { // if we are inside a codeblock ret.push_str(&replace_symbols(before, env, in_type)); let (lang, after) = get_markdown_language(after); if !["text", "rust", "xml", "css", "json", "html"].contains(&lang) && after.lines().count() > 1 { ret.push_str(&format!("**⚠️ The following code is in {0} ⚠️**\n\n", lang)); } ret.push_str(&format!("```{}\n", lang)); if let (before, Some(after)) = try_split(after, "```") { ret.push_str(before); ret.push_str("```"); after } else { after } } (before, None) => { ret.push_str(&replace_symbols(before, env, in_type)); return ret; } } } } fn replace_symbols( input: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> String { if env.config.use_gi_docgen { let out = gi_docgen::replace_c_types(input, env, in_type); let out = GI_DOCGEN_SYMBOL.replace_all(&out, |caps: &Captures<'_>| match &caps[2] { "TRUE" => "[`true`]".to_string(), "FALSE" => "[`false`]".to_string(), "NULL" => "[`None`]".to_string(), symbol_name => match &caps[1] { // Opt-in only for the %SYMBOLS, @/# causes breakages "%" => find_constant_or_variant_wrapper(symbol_name, env, in_type), s => panic!("Unknown symbol prefix `{}`", s), }, }); let out = GDK_GTK.replace_all(&out, |caps: &Captures<'_>| { find_type(&caps[2], env).unwrap_or_else(|| format!("`{}`", &caps[2])) }); out.to_string() } else { replace_c_types(input, env, in_type) } } static SYMBOL: 
Lazy<Regex> = Lazy::new(|| Regex::new(r"([@#%])(\w+\b)([:.]+[\w-]+\b)?").unwrap()); static GI_DOCGEN_SYMBOL: Lazy<Regex> = Lazy::new(|| Regex::new(r"([%])(\w+\b)([:.]+[\w-]+\b)?").unwrap()); static FUNCTION: Lazy<Regex> = Lazy::new(|| Regex::new(r"([@#%])?(\w+\b[:.]+)?(\b[a-z0-9_]+)\(\)").unwrap()); // **note** // The optional . at the end is to make the regex more relaxed for some weird broken cases on gtk3's docs // it doesn't hurt other docs so please don't drop it static GDK_GTK: Lazy<Regex> = Lazy::new(|| { Regex::new(r"`([^\(:])?((G[dts]k|Pango|cairo_|graphene_|Adw|Hdy|GtkSource)\w+\b)(\.)?`") .unwrap() }); static TAGS: Lazy<Regex> = Lazy::new(|| Regex::new(r"<[\w/-]+>").unwrap()); static SPACES: Lazy<Regex> = Lazy::new(|| Regex::new(r"[ ]{2,}").unwrap()); fn replace_c_types( entry: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> String { let out = FUNCTION.replace_all(entry, |caps: &Captures<'_>| { let name = &caps[3]; find_method_or_function_by_ctype(None, name, env, in_type).unwrap_or_else(|| { if !IGNORE_C_WARNING_FUNCS.contains(&name) { info!("No function found for `{}()`", name); } format!("`{}{}()`", caps.get(2).map_or("", |m| m.as_str()), name) }) }); let out = SYMBOL.replace_all(&out, |caps: &Captures<'_>| match &caps[2] { "TRUE" => "[`true`]".to_string(), "FALSE" => "[`false`]".to_string(), "NULL" => "[`None`]".to_string(), symbol_name => match &caps[1] { "%" => find_constant_or_variant_wrapper(symbol_name, env, in_type), "#" => { if let Some(member_path) = caps.get(3).map(|m| m.as_str()) { let method_name = member_path.trim_start_matches('.'); find_member(symbol_name, method_name, env, in_type).unwrap_or_else(|| { info!("`#{}` not found as method", symbol_name); format!("`{}{}`", symbol_name, member_path) }) } else if let Some(type_) = find_type(symbol_name, env) { type_ } else if let Some(constant_or_variant) = find_constant_or_variant(symbol_name, env, in_type) { warn!( "`{}` matches a constant/variant and should use 
`%` prefix instead of `#`", symbol_name ); constant_or_variant } else { info!("Type `#{}` not found", symbol_name); format!("`{}`", symbol_name) } } "@" => { // XXX: Theoretically this code should check if the resulting // symbol truly belongs to `in_type`! if let Some(type_) = find_type(symbol_name, env) { warn!( "`{}` matches a type and should use `#` prefix instead of `%`", symbol_name ); type_ } else if let Some(constant_or_variant) = find_constant_or_variant(symbol_name, env, in_type) { constant_or_variant } else if let Some(function) = find_method_or_function_by_ctype(None, symbol_name, env, in_type) { function } else { // `@` is often used to refer to fields and function parameters. format!("`{}`", symbol_name) } } s => panic!("Unknown symbol prefix `{}`", s), }, }); let out = GDK_GTK.replace_all(&out, |caps: &Captures<'_>| { find_type(&caps[2], env).unwrap_or_else(|| format!("`{}`", &caps[2])) }); let out = TAGS.replace_all(&out, "`$0`"); SPACES.replace_all(&out, " ").into_owned() } /// Wrapper around [`find_constant_or_variant`] that fallbacks to returning /// the `symbol_name` fn find_constant_or_variant_wrapper( symbol_name: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> String { find_constant_or_variant(symbol_name, env, in_type).unwrap_or_else(|| { info!("Constant or variant `%{}` not found", symbol_name); format!("`{}`", symbol_name) }) } fn find_member( type_: &str, method_name: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> Option<String> { let symbols = env.symbols.borrow(); let is_signal = method_name.starts_with("::"); let is_property = !is_signal && method_name.starts_with(':'); if !is_signal && !is_property { find_method_or_function_by_ctype(Some(type_), method_name, env, in_type) } else { env.analysis .objects .values() .find(|o| o.c_type == type_) .map(|info| { let sym = symbols.by_tid(info.type_id).unwrap(); // we are sure the object exists let name = 
method_name.trim_start_matches(':'); if is_property { gen_property_doc_link(&sym.full_rust_name(), name) } else { gen_signal_doc_link(&sym.full_rust_name(), name) } }) } } fn find_constant_or_variant( symbol: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> Option<String> { if let Some((flag_info, member_info)) = env.analysis.flags.iter().find_map(|f| { f.type_(&env.library) .members .iter() .find(|m| m.c_identifier == symbol && !m.status.ignored()) .map(|m| (f, m)) }) { Some(gen_member_doc_link( flag_info.type_id, &nameutil::bitfield_member_name(&member_info.name), env, in_type, )) } else if let Some((enum_info, member_info)) = env.analysis.enumerations.iter().find_map(|e| { e.type_(&env.library) .members .iter() .find(|m| m.c_identifier == symbol && !m.status.ignored()) .map(|m| (e, m)) }) { Some(gen_member_doc_link( enum_info.type_id, &nameutil::enum_member_name(&member_info.name), env, in_type, )) } else if let Some(const_info) = env .analysis .constants .iter() .find(|c| c.glib_name == symbol) { Some(gen_const_doc_link(const_info)) } else { None } } // A list of types that are automatically ignored by the `find_type` function const IGNORED_C_TYPES: [&str; 6] = [ "gconstpointer", "guint16", "guint", "gunicode", "gchararray", "GList", ]; /// either an object/interface, record, enum or a flag fn find_type(type_: &str, env: &Env) -> Option<String> { if IGNORED_C_TYPES.contains(&type_) { return None; } let type_id = if let Some(obj) = env.analysis.objects.values().find(|o| o.c_type == type_) { Some(obj.type_id) } else if let Some(record) = env .analysis .records .values() .find(|r| r.type_(&env.library).c_type == type_) { Some(record.type_id) } else if let Some(enum_) = env .analysis .enumerations .iter() .find(|e| e.type_(&env.library).c_type == type_) { Some(enum_.type_id) } else if let Some(flag) = env .analysis .flags .iter() .find(|f| f.type_(&env.library).c_type == type_) { Some(flag.type_id) } else { None }; type_id.map(|ty| 
gen_symbol_doc_link(ty, env)) } fn find_method_or_function_by_ctype( c_type: Option<&str>, name: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> Option<String> { find_method_or_function( name, env, in_type, |f| f.glib_name == name, |o| c_type.map_or(true, |t| o.c_type == t), |r| c_type.map_or(true, |t| r.type_(&env.library).c_type == t), |r| c_type.map_or(true, |t| r.type_(&env.library).c_type == t), |r| c_type.map_or(true, |t| r.type_(&env.library).c_type == t), c_type.map_or(false, |t| t.ends_with("Class")), ) } /// Find a function in all the possible items, if not found return the original name surrounded with backticks. /// A function can either be a struct/interface/record method, a global function or maybe a virtual function /// /// This function is generic so it can be de-duplicated between a /// - [`find_method_or_function_by_ctype()`] where the object/records are looked by their C name /// - [`gi_docgen::find_method_or_function_by_name()`] where the object/records are looked by their name pub(crate) fn find_method_or_function( name: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, search_fn: impl Fn(&crate::analysis::functions::Info) -> bool + Copy, search_obj: impl Fn(&crate::analysis::object::Info) -> bool + Copy, search_record: impl Fn(&crate::analysis::record::Info) -> bool + Copy, search_enum: impl Fn(&crate::analysis::enums::Info) -> bool + Copy, search_flag: impl Fn(&crate::analysis::flags::Info) -> bool + Copy, is_class_method: bool, ) -> Option<String> { if is_class_method { info!("Class methods are not supported yet `{}`", name); return None; } // if we can find the function in an object if let Some((obj_info, fn_info)) = env .analysis .find_object_by_function(env, search_obj, search_fn) { Some(gen_object_fn_doc_link( obj_info, fn_info, env, in_type, &obj_info.name, )) // or in a record } else if let Some((record_info, fn_info)) = env.analysis .find_record_by_function(env, search_record, 
search_fn) { Some(gen_type_fn_doc_link( record_info.type_id, fn_info, env, in_type, )) } else if let Some((enum_info, fn_info)) = env.analysis .find_enum_by_function(env, search_enum, search_fn) { Some(gen_type_fn_doc_link( enum_info.type_id, fn_info, env, in_type, )) } else if let Some((flag_info, fn_info)) = env.analysis .find_flag_by_function(env, search_flag, search_fn) { Some(gen_type_fn_doc_link( flag_info.type_id, fn_info, env, in_type, )) // or as a global function } else if let Some(fn_info) = env.analysis.find_global_function(env, search_fn) { Some(fn_info.doc_link(None, None, false)) } else { None } } pub(crate) fn gen_type_fn_doc_link( type_id: TypeId, fn_info: &Info, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> String { let symbols = env.symbols.borrow(); let sym_name = symbols.by_tid(type_id).unwrap().full_rust_name(); let is_self = in_type == Some((&type_id, None)); fn_info.doc_link(Some(&sym_name), None, is_self) } pub(crate) fn gen_object_fn_doc_link( obj_info: &crate::analysis::object::Info, fn_info: &Info, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, visible_name: &str, ) -> String { let symbols = env.symbols.borrow(); let sym = symbols.by_tid(obj_info.type_id).unwrap(); let is_self = in_type == Some((&obj_info.type_id, Some(obj_info.function_location(fn_info)))); if fn_info.kind == FunctionKind::Method { let (type_name, visible_type_name) = obj_info.generate_doc_link_info(fn_info); fn_info.doc_link( Some(&sym.full_rust_name().replace(visible_name, &type_name)), Some(&visible_type_name), is_self, ) } else { fn_info.doc_link(Some(&sym.full_rust_name()), None, is_self) } } // Helper function to generate a doc link for an enum member/bitfield variant pub(crate) fn gen_member_doc_link( type_id: TypeId, member_name: &str, env: &Env, in_type: Option<(&TypeId, Option<LocationInObject>)>, ) -> String { let symbols = env.symbols.borrow(); let sym = symbols.by_tid(type_id).unwrap().full_rust_name(); let is_self 
= in_type == Some((&type_id, None));
    if is_self {
        // Member of the type currently being documented: link via `Self`.
        format!("[`{m}`][Self::{m}]", m = member_name)
    } else {
        format!("[`{s}::{m}`][crate::{s}::{m}]", s = sym, m = member_name)
    }
}

/// Generate a doc link for a constant.
pub(crate) fn gen_const_doc_link(const_info: &crate::analysis::constants::Info) -> String {
    // for whatever reason constants are not part of the symbols list
    format!("[`{n}`][crate::{n}]", n = const_info.name)
}

/// Generate an inline-code doc link for a signal of `symbol`.
pub(crate) fn gen_signal_doc_link(symbol: &str, signal: &str) -> String {
    format!("`signal::{}::{}`", symbol, signal)
}

/// Generate an inline-code doc link for a property of `symbol`.
pub(crate) fn gen_property_doc_link(symbol: &str, property: &str) -> String {
    format!("`property::{}::{}`", symbol, property)
}

/// Generate an inline-code doc link for a virtual function of `symbol`.
pub(crate) fn gen_vfunc_doc_link(symbol: &str, vfunc: &str) -> String {
    format!("`vfunc::{}::{}`", symbol, vfunc)
}

/// Generate an inline-code doc link for a callback.
pub(crate) fn gen_callback_doc_link(callback: &str) -> String {
    // BUG FIX: the closing backtick was missing, so the generated rustdoc
    // contained unbalanced inline code ("`callback::Foo" instead of
    // "`callback::Foo`"). All sibling helpers emit balanced backticks.
    format!("`callback::{}`", callback)
}

/// Generate an inline-code doc link for an alias.
pub(crate) fn gen_alias_doc_link(alias: &str) -> String {
    format!("`alias::{}`", alias)
}

/// Generate a doc link for a type symbol (object/record/enum/flag).
pub(crate) fn gen_symbol_doc_link(type_id: TypeId, env: &Env) -> String {
    let symbols = env.symbols.borrow();
    let sym = symbols.by_tid(type_id).unwrap();
    // Workaround the case of glib::Variant being a derive macro and a struct
    if sym.name() == "Variant" && (sym.crate_name().is_none() || sym.crate_name() == Some("glib")) {
        format!("[`{n}`][struct@crate::{n}]", n = sym.full_rust_name())
    } else {
        format!("[`{n}`][crate::{n}]", n = sym.full_rust_name())
    }
}
34.176471
111
0.548323
757caed023a6c1ebaa7423cfbeab00b7ec43e7c3
5,314
// * This file is part of the uutils coreutils package. // * // * (c) Alan Andrade <alan.andradec@gmail.com> // * // * For the full copyright and license information, please view the LICENSE // * file that was distributed with this source code. // spell-checker:ignore (ToDO) MAKEWORD addrs hashset use std::collections::hash_set::HashSet; use std::net::ToSocketAddrs; use std::str; use clap::{crate_version, Arg, ArgMatches, Command}; use uucore::{ error::{FromIo, UResult}, format_usage, }; static ABOUT: &str = "Display or set the system's host name."; const USAGE: &str = "{} [OPTION]... [HOSTNAME]"; static OPT_DOMAIN: &str = "domain"; static OPT_IP_ADDRESS: &str = "ip-address"; static OPT_FQDN: &str = "fqdn"; static OPT_SHORT: &str = "short"; static OPT_HOST: &str = "host"; #[cfg(windows)] mod wsa { use std::io; use winapi::shared::minwindef::MAKEWORD; use winapi::um::winsock2::{WSACleanup, WSAStartup, WSADATA}; pub(super) struct WsaHandle(()); pub(super) fn start() -> io::Result<WsaHandle> { let err = unsafe { let mut data = std::mem::MaybeUninit::<WSADATA>::uninit(); WSAStartup(MAKEWORD(2, 2), data.as_mut_ptr()) }; if err != 0 { Err(io::Error::from_raw_os_error(err)) } else { Ok(WsaHandle(())) } } impl Drop for WsaHandle { fn drop(&mut self) { unsafe { // This possibly returns an error but we can't handle it let _err = WSACleanup(); } } } } #[uucore::main] pub fn uumain(args: impl uucore::Args) -> UResult<()> { let matches = uu_app().get_matches_from(args); #[cfg(windows)] let _handle = wsa::start().map_err_context(|| "failed to start Winsock".to_owned())?; match matches.value_of_os(OPT_HOST) { None => display_hostname(&matches), Some(host) => hostname::set(host).map_err_context(|| "failed to set hostname".to_owned()), } } pub fn uu_app<'a>() -> Command<'a> { Command::new(uucore::util_name()) .version(crate_version!()) .about(ABOUT) .override_usage(format_usage(USAGE)) .infer_long_args(true) .arg( Arg::new(OPT_DOMAIN) .short('d') .long("domain") 
.overrides_with_all(&[OPT_DOMAIN, OPT_IP_ADDRESS, OPT_FQDN, OPT_SHORT]) .help("Display the name of the DNS domain if possible"), ) .arg( Arg::new(OPT_IP_ADDRESS) .short('i') .long("ip-address") .overrides_with_all(&[OPT_DOMAIN, OPT_IP_ADDRESS, OPT_FQDN, OPT_SHORT]) .help("Display the network address(es) of the host"), ) .arg( Arg::new(OPT_FQDN) .short('f') .long("fqdn") .overrides_with_all(&[OPT_DOMAIN, OPT_IP_ADDRESS, OPT_FQDN, OPT_SHORT]) .help("Display the FQDN (Fully Qualified Domain Name) (default)"), ) .arg( Arg::new(OPT_SHORT) .short('s') .long("short") .overrides_with_all(&[OPT_DOMAIN, OPT_IP_ADDRESS, OPT_FQDN, OPT_SHORT]) .help("Display the short hostname (the portion before the first dot) if possible"), ) .arg(Arg::new(OPT_HOST).allow_invalid_utf8(true)) } fn display_hostname(matches: &ArgMatches) -> UResult<()> { let hostname = hostname::get() .map_err_context(|| "failed to get hostname".to_owned())? .to_string_lossy() .into_owned(); if matches.is_present(OPT_IP_ADDRESS) { // XXX: to_socket_addrs needs hostname:port so append a dummy port and remove it later. // This was originally supposed to use std::net::lookup_host, but that seems to be // deprecated. Perhaps we should use the dns-lookup crate? let hostname = hostname + ":1"; let addresses = hostname .to_socket_addrs() .map_err_context(|| "failed to resolve socket addresses".to_owned())?; let mut hashset = HashSet::new(); let mut output = String::new(); for addr in addresses { // XXX: not sure why this is necessary... 
if !hashset.contains(&addr) { let mut ip = addr.to_string(); if ip.ends_with(":1") { let len = ip.len(); ip.truncate(len - 2); } output.push_str(&ip); output.push(' '); hashset.insert(addr); } } let len = output.len(); if len > 0 { println!("{}", &output[0..len - 1]); } Ok(()) } else { if matches.is_present(OPT_SHORT) || matches.is_present(OPT_DOMAIN) { let mut it = hostname.char_indices().filter(|&ci| ci.1 == '.'); if let Some(ci) = it.next() { if matches.is_present(OPT_SHORT) { println!("{}", &hostname[0..ci.0]); } else { println!("{}", &hostname[ci.0 + 1..]); } return Ok(()); } } println!("{}", hostname); Ok(()) } }
32.402439
99
0.539706
fb9e855a807d8038c9330183029a81c3dc3b21e1
15,118
/* Copyright 2021 Integritee AG and Supercomputing Systems AG Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ //! Aura worker for the sidechain. //! //! It is inspired by parity's implementation but has been greatly amended. #![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(test, feature(assert_matches))] #[cfg(all(feature = "std", feature = "sgx"))] compile_error!("feature \"std\" and feature \"sgx\" cannot be enabled at the same time"); #[cfg(all(not(feature = "std"), feature = "sgx"))] #[macro_use] extern crate sgx_tstd as std; use core::marker::PhantomData; use itc_parentchain_block_import_dispatcher::triggered_dispatcher::TriggerParentchainBlockImport; use itp_storage_verifier::GetStorageVerified; use itp_time_utils::duration_now; use its_consensus_common::{Environment, Error as ConsensusError, Proposer}; use its_consensus_slots::{SimpleSlotWorker, Slot, SlotInfo}; use its_primitives::{ traits::{Block as SidechainBlockT, SignedBlock}, types::block::BlockHash, }; use its_validateer_fetch::ValidateerFetch; use sp_runtime::{ app_crypto::{sp_core::H256, Pair, Public}, generic::SignedBlock as SignedParentchainBlock, traits::Block as ParentchainBlock, }; use std::{string::ToString, sync::Arc, time::Duration, vec::Vec}; pub mod block_importer; pub mod proposer_factory; pub mod slot_proposer; mod verifier; pub use verifier::*; #[cfg(test)] mod mock; #[cfg(test)] mod block_importer_tests; /// Aura consensus struct. 
pub struct Aura<
	AuthorityPair,
	ParentchainBlock,
	SidechainBlock,
	Environment,
	OcallApi,
	ImportTrigger,
> {
	/// Key pair of this validateer; used to decide whether a slot is ours.
	authority_pair: AuthorityPair,
	/// O-call API used to fetch the current validateer (authority) set.
	ocall_api: OcallApi,
	/// Trigger to import pending parentchain blocks before producing a sidechain block.
	parentchain_import_trigger: Arc<ImportTrigger>,
	/// Factory for block proposers (one per header/shard).
	environment: Environment,
	/// Whether we claim only our own slots (`RoundRobin`) or every slot (`Always`).
	claim_strategy: SlotClaimStrategy,
	/// Remove when #447 is resolved.
	allow_delayed_proposal: bool,
	_phantom: PhantomData<(AuthorityPair, ParentchainBlock, SidechainBlock)>,
}

impl<AuthorityPair, ParentchainBlock, SidechainBlock, Environment, OcallApi, ImportTrigger>
	Aura<AuthorityPair, ParentchainBlock, SidechainBlock, Environment, OcallApi, ImportTrigger>
{
	/// Creates an Aura worker with the default configuration:
	/// `RoundRobin` slot claiming and delayed proposals disallowed.
	pub fn new(
		authority_pair: AuthorityPair,
		ocall_api: OcallApi,
		parentchain_import_trigger: Arc<ImportTrigger>,
		environment: Environment,
	) -> Self {
		Self {
			authority_pair,
			ocall_api,
			parentchain_import_trigger,
			environment,
			claim_strategy: SlotClaimStrategy::RoundRobin,
			allow_delayed_proposal: false,
			_phantom: Default::default(),
		}
	}

	/// Builder-style setter for the slot-claim strategy.
	pub fn with_claim_strategy(mut self, claim_strategy: SlotClaimStrategy) -> Self {
		self.claim_strategy = claim_strategy;
		self
	}

	/// Builder-style setter for allowing delayed proposals.
	pub fn with_allow_delayed_proposal(mut self, allow_delayed: bool) -> Self {
		self.allow_delayed_proposal = allow_delayed;
		self
	}
}

/// The fraction of total block time we are allowed to be producing the block, so that we have
/// enough time to create and send the block to fellow validateers.
pub const BLOCK_PROPOSAL_SLOT_PORTION: f32 = 0.8;

#[derive(PartialEq, Eq, Debug)]
pub enum SlotClaimStrategy {
	/// Try to produce a block always, even if it's not the author's slot.
	/// Intended for first phase to see if aura production works.
	Always,
	/// Proper Aura strategy: only produce blocks when it's the author's slot.
	RoundRobin,
}

type AuthorityId<P> = <P as Pair>::Public;
type ShardIdentifierFor<SB> = <<SB as SignedBlock>::Block as SidechainBlockT>::ShardIdentifier;

impl<AuthorityPair, PB, SB, E, OcallApi, ImportTrigger> SimpleSlotWorker<PB>
	for Aura<AuthorityPair, PB, SB, E, OcallApi, ImportTrigger>
where
	AuthorityPair: Pair,
	// todo: Relax hash trait bound, but this needs a change to some other parts in the code.
	PB: ParentchainBlock<Hash = BlockHash>,
	E: Environment<PB, SB, Error = ConsensusError>,
	E::Proposer: Proposer<PB, SB>,
	SB: SignedBlock + Send + 'static,
	OcallApi: ValidateerFetch + GetStorageVerified + Send + 'static,
	ImportTrigger: TriggerParentchainBlockImport<SignedParentchainBlock<PB>>,
{
	type Proposer = E::Proposer;
	type Claim = AuthorityPair::Public;
	type EpochData = Vec<AuthorityId<AuthorityPair>>;
	type Output = SB;

	fn logging_target(&self) -> &'static str {
		"aura"
	}

	/// Epoch data is the current validateer set, fetched via the o-call API.
	fn epoch_data(
		&self,
		header: &PB::Header,
		_slot: Slot,
	) -> Result<Self::EpochData, ConsensusError> {
		authorities::<_, AuthorityPair, PB>(&self.ocall_api, header)
	}

	fn authorities_len(&self, epoch_data: &Self::EpochData) -> Option<usize> {
		Some(epoch_data.len())
	}

	/// Returns our public key if we may author in this slot (per the claim
	/// strategy), `None` otherwise.
	fn claim_slot(
		&self,
		_header: &PB::Header,
		slot: Slot,
		epoch_data: &Self::EpochData,
	) -> Option<Self::Claim> {
		let expected_author = slot_author::<AuthorityPair>(slot, epoch_data)?;

		if expected_author == &self.authority_pair.public() {
			return Some(self.authority_pair.public())
		}

		// With the `Always` strategy we claim slots that belong to other
		// authorities as well (testing mode, see `SlotClaimStrategy`).
		if self.claim_strategy == SlotClaimStrategy::Always {
			log::debug!(
				target: self.logging_target(),
				"Not our slot but we still claim it."
			);
			return Some(self.authority_pair.public())
		}

		None
	}

	fn proposer(
		&mut self,
		header: PB::Header,
		shard: ShardIdentifierFor<Self::Output>,
	) -> Result<Self::Proposer, ConsensusError> {
		self.environment.init(header, shard)
	}

	fn proposing_remaining_duration(&self, slot_info: &SlotInfo<PB>) -> Duration {
		proposing_remaining_duration(slot_info, duration_now())
	}

	fn allow_delayed_proposal(&self) -> bool {
		self.allow_delayed_proposal
	}

	/// Imports all pending parentchain blocks and returns the latest imported
	/// header, falling back to the currently known latest header if the
	/// trigger produced none.
	fn import_latest_parentchain_block(
		&self,
		current_latest_imported_header: &PB::Header,
	) -> Result<PB::Header, ConsensusError> {
		let maybe_latest_imported_header = self
			.parentchain_import_trigger
			.import_all()
			.map_err(|e| ConsensusError::Other(e.into()))?;

		Ok(maybe_latest_imported_header
			.map(|b| b.block.header().clone())
			.unwrap_or_else(|| current_latest_imported_header.clone()))
	}
}

/// Unit-testable remaining-duration fn: the smaller of (time left in the
/// slot) and (total slot duration), both scaled by `BLOCK_PROPOSAL_SLOT_PORTION`.
fn proposing_remaining_duration<PB: ParentchainBlock>(
	slot_info: &SlotInfo<PB>,
	now: Duration,
) -> Duration {
	// if a `now` before slot begin is passed such that `slot_remaining` would be bigger than
	// `slot.slot_duration` we take the total `slot_duration` as reference value.
	let proposing_duration = slot_info.duration.mul_f32(BLOCK_PROPOSAL_SLOT_PORTION);

	// `checked_sub` yields `None` (-> zero) if `now` is already past the slot end.
	let slot_remaining = slot_info
		.ends_at
		.checked_sub(now)
		.map(|remaining| remaining.mul_f32(BLOCK_PROPOSAL_SLOT_PORTION))
		.unwrap_or_default();

	std::cmp::min(slot_remaining, proposing_duration)
}

/// Fetches the current validateer set via the o-call API and converts the
/// validateer public keys into `AuthorityId`s.
fn authorities<C, P, B>(
	ocall_api: &C,
	header: &B::Header,
) -> Result<Vec<AuthorityId<P>>, ConsensusError>
where
	C: ValidateerFetch + GetStorageVerified,
	P: Pair,
	B: ParentchainBlock<Hash = H256>,
{
	Ok(ocall_api
		.current_validateers(header)
		.map_err(|e| ConsensusError::CouldNotGetAuthorities(e.to_string()))?
		.into_iter()
		.map(|e| AuthorityId::<P>::from_slice(e.pubkey.as_ref()))
		.collect())
}

/// Get slot author for given block along with authorities.
/// Selects the authority for `slot` round-robin: `slot mod authorities.len()`.
/// Returns `None` if the authority set is empty.
fn slot_author<P: Pair>(slot: Slot, authorities: &[AuthorityId<P>]) -> Option<&AuthorityId<P>> {
	if authorities.is_empty() {
		return None
	}

	// `idx` is always < authorities.len() thanks to the modulo above.
	let idx = *slot % (authorities.len() as u64);
	assert!(
		idx <= usize::MAX as u64,
		"It is impossible to have a vector with length beyond the address space; qed",
	);

	let current_author = authorities.get(idx as usize).expect(
		"authorities not empty; index constrained to list length;this is a valid index; qed",
	);

	Some(current_author)
}

#[cfg(test)]
mod tests {
	use super::*;
	use crate::mock::{default_header, validateer, EnvironmentMock, TestAura, SLOT_DURATION};
	use itc_parentchain_block_import_dispatcher::trigger_parentchain_block_import_mock::TriggerParentchainBlockImportMock;
	use itp_test::{
		builders::{
			parentchain_block_builder::ParentchainBlockBuilder,
			parentchain_header_builder::ParentchainHeaderBuilder,
		},
		mock::onchain_mock::OnchainMock,
	};
	use itp_types::{
		Block as ParentchainBlock, Header as ParentchainHeader,
		SignedBlock as SignedParentchainBlock,
	};
	use its_consensus_slots::PerShardSlotWorkerScheduler;
	use sp_core::ed25519::Public;
	use sp_keyring::ed25519::Keyring;

	// Builds an Aura worker authoring as Alice with the given mocks.
	fn get_aura(
		onchain_mock: OnchainMock,
		trigger_parentchain_import: Arc<TriggerParentchainBlockImportMock<SignedParentchainBlock>>,
	) -> TestAura {
		Aura::new(Keyring::Alice.pair(), onchain_mock, trigger_parentchain_import, EnvironmentMock)
	}

	fn get_default_aura() -> TestAura {
		get_aura(Default::default(), Default::default())
	}

	// A slot starting "now" with the default test slot duration.
	fn now_slot(slot: Slot) -> SlotInfo<ParentchainBlock> {
		SlotInfo {
			slot,
			timestamp: duration_now(),
			duration: SLOT_DURATION,
			ends_at: duration_now() + SLOT_DURATION,
			last_imported_parentchain_head: default_header(),
		}
	}

	fn default_authorities() -> Vec<Public> {
		vec![
			Keyring::Alice.public().into(),
			Keyring::Bob.public().into(),
			Keyring::Charlie.public().into(),
		]
	}

	fn onchain_mock(authorities: Vec<Public>) -> OnchainMock {
		let validateers = authorities.iter().map(|a| validateer(a.clone().into())).collect();
		OnchainMock::default().with_validateer_set(Some(validateers))
	}

	fn onchain_mock_with_default_authorities() -> OnchainMock {
		onchain_mock(default_authorities())
	}

	// Import trigger whose latest imported parentchain block has `header`.
	fn create_import_trigger_with_header(
		header: ParentchainHeader,
	) -> Arc<TriggerParentchainBlockImportMock<SignedParentchainBlock>> {
		let latest_parentchain_block =
			ParentchainBlockBuilder::default().with_header(header).build_signed();
		Arc::new(
			TriggerParentchainBlockImportMock::default()
				.with_latest_imported(Some(latest_parentchain_block)),
		)
	}

	#[test]
	fn current_authority_should_claim_its_slot() {
		// Alice is at index 2, so only slots 2, 5, 8, ... are hers.
		let authorities = vec![
			Keyring::Bob.public().into(),
			Keyring::Charlie.public().into(),
			Keyring::Alice.public().into(),
		];
		let aura = get_default_aura();

		assert!(aura.claim_slot(&default_header(), 0.into(), &authorities).is_none());
		assert!(aura.claim_slot(&default_header(), 1.into(), &authorities).is_none());
		// this our authority
		assert!(aura.claim_slot(&default_header(), 2.into(), &authorities).is_some());

		assert!(aura.claim_slot(&default_header(), 3.into(), &authorities).is_none());
		assert!(aura.claim_slot(&default_header(), 4.into(), &authorities).is_none());
		// this our authority
		assert!(aura.claim_slot(&default_header(), 5.into(), &authorities).is_some());
	}

	#[test]
	fn current_authority_should_claim_all_slots() {
		let authorities = default_authorities();
		let aura = get_default_aura().with_claim_strategy(SlotClaimStrategy::Always);

		assert!(aura.claim_slot(&default_header(), 0.into(), &authorities).is_some());
		assert!(aura.claim_slot(&default_header(), 1.into(), &authorities).is_some());
		// this our authority
		assert!(aura.claim_slot(&default_header(), 2.into(), &authorities).is_some());
		assert!(aura.claim_slot(&default_header(), 3.into(), &authorities).is_some());
	}

	#[test]
	fn on_slot_returns_block() {
		let _ = env_logger::builder().is_test(true).try_init();
		let onchain_mock = onchain_mock_with_default_authorities();
		let mut aura = get_aura(onchain_mock, Default::default());

		let slot_info = now_slot(0.into());

		assert!(SimpleSlotWorker::on_slot(&mut aura, slot_info, Default::default()).is_some());
	}

	#[test]
	fn on_slot_for_multiple_shards_returns_blocks() {
		let _ = env_logger::builder().is_test(true).try_init();
		let onchain_mock = onchain_mock_with_default_authorities();
		let mut aura = get_aura(onchain_mock, Default::default());

		let slot_info = now_slot(0.into());

		let result = PerShardSlotWorkerScheduler::on_slot(
			&mut aura,
			slot_info,
			vec![Default::default(), Default::default()],
		);

		assert_eq!(result.len(), 2);
	}

	#[test]
	fn on_slot_with_nano_second_remaining_duration_does_not_panic() {
		let _ = env_logger::builder().is_test(true).try_init();
		let mut aura = get_default_aura();

		// A slot so short that effectively no proposing time remains.
		let nano_dur = Duration::from_nanos(999);
		let now = duration_now();

		let slot_info = SlotInfo {
			slot: 0.into(),
			timestamp: now,
			duration: nano_dur,
			ends_at: now + nano_dur,
			last_imported_parentchain_head: default_header(),
		};

		let result = PerShardSlotWorkerScheduler::on_slot(
			&mut aura,
			slot_info,
			vec![Default::default(), Default::default()],
		);

		assert_eq!(result.len(), 0);
	}

	#[test]
	fn on_slot_triggers_parentchain_block_import_if_slot_is_claimed() {
		let _ = env_logger::builder().is_test(true).try_init();
		let latest_parentchain_header = ParentchainHeaderBuilder::default().with_number(84).build();
		let parentchain_block_import_trigger =
			create_import_trigger_with_header(latest_parentchain_header.clone());

		let mut aura = get_aura(
			onchain_mock_with_default_authorities(),
			parentchain_block_import_trigger.clone(),
		);

		// Slot 0 belongs to Alice (our authority) -> block is produced and
		// the parentchain import must have been triggered.
		let slot_info = now_slot(0.into());

		let result = SimpleSlotWorker::on_slot(&mut aura, slot_info, Default::default()).unwrap();

		assert_eq!(result.block.block.layer_one_head, latest_parentchain_header.hash());
		assert!(parentchain_block_import_trigger.has_import_been_called());
	}

	#[test]
	fn on_slot_does_not_trigger_parentchain_block_import_if_slot_is_not_claimed() {
		let _ = env_logger::builder().is_test(true).try_init();
		let latest_parentchain_header = ParentchainHeaderBuilder::default().with_number(84).build();
		let parentchain_block_import_trigger =
			create_import_trigger_with_header(latest_parentchain_header);

		let mut aura = get_aura(
			onchain_mock_with_default_authorities(),
			parentchain_block_import_trigger.clone(),
		);

		// Slot 2 is not Alice's -> no block, no import.
		let slot_info = now_slot(2.into());

		let result = SimpleSlotWorker::on_slot(&mut aura, slot_info, Default::default());

		assert!(result.is_none());
		assert!(!parentchain_block_import_trigger.has_import_been_called());
	}

	#[test]
	fn proposing_remaining_duration_works() {
		let slot_info = now_slot(0.into());

		// hard to compare actual numbers but we can at least ensure that the general concept works
		assert!(
			proposing_remaining_duration(&slot_info, duration_now()) > SLOT_DURATION / 2
				&& proposing_remaining_duration(&slot_info, duration_now())
					< SLOT_DURATION.mul_f32(BLOCK_PROPOSAL_SLOT_PORTION + 0.01)
		);
	}

	#[test]
	fn proposing_remaining_duration_works_for_now_before_slot_timestamp() {
		let slot_info = now_slot(0.into());

		assert!(
			proposing_remaining_duration(&slot_info, Duration::from_millis(0)) > SLOT_DURATION / 2
				&& proposing_remaining_duration(&slot_info, Duration::from_millis(0))
					< SLOT_DURATION.mul_f32(BLOCK_PROPOSAL_SLOT_PORTION + 0.01)
		);
	}

	#[test]
	fn proposing_remaining_duration_returns_default_if_now_after_slot() {
		let slot_info = now_slot(0.into());

		assert_eq!(
			proposing_remaining_duration(&slot_info, duration_now() + SLOT_DURATION),
			Default::default()
		);
	}
}
30.115538
119
0.740905
deede1e9cdae033d3cbaee913e0c488c07ecbcd9
875
use crate::CellOption; use papergrid::{Entity, Grid, Settings}; /// Indent is responsible for a left/right/top/bottom indent of particular cells. /// /// ```rust,no_run /// # use tabled::{Style, Indent, Row, Table, Modify}; /// # let data: Vec<&'static str> = Vec::new(); /// let table = Table::new(&data).with(Modify::new(Row(..1)).with(Indent::new(0, 0, 1, 1))); /// ``` #[derive(Debug)] pub struct Indent(usize, usize, usize, usize); impl Indent { /// Construct's an Indent object. pub fn new(left: usize, right: usize, top: usize, bottom: usize) -> Self { Self(left, right, top, bottom) } } impl CellOption for Indent { fn change_cell(&mut self, grid: &mut Grid, row: usize, column: usize) { grid.set( &Entity::Cell(row, column), Settings::new().indent(self.0, self.1, self.2, self.3), ) } }
30.172414
96
0.597714
8a4cb0165d01e2f78121d5baa403a832e19b5b8f
21,487
use std::collections::hash_map::{HashMap, Entry}; use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use std::slice; use core::{Package, VirtualManifest, EitherManifest, SourceId}; use core::{PackageIdSpec, Dependency, Profile, Profiles}; use ops; use util::{Config, CargoResult, Filesystem, human}; use util::paths; /// The core abstraction in Cargo for working with a workspace of crates. /// /// A workspace is often created very early on and then threaded through all /// other functions. It's typically through this object that the current /// package is loaded and/or learned about. pub struct Workspace<'cfg> { config: &'cfg Config, // This path is a path to where the current cargo subcommand was invoked // from. That is, this is the `--manifest-path` argument to Cargo, and // points to the "main crate" that we're going to worry about. current_manifest: PathBuf, // A list of packages found in this workspace. Always includes at least the // package mentioned by `current_manifest`. packages: Packages<'cfg>, // If this workspace includes more than one crate, this points to the root // of the workspace. This is `None` in the case that `[workspace]` is // missing, `package.workspace` is missing, and no `Cargo.toml` above // `current_manifest` was found on the filesystem with `[workspace]`. root_manifest: Option<PathBuf>, // Shared target directory for all the packages of this workspace. // `None` if the default path of `root/target` should be used. target_dir: Option<Filesystem>, // List of members in this workspace with a listing of all their manifest // paths. The packages themselves can be looked up through the `packages` // set above. members: Vec<PathBuf>, // True, if this is a temporary workspace created for the purposes of // cargo install or cargo package. is_ephemeral: bool, } // Separate structure for tracking loaded packages (to avoid loading anything // twice), and this is separate to help appease the borrow checker. 
struct Packages<'cfg> { config: &'cfg Config, packages: HashMap<PathBuf, MaybePackage>, } enum MaybePackage { Package(Package), Virtual(VirtualManifest), } /// Configuration of a workspace in a manifest. #[derive(Debug, Clone)] pub enum WorkspaceConfig { /// Indicates that `[workspace]` was present and the members were /// optionally specified as well. Root { members: Option<Vec<String>> }, /// Indicates that `[workspace]` was present and the `root` field is the /// optional value of `package.workspace`, if present. Member { root: Option<String> }, } /// An iterator over the member packages of a workspace, returned by /// `Workspace::members` pub struct Members<'a, 'cfg: 'a> { ws: &'a Workspace<'cfg>, iter: slice::Iter<'a, PathBuf>, } impl<'cfg> Workspace<'cfg> { /// Creates a new workspace given the target manifest pointed to by /// `manifest_path`. /// /// This function will construct the entire workspace by determining the /// root and all member packages. It will then validate the workspace /// before returning it, so `Ok` is only returned for valid workspaces. pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> { let target_dir = config.target_dir()?; let mut ws = Workspace { config: config, current_manifest: manifest_path.to_path_buf(), packages: Packages { config: config, packages: HashMap::new(), }, root_manifest: None, target_dir: target_dir, members: Vec::new(), is_ephemeral: false, }; ws.root_manifest = ws.find_root(manifest_path)?; ws.find_members()?; ws.validate()?; Ok(ws) } /// Creates a "temporary workspace" from one package which only contains /// that package. /// /// This constructor will not touch the filesystem and only creates an /// in-memory workspace. That is, all configuration is ignored, it's just /// intended for that one package. /// /// This is currently only used in niche situations like `cargo install` or /// `cargo package`. 
pub fn ephemeral(package: Package, config: &'cfg Config, target_dir: Option<Filesystem>) -> CargoResult<Workspace<'cfg>> { let mut ws = Workspace { config: config, current_manifest: package.manifest_path().to_path_buf(), packages: Packages { config: config, packages: HashMap::new(), }, root_manifest: None, target_dir: None, members: Vec::new(), is_ephemeral: true, }; { let key = ws.current_manifest.parent().unwrap(); let package = MaybePackage::Package(package); ws.packages.packages.insert(key.to_path_buf(), package); ws.target_dir = if let Some(dir) = target_dir { Some(dir) } else { ws.config.target_dir()? }; ws.members.push(ws.current_manifest.clone()); } return Ok(ws) } /// Returns the current package of this workspace. /// /// Note that this can return an error if it the current manifest is /// actually a "virtual Cargo.toml", in which case an error is returned /// indicating that something else should be passed. pub fn current(&self) -> CargoResult<&Package> { self.current_opt().ok_or_else(|| human(format!("manifest path `{}` is a virtual manifest, but this \ command requires running against an actual package in \ this workspace", self.current_manifest.display())) ) } pub fn current_opt(&self) -> Option<&Package> { match *self.packages.get(&self.current_manifest) { MaybePackage::Package(ref p) => Some(p), MaybePackage::Virtual(..) => None } } /// Returns the `Config` this workspace is associated with. pub fn config(&self) -> &'cfg Config { self.config } pub fn profiles(&self) -> &Profiles { let root = self.root_manifest.as_ref().unwrap_or(&self.current_manifest); match *self.packages.get(root) { MaybePackage::Package(ref p) => p.manifest().profiles(), MaybePackage::Virtual(ref m) => m.profiles(), } } /// Returns the root path of this workspace. /// /// That is, this returns the path of the directory containing the /// `Cargo.toml` which is the root of this workspace. 
pub fn root(&self) -> &Path { match self.root_manifest { Some(ref p) => p, None => &self.current_manifest }.parent().unwrap() } pub fn target_dir(&self) -> Filesystem { self.target_dir.clone().unwrap_or_else(|| { Filesystem::new(self.root().join("target")) }) } /// Returns the root [replace] section of this workspace. /// /// This may be from a virtual crate or an actual crate. pub fn root_replace(&self) -> &[(PackageIdSpec, Dependency)] { let path = match self.root_manifest { Some(ref p) => p, None => &self.current_manifest, }; match *self.packages.get(path) { MaybePackage::Package(ref p) => p.manifest().replace(), MaybePackage::Virtual(ref v) => v.replace(), } } /// Returns an iterator over all packages in this workspace pub fn members<'a>(&'a self) -> Members<'a, 'cfg> { Members { ws: self, iter: self.members.iter(), } } pub fn is_ephemeral(&self) -> bool { self.is_ephemeral } /// Finds the root of a workspace for the crate whose manifest is located /// at `manifest_path`. /// /// This will parse the `Cargo.toml` at `manifest_path` and then interpret /// the workspace configuration, optionally walking up the filesystem /// looking for other workspace roots. /// /// Returns an error if `manifest_path` isn't actually a valid manifest or /// if some other transient error happens. fn find_root(&mut self, manifest_path: &Path) -> CargoResult<Option<PathBuf>> { fn read_root_pointer(member_manifest: &Path, root_link: &str) -> CargoResult<PathBuf> { let path = member_manifest.parent().unwrap() .join(root_link) .join("Cargo.toml"); debug!("find_root - pointer {}", path.display()); return Ok(paths::normalize_path(&path)) }; { let current = self.packages.load(&manifest_path)?; match *current.workspace_config() { WorkspaceConfig::Root { .. 
} => { debug!("find_root - is root {}", manifest_path.display()); return Ok(Some(manifest_path.to_path_buf())) } WorkspaceConfig::Member { root: Some(ref path_to_root) } => { return Ok(Some(read_root_pointer(manifest_path, path_to_root)?)) } WorkspaceConfig::Member { root: None } => {} } } let mut cur = manifest_path.parent().and_then(|p| p.parent()); while let Some(path) = cur { let manifest = path.join("Cargo.toml"); debug!("find_root - trying {}", manifest.display()); if manifest.exists() { match *self.packages.load(&manifest)?.workspace_config() { WorkspaceConfig::Root { .. } => { debug!("find_root - found"); return Ok(Some(manifest)) } WorkspaceConfig::Member { root: Some(ref path_to_root) } => { return Ok(Some(read_root_pointer(&manifest, path_to_root)?)) } WorkspaceConfig::Member { .. } => {} } } cur = path.parent(); } Ok(None) } /// After the root of a workspace has been located, probes for all members /// of a workspace. /// /// If the `workspace.members` configuration is present, then this just /// verifies that those are all valid packages to point to. Otherwise, this /// will transitively follow all `path` dependencies looking for members of /// the workspace. 
fn find_members(&mut self) -> CargoResult<()> {
    let root_manifest = match self.root_manifest {
        Some(ref path) => path.clone(),
        None => {
            // No workspace root: this crate is its own sole member.
            debug!("find_members - only me as a member");
            self.members.push(self.current_manifest.clone());
            return Ok(())
        }
    };
    // Read the `workspace.members` list (if any) from the root manifest.
    let members = {
        let root = self.packages.load(&root_manifest)?;
        match *root.workspace_config() {
            WorkspaceConfig::Root { ref members } => members.clone(),
            _ => bail!("root of a workspace inferred but wasn't a root: {}",
                       root_manifest.display()),
        }
    };

    if let Some(list) = members {
        // Explicit member list: resolve each entry relative to the root
        // manifest's directory and pull in its path dependencies.
        for path in list {
            let root = root_manifest.parent().unwrap();
            let manifest_path = root.join(path).join("Cargo.toml");
            self.find_path_deps(&manifest_path, false)?;
        }
    }

    // The root itself (and its path dependencies) is always a member.
    self.find_path_deps(&root_manifest, false)
}

/// Adds `manifest_path` (and, transitively, its `path` dependencies) to
/// the member list.
///
/// `is_path_dep` is true when this manifest was reached by following a
/// `path` dependency rather than an explicit `workspace.members` entry;
/// such manifests are skipped when they live outside the workspace and do
/// not resolve to this workspace's root.
fn find_path_deps(&mut self, manifest_path: &Path, is_path_dep: bool) -> CargoResult<()> {
    let manifest_path = paths::normalize_path(manifest_path);
    // Already recorded — avoid infinite recursion on dependency cycles.
    if self.members.iter().any(|p| p == &manifest_path) {
        return Ok(())
    }
    if is_path_dep
        && !manifest_path.parent().unwrap().starts_with(self.root())
        && self.find_root(&manifest_path)? != self.root_manifest {
        // If `manifest_path` is a path dependency outside of the workspace,
        // don't add it, or any of its dependencies, as members.
        return Ok(())
    }

    debug!("find_members - {}", manifest_path.display());
    self.members.push(manifest_path.clone());

    // Collect the manifest paths of this package's `path` dependencies.
    // (A virtual manifest has no dependencies of its own.)
    let candidates = {
        let pkg = match *self.packages.load(&manifest_path)? {
            MaybePackage::Package(ref p) => p,
            MaybePackage::Virtual(_) => return Ok(()),
        };
        pkg.dependencies()
           .iter()
           .map(|d| d.source_id())
           .filter(|d| d.is_path())
           .filter_map(|d| d.url().to_file_path().ok())
           .map(|p| p.join("Cargo.toml"))
           .collect::<Vec<_>>()
    };
    for candidate in candidates {
        self.find_path_deps(&candidate, true)?;
    }
    Ok(())
}

/// Validates a workspace, ensuring that a number of invariants are upheld:
///
/// 1. A workspace only has one root.
/// 2. All workspace members agree on this one root as the root.
/// 3. The current crate is a member of this workspace.
fn validate(&mut self) -> CargoResult<()> {
    // Nothing to validate for a workspace with no explicit root.
    if self.root_manifest.is_none() {
        return Ok(())
    }

    // Invariant 1: exactly one member may be configured as a root, and no
    // two packages may share a name.
    let mut roots = Vec::new();
    {
        let mut names = BTreeMap::new();
        for member in self.members.iter() {
            let package = self.packages.get(member);
            match *package.workspace_config() {
                WorkspaceConfig::Root { .. } => {
                    roots.push(member.parent().unwrap().to_path_buf());
                }
                WorkspaceConfig::Member { .. } => {}
            }
            let name = match *package {
                MaybePackage::Package(ref p) => p.name(),
                MaybePackage::Virtual(_) => continue,
            };
            if let Some(prev) = names.insert(name, member) {
                bail!("two packages named `{}` in this workspace:\n\
                       - {}\n\
                       - {}", name, prev.display(), member.display());
            }
        }
    }

    match roots.len() {
        0 => {
            bail!("`package.workspace` configuration points to a crate \
                   which is not configured with [workspace]: \n\
                   configuration at: {}\n\
                   points to: {}",
                  self.current_manifest.display(),
                  self.root_manifest.as_ref().unwrap().display())
        }
        1 => {}
        _ => {
            bail!("multiple workspace roots found in the same workspace:\n{}",
                  roots.iter()
                       .map(|r| format!(" {}", r.display()))
                       .collect::<Vec<_>>()
                       .join("\n"));
        }
    }

    // Invariant 2: every member must resolve to this workspace's root.
    for member in self.members.clone() {
        let root = self.find_root(&member)?;
        if root == self.root_manifest {
            continue
        }

        match root {
            Some(root) => {
                bail!("package `{}` is a member of the wrong workspace\n\
                       expected: {}\n\
                       actual: {}",
                      member.display(),
                      self.root_manifest.as_ref().unwrap().display(),
                      root.display());
            }
            None => {
                bail!("workspace member `{}` is not hierarchically below \
                       the workspace root `{}`",
                      member.display(),
                      self.root_manifest.as_ref().unwrap().display());
            }
        }
    }

    // Invariant 3: the current crate must itself be a member. If it isn't,
    // build the most helpful error message we can for how to fix it.
    if !self.members.contains(&self.current_manifest) {
        let root = self.root_manifest.as_ref().unwrap();
        let root_dir = root.parent().unwrap();
        let current_dir = self.current_manifest.parent().unwrap();
        let root_pkg = self.packages.get(root);

        // Suggest the relative path when the crate is under the root dir.
        let members_msg = match current_dir.strip_prefix(root_dir) {
            Ok(rel) => {
                format!("this may be fixable by adding `{}` to the \
                         `workspace.members` array of the manifest \
                         located at: {}", rel.display(), root.display())
            }
            Err(_) => {
                format!("this may be fixable by adding a member to \
                         the `workspace.members` array of the \
                         manifest located at: {}", root.display())
            }
        };
        let extra = match *root_pkg {
            MaybePackage::Virtual(_) => members_msg,
            MaybePackage::Package(ref p) => {
                let members = match *p.manifest().workspace_config() {
                    WorkspaceConfig::Root { ref members } => members,
                    // Invariant 2 above guarantees the root is a `Root`.
                    WorkspaceConfig::Member { .. } => unreachable!(),
                };
                if members.is_none() {
                    // No explicit member list: membership comes from the
                    // dependency graph, so suggest a dependency edge.
                    format!("this may be fixable by ensuring that this \
                             crate is depended on by the workspace \
                             root: {}", root.display())
                } else {
                    members_msg
                }
            }
        };
        bail!("current package believes it's in a workspace when it's not:\n\
               current: {}\n\
               workspace: {}\n\n{}",
              self.current_manifest.display(),
              root.display(),
              extra);
    }

    // Warn about per-package profile sections in non-root members; only
    // the workspace root's profiles take effect.
    if let Some(ref root_manifest) = self.root_manifest {
        let default_profiles = Profiles {
            release: Profile::default_release(),
            dev: Profile::default_dev(),
            test: Profile::default_test(),
            test_deps: Profile::default_dev(),
            bench: Profile::default_bench(),
            bench_deps: Profile::default_release(),
            doc: Profile::default_doc(),
            custom_build: Profile::default_custom_build(),
            check: Profile::default_check(),
            doctest: Profile::default_doctest(),
        };

        for pkg in self.members().filter(|p| p.manifest_path() != root_manifest) {
            if pkg.manifest().profiles() != &default_profiles {
                let message = &format!("profiles for the non root package will be ignored, \
                                        specify profiles at the workspace root:\n\
                                        package: {}\n\
                                        workspace: {}",
                                       pkg.manifest_path().display(),
                                       root_manifest.display());
                //TODO: remove `Eq` bound from `Profiles` when the warning is removed.
                self.config.shell().warn(&message)?;
            }
        }
    }

    Ok(())
}
}

impl<'cfg> Packages<'cfg> {
    /// Looks up an already-loaded manifest by its path.
    /// Panics if the manifest was never loaded (keyed by parent directory).
    fn get(&self, manifest_path: &Path) -> &MaybePackage {
        &self.packages[manifest_path.parent().unwrap()]
    }

    /// Loads and caches the manifest at `manifest_path`, keyed by the
    /// manifest's parent directory; subsequent loads return the cached copy.
    fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> {
        let key = manifest_path.parent().unwrap();
        match self.packages.entry(key.to_path_buf()) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(v) => {
                let source_id = SourceId::for_path(key)?;
                let pair = ops::read_manifest(&manifest_path, &source_id, self.config)?;
                let (manifest, _nested_paths) = pair;
                Ok(v.insert(match manifest {
                    EitherManifest::Real(manifest) => {
                        MaybePackage::Package(Package::new(manifest, manifest_path))
                    }
                    EitherManifest::Virtual(v) => {
                        MaybePackage::Virtual(v)
                    }
                }))
            }
        }
    }
}

impl<'a, 'cfg> Members<'a, 'cfg> {
    /// Returns true when the iterator yields no (real) packages.
    /// Note this consumes the iterator.
    pub fn is_empty(self) -> bool {
        self.count() == 0
    }
}

impl<'a, 'cfg> Iterator for Members<'a, 'cfg> {
    type Item = &'a Package;

    fn next(&mut self) -> Option<&'a Package> {
        // Yield only real packages, skipping virtual manifests.
        loop {
            let next = self.iter.next().map(|path| {
                self.ws.packages.get(path)
            });
            match next {
                Some(&MaybePackage::Package(ref p)) => return Some(p),
                Some(&MaybePackage::Virtual(_)) => {}
                None => return None,
            }
        }
    }
}

impl MaybePackage {
    /// Returns the workspace configuration regardless of whether this is a
    /// real package or a virtual manifest.
    fn workspace_config(&self) -> &WorkspaceConfig {
        match *self {
            MaybePackage::Virtual(ref v) => v.workspace_config(),
            MaybePackage::Package(ref v) => v.manifest().workspace_config(),
        }
    }
}
38.369643
96
0.521618
726562d8087d125ef6c7bde7d7fbb20dd666c186
3,582
// Copyright 2019 MesaTEE Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use lazy_static::lazy_static; use mesatee_sdk::{Mesatee, MesateeEnclaveInfo}; use serde_derive::Serialize; use serde_json; use std::net::SocketAddr; use std::{env, fs}; lazy_static! { static ref TMS_ADDR: SocketAddr = "127.0.0.1:5554".parse().unwrap(); static ref TDFS_ADDR: SocketAddr = "127.0.0.1:5065".parse().unwrap(); } #[derive(Serialize)] pub(crate) struct GenLinearModelPayload { input_model_columns: usize, input_model_data: String, target_model_data: String, test_data: String, } fn print_usage() { let msg = " ./gen_linear_model input_model_data_columns input_model_data_path target_model_data_path test_data_path input_model_data format: f32,f32,f32,f32 ... f32,f32,f32,f32 ... ... target_model_data format: 1. 0. 1. 0. 0. 1. .... test_data format: f32,f32,f32,f32 ... 
"; println!("usage: \n{}", msg); } fn main() { let auditors = vec![ ( "../auditors/godzilla/godzilla.public.der", "../auditors/godzilla/godzilla.sign.sha256", ), ( "../auditors/optimus_prime/optimus_prime.public.der", "../auditors/optimus_prime/optimus_prime.sign.sha256", ), ( "../auditors/albus_dumbledore/albus_dumbledore.public.der", "../auditors/albus_dumbledore/albus_dumbledore.sign.sha256", ), ]; let args: Vec<String> = env::args().collect(); if args.len() != 5 { print_usage(); return; } let columns = args[1].parse().unwrap(); let input_model_data_path = &args[2]; let target_model_data_path = &args[3]; let test_date_path = &args[4]; let input_model_data_bytes = fs::read(&input_model_data_path).unwrap(); let input_model_data_str = String::from_utf8(input_model_data_bytes).unwrap(); let target_model_data_bytes = fs::read(&target_model_data_path).unwrap(); let target_model_data_str = String::from_utf8(target_model_data_bytes).unwrap(); let test_data_bytes = fs::read(&test_date_path).unwrap(); let test_data_str = String::from_utf8(test_data_bytes).unwrap(); let input_payload = GenLinearModelPayload { input_model_columns: columns, input_model_data: input_model_data_str, target_model_data: target_model_data_str, test_data: test_data_str, }; let input_string = serde_json::to_string(&input_payload).unwrap(); let enclave_info_file_path = "../out/enclave_info.txt"; let mesatee_enclave_info = MesateeEnclaveInfo::load(auditors, enclave_info_file_path).unwrap(); let mesatee = Mesatee::new( &mesatee_enclave_info, "uid1", "token1", *TMS_ADDR, *TDFS_ADDR, ) .unwrap(); let task = mesatee.create_task("gen_linear_model").unwrap(); let result = task.invoke_with_payload(&input_string).unwrap(); println!("result:{}", result) }
31.147826
108
0.65885
cc680ba7a41c847d62cdab36225f00c58daaf9f8
134
//! Backends for `ugly`. //! //! At the time of writing, only one backend exists: SDL2. #[cfg(feature = "backend_sdl")] pub mod sdl;
19.142857
58
0.649254
4aef6a5fa61363d62a1da568d69b5f8db7646d87
475
use super::*; #[test] fn case_sensitive() { let query = "duct"; let contents = "\ Rust: safe, fast, productive. Pick three. Duct tape."; assert_eq!(vec!["safe, fast, productive."], search(query, contents)); } #[test] fn case_insensitive() { let query = "rUsT"; let contents = "\ Rust: safe, fast, productive. Pick three. Trust me."; assert_eq!(vec!["Rust:", "Trust me."], search_case_insensitive(query, contents)); }
19.791667
85
0.591579