filename
stringlengths
3
67
data
stringlengths
0
58.3M
license
stringlengths
0
19.5k
test_zil.ml
open Rresult open Ledgerwallet_zil open Alcotest let vendor_id = 0x2C97 let product_id = 0x1015 let fail_on_error = function | None -> Alcotest.fail "Found no ledger." | Some (Result.Ok ()) -> () | Some (Result.Error e) -> Alcotest.fail (Format.asprintf "Ledger error: %a" Ledgerwallet.Transport.pp_error e) let with_connection f = fail_on_error (Ledgerwallet.Transport.with_connection_id ~vendor_id ~product_id f) let test_open_close () = with_connection (fun _ -> R.ok ()) let test_ping () = with_connection Ledgerwallet.Transport.ping let hard x = Int32.logor x 0x8000_0000l let test_getversion () = with_connection (fun h -> get_version h >>| fun (ma, mi, pa) -> Printf.printf "%d.%d.%d" ma mi pa) let test_getpk ~display_addr () = with_connection (fun h -> get_pk ~display_addr h 0l >>| fun (pk, addr) -> match Bech32.Segwit.encode addr with | Error msg -> fail msg | Ok v -> Format.printf "%a %s" Hex.pp (Hex.of_cstruct pk) v) (* let path = [ * hard 44l ; hard 1729l * ] * * let msg = Cstruct.of_string "Voulez-vous coucher avec moi, ce soir ?" * let msg_ba = Cstruct.to_bigarray msg * * let test_getpk h curve = * let pk = get_public_key h curve path in * Alcotest.(check int "pklen" * (if curve = Ed25519 then 33 else 65) (Cstruct.length pk)) * * let test_getpk () = * let h = Hidapi.open_id_exn ~vendor_id ~product_id in * List.iter (test_getpk h) curves ; * Hidapi.close h * * let test_sign h curve = * let open Alcotest in * let pk = get_public_key h curve path in * let signature = sign h curve path msg in * match curve with * | Ed25519 -> * let pk = Monocypher.Sign.(pk_of_cstruct_exn (Cstruct.sub pk 1 pkbytes)) in * check bool "sign Ed25519" true * (Tweetnacl.Sign.verify_detached ~key:pk ~signature msg) * | Secp256k1 -> begin * let pk = Cstruct.to_bigarray pk in * let signature = Cstruct.to_bigarray signature in * match Uecc.(pk_of_bytes secp256k1 pk) with * | None -> assert false * | Some pk -> * check bool "sign Secp256k1" true (Uecc.verify pk ~msg:msg_ba ~signature) * end * | Secp256r1 -> begin * let pk = Cstruct.to_bigarray pk in * let signature = Cstruct.to_bigarray signature in * match Uecc.(pk_of_bytes secp256r1 pk) with * | None -> assert false * | Some pk -> * check bool "sign Secp256r1" true (Uecc.verify pk ~msg:msg_ba ~signature) * end * * let test_sign () = * let h = Hidapi.open_id_exn ~vendor_id ~product_id in * (\* List.iter (test_sign h) curves ; *\) * (\* List.iter (test_sign h) [Secp256k1] ; *\) * Hidapi.close h *) let basic = [ (* "open_close", `Quick, test_open_close ; * "ping", `Quick, test_ping ; *) ("version", `Quick, test_getversion); (* "getpk", `Quick, (test_getpk ~display_addr:false) ; *) ("getaddr", `Quick, test_getpk ~display_addr:true) (* "get_public_key", `Quick, test_getpk ; * "sign", `Quick, test_sign ; *); ] let () = Alcotest.run "ledgerwallet.zil" [("basic", basic)]
test_bandersnatch_affine_weierstrass.ml
module BandersnatchAffineValueGeneration = Mec.Curve.Utils.PBT.MakeValueGeneration (Mec.Curve.Bandersnatch.AffineWeierstrass) module BandersnatchAffineEquality = Mec.Curve.Utils.PBT.MakeEquality (Mec.Curve.Bandersnatch.AffineWeierstrass) module BandersnatchAffineECProperties = Mec.Curve.Utils.PBT.MakeECProperties (Mec.Curve.Bandersnatch.AffineWeierstrass) module BandersnatchAffineRepresentation = Mec.Curve.Utils.PBT.MakeCompressedSerialisationAffine (Mec.Curve.Bandersnatch.AffineWeierstrass) let () = let open Alcotest in run ~verbose:true "Bandersnatch Weierstrass form, affine coordinates" [ BandersnatchAffineValueGeneration.get_tests (); BandersnatchAffineEquality.get_tests (); BandersnatchAffineECProperties.get_tests (); BandersnatchAffineRepresentation.get_tests () ]
dune
(library (public_name parser_javascript.ast) (name parser_javascript_ast) (wrapped false) (libraries commons lib_parsing ast_generic ; just to factorize the operator ) (preprocess (pps ppx_deriving.show)) )
main_baker_015_PtLimaPt.ml
let () = Client_commands.register Protocol.hash @@ fun _network -> List.map (Tezos_clic.map_command (new Protocol_client_context.wrap_full)) @@ Baking_commands.baker_commands () let select_commands _ _ = return (List.map (Tezos_clic.map_command (new Protocol_client_context.wrap_full)) (Baking_commands.baker_commands ())) (* This call is not strictly necessary as the parameters are initialized lazily the first time a Sapling operation (validation or forging) is done. This is what the client does. For a long running binary however it is important to make sure that the parameters files are there at the start and avoid failing much later while validating an operation. Plus paying this cost upfront means that the first validation will not be more expensive. *) let () = Tezos_sapling.Core.Validator.init_params () let () = Client_main_run.run (module Daemon_config) ~select_commands
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2019 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
typing.mli
open Algdt_types type cfg_prod = CProd | CSum of cnstr module AlgDtCfgSpec : sig type t = unit and nt = tp and symbol = NT of nt | T of t and prod = cfg_prod val compare_t : t -> t -> int val compare_nt : nt -> nt -> int val compare_prod : prod -> prod -> int end module MyCfg : Cfg.Cfg_intf.CFG with module Spec = AlgDtCfgSpec val calc_pre_ispec_info : (cnstr_name, tp_name) type_defs -> pre_ispec_info val cfg_of_pre_ispec_info : pre_ispec_info -> MyCfg.grammar val calc_ispec_info : MyCfg.live_grammar -> tp -> pre_ispec_info -> ispec_info val flatten_ispec : ispec -> fspec * tps
(* AIFAD - Automated Induction of Functions over Algebraic Datatypes Author: Markus Mottl email: markus.mottl@gmail.com WWW: http://www.ocaml.info Copyright (C) 2002 Austrian Research Institute for Artificial Intelligence Copyright (C) 2003- Markus Mottl This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *)
helpers.ml
let rec repeat n f = if n < 0 then () else ( f (); repeat (n - 1) f) let must_fail f = let exception Local in try (try f () with _ -> raise Local); assert false with | Local -> () | _ -> assert false
(*****************************************************************************) (* *) (* MIT License *) (* Copyright (c) 2022 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dune
(library (name backtrace) (public_name xapi-backtrace) (flags (:standard -w -39-32)) (libraries rpclib rpclib.json threads) (preprocess (pps ppx_deriving_rpc ppx_sexp_conv)))
unsafe_pre408.ml
external set_int32_ne : bytes -> int -> int32 -> unit = "%caml_bytes_set32" external get_int64_ne : bytes -> int -> int64 = "%caml_bytes_get64" external swap32 : int32 -> int32 = "%bswap_int32" external swap64 : int64 -> int64 = "%bswap_int64" let set_int32_le b i x = if Sys.big_endian then set_int32_ne b i (swap32 x) else set_int32_ne b i x let get_int64_le b i = if Sys.big_endian then swap64 (get_int64_ne b i) else get_int64_ne b i
storage_functors.mli
(** Tezos Protocol Implementation - Typed storage builders. *) open Storage_sigs module Registered : REGISTER module Ghost : REGISTER module Make_subcontext (R : REGISTER) (C : Raw_context.T) (N : NAME) : Raw_context.T with type t = C.t module Make_single_data_storage (R : REGISTER) (C : Raw_context.T) (N : NAME) (V : VALUE) : Single_data_storage with type t = C.t and type value = V.t module type INDEX = sig type t val path_length : int val to_path : t -> string list -> string list val of_path : string list -> t option type 'a ipath val args : ('a, t, 'a ipath) Storage_description.args end module Pair (I1 : INDEX) (I2 : INDEX) : INDEX with type t = I1.t * I2.t module Make_data_set_storage (C : Raw_context.T) (I : INDEX) : Data_set_storage with type t = C.t and type elt = I.t module Make_carbonated_data_set_storage (C : Raw_context.T) (I : INDEX) : Carbonated_data_set_storage with type t = C.t and type elt = I.t module Make_indexed_data_storage (C : Raw_context.T) (I : INDEX) (V : VALUE) : Indexed_data_storage with type t = C.t and type key = I.t and type value = V.t module Make_indexed_carbonated_data_storage (C : Raw_context.T) (I : INDEX) (V : VALUE) : Non_iterable_indexed_carbonated_data_storage with type t = C.t and type key = I.t and type value = V.t module Make_indexed_data_snapshotable_storage (C : Raw_context.T) (Snapshot : INDEX) (I : INDEX) (V : VALUE) : Indexed_data_snapshotable_storage with type t = C.t and type snapshot = Snapshot.t and type key = I.t and type value = V.t module Make_indexed_subcontext (C : Raw_context.T) (I : INDEX) : Indexed_raw_context with type t = C.t and type key = I.t and type 'a ipath = 'a I.ipath module type WRAPPER = sig type t type key val wrap : t -> key val unwrap : key -> t option end module Wrap_indexed_data_storage (C : Indexed_data_storage) (K : WRAPPER with type key := C.key) : Indexed_data_storage with type t = C.t and type key = K.t and type value = C.value
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2019-2020 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
gWindow.mli
(**************************************************************************) (* Lablgtk *) (* *) (* This program is free software; you can redistribute it *) (* and/or modify it under the terms of the GNU Library General *) (* Public License as published by the Free Software Foundation *) (* version 2, with the exception described in file COPYING which *) (* comes with the library. *) (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Library General Public License for more details. *) (* *) (* You should have received a copy of the GNU Library General *) (* Public License along with this program; if not, write to the *) (* Free Software Foundation, Inc., 59 Temple Place, Suite 330, *) (* Boston, MA 02111-1307 USA *) (* *) (* *) (**************************************************************************) (* $Id$ *) open Gtk open GObj (** Windows *) (** {3 GtkWindow} *) (** @gtkdoc gtk GtkWindow *) class window_skel : 'a obj -> object inherit GContainer.bin constraint 'a = [> Gtk.window] val obj : 'a obj method activate_default : unit -> bool method activate_focus : unit -> bool method add_accel_group : accel_group -> unit method as_window : Gtk.window obj method deiconify : unit -> unit method event : event_ops method iconify : unit -> unit method move : x:int -> y:int -> unit method parse_geometry : string -> bool method present : unit -> unit method resize : width:int -> height:int -> unit method show : unit -> unit method set_accept_focus : bool -> unit method set_allow_grow : bool -> unit method set_allow_shrink : bool -> unit method set_decorated : bool -> unit method set_default_height : int -> unit method set_default_size : width:int -> height:int -> unit method set_default_width : int -> unit method set_deletable : bool -> unit method set_destroy_with_parent : bool -> unit method set_focus_on_map : bool -> unit method set_geometry_hints : ?min_size:int * int -> ?max_size:int * int -> ?base_size:int * int -> ?aspect:float * float -> ?resize_inc:int * int -> ?win_gravity:Gdk.Tags.gravity -> ?pos:bool -> ?user_pos:bool -> ?user_size:bool -> GObj.widget -> unit method set_gravity : Gdk.Tags.gravity -> unit method set_icon : GdkPixbuf.pixbuf option -> unit method set_icon_name : string -> unit method set_modal : bool -> unit method set_opacity : float -> unit method set_position : Tags.window_position -> unit method set_resizable : bool -> unit method set_role : string -> unit method set_screen : Gdk.screen -> unit method set_skip_pager_hint : bool -> unit method set_skip_taskbar_hint : bool -> unit method set_title : string -> unit method set_transient_for : Gtk.window obj -> unit method set_type_hint : Gdk.Tags.window_type_hint -> unit method set_wm_class : string -> unit method set_wm_name : string -> unit method accept_focus : bool method allow_grow : bool method allow_shrink : bool method decorated : bool method default_height : int method default_width : int method deletable : bool method destroy_with_parent : bool method focus_on_map : bool method gravity : GdkEnums.gravity method has_toplevel_focus : bool method icon : GdkPixbuf.pixbuf option method icon_name : string method is_active : bool method kind : Tags.window_type method modal : bool method opacity : float method position : Tags.window_position method resizable : bool method role : string method screen : Gdk.screen method skip_pager_hint : bool method skip_taskbar_hint : bool method title : string method type_hint : Gdk.Tags.window_type_hint method set_urgency_hint : bool -> unit (** since Gtk 2.8 *) method urgency_hint : bool (** since Gtk 2.8 *) end (** Toplevel widget which can contain other widgets @gtkdoc gtk GtkWindow *) class window : ([> Gtk.window] as 'a) obj -> object inherit window_skel val obj : 'a obj method connect : GContainer.container_signals method fullscreen : unit -> unit (** @since GTK 2.2 *) method maximize : unit -> unit method stick : unit -> unit method unfullscreen : unit -> unit (** @since GTK 2.2 *) method unmaximize : unit -> unit method unstick : unit -> unit end (** @gtkdoc gtk GtkWindow *) val window : ?kind:Tags.window_type -> ?title:string -> ?allow_grow:bool -> ?allow_shrink:bool -> ?decorated:bool -> ?deletable:bool -> ?focus_on_map:bool -> ?icon:GdkPixbuf.pixbuf -> ?icon_name:string -> ?modal:bool -> ?position:Tags.window_position -> ?resizable:bool -> ?screen:Gdk.screen -> ?type_hint:Gdk.Tags.window_type_hint -> ?urgency_hint:bool -> ?wm_name:string -> ?wm_class:string -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> window (** @param kind default value is [`TOPLEVEL] @param allow_grow default value is [true] @param allow_shrink default value is [false] @param modal default value is [false] @param resizable default value is [true] @param type_hint default value is [`NORMAL] @param position default value is [`NONE] *) val toplevel : #widget -> window option (** return the toplevel window of this widget, if existing *) (** {3 GtkDialog} *) (** @gtkdoc gtk GtkDialog *) class ['a] dialog_signals : ([> Gtk.dialog] as 'b) obj -> decode:(int -> 'a) -> object inherit GContainer.container_signals val obj : 'b obj method response : callback:('a -> unit) -> GtkSignal.id method close : callback:(unit -> unit) -> GtkSignal.id end (** @gtkdoc gtk GtkDialog *) class ['a] dialog_skel : ([>Gtk.dialog] as 'b) obj -> object constraint 'a = [> `DELETE_EVENT] inherit window_skel val obj : 'b obj method action_area : GPack.button_box method event : event_ops method vbox : GPack.box method response : 'a -> unit method set_response_sensitive : 'a -> bool -> unit method set_default_response : 'a -> unit method has_separator : bool method set_has_separator : bool -> unit method run : unit -> 'a method private encode : 'a -> int method private decode : int -> 'a end (** Create popup windows @gtkdoc gtk GtkDialog *) class ['a] dialog_ext : ([> Gtk.dialog] as 'b) obj -> object inherit ['a] dialog_skel val obj : 'b obj method add_button : string -> 'a -> unit method add_button_stock : GtkStock.id -> 'a -> unit end (** Create popup windows @gtkdoc gtk GtkDialog *) class ['a] dialog : [> Gtk.dialog] obj -> object inherit ['a] dialog_ext val obj : Gtk.dialog obj method connect : 'a dialog_signals end (** @gtkdoc gtk GtkDialog *) val dialog : ?no_separator:bool -> ?parent:#window_skel -> ?destroy_with_parent:bool -> ?title:string -> ?allow_grow:bool -> ?allow_shrink:bool -> ?decorated:bool -> ?deletable:bool -> ?focus_on_map:bool -> ?icon:GdkPixbuf.pixbuf -> ?icon_name:string -> ?modal:bool -> ?position:Tags.window_position -> ?resizable:bool -> ?screen:Gdk.screen -> ?type_hint:Gdk.Tags.window_type_hint -> ?urgency_hint:bool -> ?wm_name:string -> ?wm_class:string -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> 'a dialog (** @param no_separator default value is [false] @param destroy_with_parent default value is [false] *) (** Variation for safe typing *) type any_response = [GtkEnums.response | `OTHER of int] class dialog_any : [> Gtk.dialog] obj -> [any_response] dialog (** {3 GtkMessageDialog} *) type 'a buttons module Buttons : sig val ok : [>`OK] buttons val close : [>`CLOSE] buttons val yes_no : [>`YES|`NO] buttons val ok_cancel : [>`OK|`CANCEL] buttons type color_selection = [`OK | `CANCEL | `HELP | `DELETE_EVENT] type file_selection = [`OK | `CANCEL | `HELP | `DELETE_EVENT] type font_selection = [`OK | `CANCEL | `APPLY | `DELETE_EVENT] type about = [ `CANCEL | `CLOSE | `DELETE_EVENT ] end (** Convenient message window @gtkdoc gtk GtkMessageDialog *) class type ['a] message_dialog = object inherit ['a] dialog_skel val obj : [> Gtk.message_dialog] obj method connect : 'a dialog_signals method set_markup : string -> unit (** @since GTK 2.4 *) method message_type : Tags.message_type method set_message_type : Tags.message_type -> unit end (** @gtkdoc gtk GtkMessageDialog *) val message_dialog : ?message:string -> ?use_markup:bool -> message_type:Tags.message_type -> buttons:'a buttons -> ?parent:#window_skel -> ?destroy_with_parent:bool -> ?title:string -> ?allow_grow:bool -> ?allow_shrink:bool -> ?decorated:bool -> ?deletable:bool -> ?focus_on_map:bool -> ?icon:GdkPixbuf.pixbuf -> ?icon_name:string -> ?modal:bool -> ?position:Tags.window_position -> ?resizable:bool -> ?screen:Gdk.screen -> ?type_hint:Gdk.Tags.window_type_hint -> ?urgency_hint:bool -> ?wm_name:string -> ?wm_class:string -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> 'a message_dialog (** {3 GtkAboutDialog} *) (** @gtkdoc gtk GtkAboutDialog @since GTK 2.6 *) class about_dialog : ([> Gtk.about_dialog] as 'a) Gtk.obj -> object inherit [Buttons.about] dialog_skel val obj : 'a Gtk.obj method connect : Buttons.about dialog_signals method artists : string list method authors : string list method comments : string method copyright : string method documenters : string list method license : string method logo : GdkPixbuf.pixbuf method logo_icon_name : string (** The property [name] is left unchanged, but it will access [program-name] if version is higher than 2.12 *) method name : string method translator_credits : string method version : string method website : string method website_label : string method wrap_license : bool method set_artists : string list -> unit method set_authors : string list -> unit method set_comments : string -> unit method set_copyright : string -> unit method set_documenters : string list -> unit method set_license : string -> unit method set_logo : GdkPixbuf.pixbuf -> unit method set_logo_icon_name : string -> unit method set_name : string -> unit method set_translator_credits : string -> unit method set_version : string -> unit method set_website : string -> unit method set_website_label : string -> unit method set_wrap_license : bool -> unit end (** Display information about an application. In GTK+ 2.6.x and 2.8.x, a default handler is already connected to the [response] signal. It simply hides the dialog. This is no longer the case since GTK+ 2.10.x though. You could use it like this: {[let about_dialog = ref (fun () -> raise Not_found) let show_dialog () = try !about_dialog () with Not_found -> let dialog = GWindow.about_dialog ~name:"..." (* etc. *) () in about_dialog := dialog#present ; dialog#show () ]} @gtkdoc gtk GtkAboutDialog @since GTK 2.6 *) val about_dialog : ?name:string -> ?authors:string list -> ?comments:string -> ?copyright:string -> ?license:string -> ?logo:GdkPixbuf.pixbuf -> ?logo_icon_name:string -> ?translator_credits:string -> ?version:string -> ?website:string -> ?website_label:string -> ?wrap_license:bool -> ?parent:#window_skel -> ?destroy_with_parent:bool -> ?title:string -> ?allow_grow:bool -> ?allow_shrink:bool -> ?decorated:bool -> ?deletable:bool -> ?focus_on_map:bool -> ?icon:GdkPixbuf.pixbuf -> ?icon_name:string -> ?modal:bool -> ?position:Tags.window_position -> ?resizable:bool -> ?screen:Gdk.screen -> ?type_hint:Gdk.Tags.window_type_hint -> ?urgency_hint:bool -> ?wm_name:string -> ?wm_class:string -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> about_dialog (** {3 File Chooser Dialog} *) (** @since GTK 2.4 @gtkdoc gtk GtkFileChooserDialog *) class ['a] file_chooser_dialog_signals : ([> Gtk.file_chooser|Gtk.dialog] as 'b) Gtk.obj -> decode:(int -> 'a) -> object inherit ['a] dialog_signals inherit GFile.chooser_signals val obj : 'b Gtk.obj end (** @since GTK 2.4 @gtkdoc gtk GtkFileChooserDialog *) class ['a] file_chooser_dialog : ([> Gtk.file_chooser|Gtk.dialog] as 'b) Gtk.obj -> object inherit ['a] dialog_ext inherit GFile.chooser val obj : 'b Gtk.obj method connect : 'a file_chooser_dialog_signals (** The following methods should be used to add the [OPEN] or [SAVE] button of a FileChooserDialog *) method add_select_button : string -> 'a -> unit (** ditto with a stock id *) method add_select_button_stock : GtkStock.id -> 'a -> unit end (** @since GTK 2.4 @gtkdoc gtk GtkFileChooserDialog *) val file_chooser_dialog : action:GtkEnums.file_chooser_action -> ?backend:string -> ?parent:#window_skel -> ?destroy_with_parent:bool -> ?title:string -> ?allow_grow:bool -> ?allow_shrink:bool -> ?decorated:bool -> ?deletable:bool -> ?focus_on_map:bool -> ?icon:GdkPixbuf.pixbuf -> ?icon_name:string -> ?modal:bool -> ?position:Tags.window_position -> ?resizable:bool -> ?screen:Gdk.screen -> ?type_hint:Gdk.Tags.window_type_hint -> ?urgency_hint:bool -> ?wm_name:string -> ?wm_class:string -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> 'a file_chooser_dialog (** {3 Selection Dialogs} *) (** @gtkdoc gtk GtkColorSelectionDialog *) class color_selection_dialog : Gtk.color_selection_dialog obj -> object inherit [Buttons.color_selection] dialog_skel val obj : Gtk.color_selection_dialog obj method connect : Buttons.color_selection dialog_signals method cancel_button : GButton.button method colorsel : GMisc.color_selection method help_button : GButton.button method ok_button : GButton.button end (** @gtkdoc gtk GtkColorSelectionDialog *) val color_selection_dialog : ?title:string -> ?parent:#window_skel -> ?destroy_with_parent:bool -> ?allow_grow:bool -> ?allow_shrink:bool -> ?decorated:bool -> ?deletable:bool -> ?focus_on_map:bool -> ?icon:GdkPixbuf.pixbuf -> ?icon_name:string -> ?modal:bool -> ?position:Tags.window_position -> ?screen:Gdk.screen -> ?type_hint:Gdk.Tags.window_type_hint -> ?urgency_hint:bool -> ?wm_name:string -> ?wm_class:string -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> color_selection_dialog (** @gtkdoc gtk GtkFileSelection *) class file_selection : Gtk.file_selection obj -> object inherit [Buttons.file_selection] dialog_skel val obj : Gtk.file_selection obj method connect : Buttons.file_selection dialog_signals method cancel_button : GButton.button method complete : filter:string -> unit method filename : string method get_selections : string list method help_button : GButton.button method ok_button : GButton.button method file_list : string GList.clist method dir_list : string GList.clist method select_multiple : bool method show_fileops : bool method set_filename : string -> unit method set_show_fileops : bool -> unit method set_select_multiple : bool -> unit end (** @gtkdoc gtk GtkFileSelection *) val file_selection : ?title:string -> ?show_fileops:bool -> ?filename:string -> ?select_multiple:bool -> ?parent:#window_skel -> ?destroy_with_parent:bool -> ?allow_grow:bool -> ?allow_shrink:bool -> ?decorated:bool -> ?deletable:bool -> ?focus_on_map:bool -> ?icon:GdkPixbuf.pixbuf -> ?icon_name:string -> ?modal:bool -> ?position:Tags.window_position -> ?resizable:bool -> ?screen:Gdk.screen -> ?type_hint:Gdk.Tags.window_type_hint -> ?urgency_hint:bool -> ?wm_name:string -> ?wm_class:string -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> file_selection (** @gtkdoc gtk GtkFontSelectionDialog*) class font_selection_dialog : Gtk.font_selection_dialog obj -> object inherit [Buttons.font_selection] dialog_skel val obj : Gtk.font_selection_dialog obj method connect : Buttons.font_selection dialog_signals method apply_button : GButton.button method cancel_button : GButton.button method selection : GMisc.font_selection method ok_button : GButton.button end (** @gtkdoc gtk GtkFontSelectionDialog*) val font_selection_dialog : ?title:string -> ?parent:#window_skel -> ?destroy_with_parent:bool -> ?allow_grow:bool -> ?allow_shrink:bool -> ?decorated:bool -> ?deletable:bool -> ?focus_on_map:bool -> ?icon:GdkPixbuf.pixbuf -> ?icon_name:string -> ?modal:bool -> ?position:Tags.window_position -> ?resizable:bool -> ?screen:Gdk.screen -> ?type_hint:Gdk.Tags.window_type_hint -> ?urgency_hint:bool -> ?wm_name:string -> ?wm_class:string -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> font_selection_dialog (** {3 GtkPlug} *) (** @gtkdoc gtk GtkPlug *) class plug_signals : ([> Gtk.plug] as 'a) obj -> object inherit GContainer.container_signals val obj : 'a obj method embedded : callback:(unit -> unit) -> GtkSignal.id end (** Toplevel for embedding into other processes @gtkdoc gtk GtkPlug *) class plug : Gtk.plug obj -> object inherit window_skel val obj : Gtk.plug obj method connect : plug_signals end (** @gtkdoc gtk GtkPlug *) val plug : window:Gdk.native_window -> ?border_width:int -> ?width:int -> ?height:int -> ?show:bool -> unit -> plug (** {3 GtkSocket} *) (** @gtkdoc gtk GtkSocket *) class socket_signals : ([>Gtk.socket] as 'a) obj -> object inherit GContainer.container_signals val obj : 'a obj method plug_added : callback:(unit -> unit) -> GtkSignal.id method plug_removed : callback:(unit -> unit) -> GtkSignal.id end (** Container for widgets from other processes @gtkdoc gtk GtkSocket *) class socket : Gtk.socket obj -> object inherit GContainer.container val obj : Gtk.socket obj method connect : socket_signals method steal : Gdk.native_window -> unit (** @deprecated "inherently unreliable" *) method xwindow : Gdk.xid end (** @gtkdoc gtk GtkSocket *) val socket : ?border_width:int -> ?width:int -> ?height:int -> ?packing:(widget -> unit) -> ?show:bool -> unit -> socket
(**************************************************************************) (* Lablgtk *) (* *) (* This program is free software; you can redistribute it *) (* and/or modify it under the terms of the GNU Library General *) (* Public License as published by the Free Software Foundation *) (* version 2, with the exception described in file COPYING which *) (* comes with the library. *) (* *) (* This program is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Library General Public License for more details. *) (* *) (* You should have received a copy of the GNU Library General *) (* Public License along with this program; if not, write to the *) (* Free Software Foundation, Inc., 59 Temple Place, Suite 330, *) (* Boston, MA 02111-1307 USA *) (* *) (* *) (**************************************************************************)
patch.mli
(* patch.mli *) (* interface for patch.ml *) val applyPatch : Cabs.file -> Cabs.file -> Cabs.file
(* Copyright (c) 2001-2002, George C. Necula <necula@cs.berkeley.edu> Scott McPeak <smcpeak@cs.berkeley.edu> Wes Weimer <weimer@cs.berkeley.edu> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The names of the contributors may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *)
test_dual.ml
open OUnit2 open Dual let d1 = dual 2.1 let d2 = dual 1.1 let d3 = dual 0.8 let test_add _ = let dt = dual 3.2 ~y:2.0 in let dd = add d1 d2 in assert_bool "add: [FAIL]" (equal dt dd) let test_sub _ = let dt = dual 1.0 ~y:0.0 in let dd = sub d1 d2 in assert_bool "sub: [FAIL]" (equal dt dd) let test_neg _ = let dt = dual (-. 2.1) ~y:(-. 1.0) in let dd = neg d1 in assert_bool "neg: [FAIL]" (equal dt dd) let test_mul _ = let dt = dual (2.1 *. 1.1) ~y:3.2 in let dd = mul d1 d2 in assert_bool "mul: [FAIL]" (equal dt dd) let test_div _ = let dt = dual (2.1 /. 1.1) ~y:(-. 1.0 /. (1.1 *. 1.1)) in let dd = div d1 d2 in assert_bool "div: [FAIL]" (equal dt dd) let test_pow _ = let dt = dual (Float.pow 2.1 1.1) ~y:(1.1 *. (Float.pow 2.1 0.1)) in let dd = pow d1 1.1 in assert_bool "pow: [FAIL]" (equal dt dd) let test_exp _ = let e1 = Float.exp(2.1) in let dt = dual e1 ~y:e1 in let dd = exp d1 in assert_bool "exp: [FAIL]" (equal dt dd) let test_sin _ = let dt = dual (Float.sin 2.1) ~y:(Float.cos 2.1) in let dd = sin d1 in assert_bool "sin: [FAIL]" (equal dt dd) let test_cos _ = let dt = dual (Float.cos 2.1) ~y:(-. (Float.sin 2.1)) in let dd = cos d1 in assert_bool "cos: [FAIL]" (equal dt dd) let test_tan _ = let dt = dual (Float.tan 2.1) ~y:(1.0 /. (Float.pow (Float.cos 2.1) 2.0)) in let dd = tan d1 in assert_bool "tan: [FAIL]" (equal dt dd) let test_asin _ = let dt = dual (Float.asin 0.8) ~y:(1.0 /. (Float.sqrt (1.0 -. 0.8 *. 0.8))) in let dd = asin d3 in assert_bool "asin: [FAIL]" (equal dt dd) let test_acos _ = let dt = dual (Float.acos 0.8) ~y:(-. 1.0 /. (Float.sqrt (1.0 -. 0.8 *. 0.8))) in let dd = acos d3 in assert_bool "acos: [FAIL]" (equal dt dd) let test_atan _ = let dt = dual (Float.atan 0.8) ~y:(1.0 /. (1.0 +. 0.8 *. 0.8)) in let dd = atan d3 in assert_bool "atan: [FAIL]" (equal dt dd) let test_sinh _ = let dt = dual (Float.sinh 0.8) ~y:(Float.cosh 0.8) in let dd = sinh d3 in assert_bool "sinh: [FAIL]" (equal dt dd) let test_cosh _ = let dt = dual (Float.cosh 0.8) ~y:(Float.sinh 0.8) in let dd = cosh d3 in assert_bool "cosh: [FAIL]" (equal dt dd) let test_tanh _ = let tanhx = Float.tanh 0.8 in let dt = dual tanhx ~y:(1.0 -. (Float.pow tanhx 2.0)) in let dd = tanh d3 in assert_bool "tanh: [FAIL]" (equal dt dd) let test_root _ = let f x = Float.exp (-. x) *. (Float.pow x 2.2) -. 0.1 *. x in let fd x = sub (mul (exp (neg x)) (pow x 2.2)) (mul (dual 0.1) x) in let x0 = dual 2.0 in let out = root fd x0 in assert_bool "root: [FAIL]" (Float.abs (f out.re) < (Float.sqrt Float.epsilon)) let suite = "Dual Test" >::: [ "test_add" >:: test_add; "test_sub" >:: test_sub; "test_neg" >:: test_neg; "test_mul" >:: test_mul; "test_div" >:: test_div; "test_pow" >:: test_pow; "test_exp" >:: test_exp; "test_sin" >:: test_sin; "test_cos" >:: test_cos; "test_tan" >:: test_tan; "test_asin" >:: test_asin; "test_acos" >:: test_acos; "test_atan" >:: test_atan; "test_sinh" >:: test_sinh; "test_cosh" >:: test_cosh; "test_tanh" >:: test_tanh; "test_root" >:: test_root; ] let () = run_test_tt_main suite
sys.ml
include Stdlib.Sys let linux = match Io.String_path.read_file "/proc/sys/kernel/ostype" |> String.trim with | "Linux" -> true | _ -> false | exception Sys_error _ -> false let force_remove = if win32 then (fun fn -> try remove fn with Sys_error _ -> (* Try to remove the "read-only" attribute, then retry. *) (try Unix.chmod fn 0o666 with Unix.Unix_error _ -> ()); remove fn) else remove
exec-target.c
/* SPDX-License-Identifier: MIT */ int main(int argc, char *argv[]) { return 0; }
/* SPDX-License-Identifier: MIT */
version.ml
let version = "2.11.0"
t-gcd_subresultant.c
#include "fmpq_mpoly.h" void gcd_check( fmpq_mpoly_t g, fmpq_mpoly_t a, fmpq_mpoly_t b, fmpq_mpoly_t t, fmpq_mpoly_ctx_t ctx, slong i, slong j, const char * name) { fmpq_mpoly_t ca, cb, cg; fmpq_mpoly_init(ca, ctx); fmpq_mpoly_init(cb, ctx); fmpq_mpoly_init(cg, ctx); if (!fmpq_mpoly_gcd_subresultant(g, a, b, ctx)) { flint_printf("FAIL: check gcd can be computed\n"); flint_printf("i = %wd, j = %wd, %s\n", i, j, name); fflush(stdout); flint_abort(); } fmpq_mpoly_assert_canonical(g, ctx); if (fmpq_mpoly_is_zero(g, ctx)) { if (!fmpq_mpoly_is_zero(a, ctx) || !fmpq_mpoly_is_zero(b, ctx)) { flint_printf("FAIL: check zero gcd\n"); flint_printf("i = %wd, j = %wd, %s\n", i, j, name); fflush(stdout); flint_abort(); } goto cleanup; } if (!fmpq_mpoly_is_monic(g, ctx)) { flint_printf("FAIL: check gcd is unit normal\n"); flint_printf("i = %wd, j = %wd, %s\n", i, j, name); fflush(stdout); flint_abort(); } if (!fmpq_mpoly_is_zero(t, ctx) && !fmpq_mpoly_divides(cg, g, t, ctx)) { flint_printf("FAIL: check gcd divisor\n"); flint_printf("i = %wd, j = %wd, %s\n", i, j, name); fflush(stdout); flint_abort(); } if (!fmpq_mpoly_divides(ca, a, g, ctx) || !fmpq_mpoly_divides(cb, b, g, ctx)) { flint_printf("FAIL: check divisibility\n"); flint_printf("i = %wd, j = %wd, %s\n", i, j, name); fflush(stdout); flint_abort(); } if (!fmpq_mpoly_gcd_subresultant(cg, ca, cb, ctx)) { flint_printf("FAIL: check cofactor gcd can be computed\n"); flint_printf("i = %wd, j = %wd, %s\n", i, j, name); fflush(stdout); flint_abort(); } fmpq_mpoly_assert_canonical(cg, ctx); if (!fmpq_mpoly_is_one(cg, ctx)) { flint_printf("FAIL: check gcd of cofactors is one\n"); flint_printf("i = %wd, j = %wd, %s\n", i, j, name); fflush(stdout); flint_abort(); } cleanup: fmpq_mpoly_clear(ca, ctx); fmpq_mpoly_clear(cb, ctx); fmpq_mpoly_clear(cg, ctx); } int main(void) { slong i, j; slong tmul = 5; FLINT_TEST_INIT(state); flint_printf("gcd_subresultant...."); fflush(stdout); { fmpq_mpoly_ctx_t ctx; fmpq_mpoly_t g, a, b, t; const char * vars[] = {"x", "y", "z", "t"}; fmpq_mpoly_ctx_init(ctx, 4, ORD_LEX); fmpq_mpoly_init(a, ctx); fmpq_mpoly_init(b, ctx); fmpq_mpoly_init(g, ctx); fmpq_mpoly_init(t, ctx); fmpq_mpoly_set_str_pretty(t, "39 - t*x + 39*x^100 - t*x^101 + 39*x^3*y - t*x^4*y - 7*x^2*y^3*z^11 - 7*x^102*y^3*z^11 - 7*x^5*y^4*z^11 + 78*t^15*x^78*y^3*z^13 - 2*t^16*x^79*y^3*z^13 + x^1000*y^3*z^20 + x^1100*y^3*z^20 + x^1003*y^4*z^20 - 14*t^15*x^80*y^6*z^24 + 2*t^15*x^1078*y^6*z^33", vars, ctx); fmpq_mpoly_set_str_pretty(a, "39 - t*x - 7*x^2*y^3*z^11 + x^1000*y^3*z^20", vars, ctx); fmpq_mpoly_set_str_pretty(b, "1 + x^100 + x^3*y + 2*t^15*x^78*y^3*z^13", vars, ctx); fmpq_mpoly_mul(a, a, t, ctx); fmpq_mpoly_mul(b, b, t, ctx); gcd_check(g, a, b, t, ctx, 0, 0, "example"); fmpq_mpoly_clear(a, ctx); fmpq_mpoly_clear(b, ctx); fmpq_mpoly_clear(g, ctx); fmpq_mpoly_clear(t, ctx); fmpq_mpoly_ctx_clear(ctx); } for (i = 0; i < tmul * flint_test_multiplier(); i++) { fmpq_mpoly_ctx_t ctx; fmpq_mpoly_t a, b, g, t; slong len, len1, len2; slong degbound; flint_bitcnt_t coeff_bits; fmpq_mpoly_ctx_init_rand(ctx, state, 3); fmpq_mpoly_init(g, ctx); fmpq_mpoly_init(a, ctx); fmpq_mpoly_init(b, ctx); fmpq_mpoly_init(t, ctx); len = n_randint(state, 20) + 1; len1 = n_randint(state, 20); len2 = n_randint(state, 20); degbound = 1 + 10/FLINT_MAX(WORD(1), fmpq_mpoly_ctx_nvars(ctx)); for (j = 0; j < 4; j++) { coeff_bits = 1 + n_randint(state, 80); fmpq_mpoly_randtest_bound(t, state, len, coeff_bits, degbound, ctx); if (fmpq_mpoly_is_zero(t, ctx)) fmpq_mpoly_one(t, ctx); coeff_bits = 1 + n_randint(state, 80); fmpq_mpoly_randtest_bound(a, state, len1, coeff_bits, degbound, ctx); coeff_bits = 1 + n_randint(state, 80); fmpq_mpoly_randtest_bound(b, state, len2, coeff_bits, degbound, ctx); fmpq_mpoly_mul(a, a, t, ctx); fmpq_mpoly_mul(b, b, t, ctx); coeff_bits = 1 + n_randint(state, 80); fmpq_mpoly_randtest_bits(g, state, len, coeff_bits, FLINT_BITS, ctx); gcd_check(g, a, b, t, ctx, i, j, "random small"); } fmpq_mpoly_clear(g, ctx); fmpq_mpoly_clear(a, ctx); fmpq_mpoly_clear(b, ctx); fmpq_mpoly_clear(t, ctx); fmpq_mpoly_ctx_clear(ctx); } flint_printf("PASS\n"); FLINT_TEST_CLEANUP(state); return 0; }
/* Copyright (C) 2021 Daniel Schultz This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
const_e.c
#include "arb.h" #include "hypgeom.h" void arb_const_e_eval(arb_t s, slong prec) { hypgeom_t series; arb_t t; arb_init(t); hypgeom_init(series); fmpz_poly_set_str(series->A, "1 1"); fmpz_poly_set_str(series->B, "1 1"); fmpz_poly_set_str(series->P, "1 1"); fmpz_poly_set_str(series->Q, "2 0 1"); prec += FLINT_CLOG2(prec); arb_hypgeom_infsum(s, t, series, prec, prec); arb_div(s, s, t, prec); hypgeom_clear(series); arb_clear(t); } ARB_DEF_CACHED_CONSTANT(arb_const_e, arb_const_e_eval)
/* Copyright (C) 2013 Fredrik Johansson This file is part of Arb. Arb is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <http://www.gnu.org/licenses/>. */
iobuf.mli
type t = { b: bytes; mutable i: int; mutable after_last: int; } type iobuf = t val len : t -> int (** Length of existing content *) val clear : t -> unit (** Clear content, set [i=0], [after_last=0] *) val cap : t -> int (** Internal size of buffer *) val get : t -> int -> char (** Get content at index [i] (relative to internal offset {!t.i}) *) val write_cap : t -> int (** How much can we write *) val write_byte : t -> char -> unit (** Write a single char. @raise Invalid_argument if [write_cap buf = 0] *) val write_slice : t -> bytes -> int -> int -> unit (** [write_slice buf b i len] writes a slice of [b] to [buf]. @raise Invalid_argument if [len > write_cap buf]. *) val consume : t -> int -> unit (** Pool of buffers, to reuse them once they're consumed *) module Pool : sig type t val create : ?buf_size:int -> unit -> t val alloc : t -> iobuf val recycle : t -> iobuf -> unit end (** A chain of buffers stringed together *) module Chain : sig type t val create : pool:Pool.t -> unit -> t val first : t -> iobuf val last : t -> iobuf val len : t -> int (** Sum of length of buffers *) val iter : t -> f:(iobuf -> unit) -> unit val dealloc : t -> unit (** Release all buffers. Do not use after calling that. *) val write_byte : t -> char -> unit (** Write a single char at the end. *) val write_slice : t -> bytes -> int -> int -> unit (** Write a slice at the end. *) val contents : t -> string (** Turn whole contents into a single string *) end
migrate_414_500.ml
module From = Ast_414 module To = Ast_500 let copy_structure : Ast_414.Parsetree.structure -> Ast_500.Parsetree.structure = fun x -> x let copy_signature : Ast_414.Parsetree.signature -> Ast_500.Parsetree.signature = fun x -> x let copy_toplevel_phrase : Ast_414.Parsetree.toplevel_phrase -> Ast_500.Parsetree.toplevel_phrase = fun x -> x let copy_core_type : Ast_414.Parsetree.core_type -> Ast_500.Parsetree.core_type = fun x -> x let copy_expression : Ast_414.Parsetree.expression -> Ast_500.Parsetree.expression = fun x -> x let copy_pattern : Ast_414.Parsetree.pattern -> Ast_500.Parsetree.pattern = fun x -> x let copy_case : Ast_414.Parsetree.case -> Ast_500.Parsetree.case = fun x -> x let copy_type_declaration : Ast_414.Parsetree.type_declaration -> Ast_500.Parsetree.type_declaration = fun x -> x let copy_type_extension : Ast_414.Parsetree.type_extension -> Ast_500.Parsetree.type_extension = fun x -> x let copy_extension_constructor : Ast_414.Parsetree.extension_constructor -> Ast_500.Parsetree.extension_constructor = fun x -> x
scenario.h
#ifndef GHERKIN_SCENARIO_H_ #define GHERKIN_SCENARIO_H_ #include <wchar.h> #include "ast.h" #include "id_generator.h" #include "child_definition.h" #include "example_table.h" #include "location.h" #include "tag.h" #include "step.h" #ifdef __cplusplus extern "C" { #endif typedef struct Scenario { item_delete_function scenario_delete; GherkinAstType type; Location location; const wchar_t* id; wchar_t* keyword; wchar_t* name; const wchar_t* description; const Tags* tags; const Steps* steps; const Examples* examples; } Scenario; const Scenario* Scenario_new(Location location, IdGenerator* id_generator, const wchar_t* keyword, const wchar_t* name, const wchar_t* description, const Tags* tags, const Steps* steps, const Examples* examples); void Scenario_delete(const Scenario* scenario); void Scenario_transfer(Scenario* to_scenario, Scenario* from_scenario); #ifdef __cplusplus } #endif #endif /* GHERKIN_SCENARIO_H_ */
dune_cmd.ml
open Stdune module Re = Dune_re let commands = Table.create (module String) 10 let register name of_args run = Table.add_exn commands name (fun args -> let t = of_args args in run t) (* Doesn't follow the symlinks! *) module Stat = struct type data = | Hardlinks | Permissions | Size | Kind type t = { file : Path.t ; data : data } let data_of_string = function | "size" -> Size | "hardlinks" -> Hardlinks | "permissions" -> Permissions | "kind" -> Kind | s -> raise (Arg.Bad (sprintf "%s is invalid. hardlinks, permissions are only valid options" s)) let pp_stats data (stats : Unix.stats) = match data with | Size -> Int.to_string stats.st_size | Hardlinks -> Int.to_string stats.st_nlink | Permissions -> sprintf "%o" stats.st_perm | Kind -> sprintf "%s" (File_kind.to_string_hum stats.st_kind) let name = "stat" let of_args = function | [ data; file ] -> let data = data_of_string data in let file = Path.of_filename_relative_to_initial_cwd file in { file; data } | _ -> raise (Arg.Bad (sprintf "2 arguments must be provided")) let run { file; data } = let stats = Path.lstat_exn file in print_endline (pp_stats data stats) let () = register name of_args run end module Wait_for_fs_clock_to_advance = struct let name = "wait-for-fs-clock-to-advance" let of_args = function | [] -> () | _ -> raise (Arg.Bad ("Usage: dune_cmd " ^ name)) let run () = let fn = "." ^ name ^ ".tmp" in let fstime () = Unix.close (Unix.openfile fn [ O_WRONLY; O_CREAT; O_TRUNC ] 0o644); let t = (Unix.stat fn).st_ctime in Unix.unlink fn; t in let t = fstime () in while fstime () <= t do Unix.sleepf 0.01 done let () = register name of_args run end module Cat = struct let name = "cat" let of_args = function | [ file ] -> file | _ -> raise (Arg.Bad "Usage: dune_cmd cat <file>") let run p = print_string (Io.String_path.read_file p) let () = register name of_args run end module Exists = struct type t = Path of Path.t let name = "exists" let of_args = function | [ path ] -> Path (Path.of_filename_relative_to_initial_cwd path) | _ -> raise (Arg.Bad "Usage: dune_cmd exists <path>") let run (Path path) = print_string (Path.exists path |> Bool.to_string) let () = register name of_args run end module Expand_lines = struct let name = "expand_lines" let of_args = function | [] -> () | _ -> raise (Arg.Bad ("Usage: dune_cmd " ^ name)) let run () = let re = Re.compile (Re.str "\\n") in set_binary_mode_in stdin true; set_binary_mode_out stdout true; let rec loop () = match input_line stdin with | exception End_of_file -> () | s -> print_endline (Re.replace_string ~all:true re s ~by:"\n"); loop () in loop () let () = register name of_args run end module Sanitizer = struct module Configurator = Configurator.V1 let make_ext_replace config = let tbl = List.filter_map [ "ext_exe"; "ext_dll"; "ext_asm"; "ext_lib"; "ext_obj" ] ~f:(fun var -> match Configurator.ocaml_config_var config var with | Some "" -> None | Some s -> Some (s, "$" ^ var) | None -> ( match (var, Configurator.ocaml_config_var config "system") with | "ext_exe", Some "Win32" -> Some (".exe", var) | _ -> None)) in let re = Re.( compile (seq [ diff any (char '/') ; alt (List.map tbl ~f:(fun (s, _) -> str s)) ; eow ])) in let map = String.Map.of_list_reduce tbl ~f:(fun _ x -> x) in fun s -> Re.replace re s ~f:(fun g -> let s = Re.Group.get g 0 in sprintf "%c%s" s.[0] (String.Map.find_exn map (String.drop s 1))) let name = "sanitize" let of_args = function | [] -> () | _ -> raise (Arg.Bad "Usage: dune_cmd sanitize takes no arguments") let run () = let config = Configurator.create "sanitizer" in let sanitize = make_ext_replace config in let rec loop () = match input_line stdin with | exception End_of_file -> () | line -> print_endline (sanitize line); loop () in loop () let () = register name of_args run end module Count_lines = struct type t = | Stdin | File of Path.t let name = "count-lines" let count_lines ic = let rec loop n = match input_line ic with | exception End_of_file -> n | _line -> loop (n + 1) in loop 0 let of_args = function | [] -> Stdin | [ file ] -> File (Path.of_filename_relative_to_initial_cwd file) | _ -> raise (Arg.Bad "Usage: dune_cmd count-lines <file>") let run t = let n = match t with | Stdin -> count_lines stdin | File p -> Io.with_file_in p ~binary:false ~f:count_lines in Printf.printf "%d\n%!" n let () = register name of_args run end module Override_on = struct module Configurator = Configurator.V1 type t = { system_to_override_on : string ; desired_output : string } let name = "override-on" let copy_stdin () = let rec loop () = match input_line stdin with | exception End_of_file -> () | line -> print_endline line; loop () in loop () let of_args = function | [ system_to_override_on; desired_output ] -> { system_to_override_on; desired_output } | _ -> raise (Arg.Bad "Usage: dune_cmd override-on <system-to-override-on> \ <desired-output>") let run { system_to_override_on; desired_output } = let config = Configurator.create "override-on" in match Configurator.ocaml_config_var config "system" with | Some system when String.equal system system_to_override_on -> print_endline desired_output | _ -> copy_stdin () let () = register name of_args run end module Rewrite_path = struct let name = "rewrite-path" let of_args = function | [ path ] -> path | _ -> raise (Arg.Bad "Usage: dune_cmd rewrite-path <path>") let run path = match Build_path_prefix_map.decode_map (Sys.getenv "BUILD_PATH_PREFIX_MAP") with | Error msg -> failwith msg | Ok map -> print_string (Build_path_prefix_map.rewrite map path) let () = register name of_args run end module Find_by_contents = struct let name = "find-file-by-contents-regexp" let of_args = function | [ path; contents_regexp ] -> (path, Str.regexp contents_regexp) | _ -> raise (Arg.Bad "Usage: dune_cmd find-files-by-contents-regexp <path> <regexp>") let rec find_files ~dir regexp : _ list = List.concat_map (List.sort (Sys.readdir dir |> Array.to_list) ~compare:String.compare) ~f:(fun name -> let path = Filename.concat dir name in let stats = Unix.stat path in match stats.st_kind with | S_DIR -> find_files ~dir:path regexp | S_REG -> let s = Io.String_path.read_file path in if Str.string_match regexp s 0 then [ Printf.sprintf "%s\n" path ] else [] | _other -> []) let run (dir, regexp) = match find_files ~dir regexp with | [] -> Format.eprintf "No files found matching pattern@.%!"; exit 1 | [ res ] -> Printf.printf "%s\n" res | _ :: _ as files -> Format.eprintf "Multiple files found matching pattern@.%!"; List.iter files ~f:(fun file -> Printf.printf "%s\n%!" file); exit 1 let () = register name of_args run end module Wait_for_file_to_appear = struct type t = { file : Path.t } let name = "wait-for-file-to-appear" let of_args = function | [ file ] -> let file = Path.of_filename_relative_to_initial_cwd file in { file } | _ -> raise (Arg.Bad (sprintf "1 argument must be provided")) let run { file } = while not (Path.exists file) do Unix.sleepf 0.01 done let () = register name of_args run end let () = let name, args = match Array.to_list Sys.argv with | _ :: name :: args -> (name, args) | [] -> assert false | [ _ ] -> Format.eprintf "No arguments passed@.%!"; exit 1 in match Table.find commands name with | None -> Format.eprintf "No command %S name found" name; exit 1 | Some run -> run args
dune
(library (name Spin) (public_name spin.lib) (modules (:standard)) (libraries base stdio str lwt.unix sexplib jingoo fileutils) (flags -open Base) (preprocess (pps ppx_sexp_conv))) (include_subdirs unqualified)
test_parsing_lisp.ml
open Common module Flag = Flag_parsing (*****************************************************************************) (* Subsystem testing *) (*****************************************************************************) let test_tokens_lisp file = (match File_type.file_type_of_file file with | File_type.PL (File_type.Lisp _) -> () | _ -> pr2 "warning: seems not a lisp file"); Flag.verbose_lexing := true; Flag.verbose_parsing := true; let toks = Parse_lisp.tokens file in toks |> List.iter (fun x -> pr2_gen x); () let test_parse_lisp xs = let fullxs = Lib_parsing_lisp.find_source_files_of_dir_or_files xs in let stat_list = ref [] in fullxs |> List.iter (fun file -> pr2 ("PARSING: " ^ file); let _xs, stat = Parse_lisp.parse file in Common.push stat stat_list); Parsing_stat.print_parsing_stat_list !stat_list; () (*****************************************************************************) (* Unit tests *) (*****************************************************************************) (*****************************************************************************) (* Main entry for Arg *) (*****************************************************************************) let actions () = [ ("-tokens_lisp", " <file>", Arg_helpers.mk_action_1_arg test_tokens_lisp); ( "-parse_lisp", " <files or dirs>", Arg_helpers.mk_action_n_arg test_parse_lisp ); ]
dune
(library (name vlib) (virtual_modules vlib) (wrapped false) (default_implementation lib_default))
batFile.ml
(* this code is purposedly before any module open directive *) let count_lines (fn: string): int = let count = ref 0 in let input = open_in fn in (try while true do let _line = input_line input in incr count done with End_of_file -> close_in input); !count (*$T count_lines (Sys.file_exists __FILE__) && (count_lines __FILE__ > 0) *) open BatIO open ListLabels open Unix let finally = BatInnerPervasives.finally (* Permissions *) type permission = int (**Internally, permissions are represented in Unix-style octal.*) let default_permission = 0o000 let user_read = 0o400 let user_write = 0o200 let user_exec = 0o100 let group_read = 0o040 let group_write = 0o020 let group_exec = 0o010 let other_read = 0o004 let other_write = 0o002 let other_exec = 0o001 let perm l = fold_left l ~init:default_permission ~f:(fun acc x -> acc lor x) let unix_perm i = if 0 <= i && i <= 511 then i else Printf.ksprintf invalid_arg "File.unix_perm: Unix permission %o" i (* Opening *) type open_in_flag = [ `create | `excl (**Fail if the file exists and [`create] is set *) | `text (**Open in ascii mode -- if this flag is not specified or if the operating system does not perform conversions, the file is opened in binary mode. *) | `nonblock (**Open in non-blocking mode *) | `mmap (**Open in memory-mapped mode (experimental)*) ] type open_out_flag = [ `append (**Start writing at the end of the file rather than the start *) | `create (**Create the file if it does not exist *) | `trunc (**Empty the file if it already exists (on by default) *) | `excl (**Fail if the file exists and [`create] is set *) | `text (**Open in ascii mode -- if this flag is not specified or if the operating system does not perform conversions, the file is opened in binary mode. *) | `nonblock (**Open in non-blocking mode *) ] (** Convert a [open_in_flag list] into a low-level [open_flag list] *) let in_chan_mode ?mode binary = let mode_to_open_flag l = let rec aux acc is_binary = function | [] -> if is_binary then Open_binary::acc else Open_text ::acc | `create::t -> aux (Open_creat::acc) is_binary t | `excl::t -> aux (Open_excl::acc) is_binary t | `text::t -> aux acc false t | `nonblock::t -> aux (Open_nonblock::acc) is_binary t | _::t -> aux acc is_binary t (*Allow for future extensions*) in aux [] binary l in match mode with | None -> [Open_rdonly; Open_binary] | Some l -> mode_to_open_flag l (** Convert a [open_out_flag list] into a low-level [open_flag list] *) let out_chan_mode ?mode binary = let mode_to_open_flag l = let rec aux acc is_binary = function | [] -> let acc' = if List.mem Open_append acc then acc else Open_trunc::acc in if is_binary then Open_binary::acc' else Open_text ::acc' | `append::t -> aux (Open_append::acc) is_binary t | `trunc::t -> aux (Open_trunc::acc) is_binary t | `create::t -> aux (Open_creat::acc) is_binary t | `excl::t -> aux (Open_excl::acc) is_binary t | `text::t -> aux acc false t | `nonblock::t -> aux (Open_nonblock::acc) is_binary t | _::t -> aux acc is_binary t (*Allow for future extensions*) in aux [] binary l in match mode with | None -> [Open_wronly; Open_binary; Open_creat; Open_trunc] | Some l -> Open_wronly :: (mode_to_open_flag l) let open_out ?mode ?(perm=0o666) name = (* Printf.eprintf "Opening out\n%!";*) output_channel ~cleanup:true (open_out_gen (out_chan_mode ?mode true) perm name) open BatBigarray let open_in ?mode ?(perm=default_permission) name = let unix_mode = in_chan_mode ?mode true in match mode with | Some l when List.mem `mmap l -> let desc = Unix.openfile name [O_RDONLY] 0 in let array= Array1.map_file desc char c_layout (*shared*)false (-1) in let pos = ref 0 and len = Array1.dim array in create_in ~read:(fun () -> if !pos >= len then raise No_more_input else Array1.get array (BatRef.post_incr pos)) ~input:(fun sout _p l -> if !pos >= len then raise No_more_input; let n = (if !pos + l > len then len - !pos else l) in for i = 0 to n - 1 do Bytes.set sout (!pos + i) (Array1.get array i) done; (* String.unsafe_blit s (post pos ( (+) n ) ) sout p n;*) pos := !pos + n; n ) ~close:(fun () -> Unix.close desc) | _ -> input_channel ~cleanup:true ~autoclose:false (open_in_gen unix_mode perm name) let with_do opener closer x f = let file = opener x in finally (fun () -> closer file) f file let with_file_in ?mode ?perm x = with_do (open_in ?mode ?perm) close_in x let with_file_out ?mode ?perm x = with_do (open_out ?mode ?perm) close_out x let lines_of file = BatIO.lines_of (open_in file) let write_lines file lines = let mode = [`trunc; `create] in with_file_out ~mode file (fun oc -> BatEnum.iter (BatIO.write_line oc) lines) (** {6 Temporary files} *) type open_temporary_out_flag = [ open_out_flag | `delete_on_exit (**Should the file be deleted when program ends?*) ] let open_temporary_out ?mode ?(prefix="ocaml") ?(suffix="tmp") ?temp_dir () : (_ output * string) = let chan_mode = out_chan_mode ?mode true in let (name, cout) = Filename.open_temp_file ?temp_dir ~mode:chan_mode prefix suffix in let out = output_channel ~cleanup:true cout in (match mode with | Some l when List.mem `delete_on_exit l -> at_exit (fun () -> try BatIO.close_out out; Sys.remove name with _ -> ()) | _ -> ()); (out, name) let with_temporary_out ?mode ?prefix ?suffix ?temp_dir f = let (file, name) = open_temporary_out ?mode ?prefix ?suffix ?temp_dir () in finally (fun () -> close_out file) (fun (file, name) -> f file name) (file, name) (** {6 File manipulation} *) open Unix let size_of s = (stat s).st_size let size_of_big s = (LargeFile.stat s).LargeFile.st_size let chmod = Unix.chmod let set_permissions = chmod
(* * File - File manipulation * Copyright (C) 2008 David Teller * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version, * with the special exception on linking described in file LICENSE. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA *)
token.ml
type pattern = Plexing.pattern; exception Error of string; type location = Ploc.t; type location_function = int -> location; type lexer_func 'te = Stream.t char -> (Stream.t 'te * Plexing.Locations.t); type glexer 'te = Plexing.lexer 'te == { tok_func : lexer_func 'te; tok_using : pattern -> unit; tok_removing : pattern -> unit; tok_match : mutable pattern -> 'te -> string; tok_text : pattern -> string; tok_comm : mutable option (list location) } ; value make_loc = Ploc.make_unlined; value dummy_loc = Ploc.dummy; value make_stream_and_location = Plexing.make_stream_and_location; value lexer_func_of_parser = Plexing.lexer_func_of_parser; value lexer_func_of_ocamllex = Plexing.lexer_func_of_ocamllex; value eval_char = Plexing.eval_char; value eval_string = Plexing.eval_string; value lexer_text = Plexing.lexer_text; value default_match = Plexing.default_match; value line_nb = Plexing.line_nb; value bol_pos = Plexing.bol_pos; value restore_lexing_info = Plexing.restore_lexing_info;
(* camlp5r *) (* token.ml,v *) (* Copyright (c) INRIA 2007-2017 *)
zk_rollup_repr.ml
module Address = struct let prefix = "epx1" let encoded_size = 37 let decoded_prefix = "\001\023\224\125" module H = Blake2B.Make (Base58) (struct let name = "Zk_rollup_hash" let title = "A zk rollup address" let b58check_prefix = decoded_prefix let size = Some 20 end) include H let () = Base58.check_encoded_prefix b58check_encoding prefix encoded_size include Path_encoding.Make_hex (H) type error += (* `Permanent *) Error_zk_rollup_address_generation let () = let open Data_encoding in let msg = "Error while generating rollup address" in register_error_kind `Permanent ~id:"rollup.error_zk_rollup_address_generation" ~title:msg ~pp:(fun ppf () -> Format.fprintf ppf "%s" msg) ~description:msg unit (function Error_zk_rollup_address_generation -> Some () | _ -> None) (fun () -> Error_zk_rollup_address_generation) let from_nonce nonce = Data_encoding.Binary.to_bytes_opt Origination_nonce.encoding nonce |> function | None -> error Error_zk_rollup_address_generation | Some nonce -> ok @@ hash_bytes [nonce] let of_b58data = function H.Data h -> Some h | _ -> None end type t = Address.t let to_scalar x = Zk_rollup_scalar.of_bits (Data_encoding.Binary.to_string_exn Address.encoding x) type pending_list = | Empty of {next_index : int64} | Pending of {next_index : int64; length : int} let pending_list_encoding : pending_list Data_encoding.t = let open Data_encoding in let empty_tag, pending_tag = (0, 1) in let empty_encoding = obj1 (req "next_index" Compact.(make ~tag_size:`Uint8 int64)) in let pending_encoding = obj2 (req "next_index" Compact.(make ~tag_size:`Uint8 int64)) (req "length" uint16) in matching (function | Empty {next_index} -> matched empty_tag empty_encoding next_index | Pending {next_index; length} -> matched pending_tag pending_encoding (next_index, length)) [ case ~title:"Empty" (Tag empty_tag) empty_encoding (function Empty {next_index} -> Some next_index | _ -> None) (fun next_index -> Empty {next_index}); case ~title:"Pending" (Tag pending_tag) pending_encoding (function | Pending {next_index; length} -> Some (next_index, length) | _ -> None) (fun (next_index, length) -> Pending {next_index; length}); ] module Index = struct type nonrec t = t let path_length = 1 let to_path c l = let raw_key = Data_encoding.Binary.to_bytes_exn Address.encoding c in let (`Hex key) = Hex.of_bytes raw_key in key :: l let of_path = function | [key] -> Option.bind (Hex.to_bytes (`Hex key)) (Data_encoding.Binary.of_bytes_opt Address.encoding) | _ -> None let rpc_arg = Address.rpc_arg let encoding = Address.encoding let compare = Address.compare end let in_memory_size (_ : t) = let open Cache_memory_helpers in h1w +! string_size_gen Address.size module Internal_for_tests = struct let originated_zk_rollup nonce = let data = Data_encoding.Binary.to_bytes_exn Origination_nonce.encoding nonce in Address.hash_bytes [data] end
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2022 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
obj_id.mli
open! Core type t = Memtrace.Trace.Obj_id.t [@@deriving sexp_of] include Hashable.S_plain with type t := t
subst.ml
open Stdune open Import module Sub_dirs = Dune_engine.Sub_dirs module Vcs = Dune_engine.Vcs let is_a_source_file path = (match Path.extension path with | ".flv" | ".gif" | ".ico" | ".jpeg" | ".jpg" | ".mov" | ".mp3" | ".mp4" | ".otf" | ".pdf" | ".png" | ".ttf" | ".woff" -> false | _ -> true) && Path.is_file path let subst_string s path ~map = let len = String.length s in let longest_var = String.longest (String.Map.keys map) in let double_percent_len = String.length "%%" in let loc_of_offset ~ofs ~len = let rec loop lnum bol i = if i = ofs then let pos = { Lexing.pos_fname = Path.to_string path ; pos_cnum = i ; pos_lnum = lnum ; pos_bol = bol } in { Loc.start = pos; stop = { pos with pos_cnum = pos.pos_cnum + len } } else match s.[i] with | '\n' -> loop (lnum + 1) (i + 1) (i + 1) | _ -> loop lnum bol (i + 1) in loop 1 0 0 in let rec loop i acc = if i = len then acc else match s.[i] with | '%' -> after_percent (i + 1) acc | _ -> loop (i + 1) acc and after_percent i acc = if i = len then acc else match s.[i] with | '%' -> after_double_percent ~start:(i - 1) (i + 1) acc | _ -> loop (i + 1) acc and after_double_percent ~start i acc = if i = len then acc else match s.[i] with | '%' -> after_double_percent ~start:(i - 1) (i + 1) acc | 'A' .. 'Z' | '_' -> in_var ~start (i + 1) acc | _ -> loop (i + 1) acc and in_var ~start i acc = if i - start > longest_var + double_percent_len then loop i acc else if i = len then acc else match s.[i] with | '%' -> end_of_var ~start (i + 1) acc | 'A' .. 'Z' | '_' -> in_var ~start (i + 1) acc | _ -> loop (i + 1) acc and end_of_var ~start i acc = if i = len then acc else match s.[i] with | '%' -> ( let var = String.sub s ~pos:(start + 2) ~len:(i - start - 3) in match String.Map.find map var with | None -> in_var ~start:(i - 1) (i + 1) acc | Some (Ok repl) -> let acc = (start, i + 1, repl) :: acc in loop (i + 1) acc | Some (Error msg) -> let loc = loc_of_offset ~ofs:start ~len:(i + 1 - start) in User_error.raise ~loc [ Pp.text msg ]) | _ -> loop (i + 1) acc in match List.rev (loop 0 []) with | [] -> None | repls -> let result_len = List.fold_left repls ~init:(String.length s) ~f:(fun acc (a, b, repl) -> acc - (b - a) + String.length repl) in let buf = Buffer.create result_len in let pos = List.fold_left repls ~init:0 ~f:(fun pos (a, b, repl) -> Buffer.add_substring buf s pos (a - pos); Buffer.add_string buf repl; b) in Buffer.add_substring buf s pos (len - pos); Some (Buffer.contents buf) let subst_file path ~map = let s = Io.read_file path in let s = if Path.is_root (Path.parent_exn path) && Package.is_opam_file path then "version: \"%%" ^ "VERSION_NUM" ^ "%%\"\n" ^ s else s in match subst_string s ~map path with | None -> () | Some s -> Io.write_file path s (* Extending the Dune_project APIs, but adding capability to modify *) module Dune_project = struct include Dune_project type 'a simple_field = { loc : Loc.t ; loc_of_arg : Loc.t ; arg : 'a } type t = { contents : string ; name : Package.Name.t simple_field option ; version : string simple_field option ; project : Dune_project.t } let filename = Path.in_source Dune_project.filename let load ~dir ~files ~infer_from_opam_files = let open Memo.O in let+ project = (* dir_status only affects warning status, but it will not matter here. dune subst will fail with a hard error if the name is missing *) let dir_status = Sub_dirs.Status.Normal in Dune_project.load ~dir ~files ~infer_from_opam_files ~dir_status in let open Option.O in let* project = project in let file = Dune_project.file project |> Path.Source.to_string |> Path.in_source in let contents = Io.read_file file in let sexp = let lb = Lexbuf.from_string contents ~fname:(Path.to_string file) in Dune_lang.Parser.parse lb ~mode:Many_as_one in let parser = let open Dune_lang.Decoder in let simple_field name arg = let+ loc, x = located (field_o name (located arg)) in Option.map x ~f:(fun (loc_of_arg, arg) -> { loc; loc_of_arg; arg }) in enter (fields (let+ name = simple_field "name" Package.Name.decode and+ version = simple_field "version" string and+ () = junk_everything in Some { contents; name; version; project })) in Dune_lang.Decoder.parse parser Univ_map.empty sexp let project t = t.project let subst t ~map ~version = let s = match version with | None -> t.contents | Some version -> ( let replace_text start_ofs stop_ofs repl = sprintf "%s%s%s" (String.sub t.contents ~pos:0 ~len:start_ofs) repl (String.sub t.contents ~pos:stop_ofs ~len:(String.length t.contents - stop_ofs)) in match t.version with | Some v -> (* There is a [version] field, overwrite its argument *) replace_text v.loc_of_arg.start.pos_cnum v.loc_of_arg.stop.pos_cnum (Dune_lang.to_string (Dune_lang.atom_or_quoted_string version)) | None -> let version_field = Dune_lang.to_string (List [ Dune_lang.atom "version" ; Dune_lang.atom_or_quoted_string version ]) ^ "\n" in let ofs = ref (match t.name with | Some { loc; _ } -> (* There is no [version] field but there is a [name] one, add the version after it *) loc.stop.pos_cnum | None -> (* If all else fails, add the [version] field after the first line of the file *) 0) in let len = String.length t.contents in while !ofs < len && t.contents.[!ofs] <> '\n' do incr ofs done; if !ofs < len && t.contents.[!ofs] = '\n' then ( incr ofs; replace_text !ofs !ofs version_field) else replace_text !ofs !ofs ("\n" ^ version_field)) in let s = Option.value (subst_string s ~map filename) ~default:s in if s <> t.contents then Io.write_file filename s end let make_watermark_map ~commit ~version ~dune_project ~info = let dune_project = Dune_project.project dune_project in let version_num = let open Option.O in let+ version = version in Option.value ~default:version (String.drop_prefix version ~prefix:"v") in let name = Dune_project.name dune_project in (* XXX these error messages aren't particularly good as these values do not necessarily come from the project file. It's possible for them to be defined in the .opam file directly*) let make_value name = function | None -> Error (sprintf "variable %S not found in dune-project file" name) | Some value -> Ok value in let make_separated name sep = function | None -> Error (sprintf "variable %S not found in dune-project file" name) | Some value -> Ok (String.concat ~sep value) in let make_dev_repo_value = function | Some (Package.Source_kind.Host h) -> Ok (Package.Source_kind.Host.homepage h) | Some (Package.Source_kind.Url url) -> Ok url | None -> Error (sprintf "variable dev-repo not found in dune-project file") in let make_version = function | Some s -> Ok s | None -> Error "repository does not contain any version information" in String.Map.of_list_exn [ ("NAME", Ok (Dune_project.Name.to_string_hum name)) ; ("VERSION", make_version version) ; ("VERSION_NUM", make_version version_num) ; ( "VCS_COMMIT_ID" , match commit with | None -> Error "repository does not contain any commits" | Some s -> Ok s ) ; ( "PKG_MAINTAINER" , make_separated "maintainer" ", " @@ Package.Info.maintainers info ) ; ("PKG_AUTHORS", make_separated "authors" ", " @@ Package.Info.authors info) ; ("PKG_HOMEPAGE", make_value "homepage" @@ Package.Info.homepage info) ; ("PKG_ISSUES", make_value "bug-reports" @@ Package.Info.bug_reports info) ; ("PKG_DOC", make_value "doc" @@ Package.Info.documentation info) ; ("PKG_LICENSE", make_separated "license" ", " @@ Package.Info.license info) ; ("PKG_REPO", make_dev_repo_value @@ Package.Info.source info) ] let subst vcs = let open Memo.O in let* (version, commit), files = Memo.fork_and_join (fun () -> Memo.fork_and_join (fun () -> Vcs.describe vcs) (fun () -> Vcs.commit_id vcs)) (fun () -> Vcs.files vcs) in let+ (dune_project : Dune_project.t) = (let files = (* Filter-out files form sub-directories *) List.fold_left files ~init:String.Set.empty ~f:(fun acc fn -> if Path.is_root (Path.parent_exn fn) then String.Set.add acc (Path.to_string fn) else acc) in Dune_project.load ~dir:Path.Source.root ~files ~infer_from_opam_files:true) >>| function | Some dune_project -> dune_project | None -> User_error.raise [ Pp.text "There is no dune-project file in the current directory, please \ add one with a (name <name>) field in it." ] ~hints: [ Pp.text "dune subst must be executed from the root of the project." ] in (match Dune_project.subst_config dune_project.project with | Dune_engine.Subst_config.Disabled -> User_error.raise [ Pp.text "dune subst has been disabled in this project. Any use of it is \ forbidden." ] ~hints: [ Pp.text "If you wish to re-enable it, change to (subst enabled) in the \ dune-project file." ] | Dune_engine.Subst_config.Enabled -> ()); let info = let loc, name = match dune_project.name with | None -> User_error.raise [ Pp.textf "The project name is not defined, please add a (name <name>) \ field to your dune-project file." ] | Some n -> (n.loc_of_arg, n.arg) in let package_named_after_project = let packages = Dune_project.packages dune_project.project in Package.Name.Map.find packages name in let metadata_from_dune_project () = Dune_project.info dune_project.project in let metadata_from_matching_package () = match package_named_after_project with | Some pkg -> Ok pkg.info | None -> Error (User_error.make ~loc [ Pp.textf "Package %s doesn't exist." (Package.Name.to_string name) ]) in let version = Dune_project.dune_version dune_project.project in let ok_exn = function | Ok s -> s | Error e -> raise (User_error.E e) in if version >= (3, 0) then metadata_from_dune_project () else if version >= (2, 8) then match metadata_from_matching_package () with | Ok p -> p | Error _ -> metadata_from_dune_project () else ok_exn (metadata_from_matching_package ()) in let watermarks = make_watermark_map ~commit ~version ~dune_project ~info in Dune_project.subst ~map:watermarks ~version dune_project; List.iter files ~f:(fun path -> if is_a_source_file path && not (Path.equal path Dune_project.filename) then subst_file path ~map:watermarks) let subst () = match Sys.readdir "." |> Array.to_list |> String.Set.of_list |> Vcs.Kind.of_dir_contents with | None -> Fiber.return () | Some kind -> Memo.run (subst { kind; root = Path.root }) (** A string that is "3.7.1" but not expanded by [dune subst] *) let literal_version = "%%" ^ "VERSION%%" let doc = "Substitute watermarks in source files." let man = let var name desc = `Blocks [ `Noblank; `P ("- $(b,%%" ^ name ^ "%%), " ^ desc) ] in let opam field = var ("PKG_" ^ String.uppercase field) ("contents of the $(b," ^ field ^ ":) field from the opam file") in [ `S "DESCRIPTION" ; `P {|Substitute $(b,%%ID%%) strings in source files, in a similar fashion to what topkg does in the default configuration.|} ; `P ({|This command is only meant to be called when a user pins a package to its development version. Especially it replaces $(b,|} ^ literal_version ^ {|) strings by the version obtained from the vcs. Currently only git is supported and the version is obtained from the output of:|} ) ; `Pre {| \$ git describe --always --dirty --abbrev=7|} ; `P {|$(b,dune subst) substitutes the variables that topkg substitutes with the default configuration:|} ; var "NAME" "the name of the project (from the dune-project file)" ; var "VERSION" "output of $(b,git describe --always --dirty --abbrev=7)" ; var "VERSION_NUM" ("same as $(b," ^ literal_version ^ ") but with a potential leading 'v' or 'V' dropped") ; var "VCS_COMMIT_ID" "commit hash from the vcs" ; opam "maintainer" ; opam "authors" ; opam "homepage" ; opam "issues" ; opam "doc" ; opam "license" ; opam "repo" ; `P {|In order to call $(b,dune subst) when your package is pinned, add this line to the $(b,build:) field of your opam file:|} ; `Pre {| [dune "subst"] {pinned}|} ; `P {|Note that this command is meant to be called only from opam files and behaves a bit differently from other dune commands. In particular it doesn't try to detect the root and must be called from the root of the project.|} ; `Blocks Common.help_secs ] let info = Cmd.info "subst" ~doc ~man let term = let+ () = Common.build_info and+ debug_backtraces = Common.debug_backtraces in let config : Dune_config.t = { Dune_config.default with display = Dune_config.Display.quiet ; concurrency = Fixed 1 } in Dune_engine.Clflags.debug_backtraces debug_backtraces; Path.set_root (Path.External.cwd ()); Path.Build.set_build_dir (Path.Outside_build_dir.of_string Common.default_build_dir); Dune_config.init config ~watch:false; Log.init_disabled (); Dune_engine.Scheduler.Run.go ~on_event:(fun _ _ -> ()) (Dune_config.for_scheduler config None ~insignificant_changes:`React ~signal_watcher:`No) subst let command = Cmd.v info term
distribution.mli
(*s: distribution.mli *) val map_reduce: fmap:('a -> 'b) -> freduce:('c -> 'b -> 'c) -> 'c -> 'a list -> 'c (*x: distribution.mli *) val map_reduce_lazy: fmap:('a -> 'b) -> freduce:('c -> 'b -> 'c) -> 'c -> (unit -> 'a list) -> 'c (*x: distribution.mli *) val debug_mpi: bool ref (*x: distribution.mli *) (*****************************************************************************) (* Private *) (*****************************************************************************) (*s: distribution.mli private *) val under_mpirun : unit -> bool (*x: distribution.mli private *) val master : freduce:('c -> 'b -> 'c) -> 'c -> 'b list -> 'c (*x: distribution.mli private *) val worker : fmap:('a -> 'b) -> unit (*x: distribution.mli private *) exception TaskFinished (*x: distribution.mli private *) val mpi_adjust_argv : string array -> string array (*e: distribution.mli private *) (*e: distribution.mli *)
(*s: distribution.mli *) val map_reduce:
annot.ml
(* Utilities for interpreting annotations of type Ast.annot. *) open Import type t = Ast.annot let error_at loc s = failwith (sprintf "%s:\n%s" (Ast.string_of_loc loc) s) let fields ~section ~field l = List.filter_map (fun (s, (_, fs)) -> if s = section then Some fs else None) l |> List.map (fun fs -> List.filter_map (fun (f, (l, s)) -> if f = field then Some (l, s) else None) fs) |> List.flatten let field ~section ~field l = match fields ~section ~field l with | [fieldmatch] -> Some fieldmatch | (loc, _) :: others -> error_at loc (sprintf "Duplicate annotation %s.%s (also in:\n %s\n)" section field (List.map (fun (loc, _) -> (Ast.string_of_loc loc)) others |> String.concat ",\n ")) | _ -> None let has_section k l = Option.is_some (List.assoc k l) let has_field ~sections:k ~field:k2 l = List.exists (fun k1 -> field ~section:k1 ~field:k2 l |> Option.is_some ) k let get_flag ~sections:k ~field:k2 l = k |> List.find_map (fun k1 -> field ~section:k1 ~field:k2 l |> Option.map (fun (loc, o) -> match o with | None | Some "true" -> true | Some "false" -> false | Some s -> error_at loc (sprintf "Invalid value %S for flag %s.%s" s k1 k2))) |> Option.value ~default:false let get_field ~parse ~default ~sections:k ~field:k2 l = k |> List.find_map (fun k1 -> let open Option.O in field l ~section:k1 ~field:k2 >>= fun (loc, o) -> match o with | Some s -> (match parse s with Some _ as y -> y | None -> error_at loc (sprintf "Invalid annotation <%s %s=%S>" k1 k2 s)) | None -> error_at loc (sprintf "Missing value for annotation %s.%s" k1 k2)) |> Option.value ~default let get_fields ~parse ~sections ~field l = List.find_map (fun section -> Some ( fields l ~section ~field |> List.map (fun (loc, o) -> match o with | None -> error_at loc (sprintf "Missing value for annotation %s.%s" section field) | Some s -> (match parse s with | None -> error_at loc (sprintf "Invalid annotation <%s %s=%S>" section field s) | Some v -> v)) )) sections |> Option.value ~default:[] let get_opt_field ~parse ~sections ~field l = let parse s = match parse s with | None -> None (* indicates parse error *) | Some v -> Some (Some v) in get_field ~parse ~default:None ~sections ~field l let set_field ~loc ~section:k ~field:k2 v l : Ast.annot = match List.assoc k l with | None -> (k, (loc, [ k2, (loc, v) ])) :: l | Some (section_loc, section) -> let section_loc, section = List.assoc_exn k l in let section = match List.assoc k2 section with | None -> (k2, (loc, v)) :: section | Some _ -> List.assoc_update k2 (loc, v) section in List.assoc_update k (section_loc, section) l let get_loc ~sections:k ~field:k2 l = k |> List.find_map (fun k1 -> let open Option.O in field l ~section:k1 ~field:k2 >>= fun (loc, _o) -> Some loc) let get_loc_exn ~sections ~field l = get_loc ~sections ~field l |> Option.value_exn let collapse merge l = let tbl = Hashtbl.create 10 in let n = ref 0 in List.iter ( fun (s1, f1) -> incr n; try let _, f2 = Hashtbl.find tbl s1 in Hashtbl.replace tbl s1 (!n, merge f1 f2) with Not_found -> Hashtbl.add tbl s1 (!n, f1) ) (List.rev l); let l = Hashtbl.fold (fun s (i, f) l -> (i, (s, f)) :: l) tbl [] in let l = List.sort (fun (i, _) (j, _) -> compare j i) l in List.map snd l let override_values x1 _ = x1 let override_fields (loc1, l1) (_, l2) = (loc1, collapse override_values (l1 @ l2)) let merge l = collapse override_fields l let create_id = let n = ref (-1) in fun () -> incr n; if !n < 0 then failwith "Annot.create_id: counter overflow" else string_of_int !n type node_kind = | Module_head | Type_def | Type_expr | Variant | Cell | Field type schema_field = node_kind * string type schema_section = { section: string; fields: schema_field list; } type schema = schema_section list let validate_section sec root = (* split fields by location where they may occur *) let in_module_head = ref [] in let in_type_def = ref [] in let in_type_expr = ref [] in let in_variant = ref [] in let in_cell = ref [] in let in_field = ref [] in sec.fields |> List.iter (fun (kind, field_name) -> let acc = match kind with | Module_head -> in_module_head | Type_def -> in_type_def | Type_expr -> in_type_expr | Variant -> in_variant | Cell -> in_cell | Field -> in_field in acc := field_name :: ! acc ); let check acc = let allowed_fields = List.rev !acc in fun _node (an : Ast.annot) () -> an |> List.iter (fun ((sec_name, (loc, fields)) : Ast.annot_section) -> if sec_name = sec.section then fields |> List.iter (fun (field_name, (loc2, _opt_val)) -> if not (List.mem field_name allowed_fields) then Ast.error_at loc2 (sprintf "Invalid or misplaced annotation <%s ... %s... >" sec_name field_name) ) ) in Ast.fold_annot ~module_head:(check in_module_head) ~type_def:(check in_type_def) ~type_expr:(check in_type_expr) ~variant:(check in_variant) ~cell:(check in_cell) ~field:(check in_field) root () let validate schema root = List.iter (fun sec -> validate_section sec root ) schema
(* Utilities for interpreting annotations of type Ast.annot. *)
dune
(library (name merlin_extend) (public_name merlin-lib.extend) (modules (:standard \ extend_helper)) (flags :standard -open Ocaml_utils -open Ocaml_parsing -open Ocaml_typing) (libraries ocaml_parsing ocaml_typing unix ocaml_utils))
table.ml
type cell = { location : Location.t; value : string; } type row = { cells : cell list } type t = { rows: row list } let string_of_cell cell = let loc_str = Location.string_of_location cell.location in loc_str ^ "\n" ^ cell.value let string_of_row row = let aux accum cell = accum ^ (string_of_cell cell) ^ "\t" in (Base.List.fold row.cells ~init:"" ~f:aux) ^ "\n" let string_of_table table = let str = Base.List.fold table.rows ~init:"" ~f:(fun accum row -> accum ^ (string_of_row row)) in "\nTable\n" ^ str let zip_header header_row row = let header = Base.List.map header_row.cells ~f:(fun head -> head.value) in let row = Base.List.map row.cells ~f:(fun cell -> cell.value) in let zipped_row = Base.List.zip header row in match zipped_row with | Base.List.Or_unequal_lengths.Ok x -> x | Base.List.Or_unequal_lengths.Unequal_lengths -> [] let update_col_map map row = match row with | (k, v) -> Base.Map.update map k ~f:(fun vl -> match vl with | Some x -> (v::x) | _ -> [v] ) let to_map_with_header dt = let empty_map = (Base.Map.empty (module Base.String)) in match dt.rows with | header::rest -> let key_value_zip = List.flatten (Base.List.map (Base.List.rev rest) ~f:(zip_header header)) in Base.List.fold key_value_zip ~init:empty_map ~f:update_col_map | [] -> empty_map let transform dt f = let cells = Base.List.map dt.rows ~f:(fun row -> Base.List.map row.cells ~f:(fun cell -> cell.value)) in Base.List.map cells ~f:f let transform_with_header dt f = match dt.rows with | header::rows -> let cells = Base.List.map rows ~f:(fun row -> Base.List.map row.cells ~f:(fun cell -> cell.value)) in let header_cells = Base.List.map header.cells ~f:(fun hc -> hc.value) in Base.List.map cells ~f:(f header_cells) | [] -> [] let zip_col cells = match cells with | head::rest -> (head.value, (Base.List.map rest ~f:(fun x -> x.value))) | [] -> ("", [""]) let transform_with_col_header dt f = let zipped_cols = Base.List.map dt.rows ~f:(fun row -> zip_col row.cells) in Base.List.map zipped_cols ~f:(fun (head, rest) -> f head rest) let to_map_with_col_header dt = let map = Base.Map.empty (module Base.String) in let zipped_cols = Base.List.map dt.rows ~f:(fun row -> zip_col row.cells) in Base.List.fold zipped_cols ~init:map ~f:(fun accum zip_col -> match zip_col with | (head, rest) -> Base.Map.update accum head ~f:(fun o -> match o with | Some x -> x | None -> rest) )
functoria_test.mli
open Functoria open Functoria.DSL val run : ?keys:Key.Set.elt list -> ?init:job impl list -> context -> 'a impl -> unit Action.t
build_info_data.mli
(* The implementation of this module is generated by dune when linking an executable *) val version : string option val statically_linked_libraries : (string * string option) list
(* The implementation of this module is generated by dune when linking an executable *)
dune
(executables (names apply revapply) (libraries) (modes js)) (rule (target apply.referencejs) (deps apply.bc.js) (action (with-stdout-to %{target} (run node ./apply.bc.js)))) (rule (alias runtest) (deps apply.reference apply.referencejs) (action (diff apply.reference apply.referencejs))) (rule (target revapply.referencejs) (deps revapply.bc.js) (action (with-stdout-to %{target} (run node ./revapply.bc.js)))) (rule (alias runtest) (deps revapply.reference revapply.referencejs) (action (diff revapply.reference revapply.referencejs)))
cmdline.ml
(* -------------------------------------------------------------------------- *) (* Typedefs *) (* Benchmark command related types *) type determinizer_option = Percentile of int | Mean type storage_kind = | Memory | Disk of { source : Tezos_crypto.Signature.public_key_hash; base_dir : string; header_json : string; } type benchmark_options = { options : Measure.options; save_file : string; storage : storage_kind; csv_export : string option; } type codegen_options = | No_transform | Fixed_point_transform of Fixed_point_transform.options (* Infer command related types *) type report = NoReport | ReportToStdout | ReportToFile of string type infer_parameters_options = { print_problem : bool; (* Dump the regression problem *) csv_export : string option; (* Export solution to csv *) plot : bool; (* Plot solution *) ridge_alpha : float; (* Regularisation parameter for ridge regression *) lasso_alpha : float; (* Regularisation parameter for lasso regression *) lasso_positive : bool; (* Constrain lasso solution to be positive *) override_files : string list option; (* Source of CSV files for overriding free variables *) report : report; (* LaTeX report parameters *) save_solution : string option; (* Serialise solution to given file *) dot_file : string option; (* Export dependency graph to graphviz format *) display : Display.options; } (* Outcome of command-line parsing. *) type command = | Benchmark of {bench_name : string; bench_opts : benchmark_options} | Infer of { model_name : string; workload_data : string; solver : string; infer_opts : infer_parameters_options; } | Codegen of { solution : string; model_name : string; codegen_options : codegen_options; } | Codegen_all of { solution : string; matching : string; codegen_options : codegen_options; } | Codegen_inferred of {solution : string; codegen_options : codegen_options} | No_command (* -------------------------------------------------------------------------- *) (* Encodings *) let storage_kind_encoding : storage_kind Data_encoding.t = let open Data_encoding in union [ case ~title:"memory" (Tag 0) unit (function Memory -> Some () | Disk _ -> None) (fun () -> Memory); case ~title:"disk" (Tag 1) (tup3 Tezos_crypto.Signature.Public_key_hash.encoding string string) (function | Memory -> None | Disk {source; base_dir; header_json} -> Some (source, base_dir, header_json)) (fun (source, base_dir, header_json) -> Disk {source; base_dir; header_json}); ] let benchmark_options_encoding = (* : benchmark_options Data_encoding.encoding in *) let open Data_encoding in def "benchmark_options_encoding" @@ conv (fun {options; save_file; storage; csv_export} -> (options, save_file, storage, csv_export)) (fun (options, save_file, storage, csv_export) -> {options; save_file; storage; csv_export}) (obj4 (req "options" Measure.options_encoding) (req "save_file" string) (req "storage" storage_kind_encoding) (opt "csv-export" string)) (* -------------------------------------------------------------------------- *) (* Global state set by command line parsing. Custom benchmark commands need not set this variable. *) let commandline_outcome_ref : command option ref = ref None
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2020 Nomadic Labs. <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dune
(include dune.inc) (rule (alias runtest) (deps (glob_files *.cnf) (glob_files *.smt2)) (action (with-stdout-to dune.inc (run %{exe:../../../generate_tests/generate_dune_tests.exe} . sat))) (mode promote))
test_stderr.ml
let%expect_test "stderr is collected" = Printf.eprintf "hello\n"; [%expect {| hello |}] ;;
compare.mli
module type COMPARABLE = sig type t val compare : t -> t -> int end module type S = sig type t val ( = ) : t -> t -> bool val ( <> ) : t -> t -> bool val ( < ) : t -> t -> bool val ( <= ) : t -> t -> bool val ( >= ) : t -> t -> bool val ( > ) : t -> t -> bool val compare : t -> t -> int val equal : t -> t -> bool val max : t -> t -> t val min : t -> t -> t end module Make (P : COMPARABLE) : S with type t := P.t module Char : S with type t = char module Bool : S with type t = bool module Int : S with type t = int module Int32 : S with type t = int32 module Uint32 : S with type t = int32 module Int64 : S with type t = int64 module Uint64 : S with type t = int64 module Float : S with type t = float module String : S with type t = string module Z : S with type t = Z.t module List (P : COMPARABLE) : S with type t = P.t list module Option (P : COMPARABLE) : S with type t = P.t option
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
Console.ml
(* used to be called common_extra.ml *) (* * How to use it ? ex in LFS: * Console.progress (w.prop_iprop#length) (fun k -> * w.prop_iprop#iter (fun (p, ip) -> * k (); * ... * )); * * todo: Unix.isatty, and the spinner trick of jason \ | / - *) let execute_and_show_progress ~show len f = let _count = ref 0 in (* kind of continuation passed to f *) let continue_pourcentage () = incr _count; ANSITerminal.set_cursor 1 (-1); ANSITerminal.printf [] "%d / %d" !_count len; flush stdout in let nothing () = () in (* ANSITerminal.printf [] "0 / %d" len; flush stdout; *) if !Common2._batch_mode || not show then f nothing else f continue_pourcentage; Common.pr2 "" let execute_and_show_progress2 ?(show = true) len f = let _count = ref 0 in (* kind of continuation passed to f *) let continue_pourcentage () = incr _count; ANSITerminal.set_cursor 1 (-1); ANSITerminal.eprintf [] "%d / %d" !_count len; flush stderr in let nothing () = () in (* ANSITerminal.printf [] "0 / %d" len; flush stdout; *) if !Common2._batch_mode || not show then f nothing else f continue_pourcentage let with_progress_list_metter ?show fk xs = let len = List.length xs in execute_and_show_progress2 ?show len (fun k -> fk k xs) let progress ?show fk xs = with_progress_list_metter ?show fk xs (* (* old code let ansi_terminal = ref true *) let (_execute_and_show_progress_func: (show:bool -> int (* length *) -> ((unit -> unit) -> 'a) -> 'a) ref) = ref (fun ~show a b -> failwith "no execute yet, have you included common_extra.cmo?" ) let execute_and_show_progress ?(show=true) len f = !_execute_and_show_progress_func ~show len f (* don't forget to call Common_extra.set_link () *) val _execute_and_show_progress_func : (show:bool -> int (* length *) -> ((unit -> unit) -> unit) -> unit) ref val execute_and_show_progress : ?show:bool -> int (* length *) -> ((unit -> unit) -> unit) -> unit let set_link () = Common2._execute_and_show_progress_func := execute_and_show_progress let _init_execute = set_link () *) (* now in common_extra.ml: * let execute_and_show_progress len f = ... *)
(* used to be called common_extra.ml *)
object_type.ml
type t = < hello: string (** some doc *) ; world: int ; more: int * float ; make: int ; it: string ; long: float [@default 42.] > [@@deriving make] type 'a u = < hello: string (** more doc *) ; world: int ; .. > as 'a type 'a v = < .. > as 'a type 'a w = (< .. > as 'a) -> 'a type z = < > t let x : unit -> < bouh: string ; .. > = fun () -> assert false let lookup_obj : < .. > -> (< .. > as 'a) list -> 'a = fun _ -> assert false let _ = [%ext: < a ; b > ] let _ = (x [@att: < a ; b > ]) type t = [`A of < a ; b > ] type t = private [> ] type t = < a: < > > type t = {a: < >; b: int} type t = {b: int; a: < > } class type c = object inherit [ < a: 'a ; b: 'b > ] a inherit [a, b, c] a end class c = object inherit [ < a: 'a ; b: 'b > ] a inherit [a, b, c] a end type 'a u = [< `A | `B of < > > `B ] as 'a (** about a *) class type a = object (** about a *) method a : int (** floatting *) (** about b *) method b : int end (** floatting *) and b = object end (** about b *) class type i = object (* test *) inherit oo end class i = object (* test *) inherit oo end
choice_group.mli
type t (** A scope for decision, allows to wait before adding decisions for terms. Used for propagating into a term without decisions *) val create : _ Egraph.t -> t val activate : _ Egraph.t -> t -> unit val add_to_group : _ Egraph.t -> Nodes.ThTerm.t -> t -> unit val register_decision : _ Egraph.t -> Nodes.ThTerm.t -> Egraph.choice -> unit (** If no group have been specified at that point, the default is for the decision to be directly activated *) val make_choosable : _ Egraph.t -> Nodes.ThTerm.t -> unit (** Register the decisions attached to the node, and will not delay them in the future *) val add_to_group_of : _ Egraph.t -> Nodes.ThTerm.t -> Nodes.ThTerm.t -> unit
(*************************************************************************) (* This file is part of Colibri2. *) (* *) (* Copyright (C) 2014-2021 *) (* CEA (Commissariat à l'énergie atomique et aux énergies *) (* alternatives) *) (* *) (* you can redistribute it and/or modify it under the terms of the GNU *) (* Lesser General Public License as published by the Free Software *) (* Foundation, version 2.1. *) (* *) (* It is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Lesser General Public License for more details. *) (* *) (* See the GNU Lesser General Public License version 2.1 *) (* for more details (enclosed in the file licenses/LGPLv2.1). *) (*************************************************************************)
raw_context.mli
(** {1 Errors} ****************************************************************) type error += Too_many_internal_operations (* `Permanent *) (** An internal storage error that should not happen *) type storage_error = | Incompatible_protocol_version of string | Missing_key of string list * [`Get | `Set | `Del | `Copy] | Existing_key of string list | Corrupted_data of string list type error += Storage_error of storage_error type error += Failed_to_parse_parameter of MBytes.t type error += Failed_to_decode_parameter of Data_encoding.json * string val storage_error: storage_error -> 'a tzresult Lwt.t (** {1 Abstract Context} **************************************************) (** Abstract view of the context. Includes a handle to the functional key-value database ({!Context.t}) along with some in-memory values (gas, etc.). *) type t type context = t type root_context = t (** Retrieves the state of the database and gives its abstract view. It also returns wether this is the first block validated with this version of the protocol. *) val prepare: level: Int32.t -> timestamp: Time.t -> fitness: Fitness.t -> Context.t -> context tzresult Lwt.t type 'a previous_protocol = | Genesis of 'a | Alpha_002 val prepare_first_block: level:int32 -> timestamp:Time.t -> fitness:Fitness.t -> Context.t -> (Parameters_repr.t previous_protocol * context) tzresult Lwt.t val activate: context -> Protocol_hash.t -> t Lwt.t val fork_test_chain: context -> Protocol_hash.t -> Time.t -> t Lwt.t val register_resolvers: 'a Base58.encoding -> (context -> string -> 'a list Lwt.t) -> unit (** Returns the state of the database resulting of operations on its abstract view *) val recover: context -> Context.t val current_level: context -> Level_repr.t val current_timestamp: context -> Time.t val current_fitness: context -> Int64.t val set_current_fitness: context -> Int64.t -> t val constants: context -> Constants_repr.parametric val patch_constants: context -> (Constants_repr.parametric -> Constants_repr.parametric) -> context Lwt.t val first_level: context -> Raw_level_repr.t (** Increment the current block fee stash that will be credited to baker's frozen_fees account at finalize_application *) val add_fees: context -> Tez_repr.t -> context tzresult Lwt.t (** Increment the current block reward stash that will be credited to baker's frozen_fees account at finalize_application *) val add_rewards: context -> Tez_repr.t -> context tzresult Lwt.t (** Increment the current block deposit stash for a specific delegate. All the delegates' frozen_deposit accounts are credited at finalize_application *) val add_deposit: context -> Signature.Public_key_hash.t -> Tez_repr.t -> context tzresult Lwt.t val get_fees: context -> Tez_repr.t val get_rewards: context -> Tez_repr.t val get_deposits: context -> Tez_repr.t Signature.Public_key_hash.Map.t type error += Gas_limit_too_high (* `Permanent *) val check_gas_limit: t -> Z.t -> unit tzresult val set_gas_limit: t -> Z.t -> t val set_gas_unlimited: t -> t val gas_level: t -> Gas_limit_repr.t val gas_consumed: since: t -> until: t -> Z.t val block_gas_level: t -> Z.t val init_storage_space_to_pay: t -> t val update_storage_space_to_pay: t -> Z.t -> t val update_allocated_contracts_count: t -> t val clear_storage_space_to_pay: t -> t * Z.t * int type error += Undefined_operation_nonce (* `Permanent *) val init_origination_nonce: t -> Operation_hash.t -> t val origination_nonce: t -> Contract_repr.origination_nonce tzresult val increment_origination_nonce: t -> (t * Contract_repr.origination_nonce) tzresult val unset_origination_nonce: t -> t (** {1 Generic accessors} *************************************************) type key = string list type value = MBytes.t (** All context manipulation functions. This signature is included as-is for direct context accesses, and used in {!Storage_functors} to provide restricted views to the context. *) module type T = sig type t type context = t (** Tells if the key is already defined as a value. *) val mem: context -> key -> bool Lwt.t (** Tells if the key is already defined as a directory. *) val dir_mem: context -> key -> bool Lwt.t (** Retrieve the value from the storage bucket ; returns a {!Storage_error Missing_key} if the key is not set. *) val get: context -> key -> value tzresult Lwt.t (** Retrieves the value from the storage bucket ; returns [None] if the data is not initialized. *) val get_option: context -> key -> value option Lwt.t (** Allocates the storage bucket and initializes it ; returns a {!Storage_error Existing_key} if the bucket exists. *) val init: context -> key -> value -> context tzresult Lwt.t (** Updates the content of the bucket ; returns a {!Storage_error Missing_key} if the value does not exists. *) val set: context -> key -> value -> context tzresult Lwt.t (** Allocates the data and initializes it with a value ; just updates it if the bucket exists. *) val init_set: context -> key -> value -> context Lwt.t (** When the value is [Some v], allocates the data and initializes it with [v] ; just updates it if the bucket exists. When the valus is [None], delete the storage bucket when the value ; does nothing if the bucket does not exists. *) val set_option: context -> key -> value option -> context Lwt.t (** Delete the storage bucket ; returns a {!Storage_error Missing_key} if the bucket does not exists. *) val delete: context -> key -> context tzresult Lwt.t (** Removes the storage bucket and its contents ; does nothing if the bucket does not exists. *) val remove: context -> key -> context Lwt.t (** Recursively removes all the storage buckets and contents ; does nothing if no bucket exists. *) val remove_rec: context -> key -> context Lwt.t val copy: context -> from:key -> to_:key -> context tzresult Lwt.t (** Iterator on all the items of a given directory. *) val fold: context -> key -> init:'a -> f:([ `Key of key | `Dir of key ] -> 'a -> 'a Lwt.t) -> 'a Lwt.t (** Recursively list all subkeys of a given key. *) val keys: context -> key -> key list Lwt.t (** Recursive iterator on all the subkeys of a given key. *) val fold_keys: context -> key -> init:'a -> f:(key -> 'a -> 'a Lwt.t) -> 'a Lwt.t (** Internally used in {!Storage_functors} to escape from a view. *) val project: context -> root_context (** Internally used in {!Storage_functors} to retrieve a full key from partial key relative a view. *) val absolute_key: context -> key -> key (** Internally used in {!Storage_functors} to consume gas from within a view. *) val consume_gas: context -> Gas_limit_repr.cost -> context tzresult (** Check if consume_gas will fail *) val check_enough_gas: context -> Gas_limit_repr.cost -> unit tzresult val description: context Storage_description.t end include T with type t := t and type context := context (** Initialize the local nonce used for preventing a script to duplicate an internal operation to replay it. *) val reset_internal_nonce: context -> context (** Increments the internal operation nonce. *) val fresh_internal_nonce: context -> (context * int) tzresult (** Mark an internal operation nonce as taken. *) val record_internal_nonce: context -> int -> context (** Check is the internal operation nonce has been taken. *) val internal_nonce_already_recorded: context -> int -> bool (** Returns a map where to each endorser's pkh is associated the list of its endorsing slots (in decreasing order) for a given level. *) val allowed_endorsements: context -> (Signature.Public_key.t * int list * bool) Signature.Public_key_hash.Map.t (** Initializes the map of allowed endorsements, this function must only be called once. *) val init_endorsements: context -> (Signature.Public_key.t * int list * bool) Signature.Public_key_hash.Map.t -> context (** Marks an endorsment in the map as used. *) val record_endorsement: context -> Signature.Public_key_hash.t -> context
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
gen_tests.ml
let backends = try let be = Sys.getenv "MCCS_BACKENDS" in let bel = String.length be in let rec aux i = if i >= bel then [] else try let j = String.index_from be i ' ' in String.sub be i (j - i) :: aux (j+1) with Not_found -> [String.sub be i (String.length be - i)] in List.filter (( <> ) "") (aux 0) with Not_found -> ["GLPK"] let test name ?solver ?ignore ?(ref="") () = let solver = match solver with | None -> "" | Some solver -> Printf.sprintf " \"%s\"" solver in let ignore_eol = (* This means that an LF check-out on Windows won't cause a problem with CRLF output from the test program. Assume GNU diff present on Windows. *) if Sys.win32 || Sys.cygwin then "--ignore-trailing-space " else "" in let () = match ignore with | None -> Printf.printf " (rule (with-stdout-to test-%s.result (run %%{exe:mccs_test.exe} %%{dep:test.cudf}%s))) (rule (with-stdout-to test.%s.reference (cat %%{dep:test%s.output})))" name solver name ref | Some ignore -> Printf.printf " (rule (with-stdout-to test-%s.raw (run %%{exe:mccs_test.exe} %%{dep:test.cudf}%s))) (rule (with-stdout-to test-%s.result (system \"grep -v %s %%{dep:test-%s.raw}\"))) (rule (with-stdout-to test.%s.reference (progn (cat %%{dep:test.%s}) (cat %%{dep:test%s.output}))))" name solver name ignore name name name ref in Printf.printf " (alias (name runtest) (action (system \"diff %s%%{dep:test-%s.result} %%{dep:test.%s.reference}\")))\n" ignore_eol name name let () = print_endline "; This file is generated using `MCCS_BACKENDS=jbuilder build @settests --auto-promote`"; let () = try let lp_solver = Sys.getenv "MCCS_LPSOLVER" |> Printf.sprintf "%S" in let solver = "lp+" ^ String.sub lp_solver 1 (String.length lp_solver - 2) in test "lp" ~solver (); with Not_found -> () in if List.mem "GLPK" backends then test "glpk" (); if List.mem "SYMPHONY" backends then test "symphony" ~solver:"coin/symphony" (); (* These backends aren't stable enough for CI yet... if List.mem "CLP" backends then test "clp" ~solver:"coin/clp" ~ignore:"Clp0032I" ~ref:".cl" (); if List.mem "CBC" backends then test "cbc" ~solver:"coin/cbc" ~ignore:"'Coin3007W\\|Clp0032I'" ~ref:".cl" () *)
period_repr.ml
type t = Int64.t type period = t include (Compare.Int64 : Compare.S with type t := t) let encoding = Data_encoding.int64 let rpc_arg = RPC_arg.int64 let pp ppf v = Format.fprintf ppf "%Ld" v type error += (* `Permanent *) Malformed_period | Invalid_arg let () = let open Data_encoding in (* Malformed period *) register_error_kind `Permanent ~id:"malformed_period" ~title:"Malformed period" ~description:"Period is negative." ~pp:(fun ppf () -> Format.fprintf ppf "Malformed period") empty (function Malformed_period -> Some () | _ -> None) (fun () -> Malformed_period) ; (* Invalid arg *) register_error_kind `Permanent ~id:"invalid_arg" ~title:"Invalid arg" ~description:"Negative multiple of periods are not allowed." ~pp:(fun ppf () -> Format.fprintf ppf "Invalid arg") empty (function Invalid_arg -> Some () | _ -> None) (fun () -> Invalid_arg) let of_seconds t = if Compare.Int64.(t >= 0L) then ok t else error Malformed_period let to_seconds t = t let of_seconds_exn t = match of_seconds t with | Ok t -> t | _ -> invalid_arg "Period.of_seconds_exn" let mult i p = (* TODO check overflow *) if Compare.Int32.(i < 0l) then error Invalid_arg else ok (Int64.mul (Int64.of_int32 i) p) let zero = of_seconds_exn 0L let one_second = of_seconds_exn 1L let one_minute = of_seconds_exn 60L let one_hour = of_seconds_exn 3600L
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
test_preendorsement_functor.ml
(** Testing ------- Component: Protocol (preendorsement) in Full_construction & Application modes Subject: preendorsement inclusion in a block *) open Protocol open Alpha_context (****************************************************************) (* Utility functions *) (****************************************************************) module type MODE = sig val name : string val baking_mode : Block.baking_mode end module BakeWithMode (Mode : MODE) : sig val tests : unit Alcotest_lwt.test_case trace end = struct let name = Mode.name let bake = Block.bake ~baking_mode:Mode.baking_mode let aux_simple_preendorsement_inclusion ?(payload_round = Some Round.zero) ?(locked_round = Some Round.zero) ?(block_round = 1) ?(preend_round = Round.zero) ?(preend_branch = fun _predpred pred _curr -> pred) ?(preendorsed_block = fun _predpred _pred curr -> curr) ?(mk_ops = fun op -> [op]) ?(get_delegate_and_slot = fun _predpred _pred _curr -> return (None, None)) ?(post_process = Ok (fun _ -> return_unit)) ~loc () = Context.init_n ~consensus_threshold:1 5 () >>=? fun (genesis, _contracts) -> bake genesis >>=? fun b1 -> Op.endorsement b1 >>=? fun endo -> bake b1 ~operations:[endo] >>=? fun b2 -> let pred_branch = Some (Context.branch (Context.B (preend_branch genesis b1 b2))) in let endorsed_block = preendorsed_block genesis b1 b2 in get_delegate_and_slot genesis b1 b2 >>=? fun (delegate, slot) -> Op.preendorsement ?delegate ?slot ?pred_branch ~round:preend_round endorsed_block >>=? fun p -> let operations = endo :: (mk_ops @@ p) in bake ~payload_round ~locked_round ~policy:(By_round block_round) ~operations b1 >>= fun res -> match (res, post_process) with | Ok ok, Ok success_fun -> success_fun ok | Error _, Error error -> Assert.proto_error ~loc res error | Ok _, Error _ -> Assert.error ~loc res (fun _ -> false) | Error _, Ok _ -> Assert.error ~loc res (fun _ -> false) (****************************************************************) (* Tests *) (****************************************************************) (** OK: bake a block "_b2_1" at round 1, containing a PQC and a locked round of round 0 *) let include_preendorsement_in_block_with_locked_round () = aux_simple_preendorsement_inclusion ~loc:__LOC__ () (** KO: bake a block "_b2_1" at round 1, containing a PQC and a locked round of round 0. But the preendorsement is on a bad branch *) let test_preendorsement_with_bad_branch () = aux_simple_preendorsement_inclusion (* preendorsement should be on branch _pred to be valid *) ~preend_branch:(fun predpred _pred _curr -> predpred) ~loc:__LOC__ ~post_process: (Error (function | Validate_errors.Consensus.Wrong_consensus_operation_branch {kind; _} when kind = Validate_errors.Consensus.Preendorsement -> true | _ -> false)) () (** KO: The same preendorsement injected twice in the PQC *) let duplicate_preendorsement_in_pqc () = aux_simple_preendorsement_inclusion (* inject the op twice *) ~mk_ops:(fun op -> [op; op]) ~loc:__LOC__ ~post_process: (Error (function | Validate_errors.Consensus.Conflicting_consensus_operation {kind; _} when kind = Validate_errors.Consensus.Preendorsement -> true | _ -> false)) () (** KO: locked round declared in the block is not smaller than that block's round *) let locked_round_not_before_block_round () = aux_simple_preendorsement_inclusion (* default locked_round = 0 < block_round = 1 for this aux function *) ~block_round:0 ~loc:__LOC__ ~post_process: (Error (function | Fitness_repr.Locked_round_not_less_than_round _ -> true | _ -> false)) () (** KO: because we announce a locked_round, but we don't provide the preendorsement quorum certificate in the operations *) let with_locked_round_in_block_but_without_any_pqc () = (* This test only fails in Application mode. If full_construction mode, the given locked_round is not used / checked. Moreover, the test succeed in this case. *) let post_process = if Mode.baking_mode == Block.Application then Error (function Fitness_repr.Wrong_fitness -> true | _ -> false) else Ok (fun _ -> return_unit) in aux_simple_preendorsement_inclusion (* with declared locked_round but without a PQC in the ops *) ~mk_ops:(fun _p -> []) ~loc:__LOC__ ~post_process () (** KO: The preendorsed block is the pred one, not the current one *) let preendorsement_has_wrong_level () = aux_simple_preendorsement_inclusion (* preendorsement should be for _curr block to be valid *) ~preendorsed_block:(fun _predpred pred _curr -> pred) ~loc:__LOC__ ~post_process: (Error (function | Validate_errors.Consensus.Consensus_operation_for_old_level {kind; _} when kind = Validate_errors.Consensus.Preendorsement -> true | _ -> false)) () (** OK: explicit the correct endorser and preendorsing slot in the test *) let preendorsement_in_block_with_good_slot () = aux_simple_preendorsement_inclusion ~get_delegate_and_slot:(fun _predpred _pred curr -> let module V = Plugin.RPC.Validators in Context.get_endorsers (B curr) >>=? function | {V.delegate; slots = s :: _; _} :: _ -> return (Some delegate, Some s) | _ -> assert false (* there is at least one endorser with a slot *)) ~loc:__LOC__ () (** KO: the used slot for injecting the endorsement is not the canonical one *) let preendorsement_in_block_with_wrong_slot () = aux_simple_preendorsement_inclusion ~get_delegate_and_slot:(fun _predpred _pred curr -> let module V = Plugin.RPC.Validators in Context.get_endorsers (B curr) >>=? function | {V.delegate; V.slots = _ :: non_canonical_slot :: _; _} :: _ -> return (Some delegate, Some non_canonical_slot) | _ -> assert false (* there is at least one endorser with a slot *)) ~loc:__LOC__ ~post_process: (Error (function | Validate_errors.Consensus.Wrong_slot_used_for_consensus_operation {kind; _} when kind = Validate_errors.Consensus.Preendorsement -> true | _ -> false)) () (** KO: the delegate tries to injects with a canonical slot of another delegate *) let preendorsement_in_block_with_wrong_signature () = aux_simple_preendorsement_inclusion ~get_delegate_and_slot:(fun _predpred _pred curr -> let module V = Plugin.RPC.Validators in Context.get_endorsers (B curr) >>=? function | {V.delegate; _} :: {V.slots = s :: _; _} :: _ -> (* the canonical slot s is not owned by the delegate "delegate" !*) return (Some delegate, Some s) | _ -> assert false (* there is at least one endorser with a slot *)) ~loc:__LOC__ ~post_process: (Error (function Operation_repr.Invalid_signature -> true | _ -> false)) () (** KO: cannot have a locked_round higher than attached PQC's round *) let locked_round_is_higher_than_pqc_round () = (* This test only fails in Application mode. If full_construction mode, the given locked_round is not used / checked. Moreover, the test succeed in this case. *) let post_process = if Mode.baking_mode == Application then Error (function | Validate_errors.Consensus.Consensus_operation_for_old_round {kind; _} when kind = Validate_errors.Consensus.Preendorsement -> true | _ -> false) else Ok (fun _ -> return_unit) in aux_simple_preendorsement_inclusion ~preend_round:Round.zero ~locked_round:(Some (Round.succ Round.zero)) ~block_round:2 ~loc:__LOC__ ~post_process () let my_tztest title test = Tztest.tztest (Format.sprintf "%s: %s" name title) test let tests = [ my_tztest "ok: include_preendorsement_in_block_with_locked_round" `Quick include_preendorsement_in_block_with_locked_round; my_tztest "ko: test_preendorsement_with_bad_branch" `Quick test_preendorsement_with_bad_branch; my_tztest "ko: duplicate_preendorsement_in_pqc" `Quick duplicate_preendorsement_in_pqc; my_tztest "ko:locked_round_not_before_block_round" `Quick locked_round_not_before_block_round; my_tztest "ko: with_locked_round_in_block_but_without_any_pqc" `Quick with_locked_round_in_block_but_without_any_pqc; my_tztest "ko: preendorsement_has_wrong_level" `Quick preendorsement_has_wrong_level; my_tztest "ok: preendorsement_in_block_with_good_slot" `Quick preendorsement_in_block_with_good_slot; my_tztest "ko: preendorsement_in_block_with_wrong_slot" `Quick preendorsement_in_block_with_wrong_slot; my_tztest "ko: preendorsement_in_block_with_wrong_signature" `Quick preendorsement_in_block_with_wrong_signature; my_tztest "ko: locked_round_is_higher_than_pqc_round" `Quick locked_round_is_higher_than_pqc_round; ] end
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
container_intf.ml
(** Provides generic signatures for container data structures. These signatures include functions ([iter], [fold], [exists], [for_all], ...) that you would expect to find in any container. Used by including [Container.S0] or [Container.S1] in the signature for every container-like data structure ([Array], [List], [String], ...) to ensure a consistent interface. *) open! Import module Export = struct (** [Continue_or_stop.t] is used by the [f] argument to [fold_until] in order to indicate whether folding should continue, or stop early. @canonical Base.Container.Continue_or_stop *) module Continue_or_stop = struct type ('a, 'b) t = | Continue of 'a | Stop of 'b end end include Export (** @canonical Base.Container.Summable *) module type Summable = sig type t (** The result of summing no values. *) val zero : t (** An operation that combines two [t]'s and handles [zero + x] by just returning [x], as well as in the symmetric case. *) val ( + ) : t -> t -> t end (** Signature for monomorphic container - a container for a specific element type, e.g., string, which is a container of characters ([type elt = char]) and never of anything else. *) module type S0 = sig type t type elt (** Checks whether the provided element is there, using equality on [elt]s. *) val mem : t -> elt -> bool val length : t -> int val is_empty : t -> bool (** [iter] must allow exceptions raised in [f] to escape, terminating the iteration cleanly. The same holds for all functions below taking an [f]. *) val iter : t -> f:(elt -> unit) -> unit (** [fold t ~init ~f] returns [f (... f (f (f init e1) e2) e3 ...) en], where [e1..en] are the elements of [t]. *) val fold : t -> init:'accum -> f:('accum -> elt -> 'accum) -> 'accum (** [fold_result t ~init ~f] is a short-circuiting version of [fold] that runs in the [Result] monad. If [f] returns an [Error _], that value is returned without any additional invocations of [f]. *) val fold_result : t -> init:'accum -> f:('accum -> elt -> ('accum, 'e) Result.t) -> ('accum, 'e) Result.t (** [fold_until t ~init ~f ~finish] is a short-circuiting version of [fold]. If [f] returns [Stop _] the computation ceases and results in that value. If [f] returns [Continue _], the fold will proceed. If [f] never returns [Stop _], the final result is computed by [finish]. Example: {[ type maybe_negative = | Found_negative of int | All_nonnegative of { sum : int } (** [first_neg_or_sum list] returns the first negative number in [list], if any, otherwise returns the sum of the list. *) let first_neg_or_sum = List.fold_until ~init:0 ~f:(fun sum x -> if x < 0 then Stop (Found_negative x) else Continue (sum + x)) ~finish:(fun sum -> All_nonnegative { sum }) ;; let x = first_neg_or_sum [1; 2; 3; 4; 5] val x : maybe_negative = All_nonnegative {sum = 15} let y = first_neg_or_sum [1; 2; -3; 4; 5] val y : maybe_negative = Found_negative -3 ]} *) val fold_until : t -> init:'accum -> f:('accum -> elt -> ('accum, 'final) Continue_or_stop.t) -> finish:('accum -> 'final) -> 'final (** Returns [true] if and only if there exists an element for which the provided function evaluates to [true]. This is a short-circuiting operation. *) val exists : t -> f:(elt -> bool) -> bool (** Returns [true] if and only if the provided function evaluates to [true] for all elements. This is a short-circuiting operation. *) val for_all : t -> f:(elt -> bool) -> bool (** Returns the number of elements for which the provided function evaluates to true. *) val count : t -> f:(elt -> bool) -> int (** Returns the sum of [f i] for all [i] in the container. *) val sum : (module Summable with type t = 'sum) -> t -> f:(elt -> 'sum) -> 'sum (** Returns as an [option] the first element for which [f] evaluates to true. *) val find : t -> f:(elt -> bool) -> elt option (** Returns the first evaluation of [f] that returns [Some], and returns [None] if there is no such element. *) val find_map : t -> f:(elt -> 'a option) -> 'a option val to_list : t -> elt list val to_array : t -> elt array (** Returns a min (resp. max) element from the collection using the provided [compare] function. In case of a tie, the first element encountered while traversing the collection is returned. The implementation uses [fold] so it has the same complexity as [fold]. Returns [None] iff the collection is empty. *) val min_elt : t -> compare:(elt -> elt -> int) -> elt option val max_elt : t -> compare:(elt -> elt -> int) -> elt option end module type S0_phantom = sig type elt type 'a t (** Checks whether the provided element is there, using equality on [elt]s. *) val mem : _ t -> elt -> bool val length : _ t -> int val is_empty : _ t -> bool val iter : _ t -> f:(elt -> unit) -> unit (** [fold t ~init ~f] returns [f (... f (f (f init e1) e2) e3 ...) en], where [e1..en] are the elements of [t]. *) val fold : _ t -> init:'accum -> f:('accum -> elt -> 'accum) -> 'accum (** [fold_result t ~init ~f] is a short-circuiting version of [fold] that runs in the [Result] monad. If [f] returns an [Error _], that value is returned without any additional invocations of [f]. *) val fold_result : _ t -> init:'accum -> f:('accum -> elt -> ('accum, 'e) Result.t) -> ('accum, 'e) Result.t (** [fold_until t ~init ~f ~finish] is a short-circuiting version of [fold]. If [f] returns [Stop _] the computation ceases and results in that value. If [f] returns [Continue _], the fold will proceed. If [f] never returns [Stop _], the final result is computed by [finish]. Example: {[ type maybe_negative = | Found_negative of int | All_nonnegative of { sum : int } (** [first_neg_or_sum list] returns the first negative number in [list], if any, otherwise returns the sum of the list. *) let first_neg_or_sum = List.fold_until ~init:0 ~f:(fun sum x -> if x < 0 then Stop (Found_negative x) else Continue (sum + x)) ~finish:(fun sum -> All_nonnegative { sum }) ;; let x = first_neg_or_sum [1; 2; 3; 4; 5] val x : maybe_negative = All_nonnegative {sum = 15} let y = first_neg_or_sum [1; 2; -3; 4; 5] val y : maybe_negative = Found_negative -3 ]} *) val fold_until : _ t -> init:'accum -> f:('accum -> elt -> ('accum, 'final) Continue_or_stop.t) -> finish:('accum -> 'final) -> 'final (** Returns [true] if and only if there exists an element for which the provided function evaluates to [true]. This is a short-circuiting operation. *) val exists : _ t -> f:(elt -> bool) -> bool (** Returns [true] if and only if the provided function evaluates to [true] for all elements. This is a short-circuiting operation. *) val for_all : _ t -> f:(elt -> bool) -> bool (** Returns the number of elements for which the provided function evaluates to true. *) val count : _ t -> f:(elt -> bool) -> int (** Returns the sum of [f i] for all [i] in the container. The order in which the elements will be summed is unspecified. *) val sum : (module Summable with type t = 'sum) -> _ t -> f:(elt -> 'sum) -> 'sum (** Returns as an [option] the first element for which [f] evaluates to true. *) val find : _ t -> f:(elt -> bool) -> elt option (** Returns the first evaluation of [f] that returns [Some], and returns [None] if there is no such element. *) val find_map : _ t -> f:(elt -> 'a option) -> 'a option val to_list : _ t -> elt list val to_array : _ t -> elt array (** Returns a min (resp max) element from the collection using the provided [compare] function, or [None] if the collection is empty. In case of a tie, the first element encountered while traversing the collection is returned. *) val min_elt : _ t -> compare:(elt -> elt -> int) -> elt option val max_elt : _ t -> compare:(elt -> elt -> int) -> elt option end (** Signature for polymorphic container, e.g., ['a list] or ['a array]. *) module type S1 = sig type 'a t (** Checks whether the provided element is there, using [equal]. *) val mem : 'a t -> 'a -> equal:('a -> 'a -> bool) -> bool val length : 'a t -> int val is_empty : 'a t -> bool val iter : 'a t -> f:('a -> unit) -> unit (** [fold t ~init ~f] returns [f (... f (f (f init e1) e2) e3 ...) en], where [e1..en] are the elements of [t] *) val fold : 'a t -> init:'accum -> f:('accum -> 'a -> 'accum) -> 'accum (** [fold_result t ~init ~f] is a short-circuiting version of [fold] that runs in the [Result] monad. If [f] returns an [Error _], that value is returned without any additional invocations of [f]. *) val fold_result : 'a t -> init:'accum -> f:('accum -> 'a -> ('accum, 'e) Result.t) -> ('accum, 'e) Result.t (** [fold_until t ~init ~f ~finish] is a short-circuiting version of [fold]. If [f] returns [Stop _] the computation ceases and results in that value. If [f] returns [Continue _], the fold will proceed. If [f] never returns [Stop _], the final result is computed by [finish]. Example: {[ type maybe_negative = | Found_negative of int | All_nonnegative of { sum : int } (** [first_neg_or_sum list] returns the first negative number in [list], if any, otherwise returns the sum of the list. *) let first_neg_or_sum = List.fold_until ~init:0 ~f:(fun sum x -> if x < 0 then Stop (Found_negative x) else Continue (sum + x)) ~finish:(fun sum -> All_nonnegative { sum }) ;; let x = first_neg_or_sum [1; 2; 3; 4; 5] val x : maybe_negative = All_nonnegative {sum = 15} let y = first_neg_or_sum [1; 2; -3; 4; 5] val y : maybe_negative = Found_negative -3 ]} *) val fold_until : 'a t -> init:'accum -> f:('accum -> 'a -> ('accum, 'final) Continue_or_stop.t) -> finish:('accum -> 'final) -> 'final (** Returns [true] if and only if there exists an element for which the provided function evaluates to [true]. This is a short-circuiting operation. *) val exists : 'a t -> f:('a -> bool) -> bool (** Returns [true] if and only if the provided function evaluates to [true] for all elements. This is a short-circuiting operation. *) val for_all : 'a t -> f:('a -> bool) -> bool (** Returns the number of elements for which the provided function evaluates to true. *) val count : 'a t -> f:('a -> bool) -> int (** Returns the sum of [f i] for all [i] in the container. *) val sum : (module Summable with type t = 'sum) -> 'a t -> f:('a -> 'sum) -> 'sum (** Returns as an [option] the first element for which [f] evaluates to true. *) val find : 'a t -> f:('a -> bool) -> 'a option (** Returns the first evaluation of [f] that returns [Some], and returns [None] if there is no such element. *) val find_map : 'a t -> f:('a -> 'b option) -> 'b option val to_list : 'a t -> 'a list val to_array : 'a t -> 'a array (** Returns a minimum (resp maximum) element from the collection using the provided [compare] function, or [None] if the collection is empty. In case of a tie, the first element encountered while traversing the collection is returned. The implementation uses [fold] so it has the same complexity as [fold]. *) val min_elt : 'a t -> compare:('a -> 'a -> int) -> 'a option val max_elt : 'a t -> compare:('a -> 'a -> int) -> 'a option end module type S1_phantom_invariant = sig type ('a, 'phantom) t (** Checks whether the provided element is there, using [equal]. *) val mem : ('a, _) t -> 'a -> equal:('a -> 'a -> bool) -> bool val length : (_, _) t -> int val is_empty : (_, _) t -> bool val iter : ('a, _) t -> f:('a -> unit) -> unit (** [fold t ~init ~f] returns [f (... f (f (f init e1) e2) e3 ...) en], where [e1..en] are the elements of [t]. *) val fold : ('a, _) t -> init:'accum -> f:('accum -> 'a -> 'accum) -> 'accum (** [fold_result t ~init ~f] is a short-circuiting version of [fold] that runs in the [Result] monad. If [f] returns an [Error _], that value is returned without any additional invocations of [f]. *) val fold_result : ('a, _) t -> init:'accum -> f:('accum -> 'a -> ('accum, 'e) Result.t) -> ('accum, 'e) Result.t (** [fold_until t ~init ~f ~finish] is a short-circuiting version of [fold]. If [f] returns [Stop _] the computation ceases and results in that value. If [f] returns [Continue _], the fold will proceed. If [f] never returns [Stop _], the final result is computed by [finish]. Example: {[ type maybe_negative = | Found_negative of int | All_nonnegative of { sum : int } (** [first_neg_or_sum list] returns the first negative number in [list], if any, otherwise returns the sum of the list. *) let first_neg_or_sum = List.fold_until ~init:0 ~f:(fun sum x -> if x < 0 then Stop (Found_negative x) else Continue (sum + x)) ~finish:(fun sum -> All_nonnegative { sum }) ;; let x = first_neg_or_sum [1; 2; 3; 4; 5] val x : maybe_negative = All_nonnegative {sum = 15} let y = first_neg_or_sum [1; 2; -3; 4; 5] val y : maybe_negative = Found_negative -3 ]} *) val fold_until : ('a, _) t -> init:'accum -> f:('accum -> 'a -> ('accum, 'final) Continue_or_stop.t) -> finish:('accum -> 'final) -> 'final (** Returns [true] if and only if there exists an element for which the provided function evaluates to [true]. This is a short-circuiting operation. *) val exists : ('a, _) t -> f:('a -> bool) -> bool (** Returns [true] if and only if the provided function evaluates to [true] for all elements. This is a short-circuiting operation. *) val for_all : ('a, _) t -> f:('a -> bool) -> bool (** Returns the number of elements for which the provided function evaluates to true. *) val count : ('a, _) t -> f:('a -> bool) -> int (** Returns the sum of [f i] for all [i] in the container. *) val sum : (module Summable with type t = 'sum) -> ('a, _) t -> f:('a -> 'sum) -> 'sum (** Returns as an [option] the first element for which [f] evaluates to true. *) val find : ('a, _) t -> f:('a -> bool) -> 'a option (** Returns the first evaluation of [f] that returns [Some], and returns [None] if there is no such element. *) val find_map : ('a, _) t -> f:('a -> 'b option) -> 'b option val to_list : ('a, _) t -> 'a list val to_array : ('a, _) t -> 'a array (** Returns a min (resp max) element from the collection using the provided [compare] function. In case of a tie, the first element encountered while traversing the collection is returned. The implementation uses [fold] so it has the same complexity as [fold]. Returns [None] iff the collection is empty. *) val min_elt : ('a, _) t -> compare:('a -> 'a -> int) -> 'a option val max_elt : ('a, _) t -> compare:('a -> 'a -> int) -> 'a option end module type S1_phantom = sig type ('a, +'phantom) t include S1_phantom_invariant with type ('a, 'phantom) t := ('a, 'phantom) t end module type Generic = sig type 'a t type 'a elt val length : _ t -> int val is_empty : _ t -> bool val iter : 'a t -> f:('a elt -> unit) -> unit val fold : 'a t -> init:'accum -> f:('accum -> 'a elt -> 'accum) -> 'accum val fold_result : 'a t -> init:'accum -> f:('accum -> 'a elt -> ('accum, 'e) Result.t) -> ('accum, 'e) Result.t val fold_until : 'a t -> init:'accum -> f:('accum -> 'a elt -> ('accum, 'final) Continue_or_stop.t) -> finish:('accum -> 'final) -> 'final val exists : 'a t -> f:('a elt -> bool) -> bool val for_all : 'a t -> f:('a elt -> bool) -> bool val count : 'a t -> f:('a elt -> bool) -> int val sum : (module Summable with type t = 'sum) -> 'a t -> f:('a elt -> 'sum) -> 'sum val find : 'a t -> f:('a elt -> bool) -> 'a elt option val find_map : 'a t -> f:('a elt -> 'b option) -> 'b option val to_list : 'a t -> 'a elt list val to_array : 'a t -> 'a elt array val min_elt : 'a t -> compare:('a elt -> 'a elt -> int) -> 'a elt option val max_elt : 'a t -> compare:('a elt -> 'a elt -> int) -> 'a elt option end module type Generic_phantom = sig type ('a, 'phantom) t type 'a elt val length : (_, _) t -> int val is_empty : (_, _) t -> bool val iter : ('a, _) t -> f:('a elt -> unit) -> unit val fold : ('a, _) t -> init:'accum -> f:('accum -> 'a elt -> 'accum) -> 'accum val fold_result : ('a, _) t -> init:'accum -> f:('accum -> 'a elt -> ('accum, 'e) Result.t) -> ('accum, 'e) Result.t val fold_until : ('a, _) t -> init:'accum -> f:('accum -> 'a elt -> ('accum, 'final) Continue_or_stop.t) -> finish:('accum -> 'final) -> 'final val exists : ('a, _) t -> f:('a elt -> bool) -> bool val for_all : ('a, _) t -> f:('a elt -> bool) -> bool val count : ('a, _) t -> f:('a elt -> bool) -> int val sum : (module Summable with type t = 'sum) -> ('a, _) t -> f:('a elt -> 'sum) -> 'sum val find : ('a, _) t -> f:('a elt -> bool) -> 'a elt option val find_map : ('a, _) t -> f:('a elt -> 'b option) -> 'b option val to_list : ('a, _) t -> 'a elt list val to_array : ('a, _) t -> 'a elt array val min_elt : ('a, _) t -> compare:('a elt -> 'a elt -> int) -> 'a elt option val max_elt : ('a, _) t -> compare:('a elt -> 'a elt -> int) -> 'a elt option end module type Make_gen_arg = sig type 'a t type 'a elt val fold : 'a t -> init:'accum -> f:('accum -> 'a elt -> 'accum) -> 'accum (** The [iter] argument to [Container.Make] specifies how to implement the container's [iter] function. [`Define_using_fold] means to define [iter] via: {[ iter t ~f = Container.iter ~fold t ~f ]} [`Custom] overrides the default implementation, presumably with something more efficient. Several other functions returned by [Container.Make] are defined in terms of [iter], so passing in a more efficient [iter] will improve their efficiency as well. *) val iter : [ `Define_using_fold | `Custom of 'a t -> f:('a elt -> unit) -> unit ] (** The [length] argument to [Container.Make] specifies how to implement the container's [length] function. [`Define_using_fold] means to define [length] via: {[ length t ~f = Container.length ~fold t ~f ]} [`Custom] overrides the default implementation, presumably with something more efficient. Several other functions returned by [Container.Make] are defined in terms of [length], so passing in a more efficient [length] will improve their efficiency as well. *) val length : [ `Define_using_fold | `Custom of 'a t -> int ] end module type Make_arg = Make_gen_arg with type 'a elt := 'a Monad.Ident.t module type Make0_arg = sig module Elt : sig type t val equal : t -> t -> bool end type t include Make_gen_arg with type 'a t := t and type 'a elt := Elt.t end module type Container = sig include module type of struct include Export end module type S0 = S0 module type S0_phantom = S0_phantom module type S1 = S1 module type S1_phantom_invariant = S1_phantom_invariant module type S1_phantom = S1_phantom module type Generic = Generic module type Generic_phantom = Generic_phantom module type Summable = Summable (** Generic definitions of container operations in terms of [fold]. E.g.: [iter ~fold t ~f = fold t ~init:() ~f:(fun () a -> f a)]. *) type ('t, 'a, 'accum) fold = 't -> init:'accum -> f:('accum -> 'a -> 'accum) -> 'accum type ('t, 'a) iter = 't -> f:('a -> unit) -> unit type 't length = 't -> int val iter : fold:('t, 'a, unit) fold -> ('t, 'a) iter val count : fold:('t, 'a, int) fold -> 't -> f:('a -> bool) -> int val min_elt : fold:('t, 'a, 'a option) fold -> 't -> compare:('a -> 'a -> int) -> 'a option val max_elt : fold:('t, 'a, 'a option) fold -> 't -> compare:('a -> 'a -> int) -> 'a option val length : fold:('t, _, int) fold -> 't -> int val to_list : fold:('t, 'a, 'a list) fold -> 't -> 'a list val sum : fold:('t, 'a, 'sum) fold -> (module Summable with type t = 'sum) -> 't -> f:('a -> 'sum) -> 'sum val fold_result : fold:('t, 'a, 'b) fold -> init:'b -> f:('b -> 'a -> ('b, 'e) Result.t) -> 't -> ('b, 'e) Result.t val fold_until : fold:('t, 'a, 'b) fold -> init:'b -> f:('b -> 'a -> ('b, 'final) Continue_or_stop.t) -> finish:('b -> 'final) -> 't -> 'final (** Generic definitions of container operations in terms of [iter] and [length]. *) val is_empty : iter:('t, 'a) iter -> 't -> bool val exists : iter:('t, 'a) iter -> 't -> f:('a -> bool) -> bool val for_all : iter:('t, 'a) iter -> 't -> f:('a -> bool) -> bool val find : iter:('t, 'a) iter -> 't -> f:('a -> bool) -> 'a option val find_map : iter:('t, 'a) iter -> 't -> f:('a -> 'b option) -> 'b option val to_array : length:'t length -> iter:('t, 'a) iter -> 't -> 'a array (** The idiom for using [Container.Make] is to bind the resulting module and to explicitly import each of the functions that one wants: {[ module C = Container.Make (struct ... end) let count = C.count let exists = C.exists let find = C.find (* ... *) ]} This is preferable to: {[ include Container.Make (struct ... end) ]} because the [include] makes it too easy to shadow specialized implementations of container functions ([length] being a common one). [Container.Make0] is like [Container.Make], but for monomorphic containers like [string]. *) module Make (T : Make_arg) : S1 with type 'a t := 'a T.t module Make0 (T : Make0_arg) : S0 with type t := T.t and type elt := T.Elt.t module Make_gen (T : Make_gen_arg) : Generic with type 'a t := 'a T.t and type 'a elt := 'a T.elt end
(** Provides generic signatures for container data structures. These signatures include functions ([iter], [fold], [exists], [for_all], ...) that you would expect to find in any container. Used by including [Container.S0] or [Container.S1] in the signature for every container-like data structure ([Array], [List], [String], ...) to ensure a consistent interface. *)
cli.ml
open! Import open Cmdliner open Resolver module Http = Irmin_http_unix module Graphql = Irmin_graphql_unix let deprecated_info = (Term.info [@alert "-deprecated"]) let deprecated_man_format = (Term.man_format [@alert "-deprecated"]) let deprecated_eval_choice = (Term.eval_choice [@alert "-deprecated"]) let () = Irmin.Backend.Watch.set_listen_dir_hook Irmin_watcher.hook let info (type a) (module S : Irmin.Generic_key.S with type Schema.Info.t = a) ?(author = "irmin") fmt = let module Info = Info.Make (S.Info) in Info.v ~author fmt (* Help sections common to all commands *) let help_sections = [ `S global_option_section; `P "These options can be passed to any command"; `S "AUTHORS"; `P "Thomas Gazagnaire <thomas@gazagnaire.org>"; `S "BUGS"; `P "Check bug reports at https://github.com/mirage/irmin/issues."; ] let setup_log style_renderer level = Fmt_tty.setup_std_outputs ?style_renderer (); Logs.set_level level; Logs.set_reporter (Logs_fmt.reporter ()); () let setup_log = Term.(const setup_log $ Fmt_cli.style_renderer () $ Logs_cli.level ()) let term_info title ~doc ~man = let man = man @ help_sections in deprecated_info ~sdocs:global_option_section ~docs:global_option_section ~doc ~man title type command = (unit Term.t * Term.info[@alert "-deprecated"]) type sub = { name : string; doc : string; man : Manpage.block list; term : unit Term.t; } let create_command c = let man = [ `S "DESCRIPTION"; `P c.doc ] @ c.man in (c.term, term_info c.name ~doc:c.doc ~man) (* Converters *) let pr_str = Format.pp_print_string let path = let path_conv = let parse str = `Ok str in let print ppf path = pr_str ppf path in (parse, print) in let doc = Arg.info ~docv:"PATH" ~doc:"Key to lookup or modify." [] in Arg.(required & pos 0 (some path_conv) None & doc) type path_or_empty = Empty | Path of string let path_or_empty = let path_conv = let parse str = `Ok (Path str) in let print = Fmt.of_to_string (function Path str -> str | Empty -> "/") in (parse, print) in let doc = Arg.info [] ~docv:"PATH" ~doc: "Path to lookup or modify. Defaults to the empty path (which queries \ the root tree of a store)." in Arg.(value & pos 0 path_conv Empty & doc) let depth = let doc = Arg.info ~docv:"DEPTH" ~doc:"Limit the history depth." [ "d"; "depth" ] in Arg.(value & opt (some int) None & doc) let print_exc exc = (match exc with | Failure f -> Fmt.epr "ERROR: %s\n%!" f | e -> Fmt.epr "ERROR: %a\n%!" Fmt.exn e); exit 1 let run t = Lwt_main.run (Lwt.catch (fun () -> t) print_exc) let mk (fn : 'a) : 'a Term.t = Term.(const (fun () -> fn) $ setup_log) (* INIT *) let init = { name = "init"; doc = "Initialize a store."; man = []; term = (let init (S (_, _store, _)) = run Lwt.return_unit in Term.(mk init $ store ())); } (* HTTP *) let http = { name = "http"; doc = "Run http server"; man = []; term = (let uri = let doc = Arg.info ~docv:"URI" [ "a"; "address" ] ~doc: "Start the Irmin server on the given socket address. Examples \ include http://localhost:8080 and launchd://Listener." in Arg.(value & opt string "http://localhost:8080" & doc) in let init (S (impl, store, _)) uri = let (module S) = match Store.Impl.hash_keyed impl with | Some x -> x | None -> Fmt.failwith "Unsupported backend: can't start an HTTP server with a store \ that is not keyed by hashes" in run (let* t = store in let module HTTP = Http.Server (S) in let uri = Uri.of_string uri in let spec = HTTP.v (S.repo t) in match Uri.scheme uri with | Some "launchd" -> let uri, name = match Uri.host uri with | None -> (Uri.with_host uri (Some "Listener"), "Listener") | Some name -> (uri, name) in [%logs.info "daemon: %s" (Uri.to_string uri)]; Cohttp_lwt_unix.Server.create ~timeout:3600 ~mode:(`Launchd name) spec | _ -> let uri = match Uri.host uri with | None -> Uri.with_host uri (Some "localhost") | Some _ -> uri in let port, uri = match Uri.port uri with | None -> (8080, Uri.with_port uri (Some 8080)) | Some p -> (p, uri) in [%logs.info "daemon: %s" (Uri.to_string uri)]; Printf.printf "Server starting on port %d.\n%!" port; Cohttp_lwt_unix.Server.create ~timeout:3600 ~mode:(`TCP (`Port port)) spec) in Term.(mk init $ store () $ uri)); } let print fmt = Fmt.kstr print_endline fmt let get name f x = match Irmin.Type.of_string f x with | Ok x -> x | Error (`Msg e) -> Fmt.kstr invalid_arg "invalid %s: %s" name e let key f x = get "key" f x let value f x = get "value" f x let branch f x = get "branch" f x let commit f x = get "commit" f x (* GET *) let get = { name = "get"; doc = "Read the value associated with a key."; man = []; term = (let get (S (impl, store, _)) path = let (module S) = Store.Impl.generic_keyed impl in run (let* t = store in S.find t (key S.Path.t path) >>= function | None -> print "<none>"; exit 1 | Some v -> print "%a" (Irmin.Type.pp S.Contents.t) v; Lwt.return_unit) in Term.(mk get $ store () $ path)); } (* LIST *) let list = { name = "list"; doc = "List subdirectories."; man = []; term = (let list (S (impl, store, _)) path_or_empty = let (module S) = Store.Impl.generic_keyed impl in let path = match path_or_empty with | Empty -> S.Path.empty | Path str -> key S.Path.t str in run (let* t = store in let* paths = S.list t path in let pp_step = Irmin.Type.pp S.Path.step_t in let pp ppf (s, k) = match S.Tree.destruct k with | `Contents _ -> Fmt.pf ppf "FILE %a" pp_step s | `Node _ -> Fmt.pf ppf "DIR %a" pp_step s in List.iter (print "%a" pp) paths; Lwt.return_unit) in Term.(mk list $ store () $ path_or_empty)); } (* TREE *) let tree = { name = "tree"; doc = "List the store contents."; man = []; term = (let tree (S (impl, store, _)) = let (module S) = Store.Impl.generic_keyed impl in run (let* t = store in let all = ref [] in let todo = ref [ S.Path.empty ] in let rec walk () = match !todo with | [] -> Lwt.return_unit | k :: rest -> todo := rest; let* childs = S.list t k in Lwt_list.iter_p (fun (s, c) -> let k = S.Path.rcons k s in match S.Tree.destruct c with | `Node _ -> todo := k :: !todo; Lwt.return_unit | `Contents _ -> let+ v = S.get t k in all := (k, v) :: !all) childs >>= walk in walk () >>= fun () -> let all = !all in let all = List.map (fun (k, v) -> ( Irmin.Type.to_string S.Path.t k, Irmin.Type.to_string S.Contents.t v )) all in let max_length l = List.fold_left (fun len s -> max len (String.length s)) 0 l in let k_max = max_length (List.map fst all) in let v_max = max_length (List.map snd all) in let pad = 79 + k_max + v_max in List.iter (fun (k, v) -> let dots = String.make (pad - String.length k - String.length v) '.' in print "%s%s%s" k dots v) all; Lwt.return_unit) in Term.(mk tree $ store ())); } let author = let doc = Arg.info ~docv:"NAME" ~doc:"Commit author name." [ "author" ] in Arg.(value & opt (some string) None & doc) let message = let doc = Arg.info ~docv:"MESSAGE" ~doc:"Commit message." [ "message" ] in Arg.(value & opt (some string) None & doc) (* SET *) let set = { name = "set"; doc = "Update the value associated with a key."; man = []; term = (let v = let doc = Arg.info ~docv:"VALUE" ~doc:"Value to add." [] in Arg.(required & pos 1 (some string) None & doc) in let set (S (impl, store, _)) author message path v = let (module S) = Store.Impl.generic_keyed impl in run (let message = match message with Some s -> s | None -> "set" in let* t = store in let path = key S.Path.t path in let value = value S.Contents.t v in S.set_exn t ~info:(info (module S) ?author "%s" message) path value) in Term.(mk set $ store () $ author $ message $ path $ v)); } (* REMOVE *) let remove = { name = "remove"; doc = "Delete a key."; man = []; term = (let remove (S (impl, store, _)) author message path = let (module S) = Store.Impl.generic_keyed impl in run (let message = match message with Some s -> s | None -> "remove " ^ path in let* t = store in S.remove_exn t ~info:(info (module S) ?author "%s" message) (key S.Path.t path)) in Term.(mk remove $ store () $ author $ message $ path)); } let apply e f = match (e, f) with | R (h, e), Some f -> f ?ctx:None ?headers:h e | R _, None -> Fmt.failwith "invalid remote for that kind of store" | r, _ -> Lwt.return r (* CLONE *) let clone = { name = "clone"; doc = "Copy a remote respository to a local store"; man = []; term = (let clone (S (impl, store, f), remote) depth = let (module S) = Store.Impl.generic_keyed impl in let module Sync = Irmin.Sync.Make (S) in run (let* t = store in let* r = remote in let* x = apply r f in Sync.fetch t ?depth x >>= function | Ok (`Head d) -> S.Head.set t d | Ok `Empty -> Lwt.return_unit | Error (`Msg e) -> failwith e) in Term.(mk clone $ remote () $ depth)); } (* FETCH *) let fetch = { name = "fetch"; doc = "Download objects and refs from another repository."; man = []; term = (let fetch (S (impl, store, f), remote) = let (module S) = Store.Impl.generic_keyed impl in let module Sync = Irmin.Sync.Make (S) in run (let* t = store in let* r = remote in let branch = branch S.Branch.t "import" in let* t = S.of_branch (S.repo t) branch in let* x = apply r f in let* _ = Sync.pull_exn t x `Set in Lwt.return_unit) in Term.(mk fetch $ remote ())); } (* MERGE *) let merge = { name = "merge"; doc = "Merge branches."; man = []; term = (let merge (S (impl, store, _)) author message branch = let (module S) = Store.Impl.generic_keyed impl in run (let message = match message with Some s -> s | None -> "merge" in let branch = match Irmin.Type.of_string S.Branch.t branch with | Ok b -> b | Error (`Msg msg) -> failwith msg in let* t = store in S.merge_with_branch t branch ~info:(info (module S) ?author "%s" message) >|= function | Ok () -> () | Error conflict -> let fmt = Irmin.Type.pp_json Irmin.Merge.conflict_t in Fmt.epr "CONFLICT: %a\n%!" fmt conflict) in let branch_name = let doc = Arg.info ~docv:"BRANCH" ~doc:"Branch to merge from." [] in Arg.(required & pos 0 (some string) None & doc) in Term.(mk merge $ store () $ author $ message $ branch_name)); } (* PULL *) let pull = { name = "pull"; doc = "Fetch and merge with another repository."; man = []; term = (let pull (S (impl, store, f), remote) author message = let (module S) = Store.Impl.generic_keyed impl in let message = match message with Some s -> s | None -> "pull" in let module Sync = Irmin.Sync.Make (S) in run (let* t = store in let* r = remote in let* x = apply r f in let* _ = Sync.pull_exn t x (`Merge (info (module S) ?author "%s" message)) in Lwt.return_unit) in Term.(mk pull $ remote () $ author $ message)); } (* PUSH *) let push = { name = "push"; doc = "Update remote references along with associated objects."; man = []; term = (let push (S (impl, store, f), remote) = let (module S) = Store.Impl.generic_keyed impl in let module Sync = Irmin.Sync.Make (S) in run (let* t = store in let* r = remote in let* x = apply r f in let* _ = Sync.push_exn t x in Lwt.return_unit) in Term.(mk push $ remote ())); } (* SNAPSHOT *) let snapshot = { name = "snapshot"; doc = "Return a snapshot for the current state of the database."; man = []; term = (let snapshot (S (impl, store, _)) = let (module S) = Store.Impl.generic_keyed impl in run (let* t = store in let* k = S.Head.get t in print "%a" S.Commit.pp_hash k; Lwt.return_unit) in Term.(mk snapshot $ store ())); } (* REVERT *) let revert = { name = "revert"; doc = "Revert the contents of the store to a previous state."; man = []; term = (let snapshot = let doc = Arg.info ~docv:"SNAPSHOT" ~doc:"The snapshot to revert to." [] in Arg.(required & pos 0 (some string) None & doc) in let revert (S (impl, store, _)) snapshot = let (module S) = Store.Impl.generic_keyed impl in run (let* t = store in let hash = commit S.Hash.t snapshot in let* s = S.Commit.of_hash (S.repo t) hash in match s with | Some s -> S.Head.set t s | None -> failwith "invalid commit") in Term.(mk revert $ store () $ snapshot)); } (* WATCH *) let run_command (type a b c) (module S : Irmin.Generic_key.S with type Schema.Path.t = a and type Schema.Contents.t = b and type Schema.Metadata.t = c) diff command proc = let simple_output (k, v) = let x = match v with `Updated _ -> "*" | `Added _ -> "+" | `Removed _ -> "-" in print "%s %a" x (Irmin.Type.pp S.Path.t) k; Lwt.return_unit in (* Check if there was a command passed, if not print a simple message to stdout, if there is a command pass the whole diff *) match command with | h :: t -> let ty = [%typ: (S.path * (S.contents * S.metadata) Irmin.Diff.t) list] in let s = Fmt.str "%a" (Irmin.Type.pp_json ty) diff in let make_proc () = (* Start new process *) let p = Lwt_process.open_process_out (h, Array.of_list (h :: t)) in proc := Some p; p in let proc = (* Check if process is already running, if not run it *) match !proc with | None -> make_proc () | Some p -> ( (* Determine if the subprocess completed succesfully or exited with an error, if it was successful then we can restart it, otherwise report the exit code the user *) let status = p#state in match status with | Lwt_process.Running -> p | Exited (Unix.WEXITED 0) -> make_proc () | Exited (Unix.WEXITED code) -> Printf.printf "Subprocess exited with code %d\n" code; exit code | Exited (Unix.WSIGNALED code) | Exited (Unix.WSTOPPED code) -> Printf.printf "Subprocess stopped with code %d\n" code; exit code) in (* Write the diff to the subprocess *) let* () = Lwt_io.write_line proc#stdin s in Lwt_io.flush proc#stdin | [] -> Lwt_list.iter_s simple_output diff let handle_diff (type a b) (module S : Irmin.Generic_key.S with type Schema.Path.t = a and type commit = b) (path : a) command proc d = let view (c, _) = let* t = S.of_commit c in S.find_tree t path >|= function None -> S.Tree.empty () | Some v -> v in let* x, y = match d with | `Updated (x, y) -> let* x = view x in let+ y = view y in (x, y) | `Added x -> let+ x = view x in (S.Tree.empty (), x) | `Removed x -> let+ x = view x in (x, S.Tree.empty ()) in let* (diff : (S.path * (S.contents * S.metadata) Irmin.Diff.t) list) = S.Tree.diff x y in run_command (module S : Irmin.Generic_key.S with type Schema.Path.t = S.path and type Schema.Contents.t = S.contents and type Schema.Metadata.t = S.metadata) diff command proc let watch = { name = "watch"; doc = "Get notifications when values change."; man = []; term = (let watch (S (impl, store, _)) path command = let (module S) = Store.Impl.generic_keyed impl in let path = key S.Path.t path in let proc = ref None in let () = at_exit (fun () -> match !proc with None -> () | Some p -> p#terminate) in run (let* t = store in let* _ = S.watch_key t path (handle_diff (module S : Irmin.Generic_key.S with type Schema.Path.t = S.path and type commit = S.commit) path command proc) in let t, _ = Lwt.task () in t) in let command = let doc = Arg.info ~docv:"COMMAND" ~doc:"Command to execute" [] in Arg.(value & pos_right 0 string [] & doc) in Term.(mk watch $ store () $ path $ command)); } (* DOT *) let dot = { name = "dot"; doc = "Dump the contents of the store as a Graphviz file."; man = []; term = (let basename = let doc = Arg.info ~docv:"BASENAME" ~doc:"Basename for the .dot and .png files." [] in Arg.(required & pos 0 (some string) None & doc) in let no_dot_call = let doc = Arg.info ~doc:"Do not call the `dot' utility on the generated `.dot` file." [ "no-dot-call" ] in Arg.(value & flag & doc) in let full = let doc = Arg.info ~doc: "Show the full graph of objects, including the filesystem nodes \ and the content blobs." [ "full" ] in Arg.(value & flag & doc) in let dot (S (impl, store, _)) basename depth no_dot_call full = let (module S) = Store.Impl.generic_keyed impl in let module Dot = Irmin.Dot (S) in let date d = let tm = Unix.localtime (Int64.to_float d) in Printf.sprintf "%2d:%2d:%2d" tm.Unix.tm_hour tm.Unix.tm_min tm.Unix.tm_sec in run (let* t = store in let call_dot = not no_dot_call in let buf = Buffer.create 1024 in Dot.output_buffer ~html:false t ?depth ~full ~date buf >>= fun () -> let oc = open_out_bin (basename ^ ".dot") in let* () = Lwt.finalize (fun () -> output_string oc (Buffer.contents buf); Lwt.return_unit) (fun () -> close_out oc; Lwt.return_unit) in if call_dot then ( let i = Sys.command "/bin/sh -c 'command -v dot'" in if i <> 0 then [%logs.err "Cannot find the `dot' utility. Please install it on your \ system and be sure it is available in your $PATH."]; let i = Sys.command (Printf.sprintf "dot -Tpng %s.dot -o%s.png" basename basename) in if i <> 0 then [%logs.err "The %s.dot is corrupted" basename]); Lwt.return_unit) in Term.(mk dot $ store () $ basename $ depth $ no_dot_call $ full)); } let config_man = let version_string = Printf.sprintf "Irmin %s" Irmin.version in ( ("irmin.yml", 5, "", version_string, "Irmin Manual"), [ `S Manpage.s_name; `P "irmin.yml"; `S Manpage.s_synopsis; `P "Configure certain command-line options to cut down on mistakes and \ save on typing"; `S Manpage.s_description; `P "An $(b,irmin.yml) file lets the user specify repetitve command-line \ options in a YAML file. The $(b,irmin.yml) is read by default if it \ is found in the current working directory or defined globally as \ \\$HOME/.config/irmin/config.yml. The configuration file path can \ also be set using the $(b,--config) command-line flag or by setting \ \\$XDG_CONFIG_HOME. \n\ \ The following keys are allowed: $(b,contents), $(b,store), \ $(b,branch), $(b,root), $(b,bare) or $(b,head). These correspond to \ the irmin options of the same names. Additionally, specific\n\ \ backends may have other options available, these can be \ lised using the $(b,options)\n\ \ command and applied using the $(b,--opt) flag."; `S Manpage.s_examples; `P "Here is an example $(b,irmin.yml) for accessing a local http irmin \ store. This $(b,irmin.yml) prevents the user from having to specify \ the $(b,store) and $(b,root) options for every command."; `Pre " \\$ cat irmin.yml\n store: pack\n root: /path/to/my/store"; ] @ help_sections ) (* HELP *) let help = { name = "help"; doc = "Display help about Irmin and Irmin commands."; man = [ `P "Use `$(mname) help topics' to get the full list of help topics." ]; term = (let topic = let doc = Arg.info [] ~docv:"TOPIC" ~doc:"The topic to get help on." in Arg.(value & pos 0 (some string) None & doc) in let help man_format cmds topic = match topic with | None -> `Help (`Pager, None) | Some topic -> ( let topics = "irmin.yml" :: cmds in let conv, _ = Arg.enum (List.rev_map (fun s -> (s, s)) ("topics" :: topics)) in match conv topic with | `Error e -> `Error (false, e) | `Ok t when t = "topics" -> List.iter print_endline topics; `Ok () | `Ok t when t = "irmin.yml" -> `Ok (Cmdliner.Manpage.print man_format Format.std_formatter config_man) | `Ok t -> `Help (man_format, Some t)) in Term.(ret (mk help $ deprecated_man_format $ Term.choice_names $ topic))); } (* GRAPHQL *) let graphql = { name = "graphql"; doc = "Run a graphql server."; man = []; term = (let port = let doc = Arg.info ~doc:"Port for graphql server." [ "p"; "port" ] in Arg.(value & opt int 8080 & doc) in let addr = let doc = Arg.info ~doc:"Address for graphql server." [ "a"; "address" ] in Arg.(value & opt string "localhost" & doc) in let graphql (S (impl, store, remote_fn)) port addr = let (module S) = Store.Impl.generic_keyed impl in run (let module Server = Graphql.Server.Make (S) (struct let remote = remote_fn end) in let* t = store in let server = Server.v (S.repo t) in let* ctx = Conduit_lwt_unix.init ~src:addr () in let ctx = Cohttp_lwt_unix.Net.init ~ctx () in let on_exn exn = [%logs.debug "on_exn: %s" (Printexc.to_string exn)] in Cohttp_lwt_unix.Server.create ~on_exn ~ctx ~mode:(`TCP (`Port port)) server) in Term.(mk graphql $ store () $ port $ addr)); } let options = { name = "options"; doc = "Get information about backend specific configuration options."; man = []; term = (let options (store, hash, contents) = let module Conf = Irmin.Backend.Conf in let store, _ = Resolver.load_config ?store ?hash ?contents () in let spec = Store.spec store in Seq.iter (fun (Conf.K k) -> let name = Conf.name k in if name = "root" || name = "uri" then () else let ty = Conf.ty k in let doc = Conf.doc k |> Option.value ~default:"" in let ty = Fmt.str "%a" Irmin.Type.pp_ty ty |> Astring.String.filter (fun c -> c <> '\n') in Fmt.pr "%s: %s\n\t%s\n" name ty doc) (Conf.Spec.keys spec) in Term.(mk options $ Store.term ())); } let branches = { name = "branches"; doc = "List branches"; man = []; term = (let branches (S (impl, store, _)) = let (module S) = Store.Impl.generic_keyed impl in run (let* t = store in let+ branches = S.Branch.list (S.repo t) in List.iter (Fmt.pr "%a\n" (Irmin.Type.pp S.branch_t)) branches) in Term.(mk branches $ store ())); } let weekday Unix.{ tm_wday; _ } = match tm_wday with | 0 -> "Sun" | 1 -> "Mon" | 2 -> "Tue" | 3 -> "Wed" | 4 -> "Thu" | 5 -> "Fri" | 6 -> "Sat" | _ -> assert false let month Unix.{ tm_mon; _ } = match tm_mon with | 0 -> "Jan" | 1 -> "Feb" | 2 -> "Mar" | 3 -> "Apr" | 4 -> "May" | 5 -> "Jun" | 6 -> "Jul" | 7 -> "Aug" | 8 -> "Sep" | 9 -> "Oct" | 10 -> "Nov" | 11 -> "Dec" | _ -> assert false let log = { name = "log"; doc = "List commits"; man = []; term = (let plain = let doc = Arg.info ~doc:"Show plain text without pager" [ "plain" ] in Arg.(value & flag & doc) in let pager = let doc = Arg.info ~doc:"Specify pager program to use" [ "pager" ] in Arg.(value & opt string "pager" & doc) in let num = let doc = Arg.info ~doc:"Number of entries to show" [ "n"; "max-count" ] in Arg.(value & opt (some int) None & doc) in let skip = let doc = Arg.info ~doc:"Number of entries to skip" [ "skip" ] in Arg.(value & opt (some int) None & doc) in let reverse = let doc = Arg.info ~doc:"Print in reverse order" [ "reverse" ] in Arg.(value & flag & doc) in let exception Return in let commits (S (impl, store, _)) plain pager num skip reverse = let (module S) = Store.Impl.generic_keyed impl in run (let* t = store in let fmt f date = Fmt.pf f "%s %s %02d %02d:%02d:%02d %04d" (weekday date) (month date) date.tm_mday date.tm_hour date.tm_min date.tm_sec (date.tm_year + 1900) in let repo = S.repo t in let skip = ref (Option.value ~default:0 skip) in let num = Option.value ~default:0 num in let num_count = ref 0 in let commit formatter key = if num > 0 && !num_count >= num then raise Return else if !skip > 0 then let () = decr skip in Lwt.return_unit else let+ commit = S.Commit.of_key repo key >|= Option.get in let hash = S.Backend.Commit.Key.to_hash key in let info = S.Commit.info commit in let date = S.Info.date info in let author = S.Info.author info in let message = S.Info.message info in let date = Unix.localtime (Int64.to_float date) in let () = Fmt.pf formatter "commit %a\nAuthor: %s\nDate: %a\n\n%s\n\n%!" (Irmin.Type.pp S.hash_t) hash author fmt date message in incr num_count in let* max = S.Head.get t >|= fun x -> [ `Commit (S.Commit.key x) ] in let iter ~commit ~max repo = Lwt.catch (fun () -> if reverse then S.Repo.iter ~commit ~min:[] ~max repo else S.Repo.breadth_first_traversal ~commit ~max repo) (function Return -> Lwt.return_unit | exn -> raise exn) in if plain then let commit = commit Format.std_formatter in iter ~commit ~max repo else Lwt.catch (fun () -> let out = Unix.open_process_out pager in let commit = commit (Format.formatter_of_out_channel out) in let+ () = iter ~commit ~max repo in let _ = Unix.close_process_out out in ()) (function | Sys_error s when String.equal s "Broken pipe" -> Lwt.return_unit | exn -> raise exn)) in Term.(mk commits $ store () $ plain $ pager $ num $ skip $ reverse)); } let default = let doc = "Irmin, the database that never forgets." in let man = [ `S "DESCRIPTION"; `P "Irmin is a distributed database used primarily for application data. \ It is designed to work with a large variety of backends and has \ built-in snapshotting, reverting and branching mechanisms."; `P "Use either $(mname) <command> --help or $(mname) help <command> for \ more information on a specific command."; ] in let usage () = Fmt.pr "usage: irmin [--version]\n\ \ [--help]\n\ \ <command> [<args>]\n\n\ The most commonly used subcommands are:\n\ \ init %s\n\ \ get %s\n\ \ set %s\n\ \ remove %s\n\ \ list %s\n\ \ tree %s\n\ \ clone %s\n\ \ fetch %s\n\ \ merge %s\n\ \ pull %s\n\ \ push %s\n\ \ snapshot %s\n\ \ revert %s\n\ \ watch %s\n\ \ dot %s\n\ \ graphql %s\n\ \ http %s\n\ \ options %s\n\ \ branches %s\n\ \ log %s\n\n\ See `irmin help <command>` for more information on a specific command.\n\ %!" init.doc get.doc set.doc remove.doc list.doc tree.doc clone.doc fetch.doc merge.doc pull.doc push.doc snapshot.doc revert.doc watch.doc dot.doc graphql.doc http.doc options.doc branches.doc log.doc in ( Term.(mk usage $ const ()), deprecated_info "irmin" ~version:Irmin.version ~sdocs:global_option_section ~doc ~man ) let commands = List.map create_command [ help; init; http; get; set; remove; list; tree; clone; fetch; merge; pull; push; snapshot; revert; watch; dot; graphql; options; branches; log; ] let run ~default:x y = match deprecated_eval_choice x y with `Error _ -> exit 1 | _ -> ()
(* * Copyright (c) 2013-2022 Thomas Gazagnaire <thomas@gazagnaire.org> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
swrite_stubs.c
#define CAML_NAME_SPACE #include <caml/mlvalues.h> #include <caml/memory.h> #include <caml/alloc.h> #include "swrite.h" value caml_swrite_getBasic(value unit) { CAMLparam1(unit); CAMLreturn((swrite_Basic == TRUE) ? Val_true : Val_false); } value caml_swrite_setBasic(value b) { CAMLparam1(b); swrite_Basic = (Bool_val(b) ? TRUE : FALSE); CAMLreturn(Val_unit); } value caml_swrite_getParameters(value unit) { CAMLparam1(unit); CAMLreturn((swrite_Parameters == TRUE) ? Val_true : Val_false); } value caml_swrite_setParameters(value b) { CAMLparam1(b); swrite_Parameters = (Bool_val(b) ? TRUE : FALSE); CAMLreturn(Val_unit); } value caml_swrite_getCollectors(value unit) { CAMLparam1(unit); CAMLreturn((swrite_Collectors == TRUE) ? Val_true : Val_false); } value caml_swrite_setCollectors(value b) { CAMLparam1(b); swrite_Collectors = (Bool_val(b) ? TRUE : FALSE); CAMLreturn(Val_unit); } value caml_swrite_getClasses(value unit) { CAMLparam1(unit); CAMLreturn((swrite_Classes == TRUE) ? Val_true : Val_false); } value caml_swrite_setClasses(value b) { CAMLparam1(b); swrite_Classes = (Bool_val(b) ? TRUE : FALSE); CAMLreturn(Val_unit); } value caml_swrite_getCounters(value unit) { CAMLparam1(unit); CAMLreturn((swrite_Counters == TRUE) ? Val_true : Val_false); } value caml_swrite_setCounters(value b) { CAMLparam1(b); swrite_Counters = (Bool_val(b) ? TRUE : FALSE); CAMLreturn(Val_unit); } value caml_swrite_getHost(value unit) { CAMLparam1(unit); CAMLreturn((swrite_Host == TRUE) ? Val_true : Val_false); } value caml_swrite_setHost(value b) { CAMLparam1(b); swrite_Host = (Bool_val(b) ? TRUE : FALSE); CAMLreturn(Val_unit); }
zk_rollup_storage.ml
type error += | Zk_rollup_does_not_exist of Zk_rollup_repr.t | Zk_rollup_invalid_op_code of int | Zk_rollup_pending_list_too_short | Zk_rollup_negative_length let () = register_error_kind `Temporary ~id:"zk_rollup_does_not_exist" ~title:"ZK Rollup does not exist" ~description:"Attempted to use a ZK rollup that has not been originated." ~pp:(fun ppf x -> Format.fprintf ppf "Rollup %a does not exist" Zk_rollup_repr.Address.pp x) Data_encoding.(obj1 (req "rollup" Zk_rollup_repr.Address.encoding)) (function Zk_rollup_does_not_exist x -> Some x | _ -> None) (fun x -> Zk_rollup_does_not_exist x) ; register_error_kind `Permanent ~id:"zk_rollup_invalid_op code" ~title:"Invalid op code in append" ~description:"Invalid op code in append" ~pp:(fun ppf oc -> Format.fprintf ppf "Op code %d is not valid for this ZK Rollup" oc) Data_encoding.(obj1 (req "op_code" int31)) (function Zk_rollup_invalid_op_code oc -> Some oc | _ -> None) (fun oc -> Zk_rollup_invalid_op_code oc) ; register_error_kind `Temporary ~id:"zk_rollup_pending_list_too_short" ~title:"Pending list is too short" ~description:"Pending list is too short" Data_encoding.unit (function Zk_rollup_pending_list_too_short -> Some () | _ -> None) (fun () -> Zk_rollup_pending_list_too_short) ; register_error_kind `Permanent ~id:"zk_rollup_negative_length" ~title:"Negative length for pending list prefix" ~description:"Negative length for pending list prefix" Data_encoding.unit (function Zk_rollup_negative_length -> Some () | _ -> None) (fun () -> Zk_rollup_negative_length) let account = Storage.Zk_rollup.Account.get let pending_list = Storage.Zk_rollup.Pending_list.get let pending_op ctxt id = Storage.Zk_rollup.Pending_operation.get (ctxt, id) let originate ctxt static ~init_state = let open Lwt_result_syntax in let*? ctxt, nonce = Raw_context.increment_origination_nonce ctxt in let*? address = Zk_rollup_repr.Address.from_nonce nonce in let origination_size = Constants_storage.zk_rollup_origination_size ctxt in let initial_account = Zk_rollup_account_repr. { static; dynamic = { state = init_state; paid_l2_operations_storage_space = Z.of_int origination_size; used_l2_operations_storage_space = Z.zero; }; } in let* ctxt, account_size = Storage.Zk_rollup.Account.init ctxt address initial_account in let init_pl = Zk_rollup_repr.(Empty {next_index = 0L}) in let* ctxt, pl_size = Storage.Zk_rollup.Pending_list.init ctxt address init_pl in let address_size = Zk_rollup_repr.Address.size in let size = Z.of_int (origination_size + address_size + account_size + pl_size) in return (ctxt, address, size) let add_to_pending ctxt rollup ops = let open Lwt_result_syntax in let open Zk_rollup_repr in let open Zk_rollup_operation_repr in let* ctxt, acc = account ctxt rollup in let*? () = List.iter_e (fun (op, _ticket_hash_opt) -> if Compare.Int.(op.op_code >= acc.static.nb_ops || op.op_code < 0) then error @@ Zk_rollup_invalid_op_code op.op_code else ok ()) ops in let* ctxt, pl = Storage.Zk_rollup.Pending_list.get ctxt rollup in let next_index, length = match pl with | Empty {next_index} -> (next_index, 0) | Pending {next_index; length} -> (next_index, length) in let* ctxt, next_index, length, storage_diff = List.fold_left_es (fun (ctxt, next_index, length, storage_diff) op -> let* ctxt, new_storage_diff, _was_bound = Storage.Zk_rollup.Pending_operation.add (ctxt, rollup) next_index op in return ( ctxt, Int64.succ next_index, length + 1, new_storage_diff + storage_diff )) (ctxt, next_index, length, 0) ops in let used_l2_operations_storage_space = Z.(add acc.dynamic.used_l2_operations_storage_space (Z.of_int storage_diff)) in let l2_operations_storage_space_to_pay = Z.( max zero (sub used_l2_operations_storage_space acc.dynamic.paid_l2_operations_storage_space)) in let paid_l2_operations_storage_space = Z.( add acc.dynamic.paid_l2_operations_storage_space l2_operations_storage_space_to_pay) in let acc = { acc with dynamic = { acc.dynamic with paid_l2_operations_storage_space; used_l2_operations_storage_space; }; } in let pl = if Compare.Int.(length = 0) then Empty {next_index} else Pending {next_index; length} in (* Users aren't charged for storage diff in the account or pending list description of a ZKRU. When updating a ZKRU account, the storage diff can only come from the dynamically sized [Z.t] used for the watermark. These changes in storage size will not be accounted for. As for the pending list description, the storage size is fixed for each of the two cases (empty / non-empty). Then, there will be a storage diff when switching between these two, which won't be accounted for either. *) let* ctxt, _diff_acc = Storage.Zk_rollup.Account.update ctxt rollup acc in let* ctxt, _diff_pl = Storage.Zk_rollup.Pending_list.update ctxt rollup pl in return (ctxt, l2_operations_storage_space_to_pay) let pending_length = let open Zk_rollup_repr in function Empty _ -> 0 | Pending {length; _} -> length let head = let open Zk_rollup_repr in function | Empty _ -> error Zk_rollup_pending_list_too_short | Pending {next_index; length} -> Result_syntax.return Int64.(sub next_index (of_int length)) let next_index = let open Zk_rollup_repr in function | Empty {next_index} -> next_index | Pending {next_index; _} -> next_index let get_pending_length ctxt rollup = let open Lwt_result_syntax in let* ctxt, pl = pending_list ctxt rollup in return (ctxt, pending_length pl) (** Same as [Tezos_stdlib.Utils.fold_n_times] but with Lwt and Error monad *) let fold_n_times_es ~when_negative n f e = let open Lwt_result_syntax in if Compare.Int.(n < 0) then tzfail when_negative else let rec go acc = function | 0 -> return acc | n -> let* acc = f acc in (go [@ocaml.tailcall]) acc (n - 1) in go e n let get_prefix ctxt rollup n = let open Lwt_result_syntax in if Compare.Int.(n = 0) then return (ctxt, []) else let* ctxt, pl = pending_list ctxt rollup in let pl_length = pending_length pl in let*? () = error_when Compare.Int.(n > pl_length) Zk_rollup_pending_list_too_short in let*? hd = head pl in let* ctxt, ops, _i = (* Get the l2 ops corresponding to indeces [hd + n - 1 .. hd], so that the accumulated list is in the right order *) fold_n_times_es ~when_negative:Zk_rollup_negative_length n (fun (ctxt, ops, i) -> let* ctxt, op = pending_op ctxt rollup i in return (ctxt, op :: ops, Int64.pred i)) (ctxt, [], Int64.(sub (add hd (of_int n)) 1L)) in return (ctxt, ops) let update ctxt rollup ~pending_to_drop ~new_account = let open Lwt_result_syntax in let open Zk_rollup_repr in let open Zk_rollup_account_repr in let* ctxt, pl = pending_list ctxt rollup in let* ctxt, acc = account ctxt rollup in let pl_length = pending_length pl in let*? () = error_when Compare.Int.(pending_to_drop > pl_length) Zk_rollup_pending_list_too_short in let next_index = next_index pl in (* Drop the indeces from [head] to [head + pending_to_drop - 1] from the storage of L2 operations. *) let* ctxt, freed = match head pl with | Error _e -> (* If the pending list is empty, then [pending_to_drop] must be 0. *) return (ctxt, 0) | Ok head -> let* ctxt, freed, _i = fold_n_times_es ~when_negative:Zk_rollup_negative_length pending_to_drop (fun (ctxt, freed, i) -> let* ctxt, new_freed, _bound = Storage.Zk_rollup.Pending_operation.remove (ctxt, rollup) i in return (ctxt, freed + new_freed, Int64.succ i)) (ctxt, 0, head) in return (ctxt, freed) in (* Subtract the bytes freed by removing pending operations from acc.dynamic.used_l2_operations_storage_space, and update [new_account]. *) let used_l2_operations_storage_space = Z.(sub acc.dynamic.used_l2_operations_storage_space (Z.of_int freed)) in let new_account = { new_account with dynamic = { state = new_account.dynamic.state; paid_l2_operations_storage_space = new_account.dynamic.paid_l2_operations_storage_space; used_l2_operations_storage_space; }; } in let* ctxt, _diff_acc = Storage.Zk_rollup.Account.update ctxt rollup new_account in (* Update the pending list descriptor *) let pl_length = pl_length - pending_to_drop in let pl = if Compare.Int.(pl_length = 0) then Empty {next_index} else Pending {next_index; length = pl_length} in let* ctxt, _diff_pl = Storage.Zk_rollup.Pending_list.update ctxt rollup pl in return ctxt let assert_exist ctxt rollup = let open Lwt_result_syntax in let* ctxt, exists = Storage.Zk_rollup.Account.mem ctxt rollup in let*? () = error_unless exists (Zk_rollup_does_not_exist rollup) in return ctxt let exists ctxt rollup = Storage.Zk_rollup.Account.mem ctxt rollup
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2022 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dune
(library (name impl) (public_name dune-vlib.impl) (implements vlib))
pcl_propanalysis.c
#include "pcl_propanalysis.h" /*---------------------------------------------------------------------*/ /* Global Variables */ /*---------------------------------------------------------------------*/ /*---------------------------------------------------------------------*/ /* Forward Declarations */ /*---------------------------------------------------------------------*/ /*---------------------------------------------------------------------*/ /* Internal Functions */ /*---------------------------------------------------------------------*/ /*----------------------------------------------------------------------- // // Function: pcl_weight_compare() // // Compare two PCL steps by standard weight of the clause. // // Global Variables: - // // Side Effects : - // /----------------------------------------------------------------------*/ static int pcl_weight_compare(PCLStep_p step1, PCLStep_p step2) { double w1, w2; if(PCLStepIsFOF(step1) && PCLStepIsFOF(step2)) { return 0; } else if(PCLStepIsFOF(step1)) { return -1; } else if(PCLStepIsFOF(step2)) { return 1; } w1 = ClauseStandardWeight(step1->logic.clause); w2 = ClauseStandardWeight(step2->logic.clause); if(w1 < w2) { return -1; } if(w1 > w2) { return 1; } return 0; } /*----------------------------------------------------------------------- // // Function: pcl_sc_compare() // // Compare two clause PCL steps by strict symbol count of the // clause. FOF steps are always smaller and equivalent. // // Global Variables: - // // Side Effects : - // /----------------------------------------------------------------------*/ static int pcl_sc_compare(PCLStep_p step1, PCLStep_p step2) { double w1, w2; if(PCLStepIsFOF(step1) && PCLStepIsFOF(step2)) { return 0; } else if(PCLStepIsFOF(step1)) { return -1; } else if(PCLStepIsFOF(step2)) { return 1; } w1 = ClauseSymTypeWeight(step1->logic.clause, 1,1,1,1,1,1,1,1); w2 = ClauseSymTypeWeight(step2->logic.clause, 1,1,1,1,1,1,1,1); if(w1 < w2) { return -1; } if(w1 > w2) { return 1; } return 0; } /*----------------------------------------------------------------------- // // Function: pcl_litno_compare() // // Compare two PCL steps by literal number. // // Global Variables: - // // Side Effects : - // /----------------------------------------------------------------------*/ static int pcl_litno_compare(PCLStep_p step1, PCLStep_p step2) { int w1, w2; if(PCLStepIsFOF(step1) && PCLStepIsFOF(step2)) { return 0; } else if(PCLStepIsFOF(step1)) { return -1; } else if(PCLStepIsFOF(step2)) { return 1; } w1 = ClauseLiteralNumber(step1->logic.clause); w2 = ClauseLiteralNumber(step2->logic.clause); if(w1 < w2) { return -1; } if(w1 > w2) { return 1; } return 0; } /*----------------------------------------------------------------------- // // Function: pcl_depth_compare() // // Compare two PCL steps by clause depth. // // Global Variables: - // // Side Effects : - // /----------------------------------------------------------------------*/ static int pcl_depth_compare(PCLStep_p step1, PCLStep_p step2) { int w1, w2; if(PCLStepIsFOF(step1) && PCLStepIsFOF(step2)) { return 0; } else if(PCLStepIsFOF(step1)) { return -1; } else if(PCLStepIsFOF(step2)) { return 1; } w1 = ClauseDepth(step1->logic.clause); w2 = ClauseDepth(step2->logic.clause); if(w1 < w2) { return -1; } if(w1 > w2) { return 1; } return 0; } /*----------------------------------------------------------------------- // // Function: pcl_prot_global_count() // // Determine the global properties of the PCL listing. // // Global Variables: - // // Side Effects : Only temporary memory operations. // /----------------------------------------------------------------------*/ static void pcl_prot_global_count(PCLProt_p prot, PCLPropData_p data) { PCLStep_p tmp; Clause_p clause; PStackPointer i; assert(prot && data); data->fof_formulae = 0; data->pos_clauses = 0; data->neg_clauses = 0; data->mix_clauses = 0; data->pos_clause_literals = 0; data->neg_clause_literals = 0; data->mix_clause_literals = 0; data->pos_literals = 0; data->neg_literals = 0; data->const_count = 0; data->func_count = 0; data->pred_count = 0; data->var_count = 0; PCLProtSerialize(prot); for(i=0; i<PStackGetSP(prot->in_order); i++) { tmp = PStackElementP(prot->in_order, i); if(PCLStepIsFOF(tmp)) { data->fof_formulae++; } else { clause = tmp->logic.clause; if(!ClauseIsEmpty(clause)) { if(ClauseIsPositive(clause)) { data->pos_clauses++; data->pos_clause_literals += ClauseLiteralNumber(clause); } else if(ClauseIsNegative(clause)) { data->neg_clauses++; data->neg_clause_literals += ClauseLiteralNumber(clause); } else { data->mix_clauses++; data->mix_clause_literals += ClauseLiteralNumber(clause); } data->pos_literals += clause->pos_lit_no; data->neg_literals += clause->neg_lit_no; data->const_count += ClauseSymTypeWeight(clause, 1,1,1,0,0,1,0,1); data->func_count += ClauseSymTypeWeight(clause, 1,1,1,0,1,0,0,1); data->pred_count += ClauseSymTypeWeight(clause, 1,1,1,0,0,0,1,1); data->var_count += ClauseSymTypeWeight(clause, 1,1,1,1,0,0,0,1); } } } } /*---------------------------------------------------------------------*/ /* Exported Functions */ /*---------------------------------------------------------------------*/ /*----------------------------------------------------------------------- // // Function: PCLProtFindMaxStep() // // Find and return the first PCL step from the protocol that is // maximal with respect to cmp, NULL if prot is empty. // // Global Variables: - // // Side Effects : Only temporary memory operations. // /----------------------------------------------------------------------*/ PCLStep_p PCLProtFindMaxStep(PCLProt_p prot, PCLCmpFunType cmp) { PCLStep_p res = NULL, tmp; PStack_p stack; PTree_p cell; assert(prot && cmp); if(!prot->steps) { return NULL; } stack = PTreeTraverseInit(prot->steps); cell = PTreeTraverseNext(stack); res = cell->key; while((cell=PTreeTraverseNext(stack))) { tmp = cell->key; if(cmp(tmp,res) > 0) { res = tmp; } } PStackFree(stack); return res; } /*----------------------------------------------------------------------- // // Function: PCLProtPropAnalyse() // // Analyse the PCL protocol and put the relevant information into // data. // // Global Variables: // // Side Effects : // /----------------------------------------------------------------------*/ void PCLProtPropAnalyse(PCLProt_p prot, PCLPropData_p data) { data->max_standard_weight_clause = PCLProtFindMaxStep(prot, pcl_weight_compare); data->longest_clause = PCLProtFindMaxStep(prot, pcl_litno_compare); data->max_symbol_clause = PCLProtFindMaxStep(prot, pcl_sc_compare); data->max_depth_clause = PCLProtFindMaxStep(prot, pcl_depth_compare); pcl_prot_global_count(prot, data); } /*----------------------------------------------------------------------- // // Function: PCLProtPropDataPrint() // // Print the result of the property analysis in reasonably readable // form. // // Global Variables: - // // Side Effects : Output // /----------------------------------------------------------------------*/ void PCLProtPropDataPrint(FILE* out, PCLPropData_p data) { long clauses = data->pos_clauses+data->neg_clauses+data->mix_clauses; fprintf(out, "# Protocol properties\n" "# ===================\n" "# Number of clauses : %6ld\n" "# ...of those positive : %6ld\n" "# ...of those negative : %6ld\n" "# ...of those mixed : %6ld\n" "# Average number of literals : %6.4f\n" "# ...in positive clauses : %6.4f\n" "# ...in negative clauses : %6.4f\n" "# ...in mixed clauses : %6.4f\n" "# ...positive literals only : %6.4f\n" "# ...negative literals only : %6.4f\n" "# Average number of function symbols: %6.4f\n" "# Average number of variable symbols: %6.4f\n" "# Average number of constant symbols: %6.4f\n" "# Average number of predicate symbols: %6.4f\n", clauses, data->pos_clauses, data->neg_clauses, data->mix_clauses, (double)(data->pos_literals+data->neg_literals)/clauses, (double)(data->pos_clause_literals)/data->pos_clauses, (double)(data->neg_clause_literals)/data->neg_clauses, (double)(data->mix_clause_literals)/data->mix_clauses, (double)(data->pos_literals)/clauses, (double)(data->neg_literals)/clauses, (double)(data->func_count)/clauses, (double)(data->var_count)/clauses, (double)(data->const_count)/clauses, (double)(data->pred_count)/clauses); fprintf(out, "# Longest Clause (if any): \n"); PCLStepPrint(out, data->longest_clause); fprintf(out, "\n# Largest Clause (if any): \n"); PCLStepPrint(out, data->max_symbol_clause); fprintf(out, "\n# Heaviest Clause (if any): \n"); ClausePropInfoPrint(out, data->max_standard_weight_clause->logic.clause); PCLStepPrint(out, data->max_standard_weight_clause); fprintf(out, "\n# Deepest Clause (if any): \n"); PCLStepPrint(out, data->max_depth_clause); fprintf(out, "\n"); } /*---------------------------------------------------------------------*/ /* End of File */ /*---------------------------------------------------------------------*/
/*----------------------------------------------------------------------- File : pcl_propanalysis.c Author: Stephan Schulz Contents Functions for collecting various amounts of statistical information about a PCL protocol. Copyright 1998-2011 by the author. This code is released under the GNU General Public Licence and the GNU Lesser General Public License. See the file COPYING in the main E directory for details.. Run "eprover -h" for contact information. Changes <1> Fri Mar 8 15:56:49 CET 2002 New -----------------------------------------------------------------------*/
dune
(library (name lp_glpk_bindings) (synopsis "Ctypes bindings that describe the libglpk FFI") (public_name lp-glpk.bindings) (libraries lp-glpk.types ctypes.stubs ctypes))
cors.mli
(** CORS (cross-origin resource sharing) controls the ways in which resources from different domains are allowed to be obtained. See the specifications at https://fetch.spec.whatwg.org/#http-cors-protocol *) type t = { allowed_headers : string list; (** https://fetch.spec.whatwg.org/#http-access-control-allow-headers *) allowed_origins : string list; (** https://fetch.spec.whatwg.org/#http-access-control-allow-origin *) } (** [default] is a [t] with no allowed headers and no allowed origins. *) val default : t val add_allow_origin : Cohttp.Header.t -> t -> string option -> Cohttp.Header.t val add_headers : Cohttp.Header.t -> t -> string option -> Cohttp.Header.t (** [check_host header t] is [true] if one of [t]'s members matches the [header]'s [Host] field. *) val check_host : Cohttp.Header.t -> t -> bool
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
script_timestamp_repr.ml
type t = Z.t let compare = Z.compare let of_int64 = Z.of_int64 let of_string x = match Time_repr.of_notation x with | None -> ( try Some (Z.of_string x) with _ -> None ) | Some time -> Some (of_int64 (Time_repr.to_seconds time)) let to_notation x = try let notation = Time_repr.to_notation (Time.of_seconds (Z.to_int64 x)) in if String.equal notation "out_of_range" then None else Some notation with _ -> None let to_num_str = Z.to_string let to_string x = match to_notation x with None -> to_num_str x | Some s -> s let diff x y = Script_int_repr.of_zint @@ Z.sub x y let sub_delta t delta = Z.sub t (Script_int_repr.to_zint delta) let add_delta t delta = Z.add t (Script_int_repr.to_zint delta) let to_zint x = x let of_zint x = x
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
ocaml_file.mli
val y : unit
vertex_header_intf.ml
module type H = sig module Common : Field.COMMON (** Module for all common metadata. *) module Nentry : Field.INT module Ndeadentry : Field.INT type t val load : marker:(unit -> unit) -> bytes -> t val init : t -> Field.kind -> unit val size : int val pp : t Fmt.t val g_magic : t -> Common.Magic.t val s_magic : t -> Common.Magic.t -> unit val g_kind : t -> Common.Kind.t val s_kind : t -> Common.Kind.t -> unit val g_nentry : t -> Nentry.t val s_nentry : t -> Nentry.t -> unit val g_ndeadentry : t -> Ndeadentry.t val s_ndeadentry : t -> Ndeadentry.t -> unit end module type MAKER = functor (Params : Params.S) (Store : Store.S) (Common : Field.COMMON) -> H with module Common := Common module type Vertex_header = sig module Make : MAKER end
(* * Copyright (c) 2021 Tarides <contact@tarides.com> * Copyright (c) 2021 Gabriel Belouze <gabriel.belouze@ens.psl.eu> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
nonce_hash.ml
(* 32 *) let nonce_hash = "\069\220\169" (* nce(53) *) include Blake2B.Make(Base58)(struct let name = "cycle_nonce" let title = "A nonce hash" let b58check_prefix = nonce_hash let size = None end) let () = Base58.check_encoded_prefix b58check_encoding "nce" 53
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
signals.ml
let rec tak (x, y, z) = if x > y then tak(tak (x-1, y, z), tak (y-1, z, x), tak (z-1, x, y)) else z let break_handler _ = print_string "Thank you for pressing ctrl-C."; print_newline(); print_string "Allocating a bit..."; flush stdout; ignore (tak(18,12,6)); print_string "done."; print_newline() let stop_handler _ = print_string "Thank you for pressing ctrl-Z."; print_newline(); print_string "Now raising an exception..."; print_newline(); raise Exit let _ = ignore (Sys.signal Sys.sigint (Sys.Signal_handle break_handler)); ignore (Sys.signal Sys.sigtstp (Sys.Signal_handle stop_handler)); begin try print_string "Computing like crazy..."; print_newline(); for i = 1 to 1000 do ignore (tak(18,12,6)) done; print_string "Reading on input..."; print_newline(); for i = 1 to 5 do try let s = read_line () in print_string ">> "; print_string s; print_newline() with Exit -> print_string "Got Exit, continuing."; print_newline() done with Exit -> print_string "Got Exit, exiting."; print_newline() end; exit 0
(**************************************************************************) (* *) (* OCaml *) (* *) (* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) (* *) (* Copyright 1995 Institut National de Recherche en Informatique et *) (* en Automatique. *) (* *) (* All rights reserved. This file is distributed under the terms of *) (* the GNU Lesser General Public License version 2.1, with the *) (* special exception on linking described in the file LICENSE. *) (* *) (**************************************************************************)
dune
(executables (names alloc) (libraries cairo2-gtk)) (alias (name tests-gtk) (deps alloc.exe) (action (progn (run %{dep:alloc.exe}))))
main.ml
open Format let () = printf "%d@." (EuclideanAlgorithm63.euclid (int_of_string Sys.argv.(1)) (int_of_string Sys.argv.(2))) (* let usage () = eprintf "Reduction of combinator terms@."; eprintf "Usage: %s <combinator term>@." Sys.argv.(0); exit 2 let input = if Array.length Sys.argv <> 2 then usage (); Sys.argv.(1) let input_term = if input = "go" then let i = Vstte12_combinators__Combinators.i in Vstte12_combinators__Combinators.App(i,i) else try Parse.parse_term input with _ -> begin eprintf "syntax error@."; usage () end let () = let a = Vstte12_combinators__Combinators.reduction input_term in printf "The normal form of %a is %a@." Parse.pr input_term Parse.pr a *)
block_payload_hash.ml
(* 32 *) let prefix = "\001\106\242" (* vh(52) *) include Blake2B.Make (Base58) (struct let name = "value_hash" let title = "Hash of a consensus value" let b58check_prefix = prefix let size = None end) let () = Base58.check_encoded_prefix b58check_encoding "vh" 52
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2020 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
round_repr.ml
type round = int32 type t = round module Map = Map.Make (Int32) include (Compare.Int32 : Compare.S with type t := t) let zero = 0l let succ n = if Compare.Int32.equal n Int32.max_int then invalid_arg "round_repr.succ: cannot apply succ to maximum round value" else Int32.succ n let pp fmt i = Format.fprintf fmt "%ld" i type error += Negative_round of int type error += Round_overflow of int let () = let open Data_encoding in register_error_kind `Permanent ~id:"negative_round" ~title:"Negative round" ~description:"Round cannot be built out of negative integers." ~pp:(fun ppf i -> Format.fprintf ppf "Negative round cannot be built out of negative integers (%Ld)" i) (obj1 (req "Negative_round" int64)) (function Negative_round i -> Some (Int64.of_int i) | _ -> None) (fun i -> Negative_round (Int64.to_int i)) ; register_error_kind `Permanent ~id:"round_overflow" ~title:"Round overflow" ~description: "Round cannot be built out of integer greater than maximum int32 value." ~pp:(fun ppf i -> Format.fprintf ppf "Round cannot be built out of integer greater than maximum int32 value \ (%Ld)" i) (obj1 (req "Negative_round" int64)) (function Round_overflow i -> Some (Int64.of_int i) | _ -> None) (fun i -> Round_overflow (Int64.to_int i)) let of_int32 i = if i >= 0l then Ok i else error (Negative_round (Int32.to_int i)) [@@inline] let pred r = let p = Int32.pred r in of_int32 p let of_int i = if Compare.Int.(i < 0) then error (Negative_round i) else (* i is positive *) let i32 = Int32.of_int i in if Compare.Int.(Int32.to_int i32 = i) then Ok i32 else error (Round_overflow i) let to_int i32 = let i = Int32.to_int i32 in if Int32.(equal (of_int i) i32) then ok i else error (Round_overflow i) let to_int32 t = t [@@inline] let to_slot round ~committee_size = to_int round >|? fun r -> let slot = r mod committee_size in Slot_repr.of_int_exn slot let encoding = Data_encoding.conv_with_guard (fun i -> i) (fun i -> match of_int32 i with | Ok _ as res -> res | Error _ -> Error "Round_repr.encoding: negative round") Data_encoding.int32 module Durations = struct type t = { first_round_duration : Period_repr.t; delay_increment_per_round : Period_repr.t; } type error += | Non_increasing_rounds of {increment : Period_repr.t} | Round_durations_must_be_at_least_one_second of {round : Period_repr.t} let () = register_error_kind `Permanent ~id:"durations.non_increasing_rounds" ~title:"Non increasing round" ~description:"The provided rounds are not increasing." ~pp:(fun ppf increment -> Format.fprintf ppf "The provided rounds are not increasing (increment: %a)" Period_repr.pp increment) Data_encoding.(obj1 (req "increment" Period_repr.encoding)) (function | Non_increasing_rounds {increment} -> Some increment | _ -> None) (fun increment -> Non_increasing_rounds {increment}) let pp fmt t = Format.fprintf fmt "%a,@ +%a" Period_repr.pp t.first_round_duration Period_repr.pp t.delay_increment_per_round let create ~first_round_duration ~delay_increment_per_round = error_when Compare.Int64.(Period_repr.to_seconds first_round_duration < 1L) (Round_durations_must_be_at_least_one_second {round = first_round_duration}) >>? fun () -> error_when Compare.Int64.(Period_repr.to_seconds delay_increment_per_round < 1L) (Non_increasing_rounds {increment = delay_increment_per_round}) >>? fun () -> ok {first_round_duration; delay_increment_per_round} let create_opt ~first_round_duration ~delay_increment_per_round = match create ~first_round_duration ~delay_increment_per_round with | Ok v -> Some v | Error _ -> None let encoding = let open Data_encoding in conv_with_guard (fun {first_round_duration; delay_increment_per_round} -> (first_round_duration, delay_increment_per_round)) (fun (first_round_duration, delay_increment_per_round) -> match create_opt ~first_round_duration ~delay_increment_per_round with | None -> Error "Either round durations are non-increasing or minimal block \ delay < 1" | Some rounds -> Ok rounds) (obj2 (req "first_round_duration" Period_repr.encoding) (req "delay_increment_per_round" Period_repr.encoding)) let round_duration {first_round_duration; delay_increment_per_round} round = if Compare.Int32.(round < 0l) then invalid_arg "round must be a non-negative integer" else let first_round_duration_s = Period_repr.to_seconds first_round_duration and delay_increment_per_round_s = Period_repr.to_seconds delay_increment_per_round in Period_repr.of_seconds_exn Int64.( add first_round_duration_s (mul (of_int32 round) delay_increment_per_round_s)) end type error += Round_too_high of int32 let () = let open Data_encoding in register_error_kind `Permanent ~id:"round_too_high" ~title:"round too high" ~description:"block round too high." ~pp:(fun ppf round -> Format.fprintf ppf "Block round is too high: %ld" round) (obj1 (req "level_offset_too_high" int32)) (function Round_too_high round -> Some round | _ -> None) (fun round -> Round_too_high round) (* The duration of round n follows the arithmetic sequence: round_duration(0) = first_round_duration round_duration(r+1) = round_duration(r) + delay_increment_per_round Hence, this sequence can be explicited into: round_duration(r) = first_round_duration + r * delay_increment_per_round The level offset of round r is the sum of the durations of the rounds up until round r - 1. In other words, when r > 0 level_offset_of_round(0) = 0 level_offset_of_round(r+1) = level_offset_of_round(r) + round_duration(r) Hence level_offset_of_round(r) = Σ_{k=0}^{r-1} (round_duration(k)) After unfolding the series, the same function can be finally explicited into level_offset_of_round(0) = 0 level_offset_of_round(r) = r * first_round_duration + 1/2 * r * (r - 1) * delay_increment_per_round *) let level_offset_of_round round_durations ~round = if Compare.Int32.(round = zero) then ok Int64.zero else let sum_durations = let Durations.{first_round_duration; delay_increment_per_round} = round_durations in let roundz = Int64.of_int32 round in let m = Z.of_int64 Int64.(div (mul roundz (pred roundz)) (of_int 2)) in Z.( add (mul m (Z.of_int64 @@ Period_repr.to_seconds delay_increment_per_round)) (mul (Z.of_int32 round) (Z.of_int64 @@ Period_repr.to_seconds first_round_duration))) in if Compare.Z.(sum_durations > Z.of_int64 Int64.max_int) then error (Round_too_high round) else ok (Z.to_int64 sum_durations) type error += Level_offset_too_high of Period_repr.t let () = let open Data_encoding in register_error_kind `Permanent ~id:"level_offset_too_high" ~title:"level offset too high" ~description:"The block's level offset is too high." ~pp:(fun ppf offset -> Format.fprintf ppf "The block's level offset is too high: %a" Period_repr.pp offset) (obj1 (req "level_offset_too_high" Period_repr.encoding)) (function Level_offset_too_high offset -> Some offset | _ -> None) (fun offset -> Level_offset_too_high offset) type round_and_offset = {round : int32; offset : Period_repr.t} (** Complexity: O(log max_int). *) let round_and_offset round_durations ~level_offset = let level_offset_in_seconds = Period_repr.to_seconds level_offset in (* We have the invariant [round <= level_offset] so there is no need to search beyond [level_offset]. We set [right_bound] to [level_offset + 1] to avoid triggering the error level_offset too high when the round equals [level_offset]. *) let right_bound = if Compare.Int64.(level_offset_in_seconds < Int64.of_int32 Int32.max_int) then Int32.of_int (Int64.to_int level_offset_in_seconds + 1) else Int32.max_int in let rec bin_search min_r max_r = if Compare.Int32.(min_r >= right_bound) then error (Level_offset_too_high level_offset) else let round = Int32.(add min_r (div (sub max_r min_r) 2l)) in level_offset_of_round round_durations ~round:(Int32.succ round) >>? fun next_level_offset -> if Compare.Int64.(Period_repr.to_seconds level_offset >= next_level_offset) then bin_search (Int32.succ round) max_r else level_offset_of_round round_durations ~round >>? fun current_level_offset -> if Compare.Int64.( Period_repr.to_seconds level_offset < current_level_offset) then bin_search min_r round else ok { round; offset = Period_repr.of_seconds_exn (Int64.sub (Period_repr.to_seconds level_offset) current_level_offset); } in bin_search 0l right_bound (** Complexity: O(|round_durations|). *) let timestamp_of_round round_durations ~predecessor_timestamp ~predecessor_round ~round = let pred_round_duration = Durations.round_duration round_durations predecessor_round in (* First, the function computes when the current level l is supposed to start. This is given by adding to the timestamp of the round of predecessor level l-1 [predecessor_timestamp], the duration of its last round [predecessor_round]. *) Time_repr.(predecessor_timestamp +? pred_round_duration) >>? fun start_of_current_level -> (* Finally, we sum the durations of the rounds at the current level l until reaching current [round]. *) level_offset_of_round round_durations ~round >>? fun level_offset -> let level_offset = Period_repr.of_seconds_exn level_offset in Time_repr.(start_of_current_level +? level_offset) (** Unlike [timestamp_of_round], this function gets the starting time of a given round, given the timestamp and the round of a proposal at the same level. We compute the starting time of [considered_round] from a given [round_durations] description, some [current_round], and its starting time [current_timestamp]. Complexity: O(|round_durations|). *) let timestamp_of_another_round_same_level round_durations ~current_timestamp ~current_round ~considered_round = level_offset_of_round round_durations ~round:considered_round >>? fun target_offset -> level_offset_of_round round_durations ~round:current_round >>? fun current_offset -> ok @@ Time_repr.of_seconds Int64.( add (sub (Time_repr.to_seconds current_timestamp) current_offset) target_offset) type error += | Round_of_past_timestamp of { provided_timestamp : Time.t; predecessor_timestamp : Time.t; predecessor_round : t; } let () = let open Data_encoding in register_error_kind `Permanent ~id:"round_of_past_timestamp" ~title:"Round_of_timestamp for past timestamp" ~description:"Provided timestamp is before the expected level start." ~pp:(fun ppf (provided_ts, predecessor_ts, round) -> Format.fprintf ppf "Provided timestamp (%a) is before the expected level start (computed \ based on predecessor_ts %a at round %a)." Time.pp_hum provided_ts Time.pp_hum predecessor_ts pp round) (obj3 (req "provided_timestamp" Time.encoding) (req "predecessor_timestamp" Time.encoding) (req "predecessor_round" encoding)) (function | Round_of_past_timestamp {provided_timestamp; predecessor_timestamp; predecessor_round} -> Some (provided_timestamp, predecessor_timestamp, predecessor_round) | _ -> None) (fun (provided_timestamp, predecessor_timestamp, predecessor_round) -> Round_of_past_timestamp {provided_timestamp; predecessor_timestamp; predecessor_round}) let round_of_timestamp round_durations ~predecessor_timestamp ~predecessor_round ~timestamp = let round_duration = Durations.round_duration round_durations predecessor_round in Time_repr.(predecessor_timestamp +? round_duration) >>? fun start_of_current_level -> Period_repr.of_seconds (Time_repr.diff timestamp start_of_current_level) |> Error_monad.record_trace (Round_of_past_timestamp { predecessor_timestamp; provided_timestamp = timestamp; predecessor_round; }) >>? fun diff -> round_and_offset round_durations ~level_offset:diff >>? fun round_and_offset -> ok round_and_offset.round let level_offset_of_round round_durations ~round = level_offset_of_round round_durations ~round >>? fun offset -> ok (Period_repr.of_seconds_exn offset) module Internals_for_test = struct type round_and_offset_raw = {round : round; offset : Period_repr.t} let round_and_offset round_durations ~level_offset = round_and_offset round_durations ~level_offset >|? fun v -> {round = v.round; offset = v.offset} end
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2021 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dependency.ml
let a = 1
format.mli
type formatter val pp_open_box : formatter -> int -> unit val open_box : int -> unit val pp_close_box : formatter -> unit -> unit val close_box : unit -> unit val pp_open_hbox : formatter -> unit -> unit val open_hbox : unit -> unit val pp_open_vbox : formatter -> int -> unit val open_vbox : int -> unit val pp_open_hvbox : formatter -> int -> unit val open_hvbox : int -> unit val pp_open_hovbox : formatter -> int -> unit val open_hovbox : int -> unit val pp_print_string : formatter -> string -> unit val print_string : string -> unit val pp_print_as : formatter -> int -> string -> unit val print_as : int -> string -> unit val pp_print_int : formatter -> int -> unit val print_int : int -> unit val pp_print_float : formatter -> float -> unit val print_float : float -> unit val pp_print_char : formatter -> char -> unit val print_char : char -> unit val pp_print_bool : formatter -> bool -> unit val print_bool : bool -> unit val pp_print_space : formatter -> unit -> unit val print_space : unit -> unit val pp_print_cut : formatter -> unit -> unit val print_cut : unit -> unit val pp_print_break : formatter -> int -> int -> unit val print_break : int -> int -> unit val pp_force_newline : formatter -> unit -> unit val force_newline : unit -> unit val pp_print_if_newline : formatter -> unit -> unit val print_if_newline : unit -> unit val pp_print_flush : formatter -> unit -> unit val print_flush : unit -> unit val pp_print_newline : formatter -> unit -> unit val print_newline : unit -> unit val pp_set_margin : formatter -> int -> unit val set_margin : int -> unit val pp_get_margin : formatter -> unit -> int val get_margin : unit -> int val pp_set_max_indent : formatter -> int -> unit val set_max_indent : int -> unit val pp_get_max_indent : formatter -> unit -> int val get_max_indent : unit -> int val pp_set_max_boxes : formatter -> int -> unit val set_max_boxes : int -> unit val pp_get_max_boxes : formatter -> unit -> int val get_max_boxes : unit -> int val pp_over_max_boxes : formatter -> unit -> bool val over_max_boxes : unit -> bool val pp_open_tbox : formatter -> unit -> unit val open_tbox : unit -> unit val pp_close_tbox : formatter -> unit -> unit val close_tbox : unit -> unit val pp_set_tab : formatter -> unit -> unit val set_tab : unit -> unit val pp_print_tab : formatter -> unit -> unit val print_tab : unit -> unit val pp_print_tbreak : formatter -> int -> int -> unit val print_tbreak : int -> int -> unit val pp_set_ellipsis_text : formatter -> string -> unit val set_ellipsis_text : string -> unit val pp_get_ellipsis_text : formatter -> unit -> string val get_ellipsis_text : unit -> string type tag = string val pp_open_tag : formatter -> string -> unit val open_tag : tag -> unit val pp_close_tag : formatter -> unit -> unit val close_tag : unit -> unit val pp_set_tags : formatter -> bool -> unit val set_tags : bool -> unit val pp_set_print_tags : formatter -> bool -> unit val set_print_tags : bool -> unit val pp_set_mark_tags : formatter -> bool -> unit val set_mark_tags : bool -> unit val pp_get_print_tags : formatter -> unit -> bool val get_print_tags : unit -> bool val pp_get_mark_tags : formatter -> unit -> bool val get_mark_tags : unit -> bool val pp_set_formatter_out_channel : formatter -> out_channel -> unit val set_formatter_out_channel : out_channel -> unit val pp_set_formatter_output_functions : formatter -> (string -> int -> int -> unit) -> (unit -> unit) -> unit val set_formatter_output_functions : (string -> int -> int -> unit) -> (unit -> unit) -> unit val pp_get_formatter_output_functions : formatter -> unit -> ((string -> int -> int -> unit) * (unit -> unit)) val get_formatter_output_functions : unit -> ((string -> int -> int -> unit) * (unit -> unit)) type formatter_out_functions = { out_string: string -> int -> int -> unit ; out_flush: unit -> unit ; out_newline: unit -> unit ; out_spaces: int -> unit ; out_indent: int -> unit } val pp_set_formatter_out_functions : formatter -> formatter_out_functions -> unit val set_formatter_out_functions : formatter_out_functions -> unit val pp_get_formatter_out_functions : formatter -> unit -> formatter_out_functions val get_formatter_out_functions : unit -> formatter_out_functions type formatter_tag_functions = { mark_open_tag: tag -> string ; mark_close_tag: tag -> string ; print_open_tag: tag -> unit ; print_close_tag: tag -> unit } val pp_set_formatter_tag_functions : formatter -> formatter_tag_functions -> unit val set_formatter_tag_functions : formatter_tag_functions -> unit val pp_get_formatter_tag_functions : formatter -> unit -> formatter_tag_functions val get_formatter_tag_functions : unit -> formatter_tag_functions val formatter_of_out_channel : out_channel -> formatter val std_formatter : formatter val err_formatter : formatter val formatter_of_buffer : Buffer.t -> formatter val stdbuf : Buffer.t val str_formatter : formatter val flush_str_formatter : unit -> string val make_formatter : (string -> int -> int -> unit) -> (unit -> unit) -> formatter val formatter_of_out_functions : formatter_out_functions -> formatter type symbolic_output_item = | Output_flush | Output_newline | Output_string of string | Output_spaces of int | Output_indent of int type symbolic_output_buffer val make_symbolic_output_buffer : unit -> symbolic_output_buffer val clear_symbolic_output_buffer : symbolic_output_buffer -> unit val get_symbolic_output_buffer : symbolic_output_buffer -> symbolic_output_item list val flush_symbolic_output_buffer : symbolic_output_buffer -> symbolic_output_item list val add_symbolic_output_item : symbolic_output_buffer -> symbolic_output_item -> unit val formatter_of_symbolic_output_buffer : symbolic_output_buffer -> formatter val pp_print_list : ?pp_sep:(formatter -> unit -> unit) -> (formatter -> 'a -> unit) -> formatter -> 'a list -> unit val pp_print_text : formatter -> string -> unit val fprintf : formatter -> ('a, formatter, unit) format -> 'a val printf : ('a, formatter, unit) format -> 'a val eprintf : ('a, formatter, unit) format -> 'a val sprintf : ('a, unit, string) format -> 'a val asprintf : ('a, formatter, unit, string) format4 -> 'a val ifprintf : formatter -> ('a, formatter, unit) format -> 'a val kfprintf : (formatter -> 'a) -> formatter -> ('b, formatter, unit, 'a) format4 -> 'b val ikfprintf : (formatter -> 'a) -> formatter -> ('b, formatter, unit, 'a) format4 -> 'b val ksprintf : (string -> 'a) -> ('b, unit, string, 'a) format4 -> 'b val kasprintf : (string -> 'a) -> ('b, formatter, unit, 'a) format4 -> 'b val bprintf : Buffer.t -> ('a, formatter, unit) format -> 'a[@@ocaml.deprecated "- : Buffer.t -> ('a, Format.formatter, unit) format -> 'a = <fun>"] val kprintf : (string -> 'a) -> ('b, unit, string, 'a) format4 -> 'b[@@ocaml.deprecated "Use Format.ksprintf instead."] val set_all_formatter_output_functions : out:(string -> int -> int -> unit) -> flush:(unit -> unit) -> newline:(unit -> unit) -> spaces:(int -> unit) -> unit[@@ocaml.deprecated "Use Format.set_formatter_out_functions instead."] val get_all_formatter_output_functions : unit -> ((string -> int -> int -> unit) * (unit -> unit) * (unit -> unit) * (int -> unit))[@@ocaml.deprecated "Use Format.get_formatter_out_functions instead."] val pp_set_all_formatter_output_functions : formatter -> out:(string -> int -> int -> unit) -> flush:(unit -> unit) -> newline:(unit -> unit) -> spaces:(int -> unit) -> unit[@@ocaml.deprecated "Use Format.pp_set_formatter_out_functions instead."] val pp_get_all_formatter_output_functions : formatter -> unit -> ((string -> int -> int -> unit) * (unit -> unit) * (unit -> unit) * (int -> unit))[@@ocaml.deprecated "Use Format.pp_get_formatter_out_functions instead."]
owl_opencl_kernel_map.h
#ifdef OWL_ENABLE_TEMPLATE // f : arr -> arr #ifdef CLFUN01 __kernel void CLFUN01 ( __global TYPE *a, __global TYPE *b ) { int gid = get_global_id(0); MAPFUN(a[gid], b[gid]); } #endif /* CLFUN01 */ // f : arr -> arr -> arr #ifdef CLFUN02 __kernel void CLFUN02 ( __global TYPE *a, __global TYPE *b, __global TYPE *c) { int gid = get_global_id(0); MAPFUN(a[gid], b[gid], c[gid]); } #endif /* CLFUN02 */ // f : arr -> elt -> arr #ifdef CLFUN03 __kernel void CLFUN03 ( __global TYPE *a, __global TYPE *b, __global TYPE *c) { int gid = get_global_id(0); MAPFUN(a[gid], b[0], c[gid]); } #endif /* CLFUN03 */ // f : elt -> arr -> arr #ifdef CLFUN04 __kernel void CLFUN04 ( __global TYPE *a, __global TYPE *b, __global TYPE *c) { int gid = get_global_id(0); MAPFUN(a[0], b[gid], c[gid]); } #endif /* CLFUN04 */ #endif /* OWL_ENABLE_TEMPLATE */
/* * OWL - OCaml Scientific and Engineering Computing * Copyright (c) 2016-2020 Liang Wang <liang.wang@cl.cam.ac.uk> */
bzladcr.h
#ifndef BZLADC_H_INCLUDED #define BZLADC_H_INCLUDED #include <stdint.h> #include "bzlatypes.h" void bzla_dcr_compute_scores(Bzla* bzla); void bzla_dcr_compute_scores_dual_prop(Bzla* bzla); int32_t bzla_dcr_compare_scores(Bzla* bzla, BzlaNode* a, BzlaNode* b); int32_t bzla_dcr_compare_scores_qsort(const void* p1, const void* p2); #endif
/*** * Bitwuzla: Satisfiability Modulo Theories (SMT) solver. * * This file is part of Bitwuzla. * * Copyright (C) 2007-2022 by the authors listed in the AUTHORS file. * * See COPYING for more information on using this software. */
light_proto.ml
(* Module implemented in the protocol *) module Proof = Tezos_context_sigs.Context.Proof_types module type PROTO_RPCS = sig val merkle_tree : Proxy.proxy_getter_input -> Tezos_protocol_environment.Proxy_context.M.key -> Proof.merkle_leaf_kind -> Proof.tree Proof.t option tzresult Lwt.t end
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2021 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
systime_os.mli
(** The current time according to the system clock *) val now : unit -> Ptime.t (** [sleep t] is an Lwt promise that resolves after [t] time has elapsed. If [t] is negative, [sleep t] is already resolved. *) val sleep : Ptime.Span.t -> unit Lwt.t
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2019 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
nonce_storage.mli
type error += | Too_late_revelation | Too_early_revelation | Previously_revealed_nonce | Unexpected_nonce type t = Seed_repr.nonce type nonce = t val encoding : nonce Data_encoding.t type unrevealed = Storage.Seed.unrevealed_nonce = { nonce_hash : Nonce_hash.t; delegate : Signature.Public_key_hash.t; rewards : Tez_repr.t; fees : Tez_repr.t; } type status = Unrevealed of unrevealed | Revealed of Seed_repr.nonce val get : Raw_context.t -> Level_repr.t -> status tzresult Lwt.t val record_hash : Raw_context.t -> unrevealed -> Raw_context.t tzresult Lwt.t val reveal : Raw_context.t -> Level_repr.t -> nonce -> Raw_context.t tzresult Lwt.t val of_bytes : bytes -> nonce tzresult val hash : nonce -> Nonce_hash.t val check_hash : nonce -> Nonce_hash.t -> bool
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
p2p_protocol.ml
module Events = P2p_events.P2p_protocol type ('msg, 'peer, 'conn) config = { swap_linger : Time.System.Span.t; pool : ('msg, 'peer, 'conn) P2p_pool.t; log : P2p_connection.P2p_event.t -> unit; connect : P2p_point.Id.t -> ('msg, 'peer, 'conn) P2p_conn.t tzresult Lwt.t; mutable latest_accepted_swap : Time.System.t; mutable latest_successful_swap : Time.System.t; } open P2p_answerer let message conn _request size msg = Lwt_pipe.Maybe_bounded.push conn.messages (size, msg) module Private_answerer = struct let advertise conn _request _points = Events.(emit private_node_new_peers) conn.peer_id let bootstrap conn _request = Lwt_result.ok @@ Events.(emit private_node_peers_request) conn.peer_id let swap_request conn _request _new_point _peer = Events.(emit private_node_swap_request) conn.peer_id let swap_ack conn _request _point _peer_id = Events.(emit private_node_swap_ack) conn.peer_id let create conn = P2p_answerer. { message = message conn; advertise = advertise conn; bootstrap = bootstrap conn; swap_request = swap_request conn; swap_ack = swap_ack conn; } end module Default_answerer = struct open P2p_connection.P2p_event let advertise config conn _request points = let log = config.log in let source_peer_id = conn.peer_id in log (Advertise_received {source = source_peer_id}) ; P2p_pool.register_list_of_new_points ~medium:"advertise" ~source:conn.peer_id config.pool points let bootstrap config conn _request_info = let open Lwt_result_syntax in let log = config.log in let source_peer_id = conn.peer_id in log (Bootstrap_received {source = source_peer_id}) ; if conn.is_private then let*! () = Events.(emit private_node_request) conn.peer_id in return_unit else let*! points = P2p_pool.list_known_points ~ignore_private:true config.pool in match points with | [] -> return_unit | points -> ( match conn.write_advertise points with | Ok true -> log (Advertise_sent {source = source_peer_id}) ; return_unit | Ok false -> (* if not sent then ?? TODO count dropped message ?? *) return_unit | Error err as error -> let*! () = Events.(emit advertise_sending_failed) (source_peer_id, err) in Lwt.return error) let swap t pool source_peer_id ~connect current_peer_id new_point = let open Lwt_syntax in t.latest_accepted_swap <- Time.System.now () ; let* r = connect new_point in match r with | Ok _new_conn -> ( t.latest_successful_swap <- Time.System.now () ; t.log (Swap_success {source = source_peer_id}) ; let* () = Events.(emit swap_succeeded) new_point in match P2p_pool.Connection.find_by_peer_id pool current_peer_id with | None -> Lwt.return_unit | Some conn -> P2p_conn.disconnect conn) | Error err -> ( t.latest_accepted_swap <- t.latest_successful_swap ; t.log (Swap_failure {source = source_peer_id}) ; match err with | [Timeout] -> Events.(emit swap_interrupted) (new_point, err) | _ -> Events.(emit swap_failed) (new_point, err)) let swap_ack config conn request new_point _peer = let open Lwt_syntax in let source_peer_id = conn.peer_id in let pool = config.pool in let connect = config.connect in let log = config.log in log (Swap_ack_received {source = source_peer_id}) ; let* () = Events.(emit swap_ack_received) source_peer_id in match request.last_sent_swap_request with | None -> Lwt.return_unit (* ignore *) | Some (_time, proposed_peer_id) -> ( match P2p_pool.Connection.find_by_peer_id pool proposed_peer_id with | None -> swap config pool source_peer_id ~connect proposed_peer_id new_point | Some _ -> Lwt.return_unit) let swap_request config conn _request new_point _peer = let open Lwt_syntax in let source_peer_id = conn.peer_id in let pool = config.pool in let swap_linger = config.swap_linger in let connect = config.connect in let log = config.log in log (Swap_request_received {source = source_peer_id}) ; let* () = Events.(emit swap_request_received) source_peer_id in (* Ignore if already connected to peer or already swapped less than <swap_linger> ago. *) let span_since_last_swap = Ptime.diff (Time.System.now ()) (Time.System.max config.latest_successful_swap config.latest_accepted_swap) in let new_point_info = P2p_pool.register_point pool new_point in if Ptime.Span.compare span_since_last_swap swap_linger < 0 || not (P2p_point_state.is_disconnected new_point_info) then ( log (Swap_request_ignored {source = source_peer_id}) ; Events.(emit swap_request_ignored) source_peer_id) else match P2p_pool.Connection.random_addr pool ~no_private:true with | None -> Events.(emit no_swap_candidate) source_peer_id | Some (proposed_point, proposed_peer_id) -> ( match conn.write_swap_ack proposed_point proposed_peer_id with | Ok true -> log (Swap_ack_sent {source = source_peer_id}) ; swap config pool source_peer_id ~connect proposed_peer_id new_point | Ok false | Error _ -> Lwt.return_unit) let create config conn = P2p_answerer. { message = message conn; advertise = advertise config conn; bootstrap = bootstrap config conn; swap_request = swap_request config conn; swap_ack = swap_ack config conn; } end let create_default = Default_answerer.create let create_private () = Private_answerer.create
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2021 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
bootstrap_storage.mli
(** This module provides functions that can be used in a private network to delay initial rewarding, typically when waiting for more bakers to join the network. *) val init : Raw_context.t -> typecheck: (Raw_context.t -> Script_repr.t -> ((Script_repr.t * Lazy_storage_diff.diffs option) * Raw_context.t) tzresult Lwt.t) -> ?no_reward_cycles:int -> Parameters_repr.bootstrap_account list -> Parameters_repr.bootstrap_contract list -> (Raw_context.t * Receipt_repr.balance_updates) tzresult Lwt.t val cycle_end : Raw_context.t -> Cycle_repr.t -> Raw_context.t tzresult Lwt.t
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2022 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
parsing_recovery_cpp.ml
open Common module Flag = Flag_parsing module PI = Parse_info module T = Parser_cpp module TH = Token_helpers_cpp (*****************************************************************************) (* Wrappers *) (*****************************************************************************) let pr2_err, _pr2_once = Common2.mk_pr2_wrappers Flag.verbose_parsing let pr2_err s = pr2_err ("ERROR_RECOV: " ^ s) (*****************************************************************************) (* Helpers *) (*****************************************************************************) (*****************************************************************************) (* Skipping stuff, find next "synchronisation" point *) (*****************************************************************************) (* todo: do something if find T.Eof ? *) let rec find_next_synchro ~next ~already_passed = (* Maybe because not enough }, because for example an ifdef contains * in both branch some opening {, we later eat too much, "on deborde * sur la fonction d'apres". So already_passed may be too big and * looking for next synchro point starting from next may not be the * best. So maybe we can find synchro point inside already_passed * instead of looking in next. * * But take care! must progress. We must not stay in infinite loop! * For instance now I have as a error recovery to look for * a "start of something", corresponding to start of function, * but must go beyond this start otherwise will loop. * So look at premier(external_declaration2) in parser.output and * pass at least those first tokens. * * I have chosen to start search for next synchro point after the * first { I found, so quite sure we will not loop. *) let last_round = List.rev already_passed in let is_define = let xs = last_round |> List.filter TH.is_not_comment in match xs with | T.TDefine _ :: _ -> true | _ -> false in if is_define then find_next_synchro_define (last_round @ next) [] else let before, after = last_round |> Common.span (fun tok -> match tok with (* by looking at TOBrace we are sure that the "start of something" * will not arrive too early *) | T.TOBrace _ -> false | T.TDefine _ -> false | _ -> true) in find_next_synchro_orig (after @ next) (List.rev before) and find_next_synchro_define next already_passed = match next with | [] -> pr2_err "end of file while in recovery mode"; (already_passed, []) | (T.TCommentNewline_DefineEndOfMacro _ as v) :: xs -> pr2_err (spf "found sync end of #define at line %d" (TH.line_of_tok v)); (v :: already_passed, xs) | v :: xs -> find_next_synchro_define xs (v :: already_passed) and find_next_synchro_orig next already_passed = match next with | [] -> pr2_err "end of file while in recovery mode"; (already_passed, []) | (T.TCBrace i as v) :: xs when PI.col_of_info i =|= 0 -> ( pr2_err (spf "found sync '}' at line %d" (PI.line_of_info i)); match xs with | [] -> raise Impossible (* there is a EOF token normally *) (* still useful: now parser.mly allow empty ';' so normally no pb *) | T.TPtVirg iptvirg :: xs -> pr2_err "found sync bis, eating } and ;"; (T.TPtVirg iptvirg :: v :: already_passed, xs) | T.TIdent x :: T.TPtVirg iptvirg :: xs -> pr2_err "found sync bis, eating ident, }, and ;"; (T.TPtVirg iptvirg :: T.TIdent x :: v :: already_passed, xs) | T.TCommentSpace sp :: T.TIdent x :: T.TPtVirg iptvirg :: xs -> pr2_err "found sync bis, eating ident, }, and ;"; ( T.TCommentSpace sp :: T.TPtVirg iptvirg :: T.TIdent x :: v :: already_passed, xs ) | _ -> (v :: already_passed, xs)) | v :: xs -> let info = TH.info_of_tok v in if PI.col_of_info info =|= 0 && TH.is_start_of_something v then ( pr2_err (spf "found sync col 0 at line %d " (PI.line_of_info info)); (already_passed, v :: xs)) else find_next_synchro_orig xs (v :: already_passed)
(* Yoann Padioleau * * Copyright (C) 2011 Facebook * Copyright (C) 2006, 2007, 2008 Ecole des Mines de Nantes * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * version 2.1 as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file * license.txt for more details. * *)
num.ml
open Int_misc open Nat open Big_int open Arith_flags open Ratio type num = Int of int | Big_int of big_int | Ratio of ratio (* The type of numbers. *) let biggest_INT = big_int_of_int biggest_int and least_INT = big_int_of_int least_int (* Coercion big_int -> num *) let num_of_big_int bi = if le_big_int bi biggest_INT && ge_big_int bi least_INT then Int (int_of_big_int bi) else Big_int bi let normalize_num = function Int i -> Int i | Big_int bi -> num_of_big_int bi | Ratio r -> if is_integer_ratio r then num_of_big_int (numerator_ratio r) else Ratio r let cautious_normalize_num_when_printing n = if (!normalize_ratio_when_printing_flag) then (normalize_num n) else n let num_of_ratio r = ignore (normalize_ratio r); if not (is_integer_ratio r) then Ratio r else if is_int_big_int (numerator_ratio r) then Int (int_of_big_int (numerator_ratio r)) else Big_int (numerator_ratio r) (* Operations on num *) let add_num a b = match (a,b) with ((Int int1), (Int int2)) -> let r = int1 + int2 in if (int1 lxor int2) lor (int1 lxor (r lxor (-1))) < 0 then Int r (* No overflow *) else Big_int(add_big_int (big_int_of_int int1) (big_int_of_int int2)) | ((Int i), (Big_int bi)) -> num_of_big_int (add_int_big_int i bi) | ((Big_int bi), (Int i)) -> num_of_big_int (add_int_big_int i bi) | ((Int i), (Ratio r)) -> Ratio (add_int_ratio i r) | ((Ratio r), (Int i)) -> Ratio (add_int_ratio i r) | ((Big_int bi1), (Big_int bi2)) -> num_of_big_int (add_big_int bi1 bi2) | ((Big_int bi), (Ratio r)) -> Ratio (add_big_int_ratio bi r) | ((Ratio r), (Big_int bi)) -> Ratio (add_big_int_ratio bi r) | ((Ratio r1), (Ratio r2)) -> num_of_ratio (add_ratio r1 r2) let ( +/ ) = add_num let minus_num = function Int i -> if i = monster_int then Big_int (minus_big_int (big_int_of_int i)) else Int (-i) | Big_int bi -> Big_int (minus_big_int bi) | Ratio r -> Ratio (minus_ratio r) let sub_num n1 n2 = add_num n1 (minus_num n2) let ( -/ ) = sub_num let mult_num a b = match (a,b) with ((Int int1), (Int int2)) -> if num_bits_int int1 + num_bits_int int2 < length_of_int then Int (int1 * int2) else num_of_big_int (mult_big_int (big_int_of_int int1) (big_int_of_int int2)) | ((Int i), (Big_int bi)) -> num_of_big_int (mult_int_big_int i bi) | ((Big_int bi), (Int i)) -> num_of_big_int (mult_int_big_int i bi) | ((Int i), (Ratio r)) -> num_of_ratio (mult_int_ratio i r) | ((Ratio r), (Int i)) -> num_of_ratio (mult_int_ratio i r) | ((Big_int bi1), (Big_int bi2)) -> num_of_big_int (mult_big_int bi1 bi2) | ((Big_int bi), (Ratio r)) -> num_of_ratio (mult_big_int_ratio bi r) | ((Ratio r), (Big_int bi)) -> num_of_ratio (mult_big_int_ratio bi r) | ((Ratio r1), (Ratio r2)) -> num_of_ratio (mult_ratio r1 r2) let ( */ ) = mult_num let square_num = function Int i -> if 2 * num_bits_int i < length_of_int then Int (i * i) else num_of_big_int (square_big_int (big_int_of_int i)) | Big_int bi -> Big_int (square_big_int bi) | Ratio r -> Ratio (square_ratio r) let div_num n1 n2 = match n1 with | Int i1 -> begin match n2 with | Int i2 -> num_of_ratio (create_ratio (big_int_of_int i1) (big_int_of_int i2)) | Big_int bi2 -> num_of_ratio (create_ratio (big_int_of_int i1) bi2) | Ratio r2 -> num_of_ratio (div_int_ratio i1 r2) end | Big_int bi1 -> begin match n2 with | Int i2 -> num_of_ratio (create_ratio bi1 (big_int_of_int i2)) | Big_int bi2 -> num_of_ratio (create_ratio bi1 bi2) | Ratio r2 -> num_of_ratio (div_big_int_ratio bi1 r2) end | Ratio r1 -> begin match n2 with | Int i2 -> num_of_ratio (div_ratio_int r1 i2) | Big_int bi2 -> num_of_ratio (div_ratio_big_int r1 bi2) | Ratio r2 -> num_of_ratio (div_ratio r1 r2) end ;; let ( // ) = div_num let floor_num = function Int _ as n -> n | Big_int _ as n -> n | Ratio r -> num_of_big_int (floor_ratio r) (* Coercion with ratio type *) let ratio_of_num = function Int i -> ratio_of_int i | Big_int bi -> ratio_of_big_int bi | Ratio r -> r ;; (* Euclidean division and remainder. The specification is: a = b * quo_num a b + mod_num a b quo_num a b is an integer (Z) 0 <= mod_num a b < |b| A correct but slow implementation is: quo_num a b = if b >= 0 then floor_num (div_num a b) else minus_num (floor_num (div_num a (minus_num b))) mod_num a b = sub_num a (mult_num b (quo_num a b)) However, this definition is vastly inefficient (cf PR #3473): we define here a better way of computing the same thing. PR#6753: the previous implementation was based on quo_num a b = floor_num (div_num a b) which is incorrect for negative b. *) let quo_num n1 n2 = match n1, n2 with | Int i1, Int i2 -> let q = i1 / i2 and r = i1 mod i2 in Int (if r >= 0 then q else if i2 > 0 then q - 1 else q + 1) | Int i1, Big_int bi2 -> num_of_big_int (div_big_int (big_int_of_int i1) bi2) | Int i1, Ratio r2 -> num_of_big_int (report_sign_ratio r2 (floor_ratio (div_int_ratio i1 (abs_ratio r2)))) | Big_int bi1, Int i2 -> num_of_big_int (div_big_int bi1 (big_int_of_int i2)) | Big_int bi1, Big_int bi2 -> num_of_big_int (div_big_int bi1 bi2) | Big_int bi1, Ratio r2 -> num_of_big_int (report_sign_ratio r2 (floor_ratio (div_big_int_ratio bi1 (abs_ratio r2)))) | Ratio r1, _ -> let r2 = ratio_of_num n2 in num_of_big_int (report_sign_ratio r2 (floor_ratio (div_ratio r1 (abs_ratio r2)))) let mod_num n1 n2 = match n1, n2 with | Int i1, Int i2 -> let r = i1 mod i2 in Int (if r >= 0 then r else if i2 > 0 then r + i2 else r - i2) | Int i1, Big_int bi2 -> num_of_big_int (mod_big_int (big_int_of_int i1) bi2) | Big_int bi1, Int i2 -> num_of_big_int (mod_big_int bi1 (big_int_of_int i2)) | Big_int bi1, Big_int bi2 -> num_of_big_int (mod_big_int bi1 bi2) | _, _ -> sub_num n1 (mult_num n2 (quo_num n1 n2)) let power_num_int a b = match (a,b) with ((Int i), n) -> (match sign_int n with 0 -> Int 1 | 1 -> num_of_big_int (power_int_positive_int i n) | _ -> Ratio (create_normalized_ratio unit_big_int (power_int_positive_int i (-n)))) | ((Big_int bi), n) -> (match sign_int n with 0 -> Int 1 | 1 -> num_of_big_int (power_big_int_positive_int bi n) | _ -> Ratio (create_normalized_ratio unit_big_int (power_big_int_positive_int bi (-n)))) | ((Ratio r), n) -> (match sign_int n with 0 -> Int 1 | 1 -> Ratio (power_ratio_positive_int r n) | _ -> Ratio (power_ratio_positive_int (inverse_ratio r) (-n))) let power_num_big_int a b = match (a,b) with ((Int i), n) -> (match sign_big_int n with 0 -> Int 1 | 1 -> num_of_big_int (power_int_positive_big_int i n) | _ -> Ratio (create_normalized_ratio unit_big_int (power_int_positive_big_int i (minus_big_int n)))) | ((Big_int bi), n) -> (match sign_big_int n with 0 -> Int 1 | 1 -> num_of_big_int (power_big_int_positive_big_int bi n) | _ -> Ratio (create_normalized_ratio unit_big_int (power_big_int_positive_big_int bi (minus_big_int n)))) | ((Ratio r), n) -> (match sign_big_int n with 0 -> Int 1 | 1 -> Ratio (power_ratio_positive_big_int r n) | _ -> Ratio (power_ratio_positive_big_int (inverse_ratio r) (minus_big_int n))) let power_num a b = match (a,b) with (n, (Int i)) -> power_num_int n i | (n, (Big_int bi)) -> power_num_big_int n bi | _ -> invalid_arg "power_num" let ( **/ ) = power_num let is_integer_num = function Int _ -> true | Big_int _ -> true | Ratio r -> is_integer_ratio r (* integer_num, floor_num, round_num, ceiling_num rendent des nums *) let integer_num = function Int _ as n -> n | Big_int _ as n -> n | Ratio r -> num_of_big_int (integer_ratio r) and round_num = function Int _ as n -> n | Big_int _ as n -> n | Ratio r -> num_of_big_int (round_ratio r) and ceiling_num = function Int _ as n -> n | Big_int _ as n -> n | Ratio r -> num_of_big_int (ceiling_ratio r) (* Comparisons on nums *) let sign_num = function Int i -> sign_int i | Big_int bi -> sign_big_int bi | Ratio r -> sign_ratio r let eq_num a b = match (a,b) with ((Int int1), (Int int2)) -> int1 = int2 | ((Int i), (Big_int bi)) -> eq_big_int (big_int_of_int i) bi | ((Big_int bi), (Int i)) -> eq_big_int (big_int_of_int i) bi | ((Int i), (Ratio r)) -> eq_big_int_ratio (big_int_of_int i) r | ((Ratio r), (Int i)) -> eq_big_int_ratio (big_int_of_int i) r | ((Big_int bi1), (Big_int bi2)) -> eq_big_int bi1 bi2 | ((Big_int bi), (Ratio r)) -> eq_big_int_ratio bi r | ((Ratio r), (Big_int bi)) -> eq_big_int_ratio bi r | ((Ratio r1), (Ratio r2)) -> eq_ratio r1 r2 let ( =/ ) = eq_num let ( <>/ ) a b = not(eq_num a b) let compare_num a b = match (a,b) with ((Int int1), (Int int2)) -> compare_int int1 int2 | ((Int i), (Big_int bi)) -> compare_big_int (big_int_of_int i) bi | ((Big_int bi), (Int i)) -> compare_big_int bi (big_int_of_int i) | ((Int i), (Ratio r)) -> compare_big_int_ratio (big_int_of_int i) r | ((Ratio r), (Int i)) -> -(compare_big_int_ratio (big_int_of_int i) r) | ((Big_int bi1), (Big_int bi2)) -> compare_big_int bi1 bi2 | ((Big_int bi), (Ratio r)) -> compare_big_int_ratio bi r | ((Ratio r), (Big_int bi)) -> -(compare_big_int_ratio bi r) | ((Ratio r1), (Ratio r2)) -> compare_ratio r1 r2 let lt_num num1 num2 = compare_num num1 num2 < 0 and le_num num1 num2 = compare_num num1 num2 <= 0 and gt_num num1 num2 = compare_num num1 num2 > 0 and ge_num num1 num2 = compare_num num1 num2 >= 0 let ( </ ) = lt_num and ( <=/ ) = le_num and ( >/ ) = gt_num and ( >=/ ) = ge_num let max_num num1 num2 = if lt_num num1 num2 then num2 else num1 and min_num num1 num2 = if gt_num num1 num2 then num2 else num1 (* Coercions with basic types *) (* Coercion with int type *) let int_of_num = function Int i -> i | Big_int bi -> int_of_big_int bi | Ratio r -> int_of_ratio r let int_of_num_opt = function Int i -> Some i | Big_int bi -> int_of_big_int_opt bi | Ratio r -> (try Some (int_of_ratio r) with Failure _ -> None) and num_of_int i = if i = monster_int then Big_int (big_int_of_int i) else Int i (* Coercion with nat type *) let nat_of_num = function Int i -> nat_of_int i | Big_int bi -> nat_of_big_int bi | Ratio r -> nat_of_ratio r and num_of_nat nat = if (is_nat_int nat 0 (length_nat nat)) then Int (nth_digit_nat nat 0) else Big_int (big_int_of_nat nat) let nat_of_num_opt x = try Some (nat_of_num x) with Failure _ -> None (* Coercion with big_int type *) let big_int_of_num = function Int i -> big_int_of_int i | Big_int bi -> bi | Ratio r -> big_int_of_ratio r let big_int_of_num_opt x = try Some (big_int_of_num x) with Failure _ -> None let string_of_big_int_for_num bi = if !approx_printing_flag then approx_big_int !floating_precision bi else string_of_big_int bi (* Coercion with string type *) let string_of_normalized_num = function Int i -> string_of_int i | Big_int bi -> string_of_big_int_for_num bi | Ratio r -> string_of_ratio r let string_of_num n = string_of_normalized_num (cautious_normalize_num_when_printing n) let num_of_string s = try let flag = !normalize_ratio_flag in normalize_ratio_flag := true; let r = ratio_of_string s in normalize_ratio_flag := flag; if eq_big_int (denominator_ratio r) unit_big_int then num_of_big_int (numerator_ratio r) else Ratio r with Failure _ -> failwith "num_of_string" let num_of_string_opt s = try Some (num_of_string s) with Failure _ -> None (* Coercion with float type *) let float_of_num = function Int i -> float i | Big_int bi -> float_of_big_int bi | Ratio r -> float_of_ratio r let succ_num = function Int i -> if i = biggest_int then Big_int (succ_big_int (big_int_of_int i)) else Int (succ i) | Big_int bi -> num_of_big_int (succ_big_int bi) | Ratio r -> Ratio (add_int_ratio 1 r) and pred_num = function Int i -> if i = monster_int then Big_int (pred_big_int (big_int_of_int i)) else Int (pred i) | Big_int bi -> num_of_big_int (pred_big_int bi) | Ratio r -> Ratio (add_int_ratio (-1) r) let abs_num = function Int i -> if i = monster_int then Big_int (minus_big_int (big_int_of_int i)) else Int (abs i) | Big_int bi -> Big_int (abs_big_int bi) | Ratio r -> Ratio (abs_ratio r) let approx_num_fix n num = approx_ratio_fix n (ratio_of_num num) and approx_num_exp n num = approx_ratio_exp n (ratio_of_num num) let incr_num r = r := succ_num !r and decr_num r = r := pred_num !r
(**************************************************************************) (* *) (* OCaml *) (* *) (* Valerie Menissier-Morain, projet Cristal, INRIA Rocquencourt *) (* *) (* Copyright 1996 Institut National de Recherche en Informatique et *) (* en Automatique. *) (* *) (* All rights reserved. This file is distributed under the terms of *) (* the GNU Lesser General Public License version 2.1, with the *) (* special exception on linking described in the file LICENSE. *) (* *) (**************************************************************************)
protocol_validator_event.ml
(** Declares logging events for [protocol_validator] *) include Internal_event.Simple let section = ["node"; "validator"] let validator_terminated = declare_0 ~section ~name:"validator_terminated" ~msg:"validator terminated" ~level:Notice () let unexpected_worker_error = declare_1 ~section ~name:"unexpected_worker_error" ~msg:"unexpected worker error: {trace}" ~level:Notice ~pp1:pp_print_top_error_of_trace ("trace", trace_encoding) let previously_validated_protocol = declare_1 ~section ~name:"previously_validated_protocol" ~msg:"protocol {hash} already validated, ignoring" ~level:Debug ("hash", Protocol_hash.encoding) let pushing_protocol_validation = declare_1 ~section ~name:"pushing_protocol_validation" ~msg:"pushing validation request for protocol {hash}" ~level:Debug ("hash", Protocol_hash.encoding) let fetching_protocol = declare_2 ~section ~name:"fetching_protocol" ~msg:"fetching protocol {hash}" ~level:Notice ("hash", Protocol_hash.encoding) ("source", Data_encoding.option P2p_peer.Id.encoding)
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2020 Nomadic Labs. <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dimacs.mli
(********************************************************************) (* *) (* The Why3 Verification Platform / The Why3 Development Team *) (* Copyright 2010-2023 -- Inria - CNRS - Paris-Saclay University *) (* *) (* This software is distributed under the terms of the GNU Lesser *) (* General Public License version 2.1, with the special exception *) (* on linking described in file LICENSE. *) (* *) (********************************************************************)
lem_list_extra.ml
(*Generated by Lem from list_extra.lem.*) open Lem_bool open Lem_maybe open Lem_basic_classes open Lem_tuple open Lem_num open Lem_list open Lem_assert_extra (* ------------------------- *) (* head of non-empty list *) (* ------------------------- *) (*val head : forall 'a. list 'a -> 'a*) (*let head l = match l with | x::xs -> x | [] -> failwith "List_extra.head of empty list" end*) (* ------------------------- *) (* tail of non-empty list *) (* ------------------------- *) (*val tail : forall 'a. list 'a -> list 'a*) (*let tail l = match l with | x::xs -> xs | [] -> failwith "List_extra.tail of empty list" end*) (* ------------------------- *) (* last *) (* ------------------------- *) (*val last : forall 'a. list 'a -> 'a*) let rec last l = ((match l with | [x] -> x | x1::x2::xs -> last (x2 :: xs) | [] -> failwith "List_extra.last of empty list" )) (* ------------------------- *) (* init *) (* ------------------------- *) (* All elements of a non-empty list except the last one. *) (*val init : forall 'a. list 'a -> list 'a*) let rec init l = ((match l with | [x] -> [] | x1::x2::xs -> x1::(init (x2::xs)) | [] -> failwith "List_extra.init of empty list" )) (* ------------------------- *) (* foldl1 / foldr1 *) (* ------------------------- *) (* folding functions for non-empty lists, which don`t take the base case *) (*val foldl1 : forall 'a. ('a -> 'a -> 'a) -> list 'a -> 'a*) let foldl1 f x_xs = ((match x_xs with | (x :: xs) -> List.fold_left f x xs | [] -> failwith "List_extra.foldl1 of empty list" )) (*val foldr1 : forall 'a. ('a -> 'a -> 'a) -> list 'a -> 'a*) let foldr1 f x_xs = ((match x_xs with | (x :: xs) -> List.fold_right f xs x | [] -> failwith "List_extra.foldr1 of empty list" )) (* ------------------------- *) (* nth element *) (* ------------------------- *) (* get the nth element of a list *) (*val nth : forall 'a. list 'a -> nat -> 'a*) (*let nth l n = match index l n with Just e -> e | Nothing -> failwith "List_extra.nth" end*) (* ------------------------- *) (* Find_non_pure *) (* ------------------------- *) (*val findNonPure : forall 'a. ('a -> bool) -> list 'a -> 'a*) let findNonPure p0 l = ((match (list_find_opt p0 l) with | Some e -> e | None -> failwith "List_extra.findNonPure" )) (* ------------------------- *) (* zip same length *) (* ------------------------- *) (*val zipSameLength : forall 'a 'b. list 'a -> list 'b -> list ('a * 'b)*) (*let rec zipSameLength l1 l2 = match (l1, l2) with | (x :: xs, y :: ys) -> (x, y) :: zipSameLength xs ys | ([], []) -> [] | _ -> failwith "List_extra.zipSameLength of different length lists" end*)
(*Generated by Lem from list_extra.lem.*)
method.ml
open Import include Httpaf.Method let sexp_of_t meth = Sexp_conv.sexp_of_string (to_string meth) let pp fmt t = Sexp.pp_hum fmt (sexp_of_t t)
ml_gtkxmhtml.c
/**************************************************************************/ /* Lablgtk */ /* */ /* This program is free software; you can redistribute it */ /* and/or modify it under the terms of the GNU Library General */ /* Public License as published by the Free Software Foundation */ /* version 2, with the exception described in file COPYING which */ /* comes with the library. */ /* */ /* This program is distributed in the hope that it will be useful, */ /* but WITHOUT ANY WARRANTY; without even the implied warranty of */ /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */ /* GNU Library General Public License for more details. */ /* */ /* You should have received a copy of the GNU Library General */ /* Public License along with this program; if not, write to the */ /* Free Software Foundation, Inc., 59 Temple Place, Suite 330, */ /* Boston, MA 02111-1307 USA */ /* */ /* */ /**************************************************************************/ /* $Id$ */ #include <string.h> #include <gtk/gtk.h> #include <gtk-xmhtml/gtk-xmhtml.h> #include <caml/mlvalues.h> #include <caml/alloc.h> #include <caml/memory.h> #include <caml/callback.h> #include <caml/fail.h> #include "wrappers.h" #include "ml_glib.h" #include "ml_gdk.h" #include "ml_gtk.h" #include "gtkxmhtml_tags.h" /* conversion functions */ #include "gtkxmhtml_tags.c" Make_Flags_val (Line_type_val) #define GtkXmHTML_val(val) ((GtkXmHTML*)GtkObject_val(val)) ML_0 (gtk_xmhtml_new, Val_GtkAny_sink) ML_1 (gtk_xmhtml_freeze, GtkXmHTML_val, Unit) ML_1 (gtk_xmhtml_thaw, GtkXmHTML_val, Unit) ML_2 (gtk_xmhtml_source, GtkXmHTML_val, String_val, Unit) ML_2 (gtk_xmhtml_set_string_direction, GtkXmHTML_val, String_direction_val, Unit) ML_2 (gtk_xmhtml_set_alignment, GtkXmHTML_val, Alignment_val, Unit) /* ML_2 (gtk_xmhtml_outline, GtkXmHTML_val, Bool_val, Unit) */ ML_3 (gtk_xmhtml_set_font_familty, GtkXmHTML_val, String_val, String_val, Unit) ML_3 (gtk_xmhtml_set_font_familty_fixed, GtkXmHTML_val, String_val, String_val, Unit) ML_2 (gtk_xmhtml_set_font_charset, GtkXmHTML_val, String_val, Unit) ML_2 (gtk_xmhtml_set_allow_body_colors, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_hilight_on_enter, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_anchor_underline_type, GtkXmHTML_val, Flags_Line_type_val, Unit) ML_2 (gtk_xmhtml_set_anchor_visited_underline_type, GtkXmHTML_val, Flags_Line_type_val, Unit) ML_2 (gtk_xmhtml_set_anchor_target_underline_type, GtkXmHTML_val, Flags_Line_type_val, Unit) ML_2 (gtk_xmhtml_set_allow_color_switching, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_dithering, GtkXmHTML_val, Dither_type_val, Unit) ML_2 (gtk_xmhtml_set_allow_font_switching, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_max_image_colors, GtkXmHTML_val, Int_val, Unit) ML_2 (gtk_xmhtml_set_allow_images, GtkXmHTML_val, Bool_val, Unit) ML_4 (gtk_xmhtml_set_plc_intervals, GtkXmHTML_val, Int_val, Int_val, Int_val, Unit) /* ML_2 (gtk_xmhtml_set_def_body_image_url, GtkXmHTML_val, String_val, Unit) */ ML_2 (gtk_xmhtml_set_anchor_buttons, GtkXmHTML_val, Bool_val, Unit) CAMLprim value ml_gtk_xmhtml_set_anchor_cursor(value html, value cursor) { gtk_xmhtml_set_anchor_cursor (GtkXmHTML_val(html), Option_val(cursor, GdkCursor_val, NULL), Bool_val(cursor)); return Val_unit; } ML_2 (gtk_xmhtml_set_topline, GtkXmHTML_val, Int_val, Unit) ML_1 (gtk_xmhtml_get_topline, GtkXmHTML_val, Val_int) ML_2 (gtk_xmhtml_set_freeze_animations, GtkXmHTML_val, Bool_val, Unit) /* ML_1 (gtk_xmhtml_get_source, GtkXmHTML_val, copy_string) */ ML_2 (gtk_xmhtml_set_screen_gamma, GtkXmHTML_val, Float_val, Unit) /* ML_2 (gtk_xmhtml_set_event_proc, GtkXmHTML_val, ???, Unit) */ ML_2 (gtk_xmhtml_set_perfect_colors, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_uncompress_command, GtkXmHTML_val, String_val, Unit) ML_2 (gtk_xmhtml_set_strict_checking, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_bad_html_warnings, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_allow_form_coloring, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_imagemap_draw, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_mime_type, GtkXmHTML_val, String_val, Unit) ML_2 (gtk_xmhtml_set_alpha_processing, GtkXmHTML_val, Bool_val, Unit) ML_2 (gtk_xmhtml_set_rgb_conv_mode, GtkXmHTML_val, Dither_type_val, Unit)
/**************************************************************************/ /* Lablgtk */ /* */ /* This program is free software; you can redistribute it */ /* and/or modify it under the terms of the GNU Library General */ /* Public License as published by the Free Software Foundation */ /* version 2, with the exception described in file COPYING which */ /* comes with the library. */ /* */ /* This program is distributed in the hope that it will be useful, */ /* but WITHOUT ANY WARRANTY; without even the implied warranty of */ /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */ /* GNU Library General Public License for more details. */ /* */ /* You should have received a copy of the GNU Library General */ /* Public License along with this program; if not, write to the */ /* Free Software Foundation, Inc., 59 Temple Place, Suite 330, */ /* Boston, MA 02111-1307 USA */ /* */ /* */ /**************************************************************************/
parse_cpp.ml
open Common module Flag = Flag_parsing module PI = Parse_info module PS = Parsing_stat module FT = File_type module Ast = Ast_cpp module Flag_cpp = Flag_parsing_cpp module T = Parser_cpp module TH = Token_helpers_cpp module Lexer = Lexer_cpp let logger = Logging.get_logger [ __MODULE__ ] (*****************************************************************************) (* Prelude *) (*****************************************************************************) (* * A heuristic-based C/cpp/C++ parser. * * See "Parsing C/C++ Code without Pre-Preprocessing - Yoann Padioleau, CC'09" * avalaible at http://padator.org/papers/yacfe-cc09.pdf *) (* let use_dypgen = false *) (*****************************************************************************) (* Error diagnostic *) (*****************************************************************************) let error_msg_tok tok = Parsing_helpers.error_message_info (TH.info_of_tok tok) (*****************************************************************************) (* Stats on what was passed/commentized *) (*****************************************************************************) let commentized xs = xs |> Common.map_filter (function | T.TComment_Pp (cppkind, ii) -> if !Flag_cpp.filter_classic_passed then match cppkind with | Token_cpp.CppOther -> ( let s = PI.str_of_info ii in match s with | s when s =~ "KERN_.*" -> None | s when s =~ "__.*" -> None | _ -> Some ii.PI.token) | Token_cpp.CppDirective | Token_cpp.CppAttr | Token_cpp.CppMacro -> None | Token_cpp.CppMacroExpanded | Token_cpp.CppPassingNormal | Token_cpp.CppPassingCosWouldGetError -> raise Todo else Some ii.PI.token | T.TAny_Action ii -> Some ii.PI.token | _ -> None) let count_lines_commentized xs = let line = ref (-1) in let count = ref 0 in commentized xs |> List.iter (function | PI.OriginTok pinfo | PI.ExpandedTok (_, pinfo, _) -> let newline = pinfo.PI.line in if newline <> !line then ( line := newline; incr count) | _ -> ()); !count (* See also problematic_lines and parsing_stat.ml *) (* for most problematic tokens *) let is_same_line_or_close line tok = TH.line_of_tok tok =|= line || TH.line_of_tok tok =|= line - 1 || TH.line_of_tok tok =|= line - 2 (*****************************************************************************) (* Lexing only *) (*****************************************************************************) (* called by parse below *) let tokens file = Parsing_helpers.tokenize_all_and_adjust_pos file Lexer.token TH.visitor_info_of_tok TH.is_eof [@@profiling] (*****************************************************************************) (* Fuzzy parsing *) (*****************************************************************************) let rec multi_grouped_list xs = xs |> List.map multi_grouped and multi_grouped = function | Token_views_cpp.Braces (tok1, xs, Some tok2) -> Ast_fuzzy.Braces (tokext tok1, multi_grouped_list xs, tokext tok2) | Token_views_cpp.Parens (tok1, xs, Some tok2) -> Ast_fuzzy.Parens (tokext tok1, multi_grouped_list_comma xs, tokext tok2) | Token_views_cpp.Angle (tok1, xs, Some tok2) -> Ast_fuzzy.Angle (tokext tok1, multi_grouped_list xs, tokext tok2) | Token_views_cpp.Tok tok -> ( match PI.str_of_info (tokext tok) with | "..." -> Ast_fuzzy.Dots (tokext tok) | s when Ast_fuzzy.is_metavar s -> Ast_fuzzy.Metavar (s, tokext tok) | s -> Ast_fuzzy.Tok (s, tokext tok)) | _ -> failwith "could not find closing brace/parens/angle" and tokext tok_extended = TH.info_of_tok tok_extended.Token_views_cpp.t and multi_grouped_list_comma xs = let rec aux acc xs = match xs with | [] -> if null acc then [] else [ Left (acc |> List.rev |> multi_grouped_list) ] | x :: xs -> ( match x with | Token_views_cpp.Tok tok when PI.str_of_info (tokext tok) = "," -> let before = acc |> List.rev |> multi_grouped_list in if null before then aux [] xs else Left before :: Right (tokext tok) :: aux [] xs | _ -> aux (x :: acc) xs) in aux [] xs (* This is similar to what I did for OPA. This is also similar * to what I do for parsing hacks, but this fuzzy AST can be useful * on its own, e.g. for a not too bad sgrep/spatch. * * note: this is similar to what cpplint/fblint of andrei does? *) let parse_fuzzy file = Common.save_excursion Flag_parsing.sgrep_mode true (fun () -> let toks_orig = tokens file in let toks = toks_orig |> Common.exclude (fun x -> Token_helpers_cpp.is_comment x || Token_helpers_cpp.is_eof x) in let extended = toks |> List.map Token_views_cpp.mk_token_extended in Parsing_hacks_cpp.find_template_inf_sup extended; let groups = Token_views_cpp.mk_multi extended in let trees = multi_grouped_list groups in let hooks = { Lib_ast_fuzzy.kind = TH.token_kind_of_tok; tokf = TH.info_of_tok } in (trees, Lib_ast_fuzzy.mk_tokens hooks toks_orig)) (*****************************************************************************) (* Extract macros *) (*****************************************************************************) (* It can be used to to parse the macros defined in a macro.h file. It * can also be used to try to extract the macros defined in the file * that we try to parse *) let extract_macros file = Common.save_excursion Flag.verbose_lexing false (fun () -> let toks = tokens (* todo: ~profile:false *) file in let toks = Parsing_hacks_define.fix_tokens_define toks in Pp_token.extract_macros toks) [@@profiling] (* less: pass it as a parameter to parse_program instead ? * old: was a ref, but a hashtbl.t is actually already a kind of ref *) let (_defs : (string, Pp_token.define_body) Hashtbl.t) = Hashtbl.create 101 (* We used to have also a init_defs_builtins() so that we could use a * standard.h containing macros that were always useful, and a macros.h * that the user could customize for his own project. * But this was adding complexity so now we just have _defs and people * can call add_defs to add local macro definitions. *) let add_defs file = if not (Sys.file_exists file) then failwith (spf "Could not find %s, have you set PFFF_HOME correctly?" file); logger#info "Using %s macro file" file; let xs = extract_macros file in xs |> List.iter (fun (k, v) -> Hashtbl.add _defs k v) let init_defs file = Hashtbl.clear _defs; add_defs file (*****************************************************************************) (* Error recovery *) (*****************************************************************************) (* see parsing_recovery_cpp.ml *) (*****************************************************************************) (* Consistency checking *) (*****************************************************************************) (* todo: a parsing_consistency_cpp.ml *) (*****************************************************************************) (* Helper for main entry point *) (*****************************************************************************) open Parsing_helpers (* Hacked lex. This function use refs passed by parse. * 'tr' means 'token refs'. This is used mostly to enable * error recovery (This used to do lots of stuff, such as * calling some lookahead heuristics to reclassify * tokens such as TIdent into TIdent_Typeded but this is * now done in a fix_tokens style in parsing_hacks_typedef.ml. *) let rec lexer_function tr lexbuf = match tr.rest with | [] -> logger#error "LEXER: ALREADY AT END"; tr.current | v :: xs -> tr.rest <- xs; tr.current <- v; tr.passed <- v :: tr.passed; if !Flag.debug_lexer then pr2_gen v; if TH.is_comment v then lexer_function (*~pass*) tr lexbuf else v (* was a define ? *) let passed_a_define tr = let xs = tr.passed |> List.rev |> Common.exclude TH.is_comment in if List.length xs >= 2 then match Common2.head_middle_tail xs with | T.TDefine _, _, T.TCommentNewline_DefineEndOfMacro _ -> true | _ -> false else ( logger#error "WIERD: length list of error recovery tokens < 2 "; false) (*****************************************************************************) (* Main entry point *) (*****************************************************************************) (* * note: as now we go in two passes, there is first all the error message of * the lexer, and then the error of the parser. It is not anymore * interwinded. * * !!!This function use refs, and is not reentrant !!! so take care. * It uses the _defs global defined above!!!! *) let parse_with_lang ?(lang = Flag_parsing_cpp.Cplusplus) file : (Ast.program, T.token) Parsing_result.t = let stat = Parsing_stat.default_stat file in let filelines = Common2.cat_array file in (* -------------------------------------------------- *) (* call lexer and get all the tokens *) (* -------------------------------------------------- *) let toks_orig = tokens file in let toks = try Parsing_hacks.fix_tokens ~macro_defs:_defs lang toks_orig with | Token_views_cpp.UnclosedSymbol s -> logger#error "unclosed symbol %s" s; if !Flag_cpp.debug_cplusplus then raise (Token_views_cpp.UnclosedSymbol s) else toks_orig in let tr = Parsing_helpers.mk_tokens_state toks in let lexbuf_fake = Lexing.from_function (fun _buf _n -> raise Impossible) in let rec loop () = let info = TH.info_of_tok tr.Parsing_helpers.current in (* todo?: I am not sure that it represents current_line, cos maybe * tr.current partipated in the previous parsing phase, so maybe tr.current * is not the first token of the next parsing phase. Same with checkpoint2. * It would be better to record when we have a } or ; in parser.mly, * cos we know that they are the last symbols of external_declaration2. *) let checkpoint = PI.line_of_info info in (* bugfix: may not be equal to 'file' as after macro expansions we can * start to parse a new entity from the body of a macro, for instance * when parsing a define_machine() body, cf standard.h *) let checkpoint_file = PI.file_of_info info in tr.passed <- []; (* for some statistics *) let was_define = ref false in let parse_toplevel tr lexbuf_fake = Parser_cpp.toplevel (lexer_function tr) lexbuf_fake (* if not use_dypgen then else let (save1, save2, save3) = tr.PI.rest, tr.PI.current, tr.PI.passed in Parser_cpp.toplevel (lexer_function tr) lexbuf_fake try with _e -> tr.PI.rest <- save1; tr.PI.current <- save2; tr.PI.passed <- save3; (try Parser_cpp2.toplevel (lexer_function tr) lexbuf_fake |> List.hd |> fst with Failure "hd" -> logger#error "no elements"; raise Parsing.Parse_error ) *) in let elem = try (* -------------------------------------------------- *) (* Call parser *) (* -------------------------------------------------- *) parse_toplevel tr lexbuf_fake with | exn -> let e = Exception.catch exn in if not !Flag.error_recovery then raise (Parse_info.Parsing_error (TH.info_of_tok tr.Parsing_helpers.current)); (if !Flag.show_parsing_error then match exn with (* ocamlyacc *) | Parsing.Parse_error (* dypgen *) | Dyp.Syntax_error (* menhir *) | Parser_cpp.Error -> pr2 ("parse error \n = " ^ error_msg_tok tr.Parsing_helpers.current) | Parse_info.Other_error (s, _i) -> pr2 ("semantic error " ^ s ^ "\n =" ^ error_msg_tok tr.Parsing_helpers.current) | _ -> Exception.reraise e); let line_error = TH.line_of_tok tr.Parsing_helpers.current in let pbline = tr.Parsing_helpers.passed |> List.filter (is_same_line_or_close line_error) |> List.filter TH.is_ident_like in let error_info = ( pbline |> List.map (fun tok -> PI.str_of_info (TH.info_of_tok tok)), line_error ) in stat.PS.problematic_lines <- error_info :: stat.PS.problematic_lines; (* error recovery, go to next synchro point *) let passed', rest' = Parsing_recovery_cpp.find_next_synchro tr.Parsing_helpers.rest tr.Parsing_helpers.passed in tr.Parsing_helpers.rest <- rest'; tr.Parsing_helpers.passed <- passed'; tr.Parsing_helpers.current <- List.hd passed'; (* <> line_error *) let info = TH.info_of_tok tr.Parsing_helpers.current in let checkpoint2 = PI.line_of_info info in let checkpoint2_file = PI.file_of_info info in was_define := passed_a_define tr; if !was_define && !Flag_cpp.filter_define_error then () else if (* bugfix: *) checkpoint_file = checkpoint2_file && checkpoint_file = file then Parsing_helpers.print_bad line_error (checkpoint, checkpoint2) filelines else pr2 "PB: bad: but on tokens not from original file"; let info_of_bads = Common2.map_eff_rev TH.info_of_tok tr.Parsing_helpers.passed in Some (X (D (Ast.NotParsedCorrectly info_of_bads))) in (* again not sure if checkpoint2 corresponds to end of bad region *) let info = TH.info_of_tok tr.Parsing_helpers.current in let checkpoint2 = PI.line_of_info info in let checkpoint2_file = PI.file_of_info info in let diffline = if checkpoint_file = checkpoint2_file && checkpoint_file = file then checkpoint2 - checkpoint else 0 (* TODO? so if error come in middle of something ? where the * start token was from original file but synchro found in body * of macro ? then can have wrong number of lines stat. * Maybe simpler just to look at tr.passed and count * the lines in the token from the correct file ? *) in let info = List.rev tr.Parsing_helpers.passed in (* some stat updates *) stat.PS.commentized <- stat.PS.commentized + count_lines_commentized info; (match elem with | Some (Ast.X (Ast.D (Ast.NotParsedCorrectly _xs))) -> (* todo: could count same line multiple times! use Hashtbl.add * and a simple Hashtbl.length at the end to add in error_line_count *) if !was_define && !Flag_cpp.filter_define_error then stat.PS.commentized <- stat.PS.commentized + diffline else stat.PS.error_line_count <- stat.PS.error_line_count + diffline | _ -> ()); match elem with | None -> [] | Some xs -> (xs, info) :: loop () (* recurse *) in let xs = loop () in let ast = xs |> List.map fst in let tokens = xs |> List.map snd |> List.flatten in { Parsing_result.ast; tokens; stat } let parse2 file : (Ast.program, T.token) Parsing_result.t = match File_type.file_type_of_file file with | FT.PL (FT.C _) -> ( try parse_with_lang ~lang:Flag_cpp.C file with | _exn -> parse_with_lang ~lang:Flag_cpp.Cplusplus file) | FT.PL (FT.Cplusplus _) -> parse_with_lang ~lang:Flag_cpp.Cplusplus file | _ -> failwith (spf "not a C/C++ file: %s" file) let parse file : (Ast.program, T.token) Parsing_result.t = Profiling.profile_code "Parse_cpp.parse" (fun () -> try parse2 file with | Stack_overflow -> logger#error "PB stack overflow in %s" file; { Parsing_result.ast = []; tokens = []; stat = { (PS.bad_stat file) with PS.have_timeout = true }; }) let parse_program file = let res = parse file in res.Parsing_result.ast (*****************************************************************************) (* Sub parsers *) (*****************************************************************************) (* for sgrep/spatch *) let any_of_string lang s = Common.save_excursion Flag_parsing.sgrep_mode true (fun () -> Common2.with_tmp_file ~str:s ~ext:"c" (fun file -> let toks_orig = tokens file in let toks = try Parsing_hacks.fix_tokens ~macro_defs:_defs lang toks_orig with | Token_views_cpp.UnclosedSymbol s -> logger#error "unclosed symbol %s" s; if !Flag_cpp.debug_cplusplus then raise (Token_views_cpp.UnclosedSymbol s) else toks_orig in let tr = Parsing_helpers.mk_tokens_state toks in let lexbuf_fake = Lexing.from_function (fun _buf _n -> raise Impossible) in (* -------------------------------------------------- *) (* Call parser *) (* -------------------------------------------------- *) Parser_cpp.semgrep_pattern (lexer_function tr) lexbuf_fake)) (* experimental *) (* let parse_with_dypgen file = (* -------------------------------------------------- *) (* call lexer and get all the tokens *) (* -------------------------------------------------- *) let toks_orig = tokens file in let lang = Flag_parsing_cpp.Cplusplus in let toks = try Parsing_hacks.fix_tokens ~macro_defs:_defs lang toks_orig with Token_views_cpp.UnclosedSymbol s -> pr2 s; if !Flag_cpp.debug_cplusplus then raise (Token_views_cpp.UnclosedSymbol s) else toks_orig in let tr = Parse_info.mk_tokens_state toks in let lexbuf_fake = Lexing.from_function (fun _buf _n -> raise Impossible) in (* -------------------------------------------------- *) (* Call parser *) (* -------------------------------------------------- *) (* TODO: not sure why but calling main is significanctly faster * than calling toplevel in a loop *) try Parser_cpp2.main (lexer_function tr) lexbuf_fake |> List.hd |> fst with Dyp.Syntax_error -> raise (Parse_info.Parsing_error (TH.info_of_tok tr.PI.current)) *)
(* Yoann Padioleau * * Copyright (C) 2002-2013 Yoann Padioleau * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * version 2.1 as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file * license.txt for more details. * *)
common_pb.ml
[@@@ocaml.warning "-27-30-39"] type array_value_mutable = { mutable values : Common_types.any_value list; } let default_array_value_mutable () : array_value_mutable = { values = []; } type key_value_list_mutable = { mutable values : Common_types.key_value list; } let default_key_value_list_mutable () : key_value_list_mutable = { values = []; } type key_value_mutable = { mutable key : string; mutable value : Common_types.any_value option; } let default_key_value_mutable () : key_value_mutable = { key = ""; value = None; } type instrumentation_scope_mutable = { mutable name : string; mutable version : string; mutable attributes : Common_types.key_value list; mutable dropped_attributes_count : int32; } let default_instrumentation_scope_mutable () : instrumentation_scope_mutable = { name = ""; version = ""; attributes = []; dropped_attributes_count = 0l; } let rec decode_any_value d = let rec loop () = let ret:Common_types.any_value = match Pbrt.Decoder.key d with | None -> Pbrt.Decoder.malformed_variant "any_value" | Some (1, _) -> (Common_types.String_value (Pbrt.Decoder.string d) : Common_types.any_value) | Some (2, _) -> (Common_types.Bool_value (Pbrt.Decoder.bool d) : Common_types.any_value) | Some (3, _) -> (Common_types.Int_value (Pbrt.Decoder.int64_as_varint d) : Common_types.any_value) | Some (4, _) -> (Common_types.Double_value (Pbrt.Decoder.float_as_bits64 d) : Common_types.any_value) | Some (5, _) -> (Common_types.Array_value (decode_array_value (Pbrt.Decoder.nested d)) : Common_types.any_value) | Some (6, _) -> (Common_types.Kvlist_value (decode_key_value_list (Pbrt.Decoder.nested d)) : Common_types.any_value) | Some (7, _) -> (Common_types.Bytes_value (Pbrt.Decoder.bytes d) : Common_types.any_value) | Some (n, payload_kind) -> ( Pbrt.Decoder.skip d payload_kind; loop () ) in ret in loop () and decode_array_value d = let v = default_array_value_mutable () in let continue__= ref true in while !continue__ do match Pbrt.Decoder.key d with | None -> ( v.values <- List.rev v.values; ); continue__ := false | Some (1, Pbrt.Bytes) -> begin v.values <- (decode_any_value (Pbrt.Decoder.nested d)) :: v.values; end | Some (1, pk) -> Pbrt.Decoder.unexpected_payload "Message(array_value), field(1)" pk | Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind done; ({ Common_types.values = v.values; } : Common_types.array_value) and decode_key_value_list d = let v = default_key_value_list_mutable () in let continue__= ref true in while !continue__ do match Pbrt.Decoder.key d with | None -> ( v.values <- List.rev v.values; ); continue__ := false | Some (1, Pbrt.Bytes) -> begin v.values <- (decode_key_value (Pbrt.Decoder.nested d)) :: v.values; end | Some (1, pk) -> Pbrt.Decoder.unexpected_payload "Message(key_value_list), field(1)" pk | Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind done; ({ Common_types.values = v.values; } : Common_types.key_value_list) and decode_key_value d = let v = default_key_value_mutable () in let continue__= ref true in while !continue__ do match Pbrt.Decoder.key d with | None -> ( ); continue__ := false | Some (1, Pbrt.Bytes) -> begin v.key <- Pbrt.Decoder.string d; end | Some (1, pk) -> Pbrt.Decoder.unexpected_payload "Message(key_value), field(1)" pk | Some (2, Pbrt.Bytes) -> begin v.value <- Some (decode_any_value (Pbrt.Decoder.nested d)); end | Some (2, pk) -> Pbrt.Decoder.unexpected_payload "Message(key_value), field(2)" pk | Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind done; ({ Common_types.key = v.key; Common_types.value = v.value; } : Common_types.key_value) let rec decode_instrumentation_scope d = let v = default_instrumentation_scope_mutable () in let continue__= ref true in while !continue__ do match Pbrt.Decoder.key d with | None -> ( v.attributes <- List.rev v.attributes; ); continue__ := false | Some (1, Pbrt.Bytes) -> begin v.name <- Pbrt.Decoder.string d; end | Some (1, pk) -> Pbrt.Decoder.unexpected_payload "Message(instrumentation_scope), field(1)" pk | Some (2, Pbrt.Bytes) -> begin v.version <- Pbrt.Decoder.string d; end | Some (2, pk) -> Pbrt.Decoder.unexpected_payload "Message(instrumentation_scope), field(2)" pk | Some (3, Pbrt.Bytes) -> begin v.attributes <- (decode_key_value (Pbrt.Decoder.nested d)) :: v.attributes; end | Some (3, pk) -> Pbrt.Decoder.unexpected_payload "Message(instrumentation_scope), field(3)" pk | Some (4, Pbrt.Varint) -> begin v.dropped_attributes_count <- Pbrt.Decoder.int32_as_varint d; end | Some (4, pk) -> Pbrt.Decoder.unexpected_payload "Message(instrumentation_scope), field(4)" pk | Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind done; ({ Common_types.name = v.name; Common_types.version = v.version; Common_types.attributes = v.attributes; Common_types.dropped_attributes_count = v.dropped_attributes_count; } : Common_types.instrumentation_scope) let rec encode_any_value (v:Common_types.any_value) encoder = begin match v with | Common_types.String_value x -> Pbrt.Encoder.key (1, Pbrt.Bytes) encoder; Pbrt.Encoder.string x encoder; | Common_types.Bool_value x -> Pbrt.Encoder.key (2, Pbrt.Varint) encoder; Pbrt.Encoder.bool x encoder; | Common_types.Int_value x -> Pbrt.Encoder.key (3, Pbrt.Varint) encoder; Pbrt.Encoder.int64_as_varint x encoder; | Common_types.Double_value x -> Pbrt.Encoder.key (4, Pbrt.Bits64) encoder; Pbrt.Encoder.float_as_bits64 x encoder; | Common_types.Array_value x -> Pbrt.Encoder.key (5, Pbrt.Bytes) encoder; Pbrt.Encoder.nested (encode_array_value x) encoder; | Common_types.Kvlist_value x -> Pbrt.Encoder.key (6, Pbrt.Bytes) encoder; Pbrt.Encoder.nested (encode_key_value_list x) encoder; | Common_types.Bytes_value x -> Pbrt.Encoder.key (7, Pbrt.Bytes) encoder; Pbrt.Encoder.bytes x encoder; end and encode_array_value (v:Common_types.array_value) encoder = List.iter (fun x -> Pbrt.Encoder.key (1, Pbrt.Bytes) encoder; Pbrt.Encoder.nested (encode_any_value x) encoder; ) v.Common_types.values; () and encode_key_value_list (v:Common_types.key_value_list) encoder = List.iter (fun x -> Pbrt.Encoder.key (1, Pbrt.Bytes) encoder; Pbrt.Encoder.nested (encode_key_value x) encoder; ) v.Common_types.values; () and encode_key_value (v:Common_types.key_value) encoder = Pbrt.Encoder.key (1, Pbrt.Bytes) encoder; Pbrt.Encoder.string v.Common_types.key encoder; begin match v.Common_types.value with | Some x -> Pbrt.Encoder.key (2, Pbrt.Bytes) encoder; Pbrt.Encoder.nested (encode_any_value x) encoder; | None -> (); end; () let rec encode_instrumentation_scope (v:Common_types.instrumentation_scope) encoder = Pbrt.Encoder.key (1, Pbrt.Bytes) encoder; Pbrt.Encoder.string v.Common_types.name encoder; Pbrt.Encoder.key (2, Pbrt.Bytes) encoder; Pbrt.Encoder.string v.Common_types.version encoder; List.iter (fun x -> Pbrt.Encoder.key (3, Pbrt.Bytes) encoder; Pbrt.Encoder.nested (encode_key_value x) encoder; ) v.Common_types.attributes; Pbrt.Encoder.key (4, Pbrt.Varint) encoder; Pbrt.Encoder.int32_as_varint v.Common_types.dropped_attributes_count encoder; ()
Reference.ml
(* A reference implementation of memoization at type [int -> 'a]. *) (* This implementation is correct if the function that is memoized is pure. *) let memoize f = f
(******************************************************************************) (* *) (* Monolith *) (* *) (* François Pottier *) (* *) (* Copyright Inria. All rights reserved. This file is distributed under the *) (* terms of the GNU Lesser General Public License as published by the Free *) (* Software Foundation, either version 3 of the License, or (at your *) (* option) any later version, as described in the file LICENSE. *) (* *) (******************************************************************************)
Immutable_buffer.mli
(* Similar in principle to OCaml's Buffer, but immutable. `combine` is `O(1)` * (though the construction of the list to pass to it is `O(n)` where `n` is the * number of list items), and `to_string` is `O(n)` where `n` is the length of * the constructed string. *) type t val of_string : string -> t val to_string : t -> string val combine : ?sep:string -> t list -> t
(* Similar in principle to OCaml's Buffer, but immutable. `combine` is `O(1)` * (though the construction of the list to pass to it is `O(n)` where `n` is the * number of list items), and `to_string` is `O(n)` where `n` is the length of * the constructed string. *)
main_sc_rollup_client_016_PtMumbai.ml
let executable_name = Filename.basename Sys.executable_name let argv () = Array.to_list Sys.argv |> List.tl |> Stdlib.Option.get let main () = Configuration.parse (argv ()) >>=? fun (configuration, argv) -> let cctxt = Configuration.make_unix_client_context configuration in Tezos_client_base.Client_keys.register_aggregate_signer (module Tezos_signer_backends.Unencrypted.Aggregate) ; Tezos_clic.dispatch (Commands.all ()) cctxt argv let handle_error = function | Ok () -> Stdlib.exit 0 | Error [Tezos_clic.Version] -> let version = Tezos_version.Bin_version.version_string in Format.printf "%s\n" version ; Stdlib.exit 0 | Error [Tezos_clic.Help command] -> Tezos_clic.usage Format.std_formatter ~executable_name ~global_options:(Configuration.global_options ()) (match command with None -> [] | Some c -> [c]) ; Stdlib.exit 0 | Error errs -> Tezos_clic.pp_cli_errors Format.err_formatter ~executable_name ~global_options:(Configuration.global_options ()) ~default:Error_monad.pp errs ; Stdlib.exit 1 let () = Lwt_main.run (Lwt.catch main fail_with_exn) |> handle_error
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2022 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
windows.c
#define _WIN32_WINNT 0x0501 //targets XP or later #define CAML_NAME_SPACE #include <caml/mlvalues.h> #include <caml/alloc.h> #include <caml/memory.h> #include <caml/fail.h> #include <windows.h> #include <winerror.h> #include <shlobj.h> static value get_shared_folder(int nFolder) { TCHAR path[MAX_PATH]; if(SUCCEEDED(SHGetFolderPath(NULL, nFolder, NULL, 0, path))) return caml_copy_string(path); caml_failwith("get_local_appdata"); return Val_unit; } CAMLprim value caml_win_get_appdata(value v_unit) { CAMLparam1(v_unit); CAMLreturn(get_shared_folder(CSIDL_APPDATA | CSIDL_FLAG_CREATE)); } CAMLprim value caml_win_get_local_appdata(value v_unit) { CAMLparam1(v_unit); CAMLreturn(get_shared_folder(CSIDL_LOCAL_APPDATA | CSIDL_FLAG_CREATE)); } CAMLprim value caml_win_get_common_appdata(value v_unit) { CAMLparam1(v_unit); CAMLreturn(get_shared_folder(CSIDL_COMMON_APPDATA | CSIDL_FLAG_CREATE)); } CAMLprim value caml_win_read_registry_string(value v_subkey, value v_value, value v_wow) { CAMLparam3(v_subkey, v_value, v_wow); HKEY resultHKey = NULL; char resultString[4096]; DWORD resultSize = sizeof(resultString); REGSAM flags = KEY_READ; int wow = Int_val(v_wow); DWORD typ = REG_SZ; if (wow == 1) { flags |= KEY_WOW64_32KEY; } else if (wow == 2) { flags |= KEY_WOW64_64KEY; } if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, String_val(v_subkey), 0, flags, &resultHKey) != ERROR_SUCCESS) { caml_failwith("RegOpenKeyEx"); } else if (RegQueryValueEx(resultHKey, String_val(v_value), 0, &typ, (LPBYTE) &resultString, &resultSize) != ERROR_SUCCESS) { caml_failwith("RegQueryValue"); } else { if (resultSize < 0 || resultSize >= sizeof(resultString)) caml_failwith("Registry value too big"); resultString[resultSize] = '\0'; CAMLlocal1(result); result = caml_copy_string(resultString); RegCloseKey(resultHKey); resultHKey = NULL; CAMLreturn(result); } } CAMLprim value caml_win_read_registry_int(value v_subkey, value v_value, value v_wow) { CAMLparam3(v_subkey, v_value, v_wow); HKEY resultHKey = NULL; DWORD resultDWord = 0; DWORD resultSize = sizeof(resultDWord); REGSAM flags = KEY_READ; int wow = Int_val(v_wow); DWORD typ = REG_DWORD; if (wow == 1) { flags |= KEY_WOW64_32KEY; } else if (wow == 2) { flags |= KEY_WOW64_64KEY; } if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, String_val(v_subkey), 0, flags, &resultHKey) != ERROR_SUCCESS) { caml_failwith("RegOpenKeyEx"); } else if (RegQueryValueEx(resultHKey, String_val(v_value), 0, &typ, (LPBYTE) &resultDWord, &resultSize) != ERROR_SUCCESS) { caml_failwith("RegQueryValue(int)"); } else { RegCloseKey(resultHKey); resultHKey = NULL; CAMLreturn(Val_int(resultDWord)); } }
/* Copyright (C) 2013, Thomas Leonard * See the README file for details, or visit http://0install.net. */
digest_tests.ml
open Stdune module Digest = Dune_digest let%expect_test "directory digest version" = (* If this test fails with a new digest value, make sure to update to update [directory_digest_version] in digest.ml. The expected value is kept outside of the expect block on purpose so that it must be modified manually. *) let expected = "a743ec66ce913ff6587a3816a8acc6ea" in let dir = Temp.create Dir ~prefix:"digest-tests" ~suffix:"" in let stats = { Digest.Stats_for_digest.st_kind = S_DIR; st_perm = 1 } in (match Digest.path_with_stats ~allow_dirs:true dir stats with | Ok digest -> let digest = Digest.to_string digest in if String.equal digest expected then print_endline "[PASS]" else printfn "[FAIL] new digest value. please update the version and this test.\n%s" digest | Unexpected_kind | Unix_error _ -> print_endline "[FAIL] unable to calculate digest"); [%expect {| [PASS] |}] let%expect_test "directories with symlinks" = let dir = Temp.create Dir ~prefix:"digest-tests" ~suffix:"" in let stats = { Digest.Stats_for_digest.st_kind = S_DIR; st_perm = 1 } in let sub = Path.relative dir "sub" in Path.mkdir_p sub; Unix.symlink "bar" (Path.to_string (Path.relative dir "foo")); Unix.symlink "bar" (Path.to_string (Path.relative sub "foo")); (match Digest.path_with_stats ~allow_dirs:true dir stats with | Ok _ -> print_endline "[PASS]" | Unexpected_kind -> print_endline "[FAIL] unexpected kind" | Unix_error _ -> print_endline "[FAIL] unable to calculate digest"); [%expect {| [PASS] |}]
Analyze_pattern.mli
val extract_specific_strings : ?lang:Lang.t -> Pattern.t -> string list (* Extract strings and metavariables that occur in the source code. *) val extract_strings_and_mvars : ?lang:Lang.t -> Pattern.t -> string list * Metavariable.mvar list
batConcreteQueue_402.mli
type 'a t external of_abstr : 'a Queue.t -> 'a t = "%identity" external to_abstr : 'a t -> 'a Queue.t = "%identity" val filter_inplace : ('a -> bool) -> 'a t -> unit
CCBijection.mli
(* This file is free software, part of containers. See file "license" for more details. *) (** Functor to build a bijection Represents 1-to-1 mappings between two types. Each element from the "left" is mapped to one "right" value, and conversely. @since 2.1 *) type 'a iter = ('a -> unit) -> unit module type OrderedType = sig type t val compare : t -> t -> int end module type S = sig type t type left type right val empty : t val is_empty : t -> bool val equal : t -> t -> bool val compare : t -> t -> int val add : left -> right -> t -> t (** Add [left] and [right] correspondence to bijection such that [left] and [right] are unique in their respective sets and only correspond to each other. *) val cardinal : t -> int (** Number of bindings. O(n) time. *) val mem : left -> right -> t -> bool (** Check both sides for key membership. *) val mem_left : left -> t -> bool (** Check for membership of correspondence using [left] key. *) val mem_right : right -> t -> bool (** Check for membership of correspondence using [right] key. *) val find_left : left -> t -> right (** @raise Not_found if left is not found. *) val find_right : right -> t -> left (** @raise Not_found if right is not found. *) val remove : left -> right -> t -> t (** Remove the [left], [right] binding if it exists. Return the same bijection otherwise. *) val remove_left : left -> t -> t (** Remove the binding with [left] key if it exists. Return the same bijection otherwise. *) val remove_right : right -> t -> t (** Remove the binding with [right] key if it exists. Return the same bijection otherwise. *) val list_left : t -> (left * right) list (** Return the bindings as a list of ([left], [right]) values. *) val list_right : t -> (right * left) list (** Return the bindings as a list of [(right, left)] values. *) val add_iter : (left * right) iter -> t -> t val of_iter : (left * right) iter -> t val to_iter : t -> (left * right) iter val add_list : (left * right) list -> t -> t val of_list : (left * right) list -> t val to_list : t -> (left * right) list end module Make (L : OrderedType) (R : OrderedType) : S with type left = L.t and type right = R.t
(* This file is free software, part of containers. See file "license" for more details. *)
bit_blaster.h
/* * SUPPORT FOR CONVERTING BITVECTOR CONSTRAINTS INTO CLAUSES */ #ifndef __BIT_BLASTER_H #define __BIT_BLASTER_H #include <stdint.h> #include <stdbool.h> #include "solvers/bv/remap_table.h" #include "solvers/cdcl/gates_hash_table.h" #include "solvers/cdcl/smt_core.h" #include "utils/int_vectors.h" /* * CLAUSE-SET BUFFER * * Each gate/elementary component is encoded as a small set * of clauses (with no more than four variables). * A clause buffer is used to build and simplify this set. * * Main components: * - var[0 .. 3] = boolean variables occurring in the set * if there are fewer than 4 variables, then var is * padded with null_bvar * - nclauses = number of clauses in the set * - each clause is identified by an index between 0 and nclauses-1 * - data = two-dimensional array. * For clause i and var[j] = x, data[i][j] indicates whether x * occurs in clause i, and if so, with which polarity. * This is encoded as follows: * data[i][j] = 0 --> x does not occur in clause i * data[i][j] = +1 --> clause i contains the literal pos_lit(x) * data[i][j] = -1 --> clause i contains the literal neg_lit(x) * * To check for subsumption and subsumption-resolution, we keep * a signature for each clause. The signature is a 4-bit integer, * bit j of signature[i] is 1 iff var[j] occurs in clause i. * * The flag is_unsat is set if one of the clauses is empty. * * Mask is used as a bitvector in simplification. If bit i * of mask is 1 then clause i has been removed. */ // Dimensions: at most 8 clauses, at most 4 variables #define CBUFFER_NVARS 4 #define CBUFFER_NCLAUSES 8 typedef struct cbuffer_s { uint32_t nclauses; uint32_t mask; bool is_unsat; bvar_t var[CBUFFER_NVARS]; uint8_t signature[CBUFFER_NCLAUSES]; int8_t data[CBUFFER_NCLAUSES][CBUFFER_NVARS]; } cbuffer_t; /* * BIT-BLASTER: * * This is the main structure for encoding boolean gates and * bit-vector constraints into clauses. * * Components: * - solver: attached smt_core * where the clauses and literals are created * - remap_table to interface with the bvsolver * - gate table for hash consing * - buffers */ typedef struct bit_blaster_s { smt_core_t *solver; remap_table_t *remap; gate_table_t htbl; cbuffer_t buffer; ivector_t aux_vector; ivector_t aux_vector2; ivector_t aux_vector3; ivector_t aux_vector4; } bit_blaster_t; /* * BOUND FOR HASH-CONSING * * When building (OR a[0] ... a[n-1]) or (XOR a[0] ... a[n-1]), * hash consing is used only if n <= BIT_BLASTER_MAX_HASHCONS_SIZE. * * The bound must be no more than MAX_INDEGREE (defined in gates_hash_table.h). * It should be at least 3, since or/xor with 2 or 3 arguments are always * hash-consed. */ #define BIT_BLASTER_MAX_HASHCONS_SIZE 20 /*********************************** * INITIALIZATION/PUSH/POP/RESET * **********************************/ /* * Initialization: * - htbl is initialized to its default size * - solver and remap must be initialized outside this function */ extern void init_bit_blaster(bit_blaster_t *blaster, smt_core_t *solver, remap_table_t *remap); /* * Deletion: don't delete the solver, just the hash table */ extern void delete_bit_blaster(bit_blaster_t *blaster); /* * Reset internal structures, not the solver */ extern void reset_bit_blaster(bit_blaster_t *blaster); /* * Push/pop just apply to the internal gate table */ static inline void bit_blaster_push(bit_blaster_t *blaster) { gate_table_push(&blaster->htbl); } static inline void bit_blaster_pop(bit_blaster_t *blaster) { gate_table_pop(&blaster->htbl); } /* * Set level: same effect as calling push n times * - this is used to ensure that the bit-blaster trail stack * has the same depth as the bv_solver when the bit_blaster is allocated */ static inline void bit_blaster_set_level(bit_blaster_t *blaster, uint32_t n) { gate_table_set_level(&blaster->htbl, n); } /********************** * ELEMENTARY GATES * *********************/ /* * The basic gates are listed in gates_hash_table.h. All functions * below add clauses that encode the definition of a specific * gate. The clauses are simplified as much as possible. If the * constraints are inconsistent then the empty clause is added to * the solver. */ /* * Constraint: b = a or (xor a b) = 0 */ extern void bit_blaster_eq(bit_blaster_t *blaster, literal_t a, literal_t b); /* * Constraint: x = (xor a b) or (xor a b x) = 0 */ extern void bit_blaster_xor2_gate(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t x); /* * Constraint: x = (xor a b c) or (xor a b c x) = 0 */ extern void bit_blaster_xor3_gate(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c, literal_t x); /* * Constraint: x = (or a b) */ extern void bit_blaster_or2_gate(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t x); /* * Constraint: x = (or a b c) */ extern void bit_blaster_or3_gate(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c, literal_t x); /* * Constraint: x = (ite c a b) */ extern void bit_blaster_mux(bit_blaster_t *blaster, literal_t c, literal_t a, literal_t b, literal_t x); /* * Constraint: x = (cmp a b c) * Defined as: x = ((a > b) or (a = b and c)) * * This is used to encode (bvuge u v) or (bvsge u v) via the following equations: * 1) (bvuge u v) = (u[n] > v[n]) or (u[n] == v[n] and (bvuge u[n-1 .. 1] v[n-1 .. 1])) * 2) (bvsge u v) = (v[n] > u[n]) or (u[n] == v[n] and (bvuge u[n-1 .. 1] v[n-1 .. 1])) */ extern void bit_blaster_cmp(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c, literal_t x); /* * Constraint: x = (majority a b c) */ extern void bit_blaster_maj3(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c, literal_t x); /* * Constraint: x = (or a[0] ... a[n-1]) */ extern void bit_blaster_or_gate(bit_blaster_t *blaster, uint32_t n, literal_t *a, literal_t x); /* * Constraint: x = (xor a[0] ... a[n-1]) */ extern void bit_blaster_xor_gate(bit_blaster_t *blaster, uint32_t n, literal_t *a, literal_t x); /* * Constraint: (x, y) = half-add(a, b) where x = sum, y = carry * This is encoded as * x = (xor a b) * y = (and a b) */ extern void bit_blaster_half_adder(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t x, literal_t y); /* * Constraint: (x, y) = full-adder(a, b, c) where x = sum, y = carry * This is encoded as * x = (xor a b c) * y = (majority a b c) */ extern void bit_blaster_full_adder(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c, literal_t x, literal_t y); /***************** * SIMPLIFIER * ****************/ /* * All functions below attempt to reduce an expression to a literal. * They return null_literal when they fail. * They take into account the base-value of all literals. */ /* * (xor a b) */ extern literal_t bit_blaster_eval_xor2(bit_blaster_t *blaster, literal_t a, literal_t b); /* * (xor a b c) */ extern literal_t bit_blaster_eval_xor3(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c); /* * (eq a b) */ static inline literal_t bit_blaster_eval_eq(bit_blaster_t *blaster, literal_t a, literal_t b) { return bit_blaster_eval_xor2(blaster, not(a), b); } /* * (or a b) */ extern literal_t bit_blaster_eval_or2(bit_blaster_t *blaster, literal_t a, literal_t b); /* * (or a b c) */ extern literal_t bit_blaster_eval_or3(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c); /* * (ite c a b) */ extern literal_t bit_blaster_eval_mux(bit_blaster_t *blaster, literal_t c, literal_t a, literal_t b); /* * (a > b): i.e. (a and not b) */ extern literal_t bit_blaster_eval_gt(bit_blaster_t *blaster, literal_t a, literal_t b); /* * (cmp a b c) i.e., ((a > b) or (a = b and c)) */ extern literal_t bit_blaster_eval_cmp(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c); /* * (majority a b c) */ extern literal_t bit_blaster_eval_maj3(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c); /* * (or a[0] ... a[n-1]) */ extern literal_t bit_blaster_eval_or(bit_blaster_t *blaster, uint32_t n, literal_t *a); /* * (xor a[0] ... a[n-1]) */ extern literal_t bit_blaster_eval_xor(bit_blaster_t *blaster, uint32_t n, literal_t *a); /* * (bveq a b): a and b are vectors of n bits */ extern literal_t bit_blaster_eval_bveq(bit_blaster_t *blaster, uint32_t n, literal_t *a, literal_t *b); /******************************* * BOOLEAN GATE CONSTRUCTION * ******************************/ /* * Create a new boolean variable */ extern bvar_t bit_blaster_new_var(bit_blaster_t *blaster); /* * Create a new literal */ static inline literal_t bit_blaster_fresh_literal(bit_blaster_t *blaster) { return pos_lit(bit_blaster_new_var(blaster)); } /* * All functions below return a literal l = (op a b ..) for some * primitive operator op. They use the following steps: * 1) Try to simplify (op a b ...) to l * 2) If that fails, search for the the gate (op a b ..) * in the hash table. * 3) If that fails create a fresh literal l, assert * the constraints l = (op a b ...) and add the gate to * the hash table. */ /* * (xor a[0] ... a[n-1]) */ extern literal_t bit_blaster_make_xor(bit_blaster_t *blaster, uint32_t n, literal_t *a); /* * (xor a b) */ extern literal_t bit_blaster_make_xor2(bit_blaster_t *blaster, literal_t a, literal_t b); /* * (xor a b c) */ extern literal_t bit_blaster_make_xor3(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c); /* * (eq a b) */ static inline literal_t bit_blaster_make_eq(bit_blaster_t *blaster, literal_t a, literal_t b) { return bit_blaster_make_xor2(blaster, not(a), b); } /* * (or a[0] ... a[n-1]) */ extern literal_t bit_blaster_make_or(bit_blaster_t *blaster, uint32_t n, literal_t *a); /* * (or a b) */ extern literal_t bit_blaster_make_or2(bit_blaster_t *blaster, literal_t a, literal_t b); /* * (or a b c) */ extern literal_t bit_blaster_make_or3(bit_blaster_t *blaster, literal_t a, literal_t b, literal_t c); /* * (and a b) */ static inline literal_t bit_blaster_make_and2(bit_blaster_t *blaster, literal_t a, literal_t b) { return not(bit_blaster_make_or2(blaster, not(a), not(b))); } /*************************** * CIRCUIT CONSTRUCTION * **************************/ /* * BIT-VECTOR COMPARATORS: * - input = 2 literal arrays of size n * - output = 1 literal * All the literals in both arrays must be non-null (valid literals in the bit_solver) * * (bveq a b) --> equality * (bvuge a b) --> (a >= b) with both interpreted as n-bit unsigned integers * (bvsge a b) --> (a >= b) with both interpreted as n-bit signed integers. * * For bveq and bvuge, n may be zero. * For bvsge, n must be positive. */ extern literal_t bit_blaster_make_bveq(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); extern literal_t bit_blaster_make_bvuge(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); extern literal_t bit_blaster_make_bvsge(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); /* * Variants * - assert l == (bveq a b) * - assert l == (bvuge a b) * - assert l == (bvsge a b) */ extern void bit_blaster_make_bveq2(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t l, uint32_t n); extern void bit_blaster_make_bvuge2(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t l, uint32_t n); extern void bit_blaster_make_bvsge2(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t l, uint32_t n); /* * ASSERTION OF EQUALITIES/INEQUALITIES * - a and b must be literal arrays of size n * - all literals in both arrays must be non-null * * (bveq a b): assert a = b * (bvneq a b): assert a /= b * (bvuge a b): assert a >= b (unsigned) * (bvult a b): assert a < b (unsigned) * (bvsge a b): assert a >= b (signed, n must be positive) * (bvslt a b): assert a < b (signed, n must be positive) * * If the constraint is inconsistent, then a conflict is recorded * in the bit_solver (by adding the empty clause). */ extern void bit_blaster_assert_bveq(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); extern void bit_blaster_assert_bvneq(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); extern void bit_blaster_assert_bvuge(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); extern void bit_blaster_assert_bvult(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); extern void bit_blaster_assert_bvsge(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); extern void bit_blaster_assert_bvslt(bit_blaster_t *blaster, literal_t *a, literal_t *b, uint32_t n); /* * The following functions encode a bit-vector circuit with arrays of literals * as input and an array u of pseudo-literal as output. * - all elements in arrays a and b must be valid literals in the bit_solver * - all elements of u must be non-null pseudo literals in the remap table * - if pseudo-literal u[i] is not mapped to a real literal, then the * circuit constructions assign a real literal to u[i] * (and all elements of its class) * - if pseudo-literal u[i] is mapped to a real literal l, then the functions * add clauses to encode the equality between l and the circuit output * (e.g., for the adder circuit: assert l = bit[i] in sum of a and b). * * If the constraint is inconsistent, then a conflict is recorded * in the bit_solver (by adding the empty clause). */ /* * MULTIPLEXER: assert u = (ite c a b) * - a and b must be literal arrays of size n * - u must be a pseudo literal array of size n * - c must be a valid literal in the bit_solver */ extern void bit_blaster_make_bvmux(bit_blaster_t *blaster, literal_t c, literal_t *a, literal_t *b, literal_t *u, uint32_t n); /* * ARITHMETIC CIRCUITS * - a and b must be literal arrays of size n * - u must be a pseudo literal array of size n */ extern void bit_blaster_make_bvadd(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *u, uint32_t n); extern void bit_blaster_make_bvsub(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *u, uint32_t n); extern void bit_blaster_make_bvneg(bit_blaster_t *blaster, literal_t *a, literal_t *u, uint32_t n); extern void bit_blaster_make_bvmul(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *u, uint32_t n); /* * UNSIGNED DIVISION * - a and b must be literal arrays of size n * - q = either NULL or an array of n pseudo literals * - r = either NULL of an array of n pseudo literals * * If both r and q are non-null, the function asserts * q = (bvudiv a b): quotient * r = (bvurem a b): remainder * If r is NULL only the first part is asserted. * If q is NULL only the second equality is asserted. * * This asserts (a = b * q + r) AND (b == 0 or r < b) * * For division by zero: q is 0b111...1 and r = a. */ extern void bit_blaster_make_udivision(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *q, literal_t *r, uint32_t n); /* * SIGNED DIVISION * - a and b must be literal arrays of size n * - q = either NULL or an array of n pseudo literals * - r = either NULL of an array of n pseudo literals * * If both r and q are non-null, the function asserts * q = (bvsdiv a b): quotient * r = (bvsrem a b): remainder * If r is NULL only the first part is asserted. * If q is NULL only the second equality is asserted. * * This asserts a = b * q + r * with the following constraints on r: * (a >= 0, b > 0 ==> 0 <= r < b) * (a >= 0, b < 0 ==> 0 <= r < -b) * (a < 0, b > 0 ==> -b < r <= 0) * (a < 0, b < 0 ==> b < r <= 0) * * For division by zero: * (a >= 0, b = 0 ==> r = a, q = -1) * (a < 0, b = 0 ==> r = a, q = +1) * */ extern void bit_blaster_make_sdivision(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *q, literal_t *r, uint32_t n); /* * FLOOR DIVISION REMAINDER. * * - a and b must be literal arrays of size n * - r must be an array of n pseudo literals * * This asserts r = bvsmod(a, b) * - if b is zero, then bsvmod(a, b) = a * - otherwise, bvsmod(a, b) = a - b * floor(a/b) * * This is similar to fdiv in GMP: division with * rounding toward minus infinity. * * For b > 0, we have 0 <= r < b * For b < 0, we have b < r <= 0 */ extern void bit_blaster_make_smod(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *r, uint32_t n); /* * SHIFT LEFT * - a = vector to be shifted * - b = shift amount * Both a and b must be arrays of n non-null literals * - u = result: array of n pseudo literals * * The function asserts u == (bvshl a b) * * The SMT-LIB semantics for logical shift is that (bvshl a b) is equivalent * to multiplying a by 2^b. So if b's value is larger than n the result is 0b00..000. */ extern void bit_blaster_make_shift_left(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *u, uint32_t n); /* * LOGICAL SHIFT RIGHT * - a = vector to be shifted * - b = shift amount * Both a and b must be arrays of n non-null literals * - u = result: array of n pseudo literals * * The function asserts u == (bvlshr a b) (padding with 0) * * If b's value is larger than n the result is 0b00..000. */ extern void bit_blaster_make_lshift_right(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *u, uint32_t n); /* * LOGICAL SHIFT RIGHT * - a = vector to be shifted * - b = shift amount * Both a and b must be arrays of n non-null literals * - u = result: array of n pseudo literals * * The function asserts u == (bvashr a b) (sign bit is copied) * * If b's value is larger than n the result is [s ... s], * where s = sign bit of a. */ extern void bit_blaster_make_ashift_right(bit_blaster_t *blaster, literal_t *a, literal_t *b, literal_t *u, uint32_t n); #endif /* __BIT_BLASTER_H */
/* * The Yices SMT Solver. Copyright 2014 SRI International. * * This program may only be used subject to the noncommercial end user * license agreement which is downloadable along with this program. */
test_template_constraints.ml
open Core open Comby_kernel open Matchers open Test_helpers let%expect_test "implicit_equals" = let source = "(fun i -> j x) (fun x -> x x)" in let match_template = "fun :[[a]] -> :[[a]] :[[a]]" in run_all_matches (module Alpha.Generic) source match_template; [%expect_exact {|{"uri":null,"matches":[{"range":{"start":{"offset":16,"line":1,"column":17},"end":{"offset":28,"line":1,"column":29}},"environment":[{"variable":"a","value":"x","range":{"start":{"offset":20,"line":1,"column":21},"end":{"offset":21,"line":1,"column":22}}},{"variable":"a_equal_!@#$000000000003","value":"x","range":{"start":{"offset":25,"line":1,"column":26},"end":{"offset":26,"line":1,"column":27}}},{"variable":"a_equal_!@#$000000000004","value":"x","range":{"start":{"offset":27,"line":1,"column":28},"end":{"offset":28,"line":1,"column":29}}}],"matched":"fun x -> x x"}]} |}]; run_all_matches (module Omega.Generic) source match_template; [%expect_exact {|{"uri":null,"matches":[{"range":{"start":{"offset":16,"line":1,"column":17},"end":{"offset":28,"line":1,"column":29}},"environment":[{"variable":"a","value":"x","range":{"start":{"offset":20,"line":1,"column":21},"end":{"offset":21,"line":1,"column":22}}},{"variable":"a_equal_!@#$000000000005","value":"x","range":{"start":{"offset":25,"line":1,"column":26},"end":{"offset":26,"line":1,"column":27}}},{"variable":"a_equal_!@#$000000000006","value":"x","range":{"start":{"offset":27,"line":1,"column":28},"end":{"offset":28,"line":1,"column":29}}}],"matched":"fun x -> x x"}]} |}]
test_bigstring_safe_accessors.ml
open Core let generic_mk_set_and_print_for_test ~to_string ~set ~get buf = Staged.stage (fun ~test_name ~pos n -> try set buf ~pos n; printf "%s: %s\n" test_name (to_string (get buf ~pos)) with | e -> printf !"%s: %{Exn}\n" test_name e) ;; let mk_set_and_print_for_test = generic_mk_set_and_print_for_test ~to_string:string_of_int let generic_test_get ~to_string get buf ~pos = try printf "%s\n" (to_string (get buf ~pos)) with | e -> printf !"%{Exn}\n" e ;; let test_get = generic_test_get ~to_string:Int.to_string let%expect_test "set_int8_exn" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int8_exn ~get:Bigstring.get_int8 buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos:1 127; [%expect {| ok pos: 127 |}]; set_and_print ~test_name:"ok neg" ~pos:1 (-128); [%expect {| ok neg: -128 |}]; set_and_print ~test_name:"too large" ~pos:1 128; [%expect {| too large: (Invalid_argument "Bigstring.set_int8_exn: 128 is not a valid (signed) 8-bit integer") |}]; set_and_print ~test_name:"too small" ~pos:1 (-129); [%expect {| too small: (Invalid_argument "Bigstring.set_int8_exn: -129 is not a valid (signed) 8-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:16 64; [%expect {| out of bounds: (Invalid_argument "index out of bounds") |}] ;; let%expect_test "get_int8 oob" = let buf = Bigstring.init 1 ~f:(fun _ -> ' ') in test_get Bigstring.get_int8 buf ~pos:1; [%expect {| (Invalid_argument "index out of bounds") |}] ;; let%expect_test "set_uint8_exn" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 15 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint8_exn ~get:Bigstring.get_uint8 buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos 0xFF; [%expect {| ok: 255 |}]; set_and_print ~test_name:"too large" ~pos 0x100; [%expect {| too large: (Invalid_argument "Bigstring.set_uint8_exn: 256 is not a valid unsigned 8-bit integer") |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.set_uint8_exn: -1 is not a valid unsigned 8-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "index out of bounds") |}] ;; let%expect_test "get_int16_le oob" = let buf = Bigstring.init 2 ~f:(fun _ -> ' ') in test_get Bigstring.get_int16_le buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_16: length(bstr) < pos + len") |}] ;; let%expect_test "get_int16_be oob" = let buf = Bigstring.init 2 ~f:(fun _ -> ' ') in test_get Bigstring.get_int16_be buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_16: length(bstr) < pos + len") |}] ;; let%expect_test "set_int16_le_exn" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 14 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int16_le_exn ~get:Bigstring.get_int16_le buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos 32767; [%expect {| ok pos: 32767 |}]; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); [%expect {| @pos: 0xff @pos + 1: 0x7f |}]; set_and_print ~test_name:"ok neg" ~pos (-32768); [%expect {| ok neg: -32768 |}]; set_and_print ~test_name:"too large" ~pos 32768; [%expect {| too large: (Invalid_argument "Bigstring.write_int16: 32768 is not a valid (signed) 16-bit integer") |}]; set_and_print ~test_name:"too small" ~pos (-32769); [%expect {| too small: (Invalid_argument "Bigstring.write_int16: -32769 is not a valid (signed) 16-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_16: length(bstr) < pos + len") |}] ;; let%expect_test "set_int16_be_exn" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 14 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int16_be_exn ~get:Bigstring.get_int16_be buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos 32767; [%expect {| ok pos: 32767 |}]; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); [%expect {| @pos: 0x7f @pos + 1: 0xff |}]; set_and_print ~test_name:"ok neg" ~pos (-32768); [%expect {| ok neg: -32768 |}]; set_and_print ~test_name:"too large" ~pos 32768; [%expect {| too large: (Invalid_argument "Bigstring.write_int16: 32768 is not a valid (signed) 16-bit integer") |}]; set_and_print ~test_name:"too small" ~pos (-32769); [%expect {| too small: (Invalid_argument "Bigstring.write_int16: -32769 is not a valid (signed) 16-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_16: length(bstr) < pos + len") |}] ;; let%expect_test "set_uint16_le_exn" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 14 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint16_le_exn ~get:Bigstring.get_uint16_le buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos 0xFFFF; [%expect {| ok: 65535 |}]; set_and_print ~test_name:"endianness check" ~pos 51966; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); [%expect {| endianness check: 51966 @pos: 0xfe @pos + 1: 0xca |}]; set_and_print ~test_name:"too large" ~pos 0x1_0000; [%expect {| too large: (Invalid_argument "Bigstring.write_uint16: 65536 is not a valid unsigned 16-bit integer") |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.write_uint16: -1 is not a valid unsigned 16-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_16: length(bstr) < pos + len") |}] ;; let%expect_test "set_uint16_be_exn" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 14 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint16_be_exn ~get:Bigstring.get_uint16_be buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos 0xFFFF; [%expect {| ok: 65535 |}]; set_and_print ~test_name:"endianness check" ~pos 51966; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); [%expect {| endianness check: 51966 @pos: 0xca @pos + 1: 0xfe |}]; set_and_print ~test_name:"too large" ~pos 0x1_0000; [%expect {| too large: (Invalid_argument "Bigstring.write_uint16: 65536 is not a valid unsigned 16-bit integer") |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.write_uint16: -1 is not a valid unsigned 16-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_16: length(bstr) < pos + len") |}] ;; let%expect_test "get_uint16_le oob" = let buf = Bigstring.init 2 ~f:(fun _ -> ' ') in test_get Bigstring.get_uint16_le buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_16: length(bstr) < pos + len") |}] ;; let%expect_test "get_uint16_be oob" = let buf = Bigstring.init 2 ~f:(fun _ -> ' ') in test_get Bigstring.get_uint16_be buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_16: length(bstr) < pos + len") |}] ;; (* There's a slightly different code path here on 32-bit, so let's test it *) let%expect_test "set_int32_le_exn 32-bit" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int32_le_exn ~get:Bigstring.get_int32_le buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos 1073741823; [%expect {| ok pos: 1073741823 |}]; set_and_print ~test_name:"ok neg" ~pos (-1073741824); [%expect {| ok neg: -1073741824 |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test "set_int32_be_exn 32-bit" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int32_be_exn ~get:Bigstring.get_int32_be buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos 1073741823; [%expect {| ok pos: 1073741823 |}]; set_and_print ~test_name:"ok neg" ~pos (-1073741824); [%expect {| ok neg: -1073741824 |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test ("set_int32_le_exn" [@tags "64-bits-only"]) = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int32_le_exn ~get:Bigstring.get_int32_le buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos ((1 lsl 31) - 1); [%expect {| ok pos: 2147483647 |}]; set_and_print ~test_name:"ok neg" ~pos (-1 lsl 31); [%expect {| ok neg: -2147483648 |}]; set_and_print ~test_name:"endianness check" ~pos 0x1AFEDEAD; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); [%expect {| endianness check: 452910765 @pos: 0xad @pos + 1: 0xde @pos + 2: 0xfe @pos + 3: 0x1a |}]; set_and_print ~test_name:"too large" ~pos (1 lsl 31); [%expect {| too large: (Invalid_argument "Bigstring.write_int32_int: 2147483648 is not a valid (signed) 32-bit integer") |}]; set_and_print ~test_name:"too small" ~pos ((-1 lsl 31) - 1); [%expect {| too small: (Invalid_argument "Bigstring.write_int32_int: -2147483649 is not a valid (signed) 32-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test ("set_int32_be" [@tags "64-bits-only"]) = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int32_be_exn ~get:Bigstring.get_int32_be buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos ((1 lsl 31) - 1); [%expect {| ok pos: 2147483647 |}]; set_and_print ~test_name:"ok neg" ~pos (-1 lsl 31); [%expect {| ok neg: -2147483648 |}]; set_and_print ~test_name:"endianness check" ~pos 0x1AFEDEAD; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); [%expect {| endianness check: 452910765 @pos: 0x1a @pos + 1: 0xfe @pos + 2: 0xde @pos + 3: 0xad |}]; set_and_print ~test_name:"too large" ~pos (1 lsl 31); [%expect {| too large: (Invalid_argument "Bigstring.write_int32_int: 2147483648 is not a valid (signed) 32-bit integer") |}]; set_and_print ~test_name:"too small" ~pos ((-1 lsl 31) - 1); [%expect {| too small: (Invalid_argument "Bigstring.write_int32_int: -2147483649 is not a valid (signed) 32-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test "set_uint32_le_exn 32-bit" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint32_le_exn ~get:Bigstring.get_uint32_le buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos 1073741823; [%expect {| ok: 1073741823 |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.set_uint32_le_exn: -1 is not a valid unsigned 32-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test "get_int32_le oob" = let buf = Bigstring.init 4 ~f:(fun _ -> ' ') in test_get Bigstring.get_int32_le buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_32: length(bstr) < pos + len") |}] ;; let%expect_test "get_int32_be oob" = let buf = Bigstring.init 4 ~f:(fun _ -> ' ') in test_get Bigstring.get_int32_be buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_32: length(bstr) < pos + len") |}] ;; let%expect_test "set_uint32_be_exn 32-bit" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint32_be_exn ~get:Bigstring.get_uint32_be buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos 1073741823; [%expect {| ok: 1073741823 |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.set_uint32_be_exn: -1 is not a valid unsigned 32-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test ("set_uint32_le_exn" [@tags "64-bits-only"]) = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint32_le_exn ~get:Bigstring.get_uint32_le buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos ((1 lsl 32) - 1); [%expect {| ok: 4294967295 |}]; set_and_print ~test_name:"endianness check" ~pos ((0xCAFE lsl 16) lor 0xDEAD); printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); [%expect {| endianness check: 3405700781 @pos: 0xad @pos + 1: 0xde @pos + 2: 0xfe @pos + 3: 0xca |}]; set_and_print ~test_name:"too large" ~pos (1 lsl 32); [%expect {| too large: (Invalid_argument "Bigstring.set_uint32_le_exn: 4294967296 is not a valid unsigned 32-bit integer") |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.set_uint32_le_exn: -1 is not a valid unsigned 32-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test ("set_uint32_be_exn" [@tags "64-bits-only"]) = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint32_be_exn ~get:Bigstring.get_uint32_be buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos ((1 lsl 32) - 1); [%expect {| ok: 4294967295 |}]; set_and_print ~test_name:"endianness check" ~pos ((0xCAFE lsl 16) lor 0xDEAD); printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); [%expect {| endianness check: 3405700781 @pos: 0xca @pos + 1: 0xfe @pos + 2: 0xde @pos + 3: 0xad |}]; set_and_print ~test_name:"too large" ~pos (1 lsl 32); [%expect {| too large: (Invalid_argument "Bigstring.set_uint32_be_exn: 4294967296 is not a valid unsigned 32-bit integer") |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.set_uint32_be_exn: -1 is not a valid unsigned 32-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test "get_uint32_le oob" = let buf = Bigstring.init 4 ~f:(fun _ -> ' ') in test_get Bigstring.get_uint32_le buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_32: length(bstr) < pos + len") |}] ;; let%expect_test "get_uint32_be oob" = let buf = Bigstring.init 4 ~f:(fun _ -> ' ') in test_get Bigstring.get_uint32_be buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_32: length(bstr) < pos + len") |}] ;; let%expect_test "set_int32_t_le" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = generic_mk_set_and_print_for_test ~set:Bigstring.set_int32_t_le ~get:Bigstring.get_int32_t_le buf ~to_string:Int32.to_string |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos 0x7FFF_FFFFl; [%expect {| ok pos: 2147483647 |}]; set_and_print ~test_name:"ok neg" ~pos 0x8000_0000l; [%expect {| ok neg: -2147483648 |}]; set_and_print ~test_name:"endianness check" ~pos 0xCAFEDEADl; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); [%expect {| endianness check: -889266515 @pos: 0xad @pos + 1: 0xde @pos + 2: 0xfe @pos + 3: 0xca |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64l; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test "set_int32_t_be" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 12 in let set_and_print = generic_mk_set_and_print_for_test ~set:Bigstring.set_int32_t_be ~get:Bigstring.get_int32_t_be buf ~to_string:Int32.to_string |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos 0x7FFF_FFFFl; [%expect {| ok pos: 2147483647 |}]; set_and_print ~test_name:"ok neg" ~pos 0x8000_0000l; [%expect {| ok neg: -2147483648 |}]; set_and_print ~test_name:"endianness check" ~pos 0xCAFEDEADl; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); [%expect {| endianness check: -889266515 @pos: 0xca @pos + 1: 0xfe @pos + 2: 0xde @pos + 3: 0xad |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64l; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_32: length(bstr) < pos + len") |}] ;; let%expect_test "get_uint32_le oob" = let buf = Bigstring.init 4 ~f:(fun _ -> ' ') in generic_test_get ~to_string:Int32.to_string Bigstring.get_int32_t_le buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_32: length(bstr) < pos + len") |}] ;; let%expect_test "get_uint32_be oob" = let buf = Bigstring.init 4 ~f:(fun _ -> ' ') in generic_test_get ~to_string:Int32.to_string Bigstring.get_int32_t_be buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_32: length(bstr) < pos + len") |}] ;; let%expect_test ("set_int64_le" [@tags "64-bits-only"]) = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 8 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int64_le ~get:Bigstring.get_int64_le_exn buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos ((1 lsl 62) - 1); [%expect {| ok pos: 4611686018427387903 |}]; set_and_print ~test_name:"ok neg" ~pos (-1 lsl 62); [%expect {| ok neg: -4611686018427387904 |}]; set_and_print ~test_name:"endianness check" ~pos ((0x1AFE lsl 48) lor (0xBABE lsl 32) lor (0xDEAD lsl 16) lor 0xBEEF); printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); printf "@pos + 4: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 4)); printf "@pos + 5: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 5)); printf "@pos + 6: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 6)); printf "@pos + 7: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 7)); [%expect {| endianness check: 1945197418013114095 @pos: 0xef @pos + 1: 0xbe @pos + 2: 0xad @pos + 3: 0xde @pos + 4: 0xbe @pos + 5: 0xba @pos + 6: 0xfe @pos + 7: 0x1a |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_64: length(bstr) < pos + len") |}] ;; let%expect_test ("set_int64_be" [@tags "64-bits-only"]) = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 8 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_int64_be ~get:Bigstring.get_int64_be_exn buf |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos ((1 lsl 62) - 1); [%expect {| ok pos: 4611686018427387903 |}]; set_and_print ~test_name:"ok neg" ~pos (-1 lsl 62); [%expect {| ok neg: -4611686018427387904 |}]; set_and_print ~test_name:"endianness check" ~pos ((0x1AFE lsl 48) lor (0xBABE lsl 32) lor (0xDEAD lsl 16) lor 0xBEEF); printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); printf "@pos + 4: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 4)); printf "@pos + 5: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 5)); printf "@pos + 6: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 6)); printf "@pos + 7: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 7)); [%expect {| endianness check: 1945197418013114095 @pos: 0x1a @pos + 1: 0xfe @pos + 2: 0xba @pos + 3: 0xbe @pos + 4: 0xde @pos + 5: 0xad @pos + 6: 0xbe @pos + 7: 0xef |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_64: length(bstr) < pos + len") |}] ;; let%expect_test "get_int64_le_exn oob" = let buf = Bigstring.init 8 ~f:(fun _ -> ' ') in test_get Bigstring.get_int64_le_exn buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_64: length(bstr) < pos + len") |}] ;; let%expect_test "get_int64_be_exn oob" = let buf = Bigstring.init 8 ~f:(fun _ -> ' ') in test_get Bigstring.get_int64_be_exn buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_64: length(bstr) < pos + len") |}] ;; let%expect_test "get_int64_le_trunc oob" = let buf = Bigstring.init 4 ~f:(fun _ -> ' ') in test_get Bigstring.get_int64_le_trunc buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_64: length(bstr) < pos + len") |}] ;; let%expect_test "get_int64_be_trunc oob" = let buf = Bigstring.init 4 ~f:(fun _ -> ' ') in test_get Bigstring.get_int64_be_trunc buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_64: length(bstr) < pos + len") |}] ;; let%expect_test "get_int64_le_exn raises" = let buf = Bigstring.init 8 ~f:(fun _ -> ' ') in Bigstring.set_int64_t_le buf ~pos:0 0x8000000000000000L; test_get Bigstring.get_int64_le_exn buf ~pos:0; [%expect {| (Failure "unsafe_read_int64: value cannot be represented unboxed!") |}] ;; let%expect_test "get_int64_be_exn raises" = let buf = Bigstring.init 8 ~f:(fun _ -> ' ') in Bigstring.set_int64_t_be buf ~pos:0 0x8000000000000000L; test_get Bigstring.get_int64_be_exn buf ~pos:0; [%expect {| (Failure "unsafe_read_int64: value cannot be represented unboxed!") |}] ;; let%expect_test "get_int64_le_trunc truncates" = let buf = Bigstring.init 8 ~f:(fun _ -> ' ') in Bigstring.set_int64_t_le buf ~pos:0 0x8000000000000000L; test_get Bigstring.get_int64_le_trunc buf ~pos:0; [%expect {| 0 |}] ;; let%expect_test "get_int64_be_trunc truncates" = let buf = Bigstring.init 8 ~f:(fun _ -> ' ') in Bigstring.set_int64_t_be buf ~pos:0 0x8000000000000000L; test_get Bigstring.get_int64_be_trunc buf ~pos:0; [%expect {| 0 |}] ;; let%expect_test ("set_uint64_le_exn" [@tags "64-bits-only"]) = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 8 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint64_le_exn ~get:Bigstring.get_uint64_le_exn buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos ((1 lsl 62) - 1); [%expect {| ok: 4611686018427387903 |}]; set_and_print ~test_name:"endianness check" ~pos ((0x1AFE lsl 48) lor (0xBABE lsl 32) lor (0xDEAD lsl 16) lor 0xBEEF); printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); printf "@pos + 4: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 4)); printf "@pos + 5: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 5)); printf "@pos + 6: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 6)); printf "@pos + 7: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 7)); [%expect {| endianness check: 1945197418013114095 @pos: 0xef @pos + 1: 0xbe @pos + 2: 0xad @pos + 3: 0xde @pos + 4: 0xbe @pos + 5: 0xba @pos + 6: 0xfe @pos + 7: 0x1a |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.set_uint64_le_exn: -1 is not a valid unsigned 64-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_64: length(bstr) < pos + len") |}] ;; let%expect_test ("set_uint64_be_exn" [@tags "64-bits-only"]) = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 8 in let set_and_print = mk_set_and_print_for_test ~set:Bigstring.set_uint64_be_exn ~get:Bigstring.get_uint64_be_exn buf |> Staged.unstage in set_and_print ~test_name:"ok" ~pos ((1 lsl 62) - 1); [%expect {| ok: 4611686018427387903 |}]; set_and_print ~test_name:"endianness check" ~pos ((0x1AFE lsl 48) lor (0xBABE lsl 32) lor (0xDEAD lsl 16) lor 0xBEEF); printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); printf "@pos + 4: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 4)); printf "@pos + 5: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 5)); printf "@pos + 6: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 6)); printf "@pos + 7: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 7)); [%expect {| endianness check: 1945197418013114095 @pos: 0x1a @pos + 1: 0xfe @pos + 2: 0xba @pos + 3: 0xbe @pos + 4: 0xde @pos + 5: 0xad @pos + 6: 0xbe @pos + 7: 0xef |}]; set_and_print ~test_name:"too small" ~pos (-1); [%expect {| too small: (Invalid_argument "Bigstring.set_uint64_be_exn: -1 is not a valid unsigned 64-bit integer") |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_64: length(bstr) < pos + len") |}] ;; let%expect_test "set_int64_t_le" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 8 in let set_and_print = generic_mk_set_and_print_for_test ~set:Bigstring.set_int64_t_le ~get:Bigstring.get_int64_t_le buf ~to_string:Int64.to_string |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos 0x7FFF_FFFF_FFFF_FFFFL; [%expect {| ok pos: 9223372036854775807 |}]; set_and_print ~test_name:"ok neg" ~pos 0x8000_0000_0000_0000L; [%expect {| ok neg: -9223372036854775808 |}]; set_and_print ~test_name:"endianness check" ~pos 0xCAFEBABEDEADBEEFL; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); printf "@pos + 4: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 4)); printf "@pos + 5: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 5)); printf "@pos + 6: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 6)); printf "@pos + 7: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 7)); [%expect {| endianness check: -3819410105021120785 @pos: 0xef @pos + 1: 0xbe @pos + 2: 0xad @pos + 3: 0xde @pos + 4: 0xbe @pos + 5: 0xba @pos + 6: 0xfe @pos + 7: 0xca |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64L; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_64: length(bstr) < pos + len") |}] ;; let%expect_test "set_int64_t_be" = let buf = Bigstring.init 16 ~f:(fun _ -> ' ') in let pos = 8 in let set_and_print = generic_mk_set_and_print_for_test ~set:Bigstring.set_int64_t_be ~get:Bigstring.get_int64_t_be buf ~to_string:Int64.to_string |> Staged.unstage in set_and_print ~test_name:"ok pos" ~pos 0x7FFF_FFFF_FFFF_FFFFL; [%expect {| ok pos: 9223372036854775807 |}]; set_and_print ~test_name:"ok neg" ~pos 0x8000_0000_0000_0000L; [%expect {| ok neg: -9223372036854775808 |}]; set_and_print ~test_name:"endianness check" ~pos 0xCAFEBABEDEADBEEFL; printf "@pos: 0x%x\n" (Bigstring.get_uint8 buf ~pos); printf "@pos + 1: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 1)); printf "@pos + 2: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 2)); printf "@pos + 3: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 3)); printf "@pos + 4: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 4)); printf "@pos + 5: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 5)); printf "@pos + 6: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 6)); printf "@pos + 7: 0x%x\n" (Bigstring.get_uint8 buf ~pos:(pos + 7)); [%expect {| endianness check: -3819410105021120785 @pos: 0xca @pos + 1: 0xfe @pos + 2: 0xba @pos + 3: 0xbe @pos + 4: 0xde @pos + 5: 0xad @pos + 6: 0xbe @pos + 7: 0xef |}]; set_and_print ~test_name:"out of bounds" ~pos:(pos + 1) 64L; [%expect {| out of bounds: (Invalid_argument "Bigstring.set_64: length(bstr) < pos + len") |}] ;; let%expect_test "get_int64_t_le oob" = let buf = Bigstring.init 8 ~f:(fun _ -> ' ') in generic_test_get ~to_string:Int64.to_string Bigstring.get_int64_t_le buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_64: length(bstr) < pos + len") |}] ;; let%expect_test "get_int64_t_be oob" = let buf = Bigstring.init 8 ~f:(fun _ -> ' ') in generic_test_get ~to_string:Int64.to_string Bigstring.get_int64_t_be buf ~pos:1; [%expect {| (Invalid_argument "Bigstring.get_64: length(bstr) < pos + len") |}] ;;
fooppx.ml
open Ppxlib let rules = let extension = Extension.declare "test" Expression Ast_pattern.__ (fun ~loc ~path:_ _ -> Ast_builder.Default.eint ~loc 42) in [ Context_free.Rule.extension extension ] let () = Ppxlib.Driver.register_transformation "rules" ~rules
t-mod.c
#include <stdio.h> #include <stdlib.h> #include <gmp.h> #include "flint.h" #include "ulong_extras.h" #include "fmpz.h" int main(void) { int i, result; FLINT_TEST_INIT(state); flint_printf("mod...."); fflush(stdout); for (i = 0; i < 10000 * flint_test_multiplier(); i++) { fmpz_t a, b, c; mpz_t d, e, f, g; fmpz_init(a); fmpz_init(b); fmpz_init(c); mpz_init(d); mpz_init(e); mpz_init(f); mpz_init(g); fmpz_randtest(a, state, 200); fmpz_randtest_not_zero(b, state, 200); fmpz_get_mpz(d, a); fmpz_get_mpz(e, b); fmpz_mod(c, a, b); mpz_mod(f, d, e); fmpz_get_mpz(g, c); result = (mpz_cmp(f, g) == 0); if (!result) { flint_printf("FAIL:\n"); gmp_printf("d = %Zd, e = %Zd, f = %Zd, g = %Zd\n", d, e, f, g); fflush(stdout); flint_abort(); } fmpz_clear(a); fmpz_clear(b); fmpz_clear(c); mpz_clear(d); mpz_clear(e); mpz_clear(f); mpz_clear(g); } /* Check aliasing of a and b */ for (i = 0; i < 10000 * flint_test_multiplier(); i++) { fmpz_t a, c; mpz_t d, f, g; fmpz_init(a); fmpz_init(c); mpz_init(d); mpz_init(f); mpz_init(g); fmpz_randtest_not_zero(a, state, 200); fmpz_get_mpz(d, a); fmpz_mod(c, a, a); mpz_mod(f, d, d); fmpz_get_mpz(g, c); result = (mpz_cmp(f, g) == 0); if (!result) { flint_printf("FAIL:\n"); gmp_printf("d = %Zd, f = %Zd, g = %Zd\n", d, f, g); fflush(stdout); flint_abort(); } fmpz_clear(a); fmpz_clear(c); mpz_clear(d); mpz_clear(f); mpz_clear(g); } /* Test aliasing of a and c */ for (i = 0; i < 10000 * flint_test_multiplier(); i++) { fmpz_t a, b; mpz_t d, e, f, g; fmpz_init(a); fmpz_init(b); mpz_init(d); mpz_init(e); mpz_init(f); mpz_init(g); fmpz_randtest(a, state, 200); fmpz_randtest_not_zero(b, state, 200); fmpz_get_mpz(d, a); fmpz_get_mpz(e, b); fmpz_mod(a, a, b); mpz_mod(f, d, e); fmpz_get_mpz(g, a); result = (mpz_cmp(f, g) == 0); if (!result) { flint_printf("FAIL:\n"); gmp_printf("d = %Zd, e = %Zd, f = %Zd, g = %Zd\n", d, e, f, g); fflush(stdout); flint_abort(); } fmpz_clear(a); fmpz_clear(b); mpz_clear(d); mpz_clear(e); mpz_clear(f); mpz_clear(g); } /* Test aliasing of b and c */ for (i = 0; i < 10000 * flint_test_multiplier(); i++) { fmpz_t a, b; mpz_t d, e, f, g; fmpz_init(a); fmpz_init(b); mpz_init(d); mpz_init(e); mpz_init(f); mpz_init(g); fmpz_randtest(a, state, 200); fmpz_randtest_not_zero(b, state, 200); fmpz_get_mpz(d, a); fmpz_get_mpz(e, b); fmpz_mod(b, a, b); mpz_mod(f, d, e); fmpz_get_mpz(g, b); result = (mpz_cmp(f, g) == 0); if (!result) { flint_printf("FAIL:\n"); gmp_printf("d = %Zd, e = %Zd, f = %Zd, g = %Zd\n", d, e, f, g); fflush(stdout); flint_abort(); } fmpz_clear(a); fmpz_clear(b); mpz_clear(d); mpz_clear(e); mpz_clear(f); mpz_clear(g); } FLINT_TEST_CLEANUP(state); flint_printf("PASS\n"); return 0; }
/* Copyright (C) 2009 William Hart This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
lm_lexer.mli
(* * Debug flags. *) val debug_lexgen : bool ref val debug_lex : bool ref (* * The lexer takes an input stream as an argument. *) module type LexerInput = sig (* * Input channel is a stream of integers. * Usually these are just the ASCII codes for characters. *) type t (* * The channel has two special characters. * bof: the beginning of file * eof: the end of file. *) val bof : int val eof : int (* * The next function returns the next character in the input stream. *) val lex_next : t -> int (* * The pos function returns the current position of * the input buffer within the lexeme * (used for collecting \( ... \) arguments. *) val lex_pos : t -> int (* * The lexer will call start when it begins lexing. * The integer should be the *previous* character in the * input channel, or bof if at the beginning. *) val lex_start : t -> int (* * In some cases, the lexer may want to restart scanning * from a previous point. If so, it will call this function * to reset the start point. *) val lex_restart : t -> int -> unit (* * When the lexer is done, it calls lex_stop with * the number of characters in the final lexeme. Note * that this can cause data to be pushed back onto the input stream. *) val lex_stop : t -> int -> unit (* * Before calling lex_stop, the lexer may ask for the * lexeme as a string. The integer is the number of * characters in the lexeme, the same as the argument * to lex_stop. *) val lex_string : t -> int -> string val lex_substring : t -> int -> int -> string val lex_loc : t -> int -> Lm_location.t end (* * Semantic actions. *) module type LexerAction = sig (* * Values of action type *must* be comparable with =, * hopefully quickly. * * For example, functions are not allowed. * If you want a function, you should make an array of functions, * and use the index for the action name. *) type action (* For debugging *) val pp_print_action : action Lm_printf.t (* For creating sets and tables *) val hash : action -> int val compare : action -> action -> int (* * You can use the function to decide which clauses take * precedence for a match of equal length. The function * gets two clause numbers. If you use the min function, * then you get the first clause that matched. If you * use the max function, you get the second clause that * matched. *) val choose : int -> int -> int end module MakeLexer (Input : LexerInput) (Action : LexerAction) : sig open Action type t (* Return values from the searchto function *) type searchto_info = LexEOF | LexSkipped of Lm_location.t * string | LexMatched of action * Lm_location.t * string * string * string list (* The empty lexer accepts the empty language *) val empty : t (* Add a clause, specified as a regular expression *) val add_clause : t -> action -> string -> int * t (* Remove a clause by action name *) val remove_clause : t -> action -> t (* * Union of two lexers. * The union assumes that actions with the same name * have the same regular expression. *) val union : t -> t -> t (* * Compile the machine if not already compiled. * This is entirely optional. It is here just in case you * want to expand the machine eagerly (for example before * marshaling it to a file). *) val compile : t -> unit (* * Print the lexer. * This is mainly for debugging. *) val pp_print_lexer : t Lm_printf.t (* * Hash code for the lexer. *) val hash : t -> int (* * Now match against an input channel. * The result is (clause, lexeme, args) * clause: the index of the clause that matched * lexeme: the entire string that matched * args: the arguments for \(...\) patterns. *) val lex : t -> Input.t -> action * Lm_location.t * string * string list (* * Search for the first occurrence of a match. * Return the unmatched data that was skipped as well. * (action, skipped, matched, args) * This will not read past EOF. *) val search : t -> Input.t -> (action * Lm_location.t * string * string * string list) option (* * The searchto function is similar, but if it doesn't detect a match, * it returns the text to the end of the channel. *) val searchto : t -> Input.t -> searchto_info (* * Just check if a string matches. *) val matches : t -> Input.t -> bool end (* * Str module replacement. *) module LmStr : sig type t (* * Construct a regular expression from a string. *) val regexp : string -> t (* * Check for a match. *) val string_match : t -> string -> int -> bool end
(* * Debug flags. *)
longident.ml
type t = Lident of string | Ldot of t * string | Lapply of t * t let rec flat accu = function Lident s -> s :: accu | Ldot(lid, s) -> flat (s :: accu) lid | Lapply(_, _) -> Misc.fatal_error "Longident.flat" let flatten lid = flat [] lid let rec head = function Lident s -> s | Ldot(lid, _) -> head lid | Lapply(_, _) -> assert false let last = function Lident s -> s | Ldot(_, s) -> s | Lapply(_, _) -> Misc.fatal_error "Longident.last" let rec split_at_dots s pos = try let dot = String.index_from s pos '.' in String.sub s pos (dot - pos) :: split_at_dots s (dot + 1) with Not_found -> [String.sub s pos (String.length s - pos)] let unflatten l = match l with | [] -> None | hd :: tl -> Some (List.fold_left (fun p s -> Ldot(p, s)) (Lident hd) tl) let parse s = match unflatten (split_at_dots s 0) with | None -> Lident "" (* should not happen, but don't put assert false so as not to crash the toplevel (see Genprintval) *) | Some v -> v let keep_suffix = let rec aux = function | Lident str -> if String.uncapitalize_ascii str <> str then Some (Lident str, false) else None | Ldot (t, str) -> if String.uncapitalize_ascii str <> str then match aux t with | None -> Some (Lident str, true) | Some (t, is_label) -> Some (Ldot (t, str), is_label) else None | t -> Some (t, false) (* Can be improved... *) in function | Lident s -> Lident s, false | Ldot (t, s) -> begin match aux t with | None -> Lident s, true | Some (t, is_label) -> Ldot (t, s), is_label end | otherwise -> otherwise, false
(**************************************************************************) (* *) (* OCaml *) (* *) (* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) (* *) (* Copyright 1996 Institut National de Recherche en Informatique et *) (* en Automatique. *) (* *) (* All rights reserved. This file is distributed under the terms of *) (* the GNU Lesser General Public License version 2.1, with the *) (* special exception on linking described in the file LICENSE. *) (* *) (**************************************************************************)
script_ir_translator.ml
open Alpha_context open Micheline open Script open Script_tc_errors open Script_ir_annot open Script_typed_ir module Typecheck_costs = Michelson_v1_gas.Cost_of.Typechecking module Unparse_costs = Michelson_v1_gas.Cost_of.Unparsing module Tc_context = Script_tc_context type ex_stack_ty = Ex_stack_ty : ('a, 's) stack_ty -> ex_stack_ty (** Equality witnesses *) type ('ta, 'tb) eq = Eq : ('same, 'same) eq (* The following type represents an instruction parameterized by its continuation. During the elaboration of the typed term, a sequence of instructions in Micheline is read from left to right: hence, the elaboration needs to wait for the next instruction to be elaborated to be able to construct the current instruction. *) type ('a, 's, 'b, 'u) cinstr = { apply : 'r 'f. ('a, 's) kinfo -> ('b, 'u, 'r, 'f) kinstr -> ('a, 's, 'r, 'f) kinstr; } (* While a [Script_typed_ir.descr] contains a fully defined instruction, [descr] contains a [cinstr], that is an instruction parameterized by the next instruction, as explained in the previous comment. *) type ('a, 's, 'b, 'u) descr = { loc : Script.location; bef : ('a, 's) stack_ty; aft : ('b, 'u) stack_ty; instr : ('a, 's, 'b, 'u) cinstr; } let close_descr {loc; bef; aft; instr} = let kinfo = {iloc = loc; kstack_ty = aft} in let kinfo' = {iloc = loc; kstack_ty = bef} in let kinstr = instr.apply kinfo' (IHalt kinfo) in {kloc = loc; kbef = bef; kaft = aft; kinstr} let kinfo_of_descr {loc; bef; _} = {iloc = loc; kstack_ty = bef} let compose_descr : type a s b u c v. Script.location -> (a, s, b, u) descr -> (b, u, c, v) descr -> (a, s, c, v) descr = fun loc d1 d2 -> { loc; bef = d1.bef; aft = d2.aft; instr = { apply = (fun _ k -> d1.instr.apply (kinfo_of_descr d1) (d2.instr.apply (kinfo_of_descr d2) k)); }; } type tc_context = Tc_context.t type unparsing_mode = Optimized | Readable | Optimized_legacy type type_logger = Script.location -> stack_ty_before:Script.expr list -> stack_ty_after:Script.expr list -> unit (* ---- Error helpers -------------------------------------------------------*) let location = function | Prim (loc, _, _, _) | Int (loc, _) | String (loc, _) | Bytes (loc, _) | Seq (loc, _) -> loc let kind_equal a b = match (a, b) with | (Int_kind, Int_kind) | (String_kind, String_kind) | (Bytes_kind, Bytes_kind) | (Prim_kind, Prim_kind) | (Seq_kind, Seq_kind) -> true | _ -> false let kind = function | Int _ -> Int_kind | String _ -> String_kind | Bytes _ -> Bytes_kind | Prim _ -> Prim_kind | Seq _ -> Seq_kind let unexpected expr exp_kinds exp_ns exp_prims = match expr with | Int (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Int_kind) | String (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, String_kind) | Bytes (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Bytes_kind) | Seq (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Seq_kind) | Prim (loc, name, _, _) -> ( let open Michelson_v1_primitives in match (namespace name, exp_ns) with | (Type_namespace, Type_namespace) | (Instr_namespace, Instr_namespace) | (Constant_namespace, Constant_namespace) -> Invalid_primitive (loc, exp_prims, name) | (ns, _) -> Invalid_namespace (loc, name, exp_ns, ns)) let check_kind kinds expr = let kind = kind expr in if List.exists (kind_equal kind) kinds then Result.return_unit else let loc = location expr in error (Invalid_kind (loc, kinds, kind)) (* ---- Unparsing (Typed IR -> Untyped expressions) of types -----------------*) (* This part contains the unparsing that does not depend on parsing (everything that cannot contain a lambda). The rest is located at the end of the file. *) let unparse_memo_size ~loc memo_size = let z = Sapling.Memo_size.unparse_to_z memo_size in Int (loc, z) let rec unparse_ty_and_entrypoints_uncarbonated : type a ac loc. loc:loc -> (a, ac) ty -> a entrypoints_node -> loc Script.michelson_node = fun ~loc ty {nested = nested_entrypoints; at_node} -> let (name, args) = match ty with | Unit_t -> (T_unit, []) | Int_t -> (T_int, []) | Nat_t -> (T_nat, []) | Signature_t -> (T_signature, []) | String_t -> (T_string, []) | Bytes_t -> (T_bytes, []) | Mutez_t -> (T_mutez, []) | Bool_t -> (T_bool, []) | Key_hash_t -> (T_key_hash, []) | Key_t -> (T_key, []) | Timestamp_t -> (T_timestamp, []) | Address_t -> (T_address, []) | Tx_rollup_l2_address_t -> (T_tx_rollup_l2_address, []) | Operation_t -> (T_operation, []) | Chain_id_t -> (T_chain_id, []) | Never_t -> (T_never, []) | Bls12_381_g1_t -> (T_bls12_381_g1, []) | Bls12_381_g2_t -> (T_bls12_381_g2, []) | Bls12_381_fr_t -> (T_bls12_381_fr, []) | Contract_t (ut, _meta) -> let t = unparse_ty_and_entrypoints_uncarbonated ~loc ut no_entrypoints in (T_contract, [t]) | Pair_t (utl, utr, _meta, _) -> ( let tl = unparse_ty_and_entrypoints_uncarbonated ~loc utl no_entrypoints in let tr = unparse_ty_and_entrypoints_uncarbonated ~loc utr no_entrypoints in (* Fold [pair a1 (pair ... (pair an-1 an))] into [pair a1 ... an] *) (* Note that the folding does not happen if the pair on the right has an annotation because this annotation would be lost *) match tr with | Prim (_, T_pair, ts, []) -> (T_pair, tl :: ts) | _ -> (T_pair, [tl; tr])) | Union_t (utl, utr, _meta, _) -> let (entrypoints_l, entrypoints_r) = match nested_entrypoints with | Entrypoints_None -> (no_entrypoints, no_entrypoints) | Entrypoints_Union {left; right} -> (left, right) in let tl = unparse_ty_and_entrypoints_uncarbonated ~loc utl entrypoints_l in let tr = unparse_ty_and_entrypoints_uncarbonated ~loc utr entrypoints_r in (T_or, [tl; tr]) | Lambda_t (uta, utr, _meta) -> let ta = unparse_ty_and_entrypoints_uncarbonated ~loc uta no_entrypoints in let tr = unparse_ty_and_entrypoints_uncarbonated ~loc utr no_entrypoints in (T_lambda, [ta; tr]) | Option_t (ut, _meta, _) -> let ut = unparse_ty_and_entrypoints_uncarbonated ~loc ut no_entrypoints in (T_option, [ut]) | List_t (ut, _meta) -> let t = unparse_ty_and_entrypoints_uncarbonated ~loc ut no_entrypoints in (T_list, [t]) | Ticket_t (ut, _meta) -> let t = unparse_comparable_ty_uncarbonated ~loc ut in (T_ticket, [t]) | Set_t (ut, _meta) -> let t = unparse_comparable_ty_uncarbonated ~loc ut in (T_set, [t]) | Map_t (uta, utr, _meta) -> let ta = unparse_comparable_ty_uncarbonated ~loc uta in let tr = unparse_ty_and_entrypoints_uncarbonated ~loc utr no_entrypoints in (T_map, [ta; tr]) | Big_map_t (uta, utr, _meta) -> let ta = unparse_comparable_ty_uncarbonated ~loc uta in let tr = unparse_ty_and_entrypoints_uncarbonated ~loc utr no_entrypoints in (T_big_map, [ta; tr]) | Sapling_transaction_t memo_size -> (T_sapling_transaction, [unparse_memo_size ~loc memo_size]) | Sapling_transaction_deprecated_t memo_size -> (T_sapling_transaction_deprecated, [unparse_memo_size ~loc memo_size]) | Sapling_state_t memo_size -> (T_sapling_state, [unparse_memo_size ~loc memo_size]) | Chest_key_t -> (T_chest_key, []) | Chest_t -> (T_chest, []) in let annot = match at_node with | None -> [] | Some {name; original_type_expr = _} -> [Entrypoint.unparse_as_field_annot name] in Prim (loc, name, args, annot) and unparse_comparable_ty_uncarbonated : type a loc. loc:loc -> a comparable_ty -> loc Script.michelson_node = fun ~loc ty -> unparse_ty_and_entrypoints_uncarbonated ~loc ty no_entrypoints let unparse_ty_uncarbonated ~loc ty = unparse_ty_and_entrypoints_uncarbonated ~loc ty no_entrypoints let unparse_ty ~loc ctxt ty = Gas.consume ctxt (Unparse_costs.unparse_type ty) >|? fun ctxt -> (unparse_ty_uncarbonated ~loc ty, ctxt) let unparse_parameter_ty ~loc ctxt ty ~entrypoints = Gas.consume ctxt (Unparse_costs.unparse_type ty) >|? fun ctxt -> (unparse_ty_and_entrypoints_uncarbonated ~loc ty entrypoints.root, ctxt) let serialize_ty_for_error ty = (* Types are bounded by [Constants.michelson_maximum_type_size], so [unparse_ty_uncarbonated] and [strip_locations] are bounded in time. It is hence OK to use them in errors that are not caught in the validation (only once in apply). *) unparse_ty_uncarbonated ~loc:() ty |> Micheline.strip_locations let[@coq_axiom_with_reason "gadt"] check_comparable : type a ac. Script.location -> (a, ac) ty -> (ac, Dependent_bool.yes) eq tzresult = fun loc ty -> match is_comparable ty with | Yes -> ok Eq | No -> let t = serialize_ty_for_error ty in error (Comparable_type_expected (loc, t)) let rec unparse_stack_uncarbonated : type a s. (a, s) stack_ty -> Script.expr list = function | Bot_t -> [] | Item_t (ty, rest) -> let uty = unparse_ty_uncarbonated ~loc:() ty in let urest = unparse_stack_uncarbonated rest in strip_locations uty :: urest let serialize_stack_for_error ctxt stack_ty = match Gas.level ctxt with | Unaccounted -> unparse_stack_uncarbonated stack_ty | Limited _ -> [] let unparse_unit ~loc ctxt () = ok (Prim (loc, D_Unit, [], []), ctxt) let unparse_int ~loc ctxt v = ok (Int (loc, Script_int.to_zint v), ctxt) let unparse_nat ~loc ctxt v = ok (Int (loc, Script_int.to_zint v), ctxt) let unparse_string ~loc ctxt s = ok (String (loc, Script_string.to_string s), ctxt) let unparse_bytes ~loc ctxt s = ok (Bytes (loc, s), ctxt) let unparse_bool ~loc ctxt b = ok (Prim (loc, (if b then D_True else D_False), [], []), ctxt) let unparse_timestamp ~loc ctxt mode t = match mode with | Optimized | Optimized_legacy -> ok (Int (loc, Script_timestamp.to_zint t), ctxt) | Readable -> ( Gas.consume ctxt Unparse_costs.timestamp_readable >>? fun ctxt -> match Script_timestamp.to_notation t with | None -> ok (Int (loc, Script_timestamp.to_zint t), ctxt) | Some s -> ok (String (loc, s), ctxt)) let unparse_address ~loc ctxt mode {destination; entrypoint} = match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.contract_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Data_encoding.(tup2 Destination.encoding Entrypoint.value_encoding) (destination, entrypoint) in (Bytes (loc, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.contract_readable >|? fun ctxt -> let notation = Destination.to_b58check destination ^ Entrypoint.to_address_suffix entrypoint in (String (loc, notation), ctxt) let unparse_tx_rollup_l2_address ~loc ctxt mode (tx_address : tx_rollup_l2_address) = let tx_address = Indexable.to_value tx_address in match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.contract_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Tx_rollup_l2_address.encoding tx_address in (Bytes (loc, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.contract_readable >|? fun ctxt -> let b58check = Tx_rollup_l2_address.to_b58check tx_address in (String (loc, b58check), ctxt) let unparse_contract ~loc ctxt mode (Typed_contract {arg_ty = _; address}) = unparse_address ~loc ctxt mode address let unparse_signature ~loc ctxt mode s = let s = Script_signature.get s in match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.signature_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Signature.encoding s in (Bytes (loc, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.signature_readable >|? fun ctxt -> (String (loc, Signature.to_b58check s), ctxt) let unparse_mutez ~loc ctxt v = ok (Int (loc, Z.of_int64 (Tez.to_mutez v)), ctxt) let unparse_key ~loc ctxt mode k = match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.public_key_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Signature.Public_key.encoding k in (Bytes (loc, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.public_key_readable >|? fun ctxt -> (String (loc, Signature.Public_key.to_b58check k), ctxt) let unparse_key_hash ~loc ctxt mode k = match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.key_hash_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Signature.Public_key_hash.encoding k in (Bytes (loc, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.key_hash_readable >|? fun ctxt -> (String (loc, Signature.Public_key_hash.to_b58check k), ctxt) (* Operations are only unparsed during the production of execution traces of the interpreter. *) let unparse_operation ~loc ctxt {piop; lazy_storage_diff = _} = let iop = Apply_results.contents_of_packed_internal_operation piop in let bytes = Data_encoding.Binary.to_bytes_exn Apply_results.internal_contents_encoding iop in Gas.consume ctxt (Unparse_costs.operation bytes) >|? fun ctxt -> (Bytes (loc, bytes), ctxt) let unparse_chain_id ~loc ctxt mode chain_id = match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.chain_id_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Script_chain_id.encoding chain_id in (Bytes (loc, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.chain_id_readable >|? fun ctxt -> (String (loc, Script_chain_id.to_b58check chain_id), ctxt) let unparse_bls12_381_g1 ~loc ctxt x = Gas.consume ctxt Unparse_costs.bls12_381_g1 >|? fun ctxt -> let bytes = Script_bls.G1.to_bytes x in (Bytes (loc, bytes), ctxt) let unparse_bls12_381_g2 ~loc ctxt x = Gas.consume ctxt Unparse_costs.bls12_381_g2 >|? fun ctxt -> let bytes = Script_bls.G2.to_bytes x in (Bytes (loc, bytes), ctxt) let unparse_bls12_381_fr ~loc ctxt x = Gas.consume ctxt Unparse_costs.bls12_381_fr >|? fun ctxt -> let bytes = Script_bls.Fr.to_bytes x in (Bytes (loc, bytes), ctxt) let unparse_with_data_encoding ~loc ctxt s unparse_cost encoding = Lwt.return ( Gas.consume ctxt unparse_cost >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn encoding s in (Bytes (loc, bytes), ctxt) ) (* -- Unparsing data of complex types -- *) type ('ty, 'depth) comb_witness = | Comb_Pair : ('t, 'd) comb_witness -> (_ * 't, unit -> 'd) comb_witness | Comb_Any : (_, _) comb_witness let unparse_pair (type r) ~loc unparse_l unparse_r ctxt mode (r_comb_witness : (r, unit -> unit -> _) comb_witness) (l, (r : r)) = unparse_l ctxt l >>=? fun (l, ctxt) -> unparse_r ctxt r >|=? fun (r, ctxt) -> (* Fold combs. For combs, three notations are supported: - a) [Pair x1 (Pair x2 ... (Pair xn-1 xn) ...)], - b) [Pair x1 x2 ... xn-1 xn], and - c) [{x1; x2; ...; xn-1; xn}]. In readable mode, we always use b), in optimized mode we use the shortest to serialize: - for n=2, [Pair x1 x2], - for n=3, [Pair x1 (Pair x2 x3)], - for n>=4, [{x1; x2; ...; xn}]. *) let res = match (mode, r_comb_witness, r) with | (Optimized, Comb_Pair _, Micheline.Seq (_, r)) -> (* Optimized case n > 4 *) Micheline.Seq (loc, l :: r) | ( Optimized, Comb_Pair (Comb_Pair _), Prim (_, D_Pair, [x2; Prim (_, D_Pair, [x3; x4], [])], []) ) -> (* Optimized case n = 4 *) Micheline.Seq (loc, [l; x2; x3; x4]) | (Readable, Comb_Pair _, Prim (_, D_Pair, xs, [])) -> (* Readable case n > 2 *) Prim (loc, D_Pair, l :: xs, []) | _ -> (* The remaining cases are: - Optimized n = 2, - Optimized n = 3, and - Readable n = 2, - Optimized_legacy, any n *) Prim (loc, D_Pair, [l; r], []) in (res, ctxt) let unparse_union ~loc unparse_l unparse_r ctxt = function | L l -> unparse_l ctxt l >|=? fun (l, ctxt) -> (Prim (loc, D_Left, [l], []), ctxt) | R r -> unparse_r ctxt r >|=? fun (r, ctxt) -> (Prim (loc, D_Right, [r], []), ctxt) let unparse_option ~loc unparse_v ctxt = function | Some v -> unparse_v ctxt v >|=? fun (v, ctxt) -> (Prim (loc, D_Some, [v], []), ctxt) | None -> return (Prim (loc, D_None, [], []), ctxt) (* -- Unparsing data of comparable types -- *) let comb_witness2 : type t tc. (t, tc) ty -> (t, unit -> unit -> unit) comb_witness = function | Pair_t (_, Pair_t _, _, _) -> Comb_Pair (Comb_Pair Comb_Any) | Pair_t _ -> Comb_Pair Comb_Any | _ -> Comb_Any let[@coq_axiom_with_reason "gadt"] rec unparse_comparable_data : type a loc. loc:loc -> context -> unparsing_mode -> a comparable_ty -> a -> (loc Script.michelson_node * context) tzresult Lwt.t = fun ~loc ctxt mode ty a -> (* No need for stack_depth here. Unlike [unparse_data], [unparse_comparable_data] doesn't call [unparse_code]. The stack depth is bounded by the type depth, currently bounded by 1000 (michelson_maximum_type_size). *) Gas.consume ctxt Unparse_costs.unparse_data_cycle (* We could have a smaller cost but let's keep it consistent with [unparse_data] for now. *) >>?= fun ctxt -> match (ty, a) with | (Unit_t, v) -> Lwt.return @@ unparse_unit ~loc ctxt v | (Int_t, v) -> Lwt.return @@ unparse_int ~loc ctxt v | (Nat_t, v) -> Lwt.return @@ unparse_nat ~loc ctxt v | (String_t, s) -> Lwt.return @@ unparse_string ~loc ctxt s | (Bytes_t, s) -> Lwt.return @@ unparse_bytes ~loc ctxt s | (Bool_t, b) -> Lwt.return @@ unparse_bool ~loc ctxt b | (Timestamp_t, t) -> Lwt.return @@ unparse_timestamp ~loc ctxt mode t | (Address_t, address) -> Lwt.return @@ unparse_address ~loc ctxt mode address | (Tx_rollup_l2_address_t, address) -> Lwt.return @@ unparse_tx_rollup_l2_address ~loc ctxt mode address | (Signature_t, s) -> Lwt.return @@ unparse_signature ~loc ctxt mode s | (Mutez_t, v) -> Lwt.return @@ unparse_mutez ~loc ctxt v | (Key_t, k) -> Lwt.return @@ unparse_key ~loc ctxt mode k | (Key_hash_t, k) -> Lwt.return @@ unparse_key_hash ~loc ctxt mode k | (Chain_id_t, chain_id) -> Lwt.return @@ unparse_chain_id ~loc ctxt mode chain_id | (Pair_t (tl, tr, _, YesYes), pair) -> let r_witness = comb_witness2 tr in let unparse_l ctxt v = unparse_comparable_data ~loc ctxt mode tl v in let unparse_r ctxt v = unparse_comparable_data ~loc ctxt mode tr v in unparse_pair ~loc unparse_l unparse_r ctxt mode r_witness pair | (Union_t (tl, tr, _, YesYes), v) -> let unparse_l ctxt v = unparse_comparable_data ~loc ctxt mode tl v in let unparse_r ctxt v = unparse_comparable_data ~loc ctxt mode tr v in unparse_union ~loc unparse_l unparse_r ctxt v | (Option_t (t, _, Yes), v) -> let unparse_v ctxt v = unparse_comparable_data ~loc ctxt mode t v in unparse_option ~loc unparse_v ctxt v | (Never_t, _) -> . let pack_node unparsed ctxt = Gas.consume ctxt (Script.strip_locations_cost unparsed) >>? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn expr_encoding (Micheline.strip_locations unparsed) in Gas.consume ctxt (Script.serialized_cost bytes) >|? fun ctxt -> let bytes = Bytes.cat (Bytes.of_string "\005") bytes in (bytes, ctxt) let pack_comparable_data ctxt ty data ~mode = unparse_comparable_data ~loc:() ctxt mode ty data >>=? fun (unparsed, ctxt) -> Lwt.return @@ pack_node unparsed ctxt let hash_bytes ctxt bytes = Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.blake2b bytes) >|? fun ctxt -> (Script_expr_hash.(hash_bytes [bytes]), ctxt) let hash_comparable_data ctxt ty data = pack_comparable_data ctxt ty data ~mode:Optimized_legacy >>=? fun (bytes, ctxt) -> Lwt.return @@ hash_bytes ctxt bytes (* ---- Tickets ------------------------------------------------------------ *) (* All comparable types are dupable, this function exists only to not forget checking this property when adding new types. *) let check_dupable_comparable_ty : type a. a comparable_ty -> unit = function | Unit_t | Never_t | Int_t | Nat_t | Signature_t | String_t | Bytes_t | Mutez_t | Bool_t | Key_hash_t | Key_t | Timestamp_t | Chain_id_t | Address_t | Tx_rollup_l2_address_t | Pair_t _ | Union_t _ | Option_t _ -> () let check_dupable_ty ctxt loc ty = let rec aux : type a ac. location -> (a, ac) ty -> (unit, error) Gas_monad.t = fun loc ty -> let open Gas_monad.Syntax in let* () = Gas_monad.consume_gas Typecheck_costs.check_dupable_cycle in match ty with | Unit_t -> return_unit | Int_t -> return_unit | Nat_t -> return_unit | Signature_t -> return_unit | String_t -> return_unit | Bytes_t -> return_unit | Mutez_t -> return_unit | Key_hash_t -> return_unit | Key_t -> return_unit | Timestamp_t -> return_unit | Address_t -> return_unit | Tx_rollup_l2_address_t -> return_unit | Bool_t -> return_unit | Contract_t _ -> return_unit | Operation_t -> return_unit | Chain_id_t -> return_unit | Never_t -> return_unit | Bls12_381_g1_t -> return_unit | Bls12_381_g2_t -> return_unit | Bls12_381_fr_t -> return_unit | Sapling_state_t _ -> return_unit | Sapling_transaction_t _ -> return_unit | Sapling_transaction_deprecated_t _ -> return_unit | Chest_t -> return_unit | Chest_key_t -> return_unit | Ticket_t _ -> fail @@ Unexpected_ticket loc | Pair_t (ty_a, ty_b, _, _) -> let* () = aux loc ty_a in aux loc ty_b | Union_t (ty_a, ty_b, _, _) -> let* () = aux loc ty_a in aux loc ty_b | Lambda_t (_, _, _) -> (* Lambda are dupable as long as: - they don't contain non-dupable values, e.g. in `PUSH` (mostly non-dupable values should probably be considered forged) - they are not the result of a partial application on a non-dupable value. `APPLY` rejects non-packable types (because of `PUSH`). Hence non-dupable should imply non-packable. *) return_unit | Option_t (ty, _, _) -> aux loc ty | List_t (ty, _) -> aux loc ty | Set_t (key_ty, _) -> let () = check_dupable_comparable_ty key_ty in return_unit | Map_t (key_ty, val_ty, _) -> let () = check_dupable_comparable_ty key_ty in aux loc val_ty | Big_map_t (key_ty, val_ty, _) -> let () = check_dupable_comparable_ty key_ty in aux loc val_ty in let gas = aux loc ty in Gas_monad.run ctxt gas >>? fun (res, ctxt) -> match res with Ok () -> ok ctxt | Error e -> error e let type_metadata_eq : type error_trace. error_details:(_, error_trace) error_details -> 'a ty_metadata -> 'b ty_metadata -> (unit, error_trace) result = fun ~error_details {size = size_a} {size = size_b} -> Type_size.check_eq ~error_details size_a size_b let default_ty_eq_error loc ty1 ty2 = let ty1 = serialize_ty_for_error ty1 in let ty2 = serialize_ty_for_error ty2 in Inconsistent_types (loc, ty1, ty2) let memo_size_eq : type error_trace. error_details:(_, error_trace) error_details -> Sapling.Memo_size.t -> Sapling.Memo_size.t -> (unit, error_trace) result = fun ~error_details ms1 ms2 -> if Sapling.Memo_size.equal ms1 ms2 then Result.return_unit else Error (match error_details with | Fast -> Inconsistent_types_fast | Informative _ -> trace_of_error @@ Inconsistent_memo_sizes (ms1, ms2)) (* Check that two types are equal. The result is an equality witness between the types of the two inputs within the gas monad (for gas consumption). *) let rec ty_eq : type a ac b bc error_trace. error_details:(Script.location, error_trace) error_details -> (a, ac) ty -> (b, bc) ty -> (((a, ac) ty, (b, bc) ty) eq, error_trace) Gas_monad.t = fun ~error_details ty1 ty2 -> let type_metadata_eq meta1 meta2 = Gas_monad.of_result (type_metadata_eq ~error_details meta1 meta2) |> Gas_monad.record_trace_eval ~error_details (fun loc -> default_ty_eq_error loc ty1 ty2) in let memo_size_eq ms1 ms2 = Gas_monad.of_result (memo_size_eq ~error_details ms1 ms2) in let rec help : type ta tac tb tbc. (ta, tac) ty -> (tb, tbc) ty -> (((ta, tac) ty, (tb, tbc) ty) eq, error_trace) Gas_monad.t = fun ty1 ty2 -> help0 ty1 ty2 |> Gas_monad.record_trace_eval ~error_details (fun loc -> default_ty_eq_error loc ty1 ty2) and help0 : type ta tac tb tbc. (ta, tac) ty -> (tb, tbc) ty -> (((ta, tac) ty, (tb, tbc) ty) eq, error_trace) Gas_monad.t = fun ty1 ty2 -> let open Gas_monad.Syntax in let* () = Gas_monad.consume_gas Typecheck_costs.merge_cycle in let not_equal () = Gas_monad.of_result @@ Error (match error_details with | Fast -> (Inconsistent_types_fast : error_trace) | Informative loc -> trace_of_error @@ default_ty_eq_error loc ty1 ty2) in match (ty1, ty2) with | (Unit_t, Unit_t) -> return (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Unit_t, _) -> not_equal () | (Int_t, Int_t) -> return Eq | (Int_t, _) -> not_equal () | (Nat_t, Nat_t) -> return Eq | (Nat_t, _) -> not_equal () | (Key_t, Key_t) -> return Eq | (Key_t, _) -> not_equal () | (Key_hash_t, Key_hash_t) -> return Eq | (Key_hash_t, _) -> not_equal () | (String_t, String_t) -> return Eq | (String_t, _) -> not_equal () | (Bytes_t, Bytes_t) -> return Eq | (Bytes_t, _) -> not_equal () | (Signature_t, Signature_t) -> return Eq | (Signature_t, _) -> not_equal () | (Mutez_t, Mutez_t) -> return Eq | (Mutez_t, _) -> not_equal () | (Timestamp_t, Timestamp_t) -> return Eq | (Timestamp_t, _) -> not_equal () | (Address_t, Address_t) -> return Eq | (Address_t, _) -> not_equal () | (Tx_rollup_l2_address_t, Tx_rollup_l2_address_t) -> return Eq | (Tx_rollup_l2_address_t, _) -> not_equal () | (Bool_t, Bool_t) -> return Eq | (Bool_t, _) -> not_equal () | (Chain_id_t, Chain_id_t) -> return Eq | (Chain_id_t, _) -> not_equal () | (Never_t, Never_t) -> return Eq | (Never_t, _) -> not_equal () | (Operation_t, Operation_t) -> return Eq | (Operation_t, _) -> not_equal () | (Bls12_381_g1_t, Bls12_381_g1_t) -> return Eq | (Bls12_381_g1_t, _) -> not_equal () | (Bls12_381_g2_t, Bls12_381_g2_t) -> return Eq | (Bls12_381_g2_t, _) -> not_equal () | (Bls12_381_fr_t, Bls12_381_fr_t) -> return Eq | (Bls12_381_fr_t, _) -> not_equal () | (Map_t (tal, tar, meta1), Map_t (tbl, tbr, meta2)) -> let* () = type_metadata_eq meta1 meta2 in let* Eq = help tar tbr in let+ Eq = ty_eq ~error_details tal tbl in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Map_t _, _) -> not_equal () | (Big_map_t (tal, tar, meta1), Big_map_t (tbl, tbr, meta2)) -> let* () = type_metadata_eq meta1 meta2 in let* Eq = help tar tbr in let+ Eq = ty_eq ~error_details tal tbl in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Big_map_t _, _) -> not_equal () | (Set_t (ea, meta1), Set_t (eb, meta2)) -> let* () = type_metadata_eq meta1 meta2 in let+ Eq = ty_eq ~error_details ea eb in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Set_t _, _) -> not_equal () | (Ticket_t (ea, meta1), Ticket_t (eb, meta2)) -> let* () = type_metadata_eq meta1 meta2 in let+ Eq = ty_eq ~error_details ea eb in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Ticket_t _, _) -> not_equal () | (Pair_t (tal, tar, meta1, cmp1), Pair_t (tbl, tbr, meta2, cmp2)) -> let* () = type_metadata_eq meta1 meta2 in let* Eq = help tal tbl in let+ Eq = help tar tbr in let Eq = Dependent_bool.merge_dand cmp1 cmp2 in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Pair_t _, _) -> not_equal () | (Union_t (tal, tar, meta1, cmp1), Union_t (tbl, tbr, meta2, cmp2)) -> let* () = type_metadata_eq meta1 meta2 in let* Eq = help tal tbl in let+ Eq = help tar tbr in let Eq = Dependent_bool.merge_dand cmp1 cmp2 in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Union_t _, _) -> not_equal () | (Lambda_t (tal, tar, meta1), Lambda_t (tbl, tbr, meta2)) -> let* () = type_metadata_eq meta1 meta2 in let* Eq = help tal tbl in let+ Eq = help tar tbr in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Lambda_t _, _) -> not_equal () | (Contract_t (tal, meta1), Contract_t (tbl, meta2)) -> let* () = type_metadata_eq meta1 meta2 in let+ Eq = help tal tbl in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Contract_t _, _) -> not_equal () | (Option_t (tva, meta1, _), Option_t (tvb, meta2, _)) -> let* () = type_metadata_eq meta1 meta2 in let+ Eq = help tva tvb in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (Option_t _, _) -> not_equal () | (List_t (tva, meta1), List_t (tvb, meta2)) -> let* () = type_metadata_eq meta1 meta2 in let+ Eq = help tva tvb in (Eq : ((ta, tac) ty, (tb, tbc) ty) eq) | (List_t _, _) -> not_equal () | (Sapling_state_t ms1, Sapling_state_t ms2) -> let+ () = memo_size_eq ms1 ms2 in Eq | (Sapling_state_t _, _) -> not_equal () | (Sapling_transaction_t ms1, Sapling_transaction_t ms2) -> let+ () = memo_size_eq ms1 ms2 in Eq | (Sapling_transaction_t _, _) -> not_equal () | ( Sapling_transaction_deprecated_t ms1, Sapling_transaction_deprecated_t ms2 ) -> let+ () = memo_size_eq ms1 ms2 in Eq | (Sapling_transaction_deprecated_t _, _) -> not_equal () | (Chest_t, Chest_t) -> return Eq | (Chest_t, _) -> not_equal () | (Chest_key_t, Chest_key_t) -> return Eq | (Chest_key_t, _) -> not_equal () in help ty1 ty2 [@@coq_axiom_with_reason "non-top-level mutual recursion"] (* Same as ty_eq but for stacks. A single error monad is used here because there is no need to recover from stack merging errors. *) let rec stack_eq : type ta tb ts tu. Script.location -> context -> int -> (ta, ts) stack_ty -> (tb, tu) stack_ty -> (((ta, ts) stack_ty, (tb, tu) stack_ty) eq * context) tzresult = fun loc ctxt lvl stack1 stack2 -> match (stack1, stack2) with | (Bot_t, Bot_t) -> ok (Eq, ctxt) | (Item_t (ty1, rest1), Item_t (ty2, rest2)) -> Gas_monad.run ctxt @@ ty_eq ~error_details:(Informative loc) ty1 ty2 |> record_trace (Bad_stack_item lvl) >>? fun (eq, ctxt) -> eq >>? fun Eq -> stack_eq loc ctxt (lvl + 1) rest1 rest2 >|? fun (Eq, ctxt) -> ((Eq : ((ta, ts) stack_ty, (tb, tu) stack_ty) eq), ctxt) | (_, _) -> error Bad_stack_length (* ---- Type checker results -------------------------------------------------*) type ('a, 's) judgement = | Typed : ('a, 's, 'b, 'u) descr -> ('a, 's) judgement | Failed : { descr : 'b 'u. ('b, 'u) stack_ty -> ('a, 's, 'b, 'u) descr; } -> ('a, 's) judgement (* ---- Type checker (Untyped expressions -> Typed IR) ----------------------*) type ('a, 's, 'b, 'u, 'c, 'v) branch = { branch : 'r 'f. ('a, 's, 'r, 'f) descr -> ('b, 'u, 'r, 'f) descr -> ('c, 'v, 'r, 'f) descr; } [@@unboxed] let merge_branches : type a s b u c v. context -> Script.location -> (a, s) judgement -> (b, u) judgement -> (a, s, b, u, c, v) branch -> ((c, v) judgement * context) tzresult = fun ctxt loc btr bfr {branch} -> match (btr, bfr) with | (Typed ({aft = aftbt; _} as dbt), Typed ({aft = aftbf; _} as dbf)) -> let unmatched_branches () = let aftbt = serialize_stack_for_error ctxt aftbt in let aftbf = serialize_stack_for_error ctxt aftbf in Unmatched_branches (loc, aftbt, aftbf) in record_trace_eval unmatched_branches ( stack_eq loc ctxt 1 aftbt aftbf >|? fun (Eq, ctxt) -> (Typed (branch dbt dbf), ctxt) ) | (Failed {descr = descrt}, Failed {descr = descrf}) -> let descr ret = branch (descrt ret) (descrf ret) in ok (Failed {descr}, ctxt) | (Typed dbt, Failed {descr = descrf}) -> ok (Typed (branch dbt (descrf dbt.aft)), ctxt) | (Failed {descr = descrt}, Typed dbf) -> ok (Typed (branch (descrt dbf.aft) dbf), ctxt) let parse_memo_size (n : (location, _) Micheline.node) : Sapling.Memo_size.t tzresult = match n with | Int (_, z) -> ( match Sapling.Memo_size.parse_z z with | Ok _ as ok_memo_size -> ok_memo_size [@coq_cast] | Error msg -> error @@ Invalid_syntactic_constant (location n, strip_locations n, msg)) | _ -> error @@ Invalid_kind (location n, [Int_kind], kind n) type ex_comparable_ty = | Ex_comparable_ty : 'a comparable_ty -> ex_comparable_ty let[@coq_struct "ty"] rec parse_comparable_ty : stack_depth:int -> context -> Script.node -> (ex_comparable_ty * context) tzresult = fun ~stack_depth ctxt ty -> Gas.consume ctxt Typecheck_costs.parse_type_cycle >>? fun ctxt -> if Compare.Int.(stack_depth > 10000) then error Typechecking_too_many_recursive_calls else match ty with | Prim (loc, T_unit, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty unit_t, ctxt) | Prim (loc, T_never, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty never_t, ctxt) | Prim (loc, T_int, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty int_t, ctxt) | Prim (loc, T_nat, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty nat_t, ctxt) | Prim (loc, T_signature, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty signature_t, ctxt) | Prim (loc, T_string, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty string_t, ctxt) | Prim (loc, T_bytes, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty bytes_t, ctxt) | Prim (loc, T_mutez, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty mutez_t, ctxt) | Prim (loc, T_bool, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty bool_t, ctxt) | Prim (loc, T_key_hash, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty key_hash_t, ctxt) | Prim (loc, T_key, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty key_t, ctxt) | Prim (loc, T_timestamp, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty timestamp_t, ctxt) | Prim (loc, T_chain_id, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty chain_id_t, ctxt) | Prim (loc, T_address, [], annot) -> check_type_annot loc annot >|? fun () -> (Ex_comparable_ty address_t, ctxt) | Prim (loc, T_tx_rollup_l2_address, [], annot) -> if Constants.tx_rollup_enable ctxt then check_type_annot loc annot >|? fun () -> (Ex_comparable_ty tx_rollup_l2_address_t, ctxt) else error @@ Tx_rollup_addresses_disabled loc | Prim ( loc, (( T_unit | T_never | T_int | T_nat | T_string | T_bytes | T_mutez | T_bool | T_key_hash | T_timestamp | T_address | T_chain_id | T_signature | T_key ) as prim), l, _ ) -> error (Invalid_arity (loc, prim, 0, List.length l)) | Prim (loc, T_pair, left :: right, annot) -> check_type_annot loc annot >>? fun () -> remove_field_annot left >>? fun left -> (match right with | [right] -> remove_field_annot right | right -> (* Unfold [pair t1 ... tn] as [pair t1 (... (pair tn-1 tn))] *) ok (Prim (loc, T_pair, right, []))) >>? fun right -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt right >>? fun (Ex_comparable_ty right, ctxt) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt left >>? fun (Ex_comparable_ty left, ctxt) -> comparable_pair_t loc left right >|? fun ty -> (Ex_comparable_ty ty, ctxt) | Prim (loc, T_or, [left; right], annot) -> check_type_annot loc annot >>? fun () -> remove_field_annot left >>? fun left -> remove_field_annot right >>? fun right -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt right >>? fun (Ex_comparable_ty right, ctxt) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt left >>? fun (Ex_comparable_ty left, ctxt) -> comparable_union_t loc left right >|? fun ty -> (Ex_comparable_ty ty, ctxt) | Prim (loc, ((T_pair | T_or) as prim), l, _) -> error (Invalid_arity (loc, prim, 2, List.length l)) | Prim (loc, T_option, [t], annot) -> check_type_annot loc annot >>? fun () -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt t >>? fun (Ex_comparable_ty t, ctxt) -> comparable_option_t loc t >|? fun ty -> (Ex_comparable_ty ty, ctxt) | Prim (loc, T_option, l, _) -> error (Invalid_arity (loc, T_option, 1, List.length l)) | Prim ( loc, (T_set | T_map | T_list | T_lambda | T_contract | T_operation), _, _ ) -> error (Comparable_type_expected (loc, Micheline.strip_locations ty)) | expr -> error @@ unexpected expr [] Type_namespace [ T_unit; T_never; T_int; T_nat; T_string; T_bytes; T_mutez; T_bool; T_key_hash; T_timestamp; T_address; T_pair; T_or; T_option; T_chain_id; T_signature; T_key; ] type ex_ty = Ex_ty : ('a, _) ty -> ex_ty type ex_parameter_ty_and_entrypoints_node = | Ex_parameter_ty_and_entrypoints_node : { arg_type : ('a, _) ty; entrypoints : 'a entrypoints_node; } -> ex_parameter_ty_and_entrypoints_node (** [parse_ty] can be used to parse regular types as well as parameter types together with their entrypoints. In the first case, use [~ret:Don't_parse_entrypoints], [parse_ty] will return an [ex_ty]. In the second case, use [~ret:Parse_entrypoints], [parse_ty] will return an [ex_parameter_ty_and_entrypoints_node]. *) type ('ret, 'name) parse_ty_ret = | Don't_parse_entrypoints : (ex_ty, unit) parse_ty_ret | Parse_entrypoints : (ex_parameter_ty_and_entrypoints_node, Entrypoint.t option) parse_ty_ret let[@coq_axiom_with_reason "complex mutually recursive definition"] rec parse_ty : type ret name. context -> stack_depth:int -> legacy:bool -> allow_lazy_storage:bool -> allow_operation:bool -> allow_contract:bool -> allow_ticket:bool -> ret:(ret, name) parse_ty_ret -> Script.node -> (ret * context) tzresult = fun ctxt ~stack_depth ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ~ret node -> Gas.consume ctxt Typecheck_costs.parse_type_cycle >>? fun ctxt -> if Compare.Int.(stack_depth > 10000) then error Typechecking_too_many_recursive_calls else (match ret with | Don't_parse_entrypoints -> ok (node, (() : name)) | Parse_entrypoints -> extract_entrypoint_annot node) >>? fun (node, name) -> let return ctxt ty : ret * context = match ret with | Don't_parse_entrypoints -> (Ex_ty ty, ctxt) | Parse_entrypoints -> let at_node = Option.map (fun name -> {name; original_type_expr = node}) name in ( Ex_parameter_ty_and_entrypoints_node { arg_type = ty; entrypoints = {at_node; nested = Entrypoints_None}; }, ctxt ) in match node with | Prim (loc, T_unit, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt unit_t | Prim (loc, T_int, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt int_t | Prim (loc, T_nat, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt nat_t | Prim (loc, T_string, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt string_t | Prim (loc, T_bytes, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt bytes_t | Prim (loc, T_mutez, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt mutez_t | Prim (loc, T_bool, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt bool_t | Prim (loc, T_key, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt key_t | Prim (loc, T_key_hash, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt key_hash_t | Prim (loc, T_chest_key, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt chest_key_t | Prim (loc, T_chest, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt chest_t | Prim (loc, T_timestamp, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt timestamp_t | Prim (loc, T_address, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt address_t | Prim (loc, T_tx_rollup_l2_address, [], annot) -> if Constants.tx_rollup_enable ctxt then check_type_annot loc annot >|? fun () -> return ctxt tx_rollup_l2_address_t else error @@ Tx_rollup_addresses_disabled loc | Prim (loc, T_signature, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt signature_t | Prim (loc, T_operation, [], annot) -> if allow_operation then check_type_annot loc annot >|? fun () -> return ctxt operation_t else error (Unexpected_operation loc) | Prim (loc, T_chain_id, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt chain_id_t | Prim (loc, T_never, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt never_t | Prim (loc, T_bls12_381_g1, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt bls12_381_g1_t | Prim (loc, T_bls12_381_g2, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt bls12_381_g2_t | Prim (loc, T_bls12_381_fr, [], annot) -> check_type_annot loc annot >|? fun () -> return ctxt bls12_381_fr_t | Prim (loc, T_contract, [utl], annot) -> if allow_contract then parse_passable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy utl ~ret:Don't_parse_entrypoints >>? fun (Ex_ty tl, ctxt) -> check_type_annot loc annot >>? fun () -> contract_t loc tl >|? fun ty -> return ctxt ty else error (Unexpected_contract loc) | Prim (loc, T_pair, utl :: utr, annot) -> remove_field_annot utl >>? fun utl -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ~ret:Don't_parse_entrypoints utl >>? fun (Ex_ty tl, ctxt) -> (match utr with | [utr] -> remove_field_annot utr | utr -> (* Unfold [pair t1 ... tn] as [pair t1 (... (pair tn-1 tn))] *) ok (Prim (loc, T_pair, utr, []))) >>? fun utr -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ~ret:Don't_parse_entrypoints utr >>? fun (Ex_ty tr, ctxt) -> check_type_annot loc annot >>? fun () -> pair_t loc tl tr >|? fun (Ty_ex_c ty) -> return ctxt ty | Prim (loc, T_or, [utl; utr], annot) -> ( (match ret with | Don't_parse_entrypoints -> remove_field_annot utl >>? fun utl -> remove_field_annot utr >|? fun utr -> (utl, utr) | Parse_entrypoints -> ok (utl, utr)) >>? fun (utl, utr) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ~ret utl >>? fun (parsed_l, ctxt) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ~ret utr >>? fun (parsed_r, ctxt) -> check_type_annot loc annot >>? fun () -> match ret with | Don't_parse_entrypoints -> let (Ex_ty tl) = parsed_l in let (Ex_ty tr) = parsed_r in union_t loc tl tr >|? fun (Ty_ex_c ty) -> ((Ex_ty ty : ret), ctxt) | Parse_entrypoints -> let (Ex_parameter_ty_and_entrypoints_node {arg_type = tl; entrypoints = left}) = parsed_l in let (Ex_parameter_ty_and_entrypoints_node {arg_type = tr; entrypoints = right}) = parsed_r in union_t loc tl tr >|? fun (Ty_ex_c arg_type) -> let entrypoints = let at_node = Option.map (fun name -> {name; original_type_expr = node}) name in {at_node; nested = Entrypoints_Union {left; right}} in (Ex_parameter_ty_and_entrypoints_node {arg_type; entrypoints}, ctxt) ) | Prim (loc, T_lambda, [uta; utr], annot) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy uta >>? fun (Ex_ty ta, ctxt) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy utr >>? fun (Ex_ty tr, ctxt) -> check_type_annot loc annot >>? fun () -> lambda_t loc ta tr >|? fun ty -> return ctxt ty | Prim (loc, T_option, [ut], annot) -> (if legacy then (* legacy semantics with (broken) field annotations *) remove_field_annot ut >>? fun ut -> check_composed_type_annot loc annot >>? fun () -> ok ut else check_type_annot loc annot >>? fun () -> ok ut) >>? fun ut -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ~ret:Don't_parse_entrypoints ut >>? fun (Ex_ty t, ctxt) -> option_t loc t >|? fun ty -> return ctxt ty | Prim (loc, T_list, [ut], annot) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ~ret:Don't_parse_entrypoints ut >>? fun (Ex_ty t, ctxt) -> check_type_annot loc annot >>? fun () -> list_t loc t >|? fun ty -> return ctxt ty | Prim (loc, T_ticket, [ut], annot) -> if allow_ticket then parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt ut >>? fun (Ex_comparable_ty t, ctxt) -> check_type_annot loc annot >>? fun () -> ticket_t loc t >|? fun ty -> return ctxt ty else error (Unexpected_ticket loc) | Prim (loc, T_set, [ut], annot) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt ut >>? fun (Ex_comparable_ty t, ctxt) -> check_type_annot loc annot >>? fun () -> set_t loc t >|? fun ty -> return ctxt ty | Prim (loc, T_map, [uta; utr], annot) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt uta >>? fun (Ex_comparable_ty ta, ctxt) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ~ret:Don't_parse_entrypoints utr >>? fun (Ex_ty tr, ctxt) -> check_type_annot loc annot >>? fun () -> map_t loc ta tr >|? fun ty -> return ctxt ty | Prim (loc, T_sapling_transaction, [memo_size], annot) -> check_type_annot loc annot >>? fun () -> parse_memo_size memo_size >|? fun memo_size -> return ctxt (sapling_transaction_t ~memo_size) | Prim (loc, T_sapling_transaction_deprecated, [memo_size], annot) -> if legacy then check_type_annot loc annot >>? fun () -> parse_memo_size memo_size >|? fun memo_size -> return ctxt (sapling_transaction_deprecated_t ~memo_size) else error (Deprecated_instruction T_sapling_transaction_deprecated) (* /!\ When adding new lazy storage kinds, be careful to use [when allow_lazy_storage] /!\ Lazy storage should not be packable to avoid stealing a lazy storage from another contract with `PUSH t id` or `UNPACK`. *) | Prim (loc, T_big_map, args, annot) when allow_lazy_storage -> parse_big_map_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy loc args annot >|? fun (Ex_ty ty, ctxt) -> return ctxt ty | Prim (loc, T_sapling_state, [memo_size], annot) when allow_lazy_storage -> check_type_annot loc annot >>? fun () -> parse_memo_size memo_size >|? fun memo_size -> return ctxt (sapling_state_t ~memo_size) | Prim (loc, (T_big_map | T_sapling_state), _, _) -> error (Unexpected_lazy_storage loc) | Prim ( loc, (( T_unit | T_signature | T_int | T_nat | T_string | T_bytes | T_mutez | T_bool | T_key | T_key_hash | T_timestamp | T_address | T_tx_rollup_l2_address | T_chain_id | T_operation | T_never ) as prim), l, _ ) -> error (Invalid_arity (loc, prim, 0, List.length l)) | Prim ( loc, ((T_set | T_list | T_option | T_contract | T_ticket) as prim), l, _ ) -> error (Invalid_arity (loc, prim, 1, List.length l)) | Prim (loc, ((T_pair | T_or | T_map | T_lambda) as prim), l, _) -> error (Invalid_arity (loc, prim, 2, List.length l)) | expr -> error @@ unexpected expr [] Type_namespace [ T_pair; T_or; T_set; T_map; T_list; T_option; T_lambda; T_unit; T_signature; T_contract; T_int; T_nat; T_operation; T_string; T_bytes; T_mutez; T_bool; T_key; T_key_hash; T_timestamp; T_chain_id; T_never; T_bls12_381_g1; T_bls12_381_g2; T_bls12_381_fr; T_ticket; T_tx_rollup_l2_address; ] and[@coq_axiom_with_reason "complex mutually recursive definition"] parse_passable_ty : type ret name. context -> stack_depth:int -> legacy:bool -> ret:(ret, name) parse_ty_ret -> Script.node -> (ret * context) tzresult = fun ctxt ~stack_depth ~legacy -> (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:true ~allow_operation:false ~allow_contract:true ~allow_ticket:true and[@coq_axiom_with_reason "complex mutually recursive definition"] parse_any_ty : context -> stack_depth:int -> legacy:bool -> Script.node -> (ex_ty * context) tzresult = fun ctxt ~stack_depth ~legacy -> (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:true ~allow_operation:true ~allow_contract:true ~allow_ticket:true ~ret:Don't_parse_entrypoints and[@coq_axiom_with_reason "complex mutually recursive definition"] parse_big_map_ty ctxt ~stack_depth ~legacy big_map_loc args map_annot = Gas.consume ctxt Typecheck_costs.parse_type_cycle >>? fun ctxt -> match args with | [key_ty; value_ty] -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt key_ty >>? fun (Ex_comparable_ty key_ty, ctxt) -> parse_big_map_value_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy value_ty >>? fun (Ex_ty value_ty, ctxt) -> check_type_annot big_map_loc map_annot >>? fun () -> big_map_t big_map_loc key_ty value_ty >|? fun big_map_ty -> (Ex_ty big_map_ty, ctxt) | args -> error @@ Invalid_arity (big_map_loc, T_big_map, 2, List.length args) and[@coq_axiom_with_reason "complex mutually recursive definition"] parse_big_map_value_ty ctxt ~stack_depth ~legacy value_ty = (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:false ~allow_operation:false ~allow_contract:legacy ~allow_ticket:true ~ret:Don't_parse_entrypoints value_ty let parse_packable_ty ctxt ~stack_depth ~legacy node = (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:false ~allow_operation:false ~allow_contract: legacy (* type contract is forbidden in UNPACK because of https://gitlab.com/tezos/tezos/-/issues/301 *) ~allow_ticket:false ~ret:Don't_parse_entrypoints node let parse_view_input_ty ctxt ~stack_depth ~legacy node = (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:false ~allow_operation:false ~allow_contract:true ~allow_ticket:false ~ret:Don't_parse_entrypoints node let parse_view_output_ty ctxt ~stack_depth ~legacy node = (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:false ~allow_operation:false ~allow_contract:true ~allow_ticket:false ~ret:Don't_parse_entrypoints node let parse_normal_storage_ty ctxt ~stack_depth ~legacy node = (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:true ~allow_operation:false ~allow_contract:legacy ~allow_ticket:true ~ret:Don't_parse_entrypoints node let parse_storage_ty : context -> stack_depth:int -> legacy:bool -> Script.node -> (ex_ty * context) tzresult = fun ctxt ~stack_depth ~legacy node -> match node with | Prim ( loc, T_pair, [Prim (big_map_loc, T_big_map, args, map_annot); remaining_storage], storage_annot ) when legacy -> ( match storage_annot with | [] -> (parse_normal_storage_ty [@tailcall]) ctxt ~stack_depth ~legacy node | [single] when Compare.Int.(String.length single > 0) && Compare.Char.(single.[0] = '%') -> (parse_normal_storage_ty [@tailcall]) ctxt ~stack_depth ~legacy node | _ -> (* legacy semantics of big maps used the wrong annotation parser *) Gas.consume ctxt Typecheck_costs.parse_type_cycle >>? fun ctxt -> parse_big_map_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy big_map_loc args map_annot >>? fun (Ex_ty big_map_ty, ctxt) -> parse_normal_storage_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy remaining_storage >>? fun (Ex_ty remaining_storage, ctxt) -> check_composed_type_annot loc storage_annot >>? fun () -> pair_t loc big_map_ty remaining_storage >|? fun (Ty_ex_c ty) -> (Ex_ty ty, ctxt)) | _ -> (parse_normal_storage_ty [@tailcall]) ctxt ~stack_depth ~legacy node let check_packable ~legacy loc root = let rec check : type t tc. (t, tc) ty -> unit tzresult = function (* /!\ When adding new lazy storage kinds, be sure to return an error. /!\ Lazy storage should not be packable. *) | Big_map_t _ -> error (Unexpected_lazy_storage loc) | Sapling_state_t _ -> error (Unexpected_lazy_storage loc) | Operation_t -> error (Unexpected_operation loc) | Unit_t -> Result.return_unit | Int_t -> Result.return_unit | Nat_t -> Result.return_unit | Signature_t -> Result.return_unit | String_t -> Result.return_unit | Bytes_t -> Result.return_unit | Mutez_t -> Result.return_unit | Key_hash_t -> Result.return_unit | Key_t -> Result.return_unit | Timestamp_t -> Result.return_unit | Address_t -> Result.return_unit | Tx_rollup_l2_address_t -> Result.return_unit | Bool_t -> Result.return_unit | Chain_id_t -> Result.return_unit | Never_t -> Result.return_unit | Set_t (_, _) -> Result.return_unit | Ticket_t _ -> error (Unexpected_ticket loc) | Lambda_t (_, _, _) -> Result.return_unit | Bls12_381_g1_t -> Result.return_unit | Bls12_381_g2_t -> Result.return_unit | Bls12_381_fr_t -> Result.return_unit | Pair_t (l_ty, r_ty, _, _) -> check l_ty >>? fun () -> check r_ty | Union_t (l_ty, r_ty, _, _) -> check l_ty >>? fun () -> check r_ty | Option_t (v_ty, _, _) -> check v_ty | List_t (elt_ty, _) -> check elt_ty | Map_t (_, elt_ty, _) -> check elt_ty | Contract_t (_, _) when legacy -> Result.return_unit | Contract_t (_, _) -> error (Unexpected_contract loc) | Sapling_transaction_t _ -> ok () | Sapling_transaction_deprecated_t _ -> ok () | Chest_key_t -> Result.return_unit | Chest_t -> Result.return_unit in check root type toplevel = { code_field : Script.node; arg_type : Script.node; storage_type : Script.node; views : view_map; } type ('arg, 'storage) code = | Code : { code : (('arg, 'storage) pair, (operation boxed_list, 'storage) pair) lambda; arg_type : ('arg, _) ty; storage_type : ('storage, _) ty; views : view_map; entrypoints : 'arg entrypoints; code_size : Cache_memory_helpers.sint; } -> ('arg, 'storage) code type ex_script = Ex_script : ('a, 'c) Script_typed_ir.script -> ex_script type ex_code = Ex_code : ('a, 'c) code -> ex_code type 'storage typed_view = | Typed_view : { input_ty : ('input, _) ty; output_ty : ('output, _) ty; kinstr : ('input * 'storage, end_of_stack, 'output, end_of_stack) kinstr; original_code_expr : Script.node; } -> 'storage typed_view type 'storage typed_view_map = (Script_string.t, 'storage typed_view) map type (_, _) dig_proof_argument = | Dig_proof_argument : ('x, 'a * 's, 'a, 's, 'b, 't, 'c, 'u) stack_prefix_preservation_witness * ('x, _) ty * ('c, 'u) stack_ty -> ('b, 't) dig_proof_argument type (_, _, _) dug_proof_argument = | Dug_proof_argument : (('a, 's, 'x, 'a * 's, 'b, 't, 'c, 'u) stack_prefix_preservation_witness * ('c, 'u) stack_ty) -> ('b, 't, 'x) dug_proof_argument type (_, _) dipn_proof_argument = | Dipn_proof_argument : ('fa, 'fs, 'fb, 'fu, 'a, 's, 'b, 'u) stack_prefix_preservation_witness * context * ('fa, 'fs, 'fb, 'fu) descr * ('b, 'u) stack_ty -> ('a, 's) dipn_proof_argument type (_, _) dropn_proof_argument = | Dropn_proof_argument : ('fa, 'fs, 'fa, 'fs, 'a, 's, 'a, 's) stack_prefix_preservation_witness * ('fa, 'fs) stack_ty -> ('a, 's) dropn_proof_argument type 'before comb_proof_argument = | Comb_proof_argument : ('a * 's, 'b * 'u) comb_gadt_witness * ('b, 'u) stack_ty -> ('a * 's) comb_proof_argument type 'before uncomb_proof_argument = | Uncomb_proof_argument : ('a * 's, 'b * 'u) uncomb_gadt_witness * ('b, 'u) stack_ty -> ('a * 's) uncomb_proof_argument type 'before comb_get_proof_argument = | Comb_get_proof_argument : ('before, 'after) comb_get_gadt_witness * ('after, _) ty -> 'before comb_get_proof_argument type ('rest, 'before) comb_set_proof_argument = | Comb_set_proof_argument : ('rest, 'before, 'after) comb_set_gadt_witness * ('after, _) ty -> ('rest, 'before) comb_set_proof_argument type 'before dup_n_proof_argument = | Dup_n_proof_argument : ('before, 'a) dup_n_gadt_witness * ('a, _) ty -> 'before dup_n_proof_argument let rec make_dug_proof_argument : type a s x xc. location -> int -> (x, xc) ty -> (a, s) stack_ty -> (a, s, x) dug_proof_argument option = fun loc n x stk -> match (n, stk) with | (0, rest) -> Some (Dug_proof_argument (KRest, Item_t (x, rest))) | (n, Item_t (v, rest)) -> make_dug_proof_argument loc (n - 1) x rest |> Option.map @@ fun (Dug_proof_argument (n', aft')) -> let kinfo = {iloc = loc; kstack_ty = aft'} in Dug_proof_argument (KPrefix (kinfo, n'), Item_t (v, aft')) | (_, _) -> None let rec make_comb_get_proof_argument : type b bc. int -> (b, bc) ty -> b comb_get_proof_argument option = fun n ty -> match (n, ty) with | (0, value_ty) -> Some (Comb_get_proof_argument (Comb_get_zero, value_ty)) | (1, Pair_t (hd_ty, _, _annot, _)) -> Some (Comb_get_proof_argument (Comb_get_one, hd_ty)) | (n, Pair_t (_, tl_ty, _annot, _)) -> make_comb_get_proof_argument (n - 2) tl_ty |> Option.map @@ fun (Comb_get_proof_argument (comb_get_left_witness, ty')) -> Comb_get_proof_argument (Comb_get_plus_two comb_get_left_witness, ty') | _ -> None let rec make_comb_set_proof_argument : type value valuec before beforec a s. context -> (a, s) stack_ty -> location -> int -> (value, valuec) ty -> (before, beforec) ty -> (value, before) comb_set_proof_argument tzresult = fun ctxt stack_ty loc n value_ty ty -> match (n, ty) with | (0, _) -> ok @@ Comb_set_proof_argument (Comb_set_zero, value_ty) | (1, Pair_t (_hd_ty, tl_ty, _, _)) -> pair_t loc value_ty tl_ty >|? fun (Ty_ex_c after_ty) -> Comb_set_proof_argument (Comb_set_one, after_ty) | (n, Pair_t (hd_ty, tl_ty, _, _)) -> make_comb_set_proof_argument ctxt stack_ty loc (n - 2) value_ty tl_ty >>? fun (Comb_set_proof_argument (comb_set_left_witness, tl_ty')) -> pair_t loc hd_ty tl_ty' >|? fun (Ty_ex_c after_ty) -> Comb_set_proof_argument (Comb_set_plus_two comb_set_left_witness, after_ty) | _ -> let whole_stack = serialize_stack_for_error ctxt stack_ty in error (Bad_stack (loc, I_UPDATE, 2, whole_stack)) type 'a ex_ty_cstr = | Ex_ty_cstr : { ty : ('b, _) Script_typed_ir.ty; construct : 'b -> 'a; original_type_expr : Script.node; } -> 'a ex_ty_cstr let find_entrypoint (type full fullc error_context error_trace) ~(error_details : (error_context, error_trace) error_details) (full : (full, fullc) ty) (entrypoints : full entrypoints) entrypoint : (full ex_ty_cstr, error_trace) Gas_monad.t = let open Gas_monad.Syntax in let rec find_entrypoint : type t tc. (t, tc) ty -> t entrypoints_node -> Entrypoint.t -> (t ex_ty_cstr, unit) Gas_monad.t = fun ty entrypoints entrypoint -> let* () = Gas_monad.consume_gas Typecheck_costs.find_entrypoint_cycle in match (ty, entrypoints) with | (_, {at_node = Some {name; original_type_expr}; _}) when Entrypoint.(name = entrypoint) -> return (Ex_ty_cstr {ty; construct = (fun e -> e); original_type_expr}) | (Union_t (tl, tr, _, _), {nested = Entrypoints_Union {left; right}; _}) -> ( Gas_monad.bind_recover (find_entrypoint tl left entrypoint) @@ function | Ok (Ex_ty_cstr {ty; construct; original_type_expr}) -> return (Ex_ty_cstr { ty; construct = (fun e -> L (construct e)); original_type_expr; }) | Error () -> let+ (Ex_ty_cstr {ty; construct; original_type_expr}) = find_entrypoint tr right entrypoint in Ex_ty_cstr {ty; construct = (fun e -> R (construct e)); original_type_expr}) | (_, {nested = Entrypoints_None; _}) -> Gas_monad.of_result (Error ()) in let {root; original_type_expr} = entrypoints in Gas_monad.bind_recover (find_entrypoint full root entrypoint) @@ function | Ok f_t -> return f_t | Error () -> if Entrypoint.is_default entrypoint then return (Ex_ty_cstr {ty = full; construct = (fun e -> e); original_type_expr}) else Gas_monad.of_result @@ Error (match error_details with | Fast -> (Inconsistent_types_fast : error_trace) | Informative _ -> trace_of_error @@ No_such_entrypoint entrypoint) let find_entrypoint_for_type (type full fullc exp expc error_trace) ~error_details ~(full : (full, fullc) ty) ~(expected : (exp, expc) ty) entrypoints entrypoint : (Entrypoint.t * (exp, expc) ty, error_trace) Gas_monad.t = let open Gas_monad.Syntax in let* res = find_entrypoint ~error_details full entrypoints entrypoint in match res with | Ex_ty_cstr {ty; _} -> ( match entrypoints.root.at_node with | Some {name; original_type_expr = _} when Entrypoint.is_root name && Entrypoint.is_default entrypoint -> Gas_monad.bind_recover (ty_eq ~error_details:Fast ty expected) (function | Ok Eq -> return (Entrypoint.default, (ty : (exp, expc) ty)) | Error Inconsistent_types_fast -> let+ Eq = ty_eq ~error_details full expected in (Entrypoint.root, (full : (exp, expc) ty))) | _ -> let+ Eq = ty_eq ~error_details ty expected in (entrypoint, (ty : (exp, expc) ty))) let well_formed_entrypoints (type full fullc) (full : (full, fullc) ty) entrypoints = let merge path (type t tc) (ty : (t, tc) ty) (entrypoints : t entrypoints_node) reachable ((first_unreachable, all) as acc) = match entrypoints.at_node with | None -> ok ( (if reachable then acc else match ty with | Union_t _ -> acc | _ -> ( match first_unreachable with | None -> (Some (List.rev path), all) | Some _ -> acc)), reachable ) | Some {name; original_type_expr = _} -> if Entrypoint.Set.mem name all then error (Duplicate_entrypoint name) else ok ((first_unreachable, Entrypoint.Set.add name all), true) in let rec check : type t tc. (t, tc) ty -> t entrypoints_node -> prim list -> bool -> prim list option * Entrypoint.Set.t -> (prim list option * Entrypoint.Set.t) tzresult = fun t entrypoints path reachable acc -> match (t, entrypoints) with | (Union_t (tl, tr, _, _), {nested = Entrypoints_Union {left; right}; _}) -> merge (D_Left :: path) tl left reachable acc >>? fun (acc, l_reachable) -> merge (D_Right :: path) tr right reachable acc >>? fun (acc, r_reachable) -> check tl left (D_Left :: path) l_reachable acc >>? fun acc -> check tr right (D_Right :: path) r_reachable acc | _ -> ok acc in let (init, reachable) = match entrypoints.at_node with | None -> (Entrypoint.Set.empty, false) | Some {name; original_type_expr = _} -> (Entrypoint.Set.singleton name, true) in check full entrypoints [] reachable (None, init) >>? fun (first_unreachable, all) -> if not (Entrypoint.Set.mem Entrypoint.default all) then Result.return_unit else match first_unreachable with | None -> Result.return_unit | Some path -> error (Unreachable_entrypoint path) type ex_parameter_ty_and_entrypoints = | Ex_parameter_ty_and_entrypoints : { arg_type : ('a, _) ty; entrypoints : 'a entrypoints; } -> ex_parameter_ty_and_entrypoints let parse_parameter_ty_and_entrypoints : context -> stack_depth:int -> legacy:bool -> Script.node -> (ex_parameter_ty_and_entrypoints * context) tzresult = fun ctxt ~stack_depth ~legacy node -> parse_passable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy node ~ret:Parse_entrypoints >>? fun (Ex_parameter_ty_and_entrypoints_node {arg_type; entrypoints}, ctxt) -> (if legacy then Result.return_unit else well_formed_entrypoints arg_type entrypoints) >|? fun () -> let entrypoints = {root = entrypoints; original_type_expr = node} in (Ex_parameter_ty_and_entrypoints {arg_type; entrypoints}, ctxt) let parse_passable_ty = parse_passable_ty ~ret:Don't_parse_entrypoints let parse_uint ~nb_bits = assert (Compare.Int.(nb_bits >= 0 && nb_bits <= 30)) ; let max_int = (1 lsl nb_bits) - 1 in let max_z = Z.of_int max_int in function | Micheline.Int (_, n) when Compare.Z.(Z.zero <= n) && Compare.Z.(n <= max_z) -> ok (Z.to_int n) | node -> error @@ Invalid_syntactic_constant ( location node, strip_locations node, "a positive " ^ string_of_int nb_bits ^ "-bit integer (between 0 and " ^ string_of_int max_int ^ ")" ) let parse_uint10 = parse_uint ~nb_bits:10 let parse_uint11 = parse_uint ~nb_bits:11 (* This type is used to: - serialize and deserialize tickets when they are stored or transferred, - type the READ_TICKET instruction. *) let opened_ticket_type loc ty = comparable_pair_3_t loc address_t ty nat_t (* -- parse data of primitive types -- *) let parse_unit ctxt ~legacy = function | Prim (loc, D_Unit, [], annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>? fun () -> Gas.consume ctxt Typecheck_costs.unit >|? fun ctxt -> ((), ctxt) | Prim (loc, D_Unit, l, _) -> error @@ Invalid_arity (loc, D_Unit, 0, List.length l) | expr -> error @@ unexpected expr [] Constant_namespace [D_Unit] let parse_bool ctxt ~legacy = function | Prim (loc, D_True, [], annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>? fun () -> Gas.consume ctxt Typecheck_costs.bool >|? fun ctxt -> (true, ctxt) | Prim (loc, D_False, [], annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>? fun () -> Gas.consume ctxt Typecheck_costs.bool >|? fun ctxt -> (false, ctxt) | Prim (loc, ((D_True | D_False) as c), l, _) -> error @@ Invalid_arity (loc, c, 0, List.length l) | expr -> error @@ unexpected expr [] Constant_namespace [D_True; D_False] let parse_string ctxt : Script.node -> (Script_string.t * context) tzresult = function | String (loc, v) as expr -> Gas.consume ctxt (Typecheck_costs.check_printable v) >>? fun ctxt -> record_trace (Invalid_syntactic_constant (loc, strip_locations expr, "a printable ascii string")) (Script_string.of_string v >|? fun s -> (s, ctxt)) | expr -> error @@ Invalid_kind (location expr, [String_kind], kind expr) let parse_bytes ctxt = function | Bytes (_, v) -> ok (v, ctxt) | expr -> error @@ Invalid_kind (location expr, [Bytes_kind], kind expr) let parse_int ctxt = function | Int (_, v) -> ok (Script_int.of_zint v, ctxt) | expr -> error @@ Invalid_kind (location expr, [Int_kind], kind expr) let parse_nat ctxt : Script.node -> (Script_int.n Script_int.num * context) tzresult = function | Int (loc, v) as expr -> ( let v = Script_int.of_zint v in match Script_int.is_nat v with | Some nat -> ok (nat, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a non-negative integer")) | expr -> error @@ Invalid_kind (location expr, [Int_kind], kind expr) let parse_mutez ctxt : Script.node -> (Tez.t * context) tzresult = function | Int (loc, v) as expr -> ( match let open Option in bind (catch (fun () -> Z.to_int64 v)) Tez.of_mutez with | Some tez -> Ok (tez, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid mutez amount")) | expr -> error @@ Invalid_kind (location expr, [Int_kind], kind expr) let parse_timestamp ctxt : Script.node -> (Script_timestamp.t * context) tzresult = function | Int (_, v) (* As unparsed with [Optimized] or out of bounds [Readable]. *) -> ok (Script_timestamp.of_zint v, ctxt) | String (loc, s) as expr (* As unparsed with [Readable]. *) -> ( Gas.consume ctxt (Typecheck_costs.timestamp_readable s) >>? fun ctxt -> match Script_timestamp.of_string s with | Some v -> ok (v, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid timestamp")) | expr -> error @@ Invalid_kind (location expr, [String_kind; Int_kind], kind expr) let parse_key ctxt : Script.node -> (public_key * context) tzresult = function | Bytes (loc, bytes) as expr -> ( (* As unparsed with [Optimized]. *) Gas.consume ctxt Typecheck_costs.public_key_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes_opt Signature.Public_key.encoding bytes with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid public key")) | String (loc, s) as expr -> ( (* As unparsed with [Readable]. *) Gas.consume ctxt Typecheck_costs.public_key_readable >>? fun ctxt -> match Signature.Public_key.of_b58check_opt s with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid public key")) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_key_hash ctxt : Script.node -> (public_key_hash * context) tzresult = function | Bytes (loc, bytes) as expr -> ( (* As unparsed with [Optimized]. *) Gas.consume ctxt Typecheck_costs.key_hash_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes_opt Signature.Public_key_hash.encoding bytes with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid key hash")) | String (loc, s) as expr (* As unparsed with [Readable]. *) -> ( Gas.consume ctxt Typecheck_costs.key_hash_readable >>? fun ctxt -> match Signature.Public_key_hash.of_b58check_opt s with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid key hash")) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_signature ctxt : Script.node -> (signature * context) tzresult = function | Bytes (loc, bytes) as expr (* As unparsed with [Optimized]. *) -> ( Gas.consume ctxt Typecheck_costs.signature_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes_opt Script_signature.encoding bytes with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid signature")) | String (loc, s) as expr (* As unparsed with [Readable]. *) -> ( Gas.consume ctxt Typecheck_costs.signature_readable >>? fun ctxt -> match Script_signature.of_b58check_opt s with | Some s -> ok (s, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid signature")) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_chain_id ctxt : Script.node -> (Script_chain_id.t * context) tzresult = function | Bytes (loc, bytes) as expr -> ( Gas.consume ctxt Typecheck_costs.chain_id_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes_opt Script_chain_id.encoding bytes with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid chain id")) | String (loc, s) as expr -> ( Gas.consume ctxt Typecheck_costs.chain_id_readable >>? fun ctxt -> match Script_chain_id.of_b58check_opt s with | Some s -> ok (s, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid chain id")) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_address ctxt : Script.node -> (address * context) tzresult = let destination_allowed loc {destination; entrypoint} ctxt = match destination with | Destination.Tx_rollup _ when not (Constants.tx_rollup_enable ctxt) -> error @@ Tx_rollup_addresses_disabled loc | _ -> Ok ({destination; entrypoint}, ctxt) in function | Bytes (loc, bytes) as expr (* As unparsed with [Optimized]. *) -> ( Gas.consume ctxt Typecheck_costs.contract_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes_opt Data_encoding.(tup2 Destination.encoding Entrypoint.value_encoding) bytes with | Some (destination, entrypoint) -> destination_allowed loc {destination; entrypoint} ctxt | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid address")) | String (loc, s) (* As unparsed with [Readable]. *) -> Gas.consume ctxt Typecheck_costs.contract_readable >>? fun ctxt -> (match String.index_opt s '%' with | None -> ok (s, Entrypoint.default) | Some pos -> let len = String.length s - pos - 1 in let name = String.sub s (pos + 1) len in Entrypoint.of_string_strict ~loc name >|? fun entrypoint -> (String.sub s 0 pos, entrypoint)) >>? fun (addr, entrypoint) -> Destination.of_b58check addr >>? fun destination -> destination_allowed loc {destination; entrypoint} ctxt | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_tx_rollup_l2_address ctxt : Script.node -> (tx_rollup_l2_address * context) tzresult = function | Bytes (loc, bytes) as expr (* As unparsed with [Optimized]. *) -> ( Gas.consume ctxt Typecheck_costs.tx_rollup_l2_address >>? fun ctxt -> match Tx_rollup_l2_address.of_bytes_opt bytes with | Some txa -> ok (Tx_rollup_l2_address.Indexable.value txa, ctxt) | None -> error @@ Invalid_syntactic_constant ( loc, strip_locations expr, "a valid transaction rollup L2 address" )) | String (loc, str) as expr (* As unparsed with [Readable]. *) -> ( Gas.consume ctxt Typecheck_costs.tx_rollup_l2_address >>? fun ctxt -> match Tx_rollup_l2_address.of_b58check_opt str with | Some txa -> ok (Tx_rollup_l2_address.Indexable.value txa, ctxt) | None -> error @@ Invalid_syntactic_constant ( loc, strip_locations expr, "a valid transaction rollup L2 address" )) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_never expr : (never * context) tzresult = error @@ Invalid_never_expr (location expr) (* -- parse data of complex types -- *) let parse_pair (type r) parse_l parse_r ctxt ~legacy (r_comb_witness : (r, unit -> _) comb_witness) expr = let parse_comb loc l rs = parse_l ctxt l >>=? fun (l, ctxt) -> (match (rs, r_comb_witness) with | ([r], _) -> ok r | ([], _) -> error @@ Invalid_arity (loc, D_Pair, 2, 1) | (_ :: _, Comb_Pair _) -> (* Unfold [Pair x1 ... xn] as [Pair x1 (Pair x2 ... xn-1 xn))] for type [pair ta (pair tb1 tb2)] and n >= 3 only *) ok (Prim (loc, D_Pair, rs, [])) | _ -> error @@ Invalid_arity (loc, D_Pair, 2, 1 + List.length rs)) >>?= fun r -> parse_r ctxt r >|=? fun (r, ctxt) -> ((l, r), ctxt) in match expr with | Prim (loc, D_Pair, l :: rs, annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_comb loc l rs | Prim (loc, D_Pair, l, _) -> fail @@ Invalid_arity (loc, D_Pair, 2, List.length l) (* Unfold [{x1; ...; xn}] as [Pair x1 x2 ... xn-1 xn] for n >= 2 *) | Seq (loc, l :: (_ :: _ as rs)) -> parse_comb loc l rs | Seq (loc, l) -> fail @@ Invalid_seq_arity (loc, 2, List.length l) | expr -> fail @@ unexpected expr [] Constant_namespace [D_Pair] let parse_union parse_l parse_r ctxt ~legacy = function | Prim (loc, D_Left, [v], annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_l ctxt v >|=? fun (v, ctxt) -> (L v, ctxt) | Prim (loc, D_Left, l, _) -> fail @@ Invalid_arity (loc, D_Left, 1, List.length l) | Prim (loc, D_Right, [v], annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_r ctxt v >|=? fun (v, ctxt) -> (R v, ctxt) | Prim (loc, D_Right, l, _) -> fail @@ Invalid_arity (loc, D_Right, 1, List.length l) | expr -> fail @@ unexpected expr [] Constant_namespace [D_Left; D_Right] let parse_option parse_v ctxt ~legacy = function | Prim (loc, D_Some, [v], annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_v ctxt v >|=? fun (v, ctxt) -> (Some v, ctxt) | Prim (loc, D_Some, l, _) -> fail @@ Invalid_arity (loc, D_Some, 1, List.length l) | Prim (loc, D_None, [], annot) -> Lwt.return ( (if legacy then Result.return_unit else error_unexpected_annot loc annot) >|? fun () -> (None, ctxt) ) | Prim (loc, D_None, l, _) -> fail @@ Invalid_arity (loc, D_None, 0, List.length l) | expr -> fail @@ unexpected expr [] Constant_namespace [D_Some; D_None] (* -- parse data of comparable types -- *) let comb_witness1 : type t tc. (t, tc) ty -> (t, unit -> unit) comb_witness = function | Pair_t _ -> Comb_Pair Comb_Any | _ -> Comb_Any let[@coq_axiom_with_reason "gadt"] rec parse_comparable_data : type a. ?type_logger:type_logger -> context -> a comparable_ty -> Script.node -> (a * context) tzresult Lwt.t = fun ?type_logger ctxt ty script_data -> (* No need for stack_depth here. Unlike [parse_data], [parse_comparable_data] doesn't call [parse_returning]. The stack depth is bounded by the type depth, bounded by 1024. *) let parse_data_error () = let ty = serialize_ty_for_error ty in Invalid_constant (location script_data, strip_locations script_data, ty) in let traced_no_lwt body = record_trace_eval parse_data_error body in let traced body = trace_eval parse_data_error body in Gas.consume ctxt Typecheck_costs.parse_data_cycle (* We could have a smaller cost but let's keep it consistent with [parse_data] for now. *) >>?= fun ctxt -> let legacy = false in match (ty, script_data) with | (Unit_t, expr) -> Lwt.return @@ traced_no_lwt @@ (parse_unit ctxt ~legacy expr : (a * context) tzresult) | (Bool_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_bool ctxt ~legacy expr | (String_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_string ctxt expr | (Bytes_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_bytes ctxt expr | (Int_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_int ctxt expr | (Nat_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_nat ctxt expr | (Mutez_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_mutez ctxt expr | (Timestamp_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_timestamp ctxt expr | (Key_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_key ctxt expr | (Key_hash_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_key_hash ctxt expr | (Signature_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_signature ctxt expr | (Chain_id_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_chain_id ctxt expr | (Address_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_address ctxt expr | (Tx_rollup_l2_address_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_tx_rollup_l2_address ctxt expr | (Pair_t (tl, tr, _, YesYes), expr) -> let r_witness = comb_witness1 tr in let parse_l ctxt v = parse_comparable_data ?type_logger ctxt tl v in let parse_r ctxt v = parse_comparable_data ?type_logger ctxt tr v in traced @@ parse_pair parse_l parse_r ctxt ~legacy r_witness expr | (Union_t (tl, tr, _, YesYes), expr) -> let parse_l ctxt v = parse_comparable_data ?type_logger ctxt tl v in let parse_r ctxt v = parse_comparable_data ?type_logger ctxt tr v in traced @@ parse_union parse_l parse_r ctxt ~legacy expr | (Option_t (t, _, Yes), expr) -> let parse_v ctxt v = parse_comparable_data ?type_logger ctxt t v in traced @@ parse_option parse_v ctxt ~legacy expr | (Never_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_never expr (* -- parse data of any type -- *) (* Some values, such as operations, tickets, or big map ids, are used only internally and are not allowed to be forged by users. In [parse_data], [allow_forged] should be [false] for: - PUSH - UNPACK - user-provided script parameters - storage on origination And [true] for: - internal calls parameters - storage after origination *) let[@coq_axiom_with_reason "gadt"] rec parse_data : type a ac. ?type_logger:type_logger -> stack_depth:int -> context -> legacy:bool -> allow_forged:bool -> (a, ac) ty -> Script.node -> (a * context) tzresult Lwt.t = fun ?type_logger ~stack_depth ctxt ~legacy ~allow_forged ty script_data -> Gas.consume ctxt Typecheck_costs.parse_data_cycle >>?= fun ctxt -> let non_terminal_recursion ?type_logger ctxt ~legacy ty script_data = if Compare.Int.(stack_depth > 10_000) then fail Typechecking_too_many_recursive_calls else parse_data ?type_logger ~stack_depth:(stack_depth + 1) ctxt ~legacy ~allow_forged ty script_data in let parse_data_error () = let ty = serialize_ty_for_error ty in Invalid_constant (location script_data, strip_locations script_data, ty) in let fail_parse_data () = fail (parse_data_error ()) in let traced_no_lwt body = record_trace_eval parse_data_error body in let traced body = trace_eval parse_data_error body in let traced_fail err = Lwt.return @@ traced_no_lwt (error err) in let parse_items ?type_logger ctxt expr key_type value_type items item_wrapper = List.fold_left_es (fun (last_value, map, ctxt) item -> match item with | Prim (loc, D_Elt, [k; v], annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_comparable_data ?type_logger ctxt key_type k >>=? fun (k, ctxt) -> non_terminal_recursion ?type_logger ctxt ~legacy value_type v >>=? fun (v, ctxt) -> Lwt.return ( (match last_value with | Some value -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.compare key_type value k) >>? fun ctxt -> let c = Script_comparable.compare_comparable key_type value k in if Compare.Int.(0 <= c) then if Compare.Int.(0 = c) then error (Duplicate_map_keys (loc, strip_locations expr)) else error (Unordered_map_keys (loc, strip_locations expr)) else ok ctxt | None -> ok ctxt) >>? fun ctxt -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.map_update k map) >|? fun ctxt -> (Some k, Script_map.update k (Some (item_wrapper v)) map, ctxt) ) | Prim (loc, D_Elt, l, _) -> fail @@ Invalid_arity (loc, D_Elt, 2, List.length l) | Prim (loc, name, _, _) -> fail @@ Invalid_primitive (loc, [D_Elt], name) | Int _ | String _ | Bytes _ | Seq _ -> fail_parse_data ()) (None, Script_map.empty key_type, ctxt) items |> traced >|=? fun (_, items, ctxt) -> (items, ctxt) in let parse_big_map_items (type t) ?type_logger ctxt expr (key_type : t comparable_ty) value_type items item_wrapper = List.fold_left_es (fun (last_key, {map; size}, ctxt) item -> match item with | Prim (loc, D_Elt, [k; v], annot) -> (if legacy then Result.return_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_comparable_data ?type_logger ctxt key_type k >>=? fun (k, ctxt) -> hash_comparable_data ctxt key_type k >>=? fun (key_hash, ctxt) -> non_terminal_recursion ?type_logger ctxt ~legacy value_type v >>=? fun (v, ctxt) -> Lwt.return ( (match last_key with | Some last_key -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.compare key_type last_key k) >>? fun ctxt -> let c = Script_comparable.compare_comparable key_type last_key k in if Compare.Int.(0 <= c) then if Compare.Int.(0 = c) then error (Duplicate_map_keys (loc, strip_locations expr)) else error (Unordered_map_keys (loc, strip_locations expr)) else ok ctxt | None -> ok ctxt) >>? fun ctxt -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.big_map_update {map; size}) >>? fun ctxt -> if Big_map_overlay.mem key_hash map then error (Duplicate_map_keys (loc, strip_locations expr)) else ok ( Some k, { map = Big_map_overlay.add key_hash (k, item_wrapper v) map; size = size + 1; }, ctxt ) ) | Prim (loc, D_Elt, l, _) -> fail @@ Invalid_arity (loc, D_Elt, 2, List.length l) | Prim (loc, name, _, _) -> fail @@ Invalid_primitive (loc, [D_Elt], name) | Int _ | String _ | Bytes _ | Seq _ -> fail_parse_data ()) (None, {map = Big_map_overlay.empty; size = 0}, ctxt) items |> traced >|=? fun (_, map, ctxt) -> (map, ctxt) in match (ty, script_data) with | (Unit_t, expr) -> Lwt.return @@ traced_no_lwt @@ (parse_unit ctxt ~legacy expr : (a * context) tzresult) | (Bool_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_bool ctxt ~legacy expr | (String_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_string ctxt expr | (Bytes_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_bytes ctxt expr | (Int_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_int ctxt expr | (Nat_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_nat ctxt expr | (Mutez_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_mutez ctxt expr | (Timestamp_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_timestamp ctxt expr | (Key_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_key ctxt expr | (Key_hash_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_key_hash ctxt expr | (Signature_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_signature ctxt expr | (Operation_t, _) -> (* operations cannot appear in parameters or storage, the protocol should never parse the bytes of an operation *) assert false | (Chain_id_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_chain_id ctxt expr | (Address_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_address ctxt expr | (Tx_rollup_l2_address_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_tx_rollup_l2_address ctxt expr | (Contract_t (arg_ty, _), expr) -> traced ( parse_address ctxt expr >>?= fun (address, ctxt) -> let loc = location expr in parse_contract ~stack_depth:(stack_depth + 1) ctxt loc arg_ty address.destination ~entrypoint:address.entrypoint >|=? fun (ctxt, _) -> (Typed_contract {arg_ty; address}, ctxt) ) (* Pairs *) | (Pair_t (tl, tr, _, _), expr) -> let r_witness = comb_witness1 tr in let parse_l ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy tl v in let parse_r ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy tr v in traced @@ parse_pair parse_l parse_r ctxt ~legacy r_witness expr (* Unions *) | (Union_t (tl, tr, _, _), expr) -> let parse_l ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy tl v in let parse_r ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy tr v in traced @@ parse_union parse_l parse_r ctxt ~legacy expr (* Lambdas *) | (Lambda_t (ta, tr, _ty_name), (Seq (_loc, _) as script_instr)) -> traced @@ parse_returning Tc_context.data ?type_logger ~stack_depth:(stack_depth + 1) ctxt ~legacy ta tr script_instr | (Lambda_t _, expr) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) (* Options *) | (Option_t (t, _, _), expr) -> let parse_v ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy t v in traced @@ parse_option parse_v ctxt ~legacy expr (* Lists *) | (List_t (t, _ty_name), Seq (_loc, items)) -> traced @@ List.fold_right_es (fun v (rest, ctxt) -> non_terminal_recursion ?type_logger ctxt ~legacy t v >|=? fun (v, ctxt) -> (Script_list.cons v rest, ctxt)) items (Script_list.empty, ctxt) | (List_t _, expr) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) (* Tickets *) | (Ticket_t (t, _ty_name), expr) -> if allow_forged then opened_ticket_type (location expr) t >>?= fun ty -> parse_comparable_data ?type_logger ctxt ty expr >>=? fun (({destination; entrypoint = _}, (contents, amount)), ctxt) -> match destination with | Contract ticketer -> return ({ticketer; contents; amount}, ctxt) | Tx_rollup _ -> fail (Unexpected_ticket_owner destination) else traced_fail (Unexpected_forged_value (location expr)) (* Sets *) | (Set_t (t, _ty_name), (Seq (loc, vs) as expr)) -> traced @@ List.fold_left_es (fun (last_value, set, ctxt) v -> parse_comparable_data ?type_logger ctxt t v >>=? fun (v, ctxt) -> Lwt.return ( (match last_value with | Some value -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.compare t value v) >>? fun ctxt -> let c = Script_comparable.compare_comparable t value v in if Compare.Int.(0 <= c) then if Compare.Int.(0 = c) then error (Duplicate_set_values (loc, strip_locations expr)) else error (Unordered_set_values (loc, strip_locations expr)) else ok ctxt | None -> ok ctxt) >>? fun ctxt -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.set_update v set) >|? fun ctxt -> (Some v, Script_set.update v true set, ctxt) )) (None, Script_set.empty t, ctxt) vs >|=? fun (_, set, ctxt) -> (set, ctxt) | (Set_t _, expr) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) (* Maps *) | (Map_t (tk, tv, _ty_name), (Seq (_, vs) as expr)) -> parse_items ?type_logger ctxt expr tk tv vs (fun x -> x) | (Map_t _, expr) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) | (Big_map_t (tk, tv, _ty_name), expr) -> (match expr with | Int (loc, id) -> return (Some (id, loc), {map = Big_map_overlay.empty; size = 0}, ctxt) | Seq (_, vs) -> parse_big_map_items ?type_logger ctxt expr tk tv vs (fun x -> Some x) >|=? fun (diff, ctxt) -> (None, diff, ctxt) | Prim (loc, D_Pair, [Int (loc_id, id); Seq (_, vs)], annot) -> error_unexpected_annot loc annot >>?= fun () -> option_t loc tv >>?= fun tv_opt -> parse_big_map_items ?type_logger ctxt expr tk tv_opt vs (fun x -> x) >|=? fun (diff, ctxt) -> (Some (id, loc_id), diff, ctxt) | Prim (_, D_Pair, [Int _; expr], _) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) | Prim (_, D_Pair, [expr; _], _) -> traced_fail (Invalid_kind (location expr, [Int_kind], kind expr)) | Prim (loc, D_Pair, l, _) -> traced_fail @@ Invalid_arity (loc, D_Pair, 2, List.length l) | _ -> traced_fail (unexpected expr [Seq_kind; Int_kind] Constant_namespace [D_Pair])) >>=? fun (id_opt, diff, ctxt) -> (match id_opt with | None -> return @@ (None, ctxt) | Some (id, loc) -> if allow_forged then let id = Big_map.Id.parse_z id in Big_map.exists ctxt id >>=? function | (_, None) -> traced_fail (Invalid_big_map (loc, id)) | (ctxt, Some (btk, btv)) -> Lwt.return ( parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt (Micheline.root btk) >>? fun (Ex_comparable_ty btk, ctxt) -> parse_big_map_value_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy (Micheline.root btv) >>? fun (Ex_ty btv, ctxt) -> (Gas_monad.run ctxt @@ let open Gas_monad.Syntax in let error_details = Informative loc in let* Eq = ty_eq ~error_details tk btk in ty_eq ~error_details tv btv) >>? fun (eq, ctxt) -> eq >|? fun Eq -> (Some id, ctxt) ) else traced_fail (Unexpected_forged_value loc)) >|=? fun (id, ctxt) -> (Big_map {id; diff; key_type = tk; value_type = tv}, ctxt) | (Never_t, expr) -> Lwt.return @@ traced_no_lwt @@ parse_never expr (* Bls12_381 types *) | (Bls12_381_g1_t, Bytes (_, bs)) -> ( Gas.consume ctxt Typecheck_costs.bls12_381_g1 >>?= fun ctxt -> match Script_bls.G1.of_bytes_opt bs with | Some pt -> return (pt, ctxt) | None -> fail_parse_data ()) | (Bls12_381_g1_t, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) | (Bls12_381_g2_t, Bytes (_, bs)) -> ( Gas.consume ctxt Typecheck_costs.bls12_381_g2 >>?= fun ctxt -> match Script_bls.G2.of_bytes_opt bs with | Some pt -> return (pt, ctxt) | None -> fail_parse_data ()) | (Bls12_381_g2_t, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) | (Bls12_381_fr_t, Bytes (_, bs)) -> ( Gas.consume ctxt Typecheck_costs.bls12_381_fr >>?= fun ctxt -> match Script_bls.Fr.of_bytes_opt bs with | Some pt -> return (pt, ctxt) | None -> fail_parse_data ()) | (Bls12_381_fr_t, Int (_, v)) -> Gas.consume ctxt Typecheck_costs.bls12_381_fr >>?= fun ctxt -> return (Script_bls.Fr.of_z v, ctxt) | (Bls12_381_fr_t, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) (* /!\ When adding new lazy storage kinds, you may want to guard the parsing of identifiers with [allow_forged]. *) (* Sapling *) | (Sapling_transaction_t memo_size, Bytes (_, bytes)) -> ( match Data_encoding.Binary.of_bytes_opt Sapling.transaction_encoding bytes with | Some transaction -> ( match Sapling.transaction_get_memo_size transaction with | None -> return (transaction, ctxt) | Some transac_memo_size -> Lwt.return ( memo_size_eq ~error_details:(Informative ()) memo_size transac_memo_size >|? fun () -> (transaction, ctxt) )) | None -> fail_parse_data ()) | (Sapling_transaction_t _, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) | (Sapling_transaction_deprecated_t memo_size, Bytes (_, bytes)) -> ( match Data_encoding.Binary.of_bytes_opt Sapling.Legacy.transaction_encoding bytes with | Some transaction -> ( match Sapling.Legacy.transaction_get_memo_size transaction with | None -> return (transaction, ctxt) | Some transac_memo_size -> Lwt.return ( memo_size_eq ~error_details:(Informative ()) memo_size transac_memo_size >|? fun () -> (transaction, ctxt) )) | None -> fail_parse_data ()) | (Sapling_transaction_deprecated_t _, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) | (Sapling_state_t memo_size, Int (loc, id)) -> if allow_forged then let id = Sapling.Id.parse_z id in Sapling.state_from_id ctxt id >>=? fun (state, ctxt) -> Lwt.return ( traced_no_lwt @@ memo_size_eq ~error_details:(Informative ()) memo_size state.Sapling.memo_size >|? fun () -> (state, ctxt) ) else traced_fail (Unexpected_forged_value loc) | (Sapling_state_t memo_size, Seq (_, [])) -> return (Sapling.empty_state ~memo_size (), ctxt) | (Sapling_state_t _, expr) -> (* Do not allow to input diffs as they are untrusted and may not be the result of a verify_update. *) traced_fail (Invalid_kind (location expr, [Int_kind; Seq_kind], kind expr)) (* Time lock*) | (Chest_key_t, Bytes (_, bytes)) -> ( Gas.consume ctxt Typecheck_costs.chest_key >>?= fun ctxt -> match Data_encoding.Binary.of_bytes_opt Script_timelock.chest_key_encoding bytes with | Some chest_key -> return (chest_key, ctxt) | None -> fail_parse_data ()) | (Chest_key_t, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) | (Chest_t, Bytes (_, bytes)) -> ( Gas.consume ctxt (Typecheck_costs.chest ~bytes:(Bytes.length bytes)) >>?= fun ctxt -> match Data_encoding.Binary.of_bytes_opt Script_timelock.chest_encoding bytes with | Some chest -> return (chest, ctxt) | None -> fail_parse_data ()) | (Chest_t, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) and parse_view : type storage storagec. ?type_logger:type_logger -> context -> legacy:bool -> (storage, storagec) ty -> view -> (storage typed_view * context) tzresult Lwt.t = fun ?type_logger ctxt ~legacy storage_type {input_ty; output_ty; view_code} -> let input_ty_loc = location input_ty in record_trace_eval (fun () -> Ill_formed_type (Some "arg of view", strip_locations input_ty, input_ty_loc)) (parse_view_input_ty ctxt ~stack_depth:0 ~legacy input_ty) >>?= fun (Ex_ty input_ty, ctxt) -> let output_ty_loc = location output_ty in record_trace_eval (fun () -> Ill_formed_type (Some "return of view", strip_locations output_ty, output_ty_loc)) (parse_view_output_ty ctxt ~stack_depth:0 ~legacy output_ty) >>?= fun (Ex_ty output_ty, ctxt) -> pair_t input_ty_loc input_ty storage_type >>?= fun (Ty_ex_c pair_ty) -> parse_instr ?type_logger ~stack_depth:0 Tc_context.view ctxt ~legacy view_code (Item_t (pair_ty, Bot_t)) >>=? fun (judgement, ctxt) -> Lwt.return @@ match judgement with | Failed {descr} -> let {kinstr; _} = close_descr (descr (Item_t (output_ty, Bot_t))) in ok ( Typed_view {input_ty; output_ty; kinstr; original_code_expr = view_code}, ctxt ) | Typed ({loc; aft; _} as descr) -> ( let ill_type_view stack_ty loc = let actual = serialize_stack_for_error ctxt stack_ty in let expected_stack = Item_t (output_ty, Bot_t) in let expected = serialize_stack_for_error ctxt expected_stack in Ill_typed_view {loc; actual; expected} in match aft with | Item_t (ty, Bot_t) -> let error_details = Informative loc in Gas_monad.run ctxt @@ Gas_monad.record_trace_eval ~error_details (fun loc -> ill_type_view aft loc) @@ ty_eq ~error_details ty output_ty >>? fun (eq, ctxt) -> eq >|? fun Eq -> let {kinstr; _} = close_descr descr in ( Typed_view {input_ty; output_ty; kinstr; original_code_expr = view_code}, ctxt ) | _ -> error (ill_type_view aft loc)) and parse_views : type storage storagec. ?type_logger:type_logger -> context -> legacy:bool -> (storage, storagec) ty -> view_map -> (storage typed_view_map * context) tzresult Lwt.t = fun ?type_logger ctxt ~legacy storage_type views -> let aux ctxt name cur_view = Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.view_update name views) >>?= fun ctxt -> parse_view ?type_logger ctxt ~legacy storage_type cur_view in Script_map.map_es_in_context aux ctxt views and[@coq_axiom_with_reason "gadt"] parse_returning : type arg argc ret retc. ?type_logger:type_logger -> stack_depth:int -> tc_context -> context -> legacy:bool -> (arg, argc) ty -> (ret, retc) ty -> Script.node -> ((arg, ret) lambda * context) tzresult Lwt.t = fun ?type_logger ~stack_depth tc_context ctxt ~legacy arg ret script_instr -> parse_instr ?type_logger tc_context ctxt ~legacy ~stack_depth:(stack_depth + 1) script_instr (Item_t (arg, Bot_t)) >>=? function | (Typed ({loc; aft = Item_t (ty, Bot_t) as stack_ty; _} as descr), ctxt) -> Lwt.return (let error_details = Informative loc in Gas_monad.run ctxt @@ Gas_monad.record_trace_eval ~error_details (fun loc -> let ret = serialize_ty_for_error ret in let stack_ty = serialize_stack_for_error ctxt stack_ty in Bad_return (loc, stack_ty, ret)) @@ ty_eq ~error_details ty ret >>? fun (eq, ctxt) -> eq >|? fun Eq -> ((Lam (close_descr descr, script_instr) : (arg, ret) lambda), ctxt)) | (Typed {loc; aft = stack_ty; _}, ctxt) -> let ret = serialize_ty_for_error ret in let stack_ty = serialize_stack_for_error ctxt stack_ty in fail @@ Bad_return (loc, stack_ty, ret) | (Failed {descr}, ctxt) -> return ( (Lam (close_descr (descr (Item_t (ret, Bot_t))), script_instr) : (arg, ret) lambda), ctxt ) and[@coq_axiom_with_reason "gadt"] parse_instr : type a s. ?type_logger:type_logger -> stack_depth:int -> tc_context -> context -> legacy:bool -> Script.node -> (a, s) stack_ty -> ((a, s) judgement * context) tzresult Lwt.t = fun ?type_logger ~stack_depth tc_context ctxt ~legacy script_instr stack_ty -> let check_item_ty (type a ac b bc) ctxt (exp : (a, ac) ty) (got : (b, bc) ty) loc name n m : ((a, b) eq * context) tzresult = record_trace_eval (fun () -> let stack_ty = serialize_stack_for_error ctxt stack_ty in Bad_stack (loc, name, m, stack_ty)) @@ record_trace (Bad_stack_item n) ( Gas_monad.run ctxt @@ ty_eq ~error_details:(Informative loc) exp got >>? fun (eq, ctxt) -> eq >|? fun Eq -> ((Eq : (a, b) eq), ctxt) ) in let log_stack loc stack_ty aft = match (type_logger, script_instr) with | (None, _) | (Some _, (Int _ | String _ | Bytes _)) -> () | (Some log, (Prim _ | Seq _)) -> (* Unparsing for logging is not carbonated as this is used only by the client and not the protocol *) let stack_ty_before = unparse_stack_uncarbonated stack_ty in let stack_ty_after = unparse_stack_uncarbonated aft in log loc ~stack_ty_before ~stack_ty_after in let typed_no_lwt ctxt loc instr aft = log_stack loc stack_ty aft ; let j = Typed {loc; instr; bef = stack_ty; aft} in Ok (j, ctxt) in let typed ctxt loc instr aft = Lwt.return @@ typed_no_lwt ctxt loc instr aft in Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>?= fun ctxt -> let non_terminal_recursion ?type_logger tc_context ctxt ~legacy script_instr stack_ty = if Compare.Int.(stack_depth > 10000) then fail Typechecking_too_many_recursive_calls else parse_instr ?type_logger tc_context ctxt ~stack_depth:(stack_depth + 1) ~legacy script_instr stack_ty in match (script_instr, stack_ty) with (* stack ops *) | (Prim (loc, I_DROP, [], annot), Item_t (_, rest)) -> (error_unexpected_annot loc annot >>?= fun () -> typed ctxt loc {apply = (fun kinfo k -> IDrop (kinfo, k))} rest : ((a, s) judgement * context) tzresult Lwt.t) | (Prim (loc, I_DROP, [n], result_annot), whole_stack) -> parse_uint10 n >>?= fun whole_n -> Gas.consume ctxt (Typecheck_costs.proof_argument whole_n) >>?= fun ctxt -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a, s) dropn_proof_argument tzresult = fun n stk -> match (Compare.Int.(n = 0), stk) with | (true, rest) -> ok @@ Dropn_proof_argument (KRest, rest) | (false, Item_t (_, rest)) -> make_proof_argument (n - 1) rest >|? fun (Dropn_proof_argument (n', stack_after_drops)) -> let kinfo = {iloc = loc; kstack_ty = rest} in Dropn_proof_argument (KPrefix (kinfo, n'), stack_after_drops) | (_, _) -> let whole_stack = serialize_stack_for_error ctxt whole_stack in error (Bad_stack (loc, I_DROP, whole_n, whole_stack)) in error_unexpected_annot loc result_annot >>?= fun () -> make_proof_argument whole_n whole_stack >>?= fun (Dropn_proof_argument (n', stack_after_drops)) -> let kdropn kinfo k = IDropn (kinfo, whole_n, n', k) in typed ctxt loc {apply = kdropn} stack_after_drops | (Prim (loc, I_DROP, (_ :: _ :: _ as l), _), _) -> (* Technically, the arities 0 and 1 are allowed but the error only mentions 1. However, DROP is equivalent to DROP 1 so hinting at an arity of 1 makes sense. *) fail (Invalid_arity (loc, I_DROP, 1, List.length l)) | (Prim (loc, I_DUP, [], annot), (Item_t (v, _) as stack)) -> check_var_annot loc annot >>?= fun () -> record_trace_eval (fun () -> let t = serialize_ty_for_error v in Non_dupable_type (loc, t)) (check_dupable_ty ctxt loc v) >>?= fun ctxt -> let dup = {apply = (fun kinfo k -> IDup (kinfo, k))} in typed ctxt loc dup (Item_t (v, stack)) | (Prim (loc, I_DUP, [n], v_annot), stack_ty) -> check_var_annot loc v_annot >>?= fun () -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a * s) dup_n_proof_argument tzresult = fun n (stack_ty : (a, s) stack_ty) -> match (n, stack_ty) with | (1, Item_t (hd_ty, _)) -> ok @@ Dup_n_proof_argument (Dup_n_zero, hd_ty) | (n, Item_t (_, tl_ty)) -> make_proof_argument (n - 1) tl_ty >|? fun (Dup_n_proof_argument (dup_n_witness, b_ty)) -> Dup_n_proof_argument (Dup_n_succ dup_n_witness, b_ty) | _ -> let whole_stack = serialize_stack_for_error ctxt stack_ty in error (Bad_stack (loc, I_DUP, 1, whole_stack)) in parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> error_unless (Compare.Int.( > ) n 0) (Dup_n_bad_argument loc) >>?= fun () -> record_trace (Dup_n_bad_stack loc) (make_proof_argument n stack_ty) >>?= fun (Dup_n_proof_argument (witness, after_ty)) -> record_trace_eval (fun () -> let t = serialize_ty_for_error after_ty in Non_dupable_type (loc, t)) (check_dupable_ty ctxt loc after_ty) >>?= fun ctxt -> let dupn = {apply = (fun kinfo k -> IDup_n (kinfo, n, witness, k))} in typed ctxt loc dupn (Item_t (after_ty, stack_ty)) | (Prim (loc, I_DIG, [n], result_annot), stack) -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a, s) dig_proof_argument tzresult = fun n stk -> match (Compare.Int.(n = 0), stk) with | (true, Item_t (v, rest)) -> ok @@ Dig_proof_argument (KRest, v, rest) | (false, Item_t (v, rest)) -> make_proof_argument (n - 1) rest >|? fun (Dig_proof_argument (n', x, aft')) -> let kinfo = {iloc = loc; kstack_ty = aft'} in Dig_proof_argument (KPrefix (kinfo, n'), x, Item_t (v, aft')) | (_, _) -> let whole_stack = serialize_stack_for_error ctxt stack in error (Bad_stack (loc, I_DIG, 3, whole_stack)) in parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> error_unexpected_annot loc result_annot >>?= fun () -> make_proof_argument n stack >>?= fun (Dig_proof_argument (n', x, aft)) -> let dig = {apply = (fun kinfo k -> IDig (kinfo, n, n', k))} in typed ctxt loc dig (Item_t (x, aft)) | (Prim (loc, I_DIG, (([] | _ :: _ :: _) as l), _), _) -> fail (Invalid_arity (loc, I_DIG, 1, List.length l)) | (Prim (loc, I_DUG, [n], result_annot), Item_t (x, whole_stack)) -> ( parse_uint10 n >>?= fun whole_n -> Gas.consume ctxt (Typecheck_costs.proof_argument whole_n) >>?= fun ctxt -> error_unexpected_annot loc result_annot >>?= fun () -> match make_dug_proof_argument loc whole_n x whole_stack with | None -> let whole_stack = serialize_stack_for_error ctxt whole_stack in fail (Bad_stack (loc, I_DUG, whole_n, whole_stack)) | Some (Dug_proof_argument (n', aft)) -> let dug = {apply = (fun kinfo k -> IDug (kinfo, whole_n, n', k))} in typed ctxt loc dug aft) | (Prim (loc, I_DUG, [_], result_annot), stack) -> Lwt.return ( error_unexpected_annot loc result_annot >>? fun () -> let stack = serialize_stack_for_error ctxt stack in error (Bad_stack (loc, I_DUG, 1, stack)) ) | (Prim (loc, I_DUG, (([] | _ :: _ :: _) as l), _), _) -> fail (Invalid_arity (loc, I_DUG, 1, List.length l)) | (Prim (loc, I_SWAP, [], annot), Item_t (v, Item_t (w, rest))) -> error_unexpected_annot loc annot >>?= fun () -> let swap = {apply = (fun kinfo k -> ISwap (kinfo, k))} in let stack_ty = Item_t (w, Item_t (v, rest)) in typed ctxt loc swap stack_ty | (Prim (loc, I_PUSH, [t; d], annot), stack) -> check_var_annot loc annot >>?= fun () -> parse_packable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy t >>?= fun (Ex_ty t, ctxt) -> parse_data ?type_logger ~stack_depth:(stack_depth + 1) ctxt ~legacy ~allow_forged:false t d >>=? fun (v, ctxt) -> let const = {apply = (fun kinfo k -> IConst (kinfo, v, k))} in typed ctxt loc const (Item_t (t, stack)) | (Prim (loc, I_UNIT, [], annot), stack) -> check_var_type_annot loc annot >>?= fun () -> let const = {apply = (fun kinfo k -> IConst (kinfo, (), k))} in typed ctxt loc const (Item_t (unit_t, stack)) (* options *) | (Prim (loc, I_SOME, [], annot), Item_t (t, rest)) -> check_var_type_annot loc annot >>?= fun () -> let cons_some = {apply = (fun kinfo k -> ICons_some (kinfo, k))} in option_t loc t >>?= fun ty -> typed ctxt loc cons_some (Item_t (ty, rest)) | (Prim (loc, I_NONE, [t], annot), stack) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy t >>?= fun (Ex_ty t, ctxt) -> check_var_type_annot loc annot >>?= fun () -> let cons_none = {apply = (fun kinfo k -> ICons_none (kinfo, k))} in option_t loc t >>?= fun ty -> let stack_ty = Item_t (ty, stack) in typed ctxt loc cons_none stack_ty | (Prim (loc, I_MAP, [body], annot), Item_t (Option_t (t, _, _), rest)) -> ( check_kind [Seq_kind] body >>?= fun () -> check_var_type_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger ~legacy tc_context ctxt body (Item_t (t, rest)) >>=? fun (judgement, ctxt) -> Lwt.return @@ match judgement with | Typed ({loc; aft = Item_t (ret, aft_rest); _} as kibody) -> let invalid_map_body () = let aft = serialize_stack_for_error ctxt kibody.aft in Invalid_map_body (loc, aft) in record_trace_eval invalid_map_body ( stack_eq loc ctxt 1 aft_rest rest >>? fun (Eq, ctxt) -> option_t loc ret >>? fun opt_ty -> let final_stack = Item_t (opt_ty, rest) in let hinfo = {iloc = loc; kstack_ty = Item_t (ret, aft_rest)} in let cinfo = kinfo_of_descr kibody in let body = kibody.instr.apply cinfo (IHalt hinfo) in let apply kinfo k = IOpt_map {kinfo; body; k} in typed_no_lwt ctxt loc {apply} final_stack ) | Typed {aft = Bot_t; _} -> let aft = serialize_stack_for_error ctxt Bot_t in error (Invalid_map_body (loc, aft)) | Failed _ -> error (Invalid_map_block_fail loc)) | ( Prim (loc, I_IF_NONE, [bt; bf], annot), (Item_t (Option_t (t, _, _), rest) as bef) ) -> check_kind [Seq_kind] bt >>?= fun () -> check_kind [Seq_kind] bf >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bt rest >>=? fun (btr, ctxt) -> let stack_ty = Item_t (t, rest) in non_terminal_recursion ?type_logger tc_context ctxt ~legacy bf stack_ty >>=? fun (bfr, ctxt) -> let branch ibt ibf = let ifnone = { apply = (fun kinfo k -> let hinfo = kinfo_of_kinstr k in let btinfo = kinfo_of_descr ibt and bfinfo = kinfo_of_descr ibf in let branch_if_none = ibt.instr.apply btinfo (IHalt hinfo) and branch_if_some = ibf.instr.apply bfinfo (IHalt hinfo) in IIf_none {kinfo; branch_if_none; branch_if_some; k}); } in {loc; instr = ifnone; bef; aft = ibt.aft} in Lwt.return @@ merge_branches ctxt loc btr bfr {branch} (* pairs *) | (Prim (loc, I_PAIR, [], annot), Item_t (a, Item_t (b, rest))) -> check_constr_annot loc annot >>?= fun () -> pair_t loc a b >>?= fun (Ty_ex_c ty) -> let stack_ty = Item_t (ty, rest) in let cons_pair = {apply = (fun kinfo k -> ICons_pair (kinfo, k))} in typed ctxt loc cons_pair stack_ty | (Prim (loc, I_PAIR, [n], annot), stack_ty) -> check_var_annot loc annot >>?= fun () -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a * s) comb_proof_argument tzresult = fun n stack_ty -> match (n, stack_ty) with | (1, Item_t (a_ty, tl_ty)) -> ok (Comb_proof_argument (Comb_one, Item_t (a_ty, tl_ty))) | (n, Item_t (a_ty, tl_ty)) -> make_proof_argument (n - 1) tl_ty >>? fun (Comb_proof_argument (comb_witness, Item_t (b_ty, tl_ty'))) -> pair_t loc a_ty b_ty >|? fun (Ty_ex_c pair_t) -> Comb_proof_argument (Comb_succ comb_witness, Item_t (pair_t, tl_ty')) | _ -> let whole_stack = serialize_stack_for_error ctxt stack_ty in error (Bad_stack (loc, I_PAIR, 1, whole_stack)) in parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> error_unless (Compare.Int.( > ) n 1) (Pair_bad_argument loc) >>?= fun () -> make_proof_argument n stack_ty >>?= fun (Comb_proof_argument (witness, after_ty)) -> let comb = {apply = (fun kinfo k -> IComb (kinfo, n, witness, k))} in typed ctxt loc comb after_ty | (Prim (loc, I_UNPAIR, [n], annot), stack_ty) -> error_unexpected_annot loc annot >>?= fun () -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a * s) uncomb_proof_argument tzresult = fun n stack_ty -> match (n, stack_ty) with | (1, stack) -> ok @@ Uncomb_proof_argument (Uncomb_one, stack) | (n, Item_t (Pair_t (a_ty, b_ty, _, _), tl_ty)) -> make_proof_argument (n - 1) (Item_t (b_ty, tl_ty)) >|? fun (Uncomb_proof_argument (uncomb_witness, after_ty)) -> Uncomb_proof_argument (Uncomb_succ uncomb_witness, Item_t (a_ty, after_ty)) | _ -> let whole_stack = serialize_stack_for_error ctxt stack_ty in error (Bad_stack (loc, I_UNPAIR, 1, whole_stack)) in parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> error_unless (Compare.Int.( > ) n 1) (Unpair_bad_argument loc) >>?= fun () -> make_proof_argument n stack_ty >>?= fun (Uncomb_proof_argument (witness, after_ty)) -> let uncomb = {apply = (fun kinfo k -> IUncomb (kinfo, n, witness, k))} in typed ctxt loc uncomb after_ty | (Prim (loc, I_GET, [n], annot), Item_t (comb_ty, rest_ty)) -> ( check_var_annot loc annot >>?= fun () -> parse_uint11 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> match make_comb_get_proof_argument n comb_ty with | None -> let whole_stack = serialize_stack_for_error ctxt stack_ty in fail (Bad_stack (loc, I_GET, 1, whole_stack)) | Some (Comb_get_proof_argument (witness, ty')) -> let after_stack_ty = Item_t (ty', rest_ty) in let comb_get = {apply = (fun kinfo k -> IComb_get (kinfo, n, witness, k))} in typed ctxt loc comb_get after_stack_ty) | ( Prim (loc, I_UPDATE, [n], annot), Item_t (value_ty, Item_t (comb_ty, rest_ty)) ) -> check_var_annot loc annot >>?= fun () -> parse_uint11 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> make_comb_set_proof_argument ctxt stack_ty loc n value_ty comb_ty >>?= fun (Comb_set_proof_argument (witness, after_ty)) -> let after_stack_ty = Item_t (after_ty, rest_ty) in let comb_set = {apply = (fun kinfo k -> IComb_set (kinfo, n, witness, k))} in typed ctxt loc comb_set after_stack_ty | (Prim (loc, I_UNPAIR, [], annot), Item_t (Pair_t (a, b, _, _), rest)) -> check_unpair_annot loc annot >>?= fun () -> let unpair = {apply = (fun kinfo k -> IUnpair (kinfo, k))} in typed ctxt loc unpair (Item_t (a, Item_t (b, rest))) | (Prim (loc, I_CAR, [], annot), Item_t (Pair_t (a, _, _, _), rest)) -> check_destr_annot loc annot >>?= fun () -> let car = {apply = (fun kinfo k -> ICar (kinfo, k))} in typed ctxt loc car (Item_t (a, rest)) | (Prim (loc, I_CDR, [], annot), Item_t (Pair_t (_, b, _, _), rest)) -> check_destr_annot loc annot >>?= fun () -> let cdr = {apply = (fun kinfo k -> ICdr (kinfo, k))} in typed ctxt loc cdr (Item_t (b, rest)) (* unions *) | (Prim (loc, I_LEFT, [tr], annot), Item_t (tl, rest)) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy tr >>?= fun (Ex_ty tr, ctxt) -> check_constr_annot loc annot >>?= fun () -> let cons_left = {apply = (fun kinfo k -> ICons_left (kinfo, k))} in union_t loc tl tr >>?= fun (Ty_ex_c ty) -> let stack_ty = Item_t (ty, rest) in typed ctxt loc cons_left stack_ty | (Prim (loc, I_RIGHT, [tl], annot), Item_t (tr, rest)) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy tl >>?= fun (Ex_ty tl, ctxt) -> check_constr_annot loc annot >>?= fun () -> let cons_right = {apply = (fun kinfo k -> ICons_right (kinfo, k))} in union_t loc tl tr >>?= fun (Ty_ex_c ty) -> let stack_ty = Item_t (ty, rest) in typed ctxt loc cons_right stack_ty | ( Prim (loc, I_IF_LEFT, [bt; bf], annot), (Item_t (Union_t (tl, tr, _, _), rest) as bef) ) -> check_kind [Seq_kind] bt >>?= fun () -> check_kind [Seq_kind] bf >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bt (Item_t (tl, rest)) >>=? fun (btr, ctxt) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bf (Item_t (tr, rest)) >>=? fun (bfr, ctxt) -> let branch ibt ibf = let infobt = kinfo_of_descr ibt and infobf = kinfo_of_descr ibf in let instr = { apply = (fun kinfo k -> let hinfo = kinfo_of_kinstr k in let branch_if_left = ibt.instr.apply infobt (IHalt hinfo) and branch_if_right = ibf.instr.apply infobf (IHalt hinfo) in IIf_left {kinfo; branch_if_left; branch_if_right; k}); } in {loc; instr; bef; aft = ibt.aft} in Lwt.return @@ merge_branches ctxt loc btr bfr {branch} (* lists *) | (Prim (loc, I_NIL, [t], annot), stack) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy t >>?= fun (Ex_ty t, ctxt) -> check_var_type_annot loc annot >>?= fun () -> let nil = {apply = (fun kinfo k -> INil (kinfo, k))} in list_t loc t >>?= fun ty -> typed ctxt loc nil (Item_t (ty, stack)) | ( Prim (loc, I_CONS, [], annot), Item_t (tv, (Item_t (List_t (t, _), _) as stack)) ) -> check_item_ty ctxt tv t loc I_CONS 1 2 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let cons_list = {apply = (fun kinfo k -> ICons_list (kinfo, k))} in (typed ctxt loc cons_list stack : ((a, s) judgement * context) tzresult Lwt.t) | ( Prim (loc, I_IF_CONS, [bt; bf], annot), (Item_t (List_t (t, _), rest) as bef) ) -> check_kind [Seq_kind] bt >>?= fun () -> check_kind [Seq_kind] bf >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bt (Item_t (t, bef)) >>=? fun (btr, ctxt) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bf rest >>=? fun (bfr, ctxt) -> let branch ibt ibf = let infobt = kinfo_of_descr ibt and infobf = kinfo_of_descr ibf in let instr = { apply = (fun kinfo k -> let hinfo = kinfo_of_kinstr k in let branch_if_cons = ibt.instr.apply infobt (IHalt hinfo) and branch_if_nil = ibf.instr.apply infobf (IHalt hinfo) in IIf_cons {kinfo; branch_if_nil; branch_if_cons; k}); } in {loc; instr; bef; aft = ibt.aft} in Lwt.return @@ merge_branches ctxt loc btr bfr {branch} | (Prim (loc, I_SIZE, [], annot), Item_t (List_t _, rest)) -> check_var_type_annot loc annot >>?= fun () -> let list_size = {apply = (fun kinfo k -> IList_size (kinfo, k))} in typed ctxt loc list_size (Item_t (nat_t, rest)) | (Prim (loc, I_MAP, [body], annot), Item_t (List_t (elt, _), starting_rest)) -> ( check_kind [Seq_kind] body >>?= fun () -> check_var_type_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (elt, starting_rest)) >>=? fun (judgement, ctxt) -> Lwt.return @@ match judgement with | Typed ({aft = Item_t (ret, rest) as aft; _} as kibody) -> let invalid_map_body () = let aft = serialize_stack_for_error ctxt aft in Invalid_map_body (loc, aft) in record_trace_eval invalid_map_body ( stack_eq loc ctxt 1 rest starting_rest >>? fun (Eq, ctxt) -> let binfo = kinfo_of_descr kibody in let hinfo = {iloc = loc; kstack_ty = aft} in let ibody = kibody.instr.apply binfo (IHalt hinfo) in let list_map = {apply = (fun kinfo k -> IList_map (kinfo, ibody, k))} in list_t loc ret >>? fun ty -> let stack = Item_t (ty, rest) in typed_no_lwt ctxt loc list_map stack ) | Typed {aft; _} -> let aft = serialize_stack_for_error ctxt aft in error (Invalid_map_body (loc, aft)) | Failed _ -> error (Invalid_map_block_fail loc)) | (Prim (loc, I_ITER, [body], annot), Item_t (List_t (elt, _), rest)) -> ( check_kind [Seq_kind] body >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (elt, rest)) >>=? fun (judgement, ctxt) -> let mk_list_iter ibody = { apply = (fun kinfo k -> let hinfo = {iloc = loc; kstack_ty = rest} in let binfo = kinfo_of_descr ibody in let ibody = ibody.instr.apply binfo (IHalt hinfo) in IList_iter (kinfo, ibody, k)); } in Lwt.return @@ match judgement with | Typed ({aft; _} as ibody) -> let invalid_iter_body () = let aft = serialize_stack_for_error ctxt ibody.aft in let rest = serialize_stack_for_error ctxt rest in Invalid_iter_body (loc, rest, aft) in record_trace_eval invalid_iter_body ( stack_eq loc ctxt 1 aft rest >>? fun (Eq, ctxt) : ((a, s) judgement * context) tzresult -> typed_no_lwt ctxt loc (mk_list_iter ibody) rest ) | Failed {descr} -> typed_no_lwt ctxt loc (mk_list_iter (descr rest)) rest ) (* sets *) | (Prim (loc, I_EMPTY_SET, [t], annot), rest) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt t >>?= fun (Ex_comparable_ty t, ctxt) -> check_var_type_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEmpty_set (kinfo, t, k))} in set_t loc t >>?= fun ty -> typed ctxt loc instr (Item_t (ty, rest)) | (Prim (loc, I_ITER, [body], annot), Item_t (Set_t (elt, _), rest)) -> ( check_kind [Seq_kind] body >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (elt, rest)) >>=? fun (judgement, ctxt) -> let mk_iset_iter ibody = { apply = (fun kinfo k -> let hinfo = {iloc = loc; kstack_ty = rest} in let binfo = kinfo_of_descr ibody in let ibody = ibody.instr.apply binfo (IHalt hinfo) in ISet_iter (kinfo, ibody, k)); } in Lwt.return @@ match judgement with | Typed ({aft; _} as ibody) -> let invalid_iter_body () = let aft = serialize_stack_for_error ctxt ibody.aft in let rest = serialize_stack_for_error ctxt rest in Invalid_iter_body (loc, rest, aft) in record_trace_eval invalid_iter_body ( stack_eq loc ctxt 1 aft rest >>? fun (Eq, ctxt) : ((a, s) judgement * context) tzresult -> typed_no_lwt ctxt loc (mk_iset_iter ibody) rest ) | Failed {descr} -> typed_no_lwt ctxt loc (mk_iset_iter (descr rest)) rest ) | (Prim (loc, I_MEM, [], annot), Item_t (v, Item_t (Set_t (elt, _), rest))) -> check_var_type_annot loc annot >>?= fun () -> check_item_ty ctxt elt v loc I_MEM 1 2 >>?= fun (Eq, ctxt) -> let instr = {apply = (fun kinfo k -> ISet_mem (kinfo, k))} in (typed ctxt loc instr (Item_t (bool_t, rest)) : ((a, s) judgement * context) tzresult Lwt.t) | ( Prim (loc, I_UPDATE, [], annot), Item_t (v, Item_t (Bool_t, (Item_t (Set_t (elt, _), _) as stack))) ) -> check_item_ty ctxt elt v loc I_UPDATE 1 3 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISet_update (kinfo, k))} in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | (Prim (loc, I_SIZE, [], annot), Item_t (Set_t _, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISet_size (kinfo, k))} in typed ctxt loc instr (Item_t (nat_t, rest)) (* maps *) | (Prim (loc, I_EMPTY_MAP, [tk; tv], annot), stack) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt tk >>?= fun (Ex_comparable_ty tk, ctxt) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy tv >>?= fun (Ex_ty tv, ctxt) -> check_var_type_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEmpty_map (kinfo, tk, k))} in map_t loc tk tv >>?= fun ty -> typed ctxt loc instr (Item_t (ty, stack)) | (Prim (loc, I_MAP, [body], annot), Item_t (Map_t (k, elt, _), starting_rest)) -> ( check_kind [Seq_kind] body >>?= fun () -> check_var_type_annot loc annot >>?= fun () -> pair_t loc k elt >>?= fun (Ty_ex_c ty) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (ty, starting_rest)) >>=? fun (judgement, ctxt) -> Lwt.return @@ match judgement with | Typed ({aft = Item_t (ret, rest) as aft; _} as ibody) -> let invalid_map_body () = let aft = serialize_stack_for_error ctxt aft in Invalid_map_body (loc, aft) in record_trace_eval invalid_map_body ( stack_eq loc ctxt 1 rest starting_rest >>? fun (Eq, ctxt) -> let instr = { apply = (fun kinfo k -> let binfo = kinfo_of_descr ibody in let hinfo = {iloc = loc; kstack_ty = aft} in let ibody = ibody.instr.apply binfo (IHalt hinfo) in IMap_map (kinfo, ibody, k)); } in map_t loc k ret >>? fun ty -> let stack = Item_t (ty, rest) in typed_no_lwt ctxt loc instr stack ) | Typed {aft; _} -> let aft = serialize_stack_for_error ctxt aft in error (Invalid_map_body (loc, aft)) | Failed _ -> error (Invalid_map_block_fail loc)) | ( Prim (loc, I_ITER, [body], annot), Item_t (Map_t (key, element_ty, _), rest) ) -> ( check_kind [Seq_kind] body >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> pair_t loc key element_ty >>?= fun (Ty_ex_c ty) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (ty, rest)) >>=? fun (judgement, ctxt) -> let make_instr ibody = { apply = (fun kinfo k -> let hinfo = {iloc = loc; kstack_ty = rest} in let binfo = kinfo_of_descr ibody in let ibody = ibody.instr.apply binfo (IHalt hinfo) in IMap_iter (kinfo, ibody, k)); } in Lwt.return @@ match judgement with | Typed ({aft; _} as ibody) -> let invalid_iter_body () = let aft = serialize_stack_for_error ctxt ibody.aft in let rest = serialize_stack_for_error ctxt rest in Invalid_iter_body (loc, rest, aft) in record_trace_eval invalid_iter_body ( stack_eq loc ctxt 1 aft rest >>? fun (Eq, ctxt) : ((a, s) judgement * context) tzresult -> typed_no_lwt ctxt loc (make_instr ibody) rest ) | Failed {descr} -> typed_no_lwt ctxt loc (make_instr (descr rest)) rest) | (Prim (loc, I_MEM, [], annot), Item_t (vk, Item_t (Map_t (k, _, _), rest))) -> check_item_ty ctxt vk k loc I_MEM 1 2 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMap_mem (kinfo, k))} in (typed ctxt loc instr (Item_t (bool_t, rest)) : ((a, s) judgement * context) tzresult Lwt.t) | (Prim (loc, I_GET, [], annot), Item_t (vk, Item_t (Map_t (k, elt, _), rest))) -> check_item_ty ctxt vk k loc I_GET 1 2 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMap_get (kinfo, k))} in option_t loc elt >>?= fun ty : ((a, s) judgement * context) tzresult Lwt.t -> typed ctxt loc instr (Item_t (ty, rest)) | ( Prim (loc, I_UPDATE, [], annot), Item_t ( vk, Item_t (Option_t (vv, _, _), (Item_t (Map_t (k, v, _), _) as stack)) ) ) -> check_item_ty ctxt vk k loc I_UPDATE 1 3 >>?= fun (Eq, ctxt) -> check_item_ty ctxt vv v loc I_UPDATE 2 3 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMap_update (kinfo, k))} in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | ( Prim (loc, I_GET_AND_UPDATE, [], annot), Item_t ( vk, (Item_t (Option_t (vv, _, _), Item_t (Map_t (k, v, _), _)) as stack) ) ) -> check_item_ty ctxt vk k loc I_GET_AND_UPDATE 1 3 >>?= fun (Eq, ctxt) -> check_item_ty ctxt vv v loc I_GET_AND_UPDATE 2 3 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMap_get_and_update (kinfo, k))} in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | (Prim (loc, I_SIZE, [], annot), Item_t (Map_t (_, _, _), rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMap_size (kinfo, k))} in typed ctxt loc instr (Item_t (nat_t, rest)) (* big_map *) | (Prim (loc, I_EMPTY_BIG_MAP, [tk; tv], annot), stack) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt tk >>?= fun (Ex_comparable_ty tk, ctxt) -> parse_big_map_value_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy tv >>?= fun (Ex_ty tv, ctxt) -> check_var_type_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEmpty_big_map (kinfo, tk, tv, k))} in big_map_t loc tk tv >>?= fun ty -> let stack = Item_t (ty, stack) in typed ctxt loc instr stack | ( Prim (loc, I_MEM, [], annot), Item_t (set_key, Item_t (Big_map_t (k, _, _), rest)) ) -> check_item_ty ctxt set_key k loc I_MEM 1 2 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IBig_map_mem (kinfo, k))} in let stack = Item_t (bool_t, rest) in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | ( Prim (loc, I_GET, [], annot), Item_t (vk, Item_t (Big_map_t (k, elt, _), rest)) ) -> check_item_ty ctxt vk k loc I_GET 1 2 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IBig_map_get (kinfo, k))} in option_t loc elt >>?= fun ty -> let stack = Item_t (ty, rest) in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | ( Prim (loc, I_UPDATE, [], annot), Item_t ( set_key, Item_t ( Option_t (set_value, _, _), (Item_t (Big_map_t (map_key, map_value, _), _) as stack) ) ) ) -> check_item_ty ctxt set_key map_key loc I_UPDATE 1 3 >>?= fun (Eq, ctxt) -> check_item_ty ctxt set_value map_value loc I_UPDATE 2 3 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IBig_map_update (kinfo, k))} in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | ( Prim (loc, I_GET_AND_UPDATE, [], annot), Item_t ( vk, (Item_t (Option_t (vv, _, _), Item_t (Big_map_t (k, v, _), _)) as stack) ) ) -> check_item_ty ctxt vk k loc I_GET_AND_UPDATE 1 3 >>?= fun (Eq, ctxt) -> check_item_ty ctxt vv v loc I_GET_AND_UPDATE 2 3 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IBig_map_get_and_update (kinfo, k))} in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) (* Sapling *) | (Prim (loc, I_SAPLING_EMPTY_STATE, [memo_size], annot), rest) -> parse_memo_size memo_size >>?= fun memo_size -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISapling_empty_state (kinfo, memo_size, k))} in let stack = Item_t (sapling_state_t ~memo_size, rest) in typed ctxt loc instr stack | ( Prim (loc, I_SAPLING_VERIFY_UPDATE, [], _), Item_t ( Sapling_transaction_deprecated_t transaction_memo_size, Item_t ((Sapling_state_t state_memo_size as state_ty), rest) ) ) -> if legacy then memo_size_eq ~error_details:(Informative ()) state_memo_size transaction_memo_size >>?= fun () -> let instr = { apply = (fun kinfo k -> ISapling_verify_update_deprecated (kinfo, k)); } in pair_t loc int_t state_ty >>?= fun (Ty_ex_c pair_ty) -> option_t loc pair_ty >>?= fun ty -> let stack = Item_t (ty, rest) in typed ctxt loc instr stack else fail (Deprecated_instruction T_sapling_transaction_deprecated) | ( Prim (loc, I_SAPLING_VERIFY_UPDATE, [], _), Item_t ( Sapling_transaction_t transaction_memo_size, Item_t ((Sapling_state_t state_memo_size as state_ty), rest) ) ) -> memo_size_eq ~error_details:(Informative ()) state_memo_size transaction_memo_size >>?= fun () -> let instr = {apply = (fun kinfo k -> ISapling_verify_update (kinfo, k))} in pair_t loc int_t state_ty >>?= fun (Ty_ex_c pair_ty) -> pair_t loc bytes_t pair_ty >>?= fun (Ty_ex_c pair_ty) -> option_t loc pair_ty >>?= fun ty -> let stack = Item_t (ty, rest) in typed ctxt loc instr stack (* control *) | (Seq (loc, []), stack) -> let instr = {apply = (fun _kinfo k -> k)} in typed ctxt loc instr stack | (Seq (_, [single]), stack) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy single stack | (Seq (loc, hd :: tl), stack) -> ( non_terminal_recursion ?type_logger tc_context ctxt ~legacy hd stack >>=? fun (judgement, ctxt) -> match judgement with | Failed _ -> fail (Fail_not_in_tail_position (Micheline.location hd)) | Typed ({aft = middle; _} as ihd) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy (Seq (Micheline.dummy_location, tl)) middle >|=? fun (judgement, ctxt) -> let judgement = match judgement with | Failed {descr} -> let descr ret = compose_descr loc ihd (descr ret) in Failed {descr} | Typed itl -> Typed (compose_descr loc ihd itl) in (judgement, ctxt)) | (Prim (loc, I_IF, [bt; bf], annot), (Item_t (Bool_t, rest) as bef)) -> check_kind [Seq_kind] bt >>?= fun () -> check_kind [Seq_kind] bf >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bt rest >>=? fun (btr, ctxt) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bf rest >>=? fun (bfr, ctxt) -> let branch ibt ibf = let infobt = kinfo_of_descr ibt and infobf = kinfo_of_descr ibf in let instr = { apply = (fun kinfo k -> let hinfo = kinfo_of_kinstr k in let branch_if_true = ibt.instr.apply infobt (IHalt hinfo) and branch_if_false = ibf.instr.apply infobf (IHalt hinfo) in IIf {kinfo; branch_if_true; branch_if_false; k}); } in {loc; instr; bef; aft = ibt.aft} in Lwt.return @@ merge_branches ctxt loc btr bfr {branch} | (Prim (loc, I_LOOP, [body], annot), (Item_t (Bool_t, rest) as stack)) -> ( check_kind [Seq_kind] body >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy body rest >>=? fun (judgement, ctxt) -> Lwt.return @@ match judgement with | Typed ibody -> let unmatched_branches () = let aft = serialize_stack_for_error ctxt ibody.aft in let stack = serialize_stack_for_error ctxt stack in Unmatched_branches (loc, aft, stack) in record_trace_eval unmatched_branches ( stack_eq loc ctxt 1 ibody.aft stack >>? fun (Eq, ctxt) -> let instr = { apply = (fun kinfo k -> let ibody = ibody.instr.apply (kinfo_of_descr ibody) (IHalt kinfo) in ILoop (kinfo, ibody, k)); } in typed_no_lwt ctxt loc instr rest ) | Failed {descr} -> let instr = { apply = (fun kinfo k -> let ibody = descr stack in let ibody = ibody.instr.apply (kinfo_of_descr ibody) (IHalt kinfo) in ILoop (kinfo, ibody, k)); } in typed_no_lwt ctxt loc instr rest) | ( Prim (loc, I_LOOP_LEFT, [body], annot), (Item_t (Union_t (tl, tr, _, _), rest) as stack) ) -> ( check_kind [Seq_kind] body >>?= fun () -> check_var_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (tl, rest)) >>=? fun (judgement, ctxt) -> Lwt.return @@ match judgement with | Typed ibody -> let unmatched_branches () = let aft = serialize_stack_for_error ctxt ibody.aft in let stack = serialize_stack_for_error ctxt stack in Unmatched_branches (loc, aft, stack) in record_trace_eval unmatched_branches ( stack_eq loc ctxt 1 ibody.aft stack >>? fun (Eq, ctxt) -> let instr = { apply = (fun kinfo k -> let ibody = ibody.instr.apply (kinfo_of_descr ibody) (IHalt kinfo) in ILoop_left (kinfo, ibody, k)); } in let stack = Item_t (tr, rest) in typed_no_lwt ctxt loc instr stack ) | Failed {descr} -> let instr = { apply = (fun kinfo k -> let ibody = descr stack in let ibody = ibody.instr.apply (kinfo_of_descr ibody) (IHalt kinfo) in ILoop_left (kinfo, ibody, k)); } in let stack = Item_t (tr, rest) in typed_no_lwt ctxt loc instr stack) | (Prim (loc, I_LAMBDA, [arg; ret; code], annot), stack) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy arg >>?= fun (Ex_ty arg, ctxt) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ret >>?= fun (Ex_ty ret, ctxt) -> check_kind [Seq_kind] code >>?= fun () -> check_var_annot loc annot >>?= fun () -> parse_returning (Tc_context.add_lambda tc_context) ?type_logger ~stack_depth:(stack_depth + 1) ctxt ~legacy arg ret code >>=? fun (lambda, ctxt) -> let instr = {apply = (fun kinfo k -> ILambda (kinfo, lambda, k))} in lambda_t loc arg ret >>?= fun ty -> let stack = Item_t (ty, stack) in typed ctxt loc instr stack | ( Prim (loc, I_EXEC, [], annot), Item_t (arg, Item_t (Lambda_t (param, ret, _), rest)) ) -> check_item_ty ctxt arg param loc I_EXEC 1 2 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IExec (kinfo, k))} in let stack = Item_t (ret, rest) in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | ( Prim (loc, I_APPLY, [], annot), Item_t ( capture, Item_t (Lambda_t (Pair_t (capture_ty, arg_ty, _, _), ret, _), rest) ) ) -> check_packable ~legacy:false loc capture_ty >>?= fun () -> check_item_ty ctxt capture capture_ty loc I_APPLY 1 2 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IApply (kinfo, capture_ty, k))} in lambda_t loc arg_ty ret (* This cannot fail because the type [lambda 'arg 'ret] is always smaller than the input type [lambda (pair 'arg 'capture) 'ret]. In an ideal world, there would be a smart deconstructor to ensure this statically. *) >>?= fun res_ty -> let stack = Item_t (res_ty, rest) in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | (Prim (loc, I_DIP, [code], annot), Item_t (v, rest)) -> ( error_unexpected_annot loc annot >>?= fun () -> check_kind [Seq_kind] code >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy code rest >>=? fun (judgement, ctxt) -> match judgement with | Typed descr -> let instr = { apply = (fun kinfo k -> let binfo = {iloc = descr.loc; kstack_ty = descr.bef} in let kinfoh = {iloc = descr.loc; kstack_ty = descr.aft} in let b = descr.instr.apply binfo (IHalt kinfoh) in IDip (kinfo, b, k)); } in let stack = Item_t (v, descr.aft) in typed ctxt loc instr stack | Failed _ -> fail (Fail_not_in_tail_position loc)) | (Prim (loc, I_DIP, [n; code], result_annot), stack) -> parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a, s) dipn_proof_argument tzresult Lwt.t = fun n stk -> match (Compare.Int.(n = 0), stk) with | (true, rest) -> ( non_terminal_recursion ?type_logger tc_context ctxt ~legacy code rest >>=? fun (judgement, ctxt) -> Lwt.return @@ match judgement with | Typed descr -> ok (Dipn_proof_argument (KRest, ctxt, descr, descr.aft) : (a, s) dipn_proof_argument) | Failed _ -> error (Fail_not_in_tail_position loc)) | (false, Item_t (v, rest)) -> make_proof_argument (n - 1) rest >|=? fun (Dipn_proof_argument (n', ctxt, descr, aft')) -> let kinfo' = {iloc = loc; kstack_ty = aft'} in let w = KPrefix (kinfo', n') in Dipn_proof_argument (w, ctxt, descr, Item_t (v, aft')) | (_, _) -> Lwt.return (let whole_stack = serialize_stack_for_error ctxt stack in error (Bad_stack (loc, I_DIP, 1, whole_stack))) in error_unexpected_annot loc result_annot >>?= fun () -> make_proof_argument n stack >>=? fun (Dipn_proof_argument (n', ctxt, descr, aft)) -> let kinfo = {iloc = descr.loc; kstack_ty = descr.bef} in let kinfoh = {iloc = descr.loc; kstack_ty = descr.aft} in let b = descr.instr.apply kinfo (IHalt kinfoh) in let res = {apply = (fun kinfo k -> IDipn (kinfo, n, n', b, k))} in typed ctxt loc res aft | (Prim (loc, I_DIP, (([] | _ :: _ :: _ :: _) as l), _), _) -> (* Technically, the arities 1 and 2 are allowed but the error only mentions 2. However, DIP {code} is equivalent to DIP 1 {code} so hinting at an arity of 2 makes sense. *) fail (Invalid_arity (loc, I_DIP, 2, List.length l)) | (Prim (loc, I_FAILWITH, [], annot), Item_t (v, _rest)) -> Lwt.return ( error_unexpected_annot loc annot >>? fun () -> (if legacy then Result.return_unit else check_packable ~legacy:false loc v) >|? fun () -> let instr = {apply = (fun kinfo _k -> IFailwith (kinfo, loc, v))} in let descr aft = {loc; instr; bef = stack_ty; aft} in log_stack loc stack_ty Bot_t ; (Failed {descr}, ctxt) ) | (Prim (loc, I_NEVER, [], annot), Item_t (Never_t, _rest)) -> Lwt.return ( error_unexpected_annot loc annot >|? fun () -> let instr = {apply = (fun kinfo _k -> INever kinfo)} in let descr aft = {loc; instr; bef = stack_ty; aft} in log_stack loc stack_ty Bot_t ; (Failed {descr}, ctxt) ) (* timestamp operations *) | (Prim (loc, I_ADD, [], annot), Item_t (Timestamp_t, Item_t (Int_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_timestamp_to_seconds (kinfo, k))} in typed ctxt loc instr (Item_t (Timestamp_t, rest)) | ( Prim (loc, I_ADD, [], annot), Item_t (Int_t, (Item_t (Timestamp_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_seconds_to_timestamp (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_SUB, [], annot), Item_t (Timestamp_t, Item_t (Int_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISub_timestamp_seconds (kinfo, k))} in let stack = Item_t (Timestamp_t, rest) in typed ctxt loc instr stack | ( Prim (loc, I_SUB, [], annot), Item_t (Timestamp_t, Item_t (Timestamp_t, rest)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IDiff_timestamps (kinfo, k))} in let stack = Item_t (int_t, rest) in typed ctxt loc instr stack (* string operations *) | ( Prim (loc, I_CONCAT, [], annot), Item_t (String_t, (Item_t (String_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IConcat_string_pair (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_CONCAT, [], annot), Item_t (List_t (String_t, _), rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IConcat_string (kinfo, k))} in typed ctxt loc instr (Item_t (String_t, rest)) | ( Prim (loc, I_SLICE, [], annot), Item_t (Nat_t, Item_t (Nat_t, Item_t (String_t, rest))) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISlice_string (kinfo, k))} in let stack = Item_t (option_string_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_SIZE, [], annot), Item_t (String_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IString_size (kinfo, k))} in let stack = Item_t (nat_t, rest) in typed ctxt loc instr stack (* bytes operations *) | ( Prim (loc, I_CONCAT, [], annot), Item_t (Bytes_t, (Item_t (Bytes_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IConcat_bytes_pair (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_CONCAT, [], annot), Item_t (List_t (Bytes_t, _), rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IConcat_bytes (kinfo, k))} in let stack = Item_t (Bytes_t, rest) in typed ctxt loc instr stack | ( Prim (loc, I_SLICE, [], annot), Item_t (Nat_t, Item_t (Nat_t, Item_t (Bytes_t, rest))) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISlice_bytes (kinfo, k))} in let stack = Item_t (option_bytes_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_SIZE, [], annot), Item_t (Bytes_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IBytes_size (kinfo, k))} in let stack = Item_t (nat_t, rest) in typed ctxt loc instr stack (* currency operations *) | ( Prim (loc, I_ADD, [], annot), Item_t (Mutez_t, (Item_t (Mutez_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_tez (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_SUB, [], annot), Item_t (Mutez_t, (Item_t (Mutez_t, _) as stack)) ) -> if legacy then check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISub_tez_legacy (kinfo, k))} in typed ctxt loc instr stack else fail (Deprecated_instruction I_SUB) | ( Prim (loc, I_SUB_MUTEZ, [], annot), Item_t (Mutez_t, Item_t (Mutez_t, rest)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISub_tez (kinfo, k))} in let stack = Item_t (option_mutez_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_MUL, [], annot), Item_t (Mutez_t, Item_t (Nat_t, rest))) -> (* no type name check *) check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_teznat (kinfo, k))} in let stack = Item_t (Mutez_t, rest) in typed ctxt loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Nat_t, (Item_t (Mutez_t, _) as stack)) ) -> (* no type name check *) check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_nattez (kinfo, k))} in typed ctxt loc instr stack (* boolean operations *) | (Prim (loc, I_OR, [], annot), Item_t (Bool_t, (Item_t (Bool_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IOr (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_AND, [], annot), Item_t (Bool_t, (Item_t (Bool_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAnd (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_XOR, [], annot), Item_t (Bool_t, (Item_t (Bool_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IXor (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_NOT, [], annot), (Item_t (Bool_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INot (kinfo, k))} in typed ctxt loc instr stack (* integer operations *) | (Prim (loc, I_ABS, [], annot), Item_t (Int_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAbs_int (kinfo, k))} in let stack = Item_t (nat_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_ISNAT, [], annot), Item_t (Int_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IIs_nat (kinfo, k))} in let stack = Item_t (option_nat_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_INT, [], annot), Item_t (Nat_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IInt_nat (kinfo, k))} in let stack = Item_t (int_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_NEG, [], annot), (Item_t (Int_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INeg (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_NEG, [], annot), Item_t (Nat_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INeg (kinfo, k))} in let stack = Item_t (int_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_ADD, [], annot), Item_t (Int_t, (Item_t (Int_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_int (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_ADD, [], annot), Item_t (Int_t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_int (kinfo, k))} in let stack = Item_t (Int_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_ADD, [], annot), Item_t (Nat_t, (Item_t (Int_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_int (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_ADD, [], annot), Item_t (Nat_t, (Item_t (Nat_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_SUB, [], annot), Item_t (Int_t, (Item_t (Int_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISub_int (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_SUB, [], annot), Item_t (Int_t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISub_int (kinfo, k))} in let stack = Item_t (Int_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_SUB, [], annot), Item_t (Nat_t, (Item_t (Int_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISub_int (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_SUB, [], annot), Item_t (Nat_t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISub_int (kinfo, k))} in let stack = Item_t (int_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_MUL, [], annot), Item_t (Int_t, (Item_t (Int_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_int (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_MUL, [], annot), Item_t (Int_t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_int (kinfo, k))} in let stack = Item_t (Int_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_MUL, [], annot), Item_t (Nat_t, (Item_t (Int_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_MUL, [], annot), Item_t (Nat_t, (Item_t (Nat_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_EDIV, [], annot), Item_t (Mutez_t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEdiv_teznat (kinfo, k))} in let stack = Item_t (option_pair_mutez_mutez_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_EDIV, [], annot), Item_t (Mutez_t, Item_t (Mutez_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEdiv_tez (kinfo, k))} in let stack = Item_t (option_pair_nat_mutez_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_EDIV, [], annot), Item_t (Int_t, Item_t (Int_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEdiv_int (kinfo, k))} in let stack = Item_t (option_pair_int_nat_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_EDIV, [], annot), Item_t (Int_t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEdiv_int (kinfo, k))} in let stack = Item_t (option_pair_int_nat_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_EDIV, [], annot), Item_t (Nat_t, Item_t (Int_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEdiv_nat (kinfo, k))} in let stack = Item_t (option_pair_int_nat_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_EDIV, [], annot), Item_t (Nat_t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEdiv_nat (kinfo, k))} in let stack = Item_t (option_pair_nat_nat_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_LSL, [], annot), Item_t (Nat_t, (Item_t (Nat_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ILsl_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_LSR, [], annot), Item_t (Nat_t, (Item_t (Nat_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ILsr_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_OR, [], annot), Item_t (Nat_t, (Item_t (Nat_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IOr_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_AND, [], annot), Item_t (Nat_t, (Item_t (Nat_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAnd_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_AND, [], annot), Item_t (Int_t, (Item_t (Nat_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAnd_int_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_XOR, [], annot), Item_t (Nat_t, (Item_t (Nat_t, _) as stack))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IXor_nat (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_NOT, [], annot), (Item_t (Int_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INot_int (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_NOT, [], annot), Item_t (Nat_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INot_int (kinfo, k))} in let stack = Item_t (int_t, rest) in typed ctxt loc instr stack (* comparison *) | (Prim (loc, I_COMPARE, [], annot), Item_t (t1, Item_t (t2, rest))) -> check_var_annot loc annot >>?= fun () -> check_item_ty ctxt t1 t2 loc I_COMPARE 1 2 >>?= fun (Eq, ctxt) -> check_comparable loc t1 >>?= fun Eq -> let instr = {apply = (fun kinfo k -> ICompare (kinfo, t1, k))} in let stack = Item_t (int_t, rest) in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) (* comparators *) | (Prim (loc, I_EQ, [], annot), Item_t (Int_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IEq (kinfo, k))} in let stack = Item_t (bool_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_NEQ, [], annot), Item_t (Int_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INeq (kinfo, k))} in let stack = Item_t (bool_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_LT, [], annot), Item_t (Int_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ILt (kinfo, k))} in let stack = Item_t (bool_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_GT, [], annot), Item_t (Int_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IGt (kinfo, k))} in let stack = Item_t (bool_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_LE, [], annot), Item_t (Int_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ILe (kinfo, k))} in let stack = Item_t (bool_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_GE, [], annot), Item_t (Int_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IGe (kinfo, k))} in let stack = Item_t (bool_t, rest) in typed ctxt loc instr stack (* annotations *) | (Prim (loc, I_CAST, [cast_t], annot), (Item_t (t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy cast_t >>?= fun (Ex_ty cast_t, ctxt) -> Gas_monad.run ctxt @@ ty_eq ~error_details:(Informative loc) cast_t t >>?= fun (eq, ctxt) -> eq >>?= fun Eq -> (* We can reuse [stack] because [a ty = b ty] means [a = b]. *) let instr = {apply = (fun _ k -> k)} in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | (Prim (loc, I_RENAME, [], annot), (Item_t _ as stack)) -> check_var_annot loc annot >>?= fun () -> (* can erase annot *) let instr = {apply = (fun _ k -> k)} in typed ctxt loc instr stack (* packing *) | (Prim (loc, I_PACK, [], annot), Item_t (t, rest)) -> check_packable ~legacy:true (* allow to pack contracts for hash/signature checks *) loc t >>?= fun () -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IPack (kinfo, t, k))} in let stack = Item_t (bytes_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_UNPACK, [ty], annot), Item_t (Bytes_t, rest)) -> parse_packable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ty >>?= fun (Ex_ty t, ctxt) -> check_var_type_annot loc annot >>?= fun () -> option_t loc t >>?= fun res_ty -> let instr = {apply = (fun kinfo k -> IUnpack (kinfo, t, k))} in let stack = Item_t (res_ty, rest) in typed ctxt loc instr stack (* protocol *) | (Prim (loc, I_ADDRESS, [], annot), Item_t (Contract_t _, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAddress (kinfo, k))} in let stack = Item_t (address_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_CONTRACT, [ty], annot), Item_t (Address_t, rest)) -> parse_passable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ty >>?= fun (Ex_ty t, ctxt) -> contract_t loc t >>?= fun contract_ty -> option_t loc contract_ty >>?= fun res_ty -> parse_entrypoint_annot_strict loc annot >>?= fun entrypoint -> let instr = {apply = (fun kinfo k -> IContract (kinfo, t, entrypoint, k))} in let stack = Item_t (res_ty, rest) in typed ctxt loc instr stack | ( Prim (loc, I_VIEW, [name; output_ty], annot), Item_t (input_ty, Item_t (Address_t, rest)) ) -> let output_ty_loc = location output_ty in parse_view_name ctxt name >>?= fun (name, ctxt) -> parse_view_output_ty ctxt ~stack_depth:0 ~legacy output_ty >>?= fun (Ex_ty output_ty, ctxt) -> option_t output_ty_loc output_ty >>?= fun res_ty -> check_var_annot loc annot >>?= fun () -> let instr = { apply = (fun kinfo k -> IView (kinfo, View_signature {name; input_ty; output_ty}, k)); } in let stack = Item_t (res_ty, rest) in typed ctxt loc instr stack | ( Prim (loc, (I_TRANSFER_TOKENS as prim), [], annot), Item_t (p, Item_t (Mutez_t, Item_t (Contract_t (cp, _), rest))) ) -> Tc_context.check_not_in_view loc ~legacy tc_context prim >>?= fun () -> check_item_ty ctxt p cp loc prim 1 4 >>?= fun (Eq, ctxt) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ITransfer_tokens (kinfo, k))} in let stack = Item_t (operation_t, rest) in (typed ctxt loc instr stack : ((a, s) judgement * context) tzresult Lwt.t) | ( Prim (loc, (I_SET_DELEGATE as prim), [], annot), Item_t (Option_t (Key_hash_t, _, _), rest) ) -> Tc_context.check_not_in_view loc ~legacy tc_context prim >>?= fun () -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISet_delegate (kinfo, k))} in let stack = Item_t (operation_t, rest) in typed ctxt loc instr stack | (Prim (_, I_CREATE_ACCOUNT, _, _), _) -> fail (Deprecated_instruction I_CREATE_ACCOUNT) | (Prim (loc, I_IMPLICIT_ACCOUNT, [], annot), Item_t (Key_hash_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IImplicit_account (kinfo, k))} in let stack = Item_t (contract_unit_t, rest) in typed ctxt loc instr stack | ( Prim (loc, (I_CREATE_CONTRACT as prim), [(Seq _ as code)], annot), Item_t (Option_t (Key_hash_t, _, _), Item_t (Mutez_t, Item_t (ginit, rest))) ) -> Tc_context.check_not_in_view ~legacy loc tc_context prim >>?= fun () -> check_two_var_annot loc annot >>?= fun () -> (* We typecheck the script to make sure we will originate only well-typed contracts but then we throw away the typed version, except for the storage type which is kept for efficiency in the ticket scanner. *) let canonical_code = Micheline.strip_locations code in parse_toplevel ctxt ~legacy canonical_code >>?= fun ({arg_type; storage_type; code_field; views}, ctxt) -> record_trace (Ill_formed_type (Some "parameter", canonical_code, location arg_type)) (parse_parameter_ty_and_entrypoints ctxt ~stack_depth:(stack_depth + 1) ~legacy arg_type) >>?= fun (Ex_parameter_ty_and_entrypoints {arg_type; entrypoints}, ctxt) -> record_trace (Ill_formed_type (Some "storage", canonical_code, location storage_type)) (parse_storage_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy storage_type) >>?= fun (Ex_ty storage_type, ctxt) -> pair_t loc arg_type storage_type >>?= fun (Ty_ex_c arg_type_full) -> pair_t loc list_operation_t storage_type >>?= fun (Ty_ex_c ret_type_full) -> trace (Ill_typed_contract (canonical_code, [])) (parse_returning (Tc_context.toplevel ~storage_type ~param_type:arg_type ~entrypoints) ctxt ~legacy ?type_logger ~stack_depth:(stack_depth + 1) arg_type_full ret_type_full code_field) >>=? fun ( Lam ( {kbef = Item_t (arg, Bot_t); kaft = Item_t (ret, Bot_t); _}, _ ), ctxt ) -> let views_result = parse_views ctxt ?type_logger ~legacy storage_type views in trace (Ill_typed_contract (canonical_code, [])) views_result >>=? fun (_typed_views, ctxt) -> (let error_details = Informative loc in Gas_monad.run ctxt @@ let open Gas_monad.Syntax in let* Eq = ty_eq ~error_details arg arg_type_full in let* Eq = ty_eq ~error_details ret ret_type_full in ty_eq ~error_details storage_type ginit) >>?= fun (storage_eq, ctxt) -> storage_eq >>?= fun Eq -> let instr = { apply = (fun kinfo k -> ICreate_contract {kinfo; storage_type; code = canonical_code; k}); } in let stack = Item_t (operation_t, Item_t (address_t, rest)) in typed ctxt loc instr stack | (Prim (loc, I_NOW, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INow (kinfo, k))} in let stack = Item_t (timestamp_t, stack) in typed ctxt loc instr stack | (Prim (loc, I_MIN_BLOCK_TIME, [], _), stack) -> typed ctxt loc {apply = (fun kinfo k -> IMin_block_time (kinfo, k))} (Item_t (nat_t, stack)) | (Prim (loc, I_AMOUNT, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAmount (kinfo, k))} in let stack = Item_t (mutez_t, stack) in typed ctxt loc instr stack | (Prim (loc, I_CHAIN_ID, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IChainId (kinfo, k))} in let stack = Item_t (chain_id_t, stack) in typed ctxt loc instr stack | (Prim (loc, I_BALANCE, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IBalance (kinfo, k))} in let stack = Item_t (mutez_t, stack) in typed ctxt loc instr stack | (Prim (loc, I_LEVEL, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ILevel (kinfo, k))} in let stack = Item_t (nat_t, stack) in typed ctxt loc instr stack | (Prim (loc, I_VOTING_POWER, [], annot), Item_t (Key_hash_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IVoting_power (kinfo, k))} in let stack = Item_t (nat_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_TOTAL_VOTING_POWER, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ITotal_voting_power (kinfo, k))} in let stack = Item_t (nat_t, stack) in typed ctxt loc instr stack | (Prim (_, I_STEPS_TO_QUOTA, _, _), _) -> fail (Deprecated_instruction I_STEPS_TO_QUOTA) | (Prim (loc, I_SOURCE, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISource (kinfo, k))} in let stack = Item_t (address_t, stack) in typed ctxt loc instr stack | (Prim (loc, I_SENDER, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISender (kinfo, k))} in let stack = Item_t (address_t, stack) in typed ctxt loc instr stack | (Prim (loc, (I_SELF as prim), [], annot), stack) -> Lwt.return ( parse_entrypoint_annot_lax loc annot >>? fun entrypoint -> let open Tc_context in match tc_context.callsite with | _ when is_in_lambda tc_context -> error (Forbidden_instr_in_context (loc, Script_tc_errors.Lambda, prim)) (* [Data] is for pushed instructions of lambda type. *) | Data -> error (Forbidden_instr_in_context (loc, Script_tc_errors.Lambda, prim)) | View -> error (Forbidden_instr_in_context (loc, Script_tc_errors.View, prim)) | Toplevel {param_type; entrypoints; storage_type = _} -> Gas_monad.run ctxt @@ find_entrypoint ~error_details:(Informative ()) param_type entrypoints entrypoint >>? fun (r, ctxt) -> r >>? fun (Ex_ty_cstr {ty = param_type; _}) -> contract_t loc param_type >>? fun res_ty -> let instr = { apply = (fun kinfo k -> ISelf (kinfo, param_type, entrypoint, k)); } in let stack = Item_t (res_ty, stack) in typed_no_lwt ctxt loc instr stack ) | (Prim (loc, I_SELF_ADDRESS, [], annot), stack) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISelf_address (kinfo, k))} in let stack = Item_t (address_t, stack) in typed ctxt loc instr stack (* cryptography *) | (Prim (loc, I_HASH_KEY, [], annot), Item_t (Key_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IHash_key (kinfo, k))} in let stack = Item_t (key_hash_t, rest) in typed ctxt loc instr stack | ( Prim (loc, I_CHECK_SIGNATURE, [], annot), Item_t (Key_t, Item_t (Signature_t, Item_t (Bytes_t, rest))) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ICheck_signature (kinfo, k))} in let stack = Item_t (bool_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_BLAKE2B, [], annot), (Item_t (Bytes_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IBlake2b (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_SHA256, [], annot), (Item_t (Bytes_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISha256 (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_SHA512, [], annot), (Item_t (Bytes_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISha512 (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_KECCAK, [], annot), (Item_t (Bytes_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IKeccak (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_SHA3, [], annot), (Item_t (Bytes_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> ISha3 (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Bls12_381_g1_t, (Item_t (Bls12_381_g1_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_bls12_381_g1 (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Bls12_381_g2_t, (Item_t (Bls12_381_g2_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_bls12_381_g2 (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Bls12_381_fr_t, (Item_t (Bls12_381_fr_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IAdd_bls12_381_fr (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_g1_t, Item_t (Bls12_381_fr_t, rest)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_g1 (kinfo, k))} in let stack = Item_t (Bls12_381_g1_t, rest) in typed ctxt loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_g2_t, Item_t (Bls12_381_fr_t, rest)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_g2 (kinfo, k))} in let stack = Item_t (Bls12_381_g2_t, rest) in typed ctxt loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_fr_t, (Item_t (Bls12_381_fr_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_fr (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Nat_t, (Item_t (Bls12_381_fr_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_fr_z (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Int_t, (Item_t (Bls12_381_fr_t, _) as stack)) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_fr_z (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_fr_t, Item_t (Int_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_z_fr (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_fr_t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_z_fr (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_INT, [], annot), Item_t (Bls12_381_fr_t, rest)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IInt_bls12_381_fr (kinfo, k))} in let stack = Item_t (int_t, rest) in typed ctxt loc instr stack | (Prim (loc, I_NEG, [], annot), (Item_t (Bls12_381_g1_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INeg_bls12_381_g1 (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_NEG, [], annot), (Item_t (Bls12_381_g2_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INeg_bls12_381_g2 (kinfo, k))} in typed ctxt loc instr stack | (Prim (loc, I_NEG, [], annot), (Item_t (Bls12_381_fr_t, _) as stack)) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> INeg_bls12_381_fr (kinfo, k))} in typed ctxt loc instr stack | ( Prim (loc, I_PAIRING_CHECK, [], annot), Item_t (List_t (Pair_t (Bls12_381_g1_t, Bls12_381_g2_t, _, _), _), rest) ) -> check_var_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo k -> IPairing_check_bls12_381 (kinfo, k))} in let stack = Item_t (bool_t, rest) in typed ctxt loc instr stack (* Tickets *) | (Prim (loc, I_TICKET, [], annot), Item_t (t, Item_t (Nat_t, rest))) -> check_var_annot loc annot >>?= fun () -> check_comparable loc t >>?= fun Eq -> ticket_t loc t >>?= fun res_ty -> let instr = {apply = (fun kinfo k -> ITicket (kinfo, k))} in let stack = Item_t (res_ty, rest) in typed ctxt loc instr stack | ( Prim (loc, I_READ_TICKET, [], annot), (Item_t (Ticket_t (t, _), _) as full_stack) ) -> check_var_annot loc annot >>?= fun () -> let () = check_dupable_comparable_ty t in opened_ticket_type loc t >>?= fun result -> let instr = {apply = (fun kinfo k -> IRead_ticket (kinfo, k))} in let stack = Item_t (result, full_stack) in typed ctxt loc instr stack | ( Prim (loc, I_SPLIT_TICKET, [], annot), Item_t ( (Ticket_t (t, _) as ticket_t), Item_t (Pair_t (Nat_t, Nat_t, _, _), rest) ) ) -> check_var_annot loc annot >>?= fun () -> let () = check_dupable_comparable_ty t in pair_t loc ticket_t ticket_t >>?= fun (Ty_ex_c pair_tickets_ty) -> option_t loc pair_tickets_ty >>?= fun res_ty -> let instr = {apply = (fun kinfo k -> ISplit_ticket (kinfo, k))} in let stack = Item_t (res_ty, rest) in typed ctxt loc instr stack | ( Prim (loc, I_JOIN_TICKETS, [], annot), Item_t ( Pair_t ( (Ticket_t (contents_ty_a, _) as ty_a), Ticket_t (contents_ty_b, _), _, _ ), rest ) ) -> check_var_annot loc annot >>?= fun () -> Gas_monad.run ctxt @@ ty_eq ~error_details:(Informative loc) contents_ty_a contents_ty_b >>?= fun (eq, ctxt) -> eq >>?= fun Eq -> option_t loc ty_a >>?= fun res_ty -> let instr = {apply = (fun kinfo k -> IJoin_tickets (kinfo, contents_ty_a, k))} in let stack = Item_t (res_ty, rest) in typed ctxt loc instr stack (* Timelocks *) | ( Prim (loc, I_OPEN_CHEST, [], _), Item_t (Chest_key_t, Item_t (Chest_t, Item_t (Nat_t, rest))) ) -> let instr = {apply = (fun kinfo k -> IOpen_chest (kinfo, k))} in typed ctxt loc instr (Item_t (union_bytes_bool_t, rest)) (* Primitive parsing errors *) | ( Prim ( loc, (( I_DUP | I_SWAP | I_SOME | I_UNIT | I_PAIR | I_UNPAIR | I_CAR | I_CDR | I_CONS | I_CONCAT | I_SLICE | I_MEM | I_UPDATE | I_GET | I_EXEC | I_FAILWITH | I_SIZE | I_ADD | I_SUB | I_SUB_MUTEZ | I_MUL | I_EDIV | I_OR | I_AND | I_XOR | I_NOT | I_ABS | I_NEG | I_LSL | I_LSR | I_COMPARE | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE | I_TRANSFER_TOKENS | I_SET_DELEGATE | I_NOW | I_MIN_BLOCK_TIME | I_IMPLICIT_ACCOUNT | I_AMOUNT | I_BALANCE | I_LEVEL | I_CHECK_SIGNATURE | I_HASH_KEY | I_SOURCE | I_SENDER | I_BLAKE2B | I_SHA256 | I_SHA512 | I_ADDRESS | I_RENAME | I_PACK | I_ISNAT | I_INT | I_SELF | I_CHAIN_ID | I_NEVER | I_VOTING_POWER | I_TOTAL_VOTING_POWER | I_KECCAK | I_SHA3 | I_PAIRING_CHECK | I_TICKET | I_READ_TICKET | I_SPLIT_TICKET | I_JOIN_TICKETS | I_OPEN_CHEST ) as name), (_ :: _ as l), _ ), _ ) -> fail (Invalid_arity (loc, name, 0, List.length l)) | ( Prim ( loc, (( I_NONE | I_LEFT | I_RIGHT | I_NIL | I_MAP | I_ITER | I_EMPTY_SET | I_LOOP | I_LOOP_LEFT | I_CONTRACT | I_CAST | I_UNPACK | I_CREATE_CONTRACT ) as name), (([] | _ :: _ :: _) as l), _ ), _ ) -> fail (Invalid_arity (loc, name, 1, List.length l)) | ( Prim ( loc, (( I_PUSH | I_VIEW | I_IF_NONE | I_IF_LEFT | I_IF_CONS | I_EMPTY_MAP | I_EMPTY_BIG_MAP | I_IF ) as name), (([] | [_] | _ :: _ :: _ :: _) as l), _ ), _ ) -> fail (Invalid_arity (loc, name, 2, List.length l)) | ( Prim (loc, I_LAMBDA, (([] | [_] | [_; _] | _ :: _ :: _ :: _ :: _) as l), _), _ ) -> fail (Invalid_arity (loc, I_LAMBDA, 3, List.length l)) (* Stack errors *) | ( Prim ( loc, (( I_ADD | I_SUB | I_SUB_MUTEZ | I_MUL | I_EDIV | I_AND | I_OR | I_XOR | I_LSL | I_LSR | I_CONCAT | I_PAIRING_CHECK ) as name), [], _ ), Item_t (ta, Item_t (tb, _)) ) -> let ta = serialize_ty_for_error ta in let tb = serialize_ty_for_error tb in fail (Undefined_binop (loc, name, ta, tb)) | ( Prim ( loc, (( I_NEG | I_ABS | I_NOT | I_SIZE | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE (* CONCAT is both unary and binary; this case can only be triggered on a singleton stack *) | I_CONCAT ) as name), [], _ ), Item_t (t, _) ) -> let t = serialize_ty_for_error t in fail (Undefined_unop (loc, name, t)) | (Prim (loc, ((I_UPDATE | I_SLICE | I_OPEN_CHEST) as name), [], _), stack) -> Lwt.return (let stack = serialize_stack_for_error ctxt stack in error (Bad_stack (loc, name, 3, stack))) | (Prim (loc, I_CREATE_CONTRACT, _, _), stack) -> let stack = serialize_stack_for_error ctxt stack in fail (Bad_stack (loc, I_CREATE_CONTRACT, 7, stack)) | (Prim (loc, I_TRANSFER_TOKENS, [], _), stack) -> Lwt.return (let stack = serialize_stack_for_error ctxt stack in error (Bad_stack (loc, I_TRANSFER_TOKENS, 4, stack))) | ( Prim ( loc, (( I_DROP | I_DUP | I_CAR | I_CDR | I_UNPAIR | I_SOME | I_BLAKE2B | I_SHA256 | I_SHA512 | I_DIP | I_IF_NONE | I_LEFT | I_RIGHT | I_IF_LEFT | I_IF | I_LOOP | I_IF_CONS | I_IMPLICIT_ACCOUNT | I_NEG | I_ABS | I_INT | I_NOT | I_HASH_KEY | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE | I_SIZE | I_FAILWITH | I_RENAME | I_PACK | I_ISNAT | I_ADDRESS | I_SET_DELEGATE | I_CAST | I_MAP | I_ITER | I_LOOP_LEFT | I_UNPACK | I_CONTRACT | I_NEVER | I_KECCAK | I_SHA3 | I_READ_TICKET | I_JOIN_TICKETS ) as name), _, _ ), stack ) -> Lwt.return (let stack = serialize_stack_for_error ctxt stack in error (Bad_stack (loc, name, 1, stack))) | ( Prim ( loc, (( I_SWAP | I_PAIR | I_CONS | I_GET | I_MEM | I_EXEC | I_CHECK_SIGNATURE | I_ADD | I_SUB | I_SUB_MUTEZ | I_MUL | I_EDIV | I_AND | I_OR | I_XOR | I_LSL | I_LSR | I_COMPARE | I_PAIRING_CHECK | I_TICKET | I_SPLIT_TICKET ) as name), _, _ ), stack ) -> Lwt.return (let stack = serialize_stack_for_error ctxt stack in error (Bad_stack (loc, name, 2, stack))) (* Generic parsing errors *) | (expr, _) -> fail @@ unexpected expr [Seq_kind] Instr_namespace [ I_DROP; I_DUP; I_DIG; I_DUG; I_VIEW; I_SWAP; I_SOME; I_UNIT; I_PAIR; I_UNPAIR; I_CAR; I_CDR; I_CONS; I_MEM; I_UPDATE; I_MAP; I_ITER; I_GET; I_GET_AND_UPDATE; I_EXEC; I_FAILWITH; I_SIZE; I_CONCAT; I_ADD; I_SUB; I_SUB_MUTEZ; I_MUL; I_EDIV; I_OR; I_AND; I_XOR; I_NOT; I_ABS; I_INT; I_NEG; I_LSL; I_LSR; I_COMPARE; I_EQ; I_NEQ; I_LT; I_GT; I_LE; I_GE; I_TRANSFER_TOKENS; I_CREATE_CONTRACT; I_NOW; I_MIN_BLOCK_TIME; I_AMOUNT; I_BALANCE; I_LEVEL; I_IMPLICIT_ACCOUNT; I_CHECK_SIGNATURE; I_BLAKE2B; I_SHA256; I_SHA512; I_HASH_KEY; I_PUSH; I_NONE; I_LEFT; I_RIGHT; I_NIL; I_EMPTY_SET; I_DIP; I_LOOP; I_IF_NONE; I_IF_LEFT; I_IF_CONS; I_EMPTY_MAP; I_EMPTY_BIG_MAP; I_IF; I_SOURCE; I_SENDER; I_SELF; I_SELF_ADDRESS; I_LAMBDA; I_NEVER; I_VOTING_POWER; I_TOTAL_VOTING_POWER; I_KECCAK; I_SHA3; I_PAIRING_CHECK; I_SAPLING_EMPTY_STATE; I_SAPLING_VERIFY_UPDATE; I_TICKET; I_READ_TICKET; I_SPLIT_TICKET; I_JOIN_TICKETS; I_OPEN_CHEST; ] and[@coq_axiom_with_reason "complex mutually recursive definition"] parse_contract : type arg argc. stack_depth:int -> context -> Script.location -> (arg, argc) ty -> Destination.t -> entrypoint:Entrypoint.t -> (context * arg typed_contract) tzresult Lwt.t = fun ~stack_depth ctxt loc arg destination ~entrypoint -> match destination with | Contract contract -> ( match Contract.is_implicit contract with | Some _ -> if Entrypoint.is_default entrypoint then (* An implicit account on the "default" entrypoint always exists and has type unit. *) Lwt.return ( Gas_monad.run ctxt @@ ty_eq ~error_details:(Informative loc) arg unit_t >>? fun (eq, ctxt) -> eq >|? fun Eq -> let destination : Destination.t = Contract contract in let address = {destination; entrypoint} in (ctxt, Typed_contract {arg_ty = arg; address}) ) else fail (No_such_entrypoint entrypoint) | None -> ( (* Originated account *) trace (Invalid_contract (loc, contract)) @@ Contract.get_script_code ctxt contract >>=? fun (ctxt, code) -> match code with | None -> fail (Invalid_contract (loc, contract)) | Some code -> Lwt.return ( Script.force_decode_in_context ~consume_deserialization_gas:When_needed ctxt code >>? fun (code, ctxt) -> (* can only fail because of gas *) parse_toplevel ctxt ~legacy:true code >>? fun ({arg_type; _}, ctxt) -> parse_parameter_ty_and_entrypoints ctxt ~stack_depth:(stack_depth + 1) ~legacy:true arg_type >>? fun ( Ex_parameter_ty_and_entrypoints {arg_type = targ; entrypoints}, ctxt ) -> (* we don't check targ size here because it's a legacy contract code *) Gas_monad.run ctxt @@ find_entrypoint_for_type ~error_details:(Informative loc) ~full:targ ~expected:arg entrypoints entrypoint >>? fun (entrypoint_arg, ctxt) -> entrypoint_arg >|? fun (entrypoint, arg_ty) -> let address = {destination; entrypoint} in (ctxt, Typed_contract {arg_ty; address}) ))) | Tx_rollup tx_rollup -> Tx_rollup_state.assert_exist ctxt tx_rollup >>=? fun ctxt -> if Entrypoint.(entrypoint = Tx_rollup.deposit_entrypoint) then (* /!\ This pattern matching needs to remain in sync with [parse_contract] and [parse_tx_rollup_deposit_parameters]. *) match arg with | Pair_t (Ticket_t (_, _), Tx_rollup_l2_address_t, _, _) -> let address = {destination; entrypoint} in return (ctxt, Typed_contract {arg_ty = arg; address}) | _ -> fail @@ Tx_rollup_bad_deposit_parameter (loc, serialize_ty_for_error arg) else fail (No_such_entrypoint entrypoint) and parse_view_name ctxt : Script.node -> (Script_string.t * context) tzresult = function | String (loc, v) as expr -> (* The limitation of length of string is same as entrypoint *) if Compare.Int.(String.length v > 31) then error (View_name_too_long v) else let rec check_char i = if Compare.Int.(i < 0) then ok v else if Script_ir_annot.is_allowed_char v.[i] then check_char (i - 1) else error (Bad_view_name loc) in Gas.consume ctxt (Typecheck_costs.check_printable v) >>? fun ctxt -> record_trace (Invalid_syntactic_constant ( loc, strip_locations expr, "string [a-zA-Z0-9_.%@] and the maximum string length of 31 \ characters" )) ( check_char (String.length v - 1) >>? fun v -> Script_string.of_string v >|? fun s -> (s, ctxt) ) | expr -> error @@ Invalid_kind (location expr, [String_kind], kind expr) and parse_toplevel : context -> legacy:bool -> Script.expr -> (toplevel * context) tzresult = fun ctxt ~legacy toplevel -> record_trace (Ill_typed_contract (toplevel, [])) @@ match root toplevel with | Int (loc, _) -> error (Invalid_kind (loc, [Seq_kind], Int_kind)) | String (loc, _) -> error (Invalid_kind (loc, [Seq_kind], String_kind)) | Bytes (loc, _) -> error (Invalid_kind (loc, [Seq_kind], Bytes_kind)) | Prim (loc, _, _, _) -> error (Invalid_kind (loc, [Seq_kind], Prim_kind)) | Seq (_, fields) -> ( let rec find_fields ctxt p s c views fields = match fields with | [] -> ok (ctxt, (p, s, c, views)) | Int (loc, _) :: _ -> error (Invalid_kind (loc, [Prim_kind], Int_kind)) | String (loc, _) :: _ -> error (Invalid_kind (loc, [Prim_kind], String_kind)) | Bytes (loc, _) :: _ -> error (Invalid_kind (loc, [Prim_kind], Bytes_kind)) | Seq (loc, _) :: _ -> error (Invalid_kind (loc, [Prim_kind], Seq_kind)) | Prim (loc, K_parameter, [arg], annot) :: rest -> ( match p with | None -> find_fields ctxt (Some (arg, loc, annot)) s c views rest | Some _ -> error (Duplicate_field (loc, K_parameter))) | Prim (loc, K_storage, [arg], annot) :: rest -> ( match s with | None -> find_fields ctxt p (Some (arg, loc, annot)) c views rest | Some _ -> error (Duplicate_field (loc, K_storage))) | Prim (loc, K_code, [arg], annot) :: rest -> ( match c with | None -> find_fields ctxt p s (Some (arg, loc, annot)) views rest | Some _ -> error (Duplicate_field (loc, K_code))) | Prim (loc, ((K_parameter | K_storage | K_code) as name), args, _) :: _ -> error (Invalid_arity (loc, name, 1, List.length args)) | Prim (loc, K_view, [name; input_ty; output_ty; view_code], _) :: rest -> parse_view_name ctxt name >>? fun (str, ctxt) -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.view_update str views) >>? fun ctxt -> if Script_map.mem str views then error (Duplicated_view_name loc) else let views' = Script_map.update str (Some {input_ty; output_ty; view_code}) views in find_fields ctxt p s c views' rest | Prim (loc, K_view, args, _) :: _ -> error (Invalid_arity (loc, K_view, 4, List.length args)) | Prim (loc, name, _, _) :: _ -> let allowed = [K_parameter; K_storage; K_code; K_view] in error (Invalid_primitive (loc, allowed, name)) in find_fields ctxt None None None (Script_map.empty string_t) fields >>? fun (ctxt, toplevel) -> match toplevel with | (None, _, _, _) -> error (Missing_field K_parameter) | (Some _, None, _, _) -> error (Missing_field K_storage) | (Some _, Some _, None, _) -> error (Missing_field K_code) | ( Some (p, ploc, pannot), Some (s, sloc, sannot), Some (c, cloc, cannot), views ) -> let p_pannot = (* root name can be attached to either the parameter primitive or the toplevel constructor (legacy only). In the latter case we move it to the parameter type. *) Script_ir_annot.has_field_annot p >>? function | true -> ok (p, pannot) | false -> ( match pannot with | [single] when legacy -> ( is_field_annot ploc single >|? fun is_field_annot -> match (is_field_annot, p) with | (true, Prim (loc, prim, args, annots)) -> (Prim (loc, prim, args, single :: annots), []) | _ -> (p, [])) | _ -> ok (p, pannot)) in (* only one field annot is allowed to set the root entrypoint name *) p_pannot >>? fun (arg_type, pannot) -> Script_ir_annot.error_unexpected_annot ploc pannot >>? fun () -> Script_ir_annot.error_unexpected_annot cloc cannot >>? fun () -> Script_ir_annot.error_unexpected_annot sloc sannot >|? fun () -> ({code_field = c; arg_type; views; storage_type = s}, ctxt)) (* Same as [parse_contract], but does not fail when the contact is missing or if the expected type doesn't match the actual one. In that case None is returned and some overapproximation of the typechecking gas is consumed. This can still fail on gas exhaustion. *) let parse_contract_for_script : type arg argc. context -> Script.location -> (arg, argc) ty -> Destination.t -> entrypoint:Entrypoint.t -> (context * arg typed_contract option) tzresult Lwt.t = fun ctxt loc arg contract ~entrypoint -> match contract with | Contract contract -> ( match Contract.is_implicit contract with | Some _ -> if Entrypoint.is_default entrypoint then (* An implicit account on the "default" entrypoint always exists and has type unit. *) Lwt.return ( Gas_monad.run ctxt @@ ty_eq ~error_details:Fast arg unit_t >|? fun (eq, ctxt) -> match eq with | Ok Eq -> let destination : Destination.t = Contract contract in let address = {destination; entrypoint} in let contract = Typed_contract {arg_ty = arg; address} in (ctxt, Some contract) | Error Inconsistent_types_fast -> (ctxt, None) ) else Lwt.return ( Gas.consume ctxt Typecheck_costs.parse_instr_cycle >|? fun ctxt -> (* An implicit account on any other entrypoint is not a valid contract. *) (ctxt, None) ) | None -> ( (* Originated account *) trace (Invalid_contract (loc, contract)) @@ Contract.get_script_code ctxt contract >>=? fun (ctxt, code) -> match code with | None -> return (ctxt, None) | Some code -> Lwt.return ( Script.force_decode_in_context ~consume_deserialization_gas:When_needed ctxt code >>? fun (code, ctxt) -> (* can only fail because of gas *) match parse_toplevel ctxt ~legacy:true code with | Error _ -> error (Invalid_contract (loc, contract)) | Ok ({arg_type; _}, ctxt) -> ( match parse_parameter_ty_and_entrypoints ctxt ~stack_depth:0 ~legacy:true arg_type with | Error _ -> error (Invalid_contract (loc, contract)) | Ok ( Ex_parameter_ty_and_entrypoints {arg_type = targ; entrypoints}, ctxt ) -> ( (* we don't check targ size here because it's a legacy contract code *) Gas_monad.run ctxt @@ find_entrypoint_for_type ~error_details:Fast ~full:targ ~expected:arg entrypoints entrypoint >|? fun (entrypoint_arg, ctxt) -> match entrypoint_arg with | Ok (entrypoint, arg_ty) -> let destination = Destination.Contract contract in let address = {destination; entrypoint} in let contract = Typed_contract {arg_ty; address} in (ctxt, Some contract) | Error Inconsistent_types_fast -> (ctxt, None))) ))) | Tx_rollup tx_rollup -> ( (* /!\ This pattern matching needs to remain in sync with [parse_contract_for_script] and [parse_tx_rollup_deposit_parameters]. *) match arg with | Pair_t (Ticket_t (_, _), Tx_rollup_l2_address_t, _, _) when Entrypoint.( entrypoint = Alpha_context.Tx_rollup.deposit_entrypoint) -> ( Tx_rollup_state.find ctxt tx_rollup >|=? function | (ctxt, Some _) -> let address = {destination = contract; entrypoint} in (ctxt, Some (Typed_contract {arg_ty = arg; address})) | (ctxt, None) -> (ctxt, None)) | _ -> return (ctxt, None)) let view_size view = let open Script_typed_ir_size in node_size view.view_code ++ node_size view.input_ty ++ node_size view.output_ty let code_size ctxt code views = let open Script_typed_ir_size in let views_size = Script_map.fold (fun _ v s -> view_size v ++ s) views zero in (* The size of the storage_type and the arg_type is counted by [lambda_size]. *) let ir_size = lambda_size code in let (nodes, code_size) = views_size ++ ir_size in (* We consume gas after the fact in order to not have to instrument [node_size] (for efficiency). This is safe, as we already pay gas proportional to [views_size] and [ir_size] during their typechecking. *) Gas.consume ctxt (Script_typed_ir_size_costs.nodes_cost ~nodes) >|? fun ctxt -> (code_size, ctxt) let parse_code : ?type_logger:type_logger -> context -> legacy:bool -> code:lazy_expr -> (ex_code * context) tzresult Lwt.t = fun ?type_logger ctxt ~legacy ~code -> Script.force_decode_in_context ~consume_deserialization_gas:When_needed ctxt code >>?= fun (code, ctxt) -> Global_constants_storage.expand ctxt code >>=? fun (ctxt, code) -> parse_toplevel ctxt ~legacy code >>?= fun ({arg_type; storage_type; code_field; views}, ctxt) -> let arg_type_loc = location arg_type in record_trace (Ill_formed_type (Some "parameter", code, arg_type_loc)) (parse_parameter_ty_and_entrypoints ctxt ~stack_depth:0 ~legacy arg_type) >>?= fun (Ex_parameter_ty_and_entrypoints {arg_type; entrypoints}, ctxt) -> let storage_type_loc = location storage_type in record_trace (Ill_formed_type (Some "storage", code, storage_type_loc)) (parse_storage_ty ctxt ~stack_depth:0 ~legacy storage_type) >>?= fun (Ex_ty storage_type, ctxt) -> pair_t storage_type_loc arg_type storage_type >>?= fun (Ty_ex_c arg_type_full) -> pair_t storage_type_loc list_operation_t storage_type >>?= fun (Ty_ex_c ret_type_full) -> trace (Ill_typed_contract (code, [])) (parse_returning Tc_context.(toplevel ~storage_type ~param_type:arg_type ~entrypoints) ctxt ~legacy ~stack_depth:0 ?type_logger arg_type_full ret_type_full code_field) >>=? fun (code, ctxt) -> Lwt.return ( code_size ctxt code views >>? fun (code_size, ctxt) -> ok ( Ex_code (Code {code; arg_type; storage_type; views; entrypoints; code_size}), ctxt ) ) let parse_storage : ?type_logger:type_logger -> context -> legacy:bool -> allow_forged:bool -> ('storage, _) ty -> storage:lazy_expr -> ('storage * context) tzresult Lwt.t = fun ?type_logger ctxt ~legacy ~allow_forged storage_type ~storage -> Script.force_decode_in_context ~consume_deserialization_gas:When_needed ctxt storage >>?= fun (storage, ctxt) -> trace_eval (fun () -> let storage_type = serialize_ty_for_error storage_type in Ill_typed_data (None, storage, storage_type)) (parse_data ?type_logger ~stack_depth:0 ctxt ~legacy ~allow_forged storage_type (root storage)) let[@coq_axiom_with_reason "gadt"] parse_script : ?type_logger:type_logger -> context -> legacy:bool -> allow_forged_in_storage:bool -> Script.t -> (ex_script * context) tzresult Lwt.t = fun ?type_logger ctxt ~legacy ~allow_forged_in_storage {code; storage} -> parse_code ~legacy ctxt ?type_logger ~code >>=? fun ( Ex_code (Code {code; arg_type; storage_type; views; entrypoints; code_size}), ctxt ) -> parse_storage ?type_logger ctxt ~legacy ~allow_forged:allow_forged_in_storage storage_type ~storage >|=? fun (storage, ctxt) -> ( Ex_script (Script {code_size; code; arg_type; storage; storage_type; views; entrypoints}), ctxt ) type typechecked_code_internal = | Typechecked_code_internal : { toplevel : toplevel; arg_type : ('arg, _) ty; storage_type : ('storage, _) ty; entrypoints : 'arg entrypoints; typed_views : 'storage typed_view_map; type_map : type_map; } -> typechecked_code_internal let typecheck_code : legacy:bool -> show_types:bool -> context -> Script.expr -> (typechecked_code_internal * context) tzresult Lwt.t = fun ~legacy ~show_types ctxt code -> (* Constants need to be expanded or [parse_toplevel] may fail. *) Global_constants_storage.expand ctxt code >>=? fun (ctxt, code) -> parse_toplevel ctxt ~legacy code >>?= fun (toplevel, ctxt) -> let {arg_type; storage_type; code_field; views} = toplevel in let type_map = ref [] in let arg_type_loc = location arg_type in record_trace (Ill_formed_type (Some "parameter", code, arg_type_loc)) (parse_parameter_ty_and_entrypoints ctxt ~stack_depth:0 ~legacy arg_type) >>?= fun (Ex_parameter_ty_and_entrypoints {arg_type; entrypoints}, ctxt) -> let storage_type_loc = location storage_type in record_trace (Ill_formed_type (Some "storage", code, storage_type_loc)) (parse_storage_ty ctxt ~stack_depth:0 ~legacy storage_type) >>?= fun (ex_storage_type, ctxt) -> let (Ex_ty storage_type) = ex_storage_type in pair_t storage_type_loc arg_type storage_type >>?= fun (Ty_ex_c arg_type_full) -> pair_t storage_type_loc list_operation_t storage_type >>?= fun (Ty_ex_c ret_type_full) -> let type_logger loc ~stack_ty_before ~stack_ty_after = type_map := (loc, (stack_ty_before, stack_ty_after)) :: !type_map in let type_logger = if show_types then Some type_logger else None in let result = parse_returning (Tc_context.toplevel ~storage_type ~param_type:arg_type ~entrypoints) ctxt ~legacy ~stack_depth:0 ?type_logger arg_type_full ret_type_full code_field in trace (Ill_typed_contract (code, !type_map)) result >>=? fun (Lam _, ctxt) -> let views_result = parse_views ctxt ?type_logger ~legacy storage_type views in trace (Ill_typed_contract (code, !type_map)) views_result >|=? fun (typed_views, ctxt) -> ( Typechecked_code_internal { toplevel; arg_type; storage_type; entrypoints; typed_views; type_map = !type_map; }, ctxt ) (* Uncarbonated because used only in RPCs *) let list_entrypoints_uncarbonated (type full fullc) (full : (full, fullc) ty) (entrypoints : full entrypoints) = let merge path (type t tc) (ty : (t, tc) ty) (entrypoints : t entrypoints_node) reachable ((unreachables, all) as acc) = match entrypoints.at_node with | None -> ( (if reachable then acc else match ty with | Union_t _ -> acc | _ -> (List.rev path :: unreachables, all)), reachable ) | Some {name; original_type_expr} -> ( (if Entrypoint.Map.mem name all then (List.rev path :: unreachables, all) else ( unreachables, Entrypoint.Map.add name (Ex_ty ty, original_type_expr) all )), true ) in let rec fold_tree : type t tc. (t, tc) ty -> t entrypoints_node -> prim list -> bool -> prim list list * (ex_ty * Script.node) Entrypoint.Map.t -> prim list list * (ex_ty * Script.node) Entrypoint.Map.t = fun t entrypoints path reachable acc -> match (t, entrypoints) with | (Union_t (tl, tr, _, _), {nested = Entrypoints_Union {left; right}; _}) -> let (acc, l_reachable) = merge (D_Left :: path) tl left reachable acc in let (acc, r_reachable) = merge (D_Right :: path) tr right reachable acc in let acc = fold_tree tl left (D_Left :: path) l_reachable acc in fold_tree tr right (D_Right :: path) r_reachable acc | _ -> acc in let (init, reachable) = match entrypoints.root.at_node with | None -> (Entrypoint.Map.empty, false) | Some {name; original_type_expr} -> (Entrypoint.Map.singleton name (Ex_ty full, original_type_expr), true) in fold_tree full entrypoints.root [] reachable ([], init) [@@coq_axiom_with_reason "unsupported syntax"] (* ---- Unparsing (Typed IR -> Untyped expressions) --------------------------*) (* -- Unparsing data of any type -- *) let[@coq_axiom_with_reason "gadt"] rec unparse_data : type a ac. context -> stack_depth:int -> unparsing_mode -> (a, ac) ty -> a -> (Script.node * context) tzresult Lwt.t = fun ctxt ~stack_depth mode ty a -> Gas.consume ctxt Unparse_costs.unparse_data_cycle >>?= fun ctxt -> let non_terminal_recursion ctxt mode ty a = if Compare.Int.(stack_depth > 10_000) then fail Unparsing_too_many_recursive_calls else unparse_data ctxt ~stack_depth:(stack_depth + 1) mode ty a in let loc = Micheline.dummy_location in match (ty, a) with | (Unit_t, v) -> Lwt.return @@ unparse_unit ~loc ctxt v | (Int_t, v) -> Lwt.return @@ unparse_int ~loc ctxt v | (Nat_t, v) -> Lwt.return @@ unparse_nat ~loc ctxt v | (String_t, s) -> Lwt.return @@ unparse_string ~loc ctxt s | (Bytes_t, s) -> Lwt.return @@ unparse_bytes ~loc ctxt s | (Bool_t, b) -> Lwt.return @@ unparse_bool ~loc ctxt b | (Timestamp_t, t) -> Lwt.return @@ unparse_timestamp ~loc ctxt mode t | (Address_t, address) -> Lwt.return @@ unparse_address ~loc ctxt mode address | (Tx_rollup_l2_address_t, address) -> Lwt.return @@ unparse_tx_rollup_l2_address ~loc ctxt mode address | (Contract_t _, contract) -> Lwt.return @@ unparse_contract ~loc ctxt mode contract | (Signature_t, s) -> Lwt.return @@ unparse_signature ~loc ctxt mode s | (Mutez_t, v) -> Lwt.return @@ unparse_mutez ~loc ctxt v | (Key_t, k) -> Lwt.return @@ unparse_key ~loc ctxt mode k | (Key_hash_t, k) -> Lwt.return @@ unparse_key_hash ~loc ctxt mode k | (Operation_t, operation) -> Lwt.return @@ unparse_operation ~loc ctxt operation | (Chain_id_t, chain_id) -> Lwt.return @@ unparse_chain_id ~loc ctxt mode chain_id | (Bls12_381_g1_t, x) -> Lwt.return @@ unparse_bls12_381_g1 ~loc ctxt x | (Bls12_381_g2_t, x) -> Lwt.return @@ unparse_bls12_381_g2 ~loc ctxt x | (Bls12_381_fr_t, x) -> Lwt.return @@ unparse_bls12_381_fr ~loc ctxt x | (Pair_t (tl, tr, _, _), pair) -> let r_witness = comb_witness2 tr in let unparse_l ctxt v = non_terminal_recursion ctxt mode tl v in let unparse_r ctxt v = non_terminal_recursion ctxt mode tr v in unparse_pair ~loc unparse_l unparse_r ctxt mode r_witness pair | (Union_t (tl, tr, _, _), v) -> let unparse_l ctxt v = non_terminal_recursion ctxt mode tl v in let unparse_r ctxt v = non_terminal_recursion ctxt mode tr v in unparse_union ~loc unparse_l unparse_r ctxt v | (Option_t (t, _, _), v) -> let unparse_v ctxt v = non_terminal_recursion ctxt mode t v in unparse_option ~loc unparse_v ctxt v | (List_t (t, _), items) -> List.fold_left_es (fun (l, ctxt) element -> non_terminal_recursion ctxt mode t element >|=? fun (unparsed, ctxt) -> (unparsed :: l, ctxt)) ([], ctxt) items.elements >|=? fun (items, ctxt) -> (Micheline.Seq (loc, List.rev items), ctxt) | (Ticket_t (t, _), {ticketer; contents; amount}) -> (* ideally we would like to allow a little overhead here because it is only used for unparsing *) opened_ticket_type loc t >>?= fun t -> let destination : Destination.t = Contract ticketer in let addr = {destination; entrypoint = Entrypoint.default} in (unparse_data [@tailcall]) ctxt ~stack_depth mode t (addr, (contents, amount)) | (Set_t (t, _), set) -> List.fold_left_es (fun (l, ctxt) item -> unparse_comparable_data ~loc ctxt mode t item >|=? fun (item, ctxt) -> (item :: l, ctxt)) ([], ctxt) (Script_set.fold (fun e acc -> e :: acc) set []) >|=? fun (items, ctxt) -> (Micheline.Seq (loc, items), ctxt) | (Map_t (kt, vt, _), map) -> let items = Script_map.fold (fun k v acc -> (k, v) :: acc) map [] in unparse_items ctxt ~stack_depth:(stack_depth + 1) mode kt vt items >|=? fun (items, ctxt) -> (Micheline.Seq (loc, items), ctxt) | (Big_map_t (_kt, _vt, _), Big_map {id = Some id; diff = {size; _}; _}) when Compare.Int.( = ) size 0 -> return (Micheline.Int (loc, Big_map.Id.unparse_to_z id), ctxt) | (Big_map_t (kt, vt, _), Big_map {id = Some id; diff = {map; _}; _}) -> let items = Big_map_overlay.fold (fun _ (k, v) acc -> (k, v) :: acc) map [] in let items = (* Sort the items in Michelson comparison order and not in key hash order. This code path is only exercised for tracing, so we don't bother carbonating this sort operation precisely. Also, the sort uses a reverse compare because [unparse_items] will reverse the result. *) List.sort (fun (a, _) (b, _) -> Script_comparable.compare_comparable kt b a) items in (* this can't fail if the original type is well-formed because [option vt] is always strictly smaller than [big_map kt vt] *) option_t loc vt >>?= fun vt -> unparse_items ctxt ~stack_depth:(stack_depth + 1) mode kt vt items >|=? fun (items, ctxt) -> ( Micheline.Prim ( loc, D_Pair, [Int (loc, Big_map.Id.unparse_to_z id); Seq (loc, items)], [] ), ctxt ) | (Big_map_t (kt, vt, _), Big_map {id = None; diff = {map; _}; _}) -> let items = Big_map_overlay.fold (fun _ (k, v) acc -> match v with None -> acc | Some v -> (k, v) :: acc) map [] in let items = (* See note above. *) List.sort (fun (a, _) (b, _) -> Script_comparable.compare_comparable kt b a) items in unparse_items ctxt ~stack_depth:(stack_depth + 1) mode kt vt items >|=? fun (items, ctxt) -> (Micheline.Seq (loc, items), ctxt) | (Lambda_t _, Lam (_, original_code)) -> unparse_code ctxt ~stack_depth:(stack_depth + 1) mode original_code | (Never_t, _) -> . | (Sapling_transaction_t _, s) -> Lwt.return ( Gas.consume ctxt (Unparse_costs.sapling_transaction s) >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Sapling.transaction_encoding s in (Bytes (loc, bytes), ctxt) ) | (Sapling_transaction_deprecated_t _, s) -> Lwt.return ( Gas.consume ctxt (Unparse_costs.sapling_transaction_deprecated s) >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Sapling.Legacy.transaction_encoding s in (Bytes (loc, bytes), ctxt) ) | (Sapling_state_t _, {id; diff; _}) -> Lwt.return ( Gas.consume ctxt (Unparse_costs.sapling_diff diff) >|? fun ctxt -> ( (match diff with | {commitments_and_ciphertexts = []; nullifiers = []} -> ( match id with | None -> Micheline.Seq (loc, []) | Some id -> let id = Sapling.Id.unparse_to_z id in Micheline.Int (loc, id)) | diff -> ( let diff_bytes = Data_encoding.Binary.to_bytes_exn Sapling.diff_encoding diff in let unparsed_diff = Bytes (loc, diff_bytes) in match id with | None -> unparsed_diff | Some id -> let id = Sapling.Id.unparse_to_z id in Micheline.Prim (loc, D_Pair, [Int (loc, id); unparsed_diff], []))), ctxt ) ) | (Chest_key_t, s) -> unparse_with_data_encoding ~loc ctxt s Unparse_costs.chest_key Script_timelock.chest_key_encoding | (Chest_t, s) -> unparse_with_data_encoding ~loc ctxt s (Unparse_costs.chest ~plaintext_size:(Script_timelock.get_plaintext_size s)) Script_timelock.chest_encoding and unparse_items : type k v vc. context -> stack_depth:int -> unparsing_mode -> k comparable_ty -> (v, vc) ty -> (k * v) list -> (Script.node list * context) tzresult Lwt.t = fun ctxt ~stack_depth mode kt vt items -> List.fold_left_es (fun (l, ctxt) (k, v) -> let loc = Micheline.dummy_location in unparse_comparable_data ~loc ctxt mode kt k >>=? fun (key, ctxt) -> unparse_data ctxt ~stack_depth:(stack_depth + 1) mode vt v >|=? fun (value, ctxt) -> (Prim (loc, D_Elt, [key; value], []) :: l, ctxt)) ([], ctxt) items and[@coq_axiom_with_reason "gadt"] unparse_code ctxt ~stack_depth mode code = let legacy = true in Gas.consume ctxt Unparse_costs.unparse_instr_cycle >>?= fun ctxt -> let non_terminal_recursion ctxt mode code = if Compare.Int.(stack_depth > 10_000) then fail Unparsing_too_many_recursive_calls else unparse_code ctxt ~stack_depth:(stack_depth + 1) mode code in match code with | Prim (loc, I_PUSH, [ty; data], annot) -> parse_packable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ty >>?= fun (Ex_ty t, ctxt) -> let allow_forged = false (* Forgeable in PUSH data are already forbidden at parsing, the only case for which this matters is storing a lambda resulting from APPLYing a non-forgeable but this cannot happen either as long as all packable values are also forgeable. *) in parse_data ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_forged t data >>=? fun (data, ctxt) -> unparse_data ctxt ~stack_depth:(stack_depth + 1) mode t data >>=? fun (data, ctxt) -> return (Prim (loc, I_PUSH, [ty; data], annot), ctxt) | Seq (loc, items) -> List.fold_left_es (fun (l, ctxt) item -> non_terminal_recursion ctxt mode item >|=? fun (item, ctxt) -> (item :: l, ctxt)) ([], ctxt) items >>=? fun (items, ctxt) -> return (Micheline.Seq (loc, List.rev items), ctxt) | Prim (loc, prim, items, annot) -> List.fold_left_es (fun (l, ctxt) item -> non_terminal_recursion ctxt mode item >|=? fun (item, ctxt) -> (item :: l, ctxt)) ([], ctxt) items >>=? fun (items, ctxt) -> return (Prim (loc, prim, List.rev items, annot), ctxt) | (Int _ | String _ | Bytes _) as atom -> return (atom, ctxt) let parse_and_unparse_script_unaccounted ctxt ~legacy ~allow_forged_in_storage mode ~normalize_types {code; storage} = Script.force_decode_in_context ~consume_deserialization_gas:When_needed ctxt code >>?= fun (code, ctxt) -> typecheck_code ~legacy ~show_types:false ctxt code >>=? fun ( Typechecked_code_internal { toplevel = { code_field; arg_type = original_arg_type_expr; storage_type = original_storage_type_expr; views; }; arg_type; storage_type; entrypoints; typed_views; type_map = _; }, ctxt ) -> parse_storage ctxt ~legacy ~allow_forged:allow_forged_in_storage storage_type ~storage >>=? fun (storage, ctxt) -> unparse_code ctxt ~stack_depth:0 mode code_field >>=? fun (code, ctxt) -> unparse_data ctxt ~stack_depth:0 mode storage_type storage >>=? fun (storage, ctxt) -> let loc = Micheline.dummy_location in (if normalize_types then unparse_parameter_ty ~loc ctxt arg_type ~entrypoints >>?= fun (arg_type, ctxt) -> unparse_ty ~loc ctxt storage_type >>?= fun (storage_type, ctxt) -> Script_map.map_es_in_context (fun ctxt _name (Typed_view {input_ty; output_ty; kinstr = _; original_code_expr}) -> Lwt.return ( unparse_ty ~loc ctxt input_ty >>? fun (input_ty, ctxt) -> unparse_ty ~loc ctxt output_ty >|? fun (output_ty, ctxt) -> ({input_ty; output_ty; view_code = original_code_expr}, ctxt) )) ctxt typed_views >|=? fun (views, ctxt) -> (arg_type, storage_type, views, ctxt) else return (original_arg_type_expr, original_storage_type_expr, views, ctxt)) >>=? fun (arg_type, storage_type, views, ctxt) -> Script_map.map_es_in_context (fun ctxt _name {input_ty; output_ty; view_code} -> unparse_code ctxt ~stack_depth:0 mode view_code >|=? fun (view_code, ctxt) -> ({input_ty; output_ty; view_code}, ctxt)) ctxt views >>=? fun (views, ctxt) -> let open Micheline in let unparse_view_unaccounted name {input_ty; output_ty; view_code} views = Prim ( loc, K_view, [ String (loc, Script_string.to_string name); input_ty; output_ty; view_code; ], [] ) :: views in let views = Script_map.fold unparse_view_unaccounted views [] |> List.rev in let code = Seq ( loc, [ Prim (loc, K_parameter, [arg_type], []); Prim (loc, K_storage, [storage_type], []); Prim (loc, K_code, [code], []); ] @ views ) in return ( { code = lazy_expr (strip_locations code); storage = lazy_expr (strip_locations storage); }, ctxt ) let pack_data_with_mode ctxt ty data ~mode = unparse_data ~stack_depth:0 ctxt mode ty data >>=? fun (unparsed, ctxt) -> Lwt.return @@ pack_node unparsed ctxt let hash_data ctxt ty data = pack_data_with_mode ctxt ty data ~mode:Optimized_legacy >>=? fun (bytes, ctxt) -> Lwt.return @@ hash_bytes ctxt bytes let pack_data ctxt ty data = pack_data_with_mode ctxt ty data ~mode:Optimized_legacy (* ---------------- Big map -------------------------------------------------*) let empty_big_map key_type value_type = Big_map { id = None; diff = {map = Big_map_overlay.empty; size = 0}; key_type; value_type; } let big_map_mem ctxt key (Big_map {id; diff; key_type; _}) = hash_comparable_data ctxt key_type key >>=? fun (key, ctxt) -> match (Big_map_overlay.find key diff.map, id) with | (None, None) -> return (false, ctxt) | (None, Some id) -> Alpha_context.Big_map.mem ctxt id key >|=? fun (ctxt, res) -> (res, ctxt) | (Some (_, None), _) -> return (false, ctxt) | (Some (_, Some _), _) -> return (true, ctxt) let big_map_get_by_hash ctxt key (Big_map {id; diff; value_type; _}) = match (Big_map_overlay.find key diff.map, id) with | (Some (_, x), _) -> return (x, ctxt) | (None, None) -> return (None, ctxt) | (None, Some id) -> ( Alpha_context.Big_map.get_opt ctxt id key >>=? function | (ctxt, None) -> return (None, ctxt) | (ctxt, Some value) -> parse_data ~stack_depth:0 ctxt ~legacy:true ~allow_forged:true value_type (Micheline.root value) >|=? fun (x, ctxt) -> (Some x, ctxt)) let big_map_get ctxt key (Big_map {key_type; _} as map) = hash_comparable_data ctxt key_type key >>=? fun (key_hash, ctxt) -> big_map_get_by_hash ctxt key_hash map let big_map_update_by_hash ctxt key_hash key value (Big_map map) = let contains = Big_map_overlay.mem key_hash map.diff.map in return ( Big_map { map with diff = { map = Big_map_overlay.add key_hash (key, value) map.diff.map; size = (if contains then map.diff.size else map.diff.size + 1); }; }, ctxt ) let big_map_update ctxt key value (Big_map {key_type; _} as map) = hash_comparable_data ctxt key_type key >>=? fun (key_hash, ctxt) -> big_map_update_by_hash ctxt key_hash key value map let big_map_get_and_update ctxt key value (Big_map {key_type; _} as map) = hash_comparable_data ctxt key_type key >>=? fun (key_hash, ctxt) -> big_map_update_by_hash ctxt key_hash key value map >>=? fun (map', ctxt) -> big_map_get_by_hash ctxt key_hash map >>=? fun (old_value, ctxt) -> return ((old_value, map'), ctxt) (* ---------------- Lazy storage---------------------------------------------*) type lazy_storage_ids = Lazy_storage.IdSet.t let no_lazy_storage_id = Lazy_storage.IdSet.empty let diff_of_big_map ctxt mode ~temporary ~ids_to_copy (Big_map {id; key_type; value_type; diff}) = (match id with | Some id -> if Lazy_storage.IdSet.mem Big_map id ids_to_copy then Big_map.fresh ~temporary ctxt >|=? fun (ctxt, duplicate) -> (ctxt, Lazy_storage.Copy {src = id}, duplicate) else (* The first occurrence encountered of a big_map reuses the ID. This way, the payer is only charged for the diff. For this to work, this diff has to be put at the end of the global diff, otherwise the duplicates will use the updated version as a base. This is true because we add this diff first in the accumulator of `extract_lazy_storage_updates`, and this accumulator is not reversed. *) return (ctxt, Lazy_storage.Existing, id) | None -> Big_map.fresh ~temporary ctxt >>=? fun (ctxt, id) -> Lwt.return (let kt = unparse_comparable_ty_uncarbonated ~loc:() key_type in Gas.consume ctxt (Script.strip_locations_cost kt) >>? fun ctxt -> unparse_ty ~loc:() ctxt value_type >>? fun (kv, ctxt) -> Gas.consume ctxt (Script.strip_locations_cost kv) >|? fun ctxt -> let key_type = Micheline.strip_locations kt in let value_type = Micheline.strip_locations kv in (ctxt, Lazy_storage.(Alloc Big_map.{key_type; value_type}), id))) >>=? fun (ctxt, init, id) -> let pairs = Big_map_overlay.fold (fun key_hash (key, value) acc -> (key_hash, key, value) :: acc) diff.map [] in List.fold_left_es (fun (acc, ctxt) (key_hash, key, value) -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>?= fun ctxt -> unparse_comparable_data ~loc:() ctxt mode key_type key >>=? fun (key_node, ctxt) -> Gas.consume ctxt (Script.strip_locations_cost key_node) >>?= fun ctxt -> let key = Micheline.strip_locations key_node in (match value with | None -> return (None, ctxt) | Some x -> unparse_data ~stack_depth:0 ctxt mode value_type x >>=? fun (node, ctxt) -> Lwt.return ( Gas.consume ctxt (Script.strip_locations_cost node) >|? fun ctxt -> (Some (Micheline.strip_locations node), ctxt) )) >|=? fun (value, ctxt) -> let diff_item = Big_map.{key; key_hash; value} in (diff_item :: acc, ctxt)) ([], ctxt) (List.rev pairs) >|=? fun (updates, ctxt) -> (Lazy_storage.Update {init; updates}, id, ctxt) let diff_of_sapling_state ctxt ~temporary ~ids_to_copy ({id; diff; memo_size} : Sapling.state) = (match id with | Some id -> if Lazy_storage.IdSet.mem Sapling_state id ids_to_copy then Sapling.fresh ~temporary ctxt >|=? fun (ctxt, duplicate) -> (ctxt, Lazy_storage.Copy {src = id}, duplicate) else return (ctxt, Lazy_storage.Existing, id) | None -> Sapling.fresh ~temporary ctxt >|=? fun (ctxt, id) -> (ctxt, Lazy_storage.Alloc Sapling.{memo_size}, id)) >|=? fun (ctxt, init, id) -> (Lazy_storage.Update {init; updates = diff}, id, ctxt) (** Witness flag for whether a type can be populated by a value containing a lazy storage. [False_f] must be used only when a value of the type cannot contain a lazy storage. This flag is built in [has_lazy_storage] and used only in [extract_lazy_storage_updates] and [collect_lazy_storage]. This flag is necessary to avoid these two functions to have a quadratic complexity in the size of the type. Add new lazy storage kinds here. Please keep the usage of this GADT local. *) type 'ty has_lazy_storage = | Big_map_f : ('a, 'b) big_map has_lazy_storage | Sapling_state_f : Sapling.state has_lazy_storage | False_f : _ has_lazy_storage | Pair_f : 'a has_lazy_storage * 'b has_lazy_storage -> ('a, 'b) pair has_lazy_storage | Union_f : 'a has_lazy_storage * 'b has_lazy_storage -> ('a, 'b) union has_lazy_storage | Option_f : 'a has_lazy_storage -> 'a option has_lazy_storage | List_f : 'a has_lazy_storage -> 'a boxed_list has_lazy_storage | Map_f : 'v has_lazy_storage -> (_, 'v) map has_lazy_storage (** This function is called only on storage and parameter types of contracts, once per typechecked contract. It has a complexity linear in the size of the types, which happen to be literally written types, so the gas for them has already been paid. *) let rec has_lazy_storage : type t tc. (t, tc) ty -> t has_lazy_storage = fun ty -> let aux1 cons t = match has_lazy_storage t with False_f -> False_f | h -> cons h in let aux2 cons t1 t2 = match (has_lazy_storage t1, has_lazy_storage t2) with | (False_f, False_f) -> False_f | (h1, h2) -> cons h1 h2 in match ty with | Big_map_t (_, _, _) -> Big_map_f | Sapling_state_t _ -> Sapling_state_f | Unit_t -> False_f | Int_t -> False_f | Nat_t -> False_f | Signature_t -> False_f | String_t -> False_f | Bytes_t -> False_f | Mutez_t -> False_f | Key_hash_t -> False_f | Key_t -> False_f | Timestamp_t -> False_f | Address_t -> False_f | Tx_rollup_l2_address_t -> False_f | Bool_t -> False_f | Lambda_t (_, _, _) -> False_f | Set_t (_, _) -> False_f | Contract_t (_, _) -> False_f | Operation_t -> False_f | Chain_id_t -> False_f | Never_t -> False_f | Bls12_381_g1_t -> False_f | Bls12_381_g2_t -> False_f | Bls12_381_fr_t -> False_f | Sapling_transaction_t _ -> False_f | Sapling_transaction_deprecated_t _ -> False_f | Ticket_t _ -> False_f | Chest_key_t -> False_f | Chest_t -> False_f | Pair_t (l, r, _, _) -> aux2 (fun l r -> Pair_f (l, r)) l r | Union_t (l, r, _, _) -> aux2 (fun l r -> Union_f (l, r)) l r | Option_t (t, _, _) -> aux1 (fun h -> Option_f h) t | List_t (t, _) -> aux1 (fun h -> List_f h) t | Map_t (_, t, _) -> aux1 (fun h -> Map_f h) t (** Transforms a value potentially containing lazy storage in an intermediary state to a value containing lazy storage only represented by identifiers. Returns the updated value, the updated set of ids to copy, and the lazy storage diff to show on the receipt and apply on the storage. *) let[@coq_axiom_with_reason "gadt"] extract_lazy_storage_updates ctxt mode ~temporary ids_to_copy acc ty x = let rec aux : type a ac. context -> unparsing_mode -> temporary:bool -> Lazy_storage.IdSet.t -> Lazy_storage.diffs -> (a, ac) ty -> a -> has_lazy_storage:a has_lazy_storage -> (context * a * Lazy_storage.IdSet.t * Lazy_storage.diffs) tzresult Lwt.t = fun ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>?= fun ctxt -> match (has_lazy_storage, ty, x) with | (False_f, _, _) -> return (ctxt, x, ids_to_copy, acc) | (Big_map_f, Big_map_t (_, _, _), map) -> diff_of_big_map ctxt mode ~temporary ~ids_to_copy map >|=? fun (diff, id, ctxt) -> let map = let (Big_map map) = map in Big_map { map with diff = {map = Big_map_overlay.empty; size = 0}; id = Some id; } in let diff = Lazy_storage.make Big_map id diff in let ids_to_copy = Lazy_storage.IdSet.add Big_map id ids_to_copy in (ctxt, map, ids_to_copy, diff :: acc) | (Sapling_state_f, Sapling_state_t _, sapling_state) -> diff_of_sapling_state ctxt ~temporary ~ids_to_copy sapling_state >|=? fun (diff, id, ctxt) -> let sapling_state = Sapling.empty_state ~id ~memo_size:sapling_state.memo_size () in let diff = Lazy_storage.make Sapling_state id diff in let ids_to_copy = Lazy_storage.IdSet.add Sapling_state id ids_to_copy in (ctxt, sapling_state, ids_to_copy, diff :: acc) | (Pair_f (hl, hr), Pair_t (tyl, tyr, _, _), (xl, xr)) -> aux ctxt mode ~temporary ids_to_copy acc tyl xl ~has_lazy_storage:hl >>=? fun (ctxt, xl, ids_to_copy, acc) -> aux ctxt mode ~temporary ids_to_copy acc tyr xr ~has_lazy_storage:hr >|=? fun (ctxt, xr, ids_to_copy, acc) -> (ctxt, (xl, xr), ids_to_copy, acc) | (Union_f (has_lazy_storage, _), Union_t (ty, _, _, _), L x) -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, L x, ids_to_copy, acc) | (Union_f (_, has_lazy_storage), Union_t (_, ty, _, _), R x) -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, R x, ids_to_copy, acc) | (Option_f has_lazy_storage, Option_t (ty, _, _), Some x) -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, Some x, ids_to_copy, acc) | (List_f has_lazy_storage, List_t (ty, _), l) -> List.fold_left_es (fun (ctxt, l, ids_to_copy, acc) x -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, Script_list.cons x l, ids_to_copy, acc)) (ctxt, Script_list.empty, ids_to_copy, acc) l.elements >|=? fun (ctxt, l, ids_to_copy, acc) -> let reversed = {length = l.length; elements = List.rev l.elements} in (ctxt, reversed, ids_to_copy, acc) | (Map_f has_lazy_storage, Map_t (_, ty, _), map) -> let (module M) = Script_map.get_module map in let bindings m = M.OPS.fold (fun k v bs -> (k, v) :: bs) m [] in List.fold_left_es (fun (ctxt, m, ids_to_copy, acc) (k, x) -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, M.OPS.add k x m, ids_to_copy, acc)) (ctxt, M.OPS.empty, ids_to_copy, acc) (bindings M.boxed) >|=? fun (ctxt, m, ids_to_copy, acc) -> let module M = struct module OPS = M.OPS type key = M.key type value = M.value let boxed = m let size = M.size end in ( ctxt, Script_map.make (module M : Boxed_map with type key = M.key and type value = M.value), ids_to_copy, acc ) | (_, Option_t (_, _, _), None) -> return (ctxt, None, ids_to_copy, acc) in let has_lazy_storage = has_lazy_storage ty in aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage (** We namespace an error type for [fold_lazy_storage]. The error case is only available when the ['error] parameter is equal to unit. *) module Fold_lazy_storage = struct type ('acc, 'error) result = | Ok : 'acc -> ('acc, 'error) result | Error : ('acc, unit) result end (** Prematurely abort if [f] generates an error. Use this function without the [unit] type for [error] if you are in a case where errors are impossible. *) let[@coq_axiom_with_reason "gadt"] rec fold_lazy_storage : type a ac error. f:('acc, error) Fold_lazy_storage.result Lazy_storage.IdSet.fold_f -> init:'acc -> context -> (a, ac) ty -> a -> has_lazy_storage:a has_lazy_storage -> (('acc, error) Fold_lazy_storage.result * context) tzresult = fun ~f ~init ctxt ty x ~has_lazy_storage -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>? fun ctxt -> match (has_lazy_storage, ty, x) with | (Big_map_f, Big_map_t (_, _, _), Big_map {id = Some id; _}) -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>? fun ctxt -> ok (f.f Big_map id (Fold_lazy_storage.Ok init), ctxt) | (Sapling_state_f, Sapling_state_t _, {id = Some id; _}) -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>? fun ctxt -> ok (f.f Sapling_state id (Fold_lazy_storage.Ok init), ctxt) | (False_f, _, _) -> ok (Fold_lazy_storage.Ok init, ctxt) | (Big_map_f, Big_map_t (_, _, _), Big_map {id = None; _}) -> ok (Fold_lazy_storage.Ok init, ctxt) | (Sapling_state_f, Sapling_state_t _, {id = None; _}) -> ok (Fold_lazy_storage.Ok init, ctxt) | (Pair_f (hl, hr), Pair_t (tyl, tyr, _, _), (xl, xr)) -> ( fold_lazy_storage ~f ~init ctxt tyl xl ~has_lazy_storage:hl >>? fun (init, ctxt) -> match init with | Fold_lazy_storage.Ok init -> fold_lazy_storage ~f ~init ctxt tyr xr ~has_lazy_storage:hr | Fold_lazy_storage.Error -> ok (init, ctxt)) | (Union_f (has_lazy_storage, _), Union_t (ty, _, _, _), L x) -> fold_lazy_storage ~f ~init ctxt ty x ~has_lazy_storage | (Union_f (_, has_lazy_storage), Union_t (_, ty, _, _), R x) -> fold_lazy_storage ~f ~init ctxt ty x ~has_lazy_storage | (_, Option_t (_, _, _), None) -> ok (Fold_lazy_storage.Ok init, ctxt) | (Option_f has_lazy_storage, Option_t (ty, _, _), Some x) -> fold_lazy_storage ~f ~init ctxt ty x ~has_lazy_storage | (List_f has_lazy_storage, List_t (ty, _), l) -> List.fold_left_e (fun ((init, ctxt) : ('acc, error) Fold_lazy_storage.result * context) x -> match init with | Fold_lazy_storage.Ok init -> fold_lazy_storage ~f ~init ctxt ty x ~has_lazy_storage | Fold_lazy_storage.Error -> ok (init, ctxt)) (Fold_lazy_storage.Ok init, ctxt) l.elements | (Map_f has_lazy_storage, Map_t (_, ty, _), m) -> Script_map.fold (fun _ v (acc : (('acc, error) Fold_lazy_storage.result * context) tzresult) -> acc >>? fun (init, ctxt) -> match init with | Fold_lazy_storage.Ok init -> fold_lazy_storage ~f ~init ctxt ty v ~has_lazy_storage | Fold_lazy_storage.Error -> ok (init, ctxt)) m (ok (Fold_lazy_storage.Ok init, ctxt)) let[@coq_axiom_with_reason "gadt"] collect_lazy_storage ctxt ty x = let has_lazy_storage = has_lazy_storage ty in let f kind id (acc : (_, never) Fold_lazy_storage.result) = let acc = match acc with Fold_lazy_storage.Ok acc -> acc in Fold_lazy_storage.Ok (Lazy_storage.IdSet.add kind id acc) in fold_lazy_storage ~f:{f} ~init:no_lazy_storage_id ctxt ty x ~has_lazy_storage >>? fun (ids, ctxt) -> match ids with Fold_lazy_storage.Ok ids -> ok (ids, ctxt) let[@coq_axiom_with_reason "gadt"] extract_lazy_storage_diff ctxt mode ~temporary ~to_duplicate ~to_update ty v = (* Basically [to_duplicate] are ids from the argument and [to_update] are ids from the storage before execution (i.e. it is safe to reuse them since they will be owned by the same contract). *) let to_duplicate = Lazy_storage.IdSet.diff to_duplicate to_update in extract_lazy_storage_updates ctxt mode ~temporary to_duplicate [] ty v >|=? fun (ctxt, v, alive, diffs) -> let diffs = if temporary then diffs else let dead = Lazy_storage.IdSet.diff to_update alive in Lazy_storage.IdSet.fold_all {f = (fun kind id acc -> Lazy_storage.make kind id Remove :: acc)} dead diffs in match diffs with | [] -> (v, None, ctxt) | diffs -> (v, Some diffs (* do not reverse *), ctxt) let list_of_big_map_ids ids = Lazy_storage.IdSet.fold Big_map (fun id acc -> id :: acc) ids [] let parse_data = parse_data ~stack_depth:0 let parse_instr : type a s. ?type_logger:type_logger -> tc_context -> context -> legacy:bool -> Script.node -> (a, s) stack_ty -> ((a, s) judgement * context) tzresult Lwt.t = fun ?type_logger tc_context ctxt ~legacy script_instr stack_ty -> parse_instr ~stack_depth:0 ?type_logger tc_context ctxt ~legacy script_instr stack_ty let unparse_data = unparse_data ~stack_depth:0 let unparse_code ctxt mode code = (* Constants need to be expanded or [unparse_code] may fail. *) Global_constants_storage.expand ctxt (strip_locations code) >>=? fun (ctxt, code) -> unparse_code ~stack_depth:0 ctxt mode (root code) let parse_contract context loc arg_ty contract ~entrypoint = parse_contract ~stack_depth:0 context loc arg_ty contract ~entrypoint let parse_toplevel ctxt ~legacy toplevel = Global_constants_storage.expand ctxt toplevel >>=? fun (ctxt, toplevel) -> Lwt.return @@ parse_toplevel ctxt ~legacy toplevel let parse_comparable_ty = parse_comparable_ty ~stack_depth:0 let parse_big_map_value_ty = parse_big_map_value_ty ~stack_depth:0 let parse_packable_ty = parse_packable_ty ~stack_depth:0 let parse_passable_ty = parse_passable_ty ~stack_depth:0 let parse_any_ty = parse_any_ty ~stack_depth:0 let parse_ty = parse_ty ~stack_depth:0 ~ret:Don't_parse_entrypoints let parse_parameter_ty_and_entrypoints = parse_parameter_ty_and_entrypoints ~stack_depth:0 let[@coq_axiom_with_reason "gadt"] get_single_sapling_state ctxt ty x = let has_lazy_storage = has_lazy_storage ty in let f (type i a u) (kind : (i, a, u) Lazy_storage.Kind.t) (id : i) single_id_opt : (Sapling.Id.t option, unit) Fold_lazy_storage.result = match kind with | Lazy_storage.Kind.Sapling_state -> ( match single_id_opt with | Fold_lazy_storage.Ok None -> Fold_lazy_storage.Ok (Some id) | Fold_lazy_storage.Ok (Some _) -> Fold_lazy_storage.Error (* more than one *) | Fold_lazy_storage.Error -> single_id_opt) | _ -> single_id_opt in fold_lazy_storage ~f:{f} ~init:None ctxt ty x ~has_lazy_storage >>? fun (id, ctxt) -> match id with | Fold_lazy_storage.Ok (Some id) -> ok (Some id, ctxt) | Fold_lazy_storage.Ok None | Fold_lazy_storage.Error -> ok (None, ctxt) (* {!Script_cache} needs a measure of the script size in memory. Determining this size is not easy in OCaml because of sharing. Indeed, many values present in the script share the same memory area. This is especially true for types and stack types: they are heavily shared in every typed IR internal representation. As a consequence, computing the size of the typed IR without taking sharing into account leads to a size which is sometimes two order of magnitude bigger than the actual size. We could track down this sharing. Unfortunately, sharing is not part of OCaml semantics: for this reason, a compiler can optimize memory representation by adding more sharing. If two nodes use different optimization flags or compilers, such a precise computation of the memory footprint of scripts would lead to two distinct sizes. As these sizes occur in the blockchain context, this situation would lead to a fork. For this reason, we introduce a *size model* for the script size. This model provides an overapproximation of the actual size in memory. The risk is to be too far from the actual size: the cache would then be wrongly marked as full. This situation would make the cache less useful but should present no security risk . *) let script_size (Ex_script (Script { code_size; code = _; arg_type = _; storage; storage_type; entrypoints = _; views = _; })) = let (nodes, storage_size) = Script_typed_ir_size.value_size storage_type storage in let cost = Script_typed_ir_size_costs.nodes_cost ~nodes in (Saturation_repr.(add code_size storage_size |> to_int), cost) let typecheck_code ~legacy ~show_types ctxt code = typecheck_code ~legacy ~show_types ctxt code >|=? fun (Typechecked_code_internal {type_map; _}, ctxt) -> (type_map, ctxt)
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2020 Metastate AG <hello@metastate.dev> *) (* Copyright (c) 2021-2022 Nomadic Labs <contact@nomadic-labs.com> *) (* Copyright (c) 2022 Trili Tech <contact@trili.tech> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
stream.mli
(** Streams and parsers. *) type 'a t (** The type of streams holding values of type ['a]. *) exception Failure (** Raised by parsers when none of the first components of the stream patterns is accepted. *) exception Error of string (** Raised by parsers when the first component of a stream pattern is accepted, but one of the following components is rejected. *) (** {1 Stream builders} *) val from : ?offset:int -> (unit -> 'a option) -> 'a t (** [Stream.from f] returns a stream built from the function [f]. To create a new stream element, the function [f] is called. The user function [f] must return either [Some <value>] for a value or [None] to specify the end of the stream. [offset] will initialize the stream [count] to start with [offset] consumed items, which is useful for some uses cases such as parsing resumption. *) val empty : unit -> 'a t (** Return the stream holding the elements of the list in the same order. *) val of_string : ?offset:int -> string -> char t (** Return the stream of the characters of the string parameter. If set. [offset] parameter is similar to [from]. *) (** Return the stream of the characters of the string parameter. If set. [offset] parameter is similar to [from]. *) val of_channel : in_channel -> char t (** Return the stream of the characters read from the input channel. *) (** {1 Predefined parsers} *) val next : 'a t -> 'a (** Return the first element of the stream and remove it from the stream. @raise Stream.Failure if the stream is empty. *) val is_empty : 'a t -> bool (** Return [true] if the stream is empty, else [false]. *) (** {1 Useful functions} *) val peek : 'a t -> 'a option (** Return [Some] of "the first element" of the stream, or [None] if the stream is empty. *) val junk : 'a t -> unit (** Remove the first element of the stream, possibly unfreezing it before. *) val count : 'a t -> int (** Return the current count of the stream elements, i.e. the number of the stream elements discarded. *) val npeek : int -> 'a t -> 'a list (** [npeek n] returns the list of the [n] first elements of the stream, or all its remaining elements if less than [n] elements are available. *) val nth : int -> 'a t -> 'a val njunk : int -> 'a t -> unit (**/**)
(**************************************************************************) (* *) (* OCaml *) (* *) (* Daniel de Rauglaudre, projet Cristal, INRIA Rocquencourt *) (* *) (* Copyright 1997 Institut National de Recherche en Informatique et *) (* en Automatique. *) (* *) (* All rights reserved. This file is distributed under the terms of *) (* the GNU Lesser General Public License version 2.1, with the *) (* special exception on linking described in the file LICENSE. *) (* *) (**************************************************************************)
indexable_intf.ml
open! Import open Store_properties module type S_without_key_impl = sig include Read_only.S (** @inline *) type hash (** The type of hashes of [value]. *) val add : [> write ] t -> value -> key Lwt.t (** Write the contents of a value to the store, and obtain its key. *) val unsafe_add : [> write ] t -> hash -> value -> key Lwt.t (** Same as {!add} but allows specifying the value's hash directly. The backend might choose to discard that hash and/or can be corrupt if the hash is not consistent. *) val index : [> read ] t -> hash -> key option Lwt.t (** Indexing maps the hash of a value to a corresponding key of that value in the store. For stores that are addressed by hashes directly, this is typically [fun _t h -> Lwt.return (Key.of_hash h)]; for stores with more complex addressing schemes, [index] may attempt a lookup operation in the store. In general, indexing is best-effort and reveals no information about the membership of the value in the store. In particular: - [index t hash = Some key] doesn't guarantee [mem t key]: the value with hash [hash] may still be absent from the store; - [index t hash = None] doesn't guarantee that there is no [key] such that [mem t key] and [Key.to_hash key = hash]: the value may still be present in the store under a key that is not indexed. *) include Batch with type 'a t := 'a t (** @inline *) end module type S = sig (** An {i indexable} store is a read-write store in which values can be added and later found via their keys. Keys are not necessarily portable between different stores, so each store provides an {!val-index} mechanism to find keys by the hashes of the values they reference. *) include S_without_key_impl (* @inline *) module Key : Key.S with type t = key and type hash = hash end module type Maker = functor (Hash : Hash.S) (Value : Type.S) -> sig include S with type value = Value.t and type hash = Hash.t include Of_config with type 'a t := 'a t (** @inline *) end (** A {!Maker_concrete_key} is an indexable store in which the key type is uniquely determined by the hash type and is stated up-front. *) module type Maker_concrete_key1 = sig type 'h key module Key : functor (Hash : Hash.S) -> Key.S with type t = Hash.t key and type hash = Hash.t module Make : functor (Hash : Hash.S) (Value : Type.S) -> sig include S with type value = Value.t and type hash = Hash.t and type key = Hash.t key include Of_config with type 'a t := 'a t (** @inline *) end end (** Like {!Maker_concrete_key1}, but the key type may also depend on type of the value that it references. *) module type Maker_concrete_key2 = sig type ('h, 'v) key module Key : functor (Hash : Hash.S) (Value : Type.S) -> Key.S with type t = (Hash.t, Value.t) key and type hash = Hash.t module Make : functor (Hash : Hash.S) (Value : Type.S) -> sig include S with type value = Value.t and type hash = Hash.t and type key = (Hash.t, Value.t) key include Of_config with type 'a t := 'a t (** @inline *) end end module type Sigs = sig module type S = S module type S_without_key_impl = S_without_key_impl module type Maker = Maker module type Maker_concrete_key1 = Maker_concrete_key1 module type Maker_concrete_key2 = Maker_concrete_key2 module Maker_concrete_key2_of_1 (X : Maker_concrete_key1) : Maker_concrete_key2 with type ('h, _) key = 'h X.key module Of_content_addressable (Key : Type.S) (S : Content_addressable.S with type key = Key.t) : S with type 'a t = 'a S.t and type key = Key.t and type hash = Key.t and type value = S.value module Check_closed_store (CA : S) : sig include S with type key = CA.key and type hash = CA.hash and type value = CA.value val make_closeable : 'a CA.t -> 'a t (** [make_closeable t] returns a version of [t] that raises {!Irmin.Closed} if an operation is performed when it is already closed. *) val get_if_open_exn : 'a t -> 'a CA.t (** [get_if_open_exn t] returns the store (without close checks) if it is open; otherwise raises {!Irmin.Closed} *) end module Check_closed (M : Maker) : Maker end
(* * Copyright (c) 2021 Craig Ferguson <craig@tarides.com> * Copyright (c) 2018-2022 Tarides <contact@tarides.com> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)