filename
stringlengths
3
67
data
stringlengths
0
58.3M
license
stringlengths
0
19.5k
queue.mli
type 'a t exception Empty val create : unit -> 'a t val add : 'a -> 'a t -> unit val push : 'a -> 'a t -> unit val take : 'a t -> 'a val pop : 'a t -> 'a val peek : 'a t -> 'a val top : 'a t -> 'a val clear : 'a t -> unit val copy : 'a t -> 'a t val is_empty : 'a t -> bool val length : 'a t -> int val iter : ('a -> unit) -> 'a t -> unit val fold : ('a -> 'b -> 'a) -> 'a -> 'b t -> 'a val transfer : 'a t -> 'a t -> unit
pretyping.mli
(** This file implements type inference. It maps [glob_constr] (i.e. untyped terms whose names are located) to [constr]. In particular, it drives complex pattern-matching problems ("match") into elementary ones, insertion of coercions and resolution of implicit arguments. *) open Names open Environ open Evd open EConstr open Glob_term open Ltac_pretype val add_bidirectionality_hint : GlobRef.t -> int -> unit (** A bidirectionality hint `n` for a global `g` tells the pretyper to use typing information from the context after typing the `n` for arguments of an application of `g`. *) val get_bidirectionality_hint : GlobRef.t -> int option val clear_bidirectionality_hint : GlobRef.t -> unit (** An auxiliary function for searching for fixpoint guard indexes *) val search_guard : ?loc:Loc.t -> env -> int list list -> Constr.rec_declaration -> int array type typing_constraint = | IsType (** Necessarily a type *) | OfType of types (** A term of the expected type *) | WithoutTypeConstraint (** A term of unknown expected type *) type use_typeclasses = NoUseTC | UseTCForConv | UseTC (** Typeclasses are used in 2 ways: - through the "Typeclass Resolution For Conversion" option, if a conversion problem fails we try again after resolving typeclasses (UseTCForConv and UseTC) - after pretyping we resolve typeclasses (UseTC) (in [solve_remaining_evars]) *) type inference_flags = { use_coercions : bool; use_typeclasses : use_typeclasses; solve_unification_constraints : bool; fail_evar : bool; expand_evars : bool; program_mode : bool; polymorphic : bool; } val default_inference_flags : bool -> inference_flags val no_classes_no_fail_inference_flags : inference_flags val all_no_fail_flags : inference_flags val all_and_fail_flags : inference_flags (** Generic calls to the interpreter from glob_constr to open_constr; by default, inference_flags tell to use type classes and heuristics (but no external tactic solver hooks), as well as to ensure that conversion problems are all solved and expand evars, but unresolved evars can remain. The difference is in whether the evar_map is modified explicitly or by side-effect. *) val understand_tcc : ?flags:inference_flags -> env -> evar_map -> ?expected_type:typing_constraint -> glob_constr -> evar_map * constr (** As [understand_tcc] but also returns the type of the elaborated term. The [expand_evars] flag is not applied to the type (only to the term). *) val understand_tcc_ty : ?flags:inference_flags -> env -> evar_map -> ?expected_type:typing_constraint -> glob_constr -> evar_map * constr * types (** More general entry point with evars from ltac *) (** Generic call to the interpreter from glob_constr to constr In [understand_ltac flags sigma env ltac_env constraint c], flags: tell how to manage evars sigma: initial set of existential variables (typically current goals) ltac_env: partial substitution of variables (used for the tactic language) constraint: tell if interpreted as a possibly constrained term or a type *) val understand_ltac : inference_flags -> env -> evar_map -> ltac_var_map -> typing_constraint -> glob_constr -> evar_map * EConstr.t (** Standard call to get a constr from a glob_constr, resolving implicit arguments and coercions, and compiling pattern-matching; the default inference_flags tells to use type classes and heuristics (but no external tactic solver hook), as well as to ensure that conversion problems are all solved and that no unresolved evar remains, expanding evars. *) val understand : ?flags:inference_flags -> ?expected_type:typing_constraint -> env -> evar_map -> glob_constr -> constr Evd.in_evar_universe_context val understand_uconstr : ?flags:inference_flags -> ?expected_type:typing_constraint -> env -> evar_map -> Ltac_pretype.closed_glob_constr -> evar_map * EConstr.t (** [hook env sigma ev] returns [Some (sigma', term)] if [ev] can be instantiated with a solution, [None] otherwise. Used to extend [solve_remaining_evars] below. *) type inference_hook = env -> evar_map -> Evar.t -> (evar_map * constr) option (** Trying to solve remaining evars and remaining conversion problems possibly using type classes, heuristics, external tactic solver hook depending on given flags. *) (* For simplicity, it is assumed that current map has no other evars with candidate and no other conversion problems that the one in [pending], however, it can contain more evars than the pending ones. *) val solve_remaining_evars : ?hook:inference_hook -> inference_flags -> env -> ?initial:evar_map -> (* current map *) evar_map -> evar_map (** Checking evars and pending conversion problems are all solved, reporting an appropriate error message *) val check_evars_are_solved : program_mode:bool -> env -> ?initial:evar_map -> (* current map: *) evar_map -> unit (** [check_evars env ?initial sigma c] fails if some unresolved evar remains in [c] which isn't in [initial] (any unresolved evar if [initial] not provided) *) val check_evars : env -> ?initial:evar_map -> evar_map -> constr -> unit (**/**) (** Internal of Pretyping... *) val ise_pretype_gen : inference_flags -> env -> evar_map -> ltac_var_map -> typing_constraint -> glob_constr -> evar_map * constr * types (** {6 Open-recursion style pretyper} *) type pretype_flags = { poly : bool; resolve_tc : bool; program_mode : bool; use_coercions : bool; } type 'a pretype_fun = ?loc:Loc.t -> flags:pretype_flags -> Evardefine.type_constraint -> GlobEnv.t -> evar_map -> evar_map * 'a type pretyper = { pretype_ref : pretyper -> GlobRef.t * glob_level list option -> unsafe_judgment pretype_fun; pretype_var : pretyper -> Id.t -> unsafe_judgment pretype_fun; pretype_evar : pretyper -> existential_name CAst.t * (lident * glob_constr) list -> unsafe_judgment pretype_fun; pretype_patvar : pretyper -> Evar_kinds.matching_var_kind -> unsafe_judgment pretype_fun; pretype_app : pretyper -> glob_constr * glob_constr list -> unsafe_judgment pretype_fun; pretype_proj : pretyper -> (Constant.t * glob_level list option) * glob_constr list * glob_constr -> unsafe_judgment pretype_fun; pretype_lambda : pretyper -> Name.t * binding_kind * glob_constr * glob_constr -> unsafe_judgment pretype_fun; pretype_prod : pretyper -> Name.t * binding_kind * glob_constr * glob_constr -> unsafe_judgment pretype_fun; pretype_letin : pretyper -> Name.t * glob_constr * glob_constr option * glob_constr -> unsafe_judgment pretype_fun; pretype_cases : pretyper -> Constr.case_style * glob_constr option * tomatch_tuples * cases_clauses -> unsafe_judgment pretype_fun; pretype_lettuple : pretyper -> Name.t list * (Name.t * glob_constr option) * glob_constr * glob_constr -> unsafe_judgment pretype_fun; pretype_if : pretyper -> glob_constr * (Name.t * glob_constr option) * glob_constr * glob_constr -> unsafe_judgment pretype_fun; pretype_rec : pretyper -> glob_fix_kind * Id.t array * glob_decl list array * glob_constr array * glob_constr array -> unsafe_judgment pretype_fun; pretype_sort : pretyper -> glob_sort -> unsafe_judgment pretype_fun; pretype_hole : pretyper -> Evar_kinds.t * Namegen.intro_pattern_naming_expr * Genarg.glob_generic_argument option -> unsafe_judgment pretype_fun; pretype_cast : pretyper -> glob_constr * Constr.cast_kind * glob_constr -> unsafe_judgment pretype_fun; pretype_int : pretyper -> Uint63.t -> unsafe_judgment pretype_fun; pretype_float : pretyper -> Float64.t -> unsafe_judgment pretype_fun; pretype_array : pretyper -> glob_level list option * glob_constr array * glob_constr * glob_constr -> unsafe_judgment pretype_fun; pretype_type : pretyper -> glob_constr -> unsafe_type_judgment pretype_fun; } (** Type of pretyping algorithms in open-recursion style. A typical way to implement a pretyping variant is to inherit from some pretyper using record inheritance and replacing particular fields with the [where] clause. Recursive calls to the subterms should call the [pretyper] provided as the first argument to the function. This object can be turned in an actual pretyping function through the {!eval_pretyper} function below. *) val default_pretyper : pretyper (** Coq vanilla pretyper. *) val eval_pretyper : pretyper -> flags:pretype_flags -> Evardefine.type_constraint -> GlobEnv.t -> evar_map -> glob_constr -> evar_map * unsafe_judgment
(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* <O___,, * (see version control and CREDITS file for authors & dates) *) (* \VV/ **************************************************************) (* // * This file is distributed under the terms of the *) (* * GNU Lesser General Public License Version 2.1 *) (* * (see LICENSE file for the text of the license) *) (************************************************************************)
sapling_validator.ml
(* Check that each nullifier is not already present in the state and add it. Important to avoid spending the same input twice in a transaction. *) let rec check_and_update_nullifiers ctxt state inputs = match inputs with | [] -> return (ctxt, Some state) | input :: inputs -> ( Sapling_storage.nullifiers_mem ctxt state Sapling.UTXO.(input.nf) >>=? function | (ctxt, true) -> return (ctxt, None) | (ctxt, false) -> let state = Sapling_storage.nullifiers_add state Sapling.UTXO.(input.nf) in check_and_update_nullifiers ctxt state inputs ) let verify_update : Raw_context.t -> Sapling_storage.state -> Sapling_repr.transaction -> string -> (Raw_context.t * (Int64.t * Sapling_storage.state) option) tzresult Lwt.t = fun ctxt state transaction key -> (* Check the transaction *) (* To avoid overflowing the balance, the number of inputs and outputs must be bounded. Ciphertexts' memo_size must match the state's memo_size. These constraints are already enforced at the encoding level. *) assert (Compare.Int.(List.compare_length_with transaction.inputs 5208 <= 0)) ; assert (Compare.Int.(List.compare_length_with transaction.outputs 2019 <= 0)) ; let pass = List.for_all (fun output -> Compare.Int.( Sapling.Ciphertext.get_memo_size Sapling.UTXO.(output.ciphertext) = state.memo_size)) transaction.outputs in if not pass then return (ctxt, None) else (* Check the root is a recent state *) Sapling_storage.root_mem ctxt state transaction.root >>=? fun pass -> if not pass then return (ctxt, None) else check_and_update_nullifiers ctxt state transaction.inputs >|=? function | (ctxt, None) -> (ctxt, None) | (ctxt, Some state) -> Sapling.Verification.with_verification_ctx (fun vctx -> let pass = (* Check all the output ZK proofs *) List.for_all (fun output -> Sapling.Verification.check_output vctx output) transaction.outputs in if not pass then (ctxt, None) else let pass = (* Check all the input Zk proofs and signatures *) List.for_all (fun input -> Sapling.Verification.check_spend vctx input transaction.root key) transaction.inputs in if not pass then (ctxt, None) else let pass = (* Check the signature and balance of the whole transaction *) Sapling.Verification.final_check vctx transaction key in if not pass then (ctxt, None) else (* update tree *) let list_to_add = List.map (fun output -> Sapling.UTXO.(output.cm, output.ciphertext)) transaction.outputs in let state = Sapling_storage.add state list_to_add in (ctxt, Some (transaction.balance, state)))
(* The MIT License (MIT) * * Copyright (c) 2019-2020 Nomadic Labs <contact@nomadic-labs.com> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. *)
contract_services.mli
open Alpha_context val list: 'a #RPC_context.simple -> 'a -> Contract.t list shell_tzresult Lwt.t type info = { manager: public_key_hash ; balance: Tez.t ; spendable: bool ; delegate: bool * public_key_hash option ; counter: counter ; script: Script.t option ; } val info_encoding: info Data_encoding.t val info: 'a #RPC_context.simple -> 'a -> Contract.t -> info shell_tzresult Lwt.t val balance: 'a #RPC_context.simple -> 'a -> Contract.t -> Tez.t shell_tzresult Lwt.t val manager: 'a #RPC_context.simple -> 'a -> Contract.t -> public_key_hash shell_tzresult Lwt.t val manager_key: 'a #RPC_context.simple -> 'a -> Contract.t -> (public_key_hash * public_key option) shell_tzresult Lwt.t val delegate: 'a #RPC_context.simple -> 'a -> Contract.t -> public_key_hash shell_tzresult Lwt.t val delegate_opt: 'a #RPC_context.simple -> 'a -> Contract.t -> public_key_hash option shell_tzresult Lwt.t val is_delegatable: 'a #RPC_context.simple -> 'a -> Contract.t -> bool shell_tzresult Lwt.t val is_spendable: 'a #RPC_context.simple -> 'a -> Contract.t -> bool shell_tzresult Lwt.t val counter: 'a #RPC_context.simple -> 'a -> Contract.t -> counter shell_tzresult Lwt.t val script: 'a #RPC_context.simple -> 'a -> Contract.t -> Script.t shell_tzresult Lwt.t val script_opt: 'a #RPC_context.simple -> 'a -> Contract.t -> Script.t option shell_tzresult Lwt.t val storage: 'a #RPC_context.simple -> 'a -> Contract.t -> Script.expr shell_tzresult Lwt.t val storage_opt: 'a #RPC_context.simple -> 'a -> Contract.t -> Script.expr option shell_tzresult Lwt.t val big_map_get_opt: 'a #RPC_context.simple -> 'a -> Contract.t -> Script.expr * Script.expr -> Script.expr option shell_tzresult Lwt.t val register: unit -> unit
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
cudf.c
// TODO should check / handle exceptions for all invoked caml_callback-s // TODO better management of g_error() (not all should be fatal) // TODO property-by-property access for preamble (as per packages) #include <stdio.h> #include <string.h> #define CAML_NAME_SPACE #include <caml/alloc.h> #include <caml/callback.h> #include <caml/memory.h> #include <caml/mlvalues.h> #include "cudf-private.h" // instantiate OCaml-related ADTs #include "cudf-variants.h" #include "cudf.h" #define Val_none Val_int(0) #define Some_val(v) Field(v,0) /* field indexes in the return type of {!Cudf_parser.parse_from_file}, * {!Cudf_parser.load_from_file}, and {!Cudf_parser.load_solution_from_file} */ #define FIELD_PRE 0 #define FIELD_UNIV 1 // universe for load_*, package list for parse_* #define FIELD_REQ 2 // unused for load_solution_from_file /* field indexes in {!Cudf.package} */ #define FIELD_PKG 0 #define FIELD_VERSION 1 #define FIELD_DEPS 2 #define FIELD_CONFL 3 #define FIELD_PROV 4 #define FIELD_INST 5 #define FIELD_WASINST 6 #define FIELD_KEEP 7 #define FIELD_PKGEXTRA 8 /* field indexes in {!Cudf.request} */ #define FIELD_REQID 0 #define FIELD_REQINST 1 #define FIELD_REQREM 2 #define FIELD_REQUP 3 #define FIELD_REQEXTRA 4 /* field indexes in {!Cudf.preamble} */ #define FIELD_PREID 0 #define FIELD_TYPEDECL 1 #define FIELD_UCHECK 2 #define FIELD_SCHECK 3 #define FIELD_RCHECK 4 /* field indexes in the return type of {!Cudf_checker.is_solution} and * {!Cudf_checker.is_consistent} */ #define FIELD_ISSOL 0 /* Initialize a pointer to an OCaml value */ #define NEW_MLVAL(p) \ do { p = malloc(sizeof(value)); \ caml_register_global_root(p); } \ while (0) /* Free a pointer to an OCaml value */ #define FREE_MLVAL(p) \ do { free(p); \ caml_remove_global_root(p); } \ while (0) /** generic OCaml binding helpers */ #if 0 static int caml_list_length(value l) { int length = 0; while (l != Val_emptylist) { length++; l = Field(l, 1); } return length; } #endif /** CUDF-specific binding helpers */ static int relop_val(value v) { CAMLparam1(v); int op; switch (Int_val(v)) { case MLPVAR_Eq : op = RELOP_EQ ; break ; case MLPVAR_Neq : op = RELOP_NEQ ; break ; case MLPVAR_Geq : op = RELOP_GEQ ; break ; case MLPVAR_Gt : op = RELOP_GT ; break ; case MLPVAR_Leq : op = RELOP_LEQ ; break ; case MLPVAR_Lt : op = RELOP_LT ; break ; default : g_error("Internal error: unexpected variant for \"relop\": %d", Int_val(v)); } CAMLreturnT(int, op); } cudf_vpkg_t *cudf_vpkg_val(value ml_vpkg) { CAMLparam1(ml_vpkg); CAMLlocal1(ml_constr); cudf_vpkg_t *vpkg; vpkg = malloc(sizeof(cudf_vpkg_t)); vpkg->name = strdup(String_val(Field(ml_vpkg, 0))); if (Field(ml_vpkg, 1) != Val_none) { /* version constraint */ ml_constr = Some_val(Field(ml_vpkg, 1)); vpkg->relop = relop_val(Field(ml_constr, 0)); vpkg->version = Int_val(Field(ml_constr, 1)); } else { /* no version constraint */ vpkg->relop = 0; vpkg->version = -1; } CAMLreturnT(cudf_vpkg_t *, vpkg); } cudf_vpkglist_t cudf_vpkglist_val(value ml_vpkgs) { CAMLparam1(ml_vpkgs); CAMLlocal1(ml_vpkg); GList *l = NULL; cudf_vpkg_t *vpkg; while (ml_vpkgs != Val_emptylist) { ml_vpkg = Field(ml_vpkgs, 0); vpkg = cudf_vpkg_val(ml_vpkg); l = g_list_append(l, vpkg); ml_vpkgs = Field(ml_vpkgs, 1); } CAMLreturnT(cudf_vpkglist_t, l); } cudf_vpkgformula_t cudf_vpkgformula_val(value ml_fmla) { CAMLparam1(ml_fmla); CAMLlocal2(ml_and, ml_or); GList *and_l = NULL; /* top-level formula (CNF) */ GList *or_l; /* OR-ed deps */ /* ml_and: iterates over OR-ed deps (which are AND-ed together) */ /* ml_or: iterates over vpkg-s (which are OR-ed together) */ cudf_vpkg_t *vpkg; ml_and = ml_fmla; while (ml_and != Val_emptylist) { ml_or = Field(ml_and, 0); or_l = NULL; while (ml_or != Val_emptylist) { vpkg = cudf_vpkg_val(Field(ml_or, 0)); or_l = g_list_append(or_l, vpkg); ml_or = Field(ml_or, 1); } and_l = g_list_append(and_l, or_l); ml_and = Field(ml_and, 1); } CAMLreturnT(cudf_vpkgformula_t, and_l); } cudf_value_t *cudf_value_val(value ml_v) { CAMLparam1(ml_v); CAMLlocal1(ml_payload); cudf_value_t *v; int typ; v = malloc(sizeof(cudf_value_t)); typ = Int_val(Field(ml_v, 0)); ml_payload = Field(ml_v, 1); v->typ = typ; switch (typ) { case MLPVAR_Int : v->typ = TYPE_INT; v->val.i = Int_val(ml_payload); break; case MLPVAR_Posint : v->typ = TYPE_POSINT; v->val.i = Int_val(ml_payload); break; case MLPVAR_Nat : v->typ = TYPE_NAT; v->val.i = Int_val(ml_payload); break; case MLPVAR_Bool : v->typ = TYPE_BOOL; v->val.i = Bool_val(ml_payload); break; case MLPVAR_String : v->typ = TYPE_STRING; v->val.s = strdup(String_val(ml_payload)); case MLPVAR_Pkgname : v->typ = TYPE_PKGNAME; v->val.s = strdup(String_val(ml_payload)); case MLPVAR_Ident : v->typ = TYPE_IDENT; v->val.s = strdup(String_val(ml_payload)); break; case MLPVAR_Enum : v->typ = TYPE_ENUM; /* Skip enum list and jump to the actual enum. Enum list is * currently not accessible using C bindings. */ v->val.s = strdup(String_val(Field(ml_payload, 1))); break; case MLPVAR_Vpkg : v->typ = TYPE_VPKG; v->val.vpkg = cudf_vpkg_val(ml_payload); break; case MLPVAR_Veqpkg : v->typ = TYPE_VEQPKG; v->val.vpkg = cudf_vpkg_val(ml_payload); break; case MLPVAR_Vpkglist : v->typ = TYPE_VPKGLIST; v->val.vpkgs = cudf_vpkglist_val(ml_payload); break; case MLPVAR_Veqpkglist : v->typ = TYPE_VEQPKGLIST; v->val.vpkgs = cudf_vpkglist_val(ml_payload); break; case MLPVAR_Vpkgformula : v->typ = TYPE_VPKGFORMULA; v->val.f = cudf_vpkgformula_val(ml_payload); break; case MLPVAR_Typedecl : v->typ = TYPE_TYPEDECL; break; default : g_error("Internal error: unexpected variant for type: %d", typ); } CAMLreturnT(cudf_value_t *, v); } /** libCUDF binding public interface */ void cudf_init() { char *fake_argv[] = {"", NULL}; static int cudf_initialized = 0; if (cudf_initialized) return; caml_startup(fake_argv); cudf_initialized = 1; } cudf_doc_t *cudf_parse_from_file(char *fname) { CAMLparam0(); CAMLlocal2(ml_doc, ml_pkgs); static value *closure_f = NULL; cudf_doc_t *doc; GList *l = NULL; cudf_package_t pkg; doc = malloc(sizeof(cudf_doc_t)); if (closure_f == NULL) closure_f = caml_named_value("parse_from_file"); ml_doc = caml_callback(*closure_f, caml_copy_string(fname)); NEW_MLVAL(doc->preamble); /* preamble */ if (Field(ml_doc, FIELD_PRE) != Val_none) { doc->has_preamble = 1; *(doc->preamble) = Some_val(Field(ml_doc, FIELD_PRE)); } else { doc->has_preamble = 0; *(doc->preamble) = Val_none; } NEW_MLVAL(doc->request); /* request */ if (Field(ml_doc, FIELD_REQ) != Val_none) { doc->has_request = 1; *(doc->request) = Some_val(Field(ml_doc, FIELD_REQ)); } else { doc->has_request = 0; *(doc->request) = Val_none; } ml_pkgs = Field(ml_doc, FIELD_UNIV); /* packages */ while (ml_pkgs != Val_emptylist) { NEW_MLVAL(pkg); *pkg = Field(ml_pkgs, 0); l = g_list_prepend(l, pkg); ml_pkgs = Field(ml_pkgs, 1); } doc->packages = g_list_reverse(l); CAMLreturnT(cudf_doc_t *, doc); } cudf_t *cudf_load_from_file(char *fname) { CAMLparam0(); CAMLlocal1(ml_cudf); static value *closure_f = NULL; cudf_t *cudf; cudf = malloc(sizeof(cudf_t)); if (closure_f == NULL) closure_f = caml_named_value("load_from_file"); ml_cudf = caml_callback(*closure_f, caml_copy_string(fname)); NEW_MLVAL(cudf->preamble); /* preamble */ if (Field(ml_cudf, FIELD_PRE) != Val_none) { cudf->has_preamble = 1; *(cudf->preamble) = Some_val(Field(ml_cudf, FIELD_PRE)); } else { cudf->has_preamble = 0; *(cudf->preamble) = Val_none; } NEW_MLVAL(cudf->request); /* request */ if (Field(ml_cudf, FIELD_REQ) != Val_none) { cudf->has_request = 1; *(cudf->request) = Some_val(Field(ml_cudf, FIELD_REQ)); } else { cudf->has_request = 0; *(cudf->request) = Val_none; } NEW_MLVAL(cudf->universe); /* universe */ *(cudf->universe) = Field(ml_cudf, FIELD_UNIV); CAMLreturnT(cudf_t *, cudf); } cudf_t *cudf_load_solution_from_file(char *fname, cudf_universe_t ref_univ) { CAMLparam0(); CAMLlocal1(ml_cudf); static value *closure_f = NULL; cudf_t *cudf; cudf = malloc(sizeof(cudf_t)); if (closure_f == NULL) closure_f = caml_named_value("load_solution_from_file"); ml_cudf = caml_callback2(*closure_f, caml_copy_string(fname), *ref_univ); NEW_MLVAL(cudf->preamble); /* preamble */ if (Field(ml_cudf, FIELD_PRE) != Val_none) { cudf->has_preamble = 1; *(cudf->preamble) = Some_val(Field(ml_cudf, FIELD_PRE)); } else { cudf->has_preamble = 0; *(cudf->preamble) = Val_none; } NEW_MLVAL(cudf->request); /* request */ cudf->has_request = 0; /* solutions have no request */ *(cudf->request) = Val_none; NEW_MLVAL(cudf->universe); /* universe */ *(cudf->universe) = Field(ml_cudf, FIELD_UNIV); CAMLreturnT(cudf_t *, cudf); } char *cudf_pkg_name(cudf_package_t pkg) { return String_val(Field(*pkg, FIELD_PKG)); } int cudf_pkg_version(cudf_package_t pkg) { return Int_val(Field(*pkg, FIELD_VERSION)); } int cudf_pkg_installed(cudf_package_t pkg) { return Int_val(Field(*pkg, FIELD_INST)); } int cudf_pkg_was_installed(cudf_package_t pkg) { return Int_val(Field(*pkg, FIELD_WASINST)); } int cudf_pkg_keep(cudf_package_t pkg) { CAMLparam0(); CAMLlocal1(keep); int k; keep = Field(*pkg, FIELD_KEEP); switch (Int_val(keep)) { case MLPVAR_Keep_none : k = KEEP_NONE ; break ; case MLPVAR_Keep_version : k = KEEP_VERSION ; break ; case MLPVAR_Keep_package : k = KEEP_PACKAGE ; break ; case MLPVAR_Keep_feature : k = KEEP_FEATURE ; break ; default : g_error("Internal error: unexpected variant for \"keep\": %d", Int_val(keep)); } CAMLreturnT(int, k); } cudf_vpkgformula_t cudf_pkg_depends(cudf_package_t pkg) { return cudf_vpkgformula_val(Field(*pkg, FIELD_DEPS)); } cudf_vpkglist_t cudf_pkg_conflicts(cudf_package_t pkg) { return cudf_vpkglist_val(Field(*pkg, FIELD_CONFL)); } cudf_vpkglist_t cudf_pkg_provides(cudf_package_t pkg) { return cudf_vpkglist_val(Field(*pkg, FIELD_PROV)); } char *cudf_pkg_property(cudf_package_t pkg, const char *prop) { CAMLparam0(); CAMLlocal1(prop_val); static value *closure_f = NULL; if (closure_f == NULL) closure_f = caml_named_value("lookup_package_property"); prop_val = caml_callback2_exn(*closure_f, *pkg, caml_copy_string(prop)); CAMLreturnT(char *, Is_exception_result(prop_val) ? NULL : strdup(String_val(prop_val))); } char *cudf_req_property(cudf_request_t req, const char *prop) { CAMLparam0(); CAMLlocal1(prop_val); static value *closure_f = NULL; if (closure_f == NULL) closure_f = caml_named_value("lookup_request_property"); prop_val = caml_callback2_exn(*closure_f, *req, caml_copy_string(prop)); CAMLreturnT(char *, Is_exception_result(prop_val) ? NULL : strdup(String_val(prop_val))); } cudf_vpkglist_t cudf_req_install(cudf_request_t req) { return cudf_vpkglist_val(Field(*req, FIELD_REQINST)); } cudf_vpkglist_t cudf_req_remove(cudf_request_t req) { return cudf_vpkglist_val(Field(*req, FIELD_REQREM)); } cudf_vpkglist_t cudf_req_upgrade(cudf_request_t req) { return cudf_vpkglist_val(Field(*req, FIELD_REQUP)); } char *cudf_pre_property(cudf_preamble_t pre, const char *prop) { CAMLparam0(); CAMLlocal1(prop_val); static value *closure_f = NULL; if (closure_f == NULL) closure_f = caml_named_value("lookup_preamble_property"); prop_val = caml_callback2_exn(*closure_f, *pre, caml_copy_string(prop)); CAMLreturnT(char *, Is_exception_result(prop_val) ? NULL : strdup(String_val(prop_val))); } cudf_extra_t cudf_pkg_extra(cudf_package_t pkg) { CAMLparam0(); CAMLlocal2(ml_extras, ml_prop); GHashTable *h = NULL; h = g_hash_table_new_full(g_str_hash, g_str_equal, g_free, (GDestroyNotify) cudf_free_value); ml_extras = Field(*pkg, FIELD_PKGEXTRA); while (ml_extras != Val_emptylist) { ml_prop = Field(ml_extras, 0); g_hash_table_insert(h, strdup(String_val(Field(ml_prop, 0))), cudf_value_val(Field(ml_prop, 1))); ml_extras = Field(ml_extras, 1); } CAMLreturnT(cudf_extra_t, h); } /** Universe management */ cudf_universe_t cudf_load_universe(GList *packages) { CAMLparam0(); CAMLlocal2(ml_pkgs, cons); static value *closure_f = NULL; GList *l = packages; cudf_universe_t univ = NULL; ml_pkgs = Val_emptylist; while (l != NULL) { cons = caml_alloc(2, 0); Store_field(cons, 0, * (cudf_package_t) g_list_nth_data(l, 0)); Store_field(cons, 1, ml_pkgs); ml_pkgs = cons; l = g_list_next(l); } if (closure_f == NULL) closure_f = caml_named_value("load_universe"); NEW_MLVAL(univ); *univ = caml_callback(*closure_f, ml_pkgs); CAMLreturnT(cudf_universe_t, univ); } int cudf_universe_size(cudf_universe_t univ) { static value *closure_f = NULL; if (closure_f == NULL) closure_f = caml_named_value("universe_size"); return Int_val(caml_callback(*closure_f, *univ)); } int cudf_installed_size(cudf_universe_t univ) { static value *closure_f = NULL; if (closure_f == NULL) closure_f = caml_named_value("installed_size"); return Int_val(caml_callback(*closure_f, *univ)); } int cudf_is_consistent(cudf_universe_t univ) { static value *closure_f = NULL; if (closure_f == NULL) closure_f = caml_named_value("is_consistent"); return Bool_val(Field(caml_callback(*closure_f, *univ), FIELD_ISSOL)); } int cudf_is_solution(cudf_t *cudf, cudf_universe_t sol) { CAMLparam0(); CAMLlocal1(ml_cudf); static value *closure_f = NULL; if (closure_f == NULL) closure_f = caml_named_value("is_solution"); if (! cudf->has_request) g_error("Given CUDF has no request: cannot compare it with a solution."); ml_cudf = caml_alloc(2, 0); Store_field(ml_cudf, 0, *(cudf->universe)); Store_field(ml_cudf, 1, *(cudf->request)); CAMLreturnT(int, Bool_val(Field(caml_callback2(*closure_f, ml_cudf, *sol), FIELD_ISSOL))); } /** Memory management. free-like functions to free binding-specific data structures */ void cudf_free_doc(cudf_doc_t *doc) { GList *l; if (doc == NULL) return; FREE_MLVAL(doc->preamble); FREE_MLVAL(doc->request); l = doc->packages; while (l != NULL) { FREE_MLVAL(g_list_nth_data(l, 0)); l = g_list_next(l); } g_list_free(l); free(doc); } void cudf_free_cudf(cudf_t *cudf) { if (cudf == NULL) return; FREE_MLVAL(cudf->preamble); FREE_MLVAL(cudf->request); FREE_MLVAL(cudf->universe); free(cudf); } void cudf_free_universe(cudf_universe_t univ) { if (univ == NULL) return; FREE_MLVAL(univ); } void cudf_free_vpkg(cudf_vpkg_t *vpkg) { if (vpkg == NULL) return; if (vpkg->name != NULL) free(vpkg->name); free(vpkg); } void cudf_free_vpkglist(cudf_vpkglist_t vpkgs) { GList *l = vpkgs; while (l != NULL) { cudf_free_vpkg(g_list_nth_data(l, 0)); l = g_list_next(l); } g_list_free(vpkgs); } void cudf_free_vpkgformula(cudf_vpkgformula_t fmla) { GList *l = fmla; while (l != NULL) { cudf_free_vpkglist(g_list_nth_data(l, 0)); l = g_list_next(l); } g_list_free(fmla); } void cudf_free_value(cudf_value_t *v) { int typ; if (v == NULL) return; typ = v->typ; switch (typ) { case TYPE_INT : case TYPE_POSINT : case TYPE_NAT : case TYPE_BOOL : break; /* integers don't require any freeing */ case TYPE_STRING : case TYPE_PKGNAME : case TYPE_IDENT : case TYPE_ENUM : free(v->val.s); break; case TYPE_VPKG : case TYPE_VEQPKG : cudf_free_vpkg(v->val.vpkg); break; case TYPE_VPKGLIST : case TYPE_VEQPKGLIST : cudf_free_vpkglist(v->val.vpkgs); break; case TYPE_VPKGFORMULA : cudf_free_vpkgformula(v->val.f); break; case TYPE_TYPEDECL : break; default : g_error("Internal error: unexpected variant for type: %d", typ); } free(v); } void cudf_free_extra(cudf_extra_t extra) { g_hash_table_destroy(extra); }
/*****************************************************************************/ /* libCUDF - CUDF (Common Upgrade Description Format) manipulation library */ /* Copyright (C) 2009-2012 Stefano Zacchiroli <zack@upsilon.cc> */ /* */ /* This library is free software: you can redistribute it and/or modify */ /* it under the terms of the GNU Lesser General Public License as */ /* published by the Free Software Foundation, either version 3 of the */ /* License, or (at your option) any later version. A special linking */ /* exception to the GNU Lesser General Public License applies to this */ /* library, see the COPYING file for more information. */ /*****************************************************************************/
funcname.c
extern void exit(int); extern int strcmp(const char*, const char*); extern int printf(const char*, ...); //Note that the concatenation in the strcmp arguments doesn't work on gcc4. //Maybe __FUNCTION__ is no longer considered a literal?? int main(void) { printf("__FUNCTION__ = %s\n", __FUNCTION__); printf("__PRETTY_FUNCTION__ = %s\n", __PRETTY_FUNCTION__); if(strcmp("This is " __FUNCTION__, "This is main") || strcmp("This is " __PRETTY_FUNCTION__, "This is main")) { exit(1); } exit(0); }
async_find.mli
(* The trivial rewrite of Core_extended.find - The code duplication makes me sad *) open! Core open Async type t module Options : sig type error_handler = | Ignore | Print | Raise | Handle_with of (string -> unit Deferred.t) type t = { min_depth : int ; max_depth : int option ; follow_links : bool ; on_open_errors : error_handler ; on_stat_errors : error_handler ; filter : (string * Unix.Stats.t -> bool Deferred.t) option ; skip_dir : (string * Unix.Stats.t -> bool Deferred.t) option ; relative_paths : bool } val default : t val ignore_errors : t end (** [create ?options dir] create a Find.t based in dir *) val create : ?options:Options.t -> string -> t (** [next t] return the next file from the collection of valid files in t or None if no more files remain *) val next : t -> (string * Unix.Stats.t) option Deferred.t (** [close t] drops all the resources associated with t. Attempting to use t again will raise an exception. Any Find.t will be automatically closed after the last file is read by any means. *) val close : t -> unit Deferred.t (** [iter t ~f] calls f on every file in t *) val iter : t -> f:(string * Unix.Stats.t -> unit Deferred.t) -> unit Deferred.t (** [fold t ~init ~f] folds f over the files in t *) val fold : t -> init:'a -> f:('a -> string * Unix.Stats.t -> 'a Deferred.t) -> 'a Deferred.t (** [to_list t] returns all of the remaining files in t as a list in the order they would have been returned by subsequent calls to next *) val to_list : t -> (string * Unix.Stats.t) list Deferred.t (** [find_all ?options dir] short for to_list (create ?options dir) *) val find_all : ?options:Options.t -> string -> (string * Unix.Stats.t) list Deferred.t
(* The trivial rewrite of Core_extended.find - The code duplication makes me sad *)
builtini_GetPixel.ml
##ifdef CAMLTK let cCAMLtoTKunits = function Pixels (foo) -> TkToken (string_of_int foo) | Millimeters (foo) -> TkToken(Printf.sprintf "%gm" foo) | Inches (foo) -> TkToken(Printf.sprintf "%gi" foo) | PrinterPoint (foo) -> TkToken(Printf.sprintf "%gp" foo) | Centimeters (foo) -> TkToken(Printf.sprintf "%gc" foo) ;; let cTKtoCAMLunits str = let len = String.length str in let num_part str = String.sub str 0 (len - 1) in match String.get str (pred len) with 'c' -> Centimeters (float_of_string (num_part str)) | 'i' -> Inches (float_of_string (num_part str)) | 'm' -> Millimeters (float_of_string (num_part str)) | 'p' -> PrinterPoint (float_of_string (num_part str)) | _ -> Pixels(int_of_string str) ;; ##else let cCAMLtoTKunits : units -> tkArgs = function | `Pix (foo) -> TkToken (string_of_int foo) | `Mm (foo) -> TkToken(Printf.sprintf "%gm" foo) | `In (foo) -> TkToken(Printf.sprintf "%gi" foo) | `Pt (foo) -> TkToken(Printf.sprintf "%gp" foo) | `Cm (foo) -> TkToken(Printf.sprintf "%gc" foo) ;; let cTKtoCAMLunits str = let len = String.length str in let num_part str = String.sub str ~pos:0 ~len:(len - 1) in match String.get str (pred len) with | 'c' -> `Cm (float_of_string (num_part str)) | 'i' -> `In (float_of_string (num_part str)) | 'm' -> `Mm (float_of_string (num_part str)) | 'p' -> `Pt (float_of_string (num_part str)) | _ -> `Pix(int_of_string str) ;; ##endif
Parse.mli
(** Functions for parsing rust programs into a CST. Generated by ocaml-tree-sitter. *) (** Parse a rust program from a string into a typed OCaml CST. The resulting CST is [None] if parsing failed completely, otherwise some tree is returned even if some parsing errors occurred, in which case the error list is not empty. *) val string : ?src_file:string -> string -> CST.source_file Tree_sitter_run.Parsing_result.t (** Parse a rust program from a file into a typed OCaml CST. See the [string] function above for details. *) val file : string -> CST.source_file Tree_sitter_run.Parsing_result.t (** Whether to print debugging information. Default: false. *) val debug : bool ref (** The original tree-sitter parser. *) val ts_parser : Tree_sitter_bindings.Tree_sitter_API.ts_parser (** Parse a program into a tree-sitter CST. *) val parse_source_string : ?src_file:string -> string -> Tree_sitter_run.Tree_sitter_parsing.t (** Parse a source file into a tree-sitter CST. *) val parse_source_file : string -> Tree_sitter_run.Tree_sitter_parsing.t (** Parse a tree-sitter CST into an OCaml typed CST. *) val parse_input_tree : Tree_sitter_run.Tree_sitter_parsing.t -> CST.source_file Tree_sitter_run.Parsing_result.t
cmdliner_arg.ml
let rev_compare n0 n1 = compare n1 n0 (* Invalid_argument strings **) let err_not_opt = "Option argument without name" let err_not_pos = "Positional argument with a name" (* Documentation formatting helpers *) let strf = Printf.sprintf let doc_quote = Cmdliner_base.quote let doc_alts = Cmdliner_base.alts_str let doc_alts_enum ?quoted enum = doc_alts ?quoted (List.map fst enum) let str_of_pp pp v = pp Format.str_formatter v; Format.flush_str_formatter () (* Argument converters *) type 'a parser = string -> [ `Ok of 'a | `Error of string ] type 'a printer = Format.formatter -> 'a -> unit type 'a conv = 'a parser * 'a printer type 'a converter = 'a conv let default_docv = "VALUE" let conv ?docv (parse, print) = let parse s = match parse s with Ok v -> `Ok v | Error (`Msg e) -> `Error e in parse, print let conv' ?docv (parse, print) = let parse s = match parse s with Ok v -> `Ok v | Error e -> `Error e in parse, print let pconv ?docv conv = conv let conv_parser (parse, _) = fun s -> match parse s with `Ok v -> Ok v | `Error e -> Error (`Msg e) let conv_printer (_, print) = print let conv_docv _ = default_docv let err_invalid s kind = `Msg (strf "invalid value '%s', expected %s" s kind) let parser_of_kind_of_string ~kind k_of_string = fun s -> match k_of_string s with | None -> Error (err_invalid s kind) | Some v -> Ok v let some = Cmdliner_base.some let some' = Cmdliner_base.some' (* Argument information *) type env = Cmdliner_info.Env.info let env_var = Cmdliner_info.Env.info type 'a t = 'a Cmdliner_term.t type info = Cmdliner_info.Arg.t let info = Cmdliner_info.Arg.v (* Arguments *) let ( & ) f x = f x let err e = Error (`Parse e) let parse_to_list parser s = match parser s with | `Ok v -> `Ok [v] | `Error _ as e -> e let report_deprecated_env ei e = match Cmdliner_info.Env.info_deprecated e with | None -> () | Some msg -> let var = Cmdliner_info.Env.info_var e in let msg = String.concat "" ["environment variable "; var; ": "; msg ] in let err_fmt = Cmdliner_info.Eval.err_ppf ei in Cmdliner_msg.pp_err err_fmt ei ~err:msg let try_env ei a parse ~absent = match Cmdliner_info.Arg.env a with | None -> Ok absent | Some env -> let var = Cmdliner_info.Env.info_var env in match Cmdliner_info.Eval.env_var ei var with | None -> Ok absent | Some v -> match parse v with | `Error e -> err (Cmdliner_msg.err_env_parse env ~err:e) | `Ok v -> report_deprecated_env ei env; Ok v let arg_to_args = Cmdliner_info.Arg.Set.singleton let list_to_args f l = let add acc v = Cmdliner_info.Arg.Set.add (f v) acc in List.fold_left add Cmdliner_info.Arg.Set.empty l let flag a = if Cmdliner_info.Arg.is_pos a then invalid_arg err_not_opt else let convert ei cl = match Cmdliner_cline.opt_arg cl a with | [] -> try_env ei a Cmdliner_base.env_bool_parse ~absent:false | [_, _, None] -> Ok true | [_, f, Some v] -> err (Cmdliner_msg.err_flag_value f v) | (_, f, _) :: (_ ,g, _) :: _ -> err (Cmdliner_msg.err_opt_repeated f g) in arg_to_args a, convert let flag_all a = if Cmdliner_info.Arg.is_pos a then invalid_arg err_not_opt else let a = Cmdliner_info.Arg.make_all_opts a in let convert ei cl = match Cmdliner_cline.opt_arg cl a with | [] -> try_env ei a (parse_to_list Cmdliner_base.env_bool_parse) ~absent:[] | l -> try let truth (_, f, v) = match v with | None -> true | Some v -> failwith (Cmdliner_msg.err_flag_value f v) in Ok (List.rev_map truth l) with Failure e -> err e in arg_to_args a, convert let vflag v l = let convert _ cl = let rec aux fv = function | (v, a) :: rest -> begin match Cmdliner_cline.opt_arg cl a with | [] -> aux fv rest | [_, f, None] -> begin match fv with | None -> aux (Some (f, v)) rest | Some (g, _) -> failwith (Cmdliner_msg.err_opt_repeated g f) end | [_, f, Some v] -> failwith (Cmdliner_msg.err_flag_value f v) | (_, f, _) :: (_, g, _) :: _ -> failwith (Cmdliner_msg.err_opt_repeated g f) end | [] -> match fv with None -> v | Some (_, v) -> v in try Ok (aux None l) with Failure e -> err e in let flag (_, a) = if Cmdliner_info.Arg.is_pos a then invalid_arg err_not_opt else a in list_to_args flag l, convert let vflag_all v l = let convert _ cl = let rec aux acc = function | (fv, a) :: rest -> begin match Cmdliner_cline.opt_arg cl a with | [] -> aux acc rest | l -> let fval (k, f, v) = match v with | None -> (k, fv) | Some v -> failwith (Cmdliner_msg.err_flag_value f v) in aux (List.rev_append (List.rev_map fval l) acc) rest end | [] -> if acc = [] then v else List.rev_map snd (List.sort rev_compare acc) in try Ok (aux [] l) with Failure e -> err e in let flag (_, a) = if Cmdliner_info.Arg.is_pos a then invalid_arg err_not_opt else Cmdliner_info.Arg.make_all_opts a in list_to_args flag l, convert let parse_opt_value parse f v = match parse v with | `Ok v -> v | `Error err -> failwith (Cmdliner_msg.err_opt_parse f ~err) let opt ?vopt (parse, print) v a = if Cmdliner_info.Arg.is_pos a then invalid_arg err_not_opt else let absent = match Cmdliner_info.Arg.absent a with | Cmdliner_info.Arg.Doc d as a when d <> "" -> a | _ -> Cmdliner_info.Arg.Val (lazy (str_of_pp print v)) in let kind = match vopt with | None -> Cmdliner_info.Arg.Opt | Some dv -> Cmdliner_info.Arg.Opt_vopt (str_of_pp print dv) in let a = Cmdliner_info.Arg.make_opt ~absent ~kind a in let convert ei cl = match Cmdliner_cline.opt_arg cl a with | [] -> try_env ei a parse ~absent:v | [_, f, Some v] -> (try Ok (parse_opt_value parse f v) with Failure e -> err e) | [_, f, None] -> begin match vopt with | None -> err (Cmdliner_msg.err_opt_value_missing f) | Some optv -> Ok optv end | (_, f, _) :: (_, g, _) :: _ -> err (Cmdliner_msg.err_opt_repeated g f) in arg_to_args a, convert let opt_all ?vopt (parse, print) v a = if Cmdliner_info.Arg.is_pos a then invalid_arg err_not_opt else let absent = match Cmdliner_info.Arg.absent a with | Cmdliner_info.Arg.Doc d as a when d <> "" -> a | _ -> Cmdliner_info.Arg.Val (lazy "") in let kind = match vopt with | None -> Cmdliner_info.Arg.Opt | Some dv -> Cmdliner_info.Arg.Opt_vopt (str_of_pp print dv) in let a = Cmdliner_info.Arg.make_opt_all ~absent ~kind a in let convert ei cl = match Cmdliner_cline.opt_arg cl a with | [] -> try_env ei a (parse_to_list parse) ~absent:v | l -> let parse (k, f, v) = match v with | Some v -> (k, parse_opt_value parse f v) | None -> match vopt with | None -> failwith (Cmdliner_msg.err_opt_value_missing f) | Some dv -> (k, dv) in try Ok (List.rev_map snd (List.sort rev_compare (List.rev_map parse l))) with | Failure e -> err e in arg_to_args a, convert (* Positional arguments *) let parse_pos_value parse a v = match parse v with | `Ok v -> v | `Error err -> failwith (Cmdliner_msg.err_pos_parse a ~err) let pos ?(rev = false) k (parse, print) v a = if Cmdliner_info.Arg.is_opt a then invalid_arg err_not_pos else let absent = match Cmdliner_info.Arg.absent a with | Cmdliner_info.Arg.Doc d as a when d <> "" -> a | _ -> Cmdliner_info.Arg.Val (lazy (str_of_pp print v)) in let pos = Cmdliner_info.Arg.pos ~rev ~start:k ~len:(Some 1) in let a = Cmdliner_info.Arg.make_pos_abs ~absent ~pos a in let convert ei cl = match Cmdliner_cline.pos_arg cl a with | [] -> try_env ei a parse ~absent:v | [v] -> (try Ok (parse_pos_value parse a v) with Failure e -> err e) | _ -> assert false in arg_to_args a, convert let pos_list pos (parse, _) v a = if Cmdliner_info.Arg.is_opt a then invalid_arg err_not_pos else let a = Cmdliner_info.Arg.make_pos ~pos a in let convert ei cl = match Cmdliner_cline.pos_arg cl a with | [] -> try_env ei a (parse_to_list parse) ~absent:v | l -> try Ok (List.rev (List.rev_map (parse_pos_value parse a) l)) with | Failure e -> err e in arg_to_args a, convert let all = Cmdliner_info.Arg.pos ~rev:false ~start:0 ~len:None let pos_all c v a = pos_list all c v a let pos_left ?(rev = false) k = let start = if rev then k + 1 else 0 in let len = if rev then None else Some k in pos_list (Cmdliner_info.Arg.pos ~rev ~start ~len) let pos_right ?(rev = false) k = let start = if rev then 0 else k + 1 in let len = if rev then Some k else None in pos_list (Cmdliner_info.Arg.pos ~rev ~start ~len) (* Arguments as terms *) let absent_error args = let make_req a acc = let req_a = Cmdliner_info.Arg.make_req a in Cmdliner_info.Arg.Set.add req_a acc in Cmdliner_info.Arg.Set.fold make_req args Cmdliner_info.Arg.Set.empty let value a = a let err_arg_missing args = err @@ Cmdliner_msg.err_arg_missing (Cmdliner_info.Arg.Set.choose args) let required (args, convert) = let args = absent_error args in let convert ei cl = match convert ei cl with | Ok (Some v) -> Ok v | Ok None -> err_arg_missing args | Error _ as e -> e in args, convert let non_empty (al, convert) = let args = absent_error al in let convert ei cl = match convert ei cl with | Ok [] -> err_arg_missing args | Ok l -> Ok l | Error _ as e -> e in args, convert let last (args, convert) = let convert ei cl = match convert ei cl with | Ok [] -> err_arg_missing args | Ok l -> Ok (List.hd (List.rev l)) | Error _ as e -> e in args, convert (* Predefined arguments *) let man_fmts = ["auto", `Auto; "pager", `Pager; "groff", `Groff; "plain", `Plain] let man_fmt_docv = "FMT" let man_fmts_enum = Cmdliner_base.enum man_fmts let man_fmts_alts = doc_alts_enum man_fmts let man_fmts_doc kind = strf "Show %s in format $(docv). The value $(docv) must be %s. \ With $(b,auto), the format is $(b,pager) or $(b,plain) whenever \ the $(b,TERM) env var is $(b,dumb) or undefined." kind man_fmts_alts let man_format = let doc = man_fmts_doc "output" in let docv = man_fmt_docv in value & opt man_fmts_enum `Pager & info ["man-format"] ~docv ~doc let stdopt_version ~docs = value & flag & info ["version"] ~docs ~doc:"Show version information." let stdopt_help ~docs = let doc = man_fmts_doc "this help" in let docv = man_fmt_docv in value & opt ~vopt:(Some `Auto) (some man_fmts_enum) None & info ["help"] ~docv ~docs ~doc (* Predefined converters. *) let bool = Cmdliner_base.bool let char = Cmdliner_base.char let int = Cmdliner_base.int let nativeint = Cmdliner_base.nativeint let int32 = Cmdliner_base.int32 let int64 = Cmdliner_base.int64 let float = Cmdliner_base.float let string = Cmdliner_base.string let enum = Cmdliner_base.enum let file = Cmdliner_base.file let dir = Cmdliner_base.dir let non_dir_file = Cmdliner_base.non_dir_file let list = Cmdliner_base.list let array = Cmdliner_base.array let pair = Cmdliner_base.pair let t2 = Cmdliner_base.t2 let t3 = Cmdliner_base.t3 let t4 = Cmdliner_base.t4 (*--------------------------------------------------------------------------- Copyright (c) 2011 The cmdliner programmers Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. ---------------------------------------------------------------------------*)
(*--------------------------------------------------------------------------- Copyright (c) 2011 The cmdliner programmers. All rights reserved. Distributed under the ISC license, see terms at the end of the file. ---------------------------------------------------------------------------*)
script_set.mli
(** Convert a list to a Script IR set. If the list contains duplicates, the last occurence is used. *) val of_list : 'a Protocol.Script_typed_ir.comparable_ty -> 'a list -> 'a Protocol.Script_typed_ir.set
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2020 Metastate AG <hello@metastate.dev> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
main.ml
let main () = let argv = Array.to_list Sys.argv in let args = List.tl argv in let this = List.hd argv in match args with | [] -> Printf.eprintf "usage: %s [WORD]..." this; exit 1 | _ -> Printf.eprintf "%s\n" (Util.join args) let () = main ()
Array.ml
module A = ArrayF.Make (struct let length = ArrayLabels.length let make = ArrayLabels.make let append = ArrayLabels.append let map f arr = ArrayLabels.map ~f arr let mapi f arr = ArrayLabels.mapi ~f:(fun index e -> f e index) arr let fold_left f init arr = ArrayLabels.fold_left ~f ~init arr let every f arr = ArrayLabels.for_all ~f arr let slice ~start ~end_ arr = ArrayLabels.sub arr ~pos:start ~len:(end_ - start) end) include A
carbonated_map_costs.ml
module S = Saturation_repr open Gas_limit_repr type cost = Saturation_repr.may_saturate Saturation_repr.t (** This is a good enough approximation *) let log2 x = S.safe_int (1 + S.numbits x) (** Collect benchmark from [Carbonated_map_benchmarks.Find_benchmark]. The model is similar to the gas model as from [Michelson_v1_gas.map_get]. The user is responsible for providing the [compare_key_cost] which depends on the size of the [key]. See [Carbonated_map_benchmarks.Find_benchmark] for an example. The rational for the model is: - [intercept] is for paying a fixed cost regardless of size. - [compare_key_cost] is for the log2 of steps comparing keys - [traversal_overhead] is for the overhead of log2 steps walking the tree *) let find_cost ~compare_key_cost ~size = let intercept = S.safe_int 50 in let size = S.safe_int size in let compare_cost = log2 size *@ compare_key_cost in let traversal_overhead = log2 size *@ S.safe_int 2 in intercept +@ compare_cost +@ traversal_overhead (** Modelling the precise overhead of update compared with [find] is tricky. The cost of [find] depends on the cost of comparing keys. When the tree is recreated, after looking up the element, this cost is no longer a factor. On the other hand, if the old map is no longer used, some nodes are going to be garbage collected at a later stage which incurs an extra cost. We here use the same model as in [Michelson_v1_gas.map_update]. That is providing an overestimate by doubling the cost of [find]. *) let update_cost ~compare_key_cost ~size = S.safe_int 2 *@ find_cost ~compare_key_cost ~size (** Collect benchmark from [Carbonated_map_benchmarks.Fold_benchmark]. The cost of producing a list of elements is linear in the size of the map and does not depend on the size of the elements nor keys. *) let fold_cost ~size = S.safe_int 50 +@ (S.safe_int 24 *@ S.safe_int size)
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2022 Trili Tech, <contact@trili.tech> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
delegate_services.mli
open Alpha_context val list : 'a #RPC_context.simple -> 'a -> ?active:bool -> ?inactive:bool -> unit -> Signature.Public_key_hash.t list shell_tzresult Lwt.t type info = { balance : Tez.t; frozen_balance : Tez.t; frozen_balance_by_cycle : Delegate.frozen_balance Cycle.Map.t; staking_balance : Tez.t; delegated_contracts : Contract_repr.t list; delegated_balance : Tez.t; deactivated : bool; grace_period : Cycle.t; voting_power : int32; } val info_encoding : info Data_encoding.t val info : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> info shell_tzresult Lwt.t val balance : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> Tez.t shell_tzresult Lwt.t val frozen_balance : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> Tez.t shell_tzresult Lwt.t val frozen_balance_by_cycle : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> Delegate.frozen_balance Cycle.Map.t shell_tzresult Lwt.t val staking_balance : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> Tez.t shell_tzresult Lwt.t val delegated_contracts : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> Contract_repr.t list shell_tzresult Lwt.t val delegated_balance : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> Tez.t shell_tzresult Lwt.t val deactivated : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> bool shell_tzresult Lwt.t val grace_period : 'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t -> Cycle.t shell_tzresult Lwt.t val voting_power : 'a #RPC_context.simple -> 'a -> public_key_hash -> int32 shell_tzresult Lwt.t module Baking_rights : sig type t = { level : Raw_level.t; delegate : Signature.Public_key_hash.t; priority : int; timestamp : Timestamp.t option; } (** Retrieves the list of delegates allowed to bake a block. By default, it gives the best baking priorities for bakers that have at least one opportunity below the 64th priority for the next block. Parameters [levels] and [cycles] can be used to specify the (valid) level(s) in the past or future at which the baking rights have to be returned. Parameter [delegates] can be used to restrict the results to the given delegates. If parameter [all] is [true], all the baking opportunities for each baker at each level are returned, instead of just the first one. Returns the list of baking slots. Also returns the minimal timestamps that correspond to these slots. The timestamps are omitted for levels in the past, and are only estimates for levels later that the next block, based on the hypothesis that all predecessor blocks were baked at the first priority. *) val get : 'a #RPC_context.simple -> ?levels:Raw_level.t list -> ?cycles:Cycle.t list -> ?delegates:Signature.public_key_hash list -> ?all:bool -> ?max_priority:int -> 'a -> t list shell_tzresult Lwt.t end module Endorsing_rights : sig type t = { level : Raw_level.t; delegate : Signature.Public_key_hash.t; slots : int list; estimated_time : Timestamp.t option; } (** Retrieves the delegates allowed to endorse a block. By default, it gives the endorsement slots for bakers that have at least one in the next block. Parameters [levels] and [cycles] can be used to specify the (valid) level(s) in the past or future at which the endorsement rights have to be returned. Parameter [delegates] can be used to restrict the results to the given delegates. Returns the list of endorsement slots. Also returns the minimal timestamps that correspond to these slots. Timestamps are omitted for levels in the past, and are only estimates for levels later that the next block, based on the hypothesis that all predecessor blocks were baked at the first priority. *) val get : 'a #RPC_context.simple -> ?levels:Raw_level.t list -> ?cycles:Cycle.t list -> ?delegates:Signature.public_key_hash list -> 'a -> t list shell_tzresult Lwt.t end module Endorsing_power : sig val get : 'a #RPC_context.simple -> 'a -> Alpha_context.packed_operation -> Chain_id.t -> int shell_tzresult Lwt.t end module Required_endorsements : sig val get : 'a #RPC_context.simple -> 'a -> Period.t -> int shell_tzresult Lwt.t end module Minimal_valid_time : sig val get : 'a #RPC_context.simple -> 'a -> int -> int -> Time.t shell_tzresult Lwt.t end (* temporary export for deprecated unit test *) val endorsement_rights : Alpha_context.t -> Level.t -> public_key_hash list tzresult Lwt.t val baking_rights : Alpha_context.t -> int option -> (Raw_level.t * (public_key_hash * Time.t option) list) tzresult Lwt.t val endorsing_power : Alpha_context.t -> Alpha_context.packed_operation * Chain_id.t -> int tzresult Lwt.t val required_endorsements : Alpha_context.t -> Alpha_context.Period.t -> int val minimal_valid_time : Alpha_context.t -> int -> int -> Time.t tzresult val register : unit -> unit
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2020 Metastate AG <hello@metastate.dev> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
modifyGlobalCluster.ml
open Types open Aws type input = ModifyGlobalClusterMessage.t type output = ModifyGlobalClusterResult.t type error = Errors_internal.t let service = "rds" let signature_version = Request.V4 let to_http service region req = let uri = Uri.add_query_params (Uri.of_string (Aws.Util.of_option_exn (Endpoints.url_of service region))) (List.append [("Version", ["2014-10-31"]); ("Action", ["ModifyGlobalCluster"])] (Util.drop_empty (Uri.query_of_encoded (Query.render (ModifyGlobalClusterMessage.to_query req))))) in (`POST, uri, []) let of_http body = try let xml = Ezxmlm.from_string body in let resp = Util.option_bind (Xml.member "ModifyGlobalClusterResponse" (snd xml)) (Xml.member "ModifyGlobalClusterResult") in try Util.or_error (Util.option_bind resp ModifyGlobalClusterResult.parse) (let open Error in BadResponse { body; message = "Could not find well formed ModifyGlobalClusterResult." }) with | Xml.RequiredFieldMissing msg -> let open Error in `Error (BadResponse { body; message = ("Error parsing ModifyGlobalClusterResult - missing field in body or children: " ^ msg) }) with | Failure msg -> `Error (let open Error in BadResponse { body; message = ("Error parsing xml: " ^ msg) }) let parse_error code err = let errors = [] @ Errors_internal.common in match Errors_internal.of_string err with | Some var -> if (List.mem var errors) && ((match Errors_internal.to_http_code var with | Some var -> var = code | None -> true)) then Some var else None | None -> None
baking.mli
open Alpha_context open Misc type error += Invalid_fitness_gap of int64 * int64 (* `Permanent *) type error += Timestamp_too_early of Timestamp.t * Timestamp.t (* `Permanent *) type error += Invalid_block_signature of Block_hash.t * Signature.Public_key_hash.t (* `Permanent *) type error += Unexpected_endorsement type error += Invalid_signature (* `Permanent *) type error += Invalid_stamp (* `Permanent *) (** [minimal_time ctxt priority pred_block_time] returns the minimal time, given the predecessor block timestamp [pred_block_time], after which a baker with priority [priority] is allowed to bake. Fail with [Invalid_time_between_blocks_constant] if the minimal time cannot be computed. *) val minimal_time: context -> int -> Time.t -> Time.t tzresult Lwt.t (** [check_baking_rights ctxt block pred_timestamp] verifies that: * the contract that owned the roll at cycle start has the block signer as delegate. * the timestamp is coherent with the announced slot. *) val check_baking_rights: context -> Block_header.contents -> Time.t -> public_key tzresult Lwt.t (** For a given level computes who has the right to include an endorsement in the next block. The result can be stored in Alpha_context.allowed_endorsements *) val endorsement_rights: context -> Level.t -> (public_key * int list * bool) Signature.Public_key_hash.Map.t tzresult Lwt.t (** Check that the operation was signed by a delegate allowed to endorse at the level specified by the endorsement. *) val check_endorsement_rights: context -> Chain_id.t -> Kind.endorsement Operation.t -> (public_key_hash * int list * bool) tzresult Lwt.t (** Returns the endorsement reward calculated w.r.t a given priority. *) val endorsement_reward: context -> block_priority:int -> int -> Tez.t tzresult Lwt.t (** [baking_priorities ctxt level] is the lazy list of contract's public key hashes that are allowed to bake for [level]. *) val baking_priorities: context -> Level.t -> public_key lazy_list (** [first_baking_priorities ctxt ?max_priority contract_hash level] is a list of priorities of max [?max_priority] elements, where the delegate of [contract_hash] is allowed to bake for [level]. If [?max_priority] is [None], a sensible number of priorities is returned. *) val first_baking_priorities: context -> ?max_priority:int -> public_key_hash -> Level.t -> int list tzresult Lwt.t (** [check_signature ctxt chain_id block id] check if the block is signed with the given key, and belongs to the given [chain_id] *) val check_signature: Block_header.t -> Chain_id.t -> public_key -> unit tzresult Lwt.t (** Checks if the header that would be built from the given components is valid for the given diffculty. The signature is not passed as it is does not impact the proof-of-work stamp. The stamp is checked on the hash of a block header whose signature has been zeroed-out. *) val check_header_proof_of_work_stamp: Block_header.shell_header -> Block_header.contents -> int64 -> bool (** verify if the proof of work stamp is valid *) val check_proof_of_work_stamp: context -> Block_header.t -> unit tzresult Lwt.t (** check if the gap between the fitness of the current context and the given block is within the protocol parameters *) val check_fitness_gap: context -> Block_header.t -> unit tzresult Lwt.t val dawn_of_a_new_cycle: context -> Cycle.t option tzresult Lwt.t val earlier_predecessor_timestamp: context -> Level.t -> Timestamp.t tzresult Lwt.t
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
script_set.mli
(** Convert a list to a Script IR set. If the list contains duplicates, the last occurence is used. *) val of_list : 'a Protocol.Script_typed_ir.comparable_ty -> 'a list -> 'a Protocol.Script_typed_ir.set
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2020 Metastate AG <hello@metastate.dev> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
add_special_comments.mli
(* empty *)
(* empty *)
tls.ml
open! Core open! Async open! Import module Connection = Ssl.Connection open Require_explicit_time_source let teardown_connection ~outer_rd ~outer_wr ~time_source = let force_close = Time_source.after time_source (Time_ns.Span.of_sec 30.) in let%bind () = Writer.close ~force_close outer_wr in Reader.close outer_rd ;; (* One needs to be careful around Async Readers and Writers that share the same underyling file descriptor, which is something that happens when they're used for sockets. Closing the Reader before the Writer will cause the Writer to throw and complain about its underlying file descriptor being closed. This is why instead of using Reader.pipe directly below, we write out an equivalent version which will first close the Writer before closing the Reader once the input pipe is fully consumed. Additionally, [Writer.pipe] will not close the writer if the pipe is closed, so in order to avoid leaking file descriptors, we allow the pipe 30 seconds to flush before closing the writer. *) let reader_writer_pipes ~outer_rd ~outer_wr ~time_source = let reader_pipe_r, reader_pipe_w = Pipe.create () in let writer_pipe = Writer.pipe outer_wr in upon (Reader.transfer outer_rd reader_pipe_w) (fun () -> (* must close the writer before the readers, otherwise everything breaks. *) teardown_connection ~outer_rd ~outer_wr ~time_source >>> fun () -> Pipe.close reader_pipe_w); upon (Pipe.closed writer_pipe) (fun () -> Deferred.choose [ Deferred.choice (Time_source.after time_source (Time_ns.Span.of_sec 30.)) (fun () -> ()) ; Deferred.choice (Pipe.downstream_flushed writer_pipe) (fun (_ : Pipe.Flushed_result.t) -> ()) ] >>> fun () -> don't_wait_for (teardown_connection ~outer_rd ~outer_wr ~time_source)); reader_pipe_r, writer_pipe ;; (* [Reader.of_pipe] will not close the pipe when the returned [Reader] is closed, so we manually do that ourselves. [Writer.of_pipe] will create a writer that will raise once the pipe is closed, so we set [raise_when_consumer_leaves] to false. *) let reader_writer_of_pipes ~app_rd ~app_wr = let%bind inner_rd = Reader.of_pipe (Info.of_string "async_ssl_tls_reader") app_rd in upon (Reader.close_finished inner_rd) (fun () -> Pipe.close_read app_rd); let%map inner_wr, _ = Writer.of_pipe (Info.of_string "async_ssl_tls_writer") app_wr in Writer.set_raise_when_consumer_leaves inner_wr false; inner_rd, inner_wr ;; let call_handler_and_cleanup ~outer_rd:_ ~outer_wr ~inner_rd ~inner_wr f = Monitor.protect f ~run:`Now ~rest:`Log ~finally:(fun () -> (* Close writer before reader in-case they share the underlying FD *) let%bind () = Writer.close inner_wr in Deferred.all_unit [ (* Close the reader for completeness *) Reader.close inner_rd ; (* Wait for [Async_ssl] to close [outer_wr] in response to [inner_wr] having been closed. *) Writer.close_finished outer_wr ]) ;; let wrap_connection ?(timeout = Time_ns.Span.of_sec 30.) outer_rd outer_wr ~negotiate ~f ~time_source = let net_to_ssl, ssl_to_net = reader_writer_pipes ~outer_rd ~outer_wr ~time_source in let app_to_ssl, app_wr = Pipe.create () in let app_rd, ssl_to_app = Pipe.create () in let%bind negotiate = match%map Time_source.with_timeout time_source timeout (negotiate ~app_to_ssl ~ssl_to_app ~net_to_ssl ~ssl_to_net) with | `Timeout -> error_s [%message "Timeout exceeded"] | `Result connection -> connection in match negotiate with | Error error -> let%map () = teardown_connection ~outer_rd ~outer_wr ~time_source in Error.raise error | Ok conn -> let%bind inner_rd, inner_wr = reader_writer_of_pipes ~app_rd ~app_wr in call_handler_and_cleanup ~outer_rd ~outer_wr ~inner_rd ~inner_wr (fun () -> f conn inner_rd inner_wr) ;; let wrap_server_connection tls_settings outer_rd outer_wr ~f ~time_source = let ca_file = Config.Server.ca_file tls_settings in let ca_path = Config.Server.ca_path tls_settings in let verify_modes = Config.Server.verify_modes tls_settings in let version = Config.Server.tls_version tls_settings in let options = Config.Server.tls_options tls_settings in let crt_file = Config.Server.crt_file tls_settings in let key_file = Config.Server.key_file tls_settings in let allowed_ciphers = Config.Server.allowed_ciphers tls_settings in wrap_connection outer_rd outer_wr ~negotiate: (Ssl.server ?ca_file ?ca_path ?verify_modes ~version ~options ~crt_file ~key_file ~allowed_ciphers ()) ~f:(fun conn r w -> match Ssl.Connection.peer_certificate conn with | None | Some (Ok (_ : Ssl.Certificate.t)) -> f conn r w | Some (Error error) -> Error.raise error) ~time_source ;; let listen ?max_connections ?backlog ?buffer_age_limit ?advance_clock_before_tls_negotiation tls_settings where_to_listen ~on_handler_error ~f = Tcp.Server.create ?max_connections ?backlog ?buffer_age_limit ~on_handler_error where_to_listen (fun sock r w -> let%bind time_source = match advance_clock_before_tls_negotiation with | None -> return (Time_source.wall_clock ()) | Some (time_source, delay) -> let%map () = Time_source.advance_by_alarms_by time_source delay in Time_source.read_only time_source in wrap_server_connection tls_settings r w ~f:(f sock) ~time_source) ;; let wrap_client_connection ?timeout tls_settings outer_rd outer_wr ~f = let ca_file = Config.Client.ca_file tls_settings in let ca_path = Config.Client.ca_path tls_settings in let version = Config.Client.tls_version tls_settings in let options = Config.Client.tls_options tls_settings in let crt_file = Config.Client.crt_file tls_settings in let key_file = Config.Client.key_file tls_settings in let hostname = Config.Client.remote_hostname tls_settings in let allowed_ciphers = Config.Client.allowed_ciphers tls_settings in let verify_modes = Config.Client.verify_modes tls_settings in let verify_callback = Config.Client.verify_callback tls_settings in let session = Config.Client.session tls_settings in let connection_name = Config.Client.connection_name tls_settings in wrap_connection ?timeout ~negotiate: (Ssl.client ?ca_file ?ca_path ?crt_file ?key_file ?hostname ?session ?name:connection_name ~verify_modes ~allowed_ciphers ~version ~options ()) outer_rd outer_wr ~f:(fun conn inner_rd inner_wr -> match%bind verify_callback conn with | Error connection_verification_error -> raise_s [%message "Connection verification failed." (connection_verification_error : Error.t)] | Ok () -> f conn inner_rd inner_wr) ;; let with_connection ?interrupt ?timeout tls_settings where_to_connect ~f ~time_source = let start_time = Time_source.now time_source in Async.Tcp.with_connection ?interrupt ?timeout:(Option.map timeout ~f:Time_ns.Span.to_span_float_round_nearest) where_to_connect (fun socket outer_rd outer_wr -> let timeout = Option.map timeout ~f:(fun timeout -> let tcp_time_elapsed = Time_ns.diff (Time_source.now time_source) start_time in Time_ns.Span.(timeout - tcp_time_elapsed)) in wrap_client_connection ?timeout tls_settings outer_rd outer_wr ~f:(f socket) ~time_source) ;; module For_testing = struct let listen = listen let with_connection = with_connection end let time_source = Time_source.wall_clock () let listen = listen ?advance_clock_before_tls_negotiation:None let wrap_server_connection = wrap_server_connection ~time_source let with_connection = with_connection ~time_source let wrap_client_connection = wrap_client_connection ~time_source module Expert = struct let connect ?interrupt ?timeout tls_settings where_to_connect = let conn_ivar = Ivar.create () in (* This will raise if the connection fails to establish which will bubble out to the enclosing monitor and avoid issues with the [Ivar] not getting filled. *) don't_wait_for (with_connection ?interrupt ?timeout tls_settings where_to_connect ~f:(fun sock conn r w -> Ivar.fill conn_ivar (sock, conn, r, w); Deferred.any [ Reader.close_finished r; Writer.close_finished w ])); Ivar.read conn_ivar ;; let wrap_client_connection_and_stay_open tls_settings outer_rd outer_wr ~f = let result = Ivar.create () in let finished = wrap_client_connection tls_settings outer_rd outer_wr ~f:(fun conn r w -> let%bind res, `Do_not_close_until finished = f conn r w in Ivar.fill result res; finished) in let%map result = Ivar.read result in result, `Connection_closed finished ;; end
dune
(library (name alib) (public_name alib) (libraries blib))
commands.mli
val commands : unit -> Client_context.printer Clic.command list
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2019 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
Matching_generic.ml
open Common module B = AST_generic module G = AST_generic module MV = Metavariable module H = AST_generic_helpers module Flag = Flag_semgrep module Env = Metavariable_capture module PI = Parse_info let logger = Logging.get_logger [ __MODULE__ ] (*****************************************************************************) (* Prelude *) (*****************************************************************************) (* Helper types and functions for Generic_vs_generic.ml. * See Generic_vs_generic.ml top comment for more information. * * todo: * - use m_list_in_any_order at some point for: * * m_list__m_field * * m_list__m_attribute * * m_list__m_xml_attr * * m_list__m_argument (harder) *) (*****************************************************************************) (* Types *) (*****************************************************************************) (* ------------------------------------------------------------------------*) (* Combinators history *) (* ------------------------------------------------------------------------*) (* * version0: * type ('a, 'b) matcher = 'a -> 'b -> bool * * This just lets you know if you matched something. * * version1: * type ('a, 'b) matcher = 'a -> 'b -> unit -> ('a, 'b) option * * The Maybe monad. * * version2: * type ('a, 'b) matcher = 'a -> 'b -> binding -> binding list * * Why not returning a binding option ? because we need sometimes * to return multiple possible bindings for one matching code. * For instance with the pattern do 'f(..., $X, ...)', $X could be bound * to different parts of the code. * * Note that the empty list means a match failure. * * version3: * type ('a, 'b) matcher = 'a -> 'b -> tin -> ('a,'b) tout * * version4: back to simpler * type ('a, 'b) matcher = 'a -> 'b -> tin -> tout *) (* tin is for 'type in' and tout for 'type out' *) (* incoming environment *) type tin = { mv : Metavariable_capture.t; stmts_match_span : Stmts_match_span.t; cache : tout Caching.Cache.t option; (* TODO: this does not have to be in tout; maybe split tin in 2? *) lang : Lang.t; config : Config_semgrep.t; deref_sym_vals : int; } (* list of possible outcoming matching environments *) and tout = tin list (* A matcher is something taking an element A and an element B * (for this module A will be the AST of the pattern and B * the AST of the program we want to match over), then some environment * information tin, and it will return something (tout) that will * represent a match between element A and B. *) (* currently A and B are usually the same type as we use the * same language for the host language and pattern language *) type 'a matcher = 'a -> 'a -> tin -> tout type ('a, 'b) general_matcher = 'a -> 'b -> tin -> tout type 'a comb_result = tin -> ('a * tout) list type 'a comb_matcher = 'a -> 'a list -> 'a list comb_result (*****************************************************************************) (* Globals *) (*****************************************************************************) (*****************************************************************************) (* Debugging *) (*****************************************************************************) (*****************************************************************************) (* Monadic operators *) (*****************************************************************************) (* The >>= combinator below allow you to configure the matching process * anyway you want. Essentially this combinator takes a matcher, * another matcher, and returns a matcher that combines the 2 * matcher arguments. * * In the case of a simple boolean matcher, you just need to write: * * let (>>=) m1 m2 = fun tin -> * match m1 tin with * | None -> None * | Some x -> * m2 x tin * * For more context, this tutorial on monads in OCaml can be useful: * https://www.cs.cornell.edu/courses/cs3110/2019sp/textbook/ads/ex_maybe_monad.html *) let (( >>= ) : (tin -> tout) -> (unit -> tin -> tout) -> tin -> tout) = fun m1 m2 tin -> (* let's get a list of possible environment match (could be * the empty list when it didn't match, playing the role None * had before) *) let xs = m1 tin in (* try m2 on each possible returned bindings *) let xxs = xs |> Common.map (fun binding -> m2 () binding) in List.flatten xxs (* the disjunctive combinator *) let (( >||> ) : (tin -> tout) -> (tin -> tout) -> tin -> tout) = fun m1 m2 tin -> (* CHOICE let xs = m1 tin in if null xs then m2 tin else xs *) (* opti? use set instead of list *) m1 tin @ m2 tin (* the if-fail combinator *) let ( >!> ) m1 else_cont tin = match m1 tin with | [] -> (else_cont ()) tin | xs -> xs let if_config f ~then_ ~else_ tin = if f tin.config then then_ tin else else_ tin let with_lang f tin = f tin.lang tin (* The classical monad combinators *) let (return : tin -> tout) = fun tin -> [ tin ] let (fail : tin -> tout) = fun _tin -> if !Flag.debug_matching then failwith "Generic_vs_generic.fail: Match failure"; [] let or_list m a bs = let rec aux xs = match xs with | [] -> fail | b :: bs -> m a b >||> aux bs in aux bs (* Since OCaml 4.08 you can define your own let operators! * alt: use ppx_let, but you need to write it as let%bind (uglier) * You can use the ppx future_syntax to support older version of OCaml, but * then you can not use other PPX rewriters (which we do). *) let ( let* ) o f = o >>= f (* TODO: could maybe also define let (let/) o f = match o with | None -> fail () | Some x -> f x useful in Generic_vs_generic when see code like 'None -> fail()' *) (*****************************************************************************) (* Environment *) (*****************************************************************************) let add_mv_capture key value (env : tin) = { env with mv = Env.add_capture key value env.mv } let extend_stmts_match_span rightmost_stmt (env : tin) = let stmts_match_span = Stmts_match_span.extend rightmost_stmt env.stmts_match_span in { env with stmts_match_span } (* pre: both 'a' and 'b' contains only regular code; there are no * metavariables inside them. *) let rec equal_ast_bound_code (config : Config_semgrep.t) (a : MV.mvalue) (b : MV.mvalue) : bool = let res = match (a, b) with (* if one of the two IDs is not resolved, then we allow * a match, so a pattern like 'self.$FOO = $FOO' matches * code like 'self.foo = foo'. * Maybe we should not ... but let's try. * * At least we don't allow a resolved id with a precise sid to match * another id with a different sid (same id but in different scope), * which we rely on with our deep stmt matching hacks. * * TODO: relax even more and allow some id_resolved EnclosedVar (a field) * to match anything? *) | ( MV.Id ((s1, _), Some { G.id_resolved = { contents = None }; _ }), MV.Id ((s2, _), _) ) | ( MV.Id ((s1, _), _), MV.Id ((s2, _), Some { G.id_resolved = { contents = None }; _ }) ) -> s1 = s2 (* In Ruby, they use atoms for metaprogramming to generate fields * (e.g., 'serialize :tags ... post.tags') in which case we want * a Text metavariable like :$INPUT to be compared with an Id * metavariable like post.$INPUT. * TODO? split MV.Text in a separate MV.Atom? *) | ( MV.Id ((s1, _), Some { G.id_resolved = { contents = None }; _ }), MV.Text (s2, _, _) ) | ( MV.Text (s1, _, _), MV.Id ((s2, _), Some { G.id_resolved = { contents = None }; _ }) ) -> s1 = s2 (* A variable occurrence that is known to have a constant value is equal to * that same constant value. * * THINK: We could also equal two different variable occurrences that happen * to have the same constant value. *) | ( MV.E { e = G.L a_lit; _ }, MV.Id (_, Some { B.id_svalue = { contents = Some (B.Lit b_lit) }; _ }) ) | ( MV.Id (_, Some { G.id_svalue = { contents = Some (G.Lit a_lit) }; _ }), MV.E { e = B.L b_lit; _ } ) when config.constant_propagation -> G.equal_literal a_lit b_lit (* We're adding this in as a hack, so that idents without id_infos can be allowed to match to metavariables. Notably, this allows things like qualified identifiers (within decorators) to match to metavariables. This almost certainly should break something at some point in the future, but for now we can allow it. *) | MV.Id ((s1, _), None), MV.Id ((s2, _), Some _) -> s1 = s2 (* general case, equality modulo-position-and-svalue. * TODO: in theory we should use user-defined equivalence to allow * equality modulo-equivalence rewriting! * TODO? missing MV.Ss _, MV.Ss _ ?? *) | MV.Id _, MV.Id _ | MV.N _, MV.N _ | MV.E _, MV.E _ | MV.S _, MV.S _ | MV.P _, MV.P _ | MV.T _, MV.T _ | MV.Text _, MV.Text _ | MV.Params _, MV.Params _ | MV.Args _, MV.Args _ | MV.Xmls _, MV.Xmls _ -> (* Note that because we want to retain the position information * of the matched code in the environment (e.g. for the -pvar * sgrep command line argument), we can not just use the * generic '=' OCaml operator as 'a' and 'b' may represent * the same code but they will contain leaves in their AST * with different position information. * old: So before doing * the comparison we just need to remove/abstract-away * the line number information in each ASTs. * let a = MV.abstract_position_info_mval a in * let b = MV.abstract_position_info_mval b in * a =*= b *) (* This will perform equality but not care about: * - position information (see adhoc AST_generic.equal_tok) * - id_svalue (see the special @equal for id_svalue) *) MV.Structural.equal_mvalue a b (* TODO still needed now that we have the better MV.Id of id_info? *) | MV.Id _, MV.E { e = G.N (G.Id (b_id, b_id_info)); _ } -> (* TOFIX: regression if remove this code *) (* Allow identifier nodes to match pure identifier expressions *) (* You should prefer to add metavar as expression (G.E), not id (G.I), * (see Generic_vs_generic.m_ident_and_id_info_add_in_env_Expr) * but in some cases you have no choice and you need to match an expr * metavar with an id metavar. * For example, we want the pattern 'const $X = foo.$X' to match * 'const bar = foo.bar' * (this is useful in the Javascript transpilation context of * complex pattern parameter). *) equal_ast_bound_code config a (MV.Id (b_id, Some b_id_info)) (* TODO: we should get rid of that too, we should properly bind to MV.N *) | MV.E { e = G.N (G.Id (a_id, a_id_info)); _ }, MV.Id _ -> equal_ast_bound_code config (MV.Id (a_id, Some a_id_info)) b | _, _ -> false in if not res then logger#ldebug (lazy (spf "A != B\nA = %s\nB = %s\n" (MV.str_of_mval a) (MV.str_of_mval b))); res let check_and_add_metavar_binding ((mvar : MV.mvar), valu) (tin : tin) = match Common2.assoc_opt mvar tin.mv.full_env with | Some valu' -> (* Should we use generic_vs_generic itself for comparing the code? * Hmmm, we can't because it leads to a circular dependencies. * Moreover here we know both valu and valu' are regular code, * not patterns, so we can just use the generic '=' of OCaml. *) if equal_ast_bound_code tin.config valu valu' then Some tin (* valu remains the metavar witness *) else None | None -> (* 'backrefs' is the set of metavariables that may be referenced later in the pattern. It's inherited from the last stmt pattern, so it might contain a few extra members. *) (* first time the metavar is bound, just add it to the environment *) Some (add_mv_capture mvar valu tin) let (envf : MV.mvar G.wrap -> MV.mvalue -> tin -> tout) = fun (mvar, _imvar) any tin -> match check_and_add_metavar_binding (mvar, any) tin with | None -> logger#ldebug (lazy (spf "envf: fail, %s (%s)" mvar (MV.str_of_mval any))); fail tin | Some new_binding -> logger#ldebug (lazy (spf "envf: success, %s (%s)" mvar (MV.str_of_mval any))); return new_binding let empty_environment ?(mvar_context = None) opt_cache lang config = let mv = match mvar_context with | None -> Env.empty | Some bindings -> { full_env = bindings; min_env = []; last_stmt_backrefs = Set_.empty } in { mv; stmts_match_span = Empty; cache = opt_cache; lang; config; deref_sym_vals = 0; } (*****************************************************************************) (* Helpers *) (*****************************************************************************) let rec inits_and_rest_of_list = function | [] -> failwith "inits_1 requires a non-empty list" | [ e ] -> [ ([ e ], []) ] | e :: l -> ([ e ], l) :: Common.map (fun (l, rest) -> (e :: l, rest)) (inits_and_rest_of_list l) let _ = Common2.example (inits_and_rest_of_list [ 'a'; 'b'; 'c' ] =*= [ ([ 'a' ], [ 'b'; 'c' ]); ([ 'a'; 'b' ], [ 'c' ]); ([ 'a'; 'b'; 'c' ], []); ]) let inits_and_rest_of_list_empty_ok = function | [] -> [ ([], []) ] | xs -> [ ([], xs) ] @ inits_and_rest_of_list xs let _ = Common2.example (inits_and_rest_of_list_empty_ok [ 'a'; 'b'; 'c' ] =*= [ ([], [ 'a'; 'b'; 'c' ]); ([ 'a' ], [ 'b'; 'c' ]); ([ 'a'; 'b' ], [ 'c' ]); ([ 'a'; 'b'; 'c' ], []); ]) (* todo? optimize, probably not the optimal version ... *) let all_elem_and_rest_of_list xs = let rec loop acc prev_xs = function | [] -> acc | x :: next_xs -> let other_xs = lazy (List.rev_append prev_xs next_xs) in let acc' = (x, other_xs) :: acc in let prev_xs' = x :: prev_xs in loop acc' prev_xs' next_xs in loop [] [] xs [@@profiling] let rec all_splits = function | [] -> [ ([], []) ] | x :: xs -> all_splits xs |> Common.map (function ls, rs -> [ (x :: ls, rs); (ls, x :: rs) ]) |> List.flatten (* let _ = Common2.example (all_elem_and_rest_of_list ['a';'b';'c'] = [('a', ['b';'c']); ('b', ['a';'c']); ('c', ['a';'b'])]) *) (* Since all_elem_and_rest_of_list computes the rest of list lazily, * we want to still keep track of how much time we're spending on * computing the rest of the list *) let lazy_rest_of_list v = Profiling.profile_code "Matching_generic.eval_rest_of_list" (fun () -> Lazy.force v) let return () = return let fail () = fail (* TODO: deprecate *) type regexp = Re.re (* old: Str.regexp *) let regexp_matcher_of_regexp_string s = if s =~ Pattern.regexp_regexp_string then ( let x, flags = Common.matched2 s in let flags = match flags with | "" -> [] | "i" -> [ `CASELESS ] | "m" -> [ `MULTILINE ] | _ -> failwith (spf "This is not a valid PCRE regexp flag: %s" flags) in (* old: let re = Str.regexp x in (fun s -> Str.string_match re s 0) *) (* TODO: add `ANCHORED to be consistent with Python re.match (!re.search)*) let re = Re.Pcre.regexp ~flags x in fun s2 -> Re.Pcre.pmatch ~rex:re s2 |> fun b -> logger#debug "regexp match: %s on %s, result = %b" s s2 b; b) else failwith (spf "This is not a PCRE-compatible regexp: " ^ s) (*****************************************************************************) (* Generic matchers *) (*****************************************************************************) (* ---------------------------------------------------------------------- *) (* stdlib: option *) (* ---------------------------------------------------------------------- *) (* you should probably use m_option_none_can_match_some instead *) let (m_option : 'a matcher -> 'a option matcher) = fun f a b -> match (a, b) with | None, None -> return () | Some xa, Some xb -> f xa xb | None, _ | Some _, _ -> fail () (* dots: *) let m_option_ellipsis_ok f a b = match (a, b) with | None, None -> return () (* dots: ... can match 0 or 1 expression *) | Some { G.e = G.Ellipsis _; _ }, None -> return () | Some xa, Some xb -> f xa xb | None, _ | Some _, _ -> fail () (* less-is-ok: *) let m_option_none_can_match_some f a b = match (a, b) with (* Nothing specified in the pattern can match Some stuff *) | None, _ -> return () | Some xa, Some xb -> f xa xb | Some _, None -> fail () (* ---------------------------------------------------------------------- *) (* stdlib: list *) (* ---------------------------------------------------------------------- *) let rec m_list f a b = match (a, b) with | [], [] -> return () | xa :: aas, xb :: bbs -> f xa xb >>= fun () -> m_list f aas bbs | [], _ | _ :: _, _ -> fail () let rec m_list_prefix f a b = match (a, b) with | [], [] -> return () | xa :: aas, xb :: bbs -> f xa xb >>= fun () -> m_list_prefix f aas bbs (* less-is-ok: prefix is ok *) | [], _ -> return () | _ :: _, _ -> fail () let rec m_list_with_dots ~less_is_ok f is_dots xsa xsb = match (xsa, xsb) with | [], [] -> return () (* less-is-ok: empty list can sometimes match non-empty list *) | [], _ :: _ when less_is_ok -> return () (* dots: '...', can also match no argument *) | [ a ], [] when is_dots a -> return () | a :: xsa, xb :: xsb when is_dots a -> (* can match nothing *) m_list_with_dots f is_dots ~less_is_ok xsa (xb :: xsb) >||> (* can match more *) m_list_with_dots f is_dots ~less_is_ok (a :: xsa) xsb (* the general case *) | xa :: aas, xb :: bbs -> f xa xb >>= fun () -> m_list_with_dots f is_dots ~less_is_ok aas bbs | [], _ | _ :: _, _ -> fail () let m_list_with_dots_and_metavar_ellipsis ~less_is_ok ~f ~is_dots ~is_metavar_ellipsis xsa xsb = let rec aux xsa xsb = match (xsa, xsb) with | [], [] -> return () (* less-is-ok: empty list can sometimes match non-empty list *) | [], _ :: _ when less_is_ok -> return () (* dots: '...', can also match no argument *) | [ a ], [] when is_dots a -> return () (* opti: if is_metavar_ellipsis and less_is_ok is false, then * it's useless to enumerate all the candidates below; only the * one that match everything will work | [ a ], xs when is_metavar_ellipsis a <> None && not less_is_ok -> *) (* dots: metavars: $...ARGS *) | a :: xsa, xsb when is_metavar_ellipsis a <> None -> ( match is_metavar_ellipsis a with | None -> raise Impossible | Some ((s, tok), metavar_build) -> (* can match 0 or more arguments (just like ...) *) let candidates = inits_and_rest_of_list_empty_ok xsb in let rec aux2 xs = match xs with | [] -> fail () | (inits, rest) :: xs -> envf (s, tok) (metavar_build inits) >>= (fun () -> aux xsa rest) >||> aux2 xs in aux2 candidates) | a :: xsa, xb :: xsb when is_dots a -> (* can match nothing *) aux xsa (xb :: xsb) >||> (* can match more *) aux (a :: xsa) xsb (* the general case *) | xa :: aas, xb :: bbs -> f xa xb >>= fun () -> aux aas bbs | [], _ | _ :: _, _ -> fail () in aux xsa xsb (* todo? opti? try to go faster to the one with split_when? * need reflect tin so we can call the matcher and query whether there * was a match. Maybe a <??> monadic operator? *) let rec m_list_in_any_order ~less_is_ok f xsa xsb = match (xsa, xsb) with | [], [] -> return () (* less-is-ok: empty list can sometimes match non-empty list *) | [], _ :: _ -> if less_is_ok then return () else fail () | a :: xsa, xsb -> let candidates = all_elem_and_rest_of_list xsb in (* less: could use a fold *) let rec aux xs = match xs with | [] -> fail () | (b, xsb) :: xs -> f a b >>= (fun () -> m_list_in_any_order ~less_is_ok f xsa (lazy_rest_of_list xsb)) >||> aux xs in aux candidates (* ---------------------------------------------------------------------- *) (* stdlib: combinatorial search *) (* ---------------------------------------------------------------------- *) (* Used for Associative-Commutative (AC) matching! *) let m_comb_unit xs : _ comb_result = fun tin -> [ (xs, [ tin ]) ] let m_comb_bind (comb_result : _ comb_result) f : _ comb_result = fun tin -> let rec loop = function | [] -> [] | (bs, tout) :: comb_matches' -> let bs_matches = tout |> Common.map (fun tin -> f bs tin) |> List.flatten in bs_matches @ loop comb_matches' in loop (comb_result tin) let m_comb_flatten (comb_result : _ comb_result) (tin : tin) : tout = comb_result tin |> Common.map snd |> List.flatten let m_comb_fold (m_comb : _ comb_matcher) (xs : _ list) (comb_result : _ comb_result) : _ comb_result = List.fold_left (fun comb_result' x -> m_comb_bind comb_result' (m_comb x)) comb_result xs let m_comb_1to1 (m : _ matcher) a bs : _ comb_result = fun tin -> bs |> all_elem_and_rest_of_list |> List.filter_map (fun (b, other_bs) -> match m a b tin with | [] -> None | tout -> Some (Lazy.force other_bs, tout)) let m_comb_1toN m_1toN a bs : _ comb_result = fun tin -> bs |> all_splits |> List.filter_map (fun (l, r) -> match m_1toN a l tin with | [] -> None | tout -> Some (r, tout)) (* ---------------------------------------------------------------------- *) (* stdlib: bool/int/string/... *) (* ---------------------------------------------------------------------- *) (* try to not use m_eq, you could get bad surprise *) let m_eq a b = if a =*= b then return () else fail () let m_bool a b = if a =:= b then return () else fail () let m_int a b = if a =|= b then return () else fail () let m_string a b = if a = b then return () else fail () (* old: Before we just checked whether `s2` was a prefix of `s1`, e.g. * "foo" is a prefix of "foobar". However we use this function to check * file paths, and the path "foo" is NOT a prefix of the path "foobar". * We must also check that what comes after "foo", if anything, is a * path separator. *) let filepath_is_prefix s1 s2 = (* todo: can we assume that the strings are trimmed? *) let is_sep c = c =$= '/' || c =$= '\\' in let len1 = String.length s1 and len2 = String.length s2 in if len1 < len2 then false else let sub = Str.first_chars s1 len2 in sub = s2 && (len1 =|= len2 || is_sep s1.[len2]) (* less-is-ok: *) let m_filepath_prefix a b = if filepath_is_prefix b a then return () else fail () (* ---------------------------------------------------------------------- *) (* Token *) (* ---------------------------------------------------------------------- *) (* we do not care about position! or differences in space/indent/comment! * so we can just 'return ()' *) let m_info _a _b = return () let m_tok a b = m_info a b let m_wrap f a b = match (a, b) with | (xaa, ainfo), (xbb, binfo) -> f xaa xbb >>= fun () -> m_info ainfo binfo let m_bracket f (a1, a2, a3) (b1, b2, b3) = m_info a1 b1 >>= fun () -> f a2 b2 >>= fun () -> m_info a3 b3 let m_tuple3 m_a m_b m_c (a1, b1, c1) (a2, b2, c2) = (m_a a1 a2 >>= fun () -> m_b b1 b2) >>= fun () -> m_c c1 c2 (* ---------------------------------------------------------------------- *) (* Misc *) (* ---------------------------------------------------------------------- *) (* TODO: this would be simpler if we had an * AST_generic.String of string wrap bracket, but this requires * lots of work in our Pfff parsers (less in tree-sitter which already * split strings in different tokens). *) let adjust_info_remove_enclosing_quotes (s, info) = match PI.token_location_of_info info with | Error _ -> (* We have no token location to adjust (typically a fake token), * this happens if the string is the result of constant folding. *) (s, info) | Ok loc -> ( let raw_str = loc.PI.str in let re = Str.regexp_string s in try let pos = Str.search_forward re raw_str 0 in let loc = { loc with PI.str = s; charpos = loc.charpos + pos; column = loc.column + pos; } in let info = { PI.transfo = PI.NoTransfo; token = PI.OriginTok loc } in (s, info) with | Not_found -> logger#error "could not find %s in %s" s raw_str; (* return original token ... better than failwith? *) (s, info)) (* TODO: should factorize with m_ellipsis_or_metavar_or_string at some * point when AST_generic.String is of string bracket *) let m_string_ellipsis_or_metavar_or_default ?(m_string_for_default = m_string) a b = match fst a with (* dots: '...' on string *) | "..." -> return () (* metavar: "$MVAR" *) | astr when MV.is_metavar_name astr -> let _, orig_info = b in let s, info = adjust_info_remove_enclosing_quotes b in envf a (MV.Text (s, info, orig_info)) (* TODO: deprecate *) | astr when Pattern.is_regexp_string astr -> let f = regexp_matcher_of_regexp_string astr in if f (fst b) then return () else fail () | _ -> m_wrap m_string_for_default a b let m_ellipsis_or_metavar_or_string a b = match fst a with (* dots: '...' on string in atom/regexp/string *) | "..." -> return () (* metavar: *) | s when MV.is_metavar_name s -> let str, info = b in envf a (MV.Text (str, info, info)) | _ -> m_wrap m_string a b let m_other_xxx a b = match (a, b) with | a, b when a =*= b -> return () | _ -> fail ()
(* Yoann Padioleau * * Copyright (C) 2019-2021 r2c * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * version 2.1 as published by the Free Software Foundation, with the * special exception on linking described in file LICENSE. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file * LICENSE for more details. *)
sdlimage_test.ml
module SdlImg = Sdlimage let is_imgs = [ "ico", SdlImg.is_ico; "cur", SdlImg.is_cur; "bmp", SdlImg.is_bmp; "gif", SdlImg.is_gif; "jpg", SdlImg.is_jpg; "lbm", SdlImg.is_lbm; "pcx", SdlImg.is_pcx; "png", SdlImg.is_png; "pnm", SdlImg.is_pnm; "tif", SdlImg.is_tif; "xcf", SdlImg.is_xcf; "xpm", SdlImg.is_xpm; "xv", SdlImg.is_xv; "webp", SdlImg.is_webp; ] let () = let filename = Sys.argv.(1) in SdlImg.init [ `JPG; `PNG; `TIF; `WEBP; ]; let rwo = Sdlrwops.from_file ~filename ~mode:"rb" in let found = ref false in List.iter (fun (ext, is_img) -> if is_img rwo then begin Printf.printf "\"%s\" is %s\n" filename ext; found := true end ) is_imgs; if not !found then Printf.printf "unknown filetype for \"%s\"\n" filename; SdlImg.quit (); ;;
VisitorsGeneration.ml
open Ppxlib let mknoloc = Ocaml_common.Location.mknoloc open Asttypes open Parsetree open Ast_helper open Ppx_deriving.Ast_convenience open VisitorsList open VisitorsAnalysis open VisitorsCompatibility (* This module offers helper functions for code generation. *) (* -------------------------------------------------------------------------- *) (* Type abbreviations. *) type variable = string type datacon = string type label = string type classe = string type methode = string type tyvar = string type variables = variable list type tyvars = tyvar list type core_types = core_type list type patterns = pattern list type expressions = expression list (* -------------------------------------------------------------------------- *) (* We should in principle ensure that our code makes sense even if the standard names that we rely upon are shadowed by the user. *) (* This is made slightly difficult by the fact that the name [Pervasives] has been deprecated in favor of [Stdlib] in OCaml 4.07. *) (* One viable approach would be to define the names that we need in the library [VisitorsRuntime], then refer to this library in the generated code. *) (* One problem is that defining an alias for the standard operator (&&) causes it to become strict instead of lazy! So we cannot define an alias for it. *) (* Let's just cross our fingers and assume that the user won't shadow the standard names that we need. *) let pervasive (x : string) : Longident.t = Lident x (* We normally place an improbable prefix in front of our private (local) variables, so as to make sure that we do not shadow user variables that are used in [@build] code fragments. *) (* When producing code for inclusion in the documentation, we remove this prefix, just so that things look pretty. We rely on an undocumented environment variable to toggle this behavior. *) let improbable (x : string) : string = try let _ = Sys.getenv "VISITORS_BUILDING_DOCUMENTATION" in x with Not_found -> "_visitors_" ^ x (* -------------------------------------------------------------------------- *) (* Types. *) let ty_var (alpha : tyvar) : core_type = Typ.var alpha let ty_vars (alphas : tyvars) : core_types = List.map ty_var alphas let ty_any = Typ.any() let ty_unit = tconstr "unit" [] (* For [ty_arrow], see [VisitorsCompatibility]. *) let ty_arrows : core_types -> core_type -> core_type = List.fold_right ty_arrow (* [decl_type decl] turns a declaration of the type ['a foo] into a the type ['a foo]. *) let decl_type (decl : type_declaration) : core_type = tconstr decl.ptype_name.txt (ty_vars (decl_params decl)) (* -------------------------------------------------------------------------- *) (* [unit] produces a unit constant. [tuple] produces a tuple. [record] produces a record. These functions already exist; we redefine them without any optional arguments so as avoid OCaml's warning 48 (implicit elimination of optional arguments). *) let unit() = unit() let tuple es = tuple es let record les = record les (* -------------------------------------------------------------------------- *) (* [number i thing] constructs an English description of "[i] thing(s)". *) let number i s = match i with | 0 -> Printf.sprintf "zero %s" s | 1 -> Printf.sprintf "one %s" s | _ -> Printf.sprintf "%d %ss" i s (* -------------------------------------------------------------------------- *) (* [eident] converts a (possibly-qualified) identifier to an expression. *) let eident (id : Longident.t) : expression = Exp.ident (mknoloc id) (* -------------------------------------------------------------------------- *) (* [pvars] converts a list of variables to a list of patterns. *) let pvars (xs : variables) : patterns = List.map (fun x -> pvar x) xs (* [evars] converts a list of variables to a list of expressions. *) let evars (xs : variables) : expressions = List.map (fun x -> evar x) xs (* [pvarss] converts a matrix of variables to a matrix of patterns. *) let pvarss (xss : variables list) : patterns list = List.map pvars xss (* [evarss] converts a matrix of variables to a matrix of expressions. *) let evarss (xss : variables list) : expressions list = List.map evars xss (* -------------------------------------------------------------------------- *) (* [wildcards] converts a list of anything to a list of wildcard patterns. *) let wildcards xs = List.map (fun _ -> Pat.any()) xs (* -------------------------------------------------------------------------- *) (* [plambda p e] constructs a function [fun p -> e]. *) (* For [plambda], see [VisitorsCompatibility]. *) (* [lambda x e] constructs a function [fun x -> e]. *) let lambda (x : variable) (e : expression) : expression = plambda (pvar x) e (* [plambdas ps e] constructs a multi-argument function [fun ps -> e]. *) let plambdas (ps : patterns) (e : expression) : expression = List.fold_right plambda ps e (* [lambdas xs e] constructs a multi-argument function [fun xs -> e]. *) let lambdas (xs : variables) (e : expression) : expression = List.fold_right lambda xs e (* -------------------------------------------------------------------------- *) (* [app] works like [Ast_convenience.app] (which it shadows), except it avoids constructing nested applications of the form [(f x) y], transforming them instead into a single application [f x y]. The difference is probably just cosmetic. *) let app (e : expression) (es2 : expressions) : expression = match e.pexp_desc with | Pexp_apply (e1, les1) -> let les2 = List.map (fun e -> Label.nolabel, e) es2 in { e with pexp_desc = Pexp_apply (e1, les1 @ les2) } | _ -> app e es2 (* -------------------------------------------------------------------------- *) (* [sequence es] constructs a sequence of the expressions [es]. *) let sequence (es : expressions) : expression = (* Using [fold_right1] instead of [List.fold_right] allows us to get rid of a final [()] constant at the end of the sequence. Cosmetic. *) fold_right1 (fun e accu -> Exp.sequence e accu) es (unit()) (* -------------------------------------------------------------------------- *) (* [vblet1 vb e] constructs a single [let] binding. *) let vblet1 (vb : value_binding) (e : expression) : expression = Exp.let_ Nonrecursive [vb] e (* [let1 x e1 e2] constructs a single [let] binding. *) let let1 (x : variable) (e1 : expression) (e2 : expression) : expression = vblet1 (Vb.mk (pvar x) e1) e2 (* [let1p x y e1 e2] constructs a single [let] binding of a pair. *) let let1p (x, y : variable * variable) (e1 : expression) (e2 : expression) : expression = vblet1 (Vb.mk (ptuple [pvar x; pvar y]) e1) e2 (* [vbletn vbs e] constructs a series of nested [let] bindings. *) let vbletn (vbs : value_binding list) (e : expression) : expression = List.fold_right vblet1 vbs e (* [letn xs es e] constructs a series of nested [let] bindings. *) let letn (xs : variables) (es : expressions) (e : expression) = List.fold_right2 let1 xs es e (* [letnp xs ys es e] constructs a series of nested [let] bindings of pairs. *) let letnp (xs : variables) (ys : variables) (es : expressions) (e : expression) = List.fold_right2 let1p (List.combine xs ys) es e (* -------------------------------------------------------------------------- *) (* [access x label] constructs a record access expression [x.label]. *) let access (x : variable) (label : label) : expression = Exp.field (evar x) (mknoloc (Lident label)) (* [accesses labels xs] constructs a matrix of record access expressions of the form [x.label]. There is a row for every [label] and a column for every [x]. *) let accesses (xs : variables) (labels : label list) : expressions list = List.map (fun label -> List.map (fun x -> access x label) xs) labels (* -------------------------------------------------------------------------- *) (* [ptuple] is [Ast_convenience.ptuple], deprived of its optional arguments. *) let ptuple (ps : patterns) : pattern = ptuple ps (* [ptuples] is [map ptuple]. *) let ptuples (pss : patterns list) : patterns = List.map ptuple pss (* -------------------------------------------------------------------------- *) (* The Boolean expressions [false] and [true]. *) let efalse : expression = Exp.construct (mknoloc (Lident "false")) None let etrue : expression = Exp.construct (mknoloc (Lident "true")) None (* -------------------------------------------------------------------------- *) (* [conjunction es] constructs a Boolean conjunction of the expressions [es]. *) let conjunction : expression = eident (pervasive "&&") let conjunction e1 e2 = app conjunction [e1; e2] let conjunction (es : expressions) : expression = fold_right1 conjunction es etrue (* -------------------------------------------------------------------------- *) (* [eassertfalse] is the expression [assert false]. *) let eassertfalse : expression = Exp.assert_ efalse (* -------------------------------------------------------------------------- *) (* [eforce e] is the expression [Lazy.force e]. *) let eforce : expression = eident (parse "Lazy.force") (* danger: the module name [Lazy] must not be shadowed. *) let eforce (e : expression) : expression = app eforce [e] (* -------------------------------------------------------------------------- *) (* [eqphy e1 e2] is the expression [e1 == e2]. *) let eqphy : expression = eident (pervasive "==") let eqphy (e1 : expression) (e2 : expression) : expression = app eqphy [e1; e2] (* [eqphys es1 es2] is the conjunction of the expressions [e1 == e2]. *) let eqphys (es1 : expressions) (es2 : expressions) : expression = assert (List.length es1 = List.length es2); conjunction (List.map2 eqphy es1 es2) (* -------------------------------------------------------------------------- *) (* [efail s] generates a call to [VisitorsRuntime.fail]. The parameter [s] is a string, which could represent the place where a failure occurred, or the reason why a failure occurred. As of now, it is unused. *) let efail : expression = eident (Ldot (Lident "VisitorsRuntime", "fail")) (* danger: the module name [VisitorsRuntime] must not be shadowed. *) let efail (_ : string) : expression = app efail [ unit() ] (* -------------------------------------------------------------------------- *) (* [include_ e] constructs an [include] declaration. *) let include_ (e : module_expr) : structure_item = Str.include_ { pincl_mod = e; pincl_loc = Location.none; pincl_attributes = []; } (* -------------------------------------------------------------------------- *) (* [with_warnings w items] wraps the structure items [items] in such a way that the warning directive [w] is applied to these items. Technically, this is done by emitting [include struct [@@@ocaml.warning <w>] <items> end]. *) let with_warnings (w : string) (items : structure_item list) : structure_item = include_ (Mod.structure ( floating "ocaml.warning" [ Str.eval (Exp.constant (Const.string w)) ] :: items )) (* -------------------------------------------------------------------------- *) (* [class1 concrete ancestors params name self fields] builds a class declaration and packages it as a structure item. (This implies that it cannot be recursive with other class declarations). *) let class1 (concrete : bool) (params : (core_type * (variance * injectivity)) list) (name : classe) (self : pattern) (fields : class_field list) : structure_item = Str.class_ [{ pci_virt = if concrete then Concrete else Virtual; pci_params = params; pci_name = mknoloc name; pci_expr = Cl.structure (Cstr.mk self fields); pci_loc = !default_loc; pci_attributes = []; }] (* -------------------------------------------------------------------------- *) (* [inherit_ c tys] builds an [inherit] clause, where the superclass is [c] and its actual type parameters are [tys]. No [super] identifier is bound. *) let inherit_ (c : Longident.t) (tys : core_types) : class_field = Cf.inherit_ Fresh (Cl.constr (mknoloc c) tys) None (* -------------------------------------------------------------------------- *) (* An algebraic data type of the methods that we generate. These include concrete methods (with code) and virtual methods (without code). They may be public or private. The method type is optional. If omitted, then it is inferred by OCaml. If present, it can be a polymorphic type. *) type meth = Meth of private_flag * methode * expression option * core_type option let concrete_method p m e oty = Meth (p, m, Some e, oty) let virtual_method p m oty = Meth (p, m, None, oty) (* -------------------------------------------------------------------------- *) (* Converting a method description to OCaml abstract syntax. *) let oe2cfk (oe : expression option) (oty : core_type option) : class_field_kind = match oe, oty with | Some e, Some _ -> Cf.concrete Fresh (Exp.poly e oty) | Some e, None -> Cf.concrete Fresh e | None, Some ty -> Cf.virtual_ ty | None, None -> Cf.virtual_ ty_any let meth2cf (Meth (p, m, oe, oty)) : class_field = Cf.method_ (mknoloc m) p (oe2cfk oe oty) (* -------------------------------------------------------------------------- *) (* [method_name] extracts a method name out of a method description. *) let method_name (Meth (_, m, _, _)) : string = m (* -------------------------------------------------------------------------- *) (* [is_virtual] tests whether a method description represents a virtual method. *) let is_virtual (Meth (_, _, oe, _)) : bool = oe = None (* -------------------------------------------------------------------------- *) (* [send o m es] produces a call to the method [o#m] with arguments [es]. *) let send (o : variable) (m : methode) (es : expressions) : expression = app (exp_send (evar o) m) es (* -------------------------------------------------------------------------- *) (* An algebraic data type of the ``hoisted expressions'' that we generate. *) (* A ``hoisted expression'' is evaluated at most once after the object is allocated. Its value is stored in an instance field. We allow such an expression to reference [self], as long as it does not actually invoke any methods. *) type hoisted = Hoisted of string (* the name of the instance field *) * expression (* the hoisted expression *) (* -------------------------------------------------------------------------- *) (* Converting a hoisted field description to OCaml abstract syntax. *) (* We generate a mutable field declaration, followed with an initialization: val mutable x = lazy (assert false) initializer x <- lazy e We must do this in two steps because the expression [e] might contain references to [self], which are invalid in a field declaration, whereas they are allowed in an initializer. The potential danger in this idiom lies in forcing [x] before the initializer has finished running, leading to an assertion failure. This should not happen if [e] does not perform any method calls or read any fields. *) let hoisted2cf (Hoisted (x, e)) : class_field list = [ Cf.val_ (mknoloc x) (Mutable) (Cf.concrete Fresh (Exp.lazy_ eassertfalse)); Cf.initializer_ (Exp.setinstvar (mknoloc x) (Exp.lazy_ e)) ] (* -------------------------------------------------------------------------- *) (* A facility for generating a class. *) module ClassFieldStore () : sig (* [generate meth] adds [meth] to the list of methods. *) val generate: meth -> unit (* [hoist e] causes the expression [e] to be hoisted, that is, computed once after the object is allocated. The result of evaluating [e] is stored in a field. The call [hoist e] returns an expression which reads this field. *) val hoist: expression -> expression (* [dump concrete ancestors params self c] returns a class definition. *) val dump: bool -> Longident.t list -> (core_type * (variance * injectivity)) list -> pattern -> classe -> structure_item end = struct let meths : meth list ref = ref [] let generate meth = meths := meth :: !meths let dump () : class_field list = let methods = List.rev !meths in (* Move all of the virtual methods up front. If two virtual methods have the same name, keep only one of them. This is useful because we allow a virtual method declaration to be generated several times. In fact, OCaml supports this, but it looks tidier if we remove duplicates. *) let virtual_methods, concrete_methods = List.partition is_virtual methods in let cmp meth1 meth2 = Stdlib.compare (method_name meth1) (method_name meth2) in let virtual_methods = VisitorsList.weed cmp virtual_methods in let methods = virtual_methods @ concrete_methods in List.map meth2cf methods let hoisted : hoisted list ref = ref [] let fresh : unit -> int = let c = ref 0 in fun () -> let x = !c in c := x + 1; x let hoist (e : expression) : expression = let x = Printf.sprintf "h%d" (fresh()) in hoisted := Hoisted (x, e) :: !hoisted; eforce (evar x) let dump concrete ancestors params self c : structure_item = class1 concrete params c self ( (* [inherit] clauses. *) (* We ARBITRARILY assume that every ancestor class is parameterized with ONE type parameter. *) List.map (fun c -> inherit_ c [ ty_any ]) ancestors @ (* Hoisted expressions. *) List.flatten (List.map hoisted2cf (List.rev !hoisted)) @ (* Methods. *) dump() ) end (* -------------------------------------------------------------------------- *) (* A facility for emitting preprocessor warnings. *) (* Warnings must be emitted under the form of [ppwarning] attributes, placed in the generated code. This is not very convenient; we must store these warnings, waiting for a convenient time to emit them. *) module WarningStore () : sig (* [warning loc format ...] emits a warning. *) val warning: loc -> ('a, unit, string, unit) format4 -> 'a (* [warnings()] returns a list of all warnings emitted so far. *) val warnings: unit -> structure end = struct let warnings : attribute list ref = ref [] let warning loc msg = warnings := attribute_of_warning loc msg :: !warnings let warning loc format = Printf.ksprintf (warning loc) format let warnings () = let ws = !warnings in warnings := []; List.map (fun a -> Str.attribute a) (List.rev ws) end
test_addition_input.ml
let _ = (1 + 3) [@time.duration addition]
proof.ml
(* Module defining the last essential tiles of interactive proofs. A proof deals with the focusing commands (including the braces and bullets), the shelf (see the [shelve] tactic) and given up goal (see the [give_up] tactic). A proof is made of the following: - Proofview: a proof is primarily the data of the current view. That which is shown to the user (as a remainder, a proofview is mainly the logical state of the proof, together with the currently focused goals). - Focus: a proof has a focus stack: the top of the stack contains the context in which to unfocus the current view to a view focused with the rest of the stack. In addition, this contains, for each of the focus context, a "focus kind" and a "focus condition" (in practice, and for modularity, the focus kind is actually stored inside the condition). To unfocus, one needs to know the focus kind, and the condition (for instance "no condition" or the proof under focused must be complete) must be met. - Given up goals: as long as there is a given up goal, the proof is not completed. Given up goals cannot be retrieved, the user must go back where the tactic [give_up] was run and solve the goal there. *) open Util module FocusKind = Dyn.Make() type 'a focus_kind = 'a FocusKind.tag type reason = NotThisWay | AlreadyNoFocus type unfocusable = | Cannot of reason | Loose | Strict type 'a focus_condition = | CondNo of bool * 'a focus_kind | CondDone of bool * 'a focus_kind | CondEndStack of 'a focus_kind (* loose_end is false here *) let next_kind = ref 0 let new_focus_kind () = let r = !next_kind in incr next_kind; FocusKind.anonymous r (* To be authorized to unfocus one must meet the condition prescribed by the action which focused.*) (* spiwack: we could consider having a list of authorized focus_kind instead of just one, if anyone needs it *) exception CannotUnfocusThisWay (* Cannot focus on non-existing subgoals *) exception NoSuchGoals of int * int exception NoSuchGoal of Names.Id.t option exception FullyUnfocused let _ = CErrors.register_handler begin function | CannotUnfocusThisWay -> Some (Pp.str "This proof is focused, but cannot be unfocused this way") | NoSuchGoals (i,j) when Int.equal i j -> Some Pp.(str "[Focus] No such goal (" ++ int i ++ str").") | NoSuchGoals (i,j) -> Some Pp.(str "[Focus] Not every goal in range ["++ int i ++ str","++int j++str"] exist.") | NoSuchGoal (Some id) -> Some Pp.(str "[Focus] No such goal: " ++ str (Names.Id.to_string id) ++ str ".") | NoSuchGoal None -> Some Pp.(str "[Focus] No such goal.") | FullyUnfocused -> Some (Pp.str "The proof is not focused") | _ -> None end let check_cond_kind c k = let kind_of_cond = function | CondNo (_,k) | CondDone(_,k) | CondEndStack k -> k in FocusKind.eq (kind_of_cond c) k let equal_kind c k = match FocusKind.eq c k with | None -> false | Some _ -> true let test_cond c k1 pw = match c with | CondNo(_, k) when equal_kind k k1 -> Strict | CondNo(true, _) -> Loose | CondNo(false, _) -> Cannot NotThisWay | CondDone(_, k) when equal_kind k k1 && Proofview.finished pw -> Strict | CondDone(true, _) -> Loose | CondDone(false, _) -> Cannot NotThisWay | CondEndStack k when equal_kind k k1 -> Strict | CondEndStack _ -> Cannot AlreadyNoFocus let no_cond ?(loose_end=false) k = CondNo (loose_end, k) let done_cond ?(loose_end=false) k = CondDone (loose_end,k) type focus_element = FocusElt : 'a focus_condition * 'a * Proofview.focus_context -> focus_element (* Subpart of the type of proofs. It contains the parts of the proof which are under control of the undo mechanism *) type t = { proofview: Proofview.proofview (** Current focused proofview *) ; entry : Proofview.entry (** Entry for the proofview *) ; focus_stack: focus_element list (** History of the focusings, provides information on how to unfocus the proof and the extra information stored while focusing. The list is empty when the proof is fully unfocused. *) ; name : Names.Id.t (** the name of the theorem whose proof is being constructed *) ; poly : bool (** polymorphism *) ; typing_flags : Declarations.typing_flags option } (*** General proof functions ***) let proof p = let (goals,sigma) = Proofview.proofview p.proofview in (* spiwack: beware, the bottom of the stack is used by [Proof] internally, and should not be exposed. *) let rec map_minus_one f = function | [] -> assert false | [_] -> [] | a::l -> f a :: (map_minus_one f l) in let map (FocusElt (_, _, c)) = Proofview.focus_context c in let stack = map_minus_one map p.focus_stack in (goals,stack,sigma) let rec unroll_focus pv = function | FocusElt (_,_,ctx)::stk -> unroll_focus (Proofview.unfocus ctx pv) stk | [] -> pv (* spiwack: a proof is considered completed even if its still focused, if the focus doesn't hide any goal. Unfocusing is handled in {!return}. *) let is_done p = Proofview.finished p.proofview && Proofview.finished (unroll_focus p.proofview p.focus_stack) (* spiwack: for compatibility with <= 8.2 proof engine *) let has_unresolved_evar p = Evd.has_undefined (Proofview.return p.proofview) let has_shelved_goals p = let (_goals,sigma) = Proofview.proofview p.proofview in Evd.has_shelved sigma let has_given_up_goals p = let (_goals,sigma) = Proofview.proofview p.proofview in Evd.has_given_up sigma let is_complete p = is_done p && not (has_unresolved_evar p) && not (has_shelved_goals p) && not (has_given_up_goals p) (* Returns the list of partial proofs to initial goals *) let partial_proof p = Proofview.partial_proof p.entry p.proofview (*** The following functions implement the basic internal mechanisms of proofs, they are not meant to be exported in the .mli ***) (* An auxiliary function to push a {!focus_context} on the focus stack. *) let push_focus cond inf context pr = { pr with focus_stack = FocusElt(cond,inf,context)::pr.focus_stack } type any_focus_condition = AnyFocusCond : 'a focus_condition -> any_focus_condition (* An auxiliary function to read the kind of the next focusing step *) let cond_of_focus pr = match pr.focus_stack with | FocusElt (cond,_,_)::_ -> AnyFocusCond cond | _ -> raise FullyUnfocused (* An auxiliary function to pop and read the last {!Proofview.focus_context} on the focus stack. *) let pop_focus pr = match pr.focus_stack with | focus::other_focuses -> { pr with focus_stack = other_focuses }, focus | _ -> raise FullyUnfocused (* This function focuses the proof [pr] between indices [i] and [j] *) let _focus cond inf i j pr = let focused, context = Proofview.focus i j pr.proofview in let pr = push_focus cond inf context pr in { pr with proofview = focused } (* This function unfocuses the proof [pr], it raises [FullyUnfocused], if the proof is already fully unfocused. This function does not care about the condition of the current focus. *) let _unfocus pr = let pr, FocusElt (_,_,fc) = pop_focus pr in { pr with proofview = Proofview.unfocus fc pr.proofview } (* Focus command (focuses on the [i]th subgoal) *) (* spiwack: there could also, easily be a focus-on-a-range tactic, is there a need for it? *) let focus cond inf i pr = try _focus cond inf i i pr with CList.IndexOutOfRange -> raise (NoSuchGoals (i,i)) (* Focus on the goal named id *) let focus_id cond inf id pr = let (focused_goals, evar_map) = Proofview.proofview pr.proofview in begin match try Some (Evd.evar_key id evar_map) with Not_found -> None with | Some ev -> begin match CList.safe_index Evar.equal ev focused_goals with | Some i -> (* goal is already under focus *) _focus cond inf i i pr | None -> if CList.mem_f Evar.equal ev (Evd.shelf evar_map) then (* goal is on the shelf, put it in focus *) let proofview = Proofview.unshelve [ev] pr.proofview in let pr = { pr with proofview } in let (focused_goals, _) = Proofview.proofview pr.proofview in let i = (* Now we know that this will succeed *) try CList.index Evar.equal ev focused_goals with Not_found -> assert false in _focus cond inf i i pr else raise CannotUnfocusThisWay end | None -> raise (NoSuchGoal (Some id)) end let rec unfocus kind pr () = let AnyFocusCond cond = cond_of_focus pr in match test_cond cond kind pr.proofview with | Cannot NotThisWay -> raise CannotUnfocusThisWay | Cannot AlreadyNoFocus -> raise FullyUnfocused | Strict -> let pr = _unfocus pr in pr | Loose -> begin try let pr = _unfocus pr in unfocus kind pr () with FullyUnfocused -> raise CannotUnfocusThisWay end exception NoSuchFocus (* no handler: should not be allowed to reach toplevel. *) let rec get_in_focus_stack : type a. a focus_kind -> _ -> a = fun kind stack -> match stack with | FocusElt (cond,inf,_)::stack -> begin match check_cond_kind cond kind with | Some Refl -> inf | None -> get_in_focus_stack kind stack end | [] -> raise NoSuchFocus let get_at_focus kind pr = get_in_focus_stack kind pr.focus_stack let is_last_focus kind pr = let FocusElt (cond,_,_) = List.hd pr.focus_stack in Option.has_some (check_cond_kind cond kind) let no_focused_goal p = Proofview.finished p.proofview let rec maximal_unfocus k p = if no_focused_goal p then try maximal_unfocus k (unfocus k p ()) with FullyUnfocused | CannotUnfocusThisWay -> p else p (*** Proof Creation/Termination ***) (* [end_of_stack] is unfocused by return to close every loose focus. *) let end_of_stack_kind = new_focus_kind () let end_of_stack = CondEndStack end_of_stack_kind let unfocused = is_last_focus end_of_stack_kind let start ~name ~poly ?typing_flags sigma goals = let entry, proofview = Proofview.init sigma goals in let pr = { proofview ; entry ; focus_stack = [] ; name ; poly ; typing_flags } in _focus end_of_stack () 1 (List.length goals) pr let dependent_start ~name ~poly ?typing_flags goals = let entry, proofview = Proofview.dependent_init goals in let pr = { proofview ; entry ; focus_stack = [] ; name ; poly ; typing_flags } in let number_of_goals = List.length (Proofview.initial_goals pr.entry) in _focus end_of_stack () 1 number_of_goals pr type open_error_reason = | UnfinishedProof | HasGivenUpGoals let print_open_error_reason er = let open Pp in match er with | UnfinishedProof -> str "Attempt to save an incomplete proof" | HasGivenUpGoals -> strbrk "Attempt to save a proof with given up goals. If this is really what you want to do, use Admitted in place of Qed." exception OpenProof of Names.Id.t option * open_error_reason let _ = CErrors.register_handler begin function | OpenProof (pid, reason) -> let open Pp in Some (Option.cata (fun pid -> str " (in proof " ++ Names.Id.print pid ++ str "): ") (mt()) pid ++ print_open_error_reason reason) | _ -> None end let warn_remaining_shelved_goals = CWarnings.create ~name:"remaining-shelved-goals" ~category:"tactics" (fun () -> Pp.str"The proof has remaining shelved goals") let warn_remaining_unresolved_evars = CWarnings.create ~name:"remaining-unresolved-evars" ~category:"tactics" (fun () -> Pp.str"The proof has unresolved variables") let return ?pid (p : t) = if not (is_done p) then raise (OpenProof(pid, UnfinishedProof)) else if has_given_up_goals p then raise (OpenProof(pid, HasGivenUpGoals)) else begin if has_shelved_goals p then warn_remaining_shelved_goals () else if has_unresolved_evar p then warn_remaining_unresolved_evars (); let p = unfocus end_of_stack_kind p () in Proofview.return p.proofview end let compact p = let entry, proofview = Proofview.compact p.entry p.proofview in { p with proofview; entry } let update_sigma_univs ugraph p = let proofview = Proofview.Unsafe.update_sigma_univs ugraph p.proofview in { p with proofview } (*** Function manipulation proof extra informations ***) (*** Tactics ***) let run_tactic env tac pr = let open Proofview.Notations in let undef sigma l = List.filter (fun g -> Evd.is_undefined sigma g) l in let tac = Proofview.tclEVARMAP >>= fun sigma -> Proofview.Unsafe.tclEVARS (Evd.push_shelf sigma) >>= fun () -> tac >>= fun result -> Proofview.tclEVARMAP >>= fun sigma -> (* Already solved goals are not to be counted as shelved. Nor are they to be marked as unresolvable. *) let retrieved, sigma = Evd.pop_future_goals sigma in let retrieved = Evd.FutureGoals.filter (Evd.is_undefined sigma) retrieved in let retrieved = List.rev (Evd.FutureGoals.comb retrieved) in let sigma = Proofview.Unsafe.mark_as_goals sigma retrieved in let to_shelve, sigma = Evd.pop_shelf sigma in Proofview.Unsafe.tclEVARS sigma >>= fun () -> Proofview.Unsafe.tclNEWSHELVED (retrieved@to_shelve) <*> Proofview.tclUNIT (result,retrieved,to_shelve) in let { name; poly; proofview } = pr in let proofview = Proofview.Unsafe.push_future_goals proofview in let ((result,retrieved,to_shelve),proofview,status,info_trace) = Proofview.apply ~name ~poly env tac proofview in let sigma = Proofview.return proofview in let to_shelve = undef sigma to_shelve in let proofview = Proofview.Unsafe.mark_as_unresolvables proofview to_shelve in let proofview = Proofview.filter_shelf (Evd.is_undefined sigma) proofview in { pr with proofview },(status,info_trace),result (*** Commands ***) (* Remove all the goals from the shelf and adds them at the end of the focused goals. *) let unshelve p = let sigma = Proofview.return p.proofview in let shelf = Evd.shelf sigma in let proofview = Proofview.unshelve shelf p.proofview in { p with proofview } let background_subgoals p = let it, _ = Proofview.proofview (unroll_focus p.proofview p.focus_stack) in it let all_goals p = let add gs set = List.fold_left (fun s g -> Evar.Set.add g s) set gs in let (goals,stack,sigma) = proof p in let set = add goals Evar.Set.empty in let set = List.fold_left (fun s gs -> let (g1, g2) = gs in add g1 (add g2 set)) set stack in let set = add (Evd.shelf sigma) set in let set = Evar.Set.union (Evd.given_up sigma) set in let bgoals = background_subgoals p in add bgoals set type data = { sigma : Evd.evar_map (** A representation of the evar_map [EJGA wouldn't it better to just return the proofview?] *) ; goals : Evar.t list (** Focused goals *) ; entry : Proofview.entry (** Entry for the proofview *) ; stack : (Evar.t list * Evar.t list) list (** A representation of the focus stack *) ; name : Names.Id.t (** The name of the theorem whose proof is being constructed *) ; poly : bool (** Locality, polymorphism, and "kind" [Coercion, Definition, etc...] *) } let data { proofview; focus_stack; entry; name; poly } = let goals, sigma = Proofview.proofview proofview in (* spiwack: beware, the bottom of the stack is used by [Proof] internally, and should not be exposed. *) let rec map_minus_one f = function | [] -> assert false | [_] -> [] | a::l -> f a :: (map_minus_one f l) in let map (FocusElt (_, _, c)) = Proofview.focus_context c in let stack = map_minus_one map focus_stack in { sigma; goals; entry; stack; name; poly } let pr_goal e = Pp.(str "GOAL:" ++ int (Evar.repr e)) let goal_uid e = string_of_int (Evar.repr e) let pr_proof p = let { goals=fg_goals; stack=bg_goals; sigma } = data p in Pp.( let pr_goal_list = prlist_with_sep spc pr_goal in let rec aux acc = function | [] -> acc | (before,after)::stack -> aux (pr_goal_list before ++ spc () ++ str "{" ++ acc ++ str "}" ++ spc () ++ pr_goal_list after) stack in str "[" ++ str "focus structure: " ++ aux (pr_goal_list fg_goals) bg_goals ++ str ";" ++ spc () ++ str "shelved: " ++ pr_goal_list (Evd.shelf sigma) ++ str ";" ++ spc () ++ str "given up: " ++ pr_goal_list (Evar.Set.elements @@ Evd.given_up sigma) ++ str "]" ) let use_unification_heuristics = Goptions.declare_bool_option_and_ref ~depr:false ~key:["Solve";"Unification";"Constraints"] ~value:true exception SuggestNoSuchGoals of int * t let solve ?with_end_tac gi info_lvl tac pr = let tac = match with_end_tac with | None -> tac | Some etac -> Proofview.tclTHEN tac etac in let tac = match info_lvl with | None -> tac | Some _ -> Proofview.Trace.record_info_trace tac in let nosuchgoal = let info = Exninfo.reify () in Proofview.tclZERO ~info (SuggestNoSuchGoals (1,pr)) in let tac = Goal_select.tclSELECT ~nosuchgoal gi tac in let tac = if use_unification_heuristics () then Proofview.tclTHEN tac Refine.solve_constraints else tac in let env = Global.env () in let env = Environ.update_typing_flags ?typing_flags:pr.typing_flags env in let (p,(status,info),()) = run_tactic env tac pr in let env = Global.env () in let sigma = Evd.from_env env in let () = match info_lvl with | None -> () | Some i -> Feedback.msg_info (Pp.hov 0 (Proofview.Trace.pr_info env sigma ~lvl:i info)) in (p,status) (**********************************************************************) (* Shortcut to build a term using tactics *) let refine_by_tactic ~name ~poly env sigma ty tac = (* Save the initial side-effects to restore them afterwards. We set the current set of side-effects to be empty so that we can retrieve the ones created during the tactic invocation easily. *) let eff = Evd.eval_side_effects sigma in let sigma = Evd.drop_side_effects sigma in (* Save the existing goals *) let sigma = Evd.push_future_goals sigma in (* Start a proof *) let prf = start ~name ~poly sigma [env, ty] in let (prf, _, ()) = try run_tactic env tac prf with Logic_monad.TacticFailure e as src -> (* Catch the inner error of the monad tactic *) let (_, info) = Exninfo.capture src in Exninfo.iraise (e, info) in (* Plug back the retrieved sigma *) let { goals; stack; sigma; entry } = data prf in assert (stack = []); let ans = match Proofview.initial_goals entry with | [_, c, _] -> c | _ -> assert false in let ans = EConstr.to_constr ~abort_on_undefined_evars:false sigma ans in (* [neff] contains the freshly generated side-effects *) let neff = Evd.eval_side_effects sigma in (* Reset the old side-effects *) let sigma = Evd.drop_side_effects sigma in let sigma = Evd.emit_side_effects eff sigma in (* Restore former goals *) let _goals, sigma = Evd.pop_future_goals sigma in (* Push remaining goals as future_goals which is the only way we have to inform the caller that there are goals to collect while not being encapsulated in the monad *) let sigma = List.fold_right Evd.declare_future_goal goals sigma in (* Get rid of the fresh side-effects by internalizing them in the term itself. Note that this is unsound, because the tactic may have solved other goals that were already present during its invocation, so that those goals rely on effects that are not present anymore. Hopefully, this hack will work in most cases. *) let neff = neff.Evd.seff_private in let (ans, _) = Safe_typing.inline_private_constants env ((ans, Univ.ContextSet.empty), neff) in ans, sigma let get_goal_context_gen pf i = let { sigma; goals } = data pf in let goal = try List.nth goals (i-1) with Failure _ -> raise (NoSuchGoal None) in let env = Evd.evar_filtered_env (Global.env ()) (Evd.find sigma goal) in (sigma, env) let get_proof_context p = try get_goal_context_gen p 1 with | NoSuchGoal _ -> (* No more focused goals *) let { sigma } = data p in sigma, Global.env ()
(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* <O___,, * (see version control and CREDITS file for authors & dates) *) (* \VV/ **************************************************************) (* // * This file is distributed under the terms of the *) (* * GNU Lesser General Public License Version 2.1 *) (* * (see LICENSE file for the text of the license) *) (************************************************************************)
irmin_pack_mem.ml
open! Import module Atomic_write (K : Irmin.Type.S) (V : Irmin.Hash.S) = struct module AW = Irmin_mem.Atomic_write (K) (V) include AW let v () = AW.v (Irmin_mem.config ()) let flush _t = () end module Indexable_mem (Hash : Irmin.Hash.S) (Value : Irmin_pack.Pack_value.S with type hash := Hash.t and type key = Hash.t) = struct module Pack = Indexable.Maker (Hash) module Indexable_mem = Pack.Make (Value) include Irmin_pack.Indexable.Closeable (Indexable_mem) let v x = Indexable_mem.v x >|= make_closeable end module Maker (Config : Irmin_pack.Conf.S) = struct type endpoint = unit include Irmin.Key.Store_spec.Hash_keyed module Make (Schema : Irmin.Schema.Extended) = struct module H = Schema.Hash module C = Schema.Contents module P = Schema.Path module M = Schema.Metadata module B = Schema.Branch module Pack = Indexable.Maker (H) module XKey = struct include Irmin.Key.Of_hash (H) let unfindable_of_hash x = x end module X = struct module Schema = Schema module Hash = H module Info = Schema.Info module Contents = struct module Pack_value = Irmin_pack.Pack_value.Of_contents (Config) (H) (XKey) (C) module Indexable = Indexable_mem (H) (Pack_value) include Irmin.Contents.Store_indexable (Indexable) (H) (C) end module Node = struct module Value = Schema.Node (XKey) (XKey) module Indexable = struct module Inter = Irmin_pack.Inode.Make_internal (Config) (H) (XKey) (Value) module CA = Pack.Make (Inter.Raw) include Irmin_pack.Inode.Make (H) (XKey) (Value) (Inter) (CA) let v = CA.v end include Irmin.Node.Generic_key.Store (Contents) (Indexable) (H) (Indexable.Val) (M) (P) end module Node_portable = Node.Indexable.Val.Portable module Commit = struct module Value = struct include Schema.Commit (Node.Key) (XKey) module Info = Schema.Info type hash = Hash.t [@@deriving irmin] end module Pack_value = Irmin_pack.Pack_value.Of_commit (H) (XKey) (Value) module Indexable = Indexable_mem (H) (Pack_value) include Irmin.Commit.Generic_key.Store (Info) (Node) (Indexable) (H) (Value) end module Commit_portable = Irmin.Commit.Portable.Of_commit (Commit.Value) module Branch = struct module Key = B module Val = struct include H include Commit.Key end module AW = Atomic_write (Key) (Val) include Irmin_pack.Atomic_write.Closeable (AW) let v () = AW.v () >|= make_closeable end module Slice = Irmin.Backend.Slice.Make (Contents) (Node) (Commit) module Remote = Irmin.Backend.Remote.None (H) (B) module Repo = struct type t = { config : Irmin.Backend.Conf.t; contents : read Contents.Indexable.t; node : read Node.Indexable.t; commit : read Commit.Indexable.t; branch : Branch.t; } let contents_t t : 'a Contents.t = t.contents let node_t t : 'a Node.t = (contents_t t, t.node) let commit_t t : 'a Commit.t = (node_t t, t.commit) let branch_t t = t.branch let config t = t.config let batch t f = Commit.Indexable.batch t.commit (fun commit -> Node.Indexable.batch t.node (fun node -> Contents.Indexable.batch t.contents (fun contents -> let contents : 'a Contents.t = contents in let node : 'a Node.t = (contents, node) in let commit : 'a Commit.t = (node, commit) in f contents node commit))) let v config = let root = Irmin_pack.Conf.root config in let* contents = Contents.Indexable.v root in let* node = Node.Indexable.v root in let* commit = Commit.Indexable.v root in let+ branch = Branch.v () in { contents; node; commit; branch; config } let close t = Contents.Indexable.close (contents_t t) >>= fun () -> Node.Indexable.close (snd (node_t t)) >>= fun () -> Commit.Indexable.close (snd (commit_t t)) >>= fun () -> Branch.close t.branch end end include Irmin.Of_backend (X) end end
(* * Copyright (c) 2018-2022 Tarides <contact@tarides.com> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
digest.mli
type t = string val compare : t -> t -> int val equal : t -> t -> bool val string : string -> t val bytes : bytes -> t val substring : string -> int -> int -> t val subbytes : bytes -> int -> int -> t external channel : in_channel -> int -> t = "caml_md5_chan" val file : string -> t val output : out_channel -> t -> unit val input : in_channel -> t val to_hex : t -> string val from_hex : string -> t
s.ml
module type GROUP = sig type t val size_in_bytes : int val check_bytes : Bytes.t -> bool val of_bytes_exn : Bytes.t -> t val of_bytes_opt : Bytes.t -> t option val to_bytes : t -> Bytes.t val size_in_memory : int val zero : t val is_zero : t -> bool val one : t val is_one : t -> bool val random : ?state:Random.State.t -> unit -> t val eq : t -> t -> bool val neg : t -> t val add : t -> t -> t val double : t -> t val mul : t -> Z.t -> t end
pr_extfun.ml
#directory "."; #load "q_MLast.cmo"; #load "pa_extfun.cmo"; #load "pa_extprint.cmo"; #load "pa_pprintf.cmo"; #load "pa_macro.cmo"; (* heuristic to rebuild the extfun statement from the AST *) open Pretty; open Pcaml; open Prtools; value expr = Eprinter.apply pr_expr; value patt = Eprinter.apply pr_patt; value rec un_extfun rpel = fun [ <:expr< [ ($_$, $_$, fun [ $list:pel$ ]) :: $el$ ] >> -> let (p, wo, e) = match pel with [ [(p, wo, <:expr< Some $e$ >>); (<:patt< _ >>, <:vala< None >>, <:expr< None >>)] -> (p, wo, e) | [(p, wo, <:expr< Some $e$ >>)] -> (p, wo, e) | _ -> raise Not_found ] in let rpel = match rpel with [ [(p1, wo1, e1) :: pel] -> if wo1 = wo && e1 = e then let p = let loc = MLast.loc_of_patt p1 in match (p1, p) with [ (<:patt< ($x1$ as $x2$) >>, <:patt< ($y1$ as $y2$) >>) -> if x2 = y2 then <:patt< ($x1$ | $y1$ as $x2$) >> else <:patt< $p1$ | $p$ >> | _ -> <:patt< $p1$ | $p$ >> ] in [(p, wo, e) :: pel] else [(p, wo, e) :: rpel] | [] -> [(p, wo, e)] ] in un_extfun rpel el | <:expr< [] >> -> List.rev rpel | _ -> raise Not_found ] ; value bar_before elem pc x = pprintf pc "| %p" elem x; value comm_bef pc loc = if flag_comments_in_phrases.val then Prtools.comm_bef pc loc else "" ; value comm_expr expr pc z = let ccc = comm_bef pc.ind (MLast.loc_of_expr z) in sprintf "%s%s" ccc (expr pc z) ; value patt_as pc z = match z with [ <:patt< ($x$ as $y$) >> -> pprintf pc "%p as @[%p@]" patt x patt y | z -> patt pc z ] ; value match_assoc pc (p, w, e) = let pc_dang = if pc.aft = "" then "|" else "" in match w with [ <:vala< Some e1 >> -> pprintf pc "%p@ @[when@;%p ->@]@;%q" patt_as p expr e1 (comm_expr expr) e pc_dang | _ -> pprintf pc "%p ->@;%q" patt_as p (comm_expr expr) e pc_dang ] ; value match_assoc_sh pc pwe = match_assoc {(pc) with ind = pc.ind + 2} pwe; value match_assoc_list pc pwel = if pwel = [] then pprintf pc "[]" else pprintf pc "[ %p ]" (vlist2 match_assoc_sh (bar_before match_assoc_sh)) pwel ; EXTEND_PRINTER pr_expr: LEVEL "top" [ [ <:expr< Extfun.extend $e$ $list$ >> as ge -> try let pwel = un_extfun [] list in pprintf pc "@[<b>extfun %p with@ %p" expr e match_assoc_list pwel with [ Not_found -> next pc ge ] ] ] ; END;
(* camlp5r *) (* pr_extfun.ml,v *) (* Copyright (c) INRIA 2007-2017 *)
recovery.ml
open MenhirSdk open Cmly_api open Utils module type S = sig module G : GRAMMAR type item = G.lr1 * G.production * int type recovery = G.lr1 -> int * (G.lr1 option * item list) list val recover : recovery val report : Format.formatter -> unit end module Make (G : GRAMMAR) (S : Synthesis.S with module G = G) : S with module G = G = struct module G = G open G type item = lr1 * production * int type recovery = lr1 -> int * (lr1 option * item list) list (* let item_to_string (st, prod, p) = Printf.sprintf "(#%d, p%d, %d)" (Lr1.to_int st) (Production.to_int prod) p *) type trace = Trace of float * item list module Trace = struct type t = trace let min = arg_min_float (fun (Trace (c,_)) -> c) let cat (Trace (c1, tr1)) (Trace (c2, tr2)) = Trace (c1 +. c2, tr1 @ tr2) (* let to_string (Trace (c1, tr)) = Printf.sprintf "Trace (%f, %s)" c1 (list_fmt item_to_string tr) *) end module State = struct type level = (nonterminal * Trace.t) list type t = level list let rec merge_level l1 l2 : level = match l1, l2 with | [], l -> l | l, [] -> l | ((nt1, c1) :: xs1), (x2 :: xs2) -> let (nt2, c2) = x2 in match compare nt1 nt2 with | 0 -> let x = (nt1, Trace.min c1 c2) in x :: merge_level xs1 xs2 | n when n > 0 -> x2 :: merge_level l1 xs2 | _ -> (nt1, c1) :: merge_level xs1 l2 let rec merge l1 l2 : t = match l1, l2 with | [], l -> l | l, [] -> l | (x1 :: l1), (x2 :: l2) -> let x' = merge_level x1 x2 in x' :: merge l1 l2 (* let reduction_to_string (n, tr) = Printf.sprintf "(%s, %s)" (Nonterminal.name n) (Trace.to_string tr) let to_string (t : t) = list_fmt (list_fmt reduction_to_string) t *) end let synthesize = let rec add_nt tr nt = function | [] -> [(nt, tr)] | x :: xs -> let c = compare nt (fst x) in if c = 0 then (nt, Trace.min tr (snd x)) :: xs else if c < 0 then (nt, tr) :: xs else x :: add_nt tr nt xs in let add_item cost item stack = let (_, prod, pos) = item in if cost = infinity then stack else let stack_hd = function | [] -> [] | x :: _ -> x and stack_tl = function | [] -> [] | _ :: xs -> xs in let rec aux stack = function | 0 -> add_nt (Trace (cost, [item])) (Production.lhs prod) (stack_hd stack) :: stack_tl stack | n -> stack_hd stack :: aux (stack_tl stack) (n - 1) in aux stack pos in Lr1.tabulate (fun st -> List.fold_left (fun acc (prod, pos) -> if pos = 0 then ( (*if prod.p_kind = `START then ( *) (* pos = 0 means we are on an initial state *) (*report "skipping %s at depth %d\n" prod.p_lhs.n_name pos;*) acc ) else ( (*report "adding %s at depth %d\n" prod.p_lhs.n_name pos;*) add_item (S.cost_of (S.Tail (st, prod, pos))) (st, prod, pos) acc ) ) [] (Lr0.items (Lr1.lr0 st)) ) let step st ntss = let seen = ref CompressedBitSet.empty in let rec aux = function | [] -> [] | ((nt, tr) :: x) :: xs when not (CompressedBitSet.mem (Nonterminal.to_int nt) !seen) && not (Nonterminal.kind nt = `START) -> seen := CompressedBitSet.add (Nonterminal.to_int nt) !seen; let st' = List.assoc (N nt) (Lr1.transitions st) in let xs' = synthesize st' in let xs' = match xs' with | [] -> [] | _ :: xs -> xs in let merge_trace (nt,tr') = (nt, Trace.cat tr' tr) in let xs' = List.map (List.map merge_trace) xs' in aux (State.merge xs' (x :: xs)) | (_ :: x) :: xs -> aux (x :: xs) | [] :: xs -> xs in aux ntss let init st = ((st, [st]), step st (synthesize st)) let expand ((st, sts), nts) = List.map (fun st' -> ((st', st' :: sts), step st' nts)) (S.pred st) let recover st = (* How big is the known prefix of the stack *) let pos = let items = Lr0.items (Lr1.lr0 st) in List.fold_left (fun pos (_, pos') -> max pos pos') (snd (List.hd items)) (List.tl items) in (* Walk this prefix *) let traces = let acc = ref [init st] in for _i = 1 to pos - 1 do acc := List.concat (List.map expand !acc) done; !acc in (* Last step *) let select_trace traces = (* Pick a trace with minimal cost, somewhat arbitrary *) match List.flatten traces with | [] -> (* FIXME: for release, empty list means recovery not possible (not enough annotations) *) assert false | (_, trace) :: alternatives -> List.fold_left (fun tr1 (_,tr2) -> Trace.min tr1 tr2) trace alternatives in let process_trace trace = match expand trace with | [] -> (* Initial state *) assert (snd trace = []); [] | states -> let select_expansion ((st, _sts), trace') = if trace' = [] then (* Reached stack bottom *) (None, select_trace (snd trace)) else (Some st, select_trace trace') in List.map select_expansion states in pos, List.flatten @@ List.map (fun trace -> List.map (fun (st, Trace (_, reductions)) -> st, reductions) (process_trace trace) ) traces let recover = Lr1.tabulate recover let report _ppf = () end
transformSugar.ml
open CommonTypes open Operators open Sugartypes open SourceCode open SourceCode.WithPos open Utility open Types module TyEnv = Env.String type program_transformer = Types.typing_environment -> Sugartypes.program -> Sugartypes.program type sentence_transformer = Types.typing_environment -> Sugartypes.sentence -> Sugartypes.sentence let internal_error message = Errors.internal_error ~filename:"transformSugar.ml" ~message let type_section env = let open Section in function | Minus -> TyEnv.find "-" env | FloatMinus -> TyEnv.find "-."env | Project label -> let ab, a = Types.fresh_type_quantifier (lin_any, res_any) in let rhob, row = fresh_row_quantifier (lin_any, res_any) in let (fields, rho, _) = TypeUtils.extract_row_parts row in let eb, e = Types.fresh_row_quantifier default_effect_subkind in let r = Record (Row (StringMap.add label (Present a) fields, rho, false)) in ForAll ([ab; rhob; eb], Function (Types.make_tuple_type [r], e, a)) | Name var -> TyEnv.find var env let type_unary_op env tycon_env = let datatype = DesugarDatatypes.read ~aliases:tycon_env in function | UnaryOp.Minus -> datatype "(Int) -> Int" | UnaryOp.FloatMinus -> datatype "(Float) -> Float" | UnaryOp.Name n -> TyEnv.find n env let type_binary_op env tycon_env = let open BinaryOp in let datatype = DesugarDatatypes.read ~aliases:tycon_env in function | Minus -> TyEnv.find "-" env | FloatMinus -> TyEnv.find "-." env | RegexMatch flags -> let nativep = List.exists ((=) RegexNative) flags and listp = List.exists ((=) RegexList) flags and replacep = List.exists ((=) RegexReplace) flags in (match replacep, listp, nativep with | true, _ , false -> (* stilde *) datatype "(String, Regex) -> String" | false, true , false -> (* ltilde *) datatype "(String, Regex) -> [String]" | false, false, false -> (* tilde *) datatype "(String, Regex) -> Bool" | _, _, true -> assert false) | And | Or -> datatype "(Bool,Bool) -> Bool" | Cons -> TyEnv.find "Cons" env | Name "++" -> TyEnv.find "Concat" env | Name ">" | Name ">=" | Name "==" | Name "<" | Name "<=" | Name "<>" -> let ab, a = Types.fresh_type_quantifier (lin_any, res_any) in let eb, e = Types.fresh_row_quantifier (lin_any, res_any) in ForAll ([ab; eb], Function (Types.make_tuple_type [a; a], e, Primitive Primitive.Bool )) | Name "!" -> TyEnv.find "Send" env | Name n -> TyEnv.find n env let fun_effects t pss = let rec get_eff = function | Function (_, effects, _), [_] | Lolli (_, effects, _), [_] -> effects | Function (_, _, t), _::pss | Lolli (_, _, t), _::pss -> get_eff (TypeUtils.concrete_type t, pss) | _ -> assert false in let t = match TypeUtils.concrete_type t with | ForAll (_, body) -> TypeUtils.concrete_type body | t -> t in get_eff (t, pss) let option : 'self_type -> ('self_type -> 'a -> ('self_type * 'a * Types.datatype)) -> 'a option -> ('self_type * ('a option) * (Types.datatype option)) = fun o f -> function | None -> (o, None, None) | Some x -> let (o, x, t) = f o x in (o, Some x, Some t) let optionu : 'self_type -> ('self_type -> 'a -> ('self_type * 'a)) -> 'a option -> ('self_type * ('a option)) = fun o f -> function | None -> (o, None) | Some x -> let (o, x) = f o x in (o, Some x) let rec list : 'self_type -> ('self_type -> 'a -> 'self_type * 'a * Types.datatype) -> 'a list -> 'self_type * 'a list * Types.datatype list = fun o f -> function | [] -> (o, [], []) | x :: xs -> let (o, x, t) = f o x in let (o, xs, ts) = list o f xs in (o, x::xs, t::ts) let rec listu : 'self_type -> ('self_type -> 'a -> 'self_type * 'a) -> 'a list -> 'self_type * 'a list = fun o f -> function | [] -> (o, []) | x :: xs -> let (o, x) = f o x in let (o, xs) = listu o f xs in (o, x::xs) let on_effects o (eff : Types.row) fn x = let effect_row = o#lookup_effects in let o = o#with_effects eff in let (o, x, y) = fn o x in (o#with_effects effect_row, x, y) let check_type_application (e, t) k = begin try k () with Instantiate.ArityMismatch (exp, prov) -> prerr_endline ("Arity mismatch in type application"); prerr_endline ("Expression: " ^ show_phrasenode e); prerr_endline ("Type: "^Types.string_of_datatype t); raise (Instantiate.ArityMismatch (exp, prov)) end class transform (env : Types.typing_environment) = let open PrimaryKind in object (o : 'self_type) val var_env = env.Types.var_env val tycon_env = env.Types.tycon_env val formlet_env = TyEnv.empty val effect_row = fst (Types.unwrap_row env.Types.effect_row) method get_var_env : unit -> Types.environment = fun () -> var_env method get_tycon_env : unit -> Types.tycon_environment = fun () -> tycon_env method get_formlet_env : unit -> Types.environment = fun () -> formlet_env method backup_envs = var_env, tycon_env, formlet_env, effect_row method restore_envs (var_env, tycon_env, formlet_env, effect_row) = {< var_env = var_env; tycon_env = tycon_env; formlet_env = formlet_env; effect_row = effect_row >} method with_var_env var_env = {< var_env = var_env >} method with_formlet_env formlet_env = {< formlet_env = formlet_env >} method bind_tycon name tycon = {< tycon_env = TyEnv.bind name tycon tycon_env >} method bind_binder bndr = {< var_env = TyEnv.bind (Binder.to_name bndr) (Binder.to_type bndr) var_env >} method lookup_type : Name.t -> Types.datatype = fun var -> TyEnv.find var var_env method lookup_effects : Types.row = effect_row method with_effects : Types.row -> 'self_type = fun effects -> {< effect_row = fst (Types.unwrap_row effects) >} method sugar_datatype : Datatype.with_pos -> ('self_type * Datatype.with_pos) = fun s -> (o, s) method datatype : Types.datatype -> ('self_type * Types.datatype) = fun t -> (o, t) method datatype' : datatype' -> ('self_type * datatype') = fun (s, t) -> let (o, s) = o#sugar_datatype s in let (o, t) = optionu o (fun o -> o#datatype) t in (o, (s, t)) method lens_type : Lens.Type.t -> ('self_type * Lens.Type.t) = fun sort -> (o, sort) method row : Types.row -> ('self_type * Types.row) = fun row -> (o, row) method unary_op : UnaryOp.t -> ('self_type * UnaryOp.t * Types.datatype) = fun op -> (o, op, type_unary_op var_env tycon_env op) method binop : BinaryOp.t -> ('self_type * BinaryOp.t * Types.datatype) = fun op -> (o, op, type_binary_op var_env tycon_env op) method section : Section.t -> ('self_type * Section.t * Types.datatype) = fun section -> (o, section, type_section var_env section) method sentence : sentence -> ('self_type * sentence * Types.datatype option) = function | Definitions defs -> let (o, defs) = listu o (fun o -> o#binding) defs in (o, Definitions defs, Some Types.unit_type) | Expression e -> let (o, e, t) = o#phrase e in (o, Expression e, Some t) | Directive d -> (o, Directive d, None) method regex : regex -> ('self_type * regex) = function | (Range _ | Simply _ | Any | StartAnchor | EndAnchor) as r -> (o, r) | Quote r -> let (o, r) = o#regex r in (o, Quote r) | Seq rs -> let (o, rs) = listu o (fun o -> o#regex) rs in (o, Seq rs) | Alternate (r1, r2) -> let (o, r1) = o#regex r1 in let (o, r2) = o#regex r2 in (o, Alternate (r1, r2)) | Group r -> let (o, r) = o#regex r in (o, Group r) | Repeat (repeat, r) -> let (o, r) = o#regex r in (o, Repeat (repeat, r)) | Splice e -> let (o, e, _) = o#phrase e in (o, Splice e) | Replace (r, Literal s) -> let (o, r) = o#regex r in (o, Replace (r, Literal s)) | Replace (r, SpliceExpr e) -> let (o, r) = o#regex r in let (o, e, _) = o#phrase e in (o, Replace (r, SpliceExpr e)) method program : program -> ('self_type * program * Types.datatype option) = fun (bs, e) -> let (o, bs) = listu o (fun o -> o#binding) bs in let (o, e, t) = option o (fun o -> o#phrase) e in (o, (bs, e), opt_map Types.normalise_datatype t) method given_spawn_location : given_spawn_location -> ('self_type * given_spawn_location) = function | ExplicitSpawnLocation p -> let (o, phr, _phr_ty) = o#phrase p in (o, ExplicitSpawnLocation phr) | l -> (o, l) method temporal_update : temporal_update -> ('self_type * temporal_update) = function | ValidTimeUpdate (SequencedUpdate { validity_from; validity_to }) -> let (o, validity_from, _) = o#phrase validity_from in let (o, validity_to, _) = o#phrase validity_to in (o, ValidTimeUpdate (SequencedUpdate { validity_from; validity_to })) | ValidTimeUpdate (NonsequencedUpdate { from_time; to_time }) -> let (o, from_time, _) = option o (fun o -> o#phrase) from_time in let (o, to_time, _) = option o (fun o -> o#phrase) to_time in (o, ValidTimeUpdate (NonsequencedUpdate { from_time; to_time })) | upd -> (o, upd) method temporal_deletion : temporal_deletion -> ('self_type * temporal_deletion) = function | ValidTimeDeletion (SequencedDeletion { validity_from; validity_to }) -> let (o, validity_from, _) = o#phrase validity_from in let (o, validity_to, _) = o#phrase validity_to in (o, ValidTimeDeletion (SequencedDeletion { validity_from; validity_to })) | del -> (o, del) method phrasenode : phrasenode -> ('self_type * phrasenode * Types.datatype) = function | Constant c -> let (o, c, t) = o#constant c in (o, Constant c, t) | Sugartypes.Var var -> (o, Sugartypes.Var var, o#lookup_type var) | FreezeVar var -> (o, FreezeVar var, o#lookup_type var) | FunLit (Some argss, lin, lam, location) -> let inner_e = snd (last argss) in let (o, lam, rt) = o#funlit inner_e lam in let (o, t) = List.fold_right (fun (args, effects) (o, rt) -> let (o, args) = o#datatype args in let (o, row) = o#row effects in (o, Function (args, row, rt))) argss (o, rt) in (o, FunLit (Some argss, lin, lam, location), t) | Spawn (Wait, loc, body, Some inner_effects) -> assert (loc = NoSpawnLocation); (* bring the inner effects into scope, then restore the environments afterwards *) let envs = o#backup_envs in let (o, inner_effects) = o#row inner_effects in let o = o#with_effects inner_effects in let (o, body, body_type) = o#phrase body in let o = o#restore_envs envs in (o, Spawn (Wait, loc, body, Some inner_effects), body_type) | Spawn (k, spawn_loc, body, Some inner_effects) -> (* bring the inner effects into scope, then restore the environments afterwards *) let (o, spawn_loc) = o#given_spawn_location spawn_loc in let envs = o#backup_envs in let (o, inner_effects) = o#row inner_effects in let process_type = Application (Types.process, [(Row, inner_effects)]) in let o = o#with_effects inner_effects in let (o, body, _) = o#phrase body in let o = o#restore_envs envs in (o, Spawn (k, spawn_loc, body, Some inner_effects), process_type) | Sugartypes.Select (l, e) -> let (o, e, t) = o#phrase e in (o, Sugartypes.Select (l, e), TypeUtils.select_type l t) | Offer (e, bs, Some t) -> let (o, e, _) = o#phrase e in let (o, bs) = listu o (fun o (p, e) -> let (o, p) = o#pattern p in let (o, e, _) = o#phrase e in (o, (p, e))) bs in let (o, t) = o#datatype t in (o, Offer (e, bs, Some t), t) | CP p -> let (o, p, t) = o#cp_phrase p in (o, CP p, t) | Query (range, policy, body, Some t) -> let (o, range) = optionu o (fun o (limit, offset) -> let (o, limit, _) = o#phrase limit in let (o, offset, _) = o#phrase offset in (o, (limit, offset))) range in let (o, body, _) = on_effects o (Types.make_empty_closed_row ()) (fun o -> o#phrase) body in let (o, body, _) = o#phrase body in let (o, t) = o#datatype t in (o, Query (range, policy, body, Some t), t) | ListLit (es, Some t) -> let (o, es, _) = list o (fun o -> o#phrase) es in let (o, t) = o#datatype t in (o, ListLit (es, Some t), Types.make_list_type t) | RangeLit (e1, e2) -> let (o, e1, _) = o#phrase e1 in let (o, e2, _) = o#phrase e2 in (o, RangeLit (e1, e2), Types.make_list_type Types.int_type) | Iteration (gens, body, cond, orderby) -> let envs = o#backup_envs in let (o, gens) = listu o (fun o -> o#iterpatt) gens in let (o, body, t) = o#phrase body in let (o, cond, _) = option o (fun o -> o#phrase) cond in let (o, orderby, _) = option o (fun o -> o#phrase) orderby in let o = o#restore_envs envs in (o, Iteration (gens, body, cond, orderby), t) | Escape (b, e) -> let envs = o#backup_envs in let (o, b) = o#binder b in let (o, e, t) = o#phrase e in let o = o#restore_envs envs in (o, Escape (b, e), t) | Section sec -> (o, Section sec, type_section var_env sec) | FreezeSection sec -> (o, FreezeSection sec, type_section var_env sec) | Conditional (p, e1, e2) -> let (o, p, _) = o#phrase p in let (o, e1, t) = o#phrase e1 in let (o, e2, _) = o#phrase e2 in (o, Conditional (p, e1, e2), t) | Block (bs, e) -> let envs = o#backup_envs in let (o, bs) = listu o (fun o -> o#binding) bs in let (o, e, t) = o#phrase e in let o = o#restore_envs envs in o, Block (bs, e), t | InfixAppl ((tyargs, op), e1, e2) -> let (o, op, t) = o#binop op in check_type_application (InfixAppl ((tyargs, op), e1, e2), t) (fun () -> let t = TypeUtils.return_type (Instantiate.apply_type t tyargs) in let (o, e1, _) = o#phrase e1 in let (o, e2, _) = o#phrase e2 in (o, InfixAppl ((tyargs, op), e1, e2), t)) | Regex r -> let (o, r) = o#regex r in (o, Regex r, Instantiate.alias "Regex" [] tycon_env) | UnaryAppl ((tyargs, op), e) -> let (o, op, t) = o#unary_op op in check_type_application (UnaryAppl ((tyargs, op), e), t) (fun () -> let t = TypeUtils.return_type (Instantiate.apply_type t tyargs) in let (o, e, _) = o#phrase e in (o, UnaryAppl ((tyargs, op), e), t)) | FnAppl (f, args) -> let (o, f, ft) = o#phrase f in let (o, args, _) = list o (fun o -> o#phrase) args in (o, FnAppl (f, args), TypeUtils.return_type ft) | TAbstr (tyvars, e) -> let outer_tyvars = o#backup_quantifiers in let (o, sqs) = o#quantifiers tyvars in let (o, e, t) = o#phrase e in let o = o#restore_quantifiers outer_tyvars in let qs = List.map SugarQuantifier.get_resolved_exn sqs in let t = Types.for_all (qs, t) in (o, tabstr (sqs, e.node), t) | TAppl (e, tyargs) -> let (o, e, t) = o#phrase e in check_type_application (TAppl (e, tyargs), t) (fun () -> let t = Instantiate.apply_type t (List.map (snd ->- val_of) tyargs) in (o, TAppl (e, tyargs), t)) | TupleLit [e] -> (* QUESTION: Why do we type 1-tuples as if they aren't tuples? *) let (o, e, t) = o#phrase e in (o, TupleLit [e], t) | TupleLit es -> let (o, es, ts) = list o (fun o -> o#phrase) es in (o, TupleLit es, Types.make_tuple_type ts) | RecordLit (fields, base) -> let (o, fields, field_types) = let rec list o = function | [] -> (o, [], StringMap.empty) | (name, e)::fields -> let (o, e, t) = o#phrase e in let (o, fields, field_types) = list o fields in (o, (name, e)::fields, StringMap.add name t field_types) in list o fields in let (o, base, base_type) = option o (fun o -> o#phrase) base in let t = match base_type with | None -> Types.make_record_type field_types | Some t -> begin match TypeUtils.concrete_type t with | Record row -> Record (Types.extend_row field_types row) | t -> Debug.print ("bad t: " ^ Types.string_of_datatype t); assert false end in (o, RecordLit (fields, base), t) | Projection (e, name) -> let (o, e, t) = o#phrase e in (o, Projection (e, name), TypeUtils.project_type name t) | With (e, fields) -> let (o, e, t) = o#phrase e in let o, fields, ts = list o (fun o (name, e) -> let o, e, t = o#phrase e in (o, (name, e), t)) fields in let t = match Types.concrete_type t with | Record row -> let ( fs, rv, closed ) = Types.flatten_row row |> TypeUtils.extract_row_parts in let fs = List.fold_left2 (fun fs (name, _) t -> StringMap.add name (Present t) fs) fs fields ts in Record (Row (fs, rv, closed)) | _ -> t in (o, With (e, fields), t) | TypeAnnotation (e, ann_type) -> let (o, e, _) = o#phrase e in let (o, ann_type) = o#datatype' ann_type in let t = val_of (snd ann_type) in (o, TypeAnnotation (e, ann_type), t) | Upcast (e, to_type, from_type) -> let (o, e, _) = o#phrase e in let (o, to_type) = o#datatype' to_type in let (o, from_type) = o#datatype' from_type in let t = val_of (snd to_type) in (o, Upcast (e, to_type, from_type), t) | ConstructorLit (name, e, Some t) -> let (o, e, _) = option o (fun o -> o#phrase) e in let (o, t) = o#datatype t in (o, ConstructorLit (name, e, Some t), t) | DoOperation (name, ps, Some t) -> let (o, ps, _) = list o (fun o -> o#phrase) ps in (o, DoOperation (name, ps, Some t), t) | Handle { sh_expr; sh_effect_cases; sh_value_cases; sh_descr } -> let (input_row, input_t, output_row, output_t) = sh_descr.shd_types in let (o, expr, _) = o#phrase sh_expr in let envs = o#backup_envs in let (o, params) = match sh_descr.shd_params with | Some params -> let (o, bindings) = List.fold_right (fun (pat, body) (o, bindings) -> let (o, body, _) = o#phrase body in let (o, pat) = o#pattern pat in (o, (pat, body) :: bindings)) params.shp_bindings (o, []) in (o, Some { params with shp_bindings = bindings }) | None -> (o, None) in let (o, val_cases) = listu o (fun o (p, e) -> let (o, p) = o#pattern p in let (o, e, _) = o#phrase e in (o, (p, e))) sh_value_cases in let (o, eff_cases) = listu o (fun o (p, e) -> let (o, p) = o#pattern p in let (o, e, _) = o#phrase e in (o, (p, e))) sh_effect_cases in let o = o#restore_envs envs in let (o, input_row) = o#row input_row in let (o, input_t) = o#datatype input_t in let (o, output_row) = o#row output_row in let (o, output_t) = o#datatype output_t in let (o, raw_row) = o#row sh_descr.shd_raw_row in let descr = { shd_depth = sh_descr.shd_depth; shd_types = (input_row, input_t, output_row, output_t); shd_raw_row = raw_row; shd_params = params} in (o, Handle { sh_expr = expr; sh_effect_cases = eff_cases; sh_value_cases = val_cases; sh_descr = descr }, output_t) | TryInOtherwise (try_phr, as_pat, as_phr, otherwise_phr, (Some dt)) -> let (o, try_phr, _) = o#phrase try_phr in let (o, as_pat) = o#pattern as_pat in let (o, as_phr, _) = o#phrase as_phr in let (o, otherwise_phr, _) = o#phrase otherwise_phr in let (o, dt) = o#datatype dt in (o, TryInOtherwise (try_phr, as_pat, as_phr, otherwise_phr, (Some dt)), dt) | Raise -> (o, Raise, Types.fresh_type_variable (lin_any, res_any)) | Switch (v, cases, Some t) -> let (o, v, _) = o#phrase v in let (o, cases) = listu o (fun o (p, e) -> let (o, p) = o#pattern p in let (o, e, _) = o#phrase e in (o, (p, e))) cases in let (o, t) = o#datatype t in (o, Switch (v, cases, Some t), t) | Receive (cases, Some t) -> let (o, cases) = listu o (fun o (p, e) -> let (o, p) = o#pattern p in let (o, e, _) = o#phrase e in (o, (p, e))) cases in let (o, t) = o#datatype t in (o, Receive (cases, Some t), t) | DatabaseLit (name, (driver, args)) -> let (o, name, _) = o#phrase name in let (o, driver, _) = option o (fun o -> o#phrase) driver in let (o, args, _) = option o (fun o -> o#phrase) args in (o, DatabaseLit (name, (driver, args)), Primitive Primitive.DB) | LensLit (table, Some t) -> let (o, table, _) = o#phrase table in let (o, t) = o#lens_type t in (o, LensLit (table, Some t), Lens t) | LensSerialLit (lens,columns,Some t) -> let (o, lens, _) = o#phrase lens in let (o, t) = o#lens_type t in (o, LensSerialLit (lens, columns, Some t), Lens t) | LensDropLit (lens, drop, key, default, Some t) -> let (o, lens, _) = o#phrase lens in let (o, t) = o#lens_type t in let (o, default, _) = o#phrase default in (o, LensDropLit (lens, drop, key, default, Some t), Lens t) | LensSelectLit (lens, predicate, Some t) -> let (o, lens, _) = o#phrase lens in let (o, predicate, _) = o#phrase predicate in let (o, t) = o#lens_type t in (o, LensSelectLit (lens, predicate, Some t), Lens t) | LensJoinLit (lens1, lens2, on, left, right, Some t) -> let (o, lens1, _) = o#phrase lens1 in let (o, lens2, _) = o#phrase lens2 in let (o, t) = o#lens_type t in (o, LensJoinLit (lens1, lens2, on, left, right, Some t), Lens t) | LensCheckLit (lens, Some t) -> let (o, lens, _) = o#phrase lens in let (o, t) = o#lens_type t in (o, LensCheckLit (lens, Some t), Lens t) | LensGetLit (lens, Some t) -> let (o, lens, _) = o#phrase lens in let (o, t) = o#datatype t in (o, LensGetLit (lens, Some t), Types.make_list_type t) | LensPutLit (lens, data, Some t) -> let (o, lens, _) = o#phrase lens in let (o, data, _) = o#phrase data in let (o, t) = o#datatype t in (o, LensPutLit (lens, data, Some t), Types.make_list_type t) | TableLit { tbl_name; tbl_type = (tmp, dtype, Some (read_row, write_row, needed_row)); tbl_field_constraints; tbl_keys; tbl_temporal_fields; tbl_database } -> let (o, tbl_name, _) = o#phrase tbl_name in let (o, tbl_database, _) = o#phrase tbl_database in let (o, dtype) = o#sugar_datatype dtype in let (o, read_row) = o#datatype read_row in let (o, write_row) = o#datatype write_row in let (o, needed_row) = o#datatype needed_row in let tbl = TableLit { tbl_name; tbl_type = (tmp, dtype, Some (read_row, write_row, needed_row)); tbl_field_constraints; tbl_keys; tbl_temporal_fields; tbl_database } in (o, tbl, Table (tmp, read_row, write_row, needed_row)) | DBDelete (del, p, from, where) -> let (o, del) = optionu o (fun o -> o#temporal_deletion) del in let (o, from, _) = o#phrase from in let (o, p) = o#pattern p in (* BUG: We should really reset the environment: variables bound by p shouldn't be visible in subsequent expression. The same applies to DBUpdate and Iteration. *) let (o, where, _) = option o (fun o -> o#phrase) where in (o, DBDelete (del, p, from, where), Types.unit_type) | DBInsert (tmp, into, labels, values, id) -> let (o, into, _) = o#phrase into in let (o, values, _) = o#phrase values in let (o, id, _) = option o (fun o -> o#phrase) id in (o, DBInsert (tmp, into, labels, values, id), Types.unit_type) | DBUpdate (upd, p, from, where, set) -> let (o, from, _) = o#phrase from in let (o, p) = o#pattern p in let (o, upd) = optionu o (fun o -> o#temporal_update) upd in let (o, where, _) = option o (fun o -> o#phrase) where in let (o, set) = listu o (fun o (name, value) -> let (o, value, _) = o#phrase value in (o, (name, value))) set in (o, DBUpdate (upd, p, from, where, set), Types.unit_type) | DBTemporalJoin (mode, body, Some t) -> let (o, body, _) = on_effects o (Types.make_empty_closed_row ()) (fun o -> o#phrase) body in let (o, body, _) = o#phrase body in let (o, t) = o#datatype t in (o, DBTemporalJoin (mode, body, Some t), t) | Xml (tag, attrs, attrexp, children) -> let (o, attrs) = listu o (fun o (name, value) -> let (o, value, _) = list o (fun o -> o#phrase) value in (o, (name, value))) attrs in let (o, attrexp, _) = option o (fun o -> o#phrase) attrexp in let (o, children, _) = list o (fun o -> o#phrase) children in (o, Xml (tag, attrs, attrexp, children), Types.xml_type) | TextNode s -> (o, TextNode s, Types.xml_type) | Formlet (body, yields) -> let envs = o#backup_envs in let (o, body, _) = o#phrase body in (* ensure that the formlet bindings are only in scope in the yields clause *) let o = o#with_var_env (TyEnv.extend (o#get_var_env ()) (o#get_formlet_env ())) in let (o, yields, t) = o#phrase yields in let o = o#restore_envs envs in (o, Formlet (body, yields), Instantiate.alias "Formlet" [(Type, t)] tycon_env) | Page e -> let (o, e, _) = o#phrase e in (o, Page e, Instantiate.alias "Page" [] tycon_env) | FormletPlacement (f, h, attributes) -> let (o, f, _) = o#phrase f in let (o, h, _) = o#phrase h in let (o, attributes, _) = o#phrase attributes in (o, FormletPlacement (f, h, attributes), Types.xml_type) | PagePlacement e -> let (o, e, _) = o#phrase e in (o, PagePlacement e, Types.xml_type) | FormBinding (f, p) -> let envs = o#backup_envs in let (o, f, _) = o#phrase f in (* HACK: add the formlet bindings to the formlet environment *) let o = o#with_var_env TyEnv.empty in let (o, p) = o#pattern p in let formlet_env = TyEnv.extend formlet_env (o#get_var_env()) in let o = o#restore_envs envs in let o = o#with_formlet_env formlet_env in (* let o = {< formlet_env=TyEnv.extend formlet_env (o#get_var_env()) >} in *) (o, FormBinding (f, p), Types.xml_type) | e -> raise (internal_error ("oops: "^show_phrasenode e)) method phrase : phrase -> ('self_type * phrase * Types.datatype) = fun {node; pos} -> let (o, node, t) = o#phrasenode node in (o, WithPos.make ~pos node, t) method patternnode : Pattern.t -> ('self_type * Pattern.t) = let open Pattern in function | Any -> (o, Any) | Nil -> (o, Nil) | Cons (p, ps) -> let (o, p) = o#pattern p in let (o, ps) = o#pattern ps in (o, Cons (p, ps)) | List p -> let (o, p) = listu o (fun o -> o#pattern) p in (o, List p) | Variant (name, p) -> let (o, p) = optionu o (fun o -> o#pattern) p in (o, Variant (name, p)) | Effect (name, ps, k) -> let (o, ps) = listu o (fun o -> o#pattern) ps in let (o, k) = o#pattern k in (o, Effect (name, ps, k)) | Negative name -> (o, Negative name) | Record (fields, rest) -> let (o, fields) = listu o (fun o (name, p) -> let (o, p) = o#pattern p in (o, (name, p))) fields in let (o, rest) = optionu o (fun o -> o#pattern) rest in (o, Record (fields, rest)) | Tuple ps -> let (o, ps) = listu o (fun o -> o#pattern) ps in (o, Tuple ps) | Constant c -> let (o, c, _) = o#constant c in (o, Constant c) | Variable x -> let (o, x) = o#binder x in (o, Variable x) | As (x, p) -> let (o, x) = o#binder x in let (o, p) = o#pattern p in (o, (As (x, p))) | HasType (p, t) -> let (o, p) = o#pattern p in (o, (HasType (p, t))) method pattern : Pattern.with_pos -> ('self_type * Pattern.with_pos) = WithPos.traverse_map ~o ~f_pos:(fun o v -> o, v) ~f_node:(fun o v -> o#patternnode v) method iterpatt : iterpatt -> ('self_type * iterpatt) = function | List (p, e) -> let (o, e, _) = o#phrase e in let (o, p) = o#pattern p in (o, List (p, e)) | Sugartypes.Table (t, p, e) -> let (o, e, _) = o#phrase e in let (o, p) = o#pattern p in (o, Sugartypes.Table (t, p, e)) method funlit : Types.row -> funlit -> ('self_type * funlit * Types.datatype) = fun inner_eff f -> match f with | NormalFunlit (pss, e) -> let envs = o#backup_envs in let (o, pss) = listu o (fun o -> listu o (fun o -> o#pattern)) pss in let o = o#with_effects inner_eff in let (o, e, t) = o#phrase e in let o = o#restore_envs envs in (o, NormalFunlit (pss, e), t) | SwitchFunlit (pss, body) -> let envs = o#backup_envs in let (o, pss) = listu o (fun o -> listu o (fun o -> o#pattern)) pss in let o = o#with_effects inner_eff in let (o, body) = listu o (fun o (p, c) -> let (o, p) = o#pattern p in let (o, c, _) = o#phrase c in (o, (p, c))) body in let o = o#restore_envs envs in (o, SwitchFunlit (pss, body), Types.unit_type) method constant : Constant.t -> ('self_type * Constant.t * Types.datatype) = function | Constant.Float v -> (o, Constant.Float v , Types.float_type ) | Constant.Int v -> (o, Constant.Int v , Types.int_type ) | Constant.String v -> (o, Constant.String v, Types.string_type) | Constant.Bool v -> (o, Constant.Bool v , Types.bool_type ) | Constant.Char v -> (o, Constant.Char v , Types.char_type ) | Constant.DateTime v -> (o, Constant.DateTime v , Types.datetime_type ) method quantifiers : SugarQuantifier.t list -> ('self_type * SugarQuantifier.t list) = fun qs -> (o, qs) method backup_quantifiers : IntSet.t = IntSet.empty method restore_quantifiers : IntSet.t -> 'self_type = fun _ -> o method rec_bodies : recursive_function list -> ('self * recursive_function list) = let outer_tyvars = o#backup_quantifiers in let rec list o = function | [] -> (o, []) | {node={ rec_definition = ((tyvars, Some (inner, extras)), lam); _ } as fn; pos} :: defs -> let (o, tyvars) = o#quantifiers tyvars in let (o, inner) = o#datatype inner in let lam_in = Sugartypes.get_normal_funlit lam in let inner_effects = fun_effects inner (fst lam_in) in let (o, lam, _) = o#funlit inner_effects lam in let o = o#restore_quantifiers outer_tyvars in let (o, defs) = list o defs in (o, make ~pos { fn with rec_definition = ((tyvars, Some (inner, extras)), lam) } :: defs) | _ :: _ -> assert false in list o method rec_activate_outer_bindings : recursive_function list -> ('self * recursive_function list) = let rec list o = function | [] -> o, [] | {node={ rec_binder; rec_signature; _ } as fn; pos} :: defs -> let (o, rec_binder) = o#binder rec_binder in let (o, defs) = list o defs in let (o, rec_signature) = optionu o (fun o -> o#datatype') rec_signature in (o, make ~pos { fn with rec_binder; rec_signature } :: defs) in list o method rec_activate_inner_bindings : recursive_function list -> 'self_type = let rec list o = function | [] -> o | {node={ rec_binder = f; rec_definition = ((_tyvars, Some (inner, _extras)), _lam); _ }; _} :: defs -> let (o, _) = o#binder (Binder.set_type f inner) in list o defs | _ :: _ -> assert false in list o method bindingnode : bindingnode -> ('self_type * bindingnode) = function | Val (p, (tyvars, e), location, t) -> let outer_tyvars = o#backup_quantifiers in let (o, tyvars) = o#quantifiers tyvars in let (o, e, _) = o#phrase e in let o = o#restore_quantifiers outer_tyvars in let (o, p) = o#pattern p in let (o, t) = optionu o (fun o -> o#datatype') t in (o, Val (p, (tyvars, e), location, t)) | Fun { fun_binder; fun_linearity; fun_definition = (tyvars, lam); fun_location; fun_signature; fun_frozen; fun_unsafe_signature } when Binder.has_type fun_binder -> let outer_tyvars = o#backup_quantifiers in let (o, tyvars) = o#quantifiers tyvars in let lam_in = Sugartypes.get_normal_funlit lam in let inner_effects = fun_effects (Binder.to_type fun_binder) (fst lam_in) in let (o, lam, _) = o#funlit inner_effects lam in let o = o#restore_quantifiers outer_tyvars in let (o, fun_binder) = o#binder fun_binder in let (o, fun_signature) = optionu o (fun o -> o#datatype') fun_signature in (o, Fun { fun_binder; fun_linearity; fun_definition = (tyvars, lam); fun_location; fun_signature; fun_frozen; fun_unsafe_signature }) | Fun _ -> raise (internal_error "Unannotated non-recursive function binding") | Funs defs -> (* put the inner bindings in the environment *) let o = o#rec_activate_inner_bindings defs in (* transform the function bodies *) let (o, defs) = o#rec_bodies defs in (* put the outer bindings in the environment *) let o, defs = o#rec_activate_outer_bindings defs in (o, (Funs defs)) | Foreign alien -> let (o, declarations) = listu o (fun o (b, dt) -> let o, b = o#binder b in let o, dt = o#datatype' dt in (o, (b, dt))) (Alien.declarations alien) in let o, language = o#foreign_language (Alien.language alien) in (o, Foreign (Alien.modify ~language ~declarations alien)) | Aliases ts -> let (o, _) = listu o (fun o {node=(name, vars, b); pos} -> match b with | Typename (x, (Some dt as dt')) -> let o = o#bind_tycon name (`Alias (pk_type, List.map (SugarQuantifier.get_resolved_exn) vars, dt)) in (o, WithPos.make ~pos (name, vars, Typename (x, dt'))) | Effectname (x, (Some r as r')) -> let o = o#bind_tycon name (`Alias (pk_row, List.map (SugarQuantifier.get_resolved_exn) vars, r)) in (o, WithPos.make ~pos (name, vars, Effectname (x, r'))) | _ -> raise (internal_error "Unannotated type alias") ) ts in (o, Aliases ts) | (Infix _) as node -> (o, node) | Exp e -> let (o, e, _) = o#phrase e in (o, Exp e) | AlienBlock _ -> assert false | Module _ -> assert false | Import _ -> assert false | Open _ -> assert false method binding : binding -> ('self_type * binding) = WithPos.traverse_map ~o ~f_pos:(fun o v -> o, v) ~f_node:(fun _ v -> o#bindingnode v) method binder : Binder.with_pos -> ('self_type * Binder.with_pos) = fun bndr -> assert (Binder.has_type bndr); let var_env = TyEnv.bind (Binder.to_name bndr) (Binder.to_type bndr) var_env in ({< var_env=var_env >}, bndr) method cp_phrase : cp_phrase -> ('self_type * cp_phrase * Types.datatype) = fun phrase -> let o, node, t = WithPos.node phrase |> o#cp_phrasenode in o, (WithPos.map ~f:(fun _ -> node) phrase), t (* TODO: should really invoke o#datatype on type annotations! *) method cp_phrasenode : cp_phrasenode -> ('self_type * cp_phrasenode * Types.datatype) = function | CPUnquote (bs, e) -> let envs = o#backup_envs in let (o, bs) = listu o (fun o -> o#binding) bs in let (o, e, t) = o#phrase e in let o = o#restore_envs envs in o, CPUnquote (bs, e), t | CPGrab (cbind, None, p) -> let (o, p, t) = o#cp_phrase p in o, CPGrab (cbind, None, p), t | CPGrab ((c, Some (Input (_a, s), _grab_tyargs) as cbind), Some b, p) -> (* FYI: a = u *) let envs = o#backup_envs in let (o, b) = o#binder b in let venv = TyEnv.bind c s (o#get_var_env ()) in let o = {< var_env = venv >} in let (o, p, t) = o#cp_phrase p in let o = o#restore_envs envs in o, CPGrab (cbind, Some b, p), t | CPGive ((c, Some (Output (_t, s), _tyargs) as cbind), e, p) -> let envs = o#backup_envs in let o = {< var_env = TyEnv.bind c s (o#get_var_env ()) >} in let (o, e, _typ) = option o (fun o -> o#phrase) e in let (o, p, t) = o#cp_phrase p in let o = o#restore_envs envs in o, CPGive (cbind, e, p), t | CPGiveNothing c -> let envs = o#backup_envs in let o, c = o#binder c in let o = o#restore_envs envs in o, CPGiveNothing c, Types.make_endbang_type | CPGrab _ -> raise (internal_error "Malformed grab in TransformSugar") | CPGive _ -> raise (internal_error "Malformed give in TransformSugar") | CPSelect (b, label, p) -> let envs = o#backup_envs in let o, b = o#binder b in let (o, p, t) = o#cp_phrase p in let o = o#restore_envs envs in o, CPSelect (b, label, p), t | CPOffer (b, cases) -> let (o, cases) = List.fold_right (fun (label, p) (o, cases) -> let envs = o#backup_envs in let o, _ = o#binder b in let (o, p, t) = o#cp_phrase p in (o#restore_envs envs, ((label, p), t) :: cases)) cases (o, []) in begin match List.split cases with | cases, t :: _ts -> o, CPOffer (b, cases), t | _ -> assert false end | CPLink (c, d) -> o, CPLink (c, d), Types.unit_type | CPComp (bndr, left, right) -> let c = Binder.to_name bndr in let s = Binder.to_type bndr in let envs = o#backup_envs in let (o, left, _typ) = {< var_env = TyEnv.bind c s (o#get_var_env ()) >}#cp_phrase left in let whiny_dual_type s = try Types.dual_type s with Invalid_argument _ -> raise (Invalid_argument ("Attempted to dualize non-session type " ^ Types.string_of_datatype s)) in let (o, right, t) = {< var_env = TyEnv.bind c (whiny_dual_type s) (o#get_var_env ()) >}#cp_phrase right in let o = o#restore_envs envs in o, CPComp (bndr, left, right), t method foreign_language : ForeignLanguage.t -> ('self_type * ForeignLanguage.t) = fun lang -> (o, lang) end
input.mli
(** A module providing efficient input buffers with preprocessing. *) (** {2 Type} *) (** The abstract type for an input buffer. *) type buffer (** {2 Reading from a buffer} *) (** [read buf pos] returns the character at position [pos] in the buffer [buf], together with the new buffer and position. *) val read : buffer -> int -> char * buffer * int (** [get buf pos] returns the character at position [pos] in the buffer [buf]. *) val get : buffer -> int -> char (** {2 Creating a buffer} *) (** [from_file fn] returns a buffer constructed using the file [fn]. *) val from_file : string -> buffer (** [from_channel ~filename ch] returns a buffer constructed using the channel [ch]. The optional [filename] is only used as a reference to the channel in error messages. *) val from_channel : ?filename:string -> in_channel -> buffer (** [from_string ~filename str] returns a buffer constructed using the string [str]. The optional [filename] is only used as a reference to the channel in error messages. *) val from_string : ?filename:string -> string -> buffer (** [from_fun finalise name get data] returns a buffer constructed from the object [data] using the [get] function. The get function is used to obtain one line of input from [data]. The [finalise] function is applied to [data] when the end of file is reached. The [name] string is used to reference the origin of the data in error messages. *) val from_fun : ('a -> unit) -> string -> ('a -> string) -> 'a -> buffer (** {2 Creating buffers with a custom preprocessor} *) (** Exception that can be raised by a preprocessor in case of error. The first string references the name of the buffer (e.g. the name of the corresponding file) and the second string contains the message. *) exception Preprocessor_error of string * string (** [pp_error name msg] raises [Preprocessor_error(name,msg)]. *) val pp_error : string -> string -> 'a (** Specification of a preprocessor. *) module type Preprocessor = sig (** Type for the internal state of the preprocessor. *) type state (** Initial state of the preprocessor. *) val initial_state : state (** [update st name lnum line] takes as input the state [st] of the preprocessor, the file name [name], the number of the next input line [lnum] and the next input line [line] itself. It returns a tuple of the new state, the new file name, the new line number, and a boolean. The new file name and line number can be used to implement line number directives. The boolean is [true] if the line should be part of the input (i.e. it is not a specific preprocessor line) and [false] if it should be ignored. The function may raise [Preprocessor_error] in case of error. *) val update : state -> string -> int -> string -> state * string * int * bool (** [check_final st name] check that [st] indeed is a correct state of the preprocessor for the end of input of file [name]. If it is not the case, then the exception [Preprocessor_error] is raised. *) val check_final : state -> string -> unit end (** Functor for building buffers with a preprocessor. *) module WithPP : functor (PP : Preprocessor) -> sig (** Same as [Input.from_fun] but uses the preprocessor. *) val from_fun : ('a -> unit) -> string -> ('a -> string) -> 'a -> buffer (** Same as [Input.from_channel] but uses the preprocessor. *) val from_channel : ?filename:string -> in_channel -> buffer (** Same as [Input.from_file] but uses the preprocessor. *) val from_file : string -> buffer (** Same as [Input.from_string] but uses the preprocessor. *) val from_string : ?filename:string -> string -> buffer end (** {2 Buffer manipulation functions} *) (** [is_empty buf] test whether the buffer [buf] is empty. *) val is_empty : buffer -> int -> bool (** [line_num buf] returns the current line number of [buf]. *) val line_num : buffer -> int (** [line_beginning buf] returns the offset of the current line in the buffer [buf]. *) val line_offset : buffer -> int (** [line buf] returns the current line in the buffer [buf]. *) val line : buffer -> string (** [line_length buf] returns the length of the current line in the buffer [buf]. *) val line_length : buffer -> int (** [utf8_col_num buf pos] returns the utf8 column number corresponding to the position [pos] in [buf]. *) val utf8_col_num : buffer -> int -> int (** [normalize buf pos] ensures that [pos] is less than the length of the current line in [str]. *) val normalize : buffer -> int -> buffer * int (** [filename buf] returns the file name associated to the [buf]. *) val filename : buffer -> string (** [buffer_uid buf] returns a unique identifier for [buf]. *) val buffer_uid : buffer -> int (** [buffer_eq b1 b2] tests the equality of [b1] and [b2]. *) val buffer_equal : buffer -> buffer -> bool (** [buffer_compare b1 b2] compares [b1] and [b2]. *) val buffer_compare : buffer -> buffer -> int (** [leq_bug b1 i1 b2 i2] returns true if the position [b1, i1] is before [b2, i2]. Gives meaningless result if [b1] and [b2] do not refer to the same file. *) val buffer_before : buffer -> int -> buffer -> int -> bool (** .... *) (** First kind of table: association list in file order (first position in the beginning *) module OrdTbl : sig type 'a t val empty : 'a t val add : buffer -> int -> 'a -> 'a t -> 'a t val pop : 'a t -> buffer * int * 'a list * 'a t val iter : 'a t -> ('a -> unit) -> unit val is_empty : 'a t -> bool end (** Second kind of table: unordered, but imperative and more efficient *) module Tbl : sig type 'a t val create : unit -> 'a t val add : 'a t -> buffer -> int -> 'a -> unit val find : 'a t -> buffer -> int -> 'a val clear : 'a t -> unit val iter : 'a t -> ('a -> unit) -> unit end
(* ====================================================================== Copyright Christophe Raffalli & Rodolphe Lepigre LAMA, UMR 5127 CNRS, Université Savoie Mont Blanc christophe.raffalli@univ-savoie.fr rodolphe.lepigre@univ-savoie.fr This software contains a parser combinator library for the OCaml lang- uage. It is intended to be used in conjunction with pa_ocaml (an OCaml parser and syntax extention mechanism) to provide a fully-integrated way of building parsers using an extention of OCaml's syntax. This software is governed by the CeCILL-B license under French law and abiding by the rules of distribution of free software. You can use, modify and/or redistribute the software under the terms of the CeCILL- B license as circulated by CEA, CNRS and INRIA at the following URL. http://www.cecill.info As a counterpart to the access to the source code and rights to copy, modify and redistribute granted by the license, users are provided only with a limited warranty and the software's author, the holder of the economic rights, and the successive licensors have only limited liability. In this respect, the user's attention is drawn to the risks associated with loading, using, modifying and/or developing or reproducing the software by the user in light of its specific status of free software, that may mean that it is complicated to manipulate, and that also therefore means that it is reserved for developers and experienced professionals having in-depth computer knowledge. Users are therefore encouraged to load and test the software's suitability as regards their requirements in conditions enabling the security of their sys- tems and/or data to be ensured and, more generally, to use and operate it in the same conditions as regards security. The fact that you are presently reading this means that you have had knowledge of the CeCILL-B license and that you accept its terms. ====================================================================== *)
remove_unused_closure_vars.mli
[@@@ocaml.warning "+a-4-9-30-40-41-42"] (* CR-soon mshinwell: Rename this module. *) (** Eliminate variables bound by sets of closures that are not required. Also eliminate functions within sets of closures that are not required. *) val remove_unused_closure_variables : remove_direct_call_surrogates:bool -> Flambda.program -> Flambda.program
(**************************************************************************) (* *) (* OCaml *) (* *) (* Pierre Chambart, OCamlPro *) (* Mark Shinwell and Leo White, Jane Street Europe *) (* *) (* Copyright 2013--2016 OCamlPro SAS *) (* Copyright 2014--2016 Jane Street Group LLC *) (* *) (* All rights reserved. This file is distributed under the terms of *) (* the GNU Lesser General Public License version 2.1, with the *) (* special exception on linking described in the file LICENSE. *) (* *) (**************************************************************************)
test_time_repr.ml
(** Testing ------- Component: Protocol (time repr) Invocation: dune exec src/proto_012_Psithaca/lib_protocol/test/unit/main.exe \ -- test "^\[Unit\] time$" Subject: Error handling of time operations *) open Protocol let test_nominal_add () = let t = Time_repr.of_seconds (Int64.of_int 2) in let addition = Period_repr.of_seconds Int64.one >>? fun p -> Time_repr.( +? ) t p in match addition with | Ok v -> Assert.equal ~loc:__LOC__ Time_repr.equal "test_nominal_add" Time_repr.pp_hum v (Time_repr.of_seconds (Int64.of_int 3)) | Error _ -> failwith "Addition has overflowed" let test_overflow_add () = let t = Time_repr.of_seconds Int64.max_int in match Period_repr.of_seconds Int64.one with | Error _ -> failwith "period_repr conversion" | Ok p -> ( match Time_repr.( +? ) t p with | Error _ -> return_unit | Ok tres -> failwith "No overflow: %Ld + %Ld = %Ld" (Time_repr.to_seconds t) (Period_repr.to_seconds p) (Time_repr.to_seconds tres)) let tests = [ Tztest.tztest "non-overflowing addition" `Quick test_nominal_add; Tztest.tztest "overflowing addition" `Quick test_overflow_add; ]
(** Testing ------- Component: Protocol (time repr) Invocation: dune exec src/proto_012_Psithaca/lib_protocol/test/unit/main.exe \ -- test "^\[Unit\] time$" Subject: Error handling of time operations *)
File_type.ml
open Common (*****************************************************************************) (* Prelude *) (*****************************************************************************) (*****************************************************************************) (* Types *) (*****************************************************************************) (* see also dircolors.el and LFS *) type file_type = | PL of pl_type | Obj of string (* .o, .a, .aux, .bak, etc *) | Binary of string | Text of string (* tex, txt, readme, noweb, org, etc *) | Doc of string (* ps, pdf *) | Config of config_type (* json, yaml, ini *) | Media of media_type | Archive of string (* tgz, rpm, etc *) | Other of string (* programming languages *) and pl_type = (* functional *) | OCaml of string (* mli, ml, mly, mll *) | FSharp of string (* fsi, fsx, fs *) | MLOther of string | Haskell of string | Lisp of lisp_type | Skip | Scala (* logic *) | Prolog of string (* classic script *) | Script of string (* sh, csh, awk, sed, etc *) (* mainstream *) | C of string | Cplusplus of string | Java | Kotlin | Csharp | ObjectiveC of string | Swift (* advanced script *) | Perl | Python | Ruby | Lua | R (* other *) | Erlang | Go | Rust | Beta | Pascal | Web of webpl_type | Haxe | Opa | Flash | Bytecode of string | Asm | Thrift | MiscPL of string and config_type = | Makefile | Json | Jsonnet (* kinda pl_type *) | Yaml | HCL and lisp_type = CommonLisp | Elisp | Scheme | Clojure and webpl_type = | Php of string (* php or phpt or script *) | Hack | Js | TypeScript (* JSX/TSX are converted in Js/Typescript *) | Coffee | Vue | Css | Html | Xml | Sql and media_type = Sound of string | Picture of string | Video of string (*****************************************************************************) (* Main entry point *) (*****************************************************************************) (* this function is used by codemap and archi_parse and called for each * filenames, so it has to be fast! *) let file_type_of_file file = let _d, b, e = Common2.dbe_of_filename_noext_ok file in match e with | "ml" | "mli" | "mly" | "mll" | "dyp" (* dypgen =~ GLR ocamlyacc *) -> PL (OCaml e) | "mlb" (* mlburg *) | "mlp" (* used in some source *) | "eliom" (* ocsigen, obviously *) -> PL (OCaml e) | "sml" -> PL (MLOther e) (* fsharp *) | "fsi" | "fsx" | "fs" -> PL (FSharp e) (* linear ML *) | "lml" -> PL (MLOther e) | "hs" | "lhs" -> PL (Haskell e) | "scala" | "sc" -> PL Scala | "erl" | "hrl" -> PL Erlang | "hx" | "hxp" | "hxml" -> PL Haxe | "opa" -> PL Opa | "sk" -> PL Skip | "as" -> PL Flash | "bet" -> PL Beta (* todo detect false C file, look for "Mode: Objective-C++" string in file ? * can also be a c++, use Parser_cplusplus.is_problably_cplusplus_file *) | "c" -> PL (C e) | "h" -> PL (C e) (* todo? have a PL of xxx_kind * pl_kind ? *) | "y" | "l" -> PL (C e) | "hpp" -> PL (Cplusplus e) | "hxx" -> PL (Cplusplus e) | "hh" -> PL (Cplusplus e) | "cpp" -> PL (Cplusplus e) | "C" -> PL (Cplusplus e) | "cc" -> PL (Cplusplus e) | "cxx" -> PL (Cplusplus e) (* used in libstdc++ *) | "tcc" -> PL (Cplusplus e) | "m" | "mm" -> PL (ObjectiveC e) | "swift" -> PL Swift | "java" -> PL Java | "kt" -> PL Kotlin | "cs" -> PL Csharp | "p" -> PL Pascal | "thrift" -> PL Thrift | "scm" | "rkt" | "ss" | "lsp" -> PL (Lisp Scheme) | "lisp" -> PL (Lisp CommonLisp) | "el" -> PL (Lisp Elisp) | "clj" -> PL (Lisp Clojure) (* Perl or Prolog ... I made my choice *) | "pl" -> PL (Prolog "pl") | "perl" -> PL Perl | "py" | "pyi" -> PL Python | "rb" -> PL Ruby | "logic" -> PL (Prolog "logic") (* datalog of logicblox *) | "dtl" -> PL (Prolog "dtl") (* bddbddb *) | "dl" -> PL (Prolog "dl") (* datalog *) | "ql" | "qll" -> PL (MiscPL e) (* Semmle Query language *) | "clp" -> PL (Prolog e) | "s" | "S" | "asm" -> PL Asm | "c--" -> PL (MiscPL e) | "oz" -> PL (MiscPL e) | "groovy" -> PL (MiscPL e) | "sh" | "rc" | "csh" | "bash" -> PL (Script e) | "m4" -> PL (MiscPL e) | "conf" -> PL (MiscPL e) (* Andrew Appel's Tiger toy language *) | "tig" -> PL (MiscPL e) (* merd *) | "me" -> PL (MiscPL "me") | "vim" -> PL (MiscPL "vim") | "nanorc" -> PL (MiscPL "nanorc") (* from hex to bcc *) | "he" -> PL (MiscPL "he") | "bc" -> PL (MiscPL "bc") | "php" | "phpt" -> PL (Web (Php e)) | "hck" | "hack" (* | "hh" *) -> (* ".hh" is also a popular choice for C++ header files *) PL (Web Hack) | "css" -> PL (Web Css) (* "javascript" | "es" | ? *) | "js" -> PL (Web Js) | "jsx" -> PL (Web Js) (* Js with JSX enabled *) | "coffee" -> PL (Web Coffee) | "ts" -> PL (Web TypeScript) | "tsx" -> PL (Web TypeScript) (* Typescript with JSX enabled *) | "vue" -> PL (Web Vue) | "html" | "htm" -> PL (Web Html) | "xml" -> PL (Web Xml) | "json" -> Config Json | "jsonnet" -> Config Jsonnet | "yml" | "yaml" -> Config Yaml | "tf" -> Config HCL | "sql" -> PL (Web Sql) | "sqlite" -> PL (Web Sql) (* apple stuff ? *) | "xib" -> PL (Web Xml) (* xml i18n stuff for apple *) | "nib" -> Obj e (* facebook: sqlshim files *) | "sql3" -> PL (Web Sql) | "fbobj" -> PL (MiscPL "fbobj") | "png" | "jpg" | "JPG" | "gif" | "tiff" -> Media (Picture e) | "xcf" | "xpm" -> Media (Picture e) | "icns" | "icon" | "ico" -> Media (Picture e) | "ppm" -> Media (Picture e) | "tga" -> Media (Picture e) | "ttf" | "font" -> Media (Picture e) | "wav" -> Media (Sound e) | "swf" -> Media (Picture e) | "ps" | "pdf" -> Doc e | "ppt" -> Doc e | "tex" | "texi" -> Text e | "txt" | "doc" -> Text e | "nw" | "web" -> Text e | "ms" -> Text e | "org" | "md" | "rest" | "textile" | "wiki" | "rst" -> Text e | "rtf" -> Text e | "cmi" | "cmo" | "cmx" | "cma" | "cmxa" | "annot" | "cmt" | "cmti" | "o" | "a" | "pyc" | "log" | "toc" | "brf" | "out" | "output" | "hi" | "msi" -> Obj e (* pad: I use it to store marshalled data *) | "db" -> Obj e | "po" | "pot" | "gmo" -> Obj e (* facebook fbcode stuff *) | "apcarc" | "serialized" | "wsdl" | "dat" | "train" -> Obj e | "facts" -> Obj e (* logicblox *) (* pad specific, cached git blame info *) | "git_annot" -> Obj e (* pad specific, codegraph cached data *) | "marshall" | "matrix" -> Obj e | "byte" | "top" -> Binary e | "tar" -> Archive e | "tgz" -> Archive e (* was PL Bytecode, but more accurate as an Obj *) | "class" -> Obj e (* pad specific, clang ast dump *) | "clang" | "c.clang2" | "h.clang2" | "clang2" -> Obj e (* was Archive *) | "jar" -> Archive e | "bz2" -> Archive e | "gz" -> Archive e | "rar" -> Archive e | "zip" -> Archive e | "exe" -> Binary e | "mk" -> Config Makefile | "rs" -> PL Rust | "go" -> PL Go | "lua" -> PL Lua | "r" | "R" -> PL R | _ when Common2.is_executable file -> Binary e | _ when b = "Makefile" || b = "mkfile" || b = "Imakefile" -> Config Makefile | _ when b = "README" -> Text "txt" | _ when b = "TAGS" -> Binary e | _ when b = "TARGETS" -> Config Makefile | _ when b = ".depend" -> Obj "depend" | _ when b = ".emacs" -> PL (Lisp Elisp) | _ when Common2.filesize file > 300_000 -> Obj e | _ -> Other e (*****************************************************************************) (* Misc *) (*****************************************************************************) (* this is used in codemap, to know whether to display a file *) let is_textual_file file = match file_type_of_file file with (* still? if this contains weird code then pfff_visual crash *) | PL (Web Sql) -> false | PL _ | Text _ | Config _ -> true | Obj _ | Binary _ | Media _ | Doc _ | Archive _ | Other _ -> false let webpl_type_of_file file = match file_type_of_file file with | PL (Web x) -> Some x | _ -> None (* let detect_pl_of_file file = raise Todo let string_of_pl x = raise Todo | C -> "c" | Cplusplus -> "c++" | Java -> "java" | Web _ -> raise Todo *) let is_syncweb_obj_file file = file =~ ".*md5sum_" let is_json_filename filename = filename =~ ".*\\.json$" (* match File_type.file_type_of_file filename with | File_type.PL (File_type.Web (File_type.Json)) -> true | _ -> false *) let files_of_dirs_or_files p xs = Common.files_of_dir_or_files_no_vcs_nofilter xs |> List.filter (fun filename -> p (file_type_of_file filename)) |> Common.sort
(* Yoann Padioleau * * Copyright (C) 2010-2013 Facebook * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * version 2.1 as published by the Free Software Foundation, with the * special exception on linking described in file license.txt. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file * license.txt for more details. *)
gcc-c11-generic-3-2.c
// https://github.com/gcc-mirror/gcc/blob/16e2427f50c208dfe07d07f18009969502c25dc8/gcc/testsuite/gcc.dg/c11-generic-3.c char const *b = _Generic ("bla", char[4]: ""); int main() { return 0; }
protocol_files.ml
open Error_monad let name = "TEZOS_PROTOCOL" open Protocol open TzFilename.Infix let to_file ~dir:dirname ?hash ?env_version modules = let config_file = Data_encoding.Json.construct Meta.encoding {hash; expected_env_version = env_version; modules} in Lwt_utils_unix.Json.write_file (dirname // name) config_file let of_file ~dir:dirname = Lwt_utils_unix.Json.read_file (dirname // name) >>=? fun json -> return (Data_encoding.Json.destruct Meta.encoding json) let find_component dirname module_name = let name_lowercase = String.uncapitalize_ascii module_name in let implementation = (dirname // name_lowercase) ^ ".ml" in let interface = implementation ^ "i" in match (Sys.file_exists implementation, Sys.file_exists interface) with | (false, _) -> Stdlib.failwith @@ "No such file: " ^ implementation | (true, false) -> Lwt_utils_unix.read_file implementation >|= fun implementation -> {name = module_name; interface = None; implementation} | _ -> Lwt_utils_unix.read_file interface >>= fun interface -> Lwt_utils_unix.read_file implementation >|= fun implementation -> {name = module_name; interface = Some interface; implementation} let read_dir dir = of_file ~dir >>=? fun meta -> Lwt_list.map_p (find_component dir) meta.modules >>= fun components -> let expected_env = match meta.expected_env_version with None -> V0 | Some v -> v in return (meta.hash, {expected_env; components}) open Lwt.Infix let create_files dir units = Lwt_utils_unix.remove_dir dir >>= fun () -> Lwt_utils_unix.create_dir dir >>= fun () -> Lwt_list.map_s (fun {name; interface; implementation} -> let name = String.lowercase_ascii name in let ml = dir // (name ^ ".ml") in let mli = dir // (name ^ ".mli") in Lwt_utils_unix.create_file ml implementation >>= fun () -> match interface with | None -> Lwt.return [ml] | Some content -> Lwt_utils_unix.create_file mli content >>= fun () -> Lwt.return [mli; ml]) units >>= fun files -> let files = List.concat files in Lwt.return files let write_dir dir ?hash (p : t) = create_files dir p.components >>= fun _files -> to_file ~dir ?hash ~env_version:p.expected_env (List.map (fun {name; _} -> String.capitalize_ascii name) p.components)
p-is_irreducible.c
#include "fq_poly.h" #ifdef T #undef T #endif #define T fq #define CAP_T FQ #include "fq_poly_templates/profile/p-is_irreducible.c" #undef CAP_T #undef T
/* Copyright (C) 2013 Mike Hansen This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
icons.mli
val drag_icon : GdkPixbuf.pixbuf val doculib_icon : GdkPixbuf.pixbuf
(******************************************************************************) (* DocuLib *) (* Copyright (C) 2022 Nathan Guermond *) (* *) (* This program is free software: you can redistribute it and/or modify it *) (* under the terms of the GNU General Public License as published by the Free *) (* Software Foundation, either version 3 of the License, or (at your option) *) (* any later version. *) (* *) (* This program is distributed in the hope that it will be useful, but *) (* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *) (* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *) (* for more details. *) (* *) (* You should have received a copy of the GNU General Public License along *) (* with this program. If not, see <https://www.gnu.org/licenses/>. *) (* *) (******************************************************************************)
vararg1.c
/* VA.C: The program below illustrates passing a variable * number of arguments using the following macros: * va_start va_arg va_end * va_list va_dcl (UNIX only) */ #include <stdio.h> #include <stdarg.h> int average( int first, ... ); union vararg_average { int ints; /* We only pass ints to this one */ }; #include "testharness.h" int main( void ) { /* Call with 3 integers (-1 is used as terminator). */ if(average( 2, 3, 4, -1 ) != 3) E(1); if(average( 5, 7, 9, 11, 13, -1 ) != 9) E(2); if(average( -1 ) != 0) E(3); SUCCESS; } /* Returns the average of a variable list of integers. */ int average( int first, ... ) { int count = 0, sum = 0, i = first; va_list marker; va_start( marker, first ); /* Initialize variable arguments. */ while( i != -1 ) { sum += i; count++; i = va_arg( marker, int); } va_end( marker ); /* Reset variable arguments. */ return( sum ? (sum / count) : 0 ); } // Put this intentionally at the end #pragma ccuredvararg("average", sizeof(union vararg_average))
loc.ml
include Loc0 module O = Comparable.Make (Loc0) include O let in_file p = let pos = none_pos (Path.to_string p) in { start = pos; stop = pos } let in_dir = in_file let drop_position (t : t) = let pos = none_pos t.start.pos_fname in { start = pos; stop = pos } let of_lexbuf lexbuf : t = { start = Lexing.lexeme_start_p lexbuf; stop = Lexing.lexeme_end_p lexbuf } let equal_position { Lexing.pos_fname = f_a; pos_lnum = l_a; pos_bol = b_a; pos_cnum = c_a } { Lexing.pos_fname = f_b; pos_lnum = l_b; pos_bol = b_b; pos_cnum = c_b } = f_a = f_b && l_a = l_b && b_a = b_b && c_a = c_b let equal { start = start_a; stop = stop_a } { start = start_b; stop = stop_b } = equal_position start_a start_b && equal_position stop_a stop_b let of_pos (fname, lnum, cnum, enum) = let pos : Lexing.position = { pos_fname = fname; pos_lnum = lnum; pos_cnum = cnum; pos_bol = 0 } in { start = pos; stop = { pos with pos_cnum = enum } } let is_none = equal none let to_file_colon_line t = Printf.sprintf "%s:%d" t.start.pos_fname t.start.pos_lnum let to_dyn_hum t : Dyn.t = String (to_file_colon_line t) let pp_file_colon_line t = Pp.verbatim (to_file_colon_line t) let pp_left_pad n s = let needed_spaces = n - String.length s in Pp.verbatim (if needed_spaces > 0 then String.make needed_spaces ' ' ^ s else s) let pp_line padding_width (lnum, l) = let open Pp.O in pp_left_pad padding_width lnum ++ Pp.verbatim " | " ++ Pp.verbatim l ++ Pp.newline type tag = Loc let pp_file_excerpt ~context_lines ~max_lines_to_print_in_full { start; stop } : tag Pp.t = let start_c = start.pos_cnum - start.pos_bol in let stop_c = stop.pos_cnum - start.pos_bol in let file = start.pos_fname in let pp_file_excerpt () = let line_num = start.pos_lnum in let line_num_str = string_of_int line_num in let padding_width = String.length line_num_str in let open Result.O in let* line = Result.try_with (fun () -> Io.String_path.file_line file line_num) in if stop_c <= String.length line then let len = stop_c - start_c in let open Pp.O in Ok (pp_line padding_width (line_num_str, line) ++ pp_left_pad (stop_c + padding_width + 3) (String.make len '^') ++ Pp.newline) else let get_padding lines = let lnum, _ = Option.value_exn (List.last lines) in String.length lnum in let print_ellipsis padding_width = (* We add 2 to the width of max line to account for the extra space and the `|` character at the end of a line number *) let line = String.make (padding_width + 2) '.' in let open Pp.O in Pp.verbatim line ++ Pp.newline in let print_lines lines padding_width = Pp.concat_map lines ~f:(pp_line padding_width) in let file_lines ~start ~stop = Result.try_with (fun () -> Io.String_path.file_lines file ~start ~stop) in let num_lines = stop.pos_lnum - start.pos_lnum in if num_lines <= max_lines_to_print_in_full then let+ lines = file_lines ~start:start.pos_lnum ~stop:stop.pos_lnum in print_lines lines (get_padding lines) else (* We need to send the padding width from the last four lines so the two blocks of lines align if they have different number of digits in their line numbers *) let* first_shown_lines = file_lines ~start:start.pos_lnum ~stop:(start.pos_lnum + context_lines) in let+ last_shown_lines = file_lines ~start:(stop.pos_lnum - context_lines) ~stop:stop.pos_lnum in let padding_width = get_padding last_shown_lines in let open Pp.O in print_lines first_shown_lines padding_width ++ print_ellipsis padding_width ++ print_lines last_shown_lines padding_width in let whole_file = start_c = 0 && stop_c = 0 in if whole_file then Pp.nop else match let open Result.O in let* exists = Result.try_with (fun () -> Sys.file_exists start.pos_fname) in if exists then pp_file_excerpt () else Result.Ok Pp.nop with | Ok pp -> pp | Error exn -> let backtrace = Printexc.get_backtrace () in Format.eprintf "Raised when trying to print location contents of %s@.%a@." file (Exn.pp_uncaught ~backtrace) exn; Pp.nop let pp ({ start; stop } as loc) = let start_c = start.pos_cnum - start.pos_bol in let stop_c = stop.pos_cnum - start.pos_bol in let open Pp.O in Pp.tag Loc (Pp.verbatim (Printf.sprintf "File \"%s\", line %d, characters %d-%d:" start.pos_fname start.pos_lnum start_c stop_c)) ++ Pp.newline ++ pp_file_excerpt ~context_lines:2 ~max_lines_to_print_in_full:10 loc let on_same_line loc1 loc2 = let start1 = loc1.start in let start2 = loc2.start in let same_file = String.equal start1.pos_fname start2.pos_fname in let same_line = Int.equal start1.pos_lnum start2.pos_lnum in same_file && same_line let span begin_ end_ = { begin_ with stop = end_.stop } let rec render ppf pp = Pp.to_fmt_with_tags ppf pp ~tag_handler:(fun ppf Loc pp -> Format.fprintf ppf "@{<loc>%a@}" render pp)
current_term.ml
module S = S module Output = Output module Make (Metadata : sig type t end) = struct type description = string type 'a primitive = ('a Output.t * Metadata.t option) Current_incr.t module Node = Node.Make(Metadata) open Node type 'a t = 'a Node.t module Quick_stats = struct let v = ref { S. ok = 0; waiting_for_confirmation = 0; ready = 0; running = 0; failed = 0; blocked = 0; (* Calculated from [quick_stats_total] *) } (* The expected total of all the values in [quick_stats]. If [v] doesn't add up to this, the missing ones are assumed to be blocked. *) let total = ref 0 let dec_ok () = v := { !v with ok = !v.ok - 1 } let dec_waiting_for_confirmation () = v := { !v with waiting_for_confirmation = !v.waiting_for_confirmation - 1 } let dec_ready () = v := { !v with ready = !v.ready - 1 } let dec_running () = v := { !v with running = !v.running - 1 } let dec_failed () = v := { !v with failed = !v.failed - 1 } let update ~id : _ Dyn.t -> unit = function | Ok _ -> v := { !v with ok = !v.ok + 1 }; Current_incr.on_release dec_ok | Error (src, _) when not (Id.equal src id) -> () | Error (_, `Active `Waiting_for_confirmation) -> v := { !v with waiting_for_confirmation = !v.waiting_for_confirmation + 1 }; Current_incr.on_release dec_waiting_for_confirmation | Error (_, `Active `Ready) -> v := { !v with ready = !v.ready + 1 }; Current_incr.on_release dec_ready | Error (_, `Active `Running) -> v := { !v with running = !v.running + 1 }; Current_incr.on_release dec_running | Error (_, `Msg _) -> v := { !v with failed = !v.failed + 1 }; Current_incr.on_release dec_failed let dec_total () = decr total let update_total () = incr total; Current_incr.on_release dec_total let get () = let v = !v in { v with blocked = !total - v.ok - v.ready - v.running - v.failed } end let bind_context : bind_context ref = ref None let node ?(id=Id.mint ()) ty v = { id; v; ty; bind = !bind_context } let with_bind_context bc f = let old = !bind_context in bind_context := Some bc; Fun.protect (fun () -> f ()) ~finally:(fun () -> bind_context := old) let with_id id = function | Ok _ as v -> v | Error e -> Error (id, e) let active s = let id = Id.mint () in node ~id (Constant None) @@ Current_incr.const (Dyn.active ~id s) let return ?label x = node (Constant label) @@ Current_incr.const (Dyn.return x) let map_input ~label source x = node (Map_input {source = Term source; info = label}) @@ Current_incr.const x let option_input source x = node (Opt_input {source = Term source }) @@ Current_incr.const x let fail msg = let id = Id.mint () in node ~id (Constant None) @@ Current_incr.const (Dyn.fail ~id msg) let incr_map ?eq fn v = Current_incr.map ~eq:(Dyn.equal ?eq) fn v let state ?(hidden=false) t = let eq = Output.equal (==) in node (State { source = Term t; hidden }) @@ incr_map ~eq Dyn.state t.v let catch ?(hidden=false) t = let eq = Result.equal ~ok:(==) ~error:(==) in node (Catch { source = Term t; hidden }) @@ incr_map ~eq Dyn.catch t.v let component fmt = Fmt.str ("@[<v>" ^^ fmt ^^ "@]") let join ?eq x = Current_incr.of_cc begin Current_incr.read x @@ fun y -> Current_incr.read y.v @@ Current_incr.write ~eq:(Dyn.equal ?eq) end let bind ?(info="") ?eq (f:'a -> 'b t) (x:'a t) = Quick_stats.update_total (); let bind_in = node (Bind_in (Term x, info)) x.v in let t = x.v |> Current_incr.map @@ fun v -> Quick_stats.update ~id:x.id v; with_bind_context (Term bind_in) @@ fun () -> match v with | Error _ as e -> node (Constant None) @@ Current_incr.const e | Ok y -> f y in let nested = Current_incr.map (fun t -> Term t) t in node (Bind_out nested) (join ?eq t) let map ?eq f x = let id = Id.mint () in node ~id (Map (Term x)) @@ incr_map ?eq (Dyn.map ~id f) x.v let cutoff ~eq x = map ~eq (fun x -> x) x let map_error f x = let id = Id.mint () in node ~id (Map (Term x)) @@ incr_map (Dyn.map_error ~id f) x.v let ignore_value x = map ignore x let pair a b = node (Pair (Term a, Term b)) @@ Current_incr.of_cc begin Current_incr.read a.v @@ fun a -> Current_incr.read b.v @@ fun b -> Current_incr.write @@ Dyn.pair a b end let primitive ~info (f:'a -> 'b primitive) (x:'a t) = Quick_stats.update_total (); let id = Id.mint () in let v_meta = Current_incr.of_cc begin Current_incr.read x.v @@ function | Error _ as e -> Current_incr.write (e, None) | Ok y -> let output = f y in Current_incr.read output @@ fun (v, job) -> let v = with_id id v in Quick_stats.update ~id v; Current_incr.write (v, job) end in let v = incr_map fst v_meta in let meta = Current_incr.map snd v_meta in node ~id (Primitive { x = Term x; info; meta }) v module Syntax = struct let (let**) x f info = bind ~info f x let (let>) x f info = primitive ~info f x let (and>) = pair let (let*) x f = bind f x let (and*) = pair let (let+) x f = map f x let (and+) = pair end open Syntax let collapse ~key ~value ~input t = node (Collapse { key; value; input = Term input; output = Term t }) t.v let with_context (ctx : _ t) f = let ctx = match !bind_context with | None -> Term ctx | Some (Term prev) -> Term (pair prev ctx) in with_bind_context ctx f let rec all = function | [] -> return () | [x] -> x | x :: xs -> let+ () = x and+ () = all xs in () let all_labelled items = let rec aux = function | [] -> return (Ok ()) | (l, x) :: xs -> let+ x = catch x ~hidden:true and+ xs = aux xs in match x with | Ok () -> xs | Error (`Msg e) -> match xs with | Ok () -> Error (`Same ([l], e)) | Error (`Same (ls, e2)) when e = e2 -> Error (`Same (l :: ls, e)) | Error (`Same (ls, _)) | Error (`Diff ls) -> Error (`Diff (l :: ls)) in "all" |> let** results = aux items in match results with | Ok () -> return () | Error (`Same (ls, e)) -> fail (Fmt.str "%a failed: %s" Fmt.(list ~sep:(any ", ") string) ls e) | Error (`Diff ls) -> fail (Fmt.str "%a failed" Fmt.(list ~sep:(any ", ") string) ls) (* A node with the constant value [v], but that depends on [old]. *) let replace old v = { id = Id.mint (); v = Current_incr.const v; ty = Constant None; bind = Some (Term old) } let option_map (type a b) ?label (f : a t -> b t) (input : a option t) : b option t = let results = input.v |> Current_incr.map @@ function | Error _ as r -> (* Not ready; use static version. *) let output = f (option_input input r) in replace output r | Ok None -> (* Show what would have been done. *) let no_item = Error (Id.mint (), `Active `Ready) in let output = f (option_input input no_item) in replace output (Ok None) | Ok (Some item) -> let output = f (option_input input (Ok item)) in { output with v = Current_incr.map (Result.map Option.some) output.v } in let output = Current_incr.map (fun x -> Term x) results in node (Option_map { item = Term input; output; label }) (join results) let option_iter (type a) ?label (f : a t -> unit t) (input : a option t) = let+ (_ : unit option) = option_map ?label f input in () let rec list_seq : 'a t list -> 'a list t = function | [] -> return [] | x :: xs -> let+ y = x and+ ys = list_seq xs in y :: ys let collapse_list ~key ~value ~input t = let all_of_them = list_seq t in let collapse_node = node (Collapse { key; value; input = Term input; output = Term all_of_them }) all_of_them.v in List.map (fun t -> node (Map (Term collapse_node)) t.v) t, collapse_node |> map (fun _ -> ()) let list_map (type a) (module M : S.ORDERED with type t = a) ?collapse_key ?label (f : a t -> 'b t) (input : a list t) = let module Map = Map.Make(M) in let module Sep = Current_incr.Separate(Map) in (* Stage 1 : convert input list to a set. This runs whenever the input list changes. *) let as_map = input.v |> Current_incr.map @@ function | Ok items -> items |> List.fold_left (fun acc x -> Map.add x () acc) Map.empty | _ -> Map.empty in (* Stage 2 : process each element separately. We only process an element when it is first added to the set, not on every change to the set. *) let results = Sep.map as_map @@ fun item -> let label = Fmt.to_to_string M.pp item in let input = map_input ~label:(Ok label) input (Ok item) in let output = f input in match collapse_key with | None -> Current_incr.write output | Some key -> Current_incr.write (collapse ~key ~value:label ~input output) in (* Stage 3 : combine results. This runs whenever either the set of results changes, or the input list changes (since the output order might need to change). *) let results = Current_incr.of_cc begin Current_incr.read input.v @@ function | Error _ as r -> (* Not ready; use static version of map. *) let output = f (map_input input ~label:(Error `Blocked) r) in Current_incr.write @@ replace output r | Ok [] -> (* Empty list; show what would have been done. *) let no_items = Error (Id.mint (), `Active `Ready) in let output = f (map_input input ~label:(Error `Empty_list) no_items) in Current_incr.write @@ replace output (Ok []) | Ok items -> Current_incr.read results @@ fun results -> (* Convert result set to a results list. *) let results = items |> List.map (fun item -> Map.find item results) |> list_seq in Current_incr.write results end in let output = Current_incr.map (fun x -> Term x) results in node (List_map { items = Term input; output; label }) (join results) let list_iter (type a) (module M : S.ORDERED with type t = a) ?collapse_key ?label f (xs : a list t) = let+ (_ : unit list) = list_map (module M) ?collapse_key ?label f xs in () let option_seq : 'a t option -> 'a option t = function | None -> return None | Some x -> let+ y = x in Some y let gate ~on t = let eq = Dyn.equal ~eq:(==) in node (Gate_on { ctrl = Term on; value = Term t }) @@ Current_incr.of_cc begin Current_incr.read t.v @@ fun t -> Current_incr.read on.v @@ fun on -> Current_incr.write ~eq @@ Dyn.bind on (fun () -> t) end let of_output x = let id = Id.mint () in let x = with_id id x in Quick_stats.update_total (); Quick_stats.update ~id x; node ~id (Constant None) @@ Current_incr.const x module Executor = struct let run (t : 'a t) = Current_incr.map Dyn.run t.v end module Analysis = struct include Analysis.Make(Metadata) (* This is a bit of a hack. *) let metadata t = let rec aux (Term t) = match t.ty with | Primitive p -> p.meta | Map t -> aux t | _ -> failwith "metadata: this is not a primitive term!" in node (Constant None) @@ Current_incr.map Result.ok @@ aux (Term t) let quick_stat = Quick_stats.get end end
describeMetricCollectionTypes.ml
open Types open Aws type input = Aws.BaseTypes.Unit.t type output = DescribeMetricCollectionTypesAnswer.t type error = Errors_internal.t let service = "autoscaling" let signature_version = Request.V4 let to_http service region req = let uri = Uri.add_query_params (Uri.of_string (Aws.Util.of_option_exn (Endpoints.url_of service region))) (List.append [("Version", ["2011-01-01"]); ("Action", ["DescribeMetricCollectionTypes"])] (Util.drop_empty (Uri.query_of_encoded (Query.render (Aws.BaseTypes.Unit.to_query req))))) in (`POST, uri, []) let of_http body = try let xml = Ezxmlm.from_string body in let resp = Util.option_bind (Xml.member "DescribeMetricCollectionTypesResponse" (snd xml)) (Xml.member "DescribeMetricCollectionTypesResult") in try Util.or_error (Util.option_bind resp DescribeMetricCollectionTypesAnswer.parse) (let open Error in BadResponse { body; message = "Could not find well formed DescribeMetricCollectionTypesAnswer." }) with | Xml.RequiredFieldMissing msg -> let open Error in `Error (BadResponse { body; message = ("Error parsing DescribeMetricCollectionTypesAnswer - missing field in body or children: " ^ msg) }) with | Failure msg -> `Error (let open Error in BadResponse { body; message = ("Error parsing xml: " ^ msg) }) let parse_error code err = let errors = [Errors_internal.ResourceContention] @ Errors_internal.common in match Errors_internal.of_string err with | Some var -> if (List.mem var errors) && ((match Errors_internal.to_http_code var with | Some var -> var = code | None -> true)) then Some var else None | None -> None
pp_parsetree.mli
(**pp -syntax camlp5r *) (* camlp5r *) (* pp_parsetree.ml,v *) declare end;
(**pp -syntax camlp5r *) (* camlp5r *) (* pp_parsetree.ml,v *)
concat.c
#include "fq_nmod_poly.h" #ifdef T #undef T #endif #define T fq_nmod #define CAP_T FQ_NMOD #include "fq_poly_factor_templates/concat.c" #undef CAP_T #undef T
/* Copyright (C) 2013 Mike Hansen This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
target.h
#ifndef __KRML_TARGET_H #define __KRML_TARGET_H #include <stdlib.h> #include <stddef.h> #include <stdio.h> #include <stdbool.h> #include <inttypes.h> #include <limits.h> #include <assert.h> /* Since KaRaMeL emits the inline keyword unconditionally, we follow the * guidelines at https://gcc.gnu.org/onlinedocs/gcc/Inline.html and make this * __inline__ to ensure the code compiles with -std=c90 and earlier. */ #ifdef __GNUC__ # define inline __inline__ #endif /******************************************************************************/ /* Macros that KaRaMeL will generate. */ /******************************************************************************/ /* For "bare" targets that do not have a C stdlib, the user might want to use * [-add-early-include '"mydefinitions.h"'] and override these. */ #ifndef KRML_HOST_PRINTF # define KRML_HOST_PRINTF printf #endif #if ( \ (defined __STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) && \ (!(defined KRML_HOST_EPRINTF))) # define KRML_HOST_EPRINTF(...) fprintf(stderr, __VA_ARGS__) #elif !(defined KRML_HOST_EPRINTF) && defined(_MSC_VER) # define KRML_HOST_EPRINTF(...) fprintf(stderr, __VA_ARGS__) #endif #ifndef KRML_HOST_EXIT # define KRML_HOST_EXIT exit #endif #ifndef KRML_HOST_MALLOC # define KRML_HOST_MALLOC malloc #endif #ifndef KRML_HOST_CALLOC # define KRML_HOST_CALLOC calloc #endif #ifndef KRML_HOST_FREE # define KRML_HOST_FREE free #endif #ifndef KRML_HOST_IGNORE # define KRML_HOST_IGNORE(x) (void)(x) #endif #ifndef KRML_PRE_ALIGN # ifdef _MSC_VER # define KRML_PRE_ALIGN(X) __declspec(align(X)) # else # define KRML_PRE_ALIGN(X) # endif #endif #ifndef KRML_POST_ALIGN # ifdef _MSC_VER # define KRML_POST_ALIGN(X) # else # define KRML_POST_ALIGN(X) __attribute__((aligned(X))) # endif #endif /* MinGW-W64 does not support C11 aligned_alloc, but it supports * MSVC's _aligned_malloc. */ #ifndef KRML_ALIGNED_MALLOC # ifdef __MINGW32__ # include <_mingw.h> # endif # if (defined(_MSC_VER) || (defined(__MINGW32__) && defined(__MINGW64_VERSION_MAJOR))) # define KRML_ALIGNED_MALLOC(X, Y) _aligned_malloc(Y, X) # else # define KRML_ALIGNED_MALLOC(X, Y) aligned_alloc(X, Y) # endif #endif /* Since aligned allocations with MinGW-W64 are done with * _aligned_malloc (see above), such pointers must be freed with * _aligned_free. */ #ifndef KRML_ALIGNED_FREE # ifdef __MINGW32__ # include <_mingw.h> # endif # if (defined(_MSC_VER) || (defined(__MINGW32__) && defined(__MINGW64_VERSION_MAJOR))) # define KRML_ALIGNED_FREE(X) _aligned_free(X) # else # define KRML_ALIGNED_FREE(X) free(X) # endif #endif #ifndef KRML_HOST_TIME # include <time.h> /* Prims_nat not yet in scope */ inline static int32_t krml_time() { return (int32_t)time(NULL); } # define KRML_HOST_TIME krml_time #endif /* In statement position, exiting is easy. */ #define KRML_EXIT \ do { \ KRML_HOST_PRINTF("Unimplemented function at %s:%d\n", __FILE__, __LINE__); \ KRML_HOST_EXIT(254); \ } while (0) /* In expression position, use the comma-operator and a malloc to return an * expression of the right size. KaRaMeL passes t as the parameter to the macro. */ #define KRML_EABORT(t, msg) \ (KRML_HOST_PRINTF("KaRaMeL abort at %s:%d\n%s\n", __FILE__, __LINE__, msg), \ KRML_HOST_EXIT(255), *((t *)KRML_HOST_MALLOC(sizeof(t)))) /* In FStar.Buffer.fst, the size of arrays is uint32_t, but it's a number of * *elements*. Do an ugly, run-time check (some of which KaRaMeL can eliminate). */ #if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4)) # define _KRML_CHECK_SIZE_PRAGMA \ _Pragma("GCC diagnostic ignored \"-Wtype-limits\"") #else # define _KRML_CHECK_SIZE_PRAGMA #endif #define KRML_CHECK_SIZE(size_elt, sz) \ do { \ _KRML_CHECK_SIZE_PRAGMA \ if (((size_t)(sz)) > ((size_t)(SIZE_MAX / (size_elt)))) { \ KRML_HOST_PRINTF( \ "Maximum allocatable size exceeded, aborting before overflow at " \ "%s:%d\n", \ __FILE__, __LINE__); \ KRML_HOST_EXIT(253); \ } \ } while (0) #if defined(_MSC_VER) && _MSC_VER < 1900 # define KRML_HOST_SNPRINTF(buf, sz, fmt, arg) _snprintf_s(buf, sz, _TRUNCATE, fmt, arg) #else # define KRML_HOST_SNPRINTF(buf, sz, fmt, arg) snprintf(buf, sz, fmt, arg) #endif #if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 4)) # define KRML_DEPRECATED(x) __attribute__((deprecated(x))) #elif defined(__GNUC__) /* deprecated attribute is not defined in GCC < 4.5. */ # define KRML_DEPRECATED(x) #elif defined(_MSC_VER) # define KRML_DEPRECATED(x) __declspec(deprecated(x)) #endif /* Macros for prettier unrolling of loops */ #define KRML_LOOP1(i, n, x) { \ x \ i += n; \ } #define KRML_LOOP2(i, n, x) \ KRML_LOOP1(i, n, x) \ KRML_LOOP1(i, n, x) #define KRML_LOOP3(i, n, x) \ KRML_LOOP2(i, n, x) \ KRML_LOOP1(i, n, x) #define KRML_LOOP4(i, n, x) \ KRML_LOOP2(i, n, x) \ KRML_LOOP2(i, n, x) #define KRML_LOOP5(i, n, x) \ KRML_LOOP4(i, n, x) \ KRML_LOOP1(i, n, x) #define KRML_LOOP6(i, n, x) \ KRML_LOOP4(i, n, x) \ KRML_LOOP2(i, n, x) #define KRML_LOOP7(i, n, x) \ KRML_LOOP4(i, n, x) \ KRML_LOOP3(i, n, x) #define KRML_LOOP8(i, n, x) \ KRML_LOOP4(i, n, x) \ KRML_LOOP4(i, n, x) #define KRML_LOOP9(i, n, x) \ KRML_LOOP8(i, n, x) \ KRML_LOOP1(i, n, x) #define KRML_LOOP10(i, n, x) \ KRML_LOOP8(i, n, x) \ KRML_LOOP2(i, n, x) #define KRML_LOOP11(i, n, x) \ KRML_LOOP8(i, n, x) \ KRML_LOOP3(i, n, x) #define KRML_LOOP12(i, n, x) \ KRML_LOOP8(i, n, x) \ KRML_LOOP4(i, n, x) #define KRML_LOOP13(i, n, x) \ KRML_LOOP8(i, n, x) \ KRML_LOOP5(i, n, x) #define KRML_LOOP14(i, n, x) \ KRML_LOOP8(i, n, x) \ KRML_LOOP6(i, n, x) #define KRML_LOOP15(i, n, x) \ KRML_LOOP8(i, n, x) \ KRML_LOOP7(i, n, x) #define KRML_LOOP16(i, n, x) \ KRML_LOOP8(i, n, x) \ KRML_LOOP8(i, n, x) #define KRML_UNROLL_FOR(i, z, n, k, x) do { \ uint32_t i = z; \ KRML_LOOP##n(i, k, x) \ } while (0) #define KRML_ACTUAL_FOR(i, z, n, k, x) \ do { \ for (uint32_t i = z; i < n; i += k) { \ x \ } \ } while (0) #ifndef KRML_UNROLL_MAX #define KRML_UNROLL_MAX 16 #endif /* 1 is the number of loop iterations, i.e. (n - z)/k as evaluated by krml */ #if 0 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR0(i, z, n, k, x) #else #define KRML_MAYBE_FOR0(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 1 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR1(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 1, k, x) #else #define KRML_MAYBE_FOR1(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 2 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR2(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 2, k, x) #else #define KRML_MAYBE_FOR2(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 3 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR3(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 3, k, x) #else #define KRML_MAYBE_FOR3(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 4 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR4(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 4, k, x) #else #define KRML_MAYBE_FOR4(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 5 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR5(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 5, k, x) #else #define KRML_MAYBE_FOR5(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 6 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR6(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 6, k, x) #else #define KRML_MAYBE_FOR6(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 7 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR7(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 7, k, x) #else #define KRML_MAYBE_FOR7(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 8 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR8(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 8, k, x) #else #define KRML_MAYBE_FOR8(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 9 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR9(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 9, k, x) #else #define KRML_MAYBE_FOR9(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 10 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR10(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 10, k, x) #else #define KRML_MAYBE_FOR10(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 11 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR11(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 11, k, x) #else #define KRML_MAYBE_FOR11(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 12 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR12(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 12, k, x) #else #define KRML_MAYBE_FOR12(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 13 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR13(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 13, k, x) #else #define KRML_MAYBE_FOR13(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 14 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR14(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 14, k, x) #else #define KRML_MAYBE_FOR14(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 15 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR15(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 15, k, x) #else #define KRML_MAYBE_FOR15(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 16 <= KRML_UNROLL_MAX #define KRML_MAYBE_FOR16(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 16, k, x) #else #define KRML_MAYBE_FOR16(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #endif
/* Copyright (c) INRIA and Microsoft Corporation. All rights reserved. Licensed under the Apache 2.0 License. */
t-mullow_KS.c
#ifdef T #include "templates.h" #include <stdio.h> #include <stdlib.h> #include "ulong_extras.h" #include "long_extras.h" int main(void) { int i, result; FLINT_TEST_INIT(state); flint_printf("mullow_KS... "); fflush(stdout); /* Compare with truncated product of a and b */ for (i = 0; i < 200 * flint_test_multiplier(); i++) { TEMPLATE(T, ctx_t) ctx; TEMPLATE(T, poly_t) a, b, c, d; slong n; TEMPLATE(T, ctx_randtest) (ctx, state); TEMPLATE(T, poly_init) (a, ctx); TEMPLATE(T, poly_init) (b, ctx); TEMPLATE(T, poly_init) (c, ctx); TEMPLATE(T, poly_init) (d, ctx); TEMPLATE(T, poly_randtest) (a, state, n_randint(state, 100), ctx); TEMPLATE(T, poly_randtest) (b, state, n_randint(state, 100), ctx); n = n_randint(state, 100); TEMPLATE(T, poly_mullow_KS) (c, a, b, n, ctx); TEMPLATE(T, poly_mul) (d, a, b, ctx); TEMPLATE(T, poly_truncate) (d, n, ctx); result = (TEMPLATE(T, poly_equal) (c, d, ctx)); if (!result) { flint_printf("FAIL:\n\n"); flint_printf("a = "), TEMPLATE(T, poly_print_pretty) (a, "X", ctx), flint_printf("\n"); flint_printf("b = "), TEMPLATE(T, poly_print_pretty) (b, "X", ctx), flint_printf("\n"); flint_printf("c = "), TEMPLATE(T, poly_print_pretty) (c, "X", ctx), flint_printf("\n"); flint_printf("d = "), TEMPLATE(T, poly_print_pretty) (d, "X", ctx), flint_printf("\n"); fflush(stdout); flint_abort(); } TEMPLATE(T, poly_clear) (a, ctx); TEMPLATE(T, poly_clear) (b, ctx); TEMPLATE(T, poly_clear) (c, ctx); TEMPLATE(T, poly_clear) (d, ctx); TEMPLATE(T, ctx_clear) (ctx); } FLINT_TEST_CLEANUP(state); flint_printf("PASS\n"); return EXIT_SUCCESS; } #endif
/* Copyright (C) 2012 Sebastian Pancratz Copyright (C) 2013 Mike Hansen This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
dune
(executable (name discover) (libraries dune-configurator))
batOrd.mli
type order = Lt | Eq | Gt (** An algebraic datatype for ordering. Traditional OCaml code, under the influence of C comparison functions, has used int-returning comparisons (< 0, 0 or > 0). Using an algebraic datatype instead is actually nicer, both for comparison producers (no arbitrary choice of a positive and negative value) and consumers (nice pattern-matching elimination). *) type 'a ord = 'a -> 'a -> order (** The type of ordering functions returning an [order] variant. *) type 'a comp = 'a -> 'a -> int (** The legacy int-returning comparisons : - compare a b < 0 means a < b - compare a b = 0 means a = b - compare a b > 0 means a > b *) module type Comp = sig type t val compare : t comp end (** We use [compare] as member name instead of [comp], so that the Comp modules can be used as the legacy OrderedType interface. *) module type Ord = sig type t val ord : t ord end val ord0 : int -> order val ord : 'a comp -> 'a ord (** Returns a variant ordering from a legacy comparison *) module Ord : functor (Comp : Comp) -> Ord with type t = Comp.t val comp0 : order -> int val comp : 'a ord -> 'a comp (** Returns an legacy comparison from a variant ordering *) module Comp : functor (Ord : Ord) -> Comp with type t = Ord.t val poly_comp : 'a comp val poly_ord : 'a ord val poly : 'a ord (** Polymorphic comparison functions, based on the [Pervasives.compare] function from inria's stdlib, have polymorphic types: they claim to be able to compare values of any type. In practice, they work for only some types, may fail on function types and may not terminate on cyclic values. They work by runtime magic, inspecting the values in an untyped way. While being an useful hack for base types and simple composite types (say [(int * float) list], they do not play well with functions, type abstractions, and structures that would need a finer notion of equality/comparison. For example, if one represent sets as balanced binary tree, one may want set with equal elements but different balancings to be equal, which would not be the case using the polymorphic equality function. When possible, you should therefore avoid relying on these polymorphic comparison functions. You should be especially careful if your data structure may later evolve to allow cyclic data structures or functions. *) val rev_ord0 : order -> order val rev_comp0 : int -> int val rev_ord : 'a ord -> 'a ord val rev_comp : 'a comp -> 'a comp val rev : 'a ord -> 'a ord (** Reverse a given ordering. If [Int.ord] sorts integer by increasing order, [rev Int.ord] will sort them by decreasing order. *) module RevOrd (Ord : Ord) : Ord with type t = Ord.t module RevComp (Comp : Comp) : Comp with type t = Comp.t module Rev (Ord : Ord) : Ord with type t = Ord.t type 'a eq = 'a -> 'a -> bool (** The type for equality function. All ordered types also support equality, as equality can be derived from ordering. However, there are also cases where elements may be compared for equality, but have no natural ordering. It is therefore useful to provide equality as an independent notion. *) val eq_ord0 : order -> bool val eq_comp0 : int -> bool val eq_ord : 'a ord -> 'a eq val eq_comp : 'a comp -> 'a eq val eq : 'a ord -> 'a eq (** Derives an equality function from an ordering function. *) module type Eq = sig type t val eq : t eq end module EqOrd (Ord : Ord) : Eq with type t = Ord.t module EqComp (Comp : Comp) : Eq with type t = Comp.t module Eq (Ord : Ord) : Eq with type t = Ord.t type 'a choice = 'a -> 'a -> 'a (** choice functions, see [min] and [max]. *) val min_ord : 'a ord -> 'a choice val max_ord : 'a ord -> 'a choice val min_comp : 'a comp -> 'a choice val max_comp : 'a comp -> 'a choice val min : 'a ord -> 'a choice (** [min ord] will choose the smallest element, according to [ord]. For example, [min Int.ord 1 2] will return [1]. {[ (* the minimum element of a list *) let list_min ord = List.reduce (min ord) ]} *) val max : 'a ord -> 'a choice (** [max ord] will choose the biggest element according to [ord]. *) val bin_comp : 'a comp -> 'a -> 'a -> 'b comp -> 'b -> 'b -> int val bin_ord : 'a ord -> 'a -> 'a -> 'b ord -> 'b -> 'b -> order (** binary lifting of the comparison function, using lexicographic order: [bin_ord ord1 v1 v1' ord2 v2 v2'] is [ord2 v2 v2'] if [ord1 v1 v1' = Eq], and [ord1 v1 v1'] otherwise. *) val bin_eq : 'a eq -> 'a -> 'a -> 'b eq -> 'b -> 'b -> bool val map_eq : ('a -> 'b) -> 'b eq -> 'a eq val map_comp : ('a -> 'b) -> 'b comp -> 'a comp val map_ord : ('a -> 'b) -> 'b ord -> 'a ord (** These functions extend an existing equality/comparison/ordering to a new domain through a mapping function. For example, to order sets by their cardinality, use [map_ord Set.cardinal Int.ord]. The input of the mapping function is the type you want to compare, so this is the reverse of [List.map]. *) module Incubator : sig val eq_by : ('a -> 'b) -> 'a eq val comp_by : ('a -> 'b) -> 'a comp val ord_by : ('a -> 'b) -> 'a ord (** Build a [eq], [cmp] or [ord] function from a projection function. For example, if you wanted to compare integers based on their lowest 4 bits, you could write [let cmp_bot4 = cmp_by (fun x -> x land 0xf)] and use cmp_bot4 as the desired integer comparator. *) end
tippy.mli
type t val t_of_js : Ojs.t -> t val t_to_js : t -> Ojs.t type props = { trigger: string option } type options = { content: string; trigger: string option } val create : Js_browser.Element.t -> options -> t [@@js.global "tippy"] val set_content : t -> string -> unit [@@js.call "setContent"] val set_props : t -> props -> unit [@@js.call "setProps"] val destroy : t -> unit
google_protobuf_source_context_pc.ml
[@@@ocaml.warning "-39"] let (>>=) = Runtime.Result.(>>=) let (>>|) = Runtime.Result.(>>|) module Field' = Runtime.Field_value module Bin' = Runtime.Binary_format module Text' = Runtime.Text_format module rec Source_context : sig type t = { file_name : string; } [@@deriving eq, show] val to_binary : t -> (string, [> Bin'.serialization_error]) result val of_binary : string -> (t, [> Bin'.deserialization_error]) result val to_text : t -> (string, [> Text'.serialization_error]) result val of_text : string -> (t, [> Text'.deserialization_error]) result end = struct type t = { file_name : string; } [@@deriving eq, show] let rec to_binary = fun { file_name } -> let _o = Runtime.Byte_output.create () in Bin'.serialize_field 1 Field'.String_t file_name _o >>= fun () -> Ok (Runtime.Byte_output.contents _o) let rec of_binary = fun input' -> Ok (Runtime.Byte_input.create input') >>= Bin'.deserialize_message >>= fun _m -> Bin'.decode_field 1 Field'.String_t _m >>= fun file_name -> Ok { file_name } let rec to_text = fun { file_name } -> let _o = Runtime.Byte_output.create () in Text'.serialize_field "file_name" Field'.String_t file_name _o >>= fun () -> Ok (Runtime.Byte_output.contents _o) let rec of_text = fun input' -> Ok (Runtime.Byte_input.create input') >>= Text'.deserialize_message >>= fun _m -> Text'.decode_field "file_name" Field'.String_t _m >>= fun file_name -> Ok { file_name } end
CCGraph.mli
(** Simple Graph Interface A collections of algorithms on (mostly read-only) graph structures. The user provides her own graph structure as a [('v, 'e) CCGraph.t], where ['v] is the type of vertices and ['e] the type of edges (for instance, ['e = ('v * 'v)] is perfectly fine in many cases). Such a [('v, 'e) CCGraph.t] structure is a record containing three functions: two relate edges to their origin and destination, and one maps vertices to their outgoing edges. This abstract notion of graph makes it possible to run the algorithms on any user-specific type that happens to have a graph structure. Many graph algorithms here take an iterator of vertices as input. The helper module {!Iter} contains basic functions for that, as does the [iter] library on opam. If the user only has a single vertex (e.g., for a topological sort from a given vertex), they can use [Iter.return x] to build a iter of one element. {b status: unstable} @since 0.12 *) (** {2 Iter Helpers} *) type 'a iter = ('a -> unit) -> unit (** A sequence of items of type ['a], possibly infinite @since 2.8 *) type 'a iter_once = 'a iter (** Iter that should be used only once @since 2.8 *) exception Iter_once (** Raised when a sequence meant to be used once is used several times. *) module Iter : sig type 'a t = 'a iter val return : 'a -> 'a t val ( >>= ) : 'a t -> ('a -> 'b t) -> 'b t val map : ('a -> 'b) -> 'a t -> 'b t val filter_map : ('a -> 'b option) -> 'a t -> 'b t val iter : ('a -> unit) -> 'a t -> unit val fold : ('b -> 'a -> 'b) -> 'b -> 'a t -> 'b val to_list : 'a t -> 'a list end (** {2 Interfaces for graphs} This interface is designed for oriented graphs with labels on edges *) type ('v, 'e) t = 'v -> ('e * 'v) iter (** Directed graph with vertices of type ['v] and edges labeled with [e'] *) type ('v, 'e) graph = ('v, 'e) t val make : ('v -> ('e * 'v) iter) -> ('v, 'e) t (** Make a graph by providing the children function. *) type 'v tag_set = { get_tag: 'v -> bool; set_tag: 'v -> unit; (** Set tag for the given element *) } (** {2 Tags} Mutable tags from values of type ['v] to tags of type [bool] *) type ('k, 'a) table = { mem: 'k -> bool; find: 'k -> 'a; (** @raise Not_found if element not added before *) add: 'k -> 'a -> unit; (** Erases previous binding *) } (** {2 Table} Mutable table with keys ['k] and values ['a] *) type 'a set = ('a, unit) table (** Mutable set *) val mk_table : eq:('k -> 'k -> bool) -> ?hash:('k -> int) -> int -> ('k, 'a) table (** Default implementation for {!table}: a {!Hashtbl.t}. *) val mk_map : cmp:('k -> 'k -> int) -> unit -> ('k, 'a) table (** Use a {!Map.S} underneath. *) (** {2 Bags of vertices} *) type 'a bag = { push: 'a -> unit; is_empty: unit -> bool; pop: unit -> 'a; (** raises some exception is empty *) } (** Bag of elements of type ['a] *) val mk_queue : unit -> 'a bag val mk_stack : unit -> 'a bag val mk_heap : leq:('a -> 'a -> bool) -> 'a bag (** [mk_heap ~leq] makes a priority queue where [leq x y = true] means that [x] is smaller than [y] and should be prioritary. *) (** {2 Traversals} *) module Traverse : sig type ('v, 'e) path = ('v * 'e * 'v) list val generic : tbl:'v set -> bag:'v bag -> graph:('v, 'e) t -> 'v iter -> 'v iter_once (** Traversal of the given graph, starting from a sequence of vertices, using the given bag to choose the next vertex to explore. Each vertex is visited at most once. *) val generic_tag : tags:'v tag_set -> bag:'v bag -> graph:('v, 'e) t -> 'v iter -> 'v iter_once (** One-shot traversal of the graph using a tag set and the given bag. *) val dfs : tbl:'v set -> graph:('v, 'e) t -> 'v iter -> 'v iter_once val dfs_tag : tags:'v tag_set -> graph:('v, 'e) t -> 'v iter -> 'v iter_once val bfs : tbl:'v set -> graph:('v, 'e) t -> 'v iter -> 'v iter_once val bfs_tag : tags:'v tag_set -> graph:('v, 'e) t -> 'v iter -> 'v iter_once val dijkstra : tbl:'v set -> ?dist:('e -> int) -> graph:('v, 'e) t -> 'v iter -> ('v * int * ('v, 'e) path) iter_once (** Dijkstra algorithm, traverses a graph in increasing distance order. Yields each vertex paired with its distance to the set of initial vertices (the smallest distance needed to reach the node from the initial vertices). @param dist distance from origin of the edge to destination, must be strictly positive. Default is 1 for every edge. *) val dijkstra_tag : ?dist:('e -> int) -> tags:'v tag_set -> graph:('v, 'e) t -> 'v iter -> ('v * int * ('v, 'e) path) iter_once (** {2 More detailed interface} *) module Event : sig type edge_kind = [ `Forward | `Back | `Cross ] type ('v, 'e) t = [ `Enter of 'v * int * ('v, 'e) path (* unique index in traversal, path from start *) | `Exit of 'v | `Edge of 'v * 'e * 'v * edge_kind ] (** A traversal is a sequence of such events *) val get_vertex : ('v, 'e) t -> ('v * [ `Enter | `Exit ]) option val get_enter : ('v, 'e) t -> 'v option val get_exit : ('v, 'e) t -> 'v option val get_edge : ('v, 'e) t -> ('v * 'e * 'v) option val get_edge_kind : ('v, 'e) t -> ('v * 'e * 'v * edge_kind) option val dfs : tbl:'v set -> eq:('v -> 'v -> bool) -> graph:('v, 'e) graph -> 'v iter -> ('v, 'e) t iter_once (** Full version of DFS. @param eq equality predicate on vertices. *) val dfs_tag : eq:('v -> 'v -> bool) -> tags:'v tag_set -> graph:('v, 'e) graph -> 'v iter -> ('v, 'e) t iter_once (** Full version of DFS using integer tags. @param eq equality predicate on vertices. *) end end (** {2 Cycles} *) val is_dag : tbl:'v set -> eq:('v -> 'v -> bool) -> graph:('v, _) t -> 'v iter -> bool (** [is_dag ~graph vs] returns [true] if the subset of [graph] reachable from [vs] is acyclic. @since 0.18 *) (** {2 Topological Sort} *) exception Has_cycle val topo_sort : eq:('v -> 'v -> bool) -> ?rev:bool -> tbl:'v set -> graph:('v, 'e) t -> 'v iter -> 'v list (** [topo_sort ~graph seq] returns a list of vertices [l] where each element of [l] is reachable from [seq]. The list is sorted in a way such that if [v -> v'] in the graph, then [v] comes before [v'] in the list (i.e. has a smaller index). Basically [v -> v'] means that [v] is smaller than [v']. See {{: https://en.wikipedia.org/wiki/Topological_sorting} wikipedia}. @param eq equality predicate on vertices (default [(=)]). @param rev if true, the dependency relation is inverted ([v -> v'] means [v'] occurs before [v]). @raise Has_cycle if the graph is not a DAG. *) val topo_sort_tag : eq:('v -> 'v -> bool) -> ?rev:bool -> tags:'v tag_set -> graph:('v, 'e) t -> 'v iter -> 'v list (** Same as {!topo_sort} but uses an explicit tag set. @raise Has_cycle if the graph is not a DAG. *) (** {2 Lazy Spanning Tree} *) module Lazy_tree : sig type ('v, 'e) t = { vertex: 'v; children: ('e * ('v, 'e) t) list Lazy.t } val map_v : ('a -> 'b) -> ('a, 'e) t -> ('b, 'e) t val fold_v : ('acc -> 'v -> 'acc) -> 'acc -> ('v, _) t -> 'acc end val spanning_tree : tbl:'v set -> graph:('v, 'e) t -> 'v -> ('v, 'e) Lazy_tree.t (** [spanning_tree ~graph v] computes a lazy spanning tree that has [v] as a root. The table [tbl] is used for the memoization part. *) val spanning_tree_tag : tags:'v tag_set -> graph:('v, 'e) t -> 'v -> ('v, 'e) Lazy_tree.t (** {2 Strongly Connected Components} *) type 'v scc_state (** Hidden state for {!scc}. *) val scc : tbl:('v, 'v scc_state) table -> graph:('v, 'e) t -> 'v iter -> 'v list iter_once (** Strongly connected components reachable from the given vertices. Each component is a list of vertices that are all mutually reachable in the graph. The components are explored in a topological order (if C1 and C2 are components, and C1 points to C2, then C2 will be yielded before C1). Uses {{: https://en.wikipedia.org/wiki/Tarjan's_strongly_connected_components_algorithm} Tarjan's algorithm}. @param tbl table used to map nodes to some hidden state. @raise Iter_once if the result is iterated on more than once. *) (** {2 Pretty printing in the DOT (graphviz) format} Example (print divisors from [42]): {[ let open CCGraph in let open Dot in with_out "/tmp/truc.dot" (fun out -> pp ~attrs_v:(fun i -> [`Label (string_of_int i)]) ~graph:divisors_graph out 42 ) ]} *) module Dot : sig type attribute = [ `Color of string | `Shape of string | `Weight of int | `Style of string | `Label of string | `Other of string * string ] (** Dot attribute *) type vertex_state (** Hidden state associated to a vertex *) val pp : tbl:('v, vertex_state) table -> eq:('v -> 'v -> bool) -> ?attrs_v:('v -> attribute list) -> ?attrs_e:('e -> attribute list) -> ?name:string -> graph:('v, 'e) t -> Format.formatter -> 'v -> unit (** Print the graph, starting from given vertex, on the formatter. @param attrs_v attributes for vertices. @param attrs_e attributes for edges. @param name name of the graph. *) val pp_all : tbl:('v, vertex_state) table -> eq:('v -> 'v -> bool) -> ?attrs_v:('v -> attribute list) -> ?attrs_e:('e -> attribute list) -> ?name:string -> graph:('v, 'e) t -> Format.formatter -> 'v iter -> unit (** Same as {!pp} but starting from several vertices, not just one. @since 2.8 *) val with_out : string -> (Format.formatter -> 'a) -> 'a (** Shortcut to open a file and write to it. *) end (** {2 Mutable Graph} *) type ('v, 'e) mut_graph = { graph: ('v, 'e) t; add_edge: 'v -> 'e -> 'v -> unit; remove: 'v -> unit; } val mk_mut_tbl : eq:('v -> 'v -> bool) -> ?hash:('v -> int) -> int -> ('v, 'a) mut_graph (** Make a new mutable graph from a Hashtbl. Edges are labelled with type ['a]. *) (** {2 Immutable Graph} A classic implementation of a graph structure on totally ordered vertices, with unlabelled edges. The graph allows to add and remove edges and vertices, and to iterate on edges and vertices. *) module type MAP = sig type vertex type 'a t val as_graph : 'a t -> (vertex, 'a) graph (** Graph view of the map. *) val empty : 'a t val add_edge : vertex -> 'a -> vertex -> 'a t -> 'a t val remove_edge : vertex -> vertex -> 'a t -> 'a t val add : vertex -> 'a t -> 'a t (** Add a vertex, possibly with no outgoing edge. *) val remove : vertex -> 'a t -> 'a t (** Remove the vertex and all its outgoing edges. Edges that point to the vertex are {b NOT} removed, they must be manually removed with {!remove_edge}. *) val union : 'a t -> 'a t -> 'a t val vertices : _ t -> vertex iter val vertices_l : _ t -> vertex list val of_list : (vertex * 'a * vertex) list -> 'a t val add_list : (vertex * 'a * vertex) list -> 'a t -> 'a t val to_list : 'a t -> (vertex * 'a * vertex) list val of_iter : (vertex * 'a * vertex) iter -> 'a t (** @since 2.8 *) val add_iter : (vertex * 'a * vertex) iter -> 'a t -> 'a t (** @since 2.8 *) val to_iter : 'a t -> (vertex * 'a * vertex) iter (** @since 2.8 *) end module Map (O : Map.OrderedType) : MAP with type vertex = O.t (** {2 Misc} *) val of_list : eq:('v -> 'v -> bool) -> ('v * 'v) list -> ('v, unit) t (** [of_list l] makes a graph from a list of pairs of vertices. Each pair [(a,b)] is an edge from [a] to [b]. @param eq equality used to compare vertices. *) val of_hashtbl : ('v, 'v list) Hashtbl.t -> ('v, unit) t (** [of_hashtbl tbl] makes a graph from a hashtable that maps vertices to lists of children. *) val of_fun : ('v -> 'v list) -> ('v, unit) t (** [of_fun f] makes a graph out of a function that maps a vertex to the list of its children. The function is assumed to be deterministic. *) val divisors_graph : (int, unit) t (** [n] points to all its strict divisors. *)
(** Simple Graph Interface A collections of algorithms on (mostly read-only) graph structures. The user provides her own graph structure as a [('v, 'e) CCGraph.t], where ['v] is the type of vertices and ['e] the type of edges (for instance, ['e = ('v * 'v)] is perfectly fine in many cases). Such a [('v, 'e) CCGraph.t] structure is a record containing three functions: two relate edges to their origin and destination, and one maps vertices to their outgoing edges. This abstract notion of graph makes it possible to run the algorithms on any user-specific type that happens to have a graph structure. Many graph algorithms here take an iterator of vertices as input. The helper module {!Iter} contains basic functions for that, as does the [iter] library on opam. If the user only has a single vertex (e.g., for a topological sort from a given vertex), they can use [Iter.return x] to build a iter of one element. {b status: unstable} @since 0.12 *)
children.ml
(* Keep track of running child processes and notify their fiber when they exit. After forking a child process, it gets registered in the global [db] along with a resolver for the promise of its exit status. When we get a SIGCHLD signal, we reap all exited processes and resolve their promises, waking whichever fibers are waiting for them. We have to be careful not to use a PID after [wait] reaps it, as the PID could have been reused by then. The signal handler can run in any domain or systhread, so we have to be careful about that too. We can't defer the call to [wait] until we're running in an Eio domain as we don't know which domain should handle it until [wait] gives as the process ID. We don't want to delegate to a particular domain because it might be spinning doing CPU stuff for a long time. Instead, we try to take the lock in the signal handler and do it there. If we can't get the lock then we just record that a wait is needed; whoever holds the lock will soon release it and will do the reaping for us. Note that, since signal handlers are global, this will interfere with any libraries trying to manage processes themselves. For systems with Process Descriptors we could skip all this nonsense and just poll on the process's FD. e.g. using [pdfork] on FreeBSD or [CLONE_PIDFD] on Linux. *) open Eio.Std (* Each child process is registered in this table. Must hold [lock] when accessing it. *) let db : (int, Unix.process_status Promise.u) Hashtbl.t = Hashtbl.create 10 (* Set to [true] when we receive [SIGCHLD] and [false] before calling [wait]. *) let need_wait = Atomic.make false (* [lock] must be held when spawning or reaping. Otherwise, this can happen: - We spawn process 100, adding it to [db]. - It exits, sending us SIGCHLD. - The signal handler calls [wait], reaping it. - Another domain spawns another process 100 and adds it to [db], overwriting the previous entry. - The signal handler resumes, and gets the wrong entry. If [lock] is already locked when the SIGCHLD handler runs then it just leaves [need_wait = true] (a signal handler can't wait on a mutex, since it may have interrupted the holder). The unlocker needs to check [need_wait] after releasing the lock. *) let lock = Mutex.create () (* [pid] has exited. Notify the waiter. Must hold [lock] when calling this. *) let report_child_status pid status = match Hashtbl.find_opt db pid with | Some r -> Hashtbl.remove db pid; Promise.resolve r status | None -> (* Not one of ours. Not much we can do here. The spawner will probably get an [ECHILD] error when they wait, which will do for the error. *) () (* Must hold [lock] when calling this. *) let rec reap () = Atomic.set need_wait false; match Unix.(waitpid [WNOHANG] (-1)) with | 0, _ -> () (* Returned if there are children but none has exited yet. *) | pid, status -> report_child_status pid status; reap () | exception Unix.Unix_error (EINTR, _, _) -> reap () | exception Unix.Unix_error (ECHILD, _, _) -> () (* Returned if there are no children at all. *) let rec reap_nonblocking () = if Mutex.try_lock lock then ( reap (); Mutex.unlock lock; if Atomic.get need_wait then reap_nonblocking () ) (* else the unlocker will see [need_wait] and call us later *) let unlock () = Mutex.unlock lock; if Atomic.get need_wait then reap_nonblocking () (* Must hold [lock] when calling this. *) let register pid = assert (not (Hashtbl.mem db pid)); let p, r = Promise.create () in Hashtbl.add db pid r; p let with_lock fn = Mutex.lock lock; Fun.protect fn ~finally:unlock let handle_sigchld () = Atomic.set need_wait true; reap_nonblocking ()
(* Keep track of running child processes and notify their fiber when they exit. After forking a child process, it gets registered in the global [db] along with a resolver for the promise of its exit status. When we get a SIGCHLD signal, we reap all exited processes and resolve their promises, waking whichever fibers are waiting for them. We have to be careful not to use a PID after [wait] reaps it, as the PID could have been reused by then. The signal handler can run in any domain or systhread, so we have to be careful about that too. We can't defer the call to [wait] until we're running in an Eio domain as we don't know which domain should handle it until [wait] gives as the process ID. We don't want to delegate to a particular domain because it might be spinning doing CPU stuff for a long time. Instead, we try to take the lock in the signal handler and do it there. If we can't get the lock then we just record that a wait is needed; whoever holds the lock will soon release it and will do the reaping for us. Note that, since signal handlers are global, this will interfere with any libraries trying to manage processes themselves. For systems with Process Descriptors we could skip all this nonsense and just poll on the process's FD. e.g. using [pdfork] on FreeBSD or [CLONE_PIDFD] on Linux. *)
models.c
#include "config.h" #include <gmp.h> /* should be included before yices and zarith */ #define __GMP_H #include "models.h" #include <stdlib.h> // for malloc, free #include <stdint.h> // for (u)int32_t etc. #include <yices.h> #include <caml/mlvalues.h> #include <caml/custom.h> // custom block #include <caml/memory.h> // CAMLparam etc. #include <caml/alloc.h> // caml_alloc, _small, _tuple, caml_copy_string/int32/int64/..., etc. #include <zarith.h> #include "misc.h" #include "terms.h" #include "contexts.h" static inline context_t *Mdlctx_val_context(value v) { return Context_val(Field(v,1)); } static void _oy_model_finalize(value); static char _oy_model_id[] = "ocamlyices.model"; static struct custom_operations _oy_model_ops = { _oy_model_id, &_oy_model_finalize, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default, custom_compare_ext_default, }; static inline value alloc_model_val () { return caml_alloc_custom(&_oy_model_ops, sizeof (model_t *), 0, 1); } static inline void Store_model_val(value v, model_t *raw) { *((model_t **)Data_custom_val(v)) = raw; } CAMLprim value ocamlyices_context_get_model(value v_keepsubst, value v_context) { CAMLparam2(v_keepsubst, v_context); CAMLlocal2(v_model, v_res); model_t *model; context_t *context; int32_t keepsubst; context = Context_val(v_context); keepsubst = (Is_block(v_keepsubst) ? Int_val(Field(v_keepsubst, 0)) : 1); COND_MT_START(MTFLAG_GET_MODEL); model = yices_get_model(context, keepsubst); COND_MT_END(MTFLAG_GET_MODEL); if (model == NULL) { _oy_error(); } v_model = alloc_model_val(); Store_model_val(v_model, model); v_res = caml_alloc(2, 0); Store_field(v_res, 0, v_model); Store_field(v_res, 1, v_context); CAMLreturn(v_res); } static void _oy_model_finalize(value v_model) { model_t *model = Model_val(v_model); if (model != NULL) { yices_free_model(model); Store_model_val(v_model, NULL); } } CAMLprim value ocamlyices_model_get_bool(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); model_t *mdl; term_t t; int32_t res, val; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } res = yices_get_bool_value(mdl, t, &val); if (res != 0) { _oy_error(); } CAMLreturn(Val_long(val != 0)); } CAMLprim value ocamlyices_model_get_int(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); model_t *mdl; term_t t; intnat val; int32_t res; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } if (sizeof(intnat) == sizeof(int32_t)) { int32_t val32; res = yices_get_int32_value(mdl, t, &val32); val = val32; } else { int64_t val64; res = yices_get_int64_value(mdl, t, &val64); val = val64; } if (res != 0) { _oy_error(); } if (val > Max_long || val < Min_long) { _oy_binding_overflow_error(); } CAMLreturn(Val_long(val)); } CAMLprim value ocamlyices_model_get_nativeint(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); model_t *mdl; term_t t; intnat val; int32_t res; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } if (sizeof(intnat) == sizeof(int32_t)) { int32_t val32; res = yices_get_int32_value(mdl, t, &val32); val = val32; } else { int64_t val64; res = yices_get_int64_value(mdl, t, &val64); val = val64; } if (res != 0) { _oy_error(); } CAMLreturn(caml_copy_nativeint(val)); } CAMLprim value ocamlyices_model_get_int32(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); model_t *mdl; term_t t; int32_t res, val; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } res = yices_get_int32_value(mdl, t, &val); if (res != 0) { _oy_error(); } CAMLreturn(caml_copy_int32(val)); } CAMLprim value ocamlyices_model_get_int64(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); model_t *mdl; term_t t; int32_t res; int64_t val; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } res = yices_get_int64_value(mdl, t, &val); if (res != 0) { _oy_error(); } CAMLreturn(caml_copy_int64(val)); } CAMLprim value ocamlyices_model_get_rational_int(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); CAMLlocal1(v_res); model_t *mdl; term_t t; int32_t res; intnat num; uintnat den; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } if (sizeof(intnat) == sizeof(int32_t)) { int32_t num32; uint32_t den32; res = yices_get_rational32_value(mdl, t, &num32, &den32); num = num32; den = den32; } else { int64_t num64; uint64_t den64; res = yices_get_rational64_value(mdl, t, &num64, &den64); num = num64; den = den64; } if (res != 0) { _oy_error(); } if (num > Max_long || num < Min_long) { _oy_binding_overflow_error(); } if (den > Max_long) { _oy_binding_overflow_error(); } v_res = caml_alloc_tuple(2); Store_field(v_res, 0, Val_long(num)); Store_field(v_res, 1, Val_long((intnat)den)); CAMLreturn(v_res); } CAMLprim value ocamlyices_model_get_rational_nativeint(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); CAMLlocal1(v_res); model_t *mdl; term_t t; int32_t res; intnat num; uintnat den; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } if (sizeof(intnat) == sizeof(int32_t)) { int32_t num32; uint32_t den32; res = yices_get_rational32_value(mdl, t, &num32, &den32); num = num32; den = den32; } else { int64_t num64; uint64_t den64; res = yices_get_rational64_value(mdl, t, &num64, &den64); num = num64; den = den64; } if (res != 0) { _oy_error(); } if (num > Max_long || num < Min_long) { _oy_binding_overflow_error(); } if (den > Max_long) { _oy_binding_overflow_error(); } v_res = caml_alloc_tuple(2); Store_field(v_res, 0, caml_copy_nativeint(num)); Store_field(v_res, 1, caml_copy_nativeint((intnat)den)); CAMLreturn(v_res); } CAMLprim value ocamlyices_model_get_rational_int32(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); CAMLlocal1(v_res); model_t *mdl; term_t t; int32_t res, num; uint32_t den; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } res = yices_get_rational32_value(mdl, t, &num, &den); if (res != 0) { _oy_error(); } if (den > INT32_MAX) { _oy_binding_overflow_error(); } v_res = caml_alloc_tuple(2); Store_field(v_res, 0, caml_copy_int32(num)); Store_field(v_res, 1, caml_copy_int32((int32_t)den)); CAMLreturn(v_res); } CAMLprim value ocamlyices_model_get_rational_int64(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); CAMLlocal1(v_res); model_t *mdl; term_t t; int32_t res; int64_t num; uint64_t den; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } res = yices_get_rational64_value(mdl, t, &num, &den); if (res != 0) { _oy_error(); } if (den > INT64_MAX) { _oy_binding_overflow_error(); } v_res = caml_alloc_tuple(2); Store_field(v_res, 0, caml_copy_int64(num)); Store_field(v_res, 1, caml_copy_int64((int64_t)den)); CAMLreturn(v_res); } CAMLprim value ocamlyices_model_get_float(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); model_t *mdl; term_t t; int32_t res; double val; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } res = yices_get_double_value(mdl, t, &val); if (res != 0) { _oy_error(); } CAMLreturn(caml_copy_double(val)); } CAMLprim value ocamlyices_model_get_z(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); int32_t res; mpz_t val; mpz_init(val); res = yices_get_mpz_value(Mdlctx_val_model(v_mdlctx), Term_val(v_t), val); if (res != 0) { _oy_error(); } CAMLreturn(ml_z_from_mpz(val)); } CAMLprim value ocamlyices_model_get_q(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); CAMLlocal1(v_res); int32_t res; mpq_t val; mpz_t num, den; mpq_init(val); mpz_inits(num, den); res = yices_get_mpq_value(Mdlctx_val_model(v_mdlctx), Term_val(v_t), val); if (res != 0) { _oy_error(); } mpq_get_num(num, val); mpq_get_den(den, val); v_res = caml_alloc(2, 0); Store_field(v_res, 0, ml_z_from_mpz(num)); Store_field(v_res, 1, ml_z_from_mpz(den)); CAMLreturn(v_res); } CAMLprim value ocamlyices_model_get_bitvector(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); CAMLlocal1(v_bv); model_t *mdl; term_t t; int32_t res, *bv; uint32_t n; size_t i; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } n = yices_term_bitsize(t); if (n <= 0) { _oy_error(); } bv = (int32_t *)malloc(sizeof(int32_t[n])); if (bv == NULL) { _oy_allocation_error(); } res = yices_get_bv_value(mdl, t, bv); if (res != 0) { free(bv); _oy_error(); } v_bv = caml_alloc(n, 0); for (i = 0; i < n; i++) { Store_field(v_bv, i, Val_long(bv[i] != 0)); } CAMLreturn(v_bv); } CAMLprim value ocamlyices_model_get_scalar(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); model_t *mdl; term_t t; int32_t res, val; mdl = Mdlctx_val_model(v_mdlctx); t = Term_val(v_t); if (mdl == NULL) { _oy_freed_model_error(); } res = yices_get_scalar_value(mdl, t, &val); if (res != 0) { _oy_error(); } CAMLreturn(Val_long(val)); } CAMLprim value ocamlyices_model_get_as_term(value v_mdlctx, value v_t) { CAMLparam1(v_mdlctx); model_t *mdl; int32_t res; mdl = Mdlctx_val_model(v_mdlctx); if (mdl == NULL) { _oy_freed_model_error(); } res = yices_get_value_as_term(mdl, Term_val(v_t)); if (res == NULL_TERM) { _oy_error(); } CAMLreturn(Val_term(res)); } CAMLprim value ocamlyices_model_get_as_terms(value v_mdlctx, value v_ts) { CAMLparam1(v_mdlctx); CAMLlocal1(v_outts); model_t *mdl; term_t *ts, *outts; uint32_t lts, i; int32_t res; mdl = Mdlctx_val_model(v_mdlctx); if (mdl == NULL) { _oy_freed_model_error(); } lts = check_Wosize_val(v_ts); ts = _oy_terms_from_values(v_ts, lts); outts = (term_t *)malloc(sizeof(term_t[lts])); if (ts == NULL || outts == NULL) { free(ts); free(outts); _oy_allocation_error(); } res = yices_term_array_value(mdl, lts, ts, outts); free(ts); if (res == NULL_TERM) { free(outts); _oy_error(); } v_outts = caml_alloc(lts, 0); for (i = 0; i < lts; i++) { Store_field(v_outts, i, outts[i]); } free(outts); CAMLreturn(v_outts); } // Pretty printing struct pp_model_arg { model_t *mdl; uint32_t width, height, offset; }; static int _oy_model_pp(FILE *output, void *arg_) { struct pp_model_arg *arg = (struct pp_model_arg *)arg_; return yices_pp_model(output, arg->mdl, arg->width, arg->height, arg->offset); } CAMLprim value ocamlyices_model_print(value v_width_opt, value v_height_opt, value v_offset_opt, value v_cb, value v_mdl) { CAMLparam4(v_width_opt, v_height_opt, v_offset_opt, v_cb); model_t *mdl = Mdlctx_val_model(v_mdl); uint32_t width = (uint32_t)Long_option_val(v_width_opt, UINT32_MAX); uint32_t height = (uint32_t)Long_option_val(v_height_opt, 1); uint32_t offset = (uint32_t)Long_option_val(v_offset_opt, 0); struct pp_model_arg arg = { mdl, width, height, offset }; int res = _oy_callback_print(v_cb, &_oy_model_pp, &arg); if (res != 0) { _oy_error(); } CAMLreturn(Val_unit); }
FileUtilTypes.ml
open FilePath exception FileDoesntExist of filename exception RecursiveLink of filename exception Fatal of string (** See FileUtil.mli *) type action_link = | Follow | Skip | SkipInform of (filename -> unit) | AskFollow of (filename -> bool) (** See FileUtil.mli *) type interactive = Force | Ask of (filename -> bool) (** See FileUtil.mli *) type size = TB of int64 | GB of int64 | MB of int64 | KB of int64 | B of int64 (** See FileUtil.mli *) type kind = Dir | File | Dev_char | Dev_block | Fifo | Socket | Symlink (** See FileUtil.mli *) type base_permission = { sticky: bool; exec: bool; write: bool; read: bool; } (** See FileUtil.mli *) type permission = { user: base_permission; group: base_permission; other: base_permission; } (** See FileUtil.mli *) type stat = { kind: kind; is_link: bool; permission: permission; size: size; owner: int; group_owner: int; access_time: float; modification_time: float; creation_time: float; device: int; inode: int; } (** See FileUtil.mli *) type test_file = | Is_dev_block | Is_dev_char | Is_dir | Exists | Is_file | Is_set_group_ID | Has_sticky_bit | Is_link | Is_pipe | Is_readable | Is_writeable | Size_not_null | Size_bigger_than of size | Size_smaller_than of size | Size_equal_to of size | Size_fuzzy_equal_to of size | Is_socket | Has_set_user_ID | Is_exec | Is_owned_by_user_ID | Is_owned_by_group_ID | Is_newer_than of filename | Is_older_than of filename | Is_newer_than_date of float | Is_older_than_date of float | And of test_file * test_file | Or of test_file * test_file | Not of test_file | Match of string | True | False | Has_extension of extension | Has_no_extension | Is_parent_dir | Is_current_dir | Basename_is of filename | Dirname_is of filename | Custom of (filename -> bool) (** See FileUtil.mli *) type touch_time_t = | Touch_now | Touch_file_time of filename | Touch_timestamp of float
(******************************************************************************) (* ocaml-fileutils: files and filenames common operations *) (* *) (* Copyright (C) 2003-2014, Sylvain Le Gall *) (* *) (* This library is free software; you can redistribute it and/or modify it *) (* under the terms of the GNU Lesser General Public License as published by *) (* the Free Software Foundation; either version 2.1 of the License, or (at *) (* your option) any later version, with the OCaml static compilation *) (* exception. *) (* *) (* This library is distributed in the hope that it will be useful, but *) (* WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file *) (* COPYING for more details. *) (* *) (* You should have received a copy of the GNU Lesser General Public License *) (* along with this library; if not, write to the Free Software Foundation, *) (* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *) (******************************************************************************)
lib_invalid.ml
type t = unit [@@deriving irmin { lib = "foo" }] (* should be [Some "foo"] *)
type t = unit [@@deriving irmin { lib = "foo" }] (* should be [Some "foo"] *)
dune
(library (name vector3) (public_name vector3) (modules vector3))
t-theta.c
#include "acb_modular.h" int main() { slong iter; flint_rand_t state; flint_printf("theta...."); fflush(stdout); flint_randinit(state); /* Test consistency with/without transform */ for (iter = 0; iter < 10000 * arb_test_multiplier(); iter++) { acb_t t1, t2, t3, t4, t1b, t2b, t3b, t4b, z, tau; slong prec0, prec1, prec2, e0; acb_init(t1); acb_init(t2); acb_init(t3); acb_init(t4); acb_init(t1b); acb_init(t2b); acb_init(t3b); acb_init(t4b); acb_init(z); acb_init(tau); prec0 = 2 + n_randint(state, 2000); prec1 = 2 + n_randint(state, 2000); prec2 = 2 + n_randint(state, 2000); e0 = 1 + n_randint(state, 100); acb_randtest(tau, state, prec0, e0); acb_randtest(z, state, prec0, e0); arb_abs(acb_imagref(tau), acb_imagref(tau)); if (n_randint(state, 3) == 0) arb_set_si(acb_realref(tau), -10 + n_randint(state, 20)); if (n_randint(state, 3) == 0) arb_zero(acb_imagref(z)); if (n_randint(state, 3) == 0) arb_zero(acb_realref(z)); acb_modular_theta(t1, t2, t3, t4, z, tau, prec1); acb_modular_theta_notransform(t1b, t2b, t3b, t4b, z, tau, prec2); if (!acb_overlaps(t1, t1b) || !acb_overlaps(t2, t2b) || !acb_overlaps(t3, t3b) || !acb_overlaps(t4, t4b)) { flint_printf("FAIL (overlap)\n"); flint_printf("z = "); acb_printd(z, 25); flint_printf("\n\n"); flint_printf("tau = "); acb_printd(tau, 25); flint_printf("\n\n"); flint_printf("t1 = "); acb_printd(t1, 15); flint_printf("\n\n"); flint_printf("t1b = "); acb_printd(t1b, 15); flint_printf("\n\n"); flint_printf("t2 = "); acb_printd(t2, 15); flint_printf("\n\n"); flint_printf("t2b = "); acb_printd(t2b, 15); flint_printf("\n\n"); flint_printf("t3 = "); acb_printd(t3, 15); flint_printf("\n\n"); flint_printf("t3b = "); acb_printd(t3b, 15); flint_printf("\n\n"); flint_printf("t4 = "); acb_printd(t4, 15); flint_printf("\n\n"); flint_printf("t4b = "); acb_printd(t4b, 15); flint_printf("\n\n"); flint_abort(); } acb_clear(t1); acb_clear(t2); acb_clear(t3); acb_clear(t4); acb_clear(t1b); acb_clear(t2b); acb_clear(t3b); acb_clear(t4b); acb_clear(z); acb_clear(tau); } flint_randclear(state); flint_cleanup(); flint_printf("PASS\n"); return EXIT_SUCCESS; }
/* Copyright (C) 2014 Fredrik Johansson This file is part of Arb. Arb is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <http://www.gnu.org/licenses/>. */
reloc.ml
(* TEST flags = "-I ${ocamlsrcdir}/parsing -I ${ocamlsrcdir}/toplevel" include ocamlcommon * expect *) (* Check that [e.pexp_loc :: e.pexp_loc_stack] includes all intermediate locations of an expression. *) let blocks = let s = {| ( (* comment *) (( "contant" [@attr] ) (* comment *))) |} in let e = Parse.expression (Lexing.from_string s) in let extract (loc : Location.t) = let a = loc.loc_start.pos_cnum in let b = loc.loc_end.pos_cnum in String.sub s a (b - a) in List.map extract (e.pexp_loc :: e.pexp_loc_stack) ;; [%%expect {| val blocks : string list = ["( (* comment *) (( \"contant\" [@attr] ) (* comment *)))"; "(( \"contant\" [@attr] ) (* comment *))"; "( \"contant\" [@attr] )"; "\"contant\""] |}];;
(* TEST flags = "-I ${ocamlsrcdir}/parsing -I ${ocamlsrcdir}/toplevel" include ocamlcommon * expect *)
dune
(executable (name main) (libraries foo)) (alias (name default) (action (run ./main.exe)))
script_ir_translator.ml
open Alpha_context open Micheline open Script open Script_tc_errors open Script_ir_annot open Script_typed_ir module Typecheck_costs = Michelson_v1_gas.Cost_of.Typechecking module Unparse_costs = Michelson_v1_gas.Cost_of.Unparsing type ex_comparable_ty = | Ex_comparable_ty : 'a comparable_ty -> ex_comparable_ty type ex_ty = Ex_ty : 'a ty -> ex_ty type ex_stack_ty = Ex_stack_ty : ('a, 's) stack_ty -> ex_stack_ty (* The following type represents an instruction parameterized by its continuation. During the elaboration of the typed term, a sequence of instructions in Micheline is read from left to right: hence, the elaboration needs to wait for the next instruction to be elaborated to be able to construct the current instruction. *) type ('a, 's, 'b, 'u) cinstr = { apply : 'r 'f. ('a, 's) kinfo -> ('b, 'u, 'r, 'f) kinstr -> ('a, 's, 'r, 'f) kinstr; } (* While a [Script_typed_ir.descr] contains a fully defined instruction, [descr] contains a [cinstr], that is an instruction parameterized by the next instruction, as explained in the previous comment. *) type ('a, 's, 'b, 'u) descr = { loc : Script.location; bef : ('a, 's) stack_ty; aft : ('b, 'u) stack_ty; instr : ('a, 's, 'b, 'u) cinstr; } let close_descr {loc; bef; aft; instr} = let kinfo = {iloc = loc; kstack_ty = aft} in let kinfo' = {iloc = loc; kstack_ty = bef} in let kinstr = instr.apply kinfo' (IHalt kinfo) in {kloc = loc; kbef = bef; kaft = aft; kinstr} let kinfo_of_descr {loc; bef; _} = {iloc = loc; kstack_ty = bef} let compose_descr : type a s b u c v. Script.location -> (a, s, b, u) descr -> (b, u, c, v) descr -> (a, s, c, v) descr = fun loc d1 d2 -> { loc; bef = d1.bef; aft = d2.aft; instr = { apply = (fun _ k -> d1.instr.apply (kinfo_of_descr d1) (d2.instr.apply (kinfo_of_descr d2) k)); }; } type tc_context = | Lambda : tc_context | Dip : ('a, 's) stack_ty * tc_context -> tc_context | Toplevel : { storage_type : 'sto ty; param_type : 'param ty; root_name : field_annot option; legacy_create_contract_literal : bool; } -> tc_context type unparsing_mode = Optimized | Readable | Optimized_legacy type type_logger = int -> (Script.expr * Script.annot) list -> (Script.expr * Script.annot) list -> unit let add_dip ty annot prev = match prev with | Lambda | Toplevel _ -> Dip (Item_t (ty, Item_t (Unit_t None, Bot_t, None), annot), prev) | Dip (stack, _) -> Dip (Item_t (ty, stack, annot), prev) (* ---- Type size accounting ------------------------------------------------*) (** [deduce_comparable_type_size ~remaining ty] returns [remaining] minus the size of type [ty]. It is guaranteed to not grow the stack by more than [remaining] non-tail calls. *) let rec deduce_comparable_type_size : type t. remaining:int -> t comparable_ty -> int = fun ~remaining ty -> if Compare.Int.(remaining < 0) then remaining else match ty with | Unit_key _ | Never_key _ | Int_key _ | Nat_key _ | Signature_key _ | String_key _ | Bytes_key _ | Mutez_key _ | Bool_key _ | Key_hash_key _ | Key_key _ | Timestamp_key _ | Chain_id_key _ | Address_key _ -> remaining - 1 | Pair_key ((t1, _), (t2, _), _) -> let remaining = remaining - 1 in let remaining = deduce_comparable_type_size ~remaining t1 in deduce_comparable_type_size ~remaining t2 | Union_key ((t1, _), (t2, _), _) -> let remaining = remaining - 1 in let remaining = deduce_comparable_type_size ~remaining t1 in deduce_comparable_type_size ~remaining t2 | Option_key (t, _) -> let remaining = remaining - 1 in deduce_comparable_type_size ~remaining t (** [deduce_type_size ~remaining ty] returns [remaining] minus the size of type [ty]. It is guaranteed to not grow the stack by more than [remaining] non-tail calls. *) let rec deduce_type_size : type t. remaining:int -> t ty -> int = fun ~remaining ty -> match ty with | Unit_t _ | Int_t _ | Nat_t _ | Signature_t _ | Bytes_t _ | String_t _ | Mutez_t _ | Key_hash_t _ | Key_t _ | Timestamp_t _ | Address_t _ | Bool_t _ | Operation_t _ | Chain_id_t _ | Never_t _ | Bls12_381_g1_t _ | Bls12_381_g2_t _ | Bls12_381_fr_t _ | Sapling_transaction_t _ | Sapling_state_t _ -> remaining - 1 | Pair_t ((l, _, _), (r, _, _), _) -> let remaining = remaining - 1 in let remaining = deduce_type_size ~remaining l in deduce_type_size ~remaining r | Union_t ((l, _), (r, _), _) -> let remaining = remaining - 1 in let remaining = deduce_type_size ~remaining l in deduce_type_size ~remaining r | Lambda_t (arg, ret, _) -> let remaining = remaining - 1 in let remaining = deduce_type_size ~remaining arg in deduce_type_size ~remaining ret | Option_t (t, _) -> let remaining = remaining - 1 in deduce_type_size ~remaining t | List_t (t, _) -> let remaining = remaining - 1 in deduce_type_size ~remaining t | Ticket_t (t, _) -> let remaining = remaining - 1 in deduce_comparable_type_size ~remaining t | Set_t (k, _) -> let remaining = remaining - 1 in deduce_comparable_type_size ~remaining k | Map_t (k, v, _) -> let remaining = remaining - 1 in let remaining = deduce_comparable_type_size ~remaining k in deduce_type_size ~remaining v | Big_map_t (k, v, _) -> let remaining = remaining - 1 in let remaining = deduce_comparable_type_size ~remaining k in deduce_type_size ~remaining v | Contract_t (arg, _) -> let remaining = remaining - 1 in deduce_type_size ~remaining arg let check_type_size ~loc ~maximum_type_size ty = if Compare.Int.(deduce_type_size ~remaining:maximum_type_size ty >= 0) then ok_unit else error (Type_too_large (loc, maximum_type_size)) let rec check_type_size_of_stack_head : type a s. loc:Script.location -> maximum_type_size:int -> (a, s) stack_ty -> up_to:int -> unit tzresult = fun ~loc ~maximum_type_size stack ~up_to -> if Compare.Int.(up_to <= 0) then ok_unit else match stack with | Bot_t -> ok_unit | Item_t (head, tail, _annot) -> check_type_size ~loc ~maximum_type_size head >>? fun () -> (check_type_size_of_stack_head [@tailcall]) ~loc ~maximum_type_size tail ~up_to:(up_to - 1) let check_comparable_type_size ~legacy ctxt ~loc ty = if legacy then ok_unit else let maximum_type_size = Constants.michelson_maximum_type_size ctxt in if Compare.Int.( deduce_comparable_type_size ~remaining:maximum_type_size ty >= 0) then ok_unit else error (Type_too_large (loc, maximum_type_size)) let check_type_size ~legacy ctxt ~loc ty = if legacy then ok_unit else let maximum_type_size = Constants.michelson_maximum_type_size ctxt in check_type_size ~loc ~maximum_type_size ty (* ---- Error helpers -------------------------------------------------------*) let location = function | Prim (loc, _, _, _) | Int (loc, _) | String (loc, _) | Bytes (loc, _) | Seq (loc, _) -> loc let kind_equal a b = match (a, b) with | (Int_kind, Int_kind) | (String_kind, String_kind) | (Bytes_kind, Bytes_kind) | (Prim_kind, Prim_kind) | (Seq_kind, Seq_kind) -> true | _ -> false let kind = function | Int _ -> Int_kind | String _ -> String_kind | Bytes _ -> Bytes_kind | Prim _ -> Prim_kind | Seq _ -> Seq_kind let unexpected expr exp_kinds exp_ns exp_prims = match expr with | Int (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Int_kind) | String (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, String_kind) | Bytes (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Bytes_kind) | Seq (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Seq_kind) | Prim (loc, name, _, _) -> ( let open Michelson_v1_primitives in match (namespace name, exp_ns) with | (Type_namespace, Type_namespace) | (Instr_namespace, Instr_namespace) | (Constant_namespace, Constant_namespace) -> Invalid_primitive (loc, exp_prims, name) | (ns, _) -> Invalid_namespace (loc, name, exp_ns, ns) ) let check_kind kinds expr = let kind = kind expr in if List.exists (kind_equal kind) kinds then ok_unit else let loc = location expr in error (Invalid_kind (loc, kinds, kind)) (* ---- Lists, Sets and Maps ----------------------------------------------- *) let list_empty : 'a Script_typed_ir.boxed_list = let open Script_typed_ir in {elements = []; length = 0} let list_cons : 'a -> 'a Script_typed_ir.boxed_list -> 'a Script_typed_ir.boxed_list = fun elt l -> let open Script_typed_ir in {length = 1 + l.length; elements = elt :: l.elements} let compare_address (x, ex) (y, ey) = let lres = Contract.compare x y in if Compare.Int.(lres = 0) then Compare.String.compare ex ey else lres type compare_comparable_cont = | Compare_comparable : 'a comparable_ty * 'a * 'a * compare_comparable_cont -> compare_comparable_cont | Compare_comparable_return : compare_comparable_cont let compare_comparable : type a. a comparable_ty -> a -> a -> int = let rec compare_comparable : type a. a comparable_ty -> compare_comparable_cont -> a -> a -> int = fun kind k x y -> match (kind, x, y) with | (Unit_key _, (), ()) -> (apply [@tailcall]) 0 k | (Never_key _, _, _) -> . | (Signature_key _, x, y) -> (apply [@tailcall]) (Signature.compare x y) k | (String_key _, x, y) -> (apply [@tailcall]) (Compare.String.compare x y) k | (Bool_key _, x, y) -> (apply [@tailcall]) (Compare.Bool.compare x y) k | (Mutez_key _, x, y) -> (apply [@tailcall]) (Tez.compare x y) k | (Key_hash_key _, x, y) -> (apply [@tailcall]) (Signature.Public_key_hash.compare x y) k | (Key_key _, x, y) -> (apply [@tailcall]) (Signature.Public_key.compare x y) k | (Int_key _, x, y) -> (apply [@tailcall]) (Script_int.compare x y) k | (Nat_key _, x, y) -> (apply [@tailcall]) (Script_int.compare x y) k | (Timestamp_key _, x, y) -> (apply [@tailcall]) (Script_timestamp.compare x y) k | (Address_key _, x, y) -> (apply [@tailcall]) (compare_address x y) k | (Bytes_key _, x, y) -> (apply [@tailcall]) (Compare.Bytes.compare x y) k | (Chain_id_key _, x, y) -> (apply [@tailcall]) (Chain_id.compare x y) k | (Pair_key ((tl, _), (tr, _), _), (lx, rx), (ly, ry)) -> (compare_comparable [@tailcall]) tl (Compare_comparable (tr, rx, ry, k)) lx ly | (Union_key ((tl, _), _, _), L x, L y) -> (compare_comparable [@tailcall]) tl k x y | (Union_key _, L _, R _) -> -1 | (Union_key _, R _, L _) -> 1 | (Union_key (_, (tr, _), _), R x, R y) -> (compare_comparable [@tailcall]) tr k x y | (Option_key _, None, None) -> 0 | (Option_key _, None, Some _) -> -1 | (Option_key _, Some _, None) -> 1 | (Option_key (t, _), Some x, Some y) -> (compare_comparable [@tailcall]) t k x y and apply ret k = match (ret, k) with | (0, Compare_comparable (ty, x, y, k)) -> (compare_comparable [@tailcall]) ty k x y | (0, Compare_comparable_return) -> 0 | (ret, _) -> (* ret <> 0, we perform an early exit *) if Compare.Int.(ret > 0) then 1 else -1 in fun t -> compare_comparable t Compare_comparable_return let empty_set : type a. a comparable_ty -> a set = fun ty -> let module OPS = Set.Make (struct type t = a let compare = compare_comparable ty end) in ( module struct type elt = a let elt_ty = ty module OPS = OPS let boxed = OPS.empty let size = 0 end ) let set_update : type a. a -> bool -> a set -> a set = fun v b (module Box) -> ( module struct type elt = a let elt_ty = Box.elt_ty module OPS = Box.OPS let boxed = if b then Box.OPS.add v Box.boxed else Box.OPS.remove v Box.boxed let size = let mem = Box.OPS.mem v Box.boxed in if mem then if b then Box.size else Box.size - 1 else if b then Box.size + 1 else Box.size end ) let set_mem : type elt. elt -> elt set -> bool = fun v (module Box) -> Box.OPS.mem v Box.boxed let set_fold : type elt acc. (elt -> acc -> acc) -> elt set -> acc -> acc = fun f (module Box) -> Box.OPS.fold f Box.boxed let set_size : type elt. elt set -> Script_int.n Script_int.num = fun (module Box) -> Script_int.(abs (of_int Box.size)) let map_key_ty : type a b. (a, b) map -> a comparable_ty = fun (module Box) -> Box.key_ty let empty_map : type a b. a comparable_ty -> (a, b) map = fun ty -> let module OPS = Map.Make (struct type t = a let compare = compare_comparable ty end) in ( module struct type key = a type value = b let key_ty = ty module OPS = OPS let boxed = (OPS.empty, 0) end ) let map_get : type key value. key -> (key, value) map -> value option = fun k (module Box) -> Box.OPS.find_opt k (fst Box.boxed) let map_update : type a b. a -> b option -> (a, b) map -> (a, b) map = fun k v (module Box) -> ( module struct type key = a type value = b let key_ty = Box.key_ty module OPS = Box.OPS let boxed = let (map, size) = Box.boxed in let contains = Box.OPS.mem k map in match v with | Some v -> (Box.OPS.add k v map, size + if contains then 0 else 1) | None -> (Box.OPS.remove k map, size - if contains then 1 else 0) end ) let map_mem : type key value. key -> (key, value) map -> bool = fun k (module Box) -> Box.OPS.mem k (fst Box.boxed) let map_fold : type key value acc. (key -> value -> acc -> acc) -> (key, value) map -> acc -> acc = fun f (module Box) -> Box.OPS.fold f (fst Box.boxed) let map_size : type key value. (key, value) map -> Script_int.n Script_int.num = fun (module Box) -> Script_int.(abs (of_int (snd Box.boxed))) (* ---- Unparsing (Typed IR -> Untyped expressions) of types -----------------*) (* This part contains the unparsing that does not depend on parsing (everything that cannot contain a lambda). The rest is located at the end of the file. *) let rec ty_of_comparable_ty : type a. a comparable_ty -> a ty = function | Unit_key tname -> Unit_t tname | Never_key tname -> Never_t tname | Int_key tname -> Int_t tname | Nat_key tname -> Nat_t tname | Signature_key tname -> Signature_t tname | String_key tname -> String_t tname | Bytes_key tname -> Bytes_t tname | Mutez_key tname -> Mutez_t tname | Bool_key tname -> Bool_t tname | Key_hash_key tname -> Key_hash_t tname | Key_key tname -> Key_t tname | Timestamp_key tname -> Timestamp_t tname | Address_key tname -> Address_t tname | Chain_id_key tname -> Chain_id_t tname | Pair_key ((l, al), (r, ar), tname) -> Pair_t ( (ty_of_comparable_ty l, al, None), (ty_of_comparable_ty r, ar, None), tname ) | Union_key ((l, al), (r, ar), tname) -> Union_t ((ty_of_comparable_ty l, al), (ty_of_comparable_ty r, ar), tname) | Option_key (t, tname) -> Option_t (ty_of_comparable_ty t, tname) let add_field_annot a var = function | Prim (loc, prim, args, annots) -> Prim ( loc, prim, args, annots @ unparse_field_annot a @ unparse_var_annot var ) | expr -> expr let rec unparse_comparable_ty : type a. a comparable_ty -> Script.node = function | Unit_key tname -> Prim (-1, T_unit, [], unparse_type_annot tname) | Never_key tname -> Prim (-1, T_never, [], unparse_type_annot tname) | Int_key tname -> Prim (-1, T_int, [], unparse_type_annot tname) | Nat_key tname -> Prim (-1, T_nat, [], unparse_type_annot tname) | Signature_key tname -> Prim (-1, T_signature, [], unparse_type_annot tname) | String_key tname -> Prim (-1, T_string, [], unparse_type_annot tname) | Bytes_key tname -> Prim (-1, T_bytes, [], unparse_type_annot tname) | Mutez_key tname -> Prim (-1, T_mutez, [], unparse_type_annot tname) | Bool_key tname -> Prim (-1, T_bool, [], unparse_type_annot tname) | Key_hash_key tname -> Prim (-1, T_key_hash, [], unparse_type_annot tname) | Key_key tname -> Prim (-1, T_key, [], unparse_type_annot tname) | Timestamp_key tname -> Prim (-1, T_timestamp, [], unparse_type_annot tname) | Address_key tname -> Prim (-1, T_address, [], unparse_type_annot tname) | Chain_id_key tname -> Prim (-1, T_chain_id, [], unparse_type_annot tname) | Pair_key ((l, al), (r, ar), pname) -> ( let tl = add_field_annot al None (unparse_comparable_ty l) in let tr = add_field_annot ar None (unparse_comparable_ty r) in (* Fold [pair a1 (pair ... (pair an-1 an))] into [pair a1 ... an] *) (* Note that the folding does not happen if the pair on the right has a field annotation because this annotation would be lost *) match tr with | Prim (_, T_pair, ts, []) -> Prim (-1, T_pair, tl :: ts, unparse_type_annot pname) | _ -> Prim (-1, T_pair, [tl; tr], unparse_type_annot pname) ) | Union_key ((l, al), (r, ar), tname) -> let tl = add_field_annot al None (unparse_comparable_ty l) in let tr = add_field_annot ar None (unparse_comparable_ty r) in Prim (-1, T_or, [tl; tr], unparse_type_annot tname) | Option_key (t, tname) -> Prim (-1, T_option, [unparse_comparable_ty t], unparse_type_annot tname) let unparse_memo_size memo_size = let z = Sapling.Memo_size.unparse_to_z memo_size in Int (-1, z) let rec unparse_ty : type a. context -> a ty -> (Script.node * context) tzresult = fun ctxt ty -> Gas.consume ctxt Unparse_costs.unparse_type_cycle >>? fun ctxt -> let return ctxt (name, args, annot) = let result = Prim (-1, name, args, annot) in ok (result, ctxt) in match ty with | Unit_t tname -> return ctxt (T_unit, [], unparse_type_annot tname) | Int_t tname -> return ctxt (T_int, [], unparse_type_annot tname) | Nat_t tname -> return ctxt (T_nat, [], unparse_type_annot tname) | Signature_t tname -> return ctxt (T_signature, [], unparse_type_annot tname) | String_t tname -> return ctxt (T_string, [], unparse_type_annot tname) | Bytes_t tname -> return ctxt (T_bytes, [], unparse_type_annot tname) | Mutez_t tname -> return ctxt (T_mutez, [], unparse_type_annot tname) | Bool_t tname -> return ctxt (T_bool, [], unparse_type_annot tname) | Key_hash_t tname -> return ctxt (T_key_hash, [], unparse_type_annot tname) | Key_t tname -> return ctxt (T_key, [], unparse_type_annot tname) | Timestamp_t tname -> return ctxt (T_timestamp, [], unparse_type_annot tname) | Address_t tname -> return ctxt (T_address, [], unparse_type_annot tname) | Operation_t tname -> return ctxt (T_operation, [], unparse_type_annot tname) | Chain_id_t tname -> return ctxt (T_chain_id, [], unparse_type_annot tname) | Never_t tname -> return ctxt (T_never, [], unparse_type_annot tname) | Bls12_381_g1_t tname -> return ctxt (T_bls12_381_g1, [], unparse_type_annot tname) | Bls12_381_g2_t tname -> return ctxt (T_bls12_381_g2, [], unparse_type_annot tname) | Bls12_381_fr_t tname -> return ctxt (T_bls12_381_fr, [], unparse_type_annot tname) | Contract_t (ut, tname) -> unparse_ty ctxt ut >>? fun (t, ctxt) -> return ctxt (T_contract, [t], unparse_type_annot tname) | Pair_t ((utl, l_field, l_var), (utr, r_field, r_var), tname) -> let annot = unparse_type_annot tname in unparse_ty ctxt utl >>? fun (utl, ctxt) -> let tl = add_field_annot l_field l_var utl in unparse_ty ctxt utr >>? fun (utr, ctxt) -> let tr = add_field_annot r_field r_var utr in (* Fold [pair a1 (pair ... (pair an-1 an))] into [pair a1 ... an] *) (* Note that the folding does not happen if the pair on the right has an annotation because this annotation would be lost *) return ctxt ( match tr with | Prim (_, T_pair, ts, []) -> (T_pair, tl :: ts, annot) | _ -> (T_pair, [tl; tr], annot) ) | Union_t ((utl, l_field), (utr, r_field), tname) -> let annot = unparse_type_annot tname in unparse_ty ctxt utl >>? fun (utl, ctxt) -> let tl = add_field_annot l_field None utl in unparse_ty ctxt utr >>? fun (utr, ctxt) -> let tr = add_field_annot r_field None utr in return ctxt (T_or, [tl; tr], annot) | Lambda_t (uta, utr, tname) -> unparse_ty ctxt uta >>? fun (ta, ctxt) -> unparse_ty ctxt utr >>? fun (tr, ctxt) -> return ctxt (T_lambda, [ta; tr], unparse_type_annot tname) | Option_t (ut, tname) -> let annot = unparse_type_annot tname in unparse_ty ctxt ut >>? fun (ut, ctxt) -> return ctxt (T_option, [ut], annot) | List_t (ut, tname) -> unparse_ty ctxt ut >>? fun (t, ctxt) -> return ctxt (T_list, [t], unparse_type_annot tname) | Ticket_t (ut, tname) -> let t = unparse_comparable_ty ut in return ctxt (T_ticket, [t], unparse_type_annot tname) | Set_t (ut, tname) -> let t = unparse_comparable_ty ut in return ctxt (T_set, [t], unparse_type_annot tname) | Map_t (uta, utr, tname) -> let ta = unparse_comparable_ty uta in unparse_ty ctxt utr >>? fun (tr, ctxt) -> return ctxt (T_map, [ta; tr], unparse_type_annot tname) | Big_map_t (uta, utr, tname) -> let ta = unparse_comparable_ty uta in unparse_ty ctxt utr >>? fun (tr, ctxt) -> return ctxt (T_big_map, [ta; tr], unparse_type_annot tname) | Sapling_transaction_t (memo_size, tname) -> return ctxt ( T_sapling_transaction, [unparse_memo_size memo_size], unparse_type_annot tname ) | Sapling_state_t (memo_size, tname) -> return ctxt ( T_sapling_state, [unparse_memo_size memo_size], unparse_type_annot tname ) let rec strip_var_annots = function | (Int _ | String _ | Bytes _) as atom -> atom | Seq (loc, args) -> Seq (loc, List.map strip_var_annots args) | Prim (loc, name, args, annots) -> let not_var_annot s = Compare.Char.(s.[0] <> '@') in let annots = List.filter not_var_annot annots in Prim (loc, name, List.map strip_var_annots args, annots) let serialize_ty_for_error ctxt ty = unparse_ty ctxt ty >>? (fun (ty, ctxt) -> Gas.consume ctxt (Script.strip_locations_cost ty) >|? fun ctxt -> (Micheline.strip_locations (strip_var_annots ty), ctxt)) |> record_trace Cannot_serialize_error let rec comparable_ty_of_ty : type a. context -> Script.location -> a ty -> (a comparable_ty * context) tzresult = fun ctxt loc ty -> Gas.consume ctxt Typecheck_costs.comparable_ty_of_ty_cycle >>? fun ctxt -> match ty with | Unit_t tname -> ok ((Unit_key tname : a comparable_ty), ctxt) | Never_t tname -> ok (Never_key tname, ctxt) | Int_t tname -> ok (Int_key tname, ctxt) | Nat_t tname -> ok (Nat_key tname, ctxt) | Signature_t tname -> ok (Signature_key tname, ctxt) | String_t tname -> ok (String_key tname, ctxt) | Bytes_t tname -> ok (Bytes_key tname, ctxt) | Mutez_t tname -> ok (Mutez_key tname, ctxt) | Bool_t tname -> ok (Bool_key tname, ctxt) | Key_hash_t tname -> ok (Key_hash_key tname, ctxt) | Key_t tname -> ok (Key_key tname, ctxt) | Timestamp_t tname -> ok (Timestamp_key tname, ctxt) | Address_t tname -> ok (Address_key tname, ctxt) | Chain_id_t tname -> ok (Chain_id_key tname, ctxt) | Pair_t ((l, al, _), (r, ar, _), pname) -> comparable_ty_of_ty ctxt loc l >>? fun (lty, ctxt) -> comparable_ty_of_ty ctxt loc r >|? fun (rty, ctxt) -> (Pair_key ((lty, al), (rty, ar), pname), ctxt) | Union_t ((l, al), (r, ar), tname) -> comparable_ty_of_ty ctxt loc l >>? fun (lty, ctxt) -> comparable_ty_of_ty ctxt loc r >|? fun (rty, ctxt) -> (Union_key ((lty, al), (rty, ar), tname), ctxt) | Option_t (tt, tname) -> comparable_ty_of_ty ctxt loc tt >|? fun (ty, ctxt) -> (Option_key (ty, tname), ctxt) | Lambda_t _ | List_t _ | Ticket_t _ | Set_t _ | Map_t _ | Big_map_t _ | Contract_t _ | Operation_t _ | Bls12_381_fr_t _ | Bls12_381_g1_t _ | Bls12_381_g2_t _ | Sapling_state_t _ | Sapling_transaction_t _ -> serialize_ty_for_error ctxt ty >>? fun (t, _ctxt) -> error (Comparable_type_expected (loc, t)) let rec unparse_stack : type a s. context -> (a, s) stack_ty -> ((Script.expr * Script.annot) list * context) tzresult = fun ctxt -> function | Bot_t -> ok ([], ctxt) | Item_t (ty, rest, annot) -> unparse_ty ctxt ty >>? fun (uty, ctxt) -> unparse_stack ctxt rest >|? fun (urest, ctxt) -> ((strip_locations uty, unparse_var_annot annot) :: urest, ctxt) let serialize_stack_for_error ctxt stack_ty = record_trace Cannot_serialize_error (unparse_stack ctxt stack_ty) let name_of_ty : type a. a ty -> type_annot option = function | Unit_t tname -> tname | Int_t tname -> tname | Nat_t tname -> tname | String_t tname -> tname | Bytes_t tname -> tname | Mutez_t tname -> tname | Bool_t tname -> tname | Key_hash_t tname -> tname | Key_t tname -> tname | Timestamp_t tname -> tname | Address_t tname -> tname | Signature_t tname -> tname | Operation_t tname -> tname | Chain_id_t tname -> tname | Never_t tname -> tname | Contract_t (_, tname) -> tname | Pair_t (_, _, tname) -> tname | Union_t (_, _, tname) -> tname | Lambda_t (_, _, tname) -> tname | Option_t (_, tname) -> tname | List_t (_, tname) -> tname | Ticket_t (_, tname) -> tname | Set_t (_, tname) -> tname | Map_t (_, _, tname) -> tname | Big_map_t (_, _, tname) -> tname | Bls12_381_g1_t tname -> tname | Bls12_381_g2_t tname -> tname | Bls12_381_fr_t tname -> tname | Sapling_state_t (_, tname) -> tname | Sapling_transaction_t (_, tname) -> tname let unparse_unit ctxt () = ok (Prim (-1, D_Unit, [], []), ctxt) let unparse_int ctxt v = ok (Int (-1, Script_int.to_zint v), ctxt) let unparse_nat ctxt v = ok (Int (-1, Script_int.to_zint v), ctxt) let unparse_string ctxt s = ok (String (-1, s), ctxt) let unparse_bytes ctxt s = ok (Bytes (-1, s), ctxt) let unparse_bool ctxt b = ok (Prim (-1, (if b then D_True else D_False), [], []), ctxt) let unparse_timestamp ctxt mode t = match mode with | Optimized | Optimized_legacy -> ok (Int (-1, Script_timestamp.to_zint t), ctxt) | Readable -> ( Gas.consume ctxt Unparse_costs.timestamp_readable >>? fun ctxt -> match Script_timestamp.to_notation t with | None -> ok (Int (-1, Script_timestamp.to_zint t), ctxt) | Some s -> ok (String (-1, s), ctxt) ) let unparse_address ctxt mode (c, entrypoint) = Gas.consume ctxt Unparse_costs.contract >>? fun ctxt -> ( match entrypoint with (* given parse_address, this should not happen *) | "" -> error Unparsing_invariant_violated | _ -> ok () ) >|? fun () -> match mode with | Optimized | Optimized_legacy -> let entrypoint = match entrypoint with "default" -> "" | name -> name in let bytes = Data_encoding.Binary.to_bytes_exn Data_encoding.(tup2 Contract.encoding Variable.string) (c, entrypoint) in (Bytes (-1, bytes), ctxt) | Readable -> let notation = match entrypoint with | "default" -> Contract.to_b58check c | entrypoint -> Contract.to_b58check c ^ "%" ^ entrypoint in (String (-1, notation), ctxt) let unparse_contract ctxt mode (_, address) = unparse_address ctxt mode address let unparse_signature ctxt mode s = match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.signature_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Signature.encoding s in (Bytes (-1, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.signature_readable >|? fun ctxt -> (String (-1, Signature.to_b58check s), ctxt) let unparse_mutez ctxt v = ok (Int (-1, Z.of_int64 (Tez.to_mutez v)), ctxt) let unparse_key ctxt mode k = match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.public_key_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Signature.Public_key.encoding k in (Bytes (-1, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.public_key_readable >|? fun ctxt -> (String (-1, Signature.Public_key.to_b58check k), ctxt) let unparse_key_hash ctxt mode k = match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.key_hash_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Signature.Public_key_hash.encoding k in (Bytes (-1, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.key_hash_readable >|? fun ctxt -> (String (-1, Signature.Public_key_hash.to_b58check k), ctxt) let unparse_operation ctxt (op, _big_map_diff) = let bytes = Data_encoding.Binary.to_bytes_exn Operation.internal_operation_encoding op in Gas.consume ctxt (Unparse_costs.operation bytes) >|? fun ctxt -> (Bytes (-1, bytes), ctxt) let unparse_chain_id ctxt mode chain_id = match mode with | Optimized | Optimized_legacy -> Gas.consume ctxt Unparse_costs.chain_id_optimized >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Chain_id.encoding chain_id in (Bytes (-1, bytes), ctxt) | Readable -> Gas.consume ctxt Unparse_costs.chain_id_readable >|? fun ctxt -> (String (-1, Chain_id.to_b58check chain_id), ctxt) let unparse_bls12_381_g1 ctxt x = Gas.consume ctxt Unparse_costs.bls12_381_g1 >|? fun ctxt -> let bytes = Bls12_381.G1.to_bytes x in (Bytes (-1, bytes), ctxt) let unparse_bls12_381_g2 ctxt x = Gas.consume ctxt Unparse_costs.bls12_381_g2 >|? fun ctxt -> let bytes = Bls12_381.G2.to_bytes x in (Bytes (-1, bytes), ctxt) let unparse_bls12_381_fr ctxt x = Gas.consume ctxt Unparse_costs.bls12_381_fr >|? fun ctxt -> let bytes = Bls12_381.Fr.to_bytes x in (Bytes (-1, bytes), ctxt) (* -- Unparsing data of complex types -- *) type ('ty, 'depth) comb_witness = | Comb_Pair : ('t, 'd) comb_witness -> (_ * 't, unit -> 'd) comb_witness | Comb_Any : (_, _) comb_witness let unparse_pair (type r) unparse_l unparse_r ctxt mode (r_comb_witness : (r, unit -> unit -> _) comb_witness) (l, (r : r)) = unparse_l ctxt l >>=? fun (l, ctxt) -> unparse_r ctxt r >|=? fun (r, ctxt) -> (* Fold combs. For combs, three notations are supported: - a) [Pair x1 (Pair x2 ... (Pair xn-1 xn) ...)], - b) [Pair x1 x2 ... xn-1 xn], and - c) [{x1; x2; ...; xn-1; xn}]. In readable mode, we always use b), in optimized mode we use the shortest to serialize: - for n=2, [Pair x1 x2], - for n=3, [Pair x1 (Pair x2 x3)], - for n>=4, [{x1; x2; ...; xn}]. *) let res = match (mode, r_comb_witness, r) with | (Optimized, Comb_Pair _, Micheline.Seq (_, r)) -> (* Optimized case n > 4 *) Micheline.Seq (-1, l :: r) | ( Optimized, Comb_Pair (Comb_Pair _), Prim (_, D_Pair, [x2; Prim (_, D_Pair, [x3; x4], [])], []) ) -> (* Optimized case n = 4 *) Micheline.Seq (-1, [l; x2; x3; x4]) | (Readable, Comb_Pair _, Prim (_, D_Pair, xs, [])) -> (* Readable case n > 2 *) Prim (-1, D_Pair, l :: xs, []) | _ -> (* The remaining cases are: - Optimized n = 2, - Optimized n = 3, and - Readable n = 2, - Optimized_legacy, any n *) Prim (-1, D_Pair, [l; r], []) in (res, ctxt) let unparse_union unparse_l unparse_r ctxt = function | L l -> unparse_l ctxt l >|=? fun (l, ctxt) -> (Prim (-1, D_Left, [l], []), ctxt) | R r -> unparse_r ctxt r >|=? fun (r, ctxt) -> (Prim (-1, D_Right, [r], []), ctxt) let unparse_option unparse_v ctxt = function | Some v -> unparse_v ctxt v >|=? fun (v, ctxt) -> (Prim (-1, D_Some, [v], []), ctxt) | None -> return (Prim (-1, D_None, [], []), ctxt) (* -- Unparsing data of comparable types -- *) let comparable_comb_witness2 : type t. t comparable_ty -> (t, unit -> unit -> unit) comb_witness = function | Pair_key (_, (Pair_key _, _), _) -> Comb_Pair (Comb_Pair Comb_Any) | Pair_key _ -> Comb_Pair Comb_Any | _ -> Comb_Any let rec unparse_comparable_data : type a. context -> unparsing_mode -> a comparable_ty -> a -> (Script.node * context) tzresult Lwt.t = fun ctxt mode ty a -> (* No need for stack_depth here. Unlike [unparse_data], [unparse_comparable_data] doesn't call [unparse_code]. The stack depth is bounded by the type depth, currently bounded by 1000 (michelson_maximum_type_size). *) Gas.consume ctxt Unparse_costs.unparse_data_cycle (* We could have a smaller cost but let's keep it consistent with [unparse_data] for now. *) >>?= fun ctxt -> match (ty, a) with | (Unit_key _, v) -> Lwt.return @@ unparse_unit ctxt v | (Int_key _, v) -> Lwt.return @@ unparse_int ctxt v | (Nat_key _, v) -> Lwt.return @@ unparse_nat ctxt v | (String_key _, s) -> Lwt.return @@ unparse_string ctxt s | (Bytes_key _, s) -> Lwt.return @@ unparse_bytes ctxt s | (Bool_key _, b) -> Lwt.return @@ unparse_bool ctxt b | (Timestamp_key _, t) -> Lwt.return @@ unparse_timestamp ctxt mode t | (Address_key _, address) -> Lwt.return @@ unparse_address ctxt mode address | (Signature_key _, s) -> Lwt.return @@ unparse_signature ctxt mode s | (Mutez_key _, v) -> Lwt.return @@ unparse_mutez ctxt v | (Key_key _, k) -> Lwt.return @@ unparse_key ctxt mode k | (Key_hash_key _, k) -> Lwt.return @@ unparse_key_hash ctxt mode k | (Chain_id_key _, chain_id) -> Lwt.return @@ unparse_chain_id ctxt mode chain_id | (Pair_key ((tl, _), (tr, _), _), pair) -> let r_witness = comparable_comb_witness2 tr in let unparse_l ctxt v = unparse_comparable_data ctxt mode tl v in let unparse_r ctxt v = unparse_comparable_data ctxt mode tr v in unparse_pair unparse_l unparse_r ctxt mode r_witness pair | (Union_key ((tl, _), (tr, _), _), v) -> let unparse_l ctxt v = unparse_comparable_data ctxt mode tl v in let unparse_r ctxt v = unparse_comparable_data ctxt mode tr v in unparse_union unparse_l unparse_r ctxt v | (Option_key (t, _), v) -> let unparse_v ctxt v = unparse_comparable_data ctxt mode t v in unparse_option unparse_v ctxt v | (Never_key _, _) -> . let pack_node unparsed ctxt = Gas.consume ctxt (Script.strip_locations_cost unparsed) >>? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn expr_encoding (Micheline.strip_locations unparsed) in Gas.consume ctxt (Script.serialized_cost bytes) >>? fun ctxt -> let bytes = Bytes.cat (Bytes.of_string "\005") bytes in Gas.consume ctxt (Script.serialized_cost bytes) >|? fun ctxt -> (bytes, ctxt) let pack_comparable_data ctxt typ data ~mode = unparse_comparable_data ctxt mode typ data >>=? fun (unparsed, ctxt) -> Lwt.return @@ pack_node unparsed ctxt let hash_bytes ctxt bytes = Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.blake2b bytes) >|? fun ctxt -> (Script_expr_hash.(hash_bytes [bytes]), ctxt) let hash_comparable_data ctxt typ data = pack_comparable_data ctxt typ data ~mode:Optimized_legacy >>=? fun (bytes, ctxt) -> Lwt.return @@ hash_bytes ctxt bytes (* ---- Tickets ------------------------------------------------------------ *) (* All comparable types are dupable, this function exists only to not forget checking this property when adding new types. *) let check_dupable_comparable_ty : type a. a comparable_ty -> unit = function | Unit_key _ | Never_key _ | Int_key _ | Nat_key _ | Signature_key _ | String_key _ | Bytes_key _ | Mutez_key _ | Bool_key _ | Key_hash_key _ | Key_key _ | Timestamp_key _ | Chain_id_key _ | Address_key _ | Pair_key _ | Union_key _ | Option_key _ -> () let rec check_dupable_ty : type a. context -> location -> a ty -> context tzresult = fun ctxt loc ty -> Gas.consume ctxt Typecheck_costs.check_dupable_cycle >>? fun ctxt -> match ty with | Unit_t _ -> ok ctxt | Int_t _ -> ok ctxt | Nat_t _ -> ok ctxt | Signature_t _ -> ok ctxt | String_t _ -> ok ctxt | Bytes_t _ -> ok ctxt | Mutez_t _ -> ok ctxt | Key_hash_t _ -> ok ctxt | Key_t _ -> ok ctxt | Timestamp_t _ -> ok ctxt | Address_t _ -> ok ctxt | Bool_t _ -> ok ctxt | Contract_t (_, _) -> ok ctxt | Operation_t _ -> ok ctxt | Chain_id_t _ -> ok ctxt | Never_t _ -> ok ctxt | Bls12_381_g1_t _ -> ok ctxt | Bls12_381_g2_t _ -> ok ctxt | Bls12_381_fr_t _ -> ok ctxt | Sapling_state_t _ -> ok ctxt | Sapling_transaction_t _ -> ok ctxt | Ticket_t _ -> error (Unexpected_ticket loc) | Pair_t ((ty_a, _, _), (ty_b, _, _), _) -> check_dupable_ty ctxt loc ty_a >>? fun ctxt -> check_dupable_ty ctxt loc ty_b | Union_t ((ty_a, _), (ty_b, _), _) -> check_dupable_ty ctxt loc ty_a >>? fun ctxt -> check_dupable_ty ctxt loc ty_b | Lambda_t (_, _, _) -> (* Lambda are dupable as long as: - they don't contain non-dupable values, e.g. in `PUSH` (mosty non-dupable values should probably be considered forged) - they are not the result of a partial application on a non-dupable value. `APPLY` rejects non-packable types (because of `PUSH`). Hence non-dupable should imply non-packable. *) ok ctxt | Option_t (ty, _) -> check_dupable_ty ctxt loc ty | List_t (ty, _) -> check_dupable_ty ctxt loc ty | Set_t (key_ty, _) -> let () = check_dupable_comparable_ty key_ty in ok ctxt | Map_t (key_ty, val_ty, _) -> let () = check_dupable_comparable_ty key_ty in check_dupable_ty ctxt loc val_ty | Big_map_t (key_ty, val_ty, _) -> let () = check_dupable_comparable_ty key_ty in check_dupable_ty ctxt loc val_ty (* ---- Equality witnesses --------------------------------------------------*) type ('ta, 'tb) eq = Eq : ('same, 'same) eq let record_inconsistent ctxt ta tb = record_trace_eval (fun () -> serialize_ty_for_error ctxt ta >>? fun (ta, ctxt) -> serialize_ty_for_error ctxt tb >|? fun (tb, _ctxt) -> Inconsistent_types (ta, tb)) let record_inconsistent_type_annotations ctxt loc ta tb = record_trace_eval (fun () -> serialize_ty_for_error ctxt ta >>? fun (ta, ctxt) -> serialize_ty_for_error ctxt tb >|? fun (tb, _ctxt) -> Inconsistent_type_annotations (loc, ta, tb)) let rec merge_comparable_types : type ta tb. legacy:bool -> context -> ta comparable_ty -> tb comparable_ty -> ((ta comparable_ty, tb comparable_ty) eq * ta comparable_ty * context) tzresult = fun ~legacy ctxt ta tb -> Gas.consume ctxt Typecheck_costs.merge_cycle >>? fun ctxt -> match (ta, tb) with | (Unit_key annot_a, Unit_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> ( (Eq : (ta comparable_ty, tb comparable_ty) eq), (Unit_key annot : ta comparable_ty), ctxt ) | (Never_key annot_a, Never_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Never_key annot, ctxt) | (Int_key annot_a, Int_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Int_key annot, ctxt) | (Nat_key annot_a, Nat_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Nat_key annot, ctxt) | (Signature_key annot_a, Signature_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Signature_key annot, ctxt) | (String_key annot_a, String_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, String_key annot, ctxt) | (Bytes_key annot_a, Bytes_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Bytes_key annot, ctxt) | (Mutez_key annot_a, Mutez_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Mutez_key annot, ctxt) | (Bool_key annot_a, Bool_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Bool_key annot, ctxt) | (Key_hash_key annot_a, Key_hash_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Key_hash_key annot, ctxt) | (Key_key annot_a, Key_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Key_key annot, ctxt) | (Timestamp_key annot_a, Timestamp_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Timestamp_key annot, ctxt) | (Chain_id_key annot_a, Chain_id_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Chain_id_key annot, ctxt) | (Address_key annot_a, Address_key annot_b) -> merge_type_annot ~legacy annot_a annot_b >|? fun annot -> (Eq, Address_key annot, ctxt) | ( Pair_key ((left_a, annot_left_a), (right_a, annot_right_a), annot_a), Pair_key ((left_b, annot_left_b), (right_b, annot_right_b), annot_b) ) -> merge_type_annot ~legacy annot_a annot_b >>? fun annot -> merge_field_annot ~legacy annot_left_a annot_left_b >>? fun annot_left -> merge_field_annot ~legacy annot_right_a annot_right_b >>? fun annot_right -> merge_comparable_types ~legacy ctxt left_a left_b >>? fun (Eq, left, ctxt) -> merge_comparable_types ~legacy ctxt right_a right_b >|? fun (Eq, right, ctxt) -> ( (Eq : (ta comparable_ty, tb comparable_ty) eq), Pair_key ((left, annot_left), (right, annot_right), annot), ctxt ) | ( Union_key ((left_a, annot_left_a), (right_a, annot_right_a), annot_a), Union_key ((left_b, annot_left_b), (right_b, annot_right_b), annot_b) ) -> merge_type_annot ~legacy annot_a annot_b >>? fun annot -> merge_field_annot ~legacy annot_left_a annot_left_b >>? fun annot_left -> merge_field_annot ~legacy annot_right_a annot_right_b >>? fun annot_right -> merge_comparable_types ~legacy ctxt left_a left_b >>? fun (Eq, left, ctxt) -> merge_comparable_types ~legacy ctxt right_a right_b >|? fun (Eq, right, ctxt) -> ( (Eq : (ta comparable_ty, tb comparable_ty) eq), Union_key ((left, annot_left), (right, annot_right), annot), ctxt ) | (Option_key (ta, annot_a), Option_key (tb, annot_b)) -> merge_type_annot ~legacy annot_a annot_b >>? fun annot -> merge_comparable_types ~legacy ctxt ta tb >|? fun (Eq, t, ctxt) -> ( (Eq : (ta comparable_ty, tb comparable_ty) eq), Option_key (t, annot), ctxt ) | (_, _) -> serialize_ty_for_error ctxt (ty_of_comparable_ty ta) >>? fun (ta, ctxt) -> serialize_ty_for_error ctxt (ty_of_comparable_ty tb) >>? fun (tb, _ctxt) -> error (Inconsistent_types (ta, tb)) let comparable_ty_eq : type ta tb. context -> ta comparable_ty -> tb comparable_ty -> ((ta comparable_ty, tb comparable_ty) eq * context) tzresult = fun ctxt ta tb -> merge_comparable_types ~legacy:true ctxt ta tb >|? fun (eq, _ty, ctxt) -> (eq, ctxt) let merge_memo_sizes ms1 ms2 = if Sapling.Memo_size.equal ms1 ms2 then ok ms1 else error (Inconsistent_memo_sizes (ms1, ms2)) let merge_types : type a b. legacy:bool -> context -> Script.location -> a ty -> b ty -> ((a ty, b ty) eq * a ty * context) tzresult = fun ~legacy ctxt loc ty1 ty2 -> let merge_type_annot tn1 tn2 = merge_type_annot ~legacy tn1 tn2 |> record_inconsistent_type_annotations ctxt loc ty1 ty2 in let rec help : type ta tb. context -> ta ty -> tb ty -> ((ta ty, tb ty) eq * ta ty * context) tzresult = fun ctxt ty1 ty2 -> help0 ctxt ty1 ty2 |> record_inconsistent ctxt ty1 ty2 and help0 : type ta tb. context -> ta ty -> tb ty -> ((ta ty, tb ty) eq * ta ty * context) tzresult = fun ctxt ty1 ty2 -> Gas.consume ctxt Typecheck_costs.merge_cycle >>? fun ctxt -> match (ty1, ty2) with | (Unit_t tn1, Unit_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> ((Eq : (ta ty, tb ty) eq), (Unit_t tname : ta ty), ctxt) | (Int_t tn1, Int_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Int_t tname, ctxt) | (Nat_t tn1, Nat_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Nat_t tname, ctxt) | (Key_t tn1, Key_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Key_t tname, ctxt) | (Key_hash_t tn1, Key_hash_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Key_hash_t tname, ctxt) | (String_t tn1, String_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, String_t tname, ctxt) | (Bytes_t tn1, Bytes_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Bytes_t tname, ctxt) | (Signature_t tn1, Signature_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Signature_t tname, ctxt) | (Mutez_t tn1, Mutez_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Mutez_t tname, ctxt) | (Timestamp_t tn1, Timestamp_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Timestamp_t tname, ctxt) | (Address_t tn1, Address_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Address_t tname, ctxt) | (Bool_t tn1, Bool_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Bool_t tname, ctxt) | (Chain_id_t tn1, Chain_id_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Chain_id_t tname, ctxt) | (Never_t tn1, Never_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Never_t tname, ctxt) | (Operation_t tn1, Operation_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Operation_t tname, ctxt) | (Bls12_381_g1_t tn1, Bls12_381_g1_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Bls12_381_g1_t tname, ctxt) | (Bls12_381_g2_t tn1, Bls12_381_g2_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Bls12_381_g2_t tname, ctxt) | (Bls12_381_fr_t tn1, Bls12_381_fr_t tn2) -> merge_type_annot tn1 tn2 >|? fun tname -> (Eq, Bls12_381_fr_t tname, ctxt) | (Map_t (tal, tar, tn1), Map_t (tbl, tbr, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> help ctxt tar tbr >>? fun (Eq, value, ctxt) -> merge_comparable_types ~legacy ctxt tal tbl >|? fun (Eq, tk, ctxt) -> ((Eq : (ta ty, tb ty) eq), Map_t (tk, value, tname), ctxt) | (Big_map_t (tal, tar, tn1), Big_map_t (tbl, tbr, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> help ctxt tar tbr >>? fun (Eq, value, ctxt) -> merge_comparable_types ~legacy ctxt tal tbl >|? fun (Eq, tk, ctxt) -> ((Eq : (ta ty, tb ty) eq), Big_map_t (tk, value, tname), ctxt) | (Set_t (ea, tn1), Set_t (eb, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> merge_comparable_types ~legacy ctxt ea eb >|? fun (Eq, e, ctxt) -> ((Eq : (ta ty, tb ty) eq), Set_t (e, tname), ctxt) | (Ticket_t (ea, tn1), Ticket_t (eb, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> merge_comparable_types ~legacy ctxt ea eb >|? fun (Eq, e, ctxt) -> ((Eq : (ta ty, tb ty) eq), Ticket_t (e, tname), ctxt) | ( Pair_t ((tal, l_field1, l_var1), (tar, r_field1, r_var1), tn1), Pair_t ((tbl, l_field2, l_var2), (tbr, r_field2, r_var2), tn2) ) -> merge_type_annot tn1 tn2 >>? fun tname -> merge_field_annot ~legacy l_field1 l_field2 >>? fun l_field -> merge_field_annot ~legacy r_field1 r_field2 >>? fun r_field -> let l_var = merge_var_annot l_var1 l_var2 in let r_var = merge_var_annot r_var1 r_var2 in help ctxt tal tbl >>? fun (Eq, left_ty, ctxt) -> help ctxt tar tbr >|? fun (Eq, right_ty, ctxt) -> ( (Eq : (ta ty, tb ty) eq), Pair_t ((left_ty, l_field, l_var), (right_ty, r_field, r_var), tname), ctxt ) | ( Union_t ((tal, tal_annot), (tar, tar_annot), tn1), Union_t ((tbl, tbl_annot), (tbr, tbr_annot), tn2) ) -> merge_type_annot tn1 tn2 >>? fun tname -> merge_field_annot ~legacy tal_annot tbl_annot >>? fun left_annot -> merge_field_annot ~legacy tar_annot tbr_annot >>? fun right_annot -> help ctxt tal tbl >>? fun (Eq, left_ty, ctxt) -> help ctxt tar tbr >|? fun (Eq, right_ty, ctxt) -> ( (Eq : (ta ty, tb ty) eq), Union_t ((left_ty, left_annot), (right_ty, right_annot), tname), ctxt ) | (Lambda_t (tal, tar, tn1), Lambda_t (tbl, tbr, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> help ctxt tal tbl >>? fun (Eq, left_ty, ctxt) -> help ctxt tar tbr >|? fun (Eq, right_ty, ctxt) -> ((Eq : (ta ty, tb ty) eq), Lambda_t (left_ty, right_ty, tname), ctxt) | (Contract_t (tal, tn1), Contract_t (tbl, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> help ctxt tal tbl >|? fun (Eq, arg_ty, ctxt) -> ((Eq : (ta ty, tb ty) eq), Contract_t (arg_ty, tname), ctxt) | (Option_t (tva, tn1), Option_t (tvb, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> help ctxt tva tvb >|? fun (Eq, ty, ctxt) -> ((Eq : (ta ty, tb ty) eq), Option_t (ty, tname), ctxt) | (List_t (tva, tn1), List_t (tvb, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> help ctxt tva tvb >|? fun (Eq, ty, ctxt) -> ((Eq : (ta ty, tb ty) eq), List_t (ty, tname), ctxt) | (Sapling_state_t (ms1, tn1), Sapling_state_t (ms2, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> merge_memo_sizes ms1 ms2 >|? fun ms -> (Eq, Sapling_state_t (ms, tname), ctxt) | (Sapling_transaction_t (ms1, tn1), Sapling_transaction_t (ms2, tn2)) -> merge_type_annot tn1 tn2 >>? fun tname -> merge_memo_sizes ms1 ms2 >|? fun ms -> (Eq, Sapling_transaction_t (ms, tname), ctxt) | (_, _) -> serialize_ty_for_error ctxt ty1 >>? fun (ty1, ctxt) -> serialize_ty_for_error ctxt ty2 >>? fun (ty2, _ctxt) -> error (Inconsistent_types (ty1, ty2)) in help ctxt ty1 ty2 [@@coq_axiom_with_reason "non-top-level mutual recursion"] let ty_eq : type ta tb. context -> Script.location -> ta ty -> tb ty -> ((ta ty, tb ty) eq * context) tzresult = fun ctxt loc ta tb -> merge_types ~legacy:true ctxt loc ta tb >|? fun (eq, _ty, ctxt) -> (eq, ctxt) let merge_stacks : type ta tb ts tu. legacy:bool -> Script.location -> context -> int -> (ta, ts) stack_ty -> (tb, tu) stack_ty -> (((ta, ts) stack_ty, (tb, tu) stack_ty) eq * (ta, ts) stack_ty * context) tzresult = fun ~legacy loc -> let rec help : type ta tb ts tu. context -> int -> (ta, ts) stack_ty -> (tb, tu) stack_ty -> (((ta, ts) stack_ty, (tb, tu) stack_ty) eq * (ta, ts) stack_ty * context) tzresult = fun ctxt lvl stack1 stack2 -> match (stack1, stack2) with | (Bot_t, Bot_t) -> ok (Eq, Bot_t, ctxt) | (Item_t (ty1, rest1, annot1), Item_t (ty2, rest2, annot2)) -> merge_types ~legacy ctxt loc ty1 ty2 |> record_trace (Bad_stack_item lvl) >>? fun (Eq, ty, ctxt) -> help ctxt (lvl + 1) rest1 rest2 >|? fun (Eq, rest, ctxt) -> let annot = merge_var_annot annot1 annot2 in ( (Eq : ((ta, ts) stack_ty, (tb, tu) stack_ty) eq), Item_t (ty, rest, annot), ctxt ) | (_, _) -> error Bad_stack_length in help (* ---- Type checker results -------------------------------------------------*) type ('a, 's) judgement = | Typed : ('a, 's, 'b, 'u) descr -> ('a, 's) judgement | Failed : { descr : 'b 'u. ('b, 'u) stack_ty -> ('a, 's, 'b, 'u) descr; } -> ('a, 's) judgement (* ---- Type checker (Untyped expressions -> Typed IR) ----------------------*) type ('a, 's, 'b, 'u, 'c, 'v) branch = { branch : 'r 'f. ('a, 's, 'r, 'f) descr -> ('b, 'u, 'r, 'f) descr -> ('c, 'v, 'r, 'f) descr; } [@@unboxed] let merge_branches : type a s b u c v. legacy:bool -> context -> int -> (a, s) judgement -> (b, u) judgement -> (a, s, b, u, c, v) branch -> ((c, v) judgement * context) tzresult = fun ~legacy ctxt loc btr bfr {branch} -> match (btr, bfr) with | (Typed ({aft = aftbt; _} as dbt), Typed ({aft = aftbf; _} as dbf)) -> let unmatched_branches () = serialize_stack_for_error ctxt aftbt >>? fun (aftbt, ctxt) -> serialize_stack_for_error ctxt aftbf >|? fun (aftbf, _ctxt) -> Unmatched_branches (loc, aftbt, aftbf) in record_trace_eval unmatched_branches ( merge_stacks ~legacy loc ctxt 1 aftbt aftbf >|? fun (Eq, merged_stack, ctxt) -> ( Typed (branch {dbt with aft = merged_stack} {dbf with aft = merged_stack}), ctxt ) ) | (Failed {descr = descrt}, Failed {descr = descrf}) -> let descr ret = branch (descrt ret) (descrf ret) in ok (Failed {descr}, ctxt) | (Typed dbt, Failed {descr = descrf}) -> ok (Typed (branch dbt (descrf dbt.aft)), ctxt) | (Failed {descr = descrt}, Typed dbf) -> ok (Typed (branch (descrt dbf.aft) dbf), ctxt) let parse_memo_size (n : (location, _) Micheline.node) : Sapling.Memo_size.t tzresult = match n with | Int (_, z) -> ( match Sapling.Memo_size.parse_z z with | Ok _ as ok_memo_size -> ok_memo_size | Error msg -> error @@ Invalid_syntactic_constant (location n, strip_locations n, msg) ) | _ -> error @@ Invalid_kind (location n, [Int_kind], kind n) let rec parse_comparable_ty : stack_depth:int -> context -> Script.node -> (ex_comparable_ty * context) tzresult = fun ~stack_depth ctxt ty -> Gas.consume ctxt Typecheck_costs.parse_type_cycle >>? fun ctxt -> if Compare.Int.(stack_depth > 10000) then error Typechecking_too_many_recursive_calls else match ty with | Prim (loc, T_unit, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Unit_key tname), ctxt) | Prim (loc, T_never, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Never_key tname), ctxt) | Prim (loc, T_int, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Int_key tname), ctxt) | Prim (loc, T_nat, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Nat_key tname), ctxt) | Prim (loc, T_signature, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Signature_key tname), ctxt) | Prim (loc, T_string, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (String_key tname), ctxt) | Prim (loc, T_bytes, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Bytes_key tname), ctxt) | Prim (loc, T_mutez, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Mutez_key tname), ctxt) | Prim (loc, T_bool, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Bool_key tname), ctxt) | Prim (loc, T_key_hash, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Key_hash_key tname), ctxt) | Prim (loc, T_key, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Key_key tname), ctxt) | Prim (loc, T_timestamp, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Timestamp_key tname), ctxt) | Prim (loc, T_chain_id, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Chain_id_key tname), ctxt) | Prim (loc, T_address, [], annot) -> parse_type_annot loc annot >|? fun tname -> (Ex_comparable_ty (Address_key tname), ctxt) | Prim ( loc, ( ( T_unit | T_never | T_int | T_nat | T_string | T_bytes | T_mutez | T_bool | T_key_hash | T_timestamp | T_address | T_chain_id | T_signature | T_key ) as prim ), l, _ ) -> error (Invalid_arity (loc, prim, 0, List.length l)) | Prim (loc, T_pair, left :: right, annot) -> parse_type_annot loc annot >>? fun pname -> extract_field_annot left >>? fun (left, left_annot) -> ( match right with | [right] -> extract_field_annot right | right -> (* Unfold [pair t1 ... tn] as [pair t1 (... (pair tn-1 tn))] *) ok (Prim (loc, T_pair, right, []), None) ) >>? fun (right, right_annot) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt right >>? fun (Ex_comparable_ty right, ctxt) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt left >|? fun (Ex_comparable_ty left, ctxt) -> ( Ex_comparable_ty (Pair_key ((left, left_annot), (right, right_annot), pname)), ctxt ) | Prim (loc, T_or, [left; right], annot) -> parse_type_annot loc annot >>? fun pname -> extract_field_annot left >>? fun (left, left_annot) -> extract_field_annot right >>? fun (right, right_annot) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt right >>? fun (Ex_comparable_ty right, ctxt) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt left >|? fun (Ex_comparable_ty left, ctxt) -> ( Ex_comparable_ty (Union_key ((left, left_annot), (right, right_annot), pname)), ctxt ) | Prim (loc, ((T_pair | T_or) as prim), l, _) -> error (Invalid_arity (loc, prim, 2, List.length l)) | Prim (loc, T_option, [t], annot) -> parse_type_annot loc annot >>? fun tname -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt t >|? fun (Ex_comparable_ty t, ctxt) -> (Ex_comparable_ty (Option_key (t, tname)), ctxt) | Prim (loc, T_option, l, _) -> error (Invalid_arity (loc, T_option, 1, List.length l)) | Prim ( loc, (T_set | T_map | T_list | T_lambda | T_contract | T_operation), _, _ ) -> error (Comparable_type_expected (loc, Micheline.strip_locations ty)) | expr -> error @@ unexpected expr [] Type_namespace [ T_unit; T_never; T_int; T_nat; T_string; T_bytes; T_mutez; T_bool; T_key_hash; T_timestamp; T_address; T_pair; T_or; T_option; T_chain_id; T_signature; T_key ] let rec parse_packable_ty : context -> stack_depth:int -> legacy:bool -> Script.node -> (ex_ty * context) tzresult = fun ctxt ~stack_depth ~legacy -> (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:false ~allow_operation:false ~allow_contract:legacy ~allow_ticket:false and parse_parameter_ty : context -> stack_depth:int -> legacy:bool -> Script.node -> (ex_ty * context) tzresult = fun ctxt ~stack_depth ~legacy -> (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:true ~allow_operation:false ~allow_contract:true ~allow_ticket:true and parse_normal_storage_ty : context -> stack_depth:int -> legacy:bool -> Script.node -> (ex_ty * context) tzresult = fun ctxt ~stack_depth ~legacy -> (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:true ~allow_operation:false ~allow_contract:legacy ~allow_ticket:true and parse_any_ty : context -> stack_depth:int -> legacy:bool -> Script.node -> (ex_ty * context) tzresult = fun ctxt ~stack_depth ~legacy -> (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:true ~allow_operation:true ~allow_contract:true ~allow_ticket:true and parse_ty : context -> stack_depth:int -> legacy:bool -> allow_lazy_storage:bool -> allow_operation:bool -> allow_contract:bool -> allow_ticket:bool -> Script.node -> (ex_ty * context) tzresult = fun ctxt ~stack_depth ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket node -> Gas.consume ctxt Typecheck_costs.parse_type_cycle >>? fun ctxt -> if Compare.Int.(stack_depth > 10000) then error Typechecking_too_many_recursive_calls else match node with | Prim (loc, T_unit, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Unit_t ty_name), ctxt) | Prim (loc, T_int, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Int_t ty_name), ctxt) | Prim (loc, T_nat, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Nat_t ty_name), ctxt) | Prim (loc, T_string, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (String_t ty_name), ctxt) | Prim (loc, T_bytes, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Bytes_t ty_name), ctxt) | Prim (loc, T_mutez, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Mutez_t ty_name), ctxt) | Prim (loc, T_bool, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Bool_t ty_name), ctxt) | Prim (loc, T_key, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Key_t ty_name), ctxt) | Prim (loc, T_key_hash, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Key_hash_t ty_name), ctxt) | Prim (loc, T_timestamp, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Timestamp_t ty_name), ctxt) | Prim (loc, T_address, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Address_t ty_name), ctxt) | Prim (loc, T_signature, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Signature_t ty_name), ctxt) | Prim (loc, T_operation, [], annot) -> if allow_operation then parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Operation_t ty_name), ctxt) else error (Unexpected_operation loc) | Prim (loc, T_chain_id, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Chain_id_t ty_name), ctxt) | Prim (loc, T_never, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Never_t ty_name), ctxt) | Prim (loc, T_bls12_381_g1, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Bls12_381_g1_t ty_name), ctxt) | Prim (loc, T_bls12_381_g2, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Bls12_381_g2_t ty_name), ctxt) | Prim (loc, T_bls12_381_fr, [], annot) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Bls12_381_fr_t ty_name), ctxt) | Prim (loc, T_contract, [utl], annot) -> if allow_contract then parse_parameter_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy utl >>? fun (Ex_ty tl, ctxt) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Contract_t (tl, ty_name)), ctxt) else error (Unexpected_contract loc) | Prim (loc, T_pair, utl :: utr, annot) -> extract_field_annot utl >>? fun (utl, left_field) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket utl >>? fun (Ex_ty tl, ctxt) -> ( match utr with | [utr] -> extract_field_annot utr | utr -> (* Unfold [pair t1 ... tn] as [pair t1 (... (pair tn-1 tn))] *) ok (Prim (loc, T_pair, utr, []), None) ) >>? fun (utr, right_field) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket utr >>? fun (Ex_ty tr, ctxt) -> parse_type_annot loc annot >>? fun ty_name -> ok ( Ex_ty (Pair_t ((tl, left_field, None), (tr, right_field, None), ty_name)), ctxt ) | Prim (loc, T_or, [utl; utr], annot) -> extract_field_annot utl >>? fun (utl, left_constr) -> extract_field_annot utr >>? fun (utr, right_constr) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket utl >>? fun (Ex_ty tl, ctxt) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket utr >>? fun (Ex_ty tr, ctxt) -> parse_type_annot loc annot >>? fun ty_name -> ok ( Ex_ty (Union_t ((tl, left_constr), (tr, right_constr), ty_name)), ctxt ) | Prim (loc, T_lambda, [uta; utr], annot) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy uta >>? fun (Ex_ty ta, ctxt) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy utr >>? fun (Ex_ty tr, ctxt) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Lambda_t (ta, tr, ty_name)), ctxt) | Prim (loc, T_option, [ut], annot) -> ( if legacy then (* legacy semantics with (broken) field annotations *) extract_field_annot ut >>? fun (ut, _some_constr) -> parse_composed_type_annot loc annot >>? fun (ty_name, _none_constr, _) -> ok (ut, ty_name) else parse_type_annot loc annot >>? fun ty_name -> ok (ut, ty_name) ) >>? fun (ut, ty_name) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ut >>? fun (Ex_ty t, ctxt) -> ok (Ex_ty (Option_t (t, ty_name)), ctxt) | Prim (loc, T_list, [ut], annot) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket ut >>? fun (Ex_ty t, ctxt) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (List_t (t, ty_name)), ctxt) | Prim (loc, T_ticket, [ut], annot) -> if allow_ticket then parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt ut >>? fun (Ex_comparable_ty t, ctxt) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Ticket_t (t, ty_name)), ctxt) else error (Unexpected_ticket loc) | Prim (loc, T_set, [ut], annot) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt ut >>? fun (Ex_comparable_ty t, ctxt) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Set_t (t, ty_name)), ctxt) | Prim (loc, T_map, [uta; utr], annot) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt uta >>? fun (Ex_comparable_ty ta, ctxt) -> parse_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_lazy_storage ~allow_operation ~allow_contract ~allow_ticket utr >>? fun (Ex_ty tr, ctxt) -> parse_type_annot loc annot >>? fun ty_name -> ok (Ex_ty (Map_t (ta, tr, ty_name)), ctxt) | Prim (loc, T_sapling_transaction, [memo_size], annot) -> parse_type_annot loc annot >>? fun ty_name -> parse_memo_size memo_size >|? fun memo_size -> (Ex_ty (Sapling_transaction_t (memo_size, ty_name)), ctxt) (* /!\ When adding new lazy storage kinds, be careful to use [when allow_lazy_storage] /!\ Lazy storage should not be packable to avoid stealing a lazy storage from another contract with `PUSH t id` or `UNPACK`. *) | Prim (loc, T_big_map, args, annot) when allow_lazy_storage -> (parse_big_map_ty [@tailcall]) ctxt ~stack_depth ~legacy loc args annot | Prim (loc, T_sapling_state, [memo_size], annot) when allow_lazy_storage -> parse_type_annot loc annot >>? fun ty_name -> parse_memo_size memo_size >|? fun memo_size -> (Ex_ty (Sapling_state_t (memo_size, ty_name)), ctxt) | Prim (loc, (T_big_map | T_sapling_state), _, _) -> error (Unexpected_lazy_storage loc) | Prim ( loc, ( ( T_unit | T_signature | T_int | T_nat | T_string | T_bytes | T_mutez | T_bool | T_key | T_key_hash | T_timestamp | T_address | T_chain_id | T_operation | T_never ) as prim ), l, _ ) -> error (Invalid_arity (loc, prim, 0, List.length l)) | Prim ( loc, ((T_set | T_list | T_option | T_contract | T_ticket) as prim), l, _ ) -> error (Invalid_arity (loc, prim, 1, List.length l)) | Prim (loc, ((T_pair | T_or | T_map | T_lambda) as prim), l, _) -> error (Invalid_arity (loc, prim, 2, List.length l)) | expr -> error @@ unexpected expr [] Type_namespace [ T_pair; T_or; T_set; T_map; T_list; T_option; T_lambda; T_unit; T_signature; T_contract; T_int; T_nat; T_operation; T_string; T_bytes; T_mutez; T_bool; T_key; T_key_hash; T_timestamp; T_chain_id; T_never; T_bls12_381_g1; T_bls12_381_g2; T_bls12_381_fr; T_ticket ] and parse_big_map_ty ctxt ~stack_depth ~legacy big_map_loc args map_annot = Gas.consume ctxt Typecheck_costs.parse_type_cycle >>? fun ctxt -> match args with | [key_ty; value_ty] -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt key_ty >>? fun (Ex_comparable_ty key_ty, ctxt) -> parse_big_map_value_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy value_ty >>? fun (Ex_ty value_ty, ctxt) -> parse_type_annot big_map_loc map_annot >|? fun map_name -> let big_map_ty = Big_map_t (key_ty, value_ty, map_name) in (Ex_ty big_map_ty, ctxt) | args -> error @@ Invalid_arity (big_map_loc, T_big_map, 2, List.length args) and parse_big_map_value_ty ctxt ~stack_depth ~legacy value_ty = (parse_ty [@tailcall]) ctxt ~stack_depth ~legacy ~allow_lazy_storage:false ~allow_operation:false ~allow_contract:legacy ~allow_ticket:true value_ty let parse_storage_ty : context -> stack_depth:int -> legacy:bool -> Script.node -> (ex_ty * context) tzresult = fun ctxt ~stack_depth ~legacy node -> match node with | Prim ( loc, T_pair, [Prim (big_map_loc, T_big_map, args, map_annot); remaining_storage], storage_annot ) when legacy -> ( match storage_annot with | [] -> (parse_normal_storage_ty [@tailcall]) ctxt ~stack_depth ~legacy node | [single] when Compare.Int.(String.length single > 0) && Compare.Char.(single.[0] = '%') -> (parse_normal_storage_ty [@tailcall]) ctxt ~stack_depth ~legacy node | _ -> (* legacy semantics of big maps used the wrong annotation parser *) Gas.consume ctxt Typecheck_costs.parse_type_cycle >>? fun ctxt -> parse_big_map_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy big_map_loc args map_annot >>? fun (Ex_ty big_map_ty, ctxt) -> parse_normal_storage_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy remaining_storage >>? fun (Ex_ty remaining_storage, ctxt) -> parse_composed_type_annot loc storage_annot >>? fun (ty_name, map_field, storage_field) -> ok ( Ex_ty (Pair_t ( (big_map_ty, map_field, None), (remaining_storage, storage_field, None), ty_name )), ctxt ) ) | _ -> (parse_normal_storage_ty [@tailcall]) ctxt ~stack_depth ~legacy node let check_packable ~legacy loc root = let rec check : type t. t ty -> unit tzresult = function (* /!\ When adding new lazy storage kinds, be sure to return an error. /!\ Lazy storage should not be packable. *) | Big_map_t _ -> error (Unexpected_lazy_storage loc) | Sapling_state_t _ -> error (Unexpected_lazy_storage loc) | Operation_t _ -> error (Unexpected_operation loc) | Unit_t _ -> ok_unit | Int_t _ -> ok_unit | Nat_t _ -> ok_unit | Signature_t _ -> ok_unit | String_t _ -> ok_unit | Bytes_t _ -> ok_unit | Mutez_t _ -> ok_unit | Key_hash_t _ -> ok_unit | Key_t _ -> ok_unit | Timestamp_t _ -> ok_unit | Address_t _ -> ok_unit | Bool_t _ -> ok_unit | Chain_id_t _ -> ok_unit | Never_t _ -> ok_unit | Set_t (_, _) -> ok_unit | Ticket_t _ -> error (Unexpected_ticket loc) | Lambda_t (_, _, _) -> ok_unit | Bls12_381_g1_t _ -> ok_unit | Bls12_381_g2_t _ -> ok_unit | Bls12_381_fr_t _ -> ok_unit | Pair_t ((l_ty, _, _), (r_ty, _, _), _) -> check l_ty >>? fun () -> check r_ty | Union_t ((l_ty, _), (r_ty, _), _) -> check l_ty >>? fun () -> check r_ty | Option_t (v_ty, _) -> check v_ty | List_t (elt_ty, _) -> check elt_ty | Map_t (_, elt_ty, _) -> check elt_ty | Contract_t (_, _) when legacy -> ok_unit | Contract_t (_, _) -> error (Unexpected_contract loc) | Sapling_transaction_t _ -> ok () in check root type ('arg, 'storage) code = { code : (('arg, 'storage) pair, (operation boxed_list, 'storage) pair) lambda; arg_type : 'arg ty; storage_type : 'storage ty; root_name : field_annot option; } type ex_script = Ex_script : ('a, 'c) script -> ex_script type ex_code = Ex_code : ('a, 'c) code -> ex_code type (_, _) dig_proof_argument = | Dig_proof_argument : ('x, 'a * 's, 'a, 's, 'b, 't, 'c, 'u) stack_prefix_preservation_witness * 'x ty * var_annot option * ('c, 'u) stack_ty -> ('b, 't) dig_proof_argument type (_, _, _) dug_proof_argument = | Dug_proof_argument : ( ('a, 's, 'x, 'a * 's, 'b, 't, 'c, 'u) stack_prefix_preservation_witness * unit * ('c, 'u) stack_ty ) -> ('b, 't, 'x) dug_proof_argument type (_, _) dipn_proof_argument = | Dipn_proof_argument : ('fa, 'fs, 'fb, 'fu, 'a, 's, 'b, 'u) stack_prefix_preservation_witness * context * ('fa, 'fs, 'fb, 'fu) descr * ('b, 'u) stack_ty -> ('a, 's) dipn_proof_argument type (_, _) dropn_proof_argument = | Dropn_proof_argument : ('fa, 'fs, 'fa, 'fs, 'a, 's, 'a, 's) stack_prefix_preservation_witness * ('fa, 'fs) stack_ty -> ('a, 's) dropn_proof_argument type 'before comb_proof_argument = | Comb_proof_argument : ('a * 's, 'b * 'u) comb_gadt_witness * ('b, 'u) stack_ty -> ('a * 's) comb_proof_argument type 'before uncomb_proof_argument = | Uncomb_proof_argument : ('a * 's, 'b * 'u) uncomb_gadt_witness * ('b, 'u) stack_ty -> ('a * 's) uncomb_proof_argument type 'before comb_get_proof_argument = | Comb_get_proof_argument : ('before, 'after) comb_get_gadt_witness * 'after ty -> 'before comb_get_proof_argument type ('rest, 'before) comb_set_proof_argument = | Comb_set_proof_argument : ('rest, 'before, 'after) comb_set_gadt_witness * 'after ty -> ('rest, 'before) comb_set_proof_argument type 'before dup_n_proof_argument = | Dup_n_proof_argument : ('before, 'a) dup_n_gadt_witness * 'a ty -> 'before dup_n_proof_argument let find_entrypoint (type full) (full : full ty) ~root_name entrypoint = let annot_is_entrypoint entrypoint = function | None -> false | Some (Field_annot l) -> Compare.String.(l = entrypoint) in let rec find_entrypoint : type t. t ty -> string -> ((Script.node -> Script.node) * ex_ty) option = fun t entrypoint -> match t with | Union_t ((tl, al), (tr, ar), _) -> ( if annot_is_entrypoint entrypoint al then Some ((fun e -> Prim (0, D_Left, [e], [])), Ex_ty tl) else if annot_is_entrypoint entrypoint ar then Some ((fun e -> Prim (0, D_Right, [e], [])), Ex_ty tr) else match find_entrypoint tl entrypoint with | Some (f, t) -> Some ((fun e -> Prim (0, D_Left, [f e], [])), t) | None -> ( match find_entrypoint tr entrypoint with | Some (f, t) -> Some ((fun e -> Prim (0, D_Right, [f e], [])), t) | None -> None ) ) | _ -> None in let entrypoint = if Compare.String.(entrypoint = "") then "default" else entrypoint in if Compare.Int.(String.length entrypoint > 31) then error (Entrypoint_name_too_long entrypoint) else match root_name with | Some (Field_annot root_name) when Compare.String.(entrypoint = root_name) -> ok ((fun e -> e), Ex_ty full) | _ -> ( match find_entrypoint full entrypoint with | Some result -> ok result | None -> ( match entrypoint with | "default" -> ok ((fun e -> e), Ex_ty full) | _ -> error (No_such_entrypoint entrypoint) ) ) let find_entrypoint_for_type (type full exp) ~legacy ~(full : full ty) ~(expected : exp ty) ~root_name entrypoint ctxt loc : (context * string * exp ty) tzresult = match (entrypoint, root_name) with | ("default", Some (Field_annot "root")) -> ( match find_entrypoint full ~root_name entrypoint with | Error _ as err -> err | Ok (_, Ex_ty ty) -> ( match merge_types ~legacy ctxt loc ty expected with | Ok (Eq, ty, ctxt) -> ok (ctxt, "default", ty) | Error _ -> merge_types ~legacy ctxt loc full expected >>? fun (Eq, full, ctxt) -> ok (ctxt, "root", (full : exp ty)) ) ) | _ -> find_entrypoint full ~root_name entrypoint >>? fun (_, Ex_ty ty) -> merge_types ~legacy ctxt loc ty expected >>? fun (Eq, ty, ctxt) -> ok (ctxt, entrypoint, (ty : exp ty)) [@@coq_axiom_with_reason "cast on err"] module Entrypoints = Set.Make (String) exception Duplicate of string exception Too_long of string let well_formed_entrypoints (type full) (full : full ty) ~root_name = let merge path annot (type t) (ty : t ty) reachable ((first_unreachable, all) as acc) = match annot with | None | Some (Field_annot "") -> ( if reachable then acc else match ty with | Union_t _ -> acc | _ -> ( match first_unreachable with | None -> (Some (List.rev path), all) | Some _ -> acc ) ) | Some (Field_annot name) -> if Compare.Int.(String.length name > 31) then raise (Too_long name) else if Entrypoints.mem name all then raise (Duplicate name) else (first_unreachable, Entrypoints.add name all) in let rec check : type t. t ty -> prim list -> bool -> prim list option * Entrypoints.t -> prim list option * Entrypoints.t = fun t path reachable acc -> match t with | Union_t ((tl, al), (tr, ar), _) -> let acc = merge (D_Left :: path) al tl reachable acc in let acc = merge (D_Right :: path) ar tr reachable acc in let acc = check tl (D_Left :: path) (match al with Some _ -> true | None -> reachable) acc in check tr (D_Right :: path) (match ar with Some _ -> true | None -> reachable) acc | _ -> acc in try let (init, reachable) = match root_name with | None | Some (Field_annot "") -> (Entrypoints.empty, false) | Some (Field_annot name) -> (Entrypoints.singleton name, true) in let (first_unreachable, all) = check full [] reachable (None, init) in if not (Entrypoints.mem "default" all) then ok_unit else match first_unreachable with | None -> ok_unit | Some path -> error (Unreachable_entrypoint path) with | Duplicate name -> error (Duplicate_entrypoint name) | Too_long name -> error (Entrypoint_name_too_long name) let parse_uint ~nb_bits = assert (Compare.Int.(nb_bits >= 0 && nb_bits <= 30)) ; let max_int = (1 lsl nb_bits) - 1 in let max_z = Z.of_int max_int in function | Micheline.Int (_, n) when Compare.Z.(Z.zero <= n) && Compare.Z.(n <= max_z) -> ok (Z.to_int n) | node -> error @@ Invalid_syntactic_constant ( location node, strip_locations node, "a positive " ^ string_of_int nb_bits ^ "-bit integer (between 0 and " ^ string_of_int max_int ^ ")" ) let parse_uint10 = parse_uint ~nb_bits:10 let parse_uint11 = parse_uint ~nb_bits:11 (* This type is used to: - serialize and deserialize tickets when they are stored or transferred, - type the READ_TICKET instruction. *) let opened_ticket_type ty = Pair_key ( (Address_key None, None), (Pair_key ((ty, None), (Nat_key None, None), None), None), None ) (* -- parse data of primitive types -- *) let parse_unit ctxt ~legacy = function | Prim (loc, D_Unit, [], annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>? fun () -> Gas.consume ctxt Typecheck_costs.unit >|? fun ctxt -> ((), ctxt) | Prim (loc, D_Unit, l, _) -> error @@ Invalid_arity (loc, D_Unit, 0, List.length l) | expr -> error @@ unexpected expr [] Constant_namespace [D_Unit] let parse_bool ctxt ~legacy = function | Prim (loc, D_True, [], annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>? fun () -> Gas.consume ctxt Typecheck_costs.bool >|? fun ctxt -> (true, ctxt) | Prim (loc, D_False, [], annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>? fun () -> Gas.consume ctxt Typecheck_costs.bool >|? fun ctxt -> (false, ctxt) | Prim (loc, ((D_True | D_False) as c), l, _) -> error @@ Invalid_arity (loc, c, 0, List.length l) | expr -> error @@ unexpected expr [] Constant_namespace [D_True; D_False] let parse_string ctxt = function | String (loc, v) as expr -> Gas.consume ctxt (Typecheck_costs.check_printable v) >>? fun ctxt -> let rec check_printable_ascii i = if Compare.Int.(i < 0) then true else match v.[i] with | '\n' | '\x20' .. '\x7E' -> check_printable_ascii (i - 1) | _ -> false in if check_printable_ascii (String.length v - 1) then ok (v, ctxt) else error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a printable ascii string") | expr -> error @@ Invalid_kind (location expr, [String_kind], kind expr) let parse_bytes ctxt = function | Bytes (_, v) -> ok (v, ctxt) | expr -> error @@ Invalid_kind (location expr, [Bytes_kind], kind expr) let parse_int ctxt = function | Int (_, v) -> ok (Script_int.of_zint v, ctxt) | expr -> error @@ Invalid_kind (location expr, [Int_kind], kind expr) let parse_nat ctxt = function | Int (loc, v) as expr -> ( let v = Script_int.of_zint v in match Script_int.is_nat v with | Some nat -> ok (nat, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a non-negative integer") ) | expr -> error @@ Invalid_kind (location expr, [Int_kind], kind expr) let parse_mutez ctxt = function | Int (loc, v) as expr -> ( try match Tez.of_mutez (Z.to_int64 v) with | None -> raise Exit | Some tez -> ok (tez, ctxt) with _ -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid mutez amount") ) | expr -> error @@ Invalid_kind (location expr, [Int_kind], kind expr) let parse_timestamp ctxt = function | Int (_, v) (* As unparsed with [Optimized] or out of bounds [Readable]. *) -> ok (Script_timestamp.of_zint v, ctxt) | String (loc, s) as expr (* As unparsed with [Readable]. *) -> ( Gas.consume ctxt Typecheck_costs.timestamp_readable >>? fun ctxt -> match Script_timestamp.of_string s with | Some v -> ok (v, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid timestamp") ) | expr -> error @@ Invalid_kind (location expr, [String_kind; Int_kind], kind expr) let parse_key ctxt = function | Bytes (loc, bytes) as expr -> ( (* As unparsed with [Optimized]. *) Gas.consume ctxt Typecheck_costs.public_key_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes Signature.Public_key.encoding bytes with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid public key") ) | String (loc, s) as expr -> ( (* As unparsed with [Readable]. *) Gas.consume ctxt Typecheck_costs.public_key_readable >>? fun ctxt -> match Signature.Public_key.of_b58check_opt s with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid public key") ) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_key_hash ctxt = function | Bytes (loc, bytes) as expr -> ( (* As unparsed with [Optimized]. *) Gas.consume ctxt Typecheck_costs.key_hash_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes Signature.Public_key_hash.encoding bytes with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid key hash") ) | String (loc, s) as expr (* As unparsed with [Readable]. *) -> ( Gas.consume ctxt Typecheck_costs.key_hash_readable >>? fun ctxt -> match Signature.Public_key_hash.of_b58check_opt s with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid key hash") ) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_signature ctxt = function | Bytes (loc, bytes) as expr (* As unparsed with [Optimized]. *) -> ( Gas.consume ctxt Typecheck_costs.signature_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes Signature.encoding bytes with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid signature") ) | String (loc, s) as expr (* As unparsed with [Readable]. *) -> ( Gas.consume ctxt Typecheck_costs.signature_readable >>? fun ctxt -> match Signature.of_b58check_opt s with | Some s -> ok (s, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid signature") ) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_chain_id ctxt = function | Bytes (loc, bytes) as expr -> ( Gas.consume ctxt Typecheck_costs.chain_id_optimized >>? fun ctxt -> match Data_encoding.Binary.of_bytes Chain_id.encoding bytes with | Some k -> ok (k, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid chain id") ) | String (loc, s) as expr -> ( Gas.consume ctxt Typecheck_costs.chain_id_readable >>? fun ctxt -> match Chain_id.of_b58check_opt s with | Some s -> ok (s, ctxt) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid chain id") ) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_address ctxt = function | Bytes (loc, bytes) as expr (* As unparsed with [Optimized]. *) -> ( Gas.consume ctxt Typecheck_costs.contract >>? fun ctxt -> match Data_encoding.Binary.of_bytes Data_encoding.(tup2 Contract.encoding Variable.string) bytes with | Some (c, entrypoint) -> ( if Compare.Int.(String.length entrypoint > 31) then error (Entrypoint_name_too_long entrypoint) else match entrypoint with | "" -> ok ((c, "default"), ctxt) | "default" -> error (Unexpected_annotation loc) | name -> ok ((c, name), ctxt) ) | None -> error @@ Invalid_syntactic_constant (loc, strip_locations expr, "a valid address") ) | String (loc, s) (* As unparsed with [Readable]. *) -> Gas.consume ctxt Typecheck_costs.contract >>? fun ctxt -> ( match String.index_opt s '%' with | None -> ok (s, "default") | Some pos -> ( let len = String.length s - pos - 1 in let name = String.sub s (pos + 1) len in if Compare.Int.(len > 31) then error (Entrypoint_name_too_long name) else match (String.sub s 0 pos, name) with | (addr, "") -> ok (addr, "default") | (_, "default") -> error @@ Unexpected_annotation loc | addr_and_name -> ok addr_and_name ) ) >>? fun (addr, entrypoint) -> Contract.of_b58check addr >|? fun c -> ((c, entrypoint), ctxt) | expr -> error @@ Invalid_kind (location expr, [String_kind; Bytes_kind], kind expr) let parse_never expr = error @@ Invalid_never_expr (location expr) (* -- parse data of complex types -- *) let parse_pair (type r) parse_l parse_r ctxt ~legacy (r_comb_witness : (r, unit -> _) comb_witness) expr = let parse_comb loc l rs = parse_l ctxt l >>=? fun (l, ctxt) -> ( match (rs, r_comb_witness) with | ([r], _) -> ok r | ([], _) -> error @@ Invalid_arity (loc, D_Pair, 2, 1) | (_ :: _, Comb_Pair _) -> (* Unfold [Pair x1 ... xn] as [Pair x1 (Pair x2 ... xn-1 xn))] for type [pair ta (pair tb1 tb2)] and n >= 3 only *) ok (Prim (loc, D_Pair, rs, [])) | _ -> error @@ Invalid_arity (loc, D_Pair, 2, 1 + List.length rs) ) >>?= fun r -> parse_r ctxt r >|=? fun (r, ctxt) -> ((l, r), ctxt) in match expr with | Prim (loc, D_Pair, l :: rs, annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_comb loc l rs | Prim (loc, D_Pair, l, _) -> fail @@ Invalid_arity (loc, D_Pair, 2, List.length l) (* Unfold [{x1; ...; xn}] as [Pair x1 x2 ... xn-1 xn] for n >= 2 *) | Seq (loc, l :: (_ :: _ as rs)) -> parse_comb loc l rs | Seq (loc, l) -> fail @@ Invalid_seq_arity (loc, 2, List.length l) | expr -> fail @@ unexpected expr [] Constant_namespace [D_Pair] let parse_union parse_l parse_r ctxt ~legacy = function | Prim (loc, D_Left, [v], annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_l ctxt v >|=? fun (v, ctxt) -> (L v, ctxt) | Prim (loc, D_Left, l, _) -> fail @@ Invalid_arity (loc, D_Left, 1, List.length l) | Prim (loc, D_Right, [v], annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_r ctxt v >|=? fun (v, ctxt) -> (R v, ctxt) | Prim (loc, D_Right, l, _) -> fail @@ Invalid_arity (loc, D_Right, 1, List.length l) | expr -> fail @@ unexpected expr [] Constant_namespace [D_Left; D_Right] let parse_option parse_v ctxt ~legacy = function | Prim (loc, D_Some, [v], annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_v ctxt v >|=? fun (v, ctxt) -> (Some v, ctxt) | Prim (loc, D_Some, l, _) -> fail @@ Invalid_arity (loc, D_Some, 1, List.length l) | Prim (loc, D_None, [], annot) -> Lwt.return ( (if legacy then ok_unit else error_unexpected_annot loc annot) >|? fun () -> (None, ctxt) ) | Prim (loc, D_None, l, _) -> fail @@ Invalid_arity (loc, D_None, 0, List.length l) | expr -> fail @@ unexpected expr [] Constant_namespace [D_Some; D_None] (* -- parse data of comparable types -- *) let comparable_comb_witness1 : type t. t comparable_ty -> (t, unit -> unit) comb_witness = function | Pair_key _ -> Comb_Pair Comb_Any | _ -> Comb_Any let rec parse_comparable_data : type a. ?type_logger:type_logger -> context -> a comparable_ty -> Script.node -> (a * context) tzresult Lwt.t = fun ?type_logger ctxt ty script_data -> (* No need for stack_depth here. Unlike [parse_data], [parse_comparable_data] doesn't call [parse_returning]. The stack depth is bounded by the type depth, bounded by 1024. *) let parse_data_error () = serialize_ty_for_error ctxt (ty_of_comparable_ty ty) >|? fun (ty, _ctxt) -> Invalid_constant (location script_data, strip_locations script_data, ty) in let traced_no_lwt body = record_trace_eval parse_data_error body in let traced body = trace_eval (fun () -> Lwt.return @@ parse_data_error ()) body in Gas.consume ctxt Typecheck_costs.parse_data_cycle (* We could have a smaller cost but let's keep it consistent with [parse_data] for now. *) >>?= fun ctxt -> let legacy = false in match (ty, script_data) with | (Unit_key _, expr) -> Lwt.return @@ traced_no_lwt @@ (parse_unit ctxt ~legacy expr : (a * context) tzresult) | (Bool_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_bool ctxt ~legacy expr | (String_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_string ctxt expr | (Bytes_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_bytes ctxt expr | (Int_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_int ctxt expr | (Nat_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_nat ctxt expr | (Mutez_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_mutez ctxt expr | (Timestamp_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_timestamp ctxt expr | (Key_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_key ctxt expr | (Key_hash_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_key_hash ctxt expr | (Signature_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_signature ctxt expr | (Chain_id_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_chain_id ctxt expr | (Address_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_address ctxt expr | (Pair_key ((tl, _), (tr, _), _), expr) -> let r_witness = comparable_comb_witness1 tr in let parse_l ctxt v = parse_comparable_data ?type_logger ctxt tl v in let parse_r ctxt v = parse_comparable_data ?type_logger ctxt tr v in traced @@ parse_pair parse_l parse_r ctxt ~legacy r_witness expr | (Union_key ((tl, _), (tr, _), _), expr) -> let parse_l ctxt v = parse_comparable_data ?type_logger ctxt tl v in let parse_r ctxt v = parse_comparable_data ?type_logger ctxt tr v in traced @@ parse_union parse_l parse_r ctxt ~legacy expr | (Option_key (t, _), expr) -> let parse_v ctxt v = parse_comparable_data ?type_logger ctxt t v in traced @@ parse_option parse_v ctxt ~legacy expr | (Never_key _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_never expr (* -- parse data of any type -- *) let comb_witness1 : type t. t ty -> (t, unit -> unit) comb_witness = function | Pair_t _ -> Comb_Pair Comb_Any | _ -> Comb_Any (* Some values, such as operations, tickets, or big map ids, are used only internally and are not allowed to be forged by users. In [parse_data], [allow_forged] should be [false] for: - PUSH - UNPACK - user-provided script parameters - storage on origination And [true] for: - internal calls parameters - storage after origination *) let rec parse_data : type a. ?type_logger:type_logger -> stack_depth:int -> context -> legacy:bool -> allow_forged:bool -> a ty -> Script.node -> (a * context) tzresult Lwt.t = fun ?type_logger ~stack_depth ctxt ~legacy ~allow_forged ty script_data -> Gas.consume ctxt Typecheck_costs.parse_data_cycle >>?= fun ctxt -> let non_terminal_recursion ?type_logger ctxt ~legacy ty script_data = if Compare.Int.(stack_depth > 10_000) then fail Typechecking_too_many_recursive_calls else parse_data ?type_logger ~stack_depth:(stack_depth + 1) ctxt ~legacy ~allow_forged ty script_data in let parse_data_error () = serialize_ty_for_error ctxt ty >|? fun (ty, _ctxt) -> Invalid_constant (location script_data, strip_locations script_data, ty) in let fail_parse_data () = parse_data_error () >>?= fail in let traced_no_lwt body = record_trace_eval parse_data_error body in let traced body = trace_eval (fun () -> Lwt.return @@ parse_data_error ()) body in let traced_fail err = Lwt.return @@ traced_no_lwt (error err) in let parse_items ?type_logger ctxt expr key_type value_type items item_wrapper = fold_left_s (fun (last_value, map, ctxt) item -> match item with | Prim (loc, D_Elt, [k; v], annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_comparable_data ?type_logger ctxt key_type k >>=? fun (k, ctxt) -> non_terminal_recursion ?type_logger ctxt ~legacy value_type v >>=? fun (v, ctxt) -> Lwt.return ( ( match last_value with | Some value -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.compare key_type value k) >>? fun ctxt -> let c = compare_comparable key_type value k in if Compare.Int.(0 <= c) then if Compare.Int.(0 = c) then error (Duplicate_map_keys (loc, strip_locations expr)) else error (Unordered_map_keys (loc, strip_locations expr)) else ok ctxt | None -> ok ctxt ) >>? fun ctxt -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.map_update k map) >|? fun ctxt -> (Some k, map_update k (Some (item_wrapper v)) map, ctxt) ) | Prim (loc, D_Elt, l, _) -> fail @@ Invalid_arity (loc, D_Elt, 2, List.length l) | Prim (loc, name, _, _) -> fail @@ Invalid_primitive (loc, [D_Elt], name) | Int _ | String _ | Bytes _ | Seq _ -> fail_parse_data ()) (None, empty_map key_type, ctxt) items |> traced >|=? fun (_, items, ctxt) -> (items, ctxt) in let parse_big_map_items (type t) ?type_logger ctxt expr (key_type : t comparable_ty) value_type items item_wrapper = fold_left_s (fun (last_key, {map; size}, ctxt) item -> match item with | Prim (loc, D_Elt, [k; v], annot) -> (if legacy then ok_unit else error_unexpected_annot loc annot) >>?= fun () -> parse_comparable_data ?type_logger ctxt key_type k >>=? fun (k, ctxt) -> hash_comparable_data ctxt key_type k >>=? fun (key_hash, ctxt) -> non_terminal_recursion ?type_logger ctxt ~legacy value_type v >>=? fun (v, ctxt) -> Lwt.return ( ( match last_key with | Some last_key -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.compare key_type last_key k) >>? fun ctxt -> let c = compare_comparable key_type last_key k in if Compare.Int.(0 <= c) then if Compare.Int.(0 = c) then error (Duplicate_map_keys (loc, strip_locations expr)) else error (Unordered_map_keys (loc, strip_locations expr)) else ok ctxt | None -> ok ctxt ) >>? fun ctxt -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.big_map_update {map; size}) >>? fun ctxt -> if Big_map_overlay.mem key_hash map then error (Duplicate_map_keys (loc, strip_locations expr)) else ok ( Some k, { map = Big_map_overlay.add key_hash (k, item_wrapper v) map; size = size + 1; }, ctxt ) ) | Prim (loc, D_Elt, l, _) -> fail @@ Invalid_arity (loc, D_Elt, 2, List.length l) | Prim (loc, name, _, _) -> fail @@ Invalid_primitive (loc, [D_Elt], name) | Int _ | String _ | Bytes _ | Seq _ -> fail_parse_data ()) (None, {map = Big_map_overlay.empty; size = 0}, ctxt) items |> traced >|=? fun (_, map, ctxt) -> (map, ctxt) in match (ty, script_data) with | (Unit_t _, expr) -> Lwt.return @@ traced_no_lwt @@ (parse_unit ctxt ~legacy expr : (a * context) tzresult) | (Bool_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_bool ctxt ~legacy expr | (String_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_string ctxt expr | (Bytes_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_bytes ctxt expr | (Int_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_int ctxt expr | (Nat_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_nat ctxt expr | (Mutez_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_mutez ctxt expr | (Timestamp_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_timestamp ctxt expr | (Key_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_key ctxt expr | (Key_hash_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_key_hash ctxt expr | (Signature_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_signature ctxt expr | (Operation_t _, _) -> (* operations cannot appear in parameters or storage, the protocol should never parse the bytes of an operation *) assert false | (Chain_id_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_chain_id ctxt expr | (Address_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_address ctxt expr | (Contract_t (ty, _), expr) -> traced ( parse_address ctxt expr >>?= fun ((c, entrypoint), ctxt) -> let loc = location expr in parse_contract ~stack_depth:(stack_depth + 1) ~legacy ctxt loc ty c ~entrypoint >|=? fun (ctxt, _) -> ((ty, (c, entrypoint)), ctxt) ) (* Pairs *) | (Pair_t ((tl, _, _), (tr, _, _), _), expr) -> let r_witness = comb_witness1 tr in let parse_l ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy tl v in let parse_r ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy tr v in traced @@ parse_pair parse_l parse_r ctxt ~legacy r_witness expr (* Unions *) | (Union_t ((tl, _), (tr, _), _), expr) -> let parse_l ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy tl v in let parse_r ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy tr v in traced @@ parse_union parse_l parse_r ctxt ~legacy expr (* Lambdas *) | (Lambda_t (ta, tr, _ty_name), (Seq (_loc, _) as script_instr)) -> traced @@ parse_returning Lambda ?type_logger ~stack_depth:(stack_depth + 1) ctxt ~legacy (ta, Some (Var_annot "@arg")) tr script_instr | (Lambda_t _, expr) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) (* Options *) | (Option_t (t, _), expr) -> let parse_v ctxt v = non_terminal_recursion ?type_logger ctxt ~legacy t v in traced @@ parse_option parse_v ctxt ~legacy expr (* Lists *) | (List_t (t, _ty_name), Seq (_loc, items)) -> traced @@ fold_right_s (fun v (rest, ctxt) -> non_terminal_recursion ?type_logger ctxt ~legacy t v >|=? fun (v, ctxt) -> (list_cons v rest, ctxt)) items (list_empty, ctxt) | (List_t _, expr) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) (* Tickets *) | (Ticket_t (t, _ty_name), expr) -> if allow_forged then parse_comparable_data ?type_logger ctxt (opened_ticket_type t) expr >|=? fun ((ticketer, (contents, amount)), ctxt) -> ({ticketer; contents; amount}, ctxt) else traced_fail (Unexpected_forged_value (location expr)) (* Sets *) | (Set_t (t, _ty_name), (Seq (loc, vs) as expr)) -> traced @@ fold_left_s (fun (last_value, set, ctxt) v -> parse_comparable_data ?type_logger ctxt t v >>=? fun (v, ctxt) -> Lwt.return ( ( match last_value with | Some value -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.compare t value v) >>? fun ctxt -> let c = compare_comparable t value v in if Compare.Int.(0 <= c) then if Compare.Int.(0 = c) then error (Duplicate_set_values (loc, strip_locations expr)) else error (Unordered_set_values (loc, strip_locations expr)) else ok ctxt | None -> ok ctxt ) >>? fun ctxt -> Gas.consume ctxt (Michelson_v1_gas.Cost_of.Interpreter.set_update v set) >|? fun ctxt -> (Some v, set_update v true set, ctxt) )) (None, empty_set t, ctxt) vs >|=? fun (_, set, ctxt) -> (set, ctxt) | (Set_t _, expr) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) (* Maps *) | (Map_t (tk, tv, _ty_name), (Seq (_, vs) as expr)) -> parse_items ?type_logger ctxt expr tk tv vs (fun x -> x) | (Map_t _, expr) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) | (Big_map_t (tk, tv, _ty_name), expr) -> ( match expr with | Int (loc, id) -> return (Some (id, loc), {map = Big_map_overlay.empty; size = 0}, ctxt) | Seq (_, vs) -> parse_big_map_items ?type_logger ctxt expr tk tv vs (fun x -> Some x) >|=? fun (diff, ctxt) -> (None, diff, ctxt) | Prim (loc, D_Pair, [Int (loc_id, id); Seq (_, vs)], annot) -> error_unexpected_annot loc annot >>?= fun () -> let tv_opt = Option_t (tv, None) in parse_big_map_items ?type_logger ctxt expr tk tv_opt vs (fun x -> x) >|=? fun (diff, ctxt) -> (Some (id, loc_id), diff, ctxt) | Prim (_, D_Pair, [Int _; expr], _) -> traced_fail (Invalid_kind (location expr, [Seq_kind], kind expr)) | Prim (_, D_Pair, [expr; _], _) -> traced_fail (Invalid_kind (location expr, [Int_kind], kind expr)) | Prim (loc, D_Pair, l, _) -> traced_fail @@ Invalid_arity (loc, D_Pair, 2, List.length l) | _ -> traced_fail (unexpected expr [Seq_kind; Int_kind] Constant_namespace [D_Pair]) ) >>=? fun (id_opt, diff, ctxt) -> ( match id_opt with | None -> return @@ (None, ctxt) | Some (id, loc) -> if allow_forged then let id = Big_map.Id.parse_z id in Big_map.exists ctxt id >>=? function | (_, None) -> traced_fail (Invalid_big_map (loc, id)) | (ctxt, Some (btk, btv)) -> Lwt.return ( parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt (Micheline.root btk) >>? fun (Ex_comparable_ty btk, ctxt) -> parse_big_map_value_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy (Micheline.root btv) >>? fun (Ex_ty btv, ctxt) -> comparable_ty_eq ctxt tk btk >>? fun (Eq, ctxt) -> ty_eq ctxt loc tv btv >>? fun (Eq, ctxt) -> ok (Some id, ctxt) ) else traced_fail (Unexpected_forged_value loc) ) >|=? fun (id, ctxt) -> ({id; diff; key_type = tk; value_type = tv}, ctxt) | (Never_t _, expr) -> Lwt.return @@ traced_no_lwt @@ parse_never expr (* Bls12_381 types *) | (Bls12_381_g1_t _, Bytes (_, bs)) -> ( Gas.consume ctxt Typecheck_costs.bls12_381_g1 >>?= fun ctxt -> match Bls12_381.G1.of_bytes_opt bs with | Some pt -> return (pt, ctxt) | None -> fail_parse_data () ) | (Bls12_381_g1_t _, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) | (Bls12_381_g2_t _, Bytes (_, bs)) -> ( Gas.consume ctxt Typecheck_costs.bls12_381_g2 >>?= fun ctxt -> match Bls12_381.G2.of_bytes_opt bs with | Some pt -> return (pt, ctxt) | None -> fail_parse_data () ) | (Bls12_381_g2_t _, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) | (Bls12_381_fr_t _, Bytes (_, bs)) -> ( Gas.consume ctxt Typecheck_costs.bls12_381_fr >>?= fun ctxt -> match Bls12_381.Fr.of_bytes_opt bs with | Some pt -> return (pt, ctxt) | None -> fail_parse_data () ) | (Bls12_381_fr_t _, Int (_, v)) -> Gas.consume ctxt Typecheck_costs.bls12_381_fr >>?= fun ctxt -> return (Bls12_381.Fr.of_z v, ctxt) | (Bls12_381_fr_t _, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) (* /!\ When adding new lazy storage kinds, you may want to guard the parsing of identifiers with [allow_forged]. *) (* Sapling *) | (Sapling_transaction_t (memo_size, _), Bytes (_, bytes)) -> ( match Data_encoding.Binary.of_bytes Sapling.transaction_encoding bytes with | Some transaction -> ( match Sapling.transaction_get_memo_size transaction with | None -> return (transaction, ctxt) | Some transac_memo_size -> Lwt.return ( merge_memo_sizes memo_size transac_memo_size >|? fun _ms -> (transaction, ctxt) ) ) | None -> fail_parse_data () ) | (Sapling_transaction_t _, expr) -> traced_fail (Invalid_kind (location expr, [Bytes_kind], kind expr)) | (Sapling_state_t (memo_size, _), Int (loc, id)) -> if allow_forged then let id = Sapling.Id.parse_z id in Sapling.state_from_id ctxt id >>=? fun (state, ctxt) -> Lwt.return ( traced_no_lwt @@ merge_memo_sizes memo_size state.Sapling.memo_size >|? fun _memo_size -> (state, ctxt) ) else traced_fail (Unexpected_forged_value loc) | (Sapling_state_t (memo_size, _), Seq (_, [])) -> return (Sapling.empty_state ~memo_size (), ctxt) | (Sapling_state_t _, expr) -> (* Do not allow to input diffs as they are untrusted and may not be the result of a verify_update. *) traced_fail (Invalid_kind (location expr, [Int_kind; Seq_kind], kind expr)) and parse_returning : type arg ret. ?type_logger:type_logger -> stack_depth:int -> tc_context -> context -> legacy:bool -> arg ty * var_annot option -> ret ty -> Script.node -> ((arg, ret) lambda * context) tzresult Lwt.t = fun ?type_logger ~stack_depth tc_context ctxt ~legacy (arg, arg_annot) ret script_instr -> parse_instr ?type_logger tc_context ctxt ~legacy ~stack_depth:(stack_depth + 1) script_instr (Item_t (arg, Bot_t, arg_annot)) >>=? function | (Typed ({loc; aft = Item_t (ty, Bot_t, _) as stack_ty; _} as descr), ctxt) -> Lwt.return @@ record_trace_eval (fun () -> serialize_ty_for_error ctxt ret >>? fun (ret, ctxt) -> serialize_stack_for_error ctxt stack_ty >|? fun (stack_ty, _ctxt) -> Bad_return (loc, stack_ty, ret)) ( merge_types ~legacy ctxt loc ty ret >|? fun (Eq, _ret, ctxt) -> ((Lam (close_descr descr, script_instr) : (arg, ret) lambda), ctxt) ) | (Typed {loc; aft = stack_ty; _}, ctxt) -> Lwt.return ( serialize_ty_for_error ctxt ret >>? fun (ret, ctxt) -> serialize_stack_for_error ctxt stack_ty >>? fun (stack_ty, _ctxt) -> error (Bad_return (loc, stack_ty, ret)) ) | (Failed {descr}, ctxt) -> return ( ( Lam (close_descr (descr (Item_t (ret, Bot_t, None))), script_instr) : (arg, ret) lambda ), ctxt ) and parse_instr : type a s. ?type_logger:type_logger -> stack_depth:int -> tc_context -> context -> legacy:bool -> Script.node -> (a, s) stack_ty -> ((a, s) judgement * context) tzresult Lwt.t = fun ?type_logger ~stack_depth tc_context ctxt ~legacy script_instr stack_ty -> let check_item_ty (type a b) ctxt (exp : a ty) (got : b ty) loc name n m : ((a, b) eq * a ty * context) tzresult = record_trace_eval (fun () -> serialize_stack_for_error ctxt stack_ty >|? fun (stack_ty, _ctxt) -> Bad_stack (loc, name, m, stack_ty)) @@ record_trace (Bad_stack_item n) ( merge_types ~legacy ctxt loc exp got >>? fun (Eq, ty, ctxt) -> ok ((Eq : (a, b) eq), (ty : a ty), ctxt) ) in let log_stack ctxt loc stack_ty aft = match (type_logger, script_instr) with | (None, _) | (Some _, (Seq (-1, _) | Int _ | String _ | Bytes _)) -> ok_unit | (Some log, (Prim _ | Seq _)) -> (* Unparsing for logging done in an unlimited context as this is used only by the client and not the protocol *) let ctxt = Gas.set_unlimited ctxt in unparse_stack ctxt stack_ty >>? fun (stack_ty, _) -> unparse_stack ctxt aft >|? fun (aft, _) -> log loc stack_ty aft ; () in (* In the following functions, [number_of_generated_growing_types] is the depth of the stack to inspect for sizes overflow. We only need to check the produced types that can be larger than the arguments. That's why Swap is 0 for instance as no type grows. Constant sized types are not checked: it is assumed they are lower than the bound (otherwise every program would be rejected). *) let return_no_lwt : type a s. context -> int -> (a, s) judgement -> ((a, s) judgement * context) tzresult = fun ctxt number_of_generated_growing_types judgement -> match judgement with | Typed {loc; aft; _} -> let maximum_type_size = Constants.michelson_maximum_type_size ctxt in check_type_size_of_stack_head ~loc ~maximum_type_size aft ~up_to:number_of_generated_growing_types >|? fun () -> (judgement, ctxt) | Failed _ -> ok (judgement, ctxt) in let return : type a s. context -> int -> (a, s) judgement -> ((a, s) judgement * context) tzresult Lwt.t = fun ctxt number_of_generated_growing_types judgement -> Lwt.return @@ return_no_lwt ctxt number_of_generated_growing_types judgement in let typed_no_lwt ctxt number_of_generated_growing_types loc instr aft = log_stack ctxt loc stack_ty aft >>? fun () -> let j = Typed {loc; instr; bef = stack_ty; aft} in return_no_lwt ctxt number_of_generated_growing_types j in let typed ctxt number_of_generated_growing_types loc instr aft = Lwt.return @@ typed_no_lwt ctxt number_of_generated_growing_types loc instr aft in Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>?= fun ctxt -> let non_terminal_recursion ?type_logger tc_context ctxt ~legacy script_instr stack_ty = if Compare.Int.(stack_depth > 10000) then fail Typechecking_too_many_recursive_calls else parse_instr ?type_logger tc_context ctxt ~stack_depth:(stack_depth + 1) ~legacy script_instr stack_ty in match (script_instr, stack_ty) with (* stack ops *) | (Prim (loc, I_DROP, [], annot), Item_t (_, rest, _)) -> ( error_unexpected_annot loc annot >>?= fun () -> typed ctxt 0 loc {apply = (fun kinfo k -> IDrop (kinfo, k))} rest : ((a, s) judgement * context) tzresult Lwt.t ) | (Prim (loc, I_DROP, [n], result_annot), whole_stack) -> parse_uint10 n >>?= fun whole_n -> Gas.consume ctxt (Typecheck_costs.proof_argument whole_n) >>?= fun ctxt -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a, s) dropn_proof_argument tzresult = fun n stk -> match (Compare.Int.(n = 0), stk) with | (true, rest) -> ok @@ Dropn_proof_argument (KRest, rest) | (false, Item_t (_, rest, _)) -> make_proof_argument (n - 1) rest >|? fun (Dropn_proof_argument (n', stack_after_drops)) -> let kinfo = {iloc = loc; kstack_ty = rest} in Dropn_proof_argument (KPrefix (kinfo, n'), stack_after_drops) | (_, _) -> serialize_stack_for_error ctxt whole_stack >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_DROP, whole_n, whole_stack)) in error_unexpected_annot loc result_annot >>?= fun () -> make_proof_argument whole_n whole_stack >>?= fun (Dropn_proof_argument (n', stack_after_drops)) -> let kdropn kinfo k = IDropn (kinfo, whole_n, n', k) in typed ctxt 0 loc {apply = kdropn} stack_after_drops | (Prim (loc, I_DROP, (_ :: _ :: _ as l), _), _) -> (* Technically, the arities 0 and 1 are allowed but the error only mentions 1. However, DROP is equivalent to DROP 1 so hinting at an arity of 1 makes sense. *) fail (Invalid_arity (loc, I_DROP, 1, List.length l)) | (Prim (loc, I_DUP, [], annot), Item_t (v, rest, stack_annot)) -> parse_var_annot loc annot ~default:stack_annot >>?= fun annot -> record_trace_eval (fun () -> serialize_ty_for_error ctxt v >|? fun (t, _ctxt) -> Non_dupable_type (loc, t)) (check_dupable_ty ctxt loc v) >>?= fun ctxt -> let dup = {apply = (fun kinfo k -> IDup (kinfo, k))} in typed ctxt 0 loc dup (Item_t (v, Item_t (v, rest, stack_annot), annot)) | (Prim (loc, I_DUP, [n], v_annot), stack_ty) -> parse_var_annot loc v_annot >>?= fun annot -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a * s) dup_n_proof_argument tzresult = fun n (stack_ty : (a, s) stack_ty) -> match (n, stack_ty) with | (1, Item_t (hd_ty, _, _)) -> ok @@ Dup_n_proof_argument (Dup_n_zero, hd_ty) | (n, Item_t (_, tl_ty, _)) -> make_proof_argument (n - 1) tl_ty >|? fun (Dup_n_proof_argument (dup_n_witness, b_ty)) -> Dup_n_proof_argument (Dup_n_succ dup_n_witness, b_ty) | _ -> serialize_stack_for_error ctxt stack_ty >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_DUP, 1, whole_stack)) in parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> error_unless (Compare.Int.( > ) n 0) (Dup_n_bad_argument loc) >>?= fun () -> record_trace (Dup_n_bad_stack loc) (make_proof_argument n stack_ty) >>?= fun (Dup_n_proof_argument (witness, after_ty)) -> record_trace_eval (fun () -> serialize_ty_for_error ctxt after_ty >|? fun (t, _ctxt) -> Non_dupable_type (loc, t)) (check_dupable_ty ctxt loc after_ty) >>?= fun ctxt -> let dupn = {apply = (fun kinfo k -> IDup_n (kinfo, n, witness, k))} in typed ctxt 0 loc dupn (Item_t (after_ty, stack_ty, annot)) | (Prim (loc, I_DIG, [n], result_annot), stack) -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a, s) dig_proof_argument tzresult = fun n stk -> match (Compare.Int.(n = 0), stk) with | (true, Item_t (v, rest, annot)) -> ok @@ Dig_proof_argument (KRest, v, annot, rest) | (false, Item_t (v, rest, annot)) -> make_proof_argument (n - 1) rest >|? fun (Dig_proof_argument (n', x, xv, aft')) -> let kinfo = {iloc = loc; kstack_ty = aft'} in Dig_proof_argument (KPrefix (kinfo, n'), x, xv, Item_t (v, aft', annot)) | (_, _) -> serialize_stack_for_error ctxt stack >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_DIG, 3, whole_stack)) in parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> error_unexpected_annot loc result_annot >>?= fun () -> make_proof_argument n stack >>?= fun (Dig_proof_argument (n', x, stack_annot, aft)) -> let dig = {apply = (fun kinfo k -> IDig (kinfo, n, n', k))} in typed ctxt 0 loc dig (Item_t (x, aft, stack_annot)) | (Prim (loc, I_DIG, (([] | _ :: _ :: _) as l), _), _) -> fail (Invalid_arity (loc, I_DIG, 1, List.length l)) | (Prim (loc, I_DUG, [n], result_annot), Item_t (x, whole_stack, stack_annot)) -> parse_uint10 n >>?= fun whole_n -> Gas.consume ctxt (Typecheck_costs.proof_argument whole_n) >>?= fun ctxt -> let rec make_proof_argument : type a s x. int -> x ty -> var_annot option -> (a, s) stack_ty -> (a, s, x) dug_proof_argument tzresult = fun n x stack_annot stk -> match (Compare.Int.(n = 0), stk) with | (true, rest) -> ok @@ Dug_proof_argument (KRest, (), Item_t (x, rest, stack_annot)) | (false, Item_t (v, rest, annot)) -> make_proof_argument (n - 1) x stack_annot rest >|? fun (Dug_proof_argument (n', (), aft')) -> let kinfo = {iloc = loc; kstack_ty = aft'} in Dug_proof_argument (KPrefix (kinfo, n'), (), Item_t (v, aft', annot)) | (_, _) -> serialize_stack_for_error ctxt whole_stack >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_DUG, whole_n, whole_stack)) in error_unexpected_annot loc result_annot >>?= fun () -> make_proof_argument whole_n x stack_annot whole_stack >>?= fun (Dug_proof_argument (n', (), aft)) -> let dug = {apply = (fun kinfo k -> IDug (kinfo, whole_n, n', k))} in typed ctxt 0 loc dug aft | (Prim (loc, I_DUG, [_], result_annot), stack) -> Lwt.return ( error_unexpected_annot loc result_annot >>? fun () -> serialize_stack_for_error ctxt stack >>? fun (stack, _ctxt) -> error (Bad_stack (loc, I_DUG, 1, stack)) ) | (Prim (loc, I_DUG, (([] | _ :: _ :: _) as l), _), _) -> fail (Invalid_arity (loc, I_DUG, 1, List.length l)) | ( Prim (loc, I_SWAP, [], annot), Item_t (v, Item_t (w, rest, stack_annot), cur_top_annot) ) -> error_unexpected_annot loc annot >>?= fun () -> let swap = {apply = (fun kinfo k -> ISwap (kinfo, k))} in let stack_ty = Item_t (w, Item_t (v, rest, cur_top_annot), stack_annot) in typed ctxt 0 loc swap stack_ty | (Prim (loc, I_PUSH, [t; d], annot), stack) -> parse_var_annot loc annot >>?= fun annot -> parse_packable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy t >>?= fun (Ex_ty t, ctxt) -> check_type_size ~legacy ctxt ~loc t >>?= fun () -> parse_data ?type_logger ~stack_depth:(stack_depth + 1) ctxt ~legacy ~allow_forged:false t d >>=? fun (v, ctxt) -> let const = {apply = (fun kinfo k -> IConst (kinfo, v, k))} in typed ctxt 0 (* type size already checked *) loc const (Item_t (t, stack, annot)) | (Prim (loc, I_UNIT, [], annot), stack) -> parse_var_type_annot loc annot >>?= fun (annot, ty_name) -> let const = {apply = (fun kinfo k -> IConst (kinfo, (), k))} in typed ctxt 0 loc const (Item_t (Unit_t ty_name, stack, annot)) (* options *) | (Prim (loc, I_SOME, [], annot), Item_t (t, rest, _)) -> parse_var_type_annot loc annot >>?= fun (annot, ty_name) -> let cons_some = {apply = (fun kinfo k -> ICons_some (kinfo, k))} in typed ctxt 1 loc cons_some (Item_t (Option_t (t, ty_name), rest, annot)) | (Prim (loc, I_NONE, [t], annot), stack) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy t >>?= fun (Ex_ty t, ctxt) -> parse_var_type_annot loc annot >>?= fun (annot, ty_name) -> let cons_none = {apply = (fun kinfo k -> ICons_none (kinfo, t, k))} in let stack_ty = Item_t (Option_t (t, ty_name), stack, annot) in typed ctxt 1 loc cons_none stack_ty | ( Prim (loc, I_IF_NONE, [bt; bf], annot), (Item_t (Option_t (t, _), rest, option_annot) as bef) ) -> check_kind [Seq_kind] bt >>?= fun () -> check_kind [Seq_kind] bf >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> let annot = gen_access_annot option_annot default_some_annot in non_terminal_recursion ?type_logger tc_context ctxt ~legacy bt rest >>=? fun (btr, ctxt) -> let stack_ty = Item_t (t, rest, annot) in non_terminal_recursion ?type_logger tc_context ctxt ~legacy bf stack_ty >>=? fun (bfr, ctxt) -> let branch ibt ibf = let ifnone = { apply = (fun kinfo k -> let btinfo = kinfo_of_descr ibt and bfinfo = kinfo_of_descr ibf in let branch_if_none = ibt.instr.apply btinfo k and branch_if_some = ibf.instr.apply bfinfo k in IIf_none {kinfo; branch_if_none; branch_if_some}); } in {loc; instr = ifnone; bef; aft = ibt.aft} in merge_branches ~legacy ctxt loc btr bfr {branch} >>?= fun (judgement, ctxt) -> return ctxt 0 judgement (* pairs *) | ( Prim (loc, I_PAIR, [], annot), Item_t (a, Item_t (b, rest, snd_annot), fst_annot) ) -> parse_constr_annot loc annot ~if_special_first:(var_to_field_annot fst_annot) ~if_special_second:(var_to_field_annot snd_annot) >>?= fun (annot, ty_name, l_field, r_field) -> let stack_ty = Item_t ( Pair_t ((a, l_field, fst_annot), (b, r_field, snd_annot), ty_name), rest, annot ) in let cons_pair = {apply = (fun kinfo k -> ICons_pair (kinfo, k))} in typed ctxt 1 loc cons_pair stack_ty | (Prim (loc, I_PAIR, [n], annot), stack_ty) -> parse_var_annot loc annot >>?= fun annot -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a * s) comb_proof_argument tzresult = fun n stack_ty -> match (n, stack_ty) with | (1, Item_t (a_ty, tl_ty, _a_annot_opt)) -> ok (Comb_proof_argument (Comb_one, Item_t (a_ty, tl_ty, annot))) | (n, Item_t (a_ty, tl_ty, _prop_annot_opt)) -> make_proof_argument (n - 1) tl_ty >|? fun (Comb_proof_argument (comb_witness, Item_t (b_ty, tl_ty', annot))) -> let pair_t = Pair_t ((a_ty, None, None), (b_ty, None, None), None) in Comb_proof_argument (Comb_succ comb_witness, Item_t (pair_t, tl_ty', annot)) | _ -> serialize_stack_for_error ctxt stack_ty >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_PAIR, 1, whole_stack)) in parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> error_unless (Compare.Int.( > ) n 1) (Pair_bad_argument loc) >>?= fun () -> make_proof_argument n stack_ty >>?= fun (Comb_proof_argument (witness, after_ty)) -> let comb = {apply = (fun kinfo k -> IComb (kinfo, n, witness, k))} in typed ctxt 1 loc comb after_ty | (Prim (loc, I_UNPAIR, [n], annot), stack_ty) -> error_unexpected_annot loc annot >>?= fun () -> let rec make_proof_argument : type a s. int -> (a, s) stack_ty -> (a * s) uncomb_proof_argument tzresult = fun n stack_ty -> match (n, stack_ty) with | (1, Item_t (a_ty, tl_ty, annot)) -> ok @@ Uncomb_proof_argument (Uncomb_one, Item_t (a_ty, tl_ty, annot)) | ( n, Item_t ( Pair_t ((a_ty, field_opt, _), (b_ty, b_field_opt, _), _), tl_ty, _ ) ) -> let b_annot = Script_ir_annot.field_to_var_annot b_field_opt in make_proof_argument (n - 1) (Item_t (b_ty, tl_ty, b_annot)) >|? fun (Uncomb_proof_argument (uncomb_witness, after_ty)) -> Uncomb_proof_argument ( Uncomb_succ uncomb_witness, Item_t (a_ty, after_ty, Script_ir_annot.field_to_var_annot field_opt) ) | _ -> serialize_stack_for_error ctxt stack_ty >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_UNPAIR, 1, whole_stack)) in parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> error_unless (Compare.Int.( > ) n 1) (Unpair_bad_argument loc) >>?= fun () -> make_proof_argument n stack_ty >>?= fun (Uncomb_proof_argument (witness, after_ty)) -> let uncomb = {apply = (fun kinfo k -> IUncomb (kinfo, n, witness, k))} in typed ctxt 0 loc uncomb after_ty | (Prim (loc, I_GET, [n], annot), Item_t (comb_ty, rest_ty, _)) -> parse_var_annot loc annot >>?= fun annot -> let rec make_proof_argument : type b. int -> b ty -> b comb_get_proof_argument tzresult = fun n ty -> match (n, ty) with | (0, value_ty) -> ok @@ Comb_get_proof_argument (Comb_get_zero, value_ty) | (1, Pair_t ((hd_ty, _at1, _at2), _, _annot)) -> ok @@ Comb_get_proof_argument (Comb_get_one, hd_ty) | (n, Pair_t (_, (tl_ty, _bt1, _bt2), _annot)) -> make_proof_argument (n - 2) tl_ty >|? fun (Comb_get_proof_argument (comb_get_left_witness, ty')) -> Comb_get_proof_argument (Comb_get_plus_two comb_get_left_witness, ty') | _ -> serialize_stack_for_error ctxt stack_ty >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_GET, 1, whole_stack)) in parse_uint11 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> make_proof_argument n comb_ty >>?= fun (Comb_get_proof_argument (witness, ty')) -> let after_stack_ty = Item_t (ty', rest_ty, annot) in let comb_get = {apply = (fun kinfo k -> IComb_get (kinfo, n, witness, k))} in typed ctxt 0 loc comb_get after_stack_ty | ( Prim (loc, I_UPDATE, [n], annot), Item_t (value_ty, Item_t (comb_ty, rest_ty, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let rec make_proof_argument : type value before. int -> value ty -> before ty -> (value, before) comb_set_proof_argument tzresult = fun n value_ty ty -> match (n, ty) with | (0, _) -> ok @@ Comb_set_proof_argument (Comb_set_zero, value_ty) | (1, Pair_t ((_hd_ty, at1, at2), (tl_ty, bt1, bt2), annot)) -> let after_ty = Pair_t ((value_ty, at1, at2), (tl_ty, bt1, bt2), annot) in ok @@ Comb_set_proof_argument (Comb_set_one, after_ty) | (n, Pair_t ((hd_ty, at1, at2), (tl_ty, bt1, bt2), annot)) -> make_proof_argument (n - 2) value_ty tl_ty >|? fun (Comb_set_proof_argument (comb_set_left_witness, tl_ty')) -> let after_ty = Pair_t ((hd_ty, at1, at2), (tl_ty', bt1, bt2), annot) in Comb_set_proof_argument (Comb_set_plus_two comb_set_left_witness, after_ty) | _ -> serialize_stack_for_error ctxt stack_ty >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_UPDATE, 2, whole_stack)) in parse_uint11 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> make_proof_argument n value_ty comb_ty >>?= fun (Comb_set_proof_argument (witness, after_ty)) -> let after_stack_ty = Item_t (after_ty, rest_ty, annot) in let comb_set = {apply = (fun kinfo k -> IComb_set (kinfo, n, witness, k))} in typed ctxt 0 loc comb_set after_stack_ty | ( Prim (loc, I_UNPAIR, [], annot), Item_t ( Pair_t ( (a, expected_field_annot_a, a_annot), (b, expected_field_annot_b, b_annot), _ ), rest, pair_annot ) ) -> parse_unpair_annot loc annot ~pair_annot ~value_annot_car:a_annot ~value_annot_cdr:b_annot ~field_name_car:expected_field_annot_a ~field_name_cdr:expected_field_annot_b >>?= fun (annot_a, annot_b, field_a, field_b) -> check_correct_field field_a expected_field_annot_a >>?= fun () -> check_correct_field field_b expected_field_annot_b >>?= fun () -> let unpair = {apply = (fun kinfo k -> IUnpair (kinfo, k))} in typed ctxt 0 loc unpair (Item_t (a, Item_t (b, rest, annot_b), annot_a)) | ( Prim (loc, I_CAR, [], annot), Item_t (Pair_t ((a, expected_field_annot, a_annot), _, _), rest, pair_annot) ) -> parse_destr_annot loc annot ~pair_annot ~value_annot:a_annot ~field_name:expected_field_annot ~default_accessor:default_car_annot >>?= fun (annot, field_annot) -> check_correct_field field_annot expected_field_annot >>?= fun () -> let car = {apply = (fun kinfo k -> ICar (kinfo, k))} in typed ctxt 0 loc car (Item_t (a, rest, annot)) | ( Prim (loc, I_CDR, [], annot), Item_t (Pair_t (_, (b, expected_field_annot, b_annot), _), rest, pair_annot) ) -> parse_destr_annot loc annot ~pair_annot ~value_annot:b_annot ~field_name:expected_field_annot ~default_accessor:default_cdr_annot >>?= fun (annot, field_annot) -> check_correct_field field_annot expected_field_annot >>?= fun () -> let cdr = {apply = (fun kinfo k -> ICdr (kinfo, k))} in typed ctxt 0 loc cdr (Item_t (b, rest, annot)) (* unions *) | (Prim (loc, I_LEFT, [tr], annot), Item_t (tl, rest, stack_annot)) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy tr >>?= fun (Ex_ty tr, ctxt) -> parse_constr_annot loc annot ~if_special_first:(var_to_field_annot stack_annot) >>?= fun (annot, tname, l_field, r_field) -> let cons_left = {apply = (fun kinfo k -> ICons_left (kinfo, k))} in let stack_ty = Item_t (Union_t ((tl, l_field), (tr, r_field), tname), rest, annot) in typed ctxt 1 loc cons_left stack_ty | (Prim (loc, I_RIGHT, [tl], annot), Item_t (tr, rest, stack_annot)) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy tl >>?= fun (Ex_ty tl, ctxt) -> parse_constr_annot loc annot ~if_special_second:(var_to_field_annot stack_annot) >>?= fun (annot, tname, l_field, r_field) -> let cons_right = {apply = (fun kinfo k -> ICons_right (kinfo, k))} in let stack_ty = Item_t (Union_t ((tl, l_field), (tr, r_field), tname), rest, annot) in typed ctxt 1 loc cons_right stack_ty | ( Prim (loc, I_IF_LEFT, [bt; bf], annot), ( Item_t (Union_t ((tl, l_field), (tr, r_field), _), rest, union_annot) as bef ) ) -> check_kind [Seq_kind] bt >>?= fun () -> check_kind [Seq_kind] bf >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> let left_annot = gen_access_annot union_annot l_field ~default:default_left_annot in let right_annot = gen_access_annot union_annot r_field ~default:default_right_annot in non_terminal_recursion ?type_logger tc_context ctxt ~legacy bt (Item_t (tl, rest, left_annot)) >>=? fun (btr, ctxt) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bf (Item_t (tr, rest, right_annot)) >>=? fun (bfr, ctxt) -> let branch ibt ibf = let infobt = kinfo_of_descr ibt and infobf = kinfo_of_descr ibf in let instr = { apply = (fun kinfo k -> let branch_if_left = ibt.instr.apply infobt k and branch_if_right = ibf.instr.apply infobf k in IIf_left {kinfo; branch_if_left; branch_if_right}); } in {loc; instr; bef; aft = ibt.aft} in merge_branches ~legacy ctxt loc btr bfr {branch} >>?= fun (judgement, ctxt) -> return ctxt 0 judgement (* lists *) | (Prim (loc, I_NIL, [t], annot), stack) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy t >>?= fun (Ex_ty t, ctxt) -> parse_var_type_annot loc annot >>?= fun (annot, ty_name) -> let nil = {apply = (fun kinfo k -> INil (kinfo, k))} in typed ctxt 1 loc nil (Item_t (List_t (t, ty_name), stack, annot)) | ( Prim (loc, I_CONS, [], annot), Item_t (tv, Item_t (List_t (t, ty_name), rest, _), _) ) -> check_item_ty ctxt tv t loc I_CONS 1 2 >>?= fun (Eq, t, ctxt) -> parse_var_annot loc annot >>?= fun annot -> let cons_list = {apply = (fun kinfo k -> ICons_list (kinfo, k))} in ( typed ctxt 0 loc cons_list (Item_t (List_t (t, ty_name), rest, annot)) : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_IF_CONS, [bt; bf], annot), (Item_t (List_t (t, ty_name), rest, list_annot) as bef) ) -> check_kind [Seq_kind] bt >>?= fun () -> check_kind [Seq_kind] bf >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> let hd_annot = gen_access_annot list_annot default_hd_annot in let tl_annot = gen_access_annot list_annot default_tl_annot in non_terminal_recursion ?type_logger tc_context ctxt ~legacy bt (Item_t (t, Item_t (List_t (t, ty_name), rest, tl_annot), hd_annot)) >>=? fun (btr, ctxt) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bf rest >>=? fun (bfr, ctxt) -> let branch ibt ibf = let infobt = kinfo_of_descr ibt and infobf = kinfo_of_descr ibf in let instr = { apply = (fun kinfo k -> let branch_if_cons = ibt.instr.apply infobt k and branch_if_nil = ibf.instr.apply infobf k in IIf_cons {kinfo; branch_if_nil; branch_if_cons}); } in {loc; instr; bef; aft = ibt.aft} in merge_branches ~legacy ctxt loc btr bfr {branch} >>?= fun (judgement, ctxt) -> return ctxt 0 judgement | (Prim (loc, I_SIZE, [], annot), Item_t (List_t _, rest, _)) -> parse_var_type_annot loc annot >>?= fun (annot, tname) -> let list_size = {apply = (fun kinfo k -> IList_size (kinfo, k))} in typed ctxt 0 loc list_size (Item_t (Nat_t tname, rest, annot)) | ( Prim (loc, I_MAP, [body], annot), Item_t (List_t (elt, _), starting_rest, list_annot) ) -> ( check_kind [Seq_kind] body >>?= fun () -> parse_var_type_annot loc annot >>?= fun (ret_annot, list_ty_name) -> let elt_annot = gen_access_annot list_annot default_elt_annot in non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (elt, starting_rest, elt_annot)) >>=? fun (judgement, ctxt) -> match judgement with | Typed ({aft = Item_t (ret, rest, _); _} as kibody) -> let invalid_map_body () = serialize_stack_for_error ctxt kibody.aft >|? fun (aft, _ctxt) -> Invalid_map_body (loc, aft) in Lwt.return @@ record_trace_eval invalid_map_body ( merge_stacks ~legacy loc ctxt 1 rest starting_rest >>? fun (Eq, rest, ctxt) -> let binfo = kinfo_of_descr kibody in let hinfo = {iloc = loc; kstack_ty = Item_t (ret, rest, ret_annot)} in let ibody = kibody.instr.apply binfo (IHalt hinfo) in let list_map = {apply = (fun kinfo k -> IList_map (kinfo, ibody, k))} in let stack = Item_t (List_t (ret, list_ty_name), rest, ret_annot) in typed_no_lwt ctxt 1 loc list_map stack ) | Typed {aft; _} -> Lwt.return ( serialize_stack_for_error ctxt aft >>? fun (aft, _ctxt) -> error (Invalid_map_body (loc, aft)) ) | Failed _ -> fail (Invalid_map_block_fail loc) ) | ( Prim (loc, I_ITER, [body], annot), Item_t (List_t (elt, _), rest, list_annot) ) -> ( check_kind [Seq_kind] body >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> let elt_annot = gen_access_annot list_annot default_elt_annot in non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (elt, rest, elt_annot)) >>=? fun (judgement, ctxt) -> let mk_list_iter ibody = { apply = (fun kinfo k -> let hinfo = {iloc = loc; kstack_ty = rest} in let binfo = kinfo_of_descr ibody in let ibody = ibody.instr.apply binfo (IHalt hinfo) in IList_iter (kinfo, ibody, k)); } in match judgement with | Typed ({aft; _} as ibody) -> let invalid_iter_body () = serialize_stack_for_error ctxt ibody.aft >>? fun (aft, ctxt) -> serialize_stack_for_error ctxt rest >|? fun (rest, _ctxt) -> Invalid_iter_body (loc, rest, aft) in Lwt.return @@ record_trace_eval invalid_iter_body ( merge_stacks ~legacy loc ctxt 1 aft rest >>? fun (Eq, rest, ctxt) -> ( typed_no_lwt ctxt 0 loc (mk_list_iter ibody) rest : ((a, s) judgement * context) tzresult ) ) | Failed {descr} -> typed ctxt 0 loc (mk_list_iter (descr rest)) rest ) (* sets *) | (Prim (loc, I_EMPTY_SET, [t], annot), rest) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt t >>?= fun (Ex_comparable_ty t, ctxt) -> parse_var_type_annot loc annot >>?= fun (annot, tname) -> let instr = {apply = (fun kinfo k -> IEmpty_set (kinfo, t, k))} in typed ctxt 1 loc instr (Item_t (Set_t (t, tname), rest, annot)) | ( Prim (loc, I_ITER, [body], annot), Item_t (Set_t (comp_elt, _), rest, set_annot) ) -> ( check_kind [Seq_kind] body >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> let elt_annot = gen_access_annot set_annot default_elt_annot in let elt = ty_of_comparable_ty comp_elt in non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (elt, rest, elt_annot)) >>=? fun (judgement, ctxt) -> let mk_iset_iter ibody = { apply = (fun kinfo k -> let hinfo = {iloc = loc; kstack_ty = rest} in let binfo = kinfo_of_descr ibody in let ibody = ibody.instr.apply binfo (IHalt hinfo) in ISet_iter (kinfo, ibody, k)); } in match judgement with | Typed ({aft; _} as ibody) -> let invalid_iter_body () = serialize_stack_for_error ctxt ibody.aft >>? fun (aft, ctxt) -> serialize_stack_for_error ctxt rest >|? fun (rest, _ctxt) -> Invalid_iter_body (loc, rest, aft) in Lwt.return @@ record_trace_eval invalid_iter_body ( merge_stacks ~legacy loc ctxt 1 aft rest >>? fun (Eq, rest, ctxt) -> ( typed_no_lwt ctxt 0 loc (mk_iset_iter ibody) rest : ((a, s) judgement * context) tzresult ) ) | Failed {descr} -> typed ctxt 0 loc (mk_iset_iter (descr rest)) rest ) | ( Prim (loc, I_MEM, [], annot), Item_t (v, Item_t (Set_t (elt, _), rest, _), _) ) -> let elt = ty_of_comparable_ty elt in parse_var_type_annot loc annot >>?= fun (annot, tname) -> check_item_ty ctxt elt v loc I_MEM 1 2 >>?= fun (Eq, _, ctxt) -> let instr = {apply = (fun kinfo k -> ISet_mem (kinfo, k))} in ( typed ctxt 0 loc instr (Item_t (Bool_t tname, rest, annot)) : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_UPDATE, [], annot), Item_t ( v, Item_t (Bool_t _, Item_t (Set_t (elt, tname), rest, set_annot), _), _ ) ) -> check_item_ty ctxt (ty_of_comparable_ty elt) v loc I_UPDATE 1 3 >>?= fun (Eq, _, ctxt) -> parse_var_annot loc annot ~default:set_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISet_update (kinfo, k))} in ( typed ctxt 0 loc instr (Item_t (Set_t (elt, tname), rest, annot)) : ((a, s) judgement * context) tzresult Lwt.t ) | (Prim (loc, I_SIZE, [], annot), Item_t (Set_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISet_size (kinfo, k))} in typed ctxt 0 loc instr (Item_t (Nat_t None, rest, annot)) (* maps *) | (Prim (loc, I_EMPTY_MAP, [tk; tv], annot), stack) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt tk >>?= fun (Ex_comparable_ty tk, ctxt) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy tv >>?= fun (Ex_ty tv, ctxt) -> parse_var_type_annot loc annot >>?= fun (annot, ty_name) -> let instr = {apply = (fun kinfo k -> IEmpty_map (kinfo, tk, tv, k))} in typed ctxt 1 loc instr (Item_t (Map_t (tk, tv, ty_name), stack, annot)) | ( Prim (loc, I_MAP, [body], annot), Item_t (Map_t (ck, elt, _), starting_rest, _map_annot) ) -> ( let k = ty_of_comparable_ty ck in check_kind [Seq_kind] body >>?= fun () -> parse_var_type_annot loc annot >>?= fun (ret_annot, ty_name) -> let k_name = field_to_var_annot default_key_annot in let e_name = field_to_var_annot default_elt_annot in non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t ( Pair_t ((k, None, k_name), (elt, None, e_name), None), starting_rest, None )) >>=? fun (judgement, ctxt) -> match judgement with | Typed ({aft = Item_t (ret, rest, _); _} as ibody) -> let invalid_map_body () = serialize_stack_for_error ctxt ibody.aft >|? fun (aft, _ctxt) -> Invalid_map_body (loc, aft) in Lwt.return @@ record_trace_eval invalid_map_body ( merge_stacks ~legacy loc ctxt 1 rest starting_rest >>? fun (Eq, rest, ctxt) -> let instr = { apply = (fun kinfo k -> let binfo = kinfo_of_descr ibody in let hinfo = { iloc = loc; kstack_ty = Item_t (ret, rest, ret_annot); } in let ibody = ibody.instr.apply binfo (IHalt hinfo) in IMap_map (kinfo, ibody, k)); } in let stack = Item_t (Map_t (ck, ret, ty_name), rest, ret_annot) in typed_no_lwt ctxt 1 loc instr stack ) | Typed {aft; _} -> Lwt.return ( serialize_stack_for_error ctxt aft >>? fun (aft, _ctxt) -> error (Invalid_map_body (loc, aft)) ) | Failed _ -> fail (Invalid_map_block_fail loc) ) | ( Prim (loc, I_ITER, [body], annot), Item_t (Map_t (comp_elt, element_ty, _), rest, _map_annot) ) -> ( check_kind [Seq_kind] body >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> let k_name = field_to_var_annot default_key_annot in let e_name = field_to_var_annot default_elt_annot in let key = ty_of_comparable_ty comp_elt in non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t ( Pair_t ((key, None, k_name), (element_ty, None, e_name), None), rest, None )) >>=? fun (judgement, ctxt) -> let make_instr ibody = { apply = (fun kinfo k -> let hinfo = {iloc = loc; kstack_ty = rest} in let binfo = kinfo_of_descr ibody in let ibody = ibody.instr.apply binfo (IHalt hinfo) in IMap_iter (kinfo, ibody, k)); } in match judgement with | Typed ({aft; _} as ibody) -> let invalid_iter_body () = serialize_stack_for_error ctxt ibody.aft >>? fun (aft, ctxt) -> serialize_stack_for_error ctxt rest >|? fun (rest, _ctxt) -> Invalid_iter_body (loc, rest, aft) in Lwt.return @@ record_trace_eval invalid_iter_body ( merge_stacks ~legacy loc ctxt 1 aft rest >>? fun (Eq, rest, ctxt) -> ( typed_no_lwt ctxt 0 loc (make_instr ibody) rest : ((a, s) judgement * context) tzresult ) ) | Failed {descr} -> typed ctxt 0 loc (make_instr (descr rest)) rest ) | ( Prim (loc, I_MEM, [], annot), Item_t (vk, Item_t (Map_t (ck, _, _), rest, _), _) ) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_MEM 1 2 >>?= fun (Eq, _, ctxt) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMap_mem (kinfo, k))} in ( typed ctxt 0 loc instr (Item_t (Bool_t None, rest, annot)) : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_GET, [], annot), Item_t (vk, Item_t (Map_t (ck, elt, _), rest, _), _) ) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_GET 1 2 >>?= fun (Eq, _, ctxt) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMap_get (kinfo, k))} in ( typed ctxt 0 loc instr (Item_t (Option_t (elt, None), rest, annot)) : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_UPDATE, [], annot), Item_t ( vk, Item_t ( Option_t (vv, _), Item_t (Map_t (ck, v, map_name), rest, map_annot), _ ), _ ) ) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_UPDATE 1 3 >>?= fun (Eq, _, ctxt) -> check_item_ty ctxt vv v loc I_UPDATE 2 3 >>?= fun (Eq, v, ctxt) -> parse_var_annot loc annot ~default:map_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMap_update (kinfo, k))} in ( typed ctxt 0 loc instr (Item_t (Map_t (ck, v, map_name), rest, annot)) : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_GET_AND_UPDATE, [], annot), Item_t ( vk, Item_t ( Option_t (vv, vname), Item_t (Map_t (ck, v, map_name), rest, map_annot), v_annot ), _ ) ) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_GET_AND_UPDATE 1 3 >>?= fun (Eq, _, ctxt) -> check_item_ty ctxt vv v loc I_GET_AND_UPDATE 2 3 >>?= fun (Eq, v, ctxt) -> parse_var_annot loc annot ~default:map_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMap_get_and_update (kinfo, k))} in let stack = Item_t ( Option_t (vv, vname), Item_t (Map_t (ck, v, map_name), rest, annot), v_annot ) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) | (Prim (loc, I_SIZE, [], annot), Item_t (Map_t (_, _, _), rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMap_size (kinfo, k))} in typed ctxt 0 loc instr (Item_t (Nat_t None, rest, annot)) (* big_map *) | (Prim (loc, I_EMPTY_BIG_MAP, [tk; tv], annot), stack) -> parse_comparable_ty ~stack_depth:(stack_depth + 1) ctxt tk >>?= fun (Ex_comparable_ty tk, ctxt) -> parse_big_map_value_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy tv >>?= fun (Ex_ty tv, ctxt) -> parse_var_type_annot loc annot >>?= fun (annot, ty_name) -> let instr = {apply = (fun kinfo k -> IEmpty_big_map (kinfo, tk, tv, k))} in let stack = Item_t (Big_map_t (tk, tv, ty_name), stack, annot) in typed ctxt 1 loc instr stack | ( Prim (loc, I_MEM, [], annot), Item_t (set_key, Item_t (Big_map_t (map_key, _, _), rest, _), _) ) -> let k = ty_of_comparable_ty map_key in check_item_ty ctxt set_key k loc I_MEM 1 2 >>?= fun (Eq, _, ctxt) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IBig_map_mem (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_GET, [], annot), Item_t (vk, Item_t (Big_map_t (ck, elt, _), rest, _), _) ) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_GET 1 2 >>?= fun (Eq, _, ctxt) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IBig_map_get (kinfo, k))} in let stack = Item_t (Option_t (elt, None), rest, annot) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_UPDATE, [], annot), Item_t ( set_key, Item_t ( Option_t (set_value, _), Item_t (Big_map_t (map_key, map_value, map_name), rest, map_annot), _ ), _ ) ) -> let k = ty_of_comparable_ty map_key in check_item_ty ctxt set_key k loc I_UPDATE 1 3 >>?= fun (Eq, _, ctxt) -> check_item_ty ctxt set_value map_value loc I_UPDATE 2 3 >>?= fun (Eq, map_value, ctxt) -> parse_var_annot loc annot ~default:map_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IBig_map_update (kinfo, k))} in let stack = Item_t (Big_map_t (map_key, map_value, map_name), rest, annot) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_GET_AND_UPDATE, [], annot), Item_t ( vk, Item_t ( Option_t (vv, vname), Item_t (Big_map_t (ck, v, map_name), rest, map_annot), v_annot ), _ ) ) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_GET_AND_UPDATE 1 3 >>?= fun (Eq, _, ctxt) -> check_item_ty ctxt vv v loc I_GET_AND_UPDATE 2 3 >>?= fun (Eq, v, ctxt) -> parse_var_annot loc annot ~default:map_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IBig_map_get_and_update (kinfo, k))} in let stack = Item_t ( Option_t (vv, vname), Item_t (Big_map_t (ck, v, map_name), rest, annot), v_annot ) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) (* Sapling *) | (Prim (loc, I_SAPLING_EMPTY_STATE, [memo_size], annot), rest) -> parse_memo_size memo_size >>?= fun memo_size -> parse_var_annot loc annot ~default:default_sapling_state_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISapling_empty_state (kinfo, memo_size, k))} in let stack = Item_t (Sapling_state_t (memo_size, None), rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_SAPLING_VERIFY_UPDATE, [], _), Item_t ( Sapling_transaction_t (transaction_memo_size, _), Item_t ( (Sapling_state_t (state_memo_size, _) as state_ty), rest, stack_annot ), _ ) ) -> merge_memo_sizes state_memo_size transaction_memo_size >>?= fun _memo_size -> let instr = {apply = (fun kinfo k -> ISapling_verify_update (kinfo, k))} in let stack = Item_t ( Option_t ( Pair_t ( (Int_t None, None, default_sapling_balance_annot), (state_ty, None, None), None ), None ), rest, stack_annot ) in typed ctxt 0 loc instr stack (* control *) | (Seq (loc, []), stack) -> let instr = {apply = (fun _kinfo k -> k)} in typed ctxt 0 loc instr stack | (Seq (_, [single]), stack) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy single stack | (Seq (loc, hd :: tl), stack) -> ( non_terminal_recursion ?type_logger tc_context ctxt ~legacy hd stack >>=? fun (judgement, ctxt) -> match judgement with | Failed _ -> fail (Fail_not_in_tail_position (Micheline.location hd)) | Typed ({aft = middle; _} as ihd) -> ( non_terminal_recursion ?type_logger tc_context ctxt ~legacy (Seq (-1, tl)) middle >>=? fun (judgement, ctxt) -> match judgement with | Failed {descr} -> let descr ret = compose_descr loc ihd (descr ret) in return ctxt 0 (Failed {descr}) | Typed itl -> ( Lwt.return (Ok (Typed (compose_descr loc ihd itl), ctxt)) : ((a, s) judgement * context) tzresult Lwt.t ) ) ) | (Prim (loc, I_IF, [bt; bf], annot), (Item_t (Bool_t _, rest, _) as bef)) -> check_kind [Seq_kind] bt >>?= fun () -> check_kind [Seq_kind] bf >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bt rest >>=? fun (btr, ctxt) -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy bf rest >>=? fun (bfr, ctxt) -> let branch ibt ibf = let infobt = kinfo_of_descr ibt and infobf = kinfo_of_descr ibf in let instr = { apply = (fun kinfo k -> let branch_if_true = ibt.instr.apply infobt k and branch_if_false = ibf.instr.apply infobf k in IIf {kinfo; branch_if_true; branch_if_false}); } in {loc; instr; bef; aft = ibt.aft} in merge_branches ~legacy ctxt loc btr bfr {branch} >>?= fun (judgement, ctxt) -> return ctxt 0 judgement | ( Prim (loc, I_LOOP, [body], annot), (Item_t (Bool_t _, rest, _stack_annot) as stack) ) -> ( check_kind [Seq_kind] body >>?= fun () -> error_unexpected_annot loc annot >>?= fun () -> non_terminal_recursion ?type_logger tc_context ctxt ~legacy body rest >>=? fun (judgement, ctxt) -> match judgement with | Typed ibody -> let unmatched_branches () = serialize_stack_for_error ctxt ibody.aft >>? fun (aft, ctxt) -> serialize_stack_for_error ctxt stack >|? fun (stack, _ctxt) -> Unmatched_branches (loc, aft, stack) in Lwt.return @@ record_trace_eval unmatched_branches ( merge_stacks ~legacy loc ctxt 1 ibody.aft stack >>? fun (Eq, _stack, ctxt) -> let instr = { apply = (fun kinfo k -> let ibody = ibody.instr.apply (kinfo_of_descr ibody) (IHalt kinfo) in ILoop (kinfo, ibody, k)); } in typed_no_lwt ctxt 0 loc instr rest ) | Failed {descr} -> let instr = { apply = (fun kinfo k -> let ibody = descr stack in let ibody = ibody.instr.apply (kinfo_of_descr ibody) (IHalt kinfo) in ILoop (kinfo, ibody, k)); } in typed ctxt loc 0 instr rest ) | ( Prim (loc, I_LOOP_LEFT, [body], annot), (Item_t (Union_t ((tl, l_field), (tr, _), _), rest, union_annot) as stack) ) -> ( check_kind [Seq_kind] body >>?= fun () -> parse_var_annot loc annot >>?= fun annot -> let l_annot = gen_access_annot union_annot l_field ~default:default_left_annot in non_terminal_recursion ?type_logger tc_context ctxt ~legacy body (Item_t (tl, rest, l_annot)) >>=? fun (judgement, ctxt) -> match judgement with | Typed ibody -> let unmatched_branches () = serialize_stack_for_error ctxt ibody.aft >>? fun (aft, ctxt) -> serialize_stack_for_error ctxt stack >|? fun (stack, _ctxt) -> Unmatched_branches (loc, aft, stack) in Lwt.return @@ record_trace_eval unmatched_branches ( merge_stacks ~legacy loc ctxt 1 ibody.aft stack >>? fun (Eq, _stack, ctxt) -> let instr = { apply = (fun kinfo k -> let ibody = ibody.instr.apply (kinfo_of_descr ibody) (IHalt kinfo) in ILoop_left (kinfo, ibody, k)); } in let stack = Item_t (tr, rest, annot) in typed_no_lwt ctxt 0 loc instr stack ) | Failed {descr} -> let instr = { apply = (fun kinfo k -> let ibody = descr stack in let ibody = ibody.instr.apply (kinfo_of_descr ibody) (IHalt kinfo) in ILoop_left (kinfo, ibody, k)); } in let stack = Item_t (tr, rest, annot) in typed ctxt 0 loc instr stack ) | (Prim (loc, I_LAMBDA, [arg; ret; code], annot), stack) -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy arg >>?= fun (Ex_ty arg, ctxt) -> check_type_size ~legacy ctxt ~loc arg >>?= fun () -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ret >>?= fun (Ex_ty ret, ctxt) -> check_type_size ~legacy ctxt ~loc ret >>?= fun () -> check_kind [Seq_kind] code >>?= fun () -> parse_var_annot loc annot >>?= fun annot -> parse_returning Lambda ?type_logger ~stack_depth:(stack_depth + 1) ctxt ~legacy (arg, default_arg_annot) ret code >>=? fun (lambda, ctxt) -> let instr = {apply = (fun kinfo k -> ILambda (kinfo, lambda, k))} in let stack = Item_t (Lambda_t (arg, ret, None), stack, annot) in typed ctxt 1 loc instr stack | ( Prim (loc, I_EXEC, [], annot), Item_t (arg, Item_t (Lambda_t (param, ret, _), rest, _), _) ) -> check_item_ty ctxt arg param loc I_EXEC 1 2 >>?= fun (Eq, _, ctxt) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IExec (kinfo, k))} in let stack = Item_t (ret, rest, annot) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_APPLY, [], annot), Item_t ( capture, Item_t ( Lambda_t (Pair_t ((capture_ty, _, _), (arg_ty, _, _), lam_annot), ret, _), rest, _ ), _ ) ) -> check_packable ~legacy:false loc capture_ty >>?= fun () -> check_item_ty ctxt capture capture_ty loc I_APPLY 1 2 >>?= fun (Eq, capture_ty, ctxt) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IApply (kinfo, capture_ty, k))} in let stack = Item_t (Lambda_t (arg_ty, ret, lam_annot), rest, annot) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) | (Prim (loc, I_DIP, [code], annot), Item_t (v, rest, stack_annot)) -> ( error_unexpected_annot loc annot >>?= fun () -> check_kind [Seq_kind] code >>?= fun () -> non_terminal_recursion ?type_logger (add_dip v stack_annot tc_context) ctxt ~legacy code rest >>=? fun (judgement, ctxt) -> match judgement with | Typed descr -> let instr = { apply = (fun kinfo k -> let binfo = {iloc = descr.loc; kstack_ty = descr.bef} in let kinfoh = {iloc = descr.loc; kstack_ty = descr.aft} in let b = descr.instr.apply binfo (IHalt kinfoh) in IDip (kinfo, b, k)); } in let stack = Item_t (v, descr.aft, stack_annot) in typed ctxt 0 loc instr stack | Failed _ -> fail (Fail_not_in_tail_position loc) ) | (Prim (loc, I_DIP, [n; code], result_annot), stack) -> parse_uint10 n >>?= fun n -> Gas.consume ctxt (Typecheck_costs.proof_argument n) >>?= fun ctxt -> let rec make_proof_argument : type a s. int -> tc_context -> (a, s) stack_ty -> (a, s) dipn_proof_argument tzresult Lwt.t = fun n inner_tc_context stk -> match (Compare.Int.(n = 0), stk) with | (true, rest) -> ( non_terminal_recursion ?type_logger inner_tc_context ctxt ~legacy code rest >>=? fun (judgement, ctxt) -> match judgement with | Typed descr -> Lwt.return (ok ( Dipn_proof_argument (KRest, ctxt, descr, descr.aft) : (a, s) dipn_proof_argument )) | Failed _ -> Lwt.return (error (Fail_not_in_tail_position loc)) ) | (false, Item_t (v, rest, annot)) -> make_proof_argument (n - 1) (add_dip v annot tc_context) rest >|=? fun (Dipn_proof_argument (n', ctxt, descr, aft')) -> let kinfo' = {iloc = loc; kstack_ty = aft'} in let w = KPrefix (kinfo', n') in Dipn_proof_argument (w, ctxt, descr, Item_t (v, aft', annot)) | (_, _) -> Lwt.return ( serialize_stack_for_error ctxt stack >>? fun (whole_stack, _ctxt) -> error (Bad_stack (loc, I_DIP, 1, whole_stack)) ) in error_unexpected_annot loc result_annot >>?= fun () -> make_proof_argument n tc_context stack >>=? fun (Dipn_proof_argument (n', ctxt, descr, aft)) -> let kinfo = {iloc = descr.loc; kstack_ty = descr.bef} in let kinfoh = {iloc = descr.loc; kstack_ty = descr.aft} in let b = descr.instr.apply kinfo (IHalt kinfoh) in let res = {apply = (fun kinfo k -> IDipn (kinfo, n, n', b, k))} in typed ctxt 0 loc res aft | (Prim (loc, I_DIP, (([] | _ :: _ :: _ :: _) as l), _), _) -> (* Technically, the arities 1 and 2 are allowed but the error only mentions 2. However, DIP {code} is equivalent to DIP 1 {code} so hinting at an arity of 2 makes sense. *) fail (Invalid_arity (loc, I_DIP, 2, List.length l)) | (Prim (loc, I_FAILWITH, [], annot), Item_t (v, _rest, _)) -> error_unexpected_annot loc annot >>?= fun () -> (if legacy then ok_unit else check_packable ~legacy:false loc v) >>?= fun () -> let instr = {apply = (fun kinfo k -> IFailwith (kinfo, loc, v, k))} in let descr aft = {loc; instr; bef = stack_ty; aft} in log_stack ctxt loc stack_ty Bot_t >>?= fun () -> return ctxt 0 (Failed {descr}) | (Prim (loc, I_NEVER, [], annot), Item_t (Never_t _, _rest, _)) -> error_unexpected_annot loc annot >>?= fun () -> let instr = {apply = (fun kinfo _k -> INever kinfo)} in let descr aft = {loc; instr; bef = stack_ty; aft} in log_stack ctxt loc stack_ty Bot_t >>?= fun () -> return ctxt 0 (Failed {descr}) (* timestamp operations *) | ( Prim (loc, I_ADD, [], annot), Item_t (Timestamp_t tname, Item_t (Int_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IAdd_timestamp_to_seconds (kinfo, k))} in typed ctxt 0 loc instr (Item_t (Timestamp_t tname, rest, annot)) | ( Prim (loc, I_ADD, [], annot), Item_t (Int_t _, Item_t (Timestamp_t tname, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IAdd_seconds_to_timestamp (kinfo, k))} in typed ctxt 0 loc instr (Item_t (Timestamp_t tname, rest, annot)) | ( Prim (loc, I_SUB, [], annot), Item_t (Timestamp_t tname, Item_t (Int_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISub_timestamp_seconds (kinfo, k))} in let stack = Item_t (Timestamp_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_SUB, [], annot), Item_t (Timestamp_t tn1, Item_t (Timestamp_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IDiff_timestamps (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack (* string operations *) | ( Prim (loc, I_CONCAT, [], annot), Item_t (String_t tn1, Item_t (String_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IConcat_string_pair (kinfo, k))} in typed ctxt 0 loc instr (Item_t (String_t tname, rest, annot)) | ( Prim (loc, I_CONCAT, [], annot), Item_t (List_t (String_t tname, _), rest, list_annot) ) -> parse_var_annot ~default:list_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IConcat_string (kinfo, k))} in typed ctxt 0 loc instr (Item_t (String_t tname, rest, annot)) | ( Prim (loc, I_SLICE, [], annot), Item_t ( Nat_t _, Item_t (Nat_t _, Item_t (String_t tname, rest, string_annot), _), _ ) ) -> parse_var_annot ~default:(gen_access_annot string_annot default_slice_annot) loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISlice_string (kinfo, k))} in let stack = Item_t (Option_t (String_t tname, None), rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_SIZE, [], annot), Item_t (String_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IString_size (kinfo, k))} in let stack = Item_t (Nat_t None, rest, annot) in typed ctxt 0 loc instr stack (* bytes operations *) | ( Prim (loc, I_CONCAT, [], annot), Item_t (Bytes_t tn1, Item_t (Bytes_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IConcat_bytes_pair (kinfo, k))} in let stack = Item_t (Bytes_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_CONCAT, [], annot), Item_t (List_t (Bytes_t tname, _), rest, list_annot) ) -> parse_var_annot ~default:list_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IConcat_bytes (kinfo, k))} in let stack = Item_t (Bytes_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_SLICE, [], annot), Item_t ( Nat_t _, Item_t (Nat_t _, Item_t (Bytes_t tname, rest, bytes_annot), _), _ ) ) -> parse_var_annot ~default:(gen_access_annot bytes_annot default_slice_annot) loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISlice_bytes (kinfo, k))} in let stack = Item_t (Option_t (Bytes_t tname, None), rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_SIZE, [], annot), Item_t (Bytes_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IBytes_size (kinfo, k))} in let stack = Item_t (Nat_t None, rest, annot) in typed ctxt 0 loc instr stack (* currency operations *) | ( Prim (loc, I_ADD, [], annot), Item_t (Mutez_t tn1, Item_t (Mutez_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IAdd_tez (kinfo, k))} in let stack = Item_t (Mutez_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_SUB, [], annot), Item_t (Mutez_t tn1, Item_t (Mutez_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> ISub_tez (kinfo, k))} in let stack = Item_t (Mutez_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Mutez_t tname, Item_t (Nat_t _, rest, _), _) ) -> (* no type name check *) parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_teznat (kinfo, k))} in let stack = Item_t (Mutez_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Nat_t _, Item_t (Mutez_t tname, rest, _), _) ) -> (* no type name check *) parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_nattez (kinfo, k))} in let stack = Item_t (Mutez_t tname, rest, annot) in typed ctxt 0 loc instr stack (* boolean operations *) | ( Prim (loc, I_OR, [], annot), Item_t (Bool_t tn1, Item_t (Bool_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IOr (kinfo, k))} in let stack = Item_t (Bool_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_AND, [], annot), Item_t (Bool_t tn1, Item_t (Bool_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IAnd (kinfo, k))} in let stack = Item_t (Bool_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_XOR, [], annot), Item_t (Bool_t tn1, Item_t (Bool_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IXor (kinfo, k))} in let stack = Item_t (Bool_t tname, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NOT, [], annot), Item_t (Bool_t tname, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INot (kinfo, k))} in let stack = Item_t (Bool_t tname, rest, annot) in typed ctxt 0 loc instr stack (* integer operations *) | (Prim (loc, I_ABS, [], annot), Item_t (Int_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IAbs_int (kinfo, k))} in let stack = Item_t (Nat_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_ISNAT, [], annot), Item_t (Int_t _, rest, int_annot)) -> parse_var_annot loc annot ~default:int_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IIs_nat (kinfo, k))} in let stack = Item_t (Option_t (Nat_t None, None), rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_INT, [], annot), Item_t (Nat_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IInt_nat (kinfo, k))} in let stack = Item_t (Int_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NEG, [], annot), Item_t (Int_t tname, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INeg_int (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NEG, [], annot), Item_t (Nat_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INeg_nat (kinfo, k))} in let stack = Item_t (Int_t None, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IAdd_intint (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Int_t tname, Item_t (Nat_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IAdd_intnat (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Nat_t _, Item_t (Int_t tname, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IAdd_natint (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IAdd_natnat (kinfo, k))} in let stack = Item_t (Nat_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_SUB, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> ISub_int (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_SUB, [], annot), Item_t (Int_t tname, Item_t (Nat_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISub_int (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_SUB, [], annot), Item_t (Nat_t _, Item_t (Int_t tname, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISub_int (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_SUB, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun _tname -> let instr = {apply = (fun kinfo k -> ISub_int (kinfo, k))} in let stack = Item_t (Int_t None, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IMul_intint (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Int_t tname, Item_t (Nat_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_intnat (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Nat_t _, Item_t (Int_t tname, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_natint (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IMul_natnat (kinfo, k))} in let stack = Item_t (Nat_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_EDIV, [], annot), Item_t (Mutez_t tname, Item_t (Nat_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IEdiv_teznat (kinfo, k))} in let stack = Item_t ( Option_t ( Pair_t ( (Mutez_t tname, None, None), (Mutez_t tname, None, None), None ), None ), rest, annot ) in typed ctxt 0 loc instr stack | ( Prim (loc, I_EDIV, [], annot), Item_t (Mutez_t tn1, Item_t (Mutez_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IEdiv_tez (kinfo, k))} in let stack = Item_t ( Option_t ( Pair_t ((Nat_t None, None, None), (Mutez_t tname, None, None), None), None ), rest, annot ) in typed ctxt 0 loc instr stack | ( Prim (loc, I_EDIV, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IEdiv_intint (kinfo, k))} in let stack = Item_t ( Option_t ( Pair_t ((Int_t tname, None, None), (Nat_t None, None, None), None), None ), rest, annot ) in typed ctxt 0 loc instr stack | ( Prim (loc, I_EDIV, [], annot), Item_t (Int_t tname, Item_t (Nat_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IEdiv_intnat (kinfo, k))} in let stack = Item_t ( Option_t ( Pair_t ((Int_t tname, None, None), (Nat_t None, None, None), None), None ), rest, annot ) in typed ctxt 0 loc instr stack | ( Prim (loc, I_EDIV, [], annot), Item_t (Nat_t tname, Item_t (Int_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IEdiv_natint (kinfo, k))} in let stack = Item_t ( Option_t ( Pair_t ((Int_t None, None, None), (Nat_t tname, None, None), None), None ), rest, annot ) in typed ctxt 0 loc instr stack | ( Prim (loc, I_EDIV, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IEdiv_natnat (kinfo, k))} in let stack = Item_t ( Option_t ( Pair_t ((Nat_t tname, None, None), (Nat_t tname, None, None), None), None ), rest, annot ) in typed ctxt 0 loc instr stack | ( Prim (loc, I_LSL, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> ILsl_nat (kinfo, k))} in let stack = Item_t (Nat_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_LSR, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> ILsr_nat (kinfo, k))} in let stack = Item_t (Nat_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_OR, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IOr_nat (kinfo, k))} in let stack = Item_t (Nat_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_AND, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IAnd_nat (kinfo, k))} in let stack = Item_t (Nat_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_AND, [], annot), Item_t (Int_t _, Item_t (Nat_t tname, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IAnd_int_nat (kinfo, k))} in let stack = Item_t (Nat_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_XOR, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IXor_nat (kinfo, k))} in let stack = Item_t (Nat_t tname, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NOT, [], annot), Item_t (Int_t tname, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INot_int (kinfo, k))} in let stack = Item_t (Int_t tname, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NOT, [], annot), Item_t (Nat_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INot_nat (kinfo, k))} in let stack = Item_t (Int_t None, rest, annot) in typed ctxt 0 loc instr stack (* comparison *) | (Prim (loc, I_COMPARE, [], annot), Item_t (t1, Item_t (t2, rest, _), _)) -> parse_var_annot loc annot >>?= fun annot -> check_item_ty ctxt t1 t2 loc I_COMPARE 1 2 >>?= fun (Eq, t, ctxt) -> comparable_ty_of_ty ctxt loc t >>?= fun (key, ctxt) -> let instr = {apply = (fun kinfo k -> ICompare (kinfo, key, k))} in let stack = Item_t (Int_t None, rest, annot) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) (* comparators *) | (Prim (loc, I_EQ, [], annot), Item_t (Int_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IEq (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NEQ, [], annot), Item_t (Int_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INeq (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_LT, [], annot), Item_t (Int_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ILt (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_GT, [], annot), Item_t (Int_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IGt (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_LE, [], annot), Item_t (Int_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ILe (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_GE, [], annot), Item_t (Int_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IGe (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in typed ctxt 0 loc instr stack (* annotations *) | (Prim (loc, I_CAST, [cast_t], annot), Item_t (t, stack, item_annot)) -> parse_var_annot loc annot ~default:item_annot >>?= fun annot -> parse_any_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy cast_t >>?= fun (Ex_ty cast_t, ctxt) -> merge_types ~legacy ctxt loc cast_t t >>?= fun (Eq, _, ctxt) -> let instr = {apply = (fun _ k -> k)} in let stack = Item_t (cast_t, stack, annot) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) | (Prim (loc, I_RENAME, [], annot), Item_t (t, stack, _)) -> parse_var_annot loc annot >>?= fun annot -> (* can erase annot *) let instr = {apply = (fun _ k -> k)} in let stack = Item_t (t, stack, annot) in typed ctxt 0 loc instr stack (* packing *) | (Prim (loc, I_PACK, [], annot), Item_t (t, rest, unpacked_annot)) -> check_packable ~legacy:true (* allow to pack contracts for hash/signature checks *) loc t >>?= fun () -> parse_var_annot loc annot ~default:(gen_access_annot unpacked_annot default_pack_annot) >>?= fun annot -> let instr = {apply = (fun kinfo k -> IPack (kinfo, t, k))} in let stack = Item_t (Bytes_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_UNPACK, [ty], annot), Item_t (Bytes_t _, rest, packed_annot)) -> parse_packable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ty >>?= fun (Ex_ty t, ctxt) -> parse_var_type_annot loc annot >>?= fun (annot, ty_name) -> let annot = default_annot annot ~default:(gen_access_annot packed_annot default_unpack_annot) in let instr = {apply = (fun kinfo k -> IUnpack (kinfo, t, k))} in let stack = Item_t (Option_t (t, ty_name), rest, annot) in typed ctxt 1 loc instr stack (* protocol *) | ( Prim (loc, I_ADDRESS, [], annot), Item_t (Contract_t _, rest, contract_annot) ) -> parse_var_annot loc annot ~default:(gen_access_annot contract_annot default_addr_annot) >>?= fun annot -> let instr = {apply = (fun kinfo k -> IAddress (kinfo, k))} in let stack = Item_t (Address_t None, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_CONTRACT, [ty], annot), Item_t (Address_t _, rest, addr_annot) ) -> parse_parameter_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ty >>?= fun (Ex_ty t, ctxt) -> parse_entrypoint_annot loc annot ~default:(gen_access_annot addr_annot default_contract_annot) >>?= fun (annot, entrypoint) -> ( match entrypoint with | None -> Ok "default" | Some (Field_annot "default") -> error (Unexpected_annotation loc) | Some (Field_annot entrypoint) -> if Compare.Int.(String.length entrypoint > 31) then error (Entrypoint_name_too_long entrypoint) else Ok entrypoint ) >>?= fun entrypoint -> let instr = {apply = (fun kinfo k -> IContract (kinfo, t, entrypoint, k))} in let stack = Item_t (Option_t (Contract_t (t, None), None), rest, annot) in typed ctxt 1 loc instr stack | ( Prim (loc, I_TRANSFER_TOKENS, [], annot), Item_t (p, Item_t (Mutez_t _, Item_t (Contract_t (cp, _), rest, _), _), _) ) -> check_item_ty ctxt p cp loc I_TRANSFER_TOKENS 1 4 >>?= fun (Eq, _, ctxt) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ITransfer_tokens (kinfo, k))} in let stack = Item_t (Operation_t None, rest, annot) in ( typed ctxt 0 loc instr stack : ((a, s) judgement * context) tzresult Lwt.t ) | ( Prim (loc, I_SET_DELEGATE, [], annot), Item_t (Option_t (Key_hash_t _, _), rest, _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISet_delegate (kinfo, k))} in let stack = Item_t (Operation_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (_, I_CREATE_ACCOUNT, _, _), _) -> fail (Deprecated_instruction I_CREATE_ACCOUNT) | (Prim (loc, I_IMPLICIT_ACCOUNT, [], annot), Item_t (Key_hash_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IImplicit_account (kinfo, k))} in let stack = Item_t (Contract_t (Unit_t None, None), rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_CREATE_CONTRACT, [(Seq _ as code)], annot), Item_t ( Option_t (Key_hash_t _, _), Item_t (Mutez_t _, Item_t (ginit, rest, _), _), _ ) ) -> parse_two_var_annot loc annot >>?= fun (op_annot, addr_annot) -> let canonical_code = fst @@ Micheline.extract_locations code in parse_toplevel ~legacy canonical_code >>?= fun (arg_type, storage_type, code_field, root_name) -> record_trace (Ill_formed_type (Some "parameter", canonical_code, location arg_type)) (parse_parameter_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy arg_type) >>?= fun (Ex_ty arg_type, ctxt) -> check_type_size ~legacy ctxt ~loc arg_type >>?= fun () -> (if legacy then ok_unit else well_formed_entrypoints ~root_name arg_type) >>?= fun () -> record_trace (Ill_formed_type (Some "storage", canonical_code, location storage_type)) (parse_storage_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy storage_type) >>?= fun (Ex_ty storage_type, ctxt) -> check_type_size ~legacy ctxt ~loc storage_type >>?= fun () -> let arg_annot = default_annot (type_to_var_annot (name_of_ty arg_type)) ~default:default_param_annot in let storage_annot = default_annot (type_to_var_annot (name_of_ty storage_type)) ~default:default_storage_annot in let arg_type_full = Pair_t ( (arg_type, None, arg_annot), (storage_type, None, storage_annot), None ) in let ret_type_full = Pair_t ( (List_t (Operation_t None, None), None, None), (storage_type, None, None), None ) in trace (Ill_typed_contract (canonical_code, [])) (parse_returning (Toplevel { storage_type; param_type = arg_type; root_name; legacy_create_contract_literal = false; }) ctxt ~legacy ?type_logger ~stack_depth:(stack_depth + 1) (arg_type_full, None) ret_type_full code_field) >>=? fun ( ( Lam ( { kbef = Item_t (arg, Bot_t, _); kaft = Item_t (ret, Bot_t, _); _ }, _ ) as lambda ), ctxt ) -> merge_types ~legacy ctxt loc arg arg_type_full >>?= fun (Eq, _, ctxt) -> merge_types ~legacy ctxt loc ret ret_type_full >>?= fun (Eq, _, ctxt) -> merge_types ~legacy ctxt loc storage_type ginit >>?= fun (Eq, _, ctxt) -> let instr = { apply = (fun kinfo k -> ICreate_contract {kinfo; storage_type; arg_type; lambda; root_name; k}); } in let stack = Item_t ( Operation_t None, Item_t (Address_t None, rest, addr_annot), op_annot ) in typed ctxt 0 loc instr stack | (Prim (loc, I_NOW, [], annot), stack) -> parse_var_annot loc annot ~default:default_now_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INow (kinfo, k))} in let stack = Item_t (Timestamp_t None, stack, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_AMOUNT, [], annot), stack) -> parse_var_annot loc annot ~default:default_amount_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IAmount (kinfo, k))} in let stack = Item_t (Mutez_t None, stack, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_CHAIN_ID, [], annot), stack) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IChainId (kinfo, k))} in let stack = Item_t (Chain_id_t None, stack, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_BALANCE, [], annot), stack) -> parse_var_annot loc annot ~default:default_balance_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IBalance (kinfo, k))} in let stack = Item_t (Mutez_t None, stack, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_LEVEL, [], annot), stack) -> parse_var_annot loc annot ~default:default_level_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ILevel (kinfo, k))} in let stack = Item_t (Nat_t None, stack, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_VOTING_POWER, [], annot), Item_t (Key_hash_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IVoting_power (kinfo, k))} in let stack = Item_t (Nat_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_TOTAL_VOTING_POWER, [], annot), stack) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ITotal_voting_power (kinfo, k))} in let stack = Item_t (Nat_t None, stack, annot) in typed ctxt 0 loc instr stack | (Prim (_, I_STEPS_TO_QUOTA, _, _), _) -> fail (Deprecated_instruction I_STEPS_TO_QUOTA) | (Prim (loc, I_SOURCE, [], annot), stack) -> parse_var_annot loc annot ~default:default_source_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISource (kinfo, k))} in let stack = Item_t (Address_t None, stack, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_SENDER, [], annot), stack) -> parse_var_annot loc annot ~default:default_sender_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISender (kinfo, k))} in let stack = Item_t (Address_t None, stack, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_SELF, [], annot), stack) -> Lwt.return ( parse_entrypoint_annot loc annot ~default:default_self_annot >>? fun (annot, entrypoint) -> let entrypoint = Option.fold ~some:(fun (Field_annot annot) -> annot) ~none:"default" entrypoint in let rec get_toplevel_type : tc_context -> ((a, s) judgement * context) tzresult = function | Lambda -> error (Self_in_lambda loc) | Dip (_, prev) -> get_toplevel_type prev | Toplevel {param_type; root_name; legacy_create_contract_literal = false} -> find_entrypoint param_type ~root_name entrypoint >>? fun (_, Ex_ty param_type) -> let instr = { apply = (fun kinfo k -> ISelf (kinfo, param_type, entrypoint, k)); } in let stack = Item_t (Contract_t (param_type, None), stack, annot) in typed_no_lwt ctxt 1 loc instr stack | Toplevel {param_type; root_name = _; legacy_create_contract_literal = true} -> let instr = { apply = (fun kinfo k -> ISelf (kinfo, param_type, "default", k)); } in let stack = Item_t (Contract_t (param_type, None), stack, annot) in typed_no_lwt ctxt 1 loc instr stack in get_toplevel_type tc_context ) | (Prim (loc, I_SELF_ADDRESS, [], annot), stack) -> parse_var_annot loc annot ~default:default_self_annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISelf_address (kinfo, k))} in let stack = Item_t (Address_t None, stack, annot) in typed ctxt 0 loc instr stack (* cryptography *) | (Prim (loc, I_HASH_KEY, [], annot), Item_t (Key_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IHash_key (kinfo, k))} in let stack = Item_t (Key_hash_t None, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_CHECK_SIGNATURE, [], annot), Item_t (Key_t _, Item_t (Signature_t _, Item_t (Bytes_t _, rest, _), _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ICheck_signature (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_BLAKE2B, [], annot), Item_t (Bytes_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IBlake2b (kinfo, k))} in let stack = Item_t (Bytes_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_SHA256, [], annot), Item_t (Bytes_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISha256 (kinfo, k))} in let stack = Item_t (Bytes_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_SHA512, [], annot), Item_t (Bytes_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISha512 (kinfo, k))} in let stack = Item_t (Bytes_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_KECCAK, [], annot), Item_t (Bytes_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IKeccak (kinfo, k))} in let stack = Item_t (Bytes_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_SHA3, [], annot), Item_t (Bytes_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> ISha3 (kinfo, k))} in let stack = Item_t (Bytes_t None, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Bls12_381_g1_t tn1, Item_t (Bls12_381_g1_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IAdd_bls12_381_g1 (kinfo, k))} in let stack = Item_t (Bls12_381_g1_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Bls12_381_g2_t tn1, Item_t (Bls12_381_g2_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IAdd_bls12_381_g2 (kinfo, k))} in let stack = Item_t (Bls12_381_g2_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_ADD, [], annot), Item_t (Bls12_381_fr_t tn1, Item_t (Bls12_381_fr_t tn2, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> merge_type_annot ~legacy tn1 tn2 >>?= fun tname -> let instr = {apply = (fun kinfo k -> IAdd_bls12_381_fr (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_g1_t tname, Item_t (Bls12_381_fr_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_g1 (kinfo, k))} in let stack = Item_t (Bls12_381_g1_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_g2_t tname, Item_t (Bls12_381_fr_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_g2 (kinfo, k))} in let stack = Item_t (Bls12_381_g2_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_fr_t tname, Item_t (Bls12_381_fr_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_fr (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Nat_t tname, Item_t (Bls12_381_fr_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_fr_z (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Int_t tname, Item_t (Bls12_381_fr_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_fr_z (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_fr_t tname, Item_t (Int_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_z_fr (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_MUL, [], annot), Item_t (Bls12_381_fr_t tname, Item_t (Nat_t _, rest, _), _) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IMul_bls12_381_z_fr (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t tname, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_INT, [], annot), Item_t (Bls12_381_fr_t _, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IInt_bls12_381_fr (kinfo, k))} in let stack = Item_t (Int_t None, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NEG, [], annot), Item_t (Bls12_381_g1_t tname, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INeg_bls12_381_g1 (kinfo, k))} in let stack = Item_t (Bls12_381_g1_t tname, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NEG, [], annot), Item_t (Bls12_381_g2_t tname, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INeg_bls12_381_g2 (kinfo, k))} in let stack = Item_t (Bls12_381_g2_t tname, rest, annot) in typed ctxt 0 loc instr stack | (Prim (loc, I_NEG, [], annot), Item_t (Bls12_381_fr_t tname, rest, _)) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> INeg_bls12_381_fr (kinfo, k))} in let stack = Item_t (Bls12_381_fr_t tname, rest, annot) in typed ctxt 0 loc instr stack | ( Prim (loc, I_PAIRING_CHECK, [], annot), Item_t ( List_t (Pair_t ((Bls12_381_g1_t _, _, _), (Bls12_381_g2_t _, _, _), _), _), rest, _ ) ) -> parse_var_annot loc annot >>?= fun annot -> let instr = {apply = (fun kinfo k -> IPairing_check_bls12_381 (kinfo, k))} in let stack = Item_t (Bool_t None, rest, annot) in typed ctxt 0 loc instr stack (* Tickets *) | (Prim (loc, I_TICKET, [], annot), Item_t (t, Item_t (Nat_t _, rest, _), _)) -> parse_var_annot loc annot >>?= fun annot -> comparable_ty_of_ty ctxt loc t >>?= fun (ty, ctxt) -> let instr = {apply = (fun kinfo k -> ITicket (kinfo, k))} in let stack = Item_t (Ticket_t (ty, None), rest, annot) in typed ctxt 1 loc instr stack | ( Prim (loc, I_READ_TICKET, [], annot), (Item_t (Ticket_t (t, _), _, _) as full_stack) ) -> parse_var_annot loc annot >>?= fun annot -> let () = check_dupable_comparable_ty t in let result = ty_of_comparable_ty @@ opened_ticket_type t in let instr = {apply = (fun kinfo k -> IRead_ticket (kinfo, k))} in let stack = Item_t (result, full_stack, annot) in typed ctxt 1 loc instr stack | ( Prim (loc, I_SPLIT_TICKET, [], annot), Item_t ( (Ticket_t (t, _) as ticket_t), Item_t (Pair_t ((Nat_t _, fa_a, a_a), (Nat_t _, fa_b, a_b), _), rest, _), _ ) ) -> parse_var_annot loc annot >>?= fun annot -> let () = check_dupable_comparable_ty t in let result = Option_t (Pair_t ((ticket_t, fa_a, a_a), (ticket_t, fa_b, a_b), None), None) in let instr = {apply = (fun kinfo k -> ISplit_ticket (kinfo, k))} in let stack = Item_t (result, rest, annot) in typed ctxt 1 loc instr stack | ( Prim (loc, I_JOIN_TICKETS, [], annot), Item_t ( Pair_t (((Ticket_t _ as ty_a), _, _), ((Ticket_t _ as ty_b), _, _), _), rest, _ ) ) -> ( parse_var_annot loc annot >>?= fun annot -> merge_types ~legacy ctxt loc ty_a ty_b >>?= fun (Eq, ty, ctxt) -> match ty with | Ticket_t (contents_ty, _) -> let instr = {apply = (fun kinfo k -> IJoin_tickets (kinfo, contents_ty, k))} in let stack = Item_t (Option_t (ty, None), rest, annot) in typed ctxt 0 loc instr stack | _ -> (* TODO: fix injectivity of types *) assert false ) (* Primitive parsing errors *) | ( Prim ( loc, ( ( I_DUP | I_SWAP | I_SOME | I_UNIT | I_PAIR | I_UNPAIR | I_CAR | I_CDR | I_CONS | I_CONCAT | I_SLICE | I_MEM | I_UPDATE | I_GET | I_EXEC | I_FAILWITH | I_SIZE | I_ADD | I_SUB | I_MUL | I_EDIV | I_OR | I_AND | I_XOR | I_NOT | I_ABS | I_NEG | I_LSL | I_LSR | I_COMPARE | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE | I_TRANSFER_TOKENS | I_SET_DELEGATE | I_NOW | I_IMPLICIT_ACCOUNT | I_AMOUNT | I_BALANCE | I_LEVEL | I_CHECK_SIGNATURE | I_HASH_KEY | I_SOURCE | I_SENDER | I_BLAKE2B | I_SHA256 | I_SHA512 | I_ADDRESS | I_RENAME | I_PACK | I_ISNAT | I_INT | I_SELF | I_CHAIN_ID | I_NEVER | I_VOTING_POWER | I_TOTAL_VOTING_POWER | I_KECCAK | I_SHA3 | I_PAIRING_CHECK | I_TICKET | I_READ_TICKET | I_SPLIT_TICKET | I_JOIN_TICKETS ) as name ), (_ :: _ as l), _ ), _ ) -> fail (Invalid_arity (loc, name, 0, List.length l)) | ( Prim ( loc, ( ( I_NONE | I_LEFT | I_RIGHT | I_NIL | I_MAP | I_ITER | I_EMPTY_SET | I_LOOP | I_LOOP_LEFT | I_CONTRACT | I_CAST | I_UNPACK | I_CREATE_CONTRACT ) as name ), (([] | _ :: _ :: _) as l), _ ), _ ) -> fail (Invalid_arity (loc, name, 1, List.length l)) | ( Prim ( loc, ( ( I_PUSH | I_IF_NONE | I_IF_LEFT | I_IF_CONS | I_EMPTY_MAP | I_EMPTY_BIG_MAP | I_IF ) as name ), (([] | [_] | _ :: _ :: _ :: _) as l), _ ), _ ) -> fail (Invalid_arity (loc, name, 2, List.length l)) | ( Prim (loc, I_LAMBDA, (([] | [_] | [_; _] | _ :: _ :: _ :: _ :: _) as l), _), _ ) -> fail (Invalid_arity (loc, I_LAMBDA, 3, List.length l)) (* Stack errors *) | ( Prim ( loc, ( ( I_ADD | I_SUB | I_MUL | I_EDIV | I_AND | I_OR | I_XOR | I_LSL | I_LSR | I_CONCAT | I_PAIRING_CHECK ) as name ), [], _ ), Item_t (ta, Item_t (tb, _, _), _) ) -> serialize_ty_for_error ctxt ta >>?= fun (ta, ctxt) -> serialize_ty_for_error ctxt tb >>?= fun (tb, _ctxt) -> fail (Undefined_binop (loc, name, ta, tb)) | ( Prim ( loc, ( ( I_NEG | I_ABS | I_NOT | I_SIZE | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE (* CONCAT is both unary and binary; this case can only be triggered on a singleton stack *) | I_CONCAT ) as name ), [], _ ), Item_t (t, _, _) ) -> serialize_ty_for_error ctxt t >>?= fun (t, _ctxt) -> fail (Undefined_unop (loc, name, t)) | (Prim (loc, ((I_UPDATE | I_SLICE) as name), [], _), stack) -> Lwt.return ( serialize_stack_for_error ctxt stack >>? fun (stack, _ctxt) -> error (Bad_stack (loc, name, 3, stack)) ) | (Prim (loc, I_CREATE_CONTRACT, _, _), stack) -> serialize_stack_for_error ctxt stack >>?= fun (stack, _ctxt) -> fail (Bad_stack (loc, I_CREATE_CONTRACT, 7, stack)) | (Prim (loc, I_TRANSFER_TOKENS, [], _), stack) -> Lwt.return ( serialize_stack_for_error ctxt stack >>? fun (stack, _ctxt) -> error (Bad_stack (loc, I_TRANSFER_TOKENS, 4, stack)) ) | ( Prim ( loc, ( ( I_DROP | I_DUP | I_CAR | I_CDR | I_UNPAIR | I_SOME | I_BLAKE2B | I_SHA256 | I_SHA512 | I_DIP | I_IF_NONE | I_LEFT | I_RIGHT | I_IF_LEFT | I_IF | I_LOOP | I_IF_CONS | I_IMPLICIT_ACCOUNT | I_NEG | I_ABS | I_INT | I_NOT | I_HASH_KEY | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE | I_SIZE | I_FAILWITH | I_RENAME | I_PACK | I_ISNAT | I_ADDRESS | I_SET_DELEGATE | I_CAST | I_MAP | I_ITER | I_LOOP_LEFT | I_UNPACK | I_CONTRACT | I_NEVER | I_KECCAK | I_SHA3 | I_READ_TICKET | I_JOIN_TICKETS ) as name ), _, _ ), stack ) -> Lwt.return ( serialize_stack_for_error ctxt stack >>? fun (stack, _ctxt) -> error (Bad_stack (loc, name, 1, stack)) ) | ( Prim ( loc, ( ( I_SWAP | I_PAIR | I_CONS | I_GET | I_MEM | I_EXEC | I_CHECK_SIGNATURE | I_ADD | I_SUB | I_MUL | I_EDIV | I_AND | I_OR | I_XOR | I_LSL | I_LSR | I_COMPARE | I_PAIRING_CHECK | I_TICKET | I_SPLIT_TICKET ) as name ), _, _ ), stack ) -> Lwt.return ( serialize_stack_for_error ctxt stack >>? fun (stack, _ctxt) -> error (Bad_stack (loc, name, 2, stack)) ) (* Generic parsing errors *) | (expr, _) -> fail @@ unexpected expr [Seq_kind] Instr_namespace [ I_DROP; I_DUP; I_DIG; I_DUG; I_SWAP; I_SOME; I_UNIT; I_PAIR; I_UNPAIR; I_CAR; I_CDR; I_CONS; I_MEM; I_UPDATE; I_MAP; I_ITER; I_GET; I_GET_AND_UPDATE; I_EXEC; I_FAILWITH; I_SIZE; I_CONCAT; I_ADD; I_SUB; I_MUL; I_EDIV; I_OR; I_AND; I_XOR; I_NOT; I_ABS; I_INT; I_NEG; I_LSL; I_LSR; I_COMPARE; I_EQ; I_NEQ; I_LT; I_GT; I_LE; I_GE; I_TRANSFER_TOKENS; I_CREATE_CONTRACT; I_NOW; I_AMOUNT; I_BALANCE; I_LEVEL; I_IMPLICIT_ACCOUNT; I_CHECK_SIGNATURE; I_BLAKE2B; I_SHA256; I_SHA512; I_HASH_KEY; I_PUSH; I_NONE; I_LEFT; I_RIGHT; I_NIL; I_EMPTY_SET; I_DIP; I_LOOP; I_IF_NONE; I_IF_LEFT; I_IF_CONS; I_EMPTY_MAP; I_EMPTY_BIG_MAP; I_IF; I_SOURCE; I_SENDER; I_SELF; I_SELF_ADDRESS; I_LAMBDA; I_NEVER; I_VOTING_POWER; I_TOTAL_VOTING_POWER; I_KECCAK; I_SHA3; I_PAIRING_CHECK; I_SAPLING_EMPTY_STATE; I_SAPLING_VERIFY_UPDATE; I_TICKET; I_READ_TICKET; I_SPLIT_TICKET; I_JOIN_TICKETS ] and parse_contract : type arg. stack_depth:int -> legacy:bool -> context -> Script.location -> arg ty -> Contract.t -> entrypoint:string -> (context * arg typed_contract) tzresult Lwt.t = fun ~stack_depth ~legacy ctxt loc arg contract ~entrypoint -> Gas.consume ctxt Typecheck_costs.contract_exists >>?= fun ctxt -> Contract.exists ctxt contract >>=? function | false -> fail (Invalid_contract (loc, contract)) | true -> ( trace (Invalid_contract (loc, contract)) @@ Contract.get_script_code ctxt contract >>=? fun (ctxt, code) -> Lwt.return @@ match code with | None -> ( ty_eq ctxt loc arg (Unit_t None) >>? fun (Eq, ctxt) -> match entrypoint with | "default" -> let contract : arg typed_contract = (arg, (contract, entrypoint)) in ok (ctxt, contract) | entrypoint -> error (No_such_entrypoint entrypoint) ) | Some code -> Script.force_decode_in_context ctxt code >>? fun (code, ctxt) -> parse_toplevel ~legacy:true code >>? fun (arg_type, _, _, root_name) -> parse_parameter_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy:true arg_type >>? fun (Ex_ty targ, ctxt) -> find_entrypoint_for_type ~legacy ~full:targ ~expected:arg ~root_name entrypoint ctxt loc >|? fun (ctxt, entrypoint, arg) -> let contract : arg typed_contract = (arg, (contract, entrypoint)) in (ctxt, contract) ) and parse_toplevel : legacy:bool -> Script.expr -> (Script.node * Script.node * Script.node * field_annot option) tzresult = fun ~legacy toplevel -> record_trace (Ill_typed_contract (toplevel, [])) @@ match root toplevel with | Int (loc, _) -> error (Invalid_kind (loc, [Seq_kind], Int_kind)) | String (loc, _) -> error (Invalid_kind (loc, [Seq_kind], String_kind)) | Bytes (loc, _) -> error (Invalid_kind (loc, [Seq_kind], Bytes_kind)) | Prim (loc, _, _, _) -> error (Invalid_kind (loc, [Seq_kind], Prim_kind)) | Seq (_, fields) -> ( let rec find_fields p s c fields = match fields with | [] -> ok (p, s, c) | Int (loc, _) :: _ -> error (Invalid_kind (loc, [Prim_kind], Int_kind)) | String (loc, _) :: _ -> error (Invalid_kind (loc, [Prim_kind], String_kind)) | Bytes (loc, _) :: _ -> error (Invalid_kind (loc, [Prim_kind], Bytes_kind)) | Seq (loc, _) :: _ -> error (Invalid_kind (loc, [Prim_kind], Seq_kind)) | Prim (loc, K_parameter, [arg], annot) :: rest -> ( match p with | None -> find_fields (Some (arg, loc, annot)) s c rest | Some _ -> error (Duplicate_field (loc, K_parameter)) ) | Prim (loc, K_storage, [arg], annot) :: rest -> ( match s with | None -> find_fields p (Some (arg, loc, annot)) c rest | Some _ -> error (Duplicate_field (loc, K_storage)) ) | Prim (loc, K_code, [arg], annot) :: rest -> ( match c with | None -> find_fields p s (Some (arg, loc, annot)) rest | Some _ -> error (Duplicate_field (loc, K_code)) ) | Prim (loc, ((K_parameter | K_storage | K_code) as name), args, _) :: _ -> error (Invalid_arity (loc, name, 1, List.length args)) | Prim (loc, name, _, _) :: _ -> let allowed = [K_parameter; K_storage; K_code] in error (Invalid_primitive (loc, allowed, name)) in find_fields None None None fields >>? function | (None, _, _) -> error (Missing_field K_parameter) | (Some _, None, _) -> error (Missing_field K_storage) | (Some _, Some _, None) -> error (Missing_field K_code) | (Some (p, ploc, pannot), Some (s, sloc, sannot), Some (c, cloc, carrot)) -> let maybe_root_name = (* root name can be attached to either the parameter primitive or the toplevel constructor *) Script_ir_annot.extract_field_annot p >>? fun (p, root_name) -> match root_name with | Some _ -> ok (p, pannot, root_name) | None -> ( match pannot with | [single] when Compare.Int.(String.length single > 0) && Compare.Char.(single.[0] = '%') -> parse_field_annot ploc [single] >>? fun pannot -> ok (p, [], pannot) | _ -> ok (p, pannot, None) ) in if legacy then (* legacy semantics ignores spurious annotations *) let (p, root_name) = match maybe_root_name with | Ok (p, _, root_name) -> (p, root_name) | Error _ -> (p, None) in ok (p, s, c, root_name) else (* only one field annot is allowed to set the root entrypoint name *) maybe_root_name >>? fun (p, pannot, root_name) -> Script_ir_annot.error_unexpected_annot ploc pannot >>? fun () -> Script_ir_annot.error_unexpected_annot cloc carrot >>? fun () -> Script_ir_annot.error_unexpected_annot sloc sannot >>? fun () -> ok (p, s, c, root_name) ) (* Same as [parse_contract], but does not fail when the contact is missing or if the expected type doesn't match the actual one. In that case None is returned and some overapproximation of the typechecking gas is consumed. This can still fail on gas exhaustion. *) let parse_contract_for_script : type arg. context -> Script.location -> arg ty -> Contract.t -> entrypoint:string -> (context * arg typed_contract option) tzresult Lwt.t = fun ctxt loc arg contract ~entrypoint -> Gas.consume ctxt Typecheck_costs.contract_exists >>?= fun ctxt -> match (Contract.is_implicit contract, entrypoint) with | (Some _, "default") -> (* An implicit account on the "default" entrypoint always exists and has type unit. *) Lwt.return ( match ty_eq ctxt loc arg (Unit_t None) with | Ok (Eq, ctxt) -> let contract : arg typed_contract = (arg, (contract, entrypoint)) in ok (ctxt, Some contract) | Error _ -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>? fun ctxt -> ok (ctxt, None) ) | (Some _, _) -> Lwt.return ( Gas.consume ctxt Typecheck_costs.parse_instr_cycle >|? fun ctxt -> (* An implicit account on any other entrypoint is not a valid contract. *) (ctxt, None) ) | (None, _) -> ( (* Originated account *) Contract.exists ctxt contract >>=? function | false -> return (ctxt, None) | true -> ( trace (Invalid_contract (loc, contract)) @@ Contract.get_script_code ctxt contract >>=? fun (ctxt, code) -> match code with | None -> (* Since protocol 005, we have the invariant that all originated accounts have code *) assert false | Some code -> Lwt.return ( Script.force_decode_in_context ctxt code >>? fun (code, ctxt) -> (* can only fail because of gas *) match parse_toplevel ~legacy:true code with | Error _ -> error (Invalid_contract (loc, contract)) | Ok (arg_type, _, _, root_name) -> ( match parse_parameter_ty ctxt ~stack_depth:0 ~legacy:true arg_type with | Error _ -> error (Invalid_contract (loc, contract)) | Ok (Ex_ty targ, ctxt) -> ( (* we don't check targ size here because it's a legacy contract code *) match find_entrypoint_for_type ~legacy:false ~full:targ ~expected:arg ~root_name entrypoint ctxt loc >|? fun (ctxt, entrypoint, arg) -> let contract : arg typed_contract = (arg, (contract, entrypoint)) in (ctxt, Some contract) with | Ok res -> ok res | Error _ -> (* overapproximation by checking if targ = targ, can only fail because of gas *) merge_types ~legacy:false ctxt loc targ targ >|? fun (Eq, _, ctxt) -> (ctxt, None) ) ) ) ) ) let parse_code : ?type_logger:type_logger -> context -> legacy:bool -> code:lazy_expr -> (ex_code * context) tzresult Lwt.t = fun ?type_logger ctxt ~legacy ~code -> Script.force_decode_in_context ctxt code >>?= fun (code, ctxt) -> parse_toplevel ~legacy code >>?= fun (arg_type, storage_type, code_field, root_name) -> let arg_type_loc = location arg_type in record_trace (Ill_formed_type (Some "parameter", code, arg_type_loc)) (parse_parameter_ty ctxt ~stack_depth:0 ~legacy arg_type) >>?= fun (Ex_ty arg_type, ctxt) -> check_type_size ~legacy ctxt ~loc:arg_type_loc arg_type >>?= fun () -> (if legacy then ok_unit else well_formed_entrypoints ~root_name arg_type) >>?= fun () -> let storage_type_loc = location storage_type in record_trace (Ill_formed_type (Some "storage", code, storage_type_loc)) (parse_storage_ty ctxt ~stack_depth:0 ~legacy storage_type) >>?= fun (Ex_ty storage_type, ctxt) -> check_type_size ~legacy ctxt ~loc:storage_type_loc storage_type >>?= fun () -> let arg_annot = default_annot (type_to_var_annot (name_of_ty arg_type)) ~default:default_param_annot in let storage_annot = default_annot (type_to_var_annot (name_of_ty storage_type)) ~default:default_storage_annot in let arg_type_full = Pair_t ((arg_type, None, arg_annot), (storage_type, None, storage_annot), None) in let ret_type_full = Pair_t ( (List_t (Operation_t None, None), None, None), (storage_type, None, None), None ) in trace (Ill_typed_contract (code, [])) (parse_returning (Toplevel { storage_type; param_type = arg_type; root_name; legacy_create_contract_literal = false; }) ctxt ~legacy ~stack_depth:0 ?type_logger (arg_type_full, None) ret_type_full code_field) >|=? fun (code, ctxt) -> (Ex_code {code; arg_type; storage_type; root_name}, ctxt) let parse_storage : ?type_logger:type_logger -> context -> legacy:bool -> allow_forged:bool -> 'storage ty -> storage:lazy_expr -> ('storage * context) tzresult Lwt.t = fun ?type_logger ctxt ~legacy ~allow_forged storage_type ~storage -> Script.force_decode_in_context ctxt storage >>?= fun (storage, ctxt) -> trace_eval (fun () -> Lwt.return ( serialize_ty_for_error ctxt storage_type >|? fun (storage_type, _ctxt) -> Ill_typed_data (None, storage, storage_type) )) (parse_data ?type_logger ~stack_depth:0 ctxt ~legacy ~allow_forged storage_type (root storage)) let parse_script : ?type_logger:type_logger -> context -> legacy:bool -> allow_forged_in_storage:bool -> Script.t -> (ex_script * context) tzresult Lwt.t = fun ?type_logger ctxt ~legacy ~allow_forged_in_storage {code; storage} -> parse_code ~legacy ctxt ?type_logger ~code >>=? fun (Ex_code {code; arg_type; storage_type; root_name}, ctxt) -> parse_storage ?type_logger ctxt ~legacy ~allow_forged:allow_forged_in_storage storage_type ~storage >|=? fun (storage, ctxt) -> (Ex_script {code; arg_type; storage; storage_type; root_name}, ctxt) let typecheck_code : legacy:bool -> context -> Script.expr -> (type_map * context) tzresult Lwt.t = fun ~legacy ctxt code -> parse_toplevel ~legacy code >>?= fun (arg_type, storage_type, code_field, root_name) -> let type_map = ref [] in let arg_type_loc = location arg_type in record_trace (Ill_formed_type (Some "parameter", code, arg_type_loc)) (parse_parameter_ty ctxt ~stack_depth:0 ~legacy arg_type) >>?= fun (Ex_ty arg_type, ctxt) -> check_type_size ~legacy ctxt ~loc:arg_type_loc arg_type >>?= fun () -> (if legacy then ok_unit else well_formed_entrypoints ~root_name arg_type) >>?= fun () -> let storage_type_loc = location storage_type in record_trace (Ill_formed_type (Some "storage", code, storage_type_loc)) (parse_storage_ty ctxt ~stack_depth:0 ~legacy storage_type) >>?= fun (Ex_ty storage_type, ctxt) -> check_type_size ~legacy ctxt ~loc:storage_type_loc storage_type >>?= fun () -> let arg_annot = default_annot (type_to_var_annot (name_of_ty arg_type)) ~default:default_param_annot in let storage_annot = default_annot (type_to_var_annot (name_of_ty storage_type)) ~default:default_storage_annot in let arg_type_full = Pair_t ((arg_type, None, arg_annot), (storage_type, None, storage_annot), None) in let ret_type_full = Pair_t ( (List_t (Operation_t None, None), None, None), (storage_type, None, None), None ) in let result = parse_returning (Toplevel { storage_type; param_type = arg_type; root_name; legacy_create_contract_literal = false; }) ctxt ~legacy ~stack_depth:0 ~type_logger:(fun loc bef aft -> type_map := (loc, (bef, aft)) :: !type_map) (arg_type_full, None) ret_type_full code_field in trace (Ill_typed_contract (code, !type_map)) result >|=? fun (Lam _, ctxt) -> (!type_map, ctxt) module Entrypoints_map = Map.Make (String) let list_entrypoints (type full) (full : full ty) ctxt ~root_name = let merge path annot (type t) (ty : t ty) reachable ((unreachables, all) as acc) = match annot with | None | Some (Field_annot "") -> ( ok @@ if reachable then acc else match ty with | Union_t _ -> acc | _ -> (List.rev path :: unreachables, all) ) | Some (Field_annot name) -> if Compare.Int.(String.length name > 31) then ok (List.rev path :: unreachables, all) else if Entrypoints_map.mem name all then ok (List.rev path :: unreachables, all) else unparse_ty ctxt ty >>? fun (unparsed_ty, _) -> ok ( unreachables, Entrypoints_map.add name (List.rev path, unparsed_ty) all ) in let rec fold_tree : type t. t ty -> prim list -> bool -> prim list list * (prim list * Script.node) Entrypoints_map.t -> (prim list list * (prim list * Script.node) Entrypoints_map.t) tzresult = fun t path reachable acc -> match t with | Union_t ((tl, al), (tr, ar), _) -> merge (D_Left :: path) al tl reachable acc >>? fun acc -> merge (D_Right :: path) ar tr reachable acc >>? fun acc -> fold_tree tl (D_Left :: path) (match al with Some _ -> true | None -> reachable) acc >>? fun acc -> fold_tree tr (D_Right :: path) (match ar with Some _ -> true | None -> reachable) acc | _ -> ok acc in unparse_ty ctxt full >>? fun (unparsed_full, _) -> let (init, reachable) = match root_name with | None | Some (Field_annot "") -> (Entrypoints_map.empty, false) | Some (Field_annot name) -> (Entrypoints_map.singleton name ([], unparsed_full), true) in fold_tree full [] reachable ([], init) [@@coq_axiom_with_reason "unsupported syntax"] (* ---- Unparsing (Typed IR -> Untyped expressions) --------------------------*) (* -- Unparsing data of any type -- *) let comb_witness2 : type t. t ty -> (t, unit -> unit -> unit) comb_witness = function | Pair_t (_, (Pair_t _, _, _), _) -> Comb_Pair (Comb_Pair Comb_Any) | Pair_t _ -> Comb_Pair Comb_Any | _ -> Comb_Any let rec unparse_data : type a. context -> stack_depth:int -> unparsing_mode -> a ty -> a -> (Script.node * context) tzresult Lwt.t = fun ctxt ~stack_depth mode ty a -> Gas.consume ctxt Unparse_costs.unparse_data_cycle >>?= fun ctxt -> let non_terminal_recursion ctxt mode ty a = if Compare.Int.(stack_depth > 10_000) then fail Unparsing_too_many_recursive_calls else unparse_data ctxt ~stack_depth:(stack_depth + 1) mode ty a in match (ty, a) with | (Unit_t _, v) -> Lwt.return @@ unparse_unit ctxt v | (Int_t _, v) -> Lwt.return @@ unparse_int ctxt v | (Nat_t _, v) -> Lwt.return @@ unparse_nat ctxt v | (String_t _, s) -> Lwt.return @@ unparse_string ctxt s | (Bytes_t _, s) -> Lwt.return @@ unparse_bytes ctxt s | (Bool_t _, b) -> Lwt.return @@ unparse_bool ctxt b | (Timestamp_t _, t) -> Lwt.return @@ unparse_timestamp ctxt mode t | (Address_t _, address) -> Lwt.return @@ unparse_address ctxt mode address | (Contract_t _, contract) -> Lwt.return @@ unparse_contract ctxt mode contract | (Signature_t _, s) -> Lwt.return @@ unparse_signature ctxt mode s | (Mutez_t _, v) -> Lwt.return @@ unparse_mutez ctxt v | (Key_t _, k) -> Lwt.return @@ unparse_key ctxt mode k | (Key_hash_t _, k) -> Lwt.return @@ unparse_key_hash ctxt mode k | (Operation_t _, operation) -> Lwt.return @@ unparse_operation ctxt operation | (Chain_id_t _, chain_id) -> Lwt.return @@ unparse_chain_id ctxt mode chain_id | (Bls12_381_g1_t _, x) -> Lwt.return @@ unparse_bls12_381_g1 ctxt x | (Bls12_381_g2_t _, x) -> Lwt.return @@ unparse_bls12_381_g2 ctxt x | (Bls12_381_fr_t _, x) -> Lwt.return @@ unparse_bls12_381_fr ctxt x | (Pair_t ((tl, _, _), (tr, _, _), _), pair) -> let r_witness = comb_witness2 tr in let unparse_l ctxt v = non_terminal_recursion ctxt mode tl v in let unparse_r ctxt v = non_terminal_recursion ctxt mode tr v in unparse_pair unparse_l unparse_r ctxt mode r_witness pair | (Union_t ((tl, _), (tr, _), _), v) -> let unparse_l ctxt v = non_terminal_recursion ctxt mode tl v in let unparse_r ctxt v = non_terminal_recursion ctxt mode tr v in unparse_union unparse_l unparse_r ctxt v | (Option_t (t, _), v) -> let unparse_v ctxt v = non_terminal_recursion ctxt mode t v in unparse_option unparse_v ctxt v | (List_t (t, _), items) -> fold_left_s (fun (l, ctxt) element -> non_terminal_recursion ctxt mode t element >|=? fun (unparsed, ctxt) -> (unparsed :: l, ctxt)) ([], ctxt) items.elements >|=? fun (items, ctxt) -> (Micheline.Seq (-1, List.rev items), ctxt) | (Ticket_t (t, _), {ticketer; contents; amount}) -> let t = ty_of_comparable_ty @@ opened_ticket_type t in (unparse_data [@tailcall]) ctxt ~stack_depth mode t (ticketer, (contents, amount)) | (Set_t (t, _), set) -> fold_left_s (fun (l, ctxt) item -> unparse_comparable_data ctxt mode t item >|=? fun (item, ctxt) -> (item :: l, ctxt)) ([], ctxt) (set_fold (fun e acc -> e :: acc) set []) >|=? fun (items, ctxt) -> (Micheline.Seq (-1, items), ctxt) | (Map_t (kt, vt, _), map) -> let items = map_fold (fun k v acc -> (k, v) :: acc) map [] in unparse_items ctxt ~stack_depth:(stack_depth + 1) mode kt vt items >|=? fun (items, ctxt) -> (Micheline.Seq (-1, items), ctxt) | (Big_map_t (_kt, _vt, _), {id = Some id; diff = {size}; _}) when Compare.Int.( = ) size 0 -> return (Micheline.Int (-1, Big_map.Id.unparse_to_z id), ctxt) | (Big_map_t (kt, vt, _), {id = Some id; diff = {map; _}; _}) -> let items = Big_map_overlay.fold (fun _ (k, v) acc -> (k, v) :: acc) map [] in let items = (* Sort the items in Michelson comparison order and not in key hash order. This code path is only exercized for tracing, so we don't bother carbonating this sort operation precisely. Also, the sort uses a reverse compare because [unparse_items] will reverse the result. *) List.sort (fun (a, _) (b, _) -> compare_comparable kt b a) items in let vt = Option_t (vt, None) in unparse_items ctxt ~stack_depth:(stack_depth + 1) mode kt vt items >|=? fun (items, ctxt) -> ( Micheline.Prim ( -1, D_Pair, [Int (-1, Big_map.Id.unparse_to_z id); Seq (-1, items)], [] ), ctxt ) | (Big_map_t (kt, vt, _), {id = None; diff = {map; _}; _}) -> let items = Big_map_overlay.fold (fun _ (k, v) acc -> match v with None -> acc | Some v -> (k, v) :: acc) map [] in let items = (* See note above. *) List.sort (fun (a, _) (b, _) -> compare_comparable kt b a) items in unparse_items ctxt ~stack_depth:(stack_depth + 1) mode kt vt items >|=? fun (items, ctxt) -> (Micheline.Seq (-1, items), ctxt) | (Lambda_t _, Lam (_, original_code)) -> unparse_code ctxt ~stack_depth:(stack_depth + 1) mode original_code | (Never_t _, _) -> . | (Sapling_transaction_t _, s) -> Lwt.return ( Gas.consume ctxt (Unparse_costs.sapling_transaction s) >|? fun ctxt -> let bytes = Data_encoding.Binary.to_bytes_exn Sapling.transaction_encoding s in (Bytes (-1, bytes), ctxt) ) | (Sapling_state_t _, {id; diff; _}) -> Lwt.return ( Gas.consume ctxt (Unparse_costs.sapling_diff diff) >|? fun ctxt -> ( ( match diff with | {commitments_and_ciphertexts = []; nullifiers = []} -> ( match id with | None -> Micheline.Seq (-1, []) | Some id -> let id = Sapling.Id.unparse_to_z id in Micheline.Int (-1, id) ) | diff -> ( let diff_bytes = Data_encoding.Binary.to_bytes_exn Sapling.diff_encoding diff in let unparsed_diff = Bytes (-1, diff_bytes) in match id with | None -> unparsed_diff | Some id -> let id = Sapling.Id.unparse_to_z id in Micheline.Prim (-1, D_Pair, [Int (-1, id); unparsed_diff], []) ) ), ctxt ) ) and unparse_items : type k v. context -> stack_depth:int -> unparsing_mode -> k comparable_ty -> v ty -> (k * v) list -> (Script.node list * context) tzresult Lwt.t = fun ctxt ~stack_depth mode kt vt items -> fold_left_s (fun (l, ctxt) (k, v) -> unparse_comparable_data ctxt mode kt k >>=? fun (key, ctxt) -> unparse_data ctxt ~stack_depth:(stack_depth + 1) mode vt v >|=? fun (value, ctxt) -> (Prim (-1, D_Elt, [key; value], []) :: l, ctxt)) ([], ctxt) items and unparse_code ctxt ~stack_depth mode code = let legacy = true in Gas.consume ctxt Unparse_costs.unparse_instr_cycle >>?= fun ctxt -> let non_terminal_recursion ctxt mode code = if Compare.Int.(stack_depth > 10_000) then fail Unparsing_too_many_recursive_calls else unparse_code ctxt ~stack_depth:(stack_depth + 1) mode code in match code with | Prim (loc, I_PUSH, [ty; data], annot) -> parse_packable_ty ctxt ~stack_depth:(stack_depth + 1) ~legacy ty >>?= fun (Ex_ty t, ctxt) -> let allow_forged = false (* Forgeable in PUSH data are already forbidden at parsing, the only case for which this matters is storing a lambda resulting from APPLYing a non-forgeable but this cannot happen either as long as all packable values are also forgeable. *) in parse_data ctxt ~stack_depth:(stack_depth + 1) ~legacy ~allow_forged t data >>=? fun (data, ctxt) -> unparse_data ctxt ~stack_depth:(stack_depth + 1) mode t data >>=? fun (data, ctxt) -> return (Prim (loc, I_PUSH, [ty; data], annot), ctxt) | Seq (loc, items) -> fold_left_s (fun (l, ctxt) item -> non_terminal_recursion ctxt mode item >|=? fun (item, ctxt) -> (item :: l, ctxt)) ([], ctxt) items >>=? fun (items, ctxt) -> return (Micheline.Seq (loc, List.rev items), ctxt) | Prim (loc, prim, items, annot) -> fold_left_s (fun (l, ctxt) item -> non_terminal_recursion ctxt mode item >|=? fun (item, ctxt) -> (item :: l, ctxt)) ([], ctxt) items >>=? fun (items, ctxt) -> return (Prim (loc, prim, List.rev items, annot), ctxt) | (Int _ | String _ | Bytes _) as atom -> return (atom, ctxt) (* Gas accounting may not be perfect in this function, as it is only called by RPCs. *) let unparse_script ctxt mode {code; arg_type; storage; storage_type; root_name} = let (Lam (_, original_code)) = code in unparse_code ctxt ~stack_depth:0 mode original_code >>=? fun (code, ctxt) -> unparse_data ctxt ~stack_depth:0 mode storage_type storage >>=? fun (storage, ctxt) -> Lwt.return ( unparse_ty ctxt arg_type >>? fun (arg_type, ctxt) -> unparse_ty ctxt storage_type >>? fun (storage_type, ctxt) -> let arg_type = add_field_annot root_name None arg_type in let open Micheline in let code = Seq ( -1, [ Prim (-1, K_parameter, [arg_type], []); Prim (-1, K_storage, [storage_type], []); Prim (-1, K_code, [code], []) ] ) in Gas.consume ctxt Unparse_costs.unparse_instr_cycle >>? fun ctxt -> Gas.consume ctxt Unparse_costs.unparse_instr_cycle >>? fun ctxt -> Gas.consume ctxt Unparse_costs.unparse_instr_cycle >>? fun ctxt -> Gas.consume ctxt Unparse_costs.unparse_instr_cycle >>? fun ctxt -> Gas.consume ctxt (Script.strip_locations_cost code) >>? fun ctxt -> Gas.consume ctxt (Script.strip_locations_cost storage) >|? fun ctxt -> ( { code = lazy_expr (strip_locations code); storage = lazy_expr (strip_locations storage); }, ctxt ) ) let pack_data ctxt typ data ~mode = unparse_data ~stack_depth:0 ctxt mode typ data >>=? fun (unparsed, ctxt) -> Lwt.return @@ pack_node unparsed ctxt let hash_data ctxt typ data = pack_data ctxt typ data ~mode:Optimized_legacy >>=? fun (bytes, ctxt) -> Lwt.return @@ hash_bytes ctxt bytes let pack_data ctxt typ data = pack_data ctxt typ data ~mode:Optimized_legacy (* ---------------- Big map -------------------------------------------------*) let empty_big_map key_type value_type = { id = None; diff = {map = Big_map_overlay.empty; size = 0}; key_type; value_type; } let big_map_mem ctxt key {id; diff; key_type; _} = hash_comparable_data ctxt key_type key >>=? fun (key, ctxt) -> match (Big_map_overlay.find_opt key diff.map, id) with | (None, None) -> return (false, ctxt) | (None, Some id) -> Alpha_context.Big_map.mem ctxt id key >|=? fun (ctxt, res) -> (res, ctxt) | (Some (_, None), _) -> return (false, ctxt) | (Some (_, Some _), _) -> return (true, ctxt) let big_map_get_by_hash ctxt key {id; diff; value_type} = match (Big_map_overlay.find_opt key diff.map, id) with | (Some (_, x), _) -> return (x, ctxt) | (None, None) -> return (None, ctxt) | (None, Some id) -> ( Alpha_context.Big_map.get_opt ctxt id key >>=? function | (ctxt, None) -> return (None, ctxt) | (ctxt, Some value) -> parse_data ~stack_depth:0 ctxt ~legacy:true ~allow_forged:true value_type (Micheline.root value) >|=? fun (x, ctxt) -> (Some x, ctxt) ) let big_map_get ctxt key map = hash_comparable_data ctxt map.key_type key >>=? fun (key_hash, ctxt) -> big_map_get_by_hash ctxt key_hash map let big_map_update_by_hash ctxt key_hash key value map = let contains = Big_map_overlay.mem key_hash map.diff.map in return ( { map with diff = { map = Big_map_overlay.add key_hash (key, value) map.diff.map; size = (if contains then map.diff.size else map.diff.size + 1); }; }, ctxt ) let big_map_update ctxt key value map = hash_comparable_data ctxt map.key_type key >>=? fun (key_hash, ctxt) -> big_map_update_by_hash ctxt key_hash key value map let big_map_get_and_update ctxt key value map = hash_comparable_data ctxt map.key_type key >>=? fun (key_hash, ctxt) -> big_map_update_by_hash ctxt key_hash key value map >>=? fun (map', ctxt) -> big_map_get_by_hash ctxt key_hash map >>=? fun (old_value, ctxt) -> return ((old_value, map'), ctxt) (* ---------------- Lazy storage---------------------------------------------*) type lazy_storage_ids = Lazy_storage.IdSet.t let no_lazy_storage_id = Lazy_storage.IdSet.empty let diff_of_big_map ctxt mode ~temporary ~ids_to_copy {id; key_type; value_type; diff} = ( match id with | Some id -> if Lazy_storage.IdSet.mem Big_map id ids_to_copy then Big_map.fresh ~temporary ctxt >|=? fun (ctxt, duplicate) -> (ctxt, Lazy_storage.Copy {src = id}, duplicate) else (* The first occurrence encountered of a big_map reuses the ID. This way, the payer is only charged for the diff. For this to work, this diff has to be put at the end of the global diff, otherwise the duplicates will use the updated version as a base. This is true because we add this diff first in the accumulator of `extract_lazy_storage_updates`, and this accumulator is not reversed. *) return (ctxt, Lazy_storage.Existing, id) | None -> Big_map.fresh ~temporary ctxt >>=? fun (ctxt, id) -> Lwt.return (let kt = unparse_comparable_ty key_type in Gas.consume ctxt (Script.strip_locations_cost kt) >>? fun ctxt -> unparse_ty ctxt value_type >>? fun (kv, ctxt) -> Gas.consume ctxt (Script.strip_locations_cost kv) >|? fun ctxt -> let key_type = Micheline.strip_locations kt in let value_type = Micheline.strip_locations kv in (ctxt, Lazy_storage.(Alloc Big_map.{key_type; value_type}), id)) ) >>=? fun (ctxt, init, id) -> let pairs = Big_map_overlay.fold (fun key_hash (key, value) acc -> (key_hash, key, value) :: acc) diff.map [] in fold_left_s (fun (acc, ctxt) (key_hash, key, value) -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>?= fun ctxt -> unparse_comparable_data ctxt mode key_type key >>=? fun (key_node, ctxt) -> Gas.consume ctxt (Script.strip_locations_cost key_node) >>?= fun ctxt -> let key = Micheline.strip_locations key_node in ( match value with | None -> return (None, ctxt) | Some x -> unparse_data ~stack_depth:0 ctxt mode value_type x >>=? fun (node, ctxt) -> Lwt.return ( Gas.consume ctxt (Script.strip_locations_cost node) >|? fun ctxt -> (Some (Micheline.strip_locations node), ctxt) ) ) >|=? fun (value, ctxt) -> let diff_item = Big_map.{key; key_hash; value} in (diff_item :: acc, ctxt)) ([], ctxt) (List.rev pairs) >|=? fun (updates, ctxt) -> (Lazy_storage.Update {init; updates}, id, ctxt) let diff_of_sapling_state ctxt ~temporary ~ids_to_copy ({id; diff; memo_size} : Sapling.state) = ( match id with | Some id -> if Lazy_storage.IdSet.mem Sapling_state id ids_to_copy then Sapling.fresh ~temporary ctxt >|=? fun (ctxt, duplicate) -> (ctxt, Lazy_storage.Copy {src = id}, duplicate) else return (ctxt, Lazy_storage.Existing, id) | None -> Sapling.fresh ~temporary ctxt >|=? fun (ctxt, id) -> (ctxt, Lazy_storage.Alloc Sapling.{memo_size}, id) ) >|=? fun (ctxt, init, id) -> (Lazy_storage.Update {init; updates = diff}, id, ctxt) (** Witness flag for whether a type can be populated by a value containing a lazy storage. [False_f] must be used only when a value of the type cannot contain a lazy storage. This flag is built in [has_lazy_storage] and used only in [extract_lazy_storage_updates] and [collect_lazy_storage]. This flag is necessary to avoid these two functions to have a quadratic complexity in the size of the type. Add new lazy storage kinds here. Please keep the usage of this GADT local. *) type 'ty has_lazy_storage = | True_f : _ has_lazy_storage | False_f : _ has_lazy_storage | Pair_f : 'a has_lazy_storage * 'b has_lazy_storage -> ('a, 'b) pair has_lazy_storage | Union_f : 'a has_lazy_storage * 'b has_lazy_storage -> ('a, 'b) union has_lazy_storage | Option_f : 'a has_lazy_storage -> 'a option has_lazy_storage | List_f : 'a has_lazy_storage -> 'a boxed_list has_lazy_storage | Map_f : 'v has_lazy_storage -> (_, 'v) map has_lazy_storage (** This function is called only on storage and parameter types of contracts, once per typechecked contract. It has a complexity linear in the size of the types, which happen to be literally written types, so the gas for them has already been paid. *) let rec has_lazy_storage : type t. t ty -> t has_lazy_storage = let aux1 cons t = match has_lazy_storage t with False_f -> False_f | h -> cons h in let aux2 cons t1 t2 = match (has_lazy_storage t1, has_lazy_storage t2) with | (False_f, False_f) -> False_f | (h1, h2) -> cons h1 h2 in function | Big_map_t (_, _, _) -> True_f | Sapling_state_t _ -> True_f | Unit_t _ -> False_f | Int_t _ -> False_f | Nat_t _ -> False_f | Signature_t _ -> False_f | String_t _ -> False_f | Bytes_t _ -> False_f | Mutez_t _ -> False_f | Key_hash_t _ -> False_f | Key_t _ -> False_f | Timestamp_t _ -> False_f | Address_t _ -> False_f | Bool_t _ -> False_f | Lambda_t (_, _, _) -> False_f | Set_t (_, _) -> False_f | Contract_t (_, _) -> False_f | Operation_t _ -> False_f | Chain_id_t _ -> False_f | Never_t _ -> False_f | Bls12_381_g1_t _ -> False_f | Bls12_381_g2_t _ -> False_f | Bls12_381_fr_t _ -> False_f | Sapling_transaction_t _ -> False_f | Ticket_t _ -> False_f | Pair_t ((l, _, _), (r, _, _), _) -> aux2 (fun l r -> Pair_f (l, r)) l r | Union_t ((l, _), (r, _), _) -> aux2 (fun l r -> Union_f (l, r)) l r | Option_t (t, _) -> aux1 (fun h -> Option_f h) t | List_t (t, _) -> aux1 (fun h -> List_f h) t | Map_t (_, t, _) -> aux1 (fun h -> Map_f h) t (** Transforms a value potentially containing lazy storage in an intermediary state to a value containing lazy storage only represented by identifiers. Returns the updated value, the updated set of ids to copy, and the lazy storage diff to show on the receipt and apply on the storage. *) let extract_lazy_storage_updates ctxt mode ~temporary ids_to_copy acc ty x = let rec aux : type a. context -> unparsing_mode -> temporary:bool -> Lazy_storage.IdSet.t -> Lazy_storage.diffs -> a ty -> a -> has_lazy_storage:a has_lazy_storage -> (context * a * Lazy_storage.IdSet.t * Lazy_storage.diffs) tzresult Lwt.t = fun ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>?= fun ctxt -> match (has_lazy_storage, ty, x) with | (False_f, _, _) -> return (ctxt, x, ids_to_copy, acc) | (_, Big_map_t (_, _, _), map) -> diff_of_big_map ctxt mode ~temporary ~ids_to_copy map >|=? fun (diff, id, ctxt) -> let map = { map with diff = {map = Big_map_overlay.empty; size = 0}; id = Some id; } in let diff = Lazy_storage.make Big_map id diff in let ids_to_copy = Lazy_storage.IdSet.add Big_map id ids_to_copy in (ctxt, map, ids_to_copy, diff :: acc) | (_, Sapling_state_t _, sapling_state) -> diff_of_sapling_state ctxt ~temporary ~ids_to_copy sapling_state >|=? fun (diff, id, ctxt) -> let sapling_state = Sapling.empty_state ~id ~memo_size:sapling_state.memo_size () in let diff = Lazy_storage.make Sapling_state id diff in let ids_to_copy = Lazy_storage.IdSet.add Sapling_state id ids_to_copy in (ctxt, sapling_state, ids_to_copy, diff :: acc) | (Pair_f (hl, hr), Pair_t ((tyl, _, _), (tyr, _, _), _), (xl, xr)) -> aux ctxt mode ~temporary ids_to_copy acc tyl xl ~has_lazy_storage:hl >>=? fun (ctxt, xl, ids_to_copy, acc) -> aux ctxt mode ~temporary ids_to_copy acc tyr xr ~has_lazy_storage:hr >|=? fun (ctxt, xr, ids_to_copy, acc) -> (ctxt, (xl, xr), ids_to_copy, acc) | (Union_f (has_lazy_storage, _), Union_t ((ty, _), (_, _), _), L x) -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, L x, ids_to_copy, acc) | (Union_f (_, has_lazy_storage), Union_t ((_, _), (ty, _), _), R x) -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, R x, ids_to_copy, acc) | (Option_f has_lazy_storage, Option_t (ty, _), Some x) -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, Some x, ids_to_copy, acc) | (List_f has_lazy_storage, List_t (ty, _), l) -> fold_left_s (fun (ctxt, l, ids_to_copy, acc) x -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, list_cons x l, ids_to_copy, acc)) (ctxt, list_empty, ids_to_copy, acc) l.elements >|=? fun (ctxt, l, ids_to_copy, acc) -> let reversed = {length = l.length; elements = List.rev l.elements} in (ctxt, reversed, ids_to_copy, acc) | (Map_f has_lazy_storage, Map_t (_, ty, _), (module M)) -> fold_left_s (fun (ctxt, m, ids_to_copy, acc) (k, x) -> aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage >|=? fun (ctxt, x, ids_to_copy, acc) -> (ctxt, M.OPS.add k x m, ids_to_copy, acc)) (ctxt, M.OPS.empty, ids_to_copy, acc) (M.OPS.bindings (fst M.boxed)) >|=? fun (ctxt, m, ids_to_copy, acc) -> let module M = struct module OPS = M.OPS type key = M.key type value = M.value let key_ty = M.key_ty let boxed = (m, snd M.boxed) end in ( ctxt, (module M : Boxed_map with type key = M.key and type value = M.value), ids_to_copy, acc ) | (_, Option_t (_, _), None) -> return (ctxt, None, ids_to_copy, acc) | _ -> assert false (* TODO: fix injectivity of types *) in let has_lazy_storage = has_lazy_storage ty in aux ctxt mode ~temporary ids_to_copy acc ty x ~has_lazy_storage (** We namespace an error type for [fold_lazy_storage]. The error case is only available when the ['error] parameter is equal to unit. *) module Fold_lazy_storage = struct type ('acc, 'error) result = | Ok : 'acc -> ('acc, 'error) result | Error : ('acc, unit) result end (** Prematurely abort if [f] generates an error. Use this function without the [unit] type for [error] if you are in a case where errors are impossible. *) let rec fold_lazy_storage : type a error. f:('acc, error) Fold_lazy_storage.result Lazy_storage.IdSet.fold_f -> init:'acc -> context -> a ty -> a -> has_lazy_storage:a has_lazy_storage -> (('acc, error) Fold_lazy_storage.result * context) tzresult = fun ~f ~init ctxt ty x ~has_lazy_storage -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>? fun ctxt -> match (has_lazy_storage, ty, x) with | (_, Big_map_t (_, _, _), {id = Some id}) -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>? fun ctxt -> ok (f.f Big_map id (Fold_lazy_storage.Ok init), ctxt) | (_, Sapling_state_t _, {id = Some id}) -> Gas.consume ctxt Typecheck_costs.parse_instr_cycle >>? fun ctxt -> ok (f.f Sapling_state id (Fold_lazy_storage.Ok init), ctxt) | (False_f, _, _) -> ok (Fold_lazy_storage.Ok init, ctxt) | (_, Big_map_t (_, _, _), {id = None}) -> ok (Fold_lazy_storage.Ok init, ctxt) | (_, Sapling_state_t _, {id = None}) -> ok (Fold_lazy_storage.Ok init, ctxt) | (Pair_f (hl, hr), Pair_t ((tyl, _, _), (tyr, _, _), _), (xl, xr)) -> ( fold_lazy_storage ~f ~init ctxt tyl xl ~has_lazy_storage:hl >>? fun (init, ctxt) -> match init with | Fold_lazy_storage.Ok init -> fold_lazy_storage ~f ~init ctxt tyr xr ~has_lazy_storage:hr | Fold_lazy_storage.Error -> ok (init, ctxt) ) | (Union_f (has_lazy_storage, _), Union_t ((ty, _), (_, _), _), L x) -> fold_lazy_storage ~f ~init ctxt ty x ~has_lazy_storage | (Union_f (_, has_lazy_storage), Union_t ((_, _), (ty, _), _), R x) -> fold_lazy_storage ~f ~init ctxt ty x ~has_lazy_storage | (_, Option_t (_, _), None) -> ok (Fold_lazy_storage.Ok init, ctxt) | (Option_f has_lazy_storage, Option_t (ty, _), Some x) -> fold_lazy_storage ~f ~init ctxt ty x ~has_lazy_storage | (List_f has_lazy_storage, List_t (ty, _), l) -> List.fold_left (fun (acc : (('acc, error) Fold_lazy_storage.result * context) tzresult) x -> acc >>? fun (init, ctxt) -> match init with | Fold_lazy_storage.Ok init -> fold_lazy_storage ~f ~init ctxt ty x ~has_lazy_storage | Fold_lazy_storage.Error -> ok (init, ctxt)) (ok (Fold_lazy_storage.Ok init, ctxt)) l.elements | (Map_f has_lazy_storage, Map_t (_, ty, _), m) -> map_fold (fun _ v (acc : (('acc, error) Fold_lazy_storage.result * context) tzresult) -> acc >>? fun (init, ctxt) -> match init with | Fold_lazy_storage.Ok init -> fold_lazy_storage ~f ~init ctxt ty v ~has_lazy_storage | Fold_lazy_storage.Error -> ok (init, ctxt)) m (ok (Fold_lazy_storage.Ok init, ctxt)) | _ -> (* TODO: fix injectivity of types *) assert false let collect_lazy_storage ctxt ty x = let has_lazy_storage = has_lazy_storage ty in let f kind id (acc : (_, never) Fold_lazy_storage.result) = let acc = match acc with Fold_lazy_storage.Ok acc -> acc in Fold_lazy_storage.Ok (Lazy_storage.IdSet.add kind id acc) in fold_lazy_storage ~f:{f} ~init:no_lazy_storage_id ctxt ty x ~has_lazy_storage >>? fun (ids, ctxt) -> match ids with Fold_lazy_storage.Ok ids -> ok (ids, ctxt) let extract_lazy_storage_diff ctxt mode ~temporary ~to_duplicate ~to_update ty v = (* Basically [to_duplicate] are ids from the argument and [to_update] are ids from the storage before execution (i.e. it is safe to reuse them since they will be owned by the same contract). *) let to_duplicate = Lazy_storage.IdSet.diff to_duplicate to_update in extract_lazy_storage_updates ctxt mode ~temporary to_duplicate [] ty v >|=? fun (ctxt, v, alive, diffs) -> let diffs = if temporary then diffs else let dead = Lazy_storage.IdSet.diff to_update alive in Lazy_storage.IdSet.fold_all {f = (fun kind id acc -> Lazy_storage.make kind id Remove :: acc)} dead diffs in match diffs with | [] -> (v, None, ctxt) | diffs -> (v, Some diffs (* do not reverse *), ctxt) let list_of_big_map_ids ids = Lazy_storage.IdSet.fold Big_map (fun id acc -> id :: acc) ids [] let parse_data = parse_data ~stack_depth:0 let parse_instr : type a s. ?type_logger:type_logger -> tc_context -> context -> legacy:bool -> Script.node -> (a, s) stack_ty -> ((a, s) judgement * context) tzresult Lwt.t = fun ?type_logger tc_context ctxt ~legacy script_instr stack_ty -> parse_instr ~stack_depth:0 ?type_logger tc_context ctxt ~legacy script_instr stack_ty let unparse_data = unparse_data ~stack_depth:0 let unparse_code = unparse_code ~stack_depth:0 let parse_contract ~legacy context loc arg_ty contract ~entrypoint = parse_contract ~stack_depth:0 ~legacy context loc arg_ty contract ~entrypoint let parse_comparable_ty = parse_comparable_ty ~stack_depth:0 let parse_big_map_value_ty = parse_big_map_value_ty ~stack_depth:0 let parse_packable_ty = parse_packable_ty ~stack_depth:0 let parse_parameter_ty = parse_parameter_ty ~stack_depth:0 let parse_any_ty = parse_any_ty ~stack_depth:0 let parse_ty = parse_ty ~stack_depth:0 let get_single_sapling_state ctxt ty x = let has_lazy_storage = has_lazy_storage ty in let f (type i a u) (kind : (i, a, u) Lazy_storage.Kind.t) (id : i) single_id_opt : (Sapling.Id.t option, unit) Fold_lazy_storage.result = match kind with | Lazy_storage.Kind.Sapling_state -> ( match single_id_opt with | Fold_lazy_storage.Ok None -> Fold_lazy_storage.Ok (Some id) | Fold_lazy_storage.Ok (Some _) -> Fold_lazy_storage.Error (* more than one *) | Fold_lazy_storage.Error -> single_id_opt ) | _ -> single_id_opt in fold_lazy_storage ~f:{f} ~init:None ctxt ty x ~has_lazy_storage >>? fun (id, ctxt) -> match id with | Fold_lazy_storage.Ok (Some id) -> ok (Some id, ctxt) | Fold_lazy_storage.Ok None | Fold_lazy_storage.Error -> ok (None, ctxt)
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2020 Metastate AG <hello@metastate.dev> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
script_ir_annot.mli
open Alpha_context open Script_typed_ir (** Default annotations *) val default_now_annot : var_annot option val default_amount_annot : var_annot option val default_balance_annot : var_annot option val default_level_annot : var_annot option val default_steps_annot : var_annot option val default_source_annot : var_annot option val default_sender_annot : var_annot option val default_self_annot : var_annot option val default_arg_annot : var_annot option val default_param_annot : var_annot option val default_storage_annot : var_annot option val default_sapling_state_annot : var_annot option val default_sapling_balance_annot : var_annot option val default_car_annot : field_annot option val default_cdr_annot : field_annot option val default_contract_annot : field_annot option val default_addr_annot : field_annot option val default_manager_annot : field_annot option val default_pack_annot : field_annot option val default_unpack_annot : field_annot option val default_slice_annot : field_annot option val default_elt_annot : field_annot option val default_key_annot : field_annot option val default_hd_annot : field_annot option val default_tl_annot : field_annot option val default_some_annot : field_annot option val default_left_annot : field_annot option val default_right_annot : field_annot option val default_binding_annot : field_annot option (** Unparse annotations to their string representation *) val unparse_type_annot : type_annot option -> string list val unparse_var_annot : var_annot option -> string list val unparse_field_annot : field_annot option -> string list (** Conversion functions between different annotation kinds *) val field_to_var_annot : field_annot option -> var_annot option val type_to_var_annot : type_annot option -> var_annot option val var_to_field_annot : var_annot option -> field_annot option (** Replace an annotation by its default value if it is [None] *) val default_annot : default:'a option -> 'a option -> 'a option (** Generate annotation for field accesses, of the form [var.field1.field2] *) val gen_access_annot : var_annot option -> ?default:field_annot option -> field_annot option -> var_annot option (** Merge type annotations. @return an error {!Inconsistent_type_annotations} if they are both present and different, unless [legacy] *) val merge_type_annot : legacy:bool -> type_annot option -> type_annot option -> type_annot option tzresult (** Merge field annotations. @return an error {!Inconsistent_type_annotations} if they are both present and different, unless [legacy] *) val merge_field_annot : legacy:bool -> field_annot option -> field_annot option -> field_annot option tzresult (** Merge variable annotations, does not fail ([None] if different). *) val merge_var_annot : var_annot option -> var_annot option -> var_annot option (** @return an error {!Unexpected_annotation} in the monad the list is not empty. *) val error_unexpected_annot : int -> 'a list -> unit tzresult (** Parse a type annotation only. *) val parse_type_annot : int -> string list -> type_annot option tzresult (** Parse a field annotation only. *) val parse_field_annot : int -> string list -> field_annot option tzresult (** Parse an annotation for composed types, of the form [:ty_name %field] in any order. *) val parse_type_field_annot : int -> string list -> (type_annot option * field_annot option) tzresult (** Parse an annotation for composed types, of the form [:ty_name %field1 %field2] in any order. *) val parse_composed_type_annot : int -> string list -> (type_annot option * field_annot option * field_annot option) tzresult (** Extract and remove a field annotation from a node *) val extract_field_annot : Script.node -> (Script.node * field_annot option) tzresult (** Check that field annotations match, used for field accesses. *) val check_correct_field : field_annot option -> field_annot option -> unit tzresult (** Instruction annotations parsing *) (** Parse a variable annotation, replaced by a default value if [None]. *) val parse_var_annot : int -> ?default:var_annot option -> string list -> var_annot option tzresult val parse_constr_annot : int -> ?if_special_first:field_annot option -> ?if_special_second:field_annot option -> string list -> ( var_annot option * type_annot option * field_annot option * field_annot option ) tzresult val parse_two_var_annot : int -> string list -> (var_annot option * var_annot option) tzresult val parse_destr_annot : int -> string list -> default_accessor:field_annot option -> field_name:field_annot option -> pair_annot:var_annot option -> value_annot:var_annot option -> (var_annot option * field_annot option) tzresult val parse_unpair_annot : int -> string list -> field_name_car:field_annot option -> field_name_cdr:field_annot option -> pair_annot:var_annot option -> value_annot_car:var_annot option -> value_annot_cdr:var_annot option -> ( var_annot option * var_annot option * field_annot option * field_annot option ) tzresult val parse_entrypoint_annot : int -> ?default:var_annot option -> string list -> (var_annot option * field_annot option) tzresult val parse_var_type_annot : int -> string list -> (var_annot option * type_annot option) tzresult
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
t-submod.c
#include <gmp.h> #include "flint.h" #include "ulong_extras.h" int main(void) { int i, result; FLINT_TEST_INIT(state); flint_printf("submod...."); fflush(stdout); for (i = 0; i < 100000 * flint_test_multiplier(); i++) { ulong a, b, d, r1, r2, s1; d = n_randtest_not_zero(state); a = n_randtest(state) % d; b = n_randtest(state) % d; r1 = n_submod(a, b, d); add_ssaaaa(s1, r2, UWORD(0), a, UWORD(0), d); sub_ddmmss(s1, r2, s1, r2, UWORD(0), b); if (s1 != 0 || r2 >= d) r2 -= d; result = (r1 == r2); if (!result) { flint_printf("FAIL:\n"); flint_printf("a = %wu, b = %wu, d = %wu\n", a, b, d); flint_printf("r1 = %wu, r2 = %wu\n", r1, r2); fflush(stdout); flint_abort(); } } FLINT_TEST_CLEANUP(state); flint_printf("PASS\n"); return 0; }
/* Copyright (C) 2009, 2016 William Hart This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
type_helpers.mli
(** Type conversion helpers *) open Protocol (** Exception raised in case an error occurs in this module. *) exception Type_helpers_error of string (** [michelson_type_list_to_ex_stack_ty] converts a list of types in Micheline form to a stack type in IR form. @raise Type_helpers_error if parsing the Michelson type fails. *) val michelson_type_list_to_ex_stack_ty : Alpha_context.Script.expr list -> Alpha_context.t -> Script_ir_translator.ex_stack_ty (** [michelson_type_to_ex_ty ty ctxt] parses the type [ty]. @raise Type_helpers_error if an error arises during parsing. *) val michelson_type_to_ex_ty : Alpha_context.Script.expr -> Alpha_context.t -> Script_ir_translator.ex_ty (** [stack_type_to_michelson_type_list] converts a Mikhailsky stack type to a stack represented as a list of Micheline expressions, each element denoting a type on the stack type. @raise Type_helpers_error if the stack type contains variables. *) val stack_type_to_michelson_type_list : Type.Stack.t -> Script_repr.expr list (** [base_type_to_ex_ty] converts a Mikhailsky type to a Michelson one. *) val base_type_to_ex_ty : Type.Base.t -> Alpha_context.t -> Script_ir_translator.ex_ty
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2021 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dune
(library (name hardcaml_xilinx) (public_name hardcaml_xilinx) (libraries base hardcaml) (preprocess (pps ppx_jane ppx_deriving_hardcaml)))
nonce.ml
open Protocol module Table = Hashtbl.Make (struct type t = Nonce_hash.t let hash h = Int32.to_int (TzEndian.get_int32 (Nonce_hash.to_bytes h) 0) let equal = Nonce_hash.equal end) let known_nonces = Table.create 17 let generate () = match Alpha_context.Nonce.of_bytes @@ Tezos_crypto.Rand.generate Alpha_context.Constants.nonce_length with | Ok nonce -> let hash = Alpha_context.Nonce.hash nonce in Table.add known_nonces hash nonce ; (hash, nonce) | Error _ -> assert false let forget_all () = Table.clear known_nonces let get hash = Table.find known_nonces hash
(**************************************************************************) (* *) (* Copyright (c) 2014 - 2018. *) (* Dynamic Ledger Solutions, Inc.< contact@tezos.com > *) (* *) (* All rights reserved.No warranty, explicit or implicit, provided. *) (* *) (**************************************************************************)
deleteVpcEndpointConnectionNotifications.mli
open Types type input = DeleteVpcEndpointConnectionNotificationsRequest.t type output = DeleteVpcEndpointConnectionNotificationsResult.t type error = Errors_internal.t include Aws.Call with type input := input and type output := output and type error := error
ccl_clausepos.h
#ifndef CLAUSEPOS #define CLAUSEPOS #include <cte_termpos.h> #include <ccl_clauses.h> /*---------------------------------------------------------------------*/ /* Data type declarations */ /*---------------------------------------------------------------------*/ typedef struct clauseposcell { Clause_p clause; Eqn_p literal; EqnSide side; TermPos_p pos; void* data; }ClausePosCell, *ClausePos_p; typedef struct match_info_cell { int remaining_args; ClausePos_p pos; }MatchResCell, *MatchRes_p; /*---------------------------------------------------------------------*/ /* Exported Functions and Variables */ /*---------------------------------------------------------------------*/ typedef void (*Deleter)(void*); #define ClausePosCellAlloc() (ClausePosCell*)SizeMalloc(sizeof(ClausePosCell)) #define ClausePosCellFree(junk) SizeFree(junk, sizeof(ClausePosCell)) static inline void ClausePosCellFreeWDeleter(ClausePos_p junk, Deleter del); #define MatchResAlloc() (MatchRes_p) SizeMalloc(sizeof(MatchResCell)) #define MatchResFree(junk) SizeFree(junk, sizeof(MatchResCell)) #ifdef CONSTANT_MEM_ESTIMATE #define CLAUSEPOSCELL_MEM 20 #else #define CLAUSEPOSCELL_MEM MEMSIZE(ClausePosCell) #endif static inline ClausePos_p ClausePosAlloc(void); static inline void ClausePosFreeWDeleter(ClausePos_p junk, Deleter deleter); #define ClausePosFree(junk) ClausePosFreeWDeleter(junk, NULL) static inline Term_p ClausePosGetSide(ClausePos_p pos); static inline Term_p ClausePosGetOtherSide(ClausePos_p pos); static inline Term_p ClausePosGetSubterm(ClausePos_p pos); #define ClausePosIsTop(position) ((PStackEmpty((position)->pos))) void ClausePosPrint(FILE* out, ClausePos_p pos); Eqn_p ClausePosFindPosLiteral(ClausePos_p pos, bool maximal); Eqn_p ClausePosFindMaxLiteral(ClausePos_p pos, bool positive); Term_p ClausePosFindFirstMaximalSide(ClausePos_p pos, bool positive); Term_p ClausePosFindNextMaximalSide(ClausePos_p pos, bool positive); Term_p ClausePosFindFirstMaximalSubterm(ClausePos_p pos); Term_p ClausePosFindNextMaximalSubterm(ClausePos_p pos); bool TermComputeRWSequence(PStack_p stack, Term_p from, Term_p to, int inject_op); /*---------------------------------------------------------------------*/ /* Inline Functions */ /*---------------------------------------------------------------------*/ /*----------------------------------------------------------------------- // // Function: ClausePosAlloc() // // Allocate an empty, semi-initialized ClausePosCell. // // Global Variables: - // // Side Effects : Memory operations // /----------------------------------------------------------------------*/ static inline ClausePos_p ClausePosAlloc(void) { ClausePos_p handle = ClausePosCellAlloc(); handle->literal = NULL; handle->side = LeftSide; handle->pos = TermPosAlloc(); return handle; } /*----------------------------------------------------------------------- // // Function: ClausePosCellFreeWDeleter() // // Free a clause pos cell and use deleter on junk->data // // Global Variables: - // // Side Effects : Memory operations // /----------------------------------------------------------------------*/ static inline void ClausePosCellFreeWDeleter(ClausePos_p junk, Deleter deleter) { assert(junk); if(deleter) { deleter(junk); } ClausePosCellFree(junk); } /*----------------------------------------------------------------------- // // Function: ClausePosFree() // // Free a clausepos. // // Global Variables: - // // Side Effects : Memory operations // /----------------------------------------------------------------------*/ static inline void ClausePosFreeWDeleter(ClausePos_p junk, Deleter deleter) { assert(junk); TermPosFree(junk->pos); ClausePosCellFree(junk); if(deleter) { deleter(junk->data); } } /*----------------------------------------------------------------------- // // Function: ClausePosGetSide() // // Given a clause position, return the designated side of the // literal. // // Global Variables: - // // Side Effects : - // /----------------------------------------------------------------------*/ static inline Term_p ClausePosGetSide(ClausePos_p pos) { if(pos->side == LeftSide) { return pos->literal->lterm; } return pos->literal->rterm; } /*----------------------------------------------------------------------- // // Function: ClausePosGetOtherSide() // // Given a clause position, return the _not_ designated side of the // literal - don't ask, this has its use! // // Global Variables: - // // Side Effects : - // /----------------------------------------------------------------------*/ static inline Term_p ClausePosGetOtherSide(ClausePos_p pos) { if(pos->side == LeftSide) { return pos->literal->rterm; } return pos->literal->lterm; } /*----------------------------------------------------------------------- // // Function: ClausePosGetSubterm() // // Given a clause position, return the designated subterm of the // literal. // // Global Variables: - // // Side Effects : - // /----------------------------------------------------------------------*/ static inline Term_p ClausePosGetSubterm(ClausePos_p pos) { return TermPosGetSubterm(ClausePosGetSide(pos), pos->pos); } #endif /*---------------------------------------------------------------------*/ /* End of File */ /*---------------------------------------------------------------------*/
/*----------------------------------------------------------------------- File : ccl_clausepos.h Author: Stephan Schulz Contents Positions of subterms in clauses (and in equations). Copyright 1998, 1999 by the author. This code is released under the GNU General Public Licence and the GNU Lesser General Public License. See the file COPYING in the main E directory for details.. Run "eprover -h" for contact information. Changes <1> Wed May 20 03:34:54 MET DST 1998 New -----------------------------------------------------------------------*/
t-get_set_str_pretty.c
#include <stdio.h> #include <stdlib.h> #include "fmpz_mod_mpoly.h" int main(void) { slong i; FLINT_TEST_INIT(state); flint_printf("get_set_str_pretty...."); fflush(stdout); { slong len1; flint_bitcnt_t exp_bits1; fmpz_mod_mpoly_ctx_t ctx; fmpz_mod_mpoly_t f, f1; char * str; const char * vars[] = {"x","y","z","w","u","v"}; for (i = 0; i < flint_test_multiplier(); i++) { fmpz_mod_mpoly_ctx_init_rand_bits(ctx, state, 6, 200); fmpz_mod_mpoly_init(f, ctx); fmpz_mod_mpoly_init(f1, ctx); for (len1 = 3; len1 < 1000; len1 += len1/2) { exp_bits1 = 200; fmpz_mod_mpoly_randtest_bits(f, state, len1, exp_bits1, ctx); str = fmpz_mod_mpoly_get_str_pretty(f, vars, ctx); fmpz_mod_mpoly_set_str_pretty(f1, str, vars, ctx); flint_free(str); if (!fmpz_mod_mpoly_equal(f, f1, ctx)) { flint_printf("FAIL\n"); fflush(stdout); flint_abort(); } } fmpz_mod_mpoly_clear(f, ctx); fmpz_mod_mpoly_clear(f1, ctx); fmpz_mod_mpoly_ctx_clear(ctx); } } flint_printf("PASS\n"); FLINT_TEST_CLEANUP(state); return 0; }
/* Copyright (C) 2020 Daniel Schultz This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
delegate_consensus_key.ml
type error += | Invalid_consensus_key_update_noop of Cycle_repr.t | Invalid_consensus_key_update_active | Invalid_consensus_key_update_tz4 of Bls.Public_key.t let () = register_error_kind `Permanent ~id:"delegate.consensus_key.invalid_noop" ~title:"Invalid key for consensus key update" ~description:"Tried to update the consensus key with the active key" ~pp:(fun ppf cycle -> Format.fprintf ppf "Invalid key while updating a consensus key (already active since %a)." Cycle_repr.pp cycle) Data_encoding.(obj1 (req "cycle" Cycle_repr.encoding)) (function Invalid_consensus_key_update_noop c -> Some c | _ -> None) (fun c -> Invalid_consensus_key_update_noop c) ; register_error_kind `Permanent ~id:"delegate.consensus_key.active" ~title:"Active consensus key" ~description: "The delegate consensus key is already used by another delegate" ~pp:(fun ppf () -> Format.fprintf ppf "The delegate consensus key is already used by another delegate") Data_encoding.empty (function Invalid_consensus_key_update_active -> Some () | _ -> None) (fun () -> Invalid_consensus_key_update_active) ; register_error_kind `Permanent ~id:"delegate.consensus_key.tz4" ~title:"Consensus key cannot be a tz4" ~description:"Consensus key cannot be a tz4 (BLS public key)." ~pp:(fun ppf pk -> Format.fprintf ppf "The consensus key %a is forbidden as it is a BLS public key." Bls.Public_key_hash.pp (Bls.Public_key.hash pk)) Data_encoding.(obj1 (req "delegate_pk" Bls.Public_key.encoding)) (function Invalid_consensus_key_update_tz4 pk -> Some pk | _ -> None) (fun pk -> Invalid_consensus_key_update_tz4 pk) type pk = Raw_context.consensus_pk = { delegate : Signature.Public_key_hash.t; consensus_pk : Signature.Public_key.t; consensus_pkh : Signature.Public_key_hash.t; } type t = { delegate : Signature.Public_key_hash.t; consensus_pkh : Signature.Public_key_hash.t; } let pkh {delegate; consensus_pkh; consensus_pk = _} = {delegate; consensus_pkh} let zero = { consensus_pkh = Signature.Public_key_hash.zero; delegate = Signature.Public_key_hash.zero; } let pp ppf {delegate; consensus_pkh} = Format.fprintf ppf "@[<v 2>%a" Signature.Public_key_hash.pp delegate ; if not (Signature.Public_key_hash.equal delegate consensus_pkh) then Format.fprintf ppf "@,Active key: %a" Signature.Public_key_hash.pp consensus_pkh ; Format.fprintf ppf "@]" (* Invariant: No two delegates use the same active consensus key at a given time. To ensure that, {!Storage.Consensus_keys} contains keys that will be active at cycle `current + preserved_cycles + 1`. *) let check_unused ctxt pkh = let open Lwt_result_syntax in let*! is_active = Storage.Consensus_keys.mem ctxt pkh in fail_when is_active Invalid_consensus_key_update_active let check_not_tz4 : Signature.Public_key.t -> unit tzresult = function | Bls pk -> error (Invalid_consensus_key_update_tz4 pk) | Ed25519 _ | Secp256k1 _ | P256 _ -> Ok () let set_unused = Storage.Consensus_keys.remove let set_used = Storage.Consensus_keys.add let init ctxt delegate pk = let open Lwt_result_syntax in let*? () = check_not_tz4 pk in let pkh = Signature.Public_key.hash pk in let* () = check_unused ctxt pkh in let*! ctxt = set_used ctxt pkh in Storage.Contract.Consensus_key.init ctxt (Contract_repr.Implicit delegate) pk let active_pubkey ctxt delegate = let open Lwt_result_syntax in let* pk = Storage.Contract.Consensus_key.get ctxt (Contract_repr.Implicit delegate) in let pkh = Signature.Public_key.hash pk in return {consensus_pk = pk; consensus_pkh = pkh; delegate} let active_key ctxt delegate = let open Lwt_result_syntax in let* pk = active_pubkey ctxt delegate in return (pkh pk) let raw_pending_updates ctxt delegate = let open Lwt_result_syntax in let*! pendings = Storage.Contract.Pending_consensus_keys.bindings (ctxt, Contract_repr.Implicit delegate) in return pendings let pending_updates ctxt delegate = let open Lwt_result_syntax in let* updates = raw_pending_updates ctxt delegate in let updates = List.sort (fun (c1, _) (c2, _) -> Cycle_repr.compare c1 c2) updates in return (List.map (fun (c, pk) -> (c, Signature.Public_key.hash pk)) updates) let raw_active_pubkey_for_cycle ctxt delegate cycle = let open Lwt_result_syntax in let* pendings = raw_pending_updates ctxt delegate in let* active = active_pubkey ctxt delegate in let current_level = Raw_context.current_level ctxt in let active_for_cycle = List.fold_left (fun (c1, active) (c2, pk) -> if Cycle_repr.(c1 < c2 && c2 <= cycle) then (c2, pk) else (c1, active)) (current_level.cycle, active.consensus_pk) pendings in return active_for_cycle let active_pubkey_for_cycle ctxt delegate cycle = let open Lwt_result_syntax in let* _, consensus_pk = raw_active_pubkey_for_cycle ctxt delegate cycle in return { consensus_pk; consensus_pkh = Signature.Public_key.hash consensus_pk; delegate; } let register_update ctxt delegate pk = let open Lwt_result_syntax in let update_cycle = let current_level = Raw_context.current_level ctxt in let preserved_cycles = Constants_storage.preserved_cycles ctxt in Cycle_repr.add current_level.cycle (preserved_cycles + 1) in let* () = let* first_active_cycle, active_pubkey = raw_active_pubkey_for_cycle ctxt delegate update_cycle in fail_when Signature.Public_key.(pk = active_pubkey) (Invalid_consensus_key_update_noop first_active_cycle) in let*? () = check_not_tz4 pk in let pkh = Signature.Public_key.hash pk in let* () = check_unused ctxt pkh in let*! ctxt = set_used ctxt pkh in let* {consensus_pkh = old_pkh; _} = active_pubkey_for_cycle ctxt delegate update_cycle in let*! ctxt = set_unused ctxt old_pkh in let*! ctxt = Storage.Contract.Pending_consensus_keys.add (ctxt, Contract_repr.Implicit delegate) update_cycle pk in return ctxt let activate ctxt ~new_cycle = let open Lwt_result_syntax in Storage.Delegates.fold ctxt ~order:`Undefined ~init:(ok ctxt) ~f:(fun delegate ctxt -> let*? ctxt = ctxt in let delegate = Contract_repr.Implicit delegate in let* update = Storage.Contract.Pending_consensus_keys.find (ctxt, delegate) new_cycle in match update with | None -> return ctxt | Some pk -> let*! ctxt = Storage.Contract.Consensus_key.add ctxt delegate pk in let*! ctxt = Storage.Contract.Pending_consensus_keys.remove (ctxt, delegate) new_cycle in return ctxt)
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2022 G.B. Fefe, <gb.fefe@protonmail.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
tx_rollup_l2_context.ml
open Tx_rollup_l2_storage_sig open Tx_rollup_l2_context_sig let metadata_encoding = Data_encoding.( conv (fun {counter; public_key} -> (counter, public_key)) (fun (counter, public_key) -> {counter; public_key}) (obj2 (req "counter" int64) (req "public_key" Bls.Public_key.encoding))) (** {1 Type-Safe Storage Access and Gas Accounting} *) (** A value of type ['a key] identifies a value of type ['a] in an underlying, untyped storage. This GADT is used to enforce type-safety of the abstraction of the transactions rollup context. For this abstraction to work, it is necessary to ensure that the serialization of values ['a key] and ['b key] cannot collide. To that end, we use [Data_encoding] (see {!packed_key_encoding}). *) type _ key = | Address_metadata : address_index -> metadata key | Address_count : int32 key | Address_index : Tx_rollup_l2_address.t -> address_index key | Ticket_count : int32 key | Ticket_index : Alpha_context.Ticket_hash.t -> ticket_index key | Ticket_ledger : ticket_index * address_index -> Tx_rollup_l2_qty.t key (** A monomorphic version of {!Key}, used for serialization purposes. *) type packed_key = Key : 'a key -> packed_key (** The encoding used to serialize keys to be used with an untyped storage. *) let packed_key_encoding : packed_key Data_encoding.t = Data_encoding.( union ~tag_size:`Uint8 [ case (Tag 0) ~title:"Address_metadata" Tx_rollup_l2_address.Indexable.index_encoding (function Key (Address_metadata idx) -> Some idx | _ -> None) (fun idx -> Key (Address_metadata idx)); case (Tag 1) ~title:"Address_count" empty (function Key Address_count -> Some () | _ -> None) (fun () -> Key Address_count); case (Tag 2) ~title:"Address_index" Tx_rollup_l2_address.encoding (function Key (Address_index addr) -> Some addr | _ -> None) (fun addr -> Key (Address_index addr)); case (Tag 3) ~title:"Ticket_count" empty (function Key Ticket_count -> Some () | _ -> None) (fun () -> Key Ticket_count); case (Tag 4) ~title:"Ticket_index" Alpha_context.Ticket_hash.encoding (function Key (Ticket_index ticket) -> Some ticket | _ -> None) (fun ticket -> Key (Ticket_index ticket)); case (Tag 5) ~title:"Ticket_ledger" (tup2 Ticket_indexable.index_encoding Tx_rollup_l2_address.Indexable.index_encoding) (function | Key (Ticket_ledger (ticket, address)) -> Some (ticket, address) | _ -> None) (fun (ticket, address) -> Key (Ticket_ledger (ticket, address))); ]) (** [value_encoding key] returns the encoding to be used to serialize and deserialize values associated to a [key] from and to the underlying storage. *) let value_encoding : type a. a key -> a Data_encoding.t = let open Data_encoding in function | Address_metadata _ -> metadata_encoding | Address_count -> int32 | Address_index _ -> Tx_rollup_l2_address.Indexable.index_encoding | Ticket_count -> int32 | Ticket_index _ -> Ticket_indexable.index_encoding | Ticket_ledger _ -> Tx_rollup_l2_qty.encoding (** {1 Errors} *) type error += Key_cannot_be_serialized type error += Value_cannot_be_serialized type error += Value_cannot_be_deserialized let () = let open Data_encoding in (* Key cannot be serialized *) register_error_kind `Permanent ~id:"tx_rollup_key_cannot_be_serialized" ~title:"Key cannot be serialized" ~description:"Tried to serialize an invalid key." empty (function Key_cannot_be_serialized -> Some () | _ -> None) (fun () -> Key_cannot_be_serialized) ; (* Value cannot be serialized *) register_error_kind `Permanent ~id:"tx_rollup_value_cannot_be_serialized" ~title:"Value cannot be serialized" ~description:"Tried to serialize an invalid value." empty (function Value_cannot_be_serialized -> Some () | _ -> None) (fun () -> Value_cannot_be_serialized) ; (* Value cannot be deserialized *) register_error_kind `Permanent ~id:"tx_rollup_value_cannot_be_deserialized" ~title:"Value cannot be deserialized" ~description: "A value has been serialized in the Tx_rollup store, but cannot be \ deserialized." empty (function Value_cannot_be_deserialized -> Some () | _ -> None) (fun () -> Value_cannot_be_deserialized) (** {1 The Context Functor} *) module Make (S : STORAGE) : CONTEXT with type t = S.t and type 'a m = 'a S.m = struct type t = S.t type 'a m = 'a S.m module Syntax = struct include S.Syntax let ( let*? ) res f = match res with Result.Ok v -> f v | Result.Error error -> fail error let fail_unless cond error = let open S.Syntax in if cond then return () else fail error let fail_when cond error = let open S.Syntax in if cond then fail error else return () end let bls_verify : (Bls.Public_key.t * bytes) list -> signature -> bool m = fun accounts aggregated_signature -> let open Syntax in let msgs = List.map (fun (pk, msg) -> (pk, None, msg)) accounts in return (Bls.aggregate_check msgs aggregated_signature) let unwrap_or : type a. a option -> error -> a S.m = fun opt err -> match opt with Some x -> S.Syntax.return x | None -> S.Syntax.fail err let serialize_key : type a. a key -> bytes m = fun key -> unwrap_or (Data_encoding.Binary.to_bytes_opt packed_key_encoding (Key key)) Key_cannot_be_serialized let serialize_value : type a. a Data_encoding.t -> a -> bytes m = fun encoding value -> unwrap_or (Data_encoding.Binary.to_bytes_opt encoding value) Value_cannot_be_serialized let deserialize_value : type a. a Data_encoding.t -> bytes -> a m = fun encoding value -> unwrap_or (Data_encoding.Binary.of_bytes_opt encoding value) Value_cannot_be_deserialized (** [get ctxt key] is a type-safe [get] function. *) let get : type a. t -> a key -> a option m = fun ctxt key -> let open Syntax in let value_encoding = value_encoding key in let* key = serialize_key key in let* value = S.get ctxt key in match value with | Some value -> let* value = deserialize_value value_encoding value in return (Some value) | None -> return None (** [set ctxt key value] is a type-safe [set] function. *) let set : type a. t -> a key -> a -> t m = fun ctxt key value -> let open Syntax in let value_encoding = value_encoding key in let* key = serialize_key key in let* value = serialize_value value_encoding value in S.set ctxt key value let remove : type a. t -> a key -> t m = fun ctxt key -> let open Syntax in let* key = serialize_key key in S.remove ctxt key module Address_metadata = struct let get ctxt idx = get ctxt (Address_metadata idx) let incr_counter ctxt idx = let open Syntax in let* metadata = get ctxt idx in match metadata with | Some meta -> let new_counter = Int64.succ meta.counter in let* () = fail_unless Compare.Int64.(new_counter >= meta.counter) Counter_overflow in set ctxt (Address_metadata idx) {meta with counter = new_counter} | None -> fail (Unknown_address_index idx) let init_with_public_key ctxt idx public_key = let open Syntax in let* metadata = get ctxt idx in match metadata with | None -> set ctxt (Address_metadata idx) {counter = 0L; public_key} | Some _ -> fail (Metadata_already_initialized idx) module Internal_for_tests = struct let set ctxt idx metadata = set ctxt (Address_metadata idx) metadata end end module Address_index = struct let count ctxt = let open Syntax in let+ count = get ctxt Address_count in Option.value ~default:0l count let init_counter ctxt = set ctxt Address_count 0l let associate_index ctxt addr = let open Syntax in let* i = count ctxt in let new_count = Int32.succ i in let* () = fail_unless Compare.Int32.(new_count >= i) Too_many_l2_addresses in (* This can not fail as by construction [count ctxt] is always positive. *) let idx = Indexable.index_exn i in let* ctxt = set ctxt (Address_index addr) idx in let+ ctxt = set ctxt Address_count new_count in (ctxt, idx) let get ctxt addr = get ctxt (Address_index addr) let get_or_associate_index ctxt addr = let open Syntax in let* index_opt = get ctxt addr in match index_opt with | Some idx -> return (ctxt, `Existed, idx) | None -> let+ ctxt, idx = associate_index ctxt addr in (ctxt, `Created, idx) module Internal_for_tests = struct let set_count ctxt count = set ctxt Address_count count end end module Ticket_index = struct let count ctxt = let open Syntax in let+ count = get ctxt Ticket_count in Option.value ~default:0l count let init_counter ctxt = set ctxt Ticket_count 0l let associate_index ctxt ticket = let open Syntax in let* i = count ctxt in let new_count = Int32.succ i in let* () = fail_unless Compare.Int32.(new_count >= i) Too_many_l2_tickets in (* This can not fail as by construction [count ctxt] is always positive. *) let idx = Indexable.index_exn i in let* ctxt = set ctxt (Ticket_index ticket) idx in let+ ctxt = set ctxt Ticket_count new_count in (ctxt, idx) let get ctxt ticket = get ctxt (Ticket_index ticket) let get_or_associate_index ctxt ticket = let open Syntax in let* index_opt = get ctxt ticket in match index_opt with | Some idx -> return (ctxt, `Existed, idx) | None -> let+ ctxt, idx = associate_index ctxt ticket in (ctxt, `Created, idx) module Internal_for_tests = struct let set_count ctxt count = set ctxt Ticket_count count end end module Ticket_ledger = struct let get_opt ctxt tidx aidx = get ctxt (Ticket_ledger (tidx, aidx)) let get ctxt tidx aidx = let open Syntax in let+ res = get_opt ctxt tidx aidx in Option.value ~default:Tx_rollup_l2_qty.zero res let set ctxt tidx aidx = set ctxt (Ticket_ledger (tidx, aidx)) let remove ctxt tidx aidx = remove ctxt (Ticket_ledger (tidx, aidx)) let spend ctxt tidx aidx qty = let open Syntax in let* src_balance = get ctxt tidx aidx in match Tx_rollup_l2_qty.sub src_balance qty with | None -> fail Balance_too_low | Some remainder when Tx_rollup_l2_qty.(remainder > zero) -> set ctxt tidx aidx remainder | Some _ -> remove ctxt tidx aidx let credit ctxt tidx aidx qty = let open Syntax in let* balance = get ctxt tidx aidx in match Tx_rollup_l2_qty.add balance qty with | None -> fail Balance_overflow | Some new_balance -> set ctxt tidx aidx new_balance module Internal_for_tests = struct let get_opt = get_opt end end end
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2022 Marigold <contact@marigold.dev> *) (* Copyright (c) 2022 Nomadic Labs <contact@nomadic-labs.com> *) (* Copyright (c) 2022 Oxhead Alpha <info@oxheadalpha.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dune
mockup_simulator.ml
type block = { rpc_context : Tezos_protocol_environment.rpc_context; protocol_data : Protocol.Alpha_context.Block_header.protocol_data; raw_protocol_data : Bytes.t; operations : Mockup.M.Block_services.operation list list; resulting_context_hash : Context_hash.t; } type chain = block list (** As new blocks and operations are received they are pushed to an Lwt_pipe wrapped into this type. *) type broadcast = | Broadcast_block of Block_hash.t * Block_header.t * Operation.t list list | Broadcast_op of Operation_hash.t * Alpha_context.packed_operation (** The state of a mockup node. *) type state = { instance_index : int; (** Index of this node. Indices go from 0 to N-1 where N is the total number of bakers in the simulation. *) live_depth : int; (** How many blocks (counting from the head into the past) are considered live? *) mutable chain : chain; (** The chain as seen by this fake "node". *) mutable mempool : (Operation_hash.t * Mockup.M.Protocol.operation) list; (** Mempool of this fake "node". *) chain_table : chain Block_hash.Table.t; (** The chain table of this fake "node". It maps from block hashes to blocks. *) global_chain_table : block Block_hash.Table.t; (** The global chain table that allows us to look up blocks that may be missing in [chain_table], i.e. not known to this particular node. This is used to find unknown predecessors. The real node can ask about an unknown block and receive it on request, this is supposed to emulate that functionality. *) ctxt_table : Tezos_protocol_environment.rpc_context Context_hash.Table.t; (** The context table allows us to look up rpc_context by its hash. *) validated_blocks_pipe : (Block_hash.t * Block_header.t * Operation.t list list) Lwt_pipe.Unbounded.t; (** [validated_blocks_pipe] is used to implement the [monitor_validated_blocks] RPC. *) heads_pipe : (Block_hash.t * Block_header.t) Lwt_pipe.Unbounded.t; (** [heads_pipe] is used to implement the [monitor_heads] RPC. *) mutable operations_stream : (Operation_hash.t * Mockup.M.Protocol.operation) list Lwt_stream.t; mutable operations_stream_push : (Operation_hash.t * Mockup.M.Protocol.operation) list option -> unit; (** [operations_pipe] is used to implement the [operations_pipe] RPC. *) mutable streaming_operations : bool; (** A helper flag used to implement the monitor operations RPC. *) broadcast_pipes : broadcast Lwt_pipe.Unbounded.t list; (** Broadcast pipes per node. *) genesis_block_true_hash : Block_hash.t; (** True hash of the genesis block as calculated by the [Block_header.hash] function. *) } let accounts = Mockup.Protocol_parameters.default_value.bootstrap_accounts let chain_id = Chain_id.of_string_exn "main" let genesis_block_hash = Block_hash.of_b58check_exn "BLockGenesisGenesisGenesisGenesisGenesisCCCCCeZiLHU" let genesis_predecessor_block_hash = Block_hash.zero type propagation = Block | Pass | Delay of float type propagation_vector = propagation list module type Hooks = sig val on_inject_block : level:int32 -> round:int32 -> block_hash:Block_hash.t -> block_header:Block_header.t -> operations:Operation.t list list -> protocol_data:Alpha_context.Block_header.protocol_data -> (Block_hash.t * Block_header.t * Operation.t list list * propagation_vector) tzresult Lwt.t val on_inject_operation : op_hash:Operation_hash.t -> op:Alpha_context.packed_operation -> (Operation_hash.t * Alpha_context.packed_operation * propagation_vector) tzresult Lwt.t val on_new_validated_block : block_hash:Block_hash.t -> block_header:Block_header.t -> operations:Operation.t list list -> (Block_hash.t * Block_header.t * Operation.t list list) option Lwt.t val on_new_head : block_hash:Block_hash.t -> block_header:Block_header.t -> (Block_hash.t * Block_header.t) option Lwt.t val on_new_operation : Operation_hash.t * Alpha_context.packed_operation -> (Operation_hash.t * Alpha_context.packed_operation) option Lwt.t val check_block_before_processing : level:int32 -> round:int32 -> block_hash:Block_hash.t -> block_header:Block_header.t -> protocol_data:Alpha_context.Block_header.protocol_data -> unit tzresult Lwt.t val check_chain_after_processing : level:int32 -> round:int32 -> chain:chain -> unit tzresult Lwt.t val check_mempool_after_processing : mempool:(Operation_hash.t * Mockup.M.Protocol.operation) list -> unit tzresult Lwt.t val stop_on_event : Baking_state.event -> bool val on_start_baker : baker_position:int -> delegates:Baking_state.consensus_key list -> cctxt:Protocol_client_context.full -> unit Lwt.t val check_chain_on_success : chain:chain -> unit tzresult Lwt.t end (** Return a series of blocks starting from the block with the given identifier. *) let locate_blocks (state : state) (block : Tezos_shell_services.Block_services.block) : block list tzresult Lwt.t = match block with | `Hash (hash, rel) -> ( match Block_hash.Table.find state.chain_table hash with | None -> failwith "locate_blocks: can't find the block %a" Block_hash.pp hash | Some chain0 -> let _, chain = List.split_n rel chain0 in return chain) | `Head rel -> let _, chain = List.split_n rel state.chain in return chain | `Level _ -> failwith "locate_blocks: `Level block spec not handled" | `Genesis -> failwith "locate_blocks: `Genesis block spec net handled" | `Alias _ -> failwith "locate_blocks: `Alias block spec not handled" (** Similar to [locate_blocks], but only returns the first block. *) let locate_block (state : state) (block : Tezos_shell_services.Block_services.block) : block tzresult Lwt.t = locate_blocks state block >>=? function | [] -> failwith "locate_block: can't find the block" | x :: _ -> return x (** Return the collection of live blocks for a given block identifier. *) let live_blocks (state : state) block = locate_blocks state block >>=? fun chain -> let segment, _ = List.split_n state.live_depth chain in return (List.fold_left (fun set ({rpc_context; _} : block) -> let hash = rpc_context.Tezos_protocol_environment.block_hash in Block_hash.Set.add hash set) (Block_hash.Set.singleton state.genesis_block_true_hash) segment) (** Extract the round number from raw fitness. *) let round_from_raw_fitness raw_fitness = match Protocol.Alpha_context.Fitness.from_raw raw_fitness with | Ok fitness -> return (Alpha_context.Round.to_int32 (Protocol.Alpha_context.Fitness.round fitness)) | Error _ -> failwith "round_from_raw_fitness: cannot parse fitness" (** Extract level from a block header. *) let get_block_level (block_header : Block_header.t) = return block_header.shell.level (** Extract round from a block header. *) let get_block_round (block_header : Block_header.t) = round_from_raw_fitness block_header.shell.fitness (** Parse protocol data. *) let parse_protocol_data (protocol_data : Bytes.t) = match Data_encoding.Binary.of_bytes_opt Protocol.Alpha_context.Block_header.protocol_data_encoding protocol_data with | None -> failwith "can't parse protocol data of a block" | Some parsed_protocol_data -> return parsed_protocol_data (** Broadcast an operation or block according to the given propagation vector. *) let handle_propagation msg propagation_vector broadcast_pipes = List.iter_s (fun (propagation, pipe) -> match propagation with | Block -> Lwt.return () | Pass -> Lwt_pipe.Unbounded.push pipe msg ; Lwt.return_unit | Delay s -> Lwt.dont_wait (fun () -> Lwt_unix.sleep s >>= fun () -> Lwt_pipe.Unbounded.push pipe msg ; Lwt.return_unit) (fun _exn -> ()) ; Lwt.return ()) (List.combine_drop propagation_vector broadcast_pipes) >>= fun () -> return () (** Use the [user_hooks] to produce a module of functions that will perform the heavy lifting for the RPC implementations. *) let make_mocked_services_hooks (state : state) (user_hooks : (module Hooks)) : Faked_services.hooks = let module User_hooks = (val user_hooks : Hooks) in let module Impl : Faked_services.Mocked_services_hooks = struct type mempool = Mockup.M.Block_services.Mempool.t let monitor_validated_blocks () = let next () = let rec pop_until_ok () = Lwt_pipe.Unbounded.pop state.validated_blocks_pipe >>= fun (block_hash, block_header, operations) -> User_hooks.on_new_validated_block ~block_hash ~block_header ~operations >>= function | None -> pop_until_ok () | Some (hash, head, operations) -> Lwt.return_some (chain_id, hash, head, operations) in pop_until_ok () in let shutdown () = () in Tezos_rpc.Answer.{next; shutdown} let monitor_heads () = let next () = let rec pop_until_ok () = Lwt_pipe.Unbounded.pop state.heads_pipe >>= fun (block_hash, block_header) -> (* Sleep a 0.1s to simulate a block application delay *) Lwt_unix.sleep 0.1 >>= fun () -> User_hooks.on_new_head ~block_hash ~block_header >>= function | None -> pop_until_ok () | Some head -> Lwt.return_some head in pop_until_ok () in let shutdown () = () in Tezos_rpc.Answer.{next; shutdown} let monitor_bootstrapped () = let first_run = ref true in let next () = if !first_run then ( first_run := false ; let b = match state.chain with [] -> assert false | b :: _ -> b in let head_hash = b.rpc_context.block_hash in let timestamp = b.rpc_context.block_header.timestamp in Lwt.return_some (head_hash, timestamp)) else Lwt.return_none in let shutdown () = () in Tezos_rpc.Answer.{next; shutdown} let protocols (block : Tezos_shell_services.Block_services.block) = locate_block state block >>=? fun x -> let hash = x.rpc_context.block_hash in let is_predecessor_of_genesis = match block with | `Hash (requested_hash, rel) -> Int.equal rel 0 && Block_hash.equal requested_hash genesis_predecessor_block_hash | _ -> false in (* It is important to tell the baker that the genesis block is not in the alpha protocol (we use Protocol_hash.zero). This will make the baker not try to propose alternatives to that block and just accept it as final in that Protocol_hash.zero protocol. The same for predecessor of genesis, it should be in Protocol_hash.zero. *) return Tezos_shell_services.Block_services. { current_protocol = (if Block_hash.equal hash genesis_block_hash || is_predecessor_of_genesis then Protocol_hash.zero else Protocol.hash); next_protocol = (if is_predecessor_of_genesis then Protocol_hash.zero else Protocol.hash); } let may_lie_on_proto_level block x = (* As for ../protocols, the baker distinguishes activation blocks from "normal" blocks by comparing the [proto_level] of the shell header and its predecessor. If the predecessor's one is different, it must mean that we are considering an activation block and must not endorse. Here, we do a bit of hacking in order to return a different proto_level for the predecessor of the genesis block which is considered as the current protocol activation block. To perfectly mimic what is supposed to happen, the first mocked up block created should be made in the genesis protocol, however, it is not what's done in the mockup mode. *) let is_predecessor_of_genesis = match block with | `Hash (requested_hash, rel) -> Int.equal rel 0 && Block_hash.equal requested_hash genesis_predecessor_block_hash | _ -> false in if is_predecessor_of_genesis then { x.rpc_context.block_header with proto_level = pred x.rpc_context.block_header.proto_level; } else x.rpc_context.block_header let raw_header (block : Tezos_shell_services.Block_services.block) : bytes tzresult Lwt.t = locate_block state block >>=? fun x -> let shell = may_lie_on_proto_level block x in let protocol_data = Data_encoding.Binary.to_bytes_exn Protocol.block_header_data_encoding x.protocol_data in return (Data_encoding.Binary.to_bytes_exn Tezos_base.Block_header.encoding {shell; protocol_data}) let header (block : Tezos_shell_services.Block_services.block) : Mockup.M.Block_services.block_header tzresult Lwt.t = locate_block state block >>=? fun x -> let shell = may_lie_on_proto_level block x in return { Mockup.M.Block_services.hash = x.rpc_context.block_hash; chain_id; shell; protocol_data = x.protocol_data; } let resulting_context_hash (block : Tezos_shell_services.Block_services.block) : Context_hash.t tzresult Lwt.t = locate_block state block >>=? fun x -> return x.resulting_context_hash let operations block = locate_block state block >>=? fun x -> return x.operations let inject_block block_hash (block_header : Block_header.t) operations = parse_protocol_data block_header.protocol_data >>=? fun protocol_data -> get_block_level block_header >>=? fun level -> get_block_round block_header >>=? fun round -> User_hooks.on_inject_block ~level ~round ~block_hash ~block_header ~operations ~protocol_data >>=? fun (block_hash1, block_header1, operations1, propagation_vector) -> handle_propagation (Broadcast_block (block_hash1, block_header1, operations1)) propagation_vector state.broadcast_pipes let all_pipes_or_select = function | None -> return state.broadcast_pipes | Some l -> List.map_es (fun n -> match List.nth_opt state.broadcast_pipes n with | None -> failwith "Node number %d is out of range (max is %d)" n (List.length state.broadcast_pipes - 1) | Some pipe -> return pipe) l let broadcast_block ?dests block_hash (block_header : Block_header.t) operations = all_pipes_or_select dests >>=? fun pipes -> List.iter_s (fun pipe -> Lwt_pipe.Unbounded.push pipe (Broadcast_block (block_hash, block_header, operations)) ; Lwt.return ()) pipes >>= return let inject_operation (Operation.{shell; proto} as op) = let op_hash = Operation.hash op in let proto_op_opt = Data_encoding.Binary.of_bytes Protocol.operation_data_encoding proto in match proto_op_opt with | Error _ -> failwith "inject_operation: cannot parse operation" | Ok protocol_data -> let op : Protocol.Alpha_context.packed_operation = {shell; protocol_data} in User_hooks.on_inject_operation ~op_hash ~op >>=? fun (op_hash1, op1, propagation_vector) -> handle_propagation (Broadcast_op (op_hash1, op1)) propagation_vector state.broadcast_pipes >>=? fun () -> return op_hash1 let broadcast_operation ?dests (op : Protocol.Alpha_context.packed_operation) = all_pipes_or_select dests >>=? fun pipes -> let op_hash = Alpha_context.Operation.hash_packed op in List.iter_s (fun pipe -> Lwt_pipe.Unbounded.push pipe (Broadcast_op (op_hash, op)) ; Lwt.return ()) pipes >>= return let pending_operations () = let ops = state.mempool in Lwt.return Mockup.M.Block_services.Mempool. { applied = ops; refused = Operation_hash.Map.empty; outdated = Operation_hash.Map.empty; branch_refused = Operation_hash.Map.empty; branch_delayed = Operation_hash.Map.empty; unprocessed = Operation_hash.Map.empty; } let monitor_operations ~applied ~branch_delayed ~branch_refused ~refused = ignore applied ; ignore branch_delayed ; ignore branch_refused ; ignore refused ; let streamed = ref false in state.streaming_operations <- true ; let next () = let rec loop () = Lwt_stream.get state.operations_stream >>= function | None when !streamed -> Lwt.return None | None -> streamed := true ; Lwt.return (Some []) | Some ops -> ( List.filter_map_s User_hooks.on_new_operation ops >>= function | [] -> loop () | l -> Lwt.return_some (List.map (fun x -> (x, None)) l)) in loop () in let shutdown () = () in Tezos_rpc.Answer.{next; shutdown} let rpc_context_callback block = locate_block state block >>=? fun x -> return x.rpc_context let list_blocks ~heads ~length ~min_date:_ = let compare_block_fitnesses block0 block1 = Fitness.compare block0.rpc_context.block_header.fitness block1.rpc_context.block_header.fitness in let hash_of_block block = block.rpc_context.block_hash in let lookup_head head = locate_blocks state (`Hash (head, 0)) >>=? fun xs -> let segment = match length with None -> xs | Some n -> List.take_n n xs in return (List.map hash_of_block (List.sort compare_block_fitnesses segment)) in List.map_es lookup_head heads let live_blocks block = live_blocks state block let raw_protocol_data block = locate_block state block >>=? fun x -> return x.raw_protocol_data end in (module Impl) (** Return the current head. *) let head {chain; _} = match List.hd chain with | None -> failwith "mockup_simulator.ml: empty chain" | Some hd -> return hd (** Clear from the mempool operations whose branch does not point to a live block with respect to the current head. *) let clear_mempool state = head state >>=? fun head -> let included_ops_hashes = List.map (fun (op : Mockup.M.Block_services.operation) -> op.hash) (List.flatten head.operations) in live_blocks state (`Head 0) >>=? fun live_set -> let mempool = List.filter (fun (_oph, (op : Mockup.M.Protocol.operation)) -> let included_in_head = List.mem ~equal:Operation_hash.equal (Alpha_context.Operation.hash_packed op) included_ops_hashes in Block_hash.Set.mem op.shell.branch live_set && not included_in_head) state.mempool in state.mempool <- mempool ; return_unit let begin_validation_and_application ctxt chain_id mode ~predecessor ~cache = let open Lwt_result_syntax in let* validation_state = Mockup.M.Protocol.begin_validation ctxt chain_id mode ~predecessor ~cache in let* application_state = Mockup.M.Protocol.begin_application ctxt chain_id mode ~predecessor ~cache in return (validation_state, application_state) let validate_and_apply_operation (validation_state, application_state) oph op = let open Lwt_result_syntax in let* validation_state = Mockup.M.Protocol.validate_operation validation_state oph op in let* application_state, receipt = Mockup.M.Protocol.apply_operation application_state oph op in return ((validation_state, application_state), receipt) let finalize_validation_and_application (validation_state, application_state) shell_header = let open Lwt_result_syntax in let* () = Mockup.M.Protocol.finalize_validation validation_state in Mockup.M.Protocol.finalize_application application_state shell_header (** Apply a block to the given [rpc_context]. *) let reconstruct_context (rpc_context : Tezos_protocol_environment.rpc_context) (operations : Operation.t list list) (block_header : Block_header.t) = let predecessor = rpc_context.block_header in let predecessor_context = rpc_context.context in parse_protocol_data block_header.protocol_data >>=? fun protocol_data -> begin_validation_and_application predecessor_context chain_id (Application {shell = block_header.shell; protocol_data}) ~predecessor ~cache:`Lazy >>=? fun state -> let i = ref 0 in List.fold_left_es (List.fold_left_es (fun (state, results) op -> incr i ; let oph = Operation.hash op in let operation_data = Data_encoding.Binary.of_bytes_exn Mockup.M.Protocol.operation_data_encoding op.Operation.proto in let op = {Mockup.M.Protocol.shell = op.shell; protocol_data = operation_data} in validate_and_apply_operation state oph op >>=? fun (state, receipt) -> return (state, receipt :: results))) (state, []) operations >>=? fun (state, _) -> finalize_validation_and_application state None (** Process an incoming block. If validation succeeds: - update the current head to this new block - cleanup outdated operations - cleanup listener table Note that this implementation does not handle concurrent branches. *) let rec process_block state block_hash (block_header : Block_header.t) operations = let get_predecessor () = let predecessor_hash = block_header.Block_header.shell.predecessor in head state >>=? fun head -> match Block_hash.Table.find state.chain_table predecessor_hash with | None | Some [] -> ( (* Even if the predecessor is not known locally, it might be known by some node in the network. The code below "requests" information about the block by its hash. *) match Block_hash.Table.find state.global_chain_table predecessor_hash with | None -> failwith "get_predecessor: unknown predecessor block" | Some predecessor -> let predecessor_block_header = Block_header. { shell = predecessor.rpc_context.block_header; protocol_data = predecessor.raw_protocol_data; } in let predecessor_ops = List.map (fun xs -> List.map (fun (op : Mockup.M.Block_services.operation) -> Operation. { shell = op.shell; proto = Data_encoding.Binary.to_bytes_exn Protocol.operation_data_encoding op.protocol_data; }) xs) predecessor.operations in (* If the block is found, apply it before proceeding. *) process_block state predecessor.rpc_context.block_hash predecessor_block_header predecessor_ops >>=? fun () -> return predecessor) | Some (predecessor :: _) -> if Int32.sub head.rpc_context.block_header.level predecessor.rpc_context.block_header.level <= 2l then return predecessor else failwith "get_predecessor: the predecessor block is too old" in match Block_hash.Table.find state.chain_table block_hash with | Some _ -> (* The block is already known. *) return_unit | None -> get_predecessor () >>=? fun predecessor -> head state >>=? fun head -> reconstruct_context predecessor.rpc_context operations block_header >>=? fun ({context; message; _}, _) -> let resulting_context_hash = Tezos_context_ops.Context_ops.hash ~time:block_header.shell.timestamp ?message context in let rpc_context = Tezos_protocol_environment. {context; block_hash; block_header = block_header.shell} in let operations = List.map (fun pass -> List.map (fun (Operation.{shell; proto} as op) -> let hash : Operation_hash.t = Operation.hash op in let protocol_data : Alpha_context.packed_protocol_data = Data_encoding.Binary.of_bytes_exn Protocol.operation_data_encoding proto in { Mockup.M.Block_services.chain_id; hash; shell; protocol_data; receipt = Empty; }) pass) operations in parse_protocol_data block_header.protocol_data >>=? fun protocol_data -> let new_block = { rpc_context; protocol_data; raw_protocol_data = block_header.protocol_data; operations; resulting_context_hash; } in let predecessor_hash = block_header.Block_header.shell.predecessor in let tail = Block_hash.Table.find state.chain_table predecessor_hash |> WithExceptions.Option.get ~loc:__LOC__ in let new_chain = new_block :: tail in Block_hash.Table.replace state.chain_table block_hash new_chain ; Block_hash.Table.replace state.global_chain_table block_hash new_block ; Context_hash.Table.replace state.ctxt_table resulting_context_hash rpc_context ; if Fitness.( block_header.shell.fitness > head.rpc_context.block_header.fitness) then ( state.chain <- new_chain ; clear_mempool state >>=? fun () -> (* The head changed: notify that the stream ended. *) state.operations_stream_push None ; state.streaming_operations <- false ; (* Instanciate a new stream *) let operations_stream, operations_stream_push = Lwt_stream.create () in state.operations_stream <- operations_stream ; state.operations_stream_push <- operations_stream_push ; state.operations_stream_push (Some state.mempool) ; return_unit) else return_unit (** This process listens to broadcast block and operations and incorporates them in the context of the fake node. *) let rec listener ~(user_hooks : (module Hooks)) ~state ~broadcast_pipe = let module User_hooks = (val user_hooks : Hooks) in Lwt_pipe.Unbounded.pop broadcast_pipe >>= function | Broadcast_op (operation_hash, packed_operation) -> (if List.mem_assoc ~equal:Operation_hash.equal operation_hash state.mempool then return_unit else ( state.mempool <- (operation_hash, packed_operation) :: state.mempool ; state.operations_stream_push (Some [(operation_hash, packed_operation)]) ; User_hooks.check_mempool_after_processing ~mempool:state.mempool)) >>=? fun () -> listener ~user_hooks ~state ~broadcast_pipe | Broadcast_block (block_hash, block_header, operations) -> get_block_level block_header >>=? fun level -> get_block_round block_header >>=? fun round -> parse_protocol_data block_header.protocol_data >>=? fun protocol_data -> User_hooks.check_block_before_processing ~level ~round ~block_hash ~block_header ~protocol_data >>=? fun () -> process_block state block_hash block_header operations >>=? fun () -> User_hooks.check_chain_after_processing ~level ~round ~chain:state.chain >>=? fun () -> Lwt_pipe.Unbounded.push state.validated_blocks_pipe (block_hash, block_header, operations) ; Lwt_pipe.Unbounded.push state.heads_pipe (block_hash, block_header) ; listener ~user_hooks ~state ~broadcast_pipe (** Create a fake node state. *) let create_fake_node_state ~i ~live_depth ~(genesis_block : Block_header.t * Tezos_protocol_environment.rpc_context) ~global_chain_table ~broadcast_pipes = let block_header0, rpc_context0 = genesis_block in parse_protocol_data block_header0.protocol_data >>=? fun protocol_data -> let genesis0 = { rpc_context = rpc_context0; protocol_data; raw_protocol_data = block_header0.protocol_data; operations = [[]; []; []; []]; resulting_context_hash = block_header0.shell.context; } in let chain0 = [genesis0] in let validated_blocks_pipe = Lwt_pipe.Unbounded.create () in let heads_pipe = Lwt_pipe.Unbounded.create () in let operations_stream, operations_stream_push = Lwt_stream.create () in let genesis_block_true_hash = Block_header.hash { shell = rpc_context0.block_header; protocol_data = block_header0.protocol_data; } in (* Only push genesis block as a new head, not a valid block: it is the shell's semantics to not advertise "transition" blocks. *) Lwt_pipe.Unbounded.push heads_pipe (rpc_context0.block_hash, block_header0) ; return { instance_index = i; live_depth; mempool = []; chain = chain0; chain_table = Block_hash.Table.of_seq (List.to_seq [ (rpc_context0.block_hash, chain0); (genesis_block_true_hash, chain0); (genesis_predecessor_block_hash, chain0); ]); global_chain_table; ctxt_table = Context_hash.Table.of_seq (List.to_seq [ ( rpc_context0.Tezos_protocol_environment.block_header .Block_header.context, rpc_context0 ); ]); validated_blocks_pipe; heads_pipe; operations_stream; operations_stream_push; streaming_operations = false; broadcast_pipes; genesis_block_true_hash; } (** Start baker process. *) let baker_process ~(delegates : Baking_state.consensus_key list) ~base_dir ~(genesis_block : Block_header.t * Tezos_protocol_environment.rpc_context) ~i ~global_chain_table ~broadcast_pipes ~(user_hooks : (module Hooks)) = let broadcast_pipe = List.nth broadcast_pipes i |> WithExceptions.Option.get ~loc:__LOC__ in create_fake_node_state ~i ~live_depth:60 ~genesis_block ~global_chain_table ~broadcast_pipes >>=? fun state -> let filesystem = String.Hashtbl.create 10 in let wallet = new Faked_client_context.faked_io_wallet ~base_dir ~filesystem in let cctxt = let hooks = make_mocked_services_hooks state user_hooks in new Protocol_client_context.wrap_full (new Faked_client_context.unix_faked ~base_dir ~filesystem ~chain_id ~hooks) in let module User_hooks = (val user_hooks : Hooks) in User_hooks.on_start_baker ~baker_position:i ~delegates ~cctxt >>= fun () -> List.iter_es (fun ({alias; public_key; public_key_hash; secret_key_uri} : Baking_state.consensus_key) -> let open Tezos_client_base in let name = alias |> WithExceptions.Option.get ~loc:__LOC__ in Client_keys.neuterize secret_key_uri >>=? fun public_key_uri -> Client_keys.register_key wallet ~force:false (public_key_hash, public_key_uri, secret_key_uri) ~public_key name) delegates >>=? fun () -> let context_index = let open Abstract_context_index in { sync_fun = Lwt.return; checkout_fun = (fun hash -> Context_hash.Table.find state.ctxt_table hash |> Option.map (fun Tezos_protocol_environment.{context; _} -> context) |> Lwt.return); finalize_fun = Lwt.return; } in let module User_hooks = (val user_hooks : Hooks) in let listener_process () = listener ~user_hooks ~state ~broadcast_pipe in let stop_on_event event = User_hooks.stop_on_event event in let baker_process () = Faked_daemon.Baker.run ~cctxt ~stop_on_event ~chain_id ~context_index ~delegates in Lwt.pick [listener_process (); baker_process ()] >>=? fun () -> User_hooks.check_chain_on_success ~chain:state.chain let genesis_protocol_data (baker_sk : Signature.secret_key) (predecessor_hash : Block_hash.t) (block_header : Block_header.shell_header) : Bytes.t = let proof_of_work_nonce = Bytes.create Protocol.Alpha_context.Constants.proof_of_work_nonce_size in let payload_hash = Protocol.Alpha_context.Block_payload.hash ~predecessor_hash ~payload_round:Alpha_context.Round.zero [] in let contents = Protocol.Alpha_context.Block_header. { payload_hash; payload_round = Alpha_context.Round.zero; proof_of_work_nonce; seed_nonce_hash = None; liquidity_baking_toggle_vote = Baking_configuration.default_liquidity_baking_toggle_vote; } in let unsigned_header = Data_encoding.Binary.to_bytes_exn Protocol.Alpha_context.Block_header.unsigned_encoding (block_header, contents) in let signature = Signature.sign ~watermark: Alpha_context.Block_header.(to_watermark (Block_header chain_id)) baker_sk unsigned_header in Data_encoding.Binary.to_bytes_exn Protocol.Alpha_context.Block_header.protocol_data_encoding {contents; signature} (** Figure out who should be the signer for the genesis block. *) let deduce_baker_sk (accounts_with_secrets : (Protocol.Alpha_context.Parameters.bootstrap_account * Tezos_mockup_commands.Mockup_wallet.bootstrap_secret) list) (total_accounts : int) (level : int) : Signature.secret_key tzresult Lwt.t = (match (total_accounts, level) with | _, 0 -> return 0 (* apparently this doesn't really matter *) | _ -> failwith "cannot deduce baker for a genesis block, total accounts = %d, level = \ %d" total_accounts level) >>=? fun baker_index -> let _, secret = List.nth accounts_with_secrets baker_index |> WithExceptions.Option.get ~loc:__LOC__ in let secret_key = Signature.Secret_key.of_b58check_exn (Uri.path (secret.sk_uri :> Uri.t)) in return secret_key (** Generate the two initial genesis blocks. *) let make_genesis_context ~delegate_selection ~initial_seed ~round0 ~round1 ~consensus_committee_size ~consensus_threshold accounts_with_secrets (total_accounts : int) = let default_constants = Mockup.Protocol_parameters.default_value.constants in let round_durations = let open Alpha_context in Stdlib.Option.get (Round.Durations.create_opt ~first_round_duration:(Period.of_seconds_exn round0) ~delay_increment_per_round: (Period.of_seconds_exn (Int64.sub round1 round0))) in let constants = { default_constants with initial_seed; consensus_committee_size; consensus_threshold; minimal_block_delay = Alpha_context.Period.of_seconds_exn (max 1L round0); delay_increment_per_round = Alpha_context.Period.of_seconds_exn Int64.(max 1L (sub round1 round0)); } in let from_bootstrap_account i ( (account : Protocol.Alpha_context.Parameters.bootstrap_account), (secret : Tezos_mockup_commands.Mockup_wallet.bootstrap_secret) ) : Mockup.Parsed_account.t = { name = Format.sprintf "bootstrap%d" (i + 1); sk_uri = secret.sk_uri; amount = account.amount; } in let bootstrap_accounts = Data_encoding.Json.construct (Data_encoding.list Mockup.Parsed_account.encoding) (List.mapi from_bootstrap_account accounts_with_secrets) in List.map_e (fun (level, round_delegates) -> Raw_level_repr.of_int32 level >>? fun level -> List.map_e (fun (round, delegate) -> Round_repr.of_int32 round >|? fun round -> (round, delegate)) round_delegates >|? fun round_delegates -> (level, round_delegates)) delegate_selection |> Environment.wrap_tzresult >>?= fun delegate_selection -> (match (delegate_selection, constants.initial_seed) with | [], seed_opt -> return seed_opt | selection, (Some _ as seed) -> ( Faked_client_context.logger#warning "Checking provided seed." >>= fun () -> Tenderbrute.check_seed ~bootstrap_accounts_json:bootstrap_accounts ~parameters:Mockup.Protocol_parameters.{default_value with constants} ~seed selection >>=? function | true -> return seed | false -> failwith "Provided initial seed does not match delegate selection") | _, None -> Faked_client_context.logger#warning "No initial seed provided, bruteforcing." >>= fun () -> Tenderbrute.bruteforce ~max:100_000_000_000 ~bootstrap_accounts_json:bootstrap_accounts ~parameters:Mockup.Protocol_parameters.{default_value with constants} delegate_selection) >>=? fun initial_seed -> (match initial_seed with | None -> Lwt.return_unit | _ when initial_seed = constants.initial_seed -> Lwt.return_unit | Some seed -> Faked_client_context.logger#warning "Bruteforced seed is %a, please save into your test." State_hash.pp seed) >>= fun () -> let constants = {constants with initial_seed} in let common_parameters = Mockup.Protocol_parameters.{default_value with constants} in let make_block0 initial_timestamp = let parameters = {common_parameters with initial_timestamp} in let reencoded_parameters = Data_encoding.Binary.of_bytes_exn Mockup.M.parameters_encoding @@ Data_encoding.Binary.to_bytes_exn Mockup.Protocol_parameters.encoding parameters in Mockup.M.init ~cctxt:Faked_client_context.logger ~parameters:reencoded_parameters ~constants_overrides_json:None ~bootstrap_accounts_json:(Some bootstrap_accounts) >>=? fun {chain = _; rpc_context = rpc_context0; protocol_data = _} -> let block_header0 = { rpc_context0.block_header with predecessor = genesis_predecessor_block_hash; } in let rpc_context = {rpc_context0 with block_header = block_header0} in deduce_baker_sk accounts_with_secrets total_accounts 0 >>=? fun baker_sk -> let protocol_data = genesis_protocol_data baker_sk genesis_predecessor_block_hash rpc_context.block_header in let block_header = Block_header.{shell = rpc_context.block_header; protocol_data} in return (block_header, rpc_context) in let level0_round0_duration = Protocol.Alpha_context.Round.round_duration round_durations Alpha_context.Round.zero in let timestamp0 = Time.Protocol.of_seconds Int64.( sub (of_float (Unix.time ())) (Alpha_context.Period.to_seconds level0_round0_duration)) in make_block0 timestamp0 (** By default, propagate every message everywhere. *) let default_propagation_vector = List.repeat 5 Pass module Default_hooks : Hooks = struct let on_inject_block ~level:_ ~round:_ ~block_hash ~block_header ~operations ~protocol_data:_ = return (block_hash, block_header, operations, default_propagation_vector) let on_inject_operation ~op_hash ~op = return (op_hash, op, default_propagation_vector) let on_new_validated_block ~block_hash ~block_header ~operations = Lwt.return (Some (block_hash, block_header, operations)) let on_new_head ~block_hash ~block_header = Lwt.return (Some (block_hash, block_header)) let on_new_operation x = Lwt.return_some x let check_block_before_processing ~level:_ ~round:_ ~block_hash:_ ~block_header:_ ~protocol_data:_ = return_unit let check_chain_after_processing ~level:_ ~round:_ ~chain:_ = return_unit let check_mempool_after_processing ~mempool:_ = return_unit let stop_on_event _ = false let on_start_baker ~baker_position:_ ~delegates:_ ~cctxt:_ = Lwt.return_unit let check_chain_on_success ~chain:_ = return_unit end type config = { debug : bool; round0 : int64; round1 : int64; timeout : int; delegate_selection : (int32 * (int32 * Signature.public_key_hash) list) list; initial_seed : State_hash.t option; consensus_committee_size : int; consensus_threshold : int; } let default_config = { debug = false; round0 = 2L; (* Rounds should be long enough for the bakers to exchange all the necessary messages. *) round1 = 3L (* No real need to increase round durations. *); timeout = 30; delegate_selection = []; initial_seed = None; consensus_committee_size = Default_parameters.constants_mainnet.consensus_committee_size; consensus_threshold = Default_parameters.constants_mainnet.consensus_threshold; } let make_baking_delegate ( (account : Alpha_context.Parameters.bootstrap_account), (secret : Tezos_mockup_commands.Mockup_wallet.bootstrap_secret) ) : Baking_state.consensus_key = Baking_state. { alias = Some secret.name; public_key = account.public_key |> WithExceptions.Option.get ~loc:__LOC__; public_key_hash = account.public_key_hash; secret_key_uri = secret.sk_uri; } let run ?(config = default_config) bakers_spec = Tezos_client_base.Client_keys.register_signer (module Tezos_signer_backends.Unencrypted) ; let total_accounts = List.fold_left (fun acc (n, _) -> acc + n) 0 bakers_spec in if total_accounts = 0 then failwith "the simulation should use at least one delegate" else if total_accounts > 5 then failwith "only up to 5 bootstrap accounts are available" else (* When logging is enabled it may cause non-termination: https://gitlab.com/nomadic-labs/tezos/-/issues/546 In particular, it seems that when logging is enabled the baker process can get cancelled without executing its Lwt finalizer. *) (if config.debug then Tezos_base_unix.Internal_event_unix.init () else Lwt.return_unit) >>= fun () -> let total_bakers = List.length bakers_spec in (List.init ~when_negative_length:() total_bakers (fun _ -> Lwt_pipe.Unbounded.create ()) |> function | Error () -> failwith "impossible: negative length of the baker spec" | Ok xs -> return xs) >>=? fun broadcast_pipes -> let global_chain_table = Block_hash.Table.create 10 in Tezos_mockup_commands.Mockup_wallet.default_bootstrap_accounts >>=? fun bootstrap_secrets -> let accounts_with_secrets = List.combine_drop (List.take_n total_accounts accounts) bootstrap_secrets in let all_delegates = List.map make_baking_delegate accounts_with_secrets in make_genesis_context ~delegate_selection:config.delegate_selection ~initial_seed:config.initial_seed ~round0:config.round0 ~round1:config.round1 ~consensus_committee_size:config.consensus_committee_size ~consensus_threshold:config.consensus_threshold accounts_with_secrets total_accounts >>=? fun genesis_block -> let take_third (_, _, x) = x in let timeout_process () = Lwt_unix.sleep (Float.of_int config.timeout) >>= fun () -> failwith "the test is taking longer than %d seconds@." config.timeout in Lwt.pick [ timeout_process (); Lwt_result_syntax.tzjoin (take_third (List.fold_left (fun (i, delegates_acc, ms) (n, user_hooks) -> let delegates, leftover_delegates = List.split_n n delegates_acc in let m = baker_process ~delegates ~base_dir:"dummy" ~genesis_block ~i ~global_chain_table ~broadcast_pipes ~user_hooks in (i + 1, leftover_delegates, m :: ms)) (0, all_delegates, []) bakers_spec)); ] let get_account_pk i = match List.nth accounts i with | None -> assert false | Some acc -> acc.public_key |> WithExceptions.Option.get ~loc:__LOC__ let bootstrap1 = get_account_pk 0 let bootstrap2 = get_account_pk 1 let bootstrap3 = get_account_pk 2 let bootstrap4 = get_account_pk 3 let bootstrap5 = get_account_pk 4 let check_block_signature ~block_hash ~(block_header : Block_header.t) ~public_key = let (protocol_data : Protocol.Alpha_context.Block_header.protocol_data) = Data_encoding.Binary.of_bytes_exn Protocol.Alpha_context.Block_header.protocol_data_encoding block_header.protocol_data in let unsigned_header = Data_encoding.Binary.to_bytes_exn Protocol.Alpha_context.Block_header.unsigned_encoding (block_header.shell, protocol_data.contents) in if Signature.check ~watermark: Alpha_context.Block_header.(to_watermark (Block_header chain_id)) public_key protocol_data.signature unsigned_header then return_unit else failwith "unexpected signature for %a; tried with %a@." Block_hash.pp block_hash Signature.Public_key.pp public_key type op_predicate = Operation_hash.t -> Alpha_context.packed_operation -> bool tzresult Lwt.t let mempool_count_ops ~mempool ~predicate = List.map_es (fun (op_hash, op) -> predicate op_hash op) mempool >>=? fun results -> return (List.fold_left (fun acc result -> if result then acc + 1 else acc) 0 results) let mempool_has_op ~mempool ~predicate = mempool_count_ops ~mempool ~predicate >>=? fun n -> return (n > 0) let mempool_has_op_ref ~mempool ~predicate ~var = mempool_has_op ~mempool ~predicate >>=? fun result -> if result then var := true ; return_unit let op_is_signed_by ~public_key (op_hash : Operation_hash.t) (op : Alpha_context.packed_operation) = match op.protocol_data with | Operation_data d -> ( (match d.contents with | Single op_contents -> return (match op_contents with | Endorsement _ -> Alpha_context.Operation.to_watermark (Endorsement chain_id) | Preendorsement _ -> Alpha_context.Operation.to_watermark (Preendorsement chain_id) | _ -> Signature.Generic_operation) | _ -> failwith "unexpected contents in %a@." Operation_hash.pp op_hash) >>=? fun watermark -> match d.signature with | None -> failwith "did not find a signature for op %a@." Operation_hash.pp op_hash | Some signature -> let unsigned_operation_bytes = Data_encoding.Binary.to_bytes_exn Protocol.Alpha_context.Operation.unsigned_encoding (op.shell, Contents_list d.contents) in return (Signature.check ~watermark public_key signature unsigned_operation_bytes)) let op_is_preendorsement ?level ?round (op_hash : Operation_hash.t) (op : Alpha_context.packed_operation) = match op.protocol_data with | Operation_data d -> ( match d.contents with | Single op_contents -> ( match op_contents with | Preendorsement consensus_content -> let right_level = match level with | None -> true | Some expected_level -> Int32.equal (Alpha_context.Raw_level.to_int32 consensus_content.level) expected_level in let right_round = match round with | None -> true | Some expected_round -> Int32.equal (Alpha_context.Round.to_int32 consensus_content.round) expected_round in return (right_level && right_round) | _ -> return false) | _ -> failwith "unexpected contents in %a@." Operation_hash.pp op_hash) let op_is_endorsement ?level ?round (op_hash : Operation_hash.t) (op : Alpha_context.packed_operation) = match op.protocol_data with | Operation_data d -> ( match d.contents with | Single op_contents -> ( match op_contents with | Endorsement consensus_content -> let right_level = match level with | None -> true | Some expected_level -> Int32.equal (Alpha_context.Raw_level.to_int32 consensus_content.level) expected_level in let right_round = match round with | None -> true | Some expected_round -> Int32.equal (Alpha_context.Round.to_int32 consensus_content.round) expected_round in return (right_level && right_round) | _ -> return false) | _ -> failwith "unexpected contents in %a@." Operation_hash.pp op_hash) let op_is_both f g op_hash op = f op_hash op >>=? fun f_result -> if f_result then g op_hash op else return false let save_proposal_payload ~(protocol_data : Alpha_context.Block_header.protocol_data) ~var = var := Some (protocol_data.contents.payload_hash, protocol_data.contents.payload_round) ; return_unit let verify_payload_hash ~(protocol_data : Alpha_context.Block_header.protocol_data) ~original_proposal ~message = match !original_proposal with | None -> failwith "verify_payload_hash: expected to have observed a proposal by now" | Some (original_hash, original_round) -> if Protocol.Block_payload_hash.equal original_hash protocol_data.contents.payload_hash && Protocol.Alpha_context.Round.equal original_round protocol_data.contents.payload_round then return_unit else failwith "verify_payload_hash: %s" message let get_block_round block = round_from_raw_fitness block.rpc_context.block_header.fitness
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2021 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
netamqp_test.ml
(* This is the sender for receiver_t. Please read the comments there first! Also, when trying this example, make sure the receiver is started first because the receiver declares the queue. *) open Netamqp_types open Printf (* let () = Netamqp_endpoint.Debug.enable := true; Netamqp_transport.Debug.enable := true *) let esys = Unixqueue.create_unix_event_system() let p = `TCP(`Inet("localhost", Netamqp_endpoint.default_port)) let ep = Netamqp_endpoint.create p (`AMQP_0_9 `One) esys let c = Netamqp_connection.create ep let auth = Netamqp_connection.plain_auth "guest" "guest" let qname = "test_xy" let sender c = let ch = Netamqp_channel.open_next_s c in let channel = Netamqp_channel.number ch in eprintf "*** Channel could be opened!\n%!"; let header = `AMQP_0_9 (`P_basic ( None, None, None, Some 1, (* non-persistent *) None, (* priority *) None, None, None, None, None, None, None, None, None ) ) in for n = 100000 downto 0 do (* d is the queued message. Note that the body is actually a list of mstring (see t_receiver.ml for explanations). *) let d = (header, [Netamqp_rtypes.mk_mstring (sprintf "%d" n)] ) in Netamqp_endpoint.async_c2s ep (`AMQP_0_9(`Basic_publish(0, "", qname, false, false))) (Some d) channel; (* eprintf "*** Message published!\n%!"; *) done; eprintf "Send done\n%!" let receiver c = (* Now open the data channel. Channels are multiplexed over connections *) let ch = Netamqp_channel.open_next_s c in let channel = Netamqp_channel.number ch in eprintf "*** Channel could be opened!\n%!"; eprintf "*** Queue declared!\n%!"; Netamqp_endpoint.register_async_s2c ep (`AMQP_0_9 `Basic_deliver) channel (fun _ -> function | Some(_header,body) -> begin match int_of_string (Netxdr_mstring.concat_mstrings body) with | 0 -> (* Shut it down *) exit 0 | n when n mod 1000 = 0 -> eprintf "%d\n%!" n | _ -> () end | None -> failwith "No data" ); let _ = Netamqp_endpoint.sync_c2s_s ep (`AMQP_0_9 (`Basic_consume(0, qname, "", false, false, false, false, [] ))) None channel (-1.0) in () let _main = Netamqp_connection.open_s c [ auth ] (`Pref "en_US") "/"; eprintf "*** Connection could be opened, and the proto handshake is done!\n%!"; let ch = Netamqp_channel.open_next_s c in let channel = Netamqp_channel.number ch in let _ = Netamqp_endpoint.sync_c2s_s ep (`AMQP_0_9 (`Queue_declare(0, qname, false, false, false, (* auto-delete: *) true, false, []))) None (* This value would allow to send content data along with the method. Only certain methods permit this, though. *) channel (-1.0) (* timeout *) in receiver c; sender c; Unixqueue.run esys; Netamqp_channel.close_s ch; eprintf "*** Channel could be closed!\n%!"; Netamqp_connection.close_s c; eprintf "*** Connection could be closed!\n%!"
(* This is the sender for receiver_t. Please read the comments there first! Also, when trying this example, make sure the receiver is started first because the receiver declares the queue. *)
test_mem_context.ml
(** Testing ------- Component: Protocol Environment Invocation: dune exec src/lib_protocol_environment/test/test.exe -- test "^mem_context$" Dependencies: src/lib_protocol_environment/test/assert.ml Subject: Low-level operations on memory contexts. *) (** Context creation *) (* Genesis -- block2 -- block3a \ \-- block3b *) module Assert = Assert let create_block2 ctxt = let open Lwt_syntax in let* ctxt = Context.add ctxt ["a"; "b"] (Bytes.of_string "Novembre") in let* ctxt = Context.add ctxt ["a"; "c"] (Bytes.of_string "Juin") in let* ctxt = Context.add ctxt ["version"] (Bytes.of_string "0.0") in Lwt.return ctxt let create_block3a ctxt = let open Lwt_syntax in let* ctxt = Context.remove ctxt ["a"; "b"] in let* ctxt = Context.add ctxt ["a"; "d"] (Bytes.of_string "Mars") in Lwt.return ctxt let create_block3b ctxt = let open Lwt_syntax in let* ctxt = Context.remove ctxt ["a"; "c"] in let* ctxt = Context.add ctxt ["a"; "d"] (Bytes.of_string "Février") in Lwt.return ctxt type t = { genesis : Context.t; block2 : Context.t; block3a : Context.t; block3b : Context.t; } let wrap_context_init f _ () = let open Lwt_syntax in let genesis = Tezos_protocol_environment.Memory_context.empty in let* block2 = create_block2 genesis in let* block3a = create_block3a block2 in let* block3b = create_block3b block2 in let* result = f {genesis; block2; block3a; block3b} in Lwt.return result (** Simple test *) let c = Option.map Bytes.to_string (** Restore the context applied until [block2]. It is asserted that the following key-values are present: - (["version"], ["0.0"]) - (["a"; "b"], ["Novembre"]) - [(["a"; "c"], "Juin")] *) let test_simple {block2 = ctxt; _} = let open Lwt_syntax in let* version = Context.find ctxt ["version"] in Assert.String.Option.equal ~loc:__LOC__ (c version) (Some "0.0") ; let* novembre = Context.find ctxt ["a"; "b"] in Assert.String.Option.equal (Some "Novembre") (c novembre) ; let* juin = Context.find ctxt ["a"; "c"] in Assert.String.Option.equal ~loc:__LOC__ (Some "Juin") (c juin) ; (* Mem function returns "true" if the key given leads to an existing leaf *) let* res = Context.mem ctxt ["a"] in Assert.Bool.equal false res ; let* res = Context.mem ctxt ["a"; "c"] in Assert.Bool.equal true res ; let* res = Context.mem ctxt ["a"; "x"] in Assert.Bool.equal false res ; (* Mem_tree is like "mem", but also returns "true" for a trunk node *) let* res = Context.mem_tree ctxt ["a"] in Assert.Bool.equal true res ; let* res = Context.mem_tree ctxt ["a"; "c"] in Assert.Bool.equal true res ; let* res = Context.mem_tree ctxt ["a"; "x"] in Assert.Bool.equal false res ; return_unit (** Restore the context applied until [block3a]. It is asserted that the following key-values are present: - (["version"], ["0.0"]) - (["a"; "c"], ["Juin"]) - (["a"; "d"], ["Mars"]) Additionally, the key ["a"; "b"] is associated with nothing as it has been removed by block [block3a]. *) let test_continuation {block3a = ctxt; _} = let open Lwt_syntax in let* version = Context.find ctxt ["version"] in Assert.String.Option.equal ~loc:__LOC__ (Some "0.0") (c version) ; let* novembre = Context.find ctxt ["a"; "b"] in Assert.is_none ~loc:__LOC__ (c novembre) ; let* juin = Context.find ctxt ["a"; "c"] in Assert.String.Option.equal ~loc:__LOC__ (Some "Juin") (c juin) ; let* mars = Context.find ctxt ["a"; "d"] in Assert.String.Option.equal ~loc:__LOC__ (Some "Mars") (c mars) ; Lwt.return_unit (** Restore the context applied until [block3b]. It is asserted that the following key-values are present: - (["version"], ["0.0"]) - (["a"; "b"], ["Novembre"]) - (["a"; "d"], ["Février"]) Additionally, the key ["a"; "c"] is associated with nothing as it has been removed by block [block3b]. *) let test_fork {block3b = ctxt; _} = let open Lwt_syntax in let* version = Context.find ctxt ["version"] in Assert.String.Option.equal ~loc:__LOC__ (Some "0.0") (c version) ; let* novembre = Context.find ctxt ["a"; "b"] in Assert.String.Option.equal ~loc:__LOC__ (Some "Novembre") (c novembre) ; let* juin = Context.find ctxt ["a"; "c"] in Assert.is_none ~loc:__LOC__ (c juin) ; let* mars = Context.find ctxt ["a"; "d"] in Assert.String.Option.equal ~loc:__LOC__ (Some "Février") (c mars) ; Lwt.return_unit (** Restore the context at [genesis] and explicitly replay setting/getting key-values. *) let test_replay {genesis = ctxt0; _} = let open Lwt_syntax in let* ctxt1 = Context.add ctxt0 ["version"] (Bytes.of_string "0.0") in let* ctxt2 = Context.add ctxt1 ["a"; "b"] (Bytes.of_string "Novembre") in let* ctxt3 = Context.add ctxt2 ["a"; "c"] (Bytes.of_string "Juin") in let* ctxt4a = Context.add ctxt3 ["a"; "d"] (Bytes.of_string "July") in let* ctxt4b = Context.add ctxt3 ["a"; "d"] (Bytes.of_string "Juillet") in let* ctxt5a = Context.add ctxt4a ["a"; "b"] (Bytes.of_string "November") in let* novembre = Context.find ctxt4a ["a"; "b"] in Assert.String.Option.equal ~loc:__LOC__ (Some "Novembre") (c novembre) ; let* november = Context.find ctxt5a ["a"; "b"] in Assert.String.Option.equal ~loc:__LOC__ (Some "November") (c november) ; let* july = Context.find ctxt5a ["a"; "d"] in Assert.String.Option.equal ~loc:__LOC__ (Some "July") (c july) ; let* novembre = Context.find ctxt4b ["a"; "b"] in Assert.String.Option.equal ~loc:__LOC__ (Some "Novembre") (c novembre) ; let* juillet = Context.find ctxt4b ["a"; "d"] in Assert.String.Option.equal ~loc:__LOC__ (Some "Juillet") (c juillet) ; Lwt.return_unit (** Restore the context at [genesis] and fold upon a context a series of key prefixes using {!Context.fold}. *) let test_fold_keys {genesis = ctxt; _} = let open Lwt_syntax in let* ctxt = Context.add ctxt ["a"; "b"] (Bytes.of_string "Novembre") in let* ctxt = Context.add ctxt ["a"; "c"] (Bytes.of_string "Juin") in let* ctxt = Context.add ctxt ["a"; "d"; "e"] (Bytes.of_string "Septembre") in let* ctxt = Context.add ctxt ["f"] (Bytes.of_string "Avril") in let* ctxt = Context.add ctxt ["g"; "h"] (Bytes.of_string "Avril") in let* l = Test_mem_context_common.keys ctxt [] in Assert.String.List_list.equal ~loc:__LOC__ [["a"; "b"]; ["a"; "c"]; ["a"; "d"; "e"]; ["f"]; ["g"; "h"]] (List.sort compare l) ; let* l = Test_mem_context_common.keys ctxt ["a"] in Assert.String.List_list.equal ~loc:__LOC__ [["a"; "b"]; ["a"; "c"]; ["a"; "d"; "e"]] (List.sort compare l) ; let* l = Test_mem_context_common.keys ctxt ["f"] in Assert.String.List_list.equal ~loc:__LOC__ [] l ; let* l = Test_mem_context_common.keys ctxt ["g"] in Assert.String.List_list.equal ~loc:__LOC__ [["g"; "h"]] l ; let* l = Test_mem_context_common.keys ctxt ["i"] in Assert.String.List_list.equal ~loc:__LOC__ [] l ; Lwt.return_unit let test_fold {genesis = ctxt; _} = let open Lwt_syntax in let foo1 = Bytes.of_string "foo1" in let foo2 = Bytes.of_string "foo2" in let* ctxt = Context.add ctxt ["foo"; "toto"] foo1 in let* ctxt = Context.add ctxt ["foo"; "bar"; "toto"] foo2 in let fold depth ecs ens = let* cs, ns = Context.fold ?depth ctxt [] ~f:(fun path tree (cs, ns) -> match Context.Tree.kind tree with | `Value -> Lwt.return (path :: cs, ns) | `Tree -> Lwt.return (cs, path :: ns)) ~order:`Sorted ~init:([], []) in Assert.String.List_list.equal ~loc:__LOC__ ecs cs ; Assert.String.List_list.equal ~loc:__LOC__ ens ns ; Lwt.return () in let* () = fold None [["foo"; "toto"]; ["foo"; "bar"; "toto"]] [["foo"; "bar"]; ["foo"]; []] in let* () = fold (Some (`Eq 0)) [] [[]] in let* () = fold (Some (`Eq 1)) [] [["foo"]] in let* () = fold (Some (`Eq 2)) [["foo"; "toto"]] [["foo"; "bar"]] in let* () = fold (Some (`Lt 2)) [] [["foo"]; []] in let* () = fold (Some (`Le 2)) [["foo"; "toto"]] [["foo"; "bar"]; ["foo"]; []] in let* () = fold (Some (`Ge 2)) [["foo"; "toto"]; ["foo"; "bar"; "toto"]] [["foo"; "bar"]] in fold (Some (`Gt 2)) [["foo"; "bar"; "toto"]] [] let steps = ["00"; "01"; "02"; "03"; "05"; "06"; "07"; "09"; "0a"; "0b"; "0c"; "0e"; "0f"; "10"; "11"; "12"; "13"; "14"; "15"; "16"; "17"; "19"; "1a"; "1b"; "1c"; "1d"; "1e"; "1f"; "20"; "22"; "23"; "25"; "26"; "27"; "28"; "2a"; "2b"; "2f"; "30"; "31"; "32"; "33"; "35"; "36"; "37"; "3a"; "3b"; "3c"; "3d"; "3e"; "3f"; "40"; "42"; "43"; "45"; "46"; "47"; "48"; "4a"; "4b"; "4c"; "4e"; "4f"; "50"; "52"; "53"; "54"; "55"; "56"; "57"; "59"; "5b"; "5c"; "5f"; "60"; "61"; "62"; "63"; "64"; "65"; "66"; "67"; "69"; "6b"; "6c"; "6d"; "6e"; "6f"; "71"; "72"; "73"; "74"; "75"; "78"; "79"; "7a"; "7b"; "7c"; "7d"; "7e"; "80"; "82"; "83"; "84"; "85"; "86"; "88"; "8b"; "8c"; "8d"; "8f"; "92"; "93"; "94"; "96"; "97"; "99"; "9a"; "9b"; "9d"; "9e"; "9f"; "a0"; "a1"; "a2"; "a3"; "a4"; "a5"; "a6"; "a7"; "a8"; "aa"; "ab"; "ac"; "ad"; "ae"; "af"; "b0"; "b1"; "b2"; "b3"; "b4"; "b6"; "b8"; "b9"; "bb"; "bc"; "bf"; "c0"; "c1"; "c2"; "c3"; "c4"; "c5"; "c8"; "c9"; "cb"; "cc"; "cd"; "ce"; "d0"; "d1"; "d2"; "d4"; "d5"; "d7"; "d8"; "d9"; "da"; "e0"; "e3"; "e6"; "e8"; "e9"; "ea"; "ec"; "ee"; "ef"; "f0"; "f1"; "f5"; "f7"; "f8"; "f9"; "fb"; "fc"; "fd"; "fe"; "ff"] [@@ocamlformat "disable"] let bindings = let zero = Bytes.make 10 '0' in List.map (fun x -> (["root"; x], zero)) steps let test_fold_order {genesis = ctxt; _} = let open Lwt_syntax in let* ctxt = List.fold_left_s (fun ctxt (k, v) -> Context.add ctxt k v) ctxt bindings in (* check that folding over a in-memory checkout is ok. It would be nice to test this on a checkout as well, but [Context] doesn't expose the right hooks (yet?). *) let* bs = Test_mem_context_common.fold_keys ctxt ["root"] ~init:[] ~f:(fun k acc -> Lwt.return (k :: acc)) in let bs = List.rev bs in Assert.String.List_list.equal ~loc:__LOC__ (List.map fst bindings) bs ; Lwt.return_unit let test_trees {genesis = ctxt; _} = let open Lwt_syntax in let* () = Context.Tree.fold ~depth:(`Eq 1) ~order:`Sorted ~init:() (Context.Tree.empty ctxt) [] ~f:(fun k _ () -> assert (Compare.List_length_with.(k = 1)) ; Assert.fail_msg "empty") in let foo1 = Bytes.of_string "foo1" in let foo2 = Bytes.of_string "foo2" in Context.Tree.empty ctxt |> fun v1 -> let* v1 = Context.Tree.add v1 ["foo"; "toto"] foo1 in let* v1 = Context.Tree.add v1 ["foo"; "bar"; "toto"] foo2 in let fold depth ecs ens = let* cs, ns = Context.Tree.fold v1 ?depth [] ~f:(fun path tree (cs, ns) -> match Context.Tree.kind tree with | `Value -> Lwt.return (path :: cs, ns) | `Tree -> Lwt.return (cs, path :: ns)) ~order:`Sorted ~init:([], []) in Assert.String.List_list.equal ~loc:__LOC__ ecs cs ; Assert.String.List_list.equal ~loc:__LOC__ ens ns ; Lwt.return () in let* () = fold None [["foo"; "toto"]; ["foo"; "bar"; "toto"]] [["foo"; "bar"]; ["foo"]; []] in let* () = fold (Some (`Eq 0)) [] [[]] in let* () = fold (Some (`Eq 1)) [] [["foo"]] in let* () = fold (Some (`Eq 2)) [["foo"; "toto"]] [["foo"; "bar"]] in let* () = fold (Some (`Lt 2)) [] [["foo"]; []] in let* () = fold (Some (`Le 2)) [["foo"; "toto"]] [["foo"; "bar"]; ["foo"]; []] in let* () = fold (Some (`Ge 2)) [["foo"; "toto"]; ["foo"; "bar"; "toto"]] [["foo"; "bar"]] in let* () = fold (Some (`Gt 2)) [["foo"; "bar"; "toto"]] [] in let* v1 = Context.Tree.remove v1 ["foo"; "bar"; "toto"] in let* v = Context.Tree.find v1 ["foo"; "bar"; "toto"] in Assert.Bytes.Option.equal ~loc:__LOC__ None v ; let* v = Context.Tree.find v1 ["foo"; "toto"] in Assert.Bytes.Option.equal ~loc:__LOC__ (Some foo1) v ; Context.Tree.empty ctxt |> fun v1 -> let* v1 = Context.Tree.add v1 ["foo"; "1"] foo1 in let* v1 = Context.Tree.add v1 ["foo"; "2"] foo2 in let* v1 = Context.Tree.remove v1 ["foo"; "1"] in let* v1 = Context.Tree.remove v1 ["foo"; "2"] in let* v = Context.Tree.find v1 ["foo"; "1"] in Assert.Bytes.Option.equal ~loc:__LOC__ None v ; let* v1 = Context.Tree.remove v1 [] in Assert.Bool.equal ~loc:__LOC__ true (Context.Tree.is_empty v1) ; Lwt.return () (* We now test the [keys] function. * * These tests are important for [Test_mem_context_array_theory] that * relies on this function. We don't want the tests of [keys] to be * in [Test_mem_context_array_theory] because it uses [QCheck2]. * * We need [keys] to be correct, because it's at the core of checking * the second axiom of array theory in [Test_mem_context_array_theory]. *) module StringListOrd : Stdlib.Set.OrderedType with type t = string list = struct type t = string list let compare = Stdlib.compare end module StringListSet = Set.Make (StringListOrd) module PP = struct let key ppf k = let atom_pp fmt s = Format.fprintf fmt "%s" s in Format.pp_print_list atom_pp ppf ~pp_sep:(fun fmt () -> Format.fprintf fmt "->") k let domain ppf d = let l = StringListSet.elements d in Format.pp_print_list key ppf ~pp_sep:(fun fmt () -> Format.fprintf fmt "; ") l let domain ppf d = Format.fprintf ppf "[%a]" domain d end let check_eq_domains d1 d2 = Assert.equal ~eq:StringListSet.equal ~pp:PP.domain d1 d2 let test_domain0 () = let open Lwt_syntax in let b0 = Bytes.of_string "0" in let k1 = ["a"] in let k2 = ["b"] in let k3 = ["c"] in let ctxt = Tezos_protocol_environment.Memory_context.empty in let* ctxt = Context.add ctxt k1 b0 in let* ctxt = Context.add ctxt k2 b0 in let* ctxt = Context.add ctxt k3 b0 in let expected_domain = [k1; k2; k3] |> StringListSet.of_list in let* actual_domain = Test_mem_context_common.domain ctxt in let actual_domain = StringListSet.of_list actual_domain in check_eq_domains expected_domain actual_domain ; Lwt.return_unit let test_domain1 () = let open Lwt_syntax in let b0 = Bytes.of_string "0" in let k1 = ["a"; "b"] in let k2 = ["a"; "c"; "d"] in let ctxt = Tezos_protocol_environment.Memory_context.empty in let* ctxt = Context.add ctxt k1 b0 in let* ctxt = Context.add ctxt k2 b0 in let expected_domain = [k1; k2] |> StringListSet.of_list in let* actual_domain = Test_mem_context_common.domain ctxt in let actual_domain = StringListSet.of_list actual_domain in check_eq_domains expected_domain actual_domain ; Lwt.return_unit let test_domain2 () = let open Lwt_syntax in let b0 = Bytes.of_string "0" in let k1 = ["a"; "b"] in let k2 = ["a"; "c"; "d"] in let k3 = ["a"; "c"; "e"] in let k4 = ["x"] in let ctxt = Tezos_protocol_environment.Memory_context.empty in let* ctxt = Context.add ctxt k1 b0 in let* ctxt = Context.add ctxt k2 b0 in let* ctxt = Context.add ctxt k3 b0 in let* ctxt = Context.add ctxt k4 b0 in let expected_domain = [k1; k2; k3; k4] |> StringListSet.of_list in let* actual_domain = Test_mem_context_common.domain ctxt in let actual_domain = StringListSet.of_list actual_domain in check_eq_domains expected_domain actual_domain ; Lwt.return_unit (******************************************************************************) let tests = [ ("simple", test_simple); ("continuation", test_continuation); ("fork", test_fork); ("replay", test_replay); ("fold_keys", test_fold_keys); ("fold", test_fold); ("fold order", test_fold_order); ("trees", test_trees); ] let domain_tests = [ ("domain0", test_domain0); ("domain1", test_domain1); ("domain2", test_domain2); ] let tests = List.map (fun (n, f) -> Alcotest_lwt.test_case n `Quick (wrap_context_init f)) tests @ List.map (fun (n, f) -> Alcotest_lwt.test_case n `Quick (fun _ _ -> f ())) domain_tests let () = Alcotest_lwt.run "tezos-shell-context" [("mem_context", tests)] |> Lwt_main.run
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dune
(library (name fileutils_str) (public_name fileutils.str) (wrapped false) (libraries fileutils str))
common.ml
open Ppxlib.Ast_helper open Ppxlib.Parsetree (** Lang utilities *) type lang = Html | Svg type name = lang * string let html_implementation = ref "Html" let svg_implementation = ref "Svg" let implemenentation_ref = function | Html -> html_implementation | Svg -> svg_implementation let set_implementation lang s = (implemenentation_ref lang) := s let implementation lang = !(implemenentation_ref lang) let lang = function | Html -> "HTML" | Svg -> "SVG" let make_lid ~loc i s = { txt = (Longident.parse @@ implementation i ^ "." ^ s); loc } let make ~loc i s = Exp.ident ~loc @@ make_lid ~loc i s (** Generic *) let find f l = try Some (List.find f l) with Not_found -> None let error loc ppf = (* Originally written by @Drup in 24d87befcc505a9e3a1b081849b12560ce38028f. *) (* We use a custom implementation because the type of Location.raise_errorf changed in 4.03 *) let buf = Buffer.create 17 in let fmt = Format.formatter_of_buffer buf in Format.kfprintf (fun _ -> Format.pp_print_flush fmt (); Location.raise_errorf ~loc "%s@." (Buffer.contents buf)) fmt ppf (** Ast manipulation *) let int loc = Ast_builder.Default.eint ~loc let float loc fl = Ast_builder.Default.efloat ~loc @@ string_of_float fl let string loc = Ast_builder.Default.estring ~loc let add_constraints ~list lang e = let loc = {e.pexp_loc with loc_ghost = true} in let elt = make_lid ~loc lang "elt" in let wrap = if list then make_lid ~loc lang "list_wrap" else make_lid ~loc lang "wrap" in let ty = Typ.(constr ~loc wrap [ constr ~loc elt [any ~loc ()]]) in Exp.constraint_ ~loc e ty type 'a value = | Val of 'a | Antiquot of expression let value x = Val x let antiquot e = Antiquot e let map_value f = function | Val x -> Val (f x) | Antiquot x -> Antiquot x let list_gen cons append nil l = let f acc = function | Val x -> cons acc x | Antiquot e -> append acc e in (l |> List.rev |> List.fold_left f nil) let list loc l = let nil = [%expr []][@metaloc loc] in let cons acc x = [%expr [%e x]::[%e acc]][@metaloc loc] in let append acc x = [%expr [%e x]@[%e acc]][@metaloc loc] in list_gen cons append nil @@ List.map (fun x -> Val x) l let list_wrap_value lang loc = let (!!) = make ~loc lang in let nil = [%expr [%e !!"Xml.W.nil"] ()] [@metaloc loc] in let cons acc x = [%expr [%e !!"Xml.W.cons"] ([%e !!"Xml.W.return"] [%e x]) [%e acc] ][@metaloc loc] in let append acc x = [%expr [%e !!"Xml.W.append"] [%e add_constraints ~list:true lang x] [%e acc] ][@metaloc loc] in list_gen cons append nil let list_wrap lang loc l = list_wrap_value lang loc @@ List.map (fun x -> Val x) l let wrap implementation loc e = [%expr [%e make ~loc implementation "Xml.W.return"] [%e e]] [@metaloc loc] let wrap_value lang loc = function | Val x -> wrap lang loc x | Antiquot e -> add_constraints ~list:false lang e let txt ~loc ~lang s = let txt = make ~loc lang "txt" in let arg = wrap lang loc @@ string loc s in Ast_helper.Exp.apply ~loc txt [Nolabel, arg]
(* TyXML * http://www.ocsigen.org/tyxml * Copyright (C) 2016 Anton Bachin * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, with linking exception; * either version 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Suite 500, Boston, MA 02110-1301, USA. *)
expr.ml
open Misc;; open Namespace;; let ( =%= ) = ( = );; let ( = ) = ();; type expr = | Evar of string * private_info | Emeta of expr * private_info | Eapp of string * expr list * private_info | Enot of expr * private_info | Eand of expr * expr * private_info | Eor of expr * expr * private_info | Eimply of expr * expr * private_info | Eequiv of expr * expr * private_info | Etrue | Efalse | Eall of expr * string * expr * private_info | Eex of expr * string * expr * private_info | Etau of expr * string * expr * private_info | Elam of expr * string * expr * private_info and private_info = { hash : int; skel_hash : int; free_vars : string list; size : int; taus : int; (* depth of tau nesting *) metas : expr list; };; type definition = | DefReal of string * string * expr list * expr * string option | DefPseudo of (expr * int) * string * expr list * expr | DefRec of expr * string * expr list * expr ;; exception Higher_order;; (************************) (* small sets of formulas (represented as lists) *) let rec diff l1 l2 = match l1 with | [] -> [] | e::t -> if List.exists ((==) e) l2 then diff t l2 else e :: (diff t l2) ;; let union l1 l2 = (diff l1 l2) @@ l2;; let rec disjoint l1 l2 = match l1 with | [] -> true | h::t -> if List.exists ((==) h) l2 then false else disjoint t l2 ;; (*******************) let k_true = 0xb063cd7 and k_false = 0xd5ab9f0 and k_meta = 0x33d092c and k_app = 0x33b9c25 and k_not = 0x7c3e7d2 and k_and = 0xccdc15b and k_or = 0x49b55b9 and k_imply = 0x7ebfa6f and k_equiv = 0xb0f18f7 and k_all = 0xfb437ff and k_ex = 0x0716b52 and k_tau = 0x4ae7fad and k_lam = 0x24adcb3 ;; let mkpriv skel fv sz taus metas = { hash = Hashtbl.hash (skel, fv); skel_hash = skel; free_vars = fv; size = sz; taus = taus; metas = metas; };; let priv_true = mkpriv k_true [] 1 0 [];; let priv_false = mkpriv k_false [] 1 0 [];; let get_priv = function | Evar (_, h) -> h | Emeta (_, h) -> h | Eapp (_, _, h) -> h | Enot (_, h) -> h | Eand (_, _, h) -> h | Eor (_, _, h) -> h | Eimply (_, _, h) -> h | Eequiv (_, _, h) -> h | Etrue -> priv_true | Efalse -> priv_false | Eall (_, _, _, h) -> h | Eex (_, _, _, h) -> h | Etau (_, _, _, h) -> h | Elam (_, _, _, h) -> h ;; let get_hash e = (get_priv e).hash;; let get_skel e = (get_priv e).skel_hash;; let get_fv e = (get_priv e).free_vars;; let get_size e = (get_priv e).size;; let get_taus e = (get_priv e).taus;; let get_metas e = (get_priv e).metas;; let rec str_union l1 l2 = match l1, l2 with | [], _ -> l2 | _, [] -> l1 | h::t, _ when List.exists ((=%=) h) l2 -> str_union t l2 | h::t, _ -> str_union t (h :: l2) ;; let rec remove x l = match x, l with | _, [] -> [] | Evar (v, _), h::t when v =%= h -> t | _, h::t -> h :: (remove x t) ;; let combine x y = x + y * 131 + 1;; let priv_var s = mkpriv 0 [s] 1 0 [];; let priv_meta e = mkpriv (combine k_meta (get_skel e)) [] 1 0 [e] ;; let priv_app s args = let comb_skel accu e = combine (get_skel e) accu in let skel = combine k_app (List.fold_left comb_skel (Hashtbl.hash s) args) in let fv = List.fold_left (fun a e -> str_union a (get_fv e)) [] args in let sz = List.fold_left (fun a e -> a + get_size e) 1 args in let taus = List.fold_left (fun a e -> max (get_taus e) a) 0 args in let metas = List.fold_left (fun a e -> union (get_metas e) a) [] args in mkpriv skel fv sz taus metas ;; let priv_not e = mkpriv (combine k_not (get_skel e)) (get_fv e) (get_size e + 1) (get_taus e) (get_metas e) ;; let priv_and e1 e2 = mkpriv (combine k_and (combine (get_skel e1) (get_skel e2))) (str_union (get_fv e1) (get_fv e2)) (get_size e1 + get_size e2 + 1) (max (get_taus e1) (get_taus e2)) (union (get_metas e1) (get_metas e2)) ;; let priv_or e1 e2 = mkpriv (combine k_or (combine (get_skel e1) (get_skel e2))) (str_union (get_fv e1) (get_fv e2)) (get_size e1 + get_size e2 + 1) (max (get_taus e1) (get_taus e2)) (union (get_metas e1) (get_metas e2)) ;; let priv_imply e1 e2 = mkpriv (combine k_imply (combine (get_skel e1) (get_skel e2))) (str_union (get_fv e1) (get_fv e2)) (get_size e1 + get_size e2 + 1) (max (get_taus e1) (get_taus e2)) (union (get_metas e1) (get_metas e2)) ;; let priv_equiv e1 e2 = mkpriv (combine k_equiv (combine (get_skel e1) (get_skel e2))) (str_union (get_fv e1) (get_fv e2)) (get_size e1 + get_size e2 + 1) (max (get_taus e1) (get_taus e2)) (union (get_metas e1) (get_metas e2)) ;; let priv_all v t e = mkpriv (combine k_all (combine (Hashtbl.hash t) (get_skel e))) (remove v (get_fv e)) (1 + get_size e) (get_taus e) (get_metas e) ;; let priv_ex v t e = mkpriv (combine k_ex (combine (Hashtbl.hash t) (get_skel e))) (remove v (get_fv e)) (1 + get_size e) (get_taus e) (get_metas e) ;; let priv_tau v t e = mkpriv (combine k_tau (combine (Hashtbl.hash t) (get_skel e))) (remove v (get_fv e)) 1 (1 + get_taus e) (get_metas e) ;; let priv_lam v t e = mkpriv (combine k_lam (combine (Hashtbl.hash t) (get_skel e))) (remove v (get_fv e)) 1 (get_taus e) (get_metas e) ;; module HashedExpr = struct type t = expr;; let hash = get_hash;; type binding = Bound of int | Free of expr;; let get_binding env v = let rec index i v env = match env with | x :: _ when x == v -> Bound i | _ :: t -> index (i+1) v t | [] -> Free v in index 0 v env ;; let same_binding env1 v1 env2 v2 = match (get_binding env1 v1), (get_binding env2 v2) with | Bound i1, Bound i2 -> i1 == i2 | Free w1, Free w2 -> w1 == w2 | _, _ -> false ;; let var_name v = match v with | Evar (name, _) -> name | _ -> assert false ;; let intersects env l = let eq x e = match e with Evar (s, _) -> s =%= x | _ -> assert false in List.exists (fun v -> List.exists (eq v) env) l ;; let rec equal_in_env env1 env2 e1 e2 = let m1 = intersects env1 (get_fv e1) in let m2 = intersects env2 (get_fv e2) in not m1 && not m2 && e1 == e2 || m1 && m2 && begin match e1, e2 with | Evar _, Evar _ -> same_binding env1 e1 env2 e2 | Emeta (n1, _), Emeta (n2, _) -> n1 == n2 | Eapp (sym1, args1, _), Eapp (sym2, args2, _) -> sym1 =%= sym2 && List.length args1 =%= List.length args2 && List.for_all2 (equal_in_env env1 env2) args1 args2 | Enot (f1, _), Enot (f2, _) -> equal_in_env env1 env2 f1 f2 | Eand (f1, g1, _), Eand (f2, g2, _) | Eor (f1, g1, _), Eor (f2, g2, _) | Eimply (f1, g1, _), Eimply (f2, g2, _) | Eequiv (f1, g1, _), Eequiv (f2, g2, _) -> equal_in_env env1 env2 f1 f2 && equal_in_env env1 env2 g1 g2 | Efalse, Efalse | Etrue, Etrue -> true | Eall (v1, t1, f1, _), Eall (v2, t2, f2, _) | Eex (v1, t1, f1, _), Eex (v2, t2, f2, _) | Etau (v1, t1, f1, _), Etau (v2, t2, f2, _) | Elam (v1, t1, f1, _), Elam (v2, t2, f2, _) -> (List.mem (var_name v1) (get_fv f1)) =%= (List.mem (var_name v2) (get_fv f2)) && equal_in_env (v1::env1) (v2::env2) f1 f2 | _, _ -> false end ;; let equal_in_env1 v1 v2 f1 f2 = let m1 = List.mem (var_name v1) (get_fv f1) in let m2 = List.mem (var_name v2) (get_fv f2) in not m1 && not m2 && f1 == f2 || m1 && m2 && equal_in_env [v1] [v2] f1 f2 ;; let equal e1 e2 = match e1, e2 with | Evar (v1, _), Evar (v2, _) -> v1 =%= v2 | Emeta (f1, _), Emeta (f2, _) -> f1 == f2 | Eapp (sym1, args1, _), Eapp (sym2, args2, _) -> sym1 =%= sym2 && List.length args1 =%= List.length args2 && List.for_all2 (==) args1 args2 | Enot (f1, _), Enot (f2, _) -> f1 == f2 | Eand (f1, g1, _), Eand (f2, g2, _) | Eor (f1, g1, _), Eor (f2, g2, _) | Eimply (f1, g1, _), Eimply (f2, g2, _) | Eequiv (f1, g1, _), Eequiv (f2, g2, _) -> f1 == f2 && g1 == g2 | Eall (v1, t1, f1, _), Eall (v2, t2, f2, _) | Eex (v1, t1, f1, _), Eex (v2, t2, f2, _) | Etau (v1, t1, f1, _), Etau (v2, t2, f2, _) | Elam (v1, t1, f1, _), Elam (v2, t2, f2, _) when t1 =%= t2 && v1 == v2 -> f1 == f2 | Eall (v1, t1, f1, _), Eall (v2, t2, f2, _) | Eex (v1, t1, f1, _), Eex (v2, t2, f2, _) | Etau (v1, t1, f1, _), Etau (v2, t2, f2, _) | Elam (v1, t1, f1, _), Elam (v2, t2, f2, _) -> t1 =%= t2 && equal_in_env1 v1 v2 f1 f2 | _, _ -> false ;; end;; (* Weak table version *) module HE = Weak.Make (HashedExpr);; let tbl = HE.create 999997;; let he_merge k = try HE.find tbl k with Not_found -> incr Globals.num_expr; HE.add tbl k; k ;; let print_stats oc = let (tbllen, entries, bucklen, least, median, largest) = HE.stats tbl in Printf.fprintf oc "tbl:%d ent:%d buc:%d sml:%d med:%d lrg:%d\n" tbllen entries bucklen least median largest ;; (* Normal table version (faster but uses more memory) *) (* module HE = Hashtbl.Make (HashedExpr);; let tbl = HE.create 999997;; let he_merge k = try HE.find tbl k with Not_found -> incr Globals.num_expr; HE.add tbl k k; k ;; *) let evar (s) = he_merge (Evar (s, priv_var s));; let emeta (e) = he_merge (Emeta (e, priv_meta e));; let eapp (s, args) = he_merge (Eapp (s, args, priv_app s args));; let enot (e) = he_merge (Enot (e, priv_not e));; let eand (e1, e2) = he_merge (Eand (e1, e2, priv_and e1 e2));; let eor (e1, e2) = he_merge (Eor (e1, e2, priv_or e1 e2));; let eimply (e1, e2) = he_merge (Eimply (e1, e2, priv_imply e1 e2));; let etrue = Etrue;; let efalse = Efalse;; let eequiv (e1, e2) = he_merge (Eequiv (e1, e2, priv_equiv e1 e2));; let eall (v, t, e) = he_merge (Eall (v, t, e, priv_all v t e));; let eex (v, t, e) = he_merge (Eex (v, t, e, priv_ex v t e));; let etau (v, t, e) = he_merge (Etau (v, t, e, priv_tau v t e));; let elam (v, t, e) = he_merge (Elam (v, t, e, priv_lam v t e));; let rec all_list vs body = match vs with | [] -> body | h::t -> eall (h, "", all_list t body) ;; let rec ex_list vs body = match vs with | [] -> body | h::t -> eex (h, "", ex_list t body) ;; type t = expr;; let hash = get_hash;; let equal = (==);; let compare x y = match compare (hash x) (hash y) with | 0 -> if equal x y then 0 else Stdlib.compare x y | x when x < 0 -> -1 | _ -> 1 ;; (************************) exception Mismatch;; let rec xpreunify accu e1 e2 = match e1, e2 with | _, _ when e1 == e2 -> accu | Eapp (s1, a1, _), Eapp (s2, a2, _) when s1 =%= s2 -> List.fold_left2 xpreunify accu a1 a2 | Emeta (m1, _), _ -> (m1, e2) :: accu | _, Emeta (m2, _) -> (m2, e1) :: accu | _, _ -> raise Mismatch ;; let preunify e1 e2 = try xpreunify [] e1 e2 with Mismatch -> [] ;; let preunifiable e1 e2 = try ignore (xpreunify [] e1 e2); true with Mismatch -> false ;; let occurs_as_meta e f = List.exists ((==) e) (get_metas f);; let size = get_size;; let has_metas e = get_metas e <> [];; let count_metas e = List.length (get_metas e);; let cursym = ref (Bytes.of_string var_prefix);; let rec incr_sym n = if n >= Bytes.length !cursym then cursym := Bytes.cat !cursym (Bytes.of_string "a") else match Bytes.get !cursym n with | 'z' -> Bytes.set !cursym n 'a'; incr_sym (n+1) | c -> Bytes.set !cursym n (Char.chr (1 + Char.code c)) ;; let newname () = incr_sym (String.length var_prefix); Bytes.to_string !cursym ;; let newvar () = evar (newname ());; let rec rm_binding v map = match map with | [] -> [] | (w, _) :: t when w == v -> t | h :: t -> h :: (rm_binding v t) ;; let conflict v map = match v with | Evar (vv, _) -> List.exists (fun (w, e) -> List.mem vv (get_fv e)) map | _ -> assert false ;; let disj vars map = let diff_var v e = match e with | Evar (w, _), _ -> not (v =%= w) | _ -> assert false in let irrelevant v = List.for_all (diff_var v) map in List.for_all irrelevant vars ;; let rec substitute map e = match e with | _ when disj (get_fv e) map -> e | Evar (v, _) -> (try List.assq e map with Not_found -> e) | Emeta _ -> e | Eapp (s, args, _) -> eapp (s, List.map (substitute map) args) | Enot (f, _) -> enot (substitute map f) | Eand (f, g, _) -> eand (substitute map f, substitute map g) | Eor (f, g, _) -> eor (substitute map f, substitute map g) | Eimply (f, g, _) -> eimply (substitute map f, substitute map g) | Eequiv (f, g, _) -> eequiv (substitute map f, substitute map g) | Etrue | Efalse -> e | Eall (v, t, f, _) -> let map1 = rm_binding v map in if conflict v map1 then let nv = newvar () in eall (nv, t, substitute ((v, nv) :: map1) f) else eall (v, t, substitute map1 f) | Eex (v, t, f, _) -> let map1 = rm_binding v map in if conflict v map1 then let nv = newvar () in eex (nv, t, substitute ((v, nv) :: map1) f) else eex (v, t, substitute map1 f) | Etau (v, t, f, _) -> let map1 = rm_binding v map in if conflict v map1 then let nv = newvar () in etau (nv, t, substitute ((v, nv) :: map1) f) else etau (v, t, substitute map1 f) | Elam (v, t, f, _) -> let map1 = rm_binding v map in if conflict v map1 then let nv = newvar () in elam (nv, t, substitute ((v, nv) :: map1) f) else elam (v, t, substitute map1 f) ;; let rec substitute_2nd map e = match e with | Evar (v, _) -> (try List.assq e map with Not_found -> e) | Emeta _ -> e | Eapp (s, args, _) -> let acts = List.map (substitute_2nd map) args in begin try let lam = List.assq (evar s) map in match lam, acts with | Elam (v, _, body, _), [a] -> substitute [(v,a)] body | Evar (v, _), _ -> eapp (v, acts) | Eapp (s1, args1, _), _ -> eapp (s1, args1 @ acts) | _ -> raise Higher_order with Not_found -> eapp (s, acts) end | Enot (f, _) -> enot (substitute_2nd map f) | Eand (f, g, _) -> eand (substitute_2nd map f, substitute_2nd map g) | Eor (f, g, _) -> eor (substitute_2nd map f, substitute_2nd map g) | Eimply (f, g, _) -> eimply (substitute_2nd map f, substitute_2nd map g) | Eequiv (f, g, _) -> eequiv (substitute_2nd map f, substitute_2nd map g) | Etrue | Efalse -> e | Eall (v, t, f, _) -> let map1 = rm_binding v map in if conflict v map1 then let nv = newvar () in eall (nv, t, substitute_2nd ((v, nv) :: map1) f) else eall (v, t, substitute_2nd map1 f) | Eex (v, t, f, _) -> let map1 = rm_binding v map in if conflict v map1 then let nv = newvar () in eex (nv, t, substitute_2nd ((v, nv) :: map1) f) else eex (v, t, substitute_2nd map1 f) | Etau (v, t, f, _) -> let map1 = rm_binding v map in if conflict v map1 then let nv = newvar () in etau (nv, t, substitute_2nd ((v, nv) :: map1) f) else etau (v, t, substitute_2nd map1 f) | Elam (v, t, f, _) -> let map1 = rm_binding v map in if conflict v map1 then let nv = newvar () in elam (nv, t, substitute_2nd ((v, nv) :: map1) f) else elam (v, t, substitute_2nd map1 f) ;; let apply f a = match f with | Elam (v, _, body, _) -> substitute [(v, a)] body | _ -> raise Higher_order ;; let add_argument f a = match f with | Elam _ -> apply f a | Evar (s, _) -> eapp (s, [a]) | Eapp (s, args, _) -> eapp (s, args @ [a]) | _ -> raise Higher_order ;; let rec remove_scope e = match e with | Eapp ("$scope", e1 :: t :: vals, _) -> remove_scope (apply e1 t) | Eapp (f, args, _) -> e | Enot (e1, _) -> enot (remove_scope e1) | Eand (e1, e2, _) -> eand (remove_scope e1, remove_scope e2) | Eor (e1, e2, _) -> eor (remove_scope e1, remove_scope e2) | Eimply (e1, e2, _) -> eimply (remove_scope e1, remove_scope e2) | Eequiv (e1, e2, _) -> eequiv (remove_scope e1, remove_scope e2) | Eall (v, t, e1, _) -> eall (v, t, remove_scope e1) | Eex (v, t, e1, _) -> eex (v, t, remove_scope e1) | Evar _ | Emeta _ | Etrue | Efalse | Etau _ | Elam _ -> e ;; type goalness = int;;
(* Copyright 2002 INRIA *)
annotated_manager_operation.mli
(** Annotated manager operations are wrappers used to accumulate information (especially about limits) on the operation prior to the injection. *) open Protocol open Alpha_context type _ t = | Manager_info : { source : Alpha_context.public_key_hash option; fee : Tez.t Limit.t; gas_limit : Gas.Arith.integral Limit.t; storage_limit : Z.t Limit.t; counter : Z.t option; operation : 'kind manager_operation; } -> 'kind t type packed = Annotated_manager_operation : 'kind t -> packed (** The [annotated_list] type helps making [contents_list] from a list of [manager_operation]s. Its construction mimics [contents_list] in order to keep consistent types when calling [inject_manager_operation] and [inject_operation].*) type _ annotated_list = | Single_manager : 'kind t -> 'kind annotated_list | Cons_manager : 'kind t * 'rest annotated_list -> ('kind * 'rest) annotated_list type packed_annotated_list = | Manager_list : 'kind annotated_list -> packed_annotated_list (** Convert a list of annotated operations to a list of packed annotated operations *) val manager_to_list : packed_annotated_list -> packed list (** Converse of [manager_to_list] *) val manager_of_list : packed list -> packed_annotated_list (** [join_fee fee op] updates [op.fee] to [Limit.join op.fee fee] and fails if the join fails *) val join_fee : Tez.t Limit.t -> 'a t -> 'a t tzresult (** [set_fee fee op] updates [op.fee] to [fee] *) val set_fee : Tez.t Limit.t -> 'a t -> 'a t (** See [join_fee] *) val join_gas_limit : Gas.Arith.integral Limit.t -> 'a t -> 'a t tzresult (** See [set_fee] *) val set_gas_limit : Gas.Arith.integral Limit.t -> 'a t -> 'a t (** See [join_fee] *) val join_storage_limit : Z.t Limit.t -> 'a t -> 'a t tzresult (** See [set_fee] *) val set_storage_limit : Z.t Limit.t -> 'a t -> 'a t (** Set the counter of the annotated operation. Fail if the counter is already set. *) val set_counter : counter -> 'a t -> 'a t tzresult (** Set the source of the operation. Fail if the source is already set. *) val set_source : public_key_hash -> 'a t -> 'a t tzresult (** Convert an annotated manager operation to a proper manager operation. Fail if some fields in the annotated operation are not set. *) val manager_from_annotated : 'a t -> 'a Kind.manager contents tzresult val manager_list_from_annotated : 'kind annotated_list -> 'kind Kind.manager contents_list tzresult
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2018 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
json_decoder.mli
module Dict : Map.S with type key = string type 'a dict = 'a Dict.t type 'a t type value (* source *) val value_of_string : string -> value val value_to_string : value -> string val value_of_yojson : Yojson.Basic.json -> value val value_to_yojson : value -> Yojson.Basic.json val decode : 'a t -> value -> ('a, string) Result.result val decode_string : 'a t -> string -> ('a, string) Result.result (* Primitives *) val string : string t val float : float t val int : int t val bool : bool t val null : 'a -> 'a t (* Containers *) val list : 'a t -> 'a list t val array : 'a t -> 'a array t val dict : 'a t -> 'a dict t val pairs : 'a t -> (string * 'a) list t (* Selectors *) val (@=) : string -> 'a t -> 'a t val field : string -> 'a t -> 'a t val index : int -> 'a t -> 'a t val at : string list -> 'a t -> 'a t (* inconsistent structure *) val option : 'a t -> 'a option t val one_of : 'a t list -> 'a t val value : value t (* combinators *) val succeed : 'a -> 'a t val fail : string -> 'a t val map : ('a -> 'b) -> 'a t -> 'b t val and_then : ('a -> 'b t) -> 'a t -> 'b t val apply : ('a -> 'b) t -> 'a t -> 'b t val (>>=) : 'a t -> ('a -> 'b t) -> 'b t (* object *) val mapN : 'fn -> 'fn t val (||>) : ('a -> 'b) t -> 'a t -> 'b t
dtd.mli
open Xml_light_types open Xml_light_errors (** Xml Light DTD This module provide several functions to create, check, and use DTD to prove Xml documents : {ul {li using the DTD types, you can directly create your own DTD structure} {li the {!Dtd.check} function can then be used to check that all DTD states have been declared, that no attributes are declared twice, and so on.} {li the {!Dtd.prove} function can be used to check an {!Xml} data structure with a checked DTD. The function will return the expanded Xml document or raise an exception if the DTD proving fails.} } {i Note about ENTITIES:} While parsing Xml, PCDATA is always parsed and the Xml entities &amp; &gt; &lt; &apos; &quot; are replaced by their corresponding ASCII characters. For Xml attributes, theses can be put between either double or simple quotes, and the backslash character can be used to escape inner quotes. There is no support for CDATA Xml nodes or PCDATA attributes declarations in DTD, and no support for user-defined entities using the ENTITY DTD element. *) (** {6 The DTD Types} *) type dtd_child = Xml_light_types.dtd_child = | DTDTag of string | DTDPCData | DTDOptional of dtd_child | DTDZeroOrMore of dtd_child | DTDOneOrMore of dtd_child | DTDChoice of dtd_child list | DTDChildren of dtd_child list type dtd_element_type = Xml_light_types.dtd_element_type = | DTDEmpty | DTDAny | DTDChild of dtd_child type dtd_attr_default = Xml_light_types.dtd_attr_default = | DTDDefault of string | DTDRequired | DTDImplied | DTDFixed of string type dtd_attr_type = Xml_light_types.dtd_attr_type = | DTDCData | DTDNMToken | DTDEnum of string list | DTDID | DTDIDRef type dtd_item = Xml_light_types.dtd_item = | DTDAttribute of string * string * dtd_attr_type * dtd_attr_default | DTDElement of string * dtd_element_type type dtd = dtd_item list type checked = Xml_light_dtd_check.checked (** {6 The DTD Functions} *) (** Parse the named file into a Dtd data structure. Raise {!Xml.File_not_found} if an error occured while opening the file. Raise {!Dtd.Parse_error} if parsing failed. *) val parse_file : string -> dtd (** Read the content of the in_channel and parse it into a Dtd data structure. Raise {!Dtd.Parse_error} if parsing failed. *) val parse_in : in_channel -> dtd (** Parse the string containing a Dtd document into a Dtd data structure. Raise {!Dtd.Parse_error} if parsing failed. *) val parse_string : string -> dtd (** Check the Dtd data structure declaration and return a checked DTD. Raise {!Dtd.Check_error} if the DTD checking failed. *) val check : dtd -> checked (** Prove an Xml document using a checked DTD and an entry point. The entry point is the first excepted tag of the Xml document, the returned Xml document has the same structure has the original one, excepted that non declared optional attributes have been set to their default value specified in the DTD. Raise {!Dtd.Check_error} [ElementNotDeclared] if the entry point is not found, raise {!Dtd.Prove_error} if the Xml document failed to be proved with the DTD. *) val prove : checked -> string -> xml -> xml (** Print a DTD element into a string. You can easily get a DTD document from a DTD data structure using for example [String.concat "\n" (List.map Dtd.to_string) my_dtd] *) val to_string : dtd_item -> string (** {6 The DTD Exceptions} *) (** There is three types of DTD excecptions : {ul {li {!Dtd.Parse_error} is raised when an error occured while parsing a DTD document into a DTD data structure.} {li {!Dtd.Check_error} is raised when an error occured while checking a DTD data structure for completeness, or when the prove entry point is not found when calling {!Dtd.prove}.} {li {!Dtd.Prove_error} is raised when an error occured while proving an Xml document.} } Several string conversion functions are provided to enable you to report errors to the user. *) type parse_error_msg = Xml_light_errors.dtd_parse_error_msg = | InvalidDTDDecl | InvalidDTDElement | InvalidDTDAttribute | InvalidDTDTag | DTDItemExpected type check_error = Xml_light_errors.dtd_check_error = | ElementDefinedTwice of string | AttributeDefinedTwice of string * string | ElementEmptyContructor of string | ElementReferenced of string * string | ElementNotDeclared of string | WrongImplicitValueForID of string * string type prove_error = Xml_light_errors.dtd_prove_error = | UnexpectedPCData | UnexpectedTag of string | UnexpectedAttribute of string | InvalidAttributeValue of string | RequiredAttribute of string | ChildExpected of string | EmptyExpected | DuplicateID of string | MissingID of string type parse_error = parse_error_msg * error_pos exception Parse_error of parse_error exception Check_error of check_error exception Prove_error of prove_error val parse_error : parse_error -> string val check_error : check_error -> string val prove_error : prove_error -> string
(* * Xml Light, an small Xml parser/printer with DTD support. * Copyright (C) 2003 Nicolas Cannasse (ncannasse@motion-twin.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library has the special exception on linking described in file * README. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301 USA *)
dune
(executable (name foo) (modules foo)) (executable (name bar) (modules bar))
isolation.mli
(** Database transaction isolation levels *) type t = | Serializable | Repeatable_read | Read_committed | Read_uncommitted [@@deriving sexp] val to_string : t -> string
(** Database transaction isolation levels *) type t =
uncurry.ml
open Types let rec is_unit (t : Types.type_expr) = match t.desc with | Tconstr (p, _, _) -> Path.same p Predef.path_unit | Tlink t' -> is_unit (Btype.repr t') | _ -> false let drop_last_unit acc = match acc with [ x ] when is_unit x -> [] | _ -> acc let rec uncurry_tarrow type_expr acc = match type_expr.desc with | Tarrow (_, param, out, _) -> uncurry_tarrow out (param :: acc) | Tlink t -> uncurry_tarrow (Btype.repr t) acc | _ -> let acc = drop_last_unit acc in (List.rev acc, type_expr) let rec from_type_expr : Types.type_expr -> [ `Uncurried of Types.type_expr list * Types.type_expr | `Not_a_function ] = fun type_expr -> match type_expr.desc with | Tarrow (_, _, _, _) -> `Uncurried (uncurry_tarrow type_expr []) | Tlink t -> from_type_expr (Btype.repr t) | _ -> `Not_a_function
wsCommon.ml
open Lwt.Infix open Websocket.Frame open EzAPIServerUtils.Directory let ws_react ?onclose f pong rsend fr = match fr.opcode with | Opcode.Ping -> !rsend @@ Some (create ~opcode:Opcode.Pong ~content:fr.content ()) | Opcode.Close -> if String.length fr.content >= 2 then let content = String.sub fr.content 0 2 in !rsend @@ Some (create ~opcode:Opcode.Close ~content ()) else !rsend @@ Some (close 1000); (match onclose with | None -> () | Some f -> Lwt.async f) | Opcode.Pong -> pong fr.content | Opcode.Text | Opcode.Binary -> Lwt.async (fun () -> f fr.content >|= function | Ok `none -> !rsend None | Ok (`binary content) -> !rsend @@ Some (create ~opcode:Opcode.Binary ~content ()) | Ok (`text content) -> !rsend @@ Some (create ~opcode:Opcode.Text ~content ()) | Error (`handler_error content) -> !rsend @@ Some (create ~opcode:Opcode.Text ~content ()) | Error _ -> !rsend @@ Some (close 1011)) | _ -> !rsend @@ Some (close 1002) let ws_loop bg send = let send : (ws_frame, handler_error) result -> unit = function | Error _ -> send (Some (close 1000)) | Ok `none -> send None | Ok (`binary content) -> send @@ Some (create ~opcode:Opcode.Binary ~content ()) | Ok (`text content) -> send @@ Some (create ~opcode:Opcode.Text ~content ()) in bg send let ping_table : (string, CalendarLib.Fcalendar.Precise.t) Hashtbl.t = Hashtbl.create 1024 let check_ping ?(step=30.) id key = let open CalendarLib.Fcalendar.Precise in match Hashtbl.find_opt ping_table (id ^ key) with | None -> false | Some t -> let step = Period.second (Time.Second.from_float step) in let now = now () in if compare (add t step) now < 0 then ( Hashtbl.remove ping_table (id ^ key); false) else true let ping_pong ?(step=30.) id rsend = let id_str = Uuidm.to_string id in let content = string_of_int @@ Random.int 1_000_000_000 in let rec loop () = !rsend (Some (create ~opcode:Opcode.Ping ~content ())); EzLwtSys.sleep (step /. 2.) >>= fun () -> if check_ping ~step id_str content then EzLwtSys.sleep (step /. 2.) >>= fun () -> loop () else Lwt.return_unit in let fill content = let now = CalendarLib.Fcalendar.Precise.now () in Hashtbl.replace ping_table (id_str ^ content) now in loop, fill let close send = send (Some (close 1000))
(**************************************************************************) (* *) (* Copyright 2018-2023 OCamlPro *) (* *) (* All rights reserved. This file is distributed under the terms of the *) (* GNU Lesser General Public License version 2.1, with the special *) (* exception on linking described in the file LICENSE. *) (* *) (**************************************************************************)
dune
(coq.theory (name a) (package csimple))
CArch_litmus.ml
module Make(O:sig val memory : Memory.t val hexa : bool val mode : Mode.t end) = struct module V = Int32Constant.Make(CBase.Instr) module FaultType = FaultType.No module type SmallBase = sig val base_type : CType.t type reg = string type instruction = CBase.instruction val dump_instruction : instruction -> string end include (CBase : SmallBase) module RegSet = StringSet module RegMap = StringMap let vToName = let open Constant in function | Concrete i -> "addr_" ^ V.Scalar.pp O.hexa i | Symbolic (Virtual {name=s; tag=None; cap=0L;_ })-> s | Label _|Symbolic _|Tag _|ConcreteVector _| PteVal _|Instruction _ -> assert false module Internal = struct type arch_reg = reg let pp_reg x = x let reg_compare = String.compare module G = Global_litmus type arch_global = G.t let pp_global = G.pp let global_compare = G.compare let arch = `C end include Location.Make(Internal) let is_pte_loc _ = false let parse_reg x = Some x let reg_compare = Internal.reg_compare type state = (location * V.v) list let debug_state st = String.concat " " (List.map (fun (loc,v) -> Printf.sprintf "<%s -> %s>" (pp_location loc) (V.pp_v v)) st) type fullstate = (location * (TestType.t * V.v)) list module Out = struct module V = V include CTarget include OutUtils.Make(O)(V) let dump_init_val = dump_v end let dump_loc_tag loc = let module G = Global_litmus in match loc with | Location_reg (proc,reg) -> Out.dump_out_reg proc reg | Location_global (G.Addr s) -> s | Location_global (G.Pte s) -> Printf.sprintf "pte_%s" s | Location_global (G.Phy _) -> assert false let dump_rloc_tag = ConstrGen.match_rloc dump_loc_tag (fun loc i -> Printf.sprintf "%s__%02d" (dump_loc_tag loc) i) let location_of_addr a = Location_global (Global_litmus.Addr a) let arch = Internal.arch let rec find_in_state loc = function | [] -> V.zero | (loc2,v)::rem -> if location_compare loc loc2 = 0 then v else find_in_state loc rem let get_label_init _ = [] let pp_reg x = x let rec count_procs = function | CAst.Test _::xs -> 1 + count_procs xs | CAst.Global _::xs -> count_procs xs | [] -> 0 let type_reg r = CBase.type_reg r let features = [] include HardwareExtra.No module GetInstr = GetInstr.No(struct type instr = instruction end) end
(****************************************************************************) (* the diy toolsuite *) (* *) (* Jade Alglave, University College London, UK. *) (* Luc Maranget, INRIA Paris-Rocquencourt, France. *) (* *) (* Copyright 2017-present Institut National de Recherche en Informatique et *) (* en Automatique and the authors. All rights reserved. *) (* *) (* This software is governed by the CeCILL-B license under French law and *) (* abiding by the rules of distribution of free software. You can use, *) (* modify and/ or redistribute the software under the terms of the CeCILL-B *) (* license as circulated by CEA, CNRS and INRIA at the following URL *) (* "http://www.cecill.info". We also give a copy in LICENSE.txt. *) (****************************************************************************)
X86ParseTest.mli
module Make : functor(Conf:RunTest.Config) -> functor(ModelConfig:MemWithCav12.Config) -> sig val run : RunTest.runfun end
(****************************************************************************) (* the diy toolsuite *) (* *) (* Jade Alglave, University College London, UK. *) (* Luc Maranget, INRIA Paris-Rocquencourt, France. *) (* *) (* Copyright 2023-present Institut National de Recherche en Informatique et *) (* en Automatique and the authors. All rights reserved. *) (* *) (* This software is governed by the CeCILL-B license under French law and *) (* abiding by the rules of distribution of free software. You can use, *) (* modify and/ or redistribute the software under the terms of the CeCILL-B *) (* license as circulated by CEA, CNRS and INRIA at the following URL *) (* "http://www.cecill.info". We also give a copy in LICENSE.txt. *) (****************************************************************************)
dune
(library (name async_bus) (public_name async_extra.async_bus) (preprocess (pps ppx_jane)) (libraries async_kernel core_kernel.bus core))
b00_www_browser.mli
(** Web browser interaction. [Browser] shows URIs in the user's browsers. Up to severe platform and browser application limitations it tries to limit the creation of new tabs, reloading existing one which have the same URI or are, if requested, prefixed by the URI. *) open B00_std (** {1:env Environment variables} *) (** Environment variables. *) module Env : sig val browser : string (** [browser] is [BROWSER].*) end (** {1:show Show URIs} *) type t (** The type for specifying a browser. *) val find : ?search:Fpath.t list -> browser:Cmd.t option -> unit -> (t option, string) result (** [find ~search ~browser] tries to find a browser in a rather complex and platform dependent way. *) val show : background:bool -> prefix:bool -> t option -> string -> (unit, string) result (** [show ~background ~prefix browser uri] shows URI using browser [browser] (if [None] an error message is returned mentioning that no browser was found. If [background] is [true] tries to keep the browser application in the background, if [false] brings it in user focus. The function tries to limit the creation of new tabs using the following strategy: {ul {- Repeat from the frontmost browser window to the backmost one until a tab to reload is found: {ol {- If the window's current tab's URI is [uri] (or is prefixed by [uri] when [prefix] is [true]), reload this tab.} {- If the window has one or more tab whose URI is [uri] (or is prefixed by [uri] when [prefix] is [true]), pick the left most one, make it current in the window and reload it.}}} {- If no tab was found, get the frontmost window. If the current tab has no URI, use that tab with [uri] otherwise create a new tab with [uri] and make it current for the window.}} *) (** {1:cli Cli interaction} *) val browser : ?docs:string -> ?opts:string list -> unit -> Cmd.t option Cmdliner.Term.t (** [browser] is an option and [BROWSER] environment variable to use with the [browser] argument of {!find}. [opts] are the cli options and default to [["browser"]]. *) val prefix : ?docs:string -> ?opts:string list -> unit -> bool Cmdliner.Term.t (** [prefix] is option to use the with [prefix] argument of {!val-show}. [opts] are the cli options and default to [["prefix"]]. *) val background : ?docs:string -> ?opts:string list -> unit -> bool Cmdliner.Term.t (** [background] is an option to use with the [background] argument of [!show]. [opts] are the cli options and default to [["background"]] *) (*--------------------------------------------------------------------------- Copyright (c) 2018 The b0 programmers Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. ---------------------------------------------------------------------------*)
(*--------------------------------------------------------------------------- Copyright (c) 2018 The b0 programmers. All rights reserved. Distributed under the ISC license, see terms at the end of the file. ---------------------------------------------------------------------------*)
dict_intf.ml
module type S = sig module Fm : File_manager.S type t val find : t -> int -> string option val index : t -> string -> int option val v : Fm.t -> (t, [> Fm.Io.read_error ]) result val close : t -> unit end module type Sigs = sig module type S = S module Make (Fm : File_manager.S) : S with module Fm = Fm end
(* * Copyright (c) 2018-2022 Tarides <contact@tarides.com> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
test_string.ml
(* String.concat "/" (String.cuts '/' xs) == xs *) let basic_cuts () = let sep = '/' and j = "/" and data = "" in Alcotest.(check string __LOC__ data (String.concat j (Conex_utils.String.cuts sep data))); let data = "foo" in Alcotest.(check string __LOC__ data (String.concat j (Conex_utils.String.cuts sep data))); let data = "///" in Alcotest.(check string __LOC__ data (String.concat j (Conex_utils.String.cuts sep data))); let data = "foo/bar/baz" in Alcotest.(check string __LOC__ data (String.concat j (Conex_utils.String.cuts sep data))); let data = "foo//bar///baz" in Alcotest.(check string __LOC__ data (String.concat j (Conex_utils.String.cuts sep data))); let data = "/foo/bar/baz/" in Alcotest.(check string __LOC__ data (String.concat j (Conex_utils.String.cuts sep data))); let data = "/foo//bar//baz/" in Alcotest.(check string __LOC__ data (String.concat j (Conex_utils.String.cuts sep data))) let tests = [ "basic cuts is good", `Quick, basic_cuts ; ]
realpath.c
#include <limits.h> #include <stdlib.h> #include <caml/mlvalues.h> #include <caml/alloc.h> #include <caml/fail.h> CAMLprim value c_realpath(value v) { // Conversion of the argument to a C value, and performing the C call. const char *input_path = String_val(v); char *output_path = realpath(input_path, NULL); // Checking for error. if (output_path == NULL) caml_invalid_argument("Filename.realpath\0"); // Preparing the result value. value res = caml_copy_string(output_path); // Free the memory allocated by [realpath] before returning. free(output_path); return res; }
targets.ml
open Import (* CR-someday amokhov: Most of these records will have [dir = empty]. We might want to somehow optimise for the common case, e.g. by switching to a sum type with the [Files_only] constructor. It's best not to expose the current representation so we can easily change it in future. *) type t = { files : Path.Build.Set.t ; dirs : Path.Build.Set.t } module File = struct let create file = { files = Path.Build.Set.singleton file; dirs = Path.Build.Set.empty } end module Files = struct let create files = { files; dirs = Path.Build.Set.empty } end let create ~files ~dirs = { files; dirs } let empty = { files = Path.Build.Set.empty; dirs = Path.Build.Set.empty } let combine x y = { files = Path.Build.Set.union x.files y.files ; dirs = Path.Build.Set.union x.dirs y.dirs } let is_empty { files; dirs } = Path.Build.Set.is_empty files && Path.Build.Set.is_empty dirs let head { files; dirs } = match Path.Build.Set.choose files with | Some _ as target -> target | None -> Path.Build.Set.choose dirs let head_exn t = match head t with | Some target -> target | None -> Code_error.raise "Targets.head_exn applied to empty set of targets" [] let to_dyn { files; dirs } = Dyn.Record [ ("files", Path.Build.Set.to_dyn files) ; ("dirs", Path.Build.Set.to_dyn dirs) ] let pp { files; dirs } = Pp.enumerate (Path.Build.Set.to_list files @ Path.Build.Set.to_list dirs) ~f:(fun target -> Pp.text (Dpath.describe_target target)) let exists { files; dirs } ~f = Path.Build.Set.exists files ~f || Path.Build.Set.exists dirs ~f module Validated = struct type nonrec t = t = { files : Path.Build.Set.t ; dirs : Path.Build.Set.t } let to_dyn = to_dyn let head = head_exn end module Validation_result = struct type t = | Valid of { parent_dir : Path.Build.t ; targets : Validated.t } | No_targets | Inconsistent_parent_dir | File_and_directory_target_with_the_same_name of Path.Build.t end let validate t = match is_empty t with | true -> Validation_result.No_targets | false -> ( match Path.Build.Set.inter t.files t.dirs |> Path.Build.Set.choose with | Some path -> File_and_directory_target_with_the_same_name path | None -> ( let parent_dir = Path.Build.parent_exn (head_exn t) in match exists t ~f:(fun path -> Path.Build.(parent_exn path <> parent_dir)) with | true -> Inconsistent_parent_dir | false -> Valid { parent_dir; targets = t })) module Produced = struct (* CR-someday amokhov: A hierarchical representation of the produced file trees may be better. It would allow for hierarchical traversals and reduce the number of internal invariants. *) type 'a t = { files : 'a Path.Build.Map.t ; dirs : 'a String.Map.t Path.Build.Map.t } let of_validated = let rec collect dir : (unit String.Map.t Path.Build.Map.t, _) result = match Path.Untracked.readdir_unsorted_with_kinds (Path.build dir) with | Error e -> Error (`Directory dir, e) | Ok dir_contents -> let open Result.O in let+ filenames, dirs = Result.List.fold_left dir_contents ~init:(String.Map.empty, Path.Build.Map.empty) ~f:(fun (acc_filenames, acc_dirs) (filename, kind) -> match (kind : File_kind.t) with | S_REG -> Ok (String.Map.add_exn acc_filenames filename (), acc_dirs) | S_DIR -> let+ dir = collect (Path.Build.relative dir filename) in (acc_filenames, Path.Build.Map.union_exn acc_dirs dir) | _ -> Ok (acc_filenames, acc_dirs)) in Path.Build.Map.add_exn dirs dir filenames in fun (validated : Validated.t) -> match Path.Build.Set.to_list_map validated.dirs ~f:collect |> Result.List.all with | Error _ as error -> error | Ok dirs -> let files = Path.Build.Set.to_map validated.files ~f:(fun (_ : Path.Build.t) -> ()) in (* The [union_exn] below can't raise because each map in [dirs] contains unique keys, which are paths rooted at the corresponding [dir]s. *) let dirs = List.fold_left dirs ~init:Path.Build.Map.empty ~f:Path.Build.Map.union_exn in Ok { files; dirs } let produced_after_rule_executed_exn ~loc targets = match of_validated targets with | Ok t -> t | Error (`Directory dir, (Unix.ENOENT, _, _)) -> User_error.raise ~loc [ Pp.textf "Rule failed to produce directory %S" (Path.Build.drop_build_context_maybe_sandboxed_exn dir |> Path.Source.to_string_maybe_quoted) ] | Error (`Directory dir, (unix_error, _, _)) -> User_error.raise ~loc [ Pp.textf "Rule produced unreadable directory %S" (Path.Build.drop_build_context_maybe_sandboxed_exn dir |> Path.Source.to_string_maybe_quoted) ; Pp.verbatim (Unix.error_message unix_error) ] let of_file_list_exn list = { files = Path.Build.Map.of_list_exn list; dirs = Path.Build.Map.empty } let expand_validated_exn (validated : Validated.t) dir_filename_pairs = let files = Path.Build.Set.to_map validated.files ~f:(fun (_ : Path.Build.t) -> ()) in let dirs = Path.Build.Map.of_list_multi dir_filename_pairs |> Path.Build.Map.map ~f:(String.Map.of_list_map_exn ~f:(fun file -> (file, ()))) in let is_unexpected dir = not (Path.Build.Set.exists validated.dirs ~f:(fun validated_dir -> Path.Build.is_descendant dir ~of_:validated_dir)) in Path.Build.Map.iteri dirs ~f:(fun dir _ -> if is_unexpected dir then Code_error.raise "Targets.Produced.expand_validated_exn: Unexpected directory." [ ("validated", Validated.to_dyn validated) ; ("dir", Path.Build.to_dyn dir) ]); { files; dirs } let all_files { files; dirs } = let disallow_duplicates file _payload1 _payload2 = Code_error.raise (sprintf "Targets.Produced.all_files: duplicate file %S" (Path.Build.to_string file)) [ ("files", Path.Build.Map.to_dyn Dyn.opaque files) ; ("dirs", Path.Build.Map.to_dyn (String.Map.to_dyn Dyn.opaque) dirs) ] in let files_in_dirs = Path.Build.Map.foldi dirs ~init:Path.Build.Map.empty ~f:(fun dir filenames -> let paths = Path.Build.Map.of_list_exn (String.Map.to_list_map filenames ~f:(fun filename payload -> (Path.Build.relative dir filename, payload))) in Path.Build.Map.union paths ~f:disallow_duplicates) in Path.Build.Map.union ~f:disallow_duplicates files files_in_dirs let all_files_seq t = Seq.append (Path.Build.Map.to_seq t.files) (Seq.concat (Path.Build.Map.to_seq t.dirs |> Seq.map ~f:(fun (dir, filenames) -> String.Map.to_seq filenames |> Seq.map ~f:(fun (filename, payload) -> (Path.Build.relative dir filename, payload))))) let digest { files; dirs } = let all_digests = Path.Build.Map.values files :: Path.Build.Map.to_list_map dirs ~f:(fun _ -> String.Map.values) in Digest.generic (List.concat all_digests) module Option = struct exception Short_circuit let mapi { files; dirs } ~(f : Path.Build.t -> 'a -> 'b option) = let f path a = match f path a with | Some b -> b | None -> raise_notrace Short_circuit in try let files = Path.Build.Map.mapi files ~f in let dirs = Path.Build.Map.mapi dirs ~f:(fun dir -> String.Map.mapi ~f:(fun filename -> f (Path.Build.relative dir filename))) in Some { files; dirs } with Short_circuit -> None end let to_dyn { files; dirs } = Dyn.record [ ("files", Path.Build.Map.to_dyn Dyn.opaque files) ; ("dirs", Path.Build.Map.to_dyn (String.Map.to_dyn Dyn.opaque) dirs) ] end
set.c
#include "arb_mat.h" void arb_mat_set(arb_mat_t dest, const arb_mat_t src) { slong i, j; if (dest != src && arb_mat_ncols(src) != 0) { for (i = 0; i < arb_mat_nrows(src); i++) for (j = 0; j < arb_mat_ncols(src); j++) arb_set(arb_mat_entry(dest, i, j), arb_mat_entry(src, i, j)); } }
/* Copyright (C) 2012 Fredrik Johansson This file is part of Arb. Arb is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <http://www.gnu.org/licenses/>. */
script_list.ml
open Script_typed_ir let empty : 'a boxed_list = {elements = []; length = 0} let cons : 'a -> 'a boxed_list -> 'a boxed_list = fun elt l -> {length = 1 + l.length; elements = elt :: l.elements}
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2020 Metastate AG <hello@metastate.dev> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
mul_fmpz.c
#include "fq_nmod.h" void fq_nmod_mul_fmpz(fq_nmod_t rop, const fq_nmod_t op, const fmpz_t x, const fq_nmod_ctx_t ctx) { fmpz_t rx; fmpz_init(rx); fmpz_mod(rx, x, fq_nmod_ctx_prime(ctx)); nmod_poly_scalar_mul_nmod(rop, op, fmpz_get_ui(rx)); fmpz_clear(rx); }
/* Copyright (C) 2012 Sebastian Pancratz Copyright (C) 2013 Mike Hansen This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
object.ml
type 'a t = < value : 'a; show : 'a -> string > [@@deriving refl] let () = let o = Refl.map [%refl: _ t] [%refl: _ t] [PN (int_of_float, float_of_int)] (object method value = 1. method show = string_of_float end) in assert (o#show (succ o#value) = "2.")