filename
stringlengths
3
67
data
stringlengths
0
58.3M
license
stringlengths
0
19.5k
dune
(executables (names test_error_tables) (libraries tezos-error-monad lwt.unix alcotest-lwt) (flags (:standard -open Tezos_error_monad))) (alias (name buildtest) (deps test_error_tables.exe)) (alias (name runtest_error_tables) (action (run %{exe:test_error_tables.exe}))) (alias (name runtest) (package tezos-error-monad) (deps (alias runtest_error_tables)))
valconstraint.ml
(** A simple constraint solver *) (* Constraints: v is any value: a constant cst or a variable S. Possible constraints are v1 := v2 v1 := v2 (op) v3 Solutions are S -> cst *) module type S = sig type atom type cst type location type state type arch_op1 type expr = | Atom of atom | ReadInit of location * state | Unop of (arch_op1 Op.op1) * atom | Binop of Op.op * atom * atom | Terop of Op.op3 * atom * atom * atom type rvalue = expr type cnstrnt = | Assign of atom * rvalue | Failed of exn (* Delay exceptions *) | Warn of string type cnstrnts = cnstrnt list val pp_cnstrnts : cnstrnt list -> string type solution type answer = | NoSolns | Maybe of solution * cnstrnts val pp_answer : answer -> string (* Argument `final` characterises the last call to solver: delayed exception are raised *) val solve : final:bool -> cnstrnt list -> answer end module type Config = sig val hexa : bool val debug : bool end module Make (C:Config) (A:Arch_herd.S) : S with type atom = A.V.v and type cst = A.V.Cst.v and type arch_op1 = A.V.arch_op1 and type solution = A.V.solution and type location = A.location and type state = A.state = struct open Printf module V = A.V type atom = V.v type cst = V.Cst.v type location = A.location type state = A.state type arch_op1 = V.arch_op1 type expr = | Atom of atom | ReadInit of location * state | Unop of (arch_op1 Op.op1) * atom | Binop of Op.op * atom * atom | Terop of Op.op3 * atom * atom * atom let map_expr fv e = match e with | Atom v -> Atom (fv v) | ReadInit (loc,s) -> ReadInit (A.map_loc fv loc,s) | Unop (o,a1) -> Unop (o,fv a1) | Binop (o,a1,a2) -> Binop (o,fv a1, fv a2) | Terop (o,a1,a2,a3) -> Terop (o,fv a1, fv a2, fv a3) type rvalue = expr type cnstrnt = | Assign of V.v * rvalue | Failed of exn | Warn of string type cnstrnts = cnstrnt list let pp_atom a = V.pp C.hexa a let pp_expr e = match e with | Atom a -> pp_atom a | ReadInit(loc,_) -> A.dump_location loc ^ " in init" | Unop (o,a1) -> sprintf "%s(%s)" (Op.pp_op1 C.hexa V.pp_arch_op1 o) 
(pp_atom a1) | Binop (o,a1,a2) -> if Op.is_infix o then pp_atom a1 ^ Op.pp_op o ^ pp_atom a2 else Printf.sprintf "%s(%s,%s)" (Op.pp_op o) (pp_atom a1) (pp_atom a2) | Terop (op,a1,a2,a3) -> Op.pp_op3 op (pp_atom a1) (pp_atom a2) (pp_atom a3) let pp_rvalue e = pp_expr e let pp_cnstrnt cnstr = match cnstr with | Assign (v,rval) -> (V.pp C.hexa v) ^ ":=" ^(pp_rvalue rval) | Failed e -> sprintf "Failed %s" (Printexc.to_string e) | Warn e -> e let pp_cnstrnts lst = String.concat "\n" (List.map pp_cnstrnt lst) type solution = V.solution type answer = | NoSolns | Maybe of solution * cnstrnts let pp_answer = let pp_cns cns = match cns with | [] -> "" | _::_ -> "\nUnsolved equations:\n" ^ (pp_cnstrnts cns) in fun soln -> match soln with | NoSolns -> "No solutions" | Maybe (sol,cns) -> let sol_pped = let bds = V.Solution.fold (fun v i k -> (v,i)::k) sol [] in String.concat ", " (List.map (fun (v,i) -> V.pp_csym v ^ "<-" ^ V.pp C.hexa i) bds) in sol_pped ^ pp_cns cns (**************************************) (* Initial phase: normalize variables *) (**************************************) (* straightforward union-find. 
<http://en.wikipedia.org/wiki/Disjoint-set_data_structure> *) (* Collect all variables in partition *) module OV = struct type t = V.csym let compare = V.compare_csym end module Part = Partition.Make (OV) let add_var t v = match v with | V.Val _ -> t | V.Var x -> Part.add t x let add_var_loc t loc = match A.undetermined_vars_in_loc_opt loc with | None -> t | Some v -> add_var t v let add_vars_expr t e = match e with | Atom v -> add_var t v | ReadInit (loc,_) -> add_var_loc t loc | Unop (_,v) -> add_var t v | Binop (_,v1,v2) -> add_var (add_var t v1) v2 | Terop (_,v1,v2,v3) -> add_var (add_var (add_var t v1) v2) v3 let add_vars_cn t cn = match cn with | Assign (v,e) -> add_vars_expr (add_var t v) e | Failed _ | Warn _ -> t let add_vars_cns cns = List.fold_left add_vars_cn (Part.create ()) cns (* Perform union-find *) let uf_cn t cn = match cn with | Assign (V.Var v,Atom (V.Var w)) -> Part.union t v w | _ -> () let uf_cns t cns = List.iter (uf_cn t) cns ; Part.as_solution t (* Simplify equations *) let subst_atom m v = V.map_csym (fun x -> try V.Var (Part.Sol.find x m) with Not_found -> V.Var x) v let subst_expr m = map_expr (subst_atom m) let subst_cn m cn k = match cn with | Assign (v,Atom w) -> let v = subst_atom m v and w = subst_atom m w in if V.compare v w = 0 then k else Assign (v,Atom w)::k | Assign (v,e) -> let v = subst_atom m v and e = subst_expr m e in Assign (v,e)::k | Failed _ | Warn _ -> cn::k let subst_cns soln cns = List.fold_right (subst_cn soln) cns [] (* All together *) let normalize_vars cns = let t = add_vars_cns cns in let m = uf_cns t cns in let cns = subst_cns m cns in if C.debug then begin eprintf "* Normalizes to *\n%s\n%!" (pp_cnstrnts cns) end ; m,cns (*****************) (* Solver proper *) (*****************) (* Solver proceeds by iterating three simple steps, could use a topological sorting, to be more efficient. Not needed at the moment. 
*) (* Phase 1: detection of contradictions and erasure of trivial equations *) exception Contradiction let mk_atom_from_expr e = try match e with | Atom _ -> e | ReadInit (loc,init) -> Atom (A.look_address_in_state init loc) | Unop (op,v1) -> Atom (V.op1 op v1) | Binop (op,v1,v2) -> Atom (V.op op v1 v2) | Terop (op,v1,v2,v3) -> Atom (V.op3 op v1 v2 v3) with (* [expr] still contains at least one undetermined sub-expression *) | A.LocUndetermined | V.Undetermined -> e let check_true_false cn k = match cn with | Assign (v,e) -> begin try let e = mk_atom_from_expr e in begin match e with | Atom w -> if V.is_var_determined v && V.is_var_determined w then if V.compare v w = 0 then k else raise Contradiction else Assign (v,e)::k | ReadInit _ | Unop _|Binop _|Terop _ -> Assign (v,e)::k end (* Delay failure to preserve potential contradiction *) with | Contradiction|Misc.Timeout as e -> raise e | e -> if C.debug then eprintf "Delaying exception in solver: %s\n" (Printexc.to_string e) ; Failed e::k end | Failed _ | Warn _ -> cn::k let check_true_false_constraints cns = List.fold_right check_true_false cns [] (* Phase 3, substitution *) let simplify_vars_in_atom soln v = V.map_csym (fun x -> try V.Val (V.Solution.find x soln) with Not_found -> V.Var x) v let simplify_vars_in_expr soln = map_expr (simplify_vars_in_atom soln) let simplify_vars_in_cnstrnt soln cn = match cn with | Assign (v,rval) -> let v = simplify_vars_in_atom soln v in let rval = simplify_vars_in_expr soln rval in Assign (v,rval) | Failed _ | Warn _ -> cn let simplify_vars_in_cnstrnts soln cs = List.map (simplify_vars_in_cnstrnt soln) cs (* Phase 2, "solving": just collect equations S := cst / cst := S *) let singleton v i = V.Solution.add v i V.Solution.empty and empty = V.Solution.empty let solve_cnstrnt cnstr = match cnstr with | Assign (V.Var v,Atom (V.Val i)) | Assign (V.Val i,Atom (V.Var v)) -> singleton v i | Assign (V.Val _,Atom (V.Val _)) -> (* By previous application of check_true_false *) assert 
false | Assign (V.Var _,Atom (V.Var _)) (* can occur in spite of variable normalization (ternary if) *) | Assign (_,(Unop _|Binop _|Terop _|ReadInit _)) -> empty | Failed _ | Warn _ -> empty (* merge of solutions, with consistency check *) let merge sol1 sol2 = V.Solution.fold (fun v i k -> try let i' = V.Solution.find v sol2 in if V.Cst.compare i i' = 0 then V.Solution.add v i k else raise Contradiction with Not_found -> V.Solution.add v i k) sol1 sol2 let solve_cnstrnts = List.fold_left (fun solns cnstr -> merge (solve_cnstrnt cnstr) solns) V.Solution.empty (************************) (* Raise exceptions now *) (************************) let check_failed cns = List.iter (function | Failed e -> raise e | Warn e -> Warn.warn_always "%s. Legal outcomes may be missing" e; raise Contradiction | Assign _ -> ()) cns (*******************************) (* Iterate basic solving steps *) (*******************************) (* Just union since there are no variables in rhs of solutions *) let compose_sols sol1 sol2 = V.Solution.fold V.Solution.add sol1 sol2 let rec solve_step cns solns_final = (* Phase 1, check individual constraint validity *) let cns = check_true_false_constraints cns in (* Phase 2, orient constraints S := cst / cst := S *) let solns = solve_cnstrnts cns in if V.Solution.is_empty solns then begin solns_final,cns end else (* Phase 3, and iteration *) let cns = simplify_vars_in_cnstrnts solns cns and solns_final = compose_sols solns solns_final in solve_step cns solns_final let add_vars_solns m solns0 = Part.Sol.fold (fun x y solns -> try let cst = V.Solution.find y solns0 in V.Solution.add x (V.Val cst) solns with Not_found -> V.Solution.add x (V.Var y) solns) m (V.Solution.map (fun x -> V.Val x) solns0) let solve ~final lst = if C.debug then begin prerr_endline "** Solve **" ; eprintf "%s\n" (pp_cnstrnts lst) ; flush stderr end ; let m,lst = normalize_vars lst in let sol = try let solns,lst = solve_step lst V.Solution.empty in if final then check_failed lst ; 
let solns = add_vars_solns m solns in Maybe (solns,lst) with Contradiction -> NoSolns in if C.debug then begin eprintf "Solutions: %s\n" (pp_answer sol) ; flush stderr end ; sol (*********************************) (* Topological sort-based solver *) (*********************************) end
(****************************************************************************) (* the diy toolsuite *) (* *) (* Jade Alglave, University College London, UK. *) (* Luc Maranget, INRIA Paris-Rocquencourt, France. *) (* *) (* Copyright 2010-present Institut National de Recherche en Informatique et *) (* en Automatique and the authors. All rights reserved. *) (* *) (* This software is governed by the CeCILL-B license under French law and *) (* abiding by the rules of distribution of free software. You can use, *) (* modify and/ or redistribute the software under the terms of the CeCILL-B *) (* license as circulated by CEA, CNRS and INRIA at the following URL *) (* "http://www.cecill.info". We also give a copy in LICENSE.txt. *) (****************************************************************************)
main.ml
(* Tezos Protocol Implementation - Protocol Signature Instance *) type block_header_data = Alpha_context.Block_header.protocol_data type block_header = Alpha_context.Block_header.t = { shell: Block_header.shell_header ; protocol_data: block_header_data ; } let block_header_data_encoding = Alpha_context.Block_header.protocol_data_encoding type block_header_metadata = Apply_results.block_metadata let block_header_metadata_encoding = Apply_results.block_metadata_encoding type operation_data = Alpha_context.packed_protocol_data = | Operation_data : 'kind Alpha_context.Operation.protocol_data -> operation_data let operation_data_encoding = Alpha_context.Operation.protocol_data_encoding type operation_receipt = Apply_results.packed_operation_metadata = | Operation_metadata : 'kind Apply_results.operation_metadata -> operation_receipt | No_operation_metadata: operation_receipt let operation_receipt_encoding = Apply_results.operation_metadata_encoding let operation_data_and_receipt_encoding = Apply_results.operation_data_and_metadata_encoding type operation = Alpha_context.packed_operation = { shell: Operation.shell_header ; protocol_data: operation_data ; } let acceptable_passes = Alpha_context.Operation.acceptable_passes let max_block_length = Alpha_context.Block_header.max_header_length let max_operation_data_length = Alpha_context.Constants.max_operation_data_length let validation_passes = let max_anonymous_operations = Alpha_context.Constants.max_revelations_per_block + (* allow 100 wallet activations or denunciations per block *) 100 in Updater.[ { max_size = 32 * 1024 ; max_op = Some 32 } ; (* 32 endorsements *) { max_size = 32 * 1024 ; max_op = None } ; (* 32k of voting operations *) { max_size = max_anonymous_operations * 1024 ; max_op = Some max_anonymous_operations } ; { max_size = 512 * 1024 ; max_op = None } ] (* 512kB *) let rpc_services = Alpha_services.register () ; Services_registration.get_rpc_services () type validation_mode = | Application of { 
block_header : Alpha_context.Block_header.t ; baker : Alpha_context.public_key_hash ; } | Partial_application of { block_header : Alpha_context.Block_header.t ; baker : Alpha_context.public_key_hash ; } | Partial_construction of { predecessor : Block_hash.t ; } | Full_construction of { predecessor : Block_hash.t ; protocol_data : Alpha_context.Block_header.contents ; baker : Alpha_context.public_key_hash ; } type validation_state = { mode : validation_mode ; chain_id : Chain_id.t ; ctxt : Alpha_context.t ; op_count : int ; } let current_context { ctxt ; _ } = return (Alpha_context.finalize ctxt).context let begin_partial_application ~chain_id ~ancestor_context:ctxt ~predecessor_timestamp ~predecessor_fitness (block_header : Alpha_context.Block_header.t) = let level = block_header.shell.level in let fitness = predecessor_fitness in let timestamp = block_header.shell.timestamp in Alpha_context.prepare ~level ~timestamp ~fitness ctxt >>=? fun ctxt -> Apply.begin_application ctxt chain_id block_header predecessor_timestamp >>=? fun (ctxt, baker) -> let mode = Partial_application { block_header ; baker = Signature.Public_key.hash baker } in return { mode ; chain_id ; ctxt ; op_count = 0 } let begin_application ~chain_id ~predecessor_context:ctxt ~predecessor_timestamp ~predecessor_fitness (block_header : Alpha_context.Block_header.t) = let level = block_header.shell.level in let fitness = predecessor_fitness in let timestamp = block_header.shell.timestamp in Alpha_context.prepare ~level ~timestamp ~fitness ctxt >>=? fun ctxt -> Apply.begin_application ctxt chain_id block_header predecessor_timestamp >>=? 
fun (ctxt, baker) -> let mode = Application { block_header ; baker = Signature.Public_key.hash baker } in return { mode ; chain_id ; ctxt ; op_count = 0 } let begin_construction ~chain_id ~predecessor_context:ctxt ~predecessor_timestamp:pred_timestamp ~predecessor_level:pred_level ~predecessor_fitness:pred_fitness ~predecessor ~timestamp ?(protocol_data : block_header_data option) () = let level = Int32.succ pred_level in let fitness = pred_fitness in Alpha_context.prepare ~timestamp ~level ~fitness ctxt >>=? fun ctxt -> begin match protocol_data with | None -> Apply.begin_partial_construction ctxt >>=? fun ctxt -> let mode = Partial_construction { predecessor } in return (mode, ctxt) | Some proto_header -> Apply.begin_full_construction ctxt pred_timestamp proto_header.contents >>=? fun (ctxt, protocol_data, baker) -> let mode = let baker = Signature.Public_key.hash baker in Full_construction { predecessor ; baker ; protocol_data } in return (mode, ctxt) end >>=? fun (mode, ctxt) -> return { mode ; chain_id ; ctxt ; op_count = 0 } let apply_operation ({ mode ; chain_id ; ctxt ; op_count ; _ } as data) (operation : Alpha_context.packed_operation) = match mode with | Partial_application _ when not (List.exists (Compare.Int.equal 0) (Alpha_context.Operation.acceptable_passes operation)) -> (* Multipass validation only considers operations in pass 0. 
*) let op_count = op_count + 1 in return ({ data with ctxt ; op_count }, No_operation_metadata) | _ -> let { shell ; protocol_data = Operation_data protocol_data } = operation in let operation : _ Alpha_context.operation = { shell ; protocol_data } in let predecessor, baker = match mode with | Partial_application { block_header = { shell = { predecessor ; _ } ; _ } ; baker } | Application { block_header = { shell = { predecessor ; _ } ; _ } ; baker } | Full_construction { predecessor ; baker ; _ } -> predecessor, baker | Partial_construction { predecessor } -> predecessor, Signature.Public_key_hash.zero in Apply.apply_operation ctxt chain_id Optimized predecessor baker (Alpha_context.Operation.hash operation) operation >>=? fun (ctxt, result) -> let op_count = op_count + 1 in return ({ data with ctxt ; op_count }, Operation_metadata result) let finalize_block { mode ; ctxt ; op_count } = match mode with | Partial_construction _ -> let level = Alpha_context.Level.current ctxt in Alpha_context.Vote.get_current_period_kind ctxt >>=? fun voting_period_kind -> let baker = Signature.Public_key_hash.zero in Signature.Public_key_hash.Map.fold (fun delegate deposit ctxt -> ctxt >>=? fun ctxt -> Alpha_context.Delegate.freeze_deposit ctxt delegate deposit) (Alpha_context.get_deposits ctxt) (return ctxt) >>=? fun ctxt -> let ctxt = Alpha_context.finalize ctxt in return (ctxt, Apply_results.{ baker ; level ; voting_period_kind ; nonce_hash = None ; consumed_gas = Z.zero ; deactivated = []; balance_updates = []}) | Partial_application { baker ; _ } -> let level = Alpha_context. Level.current ctxt in Alpha_context.Vote.get_current_period_kind ctxt >>=? 
fun voting_period_kind -> let ctxt = Alpha_context.finalize ctxt in return (ctxt, Apply_results.{ baker ; level ; voting_period_kind ; nonce_hash = None ; consumed_gas = Z.zero ; deactivated = []; balance_updates = []}) | Application { baker ; block_header = { protocol_data = { contents = protocol_data ; _ } ; _ } } | Full_construction { protocol_data ; baker ; _ } -> Apply.finalize_application ctxt protocol_data baker >>=? fun (ctxt, receipt) -> let level = Alpha_context.Level.current ctxt in let priority = protocol_data.priority in let raw_level = Alpha_context.Raw_level.to_int32 level.level in let fitness = Alpha_context.Fitness.current ctxt in let commit_message = Format.asprintf "lvl %ld, fit %Ld, prio %d, %d ops" raw_level fitness priority op_count in let ctxt = Alpha_context.finalize ~commit_message ctxt in return (ctxt, receipt) let compare_operations op1 op2 = let open Alpha_context in let Operation_data op1 = op1.protocol_data in let Operation_data op2 = op2.protocol_data in match op1.contents, op2.contents with | Single (Endorsement _), Single (Endorsement _) -> 0 | _, Single (Endorsement _) -> 1 | Single (Endorsement _), _ -> -1 | Single (Seed_nonce_revelation _), Single (Seed_nonce_revelation _) -> 0 | _, Single (Seed_nonce_revelation _) -> 1 | Single (Seed_nonce_revelation _), _ -> -1 | Single (Double_endorsement_evidence _), Single (Double_endorsement_evidence _) -> 0 | _, Single (Double_endorsement_evidence _) -> 1 | Single (Double_endorsement_evidence _), _ -> -1 | Single (Double_baking_evidence _), Single (Double_baking_evidence _) -> 0 | _, Single (Double_baking_evidence _) -> 1 | Single (Double_baking_evidence _), _ -> -1 | Single (Activate_account _), Single (Activate_account _) -> 0 | _, Single (Activate_account _) -> 1 | Single (Activate_account _), _ -> -1 | Single (Proposals _), Single (Proposals _) -> 0 | _, Single (Proposals _) -> 1 | Single (Proposals _), _ -> -1 | Single (Ballot _), Single (Ballot _) -> 0 | _, Single (Ballot _) -> 1 | 
Single (Ballot _), _ -> -1 (* Manager operations with smaller counter are pre-validated first. *) | Single (Manager_operation op1), Single (Manager_operation op2) -> Z.compare op1.counter op2.counter | Cons (Manager_operation op1, _), Single (Manager_operation op2) -> Z.compare op1.counter op2.counter | Single (Manager_operation op1), Cons (Manager_operation op2, _) -> Z.compare op1.counter op2.counter | Cons (Manager_operation op1, _), Cons (Manager_operation op2, _) -> Z.compare op1.counter op2.counter let init ctxt block_header = let level = block_header.Block_header.level in let fitness = block_header.fitness in let timestamp = block_header.timestamp in let typecheck (ctxt:Alpha_context.context) (script:Alpha_context.Script.t) = Script_ir_translator.parse_script ctxt script >>=? fun (_ex_script, ctxt) -> return ctxt in Alpha_context.prepare_first_block ~typecheck ~level ~timestamp ~fitness ctxt >>=? fun ctxt -> return (Alpha_context.finalize ctxt)
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
echo_server.ml
open Lwt open Ex_common let string_of_unix_err err f p = Printf.sprintf "Unix_error (%s, %s, %s)" (Unix.error_message err) f p let serve_ssl port callback = let tag = "server" in X509_lwt.private_of_pems ~cert:server_cert ~priv_key:server_key >>= fun cert -> let server_s () = let open Lwt_unix in let s = socket PF_INET SOCK_STREAM 0 in setsockopt s SO_REUSEADDR true ; bind s (ADDR_INET (Unix.inet_addr_any, port)) >|= fun () -> listen s 10 ; s in let handle channels addr = async @@ fun () -> Lwt.catch (fun () -> callback channels addr >>= fun () -> yap ~tag "<- handler done") (function | Tls_lwt.Tls_alert a -> yap ~tag @@ "handler: " ^ Tls.Packet.alert_type_to_string a | Tls_lwt.Tls_failure a -> yap ~tag @@ "handler: " ^ Tls.Engine.string_of_failure a | Unix.Unix_error (e, f, p) -> yap ~tag @@ "handler: " ^ (string_of_unix_err e f p) | _exn -> yap ~tag "handler: exception") in yap ~tag ("-> start @ " ^ string_of_int port) >>= fun () -> let rec loop s = let authenticator = null_auth in let config = Tls.Config.server ~reneg:true ~certificates:(`Single cert) ~authenticator () in (Lwt.catch (fun () -> Tls_lwt.accept_ext config s >|= fun r -> `R r) (function | Unix.Unix_error (e, f, p) -> return (`L (string_of_unix_err e f p)) | Tls_lwt.Tls_alert a -> return (`L (Tls.Packet.alert_type_to_string a)) | Tls_lwt.Tls_failure f -> return (`L (Tls.Engine.string_of_failure f)) | exn -> return (`L ("loop: exception: " ^ Printexc.to_string exn)))) >>= function | `R (channels, addr) -> yap ~tag "-> connect" >>= fun () -> ( handle channels addr ; loop s ) | `L (msg) -> yap ~tag ("server socket: " ^ msg) >>= fun () -> loop s in server_s () >>= fun s -> loop s let echo_server _ port = Lwt_main.run ( serve_ssl port @@ fun (ic, oc) _addr -> lines ic |> Lwt_stream.iter_s (fun line -> yap ~tag:"handler" ("+ " ^ line) >>= fun () -> Lwt_io.write_line oc line)) open Cmdliner let port = let doc = "Port to connect to" in Arg.(value & opt int 4433 & info [ "port" ] ~doc) let cmd = let term = 
Term.(ret (const echo_server $ setup_log $ port)) and info = Cmd.info "server" ~version:"0.17.0" in Cmd.v info term let () = exit (Cmd.eval cmd)
dune
; This file was automatically generated, do not edit. ; Edit file manifest/main.ml instead. (library (name tezos_lwt_result_stdlib) (public_name tezos-lwt-result-stdlib) (instrumentation (backend bisect_ppx)) (libraries lwt tezos-lwt-result-stdlib.bare.sigs tezos-lwt-result-stdlib.bare.structs tezos-lwt-result-stdlib.traced.sigs tezos-lwt-result-stdlib.traced.structs) (js_of_ocaml)) (documentation (package tezos-lwt-result-stdlib))
state.c
#include <stdlib.h> #include <assert.h> #include "util.h" #include "list.h" #include "color.h" #include "transform.h" #include "font_desc.h" #include "draw_style.h" #include "polygonize.h" #include "color_composition.h" #include "draw_instr.h" #include "state.h" state_t * state_create( void) { state_t *s = (state_t *)calloc(1, sizeof(state_t)); if (s == NULL) { return NULL; } s->transform = transform_create(); if (s->transform == NULL) { free(s); return NULL; } s->font_desc = font_desc_create(); if (s->font_desc == NULL) { transform_destroy(s->transform); free(s); return NULL; } s->line_dash = NULL; s->clip_path = list_new((free_val_fun_t *)path_fill_instr_destroy); if (s->clip_path == NULL) { transform_destroy(s->transform); font_desc_destroy(s->font_desc); free(s); return NULL; } s->fill_style.type = DRAW_STYLE_COLOR; s->stroke_style.type = DRAW_STYLE_COLOR; state_reset(s); return s; } void state_destroy( state_t *s) { assert(s != NULL); assert(s->transform != NULL); assert(s->font_desc != NULL); assert(s->clip_path != NULL); draw_style_destroy(&s->fill_style); draw_style_destroy(&s->stroke_style); if (s->line_dash != NULL) { free(s->line_dash); } list_delete(s->clip_path); font_desc_destroy(s->font_desc); transform_destroy(s->transform); free(s); } void state_reset( state_t *s) { assert(s != NULL); assert(s->transform != NULL); assert(s->font_desc != NULL); assert(s->clip_path != NULL); transform_reset(s->transform); font_desc_reset(s->font_desc); list_reset(s->clip_path); if (s->line_dash != NULL) { free(s->line_dash); } draw_style_destroy(&s->fill_style); s->fill_style.type = DRAW_STYLE_COLOR; s->fill_style.content.color = color_white; draw_style_destroy(&s->stroke_style); s->stroke_style.type = DRAW_STYLE_COLOR; s->stroke_style.content.color = color_black; s->line_dash = NULL; s->line_dash_len = 0; s->line_dash_offset = 0; s->line_width = 1.0; s->global_alpha = 1.0; s->shadow_blur = 0; s->shadow_color = color_transparent_black; s->shadow_offset_x = 0; 
s->shadow_offset_y = 0; s->miter_limit = 10.0; s->join_type = JOIN_ROUND; s->cap_type = CAP_BUTT; s->global_composite_operation = SOURCE_OVER; } state_t * state_copy( const state_t *s) { assert(s != NULL); assert(s->transform != NULL); assert(s->font_desc != NULL); assert(s->clip_path != NULL); state_t *sc = (state_t *)calloc(1, sizeof(state_t)); if (sc == NULL) { return NULL; } sc->transform = transform_copy(s->transform); if (sc->transform == NULL) { free(sc); return NULL; } sc->font_desc = font_desc_copy(s->font_desc); if (sc->font_desc == NULL) { transform_destroy(sc->transform); free(sc); return NULL; } sc->clip_path = list_new((free_val_fun_t*)path_fill_instr_destroy); if (sc->clip_path == NULL) { transform_destroy(sc->transform); font_desc_destroy(sc->font_desc); free(sc); return NULL; } if (s->line_dash) { sc->line_dash = (double *)memdup(sc->line_dash, s->line_dash_len * sizeof(double)); if (sc->line_dash == NULL) { transform_destroy(sc->transform); font_desc_destroy(sc->font_desc); list_delete(s->clip_path); free(sc); } } list_iterator_t *it = list_get_iterator(s->clip_path); if (it == NULL) { transform_destroy(sc->transform); font_desc_destroy(sc->font_desc); list_delete(s->clip_path); if (sc->line_dash != NULL) { free(sc->line_dash); } free(sc); return NULL; } path_fill_instr_t *instr = NULL; while ((instr = (path_fill_instr_t *)list_iterator_next(it)) != NULL) { path_fill_instr_t *copy = path_fill_instr_create(instr->poly, instr->non_zero); list_push(sc->clip_path, copy); } list_free_iterator(it); sc->fill_style = draw_style_copy(&s->fill_style); sc->stroke_style = draw_style_copy(&s->stroke_style); sc->line_dash_len = s->line_dash_len; sc->line_dash_offset = s->line_dash_offset; sc->line_width = s->line_width; sc->global_alpha = s->global_alpha; sc->shadow_blur = s->shadow_blur; sc->shadow_color = s->shadow_color; sc->shadow_offset_x = s->shadow_offset_x; sc->shadow_offset_y = s->shadow_offset_y; sc->miter_limit = s->miter_limit; sc->join_type = 
s->join_type; sc->cap_type = s->cap_type; sc->global_composite_operation = s->global_composite_operation; return sc; }
/**************************************************************************/ /* */ /* Copyright 2022 OCamlPro */ /* */ /* All rights reserved. This file is distributed under the terms of the */ /* GNU Lesser General Public License version 2.1, with the special */ /* exception on linking described in the file LICENSE. */ /* */ /**************************************************************************/
dune
(coq.theory (name B) (package B) (theories A))
batIMap.ml
module Core = struct type 'a t = (int * int * 'a) BatAvlTree.tree include BatAvlTree let singleton n v = singleton_tree (n, n, v) let make eq l (n1, n2, v) r = let n1, l = if is_empty l || n1 = min_int then n1, empty else let (k1, k2, v0), l' = split_rightmost l in if k2 + 1 = n1 && eq v v0 then k1, l' else n1, l in let n2, r = if is_empty r || n2 = max_int then n2, empty else let (k1, k2, v0), r' = split_leftmost r in if n2 + 1 = k1 && eq v v0 then k2, r' else n2, r in make_tree l (n1, n2, v) r let rec add ?(eq = (==)) n v m = if is_empty m then make_tree empty (n, n, v) empty else let (n1, n2, v0) as x = root m in let l = left_branch m in let r = right_branch m in if n1 <> min_int && n = n1 - 1 && eq v v0 then make eq l (n, n2, v) r else if n < n1 then make_tree (add n v l) x r else if n1 <= n && n <= n2 then if eq v v0 then m else let l = if n1 = n then l else make_tree l (n1, n - 1, v0) empty in let r = if n2 = n then r else make_tree empty (n + 1, n2, v0) r in make eq l (n, n, v) r else if n2 <> max_int && n = n2 + 1 && eq v v0 then make eq l (n1, n, v) r else make_tree l x (add n v r) let rec from n s = if is_empty s then empty else let (n1, n2, v) as x = root s in let s0 = left_branch s in let s1 = right_branch s in if n < n1 then make_tree (from n s0) x s1 else if n > n2 then from n s1 else make_tree empty (n, n2, v) s1 let after n s = if n = max_int then empty else from (n + 1) s let rec until n s = if is_empty s then empty else let (n1, n2, v) as x = root s in let s0 = left_branch s in let s1 = right_branch s in if n > n2 then make_tree s0 x (until n s1) else if n < n1 then until n s0 else make_tree s0 (n1, n, v) empty let before n s = if n = min_int then empty else until (n - 1) s let add_range ?(eq=(==)) n1 n2 v s = if n1 > n2 then invalid_arg "IMap.add_range" else make eq (before n1 s) (n1, n2, v) (after n2 s) let rec find (n:int) m = if is_empty m then raise Not_found else let (n1, n2, v) = root m in if n < n1 then find n (left_branch m) else if n1 <= 
n && n <= n2 then v else find n (right_branch m) let modify_opt ?(eq=(==)) (n:int) f m = let rec aux m = if is_empty m then match f None with | Some v -> singleton n v | None -> raise Exit else let (n1, n2, v) = root m in if n < n1 then make_tree (aux (left_branch m)) (n1, n2, v) (right_branch m) else if n > n2 then make_tree (left_branch m) (n1, n2, v) (aux (right_branch m)) else match f (Some v) with | None -> concat (left_branch m) (right_branch m) | Some v' -> if eq v' v then raise Exit (* fast exit *) else if n = n1 && n = n2 then (* no need to rebalance *) create (left_branch m) (n, n, v') (right_branch m) else let l = if n = n1 then left_branch m else add_range ~eq n1 (n-1) v (left_branch m) and r = if n = n2 then right_branch m else add_range ~eq (n+1) n2 v (right_branch m) in make_tree l (n, n, v') r in try aux m with Exit -> m let modify ?(eq=(==)) (n:int) f m = let f' = function | Some v -> Some (f v) | None -> raise Not_found in modify_opt ~eq n f' m let modify_def v0 ?(eq=(==)) (n:int) f m = let f' = function | Some v -> Some (f v) | None -> Some (f v0) in modify_opt ~eq n f' m let rec remove n m = if is_empty m then empty else let (n1, n2, v) as x = root m in let l = left_branch m in let r = right_branch m in if n < n1 then make_tree (remove n l) x r else if n1 = n then if n2 = n then concat l r else make_tree l (n + 1, n2, v) r else if n1 < n && n < n2 then make_tree (make_tree l (n1, n - 1, v) empty) (n + 1, n2, v) r else if n = n2 then make_tree l (n1, n - 1, v) r else make_tree l x (remove n r) let remove_range n1 n2 m = if n1 > n2 then invalid_arg "IMap.remove_range" else concat (before n1 m) (after n2 m) let rec mem (n:int) m = if is_empty m then false else let (n1, n2, _) = root m in if n < n1 then mem n (left_branch m) else if n1 <= n && n <= n2 then true else mem n (right_branch m) let iter_range proc m = BatAvlTree.iter (fun (n1, n2, v) -> proc n1 n2 v) m let fold_range f m a = BatAvlTree.fold (fun (n1, n2, v) a -> f n1 n2 v a) m a let fold 
f m a = let rec loop n1 n2 v a = let a = f n1 v a in if n1 = n2 then a else loop (n1 + 1) n2 v a in fold_range loop m a let iter proc m = fold (fun n v () -> proc n v) m () let rec map ?(eq=(=)) f m = if is_empty m then empty else let n1, n2, v = root m in let l = map ~eq f (left_branch m) in let r = map ~eq f (right_branch m) in let v = f v in make eq l (n1, n2, v) r let mapi ?eq f m = fold (fun n v a -> add ?eq n (f n v) a) m empty let rec map_range ?(eq=(=)) f m = if is_empty m then empty else let n1, n2, v = root m in let l = map_range ~eq f (left_branch m) in let r = map_range ~eq f (right_branch m) in let v = f n1 n2 v in make eq l (n1, n2, v) r let rec set_to_map s v = if is_empty s then empty else let (n1, n2) = root s in let l = left_branch s in let r = right_branch s in make_tree (set_to_map l v) (n1, n2, v) (set_to_map r v) let domain m = if is_empty m then empty else let (k1, k2, _), m' = split_leftmost m in let f n1 n2 _ (k1, k2, s) = if n1 = k2 + 1 then (k1, n2, s) else (n1, n2, make_tree s (k1, k2) empty) in let k1, k2, s = fold_range f m' (k1, k2, empty) in make_tree s (k1, k2) empty let map_to_set p m = let rec loop m = if is_empty m then None else let (k1, k2, v), m' = split_leftmost m in if p v then Some (k1, k2, m') else loop m' in match loop m with Some (k1, k2, m') -> let f n1 n2 v (k1, k2, s) = if p v then if n1 = k2 + 1 then (k1, n2, s) else (n1, n2, make_tree s (k1, k2) empty) else (k1, k2, s) in let (k1, k2, s) = fold_range f m' (k1, k2, empty) in make_tree s (k1, k2) empty | None -> empty module Enum = BatEnum (* Fold across two maps *) let fold2_range f m1 m2 acc = let e1 = enum m1 and e2 = enum m2 in let rec aux acc = function None,None -> acc | Some (lo,hi,rx), None -> aux (f lo hi (Some rx) None acc) (Enum.get e1, None) | None, Some (lo,hi,rx) -> aux (f lo hi None (Some rx) acc) (None, Enum.get e2) | Some (lo1,hi1,rx1), Some (lo2,hi2,rx2) when lo1 < lo2 -> let hi, v1 = if hi1 > lo2 then lo2-1, Some (lo2,hi1,rx1) else if hi1 = lo2 then 
hi1, Some (lo2,lo2,rx1) else hi1, Enum.get e1 and v2 = Some (lo2,hi2,rx2) in aux (f lo1 hi (Some rx1) None acc) (v1, v2) | Some (lo1,hi1,rx1), Some (lo2,hi2,rx2) when lo2 < lo1 -> let hi, v2 = if hi2 > lo1 then lo1-1, Some (lo1,hi2,rx2) else if hi2 = lo1 then hi2, Some (lo1,lo1,rx2) else hi2, Enum.get e2 and v1 = Some (lo1,hi1,rx1) in aux (f lo2 hi None (Some rx2) acc) (v1,v2) | Some (lo1,hi1,rx1), Some (_lo2,hi2,rx2) (* lo1 = lo2 *) -> let hi, v1, v2 = if hi1 = hi2 then hi1, Enum.get e1, Enum.get e2 else if hi1 < hi2 then hi1, Enum.get e1, Some (hi1+1,hi2,rx2) else (* hi2 < hi1 *) hi2, Some (hi2+1,hi1,rx1), Enum.get e2 in (* printf "#@%a\n" print_rng (lo1, hi); *) aux (f lo1 hi (Some rx1) (Some rx2) acc) (v1, v2) in aux acc (Enum.get e1, Enum.get e2) let union ~eq f m1 m2 = let insert lo hi v1 v2 m = match v1, v2 with | Some v1, Some v2 -> add_range ~eq lo hi (f v1 v2) m | Some x, None | None, Some x -> add_range ~eq lo hi x m | None, None -> assert false in fold2_range insert m1 m2 empty let merge ~eq f m1 m2 = let insert lo hi v1 v2 m = match f lo hi v1 v2 with None -> m | Some v -> add_range ~eq lo hi v m in fold2_range insert m1 m2 empty let forall2_range f m1 m2 = let e1 = enum m1 and e2 = enum m2 in let rec aux = function None,None -> true | Some (lo,hi,rx), None -> (f lo hi (Some rx) None) && aux (Enum.get e1, None) | None, Some (lo,hi,rx) -> (f lo hi None (Some rx)) && aux (None, Enum.get e2) | Some (lo1,hi1,rx1), Some (lo2,hi2,rx2) when lo1 < lo2 -> let hi, v1 = if hi1 > lo2 then lo2-1, Some (lo2,hi1,rx1) else hi1, Enum.get e1 and v2 = Some (lo2,hi2,rx2) in (f lo1 hi (Some rx1) None) && aux (v1, v2) | Some (lo1,hi1,rx1), Some (lo2,hi2,rx2) when lo2 < lo1 -> let hi, v2 = if hi2 > lo1 then lo1-1, Some (lo1,hi2,rx2) else hi2, Enum.get e2 and v1 = Some (lo1,hi1,rx1) in (f lo2 hi None (Some rx2)) && aux (v1,v2) | Some (lo1,hi1,rx1), Some (_,hi2,rx2) (* lo1 = lo2 *) -> let hi, v1, v2 = if hi1 = hi2 then hi1, Enum.get e1, Enum.get e2 else if hi1 < hi2 then hi1, 
Enum.get e1, Some (hi1+1,hi2,rx2) else (* hi2 < hi1 *) hi2, Some (hi2+1,hi1,rx1), Enum.get e2 in (f lo1 hi (Some rx1) (Some rx2)) && aux (v1, v2) in aux (Enum.get e1, Enum.get e2) end type 'a t = {m: 'a Core.t; eq: 'a -> 'a -> bool} type key = int let empty ~eq = {m = Core.empty; eq} (*$T empty is_empty (empty ~eq:(=)) *) let singleton ~eq x y = {m = Core.singleton x y; eq} (*$T singleton not (is_empty (singleton ~eq:(=) 1 'x')) find 1 (singleton ~eq:(=) 1 'x') = 'x' try ignore(find 0 (singleton ~eq:(=) 1 'x')); false with Not_found -> true *) let is_empty {m; _} = Core.is_empty m let add x y {m;eq} = {m=Core.add ~eq x y m; eq} (*$= add as a & ~cmp:(List.eq (Tuple3.eq Int.equal Int.equal Int.equal)) ~printer:(List.print (Tuple3.print Int.print Int.print Int.print) |> IO.to_string) [(0,2,0)] (empty ~eq:(=) |> a 0 0 |> a 2 0 |> a 1 0 |> enum |> List.of_enum) *) (*$= add as a & ~cmp:(List.eq (Tuple3.eq Int.equal Int.equal String.equal)) ~printer:(List.print (Tuple3.print Int.print Int.print String.print) |> IO.to_string) [(0,2,"foo")] \ (empty ~eq:(=) |> a 0 "foo" |> a 2 "foo" |> a 1 "foo" |> enum |> List.of_enum) *) let add_range lo hi y {m;eq} = {m=Core.add_range ~eq lo hi y m; eq} let find x {m; _} = Core.find x m let modify x f {m;eq} = {m=Core.modify ~eq x f m; eq} (*$T modify (* modify a single entry *) \ empty ~eq:(=) |> add 1 1 |> modify 1 succ |> find 1 = 2 (* modify a range boundary *) \ empty ~eq:(=) |> add_range 1 5 1 |> modify 1 succ |> find 1 = 2 empty ~eq:(=) |> add_range 1 5 1 |> modify 1 succ |> find 2 = 1 empty ~eq:(=) |> add_range 1 5 1 |> modify 1 succ |> find 5 = 1 (* modify a range boundary (the other one) *) \ empty ~eq:(=) |> add_range 1 5 1 |> modify 5 succ |> find 1 = 1 empty ~eq:(=) |> add_range 1 5 1 |> modify 5 succ |> find 4 = 1 empty ~eq:(=) |> add_range 1 5 1 |> modify 5 succ |> find 5 = 2 (* modify a range in the middle *) \ empty ~eq:(=) |> add_range 1 5 1 |> modify 2 succ |> find 1 = 1 empty ~eq:(=) |> add_range 1 5 1 |> modify 2 
succ |> find 2 = 2 empty ~eq:(=) |> add_range 1 5 1 |> modify 2 succ |> find 3 = 1 empty ~eq:(=) |> add_range 1 5 1 |> modify 2 succ |> find 5 = 1 *) let modify_def v0 x f {m;eq} = {m=Core.modify_def ~eq v0 x f m; eq} (*$T modify_def (* adding an entry *) \ empty ~eq:(=) |> modify_def 0 1 succ |> find 1 = 1 *) let modify_opt x f {m;eq} = {m=Core.modify_opt ~eq x f m; eq} (*$T modify_opt (* adding an entry *) \ empty ~eq:(=) |> modify_opt 1 (function None -> Some 1 | _ -> assert false) |> find 1 = 1 (* deleting an entry *) \ empty ~eq:(=) |> add 1 1 |> modify_opt 1 (function Some 1 -> None | _ -> assert false) |> mem 1 |> not *) let remove x {m;eq} = {m=Core.remove x m; eq} let remove_range lo hi {m;eq} = {m=Core.remove_range lo hi m; eq} let from x {m;eq} = {m=Core.from x m; eq} let after x {m;eq} = {m=Core.after x m; eq} let until x {m;eq} = {m=Core.until x m; eq} let before x {m;eq} = {m=Core.before x m; eq} let mem x {m; _} = Core.mem x m let iter f {m; _} = Core.iter f m let iter_range f {m; _} = Core.iter_range f m let map ?(eq=(=)) f {m; _} = {m=Core.map ~eq f m; eq} let mapi ?(eq=(=)) f {m; _} = {m=Core.mapi ~eq f m; eq} let map_range ?(eq=(=)) f {m; _} = {m = Core.map_range ~eq f m; eq} let fold f {m; _} x0 = Core.fold f m x0 let fold_range f {m; _} x0 = Core.fold_range f m x0 let set_to_map ?(eq=(=)) s x = {m = Core.set_to_map s x; eq} let domain {m; _} = Core.domain m let map_to_set f {m; _} = Core.map_to_set f m let enum {m; _} = Core.enum m let fold2_range f {m=m1; _} {m=m2; _} x0 = Core.fold2_range f m1 m2 x0 let union f {m=m1;eq} {m=m2; _} = {m=Core.union ~eq f m1 m2; eq} let merge ?(eq=(=)) f {m=m1; _} {m=m2; _} = {m=Core.merge ~eq f m1 m2; eq} let forall2_range f {m=m1; _} {m=m2; _} = Core.forall2_range f m1 m2 let get_dec_eq {eq; _} = eq (*$T get_dec_eq get_dec_eq (empty ~eq:Int.equal) == Int.equal *) let of_enum ~eq e = BatEnum.fold (fun t (n1, n2, v) -> add_range n1 n2 v t) (empty ~eq) e module Infix = struct let (-->) {m; _} k = Core.find k m 
let (<--) m (k,v) = add k v m end
(* Copyright 2003 Yamagata Yoriyuki. distributed with LGPL *) (* Modified by Edgar Friendly <thelema314@gmail.com> *)
c.nobigarray.ml
let () = Printf.eprintf "Welcome to c WITHOUT bigarray support\n%!"
feature_test_multispool.mli
(*_ This signature is deliberately empty. *)
(*_ This signature is deliberately empty. *)
result.mli
type ('a, 'e) t = ('a, 'e) result = Ok of 'a | Error of 'e (***) val ok : 'a -> ('a, 'e) result val ok_s : 'a -> ('a, 'e) result Lwt.t val error : 'e -> ('a, 'e) result val error_s : 'e -> ('a, 'e) result Lwt.t val return : 'a -> ('a, 'e) result val return_unit : (unit, 'e) result val return_none : ('a option, 'e) result val return_some : 'a -> ('a option, 'e) result val return_nil : ('a list, 'e) result val return_true : (bool, 'e) result val return_false : (bool, 'e) result val value : ('a, 'e) result -> default:'a -> 'a val value_f : ('a, 'e) result -> default:(unit -> 'a) -> 'a val bind : ('a, 'e) result -> ('a -> ('b, 'e) result) -> ('b, 'e) result val bind_s : ('a, 'e) result -> ('a -> ('b, 'e) result Lwt.t) -> ('b, 'e) result Lwt.t val bind_error : ('a, 'e) result -> ('e -> ('a, 'f) result) -> ('a, 'f) result val bind_error_s : ('a, 'e) result -> ('e -> ('a, 'f) result Lwt.t) -> ('a, 'f) result Lwt.t val join : (('a, 'e) result, 'e) result -> ('a, 'e) result val map : ('a -> 'b) -> ('a, 'e) result -> ('b, 'e) result (* NOTE: [map_e] is [bind] *) val map_e : ('a -> ('b, 'e) result) -> ('a, 'e) result -> ('b, 'e) result val map_s : ('a -> 'b Lwt.t) -> ('a, 'e) result -> ('b, 'e) result Lwt.t (* NOTE: [map_es] is [bind_s] *) val map_es : ('a -> ('b, 'e) result Lwt.t) -> ('a, 'e) result -> ('b, 'e) result Lwt.t val map_error : ('e -> 'f) -> ('a, 'e) result -> ('a, 'f) result (* NOTE: [map_error_e] is [bind_error] *) val map_error_e : ('e -> ('a, 'f) result) -> ('a, 'e) result -> ('a, 'f) result val map_error_s : ('e -> 'f Lwt.t) -> ('a, 'e) result -> ('a, 'f) result Lwt.t (* NOTE: [map_error_es] is [bind_error_s] *) val map_error_es : ('e -> ('a, 'f) result Lwt.t) -> ('a, 'e) result -> ('a, 'f) result Lwt.t val fold : ok:('a -> 'c) -> error:('e -> 'c) -> ('a, 'e) result -> 'c val iter : ('a -> unit) -> ('a, 'e) result -> unit val iter_s : ('a -> unit Lwt.t) -> ('a, 'e) result -> unit Lwt.t val iter_error : ('e -> unit) -> ('a, 'e) result -> unit val iter_error_s 
: ('e -> unit Lwt.t) -> ('a, 'e) result -> unit Lwt.t val is_ok : ('a, 'e) result -> bool val is_error : ('a, 'e) result -> bool val equal : ok:('a -> 'a -> bool) -> error:('e -> 'e -> bool) -> ('a, 'e) result -> ('a, 'e) result -> bool val compare : ok:('a -> 'a -> int) -> error:('e -> 'e -> int) -> ('a, 'e) result -> ('a, 'e) result -> int val to_option : ('a, 'e) result -> 'a option val of_option : error:'e -> 'a option -> ('a, 'e) result val to_list : ('a, 'e) result -> 'a list val to_seq : ('a, 'e) result -> 'a Seq.t (** [catch f] is [try Ok (f ()) with e -> Error e]: it is [Ok x] if [f ()] evaluates to [x], and it is [Error e] if [f ()] raises [e]. See {!WithExceptions.S.Result.to_exn} for a converse function. If [catch_only] is set, then only exceptions [e] such that [catch_only e] is [true] are caught. Whether [catch_only] is set or not, you cannot catch non-deterministic runtime exceptions of OCaml such as {!Stack_overflow} and {!Out_of_memory} nor system exceptions such as {!Unix.Unix_error}. *) val catch : ?catch_only:(exn -> bool) -> (unit -> 'a) -> ('a, exn) result (** [catch_f f handler] is equivalent to [map_error (catch f) handler]. In other words, it catches exceptions in [f ()] and either returns the value in an [Ok] or passes the exception to [handler] for the [Error]. [catch_only] has the same use as with [catch]. The same restriction on catching non-deterministic runtime exceptions applies. *) val catch_f : ?catch_only:(exn -> bool) -> (unit -> 'a) -> (exn -> 'error) -> ('a, 'error) result (** [catch_s] is [catch] but for Lwt promises. Specifically, [catch_s f] returns a promise that resolves to [Ok x] if and when [f ()] resolves to [x], or to [Error exc] if and when [f ()] is rejected with [exc]. If [catch_only] is set, then only exceptions [e] such that [catch_only e] is [true] are caught. 
Whether [catch_only] is set or not, you cannot catch non-deterministic runtime exceptions of OCaml such as {!Stack_overflow} and {!Out_of_memory} nor system exceptions such as {!Unix.Unix_error}. *) val catch_s : ?catch_only:(exn -> bool) -> (unit -> 'a Lwt.t) -> ('a, exn) result Lwt.t
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2020 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dune
(executables (names simple chat) (libraries eio_main lwt_eio logs.fmt))
pack.ml
let src = Logs.Src.create "irmin.pack" ~doc:"irmin-pack backend" module Log = (val Logs.src_log src : Logs.LOG) let current_version = "00000001" let ( -- ) = Int64.sub module type ELT = sig include Irmin.Type.S type hash val hash : t -> hash val magic : t -> char val encode_bin : dict:(string -> int option) -> offset:(hash -> int64 option) -> t -> hash -> (string -> unit) -> unit val decode_bin : dict:(int -> string option) -> hash:(int64 -> hash) -> string -> int -> t end module type S = sig include Irmin.CONTENT_ADDRESSABLE_STORE type index val v : ?fresh:bool -> ?readonly:bool -> ?lru_size:int -> index:index -> string -> [ `Read ] t Lwt.t val batch : [ `Read ] t -> ([ `Read | `Write ] t -> 'a Lwt.t) -> 'a Lwt.t val unsafe_append : 'a t -> key -> value -> unit val unsafe_mem : 'a t -> key -> bool val unsafe_find : 'a t -> key -> value option val sync : 'a t -> unit type integrity_error = [ `Wrong_hash | `Absent_value ] val integrity_check : offset:int64 -> length:int -> key -> 'a t -> (unit, integrity_error) result val close : 'a t -> unit Lwt.t end module type MAKER = sig type key type index module Make (V : ELT with type hash := key) : S with type key = key and type value = V.t and type index = index end open Lwt.Infix module Table (K : Irmin.Type.S) = Hashtbl.Make (struct type t = K.t let hash (t : t) = Irmin.Type.short_hash K.t t let equal (x : t) (y : t) = Irmin.Type.equal K.t x y end) module Cache (K : Irmin.Type.S) = Lru.Make (struct type t = K.t let hash (t : t) = Irmin.Type.short_hash K.t t let equal (x : t) (y : t) = Irmin.Type.equal K.t x y end) let with_cache = IO.with_cache module IO = IO.Unix module File (Index : Pack_index.S) (K : Irmin.Hash.S with type t = Index.key) = struct module Tbl = Table (K) module Dict = Pack_dict type index = Index.t type 'a t = { block : IO.t; index : Index.t; dict : Dict.t; lock : Lwt_mutex.t; mutable open_instances : int; } let clear t = IO.clear t.block; Index.clear t.index; Dict.clear t.dict let valid t = if 
t.open_instances <> 0 then ( t.open_instances <- t.open_instances + 1; true ) else false let unsafe_v ~index ~fresh ~readonly file = let root = Filename.dirname file in let lock = Lwt_mutex.create () in let dict = Dict.v ~fresh ~readonly root in let block = IO.v ~fresh ~version:current_version ~readonly file in if IO.version block <> current_version then Fmt.failwith "invalid version: got %S, expecting %S" (IO.version block) current_version; { block; index; lock; dict; open_instances = 1 } let (`Staged v) = with_cache ~clear ~valid ~v:(fun index -> unsafe_v ~index) "store.pack" type key = K.t let close t = t.open_instances <- t.open_instances - 1; if t.open_instances = 0 then ( if not (IO.readonly t.block) then IO.sync t.block; IO.close t.block; Dict.close t.dict ) module Make (V : ELT with type hash := K.t) = struct module Tbl = Table (K) module Lru = Cache (K) type nonrec 'a t = { pack : 'a t; lru : V.t Lru.t; staging : V.t Tbl.t; mutable open_instances : int; } type key = K.t type value = V.t type index = Index.t let clear t = clear t.pack; Tbl.clear t.staging (* we need another cache here, as we want to share the LRU and staging caches too. 
*) let roots = Hashtbl.create 10 let create = Lwt_mutex.create () let valid t = if t.open_instances <> 0 then ( t.open_instances <- t.open_instances + 1; true ) else false let unsafe_v_no_cache ~fresh ~readonly ~lru_size ~index root = let pack = v index ~fresh ~readonly root in let staging = Tbl.create 127 in let lru = Lru.create lru_size in { staging; lru; pack; open_instances = 1 } let unsafe_v ?(fresh = false) ?(readonly = false) ?(lru_size = 10_000) ~index root = try let t = Hashtbl.find roots (root, readonly) in if valid t then ( if fresh then clear t; t ) else ( Hashtbl.remove roots (root, readonly); raise Not_found ) with Not_found -> let t = unsafe_v_no_cache ~fresh ~readonly ~lru_size ~index root in if fresh then clear t; Hashtbl.add roots (root, readonly) t; t let v ?fresh ?readonly ?lru_size ~index root = Lwt_mutex.with_lock create (fun () -> let t = unsafe_v ?fresh ?readonly ?lru_size ~index root in Lwt.return t) let pp_hash = Irmin.Type.pp K.t let io_read_and_decode_hash ~off t = let buf = Bytes.create K.hash_size in let n = IO.read t.pack.block ~off buf in assert (n = K.hash_size); let _, v = Irmin.Type.decode_bin ~headers:false K.t (Bytes.unsafe_to_string buf) 0 in v let unsafe_mem t k = Log.debug (fun l -> l "[pack] mem %a" pp_hash k); Tbl.mem t.staging k || Lru.mem t.lru k || Index.mem t.pack.index k let mem t k = Lwt_mutex.with_lock create (fun () -> let b = unsafe_mem t k in Lwt.return b) let check_key k v = let k' = V.hash v in if Irmin.Type.equal K.t k k' then Ok () else Error (k, k') exception Invalid_read let io_read_and_decode ~off ~len t = if (not (IO.readonly t.pack.block)) && off > IO.offset t.pack.block then raise Invalid_read; let buf = Bytes.create len in let n = IO.read t.pack.block ~off buf in if n <> len then raise Invalid_read; let hash off = io_read_and_decode_hash ~off t in let dict = Dict.find t.pack.dict in V.decode_bin ~hash ~dict (Bytes.unsafe_to_string buf) 0 let unsafe_find t k = Log.debug (fun l -> l "[pack] find %a" 
pp_hash k); Stats.incr_finds (); match Tbl.find t.staging k with | v -> Lru.add t.lru k v; Some v | exception Not_found -> ( match Lru.find t.lru k with | v -> Some v | exception Not_found -> ( Stats.incr_cache_misses (); match Index.find t.pack.index k with | None -> None | Some (off, len, _) -> let v = io_read_and_decode ~off ~len t in (check_key k v |> function | Ok () -> () | Error (expected, got) -> Fmt.failwith "corrupted value: got %a, expecting %a." pp_hash got pp_hash expected); Lru.add t.lru k v; Some v ) ) let find t k = Lwt_mutex.with_lock t.pack.lock (fun () -> let v = unsafe_find t k in Lwt.return v) let cast t = (t :> [ `Read | `Write ] t) let sync t = Dict.sync t.pack.dict; IO.sync t.pack.block; Index.flush t.pack.index; Tbl.clear t.staging type integrity_error = [ `Wrong_hash | `Absent_value ] let integrity_check ~offset ~length k t = try let value = io_read_and_decode ~off:offset ~len:length t in match check_key k value with | Ok () -> Ok () | Error _ -> Error `Wrong_hash with Invalid_read -> Error `Absent_value let batch t f = f (cast t) >>= fun r -> if Tbl.length t.staging = 0 then Lwt.return r else ( sync t; Lwt.return r ) let auto_flush = 1024 let unsafe_append t k v = match unsafe_mem t k with | true -> () | false -> Log.debug (fun l -> l "[pack] append %a" pp_hash k); let offset k = match Index.find t.pack.index k with | None -> Stats.incr_appended_hashes (); None | Some (off, _, _) -> Stats.incr_appended_offsets (); Some off in let dict = Dict.index t.pack.dict in let off = IO.offset t.pack.block in V.encode_bin ~offset ~dict v k (IO.append t.pack.block); let len = Int64.to_int (IO.offset t.pack.block -- off) in Index.add t.pack.index k (off, len, V.magic v); if Tbl.length t.staging >= auto_flush then sync t else Tbl.add t.staging k v; Lru.add t.lru k v let append t k v = Lwt_mutex.with_lock t.pack.lock (fun () -> unsafe_append t k v; Lwt.return_unit) let add t v = let k = V.hash v in append t k v >|= fun () -> k let unsafe_add t k v = 
append t k v let unsafe_close t = t.open_instances <- t.open_instances - 1; if t.open_instances = 0 then ( Log.debug (fun l -> l "[pack] close %s" (IO.name t.pack.block)); Tbl.clear t.staging; ignore (Lru.clear t.lru); close t.pack ) let close t = Lwt_mutex.with_lock t.pack.lock (fun () -> unsafe_close t; Lwt.return_unit) end end
(* * Copyright (c) 2013-2019 Thomas Gazagnaire <thomas@gazagnaire.org> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
unit_naming.ml
(* TEST modules = "camlCase.ml" * setup-ocamlc.byte-build-env ** ocamlc.byte ocamlc_byte_exit_status = "2" *** check-ocamlc.byte-output *) print_int Camlcase.answer
(* TEST modules = "camlCase.ml" * setup-ocamlc.byte-build-env ** ocamlc.byte ocamlc_byte_exit_status = "2" *** check-ocamlc.byte-output *)
equal.c
/*=============================================================================

    This file is part of Antic.

    Antic is free software: you can redistribute it and/or modify it under
    the terms of the GNU Lesser General Public License (LGPL) as published
    by the Free Software Foundation; either version 2.1 of the License, or
    (at your option) any later version. See <http://www.gnu.org/licenses/>.

=============================================================================*/

/******************************************************************************

    Copyright (C) 2013 William Hart

******************************************************************************/

#include "nf_elem.h"

/* Exact equality test for two number field elements that does NOT assume the
   denominators agree: when they differ, the numerators are cross-multiplied
   (a_num * b_den vs b_num * a_den) and compared exactly.  Before doing the
   expensive multiplications, a cheap bit-length bound is used to reject
   obviously unequal operands early.

   NOTE(review): in the linear and quadratic branches the slack term is
   d = bits(aden) - bits(bden) + 1, whereas the generic branch uses the
   opposite orientation, d = bits(den_b) - bits(den_a) + 1.  The generic
   orientation is the one consistent with a_num*b_den = b_num*a_den; the
   linear/quadratic form can only misfire (false "unequal") when the inputs
   are not in canonical form -- confirm against callers before changing. */
int _nf_elem_equal(const nf_elem_t a, const nf_elem_t b, const nf_t nf)
{
   if (nf->flag & NF_LINEAR)
   {
      /* Degree-1 field: a single numerator/denominator pair each. */
      slong d, bits1, bits2;
      int res = 1;

      const fmpz * const anum = LNF_ELEM_NUMREF(a);
      const fmpz * const bnum = LNF_ELEM_NUMREF(b);
      const fmpz * const aden = LNF_ELEM_DENREF(a);
      const fmpz * const bden = LNF_ELEM_DENREF(b);

      fmpz_t t1, t2;

      /* Same denominator: numerators decide. */
      if (fmpz_equal(aden, bden))
         return fmpz_equal(anum, bnum);

      /* Quick bit-length rejection (see NOTE above about the sign of d). */
      d = fmpz_bits(aden) - fmpz_bits(bden) + 1;

      bits1 = fmpz_bits(anum);
      bits2 = fmpz_bits(bnum);

      if (!(bits1 == 0 && bits2 == 0) && (ulong) (bits1 - bits2 + d) > 2)
         return 0;

      /* Exact cross-multiplication check. */
      fmpz_init(t1);
      fmpz_init(t2);

      fmpz_mul(t1, anum, bden);
      fmpz_mul(t2, bnum, aden);

      if (!fmpz_equal(t1, t2))
         res = 0;

      fmpz_clear(t1);
      fmpz_clear(t2);

      return res;
   } else if (nf->flag & NF_QUADRATIC)
   {
      /* Degree-2 field: two numerator coefficients, one denominator each. */
      slong d, bits1, bits2;
      int res = 1;

      const fmpz * const anum = QNF_ELEM_NUMREF(a);
      const fmpz * const bnum = QNF_ELEM_NUMREF(b);
      const fmpz * const aden = QNF_ELEM_DENREF(a);
      const fmpz * const bden = QNF_ELEM_DENREF(b);

      fmpz_t t1, t2;

      if (fmpz_equal(aden, bden))
         return fmpz_equal(anum, bnum) && fmpz_equal(anum + 1, bnum + 1);

      /* Quick bit-length rejection on both coefficients. */
      d = fmpz_bits(aden) - fmpz_bits(bden) + 1;

      bits1 = fmpz_bits(anum + 1);
      bits2 = fmpz_bits(bnum + 1);

      if (!(bits1 == 0 && bits2 == 0) && (ulong) (bits1 - bits2 + d) > 2)
         return 0;

      bits1 = fmpz_bits(anum);
      bits2 = fmpz_bits(bnum);

      if (!(bits1 == 0 && bits2 == 0) && (ulong) (bits1 - bits2 + d) > 2)
         return 0;

      /* Exact cross-multiplication check, coefficient by coefficient. */
      fmpz_init(t1);
      fmpz_init(t2);

      fmpz_mul(t1, anum, bden);
      fmpz_mul(t2, bnum, aden);

      if (!fmpz_equal(t1, t2))
      {
         res = 0;
         goto cleanup;
      }

      fmpz_mul(t1, anum + 1, bden);
      fmpz_mul(t2, bnum + 1, aden);

      if (!fmpz_equal(t1, t2))
      {
         res = 0;
         goto cleanup;
      }

cleanup:
      fmpz_clear(t1);
      fmpz_clear(t2);

      return res;
   } else
   {
      /* General degree: polynomial representation. */
      const slong len1 = NF_ELEM(a)->length;
      const slong len2 = NF_ELEM(b)->length;

      /* Different polynomial lengths can never be equal. */
      if (len1 != len2)
         return 0;

      if (fmpz_equal(fmpq_poly_denref(NF_ELEM(a)), fmpq_poly_denref(NF_ELEM(b))))
         return _fmpz_vec_equal(NF_ELEM_NUMREF(a), NF_ELEM_NUMREF(b), len1);
      else
      {
         slong i;
         slong d = fmpz_bits(fmpq_poly_denref(NF_ELEM(b)))
                 - fmpz_bits(fmpq_poly_denref(NF_ELEM(a))) + 1;

         fmpz * p1 = NF_ELEM_NUMREF(a);
         fmpz * p2 = NF_ELEM_NUMREF(b);

         fmpz_t gcd, den1, den2;
         fmpz * t1, * t2;
         int equal;

         /* Per-coefficient quick bit-length rejection. */
         for (i = 0; i < len1; i++)
         {
            slong b1 = fmpz_bits(p1 + i);
            slong b2 = fmpz_bits(p2 + i);

            if (!(b1 == 0 && b2 == 0) && (ulong) (b1 - b2 + d) > 2)
               return 0;
         }

         fmpz_init(gcd);
         fmpz_init(den1);
         fmpz_init(den2);

         /* TODO: possibly only compute GCD if it will save time */
         /* Cross-multiply by the cofactors den1 = den_a/g, den2 = den_b/g
            so the scalar multiplications stay as small as possible. */
         fmpz_gcd(gcd, fmpq_poly_denref(NF_ELEM(a)), fmpq_poly_denref(NF_ELEM(b)));
         fmpz_divexact(den1, fmpq_poly_denref(NF_ELEM(a)), gcd);
         fmpz_divexact(den2, fmpq_poly_denref(NF_ELEM(b)), gcd);

         t1 = _fmpz_vec_init(len1);
         t2 = _fmpz_vec_init(len1);

         _fmpz_vec_scalar_mul_fmpz(t1, p1, len1, den2);
         _fmpz_vec_scalar_mul_fmpz(t2, p2, len2, den1);

         equal = _fmpz_vec_equal(t1, t2, len1);

         fmpz_clear(gcd);
         fmpz_clear(den1);
         fmpz_clear(den2);

         _fmpz_vec_clear(t1, len1);
         _fmpz_vec_clear(t2, len1);

         return equal;
      }
   }
}

/* Fast equality test that compares representations componentwise.  Elements
   whose denominators differ are reported unequal, so this is only an exact
   equality test when both inputs are in canonical (reduced) form --
   NOTE(review): confirm that all callers maintain canonical form; otherwise
   use _nf_elem_equal above. */
int nf_elem_equal(const nf_elem_t a, const nf_elem_t b, const nf_t nf)
{
   if (nf->flag & NF_LINEAR)
   {
      if (!fmpz_equal(LNF_ELEM_DENREF(a), LNF_ELEM_DENREF(b)))
         return 0;

      if (!fmpz_equal(LNF_ELEM_NUMREF(a), LNF_ELEM_NUMREF(b)))
         return 0;

      return 1;
   } else if (nf->flag & NF_QUADRATIC)
   {
      if (!fmpz_equal(QNF_ELEM_DENREF(a), QNF_ELEM_DENREF(b)))
         return 0;

      if (!fmpz_equal(QNF_ELEM_NUMREF(a), QNF_ELEM_NUMREF(b)))
         return 0;

      if (!fmpz_equal(QNF_ELEM_NUMREF(a) + 1, QNF_ELEM_NUMREF(b) + 1))
         return 0;

      return 1;
   } else
   {
      const slong len1 = NF_ELEM(a)->length;
      const slong len2 = NF_ELEM(b)->length;

      if (len1 != len2)
         return 0;

      if (fmpz_equal(fmpq_poly_denref(NF_ELEM(a)), fmpq_poly_denref(NF_ELEM(b))))
         return _fmpz_vec_equal(NF_ELEM_NUMREF(a), NF_ELEM_NUMREF(b), len1);
      else
         return 0;
   }
}
/*============================================================================= This file is part of Antic. Antic is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <http://www.gnu.org/licenses/>. =============================================================================*/
bnf_pp.ml
open Format open Bnf_spec.Bnf open Spec let before_first iterator f_begin f_end fall = let start = ref true in let do_el el = if !start then (start := false; fall el) else ( f_begin (); let res = fall el in f_end (); res) in iterator do_el let string_of_t t = Printf.sprintf "%S" t let string_of_nt nt = Printf.sprintf "<%S>" nt let string_of_symbol = function | T t -> string_of_t t | NT nt -> string_of_nt nt let rec pp_prod ppf = function | [] -> () | [h] -> fprintf ppf "%s" (string_of_symbol h) | h::t -> fprintf ppf "%s@ " (string_of_symbol h); pp_prod ppf t let pp_prods ppf = before_first ProdSet.iter (fun _ -> fprintf ppf "@ @[| ") (pp_close_box ppf) (fun (_, sym_list) -> fprintf ppf "@[%a@]" pp_prod sym_list) let pp_live_prods ppf = before_first ProdMap.iter (fun _ -> fprintf ppf "@ @[| ") (pp_close_box ppf) (fun (_, sym_list) n -> fprintf ppf "@[%a (%d)@]" pp_prod sym_list n) let pp_nt ppf nt = fprintf ppf "@[<0>%s @[<1>:= %a .@]@]" (string_of_nt nt) pp_prods let pp_live_nt ppf nt (d, prods) = fprintf ppf "@[<0>%s (%d) @[<1>:= %a .@]@]" (string_of_nt nt) d pp_live_prods prods let nop _ = () let pp_nt_map ppf = before_first NTMap.iter force_newline nop (pp_nt ppf) let pp_live_nts ppf = before_first NTMap.iter force_newline nop (pp_live_nt ppf) let pp_ts ppf = before_first TSet.iter force_newline nop (fun t -> pp_print_string ppf (string_of_t t)) let pp_nts ppf = before_first NTSet.iter force_newline nop (fun nt -> pp_print_string ppf (string_of_nt nt)) let pp_prods ppf = before_first ProdSet.iter force_newline nop (fun (_, sl) -> pp_prod ppf sl)
test_cnf.ml
open Base open Stdio open Expect_test_helpers_base open Hardcaml_verify let a = Cnf.Literal.create "a" ~width:2 let b = Cnf.Literal.create1 "b" let cnf = Cnf.( Conjunction.of_list [ Disjunction.of_list [ a.(0); a.(1); ~:b ]; Disjunction.of_list [ ~:(a.(1)); b ] ] |> create) ;; let%expect_test "empty" = let empty = Cnf.(Conjunction.of_list [] |> create) in print_s [%message (empty : Cnf.t)]; [%expect {| (empty ( (input_bits ()) (input_map ()) (int_map ()) (conjunction ()) (number_of_variables 0) (number_of_clauses 0))) |}] ;; let%expect_test "empty disjunction" = let empty = Cnf.(Conjunction.of_list [ Disjunction.of_list [] ] |> create) in print_s [%message (empty : Cnf.t)]; [%expect {| (empty ( (input_bits ()) (input_map ()) (int_map ()) (conjunction (())) (number_of_variables 0) (number_of_clauses 1))) |}] ;; let%expect_test "show data structure" = print_s [%message (cnf : Cnf.t)]; [%expect {| (cnf ( (input_bits (a/0/1 a/1/1 b/0/2)) (input_map ( (1 a/0/1) (2 a/1/1) (3 b/0/2))) (int_map ( (a/0/1 1) (a/1/1 2) (b/0/2 3))) (conjunction ( ((P a/0/1) (P a/1/1) (N b/0/2)) ((N a/1/1) (P b/0/2)))) (number_of_variables 3) (number_of_clauses 2))) |}] ;; let%expect_test "print problem" = printf "vars=%i clauses=%i\n" (Cnf.number_of_variables cnf) (Cnf.number_of_clauses cnf); Cnf.iter cnf ~f:(fun disjunction -> Cnf.Disjunction.iter disjunction ~f:(fun literal -> printf "%s " (Cnf.Literal.to_string literal)); printf "\n"); [%expect {| vars=3 clauses=2 a/0/1 a/1/1 -b/0/2 -a/1/1 b/0/2 |}]; (* fancy pretty printer *) let cnf = Cnf.fold cnf ~init:[] ~f:(fun lst disjunction -> let terms = Cnf.Disjunction.fold ~init:[] disjunction ~f:(fun lst literal -> Cnf.Literal.to_string literal :: lst) |> List.rev in String.concat ~sep:"" [ "("; String.concat ~sep:" V " terms; ")" ] :: lst) |> List.rev |> String.concat ~sep:" . " in print_s [%message cnf]; [%expect {| "(a/0/1 V a/1/1 V -b/0/2) . (-a/1/1 V b/0/2)" |}] ;;
ocurl.ml
(* libcurl write callback: append [data] to [accum] and return the number of
   bytes consumed (libcurl requires the consumed count as the return value). *)
let writer accum data =
  Buffer.add_string accum data;
  String.length data

(* Print the accumulated response body to stdout. *)
let showContent content =
  Printf.printf "%s" (Buffer.contents content);
  flush stdout

(* Print total transfer time and the final (post-redirect) URL. *)
let showInfo connection =
  Printf.printf "Time: %f\nURL: %s\n"
    (Curl.get_totaltime connection)
    (Curl.get_effectiveurl connection)

(* Fetch [url] on an already-configured [connection].
   NOTE(review): currently unused by the driver below; kept for API parity. *)
let getContent connection url =
  Curl.set_url connection url;
  Curl.perform connection

let () =
  Curl.global_init Curl.CURLINIT_GLOBALALL;
  begin
    (* Fail with a usage message instead of an uncaught Invalid_argument
       when no URL is given on the command line. *)
    if Array.length Sys.argv < 2 then begin
      Printf.fprintf stderr "Usage: %s <url>\n" Sys.argv.(0);
      exit 1
    end;
    let result = Buffer.create 16384
    and errorBuffer = ref "" in
    try
      let connection = Curl.init () in
      (* Inner try guarantees the handle is cleaned up even when the
         transfer raises; the exception is then re-raised for reporting. *)
      (try
         Curl.set_errorbuffer connection errorBuffer;
         Curl.set_writefunction connection (writer result);
         Curl.set_followlocation connection true;
         Curl.set_url connection Sys.argv.(1);
         Curl.perform connection;
         showContent result;
         showInfo connection;
         Curl.cleanup connection
       with e ->
         Curl.cleanup connection;
         raise e)
    with
    | Curl.CurlException (_reason, _code, _str) ->
        (* libcurl has already written a description into errorBuffer. *)
        Printf.fprintf stderr "Error: %s\n" !errorBuffer
    | Failure s ->
        Printf.fprintf stderr "Caught exception: %s\n" s
  end;
  Curl.global_cleanup ()
(* * ocurl.ml * * Copyright (c) 2003-2008, Lars Nilsson, <lars@quantumchamaleon.com> *)
p2p_discovery.mli
(** Local peer discovery.

    This module manages the discovery of local peers through UDP
    broadcasting. It is composed of two workers:

    - The sender worker whose role is to broadcast discovery messages.

    - The answer worker whose role is to listen discovery messages and
      register new peers in the current pool.

    Discovery messages are composed of an arbitrary key, the listening
    port and the peer id of the current peer. *)

(** Type of a discovery worker. *)
type t

(** [create ~listening_port ~discovery_port ~discovery_addr pool peer_id]
    returns a discovery worker registering local peers to the [pool]
    and broadcasting discovery messages with the [peer_id] and
    the [listening_port] through the address
    [discovery_addr:discovery_port].
    NOTE(review): [trust_discovered_peers] presumably marks peers found
    this way as trusted in the pool — confirm against the implementation. *)
val create :
  listening_port:int ->
  discovery_port:int ->
  discovery_addr:Ipaddr.V4.t ->
  trust_discovered_peers:bool ->
  ('a, 'b, 'c) P2p_pool.t ->
  P2p_peer.Table.key ->
  t

(** [activate t] starts the workers of [t].
    NOTE(review): inferred from the worker description above — confirm
    against the implementation. *)
val activate : t -> unit

(** [wakeup t] sends a signal to the sender machine of [t], asking it
    to immediately proceed to broadcasting. *)
val wakeup : t -> unit

(** [shutdown t] returns when [t] has completed shutdown. *)
val shutdown : t -> unit Lwt.t
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* Copyright (c) 2019 Nomadic Labs, <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
test_enums.ml
(* OUnit tests for ctypes enum support: enums as struct members, in arrays,
   as function arguments/results, with signed and default values. Functorized
   over the generated stub bindings. *)
open OUnit2
open Ctypes

module Build_enum_stub_tests
    (S : Cstubs.Types.TYPE
     with type 'a typ = 'a Ctypes.typ
      and type ('a, 's) field = ('a, 's) Ctypes.field) =
struct
  module M = Types.Struct_stubs(S)
  open M

  (* Build, read back, and reverse a C linked list whose payload is an
     enum value ([frt] field, [next] pointer field of [fruit_cell]). *)
  let test_enum_struct_members _ =
    (* In-place list reversal over [next] pointers. *)
    let reverse cell =
      let rec loop prev cell =
        match cell with
          None -> prev
        | Some c ->
          let n = getf !@c next in
          let () = setf !@c next prev in
          loop cell n
      in
      loop None cell
    in
    (* Convert the C list to an OCaml list of enum values. *)
    let as_list cell =
      let rec loop l = function
          None -> List.rev l
        | Some c -> loop (getf !@c frt :: l) (getf !@c next)
      in
      loop [] cell
    in
    (* Allocate a C list from an OCaml list of enum values. *)
    let rec of_list l =
      match l with
        [] -> None
      | f :: fs ->
        let c = make fruit_cell in
        let n = of_list fs in
        let () = setf c frt f in
        let () = setf c next n in
        Some (addr c)
    in
    begin
      let open Types in
      let l = of_list [Apple; Apple; Pear; Banana] in
      assert_equal [Apple; Apple; Pear; Banana] (as_list l);
      assert_equal [Banana; Pear; Apple; Apple] (as_list (reverse l));
      assert_equal [] (as_list None);
    end

  (* Store and retrieve (polymorphic-variant) enum values in a C array. *)
  let test_enum_arrays _ =
    let module Array = CArray in
    let a = Array.make bears 4 in
    begin
      a.(0) <- `Edward;
      a.(1) <- `Winnie;
      a.(2) <- `Paddington;
      a.(3) <- `Edward;
      assert_equal [`Edward; `Winnie; `Paddington; `Edward] (Array.to_list a)
    end

  module Build_call_tests
      (F : Cstubs.FOREIGN with type 'a result = 'a
                           and type 'a return = 'a) =
  struct
    module F = Functions.Stubs(F)
    open F
    open M

    (* Round-trip enum values through the C [next_fruit] function. *)
    let test_passing_returning_enums _ =
      let open Types in
      begin
        assert_equal Apple (next_fruit Orange);
        assert_equal Banana (next_fruit Apple);
        assert_equal Pear (next_fruit Banana);
        assert_equal Orange (next_fruit Pear);
      end

    (* Enums with negative representation values survive the FFI. *)
    let test_signed_enums _ =
      begin
        assert_equal (-1) (classify_integer (-3));
        assert_equal 1 (classify_integer 4);
      end

    (* An enum value outside the declared constructors maps to the default. *)
    let test_default_enums _ =
      begin
        assert_equal 0 (out_of_range ())
      end
  end
end

module Enum_stubs_tests = Build_enum_stub_tests(Generated_struct_bindings)
module Combined_stub_tests = Enum_stubs_tests.Build_call_tests(Generated_bindings)

let suite = "Enum tests" >:::
  ["passing and returning enums" >:: Combined_stub_tests.test_passing_returning_enums;
   "enums with signed values" >:: Combined_stub_tests.test_signed_enums;
   "enums with default values" >:: Combined_stub_tests.test_default_enums;
   "enums as struct members" >:: Enum_stubs_tests.test_enum_struct_members;
   "arrays of enums" >:: Enum_stubs_tests.test_enum_arrays;
  ]

let _ = run_test_tt_main suite
(* * Copyright (c) 2013 Jeremy Yallop. * * This file is distributed under the terms of the MIT License. * See the file LICENSE for details. *)
testing.ml
(* Minimal test harness: typed equality checks, pretty printing of values
   and backtraces, pass/fail accounting, and combinators ([$], [@->], [ret*])
   for tracing function applications under test. *)

(* Value equality and pretty printing *)

type 'a eq = 'a -> 'a -> bool
type 'a pp = Format.formatter -> 'a -> unit

(* Pretty printers *)

let pp = Format.fprintf
let pp_exn ppf v = pp ppf "%s" (Printexc.to_string v)
let pp_bool ppf v = pp ppf "%b" v
let pp_char ppf v = pp ppf "%C" v
let pp_str ppf v = pp ppf "%S" v
let pp_int = Format.pp_print_int
let pp_float ppf v = pp ppf "%.10f" (* bof... *) v
let pp_int32 ppf v = pp ppf "%ld" v
let pp_int64 ppf v = pp ppf "%Ld" v
let pp_text = Format.pp_print_text
let pp_list pp_v ppf l =
  let pp_sep ppf () = pp ppf ";@," in
  pp ppf "@[<1>[%a]@]" (Format.pp_print_list ~pp_sep pp_v) l

let pp_option pp_v ppf = function
| None -> Format.fprintf ppf "None"
| Some v -> Format.fprintf ppf "Some %a" pp_v v

let pp_slot_loc ppf l =
  pp ppf "%s:%d.%d-%d:" l.Printexc.filename l.Printexc.line_number
    l.Printexc.start_char l.Printexc.end_char

(* Print a backtrace, skipping frames that belong to the harness itself. *)
let pp_bt ppf bt = match Printexc.backtrace_slots bt with
| None -> pp ppf "@,@[%a@]" pp_text "No backtrace. Did you compile with -g ?"
| Some slots ->
    let rec loop = function
    | [] -> assert false
    | s :: ss ->
        begin match Printexc.Slot.location s with
        | None -> ()
        | Some l when l.Printexc.filename = "test/testing.ml" ||
                      l.Printexc.filename = "test/test.ml" -> ()
        | Some l -> pp ppf "@,%a" pp_slot_loc l
        end;
        if ss <> [] then (loop ss) else ()
    in
    loop (Array.to_list slots)

(* Assertion counters *)

let fail_count = ref 0
let pass_count = ref 0

(* Logging *)

let log_part fmt = Format.printf fmt
let log ?header fmt = match header with
| Some h -> Format.printf ("[%s] " ^^ fmt ^^ "@.") h
| None -> Format.printf (fmt ^^ "@.")

let log_results () =
  let total = !pass_count + !fail_count in
  match !fail_count with
  | 0 -> log ~header:"OK" "All %d assertions succeeded !@." total; true
  | 1 -> log ~header:"FAIL" "1 failure out of %d assertions" total; false
  (* FIX: use the matched count [n] (it was bound but unused). *)
  | n -> log ~header:"FAIL" "%d failures out of %d assertions" n total; false

let log_fail msg bt = log ~header:"FAIL" "@[<v>@[%a@]%a@]" pp_text msg pp_bt bt
let log_unexpected_exn ~header exn bt =
  (* FIX: honour the caller-supplied [header]; the previous code hard-coded
     "SUITE", mislabelling aborts reported from [block] and [run_test]. *)
  log ~header "@[<v>@[ABORTED: unexpected exception:@]@,%a%a@]"
    pp_exn exn pp_bt bt

(* Testing scopes *)

exception Fail
exception Fail_handled

(* Run [f], absorbing harness failures; unexpected exceptions are counted. *)
let block f =
  try f () with
  | Fail | Fail_handled -> ()
  | exn ->
      let bt = Printexc.get_raw_backtrace () in
      incr fail_count;
      log_unexpected_exn ~header:"BLOCK" exn bt

type test = string * (unit -> unit)
let test n f = n, f
let run_test (n, f) =
  log "* %s" n;
  try f () with
  | Fail | Fail_handled ->
      log ~header:"TEST" "ABORTED: a test failure blew the test scope"
  | exn ->
      let bt = Printexc.get_raw_backtrace () in
      incr fail_count;
      log_unexpected_exn ~header:"TEST" exn bt

type suite = string * test list
let suite n ts = n, ts
let run_suite (n, ts) =
  try log "%s" n; List.iter run_test ts with
  | exn ->
      let bt = Printexc.get_raw_backtrace () in
      incr fail_count;
      log_unexpected_exn ~header:"SUITE" exn bt

let run suites = List.iter run_suite suites

(* Passing and failing tests *)

let pass () = incr pass_count
let fail fmt =
  let bt = Printexc.get_callstack 10 in
  let fail _ = log_fail (Format.flush_str_formatter ()) bt in
  (incr fail_count; Format.kfprintf fail Format.str_formatter fmt)

(* Checking values *)

let pp_neq pp_v ppf (v, v') = pp ppf "@[%a@]@ <>@ @[%a@]@]" pp_v v pp_v v'
let fail_eq pp v v' = fail "%a" (pp_neq pp) (v, v')

(* [eq ~eq ~pp v v'] records a pass when [eq v v'], otherwise a failure. *)
let eq ~eq ~pp v v' = if eq v v' then pass () else fail_eq pp v v'
let eq_char = eq ~eq:(=) ~pp:pp_char
let eq_str = eq ~eq:(=) ~pp:pp_str
let eq_bool = eq ~eq:(=) ~pp:Format.pp_print_bool
let eq_int = eq ~eq:(=) ~pp:Format.pp_print_int
let eq_int32 = eq ~eq:(=) ~pp:pp_int32
let eq_int64 = eq ~eq:(=) ~pp:pp_int64
let eq_float = eq ~eq:(=) ~pp:pp_float
(* NaN is the only float for which [f <> f] holds. *)
let eq_nan f = if f <> f then pass () else fail "@[%a@]@ is@ not a NaN" pp_float f

let eq_option ~eq:eq_v ~pp =
  let eq_opt v v' = match v, v' with
  | Some v, Some v' -> eq_v v v'
  | None, None -> true
  | _ -> false
  in
  let pp = pp_option pp in
  fun v v' -> eq ~eq:eq_opt ~pp v v'

let eq_some = function
| Some _ -> pass ()
| None -> fail "None <> Some _"

let eq_none ~pp = function
| None -> pass ()
| Some v -> fail "@[%a <>@ None@]" pp v

let eq_list ~eq:eq_v ~pp:pp_v =
  let eql l l' =
    try List.for_all2 eq_v l l' with Invalid_argument _ -> false
  in
  fun l l' -> eq ~eq:eql ~pp:(pp_list pp_v) l l'

(* Tracing and checking function applications. *)

type app = (* Gathers information about the application *)
  { fail_count : int; (* fail_count checkpoint when the app starts *)
    pp_args : Format.formatter -> unit -> unit; }

(* FIX: underscore the unused formatter argument (silences warning 27). *)
let ctx () = { fail_count = -1; pp_args = (fun _ppf () -> ()); }

let log_app_raised app exn =
  log "@[<2>@[%a@]==> raised %a" app.pp_args () pp_exn exn
let pp_app app pp_v ppf v =
  pp ppf "@[<2>@[%a@]==>@ @[%a@]@]" app.pp_args () pp_v v

let log_app app pp_v v = log "%a" (pp_app app pp_v) v

(* Start a traced application: [f $ pp1 @-> pp2 @-> ret pp]. *)
let ( $ ) f k = k (ctx ()) f

let ( @-> ) (pp_v : 'a pp) k app f v =
  let pp_args ppf () = app.pp_args ppf (); pp ppf "%a@ " pp_v v in
  let fc = if app.fail_count = -1 then !fail_count else app.fail_count in
  let app = { fail_count = fc; pp_args } in
  try k app (f v) with
  | Fail ->
      log_app app pp_v v;
      raise Fail_handled
  | Fail_handled as e -> raise e
  | exn ->
      log_app_raised app exn;
      fail "unexpected exception %a raised" pp_exn exn;
      raise Fail_handled

(* Log the application only if a failure occurred during it. *)
let ret pp app v = if !fail_count <> app.fail_count then log_app app pp v; v

let ret_eq ~eq pp r app v =
  if eq r v then (pass (); ret pp app v) else
  (fail "@[<v>%a@,%a@]" (pp_neq pp) (r, v) (pp_app app pp) v;
   raise Fail_handled)

let ret_none pp app v = match v with
| None -> pass (); ret (pp_option pp) app v
| Some _ -> ret_eq ~eq:(=) (pp_option pp) None app v

let ret_some pp app v = match v with
| Some _ as v -> pass (); ret (pp_option pp) app v
| None as v ->
    fail "@[<v>Some _ <> None@,%a@]" (pp_app app (pp_option pp)) v;
    raise Fail_handled

let ret_get_option pp app v = match ret_some pp app v with
| Some v -> v
| None -> assert false

(* I think we could handle the following functions on app traced ones
   by enriching the app type and have alternate functions to $ for
   handling these cases. Note that the only place were we can check for
   these things are in the @-> combinator *)

let app_invalid ~pp f v =
  try let r = f v in fail "%a <> exception Invalid_arg _" pp r with
  | Invalid_argument _ -> pass ()
  | exn -> fail "exception %a <> exception Invalid_arg _" pp_exn exn

let app_exn ~pp e f v =
  try let r = f v in fail "%a <> exception %a" pp r pp_exn e with
  | exn when exn = e -> pass ()
  | exn -> fail "exception %a <> exception %a_" pp_exn exn pp_exn e

let app_raises ~pp f v =
  (* FIX: bind the exception as [_] — it was named but never used. *)
  try let r = f v in fail "%a <> exception _ " pp r with
  | _ -> pass ()

(*---------------------------------------------------------------------------
   Copyright (c) 2015 The astring programmers

   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
   copyright notice and this permission notice appear in all copies.

   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
   ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  ---------------------------------------------------------------------------*)
(*--------------------------------------------------------------------------- Copyright (c) 2015 The astring programmers. All rights reserved. Distributed under the ISC license, see terms at the end of the file. ---------------------------------------------------------------------------*)
enabled_if.mli
(** Parsing of [enabled_if] fields.
    NOTE(review): purpose inferred from the module name and the
    [Dune_lang]/[Blang] types — confirm against the implementation. *)

(** Which variables may appear in an [enabled_if] expression: either any
    variable, or only the listed ones, each paired with the syntax version
    it is available from. *)
type allowed_vars =
  | Any
  | Only of (string * Dune_lang.Syntax.Version.t) list

(** The commonly allowed variables, available [since] the given syntax
    version. *)
val common_vars : since:Dune_lang.Syntax.Version.t -> allowed_vars

(** [decode ~allowed_vars ?is_error ~since ()] is a fields parser producing
    the [Blang.t] condition of an [enabled_if] field, restricted to
    [allowed_vars]. *)
val decode :
     allowed_vars:allowed_vars
  -> ?is_error:bool
  -> since:Dune_lang.Syntax.Version.t option
  -> unit
  -> Blang.t Dune_lang.Decoder.fields_parser
optimizer.h
/* External API of the planner/optimizer for non-planner modules. */
#ifndef OPTIMIZER_H
#define OPTIMIZER_H

#include "nodes/parsenodes.h"

/* Test if an expression node represents a SRF call. Beware multiple eval! */
#define IS_SRF_CALL(node) \
	((IsA(node, FuncExpr) && ((FuncExpr *) (node))->funcretset) || \
	 (IsA(node, OpExpr) && ((OpExpr *) (node))->opretset))

/*
 * We don't want to include nodes/pathnodes.h here, because non-planner
 * code should generally treat PlannerInfo as an opaque typedef.
 * But we'd like such code to use that typedef name, so define the
 * typedef either here or in pathnodes.h, whichever is read first.
 */
#ifndef HAVE_PLANNERINFO_TYPEDEF
typedef struct PlannerInfo PlannerInfo;
#define HAVE_PLANNERINFO_TYPEDEF 1
#endif

/* Likewise for IndexOptInfo and SpecialJoinInfo. */
#ifndef HAVE_INDEXOPTINFO_TYPEDEF
typedef struct IndexOptInfo IndexOptInfo;
#define HAVE_INDEXOPTINFO_TYPEDEF 1
#endif
#ifndef HAVE_SPECIALJOININFO_TYPEDEF
typedef struct SpecialJoinInfo SpecialJoinInfo;
#define HAVE_SPECIALJOININFO_TYPEDEF 1
#endif

/* It also seems best not to include plannodes.h, params.h, or htup.h here */
struct PlannedStmt;
struct ParamListInfoData;
struct HeapTupleData;

/* in path/clausesel.c: */

extern Selectivity clause_selectivity(PlannerInfo *root,
									  Node *clause,
									  int varRelid,
									  JoinType jointype,
									  SpecialJoinInfo *sjinfo);
extern Selectivity clauselist_selectivity_simple(PlannerInfo *root,
												 List *clauses,
												 int varRelid,
												 JoinType jointype,
												 SpecialJoinInfo *sjinfo,
												 Bitmapset *estimatedclauses);
extern Selectivity clauselist_selectivity(PlannerInfo *root,
										  List *clauses,
										  int varRelid,
										  JoinType jointype,
										  SpecialJoinInfo *sjinfo);

/* in path/costsize.c: */

/* widely used cost parameters */
extern PGDLLIMPORT double seq_page_cost;
extern PGDLLIMPORT double random_page_cost;
extern PGDLLIMPORT double cpu_tuple_cost;
extern PGDLLIMPORT double cpu_index_tuple_cost;
extern PGDLLIMPORT double cpu_operator_cost;
extern PGDLLIMPORT double parallel_tuple_cost;
extern PGDLLIMPORT double parallel_setup_cost;
extern PGDLLIMPORT int effective_cache_size;

extern double clamp_row_est(double nrows);

/* in path/indxpath.c: */

extern bool is_pseudo_constant_for_index(Node *expr, IndexOptInfo *index);
extern bool is_pseudo_constant_for_index_new(PlannerInfo *root,
											 Node *expr,
											 IndexOptInfo *index);

/* in plan/planner.c: */

/* possible values for force_parallel_mode */
typedef enum
{
	FORCE_PARALLEL_OFF,
	FORCE_PARALLEL_ON,
	FORCE_PARALLEL_REGRESS
} ForceParallelMode;

/* GUC parameters */
extern int	force_parallel_mode;
extern bool parallel_leader_participation;

extern struct PlannedStmt *planner(Query *parse, const char *query_string,
								   int cursorOptions,
								   struct ParamListInfoData *boundParams);

extern Expr *expression_planner(Expr *expr);
extern Expr *expression_planner_with_deps(Expr *expr,
										  List **relationOids,
										  List **invalItems);

extern bool plan_cluster_use_sort(Oid tableOid, Oid indexOid);
extern int	plan_create_index_workers(Oid tableOid, Oid indexOid);

/* in plan/setrefs.c: */

extern void extract_query_dependencies(Node *query,
									   List **relationOids,
									   List **invalItems,
									   bool *hasRowSecurity);

/* in prep/prepqual.c: */

extern Node *negate_clause(Node *node);
extern Expr *canonicalize_qual(Expr *qual, bool is_check);

/* in util/clauses.c: */

extern bool contain_mutable_functions(Node *clause);
extern bool contain_volatile_functions(Node *clause);
extern bool contain_volatile_functions_not_nextval(Node *clause);

extern Node *eval_const_expressions(PlannerInfo *root, Node *node);

extern Node *estimate_expression_value(PlannerInfo *root, Node *node);

extern Expr *evaluate_expr(Expr *expr, Oid result_type, int32 result_typmod,
						   Oid result_collation);

extern List *expand_function_arguments(List *args, Oid result_type,
									   struct HeapTupleData *func_tuple);

/* in util/predtest.c: */

extern bool predicate_implied_by(List *predicate_list, List *clause_list,
								 bool weak);
extern bool predicate_refuted_by(List *predicate_list, List *clause_list,
								 bool weak);

/* in util/tlist.c: */

extern int	count_nonjunk_tlist_entries(List *tlist);
extern TargetEntry *get_sortgroupref_tle(Index sortref,
										 List *targetList);
extern TargetEntry *get_sortgroupclause_tle(SortGroupClause *sgClause,
											List *targetList);
extern Node *get_sortgroupclause_expr(SortGroupClause *sgClause,
									  List *targetList);
extern List *get_sortgrouplist_exprs(List *sgClauses,
									 List *targetList);
extern SortGroupClause *get_sortgroupref_clause(Index sortref,
												List *clauses);
extern SortGroupClause *get_sortgroupref_clause_noerr(Index sortref,
													  List *clauses);

/* in util/var.c: */

/* Bits that can be OR'd into the flags argument of pull_var_clause() */
#define PVC_INCLUDE_AGGREGATES	0x0001	/* include Aggrefs in output list */
#define PVC_RECURSE_AGGREGATES	0x0002	/* recurse into Aggref arguments */
#define PVC_INCLUDE_WINDOWFUNCS 0x0004	/* include WindowFuncs in output list */
#define PVC_RECURSE_WINDOWFUNCS 0x0008	/* recurse into WindowFunc arguments */
#define PVC_INCLUDE_PLACEHOLDERS	0x0010	/* include PlaceHolderVars in
											 * output list */
#define PVC_RECURSE_PLACEHOLDERS	0x0020	/* recurse into PlaceHolderVar
											 * arguments */

extern Bitmapset *pull_varnos(Node *node);
extern Bitmapset *pull_varnos_of_level(Node *node, int levelsup);
extern Bitmapset *pull_varnos_new(PlannerInfo *root, Node *node);
extern Bitmapset *pull_varnos_of_level_new(PlannerInfo *root,
										   Node *node, int levelsup);
extern void pull_varattnos(Node *node, Index varno, Bitmapset **varattnos);
extern List *pull_vars_of_level(Node *node, int levelsup);
extern bool contain_var_clause(Node *node);
extern bool contain_vars_of_level(Node *node, int levelsup);
extern int	locate_var_of_level(Node *node, int levelsup);
extern List *pull_var_clause(Node *node, int flags);
extern Node *flatten_join_alias_vars(Query *query, Node *node);

#endif							/* OPTIMIZER_H */
/*------------------------------------------------------------------------- * * optimizer.h * External API for the Postgres planner. * * This header is meant to define everything that the core planner * exposes for use by non-planner modules. * * Note that there are files outside src/backend/optimizer/ that are * considered planner modules, because they're too much in bed with * planner operations to be treated otherwise. FDW planning code is an * example. For the most part, however, code outside the core planner * should not need to include any optimizer/ header except this one. * * Portions Copyright (c) 1996-2020, PostgreSQL Global Development Group * Portions Copyright (c) 1994, Regents of the University of California * * src/include/optimizer/optimizer.h * *------------------------------------------------------------------------- */
ex_http_post.ml
(* Example: send an HTTP POST request with ocaml-sfml's SFHttp and print the
   response body. *)
let () =
  let client = SFHttp.create () in
  (* Build the POST request before attaching a host to the client. *)
  let req = SFHttp.Request.create () in
  SFHttp.Request.setMethod req SFHttp.Post;
  SFHttp.Request.setUri req "/w/index.php?title=Special:Export&action=submit";
  SFHttp.Request.setBody req "OCaml";
  (* port 0 selects the scheme's default port. *)
  SFHttp.setHost client ~host:"en.wikipedia.org" ~port:0 ();
  let response =
    SFHttp.sendRequest client ~request:req ~timeout:(SFTime.of_seconds 5.0) ()
  in
  print_endline (SFHttp.Response.getBody response)
;;
omake_ast_lex.ml
# 12 "omake_ast_lex.mll" include Omake_pos.Make (struct let name = "Omake_ast_lex" end) let debug_lex = Lm_debug.create_debug (**) { debug_name = "debug-ast-lex"; debug_description = "Print tokens as they are scanned"; debug_value = false } (* * Current mode: * ModeNormal: normal lexing mode * ModeString s: parsing a literal string, dollar sequences are still expanded, * s is the quotation delimiter * ModeSkipString s :parsing a literal string, dollar sequences are still expanded, * s is the quotation delimiter, skip the token if it is a quote that is not s * ModeQuote s: parsing a literal string, dollar sequences are still expanded, * escape sequences are allowed, s is the quotation delimiter. * * GS. The main entry is lex_line (below). Depending on the current mode, * a different lexer function is invoked: * * ModeNormal: calls lex_main * ModeString: calls lex_string, for text in $+dquote (e.g. $"") * ModeSkipString: calls lex_skip_string. This is used after newlines inside * $-dquoted-text for checking whether the matching end * quote is following. Fairly technical. * ModeQuote: calls lex_quote, for text after dquote *) type mode = ModeNormal | ModeSkipString of string | ModeString of string | ModeQuote of string (* * The lexing mode. * ModeInitial: lexbuf is ready to be used * ModeIndent i: initial indentation has been scanned * ModeNormal: normal processing * * GS. LexModeInitial means we are at the beginning of the line. LexModeNormal * means that we've just lexed the left indentation. *) type lexmode = LexModeInitial | LexModeNormal of int (* * Parsing results. *) type parse_item = ParseExp of Omake_ast.exp list | ParseError | ParseEOF (* * This is the info for each indentation level. *) type info = { info_mode : mode; info_indent : int; info_parens : int option } (* * State of the lexer. 
 *)
type session =
   { (* The current location *)
     current_file            : Lm_symbol.t;
     mutable current_line    : int;
     mutable current_off     : int;
     mutable current_loc     : Lm_location.t;
     (* GS TODO: line/off/loc is now tracked by lexbuf (it wasn't in ancient
        versions of OCaml). Remove this here, and rely on lexbuf only. *)

     (* The current input buffer *)
     mutable current_buffer  : string;
     mutable current_index   : int;
     mutable current_prompt  : string;
     mutable current_fill_ok : bool;
     mutable current_eof     : bool;
     readline                : (string -> string);
     mutable is_interactive  : bool;

     (* The current lexbuf *)
     mutable current_lexbuf  : Lexing.lexbuf;
     mutable current_lexmode : lexmode;
     mutable current_token   : Omake_ast_parse.token;

     (* The current mode *)
     mutable current_mode    : mode;
     mutable current_parens  : int option;
     mutable current_indent  : int;
     mutable current_stack   : info list
   }

(************************************************************************
 * Printing. NOTICE: if new tokens are added, please update
 * the token list in omake_gen_parse.ml!!!
 *)
(* Human-readable rendering of a parser token, used by the debug-ast-lex
   tracing flag. *)
let pp_print_token buf = function
   Omake_ast_parse.TokEof _ ->
      Lm_printf.pp_print_string buf "<eof>"
 | TokEol _ ->
      Lm_printf.pp_print_string buf "<eol>"
 | TokWhite (s, _) ->
      Format.fprintf buf "whitespace: \"%s\"" s
 | TokLeftParen (s, _) ->
      Format.fprintf buf "left parenthesis: %s" s
 | TokRightParen (s, _) ->
      Format.fprintf buf "right parenthesis: %s" s
 | TokArrow (s, _) ->
      Format.fprintf buf "arrow: %s" s
 | TokComma (s, _) ->
      Format.fprintf buf "comma: %s" s
 | TokColon (s, _) ->
      Format.fprintf buf "colon: %s" s
 | TokDoubleColon (s, _) ->
      Format.fprintf buf "doublecolon: %s" s
 | TokNamedColon (s, _) ->
      Format.fprintf buf "named colon: %s" s
 | TokDollar (s, strategy, _) ->
      Format.fprintf buf "dollar: %s%a" s Omake_ast_print.pp_print_strategy strategy
 | TokEq (s, _) ->
      Format.fprintf buf "equals: %s" s
 | TokArray (s, _) ->
      Format.fprintf buf "array: %s" s
 | TokDot (s, _) ->
      Format.fprintf buf "dot: %s" s
 | TokId (s, _) ->
      Format.fprintf buf "id: %s" s
 | TokInt (s, _) ->
      Format.fprintf buf "int: %s" s
 | TokFloat (s, _) ->
      Format.fprintf buf "float: %s" s
 | TokKey (s, _) ->
      Format.fprintf buf "key: %s" s
 | TokKeyword (s, _) ->
      Format.fprintf buf "keyword: %s" s
 | TokCatch (s, _) ->
      Format.fprintf buf "catch: %s" s
 | TokClass (s, _) ->
      Format.fprintf buf "class: %s" s
 | TokVar (_, s, _) ->
      Format.fprintf buf "var: %s" s
 | TokOp (s, _) ->
      Format.fprintf buf "op: %s" s
 | TokString (s, _) ->
      Format.fprintf buf "string: \"%s\"" (String.escaped s)
 | TokBeginQuote (s, _) ->
      Format.fprintf buf "begin-quote: %s" s
 | TokEndQuote (s, _) ->
      Format.fprintf buf "end-quote: %s" s
 | TokBeginQuoteString (s, _) ->
      Format.fprintf buf "begin-quote-string: %s" s
 | TokEndQuoteString (s, _) ->
      Format.fprintf buf "end-quote-string: %s" s
 | TokStringQuote (s, _) ->
      Format.fprintf buf "quote: %s" s
 | TokVarQuote (_, s, _) ->
      Format.fprintf buf "key: %s" s

(*
 * Set state.
 *)
(* [create name readline] builds a fresh lexer session for input named
   [name], using [readline] to pull further input lines on demand. *)
let create name readline =
   let loc = Lm_location.bogus_loc name in
      { current_file     = Lm_symbol.add name;
        current_line     = 1;
        current_off      = 0;
        current_loc      = loc;
        current_buffer   = "";
        current_index    = 0;
        current_prompt   = ">";
        current_fill_ok  = true;
        current_eof      = true;
        readline         = readline;
        is_interactive   = false;
        current_lexbuf   = Lexing.from_string "";
        current_lexmode  = LexModeInitial;
        current_token    = TokEof loc;
        current_mode     = ModeNormal;
        current_parens   = None;
        current_indent   = 0;
        current_stack    = []
      }

(* let set_current_loc state loc = *)
(*   state.current_loc <- loc *)

let current_location state =
   state.current_loc

(*
 * Advance a line.
 *)
let set_next_line state lexbuf =
   let { current_line = line; current_file = file; _ } = state in
   let line = succ line in
      state.current_line <- line;
      state.current_off  <- Lexing.lexeme_start lexbuf;
      state.current_loc  <- Lm_location.create_loc file line 0 line 0

(*
 * Save the state.
 *)
(* Snapshot mode/parens/indent as an [info] frame pushed on the stack. *)
let save_mode state =
   let { current_mode   = mode';
         current_parens = parens;
         current_indent = indent;
         current_stack  = stack;
         _
       } = state
   in
   let info =
      { info_mode   = mode';
        info_parens = parens;
        info_indent = indent
      }
   in
      info :: stack

(*
 * Restore the state.
 *)
(* Pop one [info] frame back into the session; no-op on an empty stack. *)
let restore_mode state stack =
   match stack with
      info :: stack ->
         state.current_mode   <- info.info_mode;
         state.current_parens <- info.info_parens;
         state.current_indent <- info.info_indent;
         state.current_stack  <- stack
    | [] ->
         ()

(*
 * Push the new mode.
 *)
let push_mode state mode =
   let stack = save_mode state in
      state.current_mode   <- mode;
      state.current_parens <- None;
      state.current_stack  <- stack

(*
 * Pop the mode.
 *)
let pop_mode state =
   restore_mode state state.current_stack

(*
 * We are moving from a quotation to normal mode.
 * Start collecting parentheses.
 *)
let push_dollar state mode =
   push_mode state mode;
   state.current_parens <- Some 0

(* GS. The reason for counting open parentheses (in current_parens) is that
   a line feed is interpreted differently while there is an open parenthesis.
 *)

(*
 * Push a paren.
 *)
(* Increment the open-paren count; no-op when not counting (parens = None). *)
let push_paren state =
   let { current_parens = parens ; _} = state in
      match parens with
         Some i ->
            state.current_parens <- Some (succ i)
       | None ->
            ()

(*
 * When a paren is popped, if the level becomes zero,
 * then return to the previous mode.
 *)
let pop_paren state =
   let { current_parens = parens ; _} = state in
      match parens with
         Some i ->
            let i = pred i in
               if i = 0 then
                  pop_mode state
               else
                  state.current_parens <- Some i
       | None ->
            ()

(*
 * Get the location of the current lexeme.
 * We assume it is all on one line.
 *)
let lexeme_loc state lexbuf =
   let { current_line = line;
         current_off  = off;
         current_file = file;
         _
       } = state
   in
   let schar = Lexing.lexeme_start lexbuf - off in
   let echar = Lexing.lexeme_end lexbuf - off in
   let loc = Lm_location.create_loc file line schar line echar in
      state.current_loc <- loc;
      loc

(* GS TODO: use Lexing.lexeme_start_p and Lexing.lexeme_end_p instead *)

(*
 * Raise a syntax error exception.
 *)
let parse_error state =
   let lexbuf = state.current_lexbuf in
   let loc = lexeme_loc state lexbuf in
   let print_error buf =
      Format.fprintf buf "unexpected token: %a" pp_print_token state.current_token
   in
      raise (Omake_value_type.OmakeException (loc_exp_pos loc, LazyError print_error))

let syntax_error state s lexbuf =
   let loc = lexeme_loc state lexbuf in
      raise (Omake_value_type.OmakeException (loc_exp_pos loc, SyntaxError s))

(*
 * Get the string in the lexbuf.
 *)
let lexeme_string state lexbuf =
   let loc = lexeme_loc state lexbuf in
   let s = Lexing.lexeme lexbuf in
      s, loc

(*
 * Remove any trailing dots from the string.
 *)
(* let split_nl_string s = *)
(*   let len = String.length s in *)
(*   let rec search i = *)
(*     if i = len then *)
(*       s, "" *)
(*     else *)
(*       match s.[i] with *)
(*         '\n' *)
(*       | '\r' -> *)
(*           search (succ i) *)
(*       | _ -> *)
(*           String.sub s 0 i, String.sub s i (len - i) *)
(*   in *)
(*   search 0 *)

(*
 * Process a name.
 *)
(* Classify an identifier lexeme: language keywords, the special [catch] and
   [class] tokens, or a plain identifier. *)
let lexeme_name state lexbuf =
   let id, loc = lexeme_string state lexbuf in
      match id with
         "if"
       | "elseif"
       | "else"
       | "switch"
       | "match"
       | "select"
       | "case"
       | "default"
       | "section"
       | "include"
       | "extends"
       | "import"
       | "try"
       | "when"
       | "finally"
       | "raise"
       | "return"
       | "export"
       | "open"
       | "autoload"
       | "declare"
       | "value"
       | "with"
       | "as"
       | "while"
       | "do"
       | "set"
       | "program-syntax" ->
            Omake_ast_parse.TokKeyword (id, loc)
       | "catch" ->
            TokCatch (id, loc)
       | "class" ->
            TokClass (id, loc)
       | _ ->
            TokId (id, loc)

let lexeme_key state lexbuf =
   let id, loc = lexeme_string state lexbuf in
      Omake_ast_parse.TokKey (id, loc)

(*
 * Get the escaped char.
 * GS. e.g. "\X" -> "X"
 *)
let lexeme_esc state lexbuf =
   let s, loc = lexeme_string state lexbuf in
      String.make 1 s.[1], loc

(*
 * Single character variable.
 * GS. $x (not $(...)). Also $`x and $,x.
 *)
let lexeme_var state lexbuf =
   let s, loc = lexeme_string state lexbuf in
   let strategy, s =
      match s.[1] with
       | '`' -> Omake_ast.LazyApply, String.sub s 2 1
       | ',' -> EagerApply, String.sub s 2 1
       | _ -> NormalApply, String.sub s 1 1
   in
      Omake_ast_parse.TokVar (strategy, s, loc)

(*
 * Dollar sequence.
 *)
(* Split a "$|"-style lexeme into its application strategy and payload. *)
let lexeme_dollar_pipe state lexbuf =
   let s, loc = lexeme_string state lexbuf in
   let len = String.length s in
   let strategy, off =
      if len >= 2 then
         match s.[1] with
            '`' -> Omake_ast.LazyApply, 2
          | ',' -> EagerApply, 2
          | '|' -> NormalApply, 1
          | _ -> syntax_error state ("illegal character: " ^ s) lexbuf
      else
         NormalApply, 1
   in
   let s = String.sub s off (String.length s - off) in
      strategy, s, loc

(* GS. Unclear why there are two versions of this function.
   lexeme_dollar seems to be the usual function, for all of $` $, $$ *)
(* Turn a dollar lexeme into a token; "$$" collapses to a literal "$". *)
let lexeme_dollar state lexbuf =
   let s, loc = lexeme_string state lexbuf in
   let len = String.length s in
      if len >= 2 then
         match s.[1] with
            '`' -> Omake_ast_parse.TokDollar (s, LazyApply, loc)
          | ',' -> TokDollar (s, EagerApply, loc)
          | '$' -> TokString ("$", loc)
          | _ -> syntax_error state ("illegal character: " ^ s) lexbuf
      else
         TokDollar (s, NormalApply, loc)

(*
 * Special character.
 * Keep track of paren nesting.
 *)
let lexeme_char state lexbuf =
   let s, loc = lexeme_string state lexbuf in
      match s.[0] with
         '$' -> Omake_ast_parse.TokDollar (s, NormalApply, loc)
       | ':' -> TokColon (s, loc)
       | ',' -> TokComma (s, loc)
       | '=' -> TokEq (s, loc)
       | '.' -> TokDot (s, loc)
       | '%' -> TokVar (NormalApply, s, loc)
       | '(' ->
            (* Entering a parenthesized context updates the nesting count. *)
            push_paren state;
            TokLeftParen (s, loc)
       | ')' ->
            pop_paren state;
            TokRightParen (s, loc)
       | _ ->
            TokOp (s, loc)

(*
 * Special string.
 *)
let lexeme_special_string state lexbuf =
   let s, loc = lexeme_string state lexbuf in
      match s with
         "=>" -> Omake_ast_parse.TokArrow (s, loc)
       | "::" -> TokDoubleColon (s, loc)
       | "+=" -> TokEq (s, loc)
       | "[]" -> TokArray (s, loc)
       | _ -> TokOp (s, loc)

(*
 * Count the indentation in a string of characters.
 *)
(* Tabs advance to the next multiple of 8; newlines reset the column. *)
let indent_of_string s =
   let len = String.length s in
   let rec loop col i =
      if i = len then
         col
      else
         match s.[i] with
            '\r'
          | '\n' -> loop 0 (succ i)
          | '\t' -> loop ((col + 8) land (lnot 7)) (succ i)
          | _ -> loop (succ col) (succ i)
   in
      loop 0 0

(*
 * Use lexer positions.
*) let lexeme_pos lexbuf = let s = Lexing.lexeme lexbuf in let pos1 = Lexing.lexeme_start_p lexbuf in let pos2 = Lexing.lexeme_end_p lexbuf in let { Lexing.pos_fname = file; Lexing.pos_lnum = line1; Lexing.pos_bol = bol1; Lexing.pos_cnum = cnum1 } = pos1 in let { Lexing.pos_lnum = line2; Lexing.pos_bol = bol2; Lexing.pos_cnum = cnum2; _ } = pos2 in let loc = Lm_location.create_loc (Lm_symbol.add file) line1 (cnum1 - bol1) line2 (cnum2 - bol2) in s, loc # 553 "omake_ast_lex.ml" let __ocaml_lex_tables = { Lexing.lex_base = "\000\000\236\255\118\000\160\000\243\255\001\000\058\000\061\000\ \069\000\069\000\237\000\070\000\098\001\249\255\216\001\087\000\ \253\255\088\000\040\002\120\002\199\002\022\003\135\000\255\255\ \001\000\151\000\098\001\099\001\071\001\102\003\181\003\004\004\ \083\004\182\001\162\004\241\004\077\000\000\005\010\005\020\005\ \119\000\101\000\242\255\103\000\058\000\109\000\069\005\147\005\ \006\006\086\006\120\000\098\000\244\255\245\255\041\000\200\006\ \129\000\137\000\116\000\024\007\241\255\113\000\139\007\208\000\ \237\255\004\000\238\255\252\007\247\255\047\008\253\255\000\008\ \013\008\255\255\005\000\248\255\006\000\249\255\251\255\211\008\ \252\255\211\008\248\255\250\255\007\000\042\009\143\000\141\000\ \014\008\156\009\100\001\249\255\008\000\252\255\218\009\253\255\ \174\000\058\000\150\000\146\000\183\008\252\255\063\000\154\000\ \150\000\186\009\255\255\009\000\175\000\066\000\157\000\154\000\ \201\006\255\255\010\000\085\004\087\004\123\010\247\255\248\255\ \132\004\251\255\011\000\252\255\253\255\165\008\209\006\255\255\ \201\010\027\011\067\001\210\004\243\004\249\255\012\000\250\255\ \254\255\233\007\253\255\254\255\234\007\099\001\255\255"; Lexing.lex_backtrk = "\255\255\255\255\016\000\015\000\255\255\012\000\012\000\012\000\ \012\000\012\000\012\000\012\000\012\000\255\255\015\000\012\000\ \255\255\002\000\002\000\002\000\002\000\002\000\020\000\255\255\ \000\000\001\000\255\255\000\000\255\255\002\000\002\000\002\000\ 
\002\000\255\255\002\000\004\000\004\000\255\255\004\000\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\015\000\005\000\ \005\000\005\000\255\255\255\255\255\255\255\255\009\000\011\000\ \008\000\007\000\255\255\255\255\255\255\013\000\015\000\255\255\ \255\255\018\000\255\255\255\255\255\255\005\000\255\255\001\000\ \001\000\255\255\000\000\255\255\007\000\255\255\255\255\005\000\ \255\255\255\255\255\255\255\255\005\000\004\000\001\000\001\000\ \000\000\000\000\000\000\255\255\006\000\255\255\004\000\255\255\ \000\000\000\000\000\000\000\000\255\255\255\255\002\000\002\000\ \002\000\001\000\255\255\000\000\000\000\000\000\000\000\000\000\ \001\000\255\255\000\000\255\255\000\000\255\255\255\255\255\255\ \007\000\255\255\004\000\255\255\255\255\000\000\000\000\255\255\ \000\000\000\000\255\255\255\255\004\000\255\255\006\000\255\255\ \255\255\255\255\255\255\255\255\000\000\000\000\255\255"; Lexing.lex_default = "\003\000\000\000\255\255\003\000\000\000\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\000\000\046\000\255\255\ \000\000\255\255\255\255\255\255\255\255\255\255\026\000\000\000\ \255\255\255\255\026\000\026\000\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\000\000\255\255\255\255\255\255\046\000\255\255\ \046\000\255\255\255\255\255\255\000\000\000\000\255\255\255\255\ \255\255\255\255\255\255\255\255\000\000\255\255\003\000\255\255\ \000\000\255\255\000\000\071\000\000\000\255\255\000\000\071\000\ \255\255\000\000\255\255\000\000\255\255\000\000\000\000\255\255\ \000\000\088\000\000\000\000\000\255\255\255\255\255\255\255\255\ \088\000\088\000\255\255\000\000\255\255\000\000\255\255\000\000\ \255\255\255\255\255\255\255\255\105\000\000\000\255\255\255\255\ \255\255\105\000\000\000\255\255\255\255\255\255\255\255\255\255\ \255\255\000\000\255\255\115\000\115\000\119\000\000\000\000\000\ \131\000\000\000\255\255\000\000\000\000\255\255\255\255\000\000\ 
\255\255\255\255\255\255\131\000\131\000\000\000\255\255\000\000\ \000\000\140\000\000\000\000\000\140\000\255\255\000\000"; Lexing.lex_trans = "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\025\000\023\000\023\000\025\000\024\000\064\000\073\000\ \075\000\083\000\091\000\106\000\113\000\121\000\133\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \025\000\000\000\013\000\022\000\012\000\004\000\006\000\013\000\ \004\000\004\000\004\000\009\000\004\000\004\000\015\000\004\000\ \021\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\020\000\010\000\004\000\008\000\011\000\007\000\014\000\ \018\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\017\000\002\000\016\000\004\000\018\000\ \042\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\061\000\005\000\042\000\014\000\066\000\ \064\000\042\000\042\000\065\000\042\000\045\000\041\000\036\000\ \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ \036\000\023\000\037\000\043\000\027\000\044\000\066\000\042\000\ \066\000\066\000\066\000\042\000\050\000\066\000\066\000\066\000\ \025\000\023\000\066\000\025\000\024\000\054\000\003\000\046\000\ \056\000\255\255\255\255\057\000\255\255\255\255\060\000\042\000\ \066\000\086\000\037\000\066\000\087\000\042\000\097\000\025\000\ \098\000\099\000\026\000\102\000\103\000\104\000\109\000\110\000\ \255\255\111\000\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\000\000\255\255\255\255\ \098\000\110\000\066\000\003\000\046\000\099\000\111\000\000\000\ \000\000\000\000\255\255\255\255\255\255\255\255\255\255\000\000\ 
\000\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\255\255\255\255\255\255\255\255\003\000\ \001\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\000\000\255\255\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\042\000\ \000\000\000\000\097\000\109\000\003\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \058\000\000\000\058\000\000\000\059\000\000\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \000\000\000\000\000\000\000\000\023\000\023\000\091\000\027\000\ \027\000\092\000\127\000\000\000\000\000\000\000\064\000\036\000\ \036\000\036\000\036\000\036\000\036\000\036\000\036\000\036\000\ \036\000\000\000\000\000\000\000\057\000\142\000\052\000\255\255\ \053\000\056\000\142\000\000\000\053\000\053\000\055\000\000\000\ \000\000\000\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\000\000\000\000\053\000\127\000\ \255\255\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\000\000\053\000\ \053\000\053\000\055\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ 
\053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\000\000\054\000\000\000\ \053\000\255\255\255\255\000\000\255\255\255\255\035\000\035\000\ \035\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \255\255\000\000\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\047\000\255\255\255\255\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\255\255\255\255\255\255\255\255\255\255\000\000\ \047\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\255\255\255\255\255\255\255\255\047\000\ \000\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\000\000\255\255\018\000\048\000\000\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\255\255\255\255\091\000\000\000\000\000\000\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\000\000\000\000\000\000\000\000\018\000\ \000\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\000\000\000\000\018\000\018\000\000\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\040\000\000\000\000\000\000\000\000\000\000\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ 
\018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\000\000\000\000\000\000\000\000\018\000\ \255\255\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\000\000\018\000\028\000\018\000\020\000\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\000\000\000\000\000\000\000\000\000\000\000\000\018\000\ \018\000\018\000\018\000\018\000\029\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\000\000\000\000\000\000\000\000\018\000\000\000\ \018\000\018\000\018\000\018\000\029\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\000\000\018\000\028\000\018\000\020\000\020\000\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \000\000\000\000\000\000\000\000\000\000\000\000\018\000\018\000\ \018\000\018\000\018\000\029\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\000\000\000\000\000\000\000\000\018\000\000\000\018\000\ \032\000\018\000\018\000\029\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\031\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\030\000\018\000\ \018\000\033\000\000\000\034\000\018\000\000\000\034\000\034\000\ \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ \000\000\000\000\000\000\000\000\000\000\000\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ 
\018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\000\000\000\000\000\000\000\000\018\000\000\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\000\000\018\000\000\000\018\000\030\000\030\000\030\000\ \030\000\030\000\030\000\030\000\030\000\030\000\030\000\000\000\ \000\000\000\000\000\000\000\000\000\000\018\000\030\000\030\000\ \030\000\030\000\030\000\030\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \000\000\000\000\000\000\000\000\018\000\000\000\030\000\030\000\ \030\000\030\000\030\000\030\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \000\000\018\000\000\000\018\000\031\000\031\000\031\000\031\000\ \031\000\031\000\031\000\031\000\018\000\018\000\000\000\000\000\ \000\000\000\000\000\000\000\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\113\000\ \000\000\113\000\116\000\018\000\116\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\000\000\ \018\000\000\000\018\000\032\000\032\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\000\000\121\000\000\000\ \000\000\132\000\000\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ 
\018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\000\000\000\000\ \000\000\000\000\018\000\000\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\000\000\018\000\ \000\000\018\000\034\000\034\000\034\000\034\000\034\000\034\000\ \034\000\034\000\034\000\034\000\121\000\000\000\000\000\132\000\ \000\000\000\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\121\000\000\000\000\000\ \132\000\018\000\000\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\000\000\000\000\000\000\ \018\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ \035\000\035\000\035\000\039\000\000\000\039\000\000\000\000\000\ \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ \038\000\038\000\038\000\038\000\038\000\038\000\038\000\038\000\ \038\000\038\000\038\000\038\000\038\000\038\000\255\255\255\255\ \000\000\255\255\255\255\000\000\000\000\113\000\000\000\113\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\255\255\000\000\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\051\000\051\000\ \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ 
\051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ \255\255\255\255\255\255\255\255\255\255\000\000\051\000\051\000\ \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ \051\000\051\000\051\000\051\000\051\000\051\000\051\000\051\000\ \047\000\255\255\000\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\000\000\000\000\000\000\ \000\000\000\000\255\255\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\000\000\000\000\ \000\000\000\000\047\000\255\255\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\000\000\255\255\ \255\255\047\000\255\255\255\255\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\255\255\000\000\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\047\000\255\255\255\255\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \255\255\255\255\255\255\255\255\255\255\255\255\047\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\255\255\255\255\255\255\255\255\047\000\000\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ 
\049\000\000\000\255\255\047\000\048\000\000\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \050\000\000\000\000\000\000\000\000\000\000\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\000\000\000\000\000\000\000\000\047\000\000\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\000\000\112\000\113\000\047\000\112\000\114\000\000\000\ \000\000\000\000\126\000\121\000\000\000\126\000\122\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\112\000\000\000\000\000\115\000\000\000\053\000\000\000\ \000\000\126\000\053\000\053\000\131\000\000\000\000\000\000\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\000\000\000\000\053\000\000\000\255\255\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\000\000\053\000\053\000\053\000\ \000\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\053\000\053\000\053\000\053\000\053\000\ \053\000\053\000\053\000\000\000\054\000\059\000\053\000\000\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\060\000\000\000\000\000\000\000\000\000\000\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ 
\059\000\059\000\059\000\000\000\000\000\000\000\000\000\059\000\ \000\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\000\000\255\255\255\255\059\000\255\255\ \255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\255\255\000\000\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \000\000\255\255\255\255\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\063\000\255\255\255\255\ \255\255\255\255\000\000\000\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\255\255\255\255\ \255\255\255\255\000\000\000\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\073\000\255\255\ \000\000\074\000\255\255\139\000\255\255\255\255\000\000\000\000\ \139\000\255\255\000\000\000\000\000\000\000\000\000\000\075\000\ \255\255\000\000\076\000\255\255\000\000\000\000\070\000\000\000\ \069\000\000\000\255\255\070\000\255\255\000\000\000\000\255\255\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\077\000\ \255\255\000\000\255\255\000\000\077\000\255\255\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\141\000\255\255\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\080\000\000\000\078\000\000\000\000\000\ \072\000\078\000\078\000\079\000\255\255\000\000\000\000\078\000\ 
\078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\077\000\255\255\078\000\000\000\000\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\255\255\078\000\078\000\078\000\079\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\000\000\000\000\000\000\078\000\135\000\133\000\ \000\000\000\000\134\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\106\000\000\000\000\000\107\000\135\000\000\000\135\000\ \135\000\135\000\000\000\000\000\135\000\135\000\135\000\000\000\ \000\000\135\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\103\000\000\000\000\000\000\000\083\000\104\000\136\000\ \084\000\000\000\135\000\000\000\000\000\000\000\000\000\000\000\ \000\000\138\000\255\255\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\086\000\000\000\085\000\ \000\000\078\000\087\000\000\000\068\000\078\000\078\000\000\000\ \255\255\135\000\000\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\075\000\255\255\078\000\ \000\000\000\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\090\000\ \078\000\078\000\078\000\102\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\078\000\078\000\ \078\000\078\000\078\000\078\000\078\000\078\000\095\000\089\000\ 
\093\000\078\000\000\000\000\000\093\000\093\000\094\000\000\000\ \000\000\000\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\000\000\000\000\093\000\000\000\ \000\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\000\000\093\000\ \093\000\093\000\094\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\133\000\255\255\000\000\ \093\000\255\255\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\101\000\ \000\000\000\000\000\000\000\000\000\000\000\000\255\255\000\000\ \255\255\000\000\000\000\255\255\255\255\000\000\000\000\255\255\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\082\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\255\255\000\000\000\000\000\000\ \000\000\255\255\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \093\000\000\000\000\000\000\000\093\000\093\000\000\000\000\000\ \000\000\000\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\000\000\000\000\093\000\000\000\ \089\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\255\255\093\000\ \093\000\093\000\000\000\093\000\093\000\093\000\093\000\093\000\ 
\093\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\093\000\093\000\093\000\ \093\000\093\000\093\000\093\000\093\000\000\000\000\000\000\000\ \093\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\126\000\121\000\000\000\126\000\ \122\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\126\000\255\255\123\000\120\000\000\000\ \000\000\000\000\123\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\124\000\000\000\000\000\ \000\000\000\000\255\255\128\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\127\000\125\000\ \127\000\000\000\128\000\000\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\128\000\000\000\ \000\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\000\000\000\000\000\000\000\000\000\000\ \000\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\000\000\000\000\000\000\000\000\ \128\000\000\000\128\000\128\000\128\000\128\000\128\000\128\000\ 
\128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\000\000\000\000\000\000\128\000\ \128\000\000\000\000\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\130\000\000\000\000\000\ \000\000\000\000\000\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\000\000\000\000\ \000\000\000\000\128\000\118\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\000\000\000\000\ \000\000\128\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000"; Lexing.lex_check = 
"\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\000\000\000\000\024\000\000\000\000\000\065\000\074\000\ \076\000\084\000\092\000\107\000\114\000\122\000\134\000\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \000\000\255\255\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \006\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\ \000\000\000\000\000\000\007\000\000\000\005\000\000\000\002\000\ \002\000\008\000\009\000\002\000\011\000\015\000\017\000\015\000\ \015\000\015\000\015\000\015\000\015\000\015\000\015\000\015\000\ \015\000\022\000\036\000\041\000\022\000\043\000\002\000\044\000\ \002\000\002\000\002\000\045\000\051\000\002\000\002\000\002\000\ \025\000\025\000\002\000\025\000\025\000\054\000\040\000\050\000\ \056\000\003\000\003\000\057\000\003\000\003\000\058\000\061\000\ \002\000\086\000\036\000\002\000\087\000\017\000\097\000\025\000\ \098\000\099\000\025\000\102\000\103\000\104\000\109\000\110\000\ \003\000\111\000\003\000\003\000\003\000\003\000\003\000\003\000\ \003\000\003\000\003\000\003\000\003\000\255\255\003\000\003\000\ \096\000\108\000\002\000\040\000\050\000\096\000\108\000\255\255\ \255\255\255\255\003\000\003\000\003\000\003\000\003\000\255\255\ \255\255\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ \003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ 
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ \003\000\003\000\003\000\003\000\003\000\003\000\003\000\063\000\ \000\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ \003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ \003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\ \003\000\003\000\003\000\255\255\003\000\010\000\010\000\010\000\ \010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\ \255\255\255\255\096\000\108\000\063\000\010\000\010\000\010\000\ \010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\ \010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\ \010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\ \010\000\255\255\010\000\255\255\010\000\255\255\010\000\010\000\ \010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\ \010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\ \010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\ \255\255\255\255\255\255\255\255\026\000\027\000\090\000\026\000\ \027\000\090\000\130\000\255\255\255\255\255\255\002\000\028\000\ \028\000\028\000\028\000\028\000\028\000\028\000\028\000\028\000\ \028\000\255\255\255\255\255\255\012\000\141\000\012\000\022\000\ \012\000\012\000\141\000\255\255\012\000\012\000\012\000\255\255\ \255\255\255\255\012\000\012\000\012\000\012\000\012\000\012\000\ \012\000\012\000\012\000\012\000\255\255\255\255\012\000\130\000\ \003\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\ \012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\ \012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\ \012\000\012\000\012\000\012\000\012\000\012\000\255\255\012\000\ \012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\ \012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\ \012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\ \012\000\012\000\012\000\012\000\012\000\255\255\012\000\255\255\ 
\012\000\014\000\014\000\255\255\014\000\014\000\033\000\033\000\ \033\000\033\000\033\000\033\000\033\000\033\000\033\000\033\000\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \014\000\255\255\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\255\255\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \255\255\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\014\000\014\000\014\000\014\000\014\000\ \014\000\014\000\014\000\255\255\014\000\018\000\014\000\255\255\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\026\000\027\000\090\000\255\255\255\255\255\255\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\255\255\255\255\255\255\255\255\018\000\ \255\255\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\018\000\018\000\018\000\018\000\018\000\ \018\000\018\000\018\000\255\255\255\255\019\000\018\000\255\255\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\255\255\255\255\255\255\255\255\255\255\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ 
\019\000\019\000\019\000\255\255\255\255\255\255\255\255\019\000\ \014\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\019\000\019\000\019\000\019\000\019\000\ \019\000\019\000\019\000\255\255\020\000\020\000\019\000\020\000\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\255\255\255\255\255\255\255\255\255\255\255\255\020\000\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\020\000\255\255\255\255\255\255\255\255\020\000\255\255\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\020\000\020\000\020\000\020\000\020\000\020\000\020\000\ \020\000\020\000\255\255\021\000\021\000\020\000\021\000\021\000\ \021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ \255\255\255\255\255\255\255\255\255\255\255\255\021\000\021\000\ \021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ \021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ \021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ \021\000\255\255\255\255\255\255\255\255\021\000\255\255\021\000\ \021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ \021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ \021\000\021\000\021\000\021\000\021\000\021\000\021\000\021\000\ \021\000\029\000\255\255\029\000\021\000\255\255\029\000\029\000\ \029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\ \255\255\255\255\255\255\255\255\255\255\255\255\029\000\029\000\ \029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\ \029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\ \029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\ 
\029\000\255\255\255\255\255\255\255\255\029\000\255\255\029\000\ \029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\ \029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\ \029\000\029\000\029\000\029\000\029\000\029\000\029\000\029\000\ \029\000\255\255\030\000\255\255\029\000\030\000\030\000\030\000\ \030\000\030\000\030\000\030\000\030\000\030\000\030\000\255\255\ \255\255\255\255\255\255\255\255\255\255\030\000\030\000\030\000\ \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ \255\255\255\255\255\255\255\255\030\000\255\255\030\000\030\000\ \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ \030\000\030\000\030\000\030\000\030\000\030\000\030\000\030\000\ \255\255\031\000\255\255\030\000\031\000\031\000\031\000\031\000\ \031\000\031\000\031\000\031\000\031\000\031\000\255\255\255\255\ \255\255\255\255\255\255\255\255\031\000\031\000\031\000\031\000\ \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ \031\000\031\000\031\000\031\000\031\000\031\000\031\000\115\000\ \255\255\116\000\115\000\031\000\116\000\031\000\031\000\031\000\ \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ \031\000\031\000\031\000\031\000\031\000\031\000\031\000\031\000\ \031\000\031\000\031\000\031\000\031\000\031\000\031\000\255\255\ \032\000\255\255\031\000\032\000\032\000\032\000\032\000\032\000\ \032\000\032\000\032\000\032\000\032\000\255\255\120\000\255\255\ \255\255\120\000\255\255\032\000\032\000\032\000\032\000\032\000\ \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ \032\000\032\000\032\000\032\000\032\000\032\000\255\255\255\255\ 
\255\255\255\255\032\000\255\255\032\000\032\000\032\000\032\000\ \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ \032\000\032\000\032\000\032\000\032\000\032\000\032\000\032\000\ \032\000\032\000\032\000\032\000\032\000\032\000\255\255\034\000\ \255\255\032\000\034\000\034\000\034\000\034\000\034\000\034\000\ \034\000\034\000\034\000\034\000\131\000\255\255\255\255\131\000\ \255\255\255\255\034\000\034\000\034\000\034\000\034\000\034\000\ \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ \034\000\034\000\034\000\034\000\034\000\132\000\255\255\255\255\ \132\000\034\000\255\255\034\000\034\000\034\000\034\000\034\000\ \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ \034\000\034\000\034\000\034\000\034\000\034\000\034\000\034\000\ \034\000\034\000\034\000\034\000\034\000\255\255\255\255\255\255\ \034\000\035\000\035\000\035\000\035\000\035\000\035\000\035\000\ \035\000\035\000\035\000\037\000\255\255\037\000\255\255\255\255\ \037\000\037\000\037\000\037\000\037\000\037\000\037\000\037\000\ \037\000\037\000\038\000\038\000\038\000\038\000\038\000\038\000\ \038\000\038\000\038\000\038\000\039\000\039\000\039\000\039\000\ \039\000\039\000\039\000\039\000\039\000\039\000\046\000\046\000\ \255\255\046\000\046\000\255\255\255\255\115\000\255\255\116\000\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\046\000\255\255\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ \046\000\046\000\046\000\046\000\120\000\046\000\046\000\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ 
\046\000\046\000\046\000\046\000\046\000\255\255\046\000\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ \046\000\046\000\046\000\046\000\046\000\046\000\046\000\046\000\ \047\000\046\000\255\255\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\255\255\255\255\255\255\ \255\255\255\255\131\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\255\255\255\255\ \255\255\255\255\047\000\132\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\047\000\047\000\ \047\000\047\000\047\000\047\000\047\000\047\000\255\255\048\000\ \048\000\047\000\048\000\048\000\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\048\000\255\255\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\046\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\255\255\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\048\000\048\000\048\000\048\000\048\000\048\000\048\000\ \048\000\255\255\048\000\049\000\048\000\255\255\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ 
\049\000\255\255\255\255\255\255\255\255\255\255\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\255\255\255\255\255\255\255\255\049\000\255\255\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\049\000\049\000\049\000\049\000\049\000\049\000\049\000\ \049\000\255\255\112\000\112\000\049\000\112\000\112\000\255\255\ \255\255\255\255\126\000\126\000\255\255\126\000\126\000\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\112\000\255\255\255\255\112\000\255\255\055\000\255\255\ \255\255\126\000\055\000\055\000\126\000\255\255\255\255\255\255\ \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ \055\000\055\000\255\255\255\255\055\000\255\255\048\000\055\000\ \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ \055\000\055\000\055\000\055\000\255\255\055\000\055\000\055\000\ \255\255\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ \055\000\055\000\055\000\055\000\055\000\055\000\055\000\055\000\ \055\000\055\000\055\000\255\255\055\000\059\000\055\000\255\255\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\255\255\255\255\255\255\255\255\255\255\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\255\255\255\255\255\255\255\255\059\000\ \255\255\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ 
\059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\059\000\059\000\059\000\059\000\059\000\ \059\000\059\000\059\000\255\255\062\000\062\000\059\000\062\000\ \062\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\062\000\255\255\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \255\255\062\000\062\000\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\062\000\062\000\062\000\ \062\000\062\000\255\255\255\255\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\255\255\255\255\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\062\000\062\000\ \062\000\062\000\062\000\062\000\062\000\062\000\067\000\062\000\ \255\255\067\000\071\000\137\000\140\000\071\000\255\255\255\255\ \137\000\140\000\255\255\255\255\255\255\255\255\255\255\072\000\ \088\000\255\255\072\000\088\000\255\255\255\255\067\000\255\255\ \067\000\255\255\071\000\067\000\071\000\255\255\255\255\071\000\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\072\000\ \088\000\255\255\088\000\255\255\072\000\088\000\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\137\000\140\000\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\069\000\255\255\069\000\255\255\255\255\ \067\000\069\000\069\000\069\000\071\000\255\255\255\255\069\000\ \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ \069\000\072\000\088\000\069\000\255\255\255\255\069\000\069\000\ 
\069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ \069\000\069\000\069\000\062\000\069\000\069\000\069\000\069\000\ \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ \069\000\069\000\069\000\069\000\069\000\069\000\069\000\069\000\ \069\000\069\000\255\255\255\255\255\255\069\000\125\000\125\000\ \255\255\255\255\125\000\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\100\000\255\255\255\255\100\000\125\000\255\255\125\000\ \125\000\125\000\255\255\255\255\125\000\125\000\125\000\255\255\ \255\255\125\000\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\100\000\255\255\255\255\255\255\081\000\100\000\125\000\ \081\000\255\255\125\000\255\255\255\255\255\255\255\255\255\255\ \255\255\137\000\140\000\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\081\000\255\255\081\000\ \255\255\079\000\081\000\255\255\067\000\079\000\079\000\255\255\ \071\000\125\000\255\255\079\000\079\000\079\000\079\000\079\000\ \079\000\079\000\079\000\079\000\079\000\072\000\088\000\079\000\ \255\255\255\255\079\000\079\000\079\000\079\000\079\000\079\000\ \079\000\079\000\079\000\079\000\079\000\079\000\079\000\079\000\ \079\000\079\000\079\000\079\000\079\000\079\000\079\000\079\000\ \079\000\079\000\079\000\079\000\079\000\079\000\079\000\081\000\ \079\000\079\000\079\000\100\000\079\000\079\000\079\000\079\000\ \079\000\079\000\079\000\079\000\079\000\079\000\079\000\079\000\ \079\000\079\000\079\000\079\000\079\000\079\000\079\000\079\000\ \079\000\079\000\079\000\079\000\079\000\079\000\085\000\081\000\ \085\000\079\000\255\255\255\255\085\000\085\000\085\000\255\255\ \255\255\255\255\085\000\085\000\085\000\085\000\085\000\085\000\ 
\085\000\085\000\085\000\085\000\255\255\255\255\085\000\255\255\ \255\255\085\000\085\000\085\000\085\000\085\000\085\000\085\000\ \085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\ \085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\ \085\000\085\000\085\000\085\000\085\000\085\000\255\255\085\000\ \085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\ \085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\ \085\000\085\000\085\000\085\000\085\000\085\000\085\000\085\000\ \085\000\085\000\085\000\085\000\085\000\125\000\089\000\255\255\ \085\000\089\000\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\100\000\ \255\255\255\255\255\255\255\255\255\255\255\255\089\000\255\255\ \089\000\255\255\255\255\089\000\105\000\255\255\255\255\105\000\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\081\000\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\105\000\255\255\255\255\255\255\ \255\255\105\000\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \089\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \094\000\255\255\255\255\255\255\094\000\094\000\255\255\255\255\ \255\255\255\255\094\000\094\000\094\000\094\000\094\000\094\000\ \094\000\094\000\094\000\094\000\255\255\255\255\094\000\255\255\ \089\000\094\000\094\000\094\000\094\000\094\000\094\000\094\000\ \094\000\094\000\094\000\094\000\094\000\094\000\094\000\094\000\ \094\000\094\000\094\000\094\000\094\000\094\000\094\000\094\000\ \094\000\094\000\094\000\094\000\094\000\094\000\105\000\094\000\ \094\000\094\000\255\255\094\000\094\000\094\000\094\000\094\000\ \094\000\094\000\094\000\094\000\094\000\094\000\094\000\094\000\ \094\000\094\000\094\000\094\000\094\000\094\000\094\000\094\000\ 
\094\000\094\000\094\000\094\000\094\000\255\255\255\255\255\255\ \094\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\117\000\117\000\255\255\117\000\ \117\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\117\000\089\000\117\000\117\000\255\255\ \255\255\255\255\117\000\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\117\000\117\000\117\000\117\000\117\000\ \117\000\117\000\117\000\117\000\117\000\117\000\255\255\255\255\ \255\255\255\255\105\000\117\000\117\000\117\000\117\000\117\000\ \117\000\117\000\117\000\117\000\117\000\117\000\117\000\117\000\ \117\000\117\000\117\000\117\000\117\000\117\000\117\000\117\000\ \117\000\117\000\117\000\117\000\117\000\117\000\117\000\117\000\ \117\000\255\255\117\000\255\255\117\000\117\000\117\000\117\000\ \117\000\117\000\117\000\117\000\117\000\117\000\117\000\117\000\ \117\000\117\000\117\000\117\000\117\000\117\000\117\000\117\000\ \117\000\117\000\117\000\117\000\117\000\117\000\128\000\255\255\ \255\255\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\255\255\255\255\255\255\255\255\255\255\ \255\255\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\255\255\255\255\255\255\255\255\ \128\000\255\255\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ \128\000\128\000\128\000\128\000\128\000\128\000\128\000\128\000\ 
\128\000\128\000\128\000\128\000\255\255\255\255\255\255\128\000\ \129\000\255\255\255\255\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\255\255\255\255\ \255\255\255\255\255\255\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\255\255\255\255\ \255\255\255\255\129\000\117\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\129\000\129\000\ \129\000\129\000\129\000\129\000\129\000\129\000\255\255\255\255\ \255\255\129\000\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\ \255\255\255\255\255\255\255\255"; Lexing.lex_base_code = ""; Lexing.lex_backtrk_code = ""; Lexing.lex_default_code = ""; Lexing.lex_trans_code = ""; Lexing.lex_check_code = ""; Lexing.lex_code = ""; } let rec 
lex_main state lexbuf = __ocaml_lex_lex_main_rec state lexbuf 0 and __ocaml_lex_lex_main_rec state lexbuf __ocaml_lex_state = match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 669 "omake_ast_lex.mll" ( let loc = state.current_loc in let _ = lexeme_loc state lexbuf in set_next_line state lexbuf; Omake_ast_parse.TokEol loc ) # 1415 "omake_ast_lex.ml" | 1 -> # 675 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in TokWhite (s, loc) ) # 1422 "omake_ast_lex.ml" | 2 -> # 688 "omake_ast_lex.mll" ( lexeme_name state lexbuf ) # 1427 "omake_ast_lex.ml" | 3 -> # 690 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in TokInt (s, loc) ) # 1434 "omake_ast_lex.ml" | 4 -> # 694 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in TokFloat (s, loc) ) # 1441 "omake_ast_lex.ml" | 5 -> # 698 "omake_ast_lex.mll" ( lexeme_key state lexbuf ) # 1446 "omake_ast_lex.ml" | 6 -> # 700 "omake_ast_lex.mll" ( let id, loc = lexeme_string state lexbuf in let mode = ModeQuote id in push_mode state mode; TokBeginQuoteString (id, loc) ) # 1455 "omake_ast_lex.ml" | 7 -> # 707 "omake_ast_lex.mll" ( let id, loc = lexeme_string state lexbuf in let id = String.sub id 1 (pred (String.length id)) in (* GS TODO: use "as" *) let mode = ModeString id in push_mode state mode; TokBeginQuote ("", loc) ) # 1466 "omake_ast_lex.ml" | 8 -> # 715 "omake_ast_lex.mll" ( let id, _ = lexeme_string state lexbuf in let id = String.sub id 1 (pred (String.length id)) in (* GS TODO: use "as" *) let s, loc = lex_literal state (Buffer.create 32) id lexbuf in (* GS: lex_literal is a sublexer. 
Returns the quoted string *) TokStringQuote (s, loc) ) # 1477 "omake_ast_lex.ml" | 9 -> # 724 "omake_ast_lex.mll" ( let strategy, id, _ = lexeme_dollar_pipe state lexbuf in let s, loc = lex_literal state (Buffer.create 32) id lexbuf in TokVarQuote (strategy, s, loc) ) # 1485 "omake_ast_lex.ml" | 10 -> # 730 "omake_ast_lex.mll" ( lexeme_var state lexbuf ) # 1490 "omake_ast_lex.ml" | 11 -> # 733 "omake_ast_lex.mll" ( lexeme_dollar state lexbuf ) # 1495 "omake_ast_lex.ml" | 12 -> # 736 "omake_ast_lex.mll" ( lexeme_char state lexbuf ) # 1500 "omake_ast_lex.ml" | 13 -> # 739 "omake_ast_lex.mll" ( lexeme_special_string state lexbuf ) # 1505 "omake_ast_lex.ml" | 14 -> # 741 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in TokNamedColon (s, loc) ) # 1512 "omake_ast_lex.ml" | 15 -> # 745 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in TokString (s, loc) ) # 1519 "omake_ast_lex.ml" | 16 -> # 749 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in TokString (s, loc) ) # 1526 "omake_ast_lex.ml" | 17 -> # 753 "omake_ast_lex.mll" ( let s, loc = lexeme_esc state lexbuf in TokStringQuote (s, loc) ) # 1533 "omake_ast_lex.ml" | 18 -> # 757 "omake_ast_lex.mll" ( let loc = lexeme_loc state lexbuf in set_next_line state lexbuf; state.current_prompt <- "\\"; state.current_fill_ok <- true; TokString (" ", loc) ) # 1543 "omake_ast_lex.ml" | 19 -> # 764 "omake_ast_lex.mll" ( let loc = lexeme_loc state lexbuf in match state.current_token with TokEol _ | TokEof _ -> TokEof loc | _ -> TokEol loc ) # 1555 "omake_ast_lex.ml" | 20 -> # 773 "omake_ast_lex.mll" ( let s, _ = lexeme_string state lexbuf in syntax_error state ("illegal character: " ^ String.escaped s) lexbuf ) # 1562 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_main_rec state lexbuf __ocaml_lex_state and lex_quote state lexbuf = __ocaml_lex_lex_quote_rec state lexbuf 67 and __ocaml_lex_lex_quote_rec state lexbuf __ocaml_lex_state = match 
Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 787 "omake_ast_lex.mll" ( set_next_line state lexbuf; syntax_error state "unterminated string" lexbuf ) # 1576 "omake_ast_lex.ml" | 1 -> # 792 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in Omake_ast_parse.TokString (s, loc) ) # 1583 "omake_ast_lex.ml" | 2 -> # 796 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in match state.current_mode with ModeQuote s' when s' = s -> pop_mode state; TokEndQuoteString (s, loc) | _ -> TokString (s, loc) ) # 1595 "omake_ast_lex.ml" | 3 -> # 805 "omake_ast_lex.mll" ( let loc = lexeme_loc state lexbuf in TokString ("$", loc) ) # 1602 "omake_ast_lex.ml" | 4 -> # 809 "omake_ast_lex.mll" ( lexeme_var state lexbuf ) # 1607 "omake_ast_lex.ml" | 5 -> # 811 "omake_ast_lex.mll" ( push_dollar state ModeNormal; lexeme_dollar state lexbuf ) # 1614 "omake_ast_lex.ml" | 6 -> # 815 "omake_ast_lex.mll" ( let s, loc = lexeme_esc state lexbuf in TokString (s, loc) ) # 1621 "omake_ast_lex.ml" | 7 -> # 819 "omake_ast_lex.mll" ( let loc = lexeme_loc state lexbuf in set_next_line state lexbuf; state.current_fill_ok <- true; TokString ("", loc) ) # 1630 "omake_ast_lex.ml" | 8 -> # 825 "omake_ast_lex.mll" ( syntax_error state "unterminated string" lexbuf ) # 1635 "omake_ast_lex.ml" | 9 -> # 827 "omake_ast_lex.mll" ( let s, _ = lexeme_string state lexbuf in syntax_error state ("illegal character in string constant: " ^ String.escaped s) lexbuf ) # 1642 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_quote_rec state lexbuf __ocaml_lex_state and lex_string state lexbuf = __ocaml_lex_lex_string_rec state lexbuf 81 and __ocaml_lex_lex_string_rec state lexbuf __ocaml_lex_state = match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 841 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in Omake_ast_parse.TokString (s, loc) ) # 1656 "omake_ast_lex.ml" | 1 -> # 845 
"omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in match state.current_mode with ModeString s' when s' = s -> pop_mode state; TokEndQuote ("", loc) | _ -> TokString (s, loc) ) # 1668 "omake_ast_lex.ml" | 2 -> # 854 "omake_ast_lex.mll" ( let loc = lexeme_loc state lexbuf in TokString ("$", loc) ) # 1675 "omake_ast_lex.ml" | 3 -> # 858 "omake_ast_lex.mll" ( lexeme_var state lexbuf ) # 1680 "omake_ast_lex.ml" | 4 -> # 860 "omake_ast_lex.mll" ( push_dollar state ModeNormal; lexeme_dollar state lexbuf ) # 1687 "omake_ast_lex.ml" | 5 -> # 864 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in let () = match state.current_mode with ModeString s -> push_mode state (ModeSkipString s) | _ -> (* GS CHECK: When is this possible? *) () in set_next_line state lexbuf; state.current_fill_ok <- true; TokString (s, loc) ) # 1704 "omake_ast_lex.ml" | 6 -> # 878 "omake_ast_lex.mll" ( let loc = lexeme_loc state lexbuf in let () = match state.current_mode with ModeString s -> push_mode state (ModeSkipString s) | _ -> () in set_next_line state lexbuf; state.current_fill_ok <- true; TokString ("", loc) ) # 1720 "omake_ast_lex.ml" | 7 -> # 891 "omake_ast_lex.mll" ( syntax_error state "unterminated string" lexbuf ) # 1725 "omake_ast_lex.ml" | 8 -> # 893 "omake_ast_lex.mll" ( let s, _ = lexeme_string state lexbuf in syntax_error state ("illegal character: " ^ String.escaped s) lexbuf ) # 1732 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_string_rec state lexbuf __ocaml_lex_state and lex_skip_string state lexbuf = __ocaml_lex_lex_skip_string_rec state lexbuf 96 and __ocaml_lex_lex_skip_string_rec state lexbuf __ocaml_lex_state = match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 902 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in pop_mode state; match state.current_mode with ModeString s' when s' = s -> pop_mode state; Omake_ast_parse.TokEndQuote ("", loc) | _ -> TokString 
("", loc) ) # 1752 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_skip_string_rec state lexbuf __ocaml_lex_state and lex_literal state buf equote lexbuf = __ocaml_lex_lex_literal_rec state buf equote lexbuf 100 and __ocaml_lex_lex_literal_rec state buf equote lexbuf __ocaml_lex_state = match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 918 "omake_ast_lex.mll" ( let s, _ = lexeme_string state lexbuf in set_next_line state lexbuf; state.current_fill_ok <- true; Buffer.add_string buf s; lex_literal_skip state buf equote lexbuf ) # 1769 "omake_ast_lex.ml" | 1 -> # 925 "omake_ast_lex.mll" ( let s, _ = lexeme_string state lexbuf in Buffer.add_string buf s; lex_literal state buf equote lexbuf ) # 1777 "omake_ast_lex.ml" | 2 -> # 930 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in if s = equote then let s = Buffer.contents buf in s, loc else begin Buffer.add_string buf s; lex_literal state buf equote lexbuf end ) # 1791 "omake_ast_lex.ml" | 3 -> # 941 "omake_ast_lex.mll" ( syntax_error state "unterminated string" lexbuf ) # 1796 "omake_ast_lex.ml" | 4 -> # 943 "omake_ast_lex.mll" ( let s, _ = lexeme_string state lexbuf in syntax_error state ("illegal character: " ^ String.escaped s) lexbuf ) # 1803 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_literal_rec state buf equote lexbuf __ocaml_lex_state and lex_literal_skip state buf equote lexbuf = __ocaml_lex_lex_literal_skip_rec state buf equote lexbuf 108 and __ocaml_lex_lex_literal_skip_rec state buf equote lexbuf __ocaml_lex_state = match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 949 "omake_ast_lex.mll" ( let s, loc = lexeme_string state lexbuf in if s = equote then let s = Buffer.contents buf in s, loc else lex_literal state buf equote lexbuf ) # 1821 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_literal_skip_rec 
state buf equote lexbuf __ocaml_lex_state and lex_indent state lexbuf = __ocaml_lex_lex_indent_rec state lexbuf 112 and __ocaml_lex_lex_indent_rec state lexbuf __ocaml_lex_state = match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 963 "omake_ast_lex.mll" ( set_next_line state lexbuf; state.current_fill_ok <- true; lex_indent state lexbuf ) # 1836 "omake_ast_lex.ml" | 1 -> # 968 "omake_ast_lex.mll" ( let s, _ = lexeme_string state lexbuf in let indent = indent_of_string s in indent ) # 1844 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_indent_rec state lexbuf __ocaml_lex_state and lex_deps lexbuf = __ocaml_lex_lex_deps_rec lexbuf 117 and __ocaml_lex_lex_deps_rec lexbuf __ocaml_lex_state = match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 981 "omake_ast_lex.mll" ( let s, loc = lexeme_pos lexbuf in Omake_ast_parse.TokString (s, loc) ) # 1858 "omake_ast_lex.ml" | 1 -> # 985 "omake_ast_lex.mll" ( let _, loc = lexeme_pos lexbuf in TokString (":", loc) ) # 1865 "omake_ast_lex.ml" | 2 -> # 989 "omake_ast_lex.mll" ( let s, loc = lexeme_pos lexbuf in TokColon (s, loc) ) # 1872 "omake_ast_lex.ml" | 3 -> # 993 "omake_ast_lex.mll" ( let s, loc = lexeme_pos lexbuf in let buf = Buffer.create 64 in Buffer.add_string buf s; lex_deps_quote s buf lexbuf; TokString (Buffer.contents buf, loc) ) # 1882 "omake_ast_lex.ml" | 4 -> # 1001 "omake_ast_lex.mll" ( let _, loc = lexeme_pos lexbuf in TokEol loc ) # 1889 "omake_ast_lex.ml" | 5 -> # 1005 "omake_ast_lex.mll" ( let s, loc = lexeme_pos lexbuf in let s = String.make 1 s.[1] in TokStringQuote (s, loc) ) # 1897 "omake_ast_lex.ml" | 6 -> # 1010 "omake_ast_lex.mll" ( let _, loc = lexeme_pos lexbuf in TokWhite (" ", loc) ) # 1904 "omake_ast_lex.ml" | 7 -> # 1014 "omake_ast_lex.mll" ( let s, loc = lexeme_pos lexbuf in TokString (s, loc) ) # 1911 "omake_ast_lex.ml" | 8 -> # 1018 "omake_ast_lex.mll" ( let _, loc = lexeme_pos lexbuf in TokEof loc 
) # 1918 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_deps_rec lexbuf __ocaml_lex_state and lex_deps_quote term buf lexbuf = __ocaml_lex_lex_deps_quote_rec term buf lexbuf 137 and __ocaml_lex_lex_deps_quote_rec term buf lexbuf __ocaml_lex_state = match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with | 0 -> # 1026 "omake_ast_lex.mll" ( let s, _ = lexeme_pos lexbuf in Buffer.add_string buf s; lex_deps_quote term buf lexbuf ) # 1933 "omake_ast_lex.ml" | 1 -> # 1031 "omake_ast_lex.mll" ( let s, _ = lexeme_pos lexbuf in Buffer.add_string buf s; if s <> term then lex_deps_quote term buf lexbuf ) # 1942 "omake_ast_lex.ml" | 2 -> # 1038 "omake_ast_lex.mll" ( raise Parsing.Parse_error ) # 1947 "omake_ast_lex.ml" | __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf; __ocaml_lex_lex_deps_quote_rec term buf lexbuf __ocaml_lex_state ;; # 1040 "omake_ast_lex.mll" (************************************************************************ * Prompts. *) (* * Lex and parse a line for the shell. *) let tabstop = 3 let prompt_ext s = s ^ "> " (* Prune the prompt to a reasonable length *) let prompt_prune prompt indent = let max_len = 8 in let s = Bytes.make (indent * tabstop + max_len + 2) ' ' in let length = String.length prompt in if length > max_len then begin Bytes.blit_string prompt 0 s 0 max_len; Bytes.set s max_len '>' end else Bytes.blit_string prompt 0 s 0 length; Bytes.to_string s let prompt_indent prompt root indent = if root then prompt else prompt_prune prompt indent let prompt_string state root nest e = let prompt = prompt_ext (Omake_ast_util.key_of_exp e) in if state.is_interactive && root then Lm_printf.printf "%s%s@?" (prompt_prune prompt nest) state.current_buffer; prompt (* * Parser for the body of an expression. 
*) let body_parser state body = match body with Omake_ast.NoBody -> None | OptBody -> if state.is_interactive then None else Some Omake_ast_parse.shell | ColonBody -> Some Omake_ast_parse.shell | ArrayBody -> Some Omake_ast_parse.string (************************************************************************ * Lexing input. *) (* * Copy into the lexbuf. *) let lex_fill state buf len = let { current_buffer = buffer; current_index = index; _ } = state in let length = String.length buffer in let amount = min (length - index) len in if amount = 0 then state.current_eof <- true else begin String.blit buffer index buf 0 amount; state.current_index <- index + amount end; amount (* * Refill the buffer using the readline function. *) let state_refill state = let { current_fill_ok = fill_ok; current_prompt = prompt; readline = readline; _ } = state in if fill_ok then let line = readline prompt in let line = if state.is_interactive && line = ".\n" then "" else line in state.current_buffer <- line; state.current_index <- 0; state.current_fill_ok <- false (* * Lexer function to refill the buffer. * GS. This is for Lexing.from_function. *) let lex_refill state buf len = let { current_buffer = buffer; current_index = index; _ } = state in let length = String.length buffer in let amount = length - index in if amount = 0 then state_refill state; lex_fill state buf len (************************************************************************ * Main lexer. *) (* * Get the input. *) let lex_line state lexbuf = let tok = match state.current_mode with ModeNormal -> lex_main state lexbuf | ModeString _ -> lex_string state lexbuf | ModeSkipString _ -> lex_skip_string state lexbuf | ModeQuote _ -> lex_quote state lexbuf in if !debug_lex then Lm_printf.eprintf "Token: %a@." pp_print_token tok; state.current_token <- tok; tok (************************************************************************ * Parse main loop. *) (* * Make sure the lexbuf is valid. 
*) let parse_refill state prompt root nest = if state.current_eof then begin let lexbuf = Lexing.from_function (lex_refill state) in state.current_eof <- false; state.current_fill_ok <- true; state.current_prompt <- prompt_indent prompt root nest; state.current_lexbuf <- lexbuf; state.current_lexmode <- LexModeInitial; state.current_off <- 0 end (* * Get the current indentation level. *) let parse_indent state prompt root nest = parse_refill state prompt root nest; match state.current_lexmode with LexModeInitial -> let indent = (* Interactive shell ignores indentation *) if state.is_interactive then nest else lex_indent state state.current_lexbuf in if !debug_lex then Lm_printf.eprintf "indent: %d@." indent; state.current_lexmode <- LexModeNormal indent; indent | LexModeNormal indent -> indent (* GS. In the following, parse = Omake_ast_parse.shell, i.e. the ocamlyacc generated parser *) (* * Parse a single expression. * GS. an "expression" is not just a $-expression, but any code block, which * may span several lines. *) let rec parse_exp state parse prompt root nest = let indent = parse_indent state prompt root nest in if indent > state.current_indent then syntax_error state "illegal indentation" state.current_lexbuf else if indent < state.current_indent then raise End_of_file else parse_exp_indent state parse prompt root nest and parse_exp_indent state parse _ root nest = (* GS: after the indentation... *) let code, e = try parse (lex_line state) state.current_lexbuf with Parsing.Parse_error -> parse_error state in (* GS: e is the parsed expression *) let code = Omake_ast_util.scan_body_flag code e in let parse = body_parser state code in (* GS. 
parse is now None, or Some Omake_ast_parse.shell or .string *) match parse with Some parse -> let prompt = prompt_string state root nest e in let body = parse_body state parse prompt nest in let e = Omake_ast_util.update_body e code body in (match Omake_ast_util.can_continue e with Some prompt -> (try e :: parse_exp state parse (prompt_ext prompt) false nest with End_of_file -> [e]) | None -> [e]) | None -> [e] and parse_body state parse prompt nest = let nest = succ nest in let indent = parse_indent state prompt false nest in (* GS. The body must be further indented, otherwise it is not a body of the preceding expr *) if indent > state.current_indent then begin push_mode state ModeNormal; state.current_indent <- indent; parse_body_indent state parse prompt nest [] end else [] and parse_body_indent state parse prompt nest el = (* GS TODO: reformulate with "match ... with exception" *) let e = try ParseExp (parse_exp state parse prompt false nest) with End_of_file -> if state.is_interactive then Lm_printf.printf ".@."; pop_mode state; ParseEOF | Omake_value_type.OmakeException _ as exn when state.is_interactive -> Lm_printf.eprintf "%a@." Omake_exn_print.pp_print_exn exn; ParseError in match e with ParseExp e -> parse_body_indent state parse prompt nest (List.rev_append e el) | ParseError -> parse_body_indent state parse prompt nest el | ParseEOF -> List.rev el (* * Parse a file. * GS: Entry point *) let parse_ast name = let inx = open_in name in let readline _ = try input_line inx ^ "\n" with End_of_file -> "" in let state = create name readline in let el = parse_body_indent state Omake_ast_parse.shell "<prompt>" 0 [] in close_in inx; el (* * Parse a string. 
* GS: Entry point *) let parse_string s = let len = String.length s in let index = ref 0 in let readline _ = let start = !index in let rec search i = if i = len then if start < i then begin index := i; String.sub s start (i - start) ^ "\n" end else raise End_of_file else if s.[i] = '\n' then begin index := i + 1; String.sub s start (i - start + 1) end else search (succ i) in search start in let state = create "-" readline in parse_body_indent state Omake_ast_parse.shell "<prompt>" 0 [] (* * Parse an expression. *) let create_shell () = let state = create "-" Lm_readline.readline in state.is_interactive <- Lm_readline.is_interactive (); state (* * Copy the state, if an exception happens, then * restore the initial state. *) let parse_shell state prompt = let stack = save_mode state in state.current_fill_ok <- true; try parse_exp state Omake_ast_parse.shell prompt true 0 with exn -> Lm_readline.flush (); restore_mode state stack; state.current_buffer <- ""; state.current_index <- 0; raise exn (* * Just dependency analysis. *) let parse_deps name = let inx = open_in name in let lexbuf = Lexing.from_channel inx in let deps = try Omake_ast_parse.deps lex_deps lexbuf with exn -> close_in inx; Lm_printf.eprintf "%s: char %d: scanner dependency syntax error@." name (Lexing.lexeme_end lexbuf); raise exn in close_in inx; deps # 2303 "omake_ast_lex.ml"
types.mli
(* $Id: types.mli 10795 2010-11-11 22:41:03Z lefessan $ *) (* Representation of types and declarations *) open Asttypes (* Type expressions for the core language *) type type_expr = { mutable desc: type_desc; mutable level: int; mutable id: int } and type_desc = Tvar | Tarrow of label * type_expr * type_expr * commutable | Ttuple of type_expr list | Tconstr of Path.t * type_expr list * abbrev_memo ref | Tobject of type_expr * (Path.t * type_expr list) option ref | Tfield of string * field_kind * type_expr * type_expr | Tnil | Tlink of type_expr | Tsubst of type_expr (* for copying *) | Tvariant of row_desc | Tunivar | Tpoly of type_expr * type_expr list | Tpackage of Path.t * string list * type_expr list and row_desc = { row_fields: (label * row_field) list; row_more: type_expr; row_bound: unit; (* kept for compatibility *) row_closed: bool; row_fixed: bool; row_name: (Path.t * type_expr list) option } and row_field = Rpresent of type_expr option | Reither of bool * type_expr list * bool * row_field option ref (* 1st true denotes a constant constructor *) (* 2nd true denotes a tag in a pattern matching, and is erased later *) | Rabsent and abbrev_memo = Mnil | Mcons of private_flag * Path.t * type_expr * type_expr * abbrev_memo | Mlink of abbrev_memo ref and field_kind = Fvar of field_kind option ref | Fpresent | Fabsent and commutable = Cok | Cunknown | Clink of commutable ref module TypeOps : sig type t = type_expr val compare : t -> t -> int val equal : t -> t -> bool val hash : t -> int end (* Maps of methods and instance variables *) module Meths : Map.S with type key = string module Vars : Map.S with type key = string (* Value descriptions *) type value_description = { val_type: type_expr; (* Type of the value *) val_kind: value_kind } and value_kind = Val_reg (* Regular value *) | Val_prim of Primitive.description (* Primitive *) | Val_ivar of mutable_flag * string (* Instance variable (mutable ?) 
*) | Val_self of (Ident.t * type_expr) Meths.t ref * (Ident.t * mutable_flag * virtual_flag * type_expr) Vars.t ref * string * type_expr (* Self *) | Val_anc of (string * Ident.t) list * string (* Ancestor *) | Val_unbound (* Unbound variable *) (* Constructor descriptions *) type constructor_description = { cstr_res: type_expr; (* Type of the result *) cstr_args: type_expr list; (* Type of the arguments *) cstr_arity: int; (* Number of arguments *) cstr_tag: constructor_tag; (* Tag for heap blocks *) cstr_consts: int; (* Number of constant constructors *) cstr_nonconsts: int; (* Number of non-const constructors *) cstr_private: private_flag } (* Read-only constructor? *) and constructor_tag = Cstr_constant of int (* Constant constructor (an int) *) | Cstr_block of int (* Regular constructor (a block) *) | Cstr_exception of Path.t (* Exception constructor *) (* Record label descriptions *) type label_description = { lbl_name: string; (* Short name *) lbl_res: type_expr; (* Type of the result *) lbl_arg: type_expr; (* Type of the argument *) lbl_mut: mutable_flag; (* Is this a mutable field? *) lbl_pos: int; (* Position in block *) lbl_all: label_description array; (* All the labels in this type *) lbl_repres: record_representation; (* Representation for this record *) lbl_private: private_flag } (* Read-only field? 
*) and record_representation = Record_regular (* All fields are boxed / tagged *) | Record_float (* All fields are floats *) (* Type definitions *) type type_declaration = { type_params: type_expr list; type_arity: int; type_kind: type_kind; type_private: private_flag; type_manifest: type_expr option; type_variance: (bool * bool * bool) list } (* covariant, contravariant, weakly contravariant *) and type_kind = Type_abstract | Type_variant of (string * type_expr list) list | Type_record of (string * mutable_flag * type_expr) list * record_representation type exception_declaration = type_expr list (* Type expressions for the class language *) module Concr : Set.S with type elt = string type class_type = Cty_constr of Path.t * type_expr list * class_type | Cty_signature of class_signature | Cty_fun of label * type_expr * class_type and class_signature = { cty_self: type_expr; cty_vars: (mutable_flag * virtual_flag * type_expr) Vars.t; cty_concr: Concr.t; cty_inher: (Path.t * type_expr list) list } type class_declaration = { cty_params: type_expr list; mutable cty_type: class_type; cty_path: Path.t; cty_new: type_expr option; cty_variance: (bool * bool) list } type class_type_declaration = { clty_params: type_expr list; clty_type: class_type; clty_path: Path.t; clty_variance: (bool * bool) list } (* Type expressions for the module language *) type module_type = Mty_ident of Path.t | Mty_signature of signature | Mty_functor of Ident.t * module_type * module_type and signature = signature_item list and signature_item = Sig_value of Ident.t * value_description | Sig_type of Ident.t * type_declaration * rec_status | Sig_exception of Ident.t * exception_declaration | Sig_module of Ident.t * module_type * rec_status | Sig_modtype of Ident.t * modtype_declaration | Sig_class of Ident.t * class_declaration * rec_status | Sig_class_type of Ident.t * class_type_declaration * rec_status and modtype_declaration = Modtype_abstract | Modtype_manifest of module_type and rec_status = 
Trec_not (* not recursive *) | Trec_first (* first in a recursive group *) | Trec_next (* not first in a recursive group *)
(***********************************************************************) (* *) (* Objective Caml *) (* *) (* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) (* *) (* Copyright 1996 Institut National de Recherche en Informatique et *) (* en Automatique. All rights reserved. This file is distributed *) (* under the terms of the Q Public License version 1.0. *) (* *) (***********************************************************************)
dune
(library (name inline_css) (public_name ppx_css.inline_css) (libraries js_of_ocaml core re core_kernel.reversed_list) (preprocess (pps js_of_ocaml-ppx)))
buffer.mli
(* Extensible byte buffers. This interface mirrors the stdlib [Buffer]
   module; only the declarations are visible here, so the per-function
   notes below assume that correspondence — confirm against the
   implementation. *)

type t
(* Abstract type of buffers holding an extensible sequence of bytes. *)

val create : int -> t
(* [create n] returns a fresh empty buffer; [n] is an initial size hint
   for the internal storage, not a limit on the contents. *)

val contents : t -> string
(* Copy of the current contents, as a string. *)

val to_bytes : t -> bytes
(* Copy of the current contents, as bytes. *)

val sub : t -> int -> int -> string
(* [sub b off len] is the substring of the contents starting at [off],
   of length [len]. *)

val blit : t -> int -> bytes -> int -> int -> unit
(* [blit src srcoff dst dstoff len] copies [len] bytes from the buffer
   into [dst] starting at [dstoff]. *)

val nth : t -> int -> char
(* Character stored at the given position. *)

val length : t -> int
(* Number of bytes currently stored. *)

val clear : t -> unit
(* Empty the buffer (internal storage is kept). *)

val reset : t -> unit
(* Empty the buffer; presumably also releases the internal storage back
   to its initial size, as in stdlib [Buffer.reset] — confirm. *)

val add_char : t -> char -> unit
(* Append one character. *)

val add_string : t -> string -> unit
(* Append a whole string. *)

val add_bytes : t -> bytes -> unit
(* Append whole bytes. *)

val add_substring : t -> string -> int -> int -> unit
(* Append the range [off, off+len) of a string. *)

val add_subbytes : t -> bytes -> int -> int -> unit
(* Append the range [off, off+len) of a bytes value. *)

val add_substitute : t -> (string -> string) -> string -> unit
(* Append a string after substituting $-style variables through the
   given function (stdlib [Buffer.add_substitute] semantics — confirm). *)

val add_buffer : t -> t -> unit
(* Append the contents of another buffer. *)

val add_channel : t -> in_channel -> int -> unit
(* Read [n] bytes from the channel and append them. *)

val output_buffer : out_channel -> t -> unit
(* Write the buffer's contents to the output channel. *)
subText.mli
(* This library is free software; you can redistribute it and/or *) (* modify it under the terms of the GNU Lesser General Public License *) (* as published by the Free Software Foundation; either version 2 of *) (* the License, or (at your option) any later version. *) (* As a special exception to the GNU Library General Public License, you *) (* may link, statically or dynamically, a "work that uses this library" *) (* with a publicly distributed version of this library to produce an *) (* executable file containing portions of this library, and distribute *) (* that executable file under terms of your choice, without any of the *) (* additional requirements listed in clause 6 of the GNU Library General *) (* Public License. By "a publicly distributed version of this library", *) (* we mean either the unmodified Library as distributed by the authors, *) (* or a modified version of this library that is distributed under the *) (* conditions defined in clause 3 of the GNU Library General Public *) (* License. This exception does not however invalidate any other reasons *) (* why the executable file might be covered by the GNU Library General *) (* Public License . *) (* This library is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *) (* Lesser General Public License for more details. *) (* You should have received a copy of the GNU Lesser General Public *) (* License along with this library; if not, write to the Free Software *) (* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *) (* USA *) (* You can contact the authour by sending email to *) (* yoriyuki.y@gmail.com *) (** Sub-texts, parts of original (ur-) texts. The signature and semantics matches those of UStorage. 
*) module type Type = sig type t val get : t -> int -> UChar.t val init : int -> (int -> UChar.t) -> t val length : t -> int type index val look : t -> index -> UChar.t val nth : t -> int -> index val first : t -> index val last : t -> index val next : t -> index -> index val prev : t -> index -> index val move : t -> index -> int -> index val out_of_range : t -> index -> bool val compare_index : t -> index -> index -> int val iter : (UChar.t -> unit) -> t -> unit val compare : t -> t -> int module Buf : sig type buf val create : int -> buf val contents : buf -> t val clear : buf -> unit val reset : buf -> unit val add_char : buf -> UChar.t -> unit val add_string : buf -> t -> unit val add_buffer : buf -> buf -> unit end (** The type of original texts. *) type ur_text (** The type of indexes of original texts. *) type ur_index (** [refer t i j] returns the part of [t] from [i] until [j]. The character pointed by [j] is not included in the result. If [j] is equal to [i] or located before [j], the result is an empty string. *) val refer : ur_text -> ur_index -> ur_index -> t (** [excerpt t] copies the contents of [t] as a new ur_text. *) val excerpt : t -> ur_text (** [context t] returns the tuple [(s, i, j)] such that [t = refer s i j]. *) val context : t -> ur_text * ur_index * ur_index (** Conversion from indexes of sub-texts to ur_texts. *) val ur_index_of : t -> index -> ur_index end module Make : functor (Text : UnicodeString.Type) -> Type with type ur_text = Text.t and type ur_index = Text.index
(* Copyright (C) 2002, 2003 Yamagata Yoriyuki. distributed with LGPL *)
idl_fw_printer.h
/*
 * PRINTER FOR THE IDL FLOYD-WARSHALL SOLVER
 *
 * Debug/trace helpers: each function writes a textual rendering of part
 * of an idl_solver_t's state to the given FILE stream.
 */

#ifndef __IDL_FW_PRINTER_H
#define __IDL_FW_PRINTER_H

#include <stdio.h>

#include "solvers/floyd_warshall/idl_floyd_warshall.h"


/*
 * Print name of a vertex x
 */
extern void print_idl_vertex(FILE *f, int32_t x);

/*
 * Value of vertex x in the idl solver
 * - we use distance[0, v] as the value
 * - if the distance is not defined we print ???
 */
extern void print_idl_vertex_value(FILE *f, idl_solver_t *idl, int32_t x);

/*
 * Atom: in format [<bool var> := (x - y <= d)]
 * - x and y are vertices
 */
extern void print_idl_atom(FILE *f, idl_atom_t *atom);

/* All atoms of the solver (see print_idl_atom for the per-atom format). */
extern void print_idl_atoms(FILE *f, idl_solver_t *idl);

/*
 * Difference logic triple (x - y + d)
 * - x and y are vertices
 */
extern void print_idl_triple(FILE *f, dl_triple_t *triple);

/*
 * Variable triples: in the format u := (x - y + d)
 * - x and y are vertices
 * - u is a theory variable
 */
extern void print_idl_var_name(FILE *f, thvar_t u);
extern void print_idl_var_def(FILE *f, idl_solver_t *solver, thvar_t u);
extern void print_idl_var_table(FILE *f, idl_solver_t *solver);

/*
 * Edges
 * NOTE(review): presumably print_idl_axioms prints the asserted axiom
 * edges and print_idl_edges the full edge set — confirm in the .c file.
 */
extern void print_idl_axioms(FILE *f, idl_solver_t *solver);
extern void print_idl_edges(FILE *f, idl_solver_t *solver);


#endif /* __IDL_FW_PRINTER_H */
/* * The Yices SMT Solver. Copyright 2014 SRI International. * * This program may only be used subject to the noncommercial end user * license agreement which is downloadable along with this program. */
jsonrpc2.mli
(* JSON-RPC 2 connection built on top of a generic IO monad: the server
   logic comes from [Server.Make]; this module wires it to a pair of
   channels and drives the read loop. *)

type json = Yojson.Safe.t
(** JSON values, as represented by [Yojson.Safe]. *)

module type IO = Sigs.IO
(** The IO monad (channels + bind/return) the connection runs in;
    see {!Sigs.IO}. *)

module type S = sig
  module IO : IO

  type t
  (** A jsonrpc2 connection. *)

  include module type of Server.Make(IO)

  val create : ic:IO.in_channel -> oc:IO.out_channel -> server -> t
  (** Create a connection from the pair of channels *)

  val create_stdio : server -> t
  (** Create a connection using stdin/stdout *)

  val run : ?shutdown:(unit -> bool) -> t -> unit IO.t
  (** Listen for incoming messages and responses.
      @param shutdown if true, tells the server to shut down *)
end

module Make(IO : IO) : S with module IO = IO
(** Instantiate the connection for a concrete IO monad. *)
thread.mli
(* Navigation links ("threading") between named output pages: records
   prev/next/up relations keyed by name.
   NOTE(review): only the signature is visible here; the notes below are
   inferred from names and types alone — confirm against the
   implementation. *)

val setup : string -> string -> unit
(* [setup a b] — presumably registers page [a] with parent/anchor [b]
   (see [up]); argument roles to be confirmed. *)

val setprev : string -> string -> unit
(* [setprev name p] records [p] as the predecessor of [name]. *)

val setnext : string -> string -> unit
(* [setnext name n] records [n] as the successor of [name]. *)

val setprevnext : string -> string -> unit
(* Sets both links in one call — argument roles to be confirmed. *)

val next : string -> string
(* Successor of the given page, per the recorded links. *)

val prev : string -> string
(* Predecessor of the given page. *)

val up : string -> string
(* Parent of the given page. *)
(***********************************************************************) (* *) (* HEVEA *) (* *) (* Luc Maranget, projet PARA, INRIA Rocquencourt *) (* *) (* Copyright 1998 Institut National de Recherche en Informatique et *) (* Automatique. Distributed only by permission. *) (* *) (***********************************************************************)
dune
(library (public_name vendored))
bt2.ml
(* Size-balanced binary trees carrying two payloads ('a, 'b) per node.
   A [Node] caches: total size, left subtree, payload0, payload1,
   right subtree. *)
type (+'a, +'b) t =
  | Leaf
  | Node of int * ('a, 'b) t * 'a * 'b * ('a, 'b) t

(* Number of elements; O(1) thanks to the cached size. *)
let size = function
  | Node (s, _, _, _, _) -> s
  | Leaf -> 0

(** {1 Balance criteria}

    Functions are not symmetric.
    The first argument should always be of the same power of two or
    smaller (guaranteed by construction). *)

(** [smaller_ell smin smax] iff
    - [smin] is less than [smax]
    - [smin] and [smax] differs by less than two magnitude orders, i.e
      msbs(smin) >= msbs(smax) - 1
      where msbs is the index of the most significant bit set *)
let smaller_ell smin smax = (smin < smax) && ((smin land smax) lsl 1 < smax)

(** [disbalanced smin smax] checks if two sub-trees of size [smin] and
    [smax] are disbalanced. That is, msbs(smin) < msbs(smax) - 1 *)
let disbalanced smin smax = smaller_ell smin (smax lsr 1)

(** {1 Smart but not too much constructors} *)

(** Construct node and check balance
let node_ l x0 x1 r =
  let sl = size l and sr = size r in
  if sl < sr then
    assert (not (disbalanced sl sr))
  else
    assert (not (disbalanced sr sl));
  Node (sl + 1 + sr, l, x0, x1, r)
*)

(** Construct a Node, computing the cached size; does NOT rebalance. *)
let node_ l x0 x1 r = Node (size l + 1 + size r, l, x0, x1, r)

(** Rotations.
    [k] is a node-construction continuation (e.g. [node_] or one of the
    recursive balancing constructors below) used to build the two nodes
    produced by the rotation. *)

let rot_left l x0 x1 r k = match r with
  | Node (_, rl, y0, y1, rr) -> k (k l x0 x1 rl) y0 y1 rr
  | _ -> assert false

let rot_right l y0 y1 r k = match l with
  | Node (_, ll, x0, x1, lr) -> k ll x0 x1 (k lr y0 y1 r)
  | _ -> assert false

(** Balancing *)

(* Right side too heavy: if the right child leans left, first rotate it
   right (double rotation), then rotate the whole tree left. *)
let inc_left l x0 x1 r k =
  let r = match r with
    | Node (_, rl, y0, y1, rr) when smaller_ell (size rr) (size rl) ->
      rot_right rl y0 y1 rr k
    | _ -> r
  in
  rot_left l x0 x1 r k

(* Mirror image of [inc_left]: left side too heavy. *)
let inc_right l y0 y1 r k =
  let l = match l with
    | Node (_, ll, x0, x1, lr) when smaller_ell (size ll) (size lr) ->
      rot_left ll x0 x1 lr k
    | _ -> l
  in
  rot_right l y0 y1 r k

(** Balance trees leaning to the right *)
let rec node_left l x0 x1 r =
  if disbalanced (size l) (size r) then
    inc_left l x0 x1 r node_left
  else
    node_ l x0 x1 r

(** Balance trees leaning to the left *)
let rec node_right l y0 y1 r =
  if disbalanced (size r) (size l) then
    inc_right l y0 y1 r node_right
  else
    node_ l y0 y1 r

(** Public interface *)

(* The empty tree. *)
let leaf = Leaf

(* [node l x0 x1 r] builds a balanced node from two already-balanced
   subtrees, dispatching to the constructor that rebalances towards the
   heavier side. *)
let node l x0 x1 r = match l, r with
  | Leaf, Leaf -> node_ leaf x0 x1 leaf
  | l, r when size l < size r -> node_left l x0 x1 r
  | l, r -> node_right l x0 x1 r

(* Concatenate two trees, preserving left-to-right element order;
   recursion descends along the larger side's spine. *)
let rec join l r = match l, r with
  | Leaf, t | t, Leaf -> t
  | Node (sl, ll, x0, x1, lr), Node (sr, rl, y0, y1, rr) ->
    if sl <= sr then
      node (join l rl) y0 y1 rr
    else
      node ll x0 x1 (join lr r)

(* [rank n t] returns the payload pair of the [n]-th element (0-based,
   in-order). Raises [Not_found] if [n] is out of range. *)
let rec rank n = function
  | Leaf -> raise Not_found
  | Node (_, l, x0, x1, r) ->
    let sl = size l in
    if n = sl then
      x0, x1
    else if n < sl then
      rank n l
    else
      rank (n - 1 - sl) r
client_proto_contracts.ml
(* Client-side resolution of Tezos contracts from aliases, key aliases,
   or literal b58check notations. *)

open Protocol
open Alpha_context

(* Entity descriptor plugged into [Client_aliases.Alias]: tells the alias
   machinery how to compare, encode and (un)parse contracts. *)
module ContractEntity = struct
  type t = Contract.t

  include (Contract : Compare.S with type t := t)

  let encoding = Contract.encoding

  (* Parse a b58check contract notation; failures are wrapped as
     environment errors with an explanatory trace. *)
  let of_source s =
    match Contract.of_b58check s with
    | Error _ as err ->
        Lwt.return (Environment.wrap_error err)
        |> trace (error_of_fmt "bad contract notation")
    | Ok s -> return s

  let to_source s = return (Contract.to_b58check s)

  let name = "contract"
end

module RawContractAlias = Client_aliases.Alias (ContractEntity)

module ContractAlias = struct
  (* Look [s] up first among contract aliases, then among key aliases
     (a key alias resolves to its implicit contract); fails otherwise. *)
  let find cctxt s =
    RawContractAlias.find_opt cctxt s >>=? function
    | Some v -> return (s, v)
    | None -> (
        Client_keys.Public_key_hash.find_opt cctxt s >>=? function
        | Some v -> return (s, Contract.implicit_contract v)
        | None -> failwith "no contract or key named %s" s)

  (* Resolve a key alias to its implicit contract; fails if unknown. *)
  let find_key cctxt name =
    Client_keys.Public_key_hash.find cctxt name >>=? fun v ->
    return (name, Contract.implicit_contract v)

  (* Inverse lookup: a human-readable alias for [c] — "key:<name>" when
     [c] is the implicit contract of a known key. *)
  let rev_find cctxt c =
    match Contract.is_implicit c with
    | Some hash -> (
        Client_keys.Public_key_hash.rev_find cctxt hash >>=? function
        | Some name -> return_some ("key:" ^ name)
        | None -> return_none)
    | None -> RawContractAlias.rev_find cctxt c

  (* "key:<name>" forces key-alias lookup; anything else goes through
     the autodetecting [find]. *)
  let get_contract cctxt s =
    match String.split ~limit:1 ':' s with
    | ["key"; key] -> find_key cctxt key
    | _ -> find cctxt s

  (* Completion candidates: key aliases (prefixed with "key:") plus
     contract aliases. *)
  let autocomplete cctxt =
    Client_keys.Public_key_hash.autocomplete cctxt >>=? fun keys ->
    RawContractAlias.autocomplete cctxt >>=? fun contracts ->
    return (List.map (( ^ ) "key:") keys @ contracts)

  (* Clic positional parameter accepting a contract or key alias. *)
  let alias_param ?(name = "name") ?(desc = "existing contract alias") next =
    let desc =
      desc ^ "\n"
      ^ "Can be a contract alias or a key alias (autodetected in order).\n\
         Use 'key:name' to force the later."
    in
    Clic.(
      param
        ~name
        ~desc
        (parameter ~autocomplete (fun cctxt p -> get_contract cctxt p))
        next)

  (* Resolve a destination. Accepted forms, tried in order:
     "alias:<name>", "key:<name>", then plain alias / key / literal
     notation (errors of the two fallback lookups are concatenated). *)
  let find_destination cctxt s =
    match String.split ~limit:1 ':' s with
    | ["alias"; alias] -> find cctxt alias
    | ["key"; text] ->
        Client_keys.Public_key_hash.find cctxt text >>=?
        fun v -> return (s, Contract.implicit_contract v)
    | _ -> (
        find cctxt s >>= function
        | Ok v -> return v
        | Error k_errs -> (
            ContractEntity.of_source s >>= function
            | Ok v -> return (s, v)
            | Error c_errs -> Lwt.return_error (k_errs @ c_errs)))

  (* Clic parameter combining alias/key completion with [find_destination]. *)
  let destination_parameter () =
    Clic.parameter
      ~autocomplete:(fun cctxt ->
        autocomplete cctxt >>=? fun list1 ->
        Client_keys.Public_key_hash.autocomplete cctxt >>=? fun list2 ->
        return (list1 @ list2))
      find_destination

  (* Positional destination parameter. *)
  let destination_param ?(name = "dst") ?(desc = "destination contract") next =
    let desc =
      String.concat
        "\n"
        [
          desc;
          "Can be an alias, a key, or a literal (autodetected in order).\n\
           Use 'text:literal', 'alias:name', 'key:name' to force.";
        ]
    in
    Clic.param ~name ~desc (destination_parameter ()) next

  (* Optional named destination argument. *)
  let destination_arg ?(name = "dst") ?(doc = "destination contract") () =
    let doc =
      String.concat
        "\n"
        [
          doc;
          "Can be an alias, a key, or a literal (autodetected in order).\n\
           Use 'text:literal', 'alias:name', 'key:name' to force.";
        ]
    in
    Clic.arg ~long:name ~doc ~placeholder:name (destination_parameter ())

  (* Best human-readable name for [contract]: its alias when known,
     otherwise its b58check notation. *)
  let name cctxt contract =
    rev_find cctxt contract >>=? function
    | None -> return (Contract.to_b58check contract)
    | Some name -> return name
end

(* Every known contract as (prefix, name, contract): named contracts
   first, then accounts; an account's name is prefixed with "key:" when
   a contract alias with the same name also exists. *)
let list_contracts cctxt =
  RawContractAlias.load cctxt >>=? fun raw_contracts ->
  List.map_s (fun (n, v) -> Lwt.return ("", n, v)) raw_contracts
  >>= fun contracts ->
  Client_keys.Public_key_hash.load cctxt >>=? fun keys ->
  (* List accounts (implicit contracts of identities) *)
  List.map_es
    (fun (n, v) ->
      RawContractAlias.mem cctxt n >>=? fun mem ->
      let p = if mem then "key:" else "" in
      let v' = Contract.implicit_contract v in
      return (p, n, v'))
    keys
  >>=? fun accounts -> return (contracts @ accounts)

(* Delegate (if any) of [source] at the given chain/block via the RPC. *)
let get_delegate cctxt ~chain ~block source =
  Alpha_services.Contract.delegate_opt cctxt (chain, block) source
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
q_ast_base.ml
#load "pa_macro.cmo"; #load "pa_extend.cmo"; #load "q_MLast.cmo"; (* AST quotations that works by running the language parser (and its possible extensions) and meta-ifying the nodes. Works completely only in "strict" mode. In "transitional" mode, not all antiquotations are available. *) value eval_anti entry loc typ str = let loc = let sh = if typ = "" then String.length "$" else String.length "$" + String.length typ + String.length ":" in let len = String.length str in Ploc.sub loc sh len in let r = try Ploc.call_with Plexer.force_antiquot_loc False (Grammar.Entry.parse entry) (Stream.of_string str) with [ Ploc.Exc loc1 exc -> let shift = Ploc.first_pos loc in let loc = Ploc.make_loc (Ploc.file_name loc) (Ploc.line_nb loc + Ploc.line_nb loc1 - 1) (if Ploc.line_nb loc1 = 1 then Ploc.bol_pos loc else shift + Ploc.bol_pos loc1) (shift + Ploc.first_pos loc1, shift + Ploc.last_pos loc1) "" in raise (Ploc.Exc loc exc) ] in (loc, r) ; value skip_to_next_colon s i = loop (i + 1) where rec loop j = if j = String.length s then (i, 0) else match s.[j] with [ ':' -> (j, j - i - 1) | 'a'..'z' | 'A'..'Z' | '0'..'9' | '!' 
| '_' -> loop (j + 1) | _ -> (i, 0) ] ; value split_anti_loc s = try let i = String.index s ':' in let (j, len) = skip_to_next_colon s i in let locs = String.sub s 0 i in let kind = String.sub s (i + 1) len in let rest = String.sub s (j+1) (String.length s - j - 1) in Some (locs, kind, rest) with [ Not_found | Failure _ -> None ] ; value replace_antiloc_kind ~{newkind} s = match split_anti_loc s with [ None -> s | Some (locs, _, rest) -> String.concat ":" [locs; newkind; rest] ] ; value get_anti_loc s = try let i = String.index s ':' in let (j, len) = skip_to_next_colon s i in let kind = String.sub s (i + 1) len in let loc = let k = String.index s ',' in let bp = int_of_string (String.sub s 0 k) in let ep = int_of_string (String.sub s (k + 1) (i - k - 1)) in Ploc.make_unlined (bp, ep) in Some (loc, kind, String.sub s (j + 1) (String.length s - j - 1)) with [ Not_found | Failure _ -> None ] ; module type MetaSig = sig type t = 'abstract; type prefix_t = 'abstract; value loc_v : unit -> t; value node : ?prefix:prefix_t -> string -> list t -> t; value node_no_loc : ?prefix:prefix_t -> string -> list t -> t; value list : ('a -> t) -> list 'a -> t; value option : ('a -> t) -> option 'a -> t; value vala : ('a -> t) -> MLast.v 'a -> t; value char : char -> t; value bool : bool -> t; value int : int -> t; value int32 : int32 -> t; value int64 : int64 -> t; value nativeint : nativeint -> t; value float : float -> t; value string : string -> t; value tuple : list t -> t; value record : list (MLast.patt * t) -> t; value xtr_typed : string -> Ploc.t -> string -> t; value xtr : Ploc.t -> string -> t; value xtr_or_anti : Ploc.t -> (t -> t) -> string -> t; end ; value anti_anti n = "_" ^ n; value is_anti_anti n = String.length n > 0 && n.[0] = '_'; module E_MetaSig = struct type t = MLast.expr; type prefix_t = MLast.longid; value loc = Ploc.dummy; value loc_v () = <:expr< $lid:Ploc.name.val$ >>; value node ?{prefix} con el = let prefix = match prefix with [ None -> <:longident< 
MLast >> | Some p -> p ] in List.fold_left (fun e1 e2 -> <:expr< $e1$ $e2$ >>) <:expr< $longid:prefix$ . $uid:con$ $loc_v ()$ >> el ; value node_no_loc ?{prefix} con el = let prefix = match prefix with [ None -> <:longident< MLast >> | Some p -> p ] in List.fold_left (fun e1 e2 -> <:expr< $e1$ $e2$ >>) <:expr< $longid:prefix$ . $uid:con$ >> el ; value list elem el = loop el where rec loop el = match el with [ [] -> <:expr< [] >> | [e :: el] -> <:expr< [$elem e$ :: $loop el$] >> ] ; value option elem oe = match oe with [ None -> <:expr< None >> | Some e -> <:expr< Some $elem e$ >> ] ; value vala elem = IFNDEF STRICT THEN fun e -> elem e ELSE fun [ Ploc.VaAnt s -> match get_anti_loc s with [ Some (loc, typ, str) -> let (loc, r) = eval_anti Pcaml.expr_eoi loc typ str in if is_anti_anti typ then <:expr< $anti:r$ >> else if not Pcaml.strict_mode.val then <:expr< $anti:r$ >> else <:expr< Ploc.VaVal $anti:r$ >> | None -> assert False ] | Ploc.VaVal v -> if not Pcaml.strict_mode.val then elem v else <:expr< Ploc.VaVal $elem v$ >> ] END ; value char c = let c = Char.escaped c in <:expr< $chr:c$ >>; value bool b = if b then <:expr< True >> else <:expr< False >>; value int n = let loc = Ploc.dummy in <:expr< $int:string_of_int n$ >> ; value int32 n = let loc = Ploc.dummy in <:expr< $int32:Int32.to_string n$ >> ; value int64 n = let loc = Ploc.dummy in <:expr< $int64:Int64.to_string n$ >> ; value nativeint n = let loc = Ploc.dummy in <:expr< $nativeint:Nativeint.to_string n$ >> ; value float n = let loc = Ploc.dummy in <:expr< $flo:Float.to_string n$ >> ; value string s = <:expr< $str:s$ >>; value tuple le = <:expr< ($list:le$) >>; value record lfe = <:expr< {$list:lfe$} >>; value xtr_typed wantty loc s = match get_anti_loc s with [ Some (_, typ, str) when typ = wantty -> let (loc, r) = eval_anti Pcaml.expr_eoi loc "" str in <:expr< $anti:r$ >> | _ -> assert False ] ; value xtr loc s = match get_anti_loc s with [ Some (_, typ, str) -> match typ with [ "" -> let (loc, r) = 
eval_anti Pcaml.expr_eoi loc "" str in <:expr< $anti:r$ >> | _ -> assert False ] | None -> assert False ] ; value xtr_or_anti loc f s = match get_anti_loc s with [ Some (_, typ, str) -> match typ with [ "" | "exp" -> let (loc, r) = eval_anti Pcaml.expr_eoi loc typ str in <:expr< $anti:r$ >> | "anti" -> let (loc, r) = eval_anti Pcaml.expr_eoi loc "anti" str in f <:expr< $anti:r$ >> | _ -> assert False ] | None -> assert False ] ; end ; module P_MetaSig = struct type t = MLast.patt; type prefix_t = MLast.longid; value loc = Ploc.dummy; value loc_v () = <:patt< _ >>; value node ?{prefix} con pl = let prefix = match prefix with [ None -> <:longident< MLast >> | Some p -> p ] in List.fold_left (fun p1 p2 -> <:patt< $p1$ $p2$ >>) <:patt< $longid:prefix$ . $uid:con$ _ >> pl ; value node_no_loc ?{prefix} con pl = let prefix = match prefix with [ None -> <:longident< MLast >> | Some p -> p ] in List.fold_left (fun p1 p2 -> <:patt< $p1$ $p2$ >>) <:patt< $longid:prefix$ . $uid:con$ >> pl ; value list elem el = loop el where rec loop el = match el with [ [] -> <:patt< [] >> | [e :: el] -> <:patt< [$elem e$ :: $loop el$] >> ] ; value option elem oe = match oe with [ None -> <:patt< None >> | Some e -> <:patt< Some $elem e$ >> ] ; value vala elem = IFNDEF STRICT THEN fun p -> elem p ELSE fun [ Ploc.VaAnt s -> match get_anti_loc s with [ Some (loc, typ, str) -> let (loc, r) = eval_anti Pcaml.patt_eoi loc typ str in if is_anti_anti typ then <:patt< $anti:r$ >> else if not Pcaml.strict_mode.val then <:patt< $anti:r$ >> else <:patt< Ploc.VaVal $anti:r$ >> | None -> assert False ] | Ploc.VaVal v -> if not Pcaml.strict_mode.val then elem v else <:patt< Ploc.VaVal $elem v$ >> ] END ; value char c = let c = Char.escaped c in <:patt< $chr:c$ >>; value bool b = if b then <:patt< True >> else <:patt< False >>; value int n = let loc = Ploc.dummy in <:patt< $int:string_of_int n$ >> ; value int32 n = let loc = Ploc.dummy in <:patt< $int32:Int32.to_string n$ >> ; value int64 n = let loc = 
Ploc.dummy in <:patt< $int64:Int64.to_string n$ >> ; value nativeint n = let loc = Ploc.dummy in <:patt< $nativeint:Nativeint.to_string n$ >> ; value float n = let loc = Ploc.dummy in <:patt< $flo:Float.to_string n$ >> ; value string s = <:patt< $str:s$ >>; value tuple lp = <:patt< ($list:lp$) >>; value record lfp = <:patt< {$list:lfp$} >>; value xtr_typed wantty loc s = match get_anti_loc s with [ Some (_, typ, str) when typ = wantty -> let (loc, r) = eval_anti Pcaml.patt_eoi loc "" str in <:patt< $anti:r$ >> | _ -> assert False ] ; value xtr loc s = match get_anti_loc s with [ Some (_, typ, str) -> match typ with [ "" -> let (loc, r) = eval_anti Pcaml.patt_eoi loc "" str in <:patt< $anti:r$ >> | _ -> assert False ] | None -> assert False ] ; value xtr_or_anti loc f s = match get_anti_loc s with [ Some (_, typ, str) -> match typ with [ "" | "exp" -> let (loc, r) = eval_anti Pcaml.patt_eoi loc "exp" str in <:patt< $anti:r$ >> | "anti" -> let (loc, r) = eval_anti Pcaml.patt_eoi loc "anti" str in f <:patt< $anti:r$ >> | _ -> assert False ] | None -> assert False ] ; end ; IFDEF STRICT THEN EXTEND Pcaml.class_expr_simple: LAST [ [ s = ANTIQUOT_LOC -> MLast.CeXtr loc s None ] ] ; Pcaml.class_type: LAST [ [ s = ANTIQUOT_LOC -> MLast.CtXtr loc s None ] ] ; Pcaml.ctyp: LAST [ [ s = ANTIQUOT_LOC -> MLast.TyXtr loc s None ] ] ; Pcaml.longident: LAST [ [ s = ANTIQUOT_LOC "longid" -> MLast.LiXtr loc s None ] ] ; Pcaml.extended_longident: LAST [ [ s = ANTIQUOT_LOC "longid" -> MLast.LiXtr loc s None ] ] ; Pcaml.expr: LAST [ [ s = ANTIQUOT_LOC "" -> MLast.ExXtr loc s None | s = ANTIQUOT_LOC "anti" -> MLast.ExXtr loc s None | s = ANTIQUOT_LOC "exp" -> MLast.ExXtr loc s None ] ] ; Pcaml.ipatt: LAST [ [ s = ANTIQUOT_LOC "" -> MLast.PaXtr loc s None ] ] ; Pcaml.module_expr: LAST [ [ s = ANTIQUOT_LOC -> MLast.MeXtr loc s None ] ] ; Pcaml.module_type: LAST [ [ s = ANTIQUOT_LOC -> MLast.MtXtr loc s None ] ] ; Pcaml.patt: LAST [ [ s = ANTIQUOT_LOC "" -> MLast.PaXtr loc s None | s = 
ANTIQUOT_LOC "anti" -> MLast.PaXtr loc s None ] ] ; Pcaml.sig_item: FIRST [ [ s = ANTIQUOT_LOC -> MLast.SgXtr loc s None ] ] ; Pcaml.str_item: FIRST [ [ s = ANTIQUOT_LOC -> MLast.StXtr loc s None | s = ANTIQUOT_LOC "stri" -> let s = replace_antiloc_kind ~{newkind=""} s in MLast.StXtr loc s None ] ] ; END; END; value check_anti_loc s = try let i = String.index s ':' in let (j, len) = skip_to_next_colon s i in String.sub s (i + 1) len with [ Not_found | Failure _ -> raise Stream.Failure ] ; let lex = Grammar.glexer Pcaml.gram in let tok_match = lex.Plexing.tok_match in lex.Plexing.tok_match := fun [("ANTIQUOT_LOC", p_prm) -> if p_prm <> "" && (p_prm.[0] = '~' || p_prm.[0] = '?') then let p_prm0 = p_prm.[0] in if p_prm.[String.length p_prm - 1] = ':' then let p_prm = String.sub p_prm 1 (String.length p_prm - 2) in fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && prm.[0] = p_prm0 then if prm.[String.length prm - 1] = ':' then let prm = String.sub prm 1 (String.length prm - 2) in let kind = check_anti_loc prm in if kind = p_prm || kind = anti_anti p_prm then prm else raise Stream.Failure else raise Stream.Failure else raise Stream.Failure | _ -> raise Stream.Failure ] else let p_prm = String.sub p_prm 1 (String.length p_prm - 1) in fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && prm.[0] = p_prm0 then if prm.[String.length prm - 1] = ':' then raise Stream.Failure else let prm = String.sub prm 1 (String.length prm - 1) in let kind = check_anti_loc prm in if kind = p_prm || kind = anti_anti p_prm then prm else raise Stream.Failure else raise Stream.Failure | _ -> raise Stream.Failure ] else fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && (prm.[0] = '~' || prm.[0] = '?') then raise Stream.Failure else let kind = check_anti_loc prm in if kind = p_prm then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V", p_prm) -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = p_prm || kind = anti_anti p_prm then prm else raise Stream.Failure | _ -> 
raise Stream.Failure ] | ("V CHAR", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "chr" || kind = anti_anti "chr" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V FLAG", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "flag" || kind = anti_anti "flag" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V FLOAT", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "flo" || kind = anti_anti "flo" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V INT", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "int" || kind = anti_anti "int" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V INT_l", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "int32" || kind = anti_anti "int32" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V INT_L", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "int64" || kind = anti_anti "int64" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V INT_n", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "nativeint" || kind = anti_anti "nativeint" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V LIDENT", "") -> fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && (prm.[0] = '~' || prm.[0] = '?') then raise Stream.Failure else let kind = check_anti_loc prm in if kind = "lid" || kind = anti_anti "lid" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V LIST", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "list" || kind = anti_anti "list" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V OPT", "") -> fun [ ("ANTIQUOT_LOC", prm) -> let kind = check_anti_loc prm in if kind = "opt" || kind = anti_anti "opt" then prm else raise 
Stream.Failure | _ -> raise Stream.Failure ] | ("V QUESTIONIDENT", "") -> fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && prm.[0] = '?' then if prm.[String.length prm - 1] = ':' then raise Stream.Failure else let prm = String.sub prm 1 (String.length prm - 1) in let kind = check_anti_loc prm in if kind = "" || kind = anti_anti "" then prm else raise Stream.Failure else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V QUESTIONIDENTCOLON", "") -> fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && prm.[0] = '?' then if prm.[String.length prm - 1] = ':' then let prm = String.sub prm 1 (String.length prm - 2) in let kind = check_anti_loc prm in if kind = "" || kind = anti_anti "" then prm else raise Stream.Failure else raise Stream.Failure else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V STRING", "") -> fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && (prm.[0] = '~' || prm.[0] = '?') then raise Stream.Failure else let kind = check_anti_loc prm in if kind = "str" || kind = anti_anti "str" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V TILDEIDENT", "") -> fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && prm.[0] = '~' then if prm.[String.length prm - 1] = ':' then raise Stream.Failure else let prm = String.sub prm 1 (String.length prm - 1) in let kind = check_anti_loc prm in if kind = "" || kind = anti_anti "" then prm else raise Stream.Failure else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V TILDEIDENTCOLON", "") -> fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && prm.[0] = '~' then if prm.[String.length prm - 1] = ':' then let prm = String.sub prm 1 (String.length prm - 2) in let kind = check_anti_loc prm in if kind = "" || kind = anti_anti "" then prm else raise Stream.Failure else raise Stream.Failure else raise Stream.Failure | _ -> raise Stream.Failure ] | ("V UIDENT", "") -> fun [ ("ANTIQUOT_LOC", prm) -> if prm <> "" && (prm.[0] = '~' || prm.[0] = '?') then raise Stream.Failure else let kind = check_anti_loc prm in if 
kind = "uid" || kind = anti_anti "uid" then prm else raise Stream.Failure | _ -> raise Stream.Failure ] | tok -> tok_match tok ] ; (* reinit the entry functions to take the new tok_match into account *) Grammar.iter_entry Grammar.reinit_entry_functions (Grammar.Entry.obj Pcaml.expr);
(* camlp5r *) (* q_ast_base.ml,v *) (* Copyright (c) INRIA 2007-2017 *)
startup_nat.c
#define CAML_INTERNALS

/* Start-up code */

#include <stdio.h>
#include <stdlib.h>
#include "caml/callback.h"
#include "caml/backtrace.h"
#include "caml/custom.h"
#include "caml/debugger.h"
#include "caml/fail.h"
#include "caml/freelist.h"
#include "caml/gc.h"
#include "caml/gc_ctrl.h"
#include "caml/intext.h"
#include "caml/memory.h"
#include "caml/misc.h"
#include "caml/mlvalues.h"
#include "caml/osdeps.h"
#include "caml/printexc.h"
#include "caml/stack.h"
#include "caml/startup_aux.h"
#include "caml/sys.h"
#ifdef WITH_SPACETIME
#include "caml/spacetime.h"
#endif
#ifdef HAS_UI
#include "caml/ui.h"
#endif

extern int caml_parser_trace;
CAMLexport header_t caml_atom_table[256];
char * caml_code_area_start, * caml_code_area_end;
struct ext_table caml_code_fragments_table;

/* Initialize the atom table and the static data and code area limits. */

/* One contiguous region emitted by the native-code linker. */
struct segment { char * begin; char * end; };

/* Register all static data segments in the page table, and compute the
   overall [caml_code_area_start, caml_code_area_end) bounds from the
   code segments, registering the result as a single code fragment.
   Both segment arrays are zero-terminated (begin == 0). */
static void init_static(void)
{
  extern struct segment caml_data_segments[], caml_code_segments[];
  int i;
  struct code_fragment * cf;

  caml_init_atom_table ();

  for (i = 0; caml_data_segments[i].begin != 0; i++) {
    /* PR#5509: we must include the zero word at end of data segment,
       because pointers equal to caml_data_segments[i].end are static data. */
    if (caml_page_table_add(In_static_data,
                            caml_data_segments[i].begin,
                            caml_data_segments[i].end + sizeof(value)) != 0)
      caml_fatal_error("not enough memory for initial page table");
  }
  /* Start from segment 0, then widen the bounds over the remaining ones. */
  caml_code_area_start = caml_code_segments[0].begin;
  caml_code_area_end = caml_code_segments[0].end;
  for (i = 1; caml_code_segments[i].begin != 0; i++) {
    if (caml_code_segments[i].begin < caml_code_area_start)
      caml_code_area_start = caml_code_segments[i].begin;
    if (caml_code_segments[i].end > caml_code_area_end)
      caml_code_area_end = caml_code_segments[i].end;
  }
  /* Register the code in the table of code fragments */
  cf = caml_stat_alloc(sizeof(struct code_fragment));
  cf->code_start = caml_code_area_start;
  cf->code_end = caml_code_area_end;
  cf->digest_computed = 0;
  caml_ext_table_init(&caml_code_fragments_table, 8);
  caml_ext_table_add(&caml_code_fragments_table, cf);
}

/* These are termination hooks used by the systhreads library */
struct longjmp_buffer caml_termination_jmpbuf;
void (*caml_termination_hook)(void *) = NULL;

extern value caml_start_program (void);
extern void caml_init_ieee_floats (void);
extern void caml_init_signals (void);
#ifdef _WIN32
extern void caml_win32_overflow_detection (void);
#endif

#if defined(_MSC_VER) && __STDC_SECURE_LIB__ >= 200411L
/* PR 4887: avoid crash box of windows runtime on some system calls */
extern void caml_install_invalid_parameter_handler();
#endif

/* Shared start-up path for all the caml_startup* entry points below.
   Parses OCAMLRUNPARAM, initializes every runtime subsystem (GC, floats,
   signals, backtraces, debugger stub, sys module), resolves the executable
   name, then runs the OCaml program.  [pooling] enables pooled allocation
   (forced on when caml_cleanup_on_exit is set).  Returns the program
   result, or Val_unit if start-up was aborted or the termination hook
   fired via caml_termination_jmpbuf. */
value caml_startup_common(char_os **argv, int pooling)
{
  char_os * exe_name, * proc_self_exe;
  char tos;  /* address of this local marks the top of the OCaml stack */

  /* Determine options */
#ifdef DEBUG
  caml_verb_gc = 0x3F;
#endif
  caml_parse_ocamlrunparam();
#ifdef DEBUG
  caml_gc_message (-1, "### OCaml runtime: debug mode ###\n");
#endif
  if (caml_cleanup_on_exit)
    pooling = 1;
  if (!caml_startup_aux(pooling))
    return Val_unit;

#ifdef WITH_SPACETIME
  caml_spacetime_initialize();
#endif
  caml_init_frame_descriptors();
  caml_init_ieee_floats();
  caml_init_locale();
#if defined(_MSC_VER) && __STDC_SECURE_LIB__ >= 200411L
  caml_install_invalid_parameter_handler();
#endif
  caml_init_custom_operations();
  caml_top_of_stack = &tos;
  caml_init_gc (caml_init_minor_heap_wsz, caml_init_heap_wsz,
                caml_init_heap_chunk_sz, caml_init_percent_free,
                caml_init_max_percent_free, caml_init_major_window,
                caml_init_custom_major_ratio, caml_init_custom_minor_ratio,
                caml_init_custom_minor_max_bsz);
  init_static();
  caml_init_signals();
#ifdef _WIN32
  caml_win32_overflow_detection();
#endif
  caml_init_backtrace();
  caml_debugger_init (); /* force debugger.o stub to be linked */
  exe_name = argv[0];
  if (exe_name == NULL) exe_name = _T("");
  /* Prefer the OS-reported executable path; fall back to PATH search. */
  proc_self_exe = caml_executable_name();
  if (proc_self_exe != NULL)
    exe_name = proc_self_exe;
  else
    exe_name = caml_search_exe_in_path(exe_name);
  caml_sys_init(exe_name, argv);
  /* Non-zero return here means a longjmp from the termination hook path. */
  if (sigsetjmp(caml_termination_jmpbuf.buf, 0)) {
    if (caml_termination_hook != NULL) caml_termination_hook(NULL);
    return Val_unit;
  }
  return caml_start_program();
}

/* Start the program without pooling; returns an encoded exception result
   instead of aborting on uncaught exceptions. */
value caml_startup_exn(char_os **argv)
{
  return caml_startup_common(argv, /* pooling */ 0);
}

/* Start the program; aborts via caml_fatal_uncaught_exception on an
   uncaught OCaml exception. */
void caml_startup(char_os **argv)
{
  value res = caml_startup_exn(argv);
  if (Is_exception_result(res))
    caml_fatal_uncaught_exception(Extract_exception(res));
}

/* Historical alias for caml_startup. */
void caml_main(char_os **argv)
{
  caml_startup(argv);
}

/* As caml_startup_exn, but with pooled allocation enabled. */
value caml_startup_pooled_exn(char_os **argv)
{
  return caml_startup_common(argv, /* pooling */ 1);
}

/* As caml_startup, but with pooled allocation enabled. */
void caml_startup_pooled(char_os **argv)
{
  value res = caml_startup_pooled_exn(argv);
  if (Is_exception_result(res))
    caml_fatal_uncaught_exception(Extract_exception(res));
}
/**************************************************************************/ /* */ /* OCaml */ /* */ /* Xavier Leroy and Damien Doligez, INRIA Rocquencourt */ /* */ /* Copyright 1996 Institut National de Recherche en Informatique et */ /* en Automatique. */ /* */ /* All rights reserved. This file is distributed under the terms of */ /* the GNU Lesser General Public License version 2.1, with the */ /* special exception on linking described in the file LICENSE. */ /* */ /**************************************************************************/
dune
; Dune stanza for the atdpy library.
; NOTE(review): depends on `re` (regular expressions) and `atd`
; (the ATD parser/AST library) — presumably the Python code-generation
; backend of atd; confirm against the project README.
(library (name atdpy) (libraries re atd ) )
dune
; Dune stanza for the opam_compiler library.
; Dependencies: bos (OS interaction), cmdliner (command-line parsing),
; github-unix (GitHub API client), re (regular expressions).
(library (name opam_compiler) (libraries bos cmdliner github-unix re))
opamGlobalState.mli
(** Loading and handling of the global state of an opam root *) open OpamTypes open OpamStateTypes (** Loads the global state (from the opam root obtained through [OpamStateConfig.(!r.root)]) *) val load: 'a lock -> 'a global_state (** Loads the global state as [load], and calls the given function while keeping it locked (as per the [lock] argument), releasing the lock afterwards *) val with_: 'a lock -> ('a global_state -> 'b) -> 'b (** The set of all installed packages, in any switch *) val all_installed: 'a global_state -> package_set val switches: 'a global_state -> switch list (** Fold over switches, using switch selections. Switch selection file [switch-state] is loaded only read-only; no further checks are done on the opam root version. *) val fold_switches: (switch -> switch_selections -> 'a -> 'a) -> 'b global_state -> 'a -> 'a (** Checks a switch for existence: either configured in the opam root, or an existing local switch with a configuration file pointing to the current root *) val switch_exists: 'a global_state -> switch -> bool (** Returns the map of installed instances of the package name towards the list of switches they are installed in *) val installed_versions: 'a global_state -> name -> switch list package_map (** Default list of repositories to get packages from, ordered by decreasing priority. This can be overridden by switch-specific selections, and does not have to include all configured repositories. *) val repos_list: 'a global_state -> repository_name list (** Releases any locks on the given global_state *) val unlock: 'a global_state -> unlocked global_state (** Releases any locks on the given global state and then ignores it. Using [drop gt] is equivalent to [ignore (unlock gt)], and safer than other uses of [ignore] where it is not enforced by the type-system that the value is unlocked before it is lost. 
*) val drop: 'a global_state -> unit (** Calls the provided function, ensuring a temporary write lock on the given global state *) val with_write_lock: ?dontblock:bool -> 'a global_state -> (rw global_state -> 'b * 'c global_state) -> 'b * 'a global_state (** Writes back the global configuration file ~/.opam/config *) val write: rw global_state -> unit (** Updates the configured list of switches, making sure the current switch is registered if it is set and exists, and removing any non-existing switches. Writes back to disk if possible (ie lock is available) *) val fix_switch_list: 'a global_state -> 'a global_state (** Description used for system inferred variables *) val inferred_from_system: string
(**************************************************************************) (* *) (* Copyright 2012-2019 OCamlPro *) (* Copyright 2012 INRIA *) (* *) (* All rights reserved. This file is distributed under the terms of the *) (* GNU Lesser General Public License version 2.1, with the special *) (* exception on linking described in the file LICENSE. *) (* *) (**************************************************************************)
delegate_services.mli
(** RPC service declarations for querying delegates (bakers) under the
    Alpha protocol: balances, delegation state, activity status, and
    baking/endorsing rights. *)

open Alpha_context

(** Lists the public key hashes of registered delegates.
    [?active] / [?inactive] select which activity classes to include. *)
val list:
  'a #RPC_context.simple -> 'a ->
  ?active:bool ->
  ?inactive:bool ->
  unit -> Signature.Public_key_hash.t list shell_tzresult Lwt.t

(** Aggregate information about a single delegate, as returned by
    {!info}.  Each field is also exposed by a dedicated service below. *)
type info = {
  balance: Tez.t ;
  frozen_balance: Tez.t ;
  frozen_balance_by_cycle: Delegate.frozen_balance Cycle.Map.t ;
  staking_balance: Tez.t ;
  delegated_contracts: Contract_hash.t list ;
  delegated_balance: Tez.t ;
  deactivated: bool ;
  grace_period: Cycle.t ;
}

(** JSON/binary encoding of {!info}. *)
val info_encoding: info Data_encoding.t

(** Retrieves the full {!info} record for the given delegate. *)
val info:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  info shell_tzresult Lwt.t

(** The services below each retrieve one field of {!info}. *)

val balance:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  Tez.t shell_tzresult Lwt.t

val frozen_balance:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  Tez.t shell_tzresult Lwt.t

val frozen_balance_by_cycle:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  Delegate.frozen_balance Cycle.Map.t shell_tzresult Lwt.t

val staking_balance:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  Tez.t shell_tzresult Lwt.t

val delegated_contracts:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  Contract_hash.t list shell_tzresult Lwt.t

val delegated_balance:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  Tez.t shell_tzresult Lwt.t

val deactivated:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  bool shell_tzresult Lwt.t

val grace_period:
  'a #RPC_context.simple -> 'a -> Signature.Public_key_hash.t ->
  Cycle.t shell_tzresult Lwt.t

module Baking_rights : sig

  (** One baking slot: a delegate allowed to bake [level] at the given
      [priority], with an estimated [timestamp] when one can be given. *)
  type t = {
    level: Raw_level.t ;
    delegate: Signature.Public_key_hash.t ;
    priority: int ;
    timestamp: Timestamp.t option ;
  }

  (** Retrieves the list of delegates allowed to bake a block.

      By default, it gives the best baking priorities for bakers
      that have at least one opportunity below the 64th priority for
      the next block.

      Parameters [levels] and [cycles] can be used to specify the
      (valid) level(s) in the past or future at which the baking
      rights have to be returned. Parameter [delegates] can be used
      to restrict the results to the given delegates. If parameter
      [all] is [true], all the baking opportunities for each baker
      at each level are returned, instead of just the first one.

      Returns the list of baking slots. Also returns the minimal
      timestamps that correspond to these slots. The timestamps are
      omitted for levels in the past, and are only estimates for
      levels later than the next block, based on the hypothesis that
      all predecessor blocks were baked at the first priority. *)
  val get:
    'a #RPC_context.simple ->
    ?levels: Raw_level.t list ->
    ?cycles: Cycle.t list ->
    ?delegates: Signature.public_key_hash list ->
    ?all: bool ->
    ?max_priority: int ->
    'a -> t list shell_tzresult Lwt.t

end

module Endorsing_rights : sig

  (** Endorsing slots held by one delegate at [level]. *)
  type t = {
    level: Raw_level.t ;
    delegate: Signature.Public_key_hash.t ;
    slots: int list ;
    estimated_time: Timestamp.t option ;
  }

  (** Retrieves the delegates allowed to endorse a block.

      By default, it gives the endorsement slots for bakers that have
      at least one in the next block.

      Parameters [levels] and [cycles] can be used to specify the
      (valid) level(s) in the past or future at which the endorsement
      rights have to be returned. Parameter [delegates] can be used
      to restrict the results to the given delegates.

      Returns the list of endorsement slots. Also returns the minimal
      timestamps that correspond to these slots. Timestamps are
      omitted for levels in the past, and are only estimates for
      levels later than the next block, based on the hypothesis that
      all predecessor blocks were baked at the first priority. *)
  val get:
    'a #RPC_context.simple ->
    ?levels: Raw_level.t list ->
    ?cycles: Cycle.t list ->
    ?delegates: Signature.public_key_hash list ->
    'a -> t list shell_tzresult Lwt.t

end

(* temporary export for deprecated unit test *)
val endorsement_rights:
  Alpha_context.t -> Level.t ->
  public_key_hash list tzresult Lwt.t

val baking_rights:
  Alpha_context.t -> int option ->
  (Raw_level.t * (public_key_hash * Time.t option) list) tzresult Lwt.t

(* NOTE(review): presumably registers the services above in the RPC
   directory at protocol initialization — confirm in the .ml. *)
val register: unit -> unit
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
test_substring.ml
(* Unit tests for BatSubstring (Batteries), built on OUnit.
   Fix in this revision: [test_rindex_from] was defined but never added
   to the [tests] suite, so the "rindex from" test never ran; it is now
   wired into the suite. *)

open OUnit
open BatSubstring
open BatPervasives

(* [dropr p s] drops the longest suffix of [s] whose chars satisfy [p]. *)
let test_dropr =
  let aeq = assert_equal ~printer:identity in
  [ begin "dropr empty" >:: fun () ->
      aeq "" (to_string (dropr (const true) (of_string "")));
      aeq "" (to_string (dropr (const false) (of_string "")))
    end;
    begin "dropr none" >:: fun () ->
      aeq "foo" (to_string (dropr (const false) (of_string "foo")))
    end;
    begin "dropr all" >:: fun () ->
      aeq "" (to_string (dropr (const true) (of_string "foo")))
    end;
    begin "dropr some" >:: fun () ->
      aeq "f" (to_string (dropr ((=) 'o') (of_string "foo")))
    end;
  ];;

(* [dropl p s] drops the longest prefix of [s] whose chars satisfy [p]. *)
let test_dropl =
  let aeq = assert_equal ~printer:identity in
  [ begin "dropl empty" >:: fun () ->
      aeq "" (to_string (dropl (const true) (of_string "")));
      aeq "" (to_string (dropl (const false) (of_string "")))
    end;
    begin "dropl none" >:: fun () ->
      aeq "foo" (to_string (dropl (const false) (of_string "foo")))
    end;
    begin "dropl all" >:: fun () ->
      aeq "" (to_string (dropl (const true) (of_string "foo")))
    end;
    begin "dropl some" >:: fun () ->
      aeq "oo" (to_string (dropl ((=) 'f') (of_string "foo")))
    end;
  ];;

(* [taker p s] keeps the longest suffix of [s] whose chars satisfy [p]. *)
let test_taker =
  let aeq = assert_equal ~printer:identity in
  [ begin "taker empty" >:: fun () ->
      aeq "" (to_string (taker (const true) (of_string "")));
      aeq "" (to_string (taker (const false) (of_string "")))
    end;
    begin "taker none" >:: fun () ->
      aeq "" (to_string (taker (const false) (of_string "foo")))
    end;
    begin "taker all" >:: fun () ->
      aeq "foo" (to_string (taker (const true) (of_string "foo")))
    end;
    begin "taker some" >:: fun () ->
      aeq "oo" (to_string (taker ((=) 'o') (of_string "foo")))
    end;
  ];;

(* [takel p s] keeps the longest prefix of [s] whose chars satisfy [p]. *)
let test_takel =
  let aeq = assert_equal ~printer:identity in
  [ begin "takel empty" >:: fun () ->
      aeq "" (to_string (takel (const true) (of_string "")));
      aeq "" (to_string (takel (const false) (of_string "")))
    end;
    begin "takel none" >:: fun () ->
      aeq "" (to_string (takel (const false) (of_string "foo")))
    end;
    begin "takel all" >:: fun () ->
      aeq "foo" (to_string (takel (const true) (of_string "foo")))
    end;
    begin "takel some" >:: fun () ->
      aeq "f" (to_string (takel ((=) 'f') (of_string "foo")))
    end;
  ];;

(* Convert a pair of substrings to a pair of plain strings, for printers. *)
let to_strings (x,y) = to_string x, to_string y

let test_splitr =
  let printer (s1,s2) = Printf.sprintf "(%S,%S)" s1 s2 in
  let aeq = assert_equal ~printer in
  [ begin "splitr empty" >:: fun () ->
      aeq ("","") (to_strings (splitr (const true) (of_string "")));
      aeq ("","") (to_strings (splitr (const false) (of_string "")))
    end;
    begin "splitr none" >:: fun () ->
      aeq ("foo","") (to_strings (splitr (const false) (of_string "foo")))
    end;
    begin "splitr all" >:: fun () ->
      aeq ("","foo") (to_strings (splitr (const true) (of_string "foo")))
    end;
    begin "splitr some" >:: fun () ->
      aeq ("f","oo") (to_strings (splitr ((=) 'o') (of_string "foo")))
    end;
  ];;

let test_splitl =
  let printer (s1,s2) = Printf.sprintf "(%S,%S)" s1 s2 in
  let aeq = assert_equal ~printer in
  [ begin "splitl empty" >:: fun () ->
      aeq ("","") (to_strings (splitl (const true) (of_string "")));
      aeq ("","") (to_strings (splitl (const false) (of_string "")))
    end;
    begin "splitl none" >:: fun () ->
      aeq ("","foo") (to_strings (splitl (const false) (of_string "foo")))
    end;
    begin "splitl all" >:: fun () ->
      aeq ("foo","") (to_strings (splitl (const true) (of_string "foo")))
    end;
    begin "splitl some" >:: fun () ->
      aeq ("f","oo") (to_strings (splitl ((=) 'f') (of_string "foo")))
    end;
  ];;

(* [slice] tests compare by rendered content, not by the underlying
   (base, offset, length) triple, which may legitimately differ. *)
let test_slice =
  let printer sus =
    let (s,i,n) = base sus in
    Printf.sprintf "(%S,%d,%d)" s i n
  in
  let cmp sus1 sus2 = to_string sus1 = to_string sus2 in
  let aeq = assert_equal ~printer ~cmp in
  [ begin "slice empty" >:: fun () ->
      aeq (empty ()) (slice (empty ()) 0 None)
    end;
    begin "slice all" >:: fun () ->
      aeq (of_string "foo") (slice (of_string "foo") 0 None);
      aeq (of_string "foo") (slice (of_string "foo") 0 (Some 3));
    end;
    begin "slice none" >:: fun () ->
      aeq (of_string "") (slice (of_string "foo") 3 None);
      aeq (of_string "") (slice (of_string "foo") 3 (Some 0));
    end;
    begin "slice some" >:: fun () ->
      aeq (of_string "oo") (slice (of_string "foo") 1 None);
      aeq (of_string "oo") (slice (of_string "foo") 1 (Some 2));
    end;
    begin "slice pick" >:: fun () ->
      aeq (of_string "i") (slice (of_string "jim") 1 (Some 1));
    end;
  ];;

(* index_from/rindex_from are cross-checked against index/rindex on a
   trimmed substring. *)
let test_index_from =
  let aeq = assert_equal ~printer:string_of_int in
  [ begin "index from" >:: fun () ->
      aeq (index_from (of_string "foobar") 2 'b')
        (2+index (triml 2 (of_string "foobar")) 'b')
    end;
  ];;

let test_rindex_from =
  let aeq = assert_equal ~printer:string_of_int in
  [ begin "rindex from" >:: fun () ->
      aeq (rindex_from (of_string "foobar") 2 'b')
        (rindex (trimr 2 (of_string "foobar")) 'b')
    end;
  ];;

let test_is_prefix =
  let aeq = assert_equal ~printer:string_of_bool in
  [ begin "is_prefix" >:: fun () ->
      aeq (is_prefix "foo" (of_string "foobar")) true;
      aeq (is_prefix "foj" (of_string "foobar")) false;
      aeq (is_prefix "foobarz" (of_string "foobar")) false;
      aeq (is_prefix "foobar" (of_string "foobar")) true;
    end;
  ];;

(* [enum] must agree with enumerating the rendered string, and its count
   must equal [size]. *)
let test_enum =
  let test_enum ss = ss |> to_string |> BatString.enum in
  let ss = of_string "testing" in
  [ begin "enum" >:: fun () ->
      assert_equal (ss |> enum |> BatString.of_enum) "testing";
      assert_equal (size ss) (ss |> enum |> BatEnum.count)
        ~printer:string_of_int;
      assert_equal (ss |> enum |> BatString.of_enum)
        (ss |> test_enum |> BatString.of_enum)
    end
  ]

(* [iteri] must visit every index in order exactly once.  The iteration
   happens at module-init time; the test only checks the recorded trace. *)
let test_iteri =
  let ss = of_string "test" in
  let mark = ref false in
  let r = ref [] in
  ss |> iteri (fun i _ -> mark := true; r := i::(!r) );
  [ begin "iteri" >:: fun () ->
      assert_equal !mark true ~printer:string_of_bool;
      assert_equal (List.rev !r) [0;1;2;3]
    end
  ]

(* Top-level suite.  FIX: "rindex_from" was previously missing, leaving
   [test_rindex_from] defined but never executed. *)
let tests = "Substring" >::: [
  "dropr" >::: test_dropr;
  "dropl" >::: test_dropl;
  "taker" >::: test_taker;
  "takel" >::: test_takel;
  "splitr" >::: test_splitr;
  "splitl" >::: test_splitl;
  "slice" >::: test_slice;
  "index_from" >::: test_index_from;
  "rindex_from" >::: test_rindex_from;
  "is_prefix" >::: test_is_prefix;
  "enum" >::: test_enum;
  "test_iteri" >::: test_iteri;
];;
gen.ml
(* gen.ml — build-time code generator.

   Usage: gen <version-file> <git-version-file>

   Reads the first line of each of the two files named on the command
   line (empty string if a file is empty) and prints an OCaml module
   body embedding both strings on stdout. *)

(* [read_first_line path] returns the first line of [path], or "" when
   the file is empty; the channel is closed before returning.
   @raise Sys_error if [path] cannot be opened. *)
let read_first_line path =
  let ic = open_in path in
  let line = try input_line ic with End_of_file -> "" in
  close_in ic;
  line

let version = read_first_line Sys.argv.(1)
let git_version = read_first_line Sys.argv.(2)

let () =
  Printf.printf {|
let s = "%s"
let git_version = "%s"
|} version git_version
pretty_interp.ml
(* XXX this is copy-pasted from pretty_printer.ml with the following
 * changes:
 * - open Interp_ast instead of Ast; don't open Type_internals
 * - string_of_big_int instead of string_of_int
 * - annot does not contain type annotations anymore, so E_internal_cast
 *   is ignored
 * - don't print E_cast either by default (controlled by ignore_casts)
 * - special case for holes in doc_id
 * - pp_exp returns a string instead of working on a buffer (should
 *   change this in the original as well, probably)
 * - pp_defs deleted
 * - the pretty-printer does not support DeIid; here, we add a
 *   work-around to make it work, converting back to Id with parens,
 *   because the stack/continuation contains operators in DeIid form.
 *   Should maybe backport this one to the original p-p. *)
open Interp_ast
open Format
open Nat_big_num

(* When true (the default), E_cast nodes are printed as their inner
   expression only. *)
let ignore_casts = ref true

let zero_big = of_int 0
let one_big = of_int 1

(* Render an identifier for error messages; DeIid (infix) operators are
   shown in "(deinfix op)" form. *)
let pp_format_id (Id_aux(i,_)) =
  match i with
  | Id(i) -> i
  | DeIid(x) -> "(deinfix " ^ x ^ ")"

(* Literal -> concrete-syntax string (used outside the PPrint printer). *)
let lit_to_string = function
 | L_unit -> "unit"
 | L_zero -> "0b0"
 | L_one -> "0b1"
 | L_true -> "true"
 | L_false -> "false"
 | L_num n -> Nat_big_num.to_string n
 | L_hex s -> "0x"^s
 | L_bin s -> "0b"^s
 | L_undef -> "undefined"
 | L_string s -> "\"" ^ s ^ "\""
;;

let id_to_string = function
  | Id_aux(Id s,_) | Id_aux(DeIid s,_) -> s
;;

(* Human-readable source location. *)
let rec loc_to_string = function
  | Unknown -> "location unknown"
  | Int(s,_) -> s
  | Generated l -> "Generated near " ^ loc_to_string l
  | Range(s,fline,fchar,tline,tchar) ->
      if fline = tline
      then sprintf "%s:%d:%d" s fline fchar
      else sprintf "%s:%d:%d-%d:%d" s fline fchar tline tchar
;;

(* Abbreviate a long string of repeated leading characters as "c...c"
   followed by the remaining distinct tail (used for bit vectors). *)
let collapse_leading s =
  if String.length s <= 8 then s
  else
    let first_bit = s.[0] in
    let templ = sprintf "%c...%c" first_bit first_bit in
    let rec find_first_diff str cha pos =
      if pos >= String.length str then None
      else if str.[pos] != cha then Some pos
      else find_first_diff str cha (pos+1)
    in
    match find_first_diff s first_bit 0 with
    | None -> templ
    | Some pos when pos > 4 ->
        templ ^ (String.sub s pos ((String.length s)- pos))
    | _ -> s
;;

(* pp the bytes of a Bytevector as a hex value *)
let bitvec_to_string l =
  "0b" ^
  collapse_leading
    (String.concat ""
       (List.map
          (function
            | Interp_ast.V_lit(L_aux(L_zero, _)) -> "0"
            | Interp_ast.V_lit(L_aux(L_one, _)) -> "1"
            | Interp_ast.V_lit(L_aux(L_undef, _)) -> "u"
            | Interp_ast.V_unknown -> "?"
            | v ->
                (Printf.printf "bitvec found a non bit %s%!\n"
                   (Interp.string_of_value v));assert false)
          (List.map Interp.detaint l)))
;;

(****************************************************************************
 * PPrint-based source-to-source pretty printer
****************************************************************************)

open PPrint

(* Identifier -> doc; Id "0" is the interpreter's hole and is rendered as
   a highlighted "[_]" (ANSI escapes). *)
let doc_id (Id_aux(i,_)) =
  match i with
  | Id "0" -> string "\x1b[1;31m[_]\x1b[m" (* internal representation of a hole *)
  | Id i -> string i
  | DeIid x ->
      (* add an extra space through empty to avoid a closing-comment
       * token in case of x ending with star. *)
      parens (separate space [string "deinfix"; string x; empty])

let doc_var (Kid_aux(Var v,_)) = string v

let doc_int i = string (to_string i)

let doc_bkind (BK_aux(k,_)) =
  string (match k with
  | BK_type -> "Type"
  | BK_int -> "Int"
  | BK_order -> "Order")

(* Binary / unary layout combinators with fixed indent and break. *)
let doc_op symb a b = infix 2 1 symb a b
let doc_unop symb a = prefix 2 1 symb a

(* Shared token docs. *)
let arrow = string "->"
let dotdot = string ".."
let coloneq = string ":="
let lsquarebar = string "[|"
let rsquarebar = string "|]"
let squarebars = enclose lsquarebar rsquarebar
let lsquarebarbar = string "[||"
let rsquarebarbar = string "||]"
let squarebarbars = enclose lsquarebarbar rsquarebarbar
let spaces op = enclose space space op
let semi_sp = semi ^^ space
let comma_sp = comma ^^ space
let colon_sp = spaces colon

let doc_kind (K_aux(K_kind(klst),_)) =
  separate_map (spaces arrow) doc_bkind klst

let doc_effect (BE_aux (e,_)) =
  string (match e with
  | BE_rreg -> "rreg"
  | BE_wreg -> "wreg"
  | BE_rmem -> "rmem"
  | BE_wmem -> "wmem"
  | BE_wmv -> "wmv"
  | BE_eamem -> "eamem"
  | BE_exmem -> "exmem"
  | BE_barr -> "barr"
  | BE_depend -> "depend"
  | BE_undef -> "undef"
  | BE_unspec -> "unspec"
  | BE_escape -> "escape"
  | BE_nondet -> "nondet"
  | BE_lset -> "(*lset*)"
  | BE_lret -> "(*lret*)")

let doc_effects (Effect_aux(e,_)) = match e with
  | Effect_var v -> doc_var v
  | Effect_set [] -> string "pure"
  | Effect_set s -> braces (separate_map comma_sp doc_effect s)

let doc_ord (Ord_aux(o,_)) = match o with
  | Ord_var v -> doc_var v
  | Ord_inc -> string "inc"
  | Ord_dec -> string "dec"

(* Type / nexp printers; one mutually-recursive function per precedence
   level, mirroring the parser's grammar. *)
let doc_typ, doc_atomic_typ, doc_nexp =
  (* following the structure of parser for precedence *)
  let rec typ ty = fn_typ ty
  and fn_typ ((Typ_aux (t, _)) as ty) = match t with
  | Typ_fn(arg,ret,efct) ->
      separate space [tup_typ arg; arrow; fn_typ ret; string "effect"; doc_effects efct]
  | _ -> tup_typ ty
  and tup_typ ((Typ_aux (t, _)) as ty) = match t with
  | Typ_tup typs -> parens (separate_map comma_sp app_typ typs)
  | _ -> app_typ ty
  and app_typ ((Typ_aux (t, _)) as ty) = match t with
  (* vector<n,m,inc,t> is rendered with Sail's t[...] bracket syntax. *)
  | Typ_app(Id_aux (Id "vector", _), [
      Typ_arg_aux(Typ_arg_nexp (Nexp_aux(Nexp_constant n, _)), _);
      Typ_arg_aux(Typ_arg_nexp (Nexp_aux(Nexp_constant m, _)), _);
      Typ_arg_aux (Typ_arg_order (Ord_aux (Ord_inc, _)), _);
      Typ_arg_aux (Typ_arg_typ (Typ_aux (Typ_id id, _)), _)]) ->
      (doc_id id) ^^
      (brackets (if equal n zero_big then doc_int m
                 else doc_op colon (doc_int n) (doc_int (add n (sub m one_big)))))
  (* range<n,m> is rendered with [| ... |]. *)
  | Typ_app(Id_aux (Id "range", _), [
      Typ_arg_aux(Typ_arg_nexp (Nexp_aux(Nexp_constant n, _)), _);
      Typ_arg_aux(Typ_arg_nexp m, _);]) ->
      (squarebars (if equal n zero_big then nexp m
                   else doc_op colon (doc_int n) (nexp m)))
  | Typ_app(id,args) ->
      (* trailing space to avoid >> token in case of nested app types *)
      (doc_id id) ^^ (angles (separate_map comma_sp doc_typ_arg args)) ^^ space
  | _ -> atomic_typ ty
  and atomic_typ ((Typ_aux (t, _)) as ty) = match t with
  | Typ_id id -> doc_id id
  | Typ_var v -> doc_var v
  | Typ_app _ | Typ_tup _ | Typ_fn _ ->
      (* exhaustiveness matters here to avoid infinite loops
       * if we add a new Typ constructor *)
      group (parens (typ ty))
  and doc_typ_arg (Typ_arg_aux(t,_)) = match t with
  (* Be careful here because typ_arg is implemented as nexp in the
   * parser - in practice falling through app_typ after all the proper nexp
   * cases; so Typ_arg_typ has the same precedence as a Typ_app *)
  | Typ_arg_typ t -> app_typ t
  | Typ_arg_nexp n -> nexp n
  | Typ_arg_order o -> doc_ord o

  (* same trick to handle precedence of nexp *)
  and nexp ne = sum_typ ne
  and sum_typ ((Nexp_aux(n,_)) as ne) = match n with
  | Nexp_sum(n1,n2) -> doc_op plus (sum_typ n1) (star_typ n2)
  | Nexp_minus(n1,n2) -> doc_op minus (sum_typ n1) (star_typ n2)
  | _ -> star_typ ne
  and star_typ ((Nexp_aux(n,_)) as ne) = match n with
  | Nexp_times(n1,n2) -> doc_op star (star_typ n1) (exp_typ n2)
  | _ -> exp_typ ne
  and exp_typ ((Nexp_aux(n,_)) as ne) = match n with
  | Nexp_exp n1 -> doc_unop (string "2**") (atomic_nexp_typ n1)
  | _ -> neg_typ ne
  and neg_typ ((Nexp_aux(n,_)) as ne) = match n with
  | Nexp_neg n1 ->
      (* XXX this is not valid Sail, only an internal representation -
       * work around by commenting it *)
      let minus = concat [string "(*"; minus; string "*)"] in
      minus ^^ (atomic_nexp_typ n1)
  | _ -> atomic_nexp_typ ne
  and atomic_nexp_typ ((Nexp_aux(n,_)) as ne) = match n with
  | Nexp_id id -> doc_id id
  | Nexp_var v -> doc_var v
  | Nexp_constant i -> doc_int i
  | Nexp_neg _ | Nexp_exp _ | Nexp_times _ | Nexp_sum _ | Nexp_minus _ ->
      group (parens (nexp ne))

  (* expose doc_typ, doc_atomic_typ and doc_nexp *)
  in typ, atomic_typ, nexp

let doc_nexp_constraint (NC_aux(nc,_)) = match nc with
  | NC_equal(n1,n2) -> doc_op equals (doc_nexp n1) (doc_nexp n2)
  | NC_bounded_ge(n1,n2) -> doc_op (string ">=") (doc_nexp n1) (doc_nexp n2)
  | NC_bounded_le(n1,n2) -> doc_op (string "<=") (doc_nexp n1) (doc_nexp n2)
  | NC_set(v,bounds) ->
      doc_op (string "IN") (doc_var v)
        (braces (separate_map comma_sp doc_int bounds))

let doc_qi (QI_aux(qi,_)) = match qi with
  | QI_const n_const -> doc_nexp_constraint n_const
  | QI_id(KOpt_aux(ki,_)) ->
    match ki with
    | KOpt_none v -> doc_var v
    | KOpt_kind(k,v) -> separate space [doc_kind k; doc_var v]

(* typ_doc is the doc for the type being quantified *)
let doc_typquant (TypQ_aux(tq,_)) typ_doc = match tq with
  | TypQ_no_forall -> typ_doc
  | TypQ_tq [] -> failwith "TypQ_tq with empty list"
  | TypQ_tq qlist ->
    (* include trailing break because the caller doesn't know if tq is empty *)
    doc_op dot
      (separate space [string "forall"; separate_map comma_sp doc_qi qlist])
      typ_doc

let doc_typscm (TypSchm_aux(TypSchm_ts(tq,t),_)) =
  (doc_typquant tq (doc_typ t))

let doc_typscm_atomic (TypSchm_aux(TypSchm_ts(tq,t),_)) =
  (doc_typquant tq (doc_atomic_typ t))

let doc_lit (L_aux(l,_)) =
  utf8string (match l with
  | L_unit -> "()"
  | L_zero -> "bitzero"
  | L_one -> "bitone"
  | L_true -> "true"
  | L_false -> "false"
  | L_num i -> to_string i
  | L_hex n -> "0x" ^ n
  | L_bin n -> "0b" ^ n
  | L_undef -> "undefined"
  | L_string s -> "\"" ^ s ^ "\"")

(* Pattern printers, again one level per parser precedence. *)
let doc_pat, doc_atomic_pat =
  let rec pat pa = pat_colons pa
  and pat_colons ((P_aux(p,l)) as pa) = match p with
  | P_vector_concat pats -> separate_map colon_sp atomic_pat pats
  | _ -> app_pat pa
  and app_pat ((P_aux(p,l)) as pa) = match p with
  | P_app(id, ((_ :: _) as pats)) ->
      doc_unop (doc_id id) (parens (separate_map comma_sp atomic_pat pats))
  | _ -> atomic_pat pa
  and atomic_pat ((P_aux(p,l)) as pa) = match p with
  | P_lit lit -> doc_lit lit
  | P_wild -> underscore
  | P_id id -> doc_id id
  | P_as(p,id) -> parens (separate space [pat p; string "as"; doc_id id])
  | P_typ(typ,p) -> separate space [parens (doc_typ typ); atomic_pat p]
  | P_app(id,[]) -> doc_id id
  | P_record(fpats,_) -> braces (separate_map semi_sp fpat fpats)
  | P_vector pats -> brackets (separate_map comma_sp atomic_pat pats)
  | P_tup pats -> parens (separate_map comma_sp atomic_pat pats)
  | P_list pats -> squarebarbars (separate_map semi_sp atomic_pat pats)
  | P_app(_, _ :: _) | P_vector_concat _ ->
      group (parens (pat pa))
  and fpat (FP_aux(FP_Fpat(id,fpat),_)) =
    doc_op equals (doc_id id) (pat fpat)
  and npat (i,p) = doc_op equals (doc_int i) (pat p)

  (* expose doc_pat and doc_atomic_pat *)
  in pat, atomic_pat

(* Expression printers.  [env]/[mem] are the interpreter state (used to
   display hole contents), [add_red] wraps a string in highlighting, and
   [show_hole_contents] selects whether holes print their value or "[_]".
   One mutually-recursive function per operator precedence level. *)
let doc_exp, doc_let =
  let rec exp env mem add_red show_hole_contents e =
    group (or_exp env mem add_red show_hole_contents e)
  and or_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app_infix(l,(Id_aux(Id ("|" | "||"),_) as op),r) ->
      doc_op (doc_id op)
        (and_exp env mem add_red show_hole_contents l)
        (or_exp env mem add_red show_hole_contents r)
  | _ -> and_exp env mem add_red show_hole_contents expr
  and and_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app_infix(l,(Id_aux(Id ("&" | "&&"),_) as op),r) ->
      doc_op (doc_id op)
        (eq_exp env mem add_red show_hole_contents l)
        (and_exp env mem add_red show_hole_contents r)
  | _ -> eq_exp env mem add_red show_hole_contents expr
  and eq_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app_infix(l,(Id_aux(Id (
    (* XXX this is not very consistent - is the parser bogus here? *)
      "=" | "==" | "!="
    | ">=" | ">=_s" | ">=_u" | ">" | ">_s" | ">_u"
    | "<=" | "<=_s" | "<" | "<_s" | "<_si" | "<_u"
    ),_) as op),r) ->
      doc_op (doc_id op)
        (eq_exp env mem add_red show_hole_contents l)
        (at_exp env mem add_red show_hole_contents r)
  (* XXX assignment should not have the same precedence as equal etc. *)
  | E_assign(le,exp) ->
      doc_op coloneq
        (doc_lexp env mem add_red show_hole_contents le)
        (at_exp env mem add_red show_hole_contents exp)
  | _ -> at_exp env mem add_red show_hole_contents expr
  and at_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app_infix(l,(Id_aux(Id ("@" | "^^" | "^" | "~^"),_) as op),r) ->
      doc_op (doc_id op)
        (cons_exp env mem add_red show_hole_contents l)
        (at_exp env mem add_red show_hole_contents r)
  | _ -> cons_exp env mem add_red show_hole_contents expr
  and cons_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_vector_append(l,r) ->
      doc_op colon
        (shift_exp env mem add_red show_hole_contents l)
        (cons_exp env mem add_red show_hole_contents r)
  | E_cons(l,r) ->
      doc_op colon
        (shift_exp env mem add_red show_hole_contents l)
        (cons_exp env mem add_red show_hole_contents r)
  | _ -> shift_exp env mem add_red show_hole_contents expr
  and shift_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app_infix(l,(Id_aux(Id (">>" | ">>>" | "<<" | "<<<"),_) as op),r) ->
      doc_op (doc_id op)
        (shift_exp env mem add_red show_hole_contents l)
        (plus_exp env mem add_red show_hole_contents r)
  | _ -> plus_exp env mem add_red show_hole_contents expr
  and plus_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app_infix(l,(Id_aux(Id ("+" | "-"| "+_s" | "-_s" ),_) as op),r) ->
      doc_op (doc_id op)
        (plus_exp env mem add_red show_hole_contents l)
        (star_exp env mem add_red show_hole_contents r)
  | _ -> star_exp env mem add_red show_hole_contents expr
  and star_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app_infix(l,(Id_aux(Id (
      "*" | "/"
    | "div" | "quot" | "rem" | "mod"
    | "quot_s" | "mod_s"
    | "*_s" | "*_si" | "*_u" | "*_ui"),_) as op),r) ->
      doc_op (doc_id op)
        (star_exp env mem add_red show_hole_contents l)
        (starstar_exp env mem add_red show_hole_contents r)
  | _ -> starstar_exp env mem add_red show_hole_contents expr
  and starstar_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app_infix(l,(Id_aux(Id "**",_) as op),r) ->
      doc_op (doc_id op)
        (starstar_exp env mem add_red show_hole_contents l)
        (app_exp env mem add_red show_hole_contents r)
  | E_if _ | E_for _ | E_let _ ->
      right_atomic_exp env mem add_red show_hole_contents expr
  | _ -> app_exp env mem add_red show_hole_contents expr
  and right_atomic_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  (* Special case: omit "else ()" when the else branch is empty. *)
  | E_if(c,t,E_aux(E_block [], _)) ->
      string "if" ^^ space ^^ group (exp env mem add_red show_hole_contents c) ^/^
      string "then" ^^ space ^^ group (exp env mem add_red show_hole_contents t)
  | E_if(c,t,e) ->
      string "if" ^^ space ^^ group (exp env mem add_red show_hole_contents c) ^/^
      string "then" ^^ space ^^ group (exp env mem add_red show_hole_contents t) ^/^
      string "else" ^^ space ^^ group (exp env mem add_red show_hole_contents e)
  | E_for(id,exp1,exp2,exp3,order,exp4) ->
      string "foreach" ^^ space ^^
      group (parens (
        separate (break 1) [
          doc_id id;
          string "from " ^^ (atomic_exp env mem add_red show_hole_contents exp1);
          string "to " ^^ (atomic_exp env mem add_red show_hole_contents exp2);
          string "by " ^^ (atomic_exp env mem add_red show_hole_contents exp3);
          string "in " ^^ doc_ord order
        ]
      )) ^/^
      (exp env mem add_red show_hole_contents exp4)
  | E_let(leb,e) ->
      doc_op (string "in")
        (let_exp env mem add_red show_hole_contents leb)
        (exp env mem add_red show_hole_contents e)
  | _ -> group (parens (exp env mem add_red show_hole_contents expr))
  and app_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_app(f,args) ->
      doc_unop (doc_id f)
        (parens (separate_map comma (exp env mem add_red show_hole_contents) args))
  | _ -> vaccess_exp env mem add_red show_hole_contents expr
  and vaccess_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_vector_access(v,e) ->
      (atomic_exp env mem add_red show_hole_contents v) ^^
      brackets (exp env mem add_red show_hole_contents e)
  | E_vector_subrange(v,e1,e2) ->
      (atomic_exp env mem add_red show_hole_contents v) ^^
      brackets (doc_op dotdot
                  (exp env mem add_red show_hole_contents e1)
                  (exp env mem add_red show_hole_contents e2))
  | _ -> field_exp env mem add_red show_hole_contents expr
  and field_exp env mem add_red show_hole_contents ((E_aux(e,_)) as expr) = match e with
  | E_field(fexp,id) ->
      (atomic_exp env mem add_red show_hole_contents fexp) ^^ dot ^^ doc_id id
  | _ -> atomic_exp env mem add_red show_hole_contents expr
  and atomic_exp env mem add_red (show_hole_contents:bool) ((E_aux(e,annot)) as expr) = match e with
  (* Special case: an empty block is equivalent to unit, but { } is a syntactic struct *)
  | E_block [] -> string "()"
  | E_block exps ->
      let exps_doc =
        separate_map (semi ^^ hardline) (exp env mem add_red show_hole_contents) exps in
      surround 2 1 lbrace exps_doc rbrace
  | E_nondet exps ->
      let exps_doc =
        separate_map (semi ^^ hardline) (exp env mem add_red show_hole_contents) exps in
      string "nondet" ^^ space ^^ (surround 2 1 lbrace exps_doc rbrace)
  | E_id id ->
      (* Id "0" is the interpreter's hole: look its value up in the local
         environment and either show it or show a highlighted "[_]". *)
      (match id with
       | Id_aux(Id("0"), _) ->
           (match Interp.in_lenv env id with
            | Interp_ast.V_unknown -> string (add_red "[_]")
            | v ->
                if show_hole_contents
                then string (add_red (Interp.string_of_value v))
                else string (add_red "[_]"))
       | _ -> doc_id id)
  | E_lit lit -> doc_lit lit
  | E_cast(typ,e) ->
      if !ignore_casts
      then atomic_exp env mem add_red show_hole_contents e
      else prefix 2 1 (parens (doc_typ typ))
             (group (atomic_exp env mem add_red show_hole_contents e))
  | E_internal_cast(_,e) ->
      (* XXX ignore internal casts in the interpreter *)
      atomic_exp env mem add_red show_hole_contents e
  | E_tuple exps ->
      parens (separate_map comma (exp env mem add_red show_hole_contents) exps)
  | E_record(FES_aux(FES_Fexps(fexps,_),_)) ->
      braces (separate_map semi_sp (doc_fexp env mem add_red show_hole_contents) fexps)
  | E_record_update(e,(FES_aux(FES_Fexps(fexps,_),_))) ->
      braces (doc_op (string "with")
                (exp env mem add_red show_hole_contents e)
                (separate_map semi_sp (doc_fexp env mem add_red show_hole_contents) fexps))
  | E_vector exps ->
      (* A vector of bit literals is printed compactly as 0b...; anything
         else falls back to the bracketed element list. *)
      let default_print _ =
        brackets (separate_map comma (exp env mem add_red show_hole_contents) exps) in
      (match exps with
       | [] -> default_print ()
       | es ->
           if (List.for_all
                 (fun e -> match e with
                   | (E_aux(E_lit(L_aux((L_one | L_zero | L_undef),_)),_)) -> true
                   | _ -> false) es)
           then
             utf8string
               ("0b" ^
                (List.fold_right
                   (fun (E_aux(e,_)) rst ->
                     match e with
                     | E_lit(L_aux(l, _)) ->
                         (match l with
                          | L_one -> "1"^rst
                          | L_zero -> "0"^rst
                          | L_undef -> "u"^rst
                          | _ -> failwith "bit vector not just bit values")
                     | _ -> failwith "bit vector not all lits") exps ""))
           else default_print ())
  | E_vector_update(v,e1,e2) ->
      brackets (doc_op (string "with")
                  (exp env mem add_red show_hole_contents v)
                  (doc_op equals
                     (atomic_exp env mem add_red show_hole_contents e1)
                     (exp env mem add_red show_hole_contents e2)))
  | E_vector_update_subrange(v,e1,e2,e3) ->
      brackets (
        doc_op (string "with")
          (exp env mem add_red show_hole_contents v)
          (doc_op equals
             ((atomic_exp env mem add_red show_hole_contents e1) ^^ colon ^^
              (atomic_exp env mem add_red show_hole_contents e2))
             (exp env mem add_red show_hole_contents e3)))
  | E_list exps ->
      squarebarbars (separate_map comma (exp env mem add_red show_hole_contents) exps)
  | E_case(e,pexps) ->
      let opening =
        separate space [string "switch"; exp env mem add_red show_hole_contents e; lbrace] in
      let cases =
        separate_map (break 1) (doc_case env mem add_red show_hole_contents) pexps in
      surround 2 1 opening cases rbrace
  | E_exit e ->
      separate space [string "exit"; exp env mem add_red show_hole_contents e;]
  | E_return e ->
      separate space [string "return"; exp env mem add_red show_hole_contents e;]
  | E_assert(e,msg) ->
      string "assert" ^^
      parens (separate_map comma (exp env mem add_red show_hole_contents) [e; msg])
  (* adding parens and loop for lower precedence *)
  | E_app (_, _)|E_vector_access (_, _)|E_vector_subrange (_, _, _)
  | E_cons (_, _)|E_field (_, _)|E_assign (_, _)
  | E_if _ | E_for _ | E_let _
  | E_vector_append _
  | E_app_infix (_,
    (* for every app_infix operator caught at a higher precedence,
     * we need to wrap around with parens *)
    (Id_aux(Id("|" | "||"
    | "&" | "&&"
    | "=" | "==" | "!="
    | ">=" | ">=_s" | ">=_u" | ">" | ">_s" | ">_u"
    | "<=" | "<=_s" | "<" | "<_s" | "<_si" | "<_u"
    | "@" | "^^" | "^" | "~^"
    | ">>" | ">>>" | "<<" | "<<<"
    | "+" | "+_s" | "-" | "-_s"
    | "*" | "/"
    | "div" | "quot" | "quot_s" | "rem" | "mod" | "mod_s"
    | "*_s" | "*_si" | "*_u" | "*_ui"
    | "**"), _))
    , _) ->
      group (parens (exp env mem add_red show_hole_contents expr))
  (* XXX fixup deinfix into infix ones *)
  | E_app_infix(l, (Id_aux((DeIid op), annot')), r) ->
      group (parens (exp env mem add_red show_hole_contents
                       (E_aux ((E_app_infix (l, (Id_aux(Id op, annot')), r)), annot))))
  (* XXX default precedence for app_infix? *)
  | E_app_infix(l,op,r) ->
      failwith ("unexpected app_infix operator " ^ (pp_format_id op))
  (* doc_op (doc_id op) (exp l) (exp r) *)
  (* XXX missing case *)
  | E_comment _ | E_comment_struc _ -> string ""
  | E_internal_value v ->
      string (Interp.string_of_value v)
  | _-> failwith "internal expression escaped"
  and let_exp env mem add_red show_hole_contents (LB_aux(lb,_)) = match lb with
  | LB_val(pat,e) ->
      prefix 2 1
        (separate space [string "let"; doc_atomic_pat pat; equals])
        (exp env mem add_red show_hole_contents e)
  and doc_fexp env mem add_red show_hole_contents (FE_aux(FE_Fexp(id,e),_)) =
    doc_op equals (doc_id id) (exp env mem add_red show_hole_contents e)
  and doc_case env mem add_red show_hole_contents (Pat_aux(Pat_exp(pat,e),_)) =
    doc_op arrow
      (separate space [string "case"; doc_atomic_pat pat])
      (group (exp env mem add_red show_hole_contents e))
  (* lexps are parsed as eq_exp - we need to duplicate the precedence
   * structure for them *)
  and doc_lexp env mem add_red show_hole_contents le =
    app_lexp env mem add_red show_hole_contents le
  and app_lexp env mem add_red show_hole_contents ((LEXP_aux(lexp,_)) as le) = match lexp with
  | LEXP_memory(id,args) ->
      doc_id id ^^
      parens (separate_map comma (exp env mem add_red show_hole_contents) args)
  | _ -> vaccess_lexp env mem add_red show_hole_contents le
  and vaccess_lexp env mem add_red show_hole_contents ((LEXP_aux(lexp,_)) as le) = match lexp with
  | LEXP_vector(v,e) ->
      (atomic_lexp env mem add_red show_hole_contents v) ^^
      brackets (exp env mem add_red show_hole_contents e)
  | LEXP_vector_range(v,e1,e2) ->
      (atomic_lexp env mem add_red show_hole_contents v) ^^
      brackets ((exp env mem add_red show_hole_contents e1) ^^ dotdot ^^
                (exp env mem add_red show_hole_contents e2))
  | _ -> field_lexp env mem add_red show_hole_contents le
  and field_lexp env mem add_red show_hole_contents ((LEXP_aux(lexp,_)) as le) = match lexp with
  | LEXP_field(v,id) ->
      (atomic_lexp env mem add_red show_hole_contents v) ^^ dot ^^ doc_id id
  | _ -> atomic_lexp env mem add_red show_hole_contents le
  and atomic_lexp env mem add_red show_hole_contents ((LEXP_aux(lexp,_)) as le) = match lexp with
  | LEXP_id id -> doc_id id
  | LEXP_cast(typ,id) -> prefix 2 1 (parens (doc_typ typ)) (doc_id id)
  | LEXP_tup(lexps) ->
      group (parens (separate_map comma (doc_lexp env mem add_red show_hole_contents) lexps))
  | LEXP_memory _ | LEXP_vector _ | LEXP_vector_range _ | LEXP_field _ ->
      group (parens (doc_lexp env mem add_red show_hole_contents le))

  (* expose doc_exp and doc_let *)
  in exp, let_exp

let doc_default (DT_aux(df,_)) = match df with
  | DT_kind(bk,v) -> separate space [string "default"; doc_bkind bk; doc_var v]
  | DT_typ(ts,id) -> separate space [string "default"; doc_typscm ts; doc_id id]
  | DT_order o -> separate space [string "default"; string "Order"; doc_ord o]

let doc_spec (VS_aux(v,_)) = match v with
  | VS_val_spec(ts,id, _, _) -> separate space [string "val"; doc_typscm ts; doc_id id]

let doc_namescm (Name_sect_aux(ns,_)) = match ns with
  | Name_sect_none -> empty
  (* include leading space because the caller doesn't know if ns is
   * empty, and trailing break already added by the following equals *)
  | Name_sect_some s ->
      space ^^ brackets (doc_op equals (string "name") (dquotes (string s)))

let rec doc_range (BF_aux(r,_)) = match r with
  | BF_single i -> doc_int i
  | BF_range(i1,i2) -> doc_op dotdot (doc_int i1) (doc_int i2)
  | BF_concat(ir1,ir2) -> (doc_range ir1) ^^ comma ^^ (doc_range ir2)

let doc_type_union (Tu_aux(typ_u,_)) = match typ_u with
  | Tu_ty_id(typ,id) -> separate space [doc_typ typ; doc_id id]
  | Tu_id id -> doc_id id

(* Print a type definition (abbrev, record, variant, enum, register). *)
let doc_typdef (TD_aux(td,_)) = match td with
  | TD_abbrev(id,nm,typschm) ->
      doc_op equals
        (concat [string "typedef"; space; doc_id id; doc_namescm nm])
        (doc_typscm typschm)
  | TD_record(id,nm,typq,fs,_) ->
      let f_pp (typ,id) = concat [doc_typ typ; space; doc_id id; semi] in
      let fs_doc = group (separate_map (break 1) f_pp fs) in
      doc_op equals
        (concat [string "typedef"; space; doc_id id; doc_namescm nm])
        (string "const struct" ^^ space ^^ doc_typquant typq (braces fs_doc))
  | TD_variant(id,nm,typq,ar,_) ->
      let ar_doc = group (separate_map (semi ^^ break 1) doc_type_union ar) in
      doc_op equals
        (concat [string "typedef"; space; doc_id id; doc_namescm nm])
        (string "const union" ^^ space ^^ doc_typquant typq (braces ar_doc))
  | TD_enum(id,nm,enums,_) ->
      let enums_doc = group (separate_map (semi ^^ break 1) doc_id enums) in
      doc_op equals
        (concat [string "typedef"; space; doc_id id; doc_namescm nm])
        (string "enumerate" ^^ space ^^ braces enums_doc)
  | TD_register(id,n1,n2,rs) ->
      let doc_rid (r,id) = separate space [doc_range r; colon; doc_id id] ^^ semi in
      let doc_rids = group (separate_map (break 1) doc_rid rs) in
      doc_op equals
        (string "typedef" ^^ space ^^ doc_id id)
        (separate space [
          string "register bits";
          brackets (doc_nexp n1 ^^ colon ^^ doc_nexp n2);
          braces doc_rids;
        ])

let doc_rec (Rec_aux(r,_)) = match r with
  | Rec_nonrec -> empty
  (* include trailing space because caller doesn't know if we return
   * empty *)
  | Rec_rec -> string "rec" ^^ space

let doc_tannot_opt (Typ_annot_opt_aux(t,_)) = match t with
  | Typ_annot_opt_some(tq,typ) -> doc_typquant tq (doc_typ typ)

let doc_effects_opt (Effect_opt_aux(e,_)) = match e with
  | Effect_opt_pure -> string "pure"
  | Effect_opt_effect e -> doc_effects e

(* One function clause: "id pat = body"; holes are never expanded here
   (show_hole_contents = false). *)
let doc_funcl env mem add_red (FCL_aux(FCL_Funcl(id,Pat_aux (Pat_exp (pat, exp), _)),_)) =
  group (doc_op equals
           (separate space [doc_id id; doc_atomic_pat pat])
           (doc_exp env mem add_red false exp))

let doc_fundef env mem add_red (FD_aux(FD_function(r, typa, efa, fcls),_)) =
  match fcls with
  | [] -> failwith "FD_function with empty function list"
  | _ ->
      let sep = hardline ^^ string "and" ^^ space in
      let clauses = separate_map sep (doc_funcl env mem add_red) fcls in
      separate space [string "function";
        doc_rec r ^^ doc_tannot_opt typa;
        string "effect"; doc_effects_opt efa;
        clauses]

let doc_dec (DEC_aux(d,_)) = match d with
  | DEC_reg(typ,id) -> separate space [string "register"; doc_atomic_typ typ; doc_id id]
  | _ -> failwith "interpreter printing out declarations unexpectedly"

let doc_scattered env mem add_red (SD_aux (sdef, _)) = match sdef with
 | SD_scattered_function (r, typa, efa, id) ->
     separate space [
       string "scattered function";
       doc_rec r ^^ doc_tannot_opt typa;
       string "effect"; doc_effects_opt efa;
       doc_id id]
 | SD_scattered_variant (id, ns, tq) ->
     doc_op equals
       (string "scattered typedef" ^^ space ^^ doc_id id ^^ doc_namescm ns)
       (doc_typquant tq empty)
 | SD_scattered_funcl funcl ->
     string "function clause" ^^ space ^^ doc_funcl env mem add_red funcl
 | SD_scattered_unioncl (id, tu) ->
     separate space [string "union"; doc_id id; string "member"; doc_type_union tu]
 | SD_scattered_end id -> string "end" ^^ space ^^ doc_id id

(* Top-level definition printer; mutually recursive with doc_comm_dec
   because structured comments can contain whole definitions. *)
let rec doc_def env mem add_red def = group (match def with
  | DEF_default df -> doc_default df
  | DEF_spec v_spec -> doc_spec v_spec
  | DEF_type t_def -> doc_typdef t_def
  | DEF_kind k_def -> failwith "interpreter unexpectedly printing kind def"
  | DEF_fundef f_def -> doc_fundef env mem add_red f_def
  | DEF_val lbind -> doc_let env mem add_red false lbind
  | DEF_reg_dec dec -> doc_dec dec
  | DEF_scattered sdef -> doc_scattered env mem add_red sdef
  | DEF_comm comm_dec ->
      string "(*" ^^ doc_comm_dec env mem add_red comm_dec ^^ string "*)"
  ) ^^ hardline

and doc_comm_dec env mem add_red dec = match dec with
  | DC_comm s -> string s
  | DC_comm_struct d -> doc_def env mem add_red d

let doc_defs env mem add_red (Defs(defs)) =
  separate_map hardline (doc_def env mem add_red) defs

(* Render a doc to a channel / buffer; [len] is the target line width. *)
let print ?(len=80) channel doc = ToChannel.pretty 1. len channel doc
let to_buf ?(len=80) buf doc = ToBuffer.pretty 1. len buf doc

(* Render a single expression to a string (the interpreter's entry point). *)
let pp_exp env mem add_red show_hole_contents e =
  let b = Buffer.create 20 in
  to_buf b (doc_exp env mem add_red show_hole_contents e);
  Buffer.contents b
(****************************************************************************) (* Sail *) (* *) (* Sail and the Sail architecture models here, comprising all files and *) (* directories except the ASL-derived Sail code in the aarch64 directory, *) (* are subject to the BSD two-clause licence below. *) (* *) (* The ASL derived parts of the ARMv8.3 specification in *) (* aarch64/no_vector and aarch64/full are copyright ARM Ltd. *) (* *) (* Copyright (c) 2013-2021 *) (* Kathyrn Gray *) (* Shaked Flur *) (* Stephen Kell *) (* Gabriel Kerneis *) (* Robert Norton-Wright *) (* Christopher Pulte *) (* Peter Sewell *) (* Alasdair Armstrong *) (* Brian Campbell *) (* Thomas Bauereiss *) (* Anthony Fox *) (* Jon French *) (* Dominic Mulligan *) (* Stephen Kell *) (* Mark Wassell *) (* Alastair Reid (Arm Ltd) *) (* *) (* All rights reserved. *) (* *) (* This work was partially supported by EPSRC grant EP/K008528/1 <a *) (* href="http://www.cl.cam.ac.uk/users/pes20/rems">REMS: Rigorous *) (* Engineering for Mainstream Systems</a>, an ARM iCASE award, EPSRC IAA *) (* KTF funding, and donations from Arm. This project has received *) (* funding from the European Research Council (ERC) under the European *) (* Union’s Horizon 2020 research and innovation programme (grant *) (* agreement No 789108, ELVER). *) (* *) (* This software was developed by SRI International and the University of *) (* Cambridge Computer Laboratory (Department of Computer Science and *) (* Technology) under DARPA/AFRL contracts FA8650-18-C-7809 ("CIFV") *) (* and FA8750-10-C-0237 ("CTSRD"). *) (* *) (* Redistribution and use in source and binary forms, with or without *) (* modification, are permitted provided that the following conditions *) (* are met: *) (* 1. Redistributions of source code must retain the above copyright *) (* notice, this list of conditions and the following disclaimer. *) (* 2. 
Redistributions in binary form must reproduce the above copyright *) (* notice, this list of conditions and the following disclaimer in *) (* the documentation and/or other materials provided with the *) (* distribution. *) (* *) (* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' *) (* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED *) (* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A *) (* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR *) (* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, *) (* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT *) (* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF *) (* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND *) (* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, *) (* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT *) (* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF *) (* SUCH DAMAGE. *) (****************************************************************************)
parsetree.mli
(** Abstract syntax tree produced by parsing {b Warning:} this module is unstable and part of {{!Compiler_libs}compiler-libs}. *) open Asttypes type constant = | Pconst_integer of string * char option (** Integer constants such as [3] [3l] [3L] [3n]. Suffixes [[g-z][G-Z]] are accepted by the parser. Suffixes except ['l'], ['L'] and ['n'] are rejected by the typechecker *) | Pconst_char of char (** Character such as ['c']. *) | Pconst_string of string * Location.t * string option (** Constant string such as ["constant"] or [{delim|other constant|delim}]. The location span the content of the string, without the delimiters. *) | Pconst_float of string * char option (** Float constant such as [3.4], [2e5] or [1.4e-4]. Suffixes [g-z][G-Z] are accepted by the parser. Suffixes are rejected by the typechecker. *) type location_stack = Location.t list (** {1 Extension points} *) type attribute = { attr_name : string loc; attr_payload : payload; attr_loc : Location.t; } (** Attributes such as [[\@id ARG]] and [[\@\@id ARG]]. Metadata containers passed around within the AST. The compiler ignores unknown attributes. *) and extension = string loc * payload (** Extension points such as [[%id ARG] and [%%id ARG]]. Sub-language placeholder -- rejected by the typechecker. *) and attributes = attribute list and payload = | PStr of structure | PSig of signature (** [: SIG] in an attribute or an extension point *) | PTyp of core_type (** [: T] in an attribute or an extension point *) | PPat of pattern * expression option (** [? P] or [? P when E], in an attribute or an extension point *) (** {1 Core language} *) (** {2 Type expressions} *) and core_type = { ptyp_desc: core_type_desc; ptyp_loc: Location.t; ptyp_loc_stack: location_stack; ptyp_attributes: attributes; (** [... 
[\@id1] [\@id2]] *) } and core_type_desc = | Ptyp_any (** [_] *) | Ptyp_var of string (** A type variable such as ['a] *) | Ptyp_arrow of arg_label * core_type * core_type (** [Ptyp_arrow(lbl, T1, T2)] represents: - [T1 -> T2] when [lbl] is {{!Asttypes.arg_label.Nolabel}[Nolabel]}, - [~l:T1 -> T2] when [lbl] is {{!Asttypes.arg_label.Labelled}[Labelled]}, - [?l:T1 -> T2] when [lbl] is {{!Asttypes.arg_label.Optional}[Optional]}. *) | Ptyp_tuple of core_type list (** [Ptyp_tuple([T1 ; ... ; Tn])] represents a product type [T1 * ... * Tn]. Invariant: [n >= 2]. *) | Ptyp_constr of Longident.t loc * core_type list (** [Ptyp_constr(lident, l)] represents: - [tconstr] when [l=[]], - [T tconstr] when [l=[T]], - [(T1, ..., Tn) tconstr] when [l=[T1 ; ... ; Tn]]. *) | Ptyp_object of object_field list * closed_flag (** [Ptyp_object([ l1:T1; ...; ln:Tn ], flag)] represents: - [< l1:T1; ...; ln:Tn >] when [flag] is {{!Asttypes.closed_flag.Closed}[Closed]}, - [< l1:T1; ...; ln:Tn; .. >] when [flag] is {{!Asttypes.closed_flag.Open}[Open]}. *) | Ptyp_class of Longident.t loc * core_type list (** [Ptyp_class(tconstr, l)] represents: - [#tconstr] when [l=[]], - [T #tconstr] when [l=[T]], - [(T1, ..., Tn) #tconstr] when [l=[T1 ; ... ; Tn]]. *) | Ptyp_alias of core_type * string (** [T as 'a]. *) | Ptyp_variant of row_field list * closed_flag * label list option (** [Ptyp_variant([`A;`B], flag, labels)] represents: - [[ `A|`B ]] when [flag] is {{!Asttypes.closed_flag.Closed}[Closed]}, and [labels] is [None], - [[> `A|`B ]] when [flag] is {{!Asttypes.closed_flag.Open}[Open]}, and [labels] is [None], - [[< `A|`B ]] when [flag] is {{!Asttypes.closed_flag.Closed}[Closed]}, and [labels] is [Some []], - [[< `A|`B > `X `Y ]] when [flag] is {{!Asttypes.closed_flag.Closed}[Closed]}, and [labels] is [Some ["X";"Y"]]. *) | Ptyp_poly of string loc list * core_type (** ['a1 ... 'an. 
T] Can only appear in the following context: - As the {!core_type} of a {{!pattern_desc.Ppat_constraint}[Ppat_constraint]} node corresponding to a constraint on a let-binding: {[let x : 'a1 ... 'an. T = e ...]} - Under {{!class_field_kind.Cfk_virtual}[Cfk_virtual]} for methods (not values). - As the {!core_type} of a {{!class_type_field_desc.Pctf_method}[Pctf_method]} node. - As the {!core_type} of a {{!expression_desc.Pexp_poly}[Pexp_poly]} node. - As the {{!label_declaration.pld_type}[pld_type]} field of a {!label_declaration}. - As a {!core_type} of a {{!core_type_desc.Ptyp_object}[Ptyp_object]} node. - As the {{!value_description.pval_type}[pval_type]} field of a {!value_description}. *) | Ptyp_package of package_type (** [(module S)]. *) | Ptyp_extension of extension (** [[%id]]. *) and package_type = Longident.t loc * (Longident.t loc * core_type) list (** As {!package_type} typed values: - [(S, [])] represents [(module S)], - [(S, [(t1, T1) ; ... ; (tn, Tn)])] represents [(module S with type t1 = T1 and ... and tn = Tn)]. *) and row_field = { prf_desc : row_field_desc; prf_loc : Location.t; prf_attributes : attributes; } and row_field_desc = | Rtag of label loc * bool * core_type list (** [Rtag(`A, b, l)] represents: - [`A] when [b] is [true] and [l] is [[]], - [`A of T] when [b] is [false] and [l] is [[T]], - [`A of T1 & .. & Tn] when [b] is [false] and [l] is [[T1;...Tn]], - [`A of & T1 & .. & Tn] when [b] is [true] and [l] is [[T1;...Tn]]. - The [bool] field is true if the tag contains a constant (empty) constructor. 
- [&] occurs when several types are used for the same constructor (see 4.2 in the manual) *) | Rinherit of core_type (** [[ | t ]] *) and object_field = { pof_desc : object_field_desc; pof_loc : Location.t; pof_attributes : attributes; } and object_field_desc = | Otag of label loc * core_type | Oinherit of core_type (** {2 Patterns} *) and pattern = { ppat_desc: pattern_desc; ppat_loc: Location.t; ppat_loc_stack: location_stack; ppat_attributes: attributes; (** [... [\@id1] [\@id2]] *) } and pattern_desc = | Ppat_any (** The pattern [_]. *) | Ppat_var of string loc (** A variable pattern such as [x] *) | Ppat_alias of pattern * string loc (** An alias pattern such as [P as 'a] *) | Ppat_constant of constant (** Patterns such as [1], ['a'], ["true"], [1.0], [1l], [1L], [1n] *) | Ppat_interval of constant * constant (** Patterns such as ['a'..'z']. Other forms of interval are recognized by the parser but rejected by the type-checker. *) | Ppat_tuple of pattern list (** Patterns [(P1, ..., Pn)]. Invariant: [n >= 2] *) | Ppat_construct of Longident.t loc * (string loc list * pattern) option (** [Ppat_construct(C, args)] represents: - [C] when [args] is [None], - [C P] when [args] is [Some ([], P)] - [C (P1, ..., Pn)] when [args] is [Some ([], Ppat_tuple [P1; ...; Pn])] - [C (type a b) P] when [args] is [Some ([a; b], P)] *) | Ppat_variant of label * pattern option (** [Ppat_variant(`A, pat)] represents: - [`A] when [pat] is [None], - [`A P] when [pat] is [Some P] *) | Ppat_record of (Longident.t loc * pattern) list * closed_flag (** [Ppat_record([(l1, P1) ; ... 
; (ln, Pn)], flag)] represents: - [{ l1=P1; ...; ln=Pn }] when [flag] is {{!Asttypes.closed_flag.Closed}[Closed]} - [{ l1=P1; ...; ln=Pn; _}] when [flag] is {{!Asttypes.closed_flag.Open}[Open]} Invariant: [n > 0] *) | Ppat_array of pattern list (** Pattern [[| P1; ...; Pn |]] *) | Ppat_or of pattern * pattern (** Pattern [P1 | P2] *) | Ppat_constraint of pattern * core_type (** Pattern [(P : T)] *) | Ppat_type of Longident.t loc (** Pattern [#tconst] *) | Ppat_lazy of pattern (** Pattern [lazy P] *) | Ppat_unpack of string option loc (** [Ppat_unpack(s)] represents: - [(module P)] when [s] is [Some "P"] - [(module _)] when [s] is [None] Note: [(module P : S)] is represented as [Ppat_constraint(Ppat_unpack(Some "P"), Ptyp_package S)] *) | Ppat_exception of pattern (** Pattern [exception P] *) | Ppat_extension of extension (** Pattern [[%id]] *) | Ppat_open of Longident.t loc * pattern (** Pattern [M.(P)] *) (** {2 Value expressions} *) and expression = { pexp_desc: expression_desc; pexp_loc: Location.t; pexp_loc_stack: location_stack; pexp_attributes: attributes; (** [... [\@id1] [\@id2]] *) } and expression_desc = | Pexp_ident of Longident.t loc (** Identifiers such as [x] and [M.x] *) | Pexp_constant of constant (** Expressions constant such as [1], ['a'], ["true"], [1.0], [1l], [1L], [1n] *) | Pexp_let of rec_flag * value_binding list * expression (** [Pexp_let(flag, [(P1,E1) ; ... ; (Pn,En)], E)] represents: - [let P1 = E1 and ... and Pn = EN in E] when [flag] is {{!Asttypes.rec_flag.Nonrecursive}[Nonrecursive]}, - [let rec P1 = E1 and ... and Pn = EN in E] when [flag] is {{!Asttypes.rec_flag.Recursive}[Recursive]}. *) | Pexp_function of case list (** [function P1 -> E1 | ... 
| Pn -> En] *) | Pexp_fun of arg_label * expression option * pattern * expression (** [Pexp_fun(lbl, exp0, P, E1)] represents: - [fun P -> E1] when [lbl] is {{!Asttypes.arg_label.Nolabel}[Nolabel]} and [exp0] is [None] - [fun ~l:P -> E1] when [lbl] is {{!Asttypes.arg_label.Labelled}[Labelled l]} and [exp0] is [None] - [fun ?l:P -> E1] when [lbl] is {{!Asttypes.arg_label.Optional}[Optional l]} and [exp0] is [None] - [fun ?l:(P = E0) -> E1] when [lbl] is {{!Asttypes.arg_label.Optional}[Optional l]} and [exp0] is [Some E0] Notes: - If [E0] is provided, only {{!Asttypes.arg_label.Optional}[Optional]} is allowed. - [fun P1 P2 .. Pn -> E1] is represented as nested {{!expression_desc.Pexp_fun}[Pexp_fun]}. - [let f P = E] is represented using {{!expression_desc.Pexp_fun}[Pexp_fun]}. *) | Pexp_apply of expression * (arg_label * expression) list (** [Pexp_apply(E0, [(l1, E1) ; ... ; (ln, En)])] represents [E0 ~l1:E1 ... ~ln:En] [li] can be {{!Asttypes.arg_label.Nolabel}[Nolabel]} (non labeled argument), {{!Asttypes.arg_label.Labelled}[Labelled]} (labelled arguments) or {{!Asttypes.arg_label.Optional}[Optional]} (optional argument). Invariant: [n > 0] *) | Pexp_match of expression * case list (** [match E0 with P1 -> E1 | ... | Pn -> En] *) | Pexp_try of expression * case list (** [try E0 with P1 -> E1 | ... | Pn -> En] *) | Pexp_tuple of expression list (** Expressions [(E1, ..., En)] Invariant: [n >= 2] *) | Pexp_construct of Longident.t loc * expression option (** [Pexp_construct(C, exp)] represents: - [C] when [exp] is [None], - [C E] when [exp] is [Some E], - [C (E1, ..., En)] when [exp] is [Some (Pexp_tuple[E1;...;En])] *) | Pexp_variant of label * expression option (** [Pexp_variant(`A, exp)] represents - [`A] when [exp] is [None] - [`A E] when [exp] is [Some E] *) | Pexp_record of (Longident.t loc * expression) list * expression option (** [Pexp_record([(l1,P1) ; ... 
; (ln,Pn)], exp0)] represents - [{ l1=P1; ...; ln=Pn }] when [exp0] is [None] - [{ E0 with l1=P1; ...; ln=Pn }] when [exp0] is [Some E0] Invariant: [n > 0] *) | Pexp_field of expression * Longident.t loc (** [E.l] *) | Pexp_setfield of expression * Longident.t loc * expression (** [E1.l <- E2] *) | Pexp_array of expression list (** [[| E1; ...; En |]] *) | Pexp_ifthenelse of expression * expression * expression option (** [if E1 then E2 else E3] *) | Pexp_sequence of expression * expression (** [E1; E2] *) | Pexp_while of expression * expression (** [while E1 do E2 done] *) | Pexp_for of pattern * expression * expression * direction_flag * expression (** [Pexp_for(i, E1, E2, direction, E3)] represents: - [for i = E1 to E2 do E3 done] when [direction] is {{!Asttypes.direction_flag.Upto}[Upto]} - [for i = E1 downto E2 do E3 done] when [direction] is {{!Asttypes.direction_flag.Downto}[Downto]} *) | Pexp_constraint of expression * core_type (** [(E : T)] *) | Pexp_coerce of expression * core_type option * core_type (** [Pexp_coerce(E, from, T)] represents - [(E :> T)] when [from] is [None], - [(E : T0 :> T)] when [from] is [Some T0]. *) | Pexp_send of expression * label loc (** [E # m] *) | Pexp_new of Longident.t loc (** [new M.c] *) | Pexp_setinstvar of label loc * expression (** [x <- 2] *) | Pexp_override of (label loc * expression) list (** [{< x1 = E1; ...; xn = En >}] *) | Pexp_letmodule of string option loc * module_expr * expression (** [let module M = ME in E] *) | Pexp_letexception of extension_constructor * expression (** [let exception C in E] *) | Pexp_assert of expression (** [assert E]. Note: [assert false] is treated in a special way by the type-checker. *) | Pexp_lazy of expression (** [lazy E] *) | Pexp_poly of expression * core_type option (** Used for method bodies. Can only be used as the expression under {{!class_field_kind.Cfk_concrete}[Cfk_concrete]} for methods (not values). *) | Pexp_object of class_structure (** [object ... 
end] *) | Pexp_newtype of string loc * expression (** [fun (type t) -> E] *) | Pexp_pack of module_expr (** [(module ME)]. [(module ME : S)] is represented as [Pexp_constraint(Pexp_pack ME, Ptyp_package S)] *) | Pexp_open of open_declaration * expression (** - [M.(E)] - [let open M in E] - [let open! M in E] *) | Pexp_letop of letop (** - [let* P = E0 in E1] - [let* P0 = E00 and* P1 = E01 in E1] *) | Pexp_extension of extension (** [[%id]] *) | Pexp_unreachable (** [.] *) and case = { pc_lhs: pattern; pc_guard: expression option; pc_rhs: expression; } (** Values of type {!case} represents [(P -> E)] or [(P when E0 -> E)] *) and letop = { let_ : binding_op; ands : binding_op list; body : expression; } and binding_op = { pbop_op : string loc; pbop_pat : pattern; pbop_exp : expression; pbop_loc : Location.t; } (** {2 Value descriptions} *) and value_description = { pval_name: string loc; pval_type: core_type; pval_prim: string list; pval_attributes: attributes; (** [... [\@\@id1] [\@\@id2]] *) pval_loc: Location.t; } (** Values of type {!value_description} represents: - [val x: T], when {{!value_description.pval_prim}[pval_prim]} is [[]] - [external x: T = "s1" ... "sn"] when {{!value_description.pval_prim}[pval_prim]} is [["s1";..."sn"]] *) (** {2 Type declarations} *) and type_declaration = { ptype_name: string loc; ptype_params: (core_type * (variance * injectivity)) list; (** [('a1,...'an) t] *) ptype_cstrs: (core_type * core_type * Location.t) list; (** [... constraint T1=T1' ... constraint Tn=Tn'] *) ptype_kind: type_kind; ptype_private: private_flag; (** for [= private ...] *) ptype_manifest: core_type option; (** represents [= T] *) ptype_attributes: attributes; (** [... 
[\@\@id1] [\@\@id2]] *) ptype_loc: Location.t; } (** Here are type declarations and their representation, for various {{!type_declaration.ptype_kind}[ptype_kind]} and {{!type_declaration.ptype_manifest}[ptype_manifest]} values: - [type t] when [type_kind] is {{!type_kind.Ptype_abstract}[Ptype_abstract]}, and [manifest] is [None], - [type t = T0] when [type_kind] is {{!type_kind.Ptype_abstract}[Ptype_abstract]}, and [manifest] is [Some T0], - [type t = C of T | ...] when [type_kind] is {{!type_kind.Ptype_variant}[Ptype_variant]}, and [manifest] is [None], - [type t = T0 = C of T | ...] when [type_kind] is {{!type_kind.Ptype_variant}[Ptype_variant]}, and [manifest] is [Some T0], - [type t = {l: T; ...}] when [type_kind] is {{!type_kind.Ptype_record}[Ptype_record]}, and [manifest] is [None], - [type t = T0 = {l : T; ...}] when [type_kind] is {{!type_kind.Ptype_record}[Ptype_record]}, and [manifest] is [Some T0], - [type t = ..] when [type_kind] is {{!type_kind.Ptype_open}[Ptype_open]}, and [manifest] is [None]. *) and type_kind = | Ptype_abstract | Ptype_variant of constructor_declaration list | Ptype_record of label_declaration list (** Invariant: non-empty list *) | Ptype_open and label_declaration = { pld_name: string loc; pld_mutable: mutable_flag; pld_type: core_type; pld_loc: Location.t; pld_attributes: attributes; (** [l : T [\@id1] [\@id2]] *) } (** - [{ ...; l: T; ... }] when {{!label_declaration.pld_mutable}[pld_mutable]} is {{!Asttypes.mutable_flag.Immutable}[Immutable]}, - [{ ...; mutable l: T; ... }] when {{!label_declaration.pld_mutable}[pld_mutable]} is {{!Asttypes.mutable_flag.Mutable}[Mutable]}. Note: [T] can be a {{!core_type_desc.Ptyp_poly}[Ptyp_poly]}. *) and constructor_declaration = { pcd_name: string loc; pcd_vars: string loc list; pcd_args: constructor_arguments; pcd_res: core_type option; pcd_loc: Location.t; pcd_attributes: attributes; (** [C of ... 
[\@id1] [\@id2]] *) } and constructor_arguments = | Pcstr_tuple of core_type list | Pcstr_record of label_declaration list (** Values of type {!constructor_declaration} represents the constructor arguments of: - [C of T1 * ... * Tn] when [res = None], and [args = Pcstr_tuple [T1; ... ; Tn]], - [C: T0] when [res = Some T0], and [args = Pcstr_tuple []], - [C: T1 * ... * Tn -> T0] when [res = Some T0], and [args = Pcstr_tuple [T1; ... ; Tn]], - [C of {...}] when [res = None], and [args = Pcstr_record [...]], - [C: {...} -> T0] when [res = Some T0], and [args = Pcstr_record [...]]. *) and type_extension = { ptyext_path: Longident.t loc; ptyext_params: (core_type * (variance * injectivity)) list; ptyext_constructors: extension_constructor list; ptyext_private: private_flag; ptyext_loc: Location.t; ptyext_attributes: attributes; (** ... [\@\@id1] [\@\@id2] *) } (** Definition of new extensions constructors for the extensive sum type [t] ([type t += ...]). *) and extension_constructor = { pext_name: string loc; pext_kind: extension_constructor_kind; pext_loc: Location.t; pext_attributes: attributes; (** [C of ... [\@id1] [\@id2]] *) } and type_exception = { ptyexn_constructor : extension_constructor; ptyexn_loc : Location.t; ptyexn_attributes : attributes; (** [... [\@\@id1] [\@\@id2]] *) } (** Definition of a new exception ([exception E]). *) and extension_constructor_kind = | Pext_decl of string loc list * constructor_arguments * core_type option (** [Pext_decl(existentials, c_args, t_opt)] describes a new extension constructor. It can be: - [C of T1 * ... * Tn] when: {ul {- [existentials] is [[]],} {- [c_args] is [[T1; ...; Tn]],} {- [t_opt] is [None]}.} - [C: T0] when {ul {- [existentials] is [[]],} {- [c_args] is [[]],} {- [t_opt] is [Some T0].}} - [C: T1 * ... * Tn -> T0] when {ul {- [existentials] is [[]],} {- [c_args] is [[T1; ...; Tn]],} {- [t_opt] is [Some T0].}} - [C: 'a... . T1 * ... 
* Tn -> T0] when {ul {- [existentials] is [['a;...]],} {- [c_args] is [[T1; ... ; Tn]],} {- [t_opt] is [Some T0].}} *) | Pext_rebind of Longident.t loc (** [Pext_rebind(D)] re-export the constructor [D] with the new name [C] *) (** {1 Class language} *) (** {2 Type expressions for the class language} *) and class_type = { pcty_desc: class_type_desc; pcty_loc: Location.t; pcty_attributes: attributes; (** [... [\@id1] [\@id2]] *) } and class_type_desc = | Pcty_constr of Longident.t loc * core_type list (** - [c] - [['a1, ..., 'an] c] *) | Pcty_signature of class_signature (** [object ... end] *) | Pcty_arrow of arg_label * core_type * class_type (** [Pcty_arrow(lbl, T, CT)] represents: - [T -> CT] when [lbl] is {{!Asttypes.arg_label.Nolabel}[Nolabel]}, - [~l:T -> CT] when [lbl] is {{!Asttypes.arg_label.Labelled}[Labelled l]}, - [?l:T -> CT] when [lbl] is {{!Asttypes.arg_label.Optional}[Optional l]}. *) | Pcty_extension of extension (** [%id] *) | Pcty_open of open_description * class_type (** [let open M in CT] *) and class_signature = { pcsig_self: core_type; pcsig_fields: class_type_field list; } (** Values of type [class_signature] represents: - [object('selfpat) ... end] - [object ... end] when {{!class_signature.pcsig_self}[pcsig_self]} is {{!core_type_desc.Ptyp_any}[Ptyp_any]} *) and class_type_field = { pctf_desc: class_type_field_desc; pctf_loc: Location.t; pctf_attributes: attributes; (** [... [\@\@id1] [\@\@id2]] *) } and class_type_field_desc = | Pctf_inherit of class_type (** [inherit CT] *) | Pctf_val of (label loc * mutable_flag * virtual_flag * core_type) (** [val x: T] *) | Pctf_method of (label loc * private_flag * virtual_flag * core_type) (** [method x: T] Note: [T] can be a {{!core_type_desc.Ptyp_poly}[Ptyp_poly]}. 
*) | Pctf_constraint of (core_type * core_type) (** [constraint T1 = T2] *) | Pctf_attribute of attribute (** [[\@\@\@id]] *) | Pctf_extension of extension (** [[%%id]] *) and 'a class_infos = { pci_virt: virtual_flag; pci_params: (core_type * (variance * injectivity)) list; pci_name: string loc; pci_expr: 'a; pci_loc: Location.t; pci_attributes: attributes; (** [... [\@\@id1] [\@\@id2]] *) } (** Values of type [class_expr class_infos] represents: - [class c = ...] - [class ['a1,...,'an] c = ...] - [class virtual c = ...] They are also used for "class type" declaration. *) and class_description = class_type class_infos and class_type_declaration = class_type class_infos (** {2 Value expressions for the class language} *) and class_expr = { pcl_desc: class_expr_desc; pcl_loc: Location.t; pcl_attributes: attributes; (** [... [\@id1] [\@id2]] *) } and class_expr_desc = | Pcl_constr of Longident.t loc * core_type list (** [c] and [['a1, ..., 'an] c] *) | Pcl_structure of class_structure (** [object ... end] *) | Pcl_fun of arg_label * expression option * pattern * class_expr (** [Pcl_fun(lbl, exp0, P, CE)] represents: - [fun P -> CE] when [lbl] is {{!Asttypes.arg_label.Nolabel}[Nolabel]} and [exp0] is [None], - [fun ~l:P -> CE] when [lbl] is {{!Asttypes.arg_label.Labelled}[Labelled l]} and [exp0] is [None], - [fun ?l:P -> CE] when [lbl] is {{!Asttypes.arg_label.Optional}[Optional l]} and [exp0] is [None], - [fun ?l:(P = E0) -> CE] when [lbl] is {{!Asttypes.arg_label.Optional}[Optional l]} and [exp0] is [Some E0]. *) | Pcl_apply of class_expr * (arg_label * expression) list (** [Pcl_apply(CE, [(l1,E1) ; ... ; (ln,En)])] represents [CE ~l1:E1 ... ~ln:En]. [li] can be empty (non labeled argument) or start with [?] (optional argument). Invariant: [n > 0] *) | Pcl_let of rec_flag * value_binding list * class_expr (** [Pcl_let(rec, [(P1, E1); ... ; (Pn, En)], CE)] represents: - [let P1 = E1 and ... 
and Pn = EN in CE] when [rec] is {{!Asttypes.rec_flag.Nonrecursive}[Nonrecursive]}, - [let rec P1 = E1 and ... and Pn = EN in CE] when [rec] is {{!Asttypes.rec_flag.Recursive}[Recursive]}. *) | Pcl_constraint of class_expr * class_type (** [(CE : CT)] *) | Pcl_extension of extension (** [[%id]] *) | Pcl_open of open_description * class_expr (** [let open M in CE] *) and class_structure = { pcstr_self: pattern; pcstr_fields: class_field list; } (** Values of type {!class_structure} represents: - [object(selfpat) ... end] - [object ... end] when {{!class_structure.pcstr_self}[pcstr_self]} is {{!pattern_desc.Ppat_any}[Ppat_any]} *) and class_field = { pcf_desc: class_field_desc; pcf_loc: Location.t; pcf_attributes: attributes; (** [... [\@\@id1] [\@\@id2]] *) } and class_field_desc = | Pcf_inherit of override_flag * class_expr * string loc option (** [Pcf_inherit(flag, CE, s)] represents: - [inherit CE] when [flag] is {{!Asttypes.override_flag.Fresh}[Fresh]} and [s] is [None], - [inherit CE as x] when [flag] is {{!Asttypes.override_flag.Fresh}[Fresh]} and [s] is [Some x], - [inherit! CE] when [flag] is {{!Asttypes.override_flag.Override}[Override]} and [s] is [None], - [inherit! 
CE as x] when [flag] is {{!Asttypes.override_flag.Override}[Override]} and [s] is [Some x] *) | Pcf_val of (label loc * mutable_flag * class_field_kind) (** [Pcf_val(x,flag, kind)] represents: - [val x = E] when [flag] is {{!Asttypes.mutable_flag.Immutable}[Immutable]} and [kind] is {{!class_field_kind.Cfk_concrete}[Cfk_concrete(Fresh, E)]} - [val virtual x: T] when [flag] is {{!Asttypes.mutable_flag.Immutable}[Immutable]} and [kind] is {{!class_field_kind.Cfk_virtual}[Cfk_virtual(T)]} - [val mutable x = E] when [flag] is {{!Asttypes.mutable_flag.Mutable}[Mutable]} and [kind] is {{!class_field_kind.Cfk_concrete}[Cfk_concrete(Fresh, E)]} - [val mutable virtual x: T] when [flag] is {{!Asttypes.mutable_flag.Mutable}[Mutable]} and [kind] is {{!class_field_kind.Cfk_virtual}[Cfk_virtual(T)]} *) | Pcf_method of (label loc * private_flag * class_field_kind) (** - [method x = E] ([E] can be a {{!expression_desc.Pexp_poly}[Pexp_poly]}) - [method virtual x: T] ([T] can be a {{!core_type_desc.Ptyp_poly}[Ptyp_poly]}) *) | Pcf_constraint of (core_type * core_type) (** [constraint T1 = T2] *) | Pcf_initializer of expression (** [initializer E] *) | Pcf_attribute of attribute (** [[\@\@\@id]] *) | Pcf_extension of extension (** [[%%id]] *) and class_field_kind = | Cfk_virtual of core_type | Cfk_concrete of override_flag * expression and class_declaration = class_expr class_infos (** {1 Module language} *) (** {2 Type expressions for the module language} *) and module_type = { pmty_desc: module_type_desc; pmty_loc: Location.t; pmty_attributes: attributes; (** [... [\@id1] [\@id2]] *) } and module_type_desc = | Pmty_ident of Longident.t loc (** [Pmty_ident(S)] represents [S] *) | Pmty_signature of signature (** [sig ... end] *) | Pmty_functor of functor_parameter * module_type (** [functor(X : MT1) -> MT2] *) | Pmty_with of module_type * with_constraint list (** [MT with ...] 
*) | Pmty_typeof of module_expr (** [module type of ME] *) | Pmty_extension of extension (** [[%id]] *) | Pmty_alias of Longident.t loc (** [(module M)] *) and functor_parameter = | Unit (** [()] *) | Named of string option loc * module_type (** [Named(name, MT)] represents: - [(X : MT)] when [name] is [Some X], - [(_ : MT)] when [name] is [None] *) and signature = signature_item list and signature_item = { psig_desc: signature_item_desc; psig_loc: Location.t; } and signature_item_desc = | Psig_value of value_description (** - [val x: T] - [external x: T = "s1" ... "sn"] *) | Psig_type of rec_flag * type_declaration list (** [type t1 = ... and ... and tn = ...] *) | Psig_typesubst of type_declaration list (** [type t1 := ... and ... and tn := ...] *) | Psig_typext of type_extension (** [type t1 += ...] *) | Psig_exception of type_exception (** [exception C of T] *) | Psig_module of module_declaration (** [module X = M] and [module X : MT] *) | Psig_modsubst of module_substitution (** [module X := M] *) | Psig_recmodule of module_declaration list (** [module rec X1 : MT1 and ... and Xn : MTn] *) | Psig_modtype of module_type_declaration (** [module type S = MT] and [module type S] *) | Psig_modtypesubst of module_type_declaration (** [module type S := ...] *) | Psig_open of open_description (** [open X] *) | Psig_include of include_description (** [include MT] *) | Psig_class of class_description list (** [class c1 : ... and ... and cn : ...] *) | Psig_class_type of class_type_declaration list (** [class type ct1 = ... and ... and ctn = ...] *) | Psig_attribute of attribute (** [[\@\@\@id]] *) | Psig_extension of extension * attributes (** [[%%id]] *) and module_declaration = { pmd_name: string option loc; pmd_type: module_type; pmd_attributes: attributes; (** [... 
[\@\@id1] [\@\@id2]] *) pmd_loc: Location.t; } (** Values of type [module_declaration] represents [S : MT] *) and module_substitution = { pms_name: string loc; pms_manifest: Longident.t loc; pms_attributes: attributes; (** [... [\@\@id1] [\@\@id2]] *) pms_loc: Location.t; } (** Values of type [module_substitution] represents [S := M] *) and module_type_declaration = { pmtd_name: string loc; pmtd_type: module_type option; pmtd_attributes: attributes; (** [... [\@\@id1] [\@\@id2]] *) pmtd_loc: Location.t; } (** Values of type [module_type_declaration] represents: - [S = MT], - [S] for abstract module type declaration, when {{!module_type_declaration.pmtd_type}[pmtd_type]} is [None]. *) and 'a open_infos = { popen_expr: 'a; popen_override: override_flag; popen_loc: Location.t; popen_attributes: attributes; } (** Values of type ['a open_infos] represents: - [open! X] when {{!open_infos.popen_override}[popen_override]} is {{!Asttypes.override_flag.Override}[Override]} (silences the "used identifier shadowing" warning) - [open X] when {{!open_infos.popen_override}[popen_override]} is {{!Asttypes.override_flag.Fresh}[Fresh]} *) and open_description = Longident.t loc open_infos (** Values of type [open_description] represents: - [open M.N] - [open M(N).O] *) and open_declaration = module_expr open_infos (** Values of type [open_declaration] represents: - [open M.N] - [open M(N).O] - [open struct ... end] *) and 'a include_infos = { pincl_mod: 'a; pincl_loc: Location.t; pincl_attributes: attributes; } and include_description = module_type include_infos (** Values of type [include_description] represents [include MT] *) and include_declaration = module_expr include_infos (** Values of type [include_declaration] represents [include ME] *) and with_constraint = | Pwith_type of Longident.t loc * type_declaration (** [with type X.t = ...] Note: the last component of the longident must match the name of the type_declaration. 
*) | Pwith_module of Longident.t loc * Longident.t loc (** [with module X.Y = Z] *) | Pwith_modtype of Longident.t loc * module_type (** [with module type X.Y = Z] *) | Pwith_modtypesubst of Longident.t loc * module_type (** [with module type X.Y := sig end] *) | Pwith_typesubst of Longident.t loc * type_declaration (** [with type X.t := ..., same format as [Pwith_type]] *) | Pwith_modsubst of Longident.t loc * Longident.t loc (** [with module X.Y := Z] *) (** {2 Value expressions for the module language} *) and module_expr = { pmod_desc: module_expr_desc; pmod_loc: Location.t; pmod_attributes: attributes; (** [... [\@id1] [\@id2]] *) } and module_expr_desc = | Pmod_ident of Longident.t loc (** [X] *) | Pmod_structure of structure (** [struct ... end] *) | Pmod_functor of functor_parameter * module_expr (** [functor(X : MT1) -> ME] *) | Pmod_apply of module_expr * module_expr (** [ME1(ME2)] *) | Pmod_constraint of module_expr * module_type (** [(ME : MT)] *) | Pmod_unpack of expression (** [(val E)] *) | Pmod_extension of extension (** [[%id]] *) and structure = structure_item list and structure_item = { pstr_desc: structure_item_desc; pstr_loc: Location.t; } and structure_item_desc = | Pstr_eval of expression * attributes (** [E] *) | Pstr_value of rec_flag * value_binding list (** [Pstr_value(rec, [(P1, E1 ; ... ; (Pn, En))])] represents: - [let P1 = E1 and ... and Pn = EN] when [rec] is {{!Asttypes.rec_flag.Nonrecursive}[Nonrecursive]}, - [let rec P1 = E1 and ... and Pn = EN ] when [rec] is {{!Asttypes.rec_flag.Recursive}[Recursive]}. *) | Pstr_primitive of value_description (** - [val x: T] - [external x: T = "s1" ... "sn" ]*) | Pstr_type of rec_flag * type_declaration list (** [type t1 = ... and ... and tn = ...] *) | Pstr_typext of type_extension (** [type t1 += ...] 
*) | Pstr_exception of type_exception (** - [exception C of T] - [exception C = M.X] *) | Pstr_module of module_binding (** [module X = ME] *) | Pstr_recmodule of module_binding list (** [module rec X1 = ME1 and ... and Xn = MEn] *) | Pstr_modtype of module_type_declaration (** [module type S = MT] *) | Pstr_open of open_declaration (** [open X] *) | Pstr_class of class_declaration list (** [class c1 = ... and ... and cn = ...] *) | Pstr_class_type of class_type_declaration list (** [class type ct1 = ... and ... and ctn = ...] *) | Pstr_include of include_declaration (** [include ME] *) | Pstr_attribute of attribute (** [[\@\@\@id]] *) | Pstr_extension of extension * attributes (** [[%%id]] *) and value_binding = { pvb_pat: pattern; pvb_expr: expression; pvb_attributes: attributes; pvb_loc: Location.t; } and module_binding = { pmb_name: string option loc; pmb_expr: module_expr; pmb_attributes: attributes; pmb_loc: Location.t; } (** Values of type [module_binding] represents [module X = ME] *) (** {1 Toplevel} *) (** {2 Toplevel phrases} *) type toplevel_phrase = | Ptop_def of structure | Ptop_dir of toplevel_directive (** [#use], [#load] ... *) and toplevel_directive = { pdir_name: string loc; pdir_arg: directive_argument option; pdir_loc: Location.t; } and directive_argument = { pdira_desc: directive_argument_desc; pdira_loc: Location.t; } and directive_argument_desc = | Pdir_string of string | Pdir_int of string * char option | Pdir_ident of Longident.t | Pdir_bool of bool
(**************************************************************************) (* *) (* OCaml *) (* *) (* Xavier Leroy, projet Cristal, INRIA Rocquencourt *) (* *) (* Copyright 1996 Institut National de Recherche en Informatique et *) (* en Automatique. *) (* *) (* All rights reserved. This file is distributed under the terms of *) (* the GNU Lesser General Public License version 2.1, with the *) (* special exception on linking described in the file LICENSE. *) (* *) (**************************************************************************)
clock.ml
open Lwt
open LTerm_widget

(* Format the current local time as "HH:MM:SS". *)
let get_time () =
  let tm = Unix.localtime (Unix.time ()) in
  Printf.sprintf "%02u:%02u:%02u" tm.Unix.tm_hour tm.Unix.tm_min tm.Unix.tm_sec

(* Build a tiny full-screen UI: a clock label that refreshes every
   second, above a button that quits the application. *)
let main () =
  (* Promise resolved when the user asks to quit. *)
  let waiter, wakener = wait () in

  (* Widget tree: clock on top, exit button below. *)
  let root = new vbox in
  let time_label = new label (get_time ()) in
  let exit_button = new button "exit退出" in
  root#add time_label;
  root#add exit_button;

  (* Refresh the displayed time once per second, forever. *)
  ignore
    (Lwt_engine.on_timer 1.0 true
       (fun _ -> time_label#set_text (get_time ())));

  (* Resolve [waiter] (hence terminate [run]) on a click. *)
  exit_button#on_click (wakeup wakener);

  (* Run in the standard terminal; whatever happens, give the mouse
     back to the terminal before returning. *)
  Lazy.force LTerm.stdout >>= fun term ->
  LTerm.enable_mouse term >>= fun () ->
  Lwt.finalize
    (fun () -> run term root waiter)
    (fun () -> LTerm.disable_mouse term)

let () = Lwt_main.run (main ())
(* * clock.ml * -------- * Copyright : (c) 2011, Jeremie Dimino <jeremie@dimino.org> * Licence : BSD3 * * This file is a part of Lambda-Term. *)
contract_repr.ml
(* A contract is either an implicit account, identified by the hash of
   its public key, or an originated contract, identified by its
   [Contract_hash.t]. *)
type t =
  | Implicit of Signature.Public_key_hash.t
  | Originated of Contract_hash.t

(* Total order: all implicit contracts sort before all originated ones. *)
include Compare.Make(struct
    type nonrec t = t
    let compare l1 l2 =
      match l1, l2 with
      | Implicit pkh1, Implicit pkh2 ->
          Signature.Public_key_hash.compare pkh1 pkh2
      | Originated h1, Originated h2 ->
          Contract_hash.compare h1 h2
      | Implicit _, Originated _ -> -1
      | Originated _, Implicit _ -> 1
  end)

type contract = t

type error += Invalid_contract_notation of string (* `Permanent *)

(* Base58check rendering, dispatching on the contract kind. *)
let to_b58check = function
  | Implicit pbk -> Signature.Public_key_hash.to_b58check pbk
  | Originated h -> Contract_hash.to_b58check h

(* Parse a base58check notation; accepts any of the three implicit key
   hash kinds or an originated contract hash.  Fails with
   [Invalid_contract_notation] otherwise. *)
let of_b58check s =
  match Base58.decode s with
  | Some (Ed25519.Public_key_hash.Data h) ->
      ok (Implicit (Signature.Ed25519 h))
  | Some (Secp256k1.Public_key_hash.Data h) ->
      ok (Implicit (Signature.Secp256k1 h))
  | Some (P256.Public_key_hash.Data h) ->
      ok (Implicit (Signature.P256 h))
  | Some (Contract_hash.Data h) ->
      ok (Originated h)
  | _ ->
      error (Invalid_contract_notation s)

let pp ppf = function
  | Implicit pbk -> Signature.Public_key_hash.pp ppf pbk
  | Originated h -> Contract_hash.pp ppf h

let pp_short ppf = function
  | Implicit pbk -> Signature.Public_key_hash.pp_short ppf pbk
  | Originated h -> Contract_hash.pp_short ppf h

(* Binary: tagged union (tag 0 = implicit, tag 1 = originated, padded by
   one byte so both branches have the same length).
   JSON: the base58check string notation. *)
let encoding =
  let open Data_encoding in
  def "contract_id"
    ~title: "A contract handle"
    ~description:
      "A contract notation as given to an RPC or inside scripts. \
       Can be a base58 implicit contract hash \
       or a base58 originated contract hash." @@
  splitted
    ~binary:
      (union ~tag_size:`Uint8 [
          case (Tag 0)
            ~title:"Implicit"
            Signature.Public_key_hash.encoding
            (function Implicit k -> Some k | _ -> None)
            (fun k -> Implicit k) ;
          case (Tag 1) (Fixed.add_padding Contract_hash.encoding 1)
            ~title:"Originated"
            (function Originated k -> Some k | _ -> None)
            (fun k -> Originated k) ;
        ])
    ~json:
      (conv
         to_b58check
         (fun s ->
            match of_b58check s with
            | Ok s -> s
            | Error _ -> Json.cannot_destruct "Invalid contract notation.")
         string)

let () =
  let open Data_encoding in
  register_error_kind
    `Permanent
    ~id:"contract.invalid_contract_notation"
    ~title: "Invalid contract notation"
    ~pp: (fun ppf x -> Format.fprintf ppf "Invalid contract notation %S" x)
    ~description:
      "A malformed contract notation was given to an RPC or in a script."
    (obj1 (req "notation" string))
    (function Invalid_contract_notation loc -> Some loc | _ -> None)
    (fun loc -> Invalid_contract_notation loc)

let implicit_contract id = Implicit id

let is_implicit = function
  | Implicit m -> Some m
  | Originated _ -> None

let is_originated = function
  | Implicit _ -> None
  | Originated h -> Some h

(* Seed from which originated contract addresses are derived: the hash
   of the operation performing the origination, plus a per-operation
   counter so one operation can originate several contracts. *)
type origination_nonce =
  { operation_hash: Operation_hash.t ;
    origination_index: int32 }

let origination_nonce_encoding =
  let open Data_encoding in
  conv
    (fun { operation_hash ; origination_index } ->
       (operation_hash, origination_index))
    (fun (operation_hash, origination_index) ->
       { operation_hash ; origination_index }) @@
  obj2
    (req "operation" Operation_hash.encoding)
    (dft "index" int32 0l)

(* The originated contract address is the hash of the serialized nonce. *)
let originated_contract nonce =
  let data =
    Data_encoding.Binary.to_bytes_exn origination_nonce_encoding nonce in
  Originated (Contract_hash.hash_bytes [data])

(* All contracts originated between the [since] and [until] nonces of a
   single operation (both nonces must carry the same operation hash),
   in increasing index order. *)
let originated_contracts
    ~since: { origination_index = first ; operation_hash = first_hash }
    ~until: ({ origination_index = last ; operation_hash = last_hash }
             as origination_nonce) =
  assert (Operation_hash.equal first_hash last_hash) ;
  let rec contracts acc origination_index =
    if Compare.Int32.(origination_index < first) then
      acc
    else
      let origination_nonce =
        { origination_nonce with origination_index } in
      let acc = originated_contract origination_nonce :: acc in
      contracts acc (Int32.pred origination_index) in
  contracts [] (Int32.pred last)

let initial_origination_nonce operation_hash =
  { operation_hash ; origination_index = 0l }

let incr_origination_nonce nonce =
  let origination_index = Int32.succ nonce.origination_index in
  { nonce with origination_index }

(* RPC path argument: contracts travel as base58check strings. *)
let rpc_arg =
  let construct = to_b58check in
  let destruct hash =
    match of_b58check hash with
    | Error _ -> Error "Cannot parse contract id"
    | Ok contract -> Ok contract in
  RPC_arg.make
    ~descr: "A contract identifier encoded in b58check."
    ~name: "contract_id"
    ~construct
    ~destruct
    ()

(* Storage-key index for contracts.  Implicit contracts use the key
   hash's own path; originated ones live under an extra "originated"
   path segment (hence the path-length assertion below). *)
module Index = struct
  type t = contract
  let path_length =
    assert Compare.Int.(Signature.Public_key_hash.path_length
                        = 1 + Contract_hash.path_length) ;
    Signature.Public_key_hash.path_length
  let to_path c l =
    match c with
    | Implicit k -> Signature.Public_key_hash.to_path k l
    | Originated h -> "originated" :: Contract_hash.to_path h l
  let of_path = function
    | "originated" :: key -> begin
        match Contract_hash.of_path key with
        | None -> None
        | Some h -> Some (Originated h)
      end
    | key -> begin
        match Signature.Public_key_hash.of_path key with
        | None -> None
        | Some h -> Some (Implicit h)
      end
  let contract_prefix s =
    "originated" :: Contract_hash.prefix_path s
  let pkh_prefix_ed25519 s = Ed25519.Public_key_hash.prefix_path s
  let pkh_prefix_secp256k1 s = Secp256k1.Public_key_hash.prefix_path s
  let pkh_prefix_p256 s = P256.Public_key_hash.prefix_path s
  let rpc_arg = rpc_arg
  let encoding = encoding
  let compare = compare
end
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
logs_syslog_lwt.mli
(** Logs reporter via syslog, using Lwt

    Please read {!Logs_syslog} first. *)

(** [udp_reporter ~hostname remote_ip ~port ~truncate ()] is [reporter],
    which sends syslog messages using the given [hostname] to
    [remote_ip, port] via UDP.  Each message is truncated to [truncate]
    bytes (defaults to 65535).  [hostname] defaults to
    [Lwt_unix.gethostname ()], [port] defaults to 514.  [facility] is the
    default syslog facility (see {!Logs_syslog.message}). *)
val udp_reporter :
  ?hostname:string -> Lwt_unix.inet_addr -> ?port:int -> ?truncate:int ->
  ?facility:Syslog_message.facility -> unit -> Logs.reporter Lwt.t

(** [tcp_reporter ~hostname remote_ip ~port ~truncate ~framing ()] is
    [Ok reporter] or [Error msg].  The [reporter] sends each log message
    to [remote_ip, port] via TCP.  If the initial TCP connection to
    [remote_ip] fails, an [Error msg] is returned instead.  If the TCP
    connection fails later on, the log message is reported to standard
    error, and attempts are made to re-establish the TCP connection.
    Each syslog message is truncated to [truncate] bytes (defaults to 0,
    thus no truncation).  Each syslog message is framed (using
    [framing]); the default strategy is to append a single byte
    containing 0.  [hostname] defaults to [Lwt_unix.gethostname ()],
    [port] defaults to 514.  [facility] is the default syslog facility
    (see {!Logs_syslog.message}). *)
val tcp_reporter : ?hostname:string -> Lwt_unix.inet_addr -> ?port:int ->
  ?truncate:int ->
  ?framing:Logs_syslog.framing -> ?facility:Syslog_message.facility -> unit ->
  (Logs.reporter, string) result Lwt.t

(** [unix_reporter ~socket ~truncate ~framing ()] is [Ok reporter] or
    [Error msg].  The [reporter] sends each log message via syslog to
    [socket] (which defaults to ["/dev/log"]).  If the initial
    connection to the socket fails, the log message is reported to
    standard error, and attempts are made to re-establish the
    connection.  A syslog message is truncated to [truncate] bytes and
    is framed according to the given [framing].  The default for
    [truncate] is [65536] if [framing] is not provided, and [0]
    otherwise.  If [framing] is not provided, a datagram socket is used
    (as for {!udp_reporter}); otherwise a stream socket is used (as for
    {!tcp_reporter}).  [facility] is the default syslog facility (see
    {!Logs_syslog.message}). *)
val unix_reporter : ?socket:string ->
  ?truncate:int ->
  ?framing:Logs_syslog.framing ->
  ?facility:Syslog_message.facility -> unit ->
  (Logs.reporter, string) result Lwt.t

(** {2:lwt_example Example usage}

    To install a Lwt syslog reporter, sending via UDP to localhost, use
    the following snippet:
{[
let install_logger () =
  udp_reporter (Unix.inet_addr_of_string "127.0.0.1") () >|= fun r ->
  Logs.set_reporter r

let _ = Lwt_main.run (install_logger ())
]}

    And via TCP:
{[
let install_logger () =
  tcp_reporter (Unix.inet_addr_of_string "127.0.0.1") () >|= function
    | Ok r -> Logs.set_reporter r
    | Error e -> print_endline e

let _ = Lwt_main.run (install_logger ())
]}
*)
(** Logs reporter via syslog, using Lwt Please read {!Logs_syslog} first. *)
lm_instrument.mli
(* This file is (C) 2014 by Gerd Stolpmann. It is distributed under the same license conditions as OMake. It was developed with financial support from Lexifi. *) type probe val enabled : bool ref (** Whether enabled (default: true) *) val create : string -> probe (** Create a new probe with this name *) val start : probe -> unit (** Start the probe timer *) val stop : probe -> unit (** Stop the probe timer *) val instrument : probe -> ('a -> 'b) -> 'a -> 'b (** [instrument p f arg]: run [f arg] and return the result (or exception). While running the runtime is measured. *) val finish : unit -> unit (** Globally finish all timers *) val report : unit -> unit (** Print a report to stdout *)
(* This file is (C) 2014 by Gerd Stolpmann. It is distributed under the same license conditions as OMake. It was developed with financial support from Lexifi. *)
aliases.ml
(** ['a pair] is a homogeneous pair: two values of the same type ['a]. *)
type 'a pair = 'a * 'a
cert.c.h
/*[clinic input] preserve [clinic start generated code]*/ PyDoc_STRVAR(_ssl_Certificate_public_bytes__doc__, "public_bytes($self, /, format=Encoding.PEM)\n" "--\n" "\n"); #define _SSL_CERTIFICATE_PUBLIC_BYTES_METHODDEF \ {"public_bytes", _PyCFunction_CAST(_ssl_Certificate_public_bytes), METH_FASTCALL|METH_KEYWORDS, _ssl_Certificate_public_bytes__doc__}, static PyObject * _ssl_Certificate_public_bytes_impl(PySSLCertificate *self, int format); static PyObject * _ssl_Certificate_public_bytes(PySSLCertificate *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"format", NULL}; static _PyArg_Parser _parser = {NULL, _keywords, "public_bytes", 0}; PyObject *argsbuf[1]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; int format = PY_SSL_ENCODING_PEM; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); if (!args) { goto exit; } if (!noptargs) { goto skip_optional_pos; } format = _PyLong_AsInt(args[0]); if (format == -1 && PyErr_Occurred()) { goto exit; } skip_optional_pos: return_value = _ssl_Certificate_public_bytes_impl(self, format); exit: return return_value; } PyDoc_STRVAR(_ssl_Certificate_get_info__doc__, "get_info($self, /)\n" "--\n" "\n"); #define _SSL_CERTIFICATE_GET_INFO_METHODDEF \ {"get_info", (PyCFunction)_ssl_Certificate_get_info, METH_NOARGS, _ssl_Certificate_get_info__doc__}, static PyObject * _ssl_Certificate_get_info_impl(PySSLCertificate *self); static PyObject * _ssl_Certificate_get_info(PySSLCertificate *self, PyObject *Py_UNUSED(ignored)) { return _ssl_Certificate_get_info_impl(self); } /*[clinic end generated code: output=18885c4d167d5244 input=a9049054013a1b77]*/
/*[clinic input] preserve [clinic start generated code]*/
signature.mli
(** Unified signature interface over the three supported schemes:
    Ed25519, Secp256k1 and P256. *)

(** Hash of a public key, in any of the three supported schemes. *)
type public_key_hash =
  | Ed25519 of Ed25519.Public_key_hash.t
  | Secp256k1 of Secp256k1.Public_key_hash.t
  | P256 of P256.Public_key_hash.t

(** A public key, in any of the three supported schemes. *)
type public_key =
  | Ed25519 of Ed25519.Public_key.t
  | Secp256k1 of Secp256k1.Public_key.t
  | P256 of P256.Public_key.t

(** A secret key, in any of the three supported schemes. *)
type secret_key =
  | Ed25519 of Ed25519.Secret_key.t
  | Secp256k1 of Secp256k1.Secret_key.t
  | P256 of P256.Secret_key.t

(** Domain-separation tag mixed into signed data, preventing a
    signature made for one purpose from being replayed for another. *)
type watermark =
  | Block_header of Chain_id.t
  | Endorsement of Chain_id.t
  | Generic_operation
  | Custom of Bytes.t

(** Serialization of a watermark, as prepended to signed data. *)
val bytes_of_watermark : watermark -> Bytes.t

val pp_watermark : Format.formatter -> watermark -> unit

include S.SIGNATURE
  with type Public_key_hash.t = public_key_hash
   and type Public_key.t = public_key
   and type Secret_key.t = secret_key
   and type watermark := watermark

(** [append sk buf] is the concatenation of [buf] and the
    serialization of the signature of [buf] signed by [sk]. *)
val append : ?watermark:watermark -> secret_key -> Bytes.t -> Bytes.t

(** [concat buf t] is the concatenation of [buf] and the
    serialization of [t]. *)
val concat : Bytes.t -> t -> Bytes.t

include S.RAW_DATA with type t := t

(** Injections of scheme-specific signatures into the unified type. *)
val of_secp256k1 : Secp256k1.t -> t
val of_ed25519 : Ed25519.t -> t
val of_p256 : P256.t -> t

(** Selector for a signature scheme. *)
type algo =
  | Ed25519
  | Secp256k1
  | P256

(** Generate a fresh key triple for [algo] (optionally from [seed]). *)
val generate_key :
  ?algo:algo ->
  ?seed:Bigstring.t ->
  unit -> public_key_hash * public_key * secret_key
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
astring.ml
(* [strf] formats directly into a fresh string. *)
let strf = Format.asprintf

(* Shadow Stdlib's [( ^ )] with Astring's string append. *)
let ( ^ ) = Astring_string.append

(* Re-export the library's [Char] and [String] replacements under their
   usual names. *)
module Char = Astring_char
module String = Astring_string

(*---------------------------------------------------------------------------
   Copyright (c) 2015 The astring programmers

   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
   copyright notice and this permission notice appear in all copies.

   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
   ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  ---------------------------------------------------------------------------*)
(*--------------------------------------------------------------------------- Copyright (c) 2015 The astring programmers. All rights reserved. Distributed under the ISC license, see terms at the end of the file. ---------------------------------------------------------------------------*)
skip_list_repr.ml
(** Skip lists: append-only lists whose cells carry back pointers,
    allowing paths of logarithmic length between any two cells. *)
module type S = sig
  (** A cell holding a ['content] payload and back pointers of type
      ['ptr] to earlier cells. *)
  type ('content, 'ptr) cell

  (** Structural equality on cells, parameterized by equality on
      contents and on pointers. *)
  val equal :
    ('content -> 'content -> bool) ->
    ('ptr -> 'ptr -> bool) ->
    ('content, 'ptr) cell ->
    ('content, 'ptr) cell ->
    bool

  val encoding :
    'ptr Data_encoding.t ->
    'content Data_encoding.t ->
    ('content, 'ptr) cell Data_encoding.t

  (** Position of the cell in the list; the genesis cell has index 0. *)
  val index : (_, _) cell -> int

  val content : ('content, 'ptr) cell -> 'content

  (** [back_pointer cell i] is the [i]-th back pointer of [cell],
      if any. *)
  val back_pointer : ('content, 'ptr) cell -> int -> 'ptr option

  val back_pointers : ('content, 'ptr) cell -> 'ptr list

  (** [genesis content] is the first cell of a skip list. *)
  val genesis : 'content -> ('content, 'ptr) cell

  (** [next ~prev_cell ~prev_cell_ptr content] is the cell that follows
      [prev_cell] (referenced by [prev_cell_ptr]) in the list. *)
  val next :
    prev_cell:('content, 'ptr) cell ->
    prev_cell_ptr:'ptr ->
    'content ->
    ('content, 'ptr) cell

  (** [back_path ~deref ~cell_ptr ~target_index] is a path of pointers
      from [cell_ptr] back to the cell of index [target_index], or
      [None] if dereferencing fails or the target lies ahead. *)
  val back_path :
    deref:('ptr -> ('content, 'ptr) cell option) ->
    cell_ptr:'ptr ->
    target_index:int ->
    'ptr list option

  (** [valid_back_path ~equal_ptr ~deref ~cell_ptr ~target_ptr path]
      checks that [path] is the minimal back path from [cell_ptr]
      down to [target_ptr]. *)
  val valid_back_path :
    equal_ptr:('ptr -> 'ptr -> bool) ->
    deref:('ptr -> ('content, 'ptr) cell option) ->
    cell_ptr:'ptr ->
    target_ptr:'ptr ->
    'ptr list ->
    bool
end

module Make (Parameters : sig
  val basis : int
end) : S = struct
  (* The construction is only meaningful for a basis of at least 2. *)
  let () = assert (Compare.Int.(Parameters.basis >= 2))

  open Parameters

  (* A cell of a skip list with some [`content] and backpointers of
     type [`ptr].

     Invariants
     ----------

     - back_pointers[i]
        = Some (pointer to (index - (index mod (basis ** i)) - 1))
          (for all i < length back_pointers)
     - length back_pointers = log basis index

     Notes
     -----

     - The [index] field is not strictly required but helps in making
       the data structure more robust. Indeed, otherwise, we should
       also ask the client to provide the index of the cell to be
       built, which can be error-prone.

     - The back pointers of a node are chosen from the back pointers of
       its predecessor (except for the genesis node) and a pointer to
       this predecessor. This locality makes the insertion of new nodes
       very efficient in practice. *)
  type ('content, 'ptr) cell = {
    content : 'content;
    back_pointers : 'ptr option FallbackArray.t;
    index : int;
  }

  let equal equal_content equal_ptr cell1 cell2 =
    (* Element-wise comparison of the two back-pointer arrays. *)
    let equal_back_pointers b1 b2 =
      let open FallbackArray in
      Compare.Int.(length b1 = length b2)
      && fst
         @@ fold
              (fun (equal, i) h1 ->
                (equal && Option.equal equal_ptr h1 (get b2 i), i + 1))
              b1
              (true, 0)
    in
    let {content; back_pointers; index} = cell1 in
    equal_content content cell2.content
    && Compare.Int.equal index cell2.index
    && equal_back_pointers back_pointers cell2.back_pointers

  let index cell = cell.index

  (* Flatten the (always-[Some], by invariant) back pointers into a
     list, preserving order. *)
  let back_pointers_to_list a =
    FallbackArray.fold
      (fun l -> function
        | Some ptr -> ptr :: l
        | None ->
            (* By [cell] invariants. *)
            assert false)
      a
      []
    |> List.rev

  let encoding ptr_encoding content_encoding =
    let of_list =
      FallbackArray.of_list ~fallback:None ~proj:(fun c -> Some c)
    in
    let to_list = back_pointers_to_list in
    let open Data_encoding in
    conv
      (fun {index; content; back_pointers} ->
        (index, content, to_list back_pointers))
      (fun (index, content, back_pointers) ->
        {index; content; back_pointers = of_list back_pointers})
      (obj3
         (req "index" int31)
         (req "content" content_encoding)
         (req "back_pointers" (list ptr_encoding)))

  let content cell = cell.content

  let back_pointers cell = back_pointers_to_list cell.back_pointers

  let genesis content =
    {index = 0; content; back_pointers = FallbackArray.make 0 None}

  let back_pointer cell i = FallbackArray.get cell.back_pointers i

  (* Precondition: i < length cell.back_pointers *)
  let back_pointer_unsafe cell i =
    match FallbackArray.get cell.back_pointers i with
    | Some ptr -> ptr
    | None ->
        (* By precondition and invariants of cells. *)
        assert false

  let next ~prev_cell ~prev_cell_ptr content =
    let index = prev_cell.index + 1 in
    (* Build the new cell's back pointers: slot [i] points to the
       predecessor when [index] is a multiple of [basis ** i], and is
       inherited from the predecessor otherwise. *)
    let back_pointers =
      let rec aux power accu i =
        if Compare.Int.(index < power) then List.rev accu
        else
          let back_pointer_i =
            if Compare.Int.(index mod power = 0) then prev_cell_ptr
            else
              (* The following call is valid because of

                 - [i < List.length prev_cell.back_pointer] because
                   [log_basis index = log_basis prev_cell.index]

                 - the invariants of [prev_cell] *)
              back_pointer_unsafe prev_cell i
          in
          let accu = back_pointer_i :: accu in
          aux (power * basis) accu (i + 1)
      in
      aux 1 [] 0
    in
    let back_pointers =
      FallbackArray.of_list ~fallback:None ~proj:Option.some back_pointers
    in
    {index; content; back_pointers}

  (* [best_skip cell target_index] is the slot of the back pointer of
     [cell] that jumps as far back as possible without overshooting
     [target_index], if any. *)
  let best_skip cell target_index =
    let index = cell.index in
    let rec aux idx pow best_idx =
      if Compare.Int.(idx >= FallbackArray.length cell.back_pointers) then
        best_idx
      else
        let idx_index = index - (index mod pow) - 1 in
        if Compare.Int.(idx_index < target_index) then best_idx
        else aux (idx + 1) (basis * pow) (Some idx)
    in
    aux 0 1 None

  let back_path ~deref ~cell_ptr ~target_index =
    (* Greedily follow the best (longest non-overshooting) skip at each
       step, accumulating the pointers traversed. *)
    let rec aux path ptr =
      let path = ptr :: path in
      Option.bind (deref ptr) @@ fun cell ->
      let index = cell.index in
      if Compare.Int.(target_index = index) then Some (List.rev path)
      else if Compare.Int.(target_index > index) then None
      else
        Option.bind (best_skip cell target_index) @@ fun best_idx ->
        Option.bind (back_pointer cell best_idx) @@ fun ptr ->
        aux path ptr
    in
    aux [] cell_ptr

  (* Membership of [x] among the [Some] entries of array [l]. *)
  let mem equal x l =
    let open FallbackArray in
    let n = length l in
    let rec aux idx =
      if Compare.Int.(idx >= n) then false
      else
        match FallbackArray.get l idx with
        | None -> aux (idx + 1)
        | Some y -> if equal x y then true else aux (idx + 1)
    in
    aux 0

  (* [assume_some o f] is [f x] when [o = Some x], and [false] when
     dereferencing failed. *)
  let assume_some o f = match o with None -> false | Some x -> f x

  let valid_back_path ~equal_ptr ~deref ~cell_ptr ~target_ptr path =
    assume_some (deref target_ptr) @@ fun target ->
    assume_some (deref cell_ptr) @@ fun cell ->
    let target_index = index target
    and cell_index = index cell in
    (* Each step must (1) follow an actual back pointer of the current
       cell and (2) be the best possible skip towards the target, so
       that only the minimal path is accepted. *)
    let rec valid_path index cell_ptr path =
      match (cell_ptr, path) with
      | final_cell, [] ->
          equal_ptr target_ptr final_cell
          && Compare.Int.(index = target_index)
      | cell_ptr, cell_ptr' :: path ->
          assume_some (deref cell_ptr) @@ fun cell ->
          assume_some (deref cell_ptr') @@ fun cell' ->
          mem equal_ptr cell_ptr' cell.back_pointers
          && assume_some (best_skip cell target_index) @@ fun best_idx ->
             assume_some (back_pointer cell best_idx) @@ fun best_ptr ->
             let minimal = equal_ptr best_ptr cell_ptr' in
             let index' = cell'.index in
             minimal && valid_path index' cell_ptr' path
    in
    match path with
    | [] -> false
    | first_cell_ptr :: path ->
        equal_ptr first_cell_ptr cell_ptr
        && valid_path cell_index cell_ptr path
end
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2022 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
script_ir_annot.ml
open Alpha_context open Micheline open Script_tc_errors open Script_typed_ir let default_now_annot = Some (`Var_annot "now") let default_amount_annot = Some (`Var_annot "amount") let default_balance_annot = Some (`Var_annot "balance") let default_steps_annot = Some (`Var_annot "steps") let default_source_annot = Some (`Var_annot "source") let default_sender_annot = Some (`Var_annot "sender") let default_self_annot = Some (`Var_annot "self") let default_arg_annot = Some (`Var_annot "arg") let default_param_annot = Some (`Var_annot "parameter") let default_storage_annot = Some (`Var_annot "storage") let default_car_annot = Some (`Field_annot "car") let default_cdr_annot = Some (`Field_annot "cdr") let default_contract_annot = Some (`Field_annot "contract") let default_addr_annot = Some (`Field_annot "address") let default_manager_annot = Some (`Field_annot "manager") let default_pack_annot = Some (`Field_annot "packed") let default_unpack_annot = Some (`Field_annot "unpacked") let default_slice_annot = Some (`Field_annot "slice") let default_elt_annot = Some (`Field_annot "elt") let default_key_annot = Some (`Field_annot "key") let default_hd_annot = Some (`Field_annot "hd") let default_tl_annot = Some (`Field_annot "tl") let default_some_annot = Some (`Field_annot "some") let default_left_annot = Some (`Field_annot "left") let default_right_annot = Some (`Field_annot "right") let default_binding_annot = Some (`Field_annot "bnd") let unparse_type_annot : type_annot option -> string list = function | None -> [] | Some `Type_annot a -> [ ":" ^ a ] let unparse_var_annot : var_annot option -> string list = function | None -> [] | Some `Var_annot a -> [ "@" ^ a ] let unparse_field_annot : field_annot option -> string list = function | None -> [] | Some `Field_annot a -> [ "%" ^ a ] let field_to_var_annot : field_annot option -> var_annot option = function | None -> None | Some (`Field_annot s) -> Some (`Var_annot s) let type_to_var_annot : type_annot option -> var_annot 
option = function | None -> None | Some (`Type_annot s) -> Some (`Var_annot s) let var_to_field_annot : var_annot option -> field_annot option = function | None -> None | Some (`Var_annot s) -> Some (`Field_annot s) let default_annot ~default = function | None -> default | annot -> annot let gen_access_annot : var_annot option -> ?default:field_annot option -> field_annot option -> var_annot option = fun value_annot ?(default=None) field_annot -> match value_annot, field_annot, default with | None, None, _ | Some _, None, None | None, Some `Field_annot "", _ -> None | None, Some `Field_annot f, _ -> Some (`Var_annot f) | Some `Var_annot v, (None | Some `Field_annot ""), Some `Field_annot f -> Some (`Var_annot (String.concat "." [v; f])) | Some `Var_annot v, Some `Field_annot f, _ -> Some (`Var_annot (String.concat "." [v; f])) let merge_type_annot : legacy: bool -> type_annot option -> type_annot option -> type_annot option tzresult = fun ~legacy annot1 annot2 -> match annot1, annot2 with | None, None | Some _, None | None, Some _ -> ok None | Some `Type_annot a1, Some `Type_annot a2 -> if legacy || String.equal a1 a2 then ok annot1 else error (Inconsistent_annotations (":" ^ a1, ":" ^ a2)) let merge_field_annot : legacy: bool -> field_annot option -> field_annot option -> field_annot option tzresult = fun ~legacy annot1 annot2 -> match annot1, annot2 with | None, None | Some _, None | None, Some _ -> ok None | Some `Field_annot a1, Some `Field_annot a2 -> if legacy || String.equal a1 a2 then ok annot1 else error (Inconsistent_annotations ("%" ^ a1, "%" ^ a2)) let merge_var_annot : var_annot option -> var_annot option -> var_annot option = fun annot1 annot2 -> match annot1, annot2 with | None, None | Some _, None | None, Some _ -> None | Some `Var_annot a1, Some `Var_annot a2 -> if String.equal a1 a2 then annot1 else None let error_unexpected_annot loc annot = match annot with | [] -> ok () | _ :: _ -> error (Unexpected_annotation loc) let fail_unexpected_annot loc 
annot = Lwt.return (error_unexpected_annot loc annot) let parse_annots loc ?(allow_special_var = false) ?(allow_special_field = false) l = (* allow emtpty annotations as wildcards but otherwise only accept annotations that start with [a-zA-Z_] *) let sub_or_wildcard ~specials wrap s acc = let len = String.length s in if Compare.Int.(len = 1) then ok @@ wrap None :: acc else match s.[1] with | 'a' .. 'z' | 'A' .. 'Z' | '_' -> ok @@ wrap (Some (String.sub s 1 (len - 1))) :: acc | '@' when Compare.Int.(len = 2) && List.mem '@' specials -> ok @@ wrap (Some "@") :: acc | '%' when List.mem '%' specials -> if Compare.Int.(len = 2) then ok @@ wrap (Some "%") :: acc else if Compare.Int.(len = 3) && Compare.Char.(s.[2] = '%') then ok @@ wrap (Some "%%") :: acc else error (Unexpected_annotation loc) | _ -> error (Unexpected_annotation loc) in List.fold_left (fun acc s -> acc >>? fun acc -> if Compare.Int.(String.length s = 0) then error (Unexpected_annotation loc) else match s.[0] with | ':' -> sub_or_wildcard ~specials:[] (fun a -> `Type_annot a) s acc | '@' -> sub_or_wildcard ~specials:(if allow_special_var then ['%'] else []) (fun a -> `Var_annot a) s acc | '%' -> sub_or_wildcard ~specials:(if allow_special_field then ['@'] else []) (fun a -> `Field_annot a) s acc | _ -> error (Unexpected_annotation loc) ) (ok []) l >|? 
List.rev let opt_var_of_var_opt = function | `Var_annot None -> None | `Var_annot Some a -> Some (`Var_annot a) let opt_field_of_field_opt = function | `Field_annot None -> None | `Field_annot Some a -> Some (`Field_annot a) let opt_type_of_type_opt = function | `Type_annot None -> None | `Type_annot Some a -> Some (`Type_annot a) let classify_annot loc l : (var_annot option list * type_annot option list * field_annot option list) tzresult = try let _, rv, _, rt, _, rf = List.fold_left (fun (in_v, rv, in_t, rt, in_f, rf) a -> match a, in_v, rv, in_t, rt, in_f, rf with | (`Var_annot _ as a), true, _, _, _, _, _ | (`Var_annot _ as a), false, [], _, _, _, _ -> true, opt_var_of_var_opt a :: rv, false, rt, false, rf | (`Type_annot _ as a), _, _, true, _, _, _ | (`Type_annot _ as a), _, _, false, [], _, _ -> false, rv, true, opt_type_of_type_opt a :: rt, false, rf | (`Field_annot _ as a), _, _, _, _, true, _ | (`Field_annot _ as a), _, _, _, _, false, [] -> false, rv, false, rt, true, opt_field_of_field_opt a :: rf | _ -> raise Exit ) (false, [], false, [], false, []) l in ok (List.rev rv, List.rev rt, List.rev rf) with Exit -> error (Ungrouped_annotations loc) let get_one_annot loc = function | [] -> ok None | [ a ] -> ok a | _ -> error (Unexpected_annotation loc) let get_two_annot loc = function | [] -> ok (None, None) | [ a ] -> ok (a, None) | [ a; b ] -> ok (a, b) | _ -> error (Unexpected_annotation loc) let parse_type_annot : int -> string list -> type_annot option tzresult = fun loc annot -> parse_annots loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc vars >>? fun () -> error_unexpected_annot loc fields >>? fun () -> get_one_annot loc types let parse_type_field_annot : int -> string list -> (type_annot option * field_annot option) tzresult = fun loc annot -> parse_annots loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc vars >>? fun () -> get_one_annot loc types >>? 
fun t -> get_one_annot loc fields >|? fun f -> (t, f) let parse_composed_type_annot : int -> string list -> (type_annot option * field_annot option * field_annot option) tzresult = fun loc annot -> parse_annots loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc vars >>? fun () -> get_one_annot loc types >>? fun t -> get_two_annot loc fields >|? fun (f1, f2) -> (t, f1, f2) let parse_field_annot : int -> string list -> field_annot option tzresult = fun loc annot -> parse_annots loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc vars >>? fun () -> error_unexpected_annot loc types >>? fun () -> get_one_annot loc fields let extract_field_annot : Script.node -> (Script.node * field_annot option) tzresult = function | Prim (loc, prim, args, annot) -> let rec extract_first acc = function | [] -> None, annot | s :: rest -> if Compare.Int.(String.length s > 0) && Compare.Char.(s.[0] = '%') then Some s, List.rev_append acc rest else extract_first (s :: acc) rest in let field_annot, annot = extract_first [] annot in let field_annot = match field_annot with | None -> None | Some field_annot -> Some (`Field_annot (String.sub field_annot 1 (String.length field_annot - 1))) in ok (Prim (loc, prim, args, annot), field_annot) | expr -> ok (expr, None) let check_correct_field : field_annot option -> field_annot option -> unit tzresult = fun f1 f2 -> match f1, f2 with | None, _ | _, None -> ok () | Some `Field_annot s1, Some `Field_annot s2 -> if String.equal s1 s2 then ok () else error (Inconsistent_field_annotations ("%" ^ s1, "%" ^ s2)) let parse_var_annot : int -> ?default:var_annot option -> string list -> var_annot option tzresult = fun loc ?default annot -> parse_annots loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc types >>? fun () -> error_unexpected_annot loc fields >>? fun () -> get_one_annot loc vars >|? 
function | Some _ as a -> a | None -> match default with | Some a -> a | None -> None let split_last_dot = function | None -> None, None | Some `Field_annot s -> match String.rindex_opt s '.' with | None -> None, Some (`Field_annot s) | Some i -> let s1 = String.sub s 0 i in let s2 = String.sub s (i + 1) (String.length s - i - 1) in let f = if Compare.String.equal s2 "car" || Compare.String.equal s2 "cdr" then None else Some (`Field_annot s2) in Some (`Var_annot s1), f let common_prefix v1 v2 = match v1, v2 with | Some (`Var_annot s1), Some (`Var_annot s2) when Compare.String.equal s1 s2 -> v1 | Some _, None -> v1 | None, Some _ -> v2 | _, _ -> None let parse_constr_annot : int -> ?if_special_first:field_annot option -> ?if_special_second:field_annot option -> string list -> (var_annot option * type_annot option * field_annot option * field_annot option) tzresult = fun loc ?if_special_first ?if_special_second annot -> parse_annots ~allow_special_field:true loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> get_one_annot loc vars >>? fun v -> get_one_annot loc types >>? fun t -> get_two_annot loc fields >>? fun (f1, f2) -> begin match if_special_first, f1 with | Some special_var, Some `Field_annot "@" -> ok (split_last_dot special_var) | None, Some `Field_annot "@" -> error (Unexpected_annotation loc) | _, _ -> ok (v, f1) end >>? fun (v1, f1) -> begin match if_special_second, f2 with | Some special_var, Some `Field_annot "@" -> ok (split_last_dot special_var) | None, Some `Field_annot "@" -> error (Unexpected_annotation loc) | _, _ -> ok (v, f2) end >|? fun (v2, f2) -> let v = match v with | None -> common_prefix v1 v2 | Some _ -> v in (v, t, f1, f2) let parse_two_var_annot : int -> string list -> (var_annot option * var_annot option) tzresult = fun loc annot -> parse_annots loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc types >>? fun () -> error_unexpected_annot loc fields >>? 
fun () -> get_two_annot loc vars let parse_destr_annot : int -> string list -> default_accessor:field_annot option -> field_name:field_annot option -> pair_annot:var_annot option -> value_annot:var_annot option -> (var_annot option * field_annot option) tzresult = fun loc annot ~default_accessor ~field_name ~pair_annot ~value_annot -> parse_annots loc ~allow_special_var:true annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc types >>? fun () -> get_one_annot loc vars >>? fun v -> get_one_annot loc fields >|? fun f -> let default = gen_access_annot pair_annot field_name ~default:default_accessor in let v = match v with | Some `Var_annot "%" -> field_to_var_annot field_name | Some `Var_annot "%%" -> default | Some _ -> v | None -> value_annot in (v, f) let parse_entrypoint_annot : int -> ?default:var_annot option -> string list -> (var_annot option * field_annot option) tzresult = fun loc ?default annot -> parse_annots loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc types >>? fun () -> get_one_annot loc fields >>? fun f -> get_one_annot loc vars >|? function | Some _ as a -> (a, f) | None -> match default with | Some a -> (a, f) | None -> (None, f) let parse_var_type_annot : int -> string list -> (var_annot option * type_annot option) tzresult = fun loc annot -> parse_annots loc annot >>? classify_annot loc >>? fun (vars, types, fields) -> error_unexpected_annot loc fields >>? fun () -> get_one_annot loc vars >>? fun v -> get_one_annot loc types >|? fun t -> (v, t)
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
dune
(executable (name test) (modules test) (libraries functoria.test fmt mirage)) (rule (targets key_gen.ml info_gen.ml main.ml) (action (run ./test.exe))) (rule (alias runtest) (package mirage) (action (diff key_gen.ml.expected key_gen.ml))) (rule (alias runtest) (package mirage) (action (diff info_gen.ml.expected info_gen.ml))) (rule (alias runtest) (package mirage) (action (diff main.ml.expected main.ml)))
dune
(library (name fleche) (public_name coq-lsp.fleche) (libraries coq lang fleche_waterproof))
tx_rollup_repr.mli
(** This module defines identifiers for transaction only rollup (or tx rollup). It also specifies how to compute originated tx rollup's hash from origination nonce. *) (** A specialized Blake2B implementation for hashing tx_rollup identifiers with "txr1" as a base58 prefix *) module Hash : sig val rollup_hash : string include S.HASH end type t = private Hash.t type tx_rollup = t include Compare.S with type t := t (** [in_memory_size tx_rollup] returns the number of bytes [tx_rollup] uses in RAM. *) val in_memory_size : t -> Cache_memory_helpers.sint val to_b58check : t -> string val of_b58check : string -> t tzresult val of_b58check_opt : string -> t option val pp : Format.formatter -> t -> unit val encoding : t Data_encoding.t (** [originated_tx_rollup nonce] is the tx_rollup address originated from [nonce]. See [Origination_nonce.t] for more information. *) val originated_tx_rollup : Origination_nonce.t -> t val rpc_arg : t RPC_arg.arg module Index : Storage_description.INDEX with type t = t (** The entrypoint a layer-1 contract can use to deposit Michelson tickets into a transaction rollup. *) val deposit_entrypoint : Entrypoint_repr.t module Set : Set.S with type elt = tx_rollup module Map : Map.S with type key = tx_rollup
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2021 Marigold <contact@marigold.dev> *) (* Copyright (c) 2021 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
minunit.h
/** * minunit.h * Written by Michael Barker and released to the public domain, * as explained at http://creativecommons.org/publicdomain/zero/1.0/ */ #ifndef MINUNIT_H #define MINUNIT_H #include <math.h> #include <inttypes.h> #include <stdbool.h> struct mu_result { char* test; char* message; }; #define mu_assert(message, test) \ do { \ if (!(test)) \ return message; \ } while (0) #define mu_run_test(name) \ do { \ char *message = name(); \ tests_run++; \ if (message) { \ struct mu_result r; \ r.test = #name; \ r.message = message; \ return r; \ } \ } while (0) #define mu_ok \ do { \ struct mu_result r; \ r.test = 0; \ r.message = 0; \ return r; \ } while (0) extern int tests_run; bool compare_double(double a, double b, double delta); bool compare_int64(int64_t a, int64_t b); #endif
/** * minunit.h * Written by Michael Barker and released to the public domain, * as explained at http://creativecommons.org/publicdomain/zero/1.0/ */
michelson_v1_primitives.mli
type error += Unknown_primitive_name of string (* `Permanent *) type error += Invalid_case of string (* `Permanent *) type error += | Invalid_primitive_name of string Micheline.canonical * Micheline.canonical_location (* `Permanent *) type prim = | K_parameter | K_storage | K_code | D_False | D_Elt | D_Left | D_None | D_Pair | D_Right | D_Some | D_True | D_Unit | I_PACK | I_UNPACK | I_BLAKE2B | I_SHA256 | I_SHA512 | I_ABS | I_ADD | I_AMOUNT | I_AND | I_BALANCE | I_CAR | I_CDR | I_CHAIN_ID | I_CHECK_SIGNATURE | I_COMPARE | I_CONCAT | I_CONS | I_CREATE_ACCOUNT | I_CREATE_CONTRACT | I_IMPLICIT_ACCOUNT | I_DIP | I_DROP | I_DUP | I_EDIV | I_EMPTY_BIG_MAP | I_EMPTY_MAP | I_EMPTY_SET | I_EQ | I_EXEC | I_APPLY | I_FAILWITH | I_GE | I_GET | I_GT | I_HASH_KEY | I_IF | I_IF_CONS | I_IF_LEFT | I_IF_NONE | I_INT | I_LAMBDA | I_LE | I_LEFT | I_LOOP | I_LSL | I_LSR | I_LT | I_MAP | I_MEM | I_MUL | I_NEG | I_NEQ | I_NIL | I_NONE | I_NOT | I_NOW | I_OR | I_PAIR | I_PUSH | I_RIGHT | I_SIZE | I_SOME | I_SOURCE | I_SENDER | I_SELF | I_SLICE | I_STEPS_TO_QUOTA | I_SUB | I_SWAP | I_TRANSFER_TOKENS | I_SET_DELEGATE | I_UNIT | I_UPDATE | I_XOR | I_ITER | I_LOOP_LEFT | I_ADDRESS | I_CONTRACT | I_ISNAT | I_CAST | I_RENAME | I_DIG | I_DUG | T_bool | T_contract | T_int | T_key | T_key_hash | T_lambda | T_list | T_map | T_big_map | T_nat | T_option | T_or | T_pair | T_set | T_signature | T_string | T_bytes | T_mutez | T_timestamp | T_unit | T_operation | T_address | T_chain_id val prim_encoding : prim Data_encoding.encoding val string_of_prim : prim -> string val prim_of_string : string -> prim tzresult val prims_of_strings : string Micheline.canonical -> prim Micheline.canonical tzresult val strings_of_prims : prim Micheline.canonical -> string Micheline.canonical
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
mp_change_user.ml
let build_change_user ~handshake ~user ~password ~databasename ~charset_number ~auth_plugin_name = let user = Mp_string.make_null_terminated_string user in let length_user = String.length user in let scramble_buff = Bitstring.concat [handshake.Mp_handshake.scramble_buff_1; handshake.Mp_handshake.scramble_buff_2] in let scramble_buff = Bitstring.string_of_bitstring scramble_buff in let credential = if (String.length password > 0) then ( (* /!\ : length coded binary <= 250 bytes ?? *) let encoded_password = Mp_authentication.encode_client_password scramble_buff password in let length_encoded_password = String.length encoded_password in let c = Char.chr length_encoded_password in (String.make 1 c) ^ encoded_password ) else ( String.make 1 '\x00' ) in let length_credential = String.length credential in let db = Bitstring.bitstring_of_string (Mp_string.make_null_terminated_string databasename) in let length_db = Bitstring.bitstring_length db in let plugin = Mp_string.make_null_terminated_string auth_plugin_name in let length_plugin = String.length plugin in let%bitstring bits = {| user : length_user*8 : string; credential : length_credential*8 : string; db : length_db : bitstring; charset_number : 2*8 : int, unsigned, bigendian; plugin : length_plugin*8 : string |} in bits
example_1.ml
module E = Equinoxe_cohttp open Lwt.Syntax let token = "Your token" let api = E.create ~token () let get_project_id_from name = let* projects = E.Project.get_all api in match List.find_opt (fun E.Project.{ name = name'; _ } -> name = name') projects with | Some project -> Lwt.return project.id | None -> Lwt.fail_with (Format.sprintf "get_project_id: %S not found" name) let get_project_device_id project_id = E.Device.get_all_from_project api ~id:project_id let create_device project_id = let open E.Device in let builder = build ~hostname:"friendly-api-test" ~plan:C3_small_x86 ~location:Amsterdam ~os:Debian_10 () in let* config = create api ~id:project_id builder in Lwt.return config.id let wait_for state machine_id = let rec check () = let* device = E.Device.get_from api ~id:machine_id in if device.state = state then Lwt.return () else match device.state with | E.State.Active -> Format.printf "\nMachine is up!@."; check () | s -> Format.printf "\rCheck status (%s) after sleeping 10 sec." (E.State.to_string s); Format.print_flush (); Unix.sleep 10; check () in check () let get_ip machine_id = let* config = E.Device.get_from api ~id:machine_id in Lwt.return E.Device.(config.ips) let destroy_machine machine_id = E.Device.delete api ~id:machine_id () let deploy_wait_stop () = let* id = get_project_id_from "testing" in let* machine_id = create_device id in Lwt.finalize (fun () -> let () = Format.printf "Machine created.@." in let* _state = wait_for E.State.Active machine_id in let* ips = get_ip machine_id in let () = match ips with | ip :: _ -> Format.printf "Ip is [%s]. Sleep for 60 sec.@." ip.address | _ -> Format.printf "IP not found.@." 
in let* () = Format.printf "Turn machine off@."; E.Device.execute_action_on api ~id:machine_id ~action:E.Device.Power_off in let* () = wait_for E.State.Inactive machine_id in let* () = Format.printf "Turn machine on.@."; E.Device.execute_action_on api ~id:machine_id ~action:E.Device.Power_on in let* () = wait_for E.State.Active machine_id in let* () = Format.printf "Reboot@."; E.Device.execute_action_on api ~id:machine_id ~action:E.Device.Reboot in let* () = wait_for E.State.Active machine_id in Lwt_unix.sleep 60.0) (fun () -> destroy_machine machine_id) let () = match Lwt_main.run (deploy_wait_stop ()) with | () -> Format.printf "Machine destroyed!@." | exception e -> Format.printf "Error with: %s@." (Printexc.to_string e)
(*****************************************************************************) (* Open Source License *) (* Copyright (c) 2021-present Étienne Marais <etienne@maiste.fr> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
zeroinstall_windows.ml
type wow = | KEY_WOW64_NONE (* 0 *) | KEY_WOW64_32KEY (* 1 *) | KEY_WOW64_64KEY (* 2 *) let () = ignore Callback.register external win_get_appdata : unit -> string = "caml_win_get_appdata" external win_get_local_appdata : unit -> string = "caml_win_get_local_appdata" external win_get_common_appdata : unit -> string = "caml_win_get_common_appdata" external win_read_registry_string : string -> string -> wow -> string = "caml_win_read_registry_string" external win_read_registry_int : string -> string -> wow -> int = "caml_win_read_registry_int"
(* Copyright (C) 2019, Thomas Leonard See the README file for details, or visit http://0install.net. *)
dune
(executables (names convert) (libraries core core_unix.command_unix hardcaml_of_verilog) (preprocess (pps ppx_jane)))
baking_commands_registration.ml
let () = Client_commands.register Protocol.hash @@ fun _network -> List.map (Clic.map_command (new Protocol_client_context.wrap_full)) @@ Baking_commands.delegate_commands ()
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2020 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
script_ir_translator.ml
open Alpha_context open Micheline open Script open Script_typed_ir open Script_tc_errors open Script_ir_annot module Typecheck_costs = Michelson_v1_gas.Cost_of.Typechecking module Unparse_costs = Michelson_v1_gas.Cost_of.Unparse type ex_comparable_ty = Ex_comparable_ty : 'a comparable_ty -> ex_comparable_ty type ex_ty = Ex_ty : 'a ty -> ex_ty type ex_stack_ty = Ex_stack_ty : 'a stack_ty -> ex_stack_ty type tc_context = | Lambda : tc_context | Dip : 'a stack_ty * tc_context -> tc_context | Toplevel : { storage_type : 'sto ty ; param_type : 'param ty } -> tc_context type unparsing_mode = Optimized | Readable type type_logger = int -> (Script.expr * Script.annot) list -> (Script.expr * Script.annot) list -> unit let add_dip ty annot prev = match prev with | Lambda | Toplevel _ -> Dip (Item_t (ty, Empty_t, annot), prev) | Dip (stack, _) -> Dip (Item_t (ty, stack, annot), prev) (* ---- Type size accounting ------------------------------------------------*) (* TODO include annot in size ? *) let comparable_type_size : type t. t comparable_ty -> int = fun ty -> (* No wildcard to force the update when comparable_ty chages. *) match ty with | Int_key _ -> 1 | Nat_key _ -> 1 | String_key _ -> 1 | Bytes_key _ -> 1 | Mutez_key _ -> 1 | Bool_key _ -> 1 | Key_hash_key _ -> 1 | Timestamp_key _ -> 1 | Address_key _ -> 1 (* TODO include annot in size ? *) let rec type_size : type t. 
t ty -> int = fun ty -> match ty with | Unit_t _ -> 1 | Int_t _ -> 1 | Nat_t _ -> 1 | Signature_t _ -> 1 | Bytes_t _ -> 1 | String_t _ -> 1 | Mutez_t _ -> 1 | Key_hash_t _ -> 1 | Key_t _ -> 1 | Timestamp_t _ -> 1 | Address_t _ -> 1 | Bool_t _ -> 1 | Operation_t _ -> 1 | Pair_t ((l, _, _), (r, _, _), _) -> 1 + type_size l + type_size r | Union_t ((l, _), (r, _), _) -> 1 + type_size l + type_size r | Lambda_t (arg, ret, _) -> 1 + type_size arg + type_size ret | Option_t ((t,_), _, _) -> 1 + type_size t | List_t (t, _) -> 1 + type_size t | Set_t (k, _) -> 1 + comparable_type_size k | Map_t (k, v, _) -> 1 + comparable_type_size k + type_size v | Big_map_t (k, v, _) -> 1 + comparable_type_size k + type_size v | Contract_t (arg, _) -> 1 + type_size arg let rec type_size_of_stack_head : type st. st stack_ty -> up_to:int -> int = fun stack ~up_to -> match stack with | Empty_t -> 0 | Item_t (head, tail, _annot) -> if Compare.Int.(up_to > 0) then Compare.Int.max (type_size head) (type_size_of_stack_head tail ~up_to:(up_to - 1)) else 0 (* This is the depth of the stack to inspect for sizes overflow. We only need to check the produced types that can be larger than the arguments. That's why Swap is 0 for instance as no type grows. Constant sized types are not checked: it is assumed they are lower than the bound (otherwise every program would be rejected). *) let number_of_generated_growing_types : type b a. 
(b, a) instr -> int = function | Drop -> 0 | Dup -> 0 | Swap -> 0 | Const _ -> 1 | Cons_pair -> 1 | Car -> 0 | Cdr -> 0 | Cons_some -> 1 | Cons_none _ -> 1 | If_none _ -> 0 | Left -> 0 | Right -> 0 | If_left _ -> 0 | Cons_list -> 1 | Nil -> 1 | If_cons _ -> 0 | List_map _ -> 1 | List_size -> 0 | List_iter _ -> 1 | Empty_set _ -> 1 | Set_iter _ -> 0 | Set_mem -> 0 | Set_update -> 0 | Set_size -> 0 | Empty_map _ -> 1 | Map_map _ -> 1 | Map_iter _ -> 1 | Map_mem -> 0 | Map_get -> 0 | Map_update -> 0 | Map_size -> 0 | Big_map_get -> 0 | Big_map_update -> 0 | Big_map_mem -> 0 | Concat -> 0 | Add_seconds_to_timestamp -> 0 | Add_timestamp_to_seconds -> 0 | Sub_timestamp_seconds -> 0 | Diff_timestamps -> 0 | Add_tez -> 0 | Sub_tez -> 0 | Mul_teznat -> 0 | Mul_nattez -> 0 | Ediv_teznat -> 0 | Ediv_tez -> 0 | Or -> 0 | And -> 0 | Xor -> 0 | Not -> 0 | Is_nat -> 0 | Neg_nat -> 0 | Neg_int -> 0 | Abs_int -> 0 | Int_nat -> 0 | Add_intint -> 0 | Add_intnat -> 0 | Add_natint -> 0 | Add_natnat -> 0 | Sub_int -> 0 | Mul_intint -> 0 | Mul_intnat -> 0 | Mul_natint -> 0 | Mul_natnat -> 0 | Ediv_intint -> 0 | Ediv_intnat -> 0 | Ediv_natint -> 0 | Ediv_natnat -> 0 | Lsl_nat -> 0 | Lsr_nat -> 0 | Or_nat -> 0 | And_nat -> 0 | And_int_nat -> 0 | Xor_nat -> 0 | Not_nat -> 0 | Not_int -> 0 | Seq _ -> 0 | If _ -> 0 | Loop _ -> 0 | Loop_left _ -> 0 | Dip _ -> 0 | Exec -> 0 | Lambda _ -> 1 | Failwith _ -> 1 | Nop -> 0 | Compare _ -> 1 | Eq -> 0 | Neq -> 0 | Lt -> 0 | Gt -> 0 | Le -> 0 | Ge -> 0 | Address -> 0 | Contract _ -> 1 | Transfer_tokens -> 1 | Create_account -> 0 | Implicit_account -> 0 | Create_contract _ -> 1 | Now -> 0 | Balance -> 0 | Check_signature -> 0 | Hash_key -> 0 | Blake2b -> 0 | Sha256 -> 0 | Sha512 -> 0 | Steps_to_quota -> 0 | Source -> 0 | Sender -> 0 | Self _ -> 1 | Amount -> 0 | Set_delegate -> 0 | Pack _ -> 0 | Unpack _ -> 1 (* ---- Error helpers -------------------------------------------------------*) let location = function | Prim (loc, _, _, _) | Int (loc, _) | 
String (loc, _) | Bytes (loc, _) | Seq (loc, _) -> loc let kind = function | Int _ -> Int_kind | String _ -> String_kind | Bytes _ -> Bytes_kind | Prim _ -> Prim_kind | Seq _ -> Seq_kind let namespace = function | K_parameter | K_storage | K_code -> Keyword_namespace | D_False | D_Elt | D_Left | D_None | D_Pair | D_Right | D_Some | D_True | D_Unit -> Constant_namespace | I_PACK | I_UNPACK | I_BLAKE2B | I_SHA256 | I_SHA512 | I_ABS | I_ADD | I_AMOUNT | I_AND | I_BALANCE | I_CAR | I_CDR | I_CHECK_SIGNATURE | I_COMPARE | I_CONCAT | I_CONS | I_CREATE_ACCOUNT | I_CREATE_CONTRACT | I_IMPLICIT_ACCOUNT | I_DIP | I_DROP | I_DUP | I_EDIV | I_EMPTY_MAP | I_EMPTY_SET | I_EQ | I_EXEC | I_FAILWITH | I_GE | I_GET | I_GT | I_HASH_KEY | I_IF | I_IF_CONS | I_IF_LEFT | I_IF_NONE | I_INT | I_LAMBDA | I_LE | I_LEFT | I_LOOP | I_LSL | I_LSR | I_LT | I_MAP | I_MEM | I_MUL | I_NEG | I_NEQ | I_NIL | I_NONE | I_NOT | I_NOW | I_OR | I_PAIR | I_PUSH | I_RIGHT | I_SIZE | I_SOME | I_SOURCE | I_SENDER | I_SELF | I_STEPS_TO_QUOTA | I_SUB | I_SWAP | I_TRANSFER_TOKENS | I_SET_DELEGATE | I_UNIT | I_UPDATE | I_XOR | I_ITER | I_LOOP_LEFT | I_ADDRESS | I_CONTRACT | I_ISNAT | I_CAST | I_RENAME -> Instr_namespace | T_bool | T_contract | T_int | T_key | T_key_hash | T_lambda | T_list | T_map | T_big_map | T_nat | T_option | T_or | T_pair | T_set | T_signature | T_string | T_bytes | T_mutez | T_timestamp | T_unit | T_operation | T_address -> Type_namespace let unexpected expr exp_kinds exp_ns exp_prims = match expr with | Int (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Int_kind) | String (loc, _ ) -> Invalid_kind (loc, Prim_kind :: exp_kinds, String_kind) | Bytes (loc, _ ) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Bytes_kind) | Seq (loc, _) -> Invalid_kind (loc, Prim_kind :: exp_kinds, Seq_kind) | Prim (loc, name, _, _) -> match namespace name, exp_ns with | Type_namespace, Type_namespace | Instr_namespace, Instr_namespace | Constant_namespace, Constant_namespace -> Invalid_primitive (loc, 
exp_prims, name) | ns, _ -> Invalid_namespace (loc, name, exp_ns, ns) let check_kind kinds expr = let kind = kind expr in if List.mem kind kinds then return_unit else let loc = location expr in fail (Invalid_kind (loc, kinds, kind)) (* ---- Sets and Maps -------------------------------------------------------*) let compare_comparable : type a. a comparable_ty -> a -> a -> int = fun kind x y -> match kind with | String_key _ -> Compare.String.compare x y | Bool_key _ -> Compare.Bool.compare x y | Mutez_key _ -> Tez.compare x y | Key_hash_key _ -> Signature.Public_key_hash.compare x y | Int_key _ -> let res = (Script_int.compare x y) in if Compare.Int.(res = 0) then 0 else if Compare.Int.(res > 0) then 1 else -1 | Nat_key _ -> let res = (Script_int.compare x y) in if Compare.Int.(res = 0) then 0 else if Compare.Int.(res > 0) then 1 else -1 | Timestamp_key _ -> Script_timestamp.compare x y | Address_key _ -> Contract.compare x y | Bytes_key _ -> MBytes.compare x y let empty_set : type a. a comparable_ty -> a set = fun ty -> let module OPS = Set.Make (struct type t = a let compare = compare_comparable ty end) in (module struct type elt = a module OPS = OPS let boxed = OPS.empty let size = 0 end) let set_update : type a. a -> bool -> a set -> a set = fun v b (module Box) -> (module struct type elt = a module OPS = Box.OPS let boxed = if b then Box.OPS.add v Box.boxed else Box.OPS.remove v Box.boxed let size = let mem = Box.OPS.mem v Box.boxed in if mem then if b then Box.size else Box.size - 1 else if b then Box.size + 1 else Box.size end) let set_mem : type elt. elt -> elt set -> bool = fun v (module Box) -> Box.OPS.mem v Box.boxed let set_fold : type elt acc. (elt -> acc -> acc) -> elt set -> acc -> acc = fun f (module Box) -> Box.OPS.fold f Box.boxed let set_size : type elt. elt set -> Script_int.n Script_int.num = fun (module Box) -> Script_int.(abs (of_int Box.size)) let map_key_ty : type a b. 
(a, b) map -> a comparable_ty = fun (module Box) -> Box.key_ty let empty_map : type a b. a comparable_ty -> (a, b) map = fun ty -> let module OPS = Map.Make (struct type t = a let compare = compare_comparable ty end) in (module struct type key = a type value = b let key_ty = ty module OPS = OPS let boxed = (OPS.empty, 0) end) let map_get : type key value. key -> (key, value) map -> value option = fun k (module Box) -> Box.OPS.find_opt k (fst Box.boxed) let map_update : type a b. a -> b option -> (a, b) map -> (a, b) map = fun k v (module Box) -> (module struct type key = a type value = b let key_ty = Box.key_ty module OPS = Box.OPS let boxed = let (map, size) = Box.boxed in let contains = Box.OPS.mem k map in match v with | Some v -> (Box.OPS.add k v map, size + if contains then 0 else 1) | None -> (Box.OPS.remove k map, size - if contains then 1 else 0) end) let map_set : type a b. a -> b -> (a, b) map -> (a, b) map = fun k v (module Box) -> (module struct type key = a type value = b let key_ty = Box.key_ty module OPS = Box.OPS let boxed = let (map, size) = Box.boxed in (Box.OPS.add k v map, if Box.OPS.mem k map then size else size + 1) end) let map_mem : type key value. key -> (key, value) map -> bool = fun k (module Box) -> Box.OPS.mem k (fst Box.boxed) let map_fold : type key value acc. (key -> value -> acc -> acc) -> (key, value) map -> acc -> acc = fun f (module Box) -> Box.OPS.fold f (fst Box.boxed) let map_size : type key value. (key, value) map -> Script_int.n Script_int.num = fun (module Box) -> Script_int.(abs (of_int (snd Box.boxed))) (* ---- Unparsing (Typed IR -> Untyped expressions) of types -----------------*) let ty_of_comparable_ty : type a. 
(* Tail of [ty_of_comparable_ty] (the [let] header is above this chunk):
   injects each comparable-key witness into the corresponding full [ty]
   witness, carrying the type annotation along unchanged. *)
a comparable_ty -> a ty = function
  | Int_key tname -> Int_t tname
  | Nat_key tname -> Nat_t tname
  | String_key tname -> String_t tname
  | Bytes_key tname -> Bytes_t tname
  | Mutez_key tname -> Mutez_t tname
  | Bool_key tname -> Bool_t tname
  | Key_hash_key tname -> Key_hash_t tname
  | Timestamp_key tname -> Timestamp_t tname
  | Address_key tname -> Address_t tname

(** [unparse_comparable_ty kt] rebuilds the Micheline node for a comparable
    key type, using the dummy location [-1] and restoring the type
    annotation on the primitive. *)
let unparse_comparable_ty : type a. a comparable_ty -> Script.node = function
  | Int_key tname -> Prim (-1, T_int, [], unparse_type_annot tname)
  | Nat_key tname -> Prim (-1, T_nat, [], unparse_type_annot tname)
  | String_key tname -> Prim (-1, T_string, [], unparse_type_annot tname)
  | Bytes_key tname -> Prim (-1, T_bytes, [], unparse_type_annot tname)
  | Mutez_key tname -> Prim (-1, T_mutez, [], unparse_type_annot tname)
  | Bool_key tname -> Prim (-1, T_bool, [], unparse_type_annot tname)
  | Key_hash_key tname -> Prim (-1, T_key_hash, [], unparse_type_annot tname)
  | Timestamp_key tname -> Prim (-1, T_timestamp, [], unparse_type_annot tname)
  | Address_key tname -> Prim (-1, T_address, [], unparse_type_annot tname)

(* [add_field_annot a var node] appends the unparsed field annotation [a]
   and variable annotation [var] to a primitive node's annotation list;
   any non-[Prim] node is returned unchanged. *)
let add_field_annot a var = function
  | Prim (loc, prim, args, annots) ->
      Prim (loc, prim, args,
            annots @ unparse_field_annot a @ unparse_var_annot var )
  | expr -> expr

(** [unparse_ty_no_lwt ctxt ty] reconstructs the Micheline representation of
    the type witness [ty], charging gas in [ctxt] for each step
    ([Unparse_costs.cycle] per recursive call plus a per-primitive cost).
    Field and variable annotations of pairs, unions and options are
    re-attached to the corresponding argument nodes. *)
let rec unparse_ty_no_lwt
  : type a. context -> a ty -> (Script.node * context) tzresult
  = fun ctxt ty ->
    Gas.consume ctxt Unparse_costs.cycle >>? fun ctxt ->
    (* Local [return]: wrap the primitive node and charge its unparsing
       cost (a function of arity and annotations). *)
    let return ctxt (name, args, annot) =
      let result = Prim (-1, name, args, annot) in
      Gas.consume ctxt (Unparse_costs.prim_cost (List.length args) annot) >>? fun ctxt ->
      ok (result, ctxt) in
    match ty with
    | Unit_t tname -> return ctxt (T_unit, [], unparse_type_annot tname)
    | Int_t tname -> return ctxt (T_int, [], unparse_type_annot tname)
    | Nat_t tname -> return ctxt (T_nat, [], unparse_type_annot tname)
    | String_t tname -> return ctxt (T_string, [], unparse_type_annot tname)
    | Bytes_t tname -> return ctxt (T_bytes, [], unparse_type_annot tname)
    | Mutez_t tname -> return ctxt (T_mutez, [], unparse_type_annot tname)
    | Bool_t tname -> return ctxt (T_bool, [], unparse_type_annot tname)
    | Key_hash_t tname -> return ctxt (T_key_hash, [], unparse_type_annot tname)
    | Key_t tname -> return ctxt (T_key, [], unparse_type_annot tname)
    | Timestamp_t tname -> return ctxt (T_timestamp, [], unparse_type_annot tname)
    | Address_t tname -> return ctxt (T_address, [], unparse_type_annot tname)
    | Signature_t tname -> return ctxt (T_signature, [], unparse_type_annot tname)
    | Operation_t tname -> return ctxt (T_operation, [], unparse_type_annot tname)
    | Contract_t (ut, tname) ->
        unparse_ty_no_lwt ctxt ut >>? fun (t, ctxt) ->
        return ctxt (T_contract, [ t ], unparse_type_annot tname)
    | Pair_t ((utl, l_field, l_var), (utr, r_field, r_var), tname) ->
        let annot = unparse_type_annot tname in
        unparse_ty_no_lwt ctxt utl >>? fun (utl, ctxt) ->
        let tl = add_field_annot l_field l_var utl in
        unparse_ty_no_lwt ctxt utr >>? fun (utr, ctxt) ->
        let tr = add_field_annot r_field r_var utr in
        return ctxt (T_pair, [ tl; tr ], annot)
    | Union_t ((utl, l_field), (utr, r_field), tname) ->
        (* Unions carry field (constructor) annotations but no variable
           annotations on their arms, hence [None]. *)
        let annot = unparse_type_annot tname in
        unparse_ty_no_lwt ctxt utl >>? fun (utl, ctxt) ->
        let tl = add_field_annot l_field None utl in
        unparse_ty_no_lwt ctxt utr >>? fun (utr, ctxt) ->
        let tr = add_field_annot r_field None utr in
        return ctxt (T_or, [ tl; tr ], annot)
    | Lambda_t (uta, utr, tname) ->
        unparse_ty_no_lwt ctxt uta >>? fun (ta, ctxt) ->
        unparse_ty_no_lwt ctxt utr >>? fun (tr, ctxt) ->
        return ctxt (T_lambda, [ ta; tr ], unparse_type_annot tname)
    | Option_t ((ut, some_field), _none_field, tname) ->
        (* Only the [Some] field annotation survives unparsing; the [None]
           annotation is dropped here. *)
        let annot = unparse_type_annot tname in
        unparse_ty_no_lwt ctxt ut >>? fun (ut, ctxt) ->
        let t = add_field_annot some_field None ut in
        return ctxt (T_option, [ t ], annot)
    | List_t (ut, tname) ->
        unparse_ty_no_lwt ctxt ut >>? fun (t, ctxt) ->
        return ctxt (T_list, [ t ], unparse_type_annot tname)
    | Set_t (ut, tname) ->
        let t = unparse_comparable_ty ut in
        return ctxt (T_set, [ t ], unparse_type_annot tname)
    | Map_t (uta, utr, tname) ->
        let ta = unparse_comparable_ty uta in
        unparse_ty_no_lwt ctxt utr >>? fun (tr, ctxt) ->
        return ctxt (T_map, [ ta; tr ], unparse_type_annot tname)
    | Big_map_t (uta, utr, tname) ->
        let ta = unparse_comparable_ty uta in
        unparse_ty_no_lwt ctxt utr >>? fun (tr, ctxt) ->
        return ctxt (T_big_map, [ ta; tr ], unparse_type_annot tname)

(* Lwt-wrapped variant of [unparse_ty_no_lwt]. *)
let unparse_ty ctxt ty = Lwt.return (unparse_ty_no_lwt ctxt ty)

(* Recursively removes variable annotations (those starting with '@') from
   a Micheline node, keeping all other annotations. *)
let rec strip_var_annots = function
  | Int _ | String _ | Bytes _ as atom -> atom
  | Seq (loc, args) -> Seq (loc, List.map strip_var_annots args)
  | Prim (loc, name, args, annots) ->
      let not_var_annot s = Compare.Char.(String.get s 0 <> '@') in
      let annots = List.filter not_var_annot annots in
      Prim (loc, name, List.map strip_var_annots args, annots)

(* Unparses [ty] for inclusion in an error message, dropping locations and
   variable annotations; a failure to unparse is itself wrapped in
   [Cannot_serialize_error]. *)
let serialize_ty_for_error ctxt ty =
  unparse_ty_no_lwt ctxt ty
  |> record_trace Cannot_serialize_error >|? fun (ty, ctxt) ->
  strip_locations (strip_var_annots ty), ctxt

(* Unparses every item of a stack type, pairing each unparsed type with its
   variable annotation, top of stack first. *)
let rec unparse_stack
  : type a. context -> a stack_ty ->
    ((Script.expr * Script.annot) list * context) tzresult Lwt.t
  = fun ctxt -> function
    | Empty_t -> return ([], ctxt)
    | Item_t (ty, rest, annot) ->
        unparse_ty ctxt ty >>=? fun (uty, ctxt) ->
        unparse_stack ctxt rest >>=? fun (urest, ctxt) ->
        return ((strip_locations uty, unparse_var_annot annot) :: urest, ctxt)

(* As [serialize_ty_for_error], but for whole stack types. *)
let serialize_stack_for_error ctxt stack_ty =
  trace Cannot_serialize_error (unparse_stack ctxt stack_ty)

(* [name_of_ty ty] extracts the type annotation carried at the root of
   [ty], if any (signature continues on the next chunk). *)
let name_of_ty : type a.
a ty -> type_annot option = function
  | Unit_t tname -> tname
  | Int_t tname -> tname
  | Nat_t tname -> tname
  | String_t tname -> tname
  | Bytes_t tname -> tname
  | Mutez_t tname -> tname
  | Bool_t tname -> tname
  | Key_hash_t tname -> tname
  | Key_t tname -> tname
  | Timestamp_t tname -> tname
  | Address_t tname -> tname
  | Signature_t tname -> tname
  | Operation_t tname -> tname
  | Contract_t (_, tname) -> tname
  | Pair_t (_, _, tname) -> tname
  | Union_t (_, _, tname) -> tname
  | Lambda_t (_, _, tname) -> tname
  | Option_t (_, _, tname) -> tname
  | List_t (_, tname) -> tname
  | Set_t (_, tname) -> tname
  | Map_t (_, _, tname) -> tname
  | Big_map_t (_, _, tname) -> tname

(* ---- Equality witnesses --------------------------------------------------*)

(* Runtime proof that two type indexes coincide: matching on [Eq] refines
   ['ta] and ['tb] to the same type in the branch. *)
type ('ta, 'tb) eq = Eq : ('same, 'same) eq

(* Decides equality of two comparable key types, producing an [Eq] witness
   on success and an [Inconsistent_types] error (with both types
   serialized) otherwise.  Annotations are ignored for equality.
   NOTE(review): there is no [Bytes_key _, Bytes_key _] case, so two
   [bytes] keys fall through to the error branch — confirm this is
   intended. *)
let comparable_ty_eq
  : type ta tb.
    context -> ta comparable_ty -> tb comparable_ty ->
    (ta comparable_ty, tb comparable_ty) eq tzresult
  = fun ctxt ta tb -> match ta, tb with
    | Int_key _, Int_key _ -> Ok Eq
    | Nat_key _, Nat_key _ -> Ok Eq
    | String_key _, String_key _ -> Ok Eq
    | Mutez_key _, Mutez_key _ -> Ok Eq
    | Bool_key _, Bool_key _ -> Ok Eq
    | Key_hash_key _, Key_hash_key _ -> Ok Eq
    | Timestamp_key _, Timestamp_key _ -> Ok Eq
    | Address_key _, Address_key _ -> Ok Eq
    | _, _ ->
        serialize_ty_for_error ctxt (ty_of_comparable_ty ta) >>? fun (ta, ctxt) ->
        serialize_ty_for_error ctxt (ty_of_comparable_ty tb) >>? fun (tb, _ctxt) ->
        error (Inconsistent_types (ta, tb))

(* Adds an [Inconsistent_types] item to the error trace of the wrapped
   computation; serialization of the two types is deferred to the failure
   path via [record_trace_eval]. *)
let record_inconsistent ctxt ta tb =
  record_trace_eval (fun () ->
      serialize_ty_for_error ctxt ta >>? fun (ta, ctxt) ->
      serialize_ty_for_error ctxt tb >|? fun (tb, _ctxt) ->
      Inconsistent_types (ta, tb))

(* Same, but for [Inconsistent_type_annotations] at source location [loc]. *)
let record_inconsistent_type_annotations ctxt loc ta tb =
  record_trace_eval (fun () ->
      serialize_ty_for_error ctxt ta >>? fun (ta, ctxt) ->
      serialize_ty_for_error ctxt tb >|? fun (tb, _ctxt) ->
      Inconsistent_type_annotations (loc, ta, tb))

(** [ty_eq ctxt ta tb] structurally decides equality of two full type
    witnesses, consuming gas per constructor compared.  Annotations are not
    part of equality; mismatches are reported as [Inconsistent_types] with
    both offending (sub)types serialized. *)
let rec ty_eq
  : type ta tb.
    context -> ta ty -> tb ty -> ((ta ty, tb ty) eq * context) tzresult
  = fun ctxt ta tb ->
    (* Charge for [nb_args] sub-comparisons and return the witness. *)
    let ok (eq : (ta ty, tb ty) eq) ctxt nb_args :
      ((ta ty, tb ty) eq * context) tzresult =
      Gas.consume ctxt (Typecheck_costs.type_ (2 * nb_args)) >>? fun ctxt ->
      Ok (eq, ctxt) in
    Gas.consume ctxt Typecheck_costs.cycle >>? fun ctxt ->
    match ta, tb with
    | Unit_t _, Unit_t _ -> ok Eq ctxt 0
    | Int_t _, Int_t _ -> ok Eq ctxt 0
    | Nat_t _, Nat_t _ -> ok Eq ctxt 0
    | Key_t _, Key_t _ -> ok Eq ctxt 0
    | Key_hash_t _, Key_hash_t _ -> ok Eq ctxt 0
    | String_t _, String_t _ -> ok Eq ctxt 0
    | Bytes_t _, Bytes_t _ -> ok Eq ctxt 0
    | Signature_t _, Signature_t _ -> ok Eq ctxt 0
    | Mutez_t _, Mutez_t _ -> ok Eq ctxt 0
    | Timestamp_t _, Timestamp_t _ -> ok Eq ctxt 0
    | Address_t _, Address_t _ -> ok Eq ctxt 0
    | Bool_t _, Bool_t _ -> ok Eq ctxt 0
    | Operation_t _, Operation_t _ -> ok Eq ctxt 0
    | Map_t (tal, tar, _), Map_t (tbl, tbr, _) ->
        (comparable_ty_eq ctxt tal tbl >>? fun Eq ->
         ty_eq ctxt tar tbr >>? fun (Eq, ctxt) ->
         (ok Eq ctxt 2)) |>
        record_inconsistent ctxt ta tb
    | Big_map_t (tal, tar, _), Big_map_t (tbl, tbr, _) ->
        (comparable_ty_eq ctxt tal tbl >>? fun Eq ->
         ty_eq ctxt tar tbr >>? fun (Eq, ctxt) ->
         (ok Eq ctxt 2)) |>
        record_inconsistent ctxt ta tb
    | Set_t (ea, _), Set_t (eb, _) ->
        (comparable_ty_eq ctxt ea eb >>? fun Eq ->
         (ok Eq ctxt 1)) |>
        record_inconsistent ctxt ta tb
    | Pair_t ((tal, _, _), (tar, _, _), _),
      Pair_t ((tbl, _, _), (tbr, _, _), _) ->
        (ty_eq ctxt tal tbl >>? fun (Eq, ctxt) ->
         ty_eq ctxt tar tbr >>? fun (Eq, ctxt) ->
         (ok Eq ctxt 2)) |>
        record_inconsistent ctxt ta tb
    | Union_t ((tal, _), (tar, _), _), Union_t ((tbl, _), (tbr, _), _) ->
        (ty_eq ctxt tal tbl >>? fun (Eq, ctxt) ->
         ty_eq ctxt tar tbr >>? fun (Eq, ctxt) ->
         (ok Eq ctxt 2)) |>
        record_inconsistent ctxt ta tb
    | Lambda_t (tal, tar, _), Lambda_t (tbl, tbr, _) ->
        (ty_eq ctxt tal tbl >>? fun (Eq, ctxt) ->
         ty_eq ctxt tar tbr >>? fun (Eq, ctxt) ->
         (ok Eq ctxt 2)) |>
        record_inconsistent ctxt ta tb
    | Contract_t (tal, _), Contract_t (tbl, _) ->
        (ty_eq ctxt tal tbl >>? fun (Eq, ctxt) ->
         (ok Eq ctxt 1)) |>
        record_inconsistent ctxt ta tb
    | Option_t ((tva, _), _, _), Option_t ((tvb, _), _, _) ->
        (ty_eq ctxt tva tvb >>? fun (Eq, ctxt) ->
         (ok Eq ctxt 1)) |>
        record_inconsistent ctxt ta tb
    | List_t (tva, _), List_t (tvb, _) ->
        (ty_eq ctxt tva tvb >>? fun (Eq, ctxt) ->
         (ok Eq ctxt 1)) |>
        record_inconsistent ctxt ta tb
    | _, _ ->
        serialize_ty_for_error ctxt ta >>? fun (ta, ctxt) ->
        serialize_ty_for_error ctxt tb >>? fun (tb, _ctxt) ->
        error (Inconsistent_types (ta, tb))

(* Decides equality of two stack types item by item; [lvl] is the 1-based
   depth used in [Bad_stack_item] error reports. *)
let rec stack_ty_eq
  : type ta tb.
    context -> int -> ta stack_ty -> tb stack_ty ->
    ((ta stack_ty, tb stack_ty) eq * context) tzresult
  = fun ctxt lvl ta tb -> match ta, tb with
    | Item_t (tva, ra, _), Item_t (tvb, rb, _) ->
        ty_eq ctxt tva tvb |>
        record_trace (Bad_stack_item lvl) >>? fun (Eq, ctxt) ->
        stack_ty_eq ctxt (lvl + 1) ra rb >>? fun (Eq, ctxt) ->
        (Ok (Eq, ctxt) :
           ((ta stack_ty, tb stack_ty) eq * context) tzresult)
    | Empty_t, Empty_t -> Ok (Eq, ctxt)
    | _, _ -> error Bad_stack_length

(* Merges two comparable key types already known equal (same type index),
   combining their annotations (continued on the next chunk). *)
let merge_comparable_types
  : type ta. ta comparable_ty -> ta comparable_ty -> ta comparable_ty tzresult
  = fun ta tb -> match ta, tb with
    | Int_key annot_a, Int_key annot_b ->
        merge_type_annot annot_a annot_b >|? fun annot ->
        Int_key annot
    | Nat_key annot_a, Nat_key annot_b ->
        merge_type_annot annot_a annot_b >|? fun annot ->
        Nat_key annot
    | String_key annot_a, String_key annot_b ->
        merge_type_annot annot_a annot_b >|? fun annot ->
        String_key annot
    | Mutez_key annot_a, Mutez_key annot_b ->
        merge_type_annot annot_a annot_b >|? fun annot ->
        Mutez_key annot
    | Bool_key annot_a, Bool_key annot_b ->
        merge_type_annot annot_a annot_b >|? fun annot ->
        Bool_key annot
    | Key_hash_key annot_a, Key_hash_key annot_b ->
        merge_type_annot annot_a annot_b >|? fun annot ->
        Key_hash_key annot
    | Timestamp_key annot_a, Timestamp_key annot_b ->
        merge_type_annot annot_a annot_b >|?
        fun annot ->
        Timestamp_key annot
    | Address_key annot_a, Address_key annot_b ->
        merge_type_annot annot_a annot_b >|? fun annot ->
        Address_key annot
    (* NOTE(review): no [Bytes_key] case here either; a [Bytes_key] pair
       would hit the assertion below — confirm intended. *)
    | _, _ -> assert false (* FIXME: fix injectivity of some types *)

(* Recursively removes ALL annotations from a Micheline node (compare with
   [strip_var_annots], which removes only '@' variable annotations). *)
let rec strip_annotations = function
  | (Int (_,_) as i) -> i
  | (String (_,_) as s) -> s
  | (Bytes (_,_) as s) -> s
  | Prim (loc, prim, args, _) ->
      Prim (loc, prim, List.map strip_annotations args, [])
  | Seq (loc, items) -> Seq (loc, List.map strip_annotations items)

(** [merge_types ctxt loc ty1 ty2] merges two type witnesses with the same
    type index, combining annotations at every level; an annotation clash
    is reported as [Inconsistent_type_annotations] at [loc]. *)
let merge_types :
  type b. context -> Script.location -> b ty -> b ty -> (b ty * context) tzresult =
  let rec help : type a. context -> a ty -> a ty -> (a ty * context) tzresult
    = fun ctxt ty1 ty2 ->
      match ty1, ty2 with
      | Unit_t tn1, Unit_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Unit_t tname, ctxt
      | Int_t tn1, Int_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Int_t tname, ctxt
      | Nat_t tn1, Nat_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Nat_t tname, ctxt
      | Key_t tn1, Key_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Key_t tname, ctxt
      | Key_hash_t tn1, Key_hash_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Key_hash_t tname, ctxt
      | String_t tn1, String_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          String_t tname, ctxt
      | Bytes_t tn1, Bytes_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Bytes_t tname, ctxt
      | Signature_t tn1, Signature_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Signature_t tname, ctxt
      | Mutez_t tn1, Mutez_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Mutez_t tname, ctxt
      | Timestamp_t tn1, Timestamp_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Timestamp_t tname, ctxt
      | Address_t tn1, Address_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Address_t tname, ctxt
      | Bool_t tn1, Bool_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Bool_t tname, ctxt
      | Operation_t tn1, Operation_t tn2 ->
          merge_type_annot tn1 tn2 >|? fun tname ->
          Operation_t tname, ctxt
      | Map_t (tal, tar, tn1), Map_t (tbl, tbr, tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          help ctxt tar tbr >>? fun (value, ctxt) ->
          (* Re-check equality of the merged value type against the
             original before committing. *)
          ty_eq ctxt tar value >>? fun (Eq, ctxt) ->
          merge_comparable_types tal tbl >|? fun tk ->
          Map_t (tk, value, tname), ctxt
      | Big_map_t (tal, tar, tn1), Big_map_t (tbl, tbr, tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          help ctxt tar tbr >>? fun (value, ctxt) ->
          ty_eq ctxt tar value >>? fun (Eq, ctxt) ->
          merge_comparable_types tal tbl >|? fun tk ->
          Big_map_t (tk, value, tname), ctxt
      | Set_t (ea, tn1), Set_t (eb, tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          merge_comparable_types ea eb >|? fun e ->
          Set_t (e, tname), ctxt
      | Pair_t ((tal, l_field1, l_var1), (tar, r_field1, r_var1), tn1),
        Pair_t ((tbl, l_field2, l_var2), (tbr, r_field2, r_var2), tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          merge_field_annot l_field1 l_field2 >>? fun l_field ->
          merge_field_annot r_field1 r_field2 >>? fun r_field ->
          let l_var = merge_var_annot l_var1 l_var2 in
          let r_var = merge_var_annot r_var1 r_var2 in
          help ctxt tal tbl >>? fun (left_ty, ctxt) ->
          help ctxt tar tbr >|? fun (right_ty, ctxt) ->
          Pair_t ((left_ty, l_field, l_var), (right_ty, r_field, r_var), tname),
          ctxt
      | Union_t ((tal, tal_annot), (tar, tar_annot), tn1),
        Union_t ((tbl, tbl_annot), (tbr, tbr_annot), tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          merge_field_annot tal_annot tbl_annot >>? fun left_annot ->
          merge_field_annot tar_annot tbr_annot >>? fun right_annot ->
          help ctxt tal tbl >>? fun (left_ty, ctxt) ->
          help ctxt tar tbr >|? fun (right_ty, ctxt) ->
          Union_t ((left_ty, left_annot), (right_ty, right_annot), tname),
          ctxt
      | Lambda_t (tal, tar, tn1), Lambda_t (tbl, tbr, tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          help ctxt tal tbl >>? fun (left_ty, ctxt) ->
          help ctxt tar tbr >|? fun (right_ty, ctxt) ->
          Lambda_t (left_ty, right_ty, tname), ctxt
      | Contract_t (tal, tn1), Contract_t (tbl, tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          help ctxt tal tbl >|? fun (arg_ty, ctxt) ->
          Contract_t (arg_ty, tname), ctxt
      | Option_t ((tva, some_annot_a), none_annot_a, tn1),
        Option_t ((tvb, some_annot_b), none_annot_b, tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          merge_field_annot some_annot_a some_annot_b >>? fun some_annot ->
          merge_field_annot none_annot_a none_annot_b >>? fun none_annot ->
          help ctxt tva tvb >|? fun (ty, ctxt) ->
          Option_t ((ty, some_annot), none_annot, tname), ctxt
      | List_t (tva, tn1), List_t (tvb, tn2) ->
          merge_type_annot tn1 tn2 >>? fun tname ->
          help ctxt tva tvb >|? fun (ty, ctxt) ->
          List_t (ty, tname), ctxt
      (* Unreachable for equal type indexes given the GADT, modulo the
         injectivity caveat flagged above. *)
      | _, _ -> assert false in
  (fun ctxt loc ty1 ty2 ->
     record_inconsistent_type_annotations ctxt loc ty1 ty2
       (help ctxt ty1 ty2))

(* Merges two stack types of the same shape item by item, merging the item
   types (at location [loc]) and their variable annotations. *)
let merge_stacks
  : type ta. Script.location -> context -> ta stack_ty -> ta stack_ty ->
    (ta stack_ty * context) tzresult
  = fun loc ->
    let rec help : type a. context -> a stack_ty -> a stack_ty ->
      (a stack_ty * context) tzresult
      = fun ctxt stack1 stack2 ->
        match stack1, stack2 with
        | Empty_t, Empty_t -> ok (Empty_t, ctxt)
        | Item_t (ty1, rest1, annot1), Item_t (ty2, rest2, annot2) ->
            let annot = merge_var_annot annot1 annot2 in
            merge_types ctxt loc ty1 ty2 >>? fun (ty, ctxt) ->
            help ctxt rest1 rest2 >|? fun (rest, ctxt) ->
            Item_t (ty, rest, annot), ctxt
    in help

(* ---- Type checker results -------------------------------------------------*)

(* Outcome of typing an instruction sequence from stack ['bef]:
   [Typed] carries the full typed descriptor; [Failed] represents an
   always-failing sequence, polymorphic in its output stack. *)
type 'bef judgement =
  | Typed : ('bef, 'aft) descr -> 'bef judgement
  | Failed : { descr : 'aft. 'aft stack_ty -> ('bef, 'aft) descr } -> 'bef judgement

(* ---- Type checker (Untyped expressions -> Typed IR) ----------------------*)

(* Combinator used by [merge_branches] to rebuild the branching instruction
   from the two typed branch descriptors, for any result stack ['r]. *)
type ('t, 'f, 'b) branch =
  { branch : 'r. ('t, 'r) descr -> ('f, 'r) descr -> ('b, 'r) descr } [@@unboxed]

(* [merge_branches ctxt loc btr bfr b] reconciles the judgements of two
   branches (signature continues on the next chunk). *)
let merge_branches
  : type bef a b.
    context -> int -> a judgement -> b judgement ->
    (a, b, bef) branch ->
    (bef judgement * context) tzresult Lwt.t
  = fun ctxt loc btr bfr { branch } ->
    match btr, bfr with
    | Typed ({ aft = aftbt ; _ } as dbt), Typed ({ aft = aftbf ; _ } as dbf) ->
        (* Both branches typed: their output stacks must be equal; merge
           them (annotations included) and use the merged stack on both
           sides.  On mismatch report [Unmatched_branches] at [loc]. *)
        let unmatched_branches () =
          serialize_stack_for_error ctxt aftbt >>=? fun (aftbt, ctxt) ->
          serialize_stack_for_error ctxt aftbf >>|? fun (aftbf, _ctxt) ->
          Unmatched_branches (loc, aftbt, aftbf) in
        trace_eval unmatched_branches
          (Lwt.return (stack_ty_eq ctxt 1 aftbt aftbf) >>=? fun (Eq, ctxt) ->
           Lwt.return (merge_stacks loc ctxt aftbt aftbf) >>=? fun (merged_stack, ctxt) ->
           return (
             Typed (branch {dbt with aft=merged_stack} {dbf with aft=merged_stack}),
             ctxt))
    | Failed { descr = descrt }, Failed { descr = descrf } ->
        (* Both branches fail: the combined instruction fails too, for any
           requested output stack. *)
        let descr ret = branch (descrt ret) (descrf ret) in
        return (Failed { descr }, ctxt)
    | Typed dbt, Failed { descr = descrf } ->
        (* Instantiate the failing branch at the typed branch's output. *)
        return (Typed (branch dbt (descrf dbt.aft)), ctxt)
    | Failed { descr = descrt }, Typed dbf ->
        return (Typed (branch (descrt dbf.aft) dbf), ctxt)

(** [parse_comparable_ty ctxt node] parses a Micheline node into an
    existentially packed comparable key type, charging gas per node.
    NOTE(review): no [T_bytes] case is visible here, so [bytes] cannot be
    parsed as a comparable key by this function — confirm intended. *)
let rec parse_comparable_ty
  : context -> Script.node -> (ex_comparable_ty * context) tzresult
  = fun ctxt ty ->
    Gas.consume ctxt Typecheck_costs.cycle >>? fun ctxt ->
    Gas.consume ctxt (Typecheck_costs.type_ 0) >>? fun ctxt ->
    match ty with
    | Prim (loc, T_int, [], annot) ->
        parse_type_annot loc annot >|? fun tname ->
        Ex_comparable_ty ( Int_key tname ), ctxt
    | Prim (loc, T_nat, [], annot) ->
        parse_type_annot loc annot >|? fun tname ->
        Ex_comparable_ty ( Nat_key tname ), ctxt
    | Prim (loc, T_string, [], annot) ->
        parse_type_annot loc annot >|? fun tname ->
        Ex_comparable_ty ( String_key tname ), ctxt
    | Prim (loc, T_mutez, [], annot) ->
        parse_type_annot loc annot >|? fun tname ->
        Ex_comparable_ty ( Mutez_key tname ), ctxt
    | Prim (loc, T_bool, [], annot) ->
        parse_type_annot loc annot >|? fun tname ->
        Ex_comparable_ty ( Bool_key tname ), ctxt
    | Prim (loc, T_key_hash, [], annot) ->
        parse_type_annot loc annot >|? fun tname ->
        Ex_comparable_ty ( Key_hash_key tname ), ctxt
    | Prim (loc, T_timestamp, [], annot) ->
        parse_type_annot loc annot >|? fun tname ->
        Ex_comparable_ty ( Timestamp_key tname ), ctxt
    | Prim (loc, T_address, [], annot) ->
        parse_type_annot loc annot >|? fun tname ->
        Ex_comparable_ty ( Address_key tname ), ctxt
    | Prim (loc, (T_int | T_nat
                 | T_string | T_mutez | T_bool
                 | T_key | T_address | T_timestamp as prim), l, _) ->
        (* Known comparable primitive applied to arguments: arity error. *)
        error (Invalid_arity (loc, prim, 0, List.length l))
    | Prim (loc, (T_pair | T_or | T_set | T_map
                 | T_list | T_option  | T_lambda
                 | T_unit | T_signature  | T_contract), _, _) ->
        error (Comparable_type_expected (loc, Micheline.strip_locations ty))
    | expr ->
        error @@ unexpected expr [] Type_namespace
          [ T_int ; T_nat ;
            T_string ; T_mutez ; T_bool ;
            T_key ; T_key_hash ; T_timestamp ]

(** [parse_ty ctxt ~allow_big_map ~allow_operation node] parses a Micheline
    node into an existentially packed full type.  [allow_big_map] permits a
    [big_map] only as the left component of a toplevel storage pair;
    [allow_operation] permits the [operation] type (allowed inside lambdas
    even when forbidden elsewhere).  Gas is charged per node parsed. *)
and parse_ty :
  context ->
  allow_big_map: bool ->
  allow_operation: bool ->
  Script.node -> (ex_ty * context) tzresult
  = fun ctxt ~allow_big_map ~allow_operation node ->
    Gas.consume ctxt Typecheck_costs.cycle >>? fun ctxt ->
    match node with
    | Prim (loc, T_pair,
            [ Prim (big_map_loc, T_big_map, args, map_annot) ; remaining_storage ],
            storage_annot)
      when allow_big_map ->
        (* Special toplevel storage form: [pair (big_map k v) rest]. *)
        begin match args with
          | [ key_ty ; value_ty ] ->
              parse_comparable_ty ctxt key_ty >>? fun (Ex_comparable_ty key_ty, ctxt) ->
              parse_ty ctxt ~allow_big_map:false ~allow_operation value_ty
              >>? fun (Ex_ty value_ty, ctxt) ->
              parse_ty ctxt ~allow_big_map:false ~allow_operation remaining_storage
              >>? fun (Ex_ty remaining_storage, ctxt) ->
              parse_type_annot big_map_loc map_annot >>? fun map_name ->
              parse_composed_type_annot loc storage_annot
              >>? fun (ty_name, map_field, storage_field) ->
              Gas.consume ctxt (Typecheck_costs.type_ 5) >|? fun ctxt ->
              let big_map_ty = Big_map_t (key_ty, value_ty, map_name) in
              Ex_ty (Pair_t ((big_map_ty, map_field, None),
                             (remaining_storage, storage_field, None),
                             ty_name)),
              ctxt
          | args -> error @@ Invalid_arity (big_map_loc, T_big_map, 2, List.length args)
        end
    | Prim (loc, T_unit, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Unit_t ty_name), ctxt
    | Prim (loc, T_int, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Int_t ty_name), ctxt
    | Prim (loc, T_nat, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Nat_t ty_name), ctxt
    | Prim (loc, T_string, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (String_t ty_name), ctxt
    | Prim (loc, T_bytes, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Bytes_t ty_name), ctxt
    | Prim (loc, T_mutez, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Mutez_t ty_name), ctxt
    | Prim (loc, T_bool, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Bool_t ty_name), ctxt
    | Prim (loc, T_key, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Key_t ty_name), ctxt
    | Prim (loc, T_key_hash, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Key_hash_t ty_name), ctxt
    | Prim (loc, T_timestamp, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Timestamp_t ty_name), ctxt
    | Prim (loc, T_address, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Address_t ty_name), ctxt
    | Prim (loc, T_signature, [], annot) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
        Ex_ty (Signature_t ty_name), ctxt
    | Prim (loc, T_operation, [], annot) ->
        if allow_operation then
          parse_type_annot loc annot >>? fun ty_name ->
          Gas.consume ctxt (Typecheck_costs.type_ 0) >|? fun ctxt ->
          Ex_ty (Operation_t ty_name), ctxt
        else
          error (Unexpected_operation loc)
    | Prim (loc, T_contract, [ utl ], annot) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation utl
        >>? fun (Ex_ty tl, ctxt) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 1) >|? fun ctxt ->
        Ex_ty (Contract_t (tl, ty_name)), ctxt
    | Prim (loc, T_pair, [ utl; utr ], annot) ->
        (* Field annotations live on the argument nodes, not the pair. *)
        extract_field_annot utl >>? fun (utl, left_field) ->
        extract_field_annot utr >>? fun (utr, right_field) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation utl
        >>? fun (Ex_ty tl, ctxt) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation utr
        >>? fun (Ex_ty tr, ctxt) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 2) >|? fun ctxt ->
        Ex_ty (Pair_t ((tl, left_field, None), (tr, right_field, None), ty_name)),
        ctxt
    | Prim (loc, T_or, [ utl; utr ], annot) ->
        extract_field_annot utl >>? fun (utl, left_constr) ->
        extract_field_annot utr >>? fun (utr, right_constr) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation utl
        >>? fun (Ex_ty tl, ctxt) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation utr
        >>? fun (Ex_ty tr, ctxt) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 2) >|? fun ctxt ->
        Ex_ty (Union_t ((tl, left_constr), (tr, right_constr), ty_name)), ctxt
    | Prim (loc, T_lambda, [ uta; utr ], annot) ->
        (* [operation] is always legal inside a lambda's domain/codomain. *)
        parse_ty ctxt ~allow_big_map:false ~allow_operation:true uta
        >>? fun (Ex_ty ta, ctxt) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation:true utr
        >>? fun (Ex_ty tr, ctxt) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 2) >|? fun ctxt ->
        Ex_ty (Lambda_t (ta, tr, ty_name)), ctxt
    | Prim (loc, T_option, [ ut ], annot) ->
        extract_field_annot ut >>? fun (ut, some_constr) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation ut
        >>? fun (Ex_ty t, ctxt) ->
        parse_composed_type_annot loc annot >>? fun (ty_name, none_constr, _) ->
        Gas.consume ctxt (Typecheck_costs.type_ 2) >|? fun ctxt ->
        Ex_ty (Option_t ((t, some_constr), none_constr, ty_name)), ctxt
    | Prim (loc, T_list, [ ut ], annot) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation ut
        >>? fun (Ex_ty t, ctxt) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 1) >|? fun ctxt ->
        Ex_ty (List_t (t, ty_name)), ctxt
    | Prim (loc, T_set, [ ut ], annot) ->
        parse_comparable_ty ctxt ut >>? fun (Ex_comparable_ty t, ctxt) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 1) >|? fun ctxt ->
        Ex_ty (Set_t (t, ty_name)), ctxt
    | Prim (loc, T_map, [ uta; utr ], annot) ->
        parse_comparable_ty ctxt uta >>? fun (Ex_comparable_ty ta, ctxt) ->
        parse_ty ctxt ~allow_big_map:false ~allow_operation utr
        >>? fun (Ex_ty tr, ctxt) ->
        parse_type_annot loc annot >>? fun ty_name ->
        Gas.consume ctxt (Typecheck_costs.type_ 2) >|? fun ctxt ->
        Ex_ty (Map_t (ta, tr, ty_name)), ctxt
    | Prim (loc, T_big_map, _, _) ->
        (* Outside the special storage form handled above. *)
        error (Unexpected_big_map loc)
    | Prim (loc, (T_unit | T_signature
                 | T_int | T_nat
                 | T_string | T_bytes | T_mutez | T_bool
                 | T_key | T_key_hash
                 | T_timestamp | T_address as prim), l, _) ->
        error (Invalid_arity (loc, prim, 0, List.length l))
    | Prim (loc, (T_set | T_list | T_option as prim), l, _) ->
        error (Invalid_arity (loc, prim, 1, List.length l))
    | Prim (loc, (T_pair | T_or | T_map | T_lambda | T_contract as prim), l, _) ->
        error (Invalid_arity (loc, prim, 2, List.length l))
    | expr ->
        error @@ unexpected expr [] Type_namespace
          [ T_pair ; T_or ; T_set ; T_map ;
            T_list ; T_option  ; T_lambda ;
            T_unit ; T_signature  ; T_contract ;
            T_int ; T_nat ; T_operation ;
            T_string ; T_bytes ; T_mutez ; T_bool ;
            T_key ; T_key_hash ; T_timestamp ]

(* [check_no_big_map_or_operation loc root] rejects any type containing
   [big_map] or [operation], reporting the error at [loc] (match continues
   on the next chunk). *)
let check_no_big_map_or_operation loc root =
  let rec check : type t. t ty -> unit tzresult = function
    | Big_map_t _ -> error (Unexpected_big_map loc)
    | Operation_t _ -> error (Unexpected_operation loc)
    | Unit_t _ -> ok ()
    | Int_t _ -> ok ()
    | Nat_t _ -> ok ()
    | Signature_t _ -> ok ()
    | String_t _ -> ok ()
    | Bytes_t _ -> ok ()
    | Mutez_t _ -> ok ()
    | Key_hash_t _ -> ok ()
    | Key_t _ -> ok ()
    | Timestamp_t _ -> ok ()
    | Address_t _ -> ok ()
    | Bool_t _ -> ok ()
    | Pair_t ((l_ty, _, _), (r_ty, _, _), _) ->
        check l_ty >>? fun () -> check r_ty
    | Union_t ((l_ty, _), (r_ty, _), _) ->
        check l_ty >>? fun () -> check r_ty
    | Lambda_t (l_ty, r_ty, _) ->
        check l_ty >>?
fun () -> check r_ty | Option_t ((v_ty, _), _, _) -> check v_ty | List_t (elt_ty, _) -> check elt_ty | Set_t (_, _) -> ok () | Map_t (_, elt_ty, _) -> check elt_ty | Contract_t (_, _) -> ok () in check root type ex_script = Ex_script : ('a, 'c) script -> ex_script (* Lwt versions *) let parse_var_annot loc ?default annot = Lwt.return (parse_var_annot loc ?default annot) let parse_constr_annot loc ?if_special_first ?if_special_second annot = Lwt.return (parse_constr_annot loc ?if_special_first ?if_special_second annot) let parse_two_var_annot loc annot = Lwt.return (parse_two_var_annot loc annot) let parse_destr_annot loc annot ~default_accessor ~field_name ~pair_annot ~value_annot = Lwt.return (parse_destr_annot loc annot ~default_accessor ~field_name ~pair_annot ~value_annot) let parse_var_type_annot loc annot = Lwt.return (parse_var_type_annot loc annot) let rec parse_data : type a. ?type_logger: type_logger -> context -> a ty -> Script.node -> (a * context) tzresult Lwt.t = fun ?type_logger ctxt ty script_data -> Lwt.return (Gas.consume ctxt Typecheck_costs.cycle) >>=? fun ctxt -> let error () = Lwt.return (serialize_ty_for_error ctxt ty) >>|? fun (ty, _ctxt) -> Invalid_constant (location script_data, strip_locations script_data, ty) in let traced body = trace_eval error body in let parse_items ?type_logger loc ctxt expr key_type value_type items item_wrapper = let length = List.length items in fold_left_s (fun (last_value, map, ctxt) item -> Lwt.return (Gas.consume ctxt (Typecheck_costs.map_element length)) >>=? fun ctxt -> match item with | Prim (_, D_Elt, [ k; v ], _) -> parse_comparable_data ?type_logger ctxt key_type k >>=? fun (k, ctxt) -> parse_data ?type_logger ctxt value_type v >>=? 
fun (v, ctxt) -> begin match last_value with | Some value -> if Compare.Int.(0 <= (compare_comparable key_type value k)) then if Compare.Int.(0 = (compare_comparable key_type value k)) then fail (Duplicate_map_keys (loc, strip_locations expr)) else fail (Unordered_map_keys (loc, strip_locations expr)) else return_unit | None -> return_unit end >>=? fun () -> return (Some k, map_update k (Some (item_wrapper v)) map, ctxt) | Prim (loc, D_Elt, l, _) -> fail @@ Invalid_arity (loc, D_Elt, 2, List.length l) | Prim (loc, name, _, _) -> fail @@ Invalid_primitive (loc, [ D_Elt ], name) | Int _ | String _ | Bytes _ | Seq _ -> error () >>=? fail) (None, empty_map key_type, ctxt) items |> traced >>|? fun (_, items, ctxt) -> (items, ctxt) in match ty, script_data with (* Unit *) | Unit_t ty_name, Prim (loc, D_Unit, [], annot) -> check_const_type_annot loc annot ty_name [] >>=? fun () -> Lwt.return (Gas.consume ctxt Typecheck_costs.unit) >>|? fun ctxt -> ((() : a), ctxt) | Unit_t _, Prim (loc, D_Unit, l, _) -> traced (fail (Invalid_arity (loc, D_Unit, 0, List.length l))) | Unit_t _, expr -> traced (fail (unexpected expr [] Constant_namespace [ D_Unit ])) (* Booleans *) | Bool_t ty_name, Prim (loc, D_True, [], annot) -> check_const_type_annot loc annot ty_name [] >>=? fun () -> Lwt.return (Gas.consume ctxt Typecheck_costs.bool) >>|? fun ctxt -> (true, ctxt) | Bool_t ty_name, Prim (loc, D_False, [], annot) -> check_const_type_annot loc annot ty_name [] >>=? fun () -> Lwt.return (Gas.consume ctxt Typecheck_costs.bool) >>|? fun ctxt -> (false, ctxt) | Bool_t _, Prim (loc, (D_True | D_False as c), l, _) -> traced (fail (Invalid_arity (loc, c, 0, List.length l))) | Bool_t _, expr -> traced (fail (unexpected expr [] Constant_namespace [ D_True ; D_False ])) (* Strings *) | String_t _, String (_, v) -> Lwt.return (Gas.consume ctxt (Typecheck_costs.string (String.length v))) >>=? 
fun ctxt -> let rec check_printable_ascii i = if Compare.Int.(i < 0) then true else match String.get v i with | '\n' | '\x20'..'\x7E' -> check_printable_ascii (i - 1) | _ -> false in if check_printable_ascii (String.length v - 1) then return (v, ctxt) else error () >>=? fail | String_t _, expr -> traced (fail (Invalid_kind (location expr, [ String_kind ], kind expr))) (* Byte sequences *) | Bytes_t _, Bytes (_, v) -> Lwt.return (Gas.consume ctxt (Typecheck_costs.string (MBytes.length v))) >>=? fun ctxt -> return (v, ctxt) | Bytes_t _, expr -> traced (fail (Invalid_kind (location expr, [ Bytes_kind ], kind expr))) (* Integers *) | Int_t _, Int (_, v) -> Lwt.return (Gas.consume ctxt (Typecheck_costs.z v)) >>=? fun ctxt -> return (Script_int.of_zint v, ctxt) | Nat_t _, Int (_, v) -> Lwt.return (Gas.consume ctxt (Typecheck_costs.z v)) >>=? fun ctxt -> let v = Script_int.of_zint v in if Compare.Int.(Script_int.compare v Script_int.zero >= 0) then return (Script_int.abs v, ctxt) else error () >>=? fail | Int_t _, expr -> traced (fail (Invalid_kind (location expr, [ Int_kind ], kind expr))) | Nat_t _, expr -> traced (fail (Invalid_kind (location expr, [ Int_kind ], kind expr))) (* Tez amounts *) | Mutez_t _, Int (_, v) -> Lwt.return ( Gas.consume ctxt Typecheck_costs.tez >>? fun ctxt -> Gas.consume ctxt Michelson_v1_gas.Cost_of.z_to_int64 ) >>=? fun ctxt -> begin try match Tez.of_mutez (Z.to_int64 v) with | None -> raise Exit | Some tez -> return (tez, ctxt) with _ -> error () >>=? fail end | Mutez_t _, expr -> traced (fail (Invalid_kind (location expr, [ Int_kind ], kind expr))) (* Timestamps *) | Timestamp_t _, (Int (_, v)) (* As unparsed with [Optimized] or out of bounds [Readable]. *) -> Lwt.return (Gas.consume ctxt (Typecheck_costs.z v)) >>=? fun ctxt -> return (Script_timestamp.of_zint v, ctxt) | Timestamp_t _, String (_, s) (* As unparsed with [Redable]. *) -> Lwt.return (Gas.consume ctxt Typecheck_costs.string_timestamp) >>=? 
fun ctxt -> begin match Script_timestamp.of_string s with | Some v -> return (v, ctxt) | None -> error () >>=? fail end | Timestamp_t _, expr -> traced (fail (Invalid_kind (location expr, [ String_kind ; Int_kind ], kind expr))) (* IDs *) | Key_t _, Bytes (_, bytes) -> (* As unparsed with [Optimized]. *) Lwt.return (Gas.consume ctxt Typecheck_costs.key) >>=? fun ctxt -> begin match Data_encoding.Binary.of_bytes Signature.Public_key.encoding bytes with | Some k -> return (k, ctxt) | None -> error () >>=? fail end | Key_t _, String (_, s) -> (* As unparsed with [Readable]. *) Lwt.return (Gas.consume ctxt Typecheck_costs.key) >>=? fun ctxt -> begin match Signature.Public_key.of_b58check_opt s with | Some k -> return (k, ctxt) | None -> error () >>=? fail end | Key_t _, expr -> traced (fail (Invalid_kind (location expr, [ String_kind ; Bytes_kind ], kind expr))) | Key_hash_t _, Bytes (_, bytes) -> (* As unparsed with [Optimized]. *) Lwt.return (Gas.consume ctxt Typecheck_costs.key_hash) >>=? fun ctxt -> begin match Data_encoding.Binary.of_bytes Signature.Public_key_hash.encoding bytes with | Some k -> return (k, ctxt) | None -> error () >>=? fail end | Key_hash_t _, String (_, s) (* As unparsed with [Readable]. *) -> Lwt.return (Gas.consume ctxt Typecheck_costs.key_hash) >>=? fun ctxt -> begin match Signature.Public_key_hash.of_b58check_opt s with | Some k -> return (k, ctxt) | None -> error () >>=? fail end | Key_hash_t _, expr -> traced (fail (Invalid_kind (location expr, [ String_kind ; Bytes_kind ], kind expr))) (* Signatures *) | Signature_t _, Bytes (_, bytes) (* As unparsed with [Optimized]. *) -> Lwt.return (Gas.consume ctxt Typecheck_costs.signature) >>=? fun ctxt -> begin match Data_encoding.Binary.of_bytes Signature.encoding bytes with | Some k -> return (k, ctxt) | None -> error () >>=? fail end | Signature_t _, String (_, s) (* As unparsed with [Readable]. *) -> Lwt.return (Gas.consume ctxt Typecheck_costs.signature) >>=? 
fun ctxt -> begin match Signature.of_b58check_opt s with | Some s -> return (s, ctxt) | None -> error () >>=? fail end | Signature_t _, expr -> traced (fail (Invalid_kind (location expr, [ String_kind ; Bytes_kind ], kind expr))) (* Operations *) | Operation_t _, _ -> (* operations cannot appear in parameters or storage, the protocol should never parse the bytes of an operation *) assert false (* Addresses *) | Address_t _, Bytes (_, bytes) (* As unparsed with [O[ptimized]. *) -> Lwt.return (Gas.consume ctxt Typecheck_costs.contract) >>=? fun ctxt -> begin match Data_encoding.Binary.of_bytes Contract.encoding bytes with | Some c -> return (c, ctxt) | None -> error () >>=? fail end | Address_t _, String (_, s) (* As unparsed with [Readable]. *) -> Lwt.return (Gas.consume ctxt Typecheck_costs.contract) >>=? fun ctxt -> traced (Lwt.return (Contract.of_b58check s)) >>=? fun c -> return (c, ctxt) | Address_t _, expr -> traced (fail (Invalid_kind (location expr, [ String_kind ; Bytes_kind ], kind expr))) (* Contracts *) | Contract_t (ty, _), Bytes (loc, bytes) (* As unparsed with [Optimized]. *) -> Lwt.return (Gas.consume ctxt Typecheck_costs.contract) >>=? fun ctxt -> begin match Data_encoding.Binary.of_bytes Contract.encoding bytes with | Some c -> traced (parse_contract ctxt loc ty c) >>=? fun (ctxt, _) -> return ((ty, c), ctxt) | None -> error () >>=? fail end | Contract_t (ty, _), String (loc, s) (* As unparsed with [Readable]. *) -> Lwt.return (Gas.consume ctxt Typecheck_costs.contract) >>=? fun ctxt -> traced @@ Lwt.return (Contract.of_b58check s) >>=? fun c -> parse_contract ctxt loc ty c >>=? fun (ctxt, _) -> return ((ty, c), ctxt) | Contract_t _, expr -> traced (fail (Invalid_kind (location expr, [ String_kind ; Bytes_kind ], kind expr))) (* Pairs *) | Pair_t ((ta, af, _), (tb, bf, _), ty_name), Prim (loc, D_Pair, [ va; vb ], annot) -> check_const_type_annot loc annot ty_name [af; bf] >>=? fun () -> Lwt.return (Gas.consume ctxt Typecheck_costs.pair) >>=? 
fun ctxt -> traced @@ parse_data ?type_logger ctxt ta va >>=? fun (va, ctxt) -> parse_data ?type_logger ctxt tb vb >>=? fun (vb, ctxt) -> return ((va, vb), ctxt) | Pair_t _, Prim (loc, D_Pair, l, _) -> fail @@ Invalid_arity (loc, D_Pair, 2, List.length l) | Pair_t _, expr -> traced (fail (unexpected expr [] Constant_namespace [ D_Pair ])) (* Unions *) | Union_t ((tl, lconstr), _, ty_name), Prim (loc, D_Left, [ v ], annot) -> check_const_type_annot loc annot ty_name [lconstr]>>=? fun () -> Lwt.return (Gas.consume ctxt Typecheck_costs.union) >>=? fun ctxt -> traced @@ parse_data ?type_logger ctxt tl v >>=? fun (v, ctxt) -> return (L v, ctxt) | Union_t _, Prim (loc, D_Left, l, _) -> fail @@ Invalid_arity (loc, D_Left, 1, List.length l) | Union_t (_, (tr, rconstr), ty_name), Prim (loc, D_Right, [ v ], annot) -> check_const_type_annot loc annot ty_name [rconstr] >>=? fun () -> Lwt.return (Gas.consume ctxt Typecheck_costs.union) >>=? fun ctxt -> traced @@ parse_data ?type_logger ctxt tr v >>=? fun (v, ctxt) -> return (R v, ctxt) | Union_t _, Prim (loc, D_Right, l, _) -> fail @@ Invalid_arity (loc, D_Right, 1, List.length l) | Union_t _, expr -> traced (fail (unexpected expr [] Constant_namespace [ D_Left ; D_Right ])) (* Lambdas *) | Lambda_t (ta, tr, _ty_name), (Seq (_loc, _) as script_instr) -> Lwt.return (Gas.consume ctxt Typecheck_costs.lambda) >>=? fun ctxt -> traced @@ parse_returning Lambda ?type_logger ctxt (ta, Some (`Var_annot "@arg")) tr script_instr | Lambda_t _, expr -> traced (fail (Invalid_kind (location expr, [ Seq_kind ], kind expr))) (* Options *) | Option_t ((t, some_constr), _, ty_name), Prim (loc, D_Some, [ v ], annot) -> check_const_type_annot loc annot ty_name [some_constr] >>=? fun () -> Lwt.return (Gas.consume ctxt Typecheck_costs.some) >>=? fun ctxt -> traced @@ parse_data ?type_logger ctxt t v >>=? 
fun (v, ctxt) -> return (Some v, ctxt) | Option_t _, Prim (loc, D_Some, l, _) -> fail @@ Invalid_arity (loc, D_Some, 1, List.length l) | Option_t (_, none_constr, ty_name), Prim (loc, D_None, [], annot) -> check_const_type_annot loc annot ty_name [none_constr] >>=? fun () -> Lwt.return (Gas.consume ctxt Typecheck_costs.none) >>=? fun ctxt -> return (None, ctxt) | Option_t _, Prim (loc, D_None, l, _) -> fail @@ Invalid_arity (loc, D_None, 0, List.length l) | Option_t _, expr -> traced (fail (unexpected expr [] Constant_namespace [ D_Some ; D_None ])) (* Lists *) | List_t (t, _ty_name), Seq (_loc, items) -> traced @@ fold_right_s (fun v (rest, ctxt) -> Lwt.return (Gas.consume ctxt Typecheck_costs.list_element) >>=? fun ctxt -> parse_data ?type_logger ctxt t v >>=? fun (v, ctxt) -> return ((v :: rest), ctxt)) items ([], ctxt) | List_t _, expr -> traced (fail (Invalid_kind (location expr, [ Seq_kind ], kind expr))) (* Sets *) | Set_t (t, _ty_name), (Seq (loc, vs) as expr) -> let length = List.length vs in traced @@ fold_left_s (fun (last_value, set, ctxt) v -> Lwt.return (Gas.consume ctxt (Typecheck_costs.set_element length)) >>=? fun ctxt -> parse_comparable_data ?type_logger ctxt t v >>=? fun (v, ctxt) -> begin match last_value with | Some value -> if Compare.Int.(0 <= (compare_comparable t value v)) then if Compare.Int.(0 = (compare_comparable t value v)) then fail (Duplicate_set_values (loc, strip_locations expr)) else fail (Unordered_set_values (loc, strip_locations expr)) else return_unit | None -> return_unit end >>=? fun () -> Lwt.return (Gas.consume ctxt (Michelson_v1_gas.Cost_of.set_update v false set)) >>=? fun ctxt -> return (Some v, set_update v true set, ctxt)) (None, empty_set t, ctxt) vs >>|? 
fun (_, set, ctxt) -> (set, ctxt) | Set_t _, expr -> traced (fail (Invalid_kind (location expr, [ Seq_kind ], kind expr))) (* Maps *) | Map_t (tk, tv, _ty_name), (Seq (loc, vs) as expr) -> parse_items ?type_logger loc ctxt expr tk tv vs (fun x -> x) | Map_t _, expr -> traced (fail (Invalid_kind (location expr, [ Seq_kind ], kind expr))) | Big_map_t (tk, tv, _ty_name), (Seq (loc, vs) as expr) -> parse_items ?type_logger loc ctxt expr tk tv vs (fun x -> Some x) >>|? fun (diff, ctxt) -> ({ diff ; key_type = ty_of_comparable_ty tk ; value_type = tv }, ctxt) | Big_map_t (_tk, _tv, _), expr -> traced (fail (Invalid_kind (location expr, [ Seq_kind ], kind expr))) and parse_comparable_data : type a. ?type_logger:type_logger -> context -> a comparable_ty -> Script.node -> (a * context) tzresult Lwt.t = fun ?type_logger ctxt ty script_data -> parse_data ?type_logger ctxt (ty_of_comparable_ty ty) script_data and parse_returning : type arg ret. ?type_logger: type_logger -> tc_context -> context -> arg ty * var_annot option -> ret ty -> Script.node -> ((arg, ret) lambda * context) tzresult Lwt.t = fun ?type_logger tc_context ctxt (arg, arg_annot) ret script_instr -> parse_instr ?type_logger tc_context ctxt script_instr (Item_t (arg, Empty_t, arg_annot)) >>=? function | (Typed ({ loc ; aft = (Item_t (ty, Empty_t, _) as stack_ty) ; _ } as descr), ctxt) -> trace_eval (fun () -> Lwt.return (serialize_ty_for_error ctxt ret) >>=? fun (ret, ctxt) -> serialize_stack_for_error ctxt stack_ty >>|? fun (stack_ty, _ctxt) -> Bad_return (loc, stack_ty, ret)) (Lwt.return (ty_eq ctxt ty ret) >>=? fun (Eq, ctxt) -> Lwt.return (merge_types ctxt loc ty ret) >>=? fun (_ret, ctxt) -> return ((Lam (descr, strip_locations script_instr) : (arg, ret) lambda), ctxt)) | (Typed { loc ; aft = stack_ty ; _ }, ctxt) -> Lwt.return (serialize_ty_for_error ctxt ret) >>=? fun (ret, ctxt) -> serialize_stack_for_error ctxt stack_ty >>=? 
fun (stack_ty, _ctxt) -> fail (Bad_return (loc, stack_ty, ret)) | (Failed { descr }, ctxt) -> return ((Lam (descr (Item_t (ret, Empty_t, None)), strip_locations script_instr) : (arg, ret) lambda), ctxt) and parse_instr : type bef. ?type_logger: type_logger -> tc_context -> context -> Script.node -> bef stack_ty -> (bef judgement * context) tzresult Lwt.t = fun ?type_logger tc_context ctxt script_instr stack_ty -> let check_item check loc name n m = trace_eval (fun () -> serialize_stack_for_error ctxt stack_ty >>|? fun (stack_ty, _ctxt) -> Bad_stack (loc, name, m, stack_ty)) @@ trace (Bad_stack_item n) @@ Lwt.return check in let check_item_ty ctxt exp got loc n = check_item (ty_eq ctxt exp got) loc n in let log_stack ctxt loc stack_ty aft = match type_logger, script_instr with | None, _ | Some _, (Seq (-1, _) | Int _ | String _ | Bytes _) -> return () | Some log, (Prim _ | Seq _) -> (* Unparsing for logging done in an unlimited context as this is used only by the client and not the protocol *) let ctxt = Gas.set_unlimited ctxt in unparse_stack ctxt stack_ty >>=? fun (stack_ty, _) -> unparse_stack ctxt aft >>=? fun (aft, _) -> log loc stack_ty aft; return () in let return : context -> bef judgement -> (bef judgement * context) tzresult Lwt.t = fun ctxt judgement -> match judgement with | Typed { instr ; loc ; aft ; _ } -> let maximum_type_size = Constants.michelson_maximum_type_size ctxt in let type_size = type_size_of_stack_head aft ~up_to:(number_of_generated_growing_types instr) in if Compare.Int.(type_size > maximum_type_size) then fail (Type_too_large (loc, type_size, maximum_type_size)) else return (judgement, ctxt) | Failed _ -> return (judgement, ctxt) in let typed ctxt loc instr aft = log_stack ctxt loc stack_ty aft >>=? fun () -> Lwt.return @@ Gas.consume ctxt (Typecheck_costs.instr instr) >>=? fun ctxt -> return ctxt (Typed { loc ; instr ; bef = stack_ty ; aft }) in Lwt.return @@ Gas.consume ctxt Typecheck_costs.cycle >>=? 
fun ctxt -> match script_instr, stack_ty with (* stack ops *) | Prim (loc, I_DROP, [], annot), Item_t (_, rest, _) -> fail_unexpected_annot loc annot >>=? fun () -> typed ctxt loc Drop rest | Prim (loc, I_DUP, [], annot), Item_t (v, rest, stack_annot) -> parse_var_annot loc annot ~default:stack_annot >>=? fun annot -> typed ctxt loc Dup (Item_t (v, Item_t (v, rest, stack_annot), annot)) | Prim (loc, I_SWAP, [], annot), Item_t (v, Item_t (w, rest, stack_annot), cur_top_annot) -> fail_unexpected_annot loc annot >>=? fun () -> typed ctxt loc Swap (Item_t (w, Item_t (v, rest, cur_top_annot), stack_annot)) | Prim (loc, I_PUSH, [ t ; d ], annot), stack -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:false t >>=? fun (Ex_ty t, ctxt) -> parse_data ?type_logger ctxt t d >>=? fun (v, ctxt) -> typed ctxt loc (Const v) (Item_t (t, stack, annot)) | Prim (loc, I_UNIT, [], annot), stack -> parse_var_type_annot loc annot >>=? fun (annot, ty_name) -> typed ctxt loc (Const ()) (Item_t (Unit_t ty_name, stack, annot)) (* options *) | Prim (loc, I_SOME, [], annot), Item_t (t, rest, stack_annot) -> parse_constr_annot loc annot ~if_special_first:(var_to_field_annot stack_annot) >>=? fun (annot, ty_name, some_field, none_field) -> typed ctxt loc Cons_some (Item_t (Option_t ((t, some_field), none_field, ty_name), rest, annot)) | Prim (loc, I_NONE, [ t ], annot), stack -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:true t >>=? fun (Ex_ty t, ctxt) -> parse_constr_annot loc annot >>=? fun (annot, ty_name, some_field, none_field) -> typed ctxt loc (Cons_none t) (Item_t (Option_t ((t, some_field), none_field, ty_name), stack, annot)) | Prim (loc, I_IF_NONE, [ bt ; bf ], annot), (Item_t (Option_t ((t, some_field), _none_field, _), rest, option_annot) as bef) -> check_kind [ Seq_kind ] bt >>=? fun () -> check_kind [ Seq_kind ] bf >>=? fun () -> fail_unexpected_annot loc annot >>=? 
fun () -> let annot = gen_access_annot option_annot some_field ~default:default_some_annot in parse_instr ?type_logger tc_context ctxt bt rest >>=? fun (btr, ctxt) -> parse_instr ?type_logger tc_context ctxt bf (Item_t (t, rest, annot)) >>=? fun (bfr, ctxt) -> let branch ibt ibf = { loc ; instr = If_none (ibt, ibf) ; bef ; aft = ibt.aft } in merge_branches ctxt loc btr bfr { branch } >>=? fun (judgement, ctxt) -> return ctxt judgement (* pairs *) | Prim (loc, I_PAIR, [], annot), Item_t (a, Item_t (b, rest, snd_annot), fst_annot) -> parse_constr_annot loc annot ~if_special_first:(var_to_field_annot fst_annot) ~if_special_second:(var_to_field_annot snd_annot) >>=? fun (annot, ty_name, l_field, r_field) -> typed ctxt loc Cons_pair (Item_t (Pair_t((a, l_field, fst_annot), (b, r_field, snd_annot), ty_name), rest, annot)) | Prim (loc, I_CAR, [], annot), Item_t (Pair_t ((a, expected_field_annot, a_annot), _, _), rest, pair_annot) -> parse_destr_annot loc annot ~pair_annot ~value_annot:a_annot ~field_name:expected_field_annot ~default_accessor:default_car_annot >>=? fun (annot, field_annot) -> Lwt.return @@ check_correct_field field_annot expected_field_annot >>=? fun () -> typed ctxt loc Car (Item_t (a, rest, annot)) | Prim (loc, I_CDR, [], annot), Item_t (Pair_t (_, (b, expected_field_annot, b_annot), _), rest, pair_annot) -> parse_destr_annot loc annot ~pair_annot ~value_annot:b_annot ~field_name:expected_field_annot ~default_accessor:default_cdr_annot >>=? fun (annot, field_annot) -> Lwt.return @@ check_correct_field field_annot expected_field_annot >>=? fun () -> typed ctxt loc Cdr (Item_t (b, rest, annot)) (* unions *) | Prim (loc, I_LEFT, [ tr ], annot), Item_t (tl, rest, stack_annot) -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:true tr >>=? fun (Ex_ty tr, ctxt) -> parse_constr_annot loc annot ~if_special_first:(var_to_field_annot stack_annot) >>=? 
fun (annot, tname, l_field, r_field) -> typed ctxt loc Left (Item_t (Union_t ((tl, l_field), (tr, r_field), tname), rest, annot)) | Prim (loc, I_RIGHT, [ tl ], annot), Item_t (tr, rest, stack_annot) -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:true tl >>=? fun (Ex_ty tl, ctxt) -> parse_constr_annot loc annot ~if_special_second:(var_to_field_annot stack_annot) >>=? fun (annot, tname, l_field, r_field) -> typed ctxt loc Right (Item_t (Union_t ((tl, l_field), (tr, r_field), tname), rest, annot)) | Prim (loc, I_IF_LEFT, [ bt ; bf ], annot), (Item_t (Union_t ((tl, l_field), (tr, r_field), _), rest, union_annot) as bef) -> check_kind [ Seq_kind ] bt >>=? fun () -> check_kind [ Seq_kind ] bf >>=? fun () -> fail_unexpected_annot loc annot >>=? fun () -> let left_annot = gen_access_annot union_annot l_field ~default:default_left_annot in let right_annot = gen_access_annot union_annot r_field ~default:default_right_annot in parse_instr ?type_logger tc_context ctxt bt (Item_t (tl, rest, left_annot)) >>=? fun (btr, ctxt) -> parse_instr ?type_logger tc_context ctxt bf (Item_t (tr, rest, right_annot)) >>=? fun (bfr, ctxt) -> let branch ibt ibf = { loc ; instr = If_left (ibt, ibf) ; bef ; aft = ibt.aft } in merge_branches ctxt loc btr bfr { branch } >>=? fun (judgement, ctxt) -> return ctxt judgement (* lists *) | Prim (loc, I_NIL, [ t ], annot), stack -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:true t >>=? fun (Ex_ty t, ctxt) -> parse_var_type_annot loc annot >>=? fun (annot, ty_name) -> typed ctxt loc Nil (Item_t (List_t (t, ty_name), stack, annot)) | Prim (loc, I_CONS, [], annot), Item_t (tv, Item_t (List_t (t, ty_name), rest, _), _) -> check_item_ty ctxt tv t loc I_CONS 1 2 >>=? fun (Eq, ctxt) -> parse_var_annot loc annot >>=? 
fun annot -> typed ctxt loc Cons_list (Item_t (List_t (t, ty_name), rest, annot)) | Prim (loc, I_IF_CONS, [ bt ; bf ], annot), (Item_t (List_t (t, ty_name), rest, list_annot) as bef) -> check_kind [ Seq_kind ] bt >>=? fun () -> check_kind [ Seq_kind ] bf >>=? fun () -> fail_unexpected_annot loc annot >>=? fun () -> let hd_annot = gen_access_annot list_annot default_hd_annot in let tl_annot = gen_access_annot list_annot default_tl_annot in parse_instr ?type_logger tc_context ctxt bt (Item_t (t, Item_t (List_t (t, ty_name), rest, tl_annot), hd_annot)) >>=? fun (btr, ctxt) -> parse_instr ?type_logger tc_context ctxt bf rest >>=? fun (bfr, ctxt) -> let branch ibt ibf = { loc ; instr = If_cons (ibt, ibf) ; bef ; aft = ibt.aft } in merge_branches ctxt loc btr bfr { branch } >>=? fun (judgement, ctxt) -> return ctxt judgement | Prim (loc, I_SIZE, [], annot), Item_t (List_t _, rest, _) -> parse_var_type_annot loc annot >>=? fun (annot, tname) -> typed ctxt loc List_size (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_MAP, [ body ], annot), (Item_t (List_t (elt, _), starting_rest, list_annot)) -> check_kind [ Seq_kind ] body >>=? fun () -> parse_var_type_annot loc annot >>=? fun (ret_annot, list_ty_name) -> let elt_annot = gen_access_annot list_annot default_elt_annot in parse_instr ?type_logger tc_context ctxt body (Item_t (elt, starting_rest, elt_annot)) >>=? begin fun (judgement, ctxt) -> match judgement with | Typed ({ aft = Item_t (ret, rest, _) ; _ } as ibody) -> let invalid_map_body () = serialize_stack_for_error ctxt ibody.aft >>|? fun (aft, _ctxt) -> Invalid_map_body (loc, aft) in trace_eval invalid_map_body (Lwt.return @@ stack_ty_eq ctxt 1 rest starting_rest >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_stacks loc ctxt rest starting_rest >>=? fun (rest, ctxt) -> typed ctxt loc (List_map ibody) (Item_t (List_t (ret, list_ty_name), rest, ret_annot))) | Typed { aft ; _ } -> serialize_stack_for_error ctxt aft >>=? 
fun (aft, _ctxt) -> fail (Invalid_map_body (loc, aft)) | Failed _ -> fail (Invalid_map_block_fail loc) end | Prim (loc, I_ITER, [ body ], annot), Item_t (List_t (elt, _), rest, list_annot) -> check_kind [ Seq_kind ] body >>=? fun () -> fail_unexpected_annot loc annot >>=? fun () -> let elt_annot = gen_access_annot list_annot default_elt_annot in parse_instr ?type_logger tc_context ctxt body (Item_t (elt, rest, elt_annot)) >>=? begin fun (judgement, ctxt) -> match judgement with | Typed ({ aft ; _ } as ibody) -> let invalid_iter_body () = serialize_stack_for_error ctxt ibody.aft >>=? fun (aft, ctxt) -> serialize_stack_for_error ctxt rest >>|? fun (rest, _ctxt) -> Invalid_iter_body (loc, rest, aft) in trace_eval invalid_iter_body (Lwt.return @@ stack_ty_eq ctxt 1 aft rest >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_stacks loc ctxt aft rest >>=? fun (rest, ctxt) -> typed ctxt loc (List_iter ibody) rest) | Failed { descr } -> typed ctxt loc (List_iter (descr rest)) rest end (* sets *) | Prim (loc, I_EMPTY_SET, [ t ], annot), rest -> Lwt.return @@ parse_comparable_ty ctxt t >>=? fun (Ex_comparable_ty t, ctxt) -> parse_var_type_annot loc annot >>=? fun (annot, tname) -> typed ctxt loc (Empty_set t) (Item_t (Set_t (t, tname), rest, annot)) | Prim (loc, I_ITER, [ body ], annot), Item_t (Set_t (comp_elt, _), rest, set_annot) -> check_kind [ Seq_kind ] body >>=? fun () -> fail_unexpected_annot loc annot >>=? fun () -> let elt_annot = gen_access_annot set_annot default_elt_annot in let elt = ty_of_comparable_ty comp_elt in parse_instr ?type_logger tc_context ctxt body (Item_t (elt, rest, elt_annot)) >>=? begin fun (judgement, ctxt) -> match judgement with | Typed ({ aft ; _ } as ibody) -> let invalid_iter_body () = serialize_stack_for_error ctxt ibody.aft >>=? fun (aft, ctxt) -> serialize_stack_for_error ctxt rest >>|? fun (rest, _ctxt) -> Invalid_iter_body (loc, rest, aft) in trace_eval invalid_iter_body (Lwt.return @@ stack_ty_eq ctxt 1 aft rest >>=? 
fun (Eq, ctxt) -> Lwt.return @@ merge_stacks loc ctxt aft rest >>=? fun (rest, ctxt) -> typed ctxt loc (Set_iter ibody) rest) | Failed { descr } -> typed ctxt loc (Set_iter (descr rest)) rest end | Prim (loc, I_MEM, [], annot), Item_t (v, Item_t (Set_t (elt, _), rest, _), _) -> let elt = ty_of_comparable_ty elt in parse_var_type_annot loc annot >>=? fun (annot, tname) -> check_item_ty ctxt elt v loc I_MEM 1 2 >>=? fun (Eq, ctxt) -> typed ctxt loc Set_mem (Item_t (Bool_t tname, rest, annot)) | Prim (loc, I_UPDATE, [], annot), Item_t (v, Item_t (Bool_t _, Item_t (Set_t (elt, tname), rest, set_annot), _), _) -> let ty = ty_of_comparable_ty elt in parse_var_annot loc annot ~default:set_annot >>=? fun annot -> check_item_ty ctxt ty v loc I_UPDATE 1 3 >>=? fun (Eq, ctxt) -> typed ctxt loc Set_update (Item_t (Set_t (elt, tname), rest, annot)) | Prim (loc, I_SIZE, [], annot), Item_t (Set_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Set_size (Item_t (Nat_t None, rest, annot)) (* maps *) | Prim (loc, I_EMPTY_MAP, [ tk ; tv ], annot), stack -> Lwt.return @@ parse_comparable_ty ctxt tk >>=? fun (Ex_comparable_ty tk, ctxt) -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:true tv >>=? fun (Ex_ty tv, ctxt) -> parse_var_type_annot loc annot >>=? fun (annot, ty_name) -> typed ctxt loc (Empty_map (tk, tv)) (Item_t (Map_t (tk, tv, ty_name), stack, annot)) | Prim (loc, I_MAP, [ body ], annot), Item_t (Map_t (ck, elt, _), starting_rest, _map_annot) -> let k = ty_of_comparable_ty ck in check_kind [ Seq_kind ] body >>=? fun () -> parse_var_type_annot loc annot >>=? fun (ret_annot, ty_name) -> let k_name = field_to_var_annot default_key_annot in let e_name = field_to_var_annot default_elt_annot in parse_instr ?type_logger tc_context ctxt body (Item_t (Pair_t ((k, None, k_name), (elt, None, e_name), None), starting_rest, None)) >>=? 
begin fun (judgement, ctxt) -> match judgement with | Typed ({ aft = Item_t (ret, rest, _) ; _ } as ibody) -> let invalid_map_body () = serialize_stack_for_error ctxt ibody.aft >>|? fun (aft, _ctxt) -> Invalid_map_body (loc, aft) in trace_eval invalid_map_body (Lwt.return @@ stack_ty_eq ctxt 1 rest starting_rest >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_stacks loc ctxt rest starting_rest >>=? fun (rest, ctxt) -> typed ctxt loc (Map_map ibody) (Item_t (Map_t (ck, ret, ty_name), rest, ret_annot))) | Typed { aft ; _ } -> serialize_stack_for_error ctxt aft >>=? fun (aft, _ctxt) -> fail (Invalid_map_body (loc, aft)) | Failed _ -> fail (Invalid_map_block_fail loc) end | Prim (loc, I_ITER, [ body ], annot), Item_t (Map_t (comp_elt, element_ty, _), rest, _map_annot) -> check_kind [ Seq_kind ] body >>=? fun () -> fail_unexpected_annot loc annot >>=? fun () -> let k_name = field_to_var_annot default_key_annot in let e_name = field_to_var_annot default_elt_annot in let key = ty_of_comparable_ty comp_elt in parse_instr ?type_logger tc_context ctxt body (Item_t (Pair_t ((key, None, k_name), (element_ty, None, e_name), None), rest, None)) >>=? begin fun (judgement, ctxt) -> match judgement with | Typed ({ aft ; _ } as ibody) -> let invalid_iter_body () = serialize_stack_for_error ctxt ibody.aft >>=? fun (aft, ctxt) -> serialize_stack_for_error ctxt rest >>|? fun (rest, _ctxt) -> Invalid_iter_body (loc, rest, aft) in trace_eval invalid_iter_body (Lwt.return @@ stack_ty_eq ctxt 1 aft rest >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_stacks loc ctxt aft rest >>=? fun (rest, ctxt) -> typed ctxt loc (Map_iter ibody) rest) | Failed { descr } -> typed ctxt loc (Map_iter (descr rest)) rest end | Prim (loc, I_MEM, [], annot), Item_t (vk, Item_t (Map_t (ck, _, _), rest, _), _) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_MEM 1 2 >>=? fun (Eq, ctxt) -> parse_var_annot loc annot >>=? 
fun annot -> typed ctxt loc Map_mem (Item_t (Bool_t None, rest, annot)) | Prim (loc, I_GET, [], annot), Item_t (vk, Item_t (Map_t (ck, elt, _), rest, _), _) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_GET 1 2 >>=? fun (Eq, ctxt) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Map_get (Item_t (Option_t ((elt, None), None, None), rest, annot)) | Prim (loc, I_UPDATE, [], annot), Item_t (vk, Item_t (Option_t ((vv, _), _, _), Item_t (Map_t (ck, v, map_name), rest, map_annot), _), _) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_UPDATE 1 3 >>=? fun (Eq, ctxt) -> check_item_ty ctxt vv v loc I_UPDATE 2 3 >>=? fun (Eq, ctxt) -> parse_var_annot loc annot ~default:map_annot >>=? fun annot -> typed ctxt loc Map_update (Item_t (Map_t (ck, v, map_name), rest, annot)) | Prim (loc, I_SIZE, [], annot), Item_t (Map_t (_, _, _), rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Map_size (Item_t (Nat_t None, rest, annot)) (* big_map *) | Prim (loc, I_MEM, [], annot), Item_t (set_key, Item_t (Big_map_t (map_key, _, _), rest, _), _) -> let k = ty_of_comparable_ty map_key in check_item_ty ctxt set_key k loc I_MEM 1 2 >>=? fun (Eq, ctxt) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Big_map_mem (Item_t (Bool_t None, rest, annot)) | Prim (loc, I_GET, [], annot), Item_t (vk, Item_t (Big_map_t (ck, elt, _), rest, _), _) -> let k = ty_of_comparable_ty ck in check_item_ty ctxt vk k loc I_GET 1 2 >>=? fun (Eq, ctxt) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Big_map_get (Item_t (Option_t ((elt, None), None, None), rest, annot)) | Prim (loc, I_UPDATE, [], annot), Item_t (set_key, Item_t (Option_t ((set_value, _), _, _), Item_t (Big_map_t (map_key, map_value, map_name), rest, map_annot), _), _) -> let k = ty_of_comparable_ty map_key in check_item_ty ctxt set_key k loc I_UPDATE 1 3 >>=? fun (Eq, ctxt) -> check_item_ty ctxt set_value map_value loc I_UPDATE 2 3 >>=? 
fun (Eq, ctxt) -> parse_var_annot loc annot ~default:map_annot >>=? fun annot -> typed ctxt loc Big_map_update (Item_t (Big_map_t (map_key, map_value, map_name), rest, annot)) (* control *) | Seq (loc, []), stack -> typed ctxt loc Nop stack | Seq (loc, [ single ]), stack -> parse_instr ?type_logger tc_context ctxt single stack >>=? begin fun (judgement, ctxt) -> match judgement with | Typed ({ aft ; _ } as instr) -> let nop = { bef = aft ; loc = loc ; aft ; instr = Nop } in typed ctxt loc (Seq (instr, nop)) aft | Failed { descr ; _ } -> let descr aft = let nop = { bef = aft ; loc = loc ; aft ; instr = Nop } in let descr = descr aft in { descr with instr = Seq (descr, nop) } in return ctxt (Failed { descr }) end | Seq (loc, hd :: tl), stack -> parse_instr ?type_logger tc_context ctxt hd stack >>=? begin fun (judgement, ctxt) -> match judgement with | Failed _ -> fail (Fail_not_in_tail_position (Micheline.location hd)) | Typed ({ aft = middle ; _ } as ihd) -> parse_instr ?type_logger tc_context ctxt (Seq (-1, tl)) middle >>=? fun (judgement, ctxt) -> match judgement with | Failed { descr } -> let descr ret = { loc ; instr = Seq (ihd, descr ret) ; bef = stack ; aft = ret } in return ctxt (Failed { descr }) | Typed itl -> typed ctxt loc (Seq (ihd, itl)) itl.aft end | Prim (loc, I_IF, [ bt ; bf ], annot), (Item_t (Bool_t _, rest, _) as bef) -> check_kind [ Seq_kind ] bt >>=? fun () -> check_kind [ Seq_kind ] bf >>=? fun () -> fail_unexpected_annot loc annot >>=? fun () -> parse_instr ?type_logger tc_context ctxt bt rest >>=? fun (btr, ctxt) -> parse_instr ?type_logger tc_context ctxt bf rest >>=? fun (bfr, ctxt) -> let branch ibt ibf = { loc ; instr = If (ibt, ibf) ; bef ; aft = ibt.aft } in merge_branches ctxt loc btr bfr { branch } >>=? fun (judgement, ctxt) -> return ctxt judgement | Prim (loc, I_LOOP, [ body ], annot), (Item_t (Bool_t _, rest, _stack_annot) as stack) -> check_kind [ Seq_kind ] body >>=? fun () -> fail_unexpected_annot loc annot >>=? 
fun () -> parse_instr ?type_logger tc_context ctxt body rest >>=? begin fun (judgement, ctxt) -> match judgement with | Typed ibody -> let unmatched_branches () = serialize_stack_for_error ctxt ibody.aft >>=? fun (aft, ctxt) -> serialize_stack_for_error ctxt stack >>|? fun (stack, _ctxt) -> Unmatched_branches (loc, aft, stack) in trace_eval unmatched_branches (Lwt.return @@ stack_ty_eq ctxt 1 ibody.aft stack >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_stacks loc ctxt ibody.aft stack >>=? fun (_stack, ctxt) -> typed ctxt loc (Loop ibody) rest) | Failed { descr } -> let ibody = descr stack in typed ctxt loc (Loop ibody) rest end | Prim (loc, I_LOOP_LEFT, [ body ], annot), (Item_t (Union_t ((tl, l_field), (tr, _), _), rest, union_annot) as stack) -> check_kind [ Seq_kind ] body >>=? fun () -> parse_var_annot loc annot >>=? fun annot -> let l_annot = gen_access_annot union_annot l_field ~default:default_left_annot in parse_instr ?type_logger tc_context ctxt body (Item_t (tl, rest, l_annot)) >>=? begin fun (judgement, ctxt) -> match judgement with | Typed ibody -> let unmatched_branches () = serialize_stack_for_error ctxt ibody.aft >>=? fun (aft, ctxt) -> serialize_stack_for_error ctxt stack >>|? fun (stack, _ctxt) -> Unmatched_branches (loc, aft, stack) in trace_eval unmatched_branches (Lwt.return @@ stack_ty_eq ctxt 1 ibody.aft stack >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_stacks loc ctxt ibody.aft stack >>=? fun (_stack, ctxt) -> typed ctxt loc (Loop_left ibody) (Item_t (tr, rest, annot))) | Failed { descr } -> let ibody = descr stack in typed ctxt loc (Loop_left ibody) (Item_t (tr, rest, annot)) end | Prim (loc, I_LAMBDA, [ arg ; ret ; code ], annot), stack -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:true arg >>=? fun (Ex_ty arg, ctxt) -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:true ret >>=? fun (Ex_ty ret, ctxt) -> check_kind [ Seq_kind ] code >>=? fun () -> parse_var_annot loc annot >>=? 
fun annot -> parse_returning Lambda ?type_logger ctxt (arg, default_arg_annot) ret code >>=? fun (lambda, ctxt) -> typed ctxt loc (Lambda lambda) (Item_t (Lambda_t (arg, ret, None), stack, annot)) | Prim (loc, I_EXEC, [], annot), Item_t (arg, Item_t (Lambda_t (param, ret, _), rest, _), _) -> check_item_ty ctxt arg param loc I_EXEC 1 2 >>=? fun (Eq, ctxt) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Exec (Item_t (ret, rest, annot)) | Prim (loc, I_DIP, [ code ], annot), Item_t (v, rest, stack_annot) -> fail_unexpected_annot loc annot >>=? fun () -> check_kind [ Seq_kind ] code >>=? fun () -> parse_instr ?type_logger (add_dip v stack_annot tc_context) ctxt code rest >>=? begin fun (judgement, ctxt) -> match judgement with | Typed descr -> typed ctxt loc (Dip descr) (Item_t (v, descr.aft, stack_annot)) | Failed _ -> fail (Fail_not_in_tail_position loc) end | Prim (loc, I_FAILWITH, [], annot), Item_t (v, _rest, _) -> fail_unexpected_annot loc annot >>=? fun () -> let descr aft = { loc ; instr = Failwith v ; bef = stack_ty ; aft } in log_stack ctxt loc stack_ty Empty_t >>=? fun () -> return ctxt (Failed { descr }) (* timestamp operations *) | Prim (loc, I_ADD, [], annot), Item_t (Timestamp_t tn1, Item_t (Int_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Add_timestamp_to_seconds (Item_t (Timestamp_t tname, rest, annot)) | Prim (loc, I_ADD, [], annot), Item_t (Int_t tn1, Item_t (Timestamp_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Add_seconds_to_timestamp (Item_t (Timestamp_t tname, rest, annot)) | Prim (loc, I_SUB, [], annot), Item_t (Timestamp_t tn1, Item_t (Int_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? 
fun tname -> typed ctxt loc Sub_timestamp_seconds (Item_t (Timestamp_t tname, rest, annot)) | Prim (loc, I_SUB, [], annot), Item_t (Timestamp_t tn1, Item_t (Timestamp_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Diff_timestamps (Item_t (Int_t tname, rest, annot)) (* string operations *) | Prim (loc, I_CONCAT, [], annot), Item_t (String_t tn1, Item_t (String_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Concat (Item_t (String_t tname, rest, annot)) (* currency operations *) | Prim (loc, I_ADD, [], annot), Item_t (Mutez_t tn1, Item_t (Mutez_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Add_tez (Item_t (Mutez_t tname, rest, annot)) | Prim (loc, I_SUB, [], annot), Item_t (Mutez_t tn1, Item_t (Mutez_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Sub_tez (Item_t (Mutez_t tname, rest, annot)) | Prim (loc, I_MUL, [], annot), Item_t (Mutez_t tname, Item_t (Nat_t _, rest, _), _) -> (* no type name check *) parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Mul_teznat (Item_t (Mutez_t tname, rest, annot)) | Prim (loc, I_MUL, [], annot), Item_t (Nat_t _, Item_t (Mutez_t tname, rest, _), _) -> (* no type name check *) parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Mul_nattez (Item_t (Mutez_t tname, rest, annot)) (* boolean operations *) | Prim (loc, I_OR, [], annot), Item_t (Bool_t tn1, Item_t (Bool_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Or (Item_t (Bool_t tname, rest, annot)) | Prim (loc, I_AND, [], annot), Item_t (Bool_t tn1, Item_t (Bool_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? 
fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc And (Item_t (Bool_t tname, rest, annot)) | Prim (loc, I_XOR, [], annot), Item_t (Bool_t tn1, Item_t (Bool_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Xor (Item_t (Bool_t tname, rest, annot)) | Prim (loc, I_NOT, [], annot), Item_t (Bool_t tname, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Not (Item_t (Bool_t tname, rest, annot)) (* integer operations *) | Prim (loc, I_ABS, [], annot), Item_t (Int_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Abs_int (Item_t (Nat_t None, rest, annot)) | Prim (loc, I_ISNAT, [], annot), Item_t (Int_t _, rest, int_annot) -> parse_var_annot loc annot ~default:int_annot >>=? fun annot -> typed ctxt loc Is_nat (Item_t (Option_t ((Nat_t None, None), None, None), rest, annot)) | Prim (loc, I_INT, [], annot), Item_t (Nat_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Int_nat (Item_t (Int_t None, rest, annot)) | Prim (loc, I_NEG, [], annot), Item_t (Int_t tname, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Neg_int (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_NEG, [], annot), Item_t (Nat_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Neg_nat (Item_t (Int_t None, rest, annot)) | Prim (loc, I_ADD, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Add_intint (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_ADD, [], annot), Item_t (Int_t tname, Item_t (Nat_t _, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Add_intnat (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_ADD, [], annot), Item_t (Nat_t _, Item_t (Int_t tname, rest, _), _) -> parse_var_annot loc annot >>=? 
fun annot -> typed ctxt loc Add_natint (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_ADD, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Add_natnat (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_SUB, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Sub_int (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_SUB, [], annot), Item_t (Int_t tname, Item_t (Nat_t _, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Sub_int (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_SUB, [], annot), Item_t (Nat_t _, Item_t (Int_t tname, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Sub_int (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_SUB, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun _tname -> typed ctxt loc Sub_int (Item_t (Int_t None, rest, annot)) | Prim (loc, I_MUL, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Mul_intint (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_MUL, [], annot), Item_t (Int_t tname, Item_t (Nat_t _, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Mul_intnat (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_MUL, [], annot), Item_t (Nat_t _, Item_t (Int_t tname, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Mul_natint (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_MUL, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? 
fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Mul_natnat (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_EDIV, [], annot), Item_t (Mutez_t tname, Item_t (Nat_t _, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Ediv_teznat (Item_t (Option_t ((Pair_t ((Mutez_t tname, None, None), (Mutez_t tname, None, None), None), None), None, None), rest, annot)) | Prim (loc, I_EDIV, [], annot), Item_t (Mutez_t tn1, Item_t (Mutez_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Ediv_tez (Item_t (Option_t ((Pair_t ((Nat_t None, None, None), (Mutez_t tname, None, None), None), None), None, None), rest, annot)) | Prim (loc, I_EDIV, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Ediv_intint (Item_t (Option_t ((Pair_t ((Int_t tname, None, None), (Nat_t None, None, None), None), None), None, None), rest, annot)) | Prim (loc, I_EDIV, [], annot), Item_t (Int_t tname, Item_t (Nat_t _, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Ediv_intnat (Item_t (Option_t ((Pair_t ((Int_t tname, None, None), (Nat_t None, None, None), None), None), None, None), rest, annot)) | Prim (loc, I_EDIV, [], annot), Item_t (Nat_t tname, Item_t (Int_t _, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Ediv_natint (Item_t (Option_t ((Pair_t ((Int_t None, None, None), (Nat_t tname, None, None), None), None), None, None), rest, annot)) | Prim (loc, I_EDIV, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? 
fun tname -> typed ctxt loc Ediv_natnat (Item_t (Option_t ((Pair_t ((Nat_t tname, None, None), (Nat_t tname, None, None), None), None), None, None), rest, annot)) | Prim (loc, I_LSL, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Lsl_nat (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_LSR, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Lsr_nat (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_OR, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Or_nat (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_AND, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc And_nat (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_AND, [], annot), Item_t (Int_t _, Item_t (Nat_t tname, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc And_int_nat (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_XOR, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc Xor_nat (Item_t (Nat_t tname, rest, annot)) | Prim (loc, I_NOT, [], annot), Item_t (Int_t tname, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Not_int (Item_t (Int_t tname, rest, annot)) | Prim (loc, I_NOT, [], annot), Item_t (Nat_t _, rest, _) -> parse_var_annot loc annot >>=? 
fun annot -> typed ctxt loc Not_nat (Item_t (Int_t None, rest, annot)) (* comparison *) | Prim (loc, I_COMPARE, [], annot), Item_t (Int_t tn1, Item_t (Int_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc (Compare (Int_key tname)) (Item_t (Int_t None, rest, annot)) | Prim (loc, I_COMPARE, [], annot), Item_t (Nat_t tn1, Item_t (Nat_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc (Compare (Nat_key tname)) (Item_t (Int_t None, rest, annot)) | Prim (loc, I_COMPARE, [], annot), Item_t (Bool_t tn1, Item_t (Bool_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc (Compare (Bool_key tname)) (Item_t (Int_t None, rest, annot)) | Prim (loc, I_COMPARE, [], annot), Item_t (String_t tn1, Item_t (String_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc (Compare (String_key tname)) (Item_t (Int_t None, rest, annot)) | Prim (loc, I_COMPARE, [], annot), Item_t (Mutez_t tn1, Item_t (Mutez_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc (Compare (Mutez_key tname)) (Item_t (Int_t None, rest, annot)) | Prim (loc, I_COMPARE, [], annot), Item_t (Key_hash_t tn1, Item_t (Key_hash_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc (Compare (Key_hash_key tname)) (Item_t (Int_t None, rest, annot)) | Prim (loc, I_COMPARE, [], annot), Item_t (Timestamp_t tn1, Item_t (Timestamp_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? 
fun tname -> typed ctxt loc (Compare (Timestamp_key tname)) (Item_t (Int_t None, rest, annot)) | Prim (loc, I_COMPARE, [], annot), Item_t (Address_t tn1, Item_t (Address_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc (Compare (Address_key tname)) (Item_t (Int_t None, rest, annot)) | Prim (loc, I_COMPARE, [], annot), Item_t (Bytes_t tn1, Item_t (Bytes_t tn2, rest, _), _) -> parse_var_annot loc annot >>=? fun annot -> Lwt.return @@ merge_type_annot tn1 tn2 >>=? fun tname -> typed ctxt loc (Compare (Bytes_key tname)) (Item_t (Int_t None, rest, annot)) (* comparators *) | Prim (loc, I_EQ, [], annot), Item_t (Int_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Eq (Item_t (Bool_t None, rest, annot)) | Prim (loc, I_NEQ, [], annot), Item_t (Int_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Neq (Item_t (Bool_t None, rest, annot)) | Prim (loc, I_LT, [], annot), Item_t (Int_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Lt (Item_t (Bool_t None, rest, annot)) | Prim (loc, I_GT, [], annot), Item_t (Int_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Gt (Item_t (Bool_t None, rest, annot)) | Prim (loc, I_LE, [], annot), Item_t (Int_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Le (Item_t (Bool_t None, rest, annot)) | Prim (loc, I_GE, [], annot), Item_t (Int_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Ge (Item_t (Bool_t None, rest, annot)) (* annotations *) | Prim (loc, I_CAST, [ cast_t ], annot), Item_t (t, stack, item_annot) -> parse_var_annot loc annot ~default:item_annot >>=? fun annot -> (Lwt.return @@ parse_ty ctxt ~allow_big_map:true ~allow_operation:true cast_t) >>=? fun (Ex_ty cast_t, ctxt) -> Lwt.return @@ ty_eq ctxt cast_t t >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_types ctxt loc cast_t t >>=? 
fun (_, ctxt) -> typed ctxt loc Nop (Item_t (cast_t, stack, annot)) | Prim (loc, I_RENAME, [], annot), Item_t (t, stack, _) -> parse_var_annot loc annot >>=? fun annot -> (* can erase annot *) typed ctxt loc Nop (Item_t (t, stack, annot)) (* packing *) | Prim (loc, I_PACK, [], annot), Item_t (t, rest, unpacked_annot) -> Lwt.return (check_no_big_map_or_operation loc t) >>=? fun () -> parse_var_annot loc annot ~default:(gen_access_annot unpacked_annot default_pack_annot) >>=? fun annot -> typed ctxt loc (Pack t) (Item_t (Bytes_t None, rest, annot)) | Prim (loc, I_UNPACK, [ ty ], annot), Item_t (Bytes_t _, rest, packed_annot) -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:false ty >>=? fun (Ex_ty t, ctxt) -> let stack_annot = gen_access_annot packed_annot default_unpack_annot in parse_constr_annot loc annot ~if_special_first:(var_to_field_annot stack_annot) >>=? fun (annot, ty_name, some_field, none_field) -> typed ctxt loc (Unpack t) (Item_t (Option_t ((t, some_field), none_field, ty_name), rest, annot)) (* protocol *) | Prim (loc, I_ADDRESS, [], annot), Item_t (Contract_t _, rest, contract_annot) -> parse_var_annot loc annot ~default:(gen_access_annot contract_annot default_addr_annot) >>=? fun annot -> typed ctxt loc Address (Item_t (Address_t None, rest, annot)) | Prim (loc, I_CONTRACT, [ ty ], annot), Item_t (Address_t _, rest, addr_annot) -> Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:false ty >>=? fun (Ex_ty t, ctxt) -> parse_var_annot loc annot ~default:(gen_access_annot addr_annot default_contract_annot) >>=? fun annot -> typed ctxt loc (Contract t) (Item_t (Option_t ((Contract_t (t, None), None), None, None), rest, annot)) | Prim (loc, I_TRANSFER_TOKENS, [], annot), Item_t (p, Item_t (Mutez_t _, Item_t (Contract_t (cp, _), rest, _), _), _) -> check_item_ty ctxt p cp loc I_TRANSFER_TOKENS 1 4 >>=? fun (Eq, ctxt) -> parse_var_annot loc annot >>=? 
fun annot -> typed ctxt loc Transfer_tokens (Item_t (Operation_t None, rest, annot)) | Prim (loc, I_SET_DELEGATE, [], annot), Item_t (Option_t ((Key_hash_t _, _), _, _), rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Set_delegate (Item_t (Operation_t None, rest, annot)) | Prim (loc, I_CREATE_ACCOUNT, [], annot), Item_t (Key_hash_t _, Item_t (Option_t ((Key_hash_t _, _), _, _), Item_t (Bool_t _, Item_t (Mutez_t _, rest, _), _), _), _) -> parse_two_var_annot loc annot >>=? fun (op_annot, addr_annot) -> typed ctxt loc Create_account (Item_t (Operation_t None, Item_t (Address_t None, rest, addr_annot), op_annot)) | Prim (loc, I_IMPLICIT_ACCOUNT, [], annot), Item_t (Key_hash_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Implicit_account (Item_t (Contract_t (Unit_t None, None), rest, annot)) | Prim (loc, I_CREATE_CONTRACT, [ (Seq _ as code)], annot), Item_t (Key_hash_t _, Item_t (Option_t ((Key_hash_t _, _), _, _), Item_t (Bool_t _, Item_t (Bool_t _, Item_t (Mutez_t _, Item_t (ginit, rest, _), _), _), _), _), _) -> parse_two_var_annot loc annot >>=? fun (op_annot, addr_annot) -> let cannonical_code = fst @@ Micheline.extract_locations code in Lwt.return @@ parse_toplevel cannonical_code >>=? fun (arg_type, storage_type, code_field) -> trace (Ill_formed_type (Some "parameter", cannonical_code, location arg_type)) (Lwt.return @@ parse_ty ctxt ~allow_big_map:false ~allow_operation:false arg_type) >>=? fun (Ex_ty arg_type, ctxt) -> trace (Ill_formed_type (Some "storage", cannonical_code, location storage_type)) (Lwt.return @@ parse_ty ctxt ~allow_big_map:true ~allow_operation:false storage_type) >>=? 
fun (Ex_ty storage_type, ctxt) -> let arg_annot = default_annot (type_to_var_annot (name_of_ty arg_type)) ~default:default_param_annot in let storage_annot = default_annot (type_to_var_annot (name_of_ty storage_type)) ~default:default_storage_annot in let arg_type_full = Pair_t ((arg_type, None, arg_annot), (storage_type, None, storage_annot), None) in let ret_type_full = Pair_t ((List_t (Operation_t None, None), None, None), (storage_type, None, None), None) in trace (Ill_typed_contract (cannonical_code, [])) (parse_returning (Toplevel { storage_type ; param_type = arg_type }) ctxt ?type_logger (arg_type_full, None) ret_type_full code_field) >>=? fun (Lam ({ bef = Item_t (arg, Empty_t, _) ; aft = Item_t (ret, Empty_t, _) ; _ }, _) as lambda, ctxt) -> Lwt.return @@ ty_eq ctxt arg arg_type_full >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_types ctxt loc arg arg_type_full >>=? fun (_, ctxt) -> Lwt.return @@ ty_eq ctxt ret ret_type_full >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_types ctxt loc ret ret_type_full >>=? fun (_, ctxt) -> Lwt.return @@ ty_eq ctxt storage_type ginit >>=? fun (Eq, ctxt) -> Lwt.return @@ merge_types ctxt loc storage_type ginit >>=? fun (_, ctxt) -> typed ctxt loc (Create_contract (storage_type, arg_type, lambda)) (Item_t (Operation_t None, Item_t (Address_t None, rest, addr_annot), op_annot)) | Prim (loc, I_NOW, [], annot), stack -> parse_var_annot loc annot ~default:default_now_annot >>=? fun annot -> typed ctxt loc Now (Item_t (Timestamp_t None, stack, annot)) | Prim (loc, I_AMOUNT, [], annot), stack -> parse_var_annot loc annot ~default:default_amount_annot >>=? fun annot -> typed ctxt loc Amount (Item_t (Mutez_t None, stack, annot)) | Prim (loc, I_BALANCE, [], annot), stack -> parse_var_annot loc annot ~default:default_balance_annot >>=? fun annot -> typed ctxt loc Balance (Item_t (Mutez_t None, stack, annot)) | Prim (loc, I_HASH_KEY, [], annot), Item_t (Key_t _, rest, _) -> parse_var_annot loc annot >>=? 
fun annot -> typed ctxt loc Hash_key (Item_t (Key_hash_t None, rest, annot)) | Prim (loc, I_CHECK_SIGNATURE, [], annot), Item_t (Key_t _, Item_t (Signature_t _, Item_t (Bytes_t _, rest, _), _), _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Check_signature (Item_t (Bool_t None, rest, annot)) | Prim (loc, I_BLAKE2B, [], annot), Item_t (Bytes_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Blake2b (Item_t (Bytes_t None, rest, annot)) | Prim (loc, I_SHA256, [], annot), Item_t (Bytes_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Sha256 (Item_t (Bytes_t None, rest, annot)) | Prim (loc, I_SHA512, [], annot), Item_t (Bytes_t _, rest, _) -> parse_var_annot loc annot >>=? fun annot -> typed ctxt loc Sha512 (Item_t (Bytes_t None, rest, annot)) | Prim (loc, I_STEPS_TO_QUOTA, [], annot), stack -> parse_var_annot loc annot ~default:default_steps_annot >>=? fun annot -> typed ctxt loc Steps_to_quota (Item_t (Nat_t None, stack, annot)) | Prim (loc, I_SOURCE, [], annot), stack -> parse_var_annot loc annot ~default:default_source_annot >>=? fun annot -> typed ctxt loc Source (Item_t (Address_t None, stack, annot)) | Prim (loc, I_SENDER, [], annot), stack -> parse_var_annot loc annot ~default:default_sender_annot >>=? fun annot -> typed ctxt loc Sender (Item_t (Address_t None, stack, annot)) | Prim (loc, I_SELF, [], annot), stack -> parse_var_annot loc annot ~default:default_self_annot >>=? 
fun annot -> let rec get_toplevel_type : tc_context -> (bef judgement * context) tzresult Lwt.t = function | Lambda -> fail (Self_in_lambda loc) | Dip (_, prev) -> get_toplevel_type prev | Toplevel { param_type ; _ } -> typed ctxt loc (Self param_type) (Item_t (Contract_t (param_type, None), stack, annot)) in get_toplevel_type tc_context (* Primitive parsing errors *) | Prim (loc, (I_DROP | I_DUP | I_SWAP | I_SOME | I_UNIT | I_PAIR | I_CAR | I_CDR | I_CONS | I_MEM | I_UPDATE | I_MAP | I_GET | I_EXEC | I_FAILWITH | I_SIZE | I_CONCAT | I_ADD | I_SUB | I_MUL | I_EDIV | I_OR | I_AND | I_XOR | I_NOT | I_ABS | I_NEG | I_LSL | I_LSR | I_COMPARE | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE | I_TRANSFER_TOKENS | I_CREATE_ACCOUNT | I_CREATE_CONTRACT | I_SET_DELEGATE | I_NOW | I_IMPLICIT_ACCOUNT | I_AMOUNT | I_BALANCE | I_CHECK_SIGNATURE | I_HASH_KEY | I_SOURCE | I_SENDER | I_BLAKE2B | I_SHA256 | I_SHA512 | I_STEPS_TO_QUOTA | I_ADDRESS as name), (_ :: _ as l), _), _ -> fail (Invalid_arity (loc, name, 0, List.length l)) | Prim (loc, (I_NONE | I_LEFT | I_RIGHT | I_NIL | I_MAP | I_ITER | I_EMPTY_SET | I_DIP | I_LOOP | I_LOOP_LEFT | I_CONTRACT as name), ([] | _ :: _ :: _ as l), _), _ -> fail (Invalid_arity (loc, name, 1, List.length l)) | Prim (loc, (I_PUSH | I_IF_NONE | I_IF_LEFT | I_IF_CONS | I_EMPTY_MAP | I_IF as name), ([] | [ _ ] | _ :: _ :: _ :: _ as l), _), _ -> fail (Invalid_arity (loc, name, 2, List.length l)) | Prim (loc, I_LAMBDA, ([] | [ _ ] | _ :: _ :: _ :: _ :: _ as l), _), _ -> fail (Invalid_arity (loc, I_LAMBDA, 3, List.length l)) (* Stack errors *) | Prim (loc, (I_ADD | I_SUB | I_MUL | I_EDIV | I_AND | I_OR | I_XOR | I_LSL | I_LSR | I_CONCAT | I_COMPARE as name), [], _), Item_t (ta, Item_t (tb, _, _), _) -> Lwt.return @@ serialize_ty_for_error ctxt ta >>=? fun (ta, ctxt) -> Lwt.return @@ serialize_ty_for_error ctxt tb >>=? 
fun (tb, _ctxt) -> fail (Undefined_binop (loc, name, ta, tb)) | Prim (loc, (I_NEG | I_ABS | I_NOT | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE as name), [], _), Item_t (t, _, _) -> Lwt.return @@ serialize_ty_for_error ctxt t >>=? fun (t, _ctxt) -> fail (Undefined_unop (loc, name, t)) | Prim (loc, I_UPDATE, [], _), stack -> serialize_stack_for_error ctxt stack >>=? fun (stack, _ctxt) -> fail (Bad_stack (loc, I_UPDATE, 3, stack)) | Prim (loc, I_CREATE_CONTRACT, [], _), stack -> serialize_stack_for_error ctxt stack >>=? fun (stack, _ctxt) -> fail (Bad_stack (loc, I_CREATE_CONTRACT, 7, stack)) | Prim (loc, I_CREATE_ACCOUNT, [], _), stack -> serialize_stack_for_error ctxt stack >>=? fun (stack, _ctxt) -> fail (Bad_stack (loc, I_CREATE_ACCOUNT, 4, stack)) | Prim (loc, I_TRANSFER_TOKENS, [], _), stack -> serialize_stack_for_error ctxt stack >>=? fun (stack, _ctxt) -> fail (Bad_stack (loc, I_TRANSFER_TOKENS, 4, stack)) | Prim (loc, (I_DROP | I_DUP | I_CAR | I_CDR | I_SOME | I_BLAKE2B | I_SHA256 | I_SHA512 | I_DIP | I_IF_NONE | I_LEFT | I_RIGHT | I_IF_LEFT | I_IF | I_LOOP | I_IF_CONS | I_IMPLICIT_ACCOUNT | I_NEG | I_ABS | I_INT | I_NOT | I_HASH_KEY | I_EQ | I_NEQ | I_LT | I_GT | I_LE | I_GE as name), _, _), stack -> serialize_stack_for_error ctxt stack >>=? fun (stack, _ctxt) -> fail (Bad_stack (loc, name, 1, stack)) | Prim (loc, (I_SWAP | I_PAIR | I_CONS | I_GET | I_MEM | I_EXEC | I_CHECK_SIGNATURE | I_ADD | I_SUB | I_MUL | I_EDIV | I_AND | I_OR | I_XOR | I_LSL | I_LSR | I_CONCAT as name), _, _), stack -> serialize_stack_for_error ctxt stack >>=? 
(* Continuation of [parse_instr] (whose head precedes this excerpt):
   the last two-argument stack-arity error arm, then the generic
   fallback for any expression that is not a recognized instruction. *)
fun (stack, _ctxt) ->
  fail (Bad_stack (loc, name, 2, stack))
  (* Generic parsing errors *)
  | expr, _ ->
      fail @@ unexpected expr [ Seq_kind ] Instr_namespace
        [ I_DROP ; I_DUP ; I_SWAP ; I_SOME ; I_UNIT ;
          I_PAIR ; I_CAR ; I_CDR ; I_CONS ;
          I_MEM ; I_UPDATE ; I_MAP ; I_ITER ;
          I_GET ; I_EXEC ; I_FAILWITH ; I_SIZE ;
          I_CONCAT ; I_ADD ; I_SUB ;
          I_MUL ; I_EDIV ; I_OR ; I_AND ; I_XOR ;
          I_NOT ;
          I_ABS ; I_INT; I_NEG ; I_LSL ; I_LSR ;
          I_COMPARE ; I_EQ ; I_NEQ ;
          I_LT ; I_GT ; I_LE ; I_GE ;
          I_TRANSFER_TOKENS ; I_CREATE_ACCOUNT ;
          I_CREATE_CONTRACT ; I_NOW ; I_AMOUNT ; I_BALANCE ;
          I_IMPLICIT_ACCOUNT ; I_CHECK_SIGNATURE ;
          I_BLAKE2B ; I_SHA256 ; I_SHA512 ; I_HASH_KEY ;
          I_STEPS_TO_QUOTA ;
          I_PUSH ; I_NONE ; I_LEFT ; I_RIGHT ; I_NIL ;
          I_EMPTY_SET ; I_DIP ; I_LOOP ;
          I_IF_NONE ; I_IF_LEFT ; I_IF_CONS ;
          I_EMPTY_MAP ; I_IF ; I_SOURCE ; I_SENDER ; I_SELF ; I_LAMBDA ]

(* [parse_contract ctxt loc arg contract] checks that [contract] exists
   on chain and that its parameter type is compatible with [arg]; it
   fails with [Invalid_contract] otherwise.  A contract without a script
   is an implicit account, whose parameter type is [unit].  Typechecking
   gas is consumed for the existence check and for fetching the script. *)
and parse_contract
  : type arg. context -> Script.location -> arg ty -> Contract.t ->
    (context * arg typed_contract) tzresult Lwt.t
  = fun ctxt loc arg contract ->
    Lwt.return @@ Gas.consume ctxt Typecheck_costs.contract_exists >>=? fun ctxt ->
    Contract.exists ctxt contract >>=? function
    | false -> fail (Invalid_contract (loc, contract))
    | true ->
        Lwt.return @@ Gas.consume ctxt Typecheck_costs.get_script >>=? fun ctxt ->
        trace (Invalid_contract (loc, contract)) @@
        Contract.get_script ctxt contract >>=? fun (ctxt, script) ->
        match script with
        | None ->
            (* No script: implicit account, parameter type is [unit]. *)
            Lwt.return
              (ty_eq ctxt arg (Unit_t None) >>? fun (Eq, ctxt) ->
               let contract : arg typed_contract = (arg, contract) in
               ok (ctxt, contract))
        | Some { code ; _ } ->
            (* Originated contract: decode its code and compare the
               declared parameter type against the expected [arg]. *)
            Script.force_decode ctxt code >>=? fun (code, ctxt) ->
            Lwt.return
              (parse_toplevel code >>? fun (arg_type, _, _) ->
               parse_ty ctxt ~allow_big_map:false ~allow_operation:false arg_type >>? fun (Ex_ty targ, ctxt) ->
               ty_eq ctxt targ arg >>? fun (Eq, ctxt) ->
               merge_types ctxt loc targ arg >>?
               fun (arg, ctxt) ->
                 let contract : arg typed_contract = (arg, contract) in
                 ok (ctxt, contract))

(* Same as the one above, but does not fail when the contract is missing
   or if the expected type doesn't match the actual one. In that case
   None is returned and some overapproximation of the typechecking gas
   is consumed. This can still fail on gas exhaustion. *)
and parse_contract_for_script
  : type arg. context -> Script.location -> arg ty -> Contract.t ->
    (context * arg typed_contract option) tzresult Lwt.t
  = fun ctxt loc arg contract ->
    Lwt.return @@ Gas.consume ctxt Typecheck_costs.contract_exists >>=? fun ctxt ->
    Contract.exists ctxt contract >>=? function
    | false -> return (ctxt, None)
    | true ->
        Lwt.return @@ Gas.consume ctxt Typecheck_costs.get_script >>=? fun ctxt ->
        trace (Invalid_contract (loc, contract)) @@
        Contract.get_script ctxt contract >>=? fun (ctxt, script) ->
        match script with (* can only fail because of gas *)
        | None ->
            Lwt.return
              (match ty_eq ctxt arg (Unit_t None) with
               | Ok (Eq, ctxt) ->
                   let contract : arg typed_contract = (arg, contract) in
                   ok (ctxt, Some contract)
               | Error _ ->
                   (* Type mismatch: charge a token amount of gas and
                      report the absence of a usable contract. *)
                   Gas.consume ctxt Typecheck_costs.cycle >>? fun ctxt ->
                   ok (ctxt, None))
        | Some { code ; _ } ->
            Script.force_decode ctxt code >>=? fun (code, ctxt) ->
            (* can only fail because of gas *)
            Lwt.return
              (match parse_toplevel code with
               | Error _ -> error (Invalid_contract (loc, contract))
               | Ok (arg_type, _, _) ->
                   match parse_ty ctxt ~allow_big_map:false ~allow_operation:false arg_type with
                   | Error _ -> error (Invalid_contract (loc, contract))
                   | Ok (Ex_ty targ, ctxt) ->
                       match
                         (ty_eq ctxt targ arg >>? fun (Eq, ctxt) ->
                          merge_types ctxt loc targ arg >>? fun (arg, ctxt) ->
                          let contract : arg typed_contract = (arg, contract) in
                          ok (ctxt, Some contract))
                       with
                       | Ok res -> ok res
                       | Error _ ->
                           (* overapproximation by checking if targ = targ,
                              can only fail because of gas *)
                           ty_eq ctxt targ targ >>? fun (Eq, ctxt) ->
                           merge_types ctxt loc targ targ >>?
fun (_, ctxt) -> ok (ctxt, None)) and parse_toplevel : Script.expr -> (Script.node * Script.node * Script.node) tzresult = fun toplevel -> record_trace (Ill_typed_contract (toplevel, [])) @@ match root toplevel with | Int (loc, _) -> error (Invalid_kind (loc, [ Seq_kind ], Int_kind)) | String (loc, _) -> error (Invalid_kind (loc, [ Seq_kind ], String_kind)) | Bytes (loc, _) -> error (Invalid_kind (loc, [ Seq_kind ], Bytes_kind)) | Prim (loc, _, _, _) -> error (Invalid_kind (loc, [ Seq_kind ], Prim_kind)) | Seq (_, fields) -> let rec find_fields p s c fields = match fields with | [] -> ok (p, s, c) | Int (loc, _) :: _ -> error (Invalid_kind (loc, [ Prim_kind ], Int_kind)) | String (loc, _) :: _ -> error (Invalid_kind (loc, [ Prim_kind ], String_kind)) | Bytes (loc, _) :: _ -> error (Invalid_kind (loc, [ Prim_kind ], Bytes_kind)) | Seq (loc, _) :: _ -> error (Invalid_kind (loc, [ Prim_kind ], Seq_kind)) | Prim (loc, K_parameter, [ arg ], _) :: rest -> begin match p with | None -> find_fields (Some arg) s c rest | Some _ -> error (Duplicate_field (loc, K_parameter)) end | Prim (loc, K_storage, [ arg ], _) :: rest -> begin match s with | None -> find_fields p (Some arg) c rest | Some _ -> error (Duplicate_field (loc, K_storage)) end | Prim (loc, K_code, [ arg ], _) :: rest -> begin match c with | None -> find_fields p s (Some arg) rest | Some _ -> error (Duplicate_field (loc, K_code)) end | Prim (loc, (K_parameter | K_storage | K_code as name), args, _) :: _ -> error (Invalid_arity (loc, name, 1, List.length args)) | Prim (loc, name, _, _) :: _ -> let allowed = [ K_parameter ; K_storage ; K_code ] in error (Invalid_primitive (loc, allowed, name)) in find_fields None None None fields >>? 
function | (None, _, _) -> error (Missing_field K_parameter) | (Some _, None, _) -> error (Missing_field K_storage) | (Some _, Some _, None) -> error (Missing_field K_code) | (Some p, Some s, Some c) -> ok (p, s, c) let parse_script : ?type_logger: type_logger -> context -> Script.t -> (ex_script * context) tzresult Lwt.t = fun ?type_logger ctxt { code ; storage } -> Script.force_decode ctxt code >>=? fun (code, ctxt) -> Script.force_decode ctxt storage >>=? fun (storage, ctxt) -> Lwt.return @@ parse_toplevel code >>=? fun (arg_type, storage_type, code_field) -> trace (Ill_formed_type (Some "parameter", code, location arg_type)) (Lwt.return (parse_ty ctxt ~allow_big_map:false ~allow_operation:false arg_type)) >>=? fun (Ex_ty arg_type, ctxt) -> trace (Ill_formed_type (Some "storage", code, location storage_type)) (Lwt.return (parse_ty ctxt ~allow_big_map:true ~allow_operation:false storage_type)) >>=? fun (Ex_ty storage_type, ctxt) -> let arg_annot = default_annot (type_to_var_annot (name_of_ty arg_type)) ~default:default_param_annot in let storage_annot = default_annot (type_to_var_annot (name_of_ty storage_type)) ~default:default_storage_annot in let arg_type_full = Pair_t ((arg_type, None, arg_annot), (storage_type, None, storage_annot), None) in let ret_type_full = Pair_t ((List_t (Operation_t None, None), None, None), (storage_type, None, None), None) in trace_eval (fun () -> Lwt.return @@ serialize_ty_for_error ctxt storage_type >>|? fun (storage_type, _ctxt) -> Ill_typed_data (None, storage, storage_type)) (parse_data ?type_logger ctxt storage_type (root storage)) >>=? fun (storage, ctxt) -> trace (Ill_typed_contract (code, [])) (parse_returning (Toplevel { storage_type ; param_type = arg_type }) ctxt ?type_logger (arg_type_full, None) ret_type_full code_field) >>=? 
(* Tail of [parse_script]: package the typechecked code, parameter type,
   storage value and storage type into an [ex_script]. *)
fun (code, ctxt) ->
  return (Ex_script { code ; arg_type ; storage ; storage_type }, ctxt)

(* Typechecks only the code of a script, recording for each traced location
   the stack types before and after it; the accumulated [type_map] is
   returned to the caller alongside the updated context. *)
let typecheck_code
  : context -> Script.expr -> (type_map * context) tzresult Lwt.t
  = fun ctxt code ->
    Lwt.return @@ parse_toplevel code >>=? fun (arg_type, storage_type, code_field) ->
    let type_map = ref [] in
    (* TODO: annotation checking *)
    trace
      (Ill_formed_type (Some "parameter", code, location arg_type))
      (Lwt.return (parse_ty ctxt ~allow_big_map:false ~allow_operation:false arg_type))
    >>=? fun (Ex_ty arg_type, ctxt) ->
    trace
      (Ill_formed_type (Some "storage", code, location storage_type))
      (Lwt.return (parse_ty ctxt ~allow_big_map:true ~allow_operation:false storage_type))
    >>=? fun (Ex_ty storage_type, ctxt) ->
    let arg_annot =
      default_annot (type_to_var_annot (name_of_ty arg_type))
        ~default:default_param_annot in
    let storage_annot =
      default_annot (type_to_var_annot (name_of_ty storage_type))
        ~default:default_storage_annot in
    let arg_type_full =
      Pair_t ((arg_type, None, arg_annot),
              (storage_type, None, storage_annot), None) in
    let ret_type_full =
      Pair_t ((List_t (Operation_t None, None), None, None),
              (storage_type, None, None), None) in
    let result =
      parse_returning
        (Toplevel { storage_type ; param_type = arg_type })
        ctxt
        (* The logger side-effects into [type_map]; entries end up in
           reverse order of logging. *)
        ~type_logger: (fun loc bef aft -> type_map := (loc, (bef, aft)) :: !type_map)
        (arg_type_full, None) ret_type_full code_field in
    trace (Ill_typed_contract (code, !type_map)) result >>=? fun (Lam _, ctxt) ->
    return (!type_map, ctxt)

(* Typechecks a standalone data expression against an expected type, both
   given as untyped expressions; only the updated context is returned. *)
let typecheck_data
  : ?type_logger: type_logger -> context -> Script.expr * Script.expr ->
    context tzresult Lwt.t
  = fun ?type_logger ctxt (data, exp_ty) ->
    trace
      (Ill_formed_type (None, exp_ty, 0))
      (Lwt.return @@ parse_ty ctxt ~allow_big_map:true ~allow_operation:false (root exp_ty))
    >>=? fun (Ex_ty exp_ty, ctxt) ->
    trace_eval
      (fun () ->
         Lwt.return @@ serialize_ty_for_error ctxt exp_ty >>|? fun (exp_ty, _ctxt) ->
         Ill_typed_data (None, data, exp_ty))
      (parse_data ?type_logger ctxt exp_ty (root data)) >>=?
fun (_, ctxt) ->
  return ctxt

(* ---- Unparsing (Typed IR -> Untyped expressions) --------------------------*)

(* Converts a typed value back to an untyped Micheline node, consuming gas
   for each constructor. [Optimized] mode favours compact representations
   (binary/zint), [Readable] favours notations (b58check, timestamps). *)
let rec unparse_data
  : type a. context -> unparsing_mode -> a ty -> a ->
    (Script.node * context) tzresult Lwt.t
  = fun ctxt mode ty a ->
    Lwt.return (Gas.consume ctxt Unparse_costs.cycle) >>=? fun ctxt ->
    match ty, a with
    | Unit_t _, () ->
        Lwt.return (Gas.consume ctxt Unparse_costs.unit) >>=? fun ctxt ->
        return (Prim (-1, D_Unit, [], []), ctxt)
    | Int_t _, v ->
        Lwt.return (Gas.consume ctxt (Unparse_costs.int v)) >>=? fun ctxt ->
        return (Int (-1, Script_int.to_zint v), ctxt)
    | Nat_t _, v ->
        Lwt.return (Gas.consume ctxt (Unparse_costs.int v)) >>=? fun ctxt ->
        return (Int (-1, Script_int.to_zint v), ctxt)
    | String_t _, s ->
        Lwt.return (Gas.consume ctxt (Unparse_costs.string s)) >>=? fun ctxt ->
        return (String (-1, s), ctxt)
    | Bytes_t _, s ->
        Lwt.return (Gas.consume ctxt (Unparse_costs.bytes s)) >>=? fun ctxt ->
        return (Bytes (-1, s), ctxt)
    | Bool_t _, true ->
        Lwt.return (Gas.consume ctxt Unparse_costs.bool) >>=? fun ctxt ->
        return (Prim (-1, D_True, [], []), ctxt)
    | Bool_t _, false ->
        Lwt.return (Gas.consume ctxt Unparse_costs.bool) >>=? fun ctxt ->
        return (Prim (-1, D_False, [], []), ctxt)
    | Timestamp_t _, t ->
        Lwt.return (Gas.consume ctxt (Unparse_costs.timestamp t)) >>=? fun ctxt ->
        begin match mode with
          | Optimized -> return (Int (-1, Script_timestamp.to_zint t), ctxt)
          | Readable ->
              (* Fall back to the numeric form when no notation exists. *)
              match Script_timestamp.to_notation t with
              | None -> return (Int (-1, Script_timestamp.to_zint t), ctxt)
              | Some s -> return (String (-1, s), ctxt)
        end
    | Address_t _, c ->
        Lwt.return (Gas.consume ctxt Unparse_costs.contract) >>=? fun ctxt ->
        begin match mode with
          | Optimized ->
              let bytes = Data_encoding.Binary.to_bytes_exn Contract.encoding c in
              return (Bytes (-1, bytes), ctxt)
          | Readable -> return (String (-1, Contract.to_b58check c), ctxt)
        end
    | Contract_t _, (_, c) ->
        Lwt.return (Gas.consume ctxt Unparse_costs.contract) >>=?
(* Continuation of [unparse_data] ([Contract_t] case): [Optimized] mode
   emits the binary encoding, [Readable] the b58check notation. *)
fun ctxt ->
    begin match mode with
      | Optimized ->
          let bytes = Data_encoding.Binary.to_bytes_exn Contract.encoding c in
          return (Bytes (-1, bytes), ctxt)
      | Readable -> return (String (-1, Contract.to_b58check c), ctxt)
    end
| Signature_t _, s ->
    Lwt.return (Gas.consume ctxt Unparse_costs.signature) >>=? fun ctxt ->
    begin match mode with
      | Optimized ->
          let bytes = Data_encoding.Binary.to_bytes_exn Signature.encoding s in
          return (Bytes (-1, bytes), ctxt)
      | Readable ->
          return (String (-1, Signature.to_b58check s), ctxt)
    end
| Mutez_t _, v ->
    Lwt.return (Gas.consume ctxt Unparse_costs.tez) >>=? fun ctxt ->
    return (Int (-1, Z.of_int64 (Tez.to_mutez v)), ctxt)
| Key_t _, k ->
    Lwt.return (Gas.consume ctxt Unparse_costs.key) >>=? fun ctxt ->
    begin match mode with
      | Optimized ->
          let bytes = Data_encoding.Binary.to_bytes_exn Signature.Public_key.encoding k in
          return (Bytes (-1, bytes), ctxt)
      | Readable ->
          return (String (-1, Signature.Public_key.to_b58check k), ctxt)
    end
| Key_hash_t _, k ->
    Lwt.return (Gas.consume ctxt Unparse_costs.key_hash) >>=? fun ctxt ->
    begin match mode with
      | Optimized ->
          let bytes = Data_encoding.Binary.to_bytes_exn Signature.Public_key_hash.encoding k in
          return (Bytes (-1, bytes), ctxt)
      | Readable ->
          return (String (-1, Signature.Public_key_hash.to_b58check k), ctxt)
    end
| Operation_t _, op ->
    (* Operations are always emitted in binary form, whatever the mode. *)
    let bytes = Data_encoding.Binary.to_bytes_exn Operation.internal_operation_encoding op in
    Lwt.return (Gas.consume ctxt (Unparse_costs.operation bytes)) >>=? fun ctxt ->
    return (Bytes (-1, bytes), ctxt)
| Pair_t ((tl, _, _), (tr, _, _), _), (l, r) ->
    Lwt.return (Gas.consume ctxt Unparse_costs.pair) >>=? fun ctxt ->
    unparse_data ctxt mode tl l >>=? fun (l, ctxt) ->
    unparse_data ctxt mode tr r >>=? fun (r, ctxt) ->
    return (Prim (-1, D_Pair, [ l; r ], []), ctxt)
| Union_t ((tl, _), _, _), L l ->
    Lwt.return (Gas.consume ctxt Unparse_costs.union) >>=? fun ctxt ->
    unparse_data ctxt mode tl l >>=?
    fun (l, ctxt) ->
    return (Prim (-1, D_Left, [ l ], []), ctxt)
| Union_t (_, (tr, _), _), R r ->
    Lwt.return (Gas.consume ctxt Unparse_costs.union) >>=? fun ctxt ->
    unparse_data ctxt mode tr r >>=? fun (r, ctxt) ->
    return (Prim (-1, D_Right, [ r ], []), ctxt)
| Option_t ((t, _), _, _), Some v ->
    Lwt.return (Gas.consume ctxt Unparse_costs.some) >>=? fun ctxt ->
    unparse_data ctxt mode t v >>=? fun (v, ctxt) ->
    return (Prim (-1, D_Some, [ v ], []), ctxt)
| Option_t _, None ->
    Lwt.return (Gas.consume ctxt Unparse_costs.none) >>=? fun ctxt ->
    return (Prim (-1, D_None, [], []), ctxt)
| List_t (t, _), items ->
    (* Elements are accumulated in reverse, then the list is reversed. *)
    fold_left_s
      (fun (l, ctxt) element ->
         Lwt.return (Gas.consume ctxt Unparse_costs.list_element) >>=? fun ctxt ->
         unparse_data ctxt mode t element >>=? fun (unparsed, ctxt) ->
         return (unparsed :: l, ctxt))
      ([], ctxt)
      items >>=? fun (items, ctxt) ->
    return (Micheline.Seq (-1, List.rev items), ctxt)
| Set_t (t, _), set ->
    let t = ty_of_comparable_ty t in
    (* [set_fold] produces a reversed list, undone by the reversing fold. *)
    fold_left_s
      (fun (l, ctxt) item ->
         Lwt.return (Gas.consume ctxt Unparse_costs.set_element) >>=? fun ctxt ->
         unparse_data ctxt mode t item >>=? fun (item, ctxt) ->
         return (item :: l, ctxt))
      ([], ctxt)
      (set_fold (fun e acc -> e :: acc) set []) >>=? fun (items, ctxt) ->
    return (Micheline.Seq (-1, items), ctxt)
| Map_t (kt, vt, _), map ->
    let kt = ty_of_comparable_ty kt in
    fold_left_s
      (fun (l, ctxt) (k, v) ->
         Lwt.return (Gas.consume ctxt Unparse_costs.map_element) >>=? fun ctxt ->
         unparse_data ctxt mode kt k >>=? fun (key, ctxt) ->
         unparse_data ctxt mode vt v >>=? fun (value, ctxt) ->
         return (Prim (-1, D_Elt, [ key ; value ], []) :: l, ctxt))
      ([], ctxt)
      (map_fold (fun k v acc -> (k, v) :: acc) map []) >>=? fun (items, ctxt) ->
    return (Micheline.Seq (-1, items), ctxt)
| Big_map_t (_kt, _kv, _), _map ->
    (* Big map contents are not emitted here: an empty sequence stands in. *)
    return (Micheline.Seq (-1, []), ctxt)
| Lambda_t _, Lam (_, original_code) ->
    unparse_code ctxt mode (root original_code)

(* Gas accounting may not be perfect in this function, as it is only called by RPCs.
*)
(* Walks a piece of code, re-parsing then re-unparsing PUSH payloads so the
   pushed data follows the requested [mode]; other nodes are rebuilt as-is
   (with gas charged per rebuilt node). *)
and unparse_code ctxt mode = function
  | Prim (loc, I_PUSH, [ ty ; data ], annot) ->
      Lwt.return (parse_ty ctxt ~allow_big_map:false ~allow_operation:false ty) >>=? fun (Ex_ty t, ctxt) ->
      parse_data ctxt t data >>=? fun (data, ctxt) ->
      unparse_data ctxt mode t data >>=? fun (data, ctxt) ->
      Lwt.return (Gas.consume ctxt (Unparse_costs.prim_cost 2 annot)) >>=? fun ctxt ->
      return (Prim (loc, I_PUSH, [ ty ; data ], annot), ctxt)
  | Seq (loc, items) ->
      fold_left_s
        (fun (l, ctxt) item ->
           unparse_code ctxt mode item >>=? fun (item, ctxt) ->
           return (item :: l, ctxt))
        ([], ctxt) items >>=? fun (items, ctxt) ->
      Lwt.return (Gas.consume ctxt (Unparse_costs.seq_cost (List.length items))) >>=? fun ctxt ->
      return (Micheline.Seq (loc, List.rev items), ctxt)
  | Prim (loc, prim, items, annot) ->
      fold_left_s
        (fun (l, ctxt) item ->
           unparse_code ctxt mode item >>=? fun (item, ctxt) ->
           return (item :: l, ctxt))
        ([], ctxt) items >>=? fun (items, ctxt) ->
      Lwt.return (Gas.consume ctxt (Unparse_costs.prim_cost 3 annot)) >>=? fun ctxt ->
      return (Prim (loc, prim, List.rev items, annot), ctxt)
  | Int _ | String _ | Bytes _ as atom -> return (atom, ctxt)

(* Gas accounting may not be perfect in this function, as it is only called by RPCs. *)
(* Serializes a typed script back to its untyped (lazy-expr) form,
   rebuilding the canonical three-field toplevel structure. *)
let unparse_script ctxt mode { code ; arg_type ; storage ; storage_type } =
  let Lam (_, original_code) = code in
  unparse_code ctxt mode (root original_code) >>=? fun (code, ctxt) ->
  unparse_data ctxt mode storage_type storage >>=? fun (storage, ctxt) ->
  unparse_ty ctxt arg_type >>=? fun (arg_type, ctxt) ->
  unparse_ty ctxt storage_type >>=? fun (storage_type, ctxt) ->
  let open Micheline in
  let code =
    Seq (-1, [ Prim (-1, K_parameter, [ arg_type ], []) ;
               Prim (-1, K_storage, [ storage_type ], []) ;
               Prim (-1, K_code, [ code ], []) ]) in
  Lwt.return
    (Gas.consume ctxt (Unparse_costs.seq_cost 3) >>? fun ctxt ->
     Gas.consume ctxt (Unparse_costs.prim_cost 1 []) >>? fun ctxt ->
     Gas.consume ctxt (Unparse_costs.prim_cost 1 []) >>?
     fun ctxt ->
     Gas.consume ctxt (Unparse_costs.prim_cost 1 [])) >>=? fun ctxt ->
  return ({ code = lazy_expr (strip_locations code) ;
            storage = lazy_expr (strip_locations storage) }, ctxt)

(* Serializes a typed value ([Optimized] mode, annotations stripped) and
   prepends the "\005" prefix byte; gas is charged for both the bare and
   the prefixed serialization. *)
let pack_data ctxt typ data =
  unparse_data ctxt Optimized typ data >>=? fun (data, ctxt) ->
  let unparsed = strip_annotations @@ data in
  let bytes = Data_encoding.Binary.to_bytes_exn expr_encoding (Micheline.strip_locations unparsed) in
  Lwt.return @@ Gas.consume ctxt (Script.serialized_cost bytes) >>=? fun ctxt ->
  let bytes = MBytes.concat "" [ MBytes.of_string "\005" ; bytes ] in
  Lwt.return @@ Gas.consume ctxt (Script.serialized_cost bytes) >>=? fun ctxt ->
  return (bytes, ctxt)

(* Packs a value then hashes the resulting bytes, charging hashing gas. *)
let hash_data ctxt typ data =
  pack_data ctxt typ data >>=? fun (bytes, ctxt) ->
  Lwt.return @@ Gas.consume ctxt (Michelson_v1_gas.Cost_of.hash bytes Script_expr_hash.size) >>=? fun ctxt ->
  return (Script_expr_hash.(hash_bytes [ bytes ]), ctxt)

(* ---------------- Big map -------------------------------------------------*)

(* Membership test: the in-memory [diff] overlay is consulted first; only on
   a miss is the key hashed and looked up in the durable storage. *)
let big_map_mem ctxt contract key { diff ; key_type ; _ } =
  match map_get key diff with
  | None ->
      hash_data ctxt key_type key >>=? fun (hash, ctxt) ->
      Alpha_context.Contract.Big_map.mem ctxt contract hash >>=? fun (ctxt, res) ->
      return (res, ctxt)
  | Some None -> return (false, ctxt) (* binding removed in the overlay *)
  | Some (Some _) -> return (true, ctxt)

(* Lookup with the same overlay-first strategy as [big_map_mem]; values read
   from storage are re-parsed at [value_type]. *)
let big_map_get ctxt contract key { diff ; key_type ; value_type } =
  match map_get key diff with
  | Some x -> return (x, ctxt)
  | None ->
      hash_data ctxt key_type key >>=? fun (hash, ctxt) ->
      Alpha_context.Contract.Big_map.get_opt ctxt contract hash >>=? begin function
        | (ctxt, None) -> return (None, ctxt)
        | (ctxt, Some value) ->
            parse_data ctxt value_type (Micheline.root value) >>=? fun (x, ctxt) ->
            return (Some x, ctxt)
      end

(* Updates only the overlay; a [None] value marks a deletion. *)
let big_map_update key value ({ diff ; _ } as map) =
  { map with diff = map_set key value diff }

(* Turns the in-memory overlay of a big map into a serializable diff: a list
   of (key hash, optional serialized value) pairs. *)
let diff_of_big_map ctxt mode (Ex_bm { key_type ; value_type ; diff }) =
  Lwt.return (Gas.consume ctxt (Michelson_v1_gas.Cost_of.map_to_list diff)) >>=?
  fun ctxt ->
  let pairs = map_fold (fun key value acc -> (key, value) :: acc) diff [] in
  fold_left_s
    (fun (acc, ctxt) (key, value) ->
       Lwt.return (Gas.consume ctxt Typecheck_costs.cycle) >>=? fun ctxt ->
       hash_data ctxt key_type key >>=? fun (hash, ctxt) ->
       begin match value with
         | None -> return (None, ctxt)
         | Some x ->
             begin
               unparse_data ctxt mode value_type x >>=? fun (node, ctxt) ->
               return (Some (Micheline.strip_locations node), ctxt)
             end
       end >>=? fun (value, ctxt) ->
       return ((hash, value) :: acc, ctxt))
    ([], ctxt) pairs

(* Get the big map from a contract's storage if one exists *)
(* Only a big map in the left component of a toplevel pair is recognized. *)
let extract_big_map : type a. a ty -> a -> ex_big_map option = fun ty x ->
  match (ty, x) with
  | Pair_t ((Big_map_t (_, _, _), _, _), _, _), (map, _) -> Some (Ex_bm map)
  | _, _ -> None

(* Re-serializes a script's storage through [unparse_data] (whose big map
   case emits an empty sequence), returning the big map contents separately
   as an optional diff. *)
let erase_big_map_initialization ctxt mode ({ code ; storage } : Script.t) =
  Script.force_decode ctxt code >>=? fun (code, ctxt) ->
  Script.force_decode ctxt storage >>=? fun (storage, ctxt) ->
  Lwt.return @@ parse_toplevel code >>=? fun (_, storage_type, _) ->
  Lwt.return @@ parse_ty ctxt ~allow_big_map:true ~allow_operation:false storage_type >>=? fun (Ex_ty ty, ctxt) ->
  parse_data ctxt ty (Micheline.root storage) >>=? fun (storage, ctxt) ->
  begin match extract_big_map ty storage with
    | None -> return (None, ctxt)
    | Some bm -> diff_of_big_map ctxt mode bm >>=? fun (bm, ctxt) -> return (Some bm, ctxt)
  end >>=? fun (bm, ctxt) ->
  unparse_data ctxt mode ty storage >>=? fun (storage, ctxt) ->
  return ({ code = Script.lazy_expr code ;
            storage = Script.lazy_expr (Micheline.strip_locations storage) }, bm, ctxt)
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
(* moduleUtils.ml *)
(* moduleUtils: helpers shared by the module-resolution passes -- locating
   .links files on disk, collecting per-module metadata, and maintaining the
   name-shadowing tables used when opening modules. *)

open Utility
open Printf
open SourceCode.WithPos
open Sugartypes

(* Paths to look for .links files in chasing pass *)
let links_file_paths =
  Settings.(multi_option ~default:["."] "links_file_paths"
            |> synopsis "Search paths for Links modules"
            |> hint "<dir[,dir']...>"
            |> keep_default
            |> to_string string_of_paths
            |> convert parse_paths
            |> CLI.(add (long "path"))
            |> sync)

(* Should we use the extra standard library definitions? *)
let use_stdlib =
  Settings.(flag ~default:true "use_stdlib"
            |> convert parse_bool
            |> sync)

(* Standard library path *)
let stdlib_path =
  Settings.(option ~default:(Some Linkspath.stdlib) "stdlib_path"
            |> to_string from_string_option
            |> convert Utility.(Sys.expand ->- some)
            |> sync)

(* Separator used when building fully-qualified names (e.g. "A.B.foo"). *)
let module_sep = "."

(* Each table maps a plain name to a stack of fully-qualified candidates,
   innermost (most recently shadowed) first. *)
type term_shadow_table = string list stringmap
type type_shadow_table = string list stringmap
type shadow_table = string list stringmap

(* Resolves [filename] against the stdlib directory (if enabled) and the
   configured search paths, parsing the first existing candidate; raises a
   module error when no candidate file exists. *)
let try_parse_file filename =
  (* First, get the list of directories, with trailing slashes stripped *)
  let check_n_chop path =
    let dir_sep = Filename.dir_sep in
    if Filename.check_suffix path dir_sep then
      Filename.chop_suffix path dir_sep
    else path in

  let poss_stdlib_dir =
    if Settings.get use_stdlib then
      match Settings.get stdlib_path with
      | None ->
          (* Follow the same logic as for the prelude.
           * Firstly, check the current directory.
           * Secondly, check OPAM *)
          let chopped_path = check_n_chop @@ locate_file "stdlib" in
          [Filename.concat chopped_path "stdlib"]
      | Some stdlib_path -> [check_n_chop stdlib_path]
    else [] in

  let poss_dirs =
    let paths = Settings.get links_file_paths in
    (* "" means "try the filename as given". *)
    "" :: poss_stdlib_dir @ (List.map (check_n_chop) paths) in
  (* Loop through, trying to open the module with each path *)
  let rec loop = (function
    | [] -> raise (Errors.module_error ("Could not find file " ^ filename))
    | x :: xs ->
        let candidate_filename =
          if x = "" then filename else (x ^ Filename.dir_sep ^ filename) in
        if Sys.file_exists candidate_filename then
          Parse.parse_file Parse.program candidate_filename
        else loop xs) in
  loop poss_dirs

(* Predicate object: satisfied iff the traversed AST contains no
   module-related construct (Open/Module bindings, qualified type
   applications, or qualified variables). *)
let has_no_modules =
object
  inherit SugarTraversals.predicate as super
  val has_no_modules = true
  method satisfied = has_no_modules

  method! bindingnode = function
    | Open _
    | Module _ -> {< has_no_modules = false >}
    | b -> super#bindingnode b

  method! datatypenode = function
    | Datatype.QualifiedTypeApplication _ -> {< has_no_modules = false >}
    | dt -> super#datatypenode dt

  method! phrasenode = function
    | QualifiedVar _ -> {< has_no_modules = false >}
    | pn -> super#phrasenode pn
end

(* Partitions a binding list into (module bindings, other bindings).
   Note: both result lists come out in reverse order of appearance. *)
let separate_modules =
  List.fold_left (fun (mods, binds) b ->
    match b with
    | {node = Module _; _} as m -> (m :: mods, binds)
    | b -> (mods, b :: binds)) ([], [])

(* Metadata recorded per module, keyed by fully-qualified path. *)
type module_info = {
    simple_name : string; (* Note: not fully-qualified *)
    inner_modules : string list;
    type_names : string list;
    decl_names : string list
}

let make_module_info simple_name inner_modules type_names decl_names =
  { simple_name = simple_name; inner_modules = inner_modules;
    type_names = type_names; decl_names = decl_names }

(* Fold object collecting every variable bound by a pattern. *)
let get_pat_vars () =
object(self)
  inherit SugarTraversals.fold as super
  val bindings = []
  method add_binding x = {< bindings = x :: bindings >}
  method get_bindings = bindings (* Order doesn't matter *)
  method! patternnode = function
    | Pattern.Variant (_n, p_opt) ->
        self#option (fun o p -> o#pattern p) p_opt
    (* | `Negative ns -> self#list (fun o p -> o#add_binding p) ns *)
    | Pattern.Record (ls, p_opt) ->
        let o1 = self#list (fun o (_, p) -> o#pattern p) ls in
        o1#option (fun o p -> o#pattern p) p_opt
    | Pattern.Variable bndr -> self#add_binding (Binder.to_name bndr)
    | p -> super#patternnode p
end

let get_pattern_variables p = ((get_pat_vars ())#pattern p)#get_bindings

(* Gets the list of external FFI files to include *)
let get_ffi_files_obj =
object(self)
  inherit SugarTraversals.fold as super
  val filenames = []
  method add_external_file x =
    (* De-duplicates while preserving first-seen order. *)
    if (List.mem x filenames) then self else {< filenames = x :: filenames >}
  method get_filenames = List.rev filenames
  method! bindingnode = function
    | Foreign alien -> self#add_external_file (Alien.object_file alien)
    | x -> super#bindingnode x
end

let get_ffi_files prog = (get_ffi_files_obj#program prog)#get_filenames

(* Joins a module path and a name with [module_sep]; an empty name yields
   the empty string. *)
let make_path_string xs name =
  if name = "" then "" else
    let xs1 = xs @ [name] in
    String.concat module_sep xs1

(* Need to get data constructors from type declarations *)
let get_data_constructors init_constrs =
object (self)
  inherit SugarTraversals.fold as super
  val constrs = init_constrs
  method add_constr constr = {< constrs = StringSet.add constr constrs >}
  method get_constrs = StringSet.elements constrs
  method! datatypenode = function
    | Datatype.Variant (xs, _) ->
        self#list (fun o (lbl, _) -> o#add_constr lbl) xs
    | dt -> super#datatypenode dt
end

(* Builds a map from fully-qualified module path to [module_info] for every
   module in the program (nested ones included), by recursive traversal. *)
let create_module_info_map program =
  (* Helper functions *)
  let module_map = ref StringMap.empty in
  let add_module_info fq_module_name info =
    let mm = !module_map in
    module_map := StringMap.add fq_module_name info mm in

  let rec create_and_add_module_info parent_path name bindings =
    (* Helper functions: traversing modules, and getting binding names *)
    (* Recursively traverse a list of modules *)
    let rec traverse_modules = function
      | [] -> []
      | {node=Module { binder; members };_} :: bs ->
          (* Recursively process *)
          let submodule_name = Binder.to_name binder in
          let new_path = if name = "" then [] else parent_path @ [name] in
          create_and_add_module_info new_path submodule_name members;
          (* Add the name to the list, process remainder. *)
          submodule_name :: (traverse_modules bs)
      | _bs -> assert false in (* List should only contain modules *)

    (* Getting binding names -- we're interested in function and value names *)
    let rec get_binding_names = function
      | [] -> []
      | { node = Val (pat, _, _, _); _ } :: bs ->
          (get_pattern_variables pat) @ get_binding_names bs
      | { node = Fun fn; _ } :: bs ->
          Binder.to_name fn.fun_binder :: (get_binding_names bs)
      | { node = Funs fs ; _ } :: bs ->
          (List.map (fun fn -> Binder.to_name fn.node.rec_binder) fs) @ get_binding_names bs
      | _ :: bs -> get_binding_names bs in (* Other binding types are uninteresting for this pass *)

    (* Getting type names -- we're interested in typename/effectname decls *)
    let rec get_type_names = function
      | [] -> []
      | b :: bs ->
          match node b with
          | Aliases ts ->
              let ns = ListUtils.concat_map (fun {node=(n, _, _); _} -> [n]) ts in
              ns @ (get_type_names bs)
          | _ -> get_type_names bs in

    (* Gets data constructors for variants *)
    let get_constrs bs =
      ((get_data_constructors StringSet.empty)#list (fun o -> o#binding) bs)#get_constrs in

    (* Next, separate out bindings *)
    let (inner_modules, other_bindings) = separate_modules bindings in
    (* Next, use our helper functions *)
    let inner_module_names = traverse_modules inner_modules in
    let constrs = get_constrs other_bindings in
    let binding_names = get_binding_names other_bindings @ constrs in
    let type_names = get_type_names other_bindings in
    (* Finally, construct the module info, and add to the table. *)
    let path_str = make_path_string parent_path name in
    let mod_info = make_module_info name inner_module_names type_names binding_names in
    add_module_info path_str mod_info in

  (* Toplevel *)
  let (bindings, _) = program in
  create_and_add_module_info [] "" bindings;
  !module_map

(* Debug printers for the module-info table. *)
let print_mod_info k mi =
  printf "MODULE: %s\n" k;
  printf "Inner modules: %s\n" (print_list mi.inner_modules);
  printf "Type names: %s\n" (print_list mi.type_names);
  printf "Decl names: %s\n" (print_list mi.decl_names)

let _print_mt mt =
  printf "MT:\n";
  List.iter (fun (k, mi) -> print_mod_info k mi) (StringMap.bindings mt)

(* Given a binding name and fully-qualified name, adds it to the top of
 * the binding stack in the name shadowing table. For example, shadowing name foo with A.foo,
 * given a table
 * foo |-> [B.foo]
 * bar |-> [A.bar]
 * will result in
 * foo |-> [A.foo, B.foo]
 * bar |-> [A.bar]
 *)
let shadow_binding : string -> string -> (string list) stringmap -> (string list) stringmap =
  fun name fqn ht ->
    try
      let xs = StringMap.find name ht in
      StringMap.add name (fqn :: xs) ht
    with _ ->
      (* NOTE(review): catch-all handler; presumably intended only for the
         "not found" case of [StringMap.find] -- confirm which exception
         Utility's StringMap raises before narrowing it. *)
      StringMap.add name [fqn] ht

(* Given a *fully qualified* module name and a name shadowing table, shadows
 * the appropriate bindings. For example, given a name resolution table:
 * foo |-> [A.foo, foo]
 * bar |-> [bar]
 * and a module B:
 * module B {
 *   module C {
 *     def pines() { .. }
 *   }
 *
 *   def foo() { .. }
 *   val baz = ..
 * }
 * the resulting table would be:
 * foo |-> [B.foo, A.foo, foo]
 * bar |-> [bar]
 * baz |-> [B.baz]
 * B |-> [B]
 * C |-> [B.C]
 *)
let shadow_open module_plain module_fqn module_table term_ht type_ht =
  (* print_mt module_table; *)
  try
    let mod_info = StringMap.find module_fqn module_table in
    (* Shadows bindings in a given table *)
    let shadow_all_bindings lst ht =
      List.fold_left (fun acc plain_binding_name ->
        let fq_binding_name =
          String.concat module_sep (module_fqn :: [plain_binding_name]) in
        shadow_binding plain_binding_name fq_binding_name acc) ht lst in
    (* Shadow both term and type tables *)
    let shadowed_term_ht = shadow_all_bindings mod_info.decl_names term_ht in
    let shadowed_type_ht = shadow_all_bindings mod_info.type_names type_ht in
    (* Next, do the modules *)
    let shadow_modules ht mods =
      List.fold_left (fun acc plain_module_name ->
        let fq_module_name =
          String.concat module_sep (module_fqn :: [plain_module_name]) in
        shadow_binding plain_module_name fq_module_name acc) ht mods in
    let shadowed_term_ht = shadow_modules shadowed_term_ht mod_info.inner_modules in
    let shadowed_type_ht = shadow_modules shadowed_type_ht mod_info.inner_modules in
    (* Finally, need to add this module of course! *)
    let shadowed_term_ht = shadow_binding module_plain module_fqn shadowed_term_ht in
    let shadowed_type_ht = shadow_binding module_plain module_fqn shadowed_type_ht in
    (shadowed_term_ht, shadowed_type_ht)
  with Notfound.NotFound _ ->
    raise (Errors.module_error ("Error: Trying to import nonexistent module " ^ module_plain))

(* Renders a module path (a list of components) as a dotted string. *)
let lst_to_path = String.concat module_sep

let contains_modules prog = not ((has_no_modules#program prog)#satisfied)
(* Parse_dockerfile_tree_sitter.ml *)
(** Mapping from tree-sitter-dockerfile tree-sitter's CST to the Dockerfile
   AST type, which itself includes nodes of the Bash AST.

   Derived from generated code 'dockerfile/lib/Boilerplate.ml'
*)

open! Common
module AST = AST_dockerfile
module CST = Tree_sitter_dockerfile.CST
module PI = Parse_info
open AST_dockerfile
module H = Parse_tree_sitter_helpers

(* This is preferred for debugging since it raises Assert_failures where
   there are bugs. In production, it's best to avoid raising exceptions
   when possible. *)
let strict = true

(*****************************************************************************)
(* Helpers *)
(*****************************************************************************)

(* The 'extra' field indicates:
   - if we're parsing a pattern or a program;
   - the current shell, which can change when encountering a SHELL directive.
*)
type env = (AST_bash.input_kind * shell_compatibility) H.env

let token = H.token
let str = H.str

(* Merges a non-empty run of tokens into a single token (with its text). *)
let concat_tokens first_tok other_toks : string wrap =
  let tok = PI.combine_infos first_tok other_toks in
  (PI.str_of_info tok, tok)

let opt_concat_tokens toks : string wrap option =
  match toks with
  | first_tok :: other_toks -> Some (concat_tokens first_tok other_toks)
  | [] -> None

(* Requires at least one token, which must be guaranteed statically. *)
let unsafe_concat_tokens toks : string wrap =
  match opt_concat_tokens toks with
  | Some res -> res
  | None ->
      if strict then assert false
      else
        (* Best-effort fallback: a fake empty token. *)
        let s = "" in
        (s, PI.unsafe_fake_info s)

(* Collapse consecutive literal string fragments. This is useful to detect
   special fragments that otherwise could get split, such as the ellipsis
   for COPY/ADD that get split into "." and "..".
*)
let simplify_fragments (fragments : string_fragment list) : string_fragment list =
  (* Flushes a run of accumulated literal tokens as one fragment. *)
  let concat toks tail =
    match toks with
    | [] -> tail
    | first :: others ->
        let tok = PI.combine_infos first others in
        String_content (PI.str_of_info tok, tok) :: tail
  in
  let rec simplify acc = function
    | [] -> concat (List.rev acc) []
    | String_content (_, tok) :: xs -> simplify (tok :: acc) xs
    | special :: xs -> concat (List.rev acc) (special :: simplify [] xs)
  in
  simplify [] fragments

(* best effort to extract the name of the shell *)
let classify_shell ((_open, ar, _close) : string_array) : shell_compatibility option =
  let command =
    match ar with
    | Arr_string (_, [ String_content ("/usr/bin/env", _) ])
      :: Arr_string (_loc, [ String_content (name, _) ])
      :: _ ->
        Some name
    | Arr_string (_loc, [ String_content (path, _) ]) :: _ -> Some path
    | _ -> None
  in
  match command with
  | Some ("/bin/bash" | "/bin/sh" | "bash" | "sh") -> Some Sh
  | Some "cmd" -> Some Cmd
  | Some "powershell" -> Some Powershell
  | Some name -> Some (Other name)
  | None -> None

(* True when parsing a Semgrep pattern and the string is a metavariable. *)
let is_metavar (env : env) (x : string wrap) =
  match env.extra with
  | Pattern, _ when AST_generic_.is_metavar_name (fst x) -> true
  | _ -> false

(* Return the position of the first non-blank character, if any.
   This implementation turns out to be simpler than using Pcre. *)
let find_nonblank (s : string) =
  let pos = ref 0 in
  try
    for i = 0 to String.length s - 1 do
      pos := i;
      match s.[i] with
      | ' '
      | '\t'
      | '\r'
      | '\n' ->
          ()
      | _ -> raise Exit
    done;
    None
  with
  | Exit -> Some !pos

(* Drops leading blanks from a wrapped string, splitting the token so the
   remaining location information stays accurate. *)
let remove_blank_prefix (x : string wrap) : string wrap =
  let s, tok = x in
  match find_nonblank s with
  | None -> x
  | Some pos ->
      let _blanks, tok = PI.split_info_at_pos pos tok in
      (PI.str_of_info tok, tok)

(*****************************************************************************)
(* Boilerplate converter *)
(*****************************************************************************)

(* $FOO or ${FOO} expansion of a variable passed at docker build time.

   The scope of an ARG is a stage, i.e. the section between a FROM
   instruction and the next FROM instruction or the end of the file.

   The braceless syntax $FOO, like we do for Bash, is co-opted for Semgrep
   metavariables when parsing a Semgrep pattern. In a pattern, ${FOO} is the
   expansion of the FOO argument while ${$FOO} is the expansion of any
   argument represented by the metavariable $FOO.
*)
let expansion (env : env) ((v1, v2) : CST.expansion) : string_fragment =
  let dollar = token env v1 (* "$" *) in
  match v2 with
  | `Var tok -> (
      let name = str env tok (* pattern [a-zA-Z][a-zA-Z0-9_]* *) in
      let mv_tok = PI.combine_infos dollar [ snd name ] in
      let mv_s = PI.str_of_info mv_tok in
      match env.extra with
      | Pattern, _ when AST_generic_.is_metavar_name mv_s ->
          Frag_semgrep_metavar (mv_s, mv_tok)
      | _ ->
          let loc = (dollar, wrap_tok name) in
          Expansion (loc, Expand_var name))
  | `Imm_tok_lcurl_imm_tok_pat_8713919_imm_tok_rcurl (v1, v2, v3) ->
      let _open = token env v1 (* "{" *) in
      let var_or_mv = str env v2 (* pattern [^\}]+ *) in
      let name, _tok = var_or_mv in
      let expansion =
        match env.extra with
        | Pattern, _ when AST_generic_.is_metavar_name name ->
            Expand_semgrep_metavar var_or_mv
        | _ -> Expand_var var_or_mv
      in
      let close = token env v3 (* "}" *) in
      let loc = (dollar, close) in
      Expansion (loc, expansion)

(* Parses a "--key=value" option. *)
let param (env : env) ((v1, v2, v3, v4) : CST.param) : param =
  let dashdash = token env v1 (* "--" *) in
  let key = str env v2 (* pattern [a-z][-a-z]* *) in
  let equal = token env v3 (* "=" *) in
  let value = str env v4 (* pattern [^\s]+ *) in
  let loc = (dashdash, snd value) in
  (loc, (dashdash, key, equal, value))

(* A port number with optional protocol suffix, or a Semgrep ellipsis. *)
let expose_port (env : env) (x : CST.expose_port) : expose_port =
  match x with
  | `Semg_ellips tok -> Expose_semgrep_ellipsis (token env tok (* "..."
*)) | `Pat_217c202_opt_choice_SLAS (v1, v2) -> let port_tok = token env v1 (* pattern \d+ *) in let protocol = match v2 with | Some x -> let tok = match x with | `SLAS_ce91595 tok -> token env tok (* "/tcp" *) | `SLAS_c773c8d tok -> token env tok (* "/udp" *) in Some (PI.str_of_info tok, tok) | None -> None in let port_num = port_tok |> PI.str_of_info in Expose_port ((port_num, port_tok), protocol) let image_tag (env : env) ((v1, v2) : CST.image_tag) : tok * str = let colon = token env v1 (* ":" *) in let tag = match v2 with | [] -> let loc = (colon, colon) in (loc, []) | fragments -> let fragments = fragments |> Common.map (fun x -> match x with | `Imm_tok_pat_bcfc287 tok -> String_content (str env tok (* pattern [^@\s\$]+ *)) | `Imme_expa x -> expansion env x) |> simplify_fragments in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) in (colon, tag) let image_digest (env : env) ((v1, v2) : CST.image_digest) : tok * str = let at = token env v1 (* "@" *) in let digest = match v2 with | [] -> let loc = (at, at) in (loc, []) | fragments -> let fragments = fragments |> Common.map (fun x -> match x with | `Imm_tok_pat_d2727a0 tok -> String_content (str env tok (* pattern [a-zA-Z0-9:]+ *)) | `Imme_expa x -> expansion env x) |> simplify_fragments in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) in (at, digest) let image_name (env : env) ((x, xs) : CST.image_name) = let first_fragment = match x with | `Pat_8165e5f tok -> String_content (str env tok (* pattern [^@:\s\$-]+ *)) | `Expa x -> expansion env x in let fragments = xs |> Common.map (fun x -> match x with | `Imm_tok_pat_2b37705 tok -> String_content (str env tok (* pattern [^@:\s\$]+ *)) | `Imme_expa x -> expansion env x) in let fragments = first_fragment :: fragments |> simplify_fragments in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) let image_alias (env : env) ((x, xs) : CST.image_alias) : str = let first_fragment = match x with | 
`Pat_9a14b5c tok -> String_content (str env tok) | `Expa x -> expansion env x in let other_fragments = xs |> Common.map (fun x -> match x with | `Imm_tok_pat_9a14b5c tok -> String_content (str env tok (* pattern [-a-zA-Z0-9_]+ *)) | `Imme_expa x -> expansion env x) in let fragments = first_fragment :: other_fragments |> simplify_fragments in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) let immediate_user_name_or_group_fragment (env : env) (x : CST.immediate_user_name_or_group_fragment) : string_fragment = match x with | `Imm_tok_pat_b295287 tok -> String_content (str env tok (* pattern [a-z][-a-z0-9_]* *)) | `Imme_expa x -> expansion env x let immediate_user_name_or_group (env : env) (xs : CST.immediate_user_name_or_group) : str = let fragments = Common.map (immediate_user_name_or_group_fragment env) xs in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) let user_name_or_group (env : env) ((x, xs) : CST.user_name_or_group) : str = let head = match x with | `Pat_b295287 tok -> String_content (str env tok) | `Expa x -> expansion env x in let tail = Common.map (immediate_user_name_or_group_fragment env) xs in let fragments = head :: tail |> simplify_fragments in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) let unquoted_string (env : env) (xs : CST.unquoted_string) : str = let fragments = Common.map (fun x -> match x with | `Imm_tok_pat_24a1611 tok -> String_content (str env tok (* pattern "[^\\s\\n\\\"\\\\\\$]+" *)) | `Imm_tok_bsla tok -> String_content (str env tok (* "\\ " *)) | `Imme_expa x -> expansion env x) xs |> simplify_fragments in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) let path0 (env : env) ((v1, v2) : CST.path) : string_fragment list = let first_fragment = match v1 with | `Pat_1167a92 tok -> String_content (str env tok (* pattern [^-\s\$] *)) | `Expa x -> expansion env x in let more_fragments = Common.map (fun x -> match x with | 
`Imm_tok_pat_0c7fc22 tok -> String_content (str env tok (* pattern [^\s\$]+ *)) | `Imme_expa x -> expansion env x) v2 in first_fragment :: more_fragments |> simplify_fragments let path (env : env) (x : CST.path) : str = let fragments = path0 env x in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) let path_or_ellipsis (env : env) (x : CST.path) : str_or_ellipsis = match (env.extra, path0 env x) with | (Pattern, _), [ String_content ("...", tok) ] -> Str_semgrep_ellipsis tok | _, fragments -> let loc = Loc.of_list string_fragment_loc fragments in Str_str (loc, fragments) let stopsignal_value (env : env) ((x, xs) : CST.stopsignal_value) : str = let first_fragment = match x with | `Pat_441cd81 tok -> String_content (str env tok) | `Expa x -> expansion env x in let other_fragments = Common.map (fun x -> match x with | `Imm_tok_pat_441cd81 tok -> String_content (str env tok (* pattern [A-Z0-9]+ *)) | `Imme_expa x -> expansion env x) xs in let fragments = first_fragment :: other_fragments |> simplify_fragments in let loc = Loc.of_list string_fragment_loc fragments in (loc, fragments) let double_quoted_string (env : env) ((v1, v2, v3) : CST.double_quoted_string) : str = let open_ = str env v1 (* "\"" *) in let contents = Common.map (fun x -> match x with | `Imm_tok_pat_589b0f8 tok -> let s = str env tok (* pattern "[^\"\\n\\\\\\$]+" *) in String_content s | `Esc_seq tok -> let s = str env tok (* escape_sequence *) in String_content s | `Imme_expa x -> expansion env x) v2 in let close = str env v3 (* "\"" *) in let loc = (wrap_tok open_, wrap_tok close) in let fragments = (String_content open_ :: contents) @ [ String_content close ] |> simplify_fragments in (loc, fragments) let shell_fragment (env : env) (xs : CST.shell_fragment) : tok = Common.map (fun x -> match x with | `Pat_b1120d3 tok | `Pat_dea634e tok | `Pat_eda9032 tok -> token env tok) xs |> unsafe_concat_tokens |> snd let image_spec (env : env) ((v1, v2, v3) : CST.image_spec) : image_spec 
= let name = image_name env v1 in let tag = match v2 with | Some x -> Some (image_tag env x) | None -> None in let digest = match v3 with | Some x -> Some (image_digest env x) | None -> None in let loc = let start = str_loc name in let end_ = start in let end_ = match tag with | None -> end_ | Some (_colon, x) -> str_loc x in let end_ = match digest with | None -> end_ | Some (_at, x) -> str_loc x in Loc.range start end_ in { loc; name; tag; digest } let array_element (env : env) (x : CST.array_element) : array_elt = match x with | `Double_quoted_str x -> Arr_string (double_quoted_string env x) | `Semg_ellips tok -> Arr_ellipsis (token env tok) | `Semg_meta tok -> Arr_metavar (str env tok) let string (env : env) (x : CST.anon_choice_double_quoted_str_6b200ac) : str = match x with | `Double_quoted_str x -> double_quoted_string env x | `Unqu_str x -> unquoted_string env x let string_array (env : env) ((v1, v2, v3) : CST.string_array) : Loc.t * string_array = let open_ = token env v1 (* "[" *) in let argv = match v2 with | Some (v1, v2) -> let x0 = array_element env v1 in let xs = Common.map (fun (v1, v2) -> let _comma = token env v1 (* "," *) in let arg = array_element env v2 in arg) v2 in x0 :: xs | None -> [] in let close = token env v3 (* "]" *) in let loc = (open_, close) in (loc, (open_, argv, close)) (* Create the empty token that sits right after a given token. TODO: move this function to Parse_info? *) let empty_token_after tok : tok = match PI.token_location_of_info tok with | Ok loc -> let prev_len = String.length loc.str in let loc = { loc with str = ""; charpos = loc.charpos + prev_len; column = loc.column + prev_len; } in PI.mk_info_of_loc loc | Error _ -> PI.rewrap_str "" tok let env_pair (env : env) (x : CST.env_pair) : label_pair = match x with | `Semg_ellips tok -> Label_semgrep_ellipsis (token env tok (* "..." 
*)) | `Env_key_imm_tok_eq_opt_choice_double_quoted_str (v1, v2, v3) -> let k = Var_ident (str env v1 (* pattern [a-zA-Z][a-zA-Z0-9_]*[a-zA-Z0-9] *)) in let eq = token env v2 (* "=" *) in let v = match v3 with | None -> (* the empty token gives us the correct location which we need even if we returned an empty list of fragments. *) let tok = empty_token_after eq in let loc = (tok, tok) in (loc, [ String_content (PI.str_of_info tok, tok) ]) | Some x -> string env x in let loc = (var_or_metavar_tok k, str_loc v |> snd) in Label_pair (loc, k, eq, v) let spaced_env_pair (env : env) ((v1, v2, v3) : CST.spaced_env_pair) : label_pair = let k = Var_ident (str env v1 (* pattern [a-zA-Z][a-zA-Z0-9_]*[a-zA-Z0-9] *)) in let blank = token env v2 (* pattern \s+ *) in let v = string env v3 in let loc = (var_or_metavar_tok k, str_loc v |> snd) in Label_pair (loc, k, blank, v) let label_pair (env : env) (x : CST.label_pair) : label_pair = match x with | `Semg_ellips tok -> Label_semgrep_ellipsis (token env tok (* "..." *)) | `Choice_semg_meta_imm_tok_eq_choice_double_quoted_str (v1, v2, v3) -> let key = match v1 with | `Semg_meta tok -> Var_semgrep_metavar (str env tok (* pattern \$[A-Z_][A-Z_0-9]* *)) | `Pat_4128122 tok -> Var_ident (str env tok (* pattern [-a-zA-Z0-9\._]+ *)) in let eq = token env v2 (* "=" *) in let value = string env v3 in let loc = (var_or_metavar_tok key, str_loc value |> snd) in Label_pair (loc, key, eq, value) (* hack to obtain correct locations when parsing a string extracted from a larger file. *) let shift_locations (str, tok) = let line (* 0-based *) = max 0 (PI.line_of_info tok - 1) (* 1-based *) in let column (* 0-based *) = max 0 (PI.col_of_info tok) in String.make line '\n' ^ String.make column ' ' ^ str (* A plain ellipsis such as '...' (not e.g. '...;') is identified so that we can treat it as special dockerfile syntax rather than bash syntax. Alternatively, this can be done be extending the tree-sitter-dockerfile grammar. 
*) let is_plain_ellipsis = let rex = SPcre.regexp "\\A[ \t\r\n]*[.]{3}[ \t\r\n]*\\z" in fun s -> match SPcre.pmatch ~rex s with | Ok res -> res | Error _err -> false type ellipsis_or_bash = | Semgrep_ellipsis of tok | Bash of AST_bash.blist option let parse_bash (env : env) shell_cmd : ellipsis_or_bash = let input_kind, _ = env.extra in match input_kind with | Pattern when is_plain_ellipsis (fst shell_cmd) -> Semgrep_ellipsis (snd shell_cmd) | _ -> let ts_res = H.wrap_parser (fun () -> let str = shift_locations shell_cmd in Tree_sitter_bash.Parse.string str) (fun cst -> let bash_env : Parse_bash_tree_sitter.env = { env with extra = input_kind } in Parse_bash_tree_sitter.program bash_env ~tok:(snd shell_cmd) cst) in (* TODO: don't ignore tree-sitter parsing errors. See Parsing_result module of ocaml-tree-sitter-core. *) Bash ts_res.program (* This is for reconstructing a shell snippet and preserve line/column location. *) let comment_line (env : env) (((hash_tok, comment_tok), backslash_tok) : CST.comment_line) : tok = let tok = PI.combine_infos (token env hash_tok) [ token env comment_tok; token env backslash_tok ] in (* TODO: the token called backslash_tok should be a newline according to the grammar, not a backslash. Looks like a bug in the parser. We have to add the newline here to end the comment and get correct line locations. *) PI.tok_add_s "\n" tok let shell_command (env : env) (x : CST.shell_command) = match x with | `Semg_ellips tok -> Command_semgrep_ellipsis (token env tok) | `Rep_comm_line_shell_frag_rep_requ_line_cont_rep_comm_line_shell_frag (v1, v2, v3) -> ( (* Stitch back the fragments together, then parse using the correct shell language. *) let _comment_lines = Common.map (comment_line env) v1 in let first_frag = shell_fragment env v2 in let more_frags = v3 |> Common.map (fun (v1, comment_lines, v3) -> (* Keep the line continuation so as to preserve the original locations when parsing the shell command. 
Warning: dockerfile line continuation character may be different than '\'. Since we reinject a line continuation into the shell code to preserve locations, we must ensure that we inject a backslash, not whatever dockerfile is using. *) let dockerfile_line_cont = (* dockerfile's line continuation character without \n *) token env v1 in let shell_line_cont = (* we would omit this if it weren't for preserving line numbers *) PI.rewrap_str "\\\n" dockerfile_line_cont in let comment_lines = Common.map (comment_line env) comment_lines in let shell_frag = shell_fragment env v3 in (shell_line_cont :: comment_lines) @ [ shell_frag ]) |> List.flatten in let raw_shell_code = concat_tokens first_frag more_frags in let _, shell_compat = env.extra in match shell_compat with | Sh -> ( match parse_bash env raw_shell_code with | Semgrep_ellipsis tok -> Command_semgrep_ellipsis tok | Bash (Some bash_program) -> let loc = wrap_loc raw_shell_code in Sh_command (loc, bash_program) | Bash None -> Other_shell_command (Sh, raw_shell_code)) | (Cmd | Powershell | Other _) as shell -> Other_shell_command (shell, raw_shell_code)) let argv_or_shell (env : env) (x : CST.anon_choice_str_array_878ad0b) = match x with | `Str_array x -> let loc, ar = string_array env x in Argv (loc, ar) | `Shell_cmd x -> shell_command env x let mount_param_param (env : env) ((v1, v2, v3) : CST.mount_param_param) = let key = str env v1 in let _eq = token env v2 in let value = str env v3 in let loc = (snd key, snd value) in (loc, key, value) let rec unsafe_list_last = function | [] -> assert false | [ x ] -> x | _ :: xs -> unsafe_list_last xs let mount_param (env : env) ((v1, v2, v3, v4, v5) : CST.mount_param) = let start = (* "--" *) token env v1 in let mount = str env v2 in let _eq = token env v3 in let param1 = mount_param_param env v4 in let params = Common.map (fun (v1, v2) -> let _comma = token env v1 in let kv = mount_param_param env v2 in kv) v5 in let params = param1 :: params in let (_, end_), _k, _v = 
unsafe_list_last params in let loc = (start, end_) in Mount_param (loc, mount, params) let runlike_instruction (env : env) name params cmd = let name = str env name (* RUN, CMD, ... *) in let params = Common.map (fun x -> match x with | `Param x -> Param (param env x) | `Mount_param x -> mount_param env x) params in let cmd = argv_or_shell env cmd in let _, end_ = argv_or_shell_loc cmd in let loc = (wrap_tok name, end_) in (loc, name, params, cmd) let rec instruction (env : env) (x : CST.instruction) : env * instruction = match x with | `Semg_ellips tok -> (* "..." *) (env, Instr_semgrep_ellipsis (token env tok)) | `Semg_meta tok -> (* pattern \$[A-Z_][A-Z_0-9]* *) (env, Instr_semgrep_metavar (str env tok)) | `Choice_from_inst x -> ( match x with | `From_inst (v1, v2, v3, v4) -> let name = str env v1 (* pattern [fF][rR][oO][mM] *) in let loc = let tok = snd name in (tok, tok) in let param, loc = match v2 with | Some x -> let param = param env x in (Some param, Loc.range loc (param_loc param)) | None -> (None, loc) in let image_spec = image_spec env v3 in let loc = Loc.range loc (image_spec_loc image_spec) in let alias, loc = match v4 with | Some (v1, v2) -> let as_ = token env v1 (* pattern [aA][sS] *) in let alias = image_alias env v2 in (Some (as_, alias), Loc.union loc (str_loc alias)) | None -> (None, loc) in (env, From (loc, name, param, image_spec, alias)) | `Run_inst (v1, v2, v3) -> let loc, name, params, cmd = runlike_instruction (env : env) v1 v2 v3 in (env, Run (loc, name, params, cmd)) | `Cmd_inst (v1, v2) -> let loc, name, params, cmd = runlike_instruction (env : env) v1 [] v2 in (env, Cmd (loc, name, params, cmd)) | `Label_inst (v1, v2) -> let name = str env v1 (* pattern [lL][aA][bB][eE][lL] *) in let label_pairs = Common.map (label_pair env) v2 in let loc = Loc.of_list label_pair_loc label_pairs in let loc = Loc.extend loc (snd name) in (env, Label (loc, name, label_pairs)) | `Expose_inst (v1, v2) -> let name = str env v1 (* pattern 
[eE][xX][pP][oO][sS][eE] *) in let port_protos = Common.map (fun x -> match x with | `Expose_port x -> expose_port env x | `Expa x -> Expose_fragment (expansion env x)) v2 in let _, end_ = Loc.of_list expose_port_loc port_protos in let loc = (wrap_tok name, end_) in (env, Expose (loc, name, port_protos)) | `Env_inst (v1, v2) -> let name = str env v1 (* pattern [eE][nN][vV] *) in let pairs = match v2 with | `Rep1_env_pair xs -> Common.map (env_pair env) xs | `Spaced_env_pair x -> [ spaced_env_pair env x ] in let _, end_ = Loc.of_list label_pair_loc pairs in let loc = (wrap_tok name, end_) in (env, Env (loc, name, pairs)) | `Add_inst (v1, v2, v3, v4) -> let name = str env v1 (* pattern [aA][dD][dD] *) in let param = match v2 with | Some x -> Some (param env x) | None -> None in let src = v3 |> Common.map (fun (v1, v2) -> let _blank = token env v2 (* pattern [\t ]+ *) in path_or_ellipsis env v1) in let dst = path env v4 in let loc = (wrap_tok name, str_loc dst |> snd) in (env, Add (loc, name, param, src, dst)) | `Copy_inst (v1, v2, v3, v4) -> (* COPY is the same as ADD but with less magic in the interpretation of the arguments. 
See https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#add-or-copy *) let name = str env v1 (* pattern [cC][oO][pP][yY] *) in let param = match v2 with | Some x -> Some (param env x) | None -> None in let src = v3 |> Common.map (fun (v1, v2) -> let _blank = token env v2 (* pattern [\t ]+ *) in path_or_ellipsis env v1) in let dst = path env v4 in let loc = (wrap_tok name, str_loc dst |> snd) in (env, Copy (loc, name, param, src, dst)) | `Entr_inst (v1, v2) -> let loc, name, _params, cmd = runlike_instruction (env : env) v1 [] v2 in (env, Entrypoint (loc, name, cmd)) | `Volume_inst (v1, v2) -> let name = str env v1 (* pattern [vV][oO][lL][uU][mM][eE] *) in let args = match v2 with | `Str_array x -> let loc, ar = string_array env x in Array (loc, ar) | `Path_rep_non_nl_whit_path (v1, v2) -> let path0 = path_or_ellipsis env v1 in let paths = Common.map (fun (v1, v2) -> let _blank = token env v1 (* pattern [\t ]+ *) in path_or_ellipsis env v2) v2 in let paths = path0 :: paths in let loc = Loc.of_list str_or_ellipsis_loc paths in Paths (loc, paths) in let loc = Loc.extend (array_or_paths_loc args) (wrap_tok name) in (env, Volume (loc, name, args)) | `User_inst (v1, v2, v3) -> let name = str env v1 (* pattern [uU][sS][eE][rR] *) in let user = user_name_or_group env v2 in let end_ = str_loc user |> snd in let opt_group, end_ = match v3 with | Some (v1, v2) -> let colon = token env v1 (* ":" *) in let group = immediate_user_name_or_group env v2 in (Some (colon, group), str_loc group |> snd) | None -> (None, end_) in let loc = (wrap_tok name, end_) in (env, User (loc, name, user, opt_group)) | `Work_inst (v1, v2) -> let name = str env v1 (* pattern [wW][oO][rR][kK][dD][iI][rR] *) in let dir = path env v2 in let loc = (wrap_tok name, str_loc dir |> snd) in (env, Workdir (loc, name, dir)) | `Arg_inst (v1, v2, v3) -> let name = str env v1 (* pattern [aA][rR][gG] *) in let key = match v2 with | `Semg_meta tok -> Var_semgrep_metavar (str env tok (* pattern 
\$[A-Z_][A-Z_0-9]* *)) | `Pat_4de4cb9 tok -> Var_ident (str env tok (* pattern [a-zA-Z0-9_]+ *)) in let loc = (wrap_tok name, var_or_metavar_tok key) in let opt_value, loc = match v3 with | Some (v1, v2) -> let eq = token env v1 (* "=" *) in let value = string env v2 in (Some (eq, value), Loc.extend loc (str_loc value |> snd)) | None -> (None, loc) in (env, Arg (loc, name, key, opt_value)) | `Onbu_inst (v1, v2) -> let name = str env v1 (* pattern [oO][nN][bB][uU][iI][lL][dD] *) in let _env, instr = instruction env v2 in let _, end_ = instruction_loc instr in let loc = (wrap_tok name, end_) in (env, Onbuild (loc, name, instr)) | `Stop_inst (v1, v2) -> let name = str env v1 (* pattern [sS][tT][oO][pP][sS][iI][gG][nN][aA][lL] *) in let signal = stopsignal_value env v2 in let loc = (wrap_tok name, str_loc signal |> snd) in (env, Stopsignal (loc, name, signal)) | `Heal_inst (v1, v2) -> let name = str env v1 (* pattern [hH][eE][aA][lL][tT][hH][cC][hH][eE][cC][kK] *) in let arg = match v2 with | `Semg_meta tok -> Healthcheck_semgrep_metavar (str env tok (* pattern \$[A-Z_][A-Z_0-9]* *)) | `NONE tok -> Healthcheck_none (token env tok (* "NONE" *)) | `Rep_param_cmd_inst (v1, (name (* CMD *), args)) -> let params = Common.map (param env) v1 in let params_loc = Loc.of_list param_loc params in let cmd_loc, name, run_params, args = runlike_instruction env name [] args in let loc = Loc.range params_loc cmd_loc in Healthcheck_cmd (loc, params, (cmd_loc, name, run_params, args)) in let loc = Loc.extend (healthcheck_loc arg) (wrap_tok name) in (env, Healthcheck (loc, name, arg)) | `Shell_inst (v1, v2) -> let ((_, start_tok) as name) = str env v1 (* pattern [sS][hH][eE][lL][lL] *) in let cmd_loc, cmd = string_array env v2 in let env = match classify_shell cmd with | None -> env | Some shell_compat -> let input_kind, _cur_shell = env.extra in { env with extra = (input_kind, shell_compat) } in let _, end_tok = cmd_loc in let loc = (start_tok, end_tok) in (env, Shell (loc, name, cmd)) 
| `Main_inst (v1, v2) -> (* deprecated feature *) let name = str env v1 (* pattern [mM][aA][iI][nN][tT][aA][iI][nN][eE][rR] *) in let maintainer_data = str env v2 (* pattern .* *) in let maintainer = remove_blank_prefix maintainer_data in let loc = (wrap_tok name, wrap_tok maintainer) in let string_or_mv = if is_metavar env maintainer then Str_semgrep_metavar maintainer else Str_string maintainer in (env, Maintainer (loc, name, string_or_mv)) | `Cross_build_inst (v1, v2) -> (* undocumented *) let name = str env v1 (* pattern [cC][rR][oO][sS][sS]_[bB][uU][iI][lL][dD][a-zA-Z_]* *) in let data = str env v2 (* pattern .* *) in let loc = (wrap_tok name, wrap_tok data) in (env, Cross_build_xxx (loc, name, data))) let source_file (env : env) (xs : CST.source_file) = let _env, instrs = List.fold_left (fun (env, instrs) (v1, v2) -> let acc = match v1 with | `Inst x -> let env, instr = instruction env x in (env, instr :: instrs) | `Comm tok -> let _comment = (* pattern #.* *) token env tok in (env, instrs) in let _newline = token env v2 (* "\n" *) in acc) (env, []) xs in List.rev instrs (*****************************************************************************) (* Entry point *) (*****************************************************************************) let parse file = let input_kind = AST_bash.Program in H.wrap_parser (fun () -> Tree_sitter_dockerfile.Parse.file file) (fun cst -> let env = { H.file; conv = H.line_col_to_pos file; extra = (input_kind, Sh) } in let dockerfile_ast = source_file env cst in Dockerfile_to_generic.(program Program dockerfile_ast)) let ensure_trailing_newline str = if str <> "" then let len = String.length str in match str.[len - 1] with | '\n' -> str | _ -> str ^ "\n" else str let parse_dockerfile_pattern str = let input_kind = AST_bash.Pattern in H.wrap_parser (fun () -> (* tree-sitter-dockerfile parser requires a trailing newline. Not sure if it's intentional but we add one to simplify user experience. 
*) str |> ensure_trailing_newline |> Tree_sitter_dockerfile.Parse.string) (fun cst -> let file = "<pattern>" in let env = { H.file; conv = Hashtbl.create 0; extra = (input_kind, Sh) } in let dockerfile_ast = source_file env cst in Dockerfile_to_generic.any input_kind dockerfile_ast) let parse_pattern str = let dockerfile_res = parse_dockerfile_pattern str in if dockerfile_res.errors =*= [] then dockerfile_res else let bash_res = Parse_bash_tree_sitter.parse_pattern str in if bash_res.errors =*= [] then bash_res else dockerfile_res
(** Mapping from tree-sitter-dockerfile tree-sitter's CST to the Dockerfile AST type, which itself includes nodes of the Bash AST. Derived from generated code 'dockerfile/lib/Boilerplate.ml' *)
dune
; generated by drom from package skeleton 'library' (library (name autofonce_lib) (public_name autofonce_lib) (wrapped true) (libraries ocplib_stuff ez_file ez_cmdliner autofonce_share autofonce_patch autofonce_m4 autofonce_core autofonce_config ANSITerminal ) ) (rule (targets version.ml) (deps (:script version.mlt) package.toml) (action (with-stdout-to %{targets} (run %{ocaml} unix.cma %{script})))) (documentation (package autofonce_lib))
diff.mli
type 'a t = [ `Updated of 'a * 'a | `Removed of 'a | `Added of 'a ]
[@@deriving irmin]
(** The type for representing differences between values: [`Updated (old, new)]
    when a binding changed, [`Removed old] when it disappeared, and
    [`Added new] when it appeared. *)
(* * Copyright (c) 2013-2021 Thomas Gazagnaire <thomas@gazagnaire.org> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
monomial.ml
module Make (Literal : Literal.S) = struct
  module LiteralMap = Map.Make (Literal)

  (* A monomial is a finite map from literals (variables) to their strictly
     positive integer exponents.  The invariant "every stored exponent is
     > 0" is maintained by all constructors below; a literal with exponent 0
     is simply absent from the map. *)
  type t = int LiteralMap.t

  (* Raised by [to_string] if the invariant above is ever broken. *)
  exception Monomial_has_non_positive_exponent of (Literal.t * int)

  (* Raised when a caller tries to set a negative exponent. *)
  exception Monomial_set_negative_exponent of (Literal.t * int)

  (* Re-exports of the underlying map operations. *)
  let iter = LiteralMap.iter
  let fold = LiteralMap.fold
  let for_all = LiteralMap.for_all
  let exists = LiteralMap.exists
  let filter = LiteralMap.filter
  let partition = LiteralMap.partition
  let cardinal = LiteralMap.cardinal
  let bindings = LiteralMap.bindings
  let map = LiteralMap.map
  let mapi = LiteralMap.mapi
  let to_seq = LiteralMap.to_seq

  (** [is_exp_nonneg var exp] raises [Monomial_set_negative_exponent] if
      [exp < 0], and otherwise returns whether [exp] is strictly positive
      (i.e. whether the binding should be kept in the map). *)
  let is_exp_nonneg var exp =
    if exp < 0 then raise (Monomial_set_negative_exponent (var, exp)) ;
    exp > 0

  (* Drop zero exponents (raising on negative ones) from a sequence of
     bindings. *)
  let filter_seq = Seq.filter (fun (var, exp) -> is_exp_nonneg var exp)

  let add_seq s = filter_seq s |> LiteralMap.add_seq
  let of_seq s = filter_seq s |> LiteralMap.of_seq
  let of_map = LiteralMap.filter is_exp_nonneg

  (* The representation already is a map, so this is the identity. *)
  let to_map m = m

  (* The unit monomial (all exponents zero) is the empty map. *)
  let one = LiteralMap.empty
  let is_one = LiteralMap.is_empty

  (** [singleton var exp] is the monomial [var^exp]; it is [one] when
      [exp = 0] (and raises when [exp < 0]). *)
  let singleton var exp =
    if is_exp_nonneg var exp then LiteralMap.singleton var exp else one

  (** [set_exponent var exp m] sets the exponent of [var] in [m] to [exp],
      removing the binding entirely when [exp = 0]. *)
  let set_exponent var exp =
    if is_exp_nonneg var exp then LiteralMap.add var exp
    else LiteralMap.remove var

  let update var f =
    (* For convenience, we want [f] to be [int -> int], so we create a
       function [g : int option -> int option] to pass to [update]. [None] is
       0 in that case. *)
    let g x =
      let fx = f (Option.value x ~default:0) in
      if is_exp_nonneg var fx then Some fx else None
    in
    LiteralMap.update var g

  (** [union f a b] merges [a] and [b]; [f var ea eb] decides the exponent of
      a literal bound in both, and the result binding is dropped when that
      exponent is 0 (raises when it is negative). *)
  let union f a b =
    let g var x y =
      let fx = f var x y in
      if is_exp_nonneg var fx then Some fx else None
    in
    LiteralMap.union g a b

  let remove = LiteralMap.remove

  (* [of_literal var] is the degree-1 monomial [var]. *)
  let of_literal var = singleton var 1

  (* The (sorted) list of literals appearing with non-zero exponent. *)
  let get_support m = List.map fst (bindings m)

  (** Total order on monomials: lexicographic comparison of the sorted
      binding lists, comparing literals first and exponents second; a strict
      prefix is smaller. *)
  let compare m1 m2 =
    let rec aux lb1 lb2 =
      match (lb1, lb2) with
      | ([], []) -> 0
      | ([], _) -> -1
      | (_, []) -> 1
      | ((k1, e1) :: t1, (k2, e2) :: t2) -> (
          match Literal.compare k1 k2 with
          | 0 -> if e1 = e2 then aux t1 t2 else e1 - e2
          | c -> c)
    in
    aux (bindings m1) (bindings m2)

  let equal = LiteralMap.equal Int.equal
  let neq a b = not (equal a b)

  (** [deg var m] is the exponent of [var] in [m], 0 when absent. *)
  let deg var m = Option.value (LiteralMap.find_opt var m) ~default:0

  (* Monomial product: exponents of common literals are added.  Since both
     inputs only store positive exponents, the sums stay positive. *)
  let mul : t -> t -> t =
    let aux_union _ e1 e2 = Some (e1 + e2) in
    LiteralMap.union aux_union

  (** Formal derivative with respect to [var]: [deriv var m] is [None] when
      [var] does not occur in [m], and otherwise [Some (exp, m')] where [exp]
      is the brought-down exponent and [m'] is [m] with the exponent of
      [var] decremented. *)
  let deriv var m : (int * t) option =
    match deg var m with
    | 0 -> None
    | 1 -> Some (1, remove var m)
    | exp -> Some (exp, set_exponent var (exp - 1) m)

  (* Fast exponentiation for K: binary (square-and-multiply) powering in the
     multiplicative monoid [K]; [pow k x 0 = K.one]. *)
  let pow (type a) (module K : Algebra.Mul_Monoid_S with type t = a) (x : a)
      (exp : int) : a =
    let rec pow_aux x exp =
      if exp = 0 then K.one
      else if exp = 1 then x
      else
        let exp_2 = exp / 2 in
        let px = pow_aux x exp_2 in
        let px = K.mul px px in
        if exp mod 2 = 0 then px else K.mul px x
    in
    pow_aux x exp

  (** [apply (module K) m spec] partially evaluates [m] at the assignment
      [spec] (a map from literals to [K] values): each literal of [m] bound
      in [spec] is replaced by its value raised to its exponent.  Returns the
      accumulated scalar coefficient together with the residual monomial on
      the literals not bound in [spec]. *)
  let apply (type a) (module K : Algebra.Mul_Monoid_S with type t = a) m spec :
      a * t =
    let apply_single m var value =
      match deg var m with
      | 0 -> (K.one, m)
      | exp -> (pow (module K) value exp, remove var m)
    in
    (* A fold on [spec]. Its bindings are applied sequentially *)
    LiteralMap.fold
      (fun var value (coef, m_acc) ->
        let (a_coef, m_acc) = apply_single m_acc var value in
        (K.mul coef a_coef, m_acc))
      spec
      (K.one, m)

  (** Pretty printing of integer exponents as Unicode superscript digits.
      Digits outside 0-9 map to the empty string (unreachable from
      [exp_to_string]). *)
  let digit_to_string = function
    | 0 -> "⁰"
    | 1 -> "¹"
    | 2 -> "²"
    | 3 -> "³"
    | 4 -> "⁴"
    | 5 -> "⁵"
    | 6 -> "⁶"
    | 7 -> "⁷"
    | 8 -> "⁸"
    | 9 -> "⁹"
    | _ -> ""

  (* Renders [n] in superscript; non-positive numbers render as "". *)
  let rec exp_to_string n =
    if n <= 0 then "" else exp_to_string (n / 10) ^ digit_to_string (n mod 10)

  (** [to_string m] renders the monomial, e.g. "xy²"; the unit monomial is
      rendered "1ₘ".  Exponent 1 is left implicit.
      @raise Monomial_has_non_positive_exponent if the representation
      invariant is broken. *)
  let to_string m =
    if is_one m then "1ₘ"
    else
      let aux_map (var, exp) =
        if exp < 1 then raise (Monomial_has_non_positive_exponent (var, exp))
        else if exp = 1 then Literal.to_string var
        else Printf.sprintf "%s%s" (Literal.to_string var) (exp_to_string exp)
      in
      String.concat "" (List.map aux_map (bindings m))

  (* Infix aliases for product and (in)equality of monomials. *)
  module Infix = struct
    let ( * ) = mul
    let ( = ) = equal
    let ( <> ) = neq
  end
end
invariant.mli
include Invariant_intf.Invariant (** @inline *)
include Invariant_intf.Invariant (** @inline *)
watch.mli
(** {1 Watch Helpers} *)

(** The signature for watch helpers. *)
module type S = sig
  (** {1 Watch Helpers} *)

  type key
  (** The type for store keys. *)

  type value
  (** The type for store values. *)

  type watch
  (** The type for watch handlers. *)

  type t
  (** The type for watch state. *)

  val stats : t -> int * int
  (** [stats t] is a tuple [(k,a)] representing watch stats. [k] is the
      number of single key watchers for the store [t] and [a] the number of
      global watchers for [t]. *)

  val notify : t -> key -> value option -> unit Lwt.t
  (** Notify all listeners in the given watch state that a key has changed,
      with the new value associated to this key. [None] means the key has
      been removed. *)

  val v : unit -> t
  (** Create a watch state. *)

  val clear : t -> unit Lwt.t
  (** Clear all registered listeners in the given watch state. *)

  val watch_key :
    t -> key -> ?init:value -> (value Diff.t -> unit Lwt.t) -> watch Lwt.t
  (** Watch a given key for changes. More efficient than {!watch}. *)

  val watch :
    t -> ?init:(key * value) list -> (key -> value Diff.t -> unit Lwt.t) -> watch Lwt.t
  (** Add a watch handler. To watch a specific key, use {!watch_key} which is
      more efficient. *)

  val unwatch : t -> watch -> unit Lwt.t
  (** Remove a watch handler. *)

  val listen_dir :
    t ->
    string ->
    key:(string -> key option) ->
    value:(key -> value option Lwt.t) ->
    (unit -> unit Lwt.t) Lwt.t
  (** Register a thread looking for changes in the given directory and return
      a function to stop watching and free up resources. *)
end

val workers : unit -> int
(** [workers ()] is the number of background worker threads managing event
    notification currently active. *)

type hook = int -> string -> (string -> unit Lwt.t) -> (unit -> unit Lwt.t) Lwt.t
(** The type for watch hooks. *)

val none : hook
(** [none] is the hook which asserts false. *)

val set_listen_dir_hook : hook -> unit
(** Register a function which looks for file changes in a directory and
    return a function to stop watching.
It is probably best to use {!Irmin_watcher.hook} there. By default, it uses {!none}. *) (** [Make] builds an implementation of watch helpers. *) module Make (K : Type.S) (V : Type.S) : S with type key = K.t and type value = V.t
(* * Copyright (c) 2017 Thomas Gazagnaire <thomas@gazagnaire.org> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
t-zeta_series.c
#include "acb_poly.h"

/* Randomized test for acb_poly_zeta_series.
 *
 * For random truncated power series S (with a randomly perturbed constant
 * coefficient) and random base point a, it checks that:
 *   1. consistency: results computed to two different lengths/precisions
 *      overlap after truncation to the common length;
 *   2. the Hurwitz recurrence zeta(s,a) = zeta(s,a+1) + a^(-s), where
 *      a^(-s) is evaluated as exp(-s*log(a));
 *   3. aliasing: zeta_series(S, S, ...) agrees with the non-aliased call.
 */
int main()
{
    slong iter;
    flint_rand_t state;

    flint_printf("zeta_series....");
    fflush(stdout);

    flint_randinit(state);

    for (iter = 0; iter < 400 * arb_test_multiplier(); iter++)
    {
        slong m, n1, n2, bits1, bits2, bits3;
        int deflate;
        acb_poly_t S, A, B, C, D, E;
        acb_t a, a1;

        bits1 = 2 + n_randint(state, 200);
        bits2 = 2 + n_randint(state, 200);
        bits3 = 2 + n_randint(state, 200);

        m = 1 + n_randint(state, 25);
        n1 = 1 + n_randint(state, 25);
        n2 = 1 + n_randint(state, 25);

        acb_poly_init(S);
        acb_poly_init(A);
        acb_poly_init(B);
        acb_poly_init(C);
        acb_poly_init(D);
        acb_poly_init(E);
        acb_init(a);
        acb_init(a1);

        deflate = n_randint(state, 2);

        /* Random s-series with a precise random constant coefficient. */
        acb_poly_randtest(S, state, m, bits1, 3);
        arb_randtest_precise(acb_realref(a), state, bits1, 3);
        arb_randtest_precise(acb_imagref(a), state, bits1, 3);
        acb_poly_set_coeff_acb(S, 0, a);

        /* Base point: either random or exactly 1. */
        if (n_randint(state, 2))
            acb_randtest(a, state, bits1, 3);
        else
            acb_one(a);

        acb_poly_zeta_series(A, S, a, deflate, n1, bits2);
        acb_poly_zeta_series(B, S, a, deflate, n2, bits3);

        /* Check consistency of the two computations on the common length. */
        acb_poly_set(C, A);
        acb_poly_truncate(C, FLINT_MIN(n1, n2));
        acb_poly_truncate(B, FLINT_MIN(n1, n2));

        if (!acb_poly_overlaps(B, C))
        {
            flint_printf("FAIL\n\n");
            flint_printf("S = "); acb_poly_printd(S, 15); flint_printf("\n\n");
            flint_printf("a = "); acb_printd(a, 15); flint_printf("\n\n");
            flint_printf("A = "); acb_poly_printd(A, 15); flint_printf("\n\n");
            flint_printf("B = "); acb_poly_printd(B, 15); flint_printf("\n\n");
            flint_abort();
        }

        /* check zeta(s,a) = zeta(s,a+1) + a^(-s) */
        acb_poly_set_acb(D, a);
        acb_poly_log_series(D, D, n1, bits2);
        acb_poly_mullow(D, D, S, n1, bits2);
        acb_poly_neg(D, D);
        acb_poly_exp_series(D, D, n1, bits2);  /* D = a^(-s) = exp(-s*log(a)) */

        acb_add_ui(a1, a, 1, bits2);
        acb_poly_zeta_series(E, S, a1, deflate, n1, bits2);
        acb_poly_add(E, E, D, bits2);

        if (!acb_poly_overlaps(A, E))
        {
            flint_printf("FAIL (functional equation)\n\n");
            flint_printf("S = "); acb_poly_printd(S, 15); flint_printf("\n\n");
            flint_printf("a = "); acb_printd(a, 15); flint_printf("\n\n");
            flint_printf("A = "); acb_poly_printd(A, 15); flint_printf("\n\n");
            /* BUGFIX: previously printed A here, hiding the actual value of E. */
            flint_printf("E = "); acb_poly_printd(E, 15); flint_printf("\n\n");
            flint_abort();
        }

        /* Aliased call must agree with the non-aliased result. */
        acb_poly_zeta_series(S, S, a, deflate, n1, bits2);
        if (!acb_poly_overlaps(A, S))
        {
            flint_printf("FAIL (aliasing)\n\n");
            flint_abort();
        }

        acb_poly_clear(S);
        acb_poly_clear(A);
        acb_poly_clear(B);
        acb_poly_clear(C);
        acb_poly_clear(D);
        acb_poly_clear(E);
        acb_clear(a);
        acb_clear(a1);
    }

    flint_randclear(state);
    flint_cleanup();
    flint_printf("PASS\n");
    return EXIT_SUCCESS;
}
/* Copyright (C) 2013 Fredrik Johansson This file is part of Arb. Arb is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <http://www.gnu.org/licenses/>. */
lambda.ml
open UtilsLib
open Abstract_syntax

(** De Bruijn representation of (linear) lambda-terms and their types, with
    pretty-printing, substitution, beta-normalization and eta-long forms.
    Non-linear and linear variables use two independent de Bruijn indexings
    ([Var]/[Abs] vs. [LVar]/[LAbs]). *)
module Lambda =
struct

  exception Not_yet_implemented

  type kind =
    | Type
    | Depend of stype * kind            (* the kind of a dependant type *)

  and stype =
    | Atom of int                       (* atomic type *)
    | DAtom of int                      (* defined atomic type *)
    | LFun of stype * stype             (* linear functional type *)
    | Fun of stype * stype              (* non linear functional type *)
    | Dprod of string * stype * stype   (* dependant product *)
    | Record of int * stype list        (* records *)
    | Variant of int * stype list       (* variants *)
    | TAbs of string * stype            (* type abstraction *)
    | TApp of stype * term              (* type application *)

  and term =
    | Var of int                        (* lambda variable *)
    | LVar of int                       (* linear lambda variable *)
    | Const of int                      (* constant *)
    | DConst of int                     (* defined constant *)
    | Abs of string * term              (* lambda-abstraction *)
    | LAbs of string * term             (* linear lambda abstraction *)
    | App of term * term                (* application *)
    | Rcons of int * term list          (* record constructor: the int is the
                                           tag of the corresponding type *)
    | Proj of int * int * term          (* projection: type tag, then the
                                           index of the projection *)
    | Vcons of int * int * term         (* variant constructor: type tag,
                                           then the constructor number *)
    | Case of int * term * (string * term) list
                                        (* case analysis: the int is the tag
                                           of the corresponding type *)
    | Unknown of int                    (* meta-variable - used in
                                           higher-order matching *)

  type env = (int * string) list
  type consts = int -> Abstract_syntax.syntactic_behavior * string

  let env_to_string e =
    Utils.string_of_list ", " (fun (i,s) -> Printf.sprintf "%d:%s" i s) e

  (* Returns [x], primed as many times as needed so that it clashes with no
     name already bound in either environment. *)
  let rec generate_var_name x (l_env, env) =
    if List.exists (fun (_,s) -> x=s) (l_env @ env) then
      generate_var_name (Printf.sprintf "%s'" x) (l_env, env)
    else
      x

  (* Collect consecutive linear abstractions, extending the linear
     environment; returns the bound names, the next level and the body. *)
  let rec unfold_labs acc level (l_env, env) = function
    | LAbs (x,t) ->
       let x' = generate_var_name x (l_env, env) in
       unfold_labs ((level,x')::acc) (level+1) ((level,x')::l_env,env) t
    | t -> acc,level,t

  (* Same as [unfold_labs] for non-linear abstractions. *)
  let rec unfold_abs acc level (l_env, env) = function
    | Abs (x,t) ->
       let x' = generate_var_name x (l_env, env) in
       unfold_abs ((level,x')::acc) (level+1) (l_env,(level,x')::env) t
    | t -> acc,level,t

  (* Flatten a spine of applications: returns the arguments (innermost
     first in [acc]) and the head. *)
  let rec unfold_app acc = function
    | App (t1,t2) -> unfold_app (t2::acc) t1
    | t -> acc,t

  let is_binder id id_to_sym =
    match id_to_sym id with
    | Abstract_syntax.Binder,_ -> true
    | _ -> false

  let is_infix id id_to_sym =
    match id_to_sym id with
    | Abstract_syntax.Infix _ ,_ -> true
    | _ -> false

  let is_prefix id id_to_sym =
    match id_to_sym id with
    | (Abstract_syntax.Prefix|Abstract_syntax.Default),_ -> true
    | _ -> false

  (* Collect the successive (linear or not) variables bound through the same
     syntactic binder constant [binder], so that "BINDER x y z. t" can be
     displayed in one go.
     NOTE(review): only [Const i] heads are matched here while the printers
     also accept [DConst] binders — confirm DConst binders never reach this
     point unfolded. *)
  let rec unfold_binder binder l_level level id_to_sym acc (l_env, env) = function
    | App (Const i,LAbs(x,u)) when (is_binder i id_to_sym)&&(i=binder) ->
       let x' = generate_var_name x (l_env, env) in
       unfold_binder binder (l_level+1) level id_to_sym
         ((l_level,(x',Abstract_syntax.Linear))::acc)
         ((l_level,x')::l_env,env) u
    | App (Const i,Abs(x,u)) when (is_binder i id_to_sym)&&(i=binder) ->
       let x' = generate_var_name x (l_env, env) in
       unfold_binder binder l_level (level+1) id_to_sym
         ((level,(x',Abstract_syntax.Non_linear))::acc)
         (l_env,(level,x')::env) u
    | t -> acc,l_level,level,t

  (* [s] came with a flag telling whether it may stand alone; parenthesize
     it when it may not. *)
  let parenthesize (s,b) =
    match b with
    | true -> s
    | false -> Printf.sprintf "(%s)" s

  let left_paren = function
    | true -> "("
    | false -> ""

  let right_paren = function
    | true -> ")"
    | false -> ""

  (** Render a type as a string; [id_to_sym] resolves atomic type ids. *)
  let type_to_string ty id_to_sym =
    let rec type_to_string_aux ty =
      match ty with
      | Atom i -> snd (id_to_sym i),true
      | DAtom i -> snd (id_to_sym i),true
      | LFun (ty1,ty2) ->
         Printf.sprintf "%s -> %s"
           (parenthesize (type_to_string_aux ty1))
           (fst (type_to_string_aux ty2)),
         false
      | Fun (ty1,ty2) ->
         Printf.sprintf "%s => %s"
           (parenthesize (type_to_string_aux ty1))
           (fst (type_to_string_aux ty2)),
         false
      | _ -> failwith "Not yet implemented" in
    fst (type_to_string_aux ty)

  (** Render a kind as a string. *)
  let kind_to_string k id_to_sym =
    let rec kind_to_string_aux = function
      | Type -> "type"
      | Depend (ty,k') ->
         let k_str = kind_to_string_aux k' in
         Printf.sprintf "(%s)%s" (type_to_string ty id_to_sym) k_str in
    kind_to_string_aux k

  (** Pretty-print a type on [fmter] using Format boxes. *)
  let type_to_formatted_string fmter ty id_to_sym =
    let rec type_to_string_aux ty paren =
      match ty with
      | Atom i -> Utils.fformat fmter "@[%s@]" (snd (id_to_sym i))
      | DAtom i -> Utils.fformat fmter "@[%s@]" (snd (id_to_sym i))
      | LFun (ty1,ty2) ->
         let () = Utils.fformat fmter "@[<2>@[%s" (left_paren paren) in
         let () = type_to_string_aux ty1 true in
         let () = Utils.fformat fmter " ->@[<2>@ @[<2>@[" in
         let () = type_to_string_aux ty2 true in
         Utils.fformat fmter "@]@]@]@,%s@]@]" (right_paren paren)
      | Fun (ty1,ty2) ->
         let () = Utils.fformat fmter "@[<2>%s@[" (left_paren paren) in
         let () = type_to_string_aux ty1 true in
         let () = Utils.fformat fmter " =>@[<2>@ @[<2>@[" in
         let () = type_to_string_aux ty2 true in
         Utils.fformat fmter "@]@]@]@,%s@]@]" (right_paren paren)
      | _ -> failwith "Not yet implemented" in
    let () = type_to_string_aux ty false in
    ()
  (* Utils.fformat fmter "@]" *)

  (** Pretty-print a kind on [fmter]. *)
  let kind_to_formatted_string fmter k id_to_sym =
    let rec kind_to_string_aux = function
      | Type -> Utils.fformat fmter "@[type@]"
      | Depend (ty,k') ->
         let () = Utils.fformat fmter "@[(" in
         let () = type_to_formatted_string fmter ty id_to_sym in
         let () = Utils.fformat fmter ")@]" in
         kind_to_string_aux k' in
    kind_to_string_aux k

  (** Pretty-print a term on [fmter], grouping consecutive abstractions and
      rendering binder/infix constants with their concrete syntax. *)
  let term_to_formatted_string fmter t id_to_sym =
    let rec term_to_string_aux t paren l_level level (l_env,env) =
      match t with
      | Var i -> Utils.fformat fmter "@[%s@]" (List.assoc (level - 1 - i) env)
      | LVar i -> Utils.fformat fmter "@[%s@]" (List.assoc (l_level - 1 - i) l_env)
      | Const i ->
         let _,x = id_to_sym i in
         Utils.fformat fmter "@[%s@]" x
      | DConst i ->
         let _,x = id_to_sym i in
         Utils.fformat fmter "@[%s@]" x
      | Abs (x,t) ->
         let x' = generate_var_name x (l_env, env) in
         let vars,l,u = unfold_abs [level,x'] (level+1) (l_env,(level,x')::env) t in
         let () = Utils.fformat fmter "@[@[%s@[<3>Lambda " (left_paren paren) in
         let () = Utils.format_of_list fmter " " (fun (_,x) -> x) (List.rev vars) in
         let () = Utils.fformat fmter ".@ @[@[" in
         let _ = term_to_string_aux u false l_level l (l_env,vars@env) in
         Utils.fformat fmter "@]@]@]@,%s@]@]" (right_paren paren)
      | LAbs (x,t) ->
         let x' = generate_var_name x (l_env, env) in
         let vars,l,u = unfold_labs [l_level,x'] (l_level+1) ((l_level,x')::l_env,env) t in
         let () = Utils.fformat fmter "@[@[%s@[<3>lambda " (left_paren paren) in
         let () = Utils.format_of_list fmter " " (fun (_,x) -> x) (List.rev vars) in
         let () = Utils.fformat fmter ".@ @[@[" in
         let () = term_to_string_aux u false l level ((vars@l_env),env) in
         Utils.fformat fmter "@]@]@]@,%s@]@]" (right_paren paren)
      | App((Const s|DConst s),Abs(x,u)) when is_binder s id_to_sym ->
         let x' = generate_var_name x (l_env, env) in
         let vars,l_l,l,u =
           unfold_binder s l_level (level+1) id_to_sym
             [level,(x',Abstract_syntax.Non_linear)] (l_env,(level,x')::env) u in
         let new_env =
           List.fold_right
             (fun (l,(x,abs)) (l_acc,acc) ->
                match abs with
                | Abstract_syntax.Non_linear -> l_acc,(l,x)::acc
                | Abstract_syntax.Linear -> (l,x)::l_acc,acc)
             vars
             (l_env,env) in
         let () =
           Utils.fformat fmter "@[@[%s@[<3>%s " (left_paren paren)
             (let _,const = id_to_sym s in const) in
         let () = Utils.format_of_list fmter " " (fun (_,(x,_)) -> x) (List.rev vars) in
         let () = Utils.fformat fmter ".@ @[@[" in
         let _ = term_to_string_aux u false l_l l new_env in
         Utils.fformat fmter "@]@]@]@,%s@]@]" (right_paren paren)
      | App((Const s|DConst s),LAbs(x,u)) when is_binder s id_to_sym ->
         let x' = generate_var_name x (l_env, env) in
         let vars,l_l,l,u =
           unfold_binder s (l_level+1) level id_to_sym
             [l_level,(x',Abstract_syntax.Linear)] ((l_level,x')::l_env,env) u in
         let new_env =
           List.fold_right
             (fun (l,(x,abs)) (l_acc,acc) ->
                match abs with
                | Abstract_syntax.Non_linear -> l_acc,(l,x)::acc
                | Abstract_syntax.Linear -> (l,x)::l_acc,acc)
             vars
             (l_env,env) in
         let () =
           Utils.fformat fmter "@[@[%s@[<3>%s " (left_paren paren)
             (let _,const = id_to_sym s in const) in
         let () = Utils.format_of_list fmter " " (fun (_,(x,_)) -> x) (List.rev vars) in
         let () = Utils.fformat fmter ".@ @[@[" in
         let _ = term_to_string_aux u false l_l l new_env in
         Utils.fformat fmter "@]@]@]@,%s@]@]" (right_paren paren)
      | App(App((Const s|DConst s),t1),t2) when is_infix s id_to_sym ->
         let () = Utils.fformat fmter "@[<2>@[%s@[" (left_paren paren) in
         let () = term_to_string_aux t1 true l_level level (l_env,env) in
         let () = Utils.fformat fmter "@]@ %s@ @[<2>@[" (let _,const=id_to_sym s in const) in
         let () = term_to_string_aux t2 true l_level level (l_env,env) in
         Utils.fformat fmter "@]@]@,%s@]@]" (right_paren paren)
      | App(t1,t2) ->
         let args,t11 = unfold_app [t2] t1 in
         let () = Utils.fformat fmter "@[@[%s@[<2>@[" (left_paren paren) in
         let () = term_to_string_aux t11 true l_level level (l_env,env) in
         let () = Utils.fformat fmter "@]@[" in
         let () =
           List.iter
             (fun arg ->
                let () = Utils.fformat fmter "@ @[" in
                let () = term_to_string_aux arg true l_level level (l_env,env) in
                Utils.fformat fmter "@]")
             args in
         Utils.fformat fmter "@]@]@,%s@]@]" (right_paren paren)
      | _ -> failwith "Not yet implemented" in
    let () = term_to_string_aux t false 0 0 ([],[]) in
    (* FIXME: I can't see where I forgot to close this box! *)
    (* let () = Utils.fformat fmter "@]" in *)
    ()

  (** Render a term as a string (non-formatted counterpart of
      [term_to_formatted_string]). *)
  let term_to_string t id_to_sym =
    let rec term_to_string_aux t l_level level (l_env,env) =
      match t with
      | Var i -> List.assoc (level - 1 - i) env,true
      | LVar i -> List.assoc (l_level - 1 - i) l_env,true
      | Const i -> let _,x = id_to_sym i in x,true
      | DConst i -> let _,x = id_to_sym i in x,true
      | Abs (x,t) ->
         let x' = generate_var_name x (l_env, env) in
         let vars,l,u = unfold_abs [level,x'] (level+1) (l_env,(level,x')::env) t in
         Printf.sprintf "Lambda %s. %s"
           (Utils.string_of_list " " (fun (_,x) -> x) (List.rev vars))
           (let str,_ = term_to_string_aux u l_level l (l_env,(vars@env)) in str),
         false
      | LAbs (x,t) ->
         let x' = generate_var_name x (l_env, env) in
         let vars,l,u = unfold_labs [l_level,x'] (l_level+1) ((l_level,x')::l_env,env) t in
         Printf.sprintf "lambda %s. %s"
           (Utils.string_of_list " " (fun (_,x) -> x) (List.rev vars))
           (let str,_ = term_to_string_aux u l level ((vars@l_env),env) in str),
         false
      | App((Const s|DConst s),Abs(x,u)) when is_binder s id_to_sym ->
         let x' = generate_var_name x (l_env, env) in
         let vars,l_l,l,u =
           unfold_binder s l_level (level+1) id_to_sym
             [level,(x',Abstract_syntax.Non_linear)] (l_env,(level,x')::env) u in
         let new_env =
           List.fold_right
             (fun (l,(x,abs)) (l_acc,acc) ->
                match abs with
                | Abstract_syntax.Non_linear -> l_acc,(l,x)::acc
                | Abstract_syntax.Linear -> (l,x)::l_acc,acc)
             vars
             (l_env,env) in
         Printf.sprintf "%s %s. %s"
           (let _,const = id_to_sym s in const)
           (Utils.string_of_list " " (fun (_,(x,_)) -> x) (List.rev vars))
           (let str,_ = term_to_string_aux u l_l l new_env in str),
         false
      | App((Const s|DConst s),LAbs(x,u)) when is_binder s id_to_sym ->
         let x' = generate_var_name x (l_env, env) in
         let vars,l_l,l,u =
           unfold_binder s (l_level+1) level id_to_sym
             [l_level,(x',Abstract_syntax.Linear)] ((l_level,x')::l_env,env) u in
         let new_env =
           List.fold_right
             (fun (l,(x,abs)) (l_acc,acc) ->
                match abs with
                | Abstract_syntax.Non_linear -> l_acc,(l,x)::acc
                | Abstract_syntax.Linear -> (l,x)::l_acc,acc)
             vars
             (l_env,env) in
         Printf.sprintf "%s %s. %s"
           (let _,const = id_to_sym s in const)
           (Utils.string_of_list " " (fun (_,(x,_)) -> x) (List.rev vars))
           (let str,_ = term_to_string_aux u l_l l new_env in str),
         false
      | App(App((Const s|DConst s),t1),t2) when is_infix s id_to_sym ->
         Printf.sprintf "%s %s %s"
           (parenthesize (term_to_string_aux t1 l_level level (l_env,env)))
           (let _,const=id_to_sym s in const)
           (parenthesize (term_to_string_aux t2 l_level level (l_env,env))),
         false
      | App(t1,t2) ->
         let args,t11 = unfold_app [t2] t1 in
         Printf.sprintf "%s %s"
           (parenthesize (term_to_string_aux t11 l_level level (l_env,env)))
           (Utils.string_of_list " "
              (fun x -> parenthesize (term_to_string_aux x l_level level (l_env,env)))
              args),
         false
      | _ -> failwith "Not yet implemented" in
    let s = fst (term_to_string_aux t 0 0 ([],[])) in
    s

  (* Raw (de Bruijn) printers, mostly for debugging. *)
  let rec raw_to_string_aux = function
    | Var i -> Printf.sprintf "(nl: %d)" i,true
    | LVar i -> Printf.sprintf "(l:%d)" i,true
    | (Const i | DConst i) -> Printf.sprintf "[%d]" i,true
    | Abs (_,t) -> Printf.sprintf "Lambda.%s" (fst (raw_to_string_aux t)),false
    | LAbs (_,t) -> Printf.sprintf "lambda.%s" (fst (raw_to_string_aux t)),false
    | App (t,u) ->
       Printf.sprintf "%s %s"
         (parenthesize (raw_to_string_aux t))
         (parenthesize (raw_to_string_aux u)),
       false
    | _ -> raise Not_yet_implemented

  let raw_to_string t = fst (raw_to_string_aux t)

  (* Emit a term as OCaml source building the same value. *)
  let rec raw_to_caml = function
    | Var i -> Printf.sprintf "(Var %d)" i
    | LVar i -> Printf.sprintf "(LVar %d)" i
    | Const i -> Printf.sprintf "(Const %d)" i
    | DConst i -> Printf.sprintf "(DConst %d)" i
    | Abs (x,t) -> Printf.sprintf "(Abs (\"%s\",%s))" x (raw_to_caml t)
    | LAbs (x,t) -> Printf.sprintf "(LAbs (\"%s\",%s))" x (raw_to_caml t)
    | App (t,u) -> Printf.sprintf "(App (%s,%s))" (raw_to_caml t) (raw_to_caml u)
    | _ -> raise Not_yet_implemented

  let rec raw_type_to_string_aux = function
    | Atom i -> Printf.sprintf "(%d)" i,true
    | DAtom i -> Printf.sprintf "[%d]" i,true
    | LFun (alpha,beta) ->
       Printf.sprintf "%s -> %s"
         (parenthesize (raw_type_to_string_aux alpha))
         (parenthesize (raw_type_to_string_aux beta)),
       false
    | Fun (alpha,beta) ->
       Printf.sprintf "%s => %s"
         (parenthesize (raw_type_to_string_aux alpha))
         (fst (raw_type_to_string_aux beta)),
       false
    | _ -> failwith "Bug: Not yet implemented"

  let raw_type_to_string t = fst (raw_type_to_string_aux t)

  let rec raw_type_to_caml = function
    | Atom i -> Printf.sprintf "(Atom %d)" i
    | DAtom i -> Printf.sprintf "(DAtom %d)" i
    | LFun (alpha,beta) ->
       Printf.sprintf "(LFun (%s,%s))" (raw_type_to_caml alpha) (raw_type_to_caml beta)
    | Fun (alpha,beta) ->
       Printf.sprintf "(Fun (%s,%s))" (raw_type_to_caml alpha) (raw_type_to_caml beta)
    | _ -> failwith "Bug: Not yet implemented"

  (* [is_linear tm] true if the lambda-term [tm] is such  *)
  (* that "x" occurs linearly in "lambda x. tm", i.e.,    *)
  (* the linear abstraction [LAbs ("x",tm)] satisfies     *)
  (* the linearity constraint.                            *)
  let is_linear tm =
    let rec lin_occur n tm =
      match tm with
      | Var _ -> false
      | LVar m -> m = n
      | Const _ -> false
      | Abs (_, t) -> lin_occur n t
      | LAbs (_, t) -> lin_occur (n+1) t
      | App (t1, t2) -> (lin_occur n t1) <> (lin_occur n t2)
      | _ -> raise Not_yet_implemented in
    lin_occur 0 tm
  [@@warning "-32"]

  (* [is_lclosed tm] true if the lambda-term [tm] does not *)
  (* contain any free linear variable.                     *)
  let is_lclosed tm =
    let rec lclosed n tm =
      match tm with
      | Var _ -> true
      | LVar m -> m < n
      | Const _ -> true
      | Unknown _ -> true
      | Abs (_, t) -> lclosed n t
      | LAbs (_, t) -> lclosed (n+1) t
      | App (t1, t2) -> (lclosed n t1) && (lclosed n t2)
      | _ -> raise Not_yet_implemented in
    lclosed 0 tm
  [@@warning "-32"]

  (* de Bruijn's indices lifting: free linear indices are shifted by [l_i],
     free non-linear indices by [nl_i]. *)
  let lift l_i nl_i tm =
    let rec lift_aux l_level nl_level tm =
      match tm with
      | Var i -> if i < nl_level then tm else Var (i + nl_i)
      | LVar i -> if i < l_level then tm else LVar (i + l_i)
      | Const _ -> tm
      | Unknown _ -> tm
      | Abs (x, t) -> Abs (x, lift_aux l_level (nl_level + 1) t)
      | LAbs (x, t) -> LAbs (x, lift_aux (l_level + 1) nl_level t)
      | App (t1, t2) -> App (lift_aux l_level nl_level t1, lift_aux l_level nl_level t2)
      | _ -> raise Not_yet_implemented in
    lift_aux 0 0 tm

  (* substitution of a non-linear variable tm1 [x:=tm2] *)
  let var_subst tm1 tm2 =
    let rec subst l_level nl_level tm =
      match tm with
      | Var i ->
         if i = nl_level then lift l_level nl_level tm2
         else if i < nl_level then tm
         else Var (i-1)
      | LVar _ -> tm
      | Const _ -> tm
      | Unknown _ -> tm
      | Abs (x, t) -> Abs (x, subst l_level (nl_level + 1) t)
      | LAbs (x, t) -> LAbs (x, subst (l_level + 1) nl_level t)
      | App (t1, t2) -> App (subst l_level nl_level t1, subst l_level nl_level t2)
      | _ -> raise Not_yet_implemented in
    subst 0 0 tm1

  (* substitution of a linear variable tm1 [x:=tm2] *)
  let lvar_subst tm1 tm2 =
    let rec subst l_level nl_level tm =
      match tm with
      | Var _ -> tm
      | LVar i ->
         if i = l_level then lift l_level nl_level tm2
         else if i < l_level then tm
         else LVar (i-1)
      | Const _ -> tm
      | Unknown _ -> tm
      | Abs (x, t) -> Abs (x, subst l_level (nl_level + 1) t)
      | LAbs (x, t) -> LAbs (x, subst (l_level + 1) nl_level t)
      | App (t1, t2) -> App (subst l_level nl_level t1, subst l_level nl_level t2)
      | _ -> raise Not_yet_implemented in
    subst 0 0 tm1

  (* substitution of a term in a type "ty [x:=tm]" *)
  (* tm cannot contain any free linear variable    *)
  let subst_in_type ty tm =
    let rec subst_tm level tm1 =
      match tm1 with
      | Var i ->
         if i = level then lift 0 level tm
         (* BUGFIX: the untouched cases below previously returned [tm] (the
            substituent) instead of [tm1] (the traversed sub-term), so every
            bound variable, linear variable, constant and meta-variable was
            wrongly replaced by the substituted term. *)
         else if i < level then tm1
         else Var (i-1)
      | LVar _ -> tm1
      | Const _ -> tm1
      | Unknown _ -> tm1
      | Abs (x, t) -> Abs (x, subst_tm (level + 1) t)
      | LAbs (x, t) -> LAbs (x, subst_tm level t)
      | App (t1, t2) -> App (subst_tm level t1, subst_tm level t2)
      | _ -> raise Not_yet_implemented in
    let rec subst_ty level ty =
      match ty with
      | Atom _ -> ty
      | LFun (ty1, ty2) -> LFun (subst_ty level ty1, subst_ty level ty2)
      | Fun (ty1, ty2) -> Fun (subst_ty level ty1, subst_ty level ty2)
      | Dprod (x, ty1, ty2) -> Dprod (x, subst_ty level ty1, subst_ty (level+1) ty2)
      | TApp (ty1, tm) -> TApp (subst_ty level ty1, subst_tm level tm)
      | _ -> raise Not_yet_implemented in
    subst_ty 0 ty

  (* [is_vacuous ty] true when "ty" does not effectively depend on "x" *)
  (* in the dependent type "Dprod (x, t, ty)"                          *)
  let is_vacuous ty =
    let rec vacuous_tm n tm =
      match tm with
      | Var i -> i <> n
      | LVar _ -> true
      | Const _ -> true
      | Unknown _ -> true
      | Abs (_, t) -> vacuous_tm (n+1) t
      | LAbs (_, t) -> vacuous_tm n t
      | App (t1, t2) -> (vacuous_tm n t1) && (vacuous_tm n t2)
      | _ -> raise Not_yet_implemented in
    let rec vacuous_ty n ty =
      match ty with
      | Atom _ -> true
      | LFun (ty1, ty2) -> (vacuous_ty n ty1) && (vacuous_ty n ty2)
      | Fun (ty1, ty2) -> (vacuous_ty n ty1) && (vacuous_ty n ty2)
      | Dprod (_, ty1, ty2) -> (vacuous_ty n ty1) && (vacuous_ty (n+1) ty2)
      | TApp (ty1, tm) -> (vacuous_ty n ty1) && (vacuous_tm n tm)
      | _ -> raise Not_yet_implemented in
    vacuous_ty 0 ty
  [@@warning "-32"]

  (* beta-normalization; [id_to_term], when given, unfolds defined
     constants on the fly. *)
  let rec head_normalize ?id_to_term tm =
    match tm with
    | Var _ -> tm
    | LVar _ -> tm
    | Const _ -> tm
    | DConst i ->
       (match id_to_term with
        | None -> tm
        | Some f -> head_normalize ?id_to_term (f i))
    | Unknown _ -> tm
    | Abs (x, t1) -> Abs (x, head_normalize ?id_to_term t1)
    | LAbs (x, t1) -> LAbs (x, head_normalize ?id_to_term t1)
    | App (t1, t2) ->
       (match head_normalize ?id_to_term t1 with
        | Abs (_, t) -> head_normalize ?id_to_term (var_subst t t2)
        | LAbs (_, t) -> head_normalize ?id_to_term (lvar_subst t t2)
        | nt1 -> App (nt1, t2))
    | _ -> raise Not_yet_implemented

  (** Full beta-normalization (arguments are normalized too). *)
  let rec normalize ?id_to_term tm =
    match tm with
    | Var _ -> tm
    | LVar _ -> tm
    | Const _ -> tm
    | DConst i ->
       (match id_to_term with
        | None -> tm
        | Some f -> normalize ?id_to_term (f i))
    | Unknown _ -> tm
    | Abs (x, t) -> Abs (x, normalize ?id_to_term t)
    | LAbs (x, t) -> LAbs (x, normalize ?id_to_term t)
    | App (t1, t2) ->
       let nt2 = normalize ?id_to_term t2 in
       (match normalize ?id_to_term t1 with
        | Abs (_, t) -> normalize ?id_to_term (var_subst t nt2)
        | LAbs (_, t) -> normalize ?id_to_term (lvar_subst t nt2)
        | nt1 -> App (nt1, nt2))
    | _ -> raise Not_yet_implemented

  (* beta-equivalence *)
  let beta_convert tm1 tm2 =
    let rec convert tm1 tm2 =
      match (tm1, tm2) with
      | (Var i, Var j) -> i = j
      | (LVar i, LVar j) -> i = j
      | (Const i, Const j) -> i = j
      | (Unknown i, Unknown j) -> i = j
      | (Abs (_, tm11), Abs (_, tm12)) -> convert tm11 tm12
      | (LAbs (_, tm11), LAbs (_, tm12)) -> convert tm11 tm12
      | (App (tm11, tm12), App (tm21, tm22)) ->
         (convert tm11 tm21) && (convert (head_normalize tm12) (head_normalize tm22))
      | _ -> false in
    convert (head_normalize tm1) (head_normalize tm2)

  (* type-normalization *)
  let rec type_normalize ty =
    match ty with
    | Atom _ -> ty
    | LFun (ty1, ty2) -> LFun (type_normalize ty1, type_normalize ty2)
    | Fun (ty1, ty2) -> Fun (type_normalize ty1, type_normalize ty2)
    | Dprod (x, ty1, ty2) -> Dprod (x, type_normalize ty1, type_normalize ty2)
    | TAbs (x, ty1) -> TAbs (x, type_normalize ty1)
    | TApp (ty1, tm) ->
       (match type_normalize ty1 with
        | TAbs (_, nty1) -> subst_in_type nty1 tm
        | nty1 -> TApp (nty1, tm))
    | _ -> raise Not_yet_implemented

  (* type beta-equivalence *)
  let type_convert ty1 ty2 =
    let rec convert ty1 ty2 =
      match (ty1, ty2) with
      | (Atom i, Atom j) -> i = j
      | (LFun (ty11, ty12), LFun (ty21, ty22)) ->
         (convert ty11 ty21) && (convert ty12 ty22)
      | (Fun (ty11, ty12), Fun (ty21, ty22)) ->
         (convert ty11 ty21) && (convert ty12 ty22)
      | (Dprod (_, ty11, ty12), Dprod (_, ty21, ty22)) ->
         (convert ty11 ty21) && (convert ty12 ty22)
      | (TAbs (_, ty11), TAbs (_, ty21)) -> convert ty11 ty21
      | (TApp (ty11, tm1), TApp (ty21, tm2)) ->
         (convert ty11 ty21) && (beta_convert tm1 tm2)
      | (_, _) -> false in
    convert (type_normalize ty1) (type_normalize ty2)
  [@@warning "-32"]

  (** [eta_long_form term stype f_get_type_of_constant] computes the eta-long
      form of [term] (assumed beta-normal, with type definitions unfolded) at
      type [stype]. [f_get_type_of_constant] gives the type of constants. *)
  let eta_long_form term stype f_get_type_of_constant =
    let rec eta_long_form_rec term stype ~is_functor linear_typing_env non_linear_typing_env =
      match term,stype,is_functor with
      | LVar i, None, is_f ->
         eta_long_form_rec (LVar i) (Some (List.nth linear_typing_env (i)))
           ~is_functor:is_f linear_typing_env non_linear_typing_env
      | LVar i , Some (Atom _ as ty) , false ->
         let () = assert (ty = List.nth linear_typing_env (i)) in
         LVar (i),ty
      | LVar i , Some (LFun (_a,_b) as ty) ,true ->
         let () = assert (ty = List.nth linear_typing_env (i)) in
         LVar (i),ty
      | LVar i , Some (LFun (a,b) as ty),false ->
         let () = assert (ty = List.nth linear_typing_env (i)) in
         let new_var,_ = eta_long_form_rec (LVar 0) (Some a) ~is_functor:false [a] [] in
         let res,_ =
           eta_long_form_rec (App(LVar (i+1),new_var)) (Some b) ~is_functor:false
             (a::linear_typing_env) non_linear_typing_env in
         LAbs ("x",res),ty
      | LVar i , Some (Fun (a,b) as ty), true ->
         let () = assert (Fun(a,b) = List.nth linear_typing_env (i)) in
         LVar (i),ty
      | LVar i , Some (Fun (a,b) as ty), false ->
         let () = assert (Fun(a,b) = List.nth linear_typing_env (i)) in
         let new_var,_ = eta_long_form_rec (Var 0) (Some a) ~is_functor:false [] [a] in
         let res,_ =
           eta_long_form_rec (App(LVar i,new_var)) (Some b) ~is_functor:false
             linear_typing_env (a::non_linear_typing_env) in
         Abs ("x",res),ty
      | Var i, None, is_f ->
         eta_long_form_rec (Var i) (Some (List.nth non_linear_typing_env (i)))
           ~is_functor:is_f linear_typing_env non_linear_typing_env
      | Var i , Some (Atom j as ty) , false ->
         let () = assert (Atom j = List.nth non_linear_typing_env (i)) in
         Var (i),ty
      | Var i , Some (LFun (a,b) as ty), true ->
         let () = assert (LFun (a,b) = List.nth non_linear_typing_env (i)) in
         Var (i),ty
      | Var i , Some (LFun (a,b) as ty), false ->
         let () = assert (LFun(a,b) = List.nth non_linear_typing_env (i)) in
         let new_var,_ = eta_long_form_rec (LVar 0) (Some a) ~is_functor:false [a] [] in
         let res,_ =
           eta_long_form_rec (App(Var i,new_var)) (Some b) ~is_functor:false
             (a::linear_typing_env) non_linear_typing_env in
         LAbs ("x",res),ty
      | Var i , Some (Fun (a,b) as ty), true ->
         let () = assert (Fun(a,b) = List.nth non_linear_typing_env (i)) in
         Var (i),ty
      | Var i , Some (Fun (a,b) as ty), false ->
         let () = assert (Fun(a,b) = List.nth non_linear_typing_env (i)) in
         let new_var,_ = eta_long_form_rec (Var 0) (Some a) ~is_functor:false [] [a] in
         let res,_ =
           eta_long_form_rec (App(Var (i+1),new_var)) (Some b) ~is_functor:false
             linear_typing_env (a::non_linear_typing_env) in
         Abs ("x",res),ty
      | Const i,None,true -> term, f_get_type_of_constant i
      | Const i,None,false ->
         eta_long_form_rec term (Some (f_get_type_of_constant i)) ~is_functor:false
           linear_typing_env non_linear_typing_env
      | Const _, Some (Atom _ as ty), false -> term,ty
      | Const _, Some (LFun (_a,_b) as ty), true -> term,ty
      | Const _, Some (Fun (_a,_b) as ty), true -> term,ty
      | Const _, Some (LFun (a,b) as ty), false ->
         let new_var,_ = eta_long_form_rec (LVar 0) (Some a) ~is_functor:false [a] [] in
         let term = lift 1 0 term in
         let res,_ =
           eta_long_form_rec (App(term,new_var)) (Some b) ~is_functor:false
             (a::linear_typing_env) non_linear_typing_env in
         LAbs ("x",res),ty
      | Const _, Some (Fun (a,b) as ty), false ->
         let new_var,_ = eta_long_form_rec (Var 0) (Some a) ~is_functor:false [] [a] in
         let term = lift 0 1 term in
         let res,_ =
           eta_long_form_rec (App(term,new_var)) (Some b) ~is_functor:false
             linear_typing_env (a::non_linear_typing_env) in
         Abs ("x",res),ty
      | DConst _, _, _ -> failwith "All the definitions should have been unfolded"
      | Abs (x,t), Some (Fun(a,b) as ty), false ->
         let t',_ =
           eta_long_form_rec t (Some b) ~is_functor:false
             linear_typing_env (a::non_linear_typing_env) in
         Abs(x,t'),ty
      | Abs _,None,_ -> failwith "The Term should be in normal form"
      | Abs (_x,_t), _, false -> failwith "Bad typing"
      | Abs (_x,_t), _, true -> failwith "The Term should be in normal form"
      | LAbs (x,t), Some (LFun(a,b) as ty), false ->
         let t',_ =
           eta_long_form_rec t (Some b) ~is_functor:false
             (a::linear_typing_env) non_linear_typing_env in
         LAbs(x,t'),ty
      | LAbs _,None,_ -> failwith "The Term should be in normal form"
      | LAbs (_x,_t), _, true -> failwith "The Term should be in normal form"
      | LAbs (_x,_t), _, _ -> failwith "Bad typing"
      | App (u,v), Some (Atom _ as ty), _ ->
         let u',u_type =
           eta_long_form_rec u None ~is_functor:true
             linear_typing_env non_linear_typing_env in
         (match u_type with
          | (LFun (a,b)|Fun (a,b)) ->
             let () = assert (b=ty) in
             let v',_v_type =
               eta_long_form_rec v (Some a) ~is_functor:false
                 linear_typing_env non_linear_typing_env in
             App (u',v'), b
          | _ -> failwith "Should be well typed 1")
      | App (u,v), Some (Fun (_,_) as ty), true ->
         let u',u_type =
           eta_long_form_rec u None ~is_functor:true
             linear_typing_env non_linear_typing_env in
         (match u_type with
          | (LFun (a,b)|Fun (a,b)) ->
             let () = assert (b=ty) in
             let v',_v_type =
               eta_long_form_rec v (Some a) ~is_functor:false
                 linear_typing_env non_linear_typing_env in
             App (u',v'), b
          | _ -> failwith "Should be well typed 2")
      | App (u,v), Some (Fun (a',b') as ty), false ->
         let var',_ = eta_long_form_rec (Var 0) (Some a') ~is_functor:false [] [a'] in
         let u = lift 0 1 u in
         let u',u_type =
           eta_long_form_rec u None ~is_functor:true
             linear_typing_env (a'::non_linear_typing_env) in
         (match u_type with
          | (LFun (a,b)|Fun (a,b)) ->
             let () = assert (b=ty) in
             let v = lift 0 1 v in
             let v',_v_type =
               eta_long_form_rec v (Some a) ~is_functor:false
                 linear_typing_env (a'::non_linear_typing_env) in
             let res,_ =
               eta_long_form_rec (App (App (u',v'),var')) (Some b') ~is_functor:false
                 linear_typing_env (a'::non_linear_typing_env) in
             Abs("x",res), b
          | _ -> failwith "Should be well typed 3")
      | App (u,v), Some (LFun (_,_) as ty), true ->
         let u',u_type =
           eta_long_form_rec u None ~is_functor:true
             linear_typing_env non_linear_typing_env in
         (match u_type with
          | (LFun (a,b)|Fun (a,b)) ->
             let () = assert (b=ty) in
             let v',_v_type =
               eta_long_form_rec v (Some a) ~is_functor:false
                 linear_typing_env non_linear_typing_env in
             App (u',v'), b
          | _ -> failwith "Should be well typed 4")
      | App (u,v), Some (LFun (a',b') as ty), false ->
         let var',_ = eta_long_form_rec (LVar 0) (Some a') ~is_functor:false [a'] [] in
         let u = lift 1 0 u in
         let u',u_type =
           eta_long_form_rec u None ~is_functor:true
             (a'::linear_typing_env) non_linear_typing_env in
         (match u_type with
          | (LFun (a,b)|Fun (a,b)) ->
             let () = assert (b=ty) in
             let v = lift 1 0 v in
             let v',_v_type =
               eta_long_form_rec v (Some a) ~is_functor:false
                 (a'::linear_typing_env) non_linear_typing_env in
             let res,_ =
               eta_long_form_rec (App (App (u',v'),var')) (Some b') ~is_functor:false
                 (a'::linear_typing_env) non_linear_typing_env in
             LAbs("x",res), b
          | _ -> failwith "Should be well typed 5")
      | App (u,v), None , true ->
         let u',u_type =
           eta_long_form_rec u None ~is_functor:true
             linear_typing_env non_linear_typing_env in
         (match u_type with
          | (LFun (a,b)|Fun (a,b)) ->
             let v',_v_type =
               eta_long_form_rec v (Some a) ~is_functor:false
                 linear_typing_env non_linear_typing_env in
             App (u',v'), b
          | _ -> failwith "Should be well typed 6")
      | App (_u,_v), None, false ->
         failwith "Probably a bug: the term cannot be a in a non functor position and an unknown type"
      | _, Some (DAtom _) , _ -> failwith "type definitions should have been unfolded"
      | LVar _ , Some ty ,b ->
         failwith
           (Printf.sprintf "LVar Term should be well typed. Type: %s. Is_functor: %B"
              (raw_type_to_string ty) b)
      | Var _ , _ , _ -> failwith "Var Term should be well typed"
      | _ -> raise Not_yet_implemented in
    let term',_ = eta_long_form_rec term (Some stype) ~is_functor:false [] [] in
    term'

  (* We assume here that types in [ty] have been unfolded *)
  let rec order stype f_unfold_defined_type =
    match stype with
    | Atom _ -> 1
    | DAtom i -> order (f_unfold_defined_type i) f_unfold_defined_type
    | LFun (alpha,beta) ->
       max ((order alpha f_unfold_defined_type)+1) (order beta f_unfold_defined_type)
    | Fun (alpha,beta) ->
       max ((order alpha f_unfold_defined_type)+1) (order beta f_unfold_defined_type)
    | _ -> failwith "Bug: order of type not defined for this type constructor"

  let is_2nd_order stype f_unfold_defined_type =
    (order stype f_unfold_defined_type) <= 2

  (* Replace every linear construct by its non-linear counterpart. *)
  let rec unlinearize_term = function
    | Var i -> Var i
    | LVar i -> Var i
    | Const i -> Const i
    | DConst i -> DConst i
    | Abs (x,t) -> Abs(x,unlinearize_term t)
    | LAbs (x,t) -> Abs(x,unlinearize_term t)
    | App (t,u) -> App (unlinearize_term t,unlinearize_term u)
    | _ -> failwith "Unlinearization not implemented for this term"

  let rec unlinearize_type = function
    | Atom i -> Atom i
    | DAtom i -> DAtom i
    | LFun (ty1,ty2) -> Fun (unlinearize_type ty1,unlinearize_type ty2)
    | Fun (ty1,ty2) -> Fun (unlinearize_type ty1,unlinearize_type ty2)
    | _ -> failwith "Unlinearization not implemented for this type"

end
(**************************************************************************) (* *) (* ACG development toolkit *) (* *) (* Copyright 2008-2021 INRIA *) (* *) (* More information on "http://acg.gforge.inria.fr/" *) (* License: CeCILL, see the LICENSE file or "http://www.cecill.info" *) (* Authors: see the AUTHORS file *) (* *) (* *) (* *) (* *) (* $Rev:: $: Revision of last commit *) (* $Author:: $: Author of last commit *) (* $Date:: $: Date of last commit *) (* *) (**************************************************************************)
import_stable.ml
trees.ml
(* example of using the tree API *) open Lwt.Syntax module Store = Irmin_unix.Git.FS.KV (Irmin.Contents.String) module Tree = Store.Tree let info = Irmin_unix.info type t1 = int type t2 = { x : string; y : t1 } type t = t2 list let tree_of_t t = let+ tree, _ = Lwt_list.fold_left_s (fun (v, i) t2 -> let si = string_of_int i in let* v = Tree.add v [ si; "x" ] t2.x in let+ v = Tree.add v [ si; "y" ] (string_of_int t2.y) in (v, i + 1)) (Tree.empty (), 0) t in tree let t_of_tree v = let aux acc i = let i = string_of_int i in let* x = Tree.get v [ i; "x" ] in let+ y = Tree.get v [ i; "y" ] in { x; y = int_of_string y } :: acc in let* t2s = Tree.list v [] in let t2s = List.map (fun (i, _) -> int_of_string i) t2s in let t2s = List.rev (List.sort compare t2s) in Lwt_list.fold_left_s aux [] t2s let main () = Config.init (); let config = Irmin_git.config ~bare:false Config.root in let t = [ { x = "foo"; y = 3 }; { x = "bar"; y = 5 }; { x = "too"; y = 10 } ] in let* v = tree_of_t t in let* repo = Store.Repo.v config in let* t = Store.main repo in let* () = Store.set_tree_exn t ~info:(info "update a/b") [ "a"; "b" ] v in let* v = Store.get_tree t [ "a"; "b" ] in let* tt = t_of_tree v in let* () = Store.set_tree_exn t ~info:(info "update a/c") [ "a"; "c" ] v in let tt = tt @ [ { x = "ggg"; y = 4 } ] in let* vv = tree_of_t tt in Store.set_tree_exn t ~info:(info "merge tree into a/b") [ "a"; "b" ] vv let () = Lwt_main.run (main ())
(* * Copyright (c) 2013-2021 Thomas Gazagnaire <thomas@gazagnaire.org> * * Permission to use, copy, modify, and distribute this software for any * purpose with or without fee is hereby granted, provided that the above * copyright notice and this permission notice appear in all copies. * * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
c.ml
module X = D
(***********************************************************************) (* *) (* ocamlbuild *) (* *) (* Nicolas Pouillard, Berke Durak, projet Gallium, INRIA Rocquencourt *) (* *) (* Copyright 2007 Institut National de Recherche en Informatique et *) (* en Automatique. All rights reserved. This file is distributed *) (* under the terms of the GNU Library General Public License, with *) (* the special exception on linking described in file ../LICENSE. *) (* *) (***********************************************************************)
RotationBase.h
// This file is part of Eigen, a lightweight C++ template library // for linear algebra. // // Copyright (C) 2008 Gael Guennebaud <gael.guennebaud@inria.fr> // // This Source Code Form is subject to the terms of the Mozilla // Public License v. 2.0. If a copy of the MPL was not distributed // with this file, You can obtain one at http://mozilla.org/MPL/2.0/. #ifndef EIGEN_ROTATIONBASE_H #define EIGEN_ROTATIONBASE_H namespace Eigen { // forward declaration namespace internal { template<typename RotationDerived, typename MatrixType, bool IsVector=MatrixType::IsVectorAtCompileTime> struct rotation_base_generic_product_selector; } /** \class RotationBase * * \brief Common base class for compact rotation representations * * \tparam Derived is the derived type, i.e., a rotation type * \tparam _Dim the dimension of the space */ template<typename Derived, int _Dim> class RotationBase { public: enum { Dim = _Dim }; /** the scalar type of the coefficients */ typedef typename internal::traits<Derived>::Scalar Scalar; /** corresponding linear transformation matrix type */ typedef Matrix<Scalar,Dim,Dim> RotationMatrixType; typedef Matrix<Scalar,Dim,1> VectorType; public: EIGEN_DEVICE_FUNC inline const Derived& derived() const { return *static_cast<const Derived*>(this); } EIGEN_DEVICE_FUNC inline Derived& derived() { return *static_cast<Derived*>(this); } /** \returns an equivalent rotation matrix */ EIGEN_DEVICE_FUNC inline RotationMatrixType toRotationMatrix() const { return derived().toRotationMatrix(); } /** \returns an equivalent rotation matrix * This function is added to be conform with the Transform class' naming scheme. 
*/ EIGEN_DEVICE_FUNC inline RotationMatrixType matrix() const { return derived().toRotationMatrix(); } /** \returns the inverse rotation */ EIGEN_DEVICE_FUNC inline Derived inverse() const { return derived().inverse(); } /** \returns the concatenation of the rotation \c *this with a translation \a t */ EIGEN_DEVICE_FUNC inline Transform<Scalar,Dim,Isometry> operator*(const Translation<Scalar,Dim>& t) const { return Transform<Scalar,Dim,Isometry>(*this) * t; } /** \returns the concatenation of the rotation \c *this with a uniform scaling \a s */ EIGEN_DEVICE_FUNC inline RotationMatrixType operator*(const UniformScaling<Scalar>& s) const { return toRotationMatrix() * s.factor(); } /** \returns the concatenation of the rotation \c *this with a generic expression \a e * \a e can be: * - a DimxDim linear transformation matrix * - a DimxDim diagonal matrix (axis aligned scaling) * - a vector of size Dim */ template<typename OtherDerived> EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE typename internal::rotation_base_generic_product_selector<Derived,OtherDerived,OtherDerived::IsVectorAtCompileTime>::ReturnType operator*(const EigenBase<OtherDerived>& e) const { return internal::rotation_base_generic_product_selector<Derived,OtherDerived>::run(derived(), e.derived()); } /** \returns the concatenation of a linear transformation \a l with the rotation \a r */ template<typename OtherDerived> friend EIGEN_DEVICE_FUNC inline RotationMatrixType operator*(const EigenBase<OtherDerived>& l, const Derived& r) { return l.derived() * r.toRotationMatrix(); } /** \returns the concatenation of a scaling \a l with the rotation \a r */ EIGEN_DEVICE_FUNC friend inline Transform<Scalar,Dim,Affine> operator*(const DiagonalMatrix<Scalar,Dim>& l, const Derived& r) { Transform<Scalar,Dim,Affine> res(r); res.linear().applyOnTheLeft(l); return res; } /** \returns the concatenation of the rotation \c *this with a transformation \a t */ template<int Mode, int Options> EIGEN_DEVICE_FUNC inline 
Transform<Scalar,Dim,Mode> operator*(const Transform<Scalar,Dim,Mode,Options>& t) const { return toRotationMatrix() * t; } template<typename OtherVectorType> EIGEN_DEVICE_FUNC inline VectorType _transformVector(const OtherVectorType& v) const { return toRotationMatrix() * v; } }; namespace internal { // implementation of the generic product rotation * matrix template<typename RotationDerived, typename MatrixType> struct rotation_base_generic_product_selector<RotationDerived,MatrixType,false> { enum { Dim = RotationDerived::Dim }; typedef Matrix<typename RotationDerived::Scalar,Dim,Dim> ReturnType; EIGEN_DEVICE_FUNC static inline ReturnType run(const RotationDerived& r, const MatrixType& m) { return r.toRotationMatrix() * m; } }; template<typename RotationDerived, typename Scalar, int Dim, int MaxDim> struct rotation_base_generic_product_selector< RotationDerived, DiagonalMatrix<Scalar,Dim,MaxDim>, false > { typedef Transform<Scalar,Dim,Affine> ReturnType; EIGEN_DEVICE_FUNC static inline ReturnType run(const RotationDerived& r, const DiagonalMatrix<Scalar,Dim,MaxDim>& m) { ReturnType res(r); res.linear() *= m; return res; } }; template<typename RotationDerived,typename OtherVectorType> struct rotation_base_generic_product_selector<RotationDerived,OtherVectorType,true> { enum { Dim = RotationDerived::Dim }; typedef Matrix<typename RotationDerived::Scalar,Dim,1> ReturnType; EIGEN_DEVICE_FUNC static EIGEN_STRONG_INLINE ReturnType run(const RotationDerived& r, const OtherVectorType& v) { return r._transformVector(v); } }; } // end namespace internal /** \geometry_module * * \brief Constructs a Dim x Dim rotation matrix from the rotation \a r */ template<typename _Scalar, int _Rows, int _Cols, int _Storage, int _MaxRows, int _MaxCols> template<typename OtherDerived> EIGEN_DEVICE_FUNC Matrix<_Scalar, _Rows, _Cols, _Storage, _MaxRows, _MaxCols> ::Matrix(const RotationBase<OtherDerived,ColsAtCompileTime>& r) { 
EIGEN_STATIC_ASSERT_MATRIX_SPECIFIC_SIZE(Matrix,int(OtherDerived::Dim),int(OtherDerived::Dim)) *this = r.toRotationMatrix(); } /** \geometry_module * * \brief Set a Dim x Dim rotation matrix from the rotation \a r */ template<typename _Scalar, int _Rows, int _Cols, int _Storage, int _MaxRows, int _MaxCols> template<typename OtherDerived> EIGEN_DEVICE_FUNC Matrix<_Scalar, _Rows, _Cols, _Storage, _MaxRows, _MaxCols>& Matrix<_Scalar, _Rows, _Cols, _Storage, _MaxRows, _MaxCols> ::operator=(const RotationBase<OtherDerived,ColsAtCompileTime>& r) { EIGEN_STATIC_ASSERT_MATRIX_SPECIFIC_SIZE(Matrix,int(OtherDerived::Dim),int(OtherDerived::Dim)) return *this = r.toRotationMatrix(); } namespace internal { /** \internal * * Helper function to return an arbitrary rotation object to a rotation matrix. * * \tparam Scalar the numeric type of the matrix coefficients * \tparam Dim the dimension of the current space * * It returns a Dim x Dim fixed size matrix. * * Default specializations are provided for: * - any scalar type (2D), * - any matrix expression, * - any type based on RotationBase (e.g., Quaternion, AngleAxis, Rotation2D) * * Currently toRotationMatrix is only used by Transform. 
* * \sa class Transform, class Rotation2D, class Quaternion, class AngleAxis */ template<typename Scalar, int Dim> EIGEN_DEVICE_FUNC static inline Matrix<Scalar,2,2> toRotationMatrix(const Scalar& s) { EIGEN_STATIC_ASSERT(Dim==2,YOU_MADE_A_PROGRAMMING_MISTAKE) return Rotation2D<Scalar>(s).toRotationMatrix(); } template<typename Scalar, int Dim, typename OtherDerived> EIGEN_DEVICE_FUNC static inline Matrix<Scalar,Dim,Dim> toRotationMatrix(const RotationBase<OtherDerived,Dim>& r) { return r.toRotationMatrix(); } template<typename Scalar, int Dim, typename OtherDerived> EIGEN_DEVICE_FUNC static inline const MatrixBase<OtherDerived>& toRotationMatrix(const MatrixBase<OtherDerived>& mat) { EIGEN_STATIC_ASSERT(OtherDerived::RowsAtCompileTime==Dim && OtherDerived::ColsAtCompileTime==Dim, YOU_MADE_A_PROGRAMMING_MISTAKE) return mat; } } // end namespace internal } // end namespace Eigen #endif // EIGEN_ROTATIONBASE_H
test_protocol_validator.ml
(** Unit tests for protocol_validator. Currently only tests that events arex emitted. *) open Shell_test_helpers (** A [Alcotest_protocol_validator] extends [Test_services] with protocol validator-specific testables and helpers *) module Alcotest_protocol_validator = struct include Test_services let registered_protocol : Registered_protocol.t testable = let open Registered_protocol in let eq (p1 : t) (p2 : t) : bool = let (module P1) = p1 in let (module P2) = p2 in Tezos_base.TzPervasives.Protocol_hash.equal P1.hash P2.hash in let pp fmt (p : t) = let (module P) = p in Tezos_base.TzPervasives.Protocol_hash.pp fmt P.hash in testable pp eq end let section = Some (Internal_event.Section.make_sanitized ["node"; "validator"]) let filter = Some section (** [wrap f _switch] wraps a test function [f] by setting up a Mock_sink if necessary, initializing a mock p2p network, an empty chain state and a validator. It passes the validator to the test function [f] *) let wrap f _switch () = with_empty_mock_sink (fun _ -> Lwt_utils_unix.with_tempdir "tezos_test_" (fun test_dir -> init_chain test_dir >>= fun (st, _, _, _) -> init_mock_p2p Distributed_db_version.Name.zero >>= function | Ok p2p -> (* Create state *) let db = Distributed_db.create st p2p in (* Set working dir for protocol compiler *) Updater.init (Filename.concat test_dir "build") ; (* Start validator *) let vl = Protocol_validator.create db in f vl _switch () | Error error -> Format.printf "Could not get p2p:\n %a\n" pp_print_error error ; Format.print_flush () ; Lwt.return_unit)) (** Start tests *) (** [pushing_validator_protocol] tests that requesting the validation of a protocol emits a pushing_validation_request event *) let pushing_validator_protocol vl _switch () = (* Let's validate a phony protocol *) let pt = Protocol.{expected_env = V0; components = []} in Protocol_validator.validate vl Protocol_hash.zero pt >>= fun res -> Alcotest_protocol_validator.(check (tzresults registered_protocol)) "Compilation 
should fail." res (Error [ Validation_errors.Invalid_protocol {hash = Protocol_hash.zero; error = Compilation_failed} ]) ; Mock_sink.assert_has_event "Should have a pushing_validation_request event" ~filter ( Internal_event.Debug, section, `O [ ( "pushing_protocol_validation.v0", `String "PrihK96nBAFSxVL1GLJTVhu9YnzkMFiBeuJRPA8NwuZVZCE1L6i" ) ] ) ; Lwt.return_unit (** [previously_validated_protocol] tests that requesting the validation of a protocol that is already validated (e.g. the genesis protocol) emits a previously_validated_protocol event *) let previously_validated_protocol vl _switch () = (* Let's request the re-validation of the genesis protocol *) let phony_pt = Protocol.{expected_env = V0; components = []} in Protocol_validator.validate vl genesis_protocol_hash phony_pt >>= fun res -> Alcotest_protocol_validator.(check (tzresults registered_protocol)) "Compilation should work." (Ok genesis_protocol) res ; Mock_sink.assert_has_event "Should have a previously_validated_protocol event" ~filter ( Internal_event.Debug, section, `O [ ( "previously_validated_protocol.v0", `String "ProtoDemoNoopsDemoNoopsDemoNoopsDemoNoopsDemo6XBoYp" ) ] ) ; Lwt.return_unit (** [fetching_protocol] tests that requesting the fetch of a protocol emits a fetching_protocol event *) let fetching_protocol vl _switch () = (* Let's fetch a phony protocol, and timeout immediately *) Protocol_validator.fetch_and_compile_protocol ~peer:P2p_peer.Id.zero ~timeout:Ptime.Span.zero vl Protocol_hash.zero >>= fun _ -> Mock_sink.assert_has_event "Should have a fetching_protocol event" ~filter ( Internal_event.Notice, section, `O [ ( "fetching_protocol.v0", `O [ ( "hash", `String "PrihK96nBAFSxVL1GLJTVhu9YnzkMFiBeuJRPA8NwuZVZCE1L6i" ); ("source", `String "idqRfGME9Bdhde2rksPSz7s6naxMW4") ] ) ] ) ; Lwt.return_unit let tests = [ Alcotest_lwt.test_case "pushing_validator_protocol" `Quick (wrap pushing_validator_protocol); Alcotest_lwt.test_case "previously_validated_protocol" `Quick (wrap 
previously_validated_protocol); Alcotest_lwt.test_case "fetching_protocol" `Quick (wrap fetching_protocol) ]
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2020 Nomadic Labs. <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
jib_compile.ml
open Ast open Ast_defs open Ast_util open Jib open Jib_util open Type_check open Value2 open Anf let opt_memo_cache = ref false let optimize_aarch64_fast_struct = ref false let (gensym, _) = symbol_generator "gs" let ngensym () = name (gensym ()) (**************************************************************************) (* 4. Conversion to low-level AST *) (**************************************************************************) (** We now use a low-level AST called Jib (see language/bytecode.ott) that is only slightly abstracted away from C. To be succint in comments we usually refer to this as Sail IR or IR rather than low-level AST repeatedly. The general idea is ANF expressions are converted into lists of instructions (type instr) where allocations and deallocations are now made explicit. ANF values (aval) are mapped to the cval type, which is even simpler still. Some things are still more abstract than in C, so the type definitions follow the sail type definition structure, just with typ (from ast.ml) replaced with ctyp. Top-level declarations that have no meaning for the backend are not included at this level. The convention used here is that functions of the form compile_X compile the type X into types in this AST, so compile_aval maps avals into cvals. Note that the return types for these functions are often quite complex, and they usually return some tuple containing setup instructions (to allocate memory for the expression), cleanup instructions (to deallocate that memory) and possibly typing information about what has been translated. 
**) (* FIXME: This stage shouldn't care about this *) let max_int n = Big_int.pred (Big_int.pow_int_positive 2 (n - 1)) let min_int n = Big_int.negate (Big_int.pow_int_positive 2 (n - 1)) let rec is_bitvector = function | [] -> true | AV_lit (L_aux (L_zero, _), _) :: avals -> is_bitvector avals | AV_lit (L_aux (L_one, _), _) :: avals -> is_bitvector avals | _ :: _ -> false let value_of_aval_bit = function | AV_lit (L_aux (L_zero, _), _) -> Sail2_values.B0 | AV_lit (L_aux (L_one, _), _) -> Sail2_values.B1 | _ -> assert false let is_ct_enum = function | CT_enum _ -> true | _ -> false let iblock1 = function | [instr] -> instr | instrs -> iblock instrs (** The context type contains two type-checking environments. ctx.local_env contains the closest typechecking environment, usually from the expression we are compiling, whereas ctx.tc_env is the global type checking environment from type-checking the entire AST. We also keep track of local variables in ctx.locals, so we know when their type changes due to flow typing. 
*) type ctx = { records : (kid list * ctyp UBindings.t) Bindings.t; enums : IdSet.t Bindings.t; variants : (kid list * ctyp UBindings.t) Bindings.t; valspecs : (string option * ctyp list * ctyp) Bindings.t; quants : ctyp KBindings.t; local_env : Env.t; tc_env : Env.t; effect_info : Effects.side_effect_info; locals : (mut * ctyp) Bindings.t; letbinds : int list; no_raw : bool; } let ctx_is_extern id ctx = match Bindings.find_opt id ctx.valspecs with | Some (Some _, _, _) -> true | Some (None, _, _) -> false | None -> Env.is_extern id ctx.tc_env "c" let ctx_get_extern id ctx = match Bindings.find_opt id ctx.valspecs with | Some (Some extern, _, _) -> extern | Some (None, _, _) -> Reporting.unreachable (id_loc id) __POS__ ("Tried to get extern information for non-extern function " ^ string_of_id id) | None -> Env.get_extern id ctx.tc_env "c" let initial_ctx env effect_info = let initial_valspecs = [ (mk_id "size_itself_int", (Some "size_itself_int", [CT_lint], CT_lint)); (mk_id "make_the_value", (Some "make_the_value", [CT_lint], CT_lint)) ] |> List.to_seq |> Bindings.of_seq in { records = Bindings.empty; enums = Bindings.empty; variants = Bindings.empty; valspecs = initial_valspecs; quants = KBindings.empty; local_env = env; tc_env = env; effect_info; locals = Bindings.empty; letbinds = []; no_raw = false; } let rec mangle_string_of_ctyp ctx = function | CT_lint -> "i" | CT_fint n -> "I" ^ string_of_int n | CT_lbits _ -> "b" | CT_sbits (n, _) -> "S" ^ string_of_int n | CT_fbits (n, _) -> "B" ^ string_of_int n | CT_constant n -> "C" ^ Big_int.to_string n | CT_bit -> "t" | CT_unit -> "u" | CT_bool -> "o" | CT_real -> "r" | CT_string -> "s" | CT_float n -> "f" ^ string_of_int n | CT_rounding_mode -> "m" | CT_enum (id, _) -> "E" ^ string_of_id id ^ "%" | CT_ref ctyp -> "&" ^ mangle_string_of_ctyp ctx ctyp | CT_tup ctyps -> "(" ^ Util.string_of_list "," (mangle_string_of_ctyp ctx) ctyps ^ ")" | CT_struct (id, fields) -> let generic_fields = Bindings.find id ctx.records |> 
snd |> UBindings.bindings in (* Note: It might be better to only do this if we actually have polymorphic fields *) let unifiers = ctyp_unify (id_loc id) (CT_struct (id, generic_fields)) (CT_struct (id, fields)) |> KBindings.bindings |> List.map snd in begin match unifiers with | [] -> "R" ^ string_of_id id | _ -> "R" ^ string_of_id id ^ "<" ^ Util.string_of_list "," (mangle_string_of_ctyp ctx) unifiers ^ ">" end | CT_variant (id, ctors) -> let generic_ctors = Bindings.find id ctx.variants |> snd |> UBindings.bindings in let unifiers = ctyp_unify (id_loc id) (CT_variant (id, generic_ctors)) (CT_variant (id, ctors)) |> KBindings.bindings |> List.map snd in let prefix = string_of_id id in (if prefix = "option" then "O" else "U" ^ prefix) ^ "<" ^ Util.string_of_list "," (mangle_string_of_ctyp ctx) unifiers ^ ">" | CT_vector (_, ctyp) -> "V" ^ mangle_string_of_ctyp ctx ctyp | CT_fvector (n, _, ctyp) -> "F" ^ string_of_int n ^ mangle_string_of_ctyp ctx ctyp | CT_list ctyp -> "L" ^ mangle_string_of_ctyp ctx ctyp | CT_poly kid -> "P" ^ string_of_kid kid module type Config = sig val convert_typ : ctx -> typ -> ctyp val optimize_anf : ctx -> typ aexp -> typ aexp val unroll_loops : int option val specialize_calls : bool val ignore_64 : bool val struct_value : bool val use_real : bool val branch_coverage : out_channel option val track_throw : bool end let name_or_global ctx id = if Env.is_register id ctx.local_env || IdSet.mem id (Env.get_toplevel_lets ctx.local_env) then global id else name id module Make(C: Config) = struct let ctyp_of_typ ctx typ = C.convert_typ ctx typ let rec chunkify n xs = match Util.take n xs, Util.drop n xs with | xs, [] -> [xs] | xs, ys -> xs :: chunkify n ys let coverage_branch_count = ref 0 let coverage_loc_args l = match Reporting.simp_loc l with | None -> None | Some (p1, p2) -> Some (Printf.sprintf "\"%s\", %d, %d, %d, %d" (String.escaped p1.pos_fname) p1.pos_lnum (p1.pos_cnum - p1.pos_bol) p2.pos_lnum (p2.pos_cnum - p2.pos_bol)) let 
coverage_branch_reached l = let branch_id = !coverage_branch_count in incr coverage_branch_count; branch_id, (match C.branch_coverage with | Some _ -> begin match coverage_loc_args l with | None -> [] | Some args -> [iraw (Printf.sprintf "sail_branch_reached(%d, %s);" branch_id args)] end | _ -> [] ) let append_into_block instrs instr = match instrs with | [] -> instr | _ -> iblock (instrs @ [instr]) let rec find_aexp_loc (AE_aux (e, _, l)) = match Reporting.simp_loc l with | Some _ -> l | None -> match e with | AE_cast (e',_) -> find_aexp_loc e' | _ -> l let coverage_branch_taken branch_id aexp = match C.branch_coverage with | None -> [] | Some out -> begin match coverage_loc_args (find_aexp_loc aexp) with | None -> [] | Some args -> Printf.fprintf out "%s\n" ("B " ^ args); [iraw (Printf.sprintf "sail_branch_taken(%d, %s);" branch_id args)] end let coverage_function_entry id l = match C.branch_coverage with | None -> [] | Some out -> begin match coverage_loc_args l with | None -> [] | Some args -> Printf.fprintf out "%s\n" ("F " ^ args); [iraw (Printf.sprintf "sail_function_entry(\"%s\", %s);" (string_of_id id) args)] end let rec compile_aval l ctx = function | AV_cval (cval, typ) -> let ctyp = cval_ctyp cval in let ctyp' = ctyp_of_typ ctx typ in if not (ctyp_equal ctyp ctyp') then let gs = ngensym () in [iinit l ctyp' gs cval], V_id (gs, ctyp'), [iclear ctyp' gs] else [], cval, [] | AV_id (id, typ) -> begin match Bindings.find_opt id ctx.locals with | Some (_, ctyp) -> [], V_id (name id, ctyp), [] | None -> [], V_id (name_or_global ctx id, ctyp_of_typ ctx (lvar_typ typ)), [] end | AV_ref (id, typ) -> [], V_lit (VL_ref (string_of_id id), CT_ref (ctyp_of_typ ctx (lvar_typ typ))), [] | AV_lit (L_aux (L_string str, _), typ) -> [], V_lit ((VL_string (String.escaped str)), ctyp_of_typ ctx typ), [] | AV_lit (L_aux (L_num n, _), typ) when C.ignore_64 -> [], V_lit ((VL_int n), ctyp_of_typ ctx typ), [] | AV_lit (L_aux (L_num n, _), typ) when Big_int.less_equal (min_int 64) 
n && Big_int.less_equal n (max_int 64) -> let gs = ngensym () in [iinit l CT_lint gs (V_lit (VL_int n, CT_fint 64))], V_id (gs, CT_lint), [iclear CT_lint gs] | AV_lit (L_aux (L_num n, _), typ) -> let gs = ngensym () in [iinit l CT_lint gs (V_lit (VL_string (Big_int.to_string n), CT_string))], V_id (gs, CT_lint), [iclear CT_lint gs] | AV_lit (L_aux (L_zero, _), _) -> [], V_lit (VL_bit Sail2_values.B0, CT_bit), [] | AV_lit (L_aux (L_one, _), _) -> [], V_lit (VL_bit Sail2_values.B1, CT_bit), [] | AV_lit (L_aux (L_true, _), _) -> [], V_lit (VL_bool true, CT_bool), [] | AV_lit (L_aux (L_false, _), _) -> [], V_lit (VL_bool false, CT_bool), [] | AV_lit (L_aux (L_real str, _), _) -> if C.use_real then [], V_lit (VL_real str, CT_real), [] else let gs = ngensym () in [iinit l CT_real gs (V_lit (VL_string str, CT_string))], V_id (gs, CT_real), [iclear CT_real gs] | AV_lit (L_aux (L_unit, _), _) -> [], V_lit (VL_unit, CT_unit), [] | AV_lit (L_aux (L_undef, _), typ) -> let ctyp = ctyp_of_typ ctx typ in [], V_lit (VL_undefined, ctyp), [] | AV_lit (L_aux (_, l) as lit, _) -> raise (Reporting.err_general l ("Encountered unexpected literal " ^ string_of_lit lit ^ " when converting ANF represention into IR")) | AV_tuple avals -> let elements = List.map (compile_aval l ctx) avals in let cvals = List.map (fun (_, cval, _) -> cval) elements in let setup = List.concat (List.map (fun (setup, _, _) -> setup) elements) in let cleanup = List.concat (List.rev (List.map (fun (_, _, cleanup) -> cleanup) elements)) in let tup_ctyp = CT_tup (List.map cval_ctyp cvals) in let gs = ngensym () in if C.struct_value then ( setup, V_tuple (cvals, tup_ctyp), cleanup ) else ( setup @ [idecl l tup_ctyp gs] @ List.mapi (fun n cval -> icopy l (CL_tuple (CL_id (gs, tup_ctyp), n)) cval) cvals, V_id (gs, CT_tup (List.map cval_ctyp cvals)), [iclear tup_ctyp gs] @ cleanup ) | AV_record (fields, typ) when C.struct_value -> let ctyp = ctyp_of_typ ctx typ in let compile_fields (id, aval) = let field_setup, cval, 
field_cleanup = compile_aval l ctx aval in field_setup, ((id, []), cval), field_cleanup in let field_triples = List.map compile_fields (Bindings.bindings fields) in let setup = List.concat (List.map (fun (s, _, _) -> s) field_triples) in let fields = List.map (fun (_, f, _) -> f) field_triples in let cleanup = List.concat (List.map (fun (_, _, c) -> c) field_triples) in setup, V_struct (fields, ctyp), cleanup | AV_record (fields, typ) -> let ctyp = ctyp_of_typ ctx typ in let gs = ngensym () in let compile_fields (id, aval) = let field_setup, cval, field_cleanup = compile_aval l ctx aval in field_setup @ [icopy l (CL_field (CL_id (gs, ctyp), (id, []))) cval] @ field_cleanup in [idecl l ctyp gs] @ List.concat (List.map compile_fields (Bindings.bindings fields)), V_id (gs, ctyp), [iclear ctyp gs] | AV_vector ([], typ) -> let vector_ctyp = ctyp_of_typ ctx typ in begin match ctyp_of_typ ctx typ with | CT_fbits (0, ord) -> [], V_lit (VL_bits ([], ord), vector_ctyp), [] | _ -> let gs = ngensym () in [idecl l vector_ctyp gs; iextern l (CL_id (gs, vector_ctyp)) (mk_id "internal_vector_init", []) [V_lit (VL_int Big_int.zero, CT_fint 64)]], V_id (gs, vector_ctyp), [iclear vector_ctyp gs] end (* Convert a small bitvector to a uint64_t literal. *) | AV_vector (avals, typ) when is_bitvector avals && (List.length avals <= 64 || C.ignore_64) -> begin let bitstring = List.map value_of_aval_bit avals in let len = List.length avals in match destruct_bitvector ctx.tc_env typ with | Some (_, Ord_aux (Ord_inc, _)) -> [], V_lit (VL_bits (bitstring, false), CT_fbits (len, false)), [] | Some (_, Ord_aux (Ord_dec, _)) -> [], V_lit (VL_bits (bitstring, true), CT_fbits (len, true)), [] | Some _ -> raise (Reporting.err_general l "Encountered order polymorphic bitvector literal") | None -> raise (Reporting.err_general l "Encountered vector literal without vector type") end (* Convert a bitvector literal that is larger than 64-bits to a variable size bitvector, converting it in 64-bit chunks. 
*) | AV_vector (avals, typ) when is_bitvector avals -> let len = List.length avals in let bitstring avals = VL_bits (List.map value_of_aval_bit avals, true) in let first_chunk = bitstring (Util.take (len mod 64) avals) in let chunks = Util.drop (len mod 64) avals |> chunkify 64 |> List.map bitstring in let gs = ngensym () in [iinit l (CT_lbits true) gs (V_lit (first_chunk, CT_fbits (len mod 64, true)))] @ List.map (fun chunk -> ifuncall l (CL_id (gs, CT_lbits true)) (mk_id "append_64", []) [V_id (gs, CT_lbits true); V_lit (chunk, CT_fbits (64, true))]) chunks, V_id (gs, CT_lbits true), [iclear (CT_lbits true) gs] (* If we have a bitvector value, that isn't a literal then we need to set bits individually. *) | AV_vector (avals, Typ_aux (Typ_app (id, [_; A_aux (A_order ord, _)]), _)) when string_of_id id = "bitvector" && List.length avals <= 64 -> let len = List.length avals in let direction = match ord with | Ord_aux (Ord_inc, _) -> false | Ord_aux (Ord_dec, _) -> true | Ord_aux (Ord_var _, _) -> raise (Reporting.err_general l "Polymorphic vector direction found") in let gs = ngensym () in let ctyp = CT_fbits (len, direction) in let mask i = VL_bits (Util.list_init (63 - i) (fun _ -> Sail2_values.B0) @ [Sail2_values.B1] @ Util.list_init i (fun _ -> Sail2_values.B0), direction) in let aval_mask i aval = let setup, cval, cleanup = compile_aval l ctx aval in match cval with | V_lit (VL_bit Sail2_values.B0, _) -> [] | V_lit (VL_bit Sail2_values.B1, _) -> [icopy l (CL_id (gs, ctyp)) (V_call (Bvor, [V_id (gs, ctyp); V_lit (mask i, ctyp)]))] | _ -> setup @ [iextern l (CL_id (gs, ctyp)) (mk_id "update_fbits", []) [V_id (gs, ctyp); V_lit (VL_int (Big_int.of_int i), CT_constant (Big_int.of_int i)); cval]] @ cleanup in [idecl l ctyp gs; icopy l (CL_id (gs, ctyp)) (V_lit (VL_bits (Util.list_init len (fun _ -> Sail2_values.B0), direction), ctyp))] @ List.concat (List.mapi aval_mask (List.rev avals)), V_id (gs, ctyp), [] (* Compiling a vector literal that isn't a bitvector *) | 
AV_vector (avals, Typ_aux (Typ_app (id, [_; A_aux (A_order ord, _); A_aux (A_typ typ, _)]), _)) when string_of_id id = "vector" -> let len = List.length avals in let direction = match ord with | Ord_aux (Ord_inc, _) -> false | Ord_aux (Ord_dec, _) -> true | Ord_aux (Ord_var _, _) -> raise (Reporting.err_general l "Polymorphic vector direction found") in let elem_ctyp = ctyp_of_typ ctx typ in let vector_ctyp = CT_vector (direction, elem_ctyp) in let gs = ngensym () in let aval_set i aval = let setup, cval, cleanup = compile_aval l ctx aval in let cval, conversion_setup, conversion_cleanup = if ctyp_equal (cval_ctyp cval) elem_ctyp then ( cval, [], [] ) else ( let gs = ngensym () in V_id (gs, elem_ctyp), [iinit l elem_ctyp gs cval], [iclear elem_ctyp gs] ) in setup @ conversion_setup @ [iextern l (CL_id (gs, vector_ctyp)) (mk_id "internal_vector_update", []) [V_id (gs, vector_ctyp); V_lit (VL_int (Big_int.of_int i), CT_fint 64); cval]] @ conversion_cleanup @ cleanup in [idecl l vector_ctyp gs; iextern l (CL_id (gs, vector_ctyp)) (mk_id "internal_vector_init", []) [V_lit (VL_int (Big_int.of_int len), CT_fint 64)]] @ List.concat (List.mapi aval_set (if direction then List.rev avals else avals)), V_id (gs, vector_ctyp), [iclear vector_ctyp gs] | AV_vector _ as aval -> raise (Reporting.err_general l ("Have AVL_vector: " ^ Pretty_print_sail.to_string (pp_aval aval) ^ " which is not a vector type")) | AV_list (avals, Typ_aux (typ, _)) -> let ctyp = match typ with | Typ_app (id, [A_aux (A_typ typ, _)]) when string_of_id id = "list" -> ctyp_suprema (ctyp_of_typ ctx typ) | _ -> raise (Reporting.err_general l "Invalid list type") in let gs = ngensym () in let mk_cons aval = let setup, cval, cleanup = compile_aval l ctx aval in setup @ [iextern l (CL_id (gs, CT_list ctyp)) (mk_id "cons", [ctyp]) [cval; V_id (gs, CT_list ctyp)]] @ cleanup in [idecl l (CT_list ctyp) gs] @ List.concat (List.map mk_cons (List.rev avals)), V_id (gs, CT_list ctyp), [iclear (CT_list ctyp) gs] (* let 
optimize_call l ctx clexp id args arg_ctyps ret_ctyp =
  let call () =
    let setup = ref [] in
    let cleanup = ref [] in
    let cast_args =
      List.map2 (fun ctyp cval ->
          let have_ctyp = cval_ctyp cval in
          if is_polymorphic ctyp then
            V_poly (cval, have_ctyp)
          else if C.specialize_calls || ctyp_equal ctyp have_ctyp then
            cval
          else
            let gs = ngensym () in
            setup := iinit l ctyp gs cval :: !setup;
            cleanup := iclear ctyp gs :: !cleanup;
            V_id (gs, ctyp))
        arg_ctyps args
    in
    if C.specialize_calls || ctyp_equal (clexp_ctyp clexp) ret_ctyp then
      !setup @ [ifuncall l clexp id cast_args] @ !cleanup
    else
      let gs = ngensym () in
      List.rev !setup
      @ [idecl l ret_ctyp gs;
         ifuncall l (CL_id (gs, ret_ctyp)) id cast_args;
         icopy l clexp (V_id (gs, ret_ctyp));
         iclear ret_ctyp gs]
      @ !cleanup
  in
  if not C.specialize_calls && Env.is_extern (fst id) ctx.tc_env "c" then
    let extern = Env.get_extern (fst id) ctx.tc_env "c" in
    begin match extern, List.map cval_ctyp args, clexp_ctyp clexp with
    | "slice", [CT_fbits _; CT_lint; _], CT_fbits (n, _) ->
       let start = ngensym () in
       [iinit l (CT_fint 64) start (List.nth args 1);
        icopy l clexp (V_call (Slice n, [List.nth args 0; V_id (start, CT_fint 64)]))]
    | "sail_unsigned", [CT_fbits _], CT_fint 64 ->
       [icopy l clexp (V_call (Unsigned 64, [List.nth args 0]))]
    | "sail_signed", [CT_fbits _], CT_fint 64 ->
       [icopy l clexp (V_call (Signed 64, [List.nth args 0]))]
    | "set_slice", [_; _; CT_fbits (n, _); CT_fint 64; CT_fbits (m, _)], CT_fbits (n', _) when n = n' ->
       [icopy l clexp (V_call (Set_slice, [List.nth args 2; List.nth args 3; List.nth args 4]))]
    | _, _, _ -> call ()
    end
  else call ()
*)

(** Compile a function application [id args] at location [l].  Returns a
    triple of (setup instructions, a continuation that takes the destination
    l-expression and produces the call instruction, cleanup instructions).
    The type-quantifier instantiation for a polymorphic call is accumulated
    by unifying each declared argument ctyp with the ctyp of the compiled
    argument value, and finally the declared return ctyp with the
    destination's ctyp. *)
let compile_funcall l ctx id args =
  let setup = ref [] in
  let cleanup = ref [] in

  let quant, Typ_aux (fn_typ, _) =
    (* If we can't find a function in local_env, fall back to the global env -
       this happens when representing assertions, exit, etc as functions in
       the IR. *)
    try Env.get_val_spec id ctx.local_env with
    | Type_error _ -> Env.get_val_spec id ctx.tc_env
  in
  let arg_typs, ret_typ =
    match fn_typ with
    | Typ_fn (arg_typs, ret_typ) -> arg_typs, ret_typ
    | _ -> assert false
  in
  let ctx' = { ctx with local_env = Env.add_typquant (id_loc id) quant ctx.tc_env } in
  let arg_ctyps, ret_ctyp = List.map (ctyp_of_typ ctx') arg_typs, ctyp_of_typ ctx' ret_typ in

  assert (List.length arg_ctyps = List.length args);

  let instantiation = ref KBindings.empty in

  let setup_arg ctyp aval =
    let arg_setup, cval, arg_cleanup = compile_aval l ctx aval in
    instantiation := KBindings.union merge_unifiers (ctyp_unify l ctyp (cval_ctyp cval)) !instantiation;
    setup := List.rev arg_setup @ !setup;
    cleanup := arg_cleanup @ !cleanup;
    cval
  in

  let setup_args = List.map2 setup_arg arg_ctyps args in

  List.rev !setup,
  begin fun clexp ->
    let instantiation = KBindings.union merge_unifiers (ctyp_unify l ret_ctyp (clexp_ctyp clexp)) !instantiation in
    ifuncall l clexp (id, KBindings.bindings instantiation |> List.map snd) setup_args
    (* iblock1 (optimize_call l ctx clexp (id, KBindings.bindings unifiers |> List.map snd) setup_args arg_ctyps ret_ctyp) *)
  end,
  !cleanup

(** Compute the ctyp an ANF pattern will match values of. *)
let rec apat_ctyp ctx (AP_aux (apat, env, _)) =
  let ctx = { ctx with local_env = env } in
  match apat with
  | AP_tup apats -> CT_tup (List.map (apat_ctyp ctx) apats)
  | AP_global (_, typ) -> ctyp_of_typ ctx typ
  | AP_cons (apat, _) -> CT_list (ctyp_suprema (apat_ctyp ctx apat))
  | AP_wild typ | AP_nil typ | AP_id (_, typ) -> ctyp_of_typ ctx typ
  | AP_app (_, _, typ) -> ctyp_of_typ ctx typ
  | AP_as (_, _, typ) -> ctyp_of_typ ctx typ

(** Compile an ANF pattern match of [cval] against an apat.  Returns
    (match instructions, cleanup instructions, updated context); the
    generated instructions jump to [case_label] whenever the pattern
    fails to match.  The context is extended with any immutable locals
    the pattern binds. *)
let rec compile_match ctx (AP_aux (apat_aux, env, l)) cval case_label =
  let ctx = { ctx with local_env = env } in
  let ctyp = cval_ctyp cval in
  match apat_aux with
  | AP_global (pid, typ) ->
     (* Binding to a global: just copy into the global location. *)
     let global_ctyp = ctyp_of_typ ctx typ in
     [icopy l (CL_id (global pid, global_ctyp)) cval], [], ctx

  | AP_id (pid, _) when is_ct_enum ctyp ->
     (* An identifier pattern over an enum type is either a fresh binder
        (Unbound) or a comparison against an existing enum member. *)
     begin match Env.lookup_id pid ctx.tc_env with
     | Unbound _ ->
        [idecl l ctyp (name pid); icopy l (CL_id (name pid, ctyp)) cval], [], ctx
     | _ ->
        [ijump l (V_call (Neq, [V_id (name pid, ctyp); cval])) case_label], [], ctx
     end

  | AP_id (pid, typ) ->
     let id_ctyp = ctyp_of_typ ctx typ in
     let ctx = { ctx with locals = Bindings.add pid (Immutable, id_ctyp) ctx.locals } in
     [idecl l id_ctyp (name pid); icopy l (CL_id (name pid, id_ctyp)) cval],
     [iclear id_ctyp (name pid)],
     ctx

  | AP_as (apat, id, typ) ->
     let id_ctyp = ctyp_of_typ ctx typ in
     let instrs, cleanup, ctx = compile_match ctx apat cval case_label in
     let ctx = { ctx with locals = Bindings.add id (Immutable, id_ctyp) ctx.locals } in
     instrs @ [idecl l id_ctyp (name id); icopy l (CL_id (name id, id_ctyp)) cval],
     iclear id_ctyp (name id) :: cleanup,
     ctx

  | AP_tup apats ->
     begin
       let get_tup n = V_tuple_member (cval, List.length apats, n) in
       (* Match each sub-pattern against the corresponding tuple member,
          threading the context and accumulating cleanup in reverse. *)
       let fold (instrs, cleanup, n, ctx) apat ctyp =
         let instrs', cleanup', ctx = compile_match ctx apat (get_tup n) case_label in
         instrs @ instrs', cleanup' @ cleanup, n + 1, ctx
       in
       match ctyp with
       | CT_tup ctyps ->
          let instrs, cleanup, _, ctx = List.fold_left2 fold ([], [], 0, ctx) apats ctyps in
          instrs, cleanup, ctx
       | _ -> failwith ("AP_tup with ctyp " ^ string_of_ctyp ctyp)
     end

  | AP_app (ctor, apat, variant_typ) ->
     begin match ctyp with
     | CT_variant (var_id, ctors) ->
        let pat_ctyp = apat_ctyp ctx apat in
        (* These should really be the same, something has gone wrong if they
           are not. *)
        if not (ctyp_equal (cval_ctyp cval) (ctyp_of_typ ctx variant_typ)) then
          raise (Reporting.err_general l
                   (Printf.sprintf "When compiling constructor pattern, %s should have the same type as %s"
                      (string_of_ctyp (cval_ctyp cval))
                      (string_of_ctyp (ctyp_of_typ ctx variant_typ))))
        else ();
        let unifiers, ctor_ctyp =
          (* Unify the generic variant shape with the concrete scrutinee type
             to recover the type-parameter instantiation for this ctor. *)
          let generic_ctors = Bindings.find var_id ctx.variants |> snd |> UBindings.bindings in
          let unifiers =
            ctyp_unify l (CT_variant (var_id, generic_ctors)) (cval_ctyp cval)
            |> KBindings.bindings |> List.map snd
          in
          let is_poly_ctor =
            List.exists (fun ((id, _), ctyp) -> Id.compare id ctor = 0 && is_polymorphic ctyp) generic_ctors
          in
          unifiers, if is_poly_ctor then ctyp_suprema pat_ctyp else pat_ctyp
        in
        let instrs, cleanup, ctx =
          compile_match ctx apat (V_ctor_unwrap (cval, (ctor, unifiers), ctor_ctyp)) case_label
        in
        [ijump l (V_ctor_kind (cval, ctor, unifiers, pat_ctyp)) case_label] @ instrs,
        cleanup,
        ctx
     | ctyp ->
        raise (Reporting.err_general l
                 (Printf.sprintf "Variant constructor %s : %s matching against non-variant type %s : %s"
                    (string_of_id ctor) (string_of_typ variant_typ)
                    (string_of_cval cval) (string_of_ctyp ctyp)))
     end

  | AP_wild _ -> [], [], ctx

  | AP_cons (hd_apat, tl_apat) ->
     begin match ctyp with
     | CT_list ctyp ->
        (* Fail on the empty list, then match head and tail separately. *)
        let hd_setup, hd_cleanup, ctx = compile_match ctx hd_apat (V_call (List_hd, [cval])) case_label in
        let tl_setup, tl_cleanup, ctx = compile_match ctx tl_apat (V_call (List_tl, [cval])) case_label in
        [ijump l (V_call (Eq, [cval; V_lit (VL_empty_list, CT_list ctyp)])) case_label] @ hd_setup @ tl_setup,
        tl_cleanup @ hd_cleanup,
        ctx
     | _ -> raise (Reporting.err_general l "Tried to pattern match cons on non list type")
     end

  | AP_nil _ ->
     [ijump l (V_call (Neq, [cval; V_lit (VL_empty_list, ctyp)])) case_label], [], ctx

let unit_cval = V_lit (VL_unit, CT_unit)

(** Compile an ANF l-expression into a Jib clexp, preferring the ctyp
    recorded for a local binding over the one derived from the typ. *)
let rec compile_alexp ctx alexp =
  match alexp with
  | AL_id (id, typ) ->
     let ctyp =
       match Bindings.find_opt id ctx.locals with
       | Some (_, ctyp) -> ctyp
       | None -> ctyp_of_typ ctx typ
     in
     CL_id
(name_or_global ctx id, ctyp) | AL_addr (id, typ) -> let ctyp = match Bindings.find_opt id ctx.locals with | Some (_, ctyp) -> ctyp | None -> ctyp_of_typ ctx typ in CL_addr (CL_id (name_or_global ctx id, ctyp)) | AL_field (alexp, field_id) -> CL_field (compile_alexp ctx alexp, (field_id, [])) let rec compile_aexp ctx (AE_aux (aexp_aux, env, l)) = let ctx = { ctx with local_env = env } in match aexp_aux with | AE_let (mut, id, binding_typ, binding, (AE_aux (_, body_env, _) as body), body_typ) -> let binding_ctyp = ctyp_of_typ { ctx with local_env = body_env } binding_typ in let setup, call, cleanup = compile_aexp ctx binding in let letb_setup, letb_cleanup = [idecl l binding_ctyp (name id); iblock1 (setup @ [call (CL_id (name id, binding_ctyp))] @ cleanup)], [iclear binding_ctyp (name id)] in let ctx = { ctx with locals = Bindings.add id (mut, binding_ctyp) ctx.locals } in let setup, call, cleanup = compile_aexp ctx body in letb_setup @ setup, call, cleanup @ letb_cleanup | AE_app (id, vs, _) -> compile_funcall l ctx id vs | AE_val aval -> let setup, cval, cleanup = compile_aval l ctx aval in setup, (fun clexp -> icopy l clexp cval), cleanup (* Compile case statements *) | AE_case (aval, cases, typ) -> let ctyp = ctyp_of_typ ctx typ in let aval_setup, cval, aval_cleanup = compile_aval l ctx aval in (* Get the number of cases, because we don't want to check branch coverage for matches with only a single case. 
*) let num_cases = List.length cases in let branch_id, on_reached = coverage_branch_reached l in let case_return_id = ngensym () in let finish_match_label = label "finish_match_" in let compile_case (apat, guard, body) = let case_label = label "case_" in if is_dead_aexp body then ( [ilabel case_label] ) else ( let trivial_guard = match guard with | AE_aux (AE_val (AV_lit (L_aux (L_true, _), _)), _, _) | AE_aux (AE_val (AV_cval (V_lit (VL_bool true, CT_bool), _)), _, _) -> true | _ -> false in let destructure, destructure_cleanup, ctx = compile_match ctx apat cval case_label in let guard_setup, guard_call, guard_cleanup = compile_aexp ctx guard in let body_setup, body_call, body_cleanup = compile_aexp ctx body in let gs = ngensym () in let case_instrs = destructure @ (if not trivial_guard then guard_setup @ [idecl l CT_bool gs; guard_call (CL_id (gs, CT_bool))] @ guard_cleanup @ [iif l (V_call (Bnot, [V_id (gs, CT_bool)])) (destructure_cleanup @ [igoto case_label]) [] CT_unit] else []) @ (if num_cases > 1 then coverage_branch_taken branch_id body else []) @ body_setup @ [body_call (CL_id (case_return_id, ctyp))] @ body_cleanup @ destructure_cleanup @ [igoto finish_match_label] in [iblock case_instrs; ilabel case_label] ) in aval_setup @ (if num_cases > 1 then on_reached else []) @ [idecl l ctyp case_return_id] @ List.concat (List.map compile_case cases) @ [imatch_failure l] @ [ilabel finish_match_label], (fun clexp -> icopy l clexp (V_id (case_return_id, ctyp))), [iclear ctyp case_return_id] @ aval_cleanup (* Compile try statement *) | AE_try (aexp, cases, typ) -> let ctyp = ctyp_of_typ ctx typ in let aexp_setup, aexp_call, aexp_cleanup = compile_aexp ctx aexp in let try_return_id = ngensym () in let post_exception_handlers_label = label "post_exception_handlers_" in let compile_case (apat, guard, body) = let trivial_guard = match guard with | AE_aux (AE_val (AV_lit (L_aux (L_true, _), _)), _, _) | AE_aux (AE_val (AV_cval (V_lit (VL_bool true, CT_bool), _)), _, _) 
-> true | _ -> false in let try_label = label "try_" in let exn_cval = V_id (current_exception, ctyp_of_typ ctx (mk_typ (Typ_id (mk_id "exception")))) in let destructure, destructure_cleanup, ctx = compile_match ctx apat exn_cval try_label in let guard_setup, guard_call, guard_cleanup = compile_aexp ctx guard in let body_setup, body_call, body_cleanup = compile_aexp ctx body in let gs = ngensym () in let case_instrs = destructure @ [icomment "end destructuring"] @ (if not trivial_guard then guard_setup @ [idecl l CT_bool gs; guard_call (CL_id (gs, CT_bool))] @ guard_cleanup @ [ijump l (V_call (Bnot, [V_id (gs, CT_bool)])) try_label] @ [icomment "end guard"] else []) @ body_setup @ [body_call (CL_id (try_return_id, ctyp))] @ body_cleanup @ destructure_cleanup @ [igoto post_exception_handlers_label] in [iblock case_instrs; ilabel try_label] in assert (ctyp_equal ctyp (ctyp_of_typ ctx typ)); [idecl l ctyp try_return_id; itry_block l (aexp_setup @ [aexp_call (CL_id (try_return_id, ctyp))] @ aexp_cleanup); ijump l (V_call (Bnot, [V_id (have_exception, CT_bool)])) post_exception_handlers_label; icopy l (CL_id (have_exception, CT_bool)) (V_lit (VL_bool false, CT_bool))] @ List.concat (List.map compile_case cases) @ [(* fallthrough *) icopy l (CL_id (have_exception, CT_bool)) (V_lit (VL_bool true, CT_bool)); ilabel post_exception_handlers_label], (fun clexp -> icopy l clexp (V_id (try_return_id, ctyp))), [] | AE_if (aval, then_aexp, else_aexp, if_typ) -> if is_dead_aexp then_aexp then compile_aexp ctx else_aexp else if is_dead_aexp else_aexp then compile_aexp ctx then_aexp else let if_ctyp = ctyp_of_typ ctx if_typ in let branch_id, on_reached = coverage_branch_reached l in let compile_branch aexp = let setup, call, cleanup = compile_aexp ctx aexp in fun clexp -> coverage_branch_taken branch_id aexp @ setup @ [call clexp] @ cleanup in let setup, cval, cleanup = compile_aval l ctx aval in setup, (fun clexp -> append_into_block on_reached (iif l cval (compile_branch then_aexp 
clexp) (compile_branch else_aexp clexp) if_ctyp)), cleanup (* FIXME: AE_record_update could be AV_record_update - would reduce some copying. *) | AE_record_update (aval, fields, typ) -> let ctyp = ctyp_of_typ ctx typ in let _ctors = match ctyp with | CT_struct (_, ctors) -> List.fold_left (fun m (k, v) -> UBindings.add k v m) UBindings.empty ctors | _ -> raise (Reporting.err_general l "Cannot perform record update for non-record type") in let gs = ngensym () in let compile_fields (id, aval) = let field_setup, cval, field_cleanup = compile_aval l ctx aval in field_setup @ [icopy l (CL_field (CL_id (gs, ctyp), (id, []))) cval] @ field_cleanup in let setup, cval, cleanup = compile_aval l ctx aval in [idecl l ctyp gs] @ setup @ [icopy l (CL_id (gs, ctyp)) cval] @ cleanup @ List.concat (List.map compile_fields (Bindings.bindings fields)), (fun clexp -> icopy l clexp (V_id (gs, ctyp))), [iclear ctyp gs] | AE_short_circuit (SC_and, aval, aexp) -> let branch_id, on_reached = coverage_branch_reached l in let left_setup, cval, left_cleanup = compile_aval l ctx aval in let right_setup, call, right_cleanup = compile_aexp ctx aexp in let right_coverage = coverage_branch_taken branch_id aexp in let gs = ngensym () in left_setup @ on_reached @ [ idecl l CT_bool gs; iif l cval (right_coverage @ right_setup @ [call (CL_id (gs, CT_bool))] @ right_cleanup) [icopy l (CL_id (gs, CT_bool)) (V_lit (VL_bool false, CT_bool))] CT_bool ] @ left_cleanup, (fun clexp -> icopy l clexp (V_id (gs, CT_bool))), [] | AE_short_circuit (SC_or, aval, aexp) -> let branch_id, on_reached = coverage_branch_reached l in let left_setup, cval, left_cleanup = compile_aval l ctx aval in let right_setup, call, right_cleanup = compile_aexp ctx aexp in let right_coverage = coverage_branch_taken branch_id aexp in let gs = ngensym () in left_setup @ on_reached @ [ idecl l CT_bool gs; iif l cval [icopy l (CL_id (gs, CT_bool)) (V_lit (VL_bool true, CT_bool))] (right_coverage @ right_setup @ [call (CL_id (gs, CT_bool))] 
@ right_cleanup) CT_bool ] @ left_cleanup, (fun clexp -> icopy l clexp (V_id (gs, CT_bool))), [] (* This is a faster assignment rule for updating fields of a struct. *) | AE_assign (AL_id (id, assign_typ), AE_aux (AE_record_update (AV_id (rid, _), fields, typ), _, _)) when Id.compare id rid = 0 -> let compile_fields (field_id, aval) = let field_setup, cval, field_cleanup = compile_aval l ctx aval in field_setup @ [icopy l (CL_field (CL_id (name_or_global ctx id, ctyp_of_typ ctx typ), (field_id, []))) cval] @ field_cleanup in List.concat (List.map compile_fields (Bindings.bindings fields)), (fun clexp -> icopy l clexp unit_cval), [] | AE_assign (alexp, aexp) -> let setup, call, cleanup = compile_aexp ctx aexp in setup @ [call (compile_alexp ctx alexp)], (fun clexp -> icopy l clexp unit_cval), cleanup | AE_block (aexps, aexp, _) -> let block = compile_block ctx aexps in let setup, call, cleanup = compile_aexp ctx aexp in block @ setup, call, cleanup | AE_loop (While, cond, body) -> let loop_start_label = label "while_" in let loop_end_label = label "wend_" in let cond_setup, cond_call, cond_cleanup = compile_aexp ctx cond in let body_setup, body_call, body_cleanup = compile_aexp ctx body in let gs = ngensym () in let unit_gs = ngensym () in let loop_test = V_call (Bnot, [V_id (gs, CT_bool)]) in [idecl l CT_bool gs; idecl l CT_unit unit_gs] @ [ilabel loop_start_label] @ [iblock (cond_setup @ [cond_call (CL_id (gs, CT_bool))] @ cond_cleanup @ [ijump l loop_test loop_end_label] @ body_setup @ [body_call (CL_id (unit_gs, CT_unit))] @ body_cleanup @ [igoto loop_start_label])] @ [ilabel loop_end_label], (fun clexp -> icopy l clexp unit_cval), [] | AE_loop (Until, cond, body) -> let loop_start_label = label "repeat_" in let loop_end_label = label "until_" in let cond_setup, cond_call, cond_cleanup = compile_aexp ctx cond in let body_setup, body_call, body_cleanup = compile_aexp ctx body in let gs = ngensym () in let unit_gs = ngensym () in let loop_test = V_id (gs, CT_bool) 
in [idecl l CT_bool gs; idecl l CT_unit unit_gs] @ [ilabel loop_start_label] @ [iblock (body_setup @ [body_call (CL_id (unit_gs, CT_unit))] @ body_cleanup @ cond_setup @ [cond_call (CL_id (gs, CT_bool))] @ cond_cleanup @ [ijump l loop_test loop_end_label] @ [igoto loop_start_label])] @ [ilabel loop_end_label], (fun clexp -> icopy l clexp unit_cval), [] | AE_cast (aexp, typ) -> compile_aexp ctx aexp | AE_return (aval, typ) -> let fn_return_ctyp = match Env.get_ret_typ env with | Some typ -> ctyp_of_typ ctx typ | None -> raise (Reporting.err_general l "No function return type found when compiling return statement") in (* Cleanup info will be re-added by fix_early_(heap/stack)_return *) let return_setup, cval, _ = compile_aval l ctx aval in let creturn = if ctyp_equal fn_return_ctyp (cval_ctyp cval) then [ireturn cval] else let gs = ngensym () in [idecl l fn_return_ctyp gs; icopy l (CL_id (gs, fn_return_ctyp)) cval; ireturn (V_id (gs, fn_return_ctyp))] in return_setup @ creturn, (fun clexp -> icomment "unreachable after return"), [] | AE_throw (aval, typ) -> (* Cleanup info will be handled by fix_exceptions *) let throw_setup, cval, _ = compile_aval l ctx aval in throw_setup @ [ithrow l cval], (fun clexp -> icomment "unreachable after throw"), [] | AE_exit (aval, typ) -> let exit_setup, cval, _ = compile_aval l ctx aval in exit_setup @ [iexit l], (fun clexp -> icomment "unreachable after exit"), [] | AE_field (aval, id, typ) -> let setup, cval, cleanup = compile_aval l ctx aval in let _ctyp = match cval_ctyp cval with | CT_struct (struct_id, fields) -> begin match Util.assoc_compare_opt UId.compare (id, []) fields with | Some ctyp -> ctyp | None -> raise (Reporting.err_unreachable l __POS__ ("Struct " ^ string_of_id struct_id ^ " does not have expected field " ^ string_of_id id ^ "?\nFields: " ^ Util.string_of_list ", " (fun (uid, ctyp) -> string_of_uid uid ^ ": " ^ string_of_ctyp ctyp) fields)) end | _ -> raise (Reporting.err_unreachable l __POS__ "Field access on 
non-struct type in ANF representation!") in setup, (fun clexp -> icopy l clexp (V_field (cval, (id, [])))), cleanup | AE_for (loop_var, loop_from, loop_to, loop_step, Ord_aux (ord, _), body) -> (* We assume that all loop indices are safe to put in a CT_fint. *) let ctx = { ctx with locals = Bindings.add loop_var (Immutable, CT_fint 64) ctx.locals } in let is_inc = match ord with | Ord_inc -> true | Ord_dec -> false | Ord_var _ -> raise (Reporting.err_general l "Polymorphic loop direction in C backend") in (* Loop variables *) let from_setup, from_call, from_cleanup = compile_aexp ctx loop_from in let from_gs = ngensym () in let to_setup, to_call, to_cleanup = compile_aexp ctx loop_to in let to_gs = ngensym () in let step_setup, step_call, step_cleanup = compile_aexp ctx loop_step in let step_gs = ngensym () in let variable_init gs setup call cleanup = [idecl l (CT_fint 64) gs; iblock (setup @ [call (CL_id (gs, CT_fint 64))] @ cleanup)] in let loop_start_label = label "for_start_" in let loop_end_label = label "for_end_" in let body_setup, body_call, body_cleanup = compile_aexp ctx body in let body_gs = ngensym () in let loop_var = name loop_var in let loop_body prefix continue = prefix @ [iblock ([ijump l (V_call ((if is_inc then Igt else Ilt), [V_id (loop_var, CT_fint 64); V_id (to_gs, CT_fint 64)])) loop_end_label] @ body_setup @ [body_call (CL_id (body_gs, CT_unit))] @ body_cleanup @ [icopy l (CL_id (loop_var, (CT_fint 64))) (V_call ((if is_inc then Iadd else Isub), [V_id (loop_var, CT_fint 64); V_id (step_gs, CT_fint 64)]))] @ continue ())] in (* We can either generate an actual loop body for C, or unroll the body for SMT *) let actual = loop_body [ilabel loop_start_label] (fun () -> [igoto loop_start_label]) in let rec unroll max n = loop_body [] (fun () -> if n < max then unroll max (n + 1) else [imatch_failure l]) in let body = match C.unroll_loops with Some times -> unroll times 0 | None -> actual in variable_init from_gs from_setup from_call from_cleanup @ 
variable_init to_gs to_setup to_call to_cleanup @ variable_init step_gs step_setup step_call step_cleanup @ [iblock ([idecl l (CT_fint 64) loop_var; icopy l (CL_id (loop_var, (CT_fint 64))) (V_id (from_gs, CT_fint 64)); idecl l CT_unit body_gs] @ body @ [ilabel loop_end_label])], (fun clexp -> icopy l clexp unit_cval), [] and compile_block ctx = function | [] -> [] | (AE_aux (_, _, l) as exp) :: exps -> let setup, call, cleanup = compile_aexp ctx exp in let rest = compile_block ctx exps in let gs = ngensym () in iblock (setup @ [idecl l CT_unit gs; call (CL_id (gs, CT_unit))] @ cleanup) :: rest let fast_int = function | CT_lint when !optimize_aarch64_fast_struct -> CT_fint 64 | ctyp -> ctyp (** Compile a sail type definition into a IR one. Most of the actual work of translating the typedefs into C is done by the code generator, as it's easy to keep track of structs, tuples and unions in their sail form at this level, and leave the fiddly details of how they get mapped to C in the next stage. This function also adds details of the types it compiles to the context, ctx, which is why it returns a ctypdef * ctx pair. 
**)
let compile_type_def ctx (TD_aux (type_def, (l, _))) =
  match type_def with
  | TD_enum (id, ids, _) ->
     CTD_enum (id, ids),
     { ctx with enums = Bindings.add id (IdSet.of_list ids) ctx.enums }

  | TD_record (id, typq, ctors, _) ->
     let record_ctx = { ctx with local_env = Env.add_typquant l typq ctx.local_env } in
     let ctors =
       List.fold_left
         (fun ctors (typ, id) -> UBindings.add (id, []) (fast_int (ctyp_of_typ record_ctx typ)) ctors)
         UBindings.empty ctors
     in
     let params = quant_kopts typq |> List.filter is_typ_kopt |> List.map kopt_kid in
     CTD_struct (id, UBindings.bindings ctors),
     { ctx with records = Bindings.add id (params, ctors) ctx.records }

  | TD_variant (id, typq, tus, _) ->
     let compile_tu = function
       | Tu_aux (Tu_ty_id (typ, id), _) ->
          let ctx = { ctx with local_env = Env.add_typquant (id_loc id) typq ctx.local_env } in
          ctyp_of_typ ctx typ, id
     in
     let ctus =
       List.fold_left
         (fun ctus (ctyp, id) -> UBindings.add (id, []) ctyp ctus)
         UBindings.empty (List.map compile_tu tus)
     in
     let params = quant_kopts typq |> List.filter is_typ_kopt |> List.map kopt_kid in
     CTD_variant (id, UBindings.bindings ctus),
     { ctx with variants = Bindings.add id (params, ctus) ctx.variants }

  (* Will be re-written before here, see bitfield.ml *)
  | TD_bitfield _ ->
     Reporting.unreachable l __POS__ "Cannot compile TD_bitfield"

  (* All type abbreviations are filtered out in compile_def *)
  | TD_abbrev _ ->
     Reporting.unreachable l __POS__ "Found TD_abbrev in compile_type_def"

(** For every [I_decl]/[I_init] in [instrs] that is not already followed by a
    matching [I_clear], produce the [iclear] instruction that releases it.
    Used to clean up heap-allocated locals on early exits. *)
let generate_cleanup instrs =
  let cleanup_of (I_aux (instr, _)) =
    match instr with
    | I_init (ctyp, id, cval) -> [(id, iclear ctyp id)]
    | I_decl (ctyp, id) -> [(id, iclear ctyp id)]
    | _ -> []
  in
  (* Collect the names that the block already clears itself. *)
  let is_clear ids = function
    | I_aux (I_clear (_, id), _) -> NameSet.add id ids
    | _ -> ids
  in
  let cleaned = List.fold_left is_clear NameSet.empty instrs in
  instrs
  |> List.map cleanup_of
  |> List.concat
  |> List.filter (fun (id, _) -> not (NameSet.mem id cleaned))
  |> List.map snd

(** Rewrite throws and potentially-throwing calls in [instrs] so they set
    [current_exception]/[have_exception], run cleanup for locals in scope,
    and jump to a single end-of-block label.  When [?return] is given, an
    [iundefined] of that ctyp is placed after the label. *)
let fix_exception_block ?return:(return=None) ctx instrs =
  let end_block_label = label "end_block_exception_" in
  let is_exception_stop (I_aux (instr, _)) =
    match instr with
    | I_throw _ | I_if _ | I_block _ | I_funcall _ -> true
    | _ -> false
  in
  (* In this function 'after' is instructions after the one we've matched on,
     'before' is instructions before the instruction we've matched with, but
     after the previous match, and 'historic' are all the befores from
     previous matches. *)
  let rec rewrite_exception historic instrs =
    match instr_split_at is_exception_stop instrs with
    | instrs, [] -> instrs

    | before, I_aux (I_block instrs, _) :: after ->
       before
       @ [iblock (rewrite_exception (historic @ before) instrs)]
       @ rewrite_exception (historic @ before) after

    | before, I_aux (I_if (cval, then_instrs, else_instrs, ctyp), (_, l)) :: after ->
       let historic = historic @ before in
       before
       @ [iif l cval (rewrite_exception historic then_instrs) (rewrite_exception historic else_instrs) ctyp]
       @ rewrite_exception historic after

    | before, I_aux (I_throw cval, (_, l)) :: after ->
       before
       @ [icopy l (CL_id (current_exception, cval_ctyp cval)) cval;
          icopy l (CL_id (have_exception, CT_bool)) (V_lit (VL_bool true, CT_bool))]
       @ (if C.track_throw then
            let loc_string = Reporting.short_loc_to_string l in
            [icopy l (CL_id (throw_location, CT_string)) (V_lit (VL_string loc_string, CT_string))]
          else [])
       @ generate_cleanup (historic @ before)
       @ [igoto end_block_label]
       @ rewrite_exception (historic @ before) after

    | before, (I_aux (I_funcall (_, _, f, _), (_, l)) as funcall) :: after ->
       let effects =
         match Bindings.find_opt (fst f) ctx.effect_info.functions with
         | Some effects -> effects
         (* Constructors and back-end built-in value operations might not be
            present *)
         | None -> Effects.EffectSet.empty
       in
       if Effects.throws effects then
         before
         @ [funcall;
            iif l (V_id (have_exception, CT_bool))
              (generate_cleanup (historic @ before) @ [igoto end_block_label])
              [] CT_unit]
         @ rewrite_exception (historic @ before) after
       else
         before @ funcall :: rewrite_exception (historic @ before) after

    | _, _ -> assert false (* unreachable *)
  in
  match return with
  | None -> rewrite_exception [] instrs @ [ilabel end_block_label]
  | Some ctyp -> rewrite_exception [] instrs @ [ilabel end_block_label; iundefined ctyp]

(** Apply [f] to the body of every [I_try_block], bottom-up. *)
let rec map_try_block f (I_aux (instr, aux)) =
  let instr =
    match instr with
    | I_decl _ | I_reset _ | I_init _ | I_reinit _ -> instr
    | I_if (cval, instrs1, instrs2, ctyp) ->
       I_if (cval, List.map (map_try_block f) instrs1, List.map (map_try_block f) instrs2, ctyp)
    | I_funcall _ | I_copy _ | I_clear _ | I_throw _ | I_return _ -> instr
    | I_block instrs -> I_block (List.map (map_try_block f) instrs)
    | I_try_block instrs -> I_try_block (f (List.map (map_try_block f) instrs))
    | I_comment _ | I_label _ | I_goto _ | I_raw _ | I_jump _ | I_exit _
    | I_undefined _ | I_end _ -> instr
  in
  I_aux (instr, aux)

(** Fix exception handling inside every try block, then in the enclosing
    instruction sequence itself. *)
let fix_exception ?return:(return=None) ctx instrs =
  let instrs = List.map (map_try_block (fix_exception_block ctx)) instrs in
  fix_exception_block ~return:return ctx instrs

(** Compile a single function-argument pattern of type [ctyp].  Returns the
    identifier the argument will be bound to, paired with any destructuring
    instructions and their cleanup; trivial patterns need no destructuring. *)
let rec compile_arg_pat ctx label (P_aux (p_aux, (l, _)) as pat) ctyp =
  match p_aux with
  | P_id id -> (id, ([], []))
  | P_wild ->
     let gs = gensym () in
     (gs, ([], []))
  | P_tup [] | P_lit (L_aux (L_unit, _)) ->
     let gs = gensym () in
     (gs, ([], []))
  | P_var (pat, _) -> compile_arg_pat ctx label pat ctyp
  | P_typ (_, pat) -> compile_arg_pat ctx label pat ctyp
  | _ ->
     (* A non-trivial pattern: compile it as a full ANF match against a
        fresh binder. *)
     let apat = anf_pat pat in
     let gs = gensym () in
     let destructure, cleanup, _ = compile_match ctx apat (V_id (name gs, ctyp)) label in
     (gs, (destructure, cleanup))

(** Compile the whole argument pattern of a function against the list of
    argument ctyps, splitting tuple patterns into one binder per argument. *)
let rec compile_arg_pats ctx label (P_aux (p_aux, (l, _)) as pat) ctyps =
  match p_aux with
  | P_typ (_, pat) -> compile_arg_pats ctx label pat ctyps
  | P_tup pats when List.length pats = List.length ctyps ->
     [], List.map2 (fun pat ctyp -> compile_arg_pat ctx label pat ctyp) pats ctyps, []
  | _ when List.length ctyps = 1 ->
     [], [compile_arg_pat ctx label pat (List.nth ctyps 0)], []
  | _ ->
     (* One pattern over several arguments: rebuild a tuple from fresh
        per-argument binders and destructure the pattern against it. *)
     let arg_id, (destructure, cleanup) = compile_arg_pat ctx label pat (CT_tup
ctyps) in
     let new_ids = List.map (fun ctyp -> gensym (), ctyp) ctyps in
     destructure
     @ [idecl l (CT_tup ctyps) (name arg_id)]
     @ List.mapi (fun i (id, ctyp) ->
           icopy l (CL_tuple (CL_id (name arg_id, CT_tup ctyps), i)) (V_id (name id, ctyp)))
         new_ids,
     List.map (fun (id, _) -> id, ([], [])) new_ids,
     [iclear (CT_tup ctyps) (name arg_id)] @ cleanup

(** Merge per-argument (destructure, cleanup) pairs: destructuring runs in
    order, cleanup in reverse order. *)
let combine_destructure_cleanup xs =
  List.concat (List.map fst xs),
  List.concat (List.rev (List.map snd xs))

(** Wrap non-empty destructuring code so that a failed argument match jumps
    over the body to [fail_label] and raises a match failure. *)
let fix_destructure l fail_label = function
  | ([], cleanup) -> ([], cleanup)
  | destructure, cleanup ->
     let body_label = label "fundef_body_" in
     (destructure @ [igoto body_label; ilabel fail_label; imatch_failure l; ilabel body_label],
      cleanup)

(** Functions that have heap-allocated return types are implemented by
    passing a pointer to a location where the return value should be stored.
    The ANF -> Sail IR pass for expressions simply outputs an I_return
    instruction for any return value, so this function walks over the IR ast
    for expressions and modifies the return statements into code that sets
    that pointer, as well as adds extra control flow to cleanup
    heap-allocated variables correctly when a function terminates early.
    See the generate_cleanup function for how this is done. *)
let fix_early_return l ret instrs =
  let end_function_label = label "end_function_" in
  let is_return_recur (I_aux (instr, _)) =
    match instr with
    | I_return _ | I_undefined _ | I_if _ | I_block _ | I_try_block _ -> true
    | _ -> false
  in
  let rec rewrite_return historic instrs =
    match instr_split_at is_return_recur instrs with
    | instrs, [] -> instrs

    | before, I_aux (I_try_block instrs, (_, l)) :: after ->
       before
       @ [itry_block l (rewrite_return (historic @ before) instrs)]
       @ rewrite_return (historic @ before) after

    | before, I_aux (I_block instrs, _) :: after ->
       before
       @ [iblock (rewrite_return (historic @ before) instrs)]
       @ rewrite_return (historic @ before) after

    | before, I_aux (I_if (cval, then_instrs, else_instrs, ctyp), (_, l)) :: after ->
       let historic = historic @ before in
       before
       @ [iif l cval (rewrite_return historic then_instrs) (rewrite_return historic else_instrs) ctyp]
       @ rewrite_return historic after

    | before, I_aux (I_return cval, (_, l)) :: after ->
       let cleanup_label = label "cleanup_" in
       let end_cleanup_label = label "end_cleanup_" in
       before
       @ [icopy l ret cval; igoto cleanup_label]
       (* This is probably dead code until cleanup_label, but we cannot be
          sure there are no jumps into it. *)
       @ rewrite_return (historic @ before) after
       @ [igoto end_cleanup_label; ilabel cleanup_label]
       @ generate_cleanup (historic @ before)
       @ [igoto end_function_label; ilabel end_cleanup_label]

    | before, I_aux (I_undefined _, (_, l)) :: after ->
       let cleanup_label = label "cleanup_" in
       let end_cleanup_label = label "end_cleanup_" in
       before
       @ [igoto cleanup_label]
       @ rewrite_return (historic @ before) after
       @ [igoto end_cleanup_label; ilabel cleanup_label]
       @ generate_cleanup (historic @ before)
       @ [igoto end_function_label; ilabel end_cleanup_label]

    | _, _ -> assert false
  in
  rewrite_return [] instrs @ [ilabel end_function_label; iend l]

(** This pass ensures that all variables created by I_decl have unique
    names *)
let unique_names =
  let unique_counter = ref 0 in
  let unique_id () =
    let id = mk_id ("u#" ^ string_of_int !unique_counter) in
    incr unique_counter;
    name id
  in
  let rec opt seen = function
    | I_aux (I_decl (ctyp, id), aux) :: instrs when NameSet.mem id seen ->
       (* Shadowed declaration: rename it and every later use of it. *)
       let id' = unique_id () in
       let instrs', seen = opt seen instrs in
       I_aux (I_decl (ctyp, id'), aux) :: instrs_rename id id' instrs', seen
    | I_aux (I_decl (ctyp, id), aux) :: instrs ->
       let instrs', seen = opt (NameSet.add id seen) instrs in
       I_aux (I_decl (ctyp, id), aux) :: instrs', seen
    | I_aux (I_block block, aux) :: instrs ->
       let block', seen = opt seen block in
       let instrs', seen = opt seen instrs in
       I_aux (I_block block', aux) :: instrs', seen
    | I_aux (I_try_block block, aux) :: instrs ->
       let block', seen = opt seen block in
       let instrs', seen = opt seen instrs in
       I_aux (I_try_block block', aux) :: instrs', seen
    | I_aux (I_if (cval, then_instrs, else_instrs, ctyp), aux) :: instrs ->
       let then_instrs', seen = opt seen then_instrs in
       let else_instrs', seen = opt seen else_instrs in
       let instrs', seen = opt seen instrs in
       I_aux (I_if (cval, then_instrs', else_instrs', ctyp), aux) :: instrs', seen
    | instr :: instrs ->
       let instrs', seen = opt seen instrs in
       instr :: instrs', seen
    | [] -> [], seen
  in
  fun instrs -> fst
(opt NameSet.empty instrs) let letdef_count = ref 0 let compile_funcl ctx id pat guard exp = (* Find the function's type. *) let quant, Typ_aux (fn_typ, _) = try Env.get_val_spec id ctx.local_env with Type_error _ -> Env.get_val_spec id ctx.tc_env in let arg_typs, ret_typ = match fn_typ with | Typ_fn (arg_typs, ret_typ) -> arg_typs, ret_typ | _ -> assert false in (* Handle the argument pattern. *) let fundef_label = label "fundef_fail_" in let orig_ctx = ctx in (* The context must be updated before we call ctyp_of_typ on the argument types. *) let ctx = { ctx with local_env = Env.add_typquant (id_loc id) quant ctx.tc_env } in let arg_ctyps = List.map (ctyp_of_typ ctx) arg_typs in let ret_ctyp = ctyp_of_typ ctx ret_typ in (* Compile the function arguments as patterns. *) let arg_setup, compiled_args, arg_cleanup = compile_arg_pats ctx fundef_label pat arg_ctyps in let ctx = (* We need the primop analyzer to be aware of the function argument types, so put them in ctx *) List.fold_left2 (fun ctx (id, _) ctyp -> { ctx with locals = Bindings.add id (Immutable, ctyp) ctx.locals }) ctx compiled_args arg_ctyps in let guard_bindings = ref IdSet.empty in let guard_instrs = match guard with | Some guard -> let (AE_aux (_, _, l) as guard) = anf guard in guard_bindings := aexp_bindings guard; let guard_aexp = C.optimize_anf ctx (no_shadow (pat_ids pat) guard) in let guard_setup, guard_call, guard_cleanup = compile_aexp ctx guard_aexp in let guard_label = label "guard_" in let gs = ngensym () in [iblock ( [idecl l CT_bool gs] @ guard_setup @ [guard_call (CL_id (gs, CT_bool))] @ guard_cleanup @ [ijump (id_loc id) (V_id (gs, CT_bool)) guard_label; imatch_failure l; ilabel guard_label] )] | None -> [] in (* Optimize and compile the expression to ANF. 
*) let aexp = C.optimize_anf ctx (no_shadow (IdSet.union (pat_ids pat) !guard_bindings) (anf exp)) in let setup, call, cleanup = compile_aexp ctx aexp in let destructure, destructure_cleanup = compiled_args |> List.map snd |> combine_destructure_cleanup |> fix_destructure (id_loc id) fundef_label in let instrs = arg_setup @ destructure @ guard_instrs @ setup @ [call (CL_id (return, ret_ctyp))] @ cleanup @ destructure_cleanup @ arg_cleanup in let instrs = fix_early_return (exp_loc exp) (CL_id (return, ret_ctyp)) instrs in let instrs = unique_names instrs in let instrs = fix_exception ~return:(Some ret_ctyp) ctx instrs in let instrs = coverage_function_entry id (exp_loc exp) @ instrs in [CDEF_fundef (id, None, List.map fst compiled_args, instrs)], orig_ctx (** Compile a Sail toplevel definition into an IR definition **) let rec compile_def n total ctx def = match def with | DEF_fundef (FD_aux (FD_function (_, _, [FCL_aux (FCL_Funcl (id, _), _)]), _)) when !opt_memo_cache -> let digest = def |> Pretty_print_sail.doc_def |> Pretty_print_sail.to_string |> Digest.string in let cachefile = Filename.concat "_sbuild" ("ccache" ^ Digest.to_hex digest) in let cached = if Sys.file_exists cachefile then let in_chan = open_in cachefile in try let compiled = Marshal.from_channel in_chan in close_in in_chan; Some (compiled, ctx) with | _ -> close_in in_chan; None else None in begin match cached with | Some (compiled, ctx) -> Util.progress "Compiling " (string_of_id id) n total; compiled, ctx | None -> let compiled, ctx = compile_def' n total ctx def in let out_chan = open_out cachefile in Marshal.to_channel out_chan compiled [Marshal.Closures]; close_out out_chan; compiled, ctx end | _ -> compile_def' n total ctx def and compile_def' n total ctx = function | DEF_reg_dec (DEC_aux (DEC_reg (typ, id, None), _)) -> [CDEF_reg_dec (id, ctyp_of_typ ctx typ, [])], ctx | DEF_reg_dec (DEC_aux (DEC_reg (typ, id, Some exp), _)) -> let aexp = C.optimize_anf ctx (no_shadow IdSet.empty (anf 
exp)) in let setup, call, cleanup = compile_aexp ctx aexp in let instrs = setup @ [call (CL_id (global id, ctyp_of_typ ctx typ))] @ cleanup in let instrs = unique_names instrs in [CDEF_reg_dec (id, ctyp_of_typ ctx typ, instrs)], ctx | DEF_spec (VS_aux (VS_val_spec (_, id, ext, _), _)) -> let quant, Typ_aux (fn_typ, _) = Env.get_val_spec id ctx.tc_env in let extern = if Env.is_extern id ctx.tc_env "c" then Some (Env.get_extern id ctx.tc_env "c") else None in let arg_typs, ret_typ = match fn_typ with | Typ_fn (arg_typs, ret_typ) -> arg_typs, ret_typ | _ -> assert false in let ctx' = { ctx with local_env = Env.add_typquant (id_loc id) quant ctx.local_env } in let arg_ctyps, ret_ctyp = List.map (ctyp_of_typ ctx') arg_typs, ctyp_of_typ ctx' ret_typ in [CDEF_spec (id, extern, arg_ctyps, ret_ctyp)], { ctx with valspecs = Bindings.add id (extern, arg_ctyps, ret_ctyp) ctx.valspecs } | DEF_fundef (FD_aux (FD_function (_, _, [FCL_aux (FCL_Funcl (id, Pat_aux (Pat_exp (pat, exp), _)), _)]), _)) -> Util.progress "Compiling " (string_of_id id) n total; compile_funcl ctx id pat None exp | DEF_fundef (FD_aux (FD_function (_, _, [FCL_aux (FCL_Funcl (id, Pat_aux (Pat_when (pat, guard, exp), _)), _)]), _)) -> Util.progress "Compiling " (string_of_id id) n total; compile_funcl ctx id pat (Some guard) exp | DEF_fundef (FD_aux (FD_function (_, _, []), (l, _))) -> raise (Reporting.err_general l "Encountered function with no clauses") | DEF_fundef (FD_aux (FD_function (_, _, _ :: _ :: _), (l, _))) -> raise (Reporting.err_general l "Encountered function with multiple clauses") (* All abbreviations should expanded by the typechecker, so we don't need to translate type abbreviations into C typedefs. 
*) | DEF_type (TD_aux (TD_abbrev _, _)) -> [], ctx | DEF_type type_def -> let tdef, ctx = compile_type_def ctx type_def in [CDEF_type tdef], ctx | DEF_val (LB_aux (LB_val (pat, exp), _)) -> let ctyp = ctyp_of_typ ctx (typ_of_pat pat) in let aexp = C.optimize_anf ctx (no_shadow IdSet.empty (anf exp)) in let setup, call, cleanup = compile_aexp ctx aexp in let apat = anf_pat ~global:true pat in let gs = ngensym () in let end_label = label "let_end_" in let destructure, destructure_cleanup, _ = compile_match ctx apat (V_id (gs, ctyp)) end_label in let gs_setup, gs_cleanup = [idecl (exp_loc exp) ctyp gs], [iclear ctyp gs] in let bindings = List.map (fun (id, typ) -> id, ctyp_of_typ ctx typ) (apat_globals apat) in let n = !letdef_count in incr letdef_count; let instrs = gs_setup @ setup @ [call (CL_id (gs, ctyp))] @ cleanup @ destructure @ destructure_cleanup @ gs_cleanup @ [ilabel end_label] in let instrs = unique_names instrs in [CDEF_let (n, bindings, instrs)], { ctx with letbinds = n :: ctx.letbinds } (* Only DEF_default that matters is default Order, but all order polymorphism is specialised by this point. *) | DEF_default _ -> [], ctx (* Overloading resolved by type checker *) | DEF_overload _ -> [], ctx (* Only the parser and sail pretty printer care about this. *) | DEF_fixity _ -> [], ctx | DEF_pragma ("abstract", id_str, _) -> [CDEF_pragma ("abstract", id_str)], ctx (* We just ignore any pragmas we don't want to deal with. 
*) | DEF_pragma _ -> [], ctx (* Termination measures only needed for Coq, and other theorem prover output *) | DEF_measure _ -> [], ctx | DEF_loop_measures _ -> [], ctx | DEF_internal_mutrec fundefs -> let defs = List.map (fun fdef -> DEF_fundef fdef) fundefs in List.fold_left (fun (cdefs, ctx) def -> let cdefs', ctx = compile_def n total ctx def in (cdefs @ cdefs', ctx)) ([], ctx) defs (* Scattereds, mapdefs, and event related definitions should be removed by this point *) | (DEF_scattered _ | DEF_mapdef _ | DEF_outcome _ | DEF_impl _ | DEF_instantiation _) as def -> Reporting.unreachable (def_loc def) __POS__ ("Could not compile:\n" ^ Pretty_print_sail.to_string (Pretty_print_sail.doc_def def)) module IdGraph = Graph.Make(Id) module IdGraphNS = Set.Make(Id) let callgraph cdefs = List.fold_left (fun graph cdef -> match cdef with | CDEF_fundef (id, _, _, body) -> let graph = ref graph in List.iter (iter_instr (function | I_aux (I_funcall (_, _, (call, _), _), _) -> graph := IdGraph.add_edge id call !graph | _ -> () )) body; !graph | _ -> graph ) IdGraph.empty cdefs let mangle_mono_id id ctx ctyps = append_id id ("<" ^ Util.string_of_list "," (mangle_string_of_ctyp ctx) ctyps ^ ">") let rec specialize_functions ctx cdefs = let polymorphic_functions = Util.map_filter (function | CDEF_spec (id, _, param_ctyps, ret_ctyp) -> if List.exists is_polymorphic param_ctyps || is_polymorphic ret_ctyp then Some id else None | _ -> None ) cdefs |> IdSet.of_list in (* First we find all the 'monomorphic calls', places where a polymorphic function is applied to only concrete type arguments At each such location we remove the type arguments and mangle the call name using them *) let monomorphic_calls = ref Bindings.empty in let collect_monomorphic_calls = function | I_aux (I_funcall (clexp, extern, (id, ctyp_args), args), aux) when IdSet.mem id polymorphic_functions && not (List.exists is_polymorphic ctyp_args) -> monomorphic_calls := Bindings.update id (function None -> Some 
(CTListSet.singleton ctyp_args) | Some calls -> Some (CTListSet.add ctyp_args calls)) !monomorphic_calls; I_aux (I_funcall (clexp, extern, (mangle_mono_id id ctx ctyp_args, []), args), aux) | instr -> instr in let cdefs = List.rev_map (cdef_map_instr collect_monomorphic_calls) cdefs |> List.rev in (* Now we duplicate function defintions and type declarations for each of the monomorphic calls we just found. *) let spec_tyargs = ref Bindings.empty in let rec specialize_fundefs ctx prior = function | (CDEF_spec (id, extern, param_ctyps, ret_ctyp) as orig_cdef) :: cdefs when Bindings.mem id !monomorphic_calls -> let tyargs = List.fold_left (fun set ctyp -> KidSet.union (ctyp_vars ctyp) set) KidSet.empty (ret_ctyp :: param_ctyps) in spec_tyargs := Bindings.add id tyargs !spec_tyargs; let specialized_specs = List.map (fun instantiation -> let substs = List.fold_left2 (fun substs tyarg ty -> KBindings.add tyarg ty substs) KBindings.empty (KidSet.elements tyargs) instantiation in let param_ctyps = List.map (subst_poly substs) param_ctyps in let ret_ctyp = subst_poly substs ret_ctyp in CDEF_spec (mangle_mono_id id ctx instantiation, extern, param_ctyps, ret_ctyp) ) (CTListSet.elements (Bindings.find id !monomorphic_calls)) in let ctx = List.fold_left (fun ctx cdef -> match cdef with | CDEF_spec (id, _, param_ctyps, ret_ctyp) -> { ctx with valspecs = Bindings.add id (extern, param_ctyps, ret_ctyp) ctx.valspecs } | cdef -> ctx ) ctx specialized_specs in specialize_fundefs ctx (orig_cdef :: specialized_specs @ prior) cdefs | (CDEF_fundef (id, heap_return, params, body) as orig_cdef) :: cdefs when Bindings.mem id !monomorphic_calls -> let tyargs = Bindings.find id !spec_tyargs in let specialized_fundefs = List.map (fun instantiation -> let substs = List.fold_left2 (fun substs tyarg ty -> KBindings.add tyarg ty substs) KBindings.empty (KidSet.elements tyargs) instantiation in let body = List.map (map_instr_ctyp (subst_poly substs)) body in CDEF_fundef (mangle_mono_id id ctx 
instantiation, heap_return, params, body) ) (CTListSet.elements (Bindings.find id !monomorphic_calls)) in specialize_fundefs ctx (orig_cdef :: specialized_fundefs @ prior) cdefs | cdef :: cdefs -> specialize_fundefs ctx (cdef :: prior) cdefs | [] -> List.rev prior, ctx in let cdefs, ctx = specialize_fundefs ctx [] cdefs in (* Now we want to remove any polymorphic functions that are unreachable from any monomorphic function *) let graph = callgraph cdefs in let monomorphic_roots = Util.map_filter (function | CDEF_spec (id, _, param_ctyps, ret_ctyp) -> if List.exists is_polymorphic param_ctyps || is_polymorphic ret_ctyp then None else Some id | _ -> None ) cdefs |> IdGraphNS.of_list in let monomorphic_reachable = IdGraph.reachable monomorphic_roots IdGraphNS.empty graph in let unreachable_polymorphic_functions = IdSet.filter (fun id -> not (IdGraphNS.mem id monomorphic_reachable)) polymorphic_functions in let cdefs = Util.map_filter (function | CDEF_fundef (id, _, _, _) when IdSet.mem id unreachable_polymorphic_functions -> None | CDEF_spec (id, _, _, _) when IdSet.mem id unreachable_polymorphic_functions -> None | cdef -> Some cdef ) cdefs in (* If we have removed all the polymorphic functions we are done, otherwise go again *) if IdSet.is_empty (IdSet.diff polymorphic_functions unreachable_polymorphic_functions) then cdefs, ctx else specialize_functions ctx cdefs let map_structs_and_variants f = function | (CT_lint | CT_fint _ | CT_constant _ | CT_lbits _ | CT_fbits _ | CT_sbits _ | CT_bit | CT_unit | CT_bool | CT_real | CT_string | CT_poly _ | CT_enum _ | CT_float _ | CT_rounding_mode) as ctyp -> ctyp | CT_tup ctyps -> CT_tup (List.map (map_ctyp f) ctyps) | CT_ref ctyp -> CT_ref (map_ctyp f ctyp) | CT_vector (direction, ctyp) -> CT_vector (direction, map_ctyp f ctyp) | CT_fvector (n, direction, ctyp) -> CT_fvector (n, direction, map_ctyp f ctyp) | CT_list ctyp -> CT_list (map_ctyp f ctyp) | CT_struct (id, fields) -> begin match f (CT_struct (id, fields)) with | 
CT_struct (id, fields) -> CT_struct (id, List.map (fun ((id, ctyps), ctyp) -> (id, List.map (map_ctyp f) ctyps), map_ctyp f ctyp) fields) | _ -> Reporting.unreachable (id_loc id) __POS__ "Struct mapped to non-struct" end | CT_variant (id, ctors) -> begin match f (CT_variant (id, ctors)) with | CT_variant (id, ctors) -> CT_variant (id, List.map (fun ((id, ctyps), ctyp) -> (id, List.map (map_ctyp f) ctyps), map_ctyp f ctyp) ctors) | _ -> Reporting.unreachable (id_loc id) __POS__ "Variant mapped to non-variant" end let rec specialize_variants ctx prior = let instantiations = ref CTListSet.empty in let fix_variants ctx var_id = map_structs_and_variants (function | CT_variant (id, ctors) when Id.compare var_id id = 0 -> let generic_ctors = Bindings.find id ctx.variants |> snd |> UBindings.bindings in let unifiers = ctyp_unify (id_loc id) (CT_variant (id, generic_ctors)) (CT_variant (id, ctors)) |> KBindings.bindings |> List.map snd in CT_variant (mangle_mono_id id ctx unifiers, List.map (fun ((ctor_id, _), ctyp) -> ((mangle_mono_id ctor_id ctx unifiers, []), ctyp)) ctors) | CT_struct (id, fields) when Id.compare var_id id = 0 -> let generic_fields = Bindings.find id ctx.records |> snd |> UBindings.bindings in let unifiers = ctyp_unify (id_loc id) (CT_struct (id, generic_fields)) (CT_struct (id, fields)) |> KBindings.bindings |> List.map snd in CT_struct (mangle_mono_id id ctx unifiers, List.map (fun ((field_id, _), ctyp) -> ((field_id, []), ctyp)) fields) | ctyp -> ctyp ) in let specialize_cval ctx ctor_id = function | V_ctor_kind (cval, id, unifiers, pat_ctyp) when Id.compare id ctor_id = 0 -> V_ctor_kind (cval, mangle_mono_id id ctx unifiers, [], pat_ctyp) | V_ctor_unwrap (cval, (id, unifiers), ctor_ctyp) when Id.compare id ctor_id = 0 -> V_ctor_unwrap (cval, (mangle_mono_id id ctx unifiers, []), ctor_ctyp) | cval -> cval in let specialize_constructor ctx var_id (ctor_id, existing_unifiers) ctyp = function | I_aux (I_funcall (clexp, extern, (id, ctyp_args), [cval]), 
aux) when Id.compare id ctor_id = 0 -> instantiations := CTListSet.add ctyp_args !instantiations; I_aux (I_funcall (clexp, extern, (mangle_mono_id id ctx ctyp_args, []), [map_cval (specialize_cval ctx ctor_id) cval]), aux) | instr -> map_instr_cval (map_cval (specialize_cval ctx ctor_id)) instr in let specialize_field ctx struct_id = function | I_aux (I_decl (CT_struct (struct_id', fields), _), (_, l)) as instr when Id.compare struct_id struct_id' = 0 -> let generic_fields = Bindings.find struct_id ctx.records |> snd |> UBindings.bindings in let unifiers = ctyp_unify l (CT_struct (struct_id, generic_fields)) (CT_struct (struct_id, fields)) |> KBindings.bindings |> List.map snd in instantiations := CTListSet.add unifiers !instantiations; instr | instr -> instr in let mangled_pragma orig_id mangled_id = CDEF_pragma ("mangled", Util.zencode_string (string_of_id orig_id) ^ " " ^ Util.zencode_string (string_of_id mangled_id)) in function | CDEF_type (CTD_variant (var_id, ctors)) :: cdefs when List.exists (fun (_, ctyp) -> is_polymorphic ctyp) ctors -> let typ_params = List.fold_left (fun set (_, ctyp) -> KidSet.union (ctyp_vars ctyp) set) KidSet.empty ctors in List.iter (function | CDEF_spec (id, _, ctyps, ctyp) -> let _ = List.map (map_ctyp (fun ctyp -> match ctyp with | CT_variant (var_id', ctors) when Id.compare var_id var_id' = 0 -> let generic_ctors = Bindings.find var_id ctx.variants |> snd |> UBindings.bindings in let unifiers = ctyp_unify (id_loc var_id') (CT_variant (var_id, generic_ctors)) (CT_variant (var_id, ctors)) |> KBindings.bindings |> List.map snd in instantiations := CTListSet.add unifiers !instantiations; ctyp | ctyp -> ctyp )) (ctyp :: ctyps) in () | _ -> () ) cdefs; let cdefs = List.fold_left (fun cdefs (ctor_id, ctyp) -> List.map (cdef_map_instr (specialize_constructor ctx var_id ctor_id ctyp)) cdefs) cdefs ctors in let monomorphized_variants = List.map (fun inst -> let substs = KBindings.of_seq (List.map2 (fun x y -> x, y) (KidSet.elements 
typ_params) inst |> List.to_seq) in (mangle_mono_id var_id ctx inst, List.map (fun ((ctor_id, _), ctyp) -> (mangle_mono_id ctor_id ctx inst, []), fix_variants ctx var_id (subst_poly substs ctyp)) ctors) ) (CTListSet.elements !instantiations) in let ctx = List.fold_left (fun ctx (id, ctors) -> { ctx with variants = Bindings.add id ([], UBindings.of_seq (List.to_seq ctors)) ctx.variants }) ctx monomorphized_variants in let mangled_ctors = List.map (fun (_, monomorphized_ctors) -> List.map2 (fun ((ctor_id, _), _) ((monomorphized_id, _), _) -> mangled_pragma ctor_id monomorphized_id) ctors monomorphized_ctors ) monomorphized_variants |> List.concat in let prior = List.map (cdef_map_ctyp (fix_variants ctx var_id)) prior in let cdefs = List.map (cdef_map_ctyp (fix_variants ctx var_id)) cdefs in let ctx = { ctx with valspecs = Bindings.map (fun (extern, param_ctyps, ret_ctyp) -> extern, List.map (fix_variants ctx var_id) param_ctyps, fix_variants ctx var_id ret_ctyp) ctx.valspecs } in let ctx = { ctx with variants = Bindings.remove var_id ctx.variants } in specialize_variants ctx (List.concat (List.map (fun (id, ctors) -> [CDEF_type (CTD_variant (id, ctors)); mangled_pragma var_id id]) monomorphized_variants) @ mangled_ctors @ prior) cdefs | CDEF_type (CTD_struct (struct_id, fields)) :: cdefs when List.exists (fun (_, ctyp) -> is_polymorphic ctyp) fields -> let typ_params = List.fold_left (fun set (_, ctyp) -> KidSet.union (ctyp_vars ctyp) set) KidSet.empty fields in let cdefs = List.map (cdef_map_instr (specialize_field ctx struct_id)) cdefs in let monomorphized_structs = List.map (fun inst -> let substs = KBindings.of_seq (List.map2 (fun x y -> x, y) (KidSet.elements typ_params) inst |> List.to_seq) in (mangle_mono_id struct_id ctx inst, List.map (fun ((field_id, _), ctyp) -> (field_id, []), fix_variants ctx struct_id (subst_poly substs ctyp)) fields) ) (CTListSet.elements !instantiations) in let mangled_fields = List.map (fun (_, monomorphized_fields) -> List.map2 (fun 
((field_id, _), _) ((monomorphized_id, _), _) -> mangled_pragma field_id monomorphized_id) fields monomorphized_fields ) monomorphized_structs |> List.concat in let prior = List.map (cdef_map_ctyp (fix_variants ctx struct_id)) prior in let cdefs = List.map (cdef_map_ctyp (fix_variants ctx struct_id)) cdefs in let ctx = { ctx with valspecs = Bindings.map (fun (extern, param_ctyps, ret_ctyp) -> extern, List.map (fix_variants ctx struct_id) param_ctyps, fix_variants ctx struct_id ret_ctyp) ctx.valspecs } in let ctx = List.fold_left (fun ctx (id, fields) -> { ctx with records = Bindings.add id ([], UBindings.of_seq (List.to_seq fields)) ctx.records }) ctx monomorphized_structs in let ctx = { ctx with records = Bindings.remove struct_id ctx.records } in specialize_variants ctx (List.concat (List.map (fun (id, fields) -> [CDEF_type (CTD_struct (id, fields)); mangled_pragma struct_id id]) monomorphized_structs) @ mangled_fields @ prior) cdefs | cdef :: cdefs -> specialize_variants ctx (cdef :: prior) cdefs | [] -> List.rev prior, ctx let make_calls_precise ctx cdefs = let constructor_types = ref Bindings.empty in let get_function_typ id = match Bindings.find_opt id ctx.valspecs with | None -> Bindings.find_opt id !constructor_types | Some (_, param_ctyps, ret_ctyp) -> Some (param_ctyps, ret_ctyp) in let precise_call call tail = match call with | I_aux (I_funcall (clexp, extern, (id, ctyp_args), args), ((_, l) as aux)) as instr -> begin match get_function_typ id with | None when string_of_id id = "cons" -> begin match ctyp_args, args with | ([ctyp_arg], [hd_arg; tl_arg]) -> if not (ctyp_equal (cval_ctyp hd_arg) ctyp_arg) then let gs = ngensym () in let cast = [ idecl l ctyp_arg gs; icopy l (CL_id (gs, ctyp_arg)) hd_arg ] in let cleanup = [ iclear ~loc:l ctyp_arg gs ] in [iblock (cast @ [I_aux (I_funcall (clexp, extern, (id, ctyp_args), [V_id (gs, ctyp_arg); tl_arg]), aux)] @ tail @ cleanup)] else instr::tail | _ -> (* cons must have a single type parameter and two 
arguments *) Reporting.unreachable (id_loc id) __POS__ "Invalid cons call" end | None -> instr::tail | Some (param_ctyps, ret_ctyp) -> if List.compare_lengths args param_ctyps <> 0 then ( Reporting.unreachable (id_loc id) __POS__ ("Function call found with incorrect arity: " ^ string_of_id id) ); let casted_args = List.map2 (fun arg param_ctyp -> if not (ctyp_equal (cval_ctyp arg) param_ctyp) then ( let gs = ngensym () in let cast = [ idecl l param_ctyp gs; icopy l (CL_id (gs, param_ctyp)) arg ] in let cleanup = [ iclear ~loc:l param_ctyp gs ] in (cast, V_id (gs, param_ctyp), cleanup) ) else ( ([], arg, []) ) ) args param_ctyps in let ret_setup, clexp, ret_cleanup = if not (ctyp_equal (clexp_ctyp clexp) ret_ctyp) then let gs = ngensym () in ([idecl l ret_ctyp gs], (CL_id (gs, ret_ctyp)), [icopy l clexp (V_id (gs, ret_ctyp)); iclear ~loc:l ret_ctyp gs]) else ([], clexp, []) in let casts = List.map (fun (x, _, _) -> x) casted_args |> List.concat in let args = List.map (fun (_, y, _) -> y) casted_args in let cleanup = List.rev_map (fun (_, _, z) -> z) casted_args |> List.concat in [iblock1 (casts @ ret_setup @ [I_aux (I_funcall (clexp, extern, (id, ctyp_args), args), aux)] @ tail @ ret_cleanup @ cleanup)] end | instr -> instr::tail in let rec precise_calls prior = function | (CDEF_type (CTD_variant (var_id, ctors)) as cdef) :: cdefs -> List.iter (fun ((id, _), ctyp) -> constructor_types := Bindings.add id ([ctyp], CT_variant (var_id, ctors)) !constructor_types ) ctors; precise_calls (cdef :: prior) cdefs | cdef :: cdefs -> precise_calls (cdef_map_funcall precise_call cdef :: prior) cdefs | [] -> List.rev prior in precise_calls [] cdefs (** Once we specialize variants, there may be additional type dependencies which could be in the wrong order. As such we need to sort the type definitions in the list of cdefs. 
*) let sort_ctype_defs reverse cdefs = (* Split the cdefs into type definitions and non type definitions *) let is_ctype_def = function CDEF_type _ -> true | _ -> false in let unwrap = function CDEF_type ctdef -> ctdef | _ -> assert false in let ctype_defs = List.map unwrap (List.filter is_ctype_def cdefs) in let cdefs = List.filter (fun cdef -> not (is_ctype_def cdef)) cdefs in let ctdef_id = function | CTD_enum (id, _) | CTD_struct (id, _) | CTD_variant (id, _) -> id in let ctdef_ids = function | CTD_enum _ -> IdSet.empty | CTD_struct (_, ctors) | CTD_variant (_, ctors) -> List.fold_left (fun ids (_, ctyp) -> IdSet.union (ctyp_ids ctyp) ids) IdSet.empty ctors in (* Create a reverse (i.e. from types to the types that are dependent upon them) id graph of dependencies between types *) let module IdGraph = Graph.Make(Id) in let graph = List.fold_left (fun g ctdef -> List.fold_left (fun g id -> IdGraph.add_edge id (ctdef_id ctdef) g) (IdGraph.add_edges (ctdef_id ctdef) [] g) (* Make sure even types with no dependencies are in graph *) (IdSet.elements (ctdef_ids ctdef))) IdGraph.empty ctype_defs in (* Then select the ctypes in the correct order as given by the topsort *) let ids = IdGraph.topsort graph in let ctype_defs = List.map (fun id -> CDEF_type (List.find (fun ctdef -> Id.compare (ctdef_id ctdef) id = 0) ctype_defs)) ids in (if reverse then List.rev ctype_defs else ctype_defs) @ cdefs let toplevel_lets_of_ast ast = let toplevel_lets_of_def = function | DEF_val (LB_aux (LB_val (pat, _), _)) -> pat_ids pat | _ -> IdSet.empty in let toplevel_lets_of_defs defs = List.fold_left IdSet.union IdSet.empty (List.map toplevel_lets_of_def defs) in toplevel_lets_of_defs ast.defs |> IdSet.elements let compile_ast ctx ast = let module G = Graph.Make(Callgraph.Node) in let g = Callgraph.graph_of_ast ast in let module NodeSet = Set.Make(Callgraph.Node) in let roots = Specialize.get_initial_calls () |> List.map (fun id -> Callgraph.Function id) |> NodeSet.of_list in let roots = 
NodeSet.add (Callgraph.Type (mk_id "exception")) roots in let roots = Bindings.fold (fun typ_id _ roots -> NodeSet.add (Callgraph.Type typ_id) roots) (Env.get_enums ctx.tc_env) roots in let roots = NodeSet.union (toplevel_lets_of_ast ast |> List.map (fun id -> Callgraph.Letbind id) |> NodeSet.of_list) roots in let g = G.prune roots NodeSet.empty g in let ast = Callgraph.filter_ast NodeSet.empty g ast in if !opt_memo_cache then (try if Sys.is_directory "_sbuild" then () else raise (Reporting.err_general Parse_ast.Unknown "_sbuild exists, but is a file not a directory!") with | Sys_error _ -> Unix.mkdir "_sbuild" 0o775) else (); let total = List.length ast.defs in let _, chunks, ctx = List.fold_left (fun (n, chunks, ctx) def -> let defs, ctx = compile_def n total ctx def in n + 1, defs :: chunks, ctx) (1, [], ctx) ast.defs in let cdefs = List.concat (List.rev chunks) in (* If we don't have an exception type, add a dummy one *) let dummy_exn = (mk_id "__dummy_exn#", []) in let cdefs, ctx = if not (Bindings.mem (mk_id "exception") ctx.variants) then CDEF_type (CTD_variant (mk_id "exception", [(dummy_exn, CT_unit)])) :: cdefs, { ctx with variants = Bindings.add (mk_id "exception") ([], UBindings.singleton dummy_exn CT_unit) ctx.variants } else cdefs, ctx in let cdefs, ctx = specialize_functions ctx cdefs in let cdefs = sort_ctype_defs true cdefs in let cdefs, ctx = specialize_variants ctx [] cdefs in let cdefs = if C.specialize_calls then cdefs else make_calls_precise ctx cdefs in let cdefs = sort_ctype_defs false cdefs in cdefs, ctx end let add_special_functions env effect_info = let assert_vs = Initial_check.extern_of_string (mk_id "sail_assert") "(bool, string) -> unit" in let exit_vs = Initial_check.extern_of_string (mk_id "sail_exit") "unit -> unit" in let cons_vs = Initial_check.extern_of_string (mk_id "sail_cons") "forall ('a : Type). 
('a, list('a)) -> list('a)" in let effect_info = Effects.add_monadic_built_in (mk_id "sail_assert") effect_info in let effect_info = Effects.add_monadic_built_in (mk_id "sail_exit") effect_info in snd (Type_error.check_defs env [assert_vs; exit_vs; cons_vs]), effect_info
(****************************************************************************) (* Sail *) (* *) (* Sail and the Sail architecture models here, comprising all files and *) (* directories except the ASL-derived Sail code in the aarch64 directory, *) (* are subject to the BSD two-clause licence below. *) (* *) (* The ASL derived parts of the ARMv8.3 specification in *) (* aarch64/no_vector and aarch64/full are copyright ARM Ltd. *) (* *) (* Copyright (c) 2013-2021 *) (* Kathyrn Gray *) (* Shaked Flur *) (* Stephen Kell *) (* Gabriel Kerneis *) (* Robert Norton-Wright *) (* Christopher Pulte *) (* Peter Sewell *) (* Alasdair Armstrong *) (* Brian Campbell *) (* Thomas Bauereiss *) (* Anthony Fox *) (* Jon French *) (* Dominic Mulligan *) (* Stephen Kell *) (* Mark Wassell *) (* Alastair Reid (Arm Ltd) *) (* *) (* All rights reserved. *) (* *) (* This work was partially supported by EPSRC grant EP/K008528/1 <a *) (* href="http://www.cl.cam.ac.uk/users/pes20/rems">REMS: Rigorous *) (* Engineering for Mainstream Systems</a>, an ARM iCASE award, EPSRC IAA *) (* KTF funding, and donations from Arm. This project has received *) (* funding from the European Research Council (ERC) under the European *) (* Union’s Horizon 2020 research and innovation programme (grant *) (* agreement No 789108, ELVER). *) (* *) (* This software was developed by SRI International and the University of *) (* Cambridge Computer Laboratory (Department of Computer Science and *) (* Technology) under DARPA/AFRL contracts FA8650-18-C-7809 ("CIFV") *) (* and FA8750-10-C-0237 ("CTSRD"). *) (* *) (* Redistribution and use in source and binary forms, with or without *) (* modification, are permitted provided that the following conditions *) (* are met: *) (* 1. Redistributions of source code must retain the above copyright *) (* notice, this list of conditions and the following disclaimer. *) (* 2. 
Redistributions in binary form must reproduce the above copyright *) (* notice, this list of conditions and the following disclaimer in *) (* the documentation and/or other materials provided with the *) (* distribution. *) (* *) (* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' *) (* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED *) (* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A *) (* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR *) (* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, *) (* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT *) (* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF *) (* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND *) (* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, *) (* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT *) (* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF *) (* SUCH DAMAGE. *) (****************************************************************************)
notationextern.mli
(** Declaration of uninterpretation functions (i.e. printing rules) for notations *)

open Names
open Constrexpr
open Glob_term
open Notation_term

val interpretation_eq : interpretation -> interpretation -> bool
(** Equality on [interpretation]. *)

val notation_entry_level_eq : notation_entry_level -> notation_entry_level -> bool
(** Equality on [notation_entry_level]. *)

(** Binds a notation in a given scope to an interpretation *)
type 'a interp_rule_gen =
  | NotationRule of Constrexpr.specific_notation
  | AbbrevRule of 'a

(** Interpretation rules whose abbreviations are identified by kernel name. *)
type interp_rule = KerName.t interp_rule_gen

(** Remove a previously declared printing rule for the given notation or
    abbreviation. *)
val remove_uninterpretation : interp_rule -> interpretation -> unit

(** Declare a printing rule: [interpretation] becomes a candidate
    uninterpretation for the notation or abbreviation named by the rule.
    [also_in_cases_pattern] presumably controls whether the rule is also
    used when printing [match] patterns — TODO(review): confirm against
    the implementation. *)
val declare_uninterpretation : ?also_in_cases_pattern:bool -> interp_rule -> interpretation -> unit

(** Whether a notation applies a head to a bounded number of arguments,
    an unbounded number, or is not applicative at all. *)
type notation_applicative_status =
  | AppBoundedNotation of int
  | AppUnboundedNotation
  | NotAppNotation

(** A candidate notation for printing: the rule, its interpretation, and
    its applicative status. *)
type notation_rule = interp_rule * interpretation * notation_applicative_status

(** Return printing key *)
type key

val glob_prim_constr_key : 'a Glob_term.glob_constr_g -> Names.GlobRef.t option
val glob_constr_keys : glob_constr -> key list
val cases_pattern_key : cases_pattern -> key
val notation_constr_key : Notation_term.notation_constr -> key * notation_applicative_status

(** Return the possible notations for a given term *)
val uninterp_notations : 'a glob_constr_g -> notation_rule list
val uninterp_cases_pattern_notations : 'a cases_pattern_g -> notation_rule list
val uninterp_ind_pattern_notations : inductive -> notation_rule list

(** State protection *)
val with_notation_uninterpretation_protection : ('a -> 'b) -> 'a -> 'b

(** Miscellaneous *)
type notation_use =
  | OnlyPrinting
  | OnlyParsing
  | ParsingAndPrinting
(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * Copyright INRIA, CNRS and contributors *) (* <O___,, * (see version control and CREDITS file for authors & dates) *) (* \VV/ **************************************************************) (* // * This file is distributed under the terms of the *) (* * GNU Lesser General Public License Version 2.1 *) (* * (see LICENSE file for the text of the license) *) (************************************************************************)
describeLifecycleHooks.ml
open Types
open Aws

(* Wire types for the DescribeLifecycleHooks call. *)
type input = DescribeLifecycleHooksType.t
type output = DescribeLifecycleHooksAnswer.t
type error = Errors_internal.t

let service = "autoscaling"

let signature_version = Request.V4

(* Build the HTTP request: resolve the regional endpoint, then attach the
   protocol version, action name and the rendered input as query params. *)
let to_http service region req =
  let base_uri =
    Endpoints.url_of service region |> Aws.Util.of_option_exn |> Uri.of_string
  in
  let input_params =
    DescribeLifecycleHooksType.to_query req
    |> Query.render
    |> Uri.query_of_encoded
    |> Util.drop_empty
  in
  let all_params =
    [ ("Version", ["2011-01-01"]); ("Action", ["DescribeLifecycleHooks"]) ]
    @ input_params
  in
  (`POST, Uri.add_query_params base_uri all_params, [])

(* Parse the XML response body into an answer, reporting a BadResponse
   error when the body is malformed or the expected element is missing. *)
let of_http body =
  try
    let _dtd, nodes = Ezxmlm.from_string body in
    let result_node =
      Util.option_bind
        (Xml.member "DescribeLifecycleHooksResponse" nodes)
        (Xml.member "DescribeLifecycleHooksResult")
    in
    (try
       Util.or_error
         (Util.option_bind result_node DescribeLifecycleHooksAnswer.parse)
         Error.(
           BadResponse
             { body
             ; message =
                 "Could not find well formed DescribeLifecycleHooksAnswer."
             })
     with Xml.RequiredFieldMissing msg ->
       `Error
         Error.(
           BadResponse
             { body
             ; message =
                 "Error parsing DescribeLifecycleHooksAnswer - missing field in body or children: "
                 ^ msg
             }))
  with Failure msg ->
    `Error Error.(BadResponse { body; message = "Error parsing xml: " ^ msg })

(* Map a service error string to a known error, filtering on the set of
   errors this call can raise and (when known) the expected HTTP code. *)
let parse_error code err =
  let recognized = Errors_internal.ResourceContention :: Errors_internal.common in
  match Errors_internal.of_string err with
  | None -> None
  | Some e ->
    let code_matches =
      match Errors_internal.to_http_code e with
      | None -> true
      | Some expected -> expected = code
    in
    if List.mem e recognized && code_matches then Some e else None
owl_linalg_z.ml
open Bigarray

(* Complex-double (z) specialization of Owl's generic linear algebra. *)
type elt = Complex.t

type mat = Owl_dense_matrix_z.mat

type complex_mat = Owl_dense_matrix_z.mat

type int32_mat = (int32, int32_elt) Owl_dense_matrix_generic.t

include Owl_linalg_generic

(* Pin the output element type of the generic decompositions to complex64,
   so callers of this module get monomorphic complex-valued results. *)
let schur = schur ~otyp:complex64

let ordschur = ordschur ~otyp:complex64

let qz = qz ~otyp:complex64

let ordqz = ordqz ~otyp:complex64

let qzvals = qzvals ~otyp:complex64

let eig = eig ~otyp:complex64

let eigvals = eigvals ~otyp:complex64
(* * OWL - OCaml Scientific and Engineering Computing * Copyright (c) 2016-2020 Liang Wang <liang.wang@cl.cam.ac.uk> *)
p-xgcd.c
#include "flint/flint.h"
#include "flint/fmpz.h"
#include "flint/profiler.h"

/* Signature shared by fmpz_xgcd and fmpz_xgcd_canonical_bezout. */
typedef void (*xgcd_fn_t)(fmpz_t, fmpz_t, fmpz_t, const fmpz_t, const fmpz_t);

/* Run `count` timed iterations of `fn` on uniformly random operands
   below 2^bits.  Only the xgcd calls fall inside prof_start/prof_stop;
   operand generation is timed too, as in the original samplers. */
static void sample_xgcd_generic(ulong count, slong bits, xgcd_fn_t fn)
{
    FLINT_TEST_INIT(state);
    fmpz_t d, x, y, a, b;
    fmpz_t nmax;
    ulong ix; /* ulong, to match the type of count */

    fmpz_init(d);
    fmpz_init(x);
    fmpz_init(y);
    fmpz_init(a);
    fmpz_init(b);
    fmpz_init(nmax);

    fmpz_set_d_2exp(nmax, 1.0, bits);

    prof_start();
    for (ix = 0; ix < count; ix++)
    {
        fmpz_randm(a, state, nmax);
        fmpz_randm(b, state, nmax);
        fn(d, x, y, a, b);
    }
    prof_stop();

    fmpz_clear(d);
    fmpz_clear(x);
    fmpz_clear(y);
    fmpz_clear(a);
    fmpz_clear(b);
    fmpz_clear(nmax);
    flint_randclear(state);
}

/* The six public samplers keep their original names and signatures so
   they can still be handed to prof_repeat; each is now a thin wrapper. */

void sample_xgcd_small(void * arg, ulong count)
{
    sample_xgcd_generic(count, SMALL_FMPZ_BITCOUNT_MAX, fmpz_xgcd);
}

void sample_xgcd_mixed(void * arg, ulong count)
{
    sample_xgcd_generic(count, FLINT_BITS, fmpz_xgcd);
}

void sample_xgcd_big(void * arg, ulong count)
{
    sample_xgcd_generic(count, 512, fmpz_xgcd);
}

void sample_xgcd_canonical_bezout_small(void * arg, ulong count)
{
    sample_xgcd_generic(count, SMALL_FMPZ_BITCOUNT_MAX,
                        fmpz_xgcd_canonical_bezout);
}

void sample_xgcd_canonical_bezout_mixed(void * arg, ulong count)
{
    sample_xgcd_generic(count, FLINT_BITS, fmpz_xgcd_canonical_bezout);
}

void sample_xgcd_canonical_bezout_big(void * arg, ulong count)
{
    sample_xgcd_generic(count, 512, fmpz_xgcd_canonical_bezout);
}

/* Time one sampler with prof_repeat and print min/max per call in cycles. */
static void profile_one(void (*sampler)(void *, ulong), const char * name)
{
    double min, max;

    prof_repeat(&min, &max, sampler, NULL);
    flint_printf("%s:\n"
                 "   min time is %.3f cycles\n"
                 "   max time is %.3f cycles\n\n",
                 name,
                 (min/(double)FLINT_CLOCK_SCALE_FACTOR)/100,
                 (max/(double)FLINT_CLOCK_SCALE_FACTOR)/100);
}

int main(void)
{
    profile_one(sample_xgcd_small, "fmpz_xgcd (small size)");
    profile_one(sample_xgcd_mixed, "fmpz_xgcd (mixed size)");
    profile_one(sample_xgcd_big,   "fmpz_xgcd (big size)");

    profile_one(sample_xgcd_canonical_bezout_small,
                "fmpz_xgcd_canonical_bezout (small size)");
    profile_one(sample_xgcd_canonical_bezout_mixed,
                "fmpz_xgcd_canonical_bezout (mixed size)");
    profile_one(sample_xgcd_canonical_bezout_big,
                "fmpz_xgcd_canonical_bezout (big size)");

    return 0;
}
/* Copyright 2021 Albin Ahlbäck This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
dune
; Build stanza for the physh library: hash tables and sets keyed on
; physical identity, backed by C stubs.
(library
 (name physh)
 (public_name grenier.physh)
 ; unwrapped: modules are exposed without a Physh. prefix
 (wrapped false)
 ; C stubs implementing the map and set primitives
 (c_names ml_physh_map ml_physh_set)
 (synopsis "Physical hashtable and hashset"))
rst.ml
(* Helpers for emitting reStructuredText (reST), including raw-HTML
   tab widgets used by generated documentation pages. *)

(* [pp_title ~char ppf title] prints [title] underlined by [char]
   repeated to the title's length, i.e. a reST section heading. *)
let pp_title ~char ppf title =
  (* Underline of exactly the same length as the title. *)
  let sub = String.map (fun _ -> char) title in
  Format.fprintf ppf "@[<v 0>%s@ %s@ @ @]" title sub

(* Heading levels: the underline character encodes the depth. *)
let pp_h1 = pp_title ~char:'#'

let pp_h2 = pp_title ~char:'*'

let pp_h3 = pp_title ~char:'='

let pp_h4 = pp_title ~char:'`'

(* Emit a ".. raw:: html" directive whose body is [str]; inner newlines
   are re-indented so every line stays inside the directive body. *)
let pp_raw_html ppf str =
  Format.fprintf ppf "@[<v>.. raw:: html@ @ %s@ @ @]"
    (Re.Str.global_replace (Re.Str.regexp "\n") "\n " str)

(* Like [pp_raw_html] but takes a formatter-consuming callback [f]
   instead of a pre-built string. *)
let pp_html ppf f =
  Format.fprintf ppf "@[<v 2>.. raw:: html@ @ %a@]@\n@\n"
    (fun ppf () -> f ppf) ()

(* Emit a reST internal reference target ".. _name :". *)
let pp_ref ppf name = Format.fprintf ppf ".. _%s :@\n@\n" name

(* Inline CSS injected into pages: widens the theme content area and
   styles the tab widget produced by [script] below. *)
let style =
  {css|
<style>
.wy-nav-content {
  max-width: 100%;
}
.tab {
  overflow: hidden;
  border: 1px solid #ccc;
  background-color: #f1f1f1;
}
.tab button {
  background-color: inherit;
  float: left;
  border: none;
  outline: none;
  cursor: pointer;
  padding: 5px 10px;
}
.tab button:hover {
  background-color: #ddd;
}
.tab button.active {
  background-color: #ccc;
}
.tabcontent {
  display: none;
  padding: 6px 12px;
  border: 1px solid #ccc;
  border-top: none;
  max-height: 40ex;
  margin-bottom: 7ex;
  overflow: auto;
}
.tabcontent p {
  margin-bottom: 12px;
}
pre {
  font-size: 12px
}
.rst-content .section ul p {
  margin-bottom: 0;
}
span.query {
  font-family: monospace;
  white-space: pre;
}
</style>
|css}

(* Inline JS injected into pages: tab switching (showTab) plus a
   DOMContentLoaded hook that clicks every element marked defaultOpen. *)
let script =
  {script|
<script>
function showTab(elt, tab, ref) {
  var i, tabcontent, tablinks;
  tabcontent = document.getElementsByClassName(ref);
  for (i = 0; i < tabcontent.length; i++) {
    tabcontent[i].style.display = 'none';
  }
  tablinks = elt.parentNode.children;
  for (i = 0; i < tablinks.length; i++) {
    tablinks[i].className = tablinks[i].className.replace(' active', '');
  }
  document.getElementById(tab).style.display = 'block';
  elt.className += ' active';
}
document.addEventListener('DOMContentLoaded', function() {
  var a = document.getElementsByClassName('defaultOpen');
  for (i = 0; i < a.length; i++) { a[i].click() }
})
</script>
|script}
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
term_utils.c
/* * UTILITIES FOR SIMPLIFYING TERMS */ #include <assert.h> #include "terms/bv64_constants.h" #include "terms/term_utils.h" #include "utils/int_array_sort.h" #include "utils/int_hash_sets.h" #include "utils/int_vectors.h" #include "utils/memalloc.h" #include "utils/prng.h" #if 0 #include <stdio.h> #include <inttypes.h> #include "term_printer.h" static void print_finite_domain(FILE *f, term_table_t *tbl, finite_domain_t *d) { uint32_t i, n; n = d->nelems; fputs("[", f); for (i=0; i<n; i++) { if (i > 0) fputs(" ", f); print_term(f, tbl, d->data[i]); } fputs("]", f); } #endif /******************** * FINITE DOMAINS * *******************/ /* * Build a domain descriptor that contains a[0 ... n-1] */ static finite_domain_t *make_finite_domain(term_t *a, uint32_t n) { finite_domain_t *tmp; uint32_t i; assert(n <= MAX_FINITE_DOMAIN_SIZE); tmp = (finite_domain_t *) safe_malloc(sizeof(finite_domain_t) + n * sizeof(term_t)); tmp->nelems = n; for (i=0; i<n; i++) { tmp->data[i] = a[i]; } return tmp; } /* * Add all elements of dom that are not in cache into vector v * - also store them in the cache */ static void add_domain(int_hset_t *cache, ivector_t *v, finite_domain_t *dom) { uint32_t i, n; term_t t; n = dom->nelems; for (i=0; i<n; i++) { t = dom->data[i]; if (int_hset_add(cache, t)) { ivector_push(v, t); } } } /* * Recursively collect all constant terms reachable from t * - add all terms visited to hset * - add all constants to vector v */ static void collect_finite_domain(term_table_t *tbl, int_hset_t *cache, ivector_t *v, term_t t) { special_term_t *d; if (int_hset_add(cache, t)) { // t not visited yet if (term_kind(tbl, t) == ITE_SPECIAL) { d = ite_special_desc(tbl, t); if (d->extra != NULL) { add_domain(cache, v, d->extra); } else { collect_finite_domain(tbl, cache, v, d->body.arg[1]); collect_finite_domain(tbl, cache, v, d->body.arg[2]); } } else { // t must be a constant, not already in v assert(term_kind(tbl, t) == ARITH_CONSTANT || term_kind(tbl, t) == BV64_CONSTANT 
|| term_kind(tbl, t) == BV_CONSTANT); ivector_push(v, t); } } } /* * Build the domain for (ite c t1 t2) * - d must be the composite descriptor for (ite c t1 t2) */ static finite_domain_t *build_ite_finite_domain(term_table_t *tbl, composite_term_t *d) { int_hset_t cache; ivector_t buffer; finite_domain_t *dom; assert(d->arity == 3); init_int_hset(&cache, 32); init_ivector(&buffer, 20); collect_finite_domain(tbl, &cache, &buffer, d->arg[1]); // then part collect_finite_domain(tbl, &cache, &buffer, d->arg[2]); // else part int_array_sort(buffer.data, buffer.size); dom = make_finite_domain(buffer.data, buffer.size); delete_ivector(&buffer); delete_int_hset(&cache); return dom; } /* * Get the finite domain of term t */ finite_domain_t *special_ite_get_finite_domain(term_table_t *tbl, term_t t) { special_term_t *d; d = ite_special_desc(tbl, t); if (d->extra == NULL) { d->extra = build_ite_finite_domain(tbl, &d->body); } return d->extra; } /* * Check whether u belongs to the finite domain of term t * - t must be a special if-then-else */ bool term_is_in_finite_domain(term_table_t *tbl, term_t t, term_t u) { finite_domain_t *dom; uint32_t l, h, k; dom = special_ite_get_finite_domain(tbl, t); assert(dom->nelems >= 2); // binary search l = 0; h = dom->nelems; for (;;) { k = (l + h)/2; // no overflow possible since l+h < MAX_FINITE_DOMAIN_SIZE assert(l <= k && k < h && h <= dom->nelems); if (k == l) break; if (dom->data[k] > u) { h = k; } else { l = k; } } assert(l == k && k+1 == h); return dom->data[k] == u; } /* * Check whether two finite domains are disjoint. 
*/ static bool disjoint_finite_domains(finite_domain_t *d1, finite_domain_t *d2) { uint32_t i1, i2, n1, n2; term_t t1, t2; assert(d1->nelems > 0 && d2->nelems > 0); n1 = d1->nelems; n2 = d2->nelems; i1 = 0; i2 = 0; t1 = d1->data[0]; t2 = d2->data[0]; for (;;) { if (t1 == t2) return false; if (t1 < t2) { i1 ++; if (i1 == n1) break; t1 = d1->data[i1]; } else { i2 ++; if (i2 == n2) break; t2 = d2->data[i2]; } } return true; } /* * Check whether t and u have disjoint finite domains * - both t and u must be special if-then-else * - the domains of t and u are computed if needed. */ bool terms_have_disjoint_finite_domains(term_table_t *tbl, term_t t, term_t u) { finite_domain_t *d1, *d2; d1 = special_ite_get_finite_domain(tbl, t); d2 = special_ite_get_finite_domain(tbl, u); return disjoint_finite_domains(d1, d2); } /* * FINITE RATIONAL DOMAIN */ /* * Check whether all elements in domain d are >= 0 * - d must be an arithmetic domain (i.e., all elements in d are rational constants) */ static bool finite_domain_is_nonneg(term_table_t *tbl, finite_domain_t *d) { uint32_t i, n; term_t t; n = d->nelems; for (i=0; i<n; i++) { t = d->data[i]; if (q_is_neg(rational_term_desc(tbl, t))) { return false; } } return true; } /* * Check whether all elements in domain d are <= 0 * - d must be an arithmetic domain (i.e., all elements in d are rational constants) */ static bool finite_domain_is_nonpos(term_table_t *tbl, finite_domain_t *d) { uint32_t i, n; term_t t; n = d->nelems; for (i=0; i<n; i++) { t = d->data[i]; if (q_is_pos(rational_term_desc(tbl, t))) { return false; } } return true; } /* * Check whether all elements in domain d are negative * - d must be an arithmetic domain */ static bool finite_domain_is_neg(term_table_t *tbl, finite_domain_t *d) { uint32_t i, n; term_t t; n = d->nelems; for (i=0; i<n; i++) { t = d->data[i]; if (q_is_nonneg(rational_term_desc(tbl, t))) { return false; } } return true; } /* * Check whether all elements in domain d are non-integer * - d must be an 
arithmetic domain */ static bool finite_domain_has_no_integers(term_table_t *tbl, finite_domain_t *d) { uint32_t i, n; term_t t; n = d->nelems; for (i=0; i<n; i++) { t = d->data[i]; if (q_is_integer(rational_term_desc(tbl, t))) { return false; } } return true; } /* * Check whether all elements in t's domain are non-negative * - t must be a special if-then-else of arithmetic type * - the domain of t is computed if required */ bool term_has_nonneg_finite_domain(term_table_t *tbl, term_t t) { finite_domain_t *d; d = special_ite_get_finite_domain(tbl, t); return finite_domain_is_nonneg(tbl, d); } /* * Check whether all elements in t's domain are non-positive * - t must be a special if-then-else of arithmetic type * - the domain of t is computed if required */ bool term_has_nonpos_finite_domain(term_table_t *tbl, term_t t) { finite_domain_t *d; d = special_ite_get_finite_domain(tbl, t); return finite_domain_is_nonpos(tbl, d); } /* * Check whether all elements in t's domain are negative * - t must be a special if-then-else term of arithmetic type * - the domain of t is computed if required */ bool term_has_negative_finite_domain(term_table_t *tbl, term_t t) { finite_domain_t *d; d = special_ite_get_finite_domain(tbl, t); return finite_domain_is_neg(tbl, d); } /* * Check whether all elements in t's domain are non-integer */ bool term_has_non_integer_finite_domain(term_table_t *tbl, term_t t) { finite_domain_t *d; d = special_ite_get_finite_domain(tbl, t); return finite_domain_has_no_integers(tbl, d); } /* * Check whether t < u * - both must be arithmetic constants (rationals) */ static bool arith_constant_lt(term_table_t *tbl, term_t t, term_t u) { return q_lt(rational_term_desc(tbl, t), rational_term_desc(tbl, u)); } /* * Compute the lower and upper bounds on domain d * - d must be a non-empty arithmetic domain * - the lower bound is stored in *lb * - the upper bound is stored in *ub */ static void finite_domain_bounds(term_table_t *tbl, finite_domain_t *d, term_t *lb, 
term_t *ub) { uint32_t i, n; term_t t, min, max; n = d->nelems; assert(n > 0); min = d->data[0]; max = d->data[0]; for (i=1; i<n; i++) { t = d->data[i]; if (arith_constant_lt(tbl, t, min)) { min = t; } else if (arith_constant_lt(tbl, max, t)) { max = t; } } *lb = min; *ub = max; } /* * Compute the lower and upper bound for term t * - t must be a special if-then-else term of arithmetic type * - the domain is computed if required * - the lower bound is stored in *lb and the upper bound is stored in *ub */ void term_finite_domain_bounds(term_table_t *tbl, term_t t, term_t *lb, term_t *ub) { finite_domain_t *d; d = special_ite_get_finite_domain(tbl, t); #if 0 printf("finite domain for term %"PRId32"\n", t); print_finite_domain(stdout, tbl, d); printf("\n"); #endif finite_domain_bounds(tbl, d, lb, ub); } /*********************************** * OPERATIONS ON BIT ARRAY TERMS * **********************************/ /* * Upper/lower bound on a bitarray interpreted as an unsigned integer. * a = a[0] + 2 a[1] + ... + 2^(n-1) a[n-1], with 0 <= a[i] <= 1 * upper bound: replace a[i] by 1 if a[i] != 0 * lower bound: replace a[i] by 0 if a[i] != 1 */ static void bitarray_upper_bound_unsigned(composite_term_t *a, bvconstant_t *c) { uint32_t i, n; assert(a->arity > 0); n = a->arity; bvconstant_set_all_one(c, n); // c := 0b1...1 (n bits) for (i=0; i<n; i++) { if (a->arg[i] == false_term) { bvconst_clr_bit(c->data, i); } } } static void bitarray_lower_bound_unsigned(composite_term_t *a, bvconstant_t *c) { uint32_t i, n; assert(a->arity > 0); n = a->arity; bvconstant_set_all_zero(c, n); // c := 0b0...0 (n bits) for (i=0; i<n; i++) { if (a->arg[i] == true_term) { bvconst_set_bit(c->data, i); } } } /* * Find the number significant bits of a (in 2s complement) * - returns m if a is the sign-extension of a smaller b of m bits * or n otherwise * - a is an array of n Boolean terms * - a[n-1] is the sign bit * - this searches for the largest m <= n such that a[m-1] is not equal to a[n-1]. 
*/ static uint32_t bitarray_significant_bits(composite_term_t *a) { uint32_t n; term_t sign; assert(a->arity > 0); n = a->arity - 1; sign = a->arg[n]; // sign bit while (n > 0) { if (a->arg[n - 1] != sign) break; n --; } return n + 1; } /* * Upper/lower bound on a bitarray interpreted as a signed integer. * - a is an array of n bits. * - Let m be the number of significant bits in a, then we have * 1 <= m <= n * bits a[m-1] .... a[n-1] are all equal (sign extension) * a = a[0] + 2 a[1] + ... + 2^(m-2) a[m-2] - 2^(m-1) a[m-1] * * upper bound: * for i=0 to m-2, replace a[i] by 1 if a[i] != 0 * for i=m-1 to n-1, replace a[i] by 0 unless a[i] = 1. * * lower bound: * for i=0 to m-2, replace a[i] by 0 if a[i] != 1 * for i=m-1 to n-1, replace a[i] by 1 unless a[i] = 0. */ static void bitarray_upper_bound_signed(composite_term_t *a, bvconstant_t *c) { uint32_t i, n, m; assert(a->arity > 0); n = a->arity; bvconstant_set_all_one(c, n); m = bitarray_significant_bits(a); assert(0 < m && m <= n); for (i=0; i<m-1; i++) { if (a->arg[i] == false_term) { bvconst_clr_bit(c->data, i); } } // all bits from a->arg[i] to a->arg[n-1] are the same if (a->arg[i] != true_term) { while (i < n) { bvconst_clr_bit(c->data, i); i ++; } } } static void bitarray_lower_bound_signed(composite_term_t *a, bvconstant_t *c) { uint32_t i, n, m; assert(a->arity > 0); n = a->arity; bvconstant_set_all_zero(c, n); m = bitarray_significant_bits(a); assert(0 < m && m <= n); for (i=0; i<m-1; i++) { if (a->arg[i] == true_term) { bvconst_set_bit(c->data, i); } } // all bits from a->arg[i] to a->arg[n-1] are the same if (a->arg[i] != false_term) { while (i < n) { bvconst_set_bit(c->data, i); i ++; } } } /* * BOUNDS FOR ARRAYS OF 1 TO 64BITS */ /* * Upper/lower bound on a bitarray interpreted as an unsigned integer. * a = a[0] + 2 a[1] + ... 
+ 2^(n-1) a[n-1], with 0 <= a[i] <= 1 * upper bound: replace a[i] by 1 if a[i] != 0 * lower bound: replace a[i] by 0 if a[i] != 1 */ static uint64_t bitarray_upper_bound_unsigned64(composite_term_t *a) { uint64_t c; uint32_t i, n; assert(0 < a->arity && a->arity <= 64); n = a->arity; c = mask64(n); // c = 0001...1 (n lower bits set) for (i=0; i<n; i++) { if (a->arg[i] == false_term) { c = clr_bit64(c, i); } } assert(c == norm64(c, n)); return c; } static uint64_t bitarray_lower_bound_unsigned64(composite_term_t *a) { uint64_t c; uint32_t i, n; assert(0 < a->arity && a->arity <= 64); n = a->arity; c = 0; for (i=0; i<n; i++) { if (a->arg[i] == true_term) { c = set_bit64(c, i); } } assert(c == norm64(c, n)); return c; } #if 0 // NOT USED ANYMORE /* * Upper/lower bound on a bitarray interpreted as a signed integer. * a = a[0] + 2 a[1] + ... + 2^(n-2) a[n-2] - 2^(n-1) a[m-1] * where m = number of significant bits in a. * * upper bound: * for i=0 to m-2, replace a[i] by 1 if a[i] != 0 * for i=m-1 to n-1, replace a[i] by 0 unless a[i] = 1. * * lower bound: * for i=0 to m-2, replace a[i] by 0 if a[i] != 1 * for i=m-1 to n-1, replace a[i] by 1 unless a[i] = 0. 
*/ static uint64_t bitarray_upper_bound_signed64(composite_term_t *a) { uint64_t c; uint32_t i, n, m; assert(0 < a->arity && a->arity <= 64); n = a->arity; c = mask64(n); // c = 0001...1 m = bitarray_significant_bits(a); assert(0 < m && m <= n); for (i=0; i<m-1; i++) { if (a->arg[i] == false_term) { c = clr_bit64(c, i); } } // i is equal to m-1 // All bits from a->arg[m-1] to a->arg[n-1] are the same if (a->arg[i] != true_term) { while (i < n) { c = clr_bit64(c, i); i ++; } } assert(c == norm64(c, n)); return c; } static uint64_t bitarray_lower_bound_signed64(composite_term_t *a) { uint64_t c; uint32_t i, n, m; assert(0 < a->arity && a->arity <= 64); n = a->arity; c = 0; m = bitarray_significant_bits(a); assert(0 < m && m <= n); for (i=0; i<m-1; i++) { if (a->arg[i] == true_term) { c = set_bit64(c, i); } } // i is equal to m-1. // All bits from a->arg[m-1] to a->arg[n-1] are the same if (a->arg[i] != false_term) { while (i < n) { c = set_bit64(c, i); i ++; } } assert(c == norm64(c, n)); return c; } #endif /* * DISEQUALITY CHECKS */ /* * Disequality check between two bit arrays * - a and b must have the same arity * - all components must be boolean * * TODO?: improve this. 
* - we could try to see that (l l) can't be equal to (u (not u)) */ static bool disequal_bitarrays(composite_term_t *a, composite_term_t *b) { uint32_t i, n; assert(a->arity == b->arity); n = a->arity; for (i=0; i<n; i++) { if (opposite_bool_terms(a->arg[i], b->arg[i])) return true; } return false; } /* * Disequality check between bit array a and small constant c * - both must have the same bit size */ static bool disequal_bitarray_bvconst64(composite_term_t *a, bvconst64_term_t *c) { uint32_t i, n; assert(a->arity == c->bitsize && 0 < a->arity && a->arity <= 64); n = a->arity; for (i=0; i<n; i++) { if (index_of(a->arg[i]) == bool_const) { assert(a->arg[i] == true_term || a->arg[i] == false_term); if (a->arg[i] != bool2term(tst_bit64(c->value, i))) { return true; } } } return false; } /* * Disequality check between bit array a and bv-constant c * - both must have the same bit size */ static bool disequal_bitarray_bvconst(composite_term_t *a, bvconst_term_t *c) { uint32_t i, n; assert(a->arity == c->bitsize && 64 < a->arity); n = a->arity; for (i=0; i<n; i++) { if (index_of(a->arg[i]) == bool_const) { assert(a->arg[i] == true_term || a->arg[i] == false_term); if (a->arg[i] != bool2term(bvconst_tst_bit(c->data, i))) { return true; } } } return false; } /************************* * CHECK NON-INTEGERS * ************************/ /* * Check whether p is of the form constant + sum of integer monomials * where the constant is a non-integer rational. */ static bool non_integer_polynomial(term_table_t *tbl, polynomial_t *p) { uint32_t i, n; n = p->nterms; if (n >= 1 && p->mono[0].var == const_idx && !q_is_integer(&p->mono[0].coeff)) { // p has a non-integer constant term for (i=1; i<n; i++) { if (!is_integer_term(tbl, p->mono[i].var) || !q_is_integer(&p->mono[i].coeff)) { return false; // not an integer monomial } } return true; } return false; } /* * Check whether t can't be an integer. * This is incomplete. 
* - returns true if t is a non-integer rational */ bool arith_term_is_not_integer(term_table_t *tbl, term_t t) { assert(is_arithmetic_term(tbl, t)); switch (term_kind(tbl, t)) { case ARITH_CONSTANT: return !q_is_integer(rational_term_desc(tbl, t)); case ARITH_ABS: // x not an integer IMPLIES (abs x) not an integer return arith_term_is_not_integer(tbl, arith_abs_arg(tbl, t)); case ARITH_POLY: return non_integer_polynomial(tbl, poly_term_desc(tbl, t)); case ITE_SPECIAL: return term_has_non_integer_finite_domain(tbl, t); default: return false; } } /* * Cheaper form: test whether x is a non-integer constant * - incomplete */ static bool is_non_integer_term(term_table_t *tbl, term_t x) { return term_kind(tbl, x) == ARITH_CONSTANT && !q_is_integer(rational_term_desc(tbl, x)); } /****************************** * CHECKS FOR DISEQUALITIES * *****************************/ /* * Base cases: * - x and y are both CONSTANT_TERM * - x and y are boolean and x = (not y). */ static inline bool disequal_constant_terms(term_t x, term_t y) { return x != y; } static inline bool disequal_boolean_terms(term_t x, term_t y) { return opposite_bool_terms(x, y); } /* * Arithmetic: x and y are both arithmetic terms * * The conversion of arith_buffer to terms ensures that polynomial * terms are not constant and not of the form 1.x for some term x. * * We deal with simple cases: * - x is integer and y is not (or conversely) * - both x and y are constant * - both x and y are polynomials * - x is a polynomial and y is not a constant (i.e., y may occur as a variable in x) * - y is a polynomial and x is not a constant * * TODO? we could do more when (x - y) is a polynomial with integer variables. 
*/ bool disequal_arith_terms(term_table_t *tbl, term_t x, term_t y, bool check_ite) { term_kind_t kx, ky; if (is_integer_term(tbl, x) && is_non_integer_term(tbl, y)) { return true; } if (is_integer_term(tbl, y) && is_non_integer_term(tbl, x)) { return true; } kx = term_kind(tbl, x); ky = term_kind(tbl, y); if (kx == ARITH_CONSTANT && ky == ARITH_CONSTANT) { return x != y; // because of hash consing. } if (check_ite) { if (kx == ARITH_CONSTANT && ky == ITE_SPECIAL) { return ! term_is_in_finite_domain(tbl, y, x); } if (kx == ITE_SPECIAL && ky == ARITH_CONSTANT) { return !term_is_in_finite_domain(tbl, x, y); } if (kx == ITE_SPECIAL && ky == ITE_SPECIAL) { return terms_have_disjoint_finite_domains(tbl, x, y); } } if (kx == ARITH_POLY && ky == ARITH_POLY) { return disequal_polynomials(poly_term_desc(tbl, x), poly_term_desc(tbl, y)); } if (kx == ARITH_POLY && ky != ARITH_CONSTANT) { return polynomial_is_const_plus_var(poly_term_desc(tbl, x), y); } if (ky == ARITH_POLY && kx != ARITH_CONSTANT) { return polynomial_is_const_plus_var(poly_term_desc(tbl, y), x); } return false; } /* * Bitvectors: x and y are bitvector terms of 1 to 64 bits */ static bool disequal_bv64_terms(term_table_t *tbl, term_t x, term_t y) { term_kind_t kx, ky; kx = term_kind(tbl, x); ky = term_kind(tbl, y); if (kx == ky) { if (kx == BV64_CONSTANT) { return x != y; } if (kx == BV64_POLY) { return disequal_bvpoly64(bvpoly64_term_desc(tbl, x), bvpoly64_term_desc(tbl, y)); } if (kx == BV_ARRAY) { return disequal_bitarrays(bvarray_term_desc(tbl, x), bvarray_term_desc(tbl, y)); } if (kx == ITE_SPECIAL) { return terms_have_disjoint_finite_domains(tbl, x, y); } } else { if (kx == BV64_CONSTANT && ky == BV_ARRAY) { return disequal_bitarray_bvconst64(bvarray_term_desc(tbl, y), bvconst64_term_desc(tbl, x)); } if (ky == BV64_CONSTANT && kx == BV_ARRAY) { return disequal_bitarray_bvconst64(bvarray_term_desc(tbl, x), bvconst64_term_desc(tbl, y)); } if (kx == BV64_CONSTANT && ky == ITE_SPECIAL) { return 
!term_is_in_finite_domain(tbl, y, x); } if (ky == BV64_CONSTANT && kx == ITE_SPECIAL) { return !term_is_in_finite_domain(tbl, x, y); } if (kx == BV64_POLY && ky != BV64_CONSTANT) { return bvpoly64_is_const_plus_var(bvpoly64_term_desc(tbl, x), y); } if (ky == BV64_POLY && kx != BV64_CONSTANT) { return bvpoly64_is_const_plus_var(bvpoly64_term_desc(tbl, y), x); } } return false; } /* * x and y are two bitvectors of more than 64bits */ static bool disequal_bv_terms(term_table_t *tbl, term_t x, term_t y) { term_kind_t kx, ky; kx = term_kind(tbl, x); ky = term_kind(tbl, y); if (kx == ky) { if (kx == BV_CONSTANT) { return x != y; } if (kx == BV_POLY) { return disequal_bvpoly(bvpoly_term_desc(tbl, x), bvpoly_term_desc(tbl, y)); } if (kx == BV_ARRAY) { return disequal_bitarrays(bvarray_term_desc(tbl, x), bvarray_term_desc(tbl, y)); } if (kx == ITE_SPECIAL) { return terms_have_disjoint_finite_domains(tbl, x, y); } } else { if (kx == BV_CONSTANT && ky == BV_ARRAY) { return disequal_bitarray_bvconst(bvarray_term_desc(tbl, y), bvconst_term_desc(tbl, x)); } if (ky == BV_CONSTANT && kx == BV_ARRAY) { return disequal_bitarray_bvconst(bvarray_term_desc(tbl, x), bvconst_term_desc(tbl, y)); } if (kx == BV_CONSTANT && ky == ITE_SPECIAL) { return !term_is_in_finite_domain(tbl, y, x); } if (ky == BV_CONSTANT && kx == ITE_SPECIAL) { return !term_is_in_finite_domain(tbl, x, y); } if (kx == BV_POLY && ky != BV_CONSTANT) { return bvpoly_is_const_plus_var(bvpoly_term_desc(tbl, x), y); } if (ky == BV_POLY && kx != BV_CONSTANT) { return bvpoly_is_const_plus_var(bvpoly_term_desc(tbl, y), x); } } return false; } /* * Generic form for two bitvector terms x and y */ bool disequal_bitvector_terms(term_table_t *tbl, term_t x, term_t y) { assert(is_bitvector_term(tbl, x) && is_bitvector_term(tbl, y) && term_bitsize(tbl, x) == term_bitsize(tbl, y)); if (term_bitsize(tbl, x) <= 64) { return disequal_bv64_terms(tbl, x, y); } else { return disequal_bv_terms(tbl, x, y); } } /* * Tuple terms x and y are 
 trivially distinct if they have components
 * x_i and y_i that are trivially distinct.
 */
static bool disequal_tuple_terms(term_table_t *tbl, term_t x, term_t y, bool check_ite) {
  composite_term_t *tuple_x, *tuple_y;
  uint32_t i, n;

  tuple_x = tuple_term_desc(tbl, x);
  tuple_y = tuple_term_desc(tbl, y);

  n = tuple_x->arity;
  assert(n == tuple_y->arity);
  // a single pair of disequal components is enough
  for (i=0; i<n; i++) {
    if (disequal_terms(tbl, tuple_x->arg[i], tuple_y->arg[i], check_ite)) {
      return true;
    }
  }

  return false;
}


/*
 * (update f x1 ... xn a) is trivially distinct from (update f x1 ... xn b)
 * if a is trivially distinct from b.
 */
static bool disequal_update_terms(term_table_t *tbl, term_t x, term_t y, bool check_ite) {
  composite_term_t *update_x, *update_y;
  int32_t i, n;

  assert(term_type(tbl, x) == term_type(tbl, y));

  update_x = update_term_desc(tbl, x);
  update_y = update_term_desc(tbl, y);

  n = update_x->arity;
  assert(n == update_y->arity && n > 0);
  // function and all index arguments must be syntactically identical
  for (i=0; i<n-1; i++) {
    if (update_x->arg[i] != update_y->arg[i]) return false;
  }

  // here i == n-1: compare the two new values a and b
  return disequal_terms(tbl, update_x->arg[i], update_y->arg[i], check_ite);
}


/*
 * Top level check: x and y must be valid terms of compatible types
 */
bool disequal_terms(term_table_t *tbl, term_t x, term_t y, bool check_ite) {
  term_kind_t kind;

  if (is_boolean_term(tbl, x)) {
    assert(is_boolean_term(tbl, y));
    return disequal_boolean_terms(x, y);
  }

  if (is_arithmetic_term(tbl, x)) {
    assert(is_arithmetic_term(tbl, y));
    return disequal_arith_terms(tbl, x, y, check_ite);
  }

  if (is_bitvector_term(tbl, x)) {
    assert(is_bitvector_term(tbl, y) && term_bitsize(tbl, x) == term_bitsize(tbl, y));
    if (term_bitsize(tbl, x) <= 64) {
      return disequal_bv64_terms(tbl, x, y);
    } else {
      return disequal_bv_terms(tbl, x, y);
    }
  }

  // other types: both terms must have the same kind
  kind = term_kind(tbl, x);
  if (kind != term_kind(tbl, y)) return false;

  switch (kind) {
  case CONSTANT_TERM:
    return disequal_constant_terms(x, y);
  case TUPLE_TERM:
    return disequal_tuple_terms(tbl, x, y, check_ite);
  case UPDATE_TERM:
    return disequal_update_terms(tbl, x, y, check_ite);
  default:
    return false;
  }
}


// check whether a[i] cannot be equal to b[i] for one i
bool disequal_term_arrays(term_table_t *tbl, uint32_t n, const term_t *a, const term_t *b, bool check_ite) {
  uint32_t i;

  for (i=0; i<n; i++) {
    if (disequal_terms(tbl, a[i], b[i], check_ite)) return true;
  }

  return false;
}


// check whether all elements of a are disequal
// this is expensive: quadratic cost, but should fail quickly on most examples
bool pairwise_disequal_terms(term_table_t *tbl, uint32_t n, const term_t *a, bool check_ite) {
  uint32_t i, j;

  for (i=0; i<n; i++) {
    for (j=i+1; j<n; j++) {
      if (! disequal_terms(tbl, a[i], a[j], check_ite)) return false;
    }
  }

  return true;
}


/********************************
 *  BOUNDS ON ARITHMETIC TERMS  *
 *******************************/

/*
 * Check whether t is non-negative. This is incomplete and
 * deals only with simple cases.
 * - return true if the checks can determine that t >= 0
 * - return false otherwise
 */
bool arith_term_is_nonneg(term_table_t *tbl, term_t t, bool check_ite) {
  assert(is_arithmetic_term(tbl, t));

  switch (term_kind(tbl, t)) {
  case ARITH_CONSTANT:
    return q_is_nonneg(rational_term_desc(tbl, t));

  case ITE_SPECIAL:
    // only done if check_ite is enabled: scan the finite domain
    return check_ite && term_has_nonneg_finite_domain(tbl, t);

  case ARITH_POLY:
    return polynomial_is_nonneg(poly_term_desc(tbl, t));

  case ARITH_ABS:
  case ARITH_MOD:
    // |u| and (mod u v) are non-negative by definition
    return true;

  case ARITH_FLOOR:
    // (floor t) >= 0 IFF t >= 0
    return arith_term_is_nonneg(tbl, arith_floor_arg(tbl, t), check_ite);

  case ARITH_CEIL:
    // t>=0 IMPLIES (ceil t) >= 0
    return arith_term_is_nonneg(tbl, arith_ceil_arg(tbl, t), check_ite);

  default:
    return false;
  }
}


/*
 * Check whether t is negative or null. This is incomplete and
 * deals only with simple cases.
 * - return true if the checks can determine that t <= 0
 * - return false otherwise
 */
bool arith_term_is_nonpos(term_table_t *tbl, term_t t, bool check_ite) {
  assert(is_arithmetic_term(tbl, t));

  switch (term_kind(tbl, t)) {
  case ARITH_CONSTANT:
    return q_is_nonpos(rational_term_desc(tbl, t));

  case ITE_SPECIAL:
    // only done if check_ite is enabled: scan the finite domain
    return check_ite && term_has_nonpos_finite_domain(tbl, t);

  case ARITH_POLY:
    return polynomial_is_nonpos(poly_term_desc(tbl, t));

  case ARITH_FLOOR:
    // t <= 0 IMPLIES (floor t) <= 0
    return arith_term_is_nonpos(tbl, arith_floor_arg(tbl, t), check_ite);

  case ARITH_CEIL:
    // (ceil t) <= 0 IFF t <= 0
    return arith_term_is_nonpos(tbl, arith_ceil_arg(tbl, t), check_ite);

  default:
    return false;
  }
}


/*
 * Check whether t is negative (incomplete)
 * - return true if the checks succeed and determine that t < 0
 * - return false otherwise
 */
bool arith_term_is_negative(term_table_t *tbl, term_t t) {
  assert(is_arithmetic_term(tbl, t));

  switch (term_kind(tbl, t)) {
  case ARITH_CONSTANT:
    return q_is_neg(rational_term_desc(tbl, t));

  case ITE_SPECIAL:
    return term_has_negative_finite_domain(tbl, t);

  case ARITH_POLY:
    return polynomial_is_neg(poly_term_desc(tbl, t));

  default:
    return false;
  }
}


/*
 * Check whether t is non-zero (incomplete)
 * - return true if the checks succeed and determine that t != 0
 * - return false otherwise
 */
bool arith_term_is_nonzero(term_table_t *tbl, term_t t) {
  assert(is_arithmetic_term(tbl, t));

  switch (term_kind(tbl, t)) {
  case ARITH_CONSTANT:
    // hash consing: the only constant equal to zero is zero_term
    return t != zero_term;

  case ITE_SPECIAL:
    return term_has_nonzero_finite_domain(tbl, t);

  case ARITH_POLY:
    return polynomial_is_nonzero(poly_term_desc(tbl, t));

  default:
    return false;
  }
}


/*******************************
 *  BOUNDS ON BITVECTOR TERMS  *
 ******************************/

/*
 * Copy a bitvector constant a into c
 */
static inline void copy_bvconst_term(bvconst_term_t *a, bvconstant_t *c) {
  assert(a->bitsize > 0);
  bvconstant_copy(c, a->bitsize, a->data);
}

static void copy_bvconst64_term(bvconst64_term_t *a,
                                bvconstant_t *c) {
  uint32_t aux[2];

  // split the 64-bit value into two 32-bit words for bvconstant_copy
  aux[0] = (uint32_t) a->value; // lower-order word
  aux[1] = (uint32_t) (a->value >> 32); // high order word (unused if bitsize <= 32)
  bvconstant_copy(c, a->bitsize, aux);
}


/*
 * Upper bound on t, interpreted as an unsigned integer
 * - copy the result in c
 */
void upper_bound_unsigned(term_table_t *tbl, term_t t, bvconstant_t *c) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));
  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    copy_bvconst64_term(bvconst64_term_desc(tbl, t), c);
    break;

  case BV_CONSTANT:
    copy_bvconst_term(bvconst_term_desc(tbl, t), c);
    break;

  case BV_ARRAY:
    bitarray_upper_bound_unsigned(bvarray_term_desc(tbl, t), c);
    break;

  default:
    // nothing better known: use the trivial bound 0b111...1
    n = term_bitsize(tbl, t);
    bvconstant_set_all_one(c, n);
    break;
  }
}


/*
 * Lower bound on t, interpreted as an unsigned integer
 * - copy the result in c
 */
void lower_bound_unsigned(term_table_t *tbl, term_t t, bvconstant_t *c) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));
  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    copy_bvconst64_term(bvconst64_term_desc(tbl, t), c);
    break;

  case BV_CONSTANT:
    copy_bvconst_term(bvconst_term_desc(tbl, t), c);
    break;

  case BV_ARRAY:
    bitarray_lower_bound_unsigned(bvarray_term_desc(tbl, t), c);
    break;

  default:
    // trivial lower bound: 0b000...0
    n = term_bitsize(tbl, t);
    bvconstant_set_all_zero(c, n);
    break;
  }
}


/*
 * Upper bound on t, interpreted as a signed integer
 * - copy the result in c
 */
void upper_bound_signed(term_table_t *tbl, term_t t, bvconstant_t *c) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));
  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    copy_bvconst64_term(bvconst64_term_desc(tbl, t), c);
    break;

  case BV_CONSTANT:
    copy_bvconst_term(bvconst_term_desc(tbl, t), c);
    break;

  case BV_ARRAY:
    bitarray_upper_bound_signed(bvarray_term_desc(tbl, t), c);
    break;

  default:
    // trivial signed maximum: 0b0111...1
    n = term_bitsize(tbl, t);
    assert(n > 0);
    bvconstant_set_all_one(c, n);
    bvconst_clr_bit(c->data, n-1); // clear the sign bit
    break;
  }
}


/*
 * Lower bound on t, interpreted as a signed integer
 * - copy the result in c
 */
void lower_bound_signed(term_table_t *tbl, term_t t, bvconstant_t *c) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));
  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    copy_bvconst64_term(bvconst64_term_desc(tbl, t), c);
    break;

  case BV_CONSTANT:
    copy_bvconst_term(bvconst_term_desc(tbl, t), c);
    break;

  case BV_ARRAY:
    bitarray_lower_bound_signed(bvarray_term_desc(tbl, t), c);
    break;

  default:
    // trivial signed minimum: 0b1000...0
    n = term_bitsize(tbl, t);
    assert(n > 0);
    bvconstant_set_all_zero(c, n);
    bvconst_set_bit(c->data, n-1); // set the sign bit
    break;
  }
}


/*
 * BOUNDS FOR VECTORS OF 1 TO 64 BITS
 */

/*
 * Upper bound on t, interpreted as an unsigned integer
 */
uint64_t upper_bound_unsigned64(term_table_t *tbl, term_t t) {
  uint64_t c;
  uint32_t n;

  assert(is_bitvector_term(tbl, t));
  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    c = bvconst64_term_desc(tbl, t)->value;
    break;

  case BV_ARRAY:
    c = bitarray_upper_bound_unsigned64(bvarray_term_desc(tbl, t));
    break;

  default:
    // trivial bound: all n bits set
    n = term_bitsize(tbl, t);
    assert(1 <= n && n <= 64);
    c = mask64(n);
    break;
  }

  return c;
}


/*
 * Lower bound on t, interpreted as an unsigned integer
 */
uint64_t lower_bound_unsigned64(term_table_t *tbl, term_t t) {
  uint64_t c;

  assert(is_bitvector_term(tbl, t));
  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    c = bvconst64_term_desc(tbl, t)->value;
    break;

  case BV_ARRAY:
    c = bitarray_lower_bound_unsigned64(bvarray_term_desc(tbl, t));
    break;

  default:
    // trivial bound: zero
    c = 0;
    break;
  }

  return c;
}


/*
 * Upper bound on t, interpreted as a signed integer
 */
uint64_t upper_bound_signed64(term_table_t *tbl, term_t t) {
  bv64_abs_t abs;
  uint64_t c;
  uint32_t n;

  assert(is_bitvector_term(tbl, t));
  // use the interval abstraction and normalize its high end to n bits
  bv64_abstract_term(tbl, t, &abs);
  n = term_bitsize(tbl, t);
  c = norm64((uint64_t) abs.high, n);

  return c;
}


/*
 * Lower bound on t, interpreted as a signed integer
 */
uint64_t lower_bound_signed64(term_table_t *tbl, term_t t) {
  bv64_abs_t abs;
  uint64_t c;
  uint32_t n;

  assert(is_bitvector_term(tbl, t));
  // use the interval abstraction and normalize its low end to n bits
  bv64_abstract_term(tbl, t, &abs);
  n = term_bitsize(tbl, t);
  c = norm64((uint64_t) abs.low, n);

  return c;
}


/******************************************************
 *  MINIMAL/MAXIMAL SIGNED/UNSIGNED BITVECTOR VALUES  *
 *****************************************************/

bool bvterm_is_zero(term_table_t *tbl, term_t t) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));

  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    return bvconst64_term_desc(tbl, t)->value == 0;

  case BV_CONSTANT:
    n = (term_bitsize(tbl, t) + 31) >> 5; // number of words
    return bvconst_is_zero(bvconst_term_desc(tbl, t)->data, n);

  default:
    return false;
  }
}

bool bvterm_is_minus_one(term_table_t *tbl, term_t t) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));

  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    n = term_bitsize(tbl, t);
    return bvconst64_is_minus_one(bvconst64_term_desc(tbl, t)->value, n);

  case BV_CONSTANT:
    n = term_bitsize(tbl, t);
    return bvconst_is_minus_one(bvconst_term_desc(tbl, t)->data, n);

  default:
    return false;
  }
}

bool bvterm_is_min_signed(term_table_t *tbl, term_t t) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));

  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    n = term_bitsize(tbl, t);
    return bvconst64_term_desc(tbl, t)->value == min_signed64(n);

  case BV_CONSTANT:
    n = term_bitsize(tbl, t);
    return bvconst_is_min_signed(bvconst_term_desc(tbl, t)->data, n);

  default:
    return false;
  }
}

bool bvterm_is_max_signed(term_table_t *tbl, term_t t) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));

  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    n = term_bitsize(tbl, t);
    return bvconst64_term_desc(tbl, t)->value == max_signed64(n);

  case BV_CONSTANT:
    n = term_bitsize(tbl, t);
    return bvconst_is_max_signed(bvconst_term_desc(tbl, t)->data, n);

  default:
    return false;
  }
}


/*****************************************
 *  SIMPLIFICATION OF BIT-VECTOR TERMS   *
 ****************************************/

/*
 * Get bit i of term t:
 * - return NULL_TERM if the bit can't be determined
 * - return true or false if t is a bitvector constant
 * - return b_i if t is (bv-array b_0 .. b_i ...)
 *
 * t must be a bitvector term of size > i
 */
term_t extract_bit(term_table_t *tbl, term_t t, uint32_t i) {
  uint32_t *d;
  uint64_t c;
  term_t bit;

  assert(is_bitvector_term(tbl, t) && term_bitsize(tbl, t) > i);

  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    c = bvconst64_term_desc(tbl, t)->value;
    bit = bool2term(tst_bit64(c, i));
    break;

  case BV_CONSTANT:
    d = bvconst_term_desc(tbl, t)->data;
    bit = bool2term(bvconst_tst_bit(d, i));
    break;

  case BV_ARRAY:
    bit = bvarray_term_desc(tbl, t)->arg[i];
    break;

  default:
    // bit i can't be determined syntactically
    bit = NULL_TERM;
    break;
  }

  return bit;
}


/*
 * Check whether (eq b c) simplifies and if so returns the result.
 * - b and c must be boolean terms (assumed not opposite of each other).
 * - return NULL_TERM if no simplification is found
 *
 * Rules:
 *   (eq b b)     --> true
 *   (eq b true)  --> b
 *   (eq b false) --> (not b)
 * + symmetric cases for the last two rules
 */
static term_t check_biteq_simplifies(term_t b, term_t c) {
  assert(! opposite_bool_terms(b, c));

  if (b == c) return true_term;

  if (b == true_term)  return c;
  if (b == false_term) return opposite_term(c); // not c
  if (c == true_term)  return b;
  if (c == false_term) return opposite_term(b);

  return NULL_TERM;
}


/*
 * Check whether (and a (eq b c)) simplifies and, if so, returns the result.
 * - a, b, and c are three boolean terms.
 * - return NULL_TERM if no cheap simplification is found
 *
 * We assume that the cheaper simplification tests have been tried before:
 * (i.e., we assume a != false and b != (not c)).
 */
static term_t check_accu_biteq_simplifies(term_t a, term_t b, term_t c) {
  term_t eq;

  // first check whether (eq b c) simplifies
  eq = check_biteq_simplifies(b, c);
  if (eq == NULL_TERM) return NULL_TERM;

  /*
   * try to simplify (and a eq)
   */
  assert(a != false_term && eq != false_term);

  if (a == eq) return a;
  if (opposite_bool_terms(a, eq)) return false_term;

  if (a == true_term) return eq;
  if (eq == true_term) return a;

  return NULL_TERM;
}


/*
 * Check whether (bveq u v) simplifies:
 * - u is a bitvector constant of no more than 64 bits
 * - v is a bv_array term
 *
 * Return NULL_TERM if no cheap simplification is found.
 */
static term_t check_eq_bvconst64(bvconst64_term_t *u, composite_term_t *v) {
  uint32_t i, n;
  term_t accu, b;

  n = u->bitsize;
  assert(n == v->arity);
  accu = true_term;

  // fold the bitwise equalities into accu; stop as soon as
  // one bit fails to simplify or the conjunction is false
  for (i=0; i<n; i++) {
    b = bool2term(tst_bit64(u->value, i)); // bit i of u
    accu = check_accu_biteq_simplifies(accu, b, v->arg[i]);
    if (accu == NULL_TERM || accu == false_term) {
      break;
    }
  }

  return accu;
}


/*
 * Same thing for a generic constant u.
 */
static term_t check_eq_bvconst(bvconst_term_t *u, composite_term_t *v) {
  uint32_t i, n;
  term_t accu, b;

  n = u->bitsize;
  assert(n == v->arity);
  accu = true_term;

  for (i=0; i<n; i++) {
    b = bool2term(bvconst_tst_bit(u->data, i)); // bit i of u
    accu = check_accu_biteq_simplifies(accu, b, v->arg[i]);
    if (accu == NULL_TERM || accu == false_term) {
      break;
    }
  }

  return accu;
}


/*
 * Same thing for two bv_array terms
 */
static term_t check_eq_bvarray(composite_term_t *u, composite_term_t *v) {
  uint32_t i, n;
  term_t accu;

  n = u->arity;
  assert(n == v->arity);
  accu = true_term;

  for (i=0; i<n; i++) {
    accu = check_accu_biteq_simplifies(accu, u->arg[i], v->arg[i]);
    if (accu == NULL_TERM || accu == false_term) {
      break;
    }
  }

  return accu;
}


/*
 * Try to simplify (bv-eq t1 t2) to a boolean term
 * - if t1 and t2 can be rewritten as arrays of bits
 *   [b0 .. b_n] and [c_0 ... c_n], respectively,
 *   then the function checks whether
 *      (and (b0 == c0) ...
 (b_n == c_n))
 *   simplifies to a single boolean term.
 * - return NULL_TERM if no simplification is found
 */
term_t simplify_bveq(term_table_t *tbl, term_t t1, term_t t2) {
  term_kind_t k1, k2;
  term_t aux;

  assert(is_bitvector_term(tbl, t1) && is_bitvector_term(tbl, t2)
         && term_bitsize(tbl, t1) == term_bitsize(tbl, t2));

  k1 = term_kind(tbl, t1);
  k2 = term_kind(tbl, t2);
  aux = NULL_TERM;
  // dispatch on the (kind1, kind2) pairs that can be handled bitwise
  switch (k1) {
  case BV64_CONSTANT:
    if (k2 == BV_ARRAY) {
      aux = check_eq_bvconst64(bvconst64_term_desc(tbl, t1), bvarray_term_desc(tbl, t2));
    }
    break;

  case BV_CONSTANT:
    if (k2 == BV_ARRAY) {
      aux = check_eq_bvconst(bvconst_term_desc(tbl, t1), bvarray_term_desc(tbl, t2));
    }
    break;

  case BV_ARRAY:
    if (k2 == BV64_CONSTANT) {
      aux = check_eq_bvconst64(bvconst64_term_desc(tbl, t2), bvarray_term_desc(tbl, t1));
    } else if (k2 == BV_CONSTANT) {
      aux = check_eq_bvconst(bvconst_term_desc(tbl, t2), bvarray_term_desc(tbl, t1));
    } else if (k2 == BV_ARRAY) {
      aux = check_eq_bvarray(bvarray_term_desc(tbl, t1), bvarray_term_desc(tbl, t2));
    }
    break;

  default:
    break;
  }

  return aux;
}


/*
 * Convert (bveq u v) to a conjunction of boolean terms
 * - u is a BV64 constant, v is a bitarray
 * - store the result in vector a
 */
static void flatten_eq_bvconst64(bvconst64_term_t *u, composite_term_t *v, ivector_t *a) {
  uint32_t i, n;
  term_t aux, b;

  n = u->bitsize;
  assert(n == v->arity);

  for (i=0; i<n; i++) {
    b = bool2term(tst_bit64(u->value, i)); // bit i of u
    aux = check_biteq_simplifies(b, v->arg[i]);
    assert(aux != NULL_TERM);

    // skip trivially-true conjuncts
    if (aux != true_term) {
      ivector_push(a, aux);
    }
  }
}


/*
 * Same thing when u is a BV constant and v is a bitarray
 */
static void flatten_eq_bvconst(bvconst_term_t *u, composite_term_t *v, ivector_t *a) {
  uint32_t i, n;
  term_t aux, b;

  n = u->bitsize;
  assert(n == v->arity);

  for (i=0; i<n; i++) {
    b = bool2term(bvconst_tst_bit(u->data, i)); // bit i of u
    aux = check_biteq_simplifies(b, v->arg[i]);
    assert(aux != NULL_TERM);

    if (aux != true_term) {
      ivector_push(a, aux);
    }
  }
}


/*
 * Try to convert (bveq u v) to
 a conjunction of Boolean terms
 * - u and v are bit arrays of the same size
 * - return true if that succeeds
 */
static bool flatten_eq_bvarray(composite_term_t *u, composite_term_t *v, ivector_t *a) {
  uint32_t i, n;
  term_t aux;

  n = u->arity;
  assert(n == v->arity);

  for (i=0; i<n; i++) {
    aux = check_biteq_simplifies(u->arg[i], v->arg[i]);
    if (aux == NULL_TERM) return false; // failed

    // skip trivially-true conjuncts
    if (aux != true_term) {
      ivector_push(a, aux);
    }
  }

  return true;
}


/*
 * Try to simplify (bv-eq t1 t2) to a conjunction of terms
 * - if t1 and t2 can be rewritten as arrays of bits
 *   [b_0 ... b_n] and [c_0 ... c_n], respectively,
 *   then the function checks whether each
 *   equality (b_i == c_i) simplifies to a single Boolean term e_i
 * - if all of them do, then the function
 *   returns true and adds e_0, ... e_n to vector v
 *
 * As above: t1 and t2 must not be equal, and disequal_bitvector_terms(tbl, t1, t2)
 * must be false.
 */
bool bveq_flattens(term_table_t *tbl, term_t t1, term_t t2, ivector_t *v) {
  term_kind_t k1, k2;

  assert(is_bitvector_term(tbl, t1) && is_bitvector_term(tbl, t2)
         && term_bitsize(tbl, t1) == term_bitsize(tbl, t2));

  k1 = term_kind(tbl, t1);
  k2 = term_kind(tbl, t2);
  // dispatch on the (kind1, kind2) pairs that can be flattened bitwise
  switch (k1) {
  case BV64_CONSTANT:
    if (k2 == BV_ARRAY) {
      flatten_eq_bvconst64(bvconst64_term_desc(tbl, t1), bvarray_term_desc(tbl, t2), v);
      return true;
    }
    break;

  case BV_CONSTANT:
    if (k2 == BV_ARRAY) {
      flatten_eq_bvconst(bvconst_term_desc(tbl, t1), bvarray_term_desc(tbl, t2), v);
      return true;
    }
    break;

  case BV_ARRAY:
    if (k2 == BV64_CONSTANT) {
      flatten_eq_bvconst64(bvconst64_term_desc(tbl, t2), bvarray_term_desc(tbl, t1), v);
      return true;
    } else if (k2 == BV_CONSTANT) {
      flatten_eq_bvconst(bvconst_term_desc(tbl, t2), bvarray_term_desc(tbl, t1), v);
      return true;
    } else if (k2 == BV_ARRAY) {
      return flatten_eq_bvarray(bvarray_term_desc(tbl, t1), bvarray_term_desc(tbl, t2), v);
    }
    break;

  default:
    break;
  }

  return false;
}


/*****************************************
 *  INTERVAL ABSTRACTION FOR BITVECTORS  *
 ****************************************/

/*
 * Compute the abstraction of t^d then multiply a by that
 * - the result is stored in a
 * - returned value: true means that a has some information
 *   (i.e., more precise than the full abstraction for n bits)
 * - if the returned value is false, then the default abstraction
 *   is copied in a
 */
static bool bv64_mulpower_abs(term_table_t *tbl, term_t t, uint32_t d, uint32_t n, bv64_abs_t *a) {
  bv64_abs_t aux;
  bool nontrivial;

  assert(is_bitvector_term(tbl, t) && n == term_bitsize(tbl, t));
  assert(1 <= n && n <= 64 && d >= 1);

  bv64_abstract_term(tbl, t, &aux);
  nontrivial = bv64_abs_nontrivial(&aux, n);
  // raise aux to the d-th power, but only if it still carries information
  if (d>1 && nontrivial) {
    bv64_abs_power(&aux, d);
    nontrivial = bv64_abs_nontrivial(&aux, n);
  }
  if (nontrivial) {
    bv64_abs_mul(a, &aux);
    nontrivial = bv64_abs_nontrivial(a, n);
  }
  if (!nontrivial) {
    bv64_abs_default(a, n);
  }

  return nontrivial;
}


/*
 * Compute the abstraction of c * t then add that to a
 * - store the result in a
 * - return true is the result has some information (more
 *   precise than the full abstraction for n bits)
 * - return false otherwise and set a to the default
 *   abstraction for n bits
 */
static bool bv64_addmul_abs(term_table_t *tbl, term_t t, uint64_t c, uint32_t n, bv64_abs_t *a) {
  bv64_abs_t aux;
  bool nontrivial;

  assert(is_bitvector_term(tbl, t) && n == term_bitsize(tbl, t));
  assert(1 <= n && n <= 64 && c == norm64(c, n));

  bv64_abstract_term(tbl, t, &aux);
  nontrivial = bv64_abs_nontrivial(&aux, n);
  // scale by c unless c == 1 or the abstraction is already trivial
  if (c != 1 && nontrivial) {
    bv64_abs_mul_const(&aux, c, n);
    nontrivial = bv64_abs_nontrivial(&aux, n);
  }
  if (nontrivial) {
    bv64_abs_add(a, &aux);
    nontrivial = bv64_abs_nontrivial(a, n);
  }
  if (!nontrivial) {
    bv64_abs_default(a, n);
  }

  return nontrivial;
}


/*
 * Abstraction for a power product
 * - stops as soon as the abstraction is too imprecise
 * - nbits = number of bits
 *
 * NOTE: we assume that no term in the power product is zero.
 */
void bv64_abs_pprod(term_table_t *tbl, pprod_t *p, uint32_t nbits, bv64_abs_t *a) {
  uint32_t i, n;

  bv64_abs_one(a); // start from the abstraction of 1

  n = p->len;
  for (i=0; i<n; i++) {
    // multiply in var^exp; stop if the abstraction becomes trivial
    if (!bv64_mulpower_abs(tbl, p->prod[i].var, p->prod[i].exp, nbits, a)) {
      break;
    }
  }
}


/*
 * Compute the abstraction of c * r then add it to a
 * - nbits = number of bits in c and r
 * - return true if the result is not trivial
 * - return false otherwise and set a to the default abstraction
 */
static bool bv64_addmul_pprod_abs(term_table_t *tbl, pprod_t *r, uint64_t c, uint32_t nbits, bv64_abs_t *a) {
  bv64_abs_t aux;
  bool nontrivial;

  assert(r != empty_pp);

  if (pp_is_var(r)) {
    // r is a single variable
    bv64_abstract_term(tbl, var_of_pp(r), &aux);
  } else {
    bv64_abs_pprod(tbl, r, nbits, &aux);
  }
  nontrivial = bv64_abs_nontrivial(&aux, nbits);
  if (c != 1 && nontrivial) {
    bv64_abs_mul_const(&aux, c, nbits);
    nontrivial = bv64_abs_nontrivial(&aux, nbits);
  }
  if (nontrivial) {
    bv64_abs_add(a, &aux);
    nontrivial = bv64_abs_nontrivial(a, nbits);
  }
  if (!nontrivial) {
    bv64_abs_default(a, nbits);
  }

  return nontrivial;
}


/*
 * Abstraction for a polynomial
 * - stops as soon as the abstraction is too imprecise
 * - nbits = number of bits
 */
void bv64_abs_poly(term_table_t *tbl, bvpoly64_t *p, uint32_t nbits, bv64_abs_t *a) {
  uint32_t i, n;

  assert(p->bitsize == nbits);

  n = p->nterms;
  i = 0;
  // the constant monomial, if any, comes first
  if (p->mono[i].var == const_idx) {
    bv64_abs_constant(a, p->mono[i].coeff, nbits);
    i ++;
  } else {
    bv64_abs_zero(a);
  }

  while (i < n) {
    if (!bv64_addmul_abs(tbl, p->mono[i].var, p->mono[i].coeff, nbits, a)) {
      break;
    }
    i ++;
  }
}


/*
 * Abstraction for an bvarith buffer
 * - stops as soon as the abstraction is too imprecise
 * - nbits = number of bits
 */
void bv64_abs_buffer(term_table_t *tbl, bvarith64_buffer_t *b, uint32_t nbits, bv64_abs_t *a) {
  uint32_t i, n;
  bvmlist64_t *q;

  assert(b->bitsize == nbits);

  n = b->nterms;
  q = b->list;
  i = 0;

  // the constant is first
  if (q->prod == empty_pp) {
    bv64_abs_constant(a, q->coeff, nbits);
    i ++;
    q = q->next;
  } else {
    bv64_abs_zero(a);
  }

  while (i<n) {
    if (!bv64_addmul_pprod_abs(tbl, q->prod, q->coeff, nbits, a)) {
      break;
    }
    i ++;
    q = q->next;
  }
}


/*
 * Interval abstraction of a bitvector term t
 * - t must be of type (bitvector n) with n <= 64
 * - the result is stored in *a
 */
void bv64_abstract_term(term_table_t *tbl, term_t t, bv64_abs_t *a) {
  uint32_t n;

  assert(is_bitvector_term(tbl, t));

  n = term_bitsize(tbl, t);
  assert(1 <= n && n <= 64);

  switch (term_kind(tbl, t)) {
  case BV64_CONSTANT:
    assert(bvconst64_term_desc(tbl, t)->bitsize == n);
    bv64_abs_constant(a, bvconst64_term_desc(tbl, t)->value, n);
    break;

  case BV_ARRAY:
    assert(bvarray_term_desc(tbl, t)->arity == n);
    bv64_abs_array(a, false_term, bvarray_term_desc(tbl, t)->arg, n);
    break;

  case POWER_PRODUCT:
    bv64_abs_pprod(tbl, pprod_term_desc(tbl, t), n, a);
    break;

  case BV64_POLY:
    bv64_abs_poly(tbl, bvpoly64_term_desc(tbl, t), n, a);
    break;

  default:
    // no information: full interval for n bits
    bv64_abs_default(a, n);
    break;
  }
}


/*********************************************
 *  NORMALIZATION OF ARITHMETIC CONSTRAINTS  *
 ********************************************/

/*
 * There are three types of arithmetic atoms in Yices:
 *   ARITH_EQ: [t == 0]
 *   ARITH_GE: [t >= 0]
 *   ARITH_BINEQ: [t1 == t2]
 *
 * We normalize them to check whether two arithmetic literals are
 * incompatible (can't both be true).
 *
 * This is limited to constraints on the same term. For example,
 *   [t == 0] and [not [2 + t >= 0]]
 * are normalized to constraints on t:
 *   t == 0 and t < -2,
 * which are incompatible.
 */

/*
 * Descriptor of an arithmetic constraint:
 * - all constraints are written in the form <sign> <poly> <op> <constant>
 * - <sign> is either + or -
 * - <op> can be EQ/LE/LT/GE/GT
 * - <poly> is a sum of monomials without constant term
 * - to get a normal form, we set <sign> to - if the first monomial of <poly>
 *   is negative and to + if the first monomial of <poly> is positive.
 *
 * To store the representation:
 * - len = number of monomials in p
 * - poly = pointer to the monomial array
 * - aux[3] = array to build the monomial array if required
 */
typedef enum {
  EQ, LE, LT, GE, GT,
} cmp_op_t;

#define NUM_ARITH_CMP_OP ((uint32_t)(GT+1))

typedef struct arith_constraint_s {
  cmp_op_t op;       // comparison operator
  bool is_pos;       // true if positive sign
  uint32_t len;      // number of monomials in poly
  monomial_t *poly;  // either p->mono (+1) or aux below
  rational_t constant;
  monomial_t aux[3]; // scratch space for constraints built from terms
} arith_constraint_t;

/*
 * Initialize all rationals coefficients (except aux[2])
 */
static void init_arith_cnstr(arith_constraint_t *cnstr) {
  q_init(&cnstr->constant);
  q_init(&cnstr->aux[0].coeff);
  q_init(&cnstr->aux[1].coeff);
}

/*
 * Clear the constraint descriptor
 */
static void delete_arith_cnstr(arith_constraint_t *cnstr) {
  q_clear(&cnstr->constant);
  q_clear(&cnstr->aux[0].coeff);
  q_clear(&cnstr->aux[1].coeff);
}

/*
 * Store p into cnstr:
 * - p is a0 + a1 t1 + ... + a_n t_n
 * - if a_1 is positive, then we set
 *     cnstr->is_pos = true
 *     cnstr->poly = a1 t1 + ... + a_n t_n
 *     cnstr->constant = - a0
 * - if a_1 is negative, then we store
 *     cnstr->is_pos = false
 *     cnstr->poly = a1 t1 + ... + a_n t_n
 *     cnstr->constant = +a0
 *
 * When this function is called, we know that p occurs in an
 * atom of the form (p == 0) or (p >= 0). Then we can assume
 * that p is not a constant polynomial (otherwise the atom would
 * be reduced to true or false by the term manager).
 */
static void arith_cnstr_set_poly(arith_constraint_t *cnstr, polynomial_t *p) {
  uint32_t n;

  n = p->nterms;
  assert(n >= 1);
  cnstr->is_pos = true;

  if (p->mono[0].var == const_idx) {
    // p has a constant term: move it (negated) to the right-hand side
    cnstr->len = n - 1;
    cnstr->poly = p->mono + 1;
    q_set_neg(&cnstr->constant, &p->mono[0].coeff);
  } else {
    // no constant term in p
    cnstr->len = n;
    cnstr->poly = p->mono;
    q_clear(&cnstr->constant); // constant = 0
  }

  // normalize the sign: flip if the leading monomial is negative
  if (q_is_neg(&cnstr->poly[0].coeff)) {
    cnstr->is_pos = false;
    q_neg(&cnstr->constant);
  }
}


/*
 * Store 1 * t into cnstr->aux
 */
static void arith_cnstr_aux_set_term(arith_constraint_t *cnstr, term_t t) {
  q_set_one(&cnstr->aux[0].coeff);
  cnstr->aux[0].var = t;
  cnstr->aux[1].var = max_idx; // end marker
}


/*
 * Store t into cnstr: t should not be a polynomial
 */
static void arith_cnstr_set_term(arith_constraint_t *cnstr, term_t t) {
  arith_cnstr_aux_set_term(cnstr, t);
  cnstr->is_pos = true;
  cnstr->len = 1;
  cnstr->poly = cnstr->aux;
  q_clear(&cnstr->constant);  // constant = 0
}


/*
 * Store t1 - t2 into cnstr:
 * - one of them may be a rational constant
 */
static void arith_cnstr_set_diff(term_table_t *tbl, arith_constraint_t *cnstr, term_t t1, term_t t2) {
  assert(t1 != t2);

  if (term_kind(tbl, t1) == ARITH_CONSTANT) {
    // t1 constant: constraint is t2 <op> t1
    arith_cnstr_aux_set_term(cnstr, t2);
    cnstr->is_pos = true;
    cnstr->len = 1;
    cnstr->poly = cnstr->aux;
    q_set(&cnstr->constant, rational_term_desc(tbl, t1));

  } else if (term_kind(tbl, t2) == ARITH_CONSTANT) {
    // t2 constant: constraint is t1 <op> t2
    arith_cnstr_aux_set_term(cnstr, t1);
    cnstr->is_pos = true;
    cnstr->len = 1;
    cnstr->poly = cnstr->aux;
    q_set(&cnstr->constant, rational_term_desc(tbl, t2));

  } else {
    // store t1 - t2 into aux, ordered by term index for a normal form
    if (t1 < t2) {
      cnstr->is_pos = true;
      q_set_one(&cnstr->aux[0].coeff);
      cnstr->aux[0].var = t1;
      q_set_minus_one(&cnstr->aux[1].coeff);
      cnstr->aux[1].var = t2;
    } else {
      cnstr->is_pos = false;
      q_set_minus_one(&cnstr->aux[0].coeff);
      cnstr->aux[0].var = t2;
      q_set_one(&cnstr->aux[1].coeff);
      cnstr->aux[1].var = t1;
    }
    cnstr->aux[2].var = max_idx; // end marker
    cnstr->len = 2;
    cnstr->poly = cnstr->aux;
    q_clear(&cnstr->constant); // constant = 0
  }
}


/*
 * Store atom t == 0 into descriptor cnstr
 * - t must be an arithmetic term defined in tbl
 */
static void store_arith_eq(term_table_t *tbl, arith_constraint_t *cnstr, term_t t) {
  assert(is_arithmetic_term(tbl, t));

  cnstr->op = EQ;
  if (term_kind(tbl, t) == ARITH_POLY) {
    arith_cnstr_set_poly(cnstr, poly_term_desc(tbl, t));
  } else {
    arith_cnstr_set_term(cnstr, t);
  }
}


/*
 * Store atom t >= 0 into cnstr
 * - t must be an arithmetic term defined in tbl
 */
static void store_arith_geq(term_table_t *tbl, arith_constraint_t *cnstr, term_t t) {
  assert(is_arithmetic_term(tbl, t));

  if (term_kind(tbl, t) == ARITH_POLY) {
    arith_cnstr_set_poly(cnstr, poly_term_desc(tbl, t));
    // op = GE if sign is '+' or LE if sign is '-'
    cnstr->op = cnstr->is_pos ? GE : LE;
  } else {
    arith_cnstr_set_term(cnstr, t);
    assert(cnstr->is_pos);
    cnstr->op = GE;
  }
}


/*
 * Store atom t < 0 into cnstr
 * - t must be an arithmetic term defined in tbl
 */
static void store_arith_lt(term_table_t *tbl, arith_constraint_t *cnstr, term_t t) {
  assert(is_arithmetic_term(tbl, t));

  if (term_kind(tbl, t) == ARITH_POLY) {
    arith_cnstr_set_poly(cnstr, poly_term_desc(tbl, t));
    // op = LT if sign is '+' or GT if sign is '-'
    cnstr->op = cnstr->is_pos ? LT : GT;
  } else {
    arith_cnstr_set_term(cnstr, t);
    assert(cnstr->is_pos);
    cnstr->op = LT;
  }
}


/*
 * Store t1 == t2 into cnstr
 * - we assume t1 and t2 are not polynomials
 */
static void store_arith_bineq(term_table_t *tbl, arith_constraint_t *cnstr, term_t t1, term_t t2) {
  assert(is_arithmetic_term(tbl, t1) && is_arithmetic_term(tbl, t2));
  arith_cnstr_set_diff(tbl, cnstr, t1, t2);
  cnstr->op = EQ;
}


/*
 * Attempt to store the arithmetic literal t into cnstr
 * - return false if this fails: either because t is not an arithmetic literal
 *   or it's of the form (not (t == 0)) or (not (t1 == t2))
 */
static bool arith_cnstr_store_literal(term_table_t *tbl, arith_constraint_t *cnstr, term_t l) {
  composite_term_t *eq;
  term_t t;

  switch (term_kind(tbl, l)) {
  case ARITH_EQ_ATOM:
    // only the positive polarity (t == 0) can be stored
    if (is_pos_term(l)) {
      t = arith_eq_arg(tbl, l);
      store_arith_eq(tbl, cnstr, t);
      return true;
    }
    break;

  case ARITH_GE_ATOM:
    // both polarities: (t >= 0) and (not (t >= 0)) i.e. (t < 0)
    t = arith_ge_arg(tbl, l);
    if (is_pos_term(l)) {
      store_arith_geq(tbl, cnstr, t);
    } else {
      store_arith_lt(tbl, cnstr, t);
    }
    return true;

  case ARITH_BINEQ_ATOM:
    if (is_pos_term(l)) {
      eq = arith_bineq_atom_desc(tbl, l);
      assert(eq->arity == 2);
      store_arith_bineq(tbl, cnstr, eq->arg[0], eq->arg[1]);
      return true;
    }
    break;

  default:
    break;
  }

  return false;
}


/*
 * Check whether two cnstr1 and cnstr2 are on the same term/polynomial
 */
static bool arith_cnstr_same_poly(arith_constraint_t *cnstr1, arith_constraint_t *cnstr2) {
  if (cnstr1->len == cnstr2->len) {
    if (cnstr1->is_pos == cnstr2->is_pos) {
      return equal_monarrays(cnstr1->poly, cnstr2->poly);
    } else {
      return opposite_monarrays(cnstr1->poly, cnstr2->poly);
    }
  }
  return false;
}


/*
 * Table to check whether two constraints on t are incompatible
 * - each row corresponds to a constraint [t op a] for different ops
 * - each column corresponds to a constraint [t op b] for different ops
 * - the content of the table is a check on constants a and b:
 *   such that ([t op a] /\ [t op b]) is false whenever the check holds
 * - example [t >= a] /\ [t = b] is false
 if b < a
 */
typedef enum {
  A_NE_B,  // incompatible if a != b
  B_LE_A,  // incompatible if b <= a
  B_LT_A,  // incompatible if b < a
  A_LE_B,  // incompatible if a <= b
  A_LT_B,  // incompatible if a < b
  NEVER,   // never incompatible
} constant_check_t;

static const uint8_t cnstr_check_table[NUM_ARITH_CMP_OP][NUM_ARITH_CMP_OP] = {
  /* [t = b]   [t <= b]  [t < b]  [t >= b]  [t > b] */
  {  A_NE_B,   B_LT_A,   B_LE_A,  A_LT_B,   A_LE_B },  /* [t = a]  */
  {  A_LT_B,   NEVER,    NEVER,   A_LT_B,   A_LE_B },  /* [t <= a] */
  {  A_LE_B,   NEVER,    NEVER,   A_LE_B,   A_LE_B },  /* [t < a]  */
  {  B_LT_A,   B_LT_A,   B_LE_A,  NEVER,    NEVER  },  /* [t >= a] */
  {  B_LE_A,   B_LE_A,   B_LE_A,  NEVER,    NEVER  },  /* [t > a]  */
};


/*
 * Check whether cnstr1 and cnstr2 are incompatible
 */
static bool arith_cnstr_disjoint(arith_constraint_t *cnstr1, arith_constraint_t *cnstr2) {
  rational_t *a, *b;

  if (arith_cnstr_same_poly(cnstr1, cnstr2)) {
    a = &cnstr1->constant;
    b = &cnstr2->constant;
    switch (cnstr_check_table[cnstr1->op][cnstr2->op]) {
    case A_NE_B: return q_neq(a, b);
    case B_LE_A: return q_le(b, a);
    case B_LT_A: return q_lt(b, a);
    case A_LE_B: return q_le(a, b);
    case A_LT_B: return q_lt(a, b);

    default: // return false
      break;
    }
  }

  return false;
}


/******************
 *  SUBSUMPTION   *
 *****************/

/*
 * Check whether two arithmetic literals t1 and t2 are incompatible
 */
bool incompatible_arithmetic_literals(term_table_t *tbl, term_t t1, term_t t2) {
  arith_constraint_t cnstr1, cnstr2;
  bool result;

  if (opposite_bool_terms(t1, t2)) {
    result = true;
  } else {
    init_arith_cnstr(&cnstr1);
    init_arith_cnstr(&cnstr2);

    result = false;
    // normalize both literals to <poly> <op> <constant> form, then
    // consult the incompatibility table
    if (arith_cnstr_store_literal(tbl, &cnstr1, t1) &&
        arith_cnstr_store_literal(tbl, &cnstr2, t2)) {
      result = arith_cnstr_disjoint(&cnstr1, &cnstr2);
    }

    delete_arith_cnstr(&cnstr1);
    delete_arith_cnstr(&cnstr2);
  }

  return result;
}


/*
 * Check whether two bitvector literals t1 and t2 are incompatible
 * MORE TO BE DONE
 */
bool incompatible_bitvector_literals(term_table_t *tbl, term_t t1, term_t t2) {
  composite_term_t *eq1, *eq2;
  bool result;
  uint32_t i, j;

  if (opposite_bool_terms(t1, t2)) {
    result = true;
  } else {
    result = false;

    if (is_pos_term(t1) && is_pos_term(t2) &&
term_kind(tbl, t1) == BV_EQ_ATOM && term_kind(tbl, t2) == BV_EQ_ATOM) { eq1 = bveq_atom_desc(tbl, t1); eq2 = bveq_atom_desc(tbl, t2); assert(eq1->arity == 2 && eq2->arity == 2); for (i=0; i<2; i++) { for (j=0; j<2; j++) { if (eq1->arg[i] == eq2->arg[j]) { result = disequal_bv_terms(tbl, eq1->arg[1 - i], eq2->arg[1 - j]); goto done; } } } } } done: return result; } /* * Check whether two Boolean terms t1 and t2 * are incompatible (i.e., (t1 and t2) is false. * - this does very simple checks for now */ bool incompatible_boolean_terms(term_table_t *tbl, term_t t1, term_t t2) { composite_term_t *eq1, *eq2; uint32_t i, j; if (is_arithmetic_literal(tbl, t1) && is_arithmetic_literal(tbl, t2)) { return incompatible_arithmetic_literals(tbl, t1, t2); } if (is_bitvector_literal(tbl, t1) && is_bitvector_literal(tbl, t2)) { return incompatible_bitvector_literals(tbl, t1, t2); } if (t1 == false_term || t2 == false_term || opposite_bool_terms(t1, t2)) { return true; } if (is_pos_term(t1) && is_pos_term(t2) && term_kind(tbl, t1) == EQ_TERM && term_kind(tbl, t2) == EQ_TERM) { eq1 = eq_term_desc(tbl, t1); eq2 = eq_term_desc(tbl, t2); for (i=0; i<2; i++) { for (j=0; j<2; j++) { if (eq1->arg[i] == eq2->arg[j]) { return disequal_bv_terms(tbl, eq1->arg[1 - i], eq2->arg[1 - j]); } } } } return false; } /* * Check whether t1 subsumes t2 (i.e., t1 => t2) */ bool term_subsumes_term(term_table_t *tbl, term_t t1, term_t t2) { return incompatible_boolean_terms(tbl, t1, opposite_term(t2)); } /* * Check whether t1 subsumes all elements of a[0 ... n-1] */ bool term_subsumes_array(term_table_t *tbl, term_t t1, uint32_t n, term_t *a) { uint32_t i; for (i=0; i<n; i++) { if (!term_subsumes_term(tbl, t1, a[i])) { return false; } } return true; } /**************************** * EQUALITY DECOMPOSITION * ***************************/ /* * Check whether t is equivalent to (x == a) where x is a term and a is a constant * - if so stores the term and constant in *x and *a, and returns true. 
* - otherwise returns false, and leave *x and *a unchanged. */ bool is_term_eq_const(term_table_t *tbl, term_t t, term_t *x, term_t *a) { composite_term_t *eq; assert(good_term(tbl, t)); if (is_pos_term(t)) { switch (term_kind(tbl, t)) { case ARITH_EQ_ATOM: // t is (x == 0); *x = arith_eq_arg(tbl, t); *a = zero_term; return true; case EQ_TERM: case ARITH_BINEQ_ATOM: case BV_EQ_ATOM: eq = composite_term_desc(tbl, t); assert(eq->arity == 2); if (is_const_term(tbl, eq->arg[0])) { *a = eq->arg[0]; *x = eq->arg[1]; return true; } if (is_const_term(tbl, eq->arg[1])) { *x = eq->arg[0]; *a = eq->arg[1]; return true; } break; default: break; } } return false; } /* * Variant: check whether t is of the form (x == a) where x is uninterpreted and * a is a constant. */ bool is_unint_eq_const(term_table_t *tbl, term_t t, term_t *x, term_t *a) { term_t x0, a0; if (is_term_eq_const(tbl, t, &x0, &a0) && term_kind(tbl, x0) == UNINTERPRETED_TERM) { assert(is_pos_term(x0)); *x = x0; *a = a0; return true; } return false; } /******************************* * UNIT-TYPE REPRESENTATIVES * ******************************/ /* * Representative of a singleton type tau: * - for scalar type: the unique constant of that type * - for function type: an uninterpreted term (denoting the constant function) * - for tuple type: (tau_1 ... tau_n) * representative = (tuple (rep tau_1) ... (rep tau_n)) */ /* * Tuple of representative terms. */ static term_t make_tuple_rep(term_table_t *table, tuple_type_t *d) { term_t aux[8]; term_t *a; term_t t; uint32_t i, n; n = d->nelem; a = aux; if (n > 8) { a = (term_t *) safe_malloc(n * sizeof(term_t)); } for (i=0; i<n; i++) { a[i] = get_unit_type_rep(table, d->elem[i]); } t = tuple_term(table, n, a); if (n > 8) { safe_free(a); } return t; } /* * Return the term representative for unit type tau. * - search the table of unit-types first * - create a new term if there's no entry for tau in that table. 
*/ term_t get_unit_type_rep(term_table_t *table, type_t tau) { type_table_t *types; term_t t; assert(is_unit_type(table->types, tau)); t = unit_type_rep(table, tau); if (t == NULL_TERM) { types = table->types; switch (type_kind(types, tau)) { case SCALAR_TYPE: assert(scalar_type_cardinal(types, tau) == 1); t = constant_term(table, tau, 0); break; case TUPLE_TYPE: t = make_tuple_rep(table, tuple_type_desc(types, tau)); break; case FUNCTION_TYPE: t = new_uninterpreted_term(table, tau); break; default: assert(false); break; } add_unit_type_rep(table, tau, t); } return t; } /************** * VARIABLES * *************/ /* * Clone variable v: * - v must be a variable * - return a fresh variable with the same type as v * - if v has a basename, then the clone also gets that name */ term_t clone_variable(term_table_t *table, term_t v) { type_t tau; term_t x; char *name; assert(term_kind(table, v) == VARIABLE); tau = term_type(table, v); x = new_variable(table, tau); name = term_name(table, v); if (name != NULL) { set_term_base_name(table, x, name); } return x; } /* * Convert variable v to an uninterpreted term * - v must be a variable * - create a fresh uninterpreted term with the same type as v * - if v has a basename, then the clone also gets that name */ term_t variable_to_unint(term_table_t *table, term_t v) { type_t tau; term_t x; char *name; assert(term_kind(table, v) == VARIABLE); tau = term_type(table, v); x = new_uninterpreted_term(table, tau); name = term_name(table, v); if (name != NULL) { set_term_base_name(table, x, name); } return x; }
/* * The Yices SMT Solver. Copyright 2014 SRI International. * * This program may only be used subject to the noncommercial end user * license agreement which is downloadable along with this program. */
list.ml
module Make (Monad : Traced_sigs.Monad.S) : Traced_sigs.List.S with type 'error trace := 'error Monad.trace = struct open Monad include Bare_structs.List let init_ep ~when_negative_length l f = let open Lwt_traced_result_syntax in let rec aux acc i = if i >= l then all (rev acc) else (aux [@ocaml.tailcall]) (Lwt.apply f i :: acc) (i + 1) in if l < 0 then fail when_negative_length else if l = 0 then nil_es else aux [] 0 let iter_ep f l = Lwt_traced_result_syntax.join (rev_map (Lwt.apply f) l) let lwt_apply2 f x y = try f x y with exn -> Lwt.fail exn let iteri_ep f l = Lwt_traced_result_syntax.join (mapi (lwt_apply2 f) l) let rev_map_ep f l = Lwt_traced_result_syntax.all @@ rev_map (Lwt.apply f) l let map_ep f l = rev_map_ep f l |> Lwt_result.map rev let rev_mapi_ep f l = Lwt_traced_result_syntax.all @@ rev_mapi f l let mapi_ep f l = rev_mapi_ep f l |> Lwt_result.map rev let filter_ep f l = rev_map_ep (fun x -> let open Lwt_traced_result_syntax in let* b = f x in if b then return_some x else return_none) l |> Lwt_result.map rev_filter_some let filter_map_ep f l = rev_map_ep f l |> Lwt_result.map rev_filter_some let concat_map_ep f xs = let open Lwt_traced_result_syntax in let+ r = all (map f xs) in flatten r let for_all_ep f l = rev_map_ep f l |> Lwt_result.map (for_all Fun.id) let exists_ep f l = rev_map_ep f l |> Lwt_result.map (exists Fun.id) let partition_ep f l = let open Lwt_traced_result_syntax in let* bxs = rev_map_ep (fun x -> let* b = f x in return (b, x)) l in return @@ fold_left (fun (trues, falses) (b, x) -> if b then (x :: trues, falses) else (trues, x :: falses)) ([], []) bxs end
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2020 Nomadic Labs <contact@nomadic-labs.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
client_sapling_commands.mli
val commands : unit -> Protocol_client_context.full Clic.command list
(* The MIT License (MIT) * * Copyright (c) 2019-2020 Nomadic Labs <contact@nomadic-labs.com> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. *)
alpn.ml
(* ALPN demo client: connects to 127.0.0.1:4433 over TLSv1.2, offers
   [proto_list] via ALPN and prints the protocol the server selected. *)
let test_client proto_list =
  Ssl.init ();
  let ctx = Ssl.create_context Ssl.TLSv1_2 Ssl.Client_context in
  Ssl.set_context_alpn_protos ctx proto_list;
  let sockaddr = Unix.ADDR_INET(Unix.inet_addr_of_string "127.0.0.1", 4433) in
  let ssl = Ssl.open_connection_with_context ctx sockaddr in
  let () =
    match Ssl.get_negotiated_alpn_protocol ssl with
    | None -> print_endline "No protocol selected"
    | Some proto -> print_endline ("Selected protocol: " ^ proto)
  in
  Ssl.shutdown ssl

(* ALPN demo server: accepts a single TLS connection on 127.0.0.1:4433
   and selects the first client-offered protocol that also appears in
   [proto_list].  Expects [cert.pem] / [privkey.key] in the working
   directory. *)
let test_server proto_list =
  let certfile = "cert.pem" in
  let privkey = "privkey.key" in
  let log s = Printf.printf "[II] %s\n%!" s in
  Ssl.init ();
  let sockaddr = Unix.ADDR_INET(Unix.inet_addr_of_string "127.0.0.1", 4433) in
  let domain = Unix.domain_of_sockaddr sockaddr in
  let sock = Unix.socket domain Unix.SOCK_STREAM 0 in
  let ctx = Ssl.create_context Ssl.TLSv1_2 Ssl.Server_context in
  Ssl.use_certificate ctx certfile privkey;
  (* [first_match l1 l2] is the first element of [l2] also present in [l1]. *)
  let rec first_match l1 = function
    | [] -> None
    | x::_ when List.mem x l1 -> Some x
    | _::xs -> first_match l1 xs
  in
  Ssl.set_context_alpn_select_callback ctx (fun client_protos ->
      first_match client_protos proto_list
    );
  Unix.setsockopt sock Unix.SO_REUSEADDR true;
  Unix.bind sock sockaddr;
  Unix.listen sock 100;
  log "listening for connections";
  (* blocking: handles exactly one client, then shuts down *)
  let (s, caller) = Unix.accept sock in
  let ssl_s = Ssl.embed_socket s ctx in
  let () =
    try Ssl.accept ssl_s with
    | e -> Printexc.to_string e |> print_endline
  in
  let inet_addr_of_sockaddr = function
    | Unix.ADDR_INET (n, _) -> n
    | Unix.ADDR_UNIX _ -> Unix.inet_addr_any
  in
  let inet_addr = inet_addr_of_sockaddr caller in
  let ip = Unix.string_of_inet_addr inet_addr in
  log (Printf.sprintf "openning connection for [%s]" ip);
  let () =
    match Ssl.get_negotiated_alpn_protocol ssl_s with
    | None -> log "no protocol selected"
    | Some proto -> log (Printf.sprintf "selected protocol: %s" proto)
  in
  Ssl.shutdown ssl_s

(* Entry point: parse [-t server|client] and [-p proto[,proto]], then run. *)
let () =
  let usage = "usage: ./alpn (server|client) protocol[,protocol]" in
  (* hand-rolled splitter — presumably kept for pre-4.04 OCaml where
     String.split_on_char is unavailable *)
  let split_on_char sep s =
    let r = ref [] in
    let j = ref (String.length s)
    in
    for i = String.length s - 1 downto 0 do
      if s.[i] = sep then begin
        r := String.sub s (i + 1) (!j - i - 1) :: !r;
        j := i
      end
    done;
    String.sub s 0 !j :: !r
  in
  let typ = ref "" in
  let protocols = ref [] in
  Arg.parse [
      "-t", Arg.String (fun t -> typ := t), "Type (server or client)";
      "-p", Arg.String (fun p -> protocols := split_on_char ',' p), "Comma-separated protocols";
    ] (fun _ -> ()) usage;
  match !typ with
  | "server" -> test_server !protocols
  | "client" -> test_client !protocols
  | _ -> failwith "Invalid type, use server or client."

(* Usage:
   ocamlfind ocamlc alpn.ml -g -o alpn -package ssl -linkpkg -ccopt -L/path/to/openssl/lib -cclib -lssl -cclib -lcrypto
   ./alpn -t server -p h2,http/1.1
   ./alpn -t client -p h2,http/1.1
 *)
apply.mli
val apply : State.t -> Proto_operation.t -> State.t option
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
fuzz_buf_read.ml
(* This file contains a simple model of `Buf_read`, using a single string.
   It runs random operations on both the model and the real buffer and
   checks they always give the same result. *)

(* Extend [String] with the small helpers the model needs. *)
module String = struct
  include String

  (* Index of the first character satisfying [p], searching from [start]. *)
  let rec find ?(start=0) p t =
    if start = String.length t then None
    else if p t.[start] then Some start
    else find ~start:(succ start) p t

  (* Drop the first [n] bytes of [t]. *)
  let drop t n = String.sub t n (String.length t - n)

  (* Split [t] at the first occurrence of [sep]; the separator is removed. *)
  let cut ~sep t =
    match String.index_opt t sep with
    | None -> None
    | Some i -> Some (String.sub t 0 i, drop t (i + 1))
end

let debug = false

module Buf_read = Eio.Buf_read

exception Buffer_limit_exceeded = Buf_read.Buffer_limit_exceeded

(* Sizes handed to the real [Buf_read]; [max_size] is also the limit the
   model enforces below. *)
let initial_size = 10
let max_size = 100

(* A flow that feeds the given chunks one read at a time, so the fuzzer
   also exercises refill boundaries in the real buffer. *)
let mock_flow next = object (self)
  inherit Eio.Flow.source

  val mutable next = next

  method read_into buf =
    match next with
    | [] -> raise End_of_file
    | "" :: xs -> next <- xs; self#read_into buf
    | x :: xs ->
      let len = min (Cstruct.length buf) (String.length x) in
      Cstruct.blit_from_string x 0 buf 0 len;
      let x' = String.drop x len in
      next <- (if x' = "" then xs else x' :: xs);
      len
end

(* Reference implementation: the remaining input as one mutable string. *)
module Model = struct
  type t = string ref

  let of_chunks chunks = ref (String.concat "" chunks)

  let take_all t =
    let old = !t in
    if String.length old >= max_size then raise Buffer_limit_exceeded;
    t := "";
    old

  let line t =
    match String.cut ~sep:'\n' !t with
    | Some (line, rest) ->
      if String.length line >= max_size then raise Buffer_limit_exceeded;
      t := rest;
      (* a trailing CR is stripped, mirroring Buf_read.line *)
      if String.ends_with ~suffix:"\r" line then String.sub line 0 (String.length line - 1)
      else line
    | None when !t = "" -> raise End_of_file
    | None when String.length !t >= max_size -> raise Buffer_limit_exceeded
    | None -> take_all t

  let any_char t =
    match !t with
    | "" -> raise End_of_file
    | s -> t := String.drop s 1; s.[0]

  let peek_char t =
    match !t with
    | "" -> None
    | s -> Some (s.[0])

  let consume t n = t := String.drop !t n

  let char c t =
    match peek_char t with
    | Some c2 when c = c2 -> consume t 1
    | Some _ -> failwith "char"
    | None -> raise End_of_file

  let string s t =
    if debug then Fmt.pr "string %S@." s;
    (* a mismatch within the available data is a parse failure; a
       matching prefix that runs off the end of the input is EOF *)
    let len_t = String.length !t in
    let prefix = String.sub s 0 (min len_t (String.length s)) in
    if not (String.starts_with ~prefix !t) then failwith "string";
    if String.length s > max_size then raise Buffer_limit_exceeded;
    if String.starts_with ~prefix:s !t then consume t (String.length s)
    else raise End_of_file

  let take n t =
    if n < 0 then invalid_arg "neg";
    if n > max_size then raise Buffer_limit_exceeded
    else if String.length !t >= n then (
      let data = String.sub !t 0 n in
      t := String.drop !t n;
      data
    ) else raise End_of_file

  let take_while p t =
    match String.find (Fun.negate p) !t with
    | Some i when i >= max_size -> raise Buffer_limit_exceeded
    | Some i ->
      let data = String.sub !t 0 i in
      consume t i;
      data
    | None -> take_all t

  let skip_while p t =
    match String.find (Fun.negate p) !t with
    | Some i -> consume t i
    | None -> t := ""

  let skip n t =
    if n < 0 then invalid_arg "skip";
    if n > String.length !t then (
      (* everything is consumed before the EOF is reported *)
      t := "";
      raise End_of_file;
    );
    consume t n

  let end_of_input t =
    if !t <> "" then failwith "not eof"

  let rec lines t =
    match line t with
    | line -> line :: lines t
    | exception End_of_file -> []

  module BE = struct
    let uint16 t = String.get_uint16_be (take 2 t) 0
    let uint32 t = String.get_int32_be (take 4 t) 0

    (* 48-bit big-endian read, assembled from three 16-bit reads *)
    let uint48 t =
      let s = take 6 t in
      let upper_16 = String.get_uint16_be s 0 |> Int64.of_int in
      let middle_16 = String.get_uint16_be s 2 |> Int64.of_int in
      let lower_16 = String.get_uint16_be s 4 |> Int64.of_int in
      Int64.(
        add
          (shift_left upper_16 32)
          (add (shift_left middle_16 16) (lower_16))
      )

    let uint64 t = String.get_int64_be (take 8 t) 0
    let float t = Int32.float_of_bits ( String.get_int32_be (take 4 t) 0)
    let double t = Int64.float_of_bits ( String.get_int64_be (take 8 t) 0)
  end

  module LE = struct
    let uint16 t = String.get_uint16_le (take 2 t) 0
    let uint32 t = String.get_int32_le (take 4 t) 0

    (* 48-bit little-endian read *)
    let uint48 t =
      let s = take 6 t in
      let lower_16 = String.get_uint16_le s 0 |> Int64.of_int in
      let middle_16 = String.get_uint16_le s 2 |> Int64.of_int in
      let upper_16 = String.get_uint16_le s 4 |> Int64.of_int in
      Int64.(
        add
          (shift_left upper_16 32)
          (add (shift_left middle_16 16) (lower_16))
      )

    let uint64 t = String.get_int64_le (take 8 t) 0
    let float t = Int32.float_of_bits ( String.get_int32_le (take 4 t) 0)
    let double t = Int64.float_of_bits ( String.get_int64_le (take 8 t) 0)
  end
end

(* One fuzzed operation: a printer for the result, the real parser, and
   its model counterpart. *)
type op = Op : 'a Crowbar.printer * 'a Buf_read.parser * (Model.t -> 'a) -> op

let unit = Fmt.(const string) "()"
let dump_char f c = Fmt.pf f "%C" c

let digit = function
  | '0'..'9' -> true
  | _ -> false

(* Generator for one random operation, labelled for readable failures. *)
let op =
  let label (name, gen) = Crowbar.with_printer Fmt.(const string name) gen in
  Crowbar.choose @@ List.map label [
    "line", Crowbar.const @@ Op (Fmt.Dump.string, Buf_read.line, Model.line);
    "char 'x'", Crowbar.const @@ Op (unit, Buf_read.char 'x', Model.char 'x');
    "any_char", Crowbar.const @@ Op (dump_char, Buf_read.any_char, Model.any_char);
    "peek_char", Crowbar.const @@ Op (Fmt.Dump.option dump_char, Buf_read.peek_char, Model.peek_char);
    "string", Crowbar.(map [bytes]) (fun s -> Op (unit, Buf_read.string s, Model.string s));
    "take", Crowbar.(map [int]) (fun n -> Op (Fmt.Dump.string, Buf_read.take n, Model.take n));
    "take_all", Crowbar.const @@ Op (Fmt.Dump.string, Buf_read.take_all, Model.take_all);
    "take_while digit", Crowbar.const @@ Op (Fmt.Dump.string, Buf_read.take_while digit, Model.take_while digit);
    "skip_while digit", Crowbar.const @@ Op (unit, Buf_read.skip_while digit, Model.skip_while digit);
    "skip", Crowbar.(map [int]) (fun n -> Op (unit, Buf_read.skip n, Model.skip n));
    "end_of_input", Crowbar.const @@ Op (unit, Buf_read.end_of_input, Model.end_of_input);
    "lines", Crowbar.const @@ Op (Fmt.Dump.(list string), (Buf_read.(map List.of_seq lines)), Model.lines);
    "be_uint16", Crowbar.const @@ Op (Fmt.int, (Buf_read.BE.uint16), Model.BE.uint16);
    "be_uint32", Crowbar.const @@ Op (Fmt.int32, (Buf_read.BE.uint32), Model.BE.uint32);
    "be_uint48", Crowbar.const @@ Op (Fmt.int64, (Buf_read.BE.uint48), Model.BE.uint48);
    "be_uint64", Crowbar.const @@ Op (Fmt.int64, (Buf_read.BE.uint64), Model.BE.uint64);
    "be_float", Crowbar.const @@ Op (Fmt.float, (Buf_read.BE.float), Model.BE.float);
    "be_double", Crowbar.const @@ Op (Fmt.float, (Buf_read.BE.double), Model.BE.double);
    "le_uint16", Crowbar.const @@ Op (Fmt.int, (Buf_read.LE.uint16), Model.LE.uint16);
    "le_uint32", Crowbar.const @@ Op (Fmt.int32, (Buf_read.LE.uint32), Model.LE.uint32);
    "le_uint48", Crowbar.const @@ Op (Fmt.int64, (Buf_read.LE.uint48), Model.LE.uint48);
    "le_uint64", Crowbar.const @@ Op (Fmt.int64, (Buf_read.LE.uint64), Model.LE.uint64);
    "le_float", Crowbar.const @@ Op (Fmt.float, (Buf_read.LE.float), Model.LE.float);
    "le_double", Crowbar.const @@ Op (Fmt.float, (Buf_read.LE.double), Model.LE.double);
  ]

(* Normalise exceptions to tags so both sides compare structurally. *)
let catch f x =
  match f x with
  | y -> Ok y
  | exception End_of_file -> Error "EOF"
  | exception Invalid_argument _ -> Error "Invalid"
  | exception Failure _ -> Error "Failure"
  | exception Buffer_limit_exceeded -> Error "TooBig"

(* Run the same random operations against model and real buffer; check
   every result, then check the consumed-byte count agrees. *)
let random chunks ops =
  let model = Model.of_chunks chunks in
  let chunks_len = String.length !model in
  let r = Buf_read.of_flow (mock_flow chunks) ~initial_size ~max_size in
  if debug then print_endline "*** start ***";
  let check_eq (Op (pp, a, b)) =
    if debug then (
      Fmt.pr "---@.";
      Fmt.pr "real :%S@." (Cstruct.to_string (Buf_read.peek r));
      Fmt.pr "model:%S@." !model;
    );
    let x = catch a r in
    let y = catch b model in
    Crowbar.check_eq ~pp:Fmt.(result ~ok:pp ~error:string) x y
  in
  List.iter check_eq ops;
  Crowbar.check_eq ~pp:Fmt.int (Buf_read.consumed_bytes r) (chunks_len - String.length !model)

let () =
  Crowbar.(add_test ~name:"random ops" [list bytes; list op] random)
(* This file contains a simple model of `Buf_read`, using a single string. It runs random operations on both the model and the real buffer and checks they always give the same result. *)
canonicalise.c
#include "fmpq.h" void _fmpq_canonicalise(fmpz_t num, fmpz_t den) { fmpz_t u; if (fmpz_is_one(den)) return; if (fmpz_is_zero(num)) { fmpz_one(den); return; } fmpz_init(u); fmpz_gcd(u, num, den); if (!fmpz_is_one(u)) { fmpz_divexact(num, num, u); fmpz_divexact(den, den, u); } fmpz_clear(u); if (fmpz_sgn(den) < 0) { fmpz_neg(num, num); fmpz_neg(den, den); } } void fmpq_canonicalise(fmpq_t res) { _fmpq_canonicalise(fmpq_numref(res), fmpq_denref(res)); }
/* Copyright (C) 2011 Fredrik Johansson This file is part of FLINT. FLINT is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. See <https://www.gnu.org/licenses/>. */
database.ml
open Lwt.Syntax

(* Smoke test: fetching the pool must not raise. *)
let check_pool _ () =
  let _ = Sihl_persistence.Database.fetch_pool () in
  Lwt.return ()
;;

let drop_table_request =
  Caqti_request.exec Caqti_type.unit "DROP TABLE IF EXISTS testing_user"
;;

let drop_table_if_exists connection =
  let module Connection = (val connection : Caqti_lwt.CONNECTION) in
  Connection.exec drop_table_request ()
  |> Lwt.map Sihl_persistence.Database.raise_error
;;

let create_table_request =
  Caqti_request.exec
    Caqti_type.unit
    {sql| CREATE TABLE IF NOT EXISTS testing_user ( username varchar(45) NOT NULL ) |sql}
;;

let create_table_if_not_exists connection =
  let module Connection = (val connection : Caqti_lwt.CONNECTION) in
  Connection.exec create_table_request ()
  |> Lwt.map Sihl_persistence.Database.raise_error
;;

let insert_username_request =
  Caqti_request.exec Caqti_type.string "INSERT INTO testing_user(username) VALUES (?)"
;;

let insert_username connection username =
  let module Connection = (val connection : Caqti_lwt.CONNECTION) in
  Connection.exec insert_username_request username
  |> Lwt.map Sihl_persistence.Database.raise_error
;;

let get_usernames_request =
  Caqti_request.collect Caqti_type.unit Caqti_type.string "SELECT username FROM testing_user"
;;

let get_usernames connection =
  let module Connection = (val connection : Caqti_lwt.CONNECTION) in
  Connection.collect_list get_usernames_request ()
  |> Lwt.map Sihl_persistence.Database.raise_error
;;

(* Insert then read back through a plain pooled query. *)
let query _ () =
  let* usernames =
    Sihl_persistence.Database.query (fun connection ->
      let* () = drop_table_if_exists connection in
      let* () = create_table_if_not_exists connection in
      let* () = insert_username connection "foobar pool" in
      get_usernames connection)
  in
  let username = List.hd usernames in
  Alcotest.(check string "has username" "foobar pool" username);
  Lwt.return ()
;;

(* Same round-trip, but insert/read happen inside a transaction. *)
let query_with_transaction _ () =
  let* usernames =
    Sihl_persistence.Database.query (fun connection ->
      let* () = drop_table_if_exists connection in
      let* () = create_table_if_not_exists connection in
      Sihl_persistence.Database.transaction (fun connection ->
        let* () = insert_username connection "foobar trx" in
        get_usernames connection))
  in
  let username = List.find (String.equal "foobar trx") usernames in
  Alcotest.(check string "has username" "foobar trx" username);
  Lwt.return ()
;;

(* A failure inside [transaction] must roll back the insert. *)
let transaction_rolls_back _ () =
  let* usernames =
    Sihl_persistence.Database.query (fun connection ->
      let* () = drop_table_if_exists connection in
      let* () = create_table_if_not_exists connection in
      let* () =
        Lwt.catch
          (fun () ->
            Sihl_persistence.Database.transaction (fun connection ->
              let* () = insert_username connection "foobar trx" in
              failwith "Oh no, something went wrong during the transaction!"))
          (fun _ -> Lwt.return ())
      in
      get_usernames connection)
  in
  let username = List.find_opt (String.equal "foobar trx") usernames in
  Alcotest.(check (option string) "has no username" None username);
  Lwt.return ()
;;

let invalid_request = Caqti_request.exec Caqti_type.unit "invalid query"

(* A query that always fails but swallows its error, for pool tests. *)
let failing_query connection =
  Lwt.catch
    (fun () ->
      let module Connection = (val connection : Caqti_lwt.CONNECTION) in
      Connection.exec invalid_request ()
      |> Lwt.map Sihl_persistence.Database.raise_error)
    (* eat the exception silently *)
    (fun _ -> Lwt.return ())
;;

(* 100 failing queries in a row: connections must be returned to the
   pool even on failure, otherwise this would hang or error. *)
let query_does_not_exhaust_pool _ () =
  let rec loop n =
    match n with
    | 0 -> Lwt.return ()
    | n ->
      let* () = Sihl_persistence.Database.query failing_query in
      loop (n - 1)
  in
  let* () = loop 100 in
  Alcotest.(check bool "doesn't exhaust pool" true true);
  Lwt.return ()
;;

(* Same, but each failing query runs inside a transaction. *)
let transaction_does_not_exhaust_pool _ () =
  let rec loop n =
    match n with
    | 0 -> Lwt.return ()
    | n ->
      let* () = Sihl_persistence.Database.transaction failing_query in
      loop (n - 1)
  in
  let* () = loop 100 in
  Alcotest.(check bool "doesn't exhaust pool" true true);
  Lwt.return ()
;;

let suite =
  Alcotest_lwt.
    [ ( "database"
      , [ test_case "fetch pool" `Quick check_pool
        ; test_case "query with pool" `Quick query
        ; test_case "query with transaction" `Quick query_with_transaction
        ; test_case "transaction rolls back" `Quick transaction_rolls_back
        ; test_case "failing function doesn't exhaust pool in query" `Quick query_does_not_exhaust_pool
        ; test_case "failing function doesn't exhaust pool in transaction" `Quick transaction_does_not_exhaust_pool
        ] )
    ]
;;

let service = Sihl_persistence.Database.register ()
GHDocString.h
#import <Foundation/Foundation.h> #import "GHStepArgument.h" #import "GHHasLocationProtocol.h" @interface GHDocString : GHStepArgument <GHHasLocationProtocol> @property (nonatomic, readonly) GHLocation * location; @property (nonatomic, readonly) NSString * contentType; @property (nonatomic, readonly) NSString * content; - (id)initWithLocation:(GHLocation *)theLocation contentType:(NSString *)theContentType content:(NSString *)theContent; @end
dune
(library (name tezos_tooling) (public_name tezos-tooling)) (install (package tezos-tooling) (section libexec) (files (lint.sh as lint.sh)))
dune
(tests (names test_generated test_big_streaming test_mu test_list_map test_seq_is_lazy test_destruct) (modes native js) (libraries json-data-encoding crowbar alcotest) (package json-data-encoding)) (rule (alias runtest_js) (action (run node %{dep:./test_generated.bc.js}))) (rule (alias runtest_js) (action (run node %{dep:./test_big_streaming.bc.js}))) (rule (alias runtest_js) (action (run node %{dep:./test_mu.bc.js}))) (rule (alias runtest_js) (action (run node %{dep:./test_list_map.bc.js}))) (rule (alias runtest_js) (action (run node %{dep:./test_seq_is_lazy.bc.js}))) (rule (alias runtest_js) (action (run node %{dep:./test_destruct.bc.js})))
base58.mli
type 'a encoding val simple_decode : 'a encoding -> string -> 'a option val simple_encode : 'a encoding -> 'a -> string type data = .. val register_encoding : prefix:string -> length:int -> to_raw:('a -> string) -> of_raw:(string -> 'a option) -> wrap:('a -> data) -> 'a encoding val check_encoded_prefix : 'a encoding -> string -> int -> unit val decode : string -> data option
(*****************************************************************************) (* *) (* Open Source License *) (* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. <contact@tezos.com> *) (* *) (* Permission is hereby granted, free of charge, to any person obtaining a *) (* copy of this software and associated documentation files (the "Software"),*) (* to deal in the Software without restriction, including without limitation *) (* the rights to use, copy, modify, merge, publish, distribute, sublicense, *) (* and/or sell copies of the Software, and to permit persons to whom the *) (* Software is furnished to do so, subject to the following conditions: *) (* *) (* The above copyright notice and this permission notice shall be included *) (* in all copies or substantial portions of the Software. *) (* *) (* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR*) (* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *) (* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *) (* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*) (* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *) (* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *) (* DEALINGS IN THE SOFTWARE. *) (* *) (*****************************************************************************)
vlib.ml
let foo _ = () let run () = Priv.run (); print_endline "implementation of foo"